Updated links, auto-update _dnslink entry for IPFS
parent c80240e7f8
commit 7cd1380d06
.github/github_install_ipfs.sh  (vendored, new executable file, 8 lines)

@@ -0,0 +1,8 @@
#!/usr/bin/env bash

set -euET -o pipefail

cd /tmp
wget https://dist.ipfs.tech/kubo/v0.19.1/kubo_v0.19.1_linux-amd64.tar.gz
tar -zxf kubo_v0.19.1_linux-amd64.tar.gz
PATH="/tmp/kubo:$PATH" ipfs init --profile=lowpower
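A quick sanity check after this install step could look as follows (a sketch, not part of the commit; `ipfs --version` and `ipfs config` are standard kubo subcommands, the expected output shown in the comments is an assumption):

PATH="/tmp/kubo:$PATH" ipfs --version              # e.g. "ipfs version 0.19.1"
PATH="/tmp/kubo:$PATH" ipfs config Addresses.API   # confirms `ipfs init` created a repo for this user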
.github/github_update_homepage.sh  (vendored, new executable file, 18 lines)

@@ -0,0 +1,18 @@
#!/usr/bin/env bash

set -euET -o pipefail

echo "Hashing repository contents with IPFS..."

h="$(result/www/ipfs-add.sh --pin=true)"

printf "The new homepage URL will be: https://%s.ipfs.dweb.link/\n" "$h"

# Update Homepage URL on GitHub
curl -L \
  -X PATCH \
  -H "Accept: application/vnd.github+json" \
  -H "Authorization: Bearer $API_TOKEN_FOR_UPDATE_HOMEPAGE" \
  -H "X-GitHub-Api-Version: 2022-11-28" \
  https://api.github.com/repos/ligolang/bounties \
  -d '{"name":"bounties", "homepage":"https://dweb.link/ipfs/'"$h"'"}' > /dev/null
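To verify that the PATCH call actually changed the repository metadata, the homepage field can be read back (a sketch, not part of the commit; assumes `jq` is available):

curl -sL -H "Accept: application/vnd.github+json" \
  https://api.github.com/repos/ligolang/bounties | jq -r .homepage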
.github/pin-using-ipfs.sh  (vendored, new executable file, 76 lines)

@@ -0,0 +1,76 @@
#!/usr/bin/env bash

set -euET -o pipefail

echo "Hashing repository contents with IPFS..."

h="$(result/www/ipfs-add.sh --pin=true)"

printf "Pinning ipfs://%s/\n" "$h"

echo 0 > ipfs-pin-global-exitcode

if test -n "${IPFS_REMOTE_API_ENDPOINT:-}" && test -n "${IPFS_REMOTE_TOKEN:-}" && test -n "${IPFS_SWARM_CONNECT_TO:-}"; then
  # Wait for IPFS daemon to be ready
  echo 'Starting IPFS daemon...'
  tail -F /tmp/ipfs-daemon.logs -n +1 & pid=$!
  ipfs daemon >/tmp/ipfs-daemon.logs 2>&1 &
  while ! grep 'Daemon is ready' /tmp/ipfs-daemon.logs; do sleep 1; date; done
  echo 'IPFS daemon started, killing log tail...'
  kill "$pid"
  echo 'log tail killed'

  printf %s\\n "$IPFS_SWARM_CONNECT_TO" | (i=1; while read multiaddr; do
    printf "Connecting to IPFS node %s...\n" "$i"
    (
      ipfs swarm connect "$multiaddr" &
    ) > /dev/null 2>&1
    i=$((i+1))
  done)
  sleep 10

  printf %s\\n "$IPFS_REMOTE_API_ENDPOINT" | (i=1; while read api_endpoint; do
    printf "Extracting token %s from environment...\n" "$i"
    token="$( (printf %s\\n "$IPFS_REMOTE_TOKEN" | tail -n +"$i" | head -n 1) 2>/dev/null )"
    #(printf %s "$token" | sha256sum | sha256sum | sha256sum) 2>/dev/null # for debugging without leaking the token
    # Pin this hash
    printf "Adding remote pinning service %s...\n" "$i"
    (
      ipfs pin remote service add my-remote-pin-"$i" "$api_endpoint" "$token"
    ) > /dev/null 2>&1

    printf "Pinning %s on the remote service %s...\n" "$h" "$i"
    (
      if ipfs pin remote add --service=my-remote-pin-"$i" --name="site-bounties-$(TZ=UTC git log -1 --format=%cd --date=iso-strict-local HEAD)-$GITHUB_SHA" "$h"; then
        echo $? > ipfs-pin-remote-add-exitcode
      else
        echo $? > ipfs-pin-remote-add-exitcode
      fi
    ) > /dev/null 2>&1
    printf "Finished pinning %s on the remote service %s, exitcode=%s\n" "$h" "$i" "$(cat ipfs-pin-remote-add-exitcode)"
    if test "$(cat ipfs-pin-remote-add-exitcode)" != 0; then
      echo 1 > ipfs-pin-global-exitcode
    fi
    i=$((i+1))
  done)
fi

# warm up cache, twice (a few files in the first attempt would likely fail as the DHT propagation is not instant)
for i in `seq 2`; do
  ipfs add --progress=false --ignore-rules-path "result/www/.ipfsignore" --pin=false --hidden -r result/www \
    | cut -d ' ' -f 3- \
    | sed -e 's~^www/*~~' \
    | while read f; do
        if (printf %s\\n "$IPFS_REMOTE_API_ENDPOINT" | grep pinata) >/dev/null 2>&1; then
          printf "Warming up pinata cache for %s (attempt %d)...\n" "$f" "$i"
          wget --tries=1 --timeout=10 -O- "https://gateway.pinata.cloud/ipfs/$h/$f" > /dev/null || true
        fi
        printf "Warming up Cloudflare cache for %s (attempt %d)...\n" "$f" "$i"
        wget --tries=1 --timeout=10 -O- "https://cloudflare-ipfs.com/ipfs/$h/$f" > /dev/null || true
        printf "Warming up dweb.link cache for %s (attempt %d)...\n" "$f" "$i"
        wget --tries=1 --timeout=10 -O- "https://$h.ipfs.dweb.link/$f" > /dev/null || true
      done
done

# Fail job if one of the pinning services didn't work
exit "$(cat ipfs-pin-global-exitcode)"
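The remote-pinning loop pairs endpoint i with token i purely by line position; a minimal sketch of that indexing, with placeholder values (the endpoint URLs and tokens below are illustrative, not values from the commit):

IPFS_REMOTE_API_ENDPOINT='https://api.pinata.cloud/psa
https://pinning.example.org/psa'
IPFS_REMOTE_TOKEN='token-for-service-1
token-for-service-2'
i=2
printf %s\\n "$IPFS_REMOTE_API_ENDPOINT" | tail -n +"$i" | head -n 1   # -> https://pinning.example.org/psa
printf %s\\n "$IPFS_REMOTE_TOKEN"        | tail -n +"$i" | head -n 1   # -> token-for-service-2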
.github/print-and-compare-ipfs.sh  (vendored, new executable file, 13 lines)

@@ -0,0 +1,13 @@
#!/usr/bin/env bash

set -euET -o pipefail

h1="ipfs://$(./result/www/ipfs-add.sh --pin=false)"
h2="ipfs://$(./ipfs-add.sh --pin=false)"
h3="$(cat result/ipfs.url)"

echo "$h1"
echo "$h2"
echo "$h3"

test "$h1" = "$h2" && test "$h2" = "$h3"
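The final chained `test` doubles as the script's exit status under `set -e`; an equivalent, more explicit failure report might look like this (a sketch, not part of the commit):

if test "$h1" != "$h2" || test "$h2" != "$h3"; then
  echo "IPFS hash mismatch between the Nix build output, the checkout, and result/ipfs.url" >&2
  exit 1
fi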
.github/update-ovh.py  (vendored, new executable file, 49 lines)

@@ -0,0 +1,49 @@
# -*- encoding: utf-8 -*-
'''
First, install the latest release of the Python wrapper: $ pip install ovh

To create an API token, visit:
OVH_DNS_DOMAIN=foobar.com
OVH_DNS_RECORD_ID=??????
x-www-browser https://www.ovh.com/auth/api/createToken?GET=/domain/zone/"$OVH_DNS_DOMAIN"/record/"$OVH_DNS_RECORD_ID"&PUT=/domain/zone/"$OVH_DNS_DOMAIN"/record/"$OVH_DNS_RECORD_ID"&POST=/domain/zone/"$OVH_DNS_DOMAIN"/refresh

This should create an API key with the following permissions.
To obtain the "$OVH_DNS_RECORD_ID" number in the first place, also allow the
last (commented) permission and uncomment the code a few lines below.

GET /domain/zone/"$OVH_DNS_DOMAIN"/record/"$OVH_DNS_RECORD_ID"
PUT /domain/zone/"$OVH_DNS_DOMAIN"/record/"$OVH_DNS_RECORD_ID"
POST /domain/zone/"$OVH_DNS_DOMAIN"/refresh
#GET /domain/zone/"$OVH_DNS_DOMAIN"/record
'''
import os
import json
import ovh

# Instantiate an OVH Client.
# You can generate new credentials with full access to your account on
# the token creation page.
client = ovh.Client(
    endpoint=os.environ['API_OVH_ENDPOINT'],
    application_key=os.environ['API_OVH_APPLICATION_KEY'],
    application_secret=os.environ['API_OVH_APPLICATION_SECRET'],
    consumer_key=os.environ['API_OVH_CONSUMER_KEY'],
)

# Uncomment to get the OVH_DNS_RECORD_ID number (needs GET /domain/zone/"$OVH_DNS_DOMAIN"/record allowed in the API token)
#result = client.get('/domain/zone/'+os.environ['OVH_DNS_DOMAIN']+'/record',
#                    fieldType='TXT',
#                    subDomain='_dnslink.git-tutorial',
#)
#print(json.dumps(result, indent=4))

if client.get('/domain/zone/'+os.environ['OVH_DNS_DOMAIN']+'/record/'+os.environ['OVH_DNS_RECORD_ID'])['subDomain'] == '_dnslink.git-tutorial':
    result = client.put('/domain/zone/'+os.environ['OVH_DNS_DOMAIN']+'/record/'+os.environ['OVH_DNS_RECORD_ID'],
                        subDomain='_dnslink.git-tutorial',
                        target='dnslink=/ipfs/bafybeigexuwmsjhnitngyacj5ja7nqigddyekkhcsz6ejntrgpwcwtusoy',
                        ttl=60,
    )
    print(json.dumps(result, indent=4))

result = client.post('/domain/zone/'+os.environ['OVH_DNS_DOMAIN']+'/refresh')
print(json.dumps(result, indent=4))
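The TXT record this script rewrites is what DNSLink-aware resolvers and gateways look up; one way to inspect the published value from the command line (a sketch using standard `dig`, not part of the commit):

dig +short TXT _dnslink.git-tutorial."$OVH_DNS_DOMAIN"
# expected form: "dnslink=/ipfs/<root CID of the published snapshot>"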
.github/workflows/upload-to-ipfs-and-update-dns.yml  (vendored, new file, 75 lines)

@@ -0,0 +1,75 @@
# Workflow for uploading the static site to IPFS and updating the _dnslink DNS entry
name: Upload to IPFS

on:
  # Runs on pushes targeting the default branch
  push:
    branches: ["gh-pages"]

  # Allows you to run this workflow manually from the Actions tab
  workflow_dispatch:

# Sets permissions of the GITHUB_TOKEN to allow deployment to GitHub Pages
permissions:
  contents: read
  pages: write
  id-token: write

# Allow only one concurrent deployment, skipping runs queued between the run in-progress and latest queued.
# However, do NOT cancel in-progress runs as we want to allow these production deployments to complete.
concurrency:
  group: "pages"
  cancel-in-progress: false

jobs:
  # Single deploy job since we're just deploying
  deploy:
    environment:
      name: github-pages
      url: ${{ steps.deployment.outputs.page_url }}
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v3
        with:
          submodules: 'recursive'
      - name: Install Nix
        uses: cachix/install-nix-action@v17
      - name: Build website
        run: nix build
      - name: Download IPFS
        run: ./.github/github_install_ipfs.sh
      - name: Print and compare IPFS hashes
        run: export PATH="/tmp/kubo:$PATH"; .github/print-and-compare-ipfs.sh
      - name: Upload to IPFS
        run: PATH="/tmp/kubo:$PATH" ./.github/pin-using-ipfs.sh
        continue-on-error: true
        env:
          IPFS_SWARM_CONNECT_TO: ${{ secrets.IPFS_SWARM_CONNECT_TO }}
          IPFS_REMOTE_API_ENDPOINT: ${{ secrets.IPFS_REMOTE_API_ENDPOINT }}
          IPFS_REMOTE_TOKEN: ${{ secrets.IPFS_REMOTE_TOKEN }}
      - name: Install OVH pip package
        run: pip install ovh
      - name: Update _dnslink DNS record via OVH
        run: python ./.github/update-ovh.py >/dev/null 2>&1
        env:
          API_OVH_APPLICATION_KEY: ${{ secrets.API_OVH_APPLICATION_KEY }}
          API_OVH_APPLICATION_SECRET: ${{ secrets.API_OVH_APPLICATION_SECRET }}
          API_OVH_CONSUMER_KEY: ${{ secrets.API_OVH_CONSUMER_KEY }}
          API_OVH_ENDPOINT: ${{ secrets.API_OVH_ENDPOINT }}
          OVH_DNS_DOMAIN: ${{ secrets.OVH_DNS_DOMAIN }}
          OVH_DNS_RECORD_ID: ${{ secrets.OVH_DNS_RECORD_ID }}
#      - name: Update homepage URL
#        run: PATH="/tmp/kubo:$PATH" ./.github/github_update_homepage.sh
#        env:
#          API_TOKEN_FOR_UPDATE_HOMEPAGE: ${{ secrets.API_TOKEN_FOR_UPDATE_HOMEPAGE }}
#      - name: Setup Pages
#        uses: actions/configure-pages@v3
#      - name: Upload artifact
#        uses: actions/upload-pages-artifact@v2
#        with:
#          # Upload entire repository
#          path: 'result/www/'
#      - name: Deploy to GitHub Pages
#        id: deployment
#        uses: actions/deploy-pages@v2
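Since the workflow also exposes `workflow_dispatch`, a deployment can be triggered without pushing to gh-pages; a sketch using the GitHub CLI (assumes `gh` is installed and authenticated against this repository; not part of the commit):

gh workflow run upload-to-ipfs-and-update-dns.yml --ref gh-pages
gh run list --workflow=upload-to-ipfs-and-update-dns.yml --limit 1   # find the id of the run just started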
README  (5 added lines)

@@ -5,3 +5,8 @@ Viz.js v2.1.2: https://github.com/mdaines/viz.js license MIT
FileSaver.js: https://github.com/eligrey/FileSaver.js license MIT
Blob.js: https://github.com/eligrey/Blob.js license MIT license
JSZip v1.8.2: https://github.com/Stuk/jszip/tree/v2.6.1 license (MIT OR GPLv3)

GitHub deployment environment variables:
* IPFS_REMOTE_API_ENDPOINT: one per line
* IPFS_REMOTE_TOKEN: one per line (same order as IPFS_REMOTE_API_ENDPOINT)
* IPFS_SWARM_CONNECT_TO: multiaddrs of peers to connect to, to help as intermediaries when connecting to the DHT & pinning services (can be the multiaddr of the pinning node itself, if known)
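These variables are repository Actions secrets; a sketch of setting the multi-line values with the GitHub CLI (assumes `gh` is authenticated against this repository; the file names are placeholders):

gh secret set IPFS_REMOTE_API_ENDPOINT < endpoints.txt   # one endpoint URL per line
gh secret set IPFS_REMOTE_TOKEN        < tokens.txt      # one token per line, same order
gh secret set IPFS_SWARM_CONNECT_TO    < multiaddrs.txt  # one multiaddr per line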

@@ -27,6 +27,8 @@ diff result/www/directory_hashes.js directory_hashes.js
diff result/www/favicon.ico favicon.ico
diff result/www/sitemap.html sitemap.html

./.github/print-and-compare-ipfs.sh

# Add to IPFS and get the hash
ipfs_hash="$(./result/www/ipfs-add.sh --pin=true)"
printf %s\\n "$ipfs_hash"

@@ -34,4 +36,8 @@ printf %s\\n "$ipfs_hash"
git tag "$1"
git tag "ipfs-$1-${ipfs_hash}"

git push origin HEAD:gh-pages
git push origin "$1"
git push origin "ipfs-$1-${ipfs_hash}"

ipfs name publish --key=git-tutorial "/ipfs/$ipfs_hash"
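After `ipfs name publish`, the IPNS entry should point at the fresh snapshot; a quick check (a sketch, not part of the commit; `ipfs key list -l` and `ipfs name resolve` are standard kubo commands, and resolution may take a while to propagate):

ipfs key list -l | grep git-tutorial                 # show the IPNS key id behind --key=git-tutorial
ipfs name resolve /ipns/git-tutorial.suzanne.soy     # should eventually resolve to /ipfs/$ipfs_hash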

@@ -1 +1 @@
jsonp_ipfs_directory_hashes({"vanity_text":"soy","vanity_number":20445,"tree":{"Links":[{"Name":".gitignore","Hash":"QmW9iMXzmPqLSnzL4p6DKsvsL3nC1xKS3teRB4SRdukfrz","Size":16},{"Name":".ipfsignore","Hash":"QmPpQN29FbeaNwGsXbebbv588UZtSLCGRffa3Zrz68RAMp","Size":22},{"Name":".nojekyll","Hash":"QmbFMke1KXqnYyBBWxB74N4c5SBnJMVAiMNRcGu6x1AwQH","Size":6},{"Name":"Blob.js","Hash":"QmSxKmtcBxBRkmkLGpnXAGrRc16kPrJx5Hmdsvt6LRWeSG","Size":21833},{"Name":"CNAME","Hash":"QmT3mZdxv3tQQGq9mwMmjnhXDaDAnkrcwmh2Hvy8gQhRyr","Size":32},{"Name":"FileSaver.js","Hash":"QmUgg2HLo4W9bpf92CkHH6WWVFfg2DmNqJrz2Z46L7VmUq","Size":7367},{"Name":"JSZip","Hash":"QmWW2hDPrMU5e5KgSAMiqfM2YW5RSiZzWiNJSQ7w63ngiL","Size":422094},{"Name":"README","Hash":"QmPVpTsg2DmVqnCWRVua3vggVAoYLKzZPPgGf5ZQzzVUwf","Size":464},{"Name":"Viz.js","Hash":"QmaxUCu1gnFwTTpDoTAPB3fMQQav1NJZrZ7LGqLXECidKj","Size":3564410},{"Name":"codemirror-5.60.0","Hash":"QmXPbArMAid8MbC5G7HCyWz2PUkfSMWZaUQpnq63x8Dw2y","Size":4669604},{"Name":"deploy.sh","Hash":"QmWr2dfiJX2LjpYnXpetkxmwjWP53eAv6RKNYPEptBUPkz","Size":1144},{"Name":"directory_hashes.js","Hash":"","Size":0},{"Name":"favicon.ico","Hash":"QmUq6pQamF58ZDNpPSvF3C2bcCWEJSjx3dFZZLjkSCrYpi","Size":32052},{"Name":"favicon.svg","Hash":"QmesnKGtStCZGpiTjoAcAETdSZgUUQ3wzekn1LSQMFtbgn","Size":3272},{"Name":"flake.lock","Hash":"QmdkX8PkV6j2sLH1JSPD1z4533rEGTa6JKSfsJcYAGSrvx","Size":1475},{"Name":"flake.nix","Hash":"QmRFLGF9aQ6zm67hLt4S1SKf6aRxxxK3sneiq79TtfA9YF","Size":1549},{"Name":"git-tutorial.css","Hash":"QmdsWg4RVZR3kRA7xFchoWLEQQzLpzVyspTtKwa9qttDMF","Size":10842},{"Name":"git-tutorial.js","Hash":"QmbcAMAuGyFumz4pHtKMnRY2VyRAr2tZoiYBLfNY3p2kCj","Size":47083},{"Name":"index.html","Hash":"QmUFRUWMnogkPtFgbPdffKQ2YVbXATZrwMqyuWF7J597uF","Size":117584},{"Name":"ipfs-add.sh","Hash":"QmXSLYLy13efSFVEN3Ej3A3vyimH618Vrt82hoBKeKYgDB","Size":473},{"Name":"micro_ipfs.js","Hash":"QmeWPj4vzN66eCUwQkjjzTgfciBLBzNjQQdvqEBL8x1pmh","Size":16738},{"Name":"pako","Hash":"QmRtJhu2rJCe59JPS9UiyAja5iUZNmJ8nyBijdZpLLEgG9","Size":178431},{"Name":"sha1.js","Hash":"QmP7HPPYQqwKXYyDrkDm9vKt8FZE1WsDUJG8cLnjFf4a11","Size":7966},{"Name":"sha256.js","Hash":"QmRhgx5Fq4JqfCgsPcMxNSYwt8M9WRBkec9omPWzJ7gdwL","Size":8553},{"Name":"sitemap.html","Hash":"Qmb7AvFhE73oxQWXYTJUWuqXCGkPD4FwMWLPv8BRrAgB9X","Size":70990}],"Data":"\b\u0001"}});
jsonp_ipfs_directory_hashes({"vanity_text":"soy","vanity_number":2035,"tree":{"Links":[{"Name":".github","Hash":"QmPVN8KzBJhCezPsFbNHfJ38LRJFyWhVwkpGQdWE6nSRh3","Size":9433},{"Name":".gitignore","Hash":"QmW9iMXzmPqLSnzL4p6DKsvsL3nC1xKS3teRB4SRdukfrz","Size":16},{"Name":".ipfsignore","Hash":"QmPpQN29FbeaNwGsXbebbv588UZtSLCGRffa3Zrz68RAMp","Size":22},{"Name":".nojekyll","Hash":"QmbFMke1KXqnYyBBWxB74N4c5SBnJMVAiMNRcGu6x1AwQH","Size":6},{"Name":"Blob.js","Hash":"QmSxKmtcBxBRkmkLGpnXAGrRc16kPrJx5Hmdsvt6LRWeSG","Size":21833},{"Name":"CNAME","Hash":"QmT3mZdxv3tQQGq9mwMmjnhXDaDAnkrcwmh2Hvy8gQhRyr","Size":32},{"Name":"FileSaver.js","Hash":"QmUgg2HLo4W9bpf92CkHH6WWVFfg2DmNqJrz2Z46L7VmUq","Size":7367},{"Name":"JSZip","Hash":"QmWW2hDPrMU5e5KgSAMiqfM2YW5RSiZzWiNJSQ7w63ngiL","Size":422094},{"Name":"README","Hash":"QmVPSdJVXbYuQSN5v3VXqZcYKJuuzvv4ZArg8S7u1MF85F","Size":810},{"Name":"Viz.js","Hash":"QmaxUCu1gnFwTTpDoTAPB3fMQQav1NJZrZ7LGqLXECidKj","Size":3564410},{"Name":"codemirror-5.60.0","Hash":"QmXPbArMAid8MbC5G7HCyWz2PUkfSMWZaUQpnq63x8Dw2y","Size":4669604},{"Name":"deploy.sh","Hash":"QmedckMFRS5rBkuv4DVqHxff11ouWZSitr2SNKvnYot56j","Size":1273},{"Name":"directory_hashes.js","Hash":"","Size":0},{"Name":"favicon.ico","Hash":"QmUq6pQamF58ZDNpPSvF3C2bcCWEJSjx3dFZZLjkSCrYpi","Size":32052},{"Name":"favicon.svg","Hash":"QmesnKGtStCZGpiTjoAcAETdSZgUUQ3wzekn1LSQMFtbgn","Size":3272},{"Name":"flake.lock","Hash":"QmdkX8PkV6j2sLH1JSPD1z4533rEGTa6JKSfsJcYAGSrvx","Size":1475},{"Name":"flake.nix","Hash":"Qmf28V3ScemKqCCq2x7uAYPSikHv9tYbWwo78MsxqvKaZf","Size":1556},{"Name":"git-tutorial.css","Hash":"QmdsWg4RVZR3kRA7xFchoWLEQQzLpzVyspTtKwa9qttDMF","Size":10842},{"Name":"git-tutorial.js","Hash":"QmTpny5DSeUzCULRtYH2YJSgLx57KuYPr2LRR7N2A2K4Qg","Size":47919},{"Name":"index.html","Hash":"QmSAqYiSkbR1xhZU6FLMUFTtfXBNXrbqtYb6hJ78Zq1ibB","Size":118368},{"Name":"ipfs-add.sh","Hash":"QmXSLYLy13efSFVEN3Ej3A3vyimH618Vrt82hoBKeKYgDB","Size":473},{"Name":"micro_ipfs.js","Hash":"QmeWPj4vzN66eCUwQkjjzTgfciBLBzNjQQdvqEBL8x1pmh","Size":16738},{"Name":"pako","Hash":"QmRtJhu2rJCe59JPS9UiyAja5iUZNmJ8nyBijdZpLLEgG9","Size":178431},{"Name":"sha1.js","Hash":"QmP7HPPYQqwKXYyDrkDm9vKt8FZE1WsDUJG8cLnjFf4a11","Size":7966},{"Name":"sha256.js","Hash":"QmRhgx5Fq4JqfCgsPcMxNSYwt8M9WRBkec9omPWzJ7gdwL","Size":8553},{"Name":"sitemap.html","Hash":"QmWgCZfJ7TkX4hpXj71Q1FnZ5ccZeKSik3oXSnXein4XW4","Size":71584}],"Data":"\b\u0001"}});

@@ -19,7 +19,7 @@
  cd "$out/www";
  echo '<!DOCTYPE html><html><head><title>Sitemap</title></head><body>'
  # TODO: honor .ipfsignore
  find | sed -e 's~.*~<a href="\0">\0</a>~'
  find | sort | sed -e 's~.*~<a href="\0">\0</a>~'
  echo '</body></html>'
) > "$out/www/sitemap.html"
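The one-line change in this hunk adds `sort`, so the sitemap entries come out in a stable alphabetical order instead of the filesystem's enumeration order, which should also keep the built site's IPFS hash reproducible. A tiny runnable illustration of the pipeline (the file names are placeholders):

printf '%s\n' ./sha1.js ./index.html | sort | sed -e 's~.*~<a href="\0">\0</a>~'
# -> <a href="./index.html">./index.html</a>
#    <a href="./sha1.js">./sha1.js</a>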
index.html  (11 changed lines)

@@ -5,11 +5,11 @@
<title>GIT tutorial</title>

<link rel="canonical" class="ipfs-permalink-href" href="#" />
<link rel="alternate" href="https://suzanne.soy/git-tutorial/" />
<link rel="alternate" href="https://git-tutorial.suzanne.soy/" />

<!-- These rel="duplicate" links use an HTML encoding of the rfc6249 Metalink/HTTP -->
<link rel="duplicate" class="ipfs-permalink-href" href="#" />
<link rel="duplicate" href="https://suzanne.soy/git-tutorial/" />
<link rel="duplicate" href="https://git-tutorial.suzanne.soy/" />

<!-- Third-party libraries: -->
<link rel="stylesheet" href="codemirror-5.60.0/lib/codemirror.css">

@@ -52,15 +52,16 @@ function ___example(id, f) {
<a href="#" class="permalink"><h1 itemprop="headline">Git tutorial: reimplementing part of GIT in JavaScript</h1></a>
<p class="article-metadata">By <a href="https://suzanne.soy/" itemprop="author" rel="author" itemscope="itemscope" itemtype="https://schema.org/Person">Suzanne Soy</a> for <a href="https://ligolang.org/" itemprop="copyrightHolder" itemscope="itemscope" itemtype="https://schema.org/Organization">LIGO</a>. <time itemprop="dateCreated datePublished" datetime="2021-06-29">02021-06-29</time>.</p>
<p>Please send remarks and suggestions to <a href="mailto:git-tutorial@suzanne.soy">git-tutorial@suzanne.soy</a> or simply fork <a href="https://github.com/jsmaniac/git-tutorial">this repository on GitHub</a>.</p>
<p>This version of the site matches the tag <a id="this-version" href="https://github.com/jsmaniac/git-tutorial/tree/v1.0.2">v1.0.2</a> on GitHub.
<p>This version of the site matches the tag <a id="this-version" href="https://github.com/jsmaniac/git-tutorial/tree/v1.1.0">v1.1.0</a> on GitHub.
Permalinks to snapshots of this site are available via IPFS:
<a class="ipfs-permalink-href" href="#" title="published on 02023-11-21">v1.0.2 (this version)<span class="while-computing-ipfs-permalink"> [computing URL…]</span></a>,
<a class="ipfs-permalink-href" href="#" title="published on 02023-11-21">v1.1.0 (this version)<span class="while-computing-ipfs-permalink"> [computing URL…]</span></a>,
<a href="ipfs://bafybeigexuwmsjhnitngyacj5ja7nqigddyekkhcsz6ejntrgpwcwtusoy/" title="">v1.0.2 (02023-11-21)</a>,
<a href="ipfs://bafybeie6rfdvlkl5raju4m4vqjrofwm3jl4gghjfm2xv2rljyahdl5nsoy/" title="">v1.0.1 (02023-11-21)</a>,
<a href="ipfs://bafybeiciboq5zritmgkvmzucvuokajozit7hfwhmappxwmrh2p5zdovdie/" title="published shortly after 02021-06-29">v1 (02021-06-29)</a>,
Alternatively check the
<a href="ipns://git-tutorial.suzanne.soy/">latest version via IPNS/IPFS</a>
or
<a href="https://suzanne.soy/git-tutorial/">latest via HTTPS</a>.
<a href="https://git-tutorial.suzanne.soy/">latest via HTTPS</a>.
See the <a href="#changelog">Changelog</a> section for errata, and the <a href="sitemap.html">sitemap</a> for a list of contents.</p>

<section id="credits-license">

sitemap.html  (1464 changed lines)
File diff suppressed because it is too large