Compare commits

...

6 Commits

Author SHA1 Message Date
Suzanne Soy
4aec99915b Fixes on the deployment process 2023-11-21 20:24:23 +00:00
Suzanne Soy
7cd1380d06 Updated links, auto-update _dnslink entry for IPFS 2023-11-21 18:56:59 +00:00
Suzanne Soy
c80240e7f8 Fixed scrolling issue 2023-11-21 17:56:32 +00:00
Suzanne Soy
10a2e6a01d Merge commit '1ca76ee10d' into HEAD 2023-11-21 14:10:04 +00:00
Suzanne Soy
1ca76ee10d Fixed bug: entries in trees are sorted alphabetically, and subtrees don't come before blobs 2021-07-08 23:15:14 +01:00
Suzanne Soy
c066e50151 Escape strings in the graph description 2021-07-08 23:14:32 +01:00
16 changed files with 1104 additions and 773 deletions

15
.github/files-to-cache.lst vendored Normal file
View File

@ -0,0 +1,15 @@
codemirror-5.60.0/lib/codemirror.css
codemirror-5.60.0/lib/codemirror.js
codemirror-5.60.0/mode/javascript/javascript.js
sha1.js/sha1.js
pako/pako.min.js
Viz.js/viz.js
FileSaver.js/FileSaver.js
Blob.js/Blob.js
JSZip/jszip.min.js
git-tutorial.css
git-tutorial.js
sha256.js
micro_ipfs.js
directory_hashes.js
favicon.ico

8
.github/github_install_ipfs.sh vendored Executable file
View File

@ -0,0 +1,8 @@
#!/usr/bin/env bash
# Install kubo (the Go IPFS implementation) into /tmp/kubo for later CI steps.
# Callers are expected to add /tmp/kubo to PATH themselves.
set -euET -o pipefail
cd /tmp
# Download and unpack the pinned kubo release (provides the /tmp/kubo/ipfs binary).
wget https://dist.ipfs.tech/kubo/v0.19.1/kubo_v0.19.1_linux-amd64.tar.gz
tar -zxf kubo_v0.19.1_linux-amd64.tar.gz
# Initialise the local IPFS repository; the lowpower profile reduces
# resource usage, which suits a short-lived CI runner.
PATH="/tmp/kubo:$PATH" ipfs init --profile=lowpower

18
.github/github_update_homepage.sh vendored Executable file
View File

@ -0,0 +1,18 @@
#!/usr/bin/env bash
# Update the repository's "homepage" field on GitHub so it points at the
# latest IPFS snapshot of the built website.
# Requires: result/www/ipfs-add.sh (produced by `nix build`) and the
# API_TOKEN_FOR_UPDATE_HOMEPAGE environment variable (GitHub API token).
set -euET -o pipefail
echo "Hashing repository contents with IPFS..."
h="$(result/www/ipfs-add.sh --pin=true)"
printf "The new homepage URL will be: https://%s.ipfs.dweb.link/\n" "$h"
# Update Homepage URL on GitHub.
# Fix: a space was missing between the Authorization header's closing quote
# and the line-continuation backslash (it happened to parse, but was fragile).
curl -L \
  -X PATCH \
  -H "Accept: application/vnd.github+json" \
  -H "Authorization: Bearer $API_TOKEN_FOR_UPDATE_HOMEPAGE" \
  -H "X-GitHub-Api-Version: 2022-11-28" \
  https://api.github.com/repos/ligolang/bounties \
  -d '{"name":"bounties", "homepage":"https://dweb.link/ipfs/'"$h"'"}' > /dev/null

59
.github/pin-using-ipfs.sh vendored Executable file
View File

@ -0,0 +1,59 @@
#!/usr/bin/env bash
# Pin the freshly built website on one or more remote IPFS pinning services.
# Environment (the whole pinning phase is skipped unless all three are set):
#   IPFS_REMOTE_API_ENDPOINT - one pinning-service endpoint per line
#   IPFS_REMOTE_TOKEN        - one token per line (same order as endpoints)
#   IPFS_SWARM_CONNECT_TO    - one multiaddr per line to connect to first
set -euET -o pipefail
echo "Hashing repository contents with IPFS..."
h="$(result/www/ipfs-add.sh --pin=true)"
printf "Pinning ipfs://%s/\n" "$h"
# Overall success flag, kept in a file so it survives the subshells below.
echo 0 > ipfs-pin-global-exitcode
if test -n "${IPFS_REMOTE_API_ENDPOINT:-}" && test -n "${IPFS_REMOTE_TOKEN:-}" && test -n "${IPFS_SWARM_CONNECT_TO:-}"; then
# Wait for IPFS daemon to be ready
echo 'Starting IPFS daemon...'
# Stream the daemon log to CI output while we wait for startup, then stop the tail.
tail -F /tmp/ipfs-daemon.logs -n +1 & pid=$!
ipfs daemon >/tmp/ipfs-daemon.logs 2>&1 &
while ! grep 'Daemon is ready' /tmp/ipfs-daemon.logs; do sleep 1; date; done
echo 'IPFS daemon started, killing log tail...'
kill "$pid"
echo 'log tail killed'
# Best-effort background connection to each configured peer, to improve DHT reachability.
printf %s\\n "$IPFS_SWARM_CONNECT_TO" | (i=1; while read multiaddr; do
printf "Connecting to IPFS node %s...\n" "$i"
(
ipfs swarm connect "$multiaddr" &
) > /dev/null 2>&1
i=$((i+1))
done)
# Give the swarm connections some time to establish before pinning.
sleep 10
# For each endpoint (line i), use the token on the matching line i and pin the hash there.
printf %s\\n "$IPFS_REMOTE_API_ENDPOINT" | (i=1; while read api_endpoint; do
printf "Extracting token %s from environment...\n" "$i"
# stderr is silenced so a failure cannot leak the token into CI logs.
token="$( (printf %s\\n "$IPFS_REMOTE_TOKEN" | tail -n +"$i" | head -n 1) 2>/dev/null )"
#(printf %s "$token" | sha256sum | sha256sum | sha256sum) 2>/dev/null # for debugging without leaking the token
# Pin this hash
printf "Adding remote pinning service %s...\n" "$i"
(
ipfs pin remote service add my-remote-pin-"$i" "$api_endpoint" "$token"
) > /dev/null 2>&1
printf "Pinning %s on the remote service %s...\n" "$h" "$i"
(
# The exit code is written to a file: the surrounding subshell discards all
# output and we must not let `set -e` abort on a single failed service.
if ipfs pin remote add --service=my-remote-pin-"$i" --name="site-bounties-$(TZ=UTC git log -1 --format=%cd --date=iso-strict-local HEAD)-$GITHUB_SHA" "$h"; then
echo $? > ipfs-pin-remote-add-exitcode
else
echo $? > ipfs-pin-remote-add-exitcode
fi
) > /dev/null 2>&1
printf "Finished pinning %s on the remote service %s, exitcode=%s\n" "$h" "$i" "$(cat ipfs-pin-remote-add-exitcode)"
if test "$(cat ipfs-pin-remote-add-exitcode)" != 0; then
echo 1 > ipfs-pin-global-exitcode
fi
i=$((i+1))
done)
fi
# Fail job if one of the pinning services didn't work
exit "$(cat ipfs-pin-global-exitcode)"

13
.github/print-and-compare-ipfs.sh vendored Executable file
View File

@ -0,0 +1,13 @@
#!/usr/bin/env bash
# Compute the site's IPFS hash three independent ways and fail unless all agree:
# from the nix build output, from the checked-out tree, and from the recorded URL.
set -euET -o pipefail
hash_from_build="ipfs://$(./result/www/ipfs-add.sh --pin=false)"
hash_from_checkout="ipfs://$(./ipfs-add.sh --pin=false)"
hash_recorded="$(cat result/ipfs.url)"
printf '%s\n' "$hash_from_build" "$hash_from_checkout" "$hash_recorded"
test "$hash_from_build" = "$hash_from_checkout" && test "$hash_from_checkout" = "$hash_recorded"

49
.github/update-ovh.py vendored Executable file
View File

@ -0,0 +1,49 @@
# -*- encoding: utf-8 -*-
'''Update the ``_dnslink.git-tutorial`` TXT record on OVH DNS.

First, install the latest release of the Python wrapper: $ pip install ovh
To create an API token, visit:
OVH_DNS_DOMAIN=foobar.com
OVH_DNS_RECORD_ID=??????
x-www-browser https://www.ovh.com/auth/api/createToken?GET=/domain/zone/"$OVH_DNS_DOMAIN"/record/"$OVH_DNS_RECORD_ID"&PUT=/domain/zone/"$OVH_DNS_DOMAIN"/record/"$OVH_DNS_RECORD_ID"&POST=/domain/zone/"$OVH_DNS_DOMAIN"/refresh
This should create an API key allowed to perform the following calls.
Add the last (commented) one and uncomment the code a few lines
below to be able to obtain the "$OVH_DNS_RECORD_ID" number.
GET /domain/zone/"$OVH_DNS_DOMAIN"/record/"$OVH_DNS_RECORD_ID"
PUT /domain/zone/"$OVH_DNS_DOMAIN"/record/"$OVH_DNS_RECORD_ID"
POST /domain/zone/"$OVH_DNS_DOMAIN"/refresh
#GET /domain/zone/"$OVH_DNS_DOMAIN"/record
'''
import os
import json

import ovh

# Instantiate an OVH client; all credentials come from the CI environment.
client = ovh.Client(
    endpoint=os.environ['API_OVH_ENDPOINT'],
    application_key=os.environ['API_OVH_APPLICATION_KEY'],
    application_secret=os.environ['API_OVH_APPLICATION_SECRET'],
    consumer_key=os.environ['API_OVH_CONSUMER_KEY'],
)

# Uncomment to get the OVH_DNS_RECORD_ID number (needs GET /domain/zone/"$OVH_DNS_DOMAIN"/record allowed in the API token)
#result = client.get('/domain/zone/'+os.environ['OVH_DNS_DOMAIN']+'/record',
#    fieldType='TXT',
#    subDomain='_dnslink.git-tutorial',
#)
#print(json.dumps(result, indent=4))

# Safety check: only touch the record if it really is the expected _dnslink
# entry, so a misconfigured OVH_DNS_RECORD_ID cannot clobber another record.
if client.get('/domain/zone/' + os.environ['OVH_DNS_DOMAIN'] + '/record/' + os.environ['OVH_DNS_RECORD_ID'])['subDomain'] == '_dnslink.git-tutorial':
    # NOTE(review): the dnslink target is a hardcoded CID — presumably it
    # should track each new build's hash; confirm how this value is refreshed
    # by the deployment process.
    result = client.put(
        '/domain/zone/' + os.environ['OVH_DNS_DOMAIN'] + '/record/' + os.environ['OVH_DNS_RECORD_ID'],
        subDomain='_dnslink.git-tutorial',
        target='dnslink=/ipfs/bafybeigexuwmsjhnitngyacj5ja7nqigddyekkhcsz6ejntrgpwcwtusoy',
        ttl=60,
    )
    print(json.dumps(result, indent=4))
    # Ask OVH to apply the pending zone changes.
    result = client.post('/domain/zone/' + os.environ['OVH_DNS_DOMAIN'] + '/refresh')
    print(json.dumps(result, indent=4))

30
.github/warm-up-gateway-caches.sh vendored Executable file
View File

@ -0,0 +1,30 @@
#!/usr/bin/env bash
# Warm up public IPFS gateway caches for the freshly built site, so the first
# real visitors get cache hits and the content is replicated beyond our node.
set -euET -o pipefail
echo "Warm up cache on a couple of IPFS gateways"
h="$(result/www/ipfs-add.sh --pin=true)"
# Earlier recursive-wget approach, kept for reference:
#wget --reject-regex ".*\?.*" -r -np --timeout=2 --tries=1 "https://cloudflare-ipfs.com/ipfs/$h" 2>&1 | grep '^--' & pid_cloudflare="$!"
#wget --reject-regex ".*\?.*" -r -np --timeout=2 --tries=1 "https://$h.ipfs.dweb.link/" 2>&1 | grep '^--' & pid_dweb="$!"
#wait "$pid_cloudflare" || true
#wait "$pid_dweb" || true
# Download the files, twice (a few files in the first attempt would likely fail as the DHT propagation is not instantaneous?)
for i in $(seq 2); do  # $(...) instead of deprecated backtick substitution
  # Alternative: derive the file list from `ipfs add` output instead of the static list:
  #ipfs add --progress=false --ignore-rules-path "result/www/.ipfsignore" --pin=false --hidden -r result/www \
  #| cut -d ' ' -f 3- \
  #| sed -e 's~^www/*~~' \
  cat .github/files-to-cache.lst \
  | while read -r f; do  # -r: keep any backslashes in filenames intact
    # Pinata is only warmed when it is one of the configured pinning services.
    if (printf %s\\n "$IPFS_REMOTE_API_ENDPOINT" | grep pinata) >/dev/null 2>&1; then
      printf "Warming up pinata cache for %s (attempt %d)...\n" "$f" "$i"
      wget --tries=1 --timeout=10 -O- "https://gateway.pinata.cloud/ipfs/$h/$f" > /dev/null || true
    fi
    printf "Warming up Cloudflare cache for %s (attempt %d)...\n" "$f" "$i"
    wget --tries=1 --timeout=10 -O- "https://cloudflare-ipfs.com/ipfs/$h/$f" > /dev/null || true
    printf "Warming up dweb.link cache for %s (attempt %d)...\n" "$f" "$i"
    wget --tries=1 --timeout=10 -O- "https://$h.ipfs.dweb.link/$f" > /dev/null || true
  done
done

View File

@ -0,0 +1,80 @@
# Simple workflow for deploying static content to GitHub Pages
name: Upload to IPFS

on:
  # Runs on pushes targeting the default branch
  push:
    branches: ["gh-pages"]
  # Allows you to run this workflow manually from the Actions tab
  workflow_dispatch:

# Sets permissions of the GITHUB_TOKEN to allow deployment to GitHub Pages
permissions:
  contents: read
  pages: write
  id-token: write

# Allow only one concurrent deployment, skipping runs queued between the run in-progress and latest queued.
# However, do NOT cancel in-progress runs as we want to allow these production deployments to complete.
concurrency:
  group: "pages"
  cancel-in-progress: false

jobs:
  # Single deploy job since we're just deploying
  deploy:
    environment:
      name: github-pages
      url: ${{ steps.deployment.outputs.page_url }}
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v3
        with:
          submodules: 'recursive'
      - name: Install Nix
        uses: cachix/install-nix-action@v17
      - name: Build website
        run: nix build
      - name: Download IPFS
        run: ./.github/github_install_ipfs.sh
      # Typo fix: "hahes" -> "hashes"
      - name: Print and compare IPFS hashes
        run: export PATH="/tmp/kubo:$PATH"; .github/print-and-compare-ipfs.sh
      - name: Make tarball of website
        run: mkdir -p "$RUNNER_TEMP" && tar --directory result/www/ -cvf "$RUNNER_TEMP/artifact.tar" .
      - name: Upload artifact
        uses: actions/upload-artifact@v3
        with:
          name: github-pages
          path: ${{ runner.temp }}/artifact.tar
          if-no-files-found: error
      - name: Deploy to GitHub Pages
        id: deployment
        uses: actions/deploy-pages@v2
      - name: Upload to IPFS
        run: PATH="/tmp/kubo:$PATH" ./.github/pin-using-ipfs.sh
        # A pinning-service outage should not fail the whole deployment.
        continue-on-error: true
        env:
          IPFS_SWARM_CONNECT_TO: ${{ secrets.IPFS_SWARM_CONNECT_TO }}
          IPFS_REMOTE_API_ENDPOINT: ${{ secrets.IPFS_REMOTE_API_ENDPOINT }}
          IPFS_REMOTE_TOKEN: ${{ secrets.IPFS_REMOTE_TOKEN }}
      - name: Install OVH pip package
        run: pip install ovh
      - name: Update OVH _dnslink
        # Output is discarded so API responses cannot leak into public CI logs.
        run: python ./.github/update-ovh.py >/dev/null 2>&1
        env:
          API_OVH_APPLICATION_KEY: ${{ secrets.API_OVH_APPLICATION_KEY }}
          API_OVH_APPLICATION_SECRET: ${{ secrets.API_OVH_APPLICATION_SECRET }}
          API_OVH_CONSUMER_KEY: ${{ secrets.API_OVH_CONSUMER_KEY }}
          API_OVH_ENDPOINT: ${{ secrets.API_OVH_ENDPOINT }}
          OVH_DNS_DOMAIN: ${{ secrets.OVH_DNS_DOMAIN }}
          OVH_DNS_RECORD_ID: ${{ secrets.OVH_DNS_RECORD_ID }}
      - name: Warm up IPFS gateway caches
        run: PATH="/tmp/kubo:$PATH" ./.github/warm-up-gateway-caches.sh
#      - name: Update homepage URL
#        run: PATH="/tmp/kubo:$PATH" ./.github/github_update_homepage.sh
#        env:
#          API_TOKEN_FOR_UPDATE_HOMEPAGE: ${{ secrets.API_TOKEN_FOR_UPDATE_HOMEPAGE }}
#      - name: Setup Pages
#        uses: actions/configure-pages@v3

7
README
View File

@ -4,4 +4,9 @@ pako 2.0.3: https://github.com/nodeca/pako license (MIT AND Zlib)
Viz.js v2.1.2: https://github.com/mdaines/viz.js license MIT
FileSaver.js: https://github.com/eligrey/FileSaver.js license MIT
Blob.js: https://github.com/eligrey/Blob.js license MIT license
JSZip v1.8.2: https://github.com/Stuk/jszip/tree/v2.6.1 license (MIT OR GPLv3)
JSZip v1.8.2: https://github.com/Stuk/jszip/tree/v2.6.1 license (MIT OR GPLv3)
GitHub deployment environment variables:
* IPFS_REMOTE_API_ENDPOINT: one per line
* IPFS_REMOTE_TOKEN: one per line (same order as IPFS_REMOTE_API_ENDPOINT)
* IPFS_SWARM_CONNECT_TO: multiaddr of peers to connect to, to help as intermediaries when connecting to the DHT & pinning services (can be multiaddr of the pinning node itself if known)

15
build-and-update.sh Executable file
View File

@ -0,0 +1,15 @@
#!/usr/bin/env bash
# Build the website with nix, copy the generated artifacts back into the
# source tree, and verify the build reaches a fixpoint (a rebuild after
# committing the generated files reproduces them byte-for-byte).
set -euET -o pipefail
nix build
# Copy generated outputs that are also checked into the repository.
cp result/www/directory_hashes.js directory_hashes.js
cp result/www/favicon.ico favicon.ico
cp result/www/sitemap.html sitemap.html
# NOTE(review): this amends HEAD in place — assumes it runs on a local,
# not-yet-pushed commit; confirm it is never invoked on published history.
if test -n "$(git status --short)"; then git commit -a --amend; fi
nix build
# diff exits non-zero (and -e aborts) if the second build produced different files.
diff result/www/directory_hashes.js directory_hashes.js
diff result/www/favicon.ico favicon.ico
diff result/www/sitemap.html sitemap.html
./.github/print-and-compare-ipfs.sh

View File

@ -17,15 +17,7 @@ if ! grep '<a id="this-version" href="https://github.com/jsmaniac/git-tutorial/t
exit 1
fi
nix build
cp result/www/directory_hashes.js directory_hashes.js
cp result/www/favicon.ico favicon.ico
cp result/www/sitemap.html sitemap.html
if test -n "$(git status --short)"; then git commit -a --amend; fi
nix build
diff result/www/directory_hashes.js directory_hashes.js
diff result/www/favicon.ico favicon.ico
diff result/www/sitemap.html sitemap.html
./build-and-update.sh
# Add to IPFS and get the hash
ipfs_hash="$(./result/www/ipfs-add.sh --pin=true)"
@ -34,4 +26,8 @@ printf %s\\n "$ipfs_hash"
git tag "$1"
git tag "ipfs-$1-${ipfs_hash}"
ipfs name publish --key=git-tutorial "/ipfs/$ipfs_hash"
git push origin HEAD:gh-pages
git push origin "$1"
git push origin "ipfs-$1-${ipfs_hash}"
ipfs name publish --key=git-tutorial "/ipfs/$ipfs_hash"

View File

@ -1 +1 @@
jsonp_ipfs_directory_hashes({"vanity_text":"soy","vanity_number":20445,"tree":{"Links":[{"Name":".gitignore","Hash":"QmW9iMXzmPqLSnzL4p6DKsvsL3nC1xKS3teRB4SRdukfrz","Size":16},{"Name":".ipfsignore","Hash":"QmPpQN29FbeaNwGsXbebbv588UZtSLCGRffa3Zrz68RAMp","Size":22},{"Name":".nojekyll","Hash":"QmbFMke1KXqnYyBBWxB74N4c5SBnJMVAiMNRcGu6x1AwQH","Size":6},{"Name":"Blob.js","Hash":"QmSxKmtcBxBRkmkLGpnXAGrRc16kPrJx5Hmdsvt6LRWeSG","Size":21833},{"Name":"CNAME","Hash":"QmT3mZdxv3tQQGq9mwMmjnhXDaDAnkrcwmh2Hvy8gQhRyr","Size":32},{"Name":"FileSaver.js","Hash":"QmUgg2HLo4W9bpf92CkHH6WWVFfg2DmNqJrz2Z46L7VmUq","Size":7367},{"Name":"JSZip","Hash":"QmWW2hDPrMU5e5KgSAMiqfM2YW5RSiZzWiNJSQ7w63ngiL","Size":422094},{"Name":"README","Hash":"QmPVpTsg2DmVqnCWRVua3vggVAoYLKzZPPgGf5ZQzzVUwf","Size":464},{"Name":"Viz.js","Hash":"QmaxUCu1gnFwTTpDoTAPB3fMQQav1NJZrZ7LGqLXECidKj","Size":3564410},{"Name":"codemirror-5.60.0","Hash":"QmXPbArMAid8MbC5G7HCyWz2PUkfSMWZaUQpnq63x8Dw2y","Size":4669604},{"Name":"deploy.sh","Hash":"QmWr2dfiJX2LjpYnXpetkxmwjWP53eAv6RKNYPEptBUPkz","Size":1144},{"Name":"directory_hashes.js","Hash":"","Size":0},{"Name":"favicon.ico","Hash":"QmUq6pQamF58ZDNpPSvF3C2bcCWEJSjx3dFZZLjkSCrYpi","Size":32052},{"Name":"favicon.svg","Hash":"QmesnKGtStCZGpiTjoAcAETdSZgUUQ3wzekn1LSQMFtbgn","Size":3272},{"Name":"flake.lock","Hash":"QmdkX8PkV6j2sLH1JSPD1z4533rEGTa6JKSfsJcYAGSrvx","Size":1475},{"Name":"flake.nix","Hash":"QmRFLGF9aQ6zm67hLt4S1SKf6aRxxxK3sneiq79TtfA9YF","Size":1549},{"Name":"git-tutorial.css","Hash":"QmdsWg4RVZR3kRA7xFchoWLEQQzLpzVyspTtKwa9qttDMF","Size":10842},{"Name":"git-tutorial.js","Hash":"QmbcAMAuGyFumz4pHtKMnRY2VyRAr2tZoiYBLfNY3p2kCj","Size":47083},{"Name":"index.html","Hash":"QmUFRUWMnogkPtFgbPdffKQ2YVbXATZrwMqyuWF7J597uF","Size":117584},{"Name":"ipfs-add.sh","Hash":"QmXSLYLy13efSFVEN3Ej3A3vyimH618Vrt82hoBKeKYgDB","Size":473},{"Name":"micro_ipfs.js","Hash":"QmeWPj4vzN66eCUwQkjjzTgfciBLBzNjQQdvqEBL8x1pmh","Size":16738},{"Name":"pako","Hash":"QmRtJhu2rJCe59JPS9UiyAja5iUZNmJ8
nyBijdZpLLEgG9","Size":178431},{"Name":"sha1.js","Hash":"QmP7HPPYQqwKXYyDrkDm9vKt8FZE1WsDUJG8cLnjFf4a11","Size":7966},{"Name":"sha256.js","Hash":"QmRhgx5Fq4JqfCgsPcMxNSYwt8M9WRBkec9omPWzJ7gdwL","Size":8553},{"Name":"sitemap.html","Hash":"Qmb7AvFhE73oxQWXYTJUWuqXCGkPD4FwMWLPv8BRrAgB9X","Size":70990}],"Data":"\b\u0001"}});
jsonp_ipfs_directory_hashes({"vanity_text":"soy","vanity_number":862,"tree":{"Links":[{"Name":".github","Hash":"QmVQymgJYtsWZiVSaxdXxCrovGsdsxTPNWW9K2LXmNWmrk","Size":10687},{"Name":".gitignore","Hash":"QmW9iMXzmPqLSnzL4p6DKsvsL3nC1xKS3teRB4SRdukfrz","Size":16},{"Name":".ipfsignore","Hash":"QmPpQN29FbeaNwGsXbebbv588UZtSLCGRffa3Zrz68RAMp","Size":22},{"Name":".nojekyll","Hash":"QmbFMke1KXqnYyBBWxB74N4c5SBnJMVAiMNRcGu6x1AwQH","Size":6},{"Name":"Blob.js","Hash":"QmSxKmtcBxBRkmkLGpnXAGrRc16kPrJx5Hmdsvt6LRWeSG","Size":21833},{"Name":"CNAME","Hash":"QmT3mZdxv3tQQGq9mwMmjnhXDaDAnkrcwmh2Hvy8gQhRyr","Size":32},{"Name":"FileSaver.js","Hash":"QmUgg2HLo4W9bpf92CkHH6WWVFfg2DmNqJrz2Z46L7VmUq","Size":7367},{"Name":"JSZip","Hash":"QmWW2hDPrMU5e5KgSAMiqfM2YW5RSiZzWiNJSQ7w63ngiL","Size":422094},{"Name":"README","Hash":"QmVPSdJVXbYuQSN5v3VXqZcYKJuuzvv4ZArg8S7u1MF85F","Size":810},{"Name":"Viz.js","Hash":"QmaxUCu1gnFwTTpDoTAPB3fMQQav1NJZrZ7LGqLXECidKj","Size":3564410},{"Name":"build-and-update.sh","Hash":"QmRypwTrHQq6Ftrb3ygJZ2E4haqqvNZxxZrm4sVnbJuaHH","Size":448},{"Name":"codemirror-5.60.0","Hash":"QmXPbArMAid8MbC5G7HCyWz2PUkfSMWZaUQpnq63x8Dw2y","Size":4669604},{"Name":"deploy.sh","Hash":"QmdgLSVN4UedbKuyAbsCQhiHjxdWpyxMqjdW4cDsvfzDNF","Size":901},{"Name":"directory_hashes.js","Hash":"","Size":0},{"Name":"favicon.ico","Hash":"QmUq6pQamF58ZDNpPSvF3C2bcCWEJSjx3dFZZLjkSCrYpi","Size":32052},{"Name":"favicon.svg","Hash":"QmesnKGtStCZGpiTjoAcAETdSZgUUQ3wzekn1LSQMFtbgn","Size":3272},{"Name":"flake.lock","Hash":"QmdkX8PkV6j2sLH1JSPD1z4533rEGTa6JKSfsJcYAGSrvx","Size":1475},{"Name":"flake.nix","Hash":"Qmaa8dpTGb7T2KJLiPxjR4HrmGmffDWHrCn2G5FmhjhZXb","Size":1642},{"Name":"git-tutorial.css","Hash":"QmdsWg4RVZR3kRA7xFchoWLEQQzLpzVyspTtKwa9qttDMF","Size":10842},{"Name":"git-tutorial.js","Hash":"QmTpny5DSeUzCULRtYH2YJSgLx57KuYPr2LRR7N2A2K4Qg","Size":47919},{"Name":"index.html","Hash":"QmSAqYiSkbR1xhZU6FLMUFTtfXBNXrbqtYb6hJ78Zq1ibB","Size":118368},{"Name":"ipfs-add.sh","Hash":"QmXSLYLy13efSFVEN3Ej3A3vyi
mH618Vrt82hoBKeKYgDB","Size":473},{"Name":"micro_ipfs.js","Hash":"QmeWPj4vzN66eCUwQkjjzTgfciBLBzNjQQdvqEBL8x1pmh","Size":16738},{"Name":"pako","Hash":"QmRtJhu2rJCe59JPS9UiyAja5iUZNmJ8nyBijdZpLLEgG9","Size":178431},{"Name":"sha1.js","Hash":"QmP7HPPYQqwKXYyDrkDm9vKt8FZE1WsDUJG8cLnjFf4a11","Size":7966},{"Name":"sha256.js","Hash":"QmRhgx5Fq4JqfCgsPcMxNSYwt8M9WRBkec9omPWzJ7gdwL","Size":8553},{"Name":"sitemap.html","Hash":"QmdxctnkjqpH4JrwhnxA8NHuGK3QqUTe2nnGXuWKc6pM3k","Size":71800}],"Data":"\b\u0001"}});

View File

@ -10,6 +10,8 @@
src = self;
buildInputs = with pkgs; [kubo jq nodejs-slim imagemagick];
buildPhase = ''
# TODO: remove files ignored by .ipfsignore during build process
convert -background none favicon.svg -define icon:auto-resize=64,48,32,16 favicon.ico
mkdir "$out"
@ -19,7 +21,7 @@
cd "$out/www";
echo '<!DOCTYPE html><html><head><title>Sitemap</title></head><body>'
# TODO: honor .ipfsignore
find | sed -e 's~.*~<a href="\0">\0</a>~'
find | sort | sed -e 's~.*~<a href="\0">\0</a>~'
echo '</body></html>'
) > "$out/www/sitemap.html"

View File

@ -73,7 +73,7 @@ function ___to_hex_for_printf(str) {
return '<span style="display: block;">' + hex + '</span>';
}
function ___specialchars_and_colour(s) {
return s.replace(/[^-a-zA-Z0-9+_/!%$@.()':]/g, function (c) {
return s.replace(/[^-a-zA-Z0-9+_/!%$@.()':^]/g, function (c) {
switch (c) {
case " ": return '<span class="space"> </span>';
case "\\": return '<span class="specialchar">\\\\</span>';
@ -733,6 +733,23 @@ var ___script_log_header = '' +
/*+*/ '})(window.console);\n' +
/*+*/ '\n';
function ___escape_gv(name) {
return name.replace(/[^- a-zA-Z0-9+_/!%$@.()':&<>'…^]/g, function (c) {
switch (c) {
case "\\": return '\\\\\\\\';
case "\0": return '\\\\000';
case "\r": return '\\\\r';
case "\n": return '\\\\n';
case "\t": return '\\\\t';
case '"': return '\\"';
default: return '\\\\x'+___left_pad(c.charCodeAt(0).toString(16), 0, 2)+'';
}
});
}
function ___quote_gv(name) {
return '"' + ___escape_gv(name) + '"';
}
function ___file_contents_to_graphview(filesystem, path_of_this_file, s) {
var gv = '';
var s2 = null;
@ -771,11 +788,6 @@ var ___previous_file_node_style = 'color = "#808080", fontcolor = "#808080", cla
var ___previous_directory_node_style = 'color = "#80c5c5", fontcolor = "#80c5c5", class = dimmed_previous_directory';
var ___directory_node_style = 'color = "#008b8b", fontcolor = "#008b8b"'; // darkcyan = #008b8b
function ___quote_gv(name) {
if (window.console && window.console.log) { window.console.log('TODO: escape GV'); }
return '"' + name.replace('\n', '\\n') + '"';
}
function ___entry_to_graphview(previous_filesystem, filesystem, x) {
var gv = '';
gv += ___quote_gv(x[0]) + '\n';
@ -783,6 +795,7 @@ function ___entry_to_graphview(previous_filesystem, filesystem, x) {
var components = x[0].split('/');
var shortname = components[components.length - 1];
var type = null;
if (___is_hashed_object_path(x[0])) {
// var hash = components.slice(components.length-2).join('');
shortname = shortname.substr(0, 3) + '…';
@ -791,8 +804,10 @@ function ___entry_to_graphview(previous_filesystem, filesystem, x) {
var parent = components.slice(0, components.length - 1).join('/');
if (parent != '') {
if (filesystem.hasOwnProperty(parent)) {
// show arrow from the parent to this element, if the parent directory exists in the filesystem.
gv += ___quote_gv(parent) + ' -> ' + ___quote_gv(x[0]) + ' ['+___directory_edge_style+'];\n';
} else {
// if the parent directory was not created in the filesystem, show the full path
shortname = parent + '/' + shortname;
}
}
@ -800,7 +815,7 @@ function ___entry_to_graphview(previous_filesystem, filesystem, x) {
// Put a transparent background to make the nodes clickable.
gv += ___quote_gv(x[0]) + ' [ style="filled", fillcolor="transparent" ]';
// contents of the file as a tooltip:
// full name of the file as a tooltip:
gv += ___quote_gv(x[0]) + ' [ tooltip = ' + ___quote_gv(x[0]) + ' ]';
var id = 'gv-' + (___global_unique_id++);
@ -808,9 +823,9 @@ function ___entry_to_graphview(previous_filesystem, filesystem, x) {
if (x[1] === null) {
if (shortname.length <= 2) {
shortname = shortname + '\ndir';
type = '(dir)';
} else {
shortname = shortname + '\ndirectory';
type = '(directory)';
}
if (previous_filesystem.hasOwnProperty(x[0])) {
// dim nodes that existed in the previous_filesystem
@ -820,7 +835,7 @@ function ___entry_to_graphview(previous_filesystem, filesystem, x) {
}
} else {
var contents = ___file_contents_to_graphview(filesystem, x[0], x[1]);
shortname = shortname + '\n(' + contents.type + ')';
type = '(' + contents.type + ')';
gv += contents.gv;
if (previous_filesystem.hasOwnProperty(x[0])) {
// dim nodes that existed in the previous_filesystem
@ -829,7 +844,7 @@ function ___entry_to_graphview(previous_filesystem, filesystem, x) {
}
// shortname as a label
gv += ___quote_gv(x[0]) + ' [ label = ' + ___quote_gv(shortname) + ' ]';
gv += ___quote_gv(x[0]) + ' [ label = "' + ___escape_gv(shortname) + (type == null ? '' : '\\n' + ___escape_gv(type)) + '" ]';
return { id:id, gv:gv };
}
@ -1162,7 +1177,12 @@ function ___scrollToLine(editor, line) {
editor.addLineClass(line, 'background', 'scrolled-to-line');
var editorOffset = ___getOffset(editor.getScrollerElement()).top;
var lineOffset = editor.charCoords({line: line, ch: 0}, "local").top;
document.body.scrollTo(0, editorOffset + lineOffset - window.innerHeight/2);
var toOffset = editorOffset + lineOffset - window.innerHeight/2;
document.body.parentElement.scrollTo(0, toOffset);
if (document.body.parentElement.scrollTop == 0) {
// depending on the CSS, the scrollbar can belong to the HTML element or to the body element.
document.body.scrollTo(0, toOffset);
}
}
function ___toCodeMirror(ta) {
var editor = CodeMirror.fromTextArea(ta, {

View File

@ -5,11 +5,11 @@
<title>GIT tutorial</title>
<link rel="canonical" class="ipfs-permalink-href" href="#" />
<link rel="alternate" href="https://suzanne.soy/git-tutorial/" />
<link rel="alternate" href="https://git-tutorial.suzanne.soy/" />
<!-- These rel="duplicate" links use an HTML encoding of the rfc6249 Metalink/HTTP -->
<link rel="duplicate" class="ipfs-permalink-href" href="#" />
<link rel="duplicate" href="https://suzanne.soy/git-tutorial/" />
<link rel="duplicate" href="https://git-tutorial.suzanne.soy/" />
<!-- Third-party libraries: -->
<link rel="stylesheet" href="codemirror-5.60.0/lib/codemirror.css">
@ -52,15 +52,16 @@ function ___example(id, f) {
<a href="#" class="permalink"><h1 itemprop="headline">Git tutorial: reimplementing part of GIT in JavaScript</h1></a>
<p class="article-metadata">By <a href="https://suzanne.soy/" itemprop="author" rel="author" itemscope="itemscope" itemtype="https://schema.org/Person">Suzanne Soy</a> for <a href="https://ligolang.org/" itemprop="copyrightHolder" itemscope="itemscope" itemtype="https://schema.org/Organization">LIGO</a>. <time itemprop="dateCreated datePublished" datetime="2021-06-29">02021-06-29</time>.</p>
<p>Please send remarks and suggestions to <a href="mailto:git-tutorial@suzanne.soy">git-tutorial@suzanne.soy</a> or simply fork <a href="https://github.com/jsmaniac/git-tutorial">this repository on GitHub</a>.</p>
<p>This version of the site matches the tag <a id="this-version" href="https://github.com/jsmaniac/git-tutorial/tree/v1.0.2">v1.0.2</a> on GitHub.
<p>This version of the site matches the tag <a id="this-version" href="https://github.com/jsmaniac/git-tutorial/tree/v1.1.0">v1.1.0</a> on GitHub.
Permalinks to snapshots of this site are available via IPFS:
<a class="ipfs-permalink-href" href="#" title="published on 02023-11-21">v1.0.2 (this version)<span class="while-computing-ipfs-permalink"> [computing URL…]</span></a>,
<a class="ipfs-permalink-href" href="#" title="published on 02023-11-21">v1.1.0 (this version)<span class="while-computing-ipfs-permalink"> [computing URL…]</span></a>,
<a href="ipfs://bafybeigexuwmsjhnitngyacj5ja7nqigddyekkhcsz6ejntrgpwcwtusoy/" title="">v1.0.2 (02023-11-21)</a>,
<a href="ipfs://bafybeie6rfdvlkl5raju4m4vqjrofwm3jl4gghjfm2xv2rljyahdl5nsoy/" title="">v1.0.1 (02023-11-21)</a>,
<a href="ipfs://bafybeiciboq5zritmgkvmzucvuokajozit7hfwhmappxwmrh2p5zdovdie/" title="published shortly after 02021-06-29">v1 (02021-06-29)</a>,
Alternatively check the
<a href="ipns://git-tutorial.suzanne.soy/">latest version via IPNS/IPFS</a>
or
<a href="https://suzanne.soy/git-tutorial/">latest via HTTPS</a>.
<a href="https://git-tutorial.suzanne.soy/">latest via HTTPS</a>.
See the <a href="#changelog">Changelog</a> section for errata, and the <a href="sitemap.html">sitemap</a> for a list of contents.</p>
<section id="credits-license">
@ -1191,30 +1192,36 @@ This is done by creating a <em>tree</em> object</p>
});
</script>
<p>In the contents of a tree, subdirectories (trees) are listed before files (blobs);
within each group the entries are ordered alphabetically.</p>
<p>In the contents of a tree, the entries are ordered alphabetically.</p>
<textarea id="in8">
// base_directory is a string
// filenames is a list of strings
// subtrees is a list of {name, hash} objects.
function store_tree(base_directory, filenames, subtrees) {
function get_file_hash(filename) {
var path = join_paths(base_directory, filename);
var hash = hash_object(true, 'blob', false, path)
return hex_to_raw_bytes(hash);
// entries will contain {name:'…', entry:'…'} objects
var entries = [];
for (var i = 0; i < filenames.length; i++) {
var path = join_paths(base_directory, filenames[i]);
var hash = hash_object(true, 'blob', false, path);
var entry = "100644 " + filenames[i] + "\0" + hex_to_raw_bytes(hash);
entries.push({ name: filenames[i], entry: entry });
}
var blobs = filenames.map(function (filename) {
return "100644 " + filename + "\0" + get_file_hash(filename);
for (var j = 0; j < subtrees.length; j++) {
var entry = "40000 " + subtrees[j].name + "\0" + hex_to_raw_bytes(subtrees[j].hash);
entries.push({ name: subtrees[j].name, entry: entry });
}
// Sort the entries by name, alphabetically.
// Note that this won't work with e.g. unicode names.
entries.sort(function (a,b) {
return (a.name < b.name ? -1 : (a.name > b.name ? 1 : 0));
});
var trees = subtrees.map(function (subtree) {
return "40000 " + subtree.name + "\0" + hex_to_raw_bytes(subtree.hash);
});
// blobs are listed before subtrees
var tree_content = blobs.join('') + trees.join('');
// concatenate the entries
var tree_content = entries.map(function (entry) { return entry.entry; }).join('');
// cat tree_content | git hash-object -w -t tree --stdin
return hash_object(true, 'tree', true, tree_content);
@ -2428,12 +2435,15 @@ commands.</p>
</section>
</section>
<section>
<section id="changelog-errata">
<h2>Changelog and errata</h2>
<dl>
<dt>v1</dt><dd>Initial version.</dd>
<dt>v1.0.1</dt><dd>Internal changes to provide IPFS links.</dd>
<dt>v1.0.2</dt><dd>Added a sitemap for download tools.</dd>
<dt>v1.1.0</dt><dd>The <a href="#storing-trees">section on storing trees</a> used to indicate that subtrees appear before blobs in the
binary representation of a tree. This was incorrect, the entries are simply sorted alphabetically without any
consideration of their type. Thanks to Exe for spotting this. Internal change: escaped some strings.</dd>
</dl>
</section>

File diff suppressed because it is too large Load Diff