Cron job
This commit is contained in:
parent
d35bd4a9d8
commit
081f8abf7b
8
.github/github_install_ipfs.sh
vendored
Executable file
8
.github/github_install_ipfs.sh
vendored
Executable file
|
@ -0,0 +1,8 @@
|
|||
#!/usr/bin/env bash

# Install a pinned Kubo (go-IPFS) release into /tmp and initialise a
# low-power IPFS profile, for use by the other CI scripts.

set -euET -o pipefail

# Pin the exact release so CI runs are reproducible.
kubo_version="v0.19.1"
kubo_tarball="kubo_${kubo_version}_linux-amd64.tar.gz"

cd /tmp

wget "https://dist.ipfs.tech/kubo/${kubo_version}/${kubo_tarball}"

tar -zxf "${kubo_tarball}"

# Make the freshly unpacked binary visible for this one command only.
PATH="/tmp/kubo:$PATH" ipfs init --profile=lowpower
|
23
.github/github_update_homepage.sh
vendored
Executable file
23
.github/github_update_homepage.sh
vendored
Executable file
|
@ -0,0 +1,23 @@
|
|||
#!/usr/bin/env bash

# Update the GitHub repository's "homepage" field so it points at the
# latest IPFS hash of the generated site.
#
# Required environment:
#   GITHUB_REPOSITORY, GITHUB_REPOSITORY_OWNER  (provided by GitHub Actions)
#   API_TOKEN_FOR_UPDATE_HOMEPAGE               (repo-scoped API token)

set -euET -o pipefail

user="$GITHUB_REPOSITORY_OWNER"
repo="${GITHUB_REPOSITORY#*/}"           # strip the "owner/" prefix

printf '%s\n' "user=$user repo=$repo"

echo "Hashing repository contents with IPFS..."

h="$(result/www/ipfs-add.sh --pin=true)"

printf 'The new homepage URL will be: https://%s.ipfs.dweb.link/\n' "$h"

# Update Homepage URL on GitHub.
# Fixes vs. the previous version:
#  - a space before each continuation backslash (the Authorization line had
#    "...TOKEN"\ which fuses tokens when the lines are joined);
#  - --fail so an HTTP error makes curl exit non-zero and set -e aborts the
#    job instead of the failure being discarded into /dev/null.
curl -L --fail -sS \
  -X PATCH \
  -H "Accept: application/vnd.github+json" \
  -H "Authorization: Bearer $API_TOKEN_FOR_UPDATE_HOMEPAGE" \
  -H "X-GitHub-Api-Version: 2022-11-28" \
  "https://api.github.com/repos/$user/$repo" \
  -d '{"name":"'"$repo"'", "homepage":"https://dweb.link/ipfs/'"$h"'"}' > /dev/null
|
59
.github/pin-using-ipfs.sh
vendored
Executable file
59
.github/pin-using-ipfs.sh
vendored
Executable file
|
@ -0,0 +1,59 @@
|
|||
#!/usr/bin/env bash

# Pin the freshly generated site on one or more remote IPFS pinning services.
#
# Expected environment (all optional; pinning is skipped when unset):
#   IPFS_REMOTE_API_ENDPOINT - one pinning-service API endpoint per line
#   IPFS_REMOTE_TOKEN        - matching tokens, one per line, same order
#   IPFS_SWARM_CONNECT_TO    - multiaddrs of nodes to connect to, one per line
#
# Exits non-zero if any remote pin failed.

set -euET -o pipefail

echo "Hashing repository contents with IPFS..."

h="$(result/www/ipfs-add.sh --pin=true)"

printf 'Pinning ipfs://%s/\n' "$h"

# Global status file: flipped to 1 if any remote pin fails.  A file is used
# because the work below happens inside pipeline subshells, whose variable
# assignments are invisible to this shell.
echo 0 > ipfs-pin-global-exitcode

if test -n "${IPFS_REMOTE_API_ENDPOINT:-}" && test -n "${IPFS_REMOTE_TOKEN:-}" && test -n "${IPFS_SWARM_CONNECT_TO:-}"; then
  # Start the daemon and poll its log until it reports readiness.
  echo 'Starting IPFS daemon...'
  tail -F /tmp/ipfs-daemon.logs -n +1 & pid=$!
  ipfs daemon >/tmp/ipfs-daemon.logs 2>&1 &
  # grep -q: only test for the marker, don't echo the matched line each poll.
  while ! grep -q 'Daemon is ready' /tmp/ipfs-daemon.logs; do sleep 1; date; done
  echo 'IPFS daemon started, killing log tail...'
  kill "$pid"
  echo 'log tail killed'

  # Connect to each configured swarm peer (one multiaddr per line).
  # read -r: keep any backslashes in the multiaddr literal.
  printf '%s\n' "$IPFS_SWARM_CONNECT_TO" | (i=1; while IFS= read -r multiaddr; do
    printf 'Connecting to IPFS node %s...\n' "$i"
    (
      ipfs swarm connect "$multiaddr" &
    ) > /dev/null 2>&1
    i=$((i+1))
  done)
  sleep 10

  # Endpoints and tokens are parallel line-aligned lists: line $i of
  # IPFS_REMOTE_TOKEN belongs to line $i of IPFS_REMOTE_API_ENDPOINT.
  printf '%s\n' "$IPFS_REMOTE_API_ENDPOINT" | (i=1; while IFS= read -r api_endpoint; do
    printf 'Extracting token %s from environment...\n' "$i"
    token="$( (printf '%s\n' "$IPFS_REMOTE_TOKEN" | tail -n +"$i" | head -n 1) 2>/dev/null )"
    #(printf %s "$token" | sha256sum | sha256sum | sha256sum) 2>/dev/null # for debugging without leaking the token

    printf 'Adding remote pinning service %s...\n' "$i"
    (
      ipfs pin remote service add my-remote-pin-"$i" "$api_endpoint" "$token"
    ) > /dev/null 2>&1

    printf 'Pinning %s on the remote service %s...\n' "$h" "$i"
    (
      # Record the exit status in a file: this subshell's output is discarded,
      # so the file is the only channel back to the reporting code below.
      if ipfs pin remote add --service=my-remote-pin-"$i" --name="site-bounties-$(TZ=UTC git log -1 --format=%cd --date=iso-strict-local HEAD)-$GITHUB_SHA" "$h"; then
        echo 0 > ipfs-pin-remote-add-exitcode
      else
        echo $? > ipfs-pin-remote-add-exitcode
      fi
    ) > /dev/null 2>&1
    printf 'Finished pinning %s on the remote service %s, exitcode=%s\n' "$h" "$i" "$(cat ipfs-pin-remote-add-exitcode)"
    if test "$(cat ipfs-pin-remote-add-exitcode)" != 0; then
      echo 1 > ipfs-pin-global-exitcode
    fi
    i=$((i+1))
  done)
fi

# Fail the job if one of the pinning services didn't work.
exit "$(cat ipfs-pin-global-exitcode)"
|
52
.github/update-ovh.py
vendored
Executable file
52
.github/update-ovh.py
vendored
Executable file
|
@ -0,0 +1,52 @@
|
|||
# -*- encoding: utf-8 -*-
'''
Update the OVH "_dnslink" TXT record so DNSLink resolves to the latest
IPFS hash of the site.

First, install the latest release of Python wrapper: $ pip install ovh

To create an API token, visit:
OVH_DNS_DOMAIN=foobar.com
OVH_DNS_RECORD_ID=??????
x-www-browser https://www.ovh.com/auth/api/createToken?GET=/domain/zone/"$OVH_DNS_DOMAIN"/record/"$OVH_DNS_RECORD_ID"&PUT=/domain/zone/"$OVH_DNS_DOMAIN"/record/"$OVH_DNS_RECORD_ID"&POST=/domain/zone/"$OVH_DNS_DOMAIN"/refresh

This should create an API key with the following.
Add the last one and uncomment the code a few lines
below to be able to obtain the "$OVH_DNS_RECORD_ID" number.

GET /domain/zone/"$OVH_DNS_DOMAIN"/record/"$OVH_DNS_RECORD_ID"
PUT /domain/zone/"$OVH_DNS_DOMAIN"/record/"$OVH_DNS_RECORD_ID"
POST /domain/zone/"$OVH_DNS_DOMAIN"/refresh
#GET /domain/zone/"$OVH_DNS_DOMAIN"/record
'''
import sys
import os
import json
import ovh

# The TXT record that carries the dnslink entry; the update below is refused
# unless the target record's subDomain matches this, as a safety check.
DNSLINK_SUBDOMAIN = '_dnslink.xkcd'

# Read the zone coordinates once; a missing variable raises KeyError up front
# instead of midway through the API calls.
domain = os.environ['OVH_DNS_DOMAIN']
record_id = os.environ['OVH_DNS_RECORD_ID']
record_url = '/domain/zone/' + domain + '/record/' + record_id

# Instantiate an OVH Client.
# You can generate new credentials with full access to your account on
# the token creation page
client = ovh.Client(
    endpoint=os.environ['API_OVH_ENDPOINT'],
    application_key=os.environ['API_OVH_APPLICATION_KEY'],
    application_secret=os.environ['API_OVH_APPLICATION_SECRET'],
    consumer_key=os.environ['API_OVH_CONSUMER_KEY'],
)

# Uncomment to get the OVH_DNS_RECORD_ID number (needs GET /domain/zone/"$OVH_DNS_DOMAIN"/record allowed in the API token)
#result = client.get('/domain/zone/' + domain + '/record',
#    fieldType='TXT',
#    subDomain='_dnslink.xkcd',
#)
#print(json.dumps(result, indent=4))

if client.get(record_url)['subDomain'] == DNSLINK_SUBDOMAIN:
    # Point the record at the new hash; IPFS_HASH is only required on this
    # path, so it is read here rather than up front.
    result = client.put(record_url,
        subDomain=DNSLINK_SUBDOMAIN,
        target='dnslink=/ipfs/' + os.environ['IPFS_HASH'],
        ttl=60,
    )
    print(json.dumps(result, indent=4))

    # Ask OVH to regenerate the zone so the change is published.
    result = client.post('/domain/zone/' + domain + '/refresh')
    print(json.dumps(result, indent=4))
else:
    print('Wrong subdomain?')
|
30
.github/warm-up-gateway-caches.sh
vendored
Executable file
30
.github/warm-up-gateway-caches.sh
vendored
Executable file
|
@ -0,0 +1,30 @@
|
|||
#!/usr/bin/env bash

# Fetch every published file through several public IPFS gateways so their
# caches are warm (and the content replicated) before visitors arrive.
# Reads the file list produced by the workflow from .github/files-to-cache.lst.

set -euET -o pipefail

echo "Warm up cache on a couple of IPFS gateways"

h="$(result/www/ipfs-add.sh --pin=true)"

#wget --reject-regex ".*\?.*" -r -np --timeout=2 --tries=1 "https://cloudflare-ipfs.com/ipfs/$h" 2>&1 | grep '^--' & pid_cloudflare="$!"
#wget --reject-regex ".*\?.*" -r -np --timeout=2 --tries=1 "https://$h.ipfs.dweb.link/" 2>&1 | grep '^--' & pid_dweb="$!"
#wait "$pid_cloudflare" || true
#wait "$pid_dweb" || true

# Download the files, twice (a few files in the first attempt would likely
# fail as the DHT propagation is not instantaneous?)
for i in 1 2; do
  # The list could also be derived on the fly instead of from the .lst file:
  #ipfs add --progress=false --ignore-rules-path "result/www/.ipfsignore" --pin=false --hidden -r result/www \
  #| cut -d ' ' -f 3- \
  #| sed -e 's~^www/*~~'
  while IFS= read -r f; do
    # ${VAR:-} keeps set -u from aborting when the pinning secret is not
    # configured (it is only used to detect whether pinata is in play).
    if (printf '%s\n' "${IPFS_REMOTE_API_ENDPOINT:-}" | grep pinata) >/dev/null 2>&1; then
      printf 'Warming up pinata cache for %s (attempt %d)...\n' "$f" "$i"
      wget --tries=1 --timeout=10 -O- "https://gateway.pinata.cloud/ipfs/$h/$f" > /dev/null || true
    fi
    printf 'Warming up Cloudflare cache for %s (attempt %d)...\n' "$f" "$i"
    wget --tries=1 --timeout=10 -O- "https://cloudflare-ipfs.com/ipfs/$h/$f" > /dev/null || true
    printf 'Warming up dweb.link cache for %s (attempt %d)...\n' "$f" "$i"
    wget --tries=1 --timeout=10 -O- "https://$h.ipfs.dweb.link/$f" > /dev/null || true
  done < .github/files-to-cache.lst
done
|
111
.github/workflows/cron.yml
vendored
Normal file
111
.github/workflows/cron.yml
vendored
Normal file
|
@ -0,0 +1,111 @@
|
|||
name: update XKCD archive
on:
  # Runs on pushes targeting the default branch
  push:
    branches: ["main"]

  schedule:
    - cron: "0 0 * * *"

  # Allows you to run this workflow manually from the Actions tab
  workflow_dispatch:

permissions:
  contents: read
  pages: write
  id-token: write

concurrency:
  group: "pages"
  cancel-in-progress: true

jobs:
  updatexkcd:
    environment:
      name: github-pages
      url: ${{ steps.deployment.outputs.page_url }}
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v3
        with:
          submodules: 'recursive'
      - name: Go install
        run: go install .
      - name: Make output dir
        run: mkdir result/
      - name: Go run
        run: go run . out -f 1 -t 5
      - name: Create content
        # One command per line (same commands as the former single-line
        # script; bash -e still aborts the step on the first failure).
        run: |
          ls -ld out
          ls -lRa out
          mkdir -p result
          mkdir -p result/www
          cp out/{styles.css,favicon.ico} result/www/
          cp -r 'out/1/' result/www/1
          find result/ -type d -print0 | xargs -0 chmod a+rx
          find result/ -type f -print0 | xargs -0 chmod a+r
          rm -f result/www/1/*
          echo "<html><body><h1>hiaaa $(date)</h1></body></html>" > result/www/index.html
        # run: ls -ld out; ls -lRa out; mkdir -p result; mkdir -p result/www; cp out/{styles.css,favicon.ico} result/www/; mkdir result/www/1; cp out/1/{index.html,info.json} result/www/1/; cp 'out/1/barrel_cropped_(1).jpg' result/www/1/; echo "<html><body><h1>hiaaa $(date)</h1></body></html>" > result/www/index.html; ls -lRa result
      - name: create .ipfsignore
        run: touch result/www/.ipfsignore
      - name: create .nojekyll
        run: touch result/www/.nojekyll
      - name: create ipfs-add.sh
        run: cp _ipfs-add.sh result/www/ipfs-add.sh; (cd result/www; find -type f > ../../.github/files-to-cache.lst)

      - name: Setup Pages
        uses: actions/configure-pages@v3
      - name: Upload artifact
        uses: actions/upload-pages-artifact@v2
        with:
          # Upload entire repository
          path: 'result/www'
      - name: Deploy to GitHub Pages
        id: deployment
        uses: actions/deploy-pages@v2

#      - name: Download IPFS
#        run: ./.github/github_install_ipfs.sh
#      - name: Print IPFS hash
#        run: PATH="/tmp/kubo:$PATH" ipfs cid base32 "$(ipfs add --ignore-rules-path "result/www/.ipfsignore" --pin=false --hidden -Qr "result/www/")"
#
#      - name: Make tarball of website
#        run: mkdir -p "$RUNNER_TEMP" && tar --directory result/www/ -cvf "$RUNNER_TEMP/artifact.tar" .
#      - name: Upload artifact
#        uses: actions/upload-artifact@v3
#        with:
#          name: github-pages
#          path: ${{ runner.temp }}/artifact.tar
#          if-no-files-found: error
#      - name: Deploy to GitHub Pages
#        id: deployment
#        uses: actions/deploy-pages@v2
#
##      - name: Setup Pages
##        uses: actions/configure-pages@v3
##      - name: Upload artifact
##        uses: actions/upload-pages-artifact@v2
##        with:
##          # Upload entire folder
##          path: 'result/www/'
##      - name: Deploy to GitHub Pages
##        id: deployment
##        uses: actions/deploy-pages@v2
#
#      - name: Upload to IPFS
#        run: PATH="/tmp/kubo:$PATH" ./.github/pin-using-ipfs.sh
#        continue-on-error: true
#        env:
#          IPFS_SWARM_CONNECT_TO: ${{ secrets.IPFS_SWARM_CONNECT_TO }}
#          IPFS_REMOTE_API_ENDPOINT: ${{ secrets.IPFS_REMOTE_API_ENDPOINT }}
#          IPFS_REMOTE_TOKEN: ${{ secrets.IPFS_REMOTE_TOKEN }}
#      - name: Install OVH pip package
#        run: pip install ovh
#      - name: Update OVH _dnslink
#        run: export PATH="/tmp/kubo:$PATH"; export IPFS_HASH="$(ipfs cid base32 "$(ipfs add --ignore-rules-path "result/www/.ipfsignore" --pin=false --hidden -Qr "result/www/")")"; python ./.github/update-ovh.py >/dev/null 2>&1
#        env:
#          API_OVH_APPLICATION_KEY: ${{ secrets.API_OVH_APPLICATION_KEY }}
#          API_OVH_APPLICATION_SECRET: ${{ secrets.API_OVH_APPLICATION_SECRET }}
#          API_OVH_CONSUMER_KEY: ${{ secrets.API_OVH_CONSUMER_KEY }}
#          API_OVH_ENDPOINT: ${{ secrets.API_OVH_ENDPOINT }}
#          OVH_DNS_DOMAIN: ${{ secrets.OVH_DNS_DOMAIN }}
#          OVH_DNS_RECORD_ID: ${{ secrets.OVH_DNS_RECORD_ID }}
#      - name: Warm up IPFS gateway caches
#        run: PATH="/tmp/kubo:$PATH" ./.github/warm-up-gateway-caches.sh
#      - name: Update homepage URL
#        run: PATH="/tmp/kubo:$PATH" ./.github/github_update_homepage.sh
#        env:
#          API_TOKEN_FOR_UPDATE_HOMEPAGE: ${{ secrets.API_TOKEN_FOR_UPDATE_HOMEPAGE }}
|
2
.github/workflows/lint.yml
vendored
2
.github/workflows/lint.yml
vendored
|
@ -4,7 +4,7 @@ on:
|
|||
tags:
|
||||
- v*
|
||||
branches:
|
||||
- main
|
||||
- main2
|
||||
pull_request:
|
||||
jobs:
|
||||
golangci:
|
||||
|
|
2
.github/workflows/test.yml
vendored
2
.github/workflows/test.yml
vendored
|
@ -4,7 +4,7 @@ on:
|
|||
tags:
|
||||
- v*
|
||||
branches:
|
||||
- main
|
||||
- main2
|
||||
pull_request:
|
||||
jobs:
|
||||
test:
|
||||
|
|
1
.gitignore
vendored
1
.gitignore
vendored
|
@ -1 +1,2 @@
|
|||
out/
|
||||
SECRETS
|
||||
|
|
14
_ipfs-add.sh
Executable file
14
_ipfs-add.sh
Executable file
|
@ -0,0 +1,14 @@
|
|||
#!/usr/bin/env bash

# Add the directory containing this script to IPFS (honouring its
# .ipfsignore) and print the resulting CID in base32 on stdout.
# The single required argument selects local pinning: --pin=true|--pin=false.

set -euET -o pipefail

usage() {
  printf "Usage:\n"
  printf " %s --pin=true\n" "$0"
  printf " %s --pin=false\n" "$0"
}

# Argument validation.  On error, usage goes to stderr so callers that
# capture stdout (h="$(ipfs-add.sh --pin=true)") don't swallow the message.
if test $# -lt 1; then usage >&2; exit 1; fi
case "$1" in
  -h|--help) usage; exit 0 ;;
  --pin=true|--pin=false) ;;
  *) usage >&2; exit 1 ;;
esac

# self_dir is both the source of the ignore rules and the directory added.
self_dir="$(dirname "$0")"
ipfs cid base32 "$(ipfs add --ignore-rules-path "$self_dir/.ipfsignore" "$1" --hidden -Qr "$self_dir")"
|
Loading…
Reference in New Issue
Block a user