mirror of https://git.sr.ht/~seirdy/seirdy.one synced 2024-11-27 14:12:09 +00:00

Compare commits


6 commits

Author SHA1 Message Date
Rohan Kumar
d8eae6f057
fix fetching mentions when no webmention file exists 2023-11-23 20:05:54 -08:00
Rohan Kumar
aebc49772e
Fix: exit check_cached_webmentions early 2023-11-23 20:01:54 -08:00
Rohan Kumar
226ee52f85
Format shell scripts 2023-11-23 19:06:46 -08:00
Rohan Kumar
d88d98f829
Fix get-webmentions diagnostics 2023-11-23 19:03:40 -08:00
Rohan Kumar
2b016aa973
Refactor shell scripts.
- Avoid relative paths, so I can run them from any working directory.
- Make a dedicated curl-wrapping shell script instead of re-defining the
  same alias everywhere.
- Support extended offline periods: allow get-webmentions.sh to fall
  back to the cached copy of my webmentions for up to a day, and don't
  accidentally destroy it; keep changes atomic.
- Verify that the fetched webmentions are legit before replacing the
  cached ones.
- Make shellcheck happy about quoting in vnu.sh by passing the list of
  files with xargs instead of a shell variable.
2023-11-23 18:45:03 -08:00
Rohan Kumar
a3e4729b6c
Update Webhint configs
- More exact browserslist
- Stop hiding "hint" execution
- Disable chromium sandbox since I'm now running it in a container and
  all content is trusted.
- Remove obsolete elements-to-ignore from the compatibility check.
- Re-enable the compression check but disable checking for Zopfli
  compression, which returns a false positive; I use
  efficient-compression-tool for gzip compression, which is actually
  better than Zopfli.
- Stop ignoring the "X-Frame-Options" header in no-disallowed-headers,
  since I stopped sending that header.
2023-11-23 18:40:01 -08:00
11 changed files with 106 additions and 56 deletions

View file

@@ -24,7 +24,7 @@ LIGHTHOUSE_ARGS += --budget-path linter-configs/budget.json --output html --outp
 hint-online:
 	@echo "Running webhint"
-	@hint --config linter-configs/hintrc $(URLS)
+	hint --config linter-configs/hintrc $(URLS)
 lighthouse:
 	mkdir -p $(CHROME_PROFILE)
 	CHROME_PATH=$(CHROME_PATH) CHROME_PROFILE=$(CHROME_PROFILE) JS_FLAGS='' lighthouse $(URLS) $(LIGHTHOUSE_ARGS)
@@ -40,8 +40,9 @@ axe-ff:
 	@echo "Running axe with Firefox"
 	@scripts/bin/axe-ff $(OUTPUT_DIR) $(URLS)
 .PHONY: axe axe-ff
 .PHONY: all-extra
-all-extra: axe-ff validate-json equal-access htmlproofer lint-css validate-feeds
+all-extra: axe-ff equal-access lint-local

View file

@@ -7,6 +7,7 @@
 > 0.1%
 last 5 versions
-Firefox ESR
+Firefox >= 57
+iOS >= 9.1
 not dead
-IE 9-11
+IE 11
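
As a sanity check on the new query, Browserslist itself can print the matched browser set when run from the project root (assuming the usual Node tooling is installed):

npx browserslist   # prints the browsers matched by this config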

View file

@@ -3,7 +3,8 @@
     "name": "puppeteer",
     "options": {
       "browser": "Chromium",
-      "headless": true
+      "headless": true,
+      "puppeteerOptions": {"args": ["--no-sandbox"]}
     }
   },
   "extends": [
@@ -43,8 +44,6 @@
       "error",
       {
         "ignore": [
-          "time",
-          "picture",
           "a[download]",
           "a[referrerpolicy]",
           "a[translate]",
@@ -52,7 +51,6 @@
           "samp[translate]",
           "span[translate]",
           "img[decoding]",
-          "meta[name=color-scheme]",
           "meta[name=theme-color]"
         ]
       }
@@ -64,12 +62,20 @@
         "maxAgeTarget": 10800
       }
     ],
-    "http-compression": "off",
+    "http-compression": ["error", {
+      "resource": {
+        "zopfli": false
+      },
+      "html": {
+        "zopfli": false
+      }
+    }],
     "https-only": "error",
     "sri": "error",
-    "detect-css-reflows": "off",
-    "detect-css-reflows/layout": "off",
-    "detect-css-reflows/paint": "off",
+    "no-http-redirects": "error",
+    "detect-css-reflows/layout": "hint",
+    "detect-css-reflows/paint": "hint",
+    "detect-css-reflows/composite": "hint",
     "manifest-exists": "error",
     "manifest-file-extension": "error",
     "manifest-is-valid": "error",
@@ -83,10 +89,7 @@
     "no-vulnerable-javascript-libraries": "off",
     "html-checker": "off",
     "image-optimization-cloudinary": "off",
-    "no-disallowed-headers": [ "error",
-      {
-        "ignore": ["x-frame-options"]
-      }],
+    "no-disallowed-headers": "error",
     "meta-charset-utf-8": "error",
     "disown-opener": "error",
     "content-type": ["error", {

View file

@@ -1,7 +1,9 @@
 #!/bin/sh
+set -e -u
 # Runs axe on every page of my sitemap
 # first arg is output directory, after that comes urls from a sitemap. We replace the urls with equivalent local file paths.
-PATH="scripts/bin:$PATH"
+PATH="$(dirname "$0"):$PATH"
 output_dir="$1"
 shift
 urls="$(echo "$*" | tr ' ' '\n' | sd '^https?://[^/]*' "file://$PWD/$output_dir" | sd '/$' '/index.html' | tr '\n' ' ')"

View file

@@ -39,7 +39,6 @@ done
 url="$1"
 find_rel_mention_count() {
 	curl -s "$url" | grep -cF 'rel="mentioned"'
 }

View file

@@ -1,14 +1,14 @@
 #!/bin/sh
 set -e -u
-alias curl_cmd='curl --proto "=https" --proto-default https --http2 -siSL --tlsv1.3 --cert-status'
 ipv6_success=1
 ipv4_success=1
-curl_cmd -6 'seirdy.one/ip' || ipv6_success=0
+curl_wrapper="$(dirname "$0")/curl-wrapper.sh"
+"$curl_wrapper" -6 'https://seirdy.one/ip' || ipv6_success=0
 echo
-curl_cmd -4 'seirdy.one/ip' || ipv4_success=0
+"$curl_wrapper" -4 'https://seirdy.one/ip' || ipv4_success=0
 echo
 if [ "$ipv6_success" = 0 ] && [ "$ipv4_success" = 0 ]; then

scripts/curl-wrapper.sh (new executable file)

View file

@@ -0,0 +1,3 @@
+#!/bin/sh
+# a curl wrapper for seirdy.one
+curl --proto "=https" --tlsv1.3 --cert-status -sS -m10 "$@"
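
As the ip-check diff above shows, sibling scripts locate the wrapper relative to their own directory instead of hard-coding a path. A minimal sketch of that calling pattern (the target URL is just an example):

#!/bin/sh
set -e -u
# Resolve the wrapper relative to this script, so it works from any working directory.
curl_wrapper="$(dirname "$0")/curl-wrapper.sh"
"$curl_wrapper" 'https://seirdy.one/ip'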

View file

@@ -2,18 +2,50 @@
 #
 # Script to fetch all approved webmentions from webmentiond as a big json response.
 # Uses POSIX and cURL in CI, also uses any pass/pash-compatible pwmngr otherwise
-# The response is cached for 90 minutes.
+# The response is cached for 90 minutes. If fetching fresh webmentions fails, fall back to the cached version for up to one day (1440 minutes). If cached webmentions are older than a day, error out.
 set -e -u
+dirname="$(dirname "$0")"
+curl_wrapper="$dirname/curl-wrapper.sh"
 auth_url='https://seirdy.one/webmentions/authenticate/access-key'
 webmentions_url='https://seirdy.one/webmentions/manage/mentions?limit=9999&status=approved'
-webmentions_file="$(realpath data/webmentions.json)"
-# just a little curl wrapper I use on seirdy.one
-alias ccurl='curl --proto "=https" --proto-default https --tlsv1.3 --cert-status'
-# use a long-lived key (password) to fetch a short-lived bearer token.
+webmentions_file="$dirname/../data/webmentions.json"
+stale_after_minutes=90
+fallback_stale_after_minutes=1440
+skip_check=0
+times_run=0
+check_cached_webmentions() {
+	times_run="$((times_run + 1))"
+	if [ "$skip_check" = 0 ]; then
+		expires_in="$fallback_stale_after_minutes"
+		if [ $# -gt 0 ]; then
+			expires_in="$1"
+		fi
+		exit_status=1
+		if [ -f "$webmentions_file" ]; then
+			old_webmention_file="$(find "$webmentions_file" -mmin +"$expires_in")"
+			if [ "$old_webmention_file" = "" ]; then
+				echo 'Using cached webmentions'
+				skip_check=1
+				exit_status=0
+				return 0
+			fi
+			return 1
+		fi
+		if [ "$exit_status" = 1 ] && [ "$times_run" = 2 ]; then
+			echo "Webmentions are outdated. failed to fetch for over a day."
+			exit "$exit_status"
+		fi
+	fi
+}
+trap check_cached_webmentions EXIT
+# Grab my long-lived key (password). We will use this to authenticate.
 key() {
 	set +u
 	if [ -n "$BUILD_SUBMITTER" ]; then
@@ -24,23 +56,36 @@ key() {
 	set -u
 }
+# Fetch a short-lived access token from my webmention receiver.
 token() {
 	key_response="$(key)"
-	ccurl -sX POST "$auth_url" -d "key=$key_response"
+	"$curl_wrapper" -sX POST "$auth_url" -d "key=$key_response"
 }
-# use that token to fetch all webmentions
+# Verify that the webmentions file has downloaded succesfully by ensuring that it starts and ends with a given substring.
+# The total number of webmentions should be at least a 3-digit number.
+verify_webmentions() {
+	grep -E '^\{"items":\[\{"id":".*,"total":[0-9]{3}([0-9]*)?\}$' "$webmentions_file.tmp" >/dev/null
+}
+# use the token to fetch all webmentions.
 fetch_webmentions() {
+	echo 'Fetching webmentions'
 	token_response="$(token)"
-	ccurl --compressed -H "Authorization: Bearer $token_response" "$webmentions_url" -o "$webmentions_file"
+	"$curl_wrapper" --compressed -H "Authorization: Bearer $token_response" "$webmentions_url" -o "$webmentions_file.tmp" || return 1
+	if verify_webmentions; then
+		mv "$webmentions_file.tmp" "$webmentions_file"
+	else
+		echo 'Error: webmentions failed to verify'
+		exit 1
+	fi
 }
 # fetch webmentions if we don't have a fresh copy already.
-if [ -f "$webmentions_file" ] \
-	&& [ "$(find "$webmentions_file" -mmin +90)" = "" ]; then
-	echo 'Using cached webmentions'
+if [ -f "$webmentions_file" ]; then
+	if ! check_cached_webmentions "$stale_after_minutes"; then
+		fetch_webmentions
+	fi
 else
-	echo 'Fetching webmentions'
 	fetch_webmentions
 fi
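
The caching logic above leans on find's -mmin test: "find FILE -mmin +N" prints FILE only if it was last modified more than N minutes ago, so an empty result means the cached copy is still fresh. A standalone sketch of that check, with a hypothetical file name:

#!/bin/sh
# Hypothetical cache path, for illustration only.
cache="data/example.json"
if [ -f "$cache" ] && [ "$(find "$cache" -mmin +90)" = "" ]; then
	echo 'cache is fresh enough to reuse'
else
	echo 'cache is missing or older than 90 minutes'
fi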

View file

@@ -8,7 +8,6 @@
 set -e -u
 # the name of this program
 progname="$(basename "$0")"
 dirname="$(dirname "$0")"
@@ -58,15 +57,15 @@ done
 endless_orbit() {
 	printf 'Endless Orbit,'
 	curl -sSL --compressed https://linkyblog.neocities.org/onionring/onionring-variables.js \
 		| grep -C 1 https://seirdy.one/ \
 		| tr -d "'\n" | sed 's|https://seirdy.one/|https://linkyblog.neocities.org/webring.html|'
 	echo 'null'
 }
 if [ "$dry_run" = '1' ]; then
 	endless_orbit
 elif [ -f "$webrings_dest" ]; then
 	echo "webrings file already generated"
 else
 	endless_orbit | cat "$webrings_src" - >"$webrings_dest"
 fi

View file

@@ -1,6 +1,6 @@
 #!/bin/sh
 set -e -u
-pwd="$(dirname "$0")"
+dirname="$(dirname "$0")"
 output_dir="$1"
 find_files_to_analyze() {
@@ -8,12 +8,9 @@ find_files_to_analyze() {
 		| grep -Ev '(bimi\.svg|search/index\.x?html)$'
 }
-files_to_analyze="$(find_files_to_analyze)"
+# files_to_analyze="$(find_files_to_analyze)"
 # we skip the BIMI icon (VNU can't handle SVG 1.2) and the search page (it has raw templates).
-vnu \
-	--stdout \
-	--format json \
-	--also-check-svg \
-	$files_to_analyze \
-	| sh "$pwd/filter-vnu.sh"
+find_files_to_analyze \
+	| xargs vnu --stdout --format json --also-check-svg \
+	| sh "$dirname/filter-vnu.sh"

View file

@@ -23,7 +23,7 @@ cleanup() {
 }
 trap cleanup EXIT
-run_tidy () {
+run_tidy() {
 	tidy -asxhtml -config linter-configs/tidy.conf 2>/dev/null || true
 }
@@ -36,13 +36,13 @@ sed 7d "$html_file" | xmllint --format --encode UTF-8 --noent - | tail -n +2 >"$
 # shellcheck disable=SC2016 # these are regex statements, not shell expressions
 #shellcheck source=/home/rkumar/Executables/ghq/git.sr.ht/~seirdy/seirdy.one/scripts/xhtmlize.sh
 sed \
 	-e '1,7d' \
 	-e 's|</span>(&nbsp;)?.span itemprop="familyName|</span>&#160;<span itemprop="familyName"|' \
 	-e 's|class="u-photo photo"[^<]*<|class="u-photo photo"/> <|' \
 	-E \
 	-e 's|([a-z])<data|\1 <data|' \
 	-e 's#</span>(<a[^>]*rel="(nofollow ugc|ugc nofollow)"([^>]*)?>liked</a>)#</span> \1#' \
 	-e 's#^[\t\s]*<(code|/pre)#<\1#' \
 	"$tmp_file" \
 	| awk '/^<\/code>/{printf "%s",$0;next}7'
 } >"$html_file"