Mirror of https://git.sr.ht/~seirdy/seirdy.one, synced 2024-11-10 00:12:09 +00:00
Compare commits
No commits in common. "d8eae6f0572630d39c3c4f03cbb8c0896d451a6f" and "ba7453ca23f8b14223d5b70d763bc72cf487f3ae" have entirely different histories.
d8eae6f057...ba7453ca23
11 changed files with 56 additions and 106 deletions
@@ -24,7 +24,7 @@ LIGHTHOUSE_ARGS += --budget-path linter-configs/budget.json --output html --outp
 
 hint-online:
 	@echo "Running webhint"
-	hint --config linter-configs/hintrc $(URLS)
+	@hint --config linter-configs/hintrc $(URLS)
 lighthouse:
 	mkdir -p $(CHROME_PROFILE)
 	CHROME_PATH=$(CHROME_PATH) CHROME_PROFILE=$(CHROME_PROFILE) JS_FLAGS='' lighthouse $(URLS) $(LIGHTHOUSE_ARGS)
@@ -40,9 +40,8 @@ axe-ff:
 	@echo "Running axe with Firefox"
 	@scripts/bin/axe-ff $(OUTPUT_DIR) $(URLS)
 
-
 .PHONY: axe axe-ff
 
 .PHONY: all-extra
-all-extra: axe-ff equal-access lint-local
+all-extra: axe-ff validate-json equal-access htmlproofer lint-css validate-feeds
 
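The only change in the first hunk is the added "@", which tells make not to echo that recipe line before running it. A throwaway demonstration, using a hypothetical /tmp/demo.mk rather than anything in this repo:

cat >/tmp/demo.mk <<'EOF'
demo:
	echo visible
	@echo quiet
EOF
make -f /tmp/demo.mk demo
# prints: echo visible
#         visible
#         quiet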
@@ -7,7 +7,6 @@
 
 > 0.1%
 last 5 versions
-Firefox >= 57
-iOS >= 9.1
+Firefox ESR
 not dead
-IE 11
+IE 9-11
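To see what the revised queries actually select, the browserslist CLI can expand them; a quick check, assuming Node is available (results vary with the installed caniuse-lite data):

npx browserslist '> 0.1%, last 5 versions, Firefox ESR, not dead, IE 9-11'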
@@ -3,8 +3,7 @@
     "name": "puppeteer",
     "options": {
       "browser": "Chromium",
-      "headless": true,
-      "puppeteerOptions": {"args": ["--no-sandbox"]}
+      "headless": true
     }
   },
   "extends": [
@@ -44,6 +43,8 @@
       "error",
       {
         "ignore": [
+          "time",
+          "picture",
           "a[download]",
           "a[referrerpolicy]",
           "a[translate]",
@@ -51,6 +52,7 @@
           "samp[translate]",
           "span[translate]",
           "img[decoding]",
+          "meta[name=color-scheme]",
           "meta[name=theme-color]"
         ]
       }
@@ -62,20 +64,12 @@
       "maxAgeTarget": 10800
     }
   ],
-  "http-compression": ["error", {
-    "resource": {
-      "zopfli": false
-    },
-    "html": {
-      "zopfli": false
-    }
-  }],
+  "http-compression": "off",
   "https-only": "error",
   "sri": "error",
-  "no-http-redirects": "error",
-  "detect-css-reflows/layout": "hint",
-  "detect-css-reflows/paint": "hint",
-  "detect-css-reflows/composite": "hint",
+  "detect-css-reflows": "off",
+  "detect-css-reflows/layout": "off",
+  "detect-css-reflows/paint": "off",
   "manifest-exists": "error",
   "manifest-file-extension": "error",
   "manifest-is-valid": "error",
@@ -89,7 +83,10 @@
   "no-vulnerable-javascript-libraries": "off",
   "html-checker": "off",
   "image-optimization-cloudinary": "off",
-  "no-disallowed-headers": "error",
+  "no-disallowed-headers": [ "error",
+    {
+      "ignore": ["x-frame-options"]
+    }],
   "meta-charset-utf-8": "error",
   "disown-opener": "error",
   "content-type": ["error", {
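A quick way to confirm the edited config still parses as JSON, with the path taken from the Makefile's --config flag (assumes jq is installed):

jq empty linter-configs/hintrc && echo 'hintrc parses as JSON'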
@@ -1,9 +1,7 @@
 #!/bin/sh
-
-set -e -u
 # Runs axe on every page of my sitemap
 # first arg is output directory, after that comes urls from a sitemap. We replace the urls with equivalent local file paths.
-PATH="$(dirname "$0"):$PATH"
+PATH="scripts/bin:$PATH"
 output_dir="$1"
 shift
 urls="$(echo "$*" | tr ' ' '\n' | sd '^https?://[^/]*' "file://$PWD/$output_dir" | sd '/$' '/index.html' | tr '\n' ' ')"
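The sd pipeline rewrites each sitemap URL into the matching local file: the first substitution swaps the scheme and host for a file:// prefix under the output directory, and the second turns a trailing slash into /index.html. A worked example, with an illustrative URL and an output directory named public:

echo 'https://seirdy.one/posts/' \
	| sd '^https?://[^/]*' "file://$PWD/public" \
	| sd '/$' '/index.html'
# prints: file:///current/working/dir/public/posts/index.html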
@@ -39,6 +39,7 @@ done
 
 url="$1"
 
+
 find_rel_mention_count() {
 	curl -s "$url" | grep -cF 'rel="mentioned"'
 }
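Note that grep -c counts matching lines, not total matches, so two mentions on one line would still count once:

printf '%s\n' '<a rel="mentioned">x</a> <a rel="mentioned">y</a>' 'plain text' \
	| grep -cF 'rel="mentioned"'
# prints 1: both links sit on the same line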
@@ -1,14 +1,14 @@
 #!/bin/sh
 set -e -u
 
+alias curl_cmd='curl --proto "=https" --proto-default https --http2 -siSL --tlsv1.3 --cert-status'
+
 ipv6_success=1
 ipv4_success=1
 
-curl_wrapper="$(dirname "$0")/curl-wrapper.sh"
-
-"$curl_wrapper" -6 'https://seirdy.one/ip' || ipv6_success=0
+curl_cmd -6 'seirdy.one/ip' || ipv6_success=0
 echo
-"$curl_wrapper" -4 'https://seirdy.one/ip' || ipv4_success=0
+curl_cmd -4 'seirdy.one/ip' || ipv4_success=0
 echo
 
 if [ "$ipv6_success" = 0 ] && [ "$ipv4_success" = 0 ]; then
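One portability note on the new alias: non-interactive POSIX shells such as dash expand aliases in scripts, but bash does not unless expand_aliases is set. A function is the shell-agnostic spelling of the same thing; a sketch:

# behaves like the curl_cmd alias, but works in every POSIX-ish shell:
curl_cmd() {
	curl --proto '=https' --proto-default https --http2 -siSL --tlsv1.3 --cert-status "$@"
}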
@@ -1,3 +0,0 @@
-#!/bin/sh
-# a curl wrapper for seirdy.one
-curl --proto "=https" --tlsv1.3 --cert-status -sS -m10 "$@"
@@ -2,50 +2,18 @@
 #
 # Script to fetch all approved webmentions from webmentiond as a big json response.
 # Uses POSIX and cURL in CI, also uses any pass/pash-compatible pwmngr otherwise
-# The response is cached for 90 minutes. If fetching fresh webmentions fails, fall back to the cached version for up to one day (1440 minutes). If cached webmentions are older than a day, error out.
+# The response is cached for 90 minutes.
 
 set -e -u
 
-dirname="$(dirname "$0")"
-curl_wrapper="$dirname/curl-wrapper.sh"
 auth_url='https://seirdy.one/webmentions/authenticate/access-key'
 webmentions_url='https://seirdy.one/webmentions/manage/mentions?limit=9999&status=approved'
-webmentions_file="$dirname/../data/webmentions.json"
+webmentions_file="$(realpath data/webmentions.json)"
 
-stale_after_minutes=90
-fallback_stale_after_minutes=1440
+# just a little curl wrapper I use on seirdy.one
+alias ccurl='curl --proto "=https" --proto-default https --tlsv1.3 --cert-status'
 
-skip_check=0
-times_run=0
-
-check_cached_webmentions() {
-	times_run="$((times_run + 1))"
-	if [ "$skip_check" = 0 ]; then
-		expires_in="$fallback_stale_after_minutes"
-		if [ $# -gt 0 ]; then
-			expires_in="$1"
-		fi
-		exit_status=1
-		if [ -f "$webmentions_file" ]; then
-			old_webmention_file="$(find "$webmentions_file" -mmin +"$expires_in")"
-			if [ "$old_webmention_file" = "" ]; then
-				echo 'Using cached webmentions'
-				skip_check=1
-				exit_status=0
-				return 0
-			fi
-			return 1
-		fi
-		if [ "$exit_status" = 1 ] && [ "$times_run" = 2 ]; then
-			echo "Webmentions are outdated. failed to fetch for over a day."
-			exit "$exit_status"
-		fi
-	fi
-}
-
-trap check_cached_webmentions EXIT
-
-# Grab my long-lived key (password). We will use this to authenticate.
+# use a long-lived key (password) to fetch a short-lived bearer token.
 key() {
 	set +u
 	if [ -n "$BUILD_SUBMITTER" ]; then
@@ -56,36 +24,23 @@ key() {
 	set -u
 }
 
-# Fetch a short-lived access token from my webmention receiver.
 token() {
 	key_response="$(key)"
-	"$curl_wrapper" -sX POST "$auth_url" -d "key=$key_response"
+	ccurl -sX POST "$auth_url" -d "key=$key_response"
 }
 
-# Verify that the webmentions file has downloaded succesfully by ensuring that it starts and ends with a given substring.
-# The total number of webmentions should be at least a 3-digit number.
-verify_webmentions() {
-	grep -E '^\{"items":\[\{"id":".*,"total":[0-9]{3}([0-9]*)?\}$' "$webmentions_file.tmp" >/dev/null
-}
-
-# use the token to fetch all webmentions.
+# use that token to fetch all webmentions
 fetch_webmentions() {
-	echo 'Fetching webmentions'
 	token_response="$(token)"
-	"$curl_wrapper" --compressed -H "Authorization: Bearer $token_response" "$webmentions_url" -o "$webmentions_file.tmp" || return 1
-	if verify_webmentions; then
-		mv "$webmentions_file.tmp" "$webmentions_file"
-	else
-		echo 'Error: webmentions failed to verify'
-		exit 1
-	fi
+	ccurl --compressed -H "Authorization: Bearer $token_response" "$webmentions_url" -o "$webmentions_file"
 }
 
 # fetch webmentions if we don't have a fresh copy already.
-if [ -f "$webmentions_file" ]; then
-	if ! check_cached_webmentions "$stale_after_minutes"; then
-		fetch_webmentions
-	fi
+if [ -f "$webmentions_file" ] \
+	&& [ "$(find "$webmentions_file" -mmin +90)" = "" ]; then
+	echo 'Using cached webmentions'
 else
+	echo 'Fetching webmentions'
 	fetch_webmentions
 fi
+
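The rewritten freshness check hinges on find -mmin +90 printing the path only when the file's mtime is over 90 minutes old, so empty output means the cache is still fresh. The same idiom in isolation:

f=data/webmentions.json
if [ -f "$f" ] && [ "$(find "$f" -mmin +90)" = "" ]; then
	echo 'cache is fresh: modified within the last 90 minutes'
else
	echo 'cache is missing or stale'
fi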
@@ -8,6 +8,7 @@
 
 set -e -u
 
+
 # the name of this program
 progname="$(basename "$0")"
 dirname="$(dirname "$0")"
@@ -57,15 +58,15 @@ done
 endless_orbit() {
 	printf 'Endless Orbit,'
 	curl -sSL --compressed https://linkyblog.neocities.org/onionring/onionring-variables.js \
 		| grep -C 1 https://seirdy.one/ \
 		| tr -d "'\n" | sed 's|https://seirdy.one/|https://linkyblog.neocities.org/webring.html|'
 	echo 'null'
 }
 
 if [ "$dry_run" = '1' ]; then
 	endless_orbit
 elif [ -f "$webrings_dest" ]; then
 	echo "webrings file already generated"
 else
 	endless_orbit | cat "$webrings_src" - >"$webrings_dest"
 fi
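endless_orbit depends on the webring members sitting one per line in onionring-variables.js, so grep -C 1 returns the neighbouring entry on each side of seirdy.one and tr splices the three lines together. A sketch with stand-in neighbours (the a.example and b.example URLs are illustrative):

printf '%s\n' "'https://a.example/'" "'https://seirdy.one/'" "'https://b.example/'" \
	| grep -C 1 https://seirdy.one/ \
	| tr -d "'\n"
# prints: https://a.example/https://seirdy.one/https://b.example/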
@@ -1,6 +1,6 @@
 #!/bin/sh
 set -e -u
-dirname="$(dirname "$0")"
+pwd="$(dirname "$0")"
 output_dir="$1"
 
 find_files_to_analyze() {
@@ -8,9 +8,12 @@ find_files_to_analyze() {
 		| grep -Ev '(bimi\.svg|search/index\.x?html)$'
 }
 
-# files_to_analyze="$(find_files_to_analyze)"
+files_to_analyze="$(find_files_to_analyze)"
 
 # we skip the BIMI icon (VNU can't handle SVG 1.2) and the search page (it has raw templates).
-find_files_to_analyze \
-	| xargs vnu --stdout --format json --also-check-svg \
-	| sh "$dirname/filter-vnu.sh"
+vnu \
+	--stdout \
+	--format json \
+	--also-check-svg \
+	$files_to_analyze \
+	| sh "$pwd/filter-vnu.sh"
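Leaving $files_to_analyze unquoted is what splits it back into one argument per file, so the rewrite assumes no generated path contains whitespace. A sketch of the splitting behavior:

files='a.html b.html c.svg'
# shellcheck disable=SC2086  # intentional word splitting
set -- $files
echo "$# arguments: $1 $2 $3"
# prints: 3 arguments: a.html b.html c.svg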
@@ -23,7 +23,7 @@ cleanup() {
 }
 trap cleanup EXIT
 
-run_tidy() {
+run_tidy () {
 	tidy -asxhtml -config linter-configs/tidy.conf 2>/dev/null || true
 }
 
@@ -36,13 +36,13 @@ sed 7d "$html_file" | xmllint --format --encode UTF-8 --noent - | tail -n +2 >"$
 # shellcheck disable=SC2016 # these are regex statements, not shell expressions
 #shellcheck source=/home/rkumar/Executables/ghq/git.sr.ht/~seirdy/seirdy.one/scripts/xhtmlize.sh
 sed \
 	-e '1,7d' \
 	-e 's|</span>( )?.span itemprop="familyName|</span> <span itemprop="familyName"|' \
 	-e 's|class="u-photo photo"[^<]*<|class="u-photo photo"/> <|' \
 	-E \
 	-e 's|([a-z])<data|\1 <data|' \
 	-e 's#</span>(<a[^>]*rel="(nofollow ugc|ugc nofollow)"([^>]*)?>liked</a>)#</span> \1#' \
 	-e 's#^[\t\s]*<(code|/pre)#<\1#' \
 	"$tmp_file" \
 	| awk '/^<\/code>/{printf "%s",$0;next}7'
 } >"$html_file"
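The closing awk one-liner joins any line that begins with </code> onto the line that follows it: the matching action prints without a newline and skips the default print, and the bare 7 is simply a truthy pattern meaning "print this line". For example:

printf '%s\n' 'one' '</code>two' 'three' \
	| awk '/^<\/code>/{printf "%s",$0;next}7'
# prints:
# one
# </code>twothree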