Mirror of https://git.sr.ht/~seirdy/seirdy.one, synced 2024-11-23 21:02:09 +00:00

Compare commits

d8eae6f0572630d39c3c4f03cbb8c0896d451a6f..ba7453ca23f8b14223d5b70d763bc72cf487f3ae

No commits in common. "d8eae6f0572630d39c3c4f03cbb8c0896d451a6f" and "ba7453ca23f8b14223d5b70d763bc72cf487f3ae" have entirely different histories.

11 changed files with 56 additions and 106 deletions

View file

@@ -24,7 +24,7 @@ LIGHTHOUSE_ARGS += --budget-path linter-configs/budget.json --output html --outp
 hint-online:
 	@echo "Running webhint"
-	hint --config linter-configs/hintrc $(URLS)
+	@hint --config linter-configs/hintrc $(URLS)
 lighthouse:
 	mkdir -p $(CHROME_PROFILE)
 	CHROME_PATH=$(CHROME_PATH) CHROME_PROFILE=$(CHROME_PROFILE) JS_FLAGS='' lighthouse $(URLS) $(LIGHTHOUSE_ARGS)
@@ -40,9 +40,8 @@ axe-ff:
 	@echo "Running axe with Firefox"
 	@scripts/bin/axe-ff $(OUTPUT_DIR) $(URLS)
 .PHONY: axe axe-ff
-.PHONY: all-extra
-all-extra: axe-ff equal-access lint-local
+all-extra: axe-ff validate-json equal-access htmlproofer lint-css validate-feeds
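
The only substantive change in the first Makefile hunk is the "@" prefix on the hint recipe: make echoes each recipe line before running it unless the line starts with "@". A minimal sketch of the difference, with hypothetical target names (recipe lines begin with a tab):

	quiet:
		@echo "only the command's output appears"
	loud:
		echo "make prints this line first, then runs it"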

View file

@@ -7,7 +7,6 @@
 > 0.1%
 last 5 versions
 Firefox >= 57
-iOS >= 9.1
 Firefox ESR
 not dead
-IE 11
+IE 9-11
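
Browserslist queries like these can be resolved locally: the browserslist CLI prints the exact browser set a query matches, which makes it easy to see what dropping "iOS >= 9.1" or widening "IE 11" to "IE 9-11" changes. A quick check (output depends on the installed caniuse-lite data):

	npx browserslist 'IE 9-11'
	# ie 11
	# ie 10
	# ie 9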

View file

@@ -3,8 +3,7 @@
     "name": "puppeteer",
     "options": {
       "browser": "Chromium",
-      "headless": true,
-      "puppeteerOptions": {"args": ["--no-sandbox"]}
+      "headless": true
     }
   },
   "extends": [
"extends": [
@@ -44,6 +43,8 @@
       "error",
       {
         "ignore": [
+          "time",
+          "picture",
           "a[download]",
           "a[referrerpolicy]",
           "a[translate]",
@@ -51,6 +52,7 @@
           "samp[translate]",
           "span[translate]",
           "img[decoding]",
+          "meta[name=color-scheme]",
           "meta[name=theme-color]"
         ]
       }
@@ -62,20 +64,12 @@
         "maxAgeTarget": 10800
       }
     ],
-    "http-compression": ["error", {
-      "resource": {
-        "zopfli": false
-      },
-      "html": {
-        "zopfli": false
-      }
-    }],
+    "http-compression": "off",
     "https-only": "error",
     "sri": "error",
     "no-http-redirects": "error",
-    "detect-css-reflows/layout": "hint",
-    "detect-css-reflows/paint": "hint",
-    "detect-css-reflows/composite": "hint",
+    "detect-css-reflows": "off",
+    "detect-css-reflows/layout": "off",
+    "detect-css-reflows/paint": "off",
     "manifest-exists": "error",
     "manifest-file-extension": "error",
     "manifest-is-valid": "error",
@@ -89,7 +83,10 @@
     "no-vulnerable-javascript-libraries": "off",
     "html-checker": "off",
     "image-optimization-cloudinary": "off",
-    "no-disallowed-headers": "error",
+    "no-disallowed-headers": [ "error",
+      {
+        "ignore": ["x-frame-options"]
+      }],
     "meta-charset-utf-8": "error",
     "disown-opener": "error",
     "content-type": ["error", {

View file

@@ -1,9 +1,7 @@
 #!/bin/sh
 set -e -u
 # Runs axe on every page of my sitemap
-# first arg is output directory, after that comes urls from a sitemap. We replace the urls with equivalent local file paths.
-PATH="$(dirname "$0"):$PATH"
+PATH="scripts/bin:$PATH"
 output_dir="$1"
 shift
 urls="$(echo "$*" | tr ' ' '\n' | sd '^https?://[^/]*' "file://$PWD/$output_dir" | sd '/$' '/index.html' | tr '\n' ' ')"
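
The sd pipeline is what lets axe run against the local build: it swaps each sitemap URL's scheme and host for a file:// path under the output directory, then points directory URLs at their index.html. Its effect on one URL, assuming an output directory named public (the resulting path depends on $PWD):

	echo 'https://seirdy.one/posts/' \
		| sd '^https?://[^/]*' "file://$PWD/public" \
		| sd '/$' '/index.html'
	# file:///path/to/checkout/public/posts/index.html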

View file

@@ -39,6 +39,7 @@ done
 url="$1"
 find_rel_mention_count() {
 	curl -s "$url" | grep -cF 'rel="mentioned"'
 }
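
One subtlety in find_rel_mention_count: grep -c counts matching lines, not occurrences, so two rel="mentioned" links on a single line would count once. A contrived demonstration:

	printf '%s\n' 'one rel="mentioned" two rel="mentioned"' 'three rel="mentioned"' \
		| grep -cF 'rel="mentioned"'
	# prints 2, not 3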

View file

@@ -1,14 +1,14 @@
 #!/bin/sh
 set -e -u
+alias curl_cmd='curl --proto "=https" --proto-default https --http2 -siSL --tlsv1.3 --cert-status'
 ipv6_success=1
 ipv4_success=1
-curl_wrapper="$(dirname "$0")/curl-wrapper.sh"
-"$curl_wrapper" -6 'https://seirdy.one/ip' || ipv6_success=0
+curl_cmd -6 'seirdy.one/ip' || ipv6_success=0
 echo
-"$curl_wrapper" -4 'https://seirdy.one/ip' || ipv4_success=0
+curl_cmd -4 'seirdy.one/ip' || ipv4_success=0
 echo
 if [ "$ipv6_success" = 0 ] && [ "$ipv4_success" = 0 ]; then

View file

@@ -1,3 +0,0 @@
-#!/bin/sh
-# a curl wrapper for seirdy.one
-curl --proto "=https" --tlsv1.3 --cert-status -sS -m10 "$@"
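
For reference, each flag in the deleted wrapper narrows plain curl; an annotated copy, per curl's documented behavior:

	# --proto "=https"  permit only the HTTPS protocol
	# --tlsv1.3         require TLS 1.3 or newer
	# --cert-status     verify the certificate via a TLS OCSP status request
	# -sS               silent, but still print errors
	# -m10              abort after ten seconds
	curl --proto "=https" --tlsv1.3 --cert-status -sS -m10 "$@"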

View file

@@ -2,50 +2,18 @@
 #
 # Script to fetch all approved webmentions from webmentiond as a big json response.
 # Uses POSIX and cURL in CI, also uses any pass/pash-compatible pwmngr otherwise
-# The response is cached for 90 minutes. If fetching fresh webmentions fails, fall back to the cached version for up to one day (1440 minutes). If cached webmentions are older than a day, error out.
+# The response is cached for 90 minutes.
 set -e -u
-dirname="$(dirname "$0")"
-curl_wrapper="$dirname/curl-wrapper.sh"
 auth_url='https://seirdy.one/webmentions/authenticate/access-key'
 webmentions_url='https://seirdy.one/webmentions/manage/mentions?limit=9999&status=approved'
-webmentions_file="$dirname/../data/webmentions.json"
+webmentions_file="$(realpath data/webmentions.json)"
-stale_after_minutes=90
-fallback_stale_after_minutes=1440
+# just a little curl wrapper I use on seirdy.one
+alias ccurl='curl --proto "=https" --proto-default https --tlsv1.3 --cert-status'
-skip_check=0
-times_run=0
-check_cached_webmentions() {
-	times_run="$((times_run + 1))"
-	if [ "$skip_check" = 0 ]; then
-		expires_in="$fallback_stale_after_minutes"
-		if [ $# -gt 0 ]; then
-			expires_in="$1"
-		fi
-		exit_status=1
-		if [ -f "$webmentions_file" ]; then
-			old_webmention_file="$(find "$webmentions_file" -mmin +"$expires_in")"
-			if [ "$old_webmention_file" = "" ]; then
-				echo 'Using cached webmentions'
-				skip_check=1
-				exit_status=0
-				return 0
-			fi
-			return 1
-		fi
-		if [ "$exit_status" = 1 ] && [ "$times_run" = 2 ]; then
-			echo "Webmentions are outdated. failed to fetch for over a day."
-			exit "$exit_status"
-		fi
-	fi
-}
-trap check_cached_webmentions EXIT
-# Grab my long-lived key (password). We will use this to authenticate.
+# use a long-lived key (password) to fetch a short-lived bearer token.
 key() {
 	set +u
 	if [ -n "$BUILD_SUBMITTER" ]; then
@@ -56,36 +24,23 @@ key() {
 	set -u
 }
-# Fetch a short-lived access token from my webmention receiver.
 token() {
 	key_response="$(key)"
-	"$curl_wrapper" -sX POST "$auth_url" -d "key=$key_response"
+	ccurl -sX POST "$auth_url" -d "key=$key_response"
 }
-# Verify that the webmentions file has downloaded successfully by ensuring that it starts and ends with a given substring.
-# The total number of webmentions should be at least a 3-digit number.
-verify_webmentions() {
-	grep -E '^\{"items":\[\{"id":".*,"total":[0-9]{3}([0-9]*)?\}$' "$webmentions_file.tmp" >/dev/null
-}
-# use the token to fetch all webmentions.
+# use that token to fetch all webmentions
 fetch_webmentions() {
-	echo 'Fetching webmentions'
 	token_response="$(token)"
-	"$curl_wrapper" --compressed -H "Authorization: Bearer $token_response" "$webmentions_url" -o "$webmentions_file.tmp" || return 1
-	if verify_webmentions; then
-		mv "$webmentions_file.tmp" "$webmentions_file"
-	else
-		echo 'Error: webmentions failed to verify'
-		exit 1
-	fi
+	ccurl --compressed -H "Authorization: Bearer $token_response" "$webmentions_url" -o "$webmentions_file"
 }
 # fetch webmentions if we don't have a fresh copy already.
-if [ -f "$webmentions_file" ]; then
-	if ! check_cached_webmentions "$stale_after_minutes"; then
-		fetch_webmentions
-	fi
+if [ -f "$webmentions_file" ] \
+	&& [ "$(find "$webmentions_file" -mmin +90)" = "" ]; then
+	echo 'Using cached webmentions'
 else
+	echo 'Fetching webmentions'
 	fetch_webmentions
 fi
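
Both the old helper and the new inline check decide freshness the same way: find with -mmin +90 prints the path only when the file was modified more than 90 minutes ago, so an empty result means the cache is still fresh. The idiom in isolation, assuming the file exists:

	if [ "$(find data/webmentions.json -mmin +90)" = "" ]; then
		echo 'fresh: modified within the last 90 minutes'
	else
		echo 'stale: fetch a new copy'
	fi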

View file

@@ -8,6 +8,7 @@
 set -e -u
 # the name of this program
 progname="$(basename "$0")"
+dirname="$(dirname "$0")"
@@ -57,15 +58,15 @@ done
 endless_orbit() {
 	printf 'Endless Orbit,'
 	curl -sSL --compressed https://linkyblog.neocities.org/onionring/onionring-variables.js \
-		| grep -C 1 https://seirdy.one/ \
-		| tr -d "'\n" | sed 's|https://seirdy.one/|https://linkyblog.neocities.org/webring.html|'
+		| grep -C 1 https://seirdy.one/ \
+		| tr -d "'\n" | sed 's|https://seirdy.one/|https://linkyblog.neocities.org/webring.html|'
 	echo 'null'
 }
 if [ "$dry_run" = '1' ]; then
 	endless_orbit
 elif [ -f "$webrings_dest" ]; then
-	echo "webrings file already generated"
+	echo "webrings file already generated"
 else
 	endless_orbit | cat "$webrings_src" - >"$webrings_dest"
 fi
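
grep -C 1 does the real work in endless_orbit: one line of context on each side of this site's entry captures the previous and next webring members. A sketch against a hypothetical three-member variables file:

	printf '%s\n' "'https://example.com/'," "'https://seirdy.one/'," "'https://example.org/'," \
		| grep -C 1 https://seirdy.one/
	# 'https://example.com/',
	# 'https://seirdy.one/',
	# 'https://example.org/',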

View file

@@ -1,6 +1,6 @@
 #!/bin/sh
 set -e -u
-dirname="$(dirname "$0")"
+pwd="$(dirname "$0")"
 output_dir="$1"
 find_files_to_analyze() {
@@ -8,9 +8,12 @@ find_files_to_analyze() {
 	| grep -Ev '(bimi\.svg|search/index\.x?html)$'
 }
-# files_to_analyze="$(find_files_to_analyze)"
+files_to_analyze="$(find_files_to_analyze)"
 # we skip the BIMI icon (VNU can't handle SVG 1.2) and the search page (it has raw templates).
-find_files_to_analyze \
-	| xargs vnu --stdout --format json --also-check-svg \
-	| sh "$dirname/filter-vnu.sh"
+vnu \
+	--stdout \
+	--format json \
+	--also-check-svg \
+	$files_to_analyze \
+	| sh "$pwd/filter-vnu.sh"

View file

@@ -23,7 +23,7 @@ cleanup() {
 }
 trap cleanup EXIT
-run_tidy() {
+run_tidy () {
 	tidy -asxhtml -config linter-configs/tidy.conf 2>/dev/null || true
 }
@@ -36,13 +36,13 @@ sed 7d "$html_file" | xmllint --format --encode UTF-8 --noent - | tail -n +2 >"$
 # shellcheck disable=SC2016 # these are regex statements, not shell expressions
 #shellcheck source=/home/rkumar/Executables/ghq/git.sr.ht/~seirdy/seirdy.one/scripts/xhtmlize.sh
 sed \
-	-e '1,7d' \
-	-e 's|</span>(&nbsp;)?.span itemprop="familyName|</span>&#160;<span itemprop="familyName"|' \
-	-e 's|class="u-photo photo"[^<]*<|class="u-photo photo"/> <|' \
-	-E \
-	-e 's|([a-z])<data|\1 <data|' \
-	-e 's#</span>(<a[^>]*rel="(nofollow ugc|ugc nofollow)"([^>]*)?>liked</a>)#</span> \1#' \
-	-e 's#^[\t\s]*<(code|/pre)#<\1#' \
-	"$tmp_file" \
+		-e '1,7d' \
+		-e 's|</span>(&nbsp;)?.span itemprop="familyName|</span>&#160;<span itemprop="familyName"|' \
+		-e 's|class="u-photo photo"[^<]*<|class="u-photo photo"/> <|' \
+		-E \
+		-e 's|([a-z])<data|\1 <data|' \
+		-e 's#</span>(<a[^>]*rel="(nofollow ugc|ugc nofollow)"([^>]*)?>liked</a>)#</span> \1#' \
+		-e 's#^[\t\s]*<(code|/pre)#<\1#' \
+		"$tmp_file" \
 	| awk '/^<\/code>/{printf "%s",$0;next}7'
} >"$html_file"