Mirror of https://git.sr.ht/~seirdy/seirdy.one, synced 2024-11-23 21:02:09 +00:00
Make shellcheck happy
commit 0dfcced14f (parent bbfa381368)
6 changed files with 13 additions and 10 deletions
@@ -52,7 +52,6 @@ while getopts "hj" flags; do
         ;;
     *)
         bad_option "${flags}" 'invalid option'
-        exit 1
         ;;
     esac
 done
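Each of the four getopts hunks in this commit removes the exit 1 that followed bad_option. That removal is safe only if bad_option terminates the script itself, which would make the caller's exit 1 unreachable (the sort of dead code shellcheck reports as SC2317). A sketch of such a helper, hypothetical since its definition isn't shown in the diff:

# Hypothetical sketch of bad_option: it prints a diagnostic and exits
# on its own, so an "exit 1" at the call site could never run.
bad_option() {
    flag="$1"    # the offending getopts flag
    reason="$2"  # e.g. 'invalid option'
    echo "-${flag}: ${reason}" >&2
    exit 1
}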
@@ -61,9 +60,12 @@ base_url="${1-http://localhost:8089}"
 
 # HTML validation is already parallelized, so run that single-threaded.
 make -j1 HUGO_FLAGS=-DF HUGO_BASEURL="$base_url" clean hugo xhtmlize copy-to-xhtml validate-html
-make -j "$jobs" -f Makefile.online HUGO_BASEURL="$base_url" all-extra URLS="$(curl -sSL "$base_url/sitemap.xml" | htmlq loc -t | rg -v '/search/$' | tr '\n' ' ')" &
+sitemap_links="$(curl -sSL --compressed "$base_url/sitemap.xml" | htmlq loc -t)"
+urls_offline="$(echo "$sitemap_links" | rg -v '/search/$' | tr '\n' ' ')"
+make -j "$jobs" -f Makefile.online HUGO_BASEURL="$base_url" all-extra URLS="$urls_offline" &
 make deploy-staging RSYNCFLAGS_EXTRA=-q
-make -f Makefile.online hint-online URLS="$(curl -sSL --compressed https://staging.seirdy.one/sitemap.xml | htmlq loc -t | rg -v '/(?:search|wcag-is-a-starting-point)/$' | sort | tr '\n' ' ')"
+urls_online="$(echo "$sitemap_links" | rg -v '/(?:search|wcag-is-a-starting-point)/$' | sort | tr '\n' ' ')"
+make -f Makefile.online hint-online URLS="$urls_online"
 wait
 # TODO: run lighthouse on every page in the sitemap.
 
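This rewrite does two things: it builds both URL lists from a single sitemap fetch instead of two, and it moves the curl | htmlq pipeline out of make's argument list, where its exit status was discarded (the pattern shellcheck's optional SC2312 check flags: invoke the command separately to avoid masking its return value). A runnable sketch of the masking behavior, with false standing in for a failing fetch pipeline:

set -e
# Masked: the substitution's exit status is discarded, so the command still
# runs, only with an empty URL list ("false" stands in for a failing fetch).
echo "URLS=$(false)"

# Surfaced: a plain assignment that fails aborts the script under set -e,
# so a broken sitemap fetch can no longer slip through as an empty list.
urls="$(false)"
echo "URLS=$urls"  # never reached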
@@ -43,7 +43,6 @@ while getopts "hr" flags; do
         ;;
     *)
         bad_option "${flags}" 'invalid option'
-        exit 1
         ;;
     esac
 done
@@ -33,7 +33,6 @@ while getopts "ho" flags; do
         ;;
     *)
         bad_option "${flags}" 'invalid option'
-        exit 1
         ;;
     esac
 done
@@ -25,12 +25,14 @@ key() {
 }
 
 token() {
-    ccurl -sX POST "$auth_url" -d "key=$(key)"
+    key_response="$(key)"
+    ccurl -sX POST "$auth_url" -d "key=$key_response"
 }
 
 # use that token to fetch all webmentions
 fetch_webmentions() {
-    ccurl --compressed -H "Authorization: Bearer $(token)" "$webmentions_url" -o "$webmentions_file"
+    token_response="$(token)"
+    ccurl --compressed -H "Authorization: Bearer $token_response" "$webmentions_url" -o "$webmentions_file"
 }
 
 # fetch webmentions if we don't have a fresh copy already.
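The webmention changes apply the same fix: with $(key) or $(token) inline in ccurl's argument list, a failed request is silently masked and the next request goes out with an empty key or bearer token. Capturing the response into a variable first turns that into a hard stop (assuming this script runs under set -e like the vnu wrapper below). A minimal repro, with a hypothetical stand-in for the failing call:

set -e
fetch_token() { return 1; }  # hypothetical stand-in for a failing token request

# Masked: the substitution's failure is discarded; the header is built
# anyway, with nothing after "Bearer ".
printf 'Authorization: Bearer %s\n' "$(fetch_token)"

# Fatal: the failing plain assignment stops the script here instead.
token_response="$(fetch_token)"
printf 'Authorization: Bearer %s\n' "$token_response"  # never reached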
@@ -51,7 +51,6 @@ while getopts "hd" flags; do
         ;;
     *)
         bad_option "$flags" 'invalid option'
-        exit 1
         ;;
     esac
 done
@@ -3,15 +3,17 @@ set -e -u
 pwd="$(dirname "$0")"
 output_dir="$1"
 
-files_to_analyze() {
+find_files_to_analyze() {
     find "$output_dir" -type f -name '*.xhtml' -o -name '*.svg' \
         | grep -Ev '(bimi\.svg|search/index\.x?html)$'
 }
+
+files_to_analyze="$(find_files_to_analyze)"
 
 # we skip the BIMI icon (VNU can't handle SVG 1.2) and the search page (it has raw templates).
 vnu \
     --stdout \
     --format json \
     --also-check-svg \
-    $(files_to_analyze) \
+    $files_to_analyze \
     | sh "$pwd/filter-vnu.sh"
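Here the file list is computed once up front: files_to_analyze is now a variable holding the find output, and the function gets a distinct name (find_files_to_analyze) so the two aren't conflated and the find invocation is no longer buried inside vnu's argument list. The expansion stays unquoted, presumably deliberately: the newline-separated list must word-split into one argument per file, which holds as long as no path contains whitespace. A sketch of that splitting behavior, with hypothetical file names:

count_args() { echo "$#"; }  # helper just to show how many arguments arrive

files="a.xhtml
b.svg"                # hypothetical newline-separated file list
count_args $files     # prints 2: unquoted expansion splits per file
count_args "$files"   # prints 1: quoting passes the list as one argument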