mirror of https://git.sr.ht/~seirdy/seirdy.one synced 2024-11-27 14:12:09 +00:00

Compare commits


12 commits

Author SHA1 Message Date
Rohan Kumar
dcb19779ce
Don't run html-proofer on envs deploy 2023-11-15 02:31:45 -08:00
Rohan Kumar
0dfcced14f
Make shellcheck happy 2023-11-15 02:31:06 -08:00
Rohan Kumar
bbfa381368
Add HTML-Tidy's generator tag manually
It auto-inserts it to the start of <head> but I want it at the end.
2023-11-15 02:26:52 -08:00
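The mechanism, condensed from the tidy-config and xhtmlize script hunks later in this changeset (`page.html` is a placeholder file name, not from the repo):

    # tidy's own stamp is disabled via `tidy-mark: no` in the tidy config.
    # Build the generator tag from `tidy -version`...
    tidy_version="$(tidy -version)"
    export TIDY="<meta content=\"$tidy_version\" name=\"generator\" />"
    # ...then splice it in right after the existing generator <meta>, near the end of <head>:
    sed "s|name=\"generator\" />|name=\"generator\" />\n${TIDY:?}|" page.html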
Rohan Kumar
6dbf6fa91c
Update build docs in README, remove obsolete bits
The site design standards page on my website obsoletes some README docs.
Remove the obsolete bits and update the build docs to the point at which
anybody should be able to rebuild my site.
2023-11-14 23:25:00 -08:00
Rohan Kumar
683eca2277
Smarten punctuation 2023-11-14 23:24:43 -08:00
Rohan Kumar
30ce292b57
Continue transitioning from real name to handle
- Switch meta author and open-graph author to Seirdy
- Update Gemini copyright footer name (and year)

Also sneakily smarten punctuation in the site title, don't tell anyone
2023-11-14 23:22:10 -08:00
Rohan Kumar
2418560975
use curl -o instead of shell redirection 2023-11-14 23:01:46 -08:00
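In shell terms the change looks like this (a sketch; `$url` and `$dest` stand in for the script's actual variables):

    # before: curl prints to stdout and the caller redirects
    curl -sSL "$url" >"$dest"
    # after: curl writes the destination file itself
    curl -sSL -o "$dest" "$url"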
Rohan Kumar
dad8c4e3e4
Add html-proofer to lint-local 2023-11-14 22:54:33 -08:00
Rohan Kumar
82e678fe7e
typo oops 2023-11-14 22:54:21 -08:00
Rohan Kumar
0912a89ca9
Remove usage of htmlq in xhtmlize.sh
Now I no longer need to include htmlq in my binaries.tar.gz
2023-11-14 22:00:15 -08:00
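The replacement technique, per the xhtmlize.sh hunk near the end of this changeset: the generated markup keeps the inline stylesheet on line 7 of each page, so a line-addressed sed extraction does the job without an HTML parser.

    # before: parse the HTML to pull out the inline stylesheet
    printf '<style>%s</style>\n' "$(htmlq -t style <"$output_dir/index.html")" >"$output_dir/tmp.css"
    # after: grab line 7 directly and strip its indentation
    sed -e '7q;d' "$output_dir/index.html" | tr -d '\t' >"$output_dir/tmp.css"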
Rohan Kumar
649f827f66
Refactor post-processing to use sed instead of sd
html-tidy takes care of some post-processing, rendering other
substitutions obsolete. Remove the obsolete regex substitutions.

Now that we did that, the remaining substitutions can be done with
vanilla POSIX or POSIX-Extended regular expressions. Replace sd with
sed, and group the substitutions together into one invocation instead of
multiple invocations piped together. This change speeds up
post-processing to be almost as fast as the initial build step.
2023-11-14 21:39:53 -08:00
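A minimal illustration of the pattern with made-up substitutions (not the site's actual ones): several piped sd processes collapse into a single sed invocation that applies every expression in one pass over the file.

    # before: one process, and one pass over the data, per substitution
    sd 'foo' 'bar' <in.html | sd 'baz' 'qux' | sd -s '&nbsp;' '&#160;' >out.html
    # after: one sed run with grouped expressions
    sed -e 's|foo|bar|g' -e 's|baz|qux|g' -e 's|&nbsp;|\&#160;|g' <in.html >out.html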
Rohan Kumar
4ca800f1c3
syndicate 2023-11-14 21:39:43 -08:00
16 changed files with 91 additions and 182 deletions


@@ -88,7 +88,7 @@ html-validate:
 	pnpm -s dlx html-validate --ext=html -c linter-configs/htmlvalidate.json $(OUTPUT_DIR)
 .PHONY: lint-local
-lint-local: html-validate validate-html validate-json lint-css
+lint-local: html-validate validate-html validate-json lint-css htmlproofer
 # dev server, includes future and draft posts
 .PHONY: serve
@@ -172,5 +172,5 @@ lint-and-deploy-staging:
 .PHONY: deploy-envs
 deploy-envs:
 	@$(MAKE) -j1 HUGO_FLAGS='' USER=seirdy@envs.net WWW_ROOT=/home/seirdy/public_html GEMINI_ROOT=/home/seirdy/public_gemini HUGO_BASEURL='https://envs.net/~seirdy/' OUTPUT_DIR=public_envs .prepare-deploy copy-to-xhtml
-	@$(MAKE) HUGO_FLAGS='' USER=seirdy@envs.net WWW_ROOT=/home/seirdy/public_html GEMINI_ROOT=/home/seirdy/public_gemini HUGO_BASEURL='https://envs.net/~seirdy/' OUTPUT_DIR=public_envs lint-local
+	@$(MAKE) HUGO_FLAGS='' USER=seirdy@envs.net WWW_ROOT=/home/seirdy/public_html GEMINI_ROOT=/home/seirdy/public_gemini HUGO_BASEURL='https://envs.net/~seirdy/' OUTPUT_DIR=public_envs html-validate validate-html validate-json lint-css
 	@$(MAKE) SSHFLAGS='-o KexAlgorithms=curve25519-sha256@libssh.org' HUGO_FLAGS='' USER=seirdy@envs.net WWW_ROOT=/home/seirdy/public_html GEMINI_ROOT=/home/seirdy/public_gemini HUGO_BASEURL='https://envs.net/~seirdy/' OUTPUT_DIR=public_envs deploy

README.md

@@ -1,5 +1,4 @@
-seirdy.one
-==========
+# seirdy.one
 
 [![sourcehut](https://img.shields.io/badge/repository-sourcehut-lightgrey.svg?logo=data:image/svg+xml;base64,PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHZlcnNpb249IjEuMSINCiAgICB3aWR0aD0iMTI4IiBoZWlnaHQ9IjEyOCI+DQogIDxkZWZzPg0KICAgIDxmaWx0ZXIgaWQ9InNoYWRvdyIgeD0iLTEwJSIgeT0iLTEwJSIgd2lkdGg9IjEyNSUiIGhlaWdodD0iMTI1JSI+DQogICAgICA8ZmVEcm9wU2hhZG93IGR4PSIwIiBkeT0iMCIgc3RkRGV2aWF0aW9uPSIxLjUiDQogICAgICAgIGZsb29kLWNvbG9yPSJibGFjayIgLz4NCiAgICA8L2ZpbHRlcj4NCiAgICA8ZmlsdGVyIGlkPSJ0ZXh0LXNoYWRvdyIgeD0iLTEwJSIgeT0iLTEwJSIgd2lkdGg9IjEyNSUiIGhlaWdodD0iMTI1JSI+DQogICAgICA8ZmVEcm9wU2hhZG93IGR4PSIwIiBkeT0iMCIgc3RkRGV2aWF0aW9uPSIxLjUiDQogICAgICAgIGZsb29kLWNvbG9yPSIjQUFBIiAvPg0KICAgIDwvZmlsdGVyPg0KICA8L2RlZnM+DQogIDxjaXJjbGUgY3g9IjUwJSIgY3k9IjUwJSIgcj0iMzglIiBzdHJva2U9IndoaXRlIiBzdHJva2Utd2lkdGg9IjQlIg0KICAgIGZpbGw9Im5vbmUiIGZpbHRlcj0idXJsKCNzaGFkb3cpIiAvPg0KICA8Y2lyY2xlIGN4PSI1MCUiIGN5PSI1MCUiIHI9IjM4JSIgc3Ryb2tlPSJ3aGl0ZSIgc3Ryb2tlLXdpZHRoPSI0JSINCiAgICBmaWxsPSJub25lIiBmaWx0ZXI9InVybCgjc2hhZG93KSIgLz4NCjwvc3ZnPg0KCg==)](https://sr.ht/~seirdy/seirdy.one) [![GitLab mirror](https://img.shields.io/badge/mirror-GitLab-orange.svg?logo=gitlab)](https://gitlab.com/Seirdy/seirdy.one) [![GitHub mirror](https://img.shields.io/badge/mirror-GitHub-black.svg?logo=github)](https://github.com/Seirdy/seirdy.one) [![Codeberg mirror](https://img.shields.io/badge/mirror-Codeberg-blue.svg?logo=codeberg)](https://codeberg.org/Seirdy/seirdy.one)
@@ -9,125 +8,62 @@ Code for my personal website, [seirdy.one](https://seirdy.one). Built with Hugo.
 Also builds my Gemini capsule: <gemini://seirdy.one/>.
-Dependencies
-------------
+I document [my site design standards](https://seirdy.one/meta/site-design/) on my website.
-To build:
+## Dependencies
-- Hugo 0.93 or later
+### Build-time dependencies
+- Hugo. I usually use the most recent version of Hugo at the time of publishing, but it _should_ work with any version of Hugo v0.116.0 or later (v0.116.0 had an improvement to `where` that I might use).
 - bmake or GNU Make. OpenBSD make (omake) should work too, but I haven't tested it.
 - Git (Hugo uses Git info for features like date last updated)
-- [htmlq](https://github.com/mgdm/htmlq), to parse HTML when fetching some webring links and for some post-processing.
+- curl, for fetching some webring code and all my webmentions. **this requires authentication.** When running locally, it invokes `pash`, my password manager; when running in CI, it reads a file for a secret. You may have to modify `scripts/get-webmentions.sh` to avoid this.
-- POSIX utils: `grep`, `find`, POSIX-compliant `/bin/sh`, etc.
+- POSIX utilities: `grep`, `find`, `sed`, POSIX-compliant `/bin/sh`, etc. Tested to work with Busybox and GNU Coreutils.
-Before deploying, I use some tools to process the output.
+Before deploying, I use some tools for post-processing:
 - `xmllint`, part of libxml2, to format the generated polygot XHTML5 markup.
-- [sd](https://github.com/chmln/sd) (for advanced multi-line regex operations, much of which exist to fix `xmllint`'s output)
+- a [patched version of html-tidy](https://git.sr.ht/~seirdy/tidy-html5)
-- a patched version of html-tidy
+- More POSIX utilities.
 I also apply static compression at max levels, using the following tools:
-- [Efficient Compression Tool](https://github.com/fhanau/Efficient-Compression-Tool) It's like zopfli but more efficient and faster.
+- [Efficient Compression Tool](https://github.com/fhanau/Efficient-Compression-Tool) It's like zopfli but more efficient and faster. If you don't have it installed, it should be trivial to edit `scripts/compress.sh` to replace `ect` with `gzip` or `zopfli`.
 - Brotli
-To deploy:
+I package all build-time dependencies _except_ curl as statically-linked binaries in a tarball, available at <https://seirdy.one/pb/binaries.tar.gz>.
-- rsync, with SSH and zstd support
+### Other dependencies
+To deploy, I use rsync with SSH and zstd support.
+Further tasks also use additional command-line utilities such as `sd`, `htmlq`, and a version of `xargs` that supports the `-P` flag (nearly all versions of `xargs` do, but it's not POSIX). I run all npm packages using `pnpm -s dlx` (similar to `npx`).
 To lint:
-- stylelint, invoked using pnpm.
+- Stylelint
-- [lychee](https://github.com/lycheeverse/lychee), to check broken links.
+- [html-validate](https://html-validate.org/)
-- A very recent build of the w3c's [Nu HTML checker](https://github.com/validator/validator) to validate the HTML and XHTML.
+- A very recent build of the W3C's [Nu HTML checker](https://github.com/validator/validator) to validate the HTML and XHTML, available on your `$PATH` as `vnu`. I have a very simple shell-script wrapper for this that invokes `java -jar`.
-- [jq](https://stedolan.github.io/jq/), to filter false-positives from the Nu HTML checker.
+- [jq](https://stedolan.github.io/jq/), to filter false-positives from the Nu HTML checker and to verify that JSON files parse.
+- [HTMLProofer](https://github.com/gjtorikian/html-proofer), version 5 or later. Requires Ruby.
-Build instructions
-------------------
+More in-depth local tests:
+- Axe-Core, using the CLI and a headless browser (Firefox or Chromium).
+- IBM Equal Access Checker, using the CLI and a headless Chromium. Runs on a patched version of the site with a modified stylesheet due to a bug (reported upstream).
+Remote tests:
+- Lighthouse
+- WebHint (might not pass; only informative)
+- Feed validator (requires Python)
+## Build instructions
 - To just build the HTML: `make hugo`
 - To build the polygot formatted HTML and XHTML: `make hugo xhtmlize`
 - To lint and validate: `make hugo xhtmlize lint-local`
-- To build everything and compress: `make hugo xhtmlize compress`
+- To build everything and compress: `make hugo xhtmlize compress copy-to-xhtml`
-- To deploy the clearnet site and corresponding Tor hidden service: `make deploy-prod deploy-onion`
+- To deploy the clearnet site and corresponding Tor hidden service: `make deploy-prod deploy-onion`.
-`make` can parallelize only a little, since many jobs depend on previous jobs.
+`lint-local` and deployment tasks support limited parallelization with `-j`.
-Compatibility
--------------
-I made the site as inclusive as possible. Tested using multiple screen readers (Orca, TalkBack, Apple VoiceOver, Windows Narrator, NVDA), and I regularly test with the following browsers/engines. Testing in a browser does not imply any sort of endorsement; I just want to meet people where they're at and I want my site to be as robust as possible.
-For all the listed options, I test "reading mode" whenever it's available. Most of my testing happens on Linux since that's my main OS, but I sometimes test on a Windows machine.
-The main compatibility issue is a lack of support for `<details>`; the only non-mainstream engine to support it is Servo. The site is still perfectly usable without support for `<details>`; users will just be annoyed by pre-expanded toggle buttons that don't do anything.
-### Desktop
-Mainstream engines:
-- Gecko: Nightly, Stable, ESR, and sometimes Pale Moon
-- the Tor Browser
-- Blink: latest Chromium snapshot, stable, and QtWebEngine
-- WebKit, via Webkit2GTK3
-Non-mainstream engines:
-- NetSurf
-- [The SerenityOS Browser](https://github.com/SerenityOS/serenity/tree/master/Userland/Libraries/LibWeb) (does not yet support ECDSA-based certs, so I test on my Tildeverse mirror). Known issue: SVG avatar doesn't render unless I view it in a new tab.
-- Very old WebKit via rekonq (Qt4 QtWebKit).
-- KHTML (KF5), via Konqueror.
-- Servo
-- Tkhtml, via Hv3 (no TLS 1.2, so I use a terminating proxy or localhost version)
-Tested on a provisional basis, when I have access to a Windows machine:
-- [Winternight Classic](https://github.com/ClassicNick/Crescent-Vine).
-- IE 11.
-- Even older WebKit, via Safari 5.1.7. Requires a TLS terminating proxy.
-- Ancient Gecko, via NetScape Navigator. Requires a TLS terminating proxy.
-Desktop screen readers tested:
-- Orca
-- NVDA
-- Windows Narrator
-- TODO: borrow someone's mac and test macOS VoiceOver.
-### Mobile
-- Android: Blink, Gecko, Tor Browser
-- iOS WebKit: latest stable version, iOS 12, iOS 10 on an iPhone 5. Also tested Reader Mode.
-- TODO: try a KaiOS device and Samsung Internet's dark mode.
-The site should work well even on viewports that are under 240 CSS pixels wide.
-Mobile screen readers:
-- TalkBack
-- VoiceOver
-- TODO: test KaiOS Readout
-### Smart watches
-- Borrowed an Apple Watch to try the embedded browser.
-- TODO: test on a Tizen or Wear OS device's browser (Samsung Internet is a popular choice)
-## Accessibility
-To my knowledge, this site meets all applicable WCAG 2.2 AA requirements.
-This site meets all applicable WCAG 2.2 AAA requirements, with the following exceptions:
-- SC 1.4.8 Visual Presentation: long article body text for articles should have an average character count per line below 80 characters. Some lines may exceed this limit. Text outside of article bodies has a longer line width.
-- SC 2.4.9 Link Purpose (Link Only): I mostly follow this guideline, but there may be some exceptions. Link purpose in context is always clear, though.
-- SC 3.1.5 Reading Level: the required reading ability often exceeds the lower secondary education level
-- SC 3.1.6 Pronunciation: I do not yet provide pronunciation information.
-I have only tested WCAG compliance in mainstream browser engines (Blink, Gecko, WebKit).
-I also go further than WCAG in many aspects.
-- Rather than follow SC 2.5.5's requirement to achieve a minimum tap target size of 44 by 44 pixels, I follow Google's more strict guidelines. These guidelines mandate that targets are at least 48-by-48 pixels, with no overlap against any other targets in a 56-by-56 pixel range.
-- I ensure at least one such 56-by-56 pixel non-interactive region exists on the page, for users with hand tremors or or anyone who wants to tap the screen without clicking something.
-- I only set custom colors in response to the `prefers-color-scheme: dark` media query. These custom colors pass APCA contrast ratios, all being close to the ideal lightness contrast of 90. They are also autism- and overstimulation-friendly colors: yellow links are significantly de-saturated to reduce harshness.
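Tying the updated build docs above together, a typical local run looks roughly like this (a sketch using only the make targets documented in the README; the `-j` value is arbitrary):

    make hugo xhtmlize                          # build the HTML, then the polyglot XHTML-compatible markup
    make -j4 lint-local                         # linters; lint-local supports limited -j parallelism
    make hugo xhtmlize compress copy-to-xhtml   # full build with max-level static compression
    make deploy-prod deploy-onion               # deploy the clearnet site and the Tor hidden service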


@@ -1,6 +1,6 @@
 baseURL = "https://seirdy.one/" # just the default, I have mirrors
 languageCode = "en-us"
-title = "Seirdy's Home"
+title = "Seirdy’s Home"
 timeZone = "UTC"
 summaryLength = 150
@@ -18,7 +18,7 @@ disableKinds = ["taxonomy", "term"]
 canonicalBaseURL = "https://seirdy.one" # I have mirrors with different baseURLs
 webmentionEndpoint = "https://seirdy.one/webmentions/receive"
 logUrlPrefix = "https://git.sr.ht/~seirdy/seirdy.one/log/master/item/"
-copyright = "Copyright © 2021 Rohan Kumar"
+copyright = "Copyright © 2023 Rohan “Seirdy” Kumar"
 dark = "auto"
 highlight = false
 icon = "/favicon.svg"
@@ -28,7 +28,7 @@ disableKinds = ["taxonomy", "term"]
 lastmod = ['lastmod', ':git', 'date', 'publishDate']
 [author]
-name = "Rohan Kumar"
+name = "Seirdy"
 url = "https://seirdy.one/"
 first = "Rohan"
 last = "Kumar"


@@ -4,7 +4,7 @@ outputs:
 - gemtext
 - atom
 - RSS
-title: Seirdy's Home
+title: Seirdy’s Home
 sitemap:
 - ChangeFreq: weekly
 - Priority: 0.9


@@ -1,6 +1,6 @@
 ---
 template: "notes.html"
-description: "All the microblogs (\"notes\") on Seirdy's Home"
+description: "All the microblogs (“notes”) on Seirdy’s Home"
 title: "Notes"
 sitemap:
   ChangeFreq: daily


@@ -11,6 +11,8 @@ syndicatedCopies:
   url: 'https://lobste.rs/comments/3tsiiw/reply'
 - title: 'The Fediverse'
   url: 'https://pleroma.envs.net/notice/AboaFMxxNcJfCgE95s'
+- title: 'jstpst'
+  url: 'https://www.jstpst.net/f/just_post/9060/webrings-are-already-back'
 ---
 Webrings are alive and well; they don't need to be "brought back" because they're already here.


@@ -1,6 +1,6 @@
 ---
 template: "posts.html"
-description: "All the long-form articles on Seirdy's Home"
+description: "All the long-form articles on Seirdy’s Home"
 title: "Articles"
 sitemap:
   ChangeFreq: weekly


@@ -5,4 +5,7 @@ input-encoding: utf8
 output-xhtml: yes
 quiet: yes
 indent: no
+# I set this manually
 tidy-mark: no
+quote-nbsp: no
+sort-attributes: alpha


@@ -52,7 +52,6 @@ while getopts "hj" flags; do
 		;;
 	*)
 		bad_option "${flags}" 'invalid option'
-		exit 1
 		;;
 	esac
 done
@@ -61,9 +60,12 @@ base_url="${1-http://localhost:8089}"
 # HTML validation is already parallelized, so run that single-threaded.
 make -j1 HUGO_FLAGS=-DF HUGO_BASEURL="$base_url" clean hugo xhtmlize copy-to-xhtml validate-html
-make -j "$jobs" -f Makefile.online HUGO_BASEURL="$base_url" all-extra URLS="$(curl -sSL "$base_url/sitemap.xml" | htmlq loc -t | rg -v '/search/$' | tr '\n' ' ')" &
+sitemap_links="$(curl -sSL --compressed "$base_url/sitemap.xml" | htmlq loc -t)"
+urls_offline="$(echo "$sitemap_links" | rg -v '/search/$' | tr '\n' ' ')"
+make -j "$jobs" -f Makefile.online HUGO_BASEURL="$base_url" all-extra URLS="$urls_offline" &
 make deploy-staging RSYNCFLAGS_EXTRA=-q
-make -f Makefile.online hint-online URLS="$(curl -sSL --compressed https://staging.seirdy.one/sitemap.xml | htmlq loc -t | rg -v '/(?:search|wcag-is-a-starting-point)/$' | sort | tr '\n' ' ')"
+urls_online="$(echo "$sitemap_links" | rg -v '/(?:search|wcag-is-a-starting-point)/$' | sort | tr '\n' ' ')"
+make -f Makefile.online hint-online URLS="$urls_online"
 wait
 # TODO: run lighthouse on every page in the sitemap.


@@ -43,7 +43,6 @@ while getopts "hr" flags; do
 		;;
 	*)
 		bad_option "${flags}" 'invalid option'
-		exit 1
 		;;
 	esac
 done


@@ -33,7 +33,6 @@ while getopts "ho" flags; do
 		;;
 	*)
 		bad_option "${flags}" 'invalid option'
-		exit 1
 		;;
 	esac
 done


@@ -25,12 +25,14 @@ key() {
 }
 token() {
-	ccurl -sX POST "$auth_url" -d "key=$(key)"
+	key_response="$(key)"
+	ccurl -sX POST "$auth_url" -d "key=$key_response"
 }
 # use that token to fetch all webmentions
 fetch_webmentions() {
-	ccurl --compressed -H "Authorization: Bearer $(token)" "$webmentions_url"
+	token_response="$(token)"
+	ccurl --compressed -H "Authorization: Bearer $token_response" "$webmentions_url" -o "$webmentions_file"
 }
 # fetch webmentions if we don't have a fresh copy already.
@@ -40,5 +42,5 @@ if [ -f "$webmentions_file" ] \
 	echo 'Using cached webmentions'
 else
 	echo 'Fetching webmentions'
-	fetch_webmentions >"$webmentions_file"
+	fetch_webmentions
 fi


@@ -51,65 +51,24 @@ while getopts "hd" flags; do
 		;;
 	*)
 		bad_option "$flags" 'invalid option'
-		exit 1
 		;;
 	esac
 done
-trim_trailing_comma() {
-	sd ',$' ''
-}
-values_to_csv() {
-	tr '\n' ',' | trim_trailing_comma && echo
-}
-# values for the GEORGE webring
-# Left bc I quit trying to make a good first-party iframe alternative
-# that conformed to my site design standards while also imparting the
-# message of GEORGE as intended.
-# george() {
-# 	printf 'GEORGE,'
-# 	{
-# 		curl -sSL --compressed 'https://george.gh0.pw/embed.cgi?seirdy' \
-# 			| htmlq -a href 'main p a'
-# 		echo "null"
-# 	} | values_to_csv
-# }
-#
 endless_orbit() {
 	printf 'Endless Orbit,'
-	{
 	curl -sSL --compressed https://linkyblog.neocities.org/onionring/onionring-variables.js \
-		| grep -C 1 https://seirdy.one/
-	echo "'null',"
-	} | sd https://seirdy.one/ https://linkyblog.neocities.org/webring.html \
-		| sd "\n|'" '' | trim_trailing_comma
-	echo
+		| grep -C 1 https://seirdy.one/ \
+		| tr -d "'\n" | sed 's|https://seirdy.one/|https://linkyblog.neocities.org/webring.html|'
+	echo 'null'
 }
-netizens() {
-	printf 'Netizens,'
-	{
-		curl -sSL --compressed https://netizensring.link/onionring-variables.js \
-			| grep -C 1 https://seirdy.one/
-	} | sd 'https://seirdy.one/,?' 'https://netizensring.link/' \
-		| sd "\n|'|\r" '' | trim_trailing_comma
-	echo ',null'
-}
-print_csv_values() {
-	# george
-	endless_orbit
-	# netizens
-}
 if [ "$dry_run" = '1' ]; then
-	print_csv_values
+	endless_orbit
 elif [ -f "$webrings_dest" ]; then
 	echo "webrings file already generated"
 else
-	print_csv_values | cat "$webrings_src" - >"$webrings_dest"
+	endless_orbit | cat "$webrings_src" - >"$webrings_dest"
 fi
 # vi:ft=sh


@@ -3,15 +3,17 @@ set -e -u
 pwd="$(dirname "$0")"
 output_dir="$1"
-files_to_analyze() {
+find_files_to_analyze() {
 	find "$output_dir" -type f -name '*.xhtml' -o -name '*.svg' \
 		| grep -Ev '(bimi\.svg|search/index\.x?html)$'
 }
+files_to_analyze="$(find_files_to_analyze)"
 # we skip the BIMI icon (VNU can't handle SVG 1.2) and the search page (it has raw templates).
 vnu \
 	--stdout \
 	--format json \
 	--also-check-svg \
-	$(files_to_analyze) \
+	$files_to_analyze \
 	| sh "$pwd/filter-vnu.sh"


@@ -10,14 +10,13 @@
 # use xmllint to do the formatting.
 # xmllint ruins inline CSS so delete the inline CSS and re-insert it.
 # xmllint also adds extra whitespace around <pre><code> which we remove
-# with "sd". I chose sd since it handles newlines well.
+# with sed.
 # It also decreases indents by one level
 set -e -u
 html_file="$1"
-tmp_file="$html_file.tmp"
+tmp_file="$(mktemp)"
-xhtml_file=${html_file%*.html}.xhtml
 cleanup() {
 	rm -f "$tmp_file"
@@ -30,17 +29,20 @@ run_tidy () {
 # delete the stylesheet from the html file; we'll re-insert it later.
 # Also remove two indentation levels
-sed 7d "$html_file" | xmllint --format --encode UTF-8 --noent - | tail -n +2 | sd '^\t(?:\t)?' '' | run_tidy >"$tmp_file"
+sed 7d "$html_file" | xmllint --format --encode UTF-8 --noent - | tail -n +2 | run_tidy >"$tmp_file"
 {
 	head -n7 "$tmp_file"
-	cat "$OUTPUT_DIR/tmp.css"
+	cat "${OUTPUT_DIR:?}/tmp.css"
 	# shellcheck disable=SC2016 # these are regex statements, not shell expressions
-	tail -n +8 "$tmp_file" \
+	#shellcheck source=/home/rkumar/Executables/ghq/git.sr.ht/~seirdy/seirdy.one/scripts/xhtmlize.sh
-		| sd '<pre(?: tabindex="0")?>\n(?:\t|\s)*<(code|samp)( |>)' '<pre tabindex="0"><$1$2' \
+	sed \
-		| sd '(?:\n)?</(code|samp)>\n(?:[\t\s]*)?</pre>' '</$1></pre>' \
+		-e '1,7d' \
-		| sd '</span>(?:&nbsp;)?.span itemprop="familyName"' '</span>&#160;<span itemprop="familyName"' \
+		-e "s|name=\"generator\" />|name=\"generator\" />\n${TIDY:?}|" \
-		| sd -s '&nbsp;' '&#160;' \
+		-e 's|\.svg" width="16" /><span|.svg" width="16" /> <span|' \
-		| sd -f m 'class="u-photo photo"[^<]*<' 'class="u-photo photo"/> <' \
+		-e 's|</span>(&nbsp;)?.span itemprop="familyName|</span>&#160;<span itemprop="familyName"|' \
-		| sd '([a-z])<(data|time)' '$1 <$2' \
+		-E \
-		| sd '</span>(<a[^>]*rel="(?:nofollow ugc|ugc nofollow)"(?:[^>]*)?>liked</a>)' '</span> $1'
+		-e 's|([a-z])<data|\1 <data|' \
+		-e 's#</span>(<a[^>]*rel="(nofollow ugc|ugc nofollow)"([^>]*)?>liked</a>)#</span> \1#' \
+		-e 's#<pre( tabindex="0")?>\n(\t|\s)*<(code|samp)( |>)#<pre tabindex="0"><\3\4#' \
+		"$tmp_file"
 } >"$html_file"


@@ -8,7 +8,10 @@ set -e -u
 output_dir="$1"
 script_dir="$(dirname "$0")"
-printf '<style>%s</style>\n' "$(htmlq -t style <"$output_dir/index.html")" >"$output_dir/tmp.css"
+tidy_version="$(tidy -version)"
+export TIDY="<meta content=\"$tidy_version\" name=\"generator\" />"
+sed -e '7q;d' "$output_dir/index.html" | tr -d '\t' >"$output_dir/tmp.css"
 cleanup() {
 	rm -f "$output_dir/tmp.css"
 }
@@ -16,5 +19,5 @@ trap cleanup EXIT
 export XMLLINT_INDENT=' '
 export OUTPUT_DIR="$output_dir"
-find "$output_dir" -type f -name '*.html' | xargs -n1 sh "$script_dir/xhtmlize-single-file.sh"
+find "$output_dir" -type f -name '*.html' -exec sh "$script_dir/xhtmlize-single-file.sh" {} \;
 # done