diff --git a/content/posts/website-best-practices.md b/content/posts/website-best-practices.md
index e924e73..22b6e0b 100644
--- a/content/posts/website-best-practices.md
+++ b/content/posts/website-best-practices.md
@@ -425,7 +425,7 @@ Long pages with many DOM nodes may benefit from CSS containment, a more recently
 Leveraging containment and `content-visibility` is a progressive enhancement, so there aren't any serious implications for older browsers. I use `content-visibility` to defer rendering off-screen entries in my archives. Doing so allows me to serve long archive pages instead of resorting to pagination, with page-length limited only by download size. In my tests using Lighthouse with Chromium Devtools' simulated CPU throttling,[^11] this article rendered faster _with_ containment-enabled CSS than without any custom stylesheets at all.
 
-Using containment for content at the end of the page is relatively safe. Using it for content earlier in the page risks introducing [layout shifts](#layout-shifts). Eliminate the layout shifts by calculating a value for the `contain-intrinsic-size` property. {{}}{{}}, by {{}}, is a comprehensive guide to calculating intrinsic size values.
+Using containment for content at the end of the page is relatively safe. Using it for content earlier in the page risks introducing [layout shifts](#layout-shifts). Eliminate the layout shifts by calculating a value for the `contain-intrinsic-size` property. {{}}{{}}, by {{}}, is a comprehensive guide to calculating intrinsic size values.
 
 ### Performance of assistive technologies
 
diff --git a/layouts/partials/webmentions.html b/layouts/partials/webmentions.html
index 48bcbf9..9b1467d 100644
--- a/layouts/partials/webmentions.html
+++ b/layouts/partials/webmentions.html
@@ -1,4 +1,4 @@
-{{- $wbmLinks := (slice "https://si3t.ch/log/2021-04-18-entetes-floc.html" "https://xmpp.org/2021/02/newsletter-02-feburary/" "https://gurlic.com/technology/post/393626430212145157" "https://gurlic.com/technology/post/343249858599059461" "https://www.librepunk.club/@penryn/108411423190214816" "https://benign.town/@josias/108457015755310198" "http://www.tuxmachines.org/node/148146") -}}
+{{- $wbmLinks := (slice "https://si3t.ch/log/2021-04-18-entetes-floc.html" "https://xmpp.org/2021/02/newsletter-02-feburary/" "https://gurlic.com/technology/post/393626430212145157" "https://gurlic.com/technology/post/343249858599059461" "https://www.librepunk.club/@penryn/108411423190214816" "https://benign.town/@josias/108457015755310198" "http://www.tuxmachines.org/node/148146" "https://i.reddit.com/r/web_design/comments/k0dmpj/an_opinionated_list_of_best_practices_for_textual/gdmxy4u/" "https://bbbhltz.space/posts/thoughts-on-tech-feb2021/") -}}
 
 Web­mentions
 
diff --git a/linter-configs/htmltest.yml b/linter-configs/htmltest.yml
index 57379e8..78ca9b3 100644
--- a/linter-configs/htmltest.yml
+++ b/linter-configs/htmltest.yml
@@ -36,13 +36,19 @@ IgnoreURLs:
 - "https://seirdy.one/webmentions/"
 - "http://creativecommons.org/ns"
 - "https://seirdy.one/search/"
+- "https://www.reddit.com/user/Seirdy" # reddit doesn't like htmltest
+# - "https://i.reddit.com/r/web_design/comments/k0dmpj/an_opinionated_list_of_best_practices_for_textual/gdmxy4u/"
+- "https://i.reddit.com"
 - "https://fediring.net/(previous|next)" # redir
-- "https://forum.palemoon.org/" # manual check: blocks crawlers
+# - "https://forum.palemoon.org/viewtopic.php?f=1&t=25473" # manual check: blocks crawlers
+- "https://forum.palemoon.org/viewtopic.php"
 - "https://queue.acm.org/detail" # manual check: blocks crawlers
 - "https://www.geocities.ws/jaup/jaup.htm" # manual check: blocks crawlers
 - "https://plausible.io/blog/google-floc#" # manual check: I block this domain
 - "https://twitter.com/" # manual check: 404 for some reason, using curl works fine.
-- "https://bugs.debian.org/cgi-bin/bugreport.cgi" # manual check: 400 for some reason, using curl works fine.
-- "https://forum.kuketz-blog.de/" # manual check: blocks crawlers
-- "https://web.archive.org/web/0/http" # the wayback machine.
+# - "https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=830173" # manual check: 400 for some reason, using curl works fine.
+- "https://bugs.debian.org/cgi-bin/bugreport.cgi"
+# - "https://forum.kuketz-blog.de/viewtopic.php?p=78202" # manual check: blocks crawlers
+- "https://forum.kuketz-blog.de/viewtopic.php"
+- "https://web.archive.org/web/0/http" # the wayback machine itself.
 OutputDir: "linter-configs/htmltest"
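For context, the article paragraph patched above hinges on pairing `content-visibility` with `contain-intrinsic-size`: the former defers rendering of off-screen entries, the latter reserves an estimated box so deferred entries do not collapse and shift the layout. The following is a minimal sketch of that technique, assuming a hypothetical `.archive-entry` selector and a guessed 10em entry height; neither is taken from the site's actual stylesheet.

.archive-entry {
	/* Skip layout and paint for this entry until it approaches the viewport. */
	content-visibility: auto;
	/* Reserve an estimated box for skipped entries so scrollbars and
	   surrounding content do not jump (i.e. no layout shifts).
	   10em is a placeholder; measure real entries to pick a value. */
	contain-intrinsic-size: auto 10em;
}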