Mirror of https://git.sr.ht/~seirdy/seirdy.one, synced 2024-11-24 05:02:10 +00:00
Compare commits
No commits in common. "06bc35f25b7bbb5320e5d45f138829d7b0a2b87b" and "8312af7632628a88e858939fc802427b3238d93e" have entirely different histories.
06bc35f25b...8312af7632
3 changed files with 1 addition and 14 deletions
Makefile | 2 +-
Makefile
@@ -13,7 +13,7 @@ WWW_RSYNC_DEST = $(USER):$(WWW_ROOT)
 GEMINI_RSYNC_DEST = $(USER):$(GEMINI_ROOT)
 
 OUTPUT_DIR = public
-RSYNCFLAGS += -rlpcv --zc=zstd --zl=6 --skip-compress=gz/br/zst/png/webp/jpg/avif/jxl/mp4/mkv/webm/opus/mp3 -e "ssh -o KexAlgorithms=sntrup761x25519-sha512@openssh.com" --chmod=D755,F644
+RSYNCFLAGS += -rlpcv --zc=zstd --zl=6 --skip-compress=gz/br/zst/png/webp/jpg/avif/jxl/mp4/mkv/webm/opus/mp3 -e "ssh -o KexAlgorithms=sntrup761x25519-sha512@openssh.com"
 RSYNCFLAGS_EXTRA ?=
 # compression gets slow for extreme levels like the old "70109"
 ECT_LEVEL=9
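For context, these variables plausibly feed an rsync deploy invocation along the following lines. This is a hypothetical sketch: the deploy target itself isn't part of this compare, and the destination host and paths are placeholders.

# Hypothetical deploy command assembled from the variables above;
# deploy@example.com and /var/www/example are placeholders.
rsync -rlpcv --zc=zstd --zl=6 \
    --skip-compress=gz/br/zst/png/webp/jpg/avif/jxl/mp4/mkv/webm/opus/mp3 \
    -e "ssh -o KexAlgorithms=sntrup761x25519-sha512@openssh.com" \
    public/ deploy@example.com:/var/www/example
# -rlpcv: recurse, keep symlinks and permissions, compare by checksum, verbose.
# --zc/--zl: zstd wire compression at level 6; --skip-compress lists
# already-compressed formats not worth recompressing in transit.
# With --chmod=D755,F644 removed, transferred modes come from the local
# files (via -p) instead of being forced to 755/644.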
@@ -1,12 +0,0 @@
----
-title: "Re: things not available when someone blocks all cookies"
-date: 2022-08-31T20:36:30-07:00
-replyURI: "https://blog.tomayac.com/2022/08/30/things-not-available-when-someone-blocks-all-cookies/"
-replyTitle: "Things not available when someone blocks all cookies"
-replyType: "BlogPosting"
-replyAuthor: "Thomas Steiner"
-replyAuthorURI: "https://blog.tomayac.com/about/"
----
-Client-side storage (cookies, cache, etc.) is one of many things worth disabling during site testing. The Tor Browser's "safest" level blocks SVG, remote fonts, JS, and other features; many textual browsers don't support anything besides a subset of (X)HTML. Most [non-mainstream search engines]({{<relref "/posts/search-engines-with-own-indexes.md">}}) are similar.
-
-Personally, I try to keep sites [<dfn>curlable</dfn>](https://indieweb.org/curlable). It should be possible to use a plain <code>curl <var>URL</var></code> command to get all the necessary markup, complete with visible contents. If the main content of the page is text, then *everything else should be a progressive enhancement.*
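The "curlable" test the removed note describes is easy to run by hand. A minimal sketch, with a placeholder URL and search phrase:

# Fetch the page the way a client with no JS, cookies, cache, or remote
# fonts sees it. If a phrase from the article's body appears in the raw
# markup, the page passes; URL and phrase below are placeholders.
curl -sS https://example.com/posts/example-post/ | grep -F "a phrase from the article"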
@@ -15,7 +15,6 @@ alias find_compressible='find "$output_dir" -type f \( -name "*.html" -o -name "
 if [ "$format" = "gzip" ]; then
	compress_level="$3"
	find_compressible -exec ect -quiet -"$compress_level" -gzip {} \;
-	find_compressible -exec touch -r {} {}.gz \;
 elif [ "$2" = "brotli" ]; then
	find_compressible -exec brotli -Z -- {} \;
 fi
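The dropped line had copied each source file's mtime onto its precompressed counterpart. A sketch of the same two steps on a single placeholder file:

# What the removed line did, shown by hand; the path is a placeholder.
ect -quiet -9 -gzip public/index.html            # writes public/index.html.gz
touch -r public/index.html public/index.html.gz  # copy the source's mtime onto the .gz
# After this change, the .gz keeps whatever timestamp ect gives it.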