mirror of https://git.sr.ht/~seirdy/seirdy.one synced 2024-11-10 00:12:09 +00:00

Compare commits


8 commits

Rohan Kumar · 7bd1a14ef5 · 2023-11-26 15:54:48 -08:00
avoid suppressing compressor errors

Rohan Kumar · f4d43d1e35 · 2023-11-26 15:22:59 -08:00
rename file to remove annoying whitespace

Rohan Kumar · 866ca1b386 · 2023-11-26 15:14:06 -08:00
Switch some things from find -exec to xargs

Allows better filtering and doesn't suppress exit codes. Since I'm no
longer suppressing exit codes, I had to handle them properly in
copy-file-to-xhtml.sh by using if-statements.

This also allowed me to skip the generation of an XHTML redirect page.

Rohan Kumar · 6487c5c7f3 · 2023-11-26 14:57:56 -08:00
Make html-validate skip redirect page

Rohan Kumar · c98b28f6fe · 2023-11-26 14:57:42 -08:00
Make redirect alias valid XHTML5

Rohan Kumar · 8971d74dac · 2023-11-26 14:37:07 -08:00
try to get xmllint to print diagnostics

Rohan Kumar · 59d6de6a92 · 2023-11-26 14:36:32 -08:00
re-title resume

Rohan Kumar · 33b3c5211a · 2023-11-26 14:28:41 -08:00
Move resume to "about"

Define a redirect alias too.

This also entails special xhtmlize behavior, as redirects in Hugo don't
use trailing slashes.
10 changed files with 59 additions and 32 deletions

View file

@@ -84,7 +84,7 @@ hint: hugo .hintrc-local
 .PHONY: html-validate
 html-validate:
-	pnpm -s dlx html-validate --ext=html -c linter-configs/htmlvalidate.json $(OUTPUT_DIR)
+	find $(OUTPUT_DIR) -type f -name "*.html" | grep -v 'resume/index.html' | xargs pnpm -s dlx html-validate --ext=html -c linter-configs/htmlvalidate.json

 .validate-feed-main:
 	scripts/bin/validate-feed file://$(PWD)/$(OUTPUT_DIR)/atom.xml

@@ -128,7 +128,7 @@ xhtmlize:
 .PHONY: copy-to-xhtml
 copy-to-xhtml:
-	find $(OUTPUT_DIR) -type f -name "*.html" -exec sh scripts/copy-file-to-xhtml.sh {} \;
+	find $(OUTPUT_DIR) -type f -name "*.html" | grep -v 'resume/index.html' | xargs -n1 sh scripts/copy-file-to-xhtml.sh

 .PHONY: deploy-html
 deploy-html:
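
To illustrate the exit-code behaviour the commit message above describes, here is a rough sketch, assuming the Hugo output directory is `public` and using `false` as a stand-in for a failing checker:

# find -exec ... \; swallows per-file failures: find itself still exits 0,
# so a make recipe built on it keeps going even when a file is rejected.
find public -type f -name '*.html' -exec false {} \;
echo "$?"   # prints 0: find ignores the exec'd command's status

# Piping into xargs propagates failure: xargs exits 123 when any invocation
# of the command fails, so the make targets now abort on validator errors.
find public -type f -name '*.html' | xargs -n1 false
echo "$?"   # prints 123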

View file

@@ -70,13 +70,6 @@ disableKinds = ["taxonomy", "term"]
 url = "/meta/"
 weight = 40
-[[menu.main]]
-identifier = "resume"
-name = "Resume"
-title = "resume"
-url = "/resume/"
-weight = 50
 [[menu.main]]
 identifier = "Support"
 name = "Support"

View file

@@ -1,26 +1,25 @@
 ---
 date: "2021-01-23T12:21:38-08:00"
 keywords: intern, python, golang, go, lua, moonscript, shell, bash, zsh, posix, java, haskell, C, influxdb, influxdata, chronograf, grafana, kapacitor, numpy, scipy, pandas, jupyter, docker, podman, buildah, skopeo, kubernetes, openshift, cloud native, physics, jenkins, git, gitlab, github, linux, bsd, red hat, fedora, debian, ubuntu, opensuse, suse
-title: Rohan Kumar
+title: Resumé
 description: "Detail-oriented, committed, self-motivated, OSS enthusiast proficient in Python, Go, Linux/UNIX systems, and cloud-native computing looking for an internship."
+aliases:
+- "/resume/"
 disableMeta: true
 ---

 [https://seirdy<wbr />.one](https://seirdy.one/) | [seirdy<wbr />@seirdy.one](mailto:seirdy@seirdy.one)

 Detail oriented, committed, self-motivated, open-source enthusiast proficient in Python, Go, Linux/UNIX systems, and cloud-native computing looking for an internship.

-Education: Lewis and Clark College
-----------------------------------
+## Education: Lewis and Clark College

 Bachelor of Arts, Major in Math and Computer Science. Fall 2018 - Spring 2023 (expected)

-Certifi&shy;cations {#certifications}
--------------------
+## Certifi&shy;cations {#certifications}

 CompTIA Security+ (SY0-601), Dec. 2021. Verification available upon request.

-Work Experience
----------------
+## Work Experience

 ### June-August 2019: Software Engineering Intern, SAP Ariba, Palo Alto

@@ -30,8 +29,7 @@ Developed an anomaly detection and seasonal forecasting tool in Go (Golang) to s
 Worked with a remote team on YAML-based data serialization and validation of data streamed in from Redfish and Selenium test suites to ensure compliance with a schema. Developed Redshift and Selenium tests for Cisco Intersight's APIs and user interface. Used Python.

-Technical Skills
-----------------
+## Technical Skills

 - Cloud-native technologies: Docker Podman, Buildah, Skopeo, Kubernetes, OpenShift 4.
 - Operating systems: Linux, BSD, Windows, macOS. Able to adapt to any UNIX-like environment.

@@ -41,13 +39,14 @@ Technical Skills
 - Python: Familiar with math and data science libraries such as the SciPy stack, Jupyter notebooks, and Pandas.
 - Other tools: Git, Continuous Integration/<wbr />Delivery (Jenkins, GitLab CI, Travis CI), Nginx.

-Portfolio
----------
+## Portfolio

 Git repositories mirrored across [Sourcehut](https://sr.ht/~seirdy), [GitHub](https://github.com/Seirdy), and [GitLab](https://gitlab.com/Seirdy). Selected projects:

 [Clogstats: sr.ht<wbr />/~seirdy<wbr />/clogstats](https://sr.ht/~seirdy/clogstats)
 : Gathers IRC channel activity statistics from WeeChat logs and performs time-series analysis and forecasting on them. It can quantify, rank, and chart chatting activity over time and display forecasts. It can also detect anomalous increases in activity. Written in Python with NumPy and Pandas.

 [MOAC: sr.ht<wbr />/~seirdy<wbr />/MOAC](https://sr.ht/~seirdy/MOAC/)
 : Analyze password strength given physical limits to computing, and generate secure passwords. Computes theoretical limits to a brute-force attack limited by given physical quantities (mass, energy, power, temperature, etc.) and generates passwords to withstand them. This provides a future-proof understanding of password strength. Extensively tested. Written in Go.

View file

@@ -1,6 +1,7 @@
 ---
 title: "Introducing breadcrumbs"
 date: 2022-07-07T18:12:10-07:00
+lastMod: 2022-07-07T18:12:10-07:00
 ---

 I just rolled out breadcrumbs for my website. Now, any page that is not linked directly from the navbar or site footer will have a breadcrumb list in its header. The breadcrumb list shows how to reach the page, starting from a navbar link. The first item is the navbar or footer link; the second link is the current page.

View file

@@ -0,0 +1,8 @@
+#!/bin/sh
+# compress a file with brotli if it isn't already compressed.
+
+set -e -u
+
+if [ ! -f "$1.br" ]; then
+	brotli -Z -- "$1"
+fi
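
A quick usage sketch for the new wrapper (the path is hypothetical): because it checks for an existing `.br` output first, re-running the build doesn't recompress anything.

sh scripts/brotli-wrapper.sh public/index.html   # writes public/index.html.br
sh scripts/brotli-wrapper.sh public/index.html   # .br already exists, so this is a no-op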

View file

@@ -5,18 +5,19 @@
 # 2. "gzip" or "brotli"
 # 3. ECT zopfli compression level
-# no pipefail here since there are no pipes.
+#shellcheck disable=SC3040 # This only sets pipefail if it's available and otherwise does nothing
+set -o pipefail 2>/dev/null || true
 set -e -u

+dirname="$(dirname "$0")"
 output_dir="$1"
 format="$2"

 alias find_compressible='find "$output_dir" -type f \( -name "*.html" -o -name "*.txt" -o -name "*.xml" -o -name "*.webmanifest" -o -name "*.*.svg" -o -name "*.json" \)'

 if [ "$format" = "gzip" ]; then
-	compress_level="$3"
-	find_compressible -exec ect -quiet -"$compress_level" -gzip {} \;
-	find_compressible -exec touch -r {} {}.gz \;
-elif [ "$2" = "brotli" ]; then
-	find_compressible -exec brotli -Z -- {} \;
+	# compression level should be the third argument
+	find_compressible | xargs -n1 sh "$dirname/ect-wrapper.sh" "${3-6}"
+elif [ "$format" = "brotli" ]; then
+	find_compressible | xargs -n1 sh "$dirname/brotli-wrapper.sh"
 fi
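
The `"${3-6}"` expansion is what makes the third argument optional: it uses `$3` when a level is passed and falls back to 6 otherwise. A minimal sketch of that POSIX parameter expansion:

set -- public gzip        # two positional parameters: $3 is unset
echo "${3-6}"             # prints 6 (the fallback level)

set -- public gzip 9
echo "${3-6}"             # prints 9 (an explicit level wins)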

View file

@@ -9,5 +9,9 @@ html_file="$1"
 xhtml_file="${html_file%*.html}.xhtml"
 cp -p "$html_file" "$xhtml_file"

-[ -f "$html_file.gz" ] && cp -p "$html_file.gz" "$xhtml_file.gz"
-[ -f "$html_file.br" ] && cp -p "$html_file.br" "$xhtml_file.br"
+if [ -f "$html_file.gz" ]; then
+	cp -p "$html_file.gz" "$xhtml_file.gz"
+fi
+if [ -f "$html_file.br" ]; then
+	cp -p "$html_file.br" "$xhtml_file.br"
+fi
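
The if-statements are what keep this script's exit status honest now that failures propagate through xargs: when no compressed sibling exists, the old `[ -f … ] && cp …` one-liner leaves the script with a final status of 1 even though nothing went wrong, while a false `if` condition simply skips the copy. A minimal sketch with hypothetical file names:

# old style: the failed test is the last command, so it becomes the script's
# exit status and xargs flags the file as an error
[ -f page.html.br ] && cp -p page.html.br page.xhtml.br

# new style: a false condition just skips the body; the script still exits 0
if [ -f page.html.br ]; then
	cp -p page.html.br page.xhtml.br
fi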

scripts/ect-wrapper.sh (new file, 9 lines)
View file

@@ -0,0 +1,9 @@
+#!/bin/sh
+# compress a file with ect and preserve the mtime
+# args: compression level and filename.
+
+set -e -u
+if [ ! -f "$2.gz" ]; then
+	ect -quiet -"$1" -gzip "$2"
+	touch -r "$2" "$2.gz"
+fi
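
Usage sketch for the ect wrapper (level and path hypothetical): it gzips a file only if the `.gz` sibling is missing and copies the source's mtime onto the output with `touch -r`, matching what the old inline `find -exec` pair in compress.sh did.

sh scripts/ect-wrapper.sh 9 public/index.html   # writes public/index.html.gz with the source's mtime
sh scripts/ect-wrapper.sh 9 public/index.html   # .gz already exists, so the file is skipped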

View file

@@ -25,12 +25,19 @@ cleanup() {
 }
 trap cleanup EXIT

-run_tidy() {
-	tidy -asxhtml -config linter-configs/tidy.conf 2>/dev/null || true
-}
+# run_tidy() {
+# 	tidy -asxhtml -config linter-configs/tidy.conf 2>/dev/null || true
+# }
+
+run_xmllint() {
+	xmllint --format --encode UTF-8 --noent - || {
+		echo "$html_file"
+		exit 1
+	}
+}

 # delete the stylesheet from the html file; we'll re-insert it later.
-sed 7d "$html_file" | xmllint --format --encode UTF-8 --noent - | tail -n +2 >"$tmp_file"
+sed 7d "$html_file" | run_xmllint | tail -n +2 >"$tmp_file"
 {
 	head -n7 "$tmp_file"
 	cat "${OUTPUT_DIR:?}/tmp.css"

View file

@@ -8,12 +8,18 @@ set -e -u
 output_dir="$1"
 script_dir="$(dirname "$0")"
+temp_resume="$(mktemp)"

+# I have an alias for a redirect. I also define the redirect in Nginx, but this is there for the envs.net/~seirdy mirror. Hugo aliases don't have trailing slashes; this will trip up xmllint.
+sed -i -e 's|<html lang="en-us"|<html xmlns="http://www.w3.org/1999/xhtml" lang="en-us" xml:lang="en-us"|' -E -e 's#<((link|meta) .*)">#<\1" />#' "$output_dir/resume/index.html"
+mv "$output_dir/resume/index.html" "$temp_resume"

 {
 	printf '\t' && sed -e '7q;d' "$output_dir/index.html"
 } >"$output_dir/tmp.css"

 cleanup() {
 	rm -f "$output_dir/tmp.css"
+	mv "$temp_resume" "$output_dir/resume/index.html"
 }
 trap cleanup EXIT

@@ -21,4 +27,3 @@ export XMLLINT_INDENT=' '
 export OUTPUT_DIR="$output_dir"
 find "$output_dir" -type f -name '*.html' -exec sh "$script_dir/xhtmlize-single-file.sh" {} \;
 find "$output_dir" -type f -name '*.xml' -exec xmllint --noblanks --encode UTF-8 --noent {} --output {} \;
-# done
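
To make the sed rewrite above concrete: the first expression turns the plain `<html lang="en-us"` root of the generated redirect page into a namespaced XHTML root, and the second self-closes `<link>`/`<meta>` tags so xmllint accepts the markup. For example, the second expression would transform a refresh tag like this (the tag and target URL are illustrative, not taken from the actual redirect page):

printf '%s\n' '<meta http-equiv="refresh" content="0; url=/about/">' |
	sed -E 's#<((link|meta) .*)">#<\1" />#'
# output: <meta http-equiv="refresh" content="0; url=/about/" />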