Mirror of https://git.sr.ht/~seirdy/seirdy.one, synced 2024-11-23 21:02:09 +00:00.
Compare commits
No commits in common. "7bd1a14ef54c600bd3a7b54e95f05a7eae62f510" and "10259af99c5832218443e93f86086ce4695bbd7a" have entirely different histories.
7bd1a14ef5...10259af99c
10 changed files with 32 additions and 59 deletions
Makefile
@@ -84,7 +84,7 @@ hint: hugo .hintrc-local
.PHONY: html-validate
html-validate:
	find $(OUTPUT_DIR) -type f -name "*.html" | grep -v 'resume/index.html' | xargs pnpm -s dlx html-validate --ext=html -c linter-configs/htmlvalidate.json
	pnpm -s dlx html-validate --ext=html -c linter-configs/htmlvalidate.json $(OUTPUT_DIR)

.validate-feed-main:
	scripts/bin/validate-feed file://$(PWD)/$(OUTPUT_DIR)/atom.xml

@@ -128,7 +128,7 @@ xhtmlize:
.PHONY: copy-to-xhtml
copy-to-xhtml:
	find $(OUTPUT_DIR) -type f -name "*.html" | grep -v 'resume/index.html' | xargs -n1 sh scripts/copy-file-to-xhtml.sh
	find $(OUTPUT_DIR) -type f -name "*.html" -exec sh scripts/copy-file-to-xhtml.sh {} \;

.PHONY: deploy-html
deploy-html:
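Both Makefile hunks trade between the same two idioms for running a command over every generated HTML file: a find | xargs pipeline (with a grep -v step to skip resume/index.html) and a bare find -exec. A minimal sketch of the two forms, using a hypothetical ./public output directory and echo as a placeholder command:

    # pipeline form: grep can filter paths out; xargs -n1 runs the command once per file
    find ./public -type f -name "*.html" | grep -v 'resume/index.html' | xargs -n1 echo
    # -exec form: find invokes the command itself, once per file, with no filtering step
    find ./public -type f -name "*.html" -exec echo {} \;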
@@ -70,6 +70,13 @@ disableKinds = ["taxonomy", "term"]
url = "/meta/"
weight = 40

[[menu.main]]
identifier = "resume"
name = "Resume"
title = "resume"
url = "/resume/"
weight = 50

[[menu.main]]
identifier = "Support"
name = "Support"
@@ -1,7 +1,6 @@
---
title: "Introducing breadcrumbs"
date: 2022-07-07T18:12:10-07:00
lastMod: 2022-07-07T18:12:10-07:00
---
I just rolled out breadcrumbs for my website. Now, any page that is not linked directly from the navbar or site footer will have a breadcrumb list in its header. The breadcrumb list shows how to reach the page, starting from a navbar link. The first item is the navbar or footer link; the second link is the current page.
@@ -1,25 +1,26 @@
---
date: "2021-01-23T12:21:38-08:00"
keywords: intern, python, golang, go, lua, moonscript, shell, bash, zsh, posix, java, haskell, C, influxdb, influxdata, chronograf, grafana, kapacitor, numpy, scipy, pandas, jupyter, docker, podman, buildah, skopeo, kubernetes, openshift, cloud native, physics, jenkins, git, gitlab, github, linux, bsd, red hat, fedora, debian, ubuntu, opensuse, suse
title: Resumé
title: Rohan Kumar
description: "Detail-oriented, committed, self-motivated, OSS enthusiast proficient in Python, Go, Linux/UNIX systems, and cloud-native computing looking for an internship."
aliases:
- "/resume/"
disableMeta: true
---
[https://seirdy<wbr />.one](https://seirdy.one/) | [seirdy<wbr />@seirdy.one](mailto:seirdy@seirdy.one)

Detail oriented, committed, self-motivated, open-source enthusiast proficient in Python, Go, Linux/UNIX systems, and cloud-native computing looking for an internship.

## Education: Lewis and Clark College
Education: Lewis and Clark College
----------------------------------

Bachelor of Arts, Major in Math and Computer Science. Fall 2018 - Spring 2023 (expected)

## Certifi­cations {#certifications}
Certifi­cations {#certifications}
-------------------

CompTIA Security+ (SY0-601), Dec. 2021. Verification available upon request.

## Work Experience
Work Experience
---------------

### June-August 2019: Software Engineering Intern, SAP Ariba, Palo Alto
@@ -29,7 +30,8 @@ Developed an anomaly detection and seasonal forecasting tool in Go (Golang) to s
Worked with a remote team on YAML-based data serialization and validation of data streamed in from Redfish and Selenium test suites to ensure compliance with a schema. Developed Redfish and Selenium tests for Cisco Intersight's APIs and user interface. Used Python.

## Technical Skills
Technical Skills
----------------

- Cloud-native technologies: Docker, Podman, Buildah, Skopeo, Kubernetes, OpenShift 4.
- Operating systems: Linux, BSD, Windows, macOS. Able to adapt to any UNIX-like environment.
@@ -39,14 +41,13 @@ Worked with a remote team on YAML-based data serialization and validation of dat
- Python: Familiar with math and data science libraries such as the SciPy stack, Jupyter notebooks, and Pandas.
- Other tools: Git, Continuous Integration/<wbr />Delivery (Jenkins, GitLab CI, Travis CI), Nginx.

## Portfolio
Portfolio
---------

Git repositories mirrored across [Sourcehut](https://sr.ht/~seirdy), [GitHub](https://github.com/Seirdy), and [GitLab](https://gitlab.com/Seirdy). Selected projects:

[Clogstats: sr.ht<wbr />/~seirdy<wbr />/clogstats](https://sr.ht/~seirdy/clogstats)
: Gathers IRC channel activity statistics from WeeChat logs and performs time-series analysis and forecasting on them. It can quantify, rank, and chart chatting activity over time and display forecasts. It can also detect anomalous increases in activity. Written in Python with NumPy and Pandas.

[MOAC: sr.ht<wbr />/~seirdy<wbr />/MOAC](https://sr.ht/~seirdy/MOAC/)
: Analyze password strength given physical limits to computing, and generate secure passwords. Computes theoretical limits to a brute-force attack limited by given physical quantities (mass, energy, power, temperature, etc.) and generates passwords to withstand them. This provides a future-proof understanding of password strength. Extensively tested. Written in Go.
@@ -1,8 +0,0 @@
#!/bin/sh
# compress a file with brotli if it isn't already compressed.
set -e -u

if [ ! -f "$1.br" ]; then
	brotli -Z -- "$1"
fi
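A hypothetical invocation of the wrapper above (the scripts/brotli-wrapper.sh path is an assumption; the compression script below calls the wrapper relative to its own directory):

    # hypothetical: brotli-compress one generated page; a no-op if public/index.html.br already exists
    sh scripts/brotli-wrapper.sh public/index.html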
@@ -5,19 +5,18 @@
# 2. "gzip" or "brotli"
# 3. ECT zopfli compression level

#shellcheck disable=SC3040 # This only sets pipefail if it's available and otherwise does nothing
set -o pipefail 2>/dev/null || true
# no pipefail here since there are no pipes.
set -e -u

dirname="$(dirname "$0")"
output_dir="$1"
format="$2"

alias find_compressible='find "$output_dir" -type f \( -name "*.html" -o -name "*.txt" -o -name "*.xml" -o -name "*.webmanifest" -o -name "*.*.svg" -o -name "*.json" \)'

if [ "$format" = "gzip" ]; then
	# compression level should be the third argument
	find_compressible | xargs -n1 sh "$dirname/ect-wrapper.sh" "${3-6}"
elif [ "$format" = "brotli" ]; then
	find_compressible | xargs -n1 sh "$dirname/brotli-wrapper.sh"
	compress_level="$3"
	find_compressible -exec ect -quiet -"$compress_level" -gzip {} \;
	find_compressible -exec touch -r {} {}.gz \;
elif [ "$2" = "brotli" ]; then
	find_compressible -exec brotli -Z -- {} \;
fi
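Per its header comments and variable assignments, this compression script takes the output directory, the format ("gzip" or "brotli"), and an ECT zopfli level as positional arguments. A hypothetical invocation, assuming the script is saved as scripts/compress.sh:

    # hypothetical: gzip-compress the generated site in ./public at ECT level 9
    sh scripts/compress.sh public gzip 9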
@@ -9,9 +9,5 @@ html_file="$1"
xhtml_file="${html_file%*.html}.xhtml"

cp -p "$html_file" "$xhtml_file"
if [ -f "$html_file.gz" ]; then
	cp -p "$html_file.gz" "$xhtml_file.gz"
fi
if [ -f "$html_file.br" ]; then
	cp -p "$html_file.br" "$xhtml_file.br"
fi
[ -f "$html_file.gz" ] && cp -p "$html_file.gz" "$xhtml_file.gz"
[ -f "$html_file.br" ] && cp -p "$html_file.br" "$xhtml_file.br"
@@ -1,9 +0,0 @@
#!/bin/sh
# compress a file with ect and preserve the mtime
# args: compression level and filename.
set -e -u

if [ ! -f "$2.gz" ]; then
	ect -quiet -"$1" -gzip "$2"
	touch -r "$2" "$2.gz"
fi
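Per its header comment, this wrapper takes the compression level first and the filename second. A hypothetical invocation, assuming the wrapper is saved as scripts/ect-wrapper.sh:

    # hypothetical: gzip public/index.html at ECT level 9, copying the original's mtime onto the .gz
    sh scripts/ect-wrapper.sh 9 public/index.html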
@@ -25,19 +25,12 @@ cleanup() {
}
trap cleanup EXIT

# run_tidy() {
# tidy -asxhtml -config linter-configs/tidy.conf 2>/dev/null || true
# }

run_xmllint() {
	xmllint --format --encode UTF-8 --noent - || {
		echo "$html_file"
		exit 1
	}
run_tidy() {
	tidy -asxhtml -config linter-configs/tidy.conf 2>/dev/null || true
}

# delete the stylesheet from the html file; we'll re-insert it later.
sed 7d "$html_file" | run_xmllint | tail -n +2 >"$tmp_file"
sed 7d "$html_file" | xmllint --format --encode UTF-8 --noent - | tail -n +2 >"$tmp_file"
{
	head -n7 "$tmp_file"
	cat "${OUTPUT_DIR:?}/tmp.css"
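Read on its own, the pipeline above (in either its run_xmllint or plain-xmllint form) performs three steps. A sketch of the same steps on a hypothetical page.html, with the line number 7 and the flags taken from the script:

    # 1. sed 7d      -- drop line 7, the inlined stylesheet that gets re-inserted later
    # 2. xmllint     -- re-serialize the page as well-formed, pretty-printed, UTF-8 XML
    # 3. tail -n +2  -- discard the XML declaration that xmllint emits as its first line
    sed 7d page.html | xmllint --format --encode UTF-8 --noent - | tail -n +2 > page.tmp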
@@ -8,18 +8,12 @@ set -e -u

output_dir="$1"
script_dir="$(dirname "$0")"
temp_resume="$(mktemp)"

# I have an alias for a redirect. I also define the redirect in Nginx, but this is there for the envs.net/~seirdy mirror. Hugo aliases don't have trailing slashes; this will trip up xmllint.
sed -i -e 's|<html lang="en-us"|<html xmlns="http://www.w3.org/1999/xhtml" lang="en-us" xml:lang="en-us"|' -E -e 's#<((link|meta) .*)">#<\1" />#' "$output_dir/resume/index.html"
mv "$output_dir/resume/index.html" "$temp_resume"

{
	printf '\t' && sed -e '7q;d' "$output_dir/index.html"
} >"$output_dir/tmp.css"
cleanup() {
	rm -f "$output_dir/tmp.css"
	mv "$temp_resume" "$output_dir/resume/index.html"
}
trap cleanup EXIT
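To make the sed rewrite above concrete: it adds the XHTML namespace attributes to the opening html tag and self-closes link and meta tags that end in ">. A minimal illustration on made-up input lines (only the sed expressions come from the script; the flag placement is simplified):

    printf '%s\n' '<html lang="en-us" dir="ltr">' '<meta name="color-scheme" content="dark light">' \
    	| sed -E -e 's|<html lang="en-us"|<html xmlns="http://www.w3.org/1999/xhtml" lang="en-us" xml:lang="en-us"|' -e 's#<((link|meta) .*)">#<\1" />#'
    # prints:
    # <html xmlns="http://www.w3.org/1999/xhtml" lang="en-us" xml:lang="en-us" dir="ltr">
    # <meta name="color-scheme" content="dark light" />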
@@ -27,3 +21,4 @@ export XMLLINT_INDENT=' '
export OUTPUT_DIR="$output_dir"
find "$output_dir" -type f -name '*.html' -exec sh "$script_dir/xhtmlize-single-file.sh" {} \;
find "$output_dir" -type f -name '*.xml' -exec xmllint --noblanks --encode UTF-8 --noent {} --output {} \;
# done