Assorted shell and Python scripts

Compare changes

Choose any two refs to compare.

+36
.archived/amimullvad
···
+
#!/usr/bin/env zsh

# amimullvad - display the Mullvad VPN connection status in a styled gum box.
# Requires: curl, gum, jq.

# Check for dependencies
if ! command -v curl >/dev/null 2>&1; then
    echo "Missing dependency: curl" >&2
    exit 1
fi

if ! command -v gum >/dev/null 2>&1; then
    echo "Missing dependency: gum" >&2
    echo "See github.com/charmbracelet/gum" >&2
    exit 1
fi

if ! command -v jq >/dev/null 2>&1; then
    echo "Missing dependency: jq" >&2
    exit 1
fi

# Fetch the connection report once; bail out if the API is unreachable.
MV_API=$(curl -sSL https://am.i.mullvad.net/json) || { echo "Could not reach am.i.mullvad.net" >&2; exit 1; }

# jq -r emits raw strings, replacing the old `jq | tr -d '"'` pipelines.
IP=$(jq -r '.ip' <<<"$MV_API")
CITY=$(jq -r '.city' <<<"$MV_API")
COUNTRY=$(jq -r '.country' <<<"$MV_API")
MV_EXIT_IP_HN=$(jq -r '.mullvad_exit_ip_hostname' <<<"$MV_API")
MV_SERVER_TYPE=$(jq -r '.mullvad_server_type' <<<"$MV_API")
BLACKLISTED=$(jq -r '.blacklisted.blacklisted' <<<"$MV_API")

# Left column: field labels; right column: the corresponding values.
LEFT_COL=$(printf "%s\n%s\n%s\n%s\n%s\n%s\n" "IP Address" "City" "Country" "Exit IP Hostname" "Server Type" "Blacklisted")
RIGHT_COL=$(printf "%s\n%s\n%s\n%s\n%s\n%s\n" "$IP" "$CITY" "$COUNTRY" "$MV_EXIT_IP_HN" "$MV_SERVER_TYPE" "$BLACKLISTED")
GUM_LEFT=$(gum style --foreground "#73F59F" --border-foreground 57 --border none --width 20 --margin "1 2" --padding "0 1" --align right "$LEFT_COL")
GUM_RIGHT=$(gum style --foreground "#F1F1F1" --border-foreground 57 --border none --width 20 --margin "1 0" --align left "$RIGHT_COL")

GUM_TOP=$(gum style --bold --foreground 212 --border-foreground 57 --border rounded --width 50 --align center --padding "0 1" "Am I Mullvad?")
GUM_BOTTOM=$(gum join --horizontal --align right "$GUM_LEFT" "$GUM_RIGHT")
# Quote $GUM_BOTTOM so the pre-rendered multi-line panel is passed intact.
BOTTOM=$(gum style --border-foreground 57 --border rounded --width 50 --align center --padding "0 1" "$GUM_BOTTOM")

gum join --vertical "$GUM_TOP" "$BOTTOM"
+21
.archived/backup_podvol
···
+
#!/usr/bin/env bash
#
# backup_podvol - export the named podman volumes to gzipped tarballs under
# ~/podman_volume_backups and prune backups older than three days.
#
# Usage: backup_podvol VOLUME [VOLUME...]

set -euo pipefail

BACKUP_DIR="${HOME}/podman_volume_backups"
DATE=$(date '+%Y-%m-%d_%H%M%S')

volumes=("$@")

# Require at least one volume name instead of silently doing nothing.
if (( ${#volumes[@]} == 0 )); then
    echo "Usage: ${0##*/} VOLUME [VOLUME...]" >&2
    exit 1
fi

# mkdir -p is a no-op when the directory already exists.
mkdir -p "$BACKUP_DIR"

for vol in "${volumes[@]}"; do
    podman volume export "$vol" --output "${BACKUP_DIR}/${vol}-${DATE}.tar"
    gzip "${BACKUP_DIR}/${vol}-${DATE}.tar"
done

# Prune backups older than three days. Match only our own tarballs so any
# unrelated files placed in the directory are left alone.
find "$BACKUP_DIR" -maxdepth 1 -type f -name '*.tar.gz' -mtime +3 -delete

exit 0
+19
.archived/check_updates
···
+
#!/usr/bin/env bash
#
# check_updates - print a one-line summary of pending package updates
# (APT on Debian-family systems, plus Flatpak when installed).

updates=()

# Debian/Ubuntu: parse the summary line of `apt update`. That line reads
# either "N packages can be upgraded..." or "All packages are up to date."
if [[ -f /etc/debian_version ]]; then
    APT_UPDATES=$(sudo apt update 2>/dev/null | grep package | tail -1 | cut -d '.' -f 1 | awk '{print $1}')
    if [ "$APT_UPDATES" = "All" ]; then
        NUM_UPDATES=0
    else
        # Fall back to 0 if apt produced no parseable output at all.
        NUM_UPDATES="${APT_UPDATES:-0}"
    fi
    updates+=("APT: ${NUM_UPDATES}")
fi

# Flatpak prints one line per updatable app; count them.
if command -v flatpak >/dev/null; then
    updates+=("Flatpak: $(flatpak remote-ls --updates | wc -l)")
fi

echo "${updates[*]}"
+28
.archived/encrypt_mail
···
+
#!/usr/bin/env bash
#
# encrypt_mail - tar up /naspool/mail, encrypt the archive with age, copy it
# to the remote backup host, and prune remote copies older than 7 days.
# Posts an ntfy notification on success.
#
# Requires: NTFY_SERVER (from ~/.env_common), tar, age, scp, ssh, curl.

set -euxo pipefail

if test -f "${HOME}/.env_common"; then
    source "${HOME}/.env_common"
fi

# Stage the archive in a private mktemp directory instead of predictable
# /tmp/mail* names, so cleanup can never touch unrelated files.
WORKDIR=$(mktemp -d)

cleanup() {
    echo "Cleaning up"
    rm -rfv -- "$WORKDIR"
}

trap cleanup 0 1 2 3 6

DATE=$(date '+%Y-%m-%d')
tar czf "${WORKDIR}/mail-${DATE}.tar.gz" /naspool/mail
age --recipient 'age12pcwr6d8w6wfh5ymarphypzlyqxza3c3xj7cseturzyu70s02umske6mt6' --output "${WORKDIR}/mail-${DATE}.tar.gz.age" "${WORKDIR}/mail-${DATE}.tar.gz"
scp "${WORKDIR}/mail-${DATE}.tar.gz.age" root@aux-remote.carp-wyvern.ts.net:/bpool/encrypted_mail
ssh root@aux-remote.carp-wyvern.ts.net -- find /bpool/encrypted_mail -maxdepth 1 -type f -mtime +7 -delete

# Notify success via ntfy (only reached when every step above succeeded,
# because of set -e).
curl \
    -H prio:default \
    -H tags:incoming_envelope \
    -d "encrypt_mail: success" \
    "${NTFY_SERVER}/backups"

# vim: ts=4 sts=4 sw=4 et ai ft=bash
+26
.archived/nc_snap_create
···
+
#!/usr/bin/env nu

# nc_snap_create - create an offline snapshot of disk "vda", named after
# today's date, on every server in the netcup Server Control Panel account.

# Long-lived OAuth refresh token kept in the home directory.
let refresh_token = (open ($env.HOME | path join .netcup_refresh_token.json) | get refresh_token)

# Exchange the refresh token for a short-lived access token via the SCP
# Keycloak OpenID Connect token endpoint.
def get_access_token [refresh_token] {
    (curl -s "https://www.servercontrolpanel.de/realms/scp/protocol/openid-connect/token" -d "client_id=scp" -d $"refresh_token=($refresh_token)" -d "grant_type=refresh_token")
    | from json
    | get access_token
}

# List all servers visible to this account.
def get_servers [access_token] {
    (curl -s -X "GET" "https://www.servercontrolpanel.de/scp-core/api/v1/servers" -H $"Authorization: Bearer ($access_token)" -H "accept: application/hal+json")
    | from json
}

let access_token = (get_access_token ($refresh_token))
let servers = (get_servers ($access_token))

# Snapshot name, e.g. "20240131".
let date_now = (date now | format date "%Y%m%d")

# POST a snapshot request for each server id ($in is the current id).
# onlineSnapshot=false means the snapshot is taken with the server offline.
($servers | get id) | each {
    (curl -s -X "POST" $"https://www.servercontrolpanel.de/scp-core/api/v1/servers/($in)/snapshots" -H $"Authorization: Bearer ($access_token)" -H "accept: application/hal+json" -H "Content-Type: application/json" -d $'{"name": "($date_now)", "description": "via script", "diskName": "vda", "onlineSnapshot": false}')
    | from json
}

# vim: sw=4 sts=4 ts=4 ai et ft=nu
+21
.archived/nc_snap_optimize
···
+
#!/usr/bin/env nu

# nc_snap_optimize - trigger storage optimization on every server in the
# netcup Server Control Panel account, restarting each server afterwards
# (startAfterOptimization=true).

# Long-lived OAuth refresh token kept in the home directory.
let refresh_token = (open ($env.HOME | path join .netcup_refresh_token.json) | get refresh_token)

# Exchange the refresh token for a short-lived access token via the SCP
# Keycloak OpenID Connect token endpoint.
def get_access_token [refresh_token] {
    (curl -s "https://www.servercontrolpanel.de/realms/scp/protocol/openid-connect/token" -d "client_id=scp" -d $"refresh_token=($refresh_token)" -d "grant_type=refresh_token")
    | from json
    | get access_token
}

# List all servers visible to this account.
def get_servers [access_token] {
    (curl -s -X "GET" "https://www.servercontrolpanel.de/scp-core/api/v1/servers" -H $"Authorization: Bearer ($access_token)" -H "accept: application/hal+json")
    | from json
}

let access_token = (get_access_token ($refresh_token))
let servers = (get_servers ($access_token))

# Fire the optimization request for all server ids in parallel (par-each).
($servers | get id) | par-each { |p| (curl -s -X "POST" $"https://www.servercontrolpanel.de/scp-core/api/v1/servers/($p)/storageoptimization?startAfterOptimization=true" -H $"Authorization: Bearer ($access_token)" -H "accept: application/hal+json") | from json }

# vim: sw=4 sts=4 ts=4 ai et ft=nu
+104
.archived/seed_armbian_torrents
···
+
#!/usr/bin/env -S uv run --script
+
# /// script
+
# dependencies = [
+
# "qbittorrent-api",
+
# "requests",
+
# "bs4",
+
# "docopt"
+
# ]
+
# ///
+
+
"""seed_armbian_torrents.py
+
+
Description:
+
Armbian torrents seed script
+
+
This script will scrape https://mirrors.jevincanders.net/armbian/dl/ for
+
torrent files and add them to a qBittorrent instance. If there are already
+
Armbian torrents in the qBittorrent instance, they will be removed, and new
+
ones will be added in their place. This script is intended to be run under
+
/etc/cron.weekly or used in a systemd timer.
+
+
Usage:
+
seed_armbian_torrents.py (HOSTNAME) (USERNAME) (PASSWORD)
+
seed_armbian_torrents.py -h
+
+
Examples:
+
seed_armbian_torrents.py "http://localhost:8080" "admin" "adminadmin"
+
seed_armbian_torrents.py "https://cat.seedhost.eu/lol/qbittorrent" "lol" "pw"
+
+
Options:
+
-h, --help show this help message and exit.
+
"""
+
+
import os
+
+
import qbittorrentapi
+
import requests
+
from bs4 import BeautifulSoup
+
from docopt import docopt
+
+
+
def add_torrents(args: dict):
    """Scrape the Armbian mirror for .torrent files and add them to qBittorrent.

    Args:
        args: docopt dict with "HOSTNAME", "USERNAME", and "PASSWORD" keys.
    """
    base_url = "https://mirrors.jevincanders.net/armbian/dl"
    ignore_dirs = ["/armbian/", "_patch/", "_toolchain/"]
    archive_dir_urls = []

    # Collect the per-board directory listings; each has an archive/ subdir.
    # A timeout prevents the script from hanging forever on a stalled mirror.
    page = requests.get(base_url, timeout=60).text
    soup = BeautifulSoup(page, "html.parser")
    for node in soup.find_all("a"):
        # Bind href once instead of calling node.get("href") three times;
        # this also avoids the nested-double-quote f-string that only
        # parses on Python >= 3.12.
        href = node.get("href")
        if href is not None and href.endswith("/") and href not in ignore_dirs:
            archive_dir_urls.append(f"{base_url}/{href}archive/")

    # Gather every .torrent link from each archive directory.
    torrent_urls = []
    for url in archive_dir_urls:
        response = requests.get(url, timeout=60)
        soup = BeautifulSoup(response.content, "html.parser")
        links = soup.find_all("a")
        for link in links:
            if link.text.endswith(".torrent"):
                torrent_urls.append(url + link.text)

    try:
        qbt_client = qbittorrentapi.Client(
            host=args["HOSTNAME"], username=args["USERNAME"], password=args["PASSWORD"]
        )
        qbt_client.auth_log_in()

        # len() replaces the old manual counting loop over torrent_urls.
        torrent_count = len(torrent_urls)

        print(f"There are {torrent_count} torrents to add. This gonna take a while...")

        for url in torrent_urls:
            qbt_client.torrents_add(url, category="distro")
            print(f"Added {os.path.basename(url)}")
        qbt_client.auth_log_out()
    except qbittorrentapi.LoginFailed as e:
        print(e)
+
+
+
def remove_torrents(args: dict):
    """Delete every torrent named "Armbian*" from qBittorrent, files included.

    Args:
        args: docopt dict with "HOSTNAME", "USERNAME", and "PASSWORD" keys.
    """
    try:
        client = qbittorrentapi.Client(
            host=args["HOSTNAME"],
            username=args["USERNAME"],
            password=args["PASSWORD"],
        )
        client.auth_log_in()

        # Filter first, then delete each match along with its data files.
        armbian_torrents = (
            t for t in client.torrents_info() if t.name.startswith("Armbian")
        )
        for torrent in armbian_torrents:
            torrent.delete(delete_files=True)
            print(f"Removed {torrent.name}")
        client.auth_log_out()
    except qbittorrentapi.LoginFailed as e:
        print(e)
+
+
+
if __name__ == "__main__":
    # Parse CLI arguments per the module docstring (docopt usage spec).
    args = docopt(__doc__)  # type: ignore
    # Drop any previously-seeded Armbian torrents, then add the fresh set.
    remove_torrents(args)
    add_torrents(args)
+
+
# vim: ts=4 sts=4 sw=4 et ai ft=python
+22
.archived/speedcheck
···
+
#!/usr/bin/env bash
#
# speedcheck - run a speed test and append a timestamped one-line summary to
# ~/speedtest-logs/YYYY-MM-DD.log. Requires the speedtest++ binary.

set -euo pipefail

LOG_DIR="${HOME}/speedtest-logs"
DAY="$(date '+%Y-%m-%d')"

# mkdir -p is a no-op when the directory already exists.
mkdir -p "${LOG_DIR}"

# Print one "HH:MM:SS [D: x MB/s] [U: y MB/s]" line on stdout.
print_speed() {
    # local keeps the helpers' variables out of the global scope.
    local _time _speedtest _dl_speed _ul_speed
    _time=$(date '+%H:%M:%S')
    # The last two lines of the text report are the download and upload
    # results as key=value pairs, in that order.
    _speedtest=$(speedtest++ --output text | tail -n 2)
    _dl_speed=$(echo "$_speedtest" | head -n 1 | awk -F= '{print $2}')
    _ul_speed=$(echo "$_speedtest" | tail -n 1 | awk -F= '{print $2}')
    echo "${_time} [D: ${_dl_speed} MB/s] [U: ${_ul_speed} MB/s]"
}

print_speed >>"${LOG_DIR}/${DAY}.log"

# vim: sw=4 ts=4 sts=4 ai et ft=bash
-36
amimullvad
···
-
#!/usr/bin/env zsh
-
-
# Check for dependencies
-
if ! test -x "$(command -v curl)"; then
-
echo "Missing dependency: curl"
-
exit 1
-
fi
-
-
if ! test -x "$(command -v gum)"; then
-
echo "Missing dependency: gum"
-
echo "See github.com/charmbracelet/gum"
-
exit 1
-
fi
-
-
if ! test -x "$(command -v jq)"; then
-
echo "Missing dependency: jq"
-
exit 1
-
fi
-
-
MV_API=$(curl -sSL https://am.i.mullvad.net/json)
-
IP=$(echo $MV_API | jq ."ip" | tr -d '"')
-
CITY=$(echo $MV_API | jq ."city" | tr -d '"')
-
COUNTRY=$(echo $MV_API | jq ."country" | tr -d '"')
-
MV_EXIT_IP_HN=$(echo $MV_API | jq ."mullvad_exit_ip_hostname" | tr -d '"')
-
MV_SERVER_TYPE=$(echo $MV_API | jq ."mullvad_server_type" | tr -d '"')
-
BLACKLISTED=$(echo $MV_API | jq ."blacklisted"."blacklisted")
-
-
LEFT_COL=$(printf "%s\n%s\n%s\n%s\n%s\n%s\n" "IP Address" "City" "Country" "Exit IP Hostname" "Server Type" "Blacklisted")
-
RIGHT_COL=$(printf "%s\n%s\n%s\n%s\n%s\n%s\n" "$IP" "$CITY" "$COUNTRY" "$MV_EXIT_IP_HN" "$MV_SERVER_TYPE" "$BLACKLISTED")
-
GUM_LEFT=$(gum style --foreground "#73F59F" --border-foreground 57 --border none --width 20 --margin "1 2" --padding "0 1" --align right "$LEFT_COL")
-
GUM_RIGHT=$(gum style --foreground "#F1F1F1" --border-foreground 57 --border none --width 20 --margin "1 0" --align left "$RIGHT_COL")
-
-
GUM_TOP=$(gum style --bold --foreground 212 --border-foreground 57 --border rounded --width 50 --align center --padding "0 1" "Am I Mullvad?")
-
GUM_BOTTOM=$(gum join --horizontal --align right "$GUM_LEFT" "$GUM_RIGHT")
-
BOTTOM=$(gum style --border-foreground 57 --border rounded --width 50 --align center --padding "0 1" $GUM_BOTTOM)
-
gum join --vertical "$GUM_TOP" "$BOTTOM"
-21
backup_podvol
···
-
#!/usr/bin/env bash
-
-
set -euo pipefail
-
-
BACKUP_DIR="${HOME}/podman_volume_backups"
-
DATE=$(date '+%Y-%m-%d_%H%M%S')
-
-
volumes=("$@")
-
-
if [ ! -d "$BACKUP_DIR" ]; then
-
mkdir -p "$BACKUP_DIR"
-
fi
-
-
for vol in "${volumes[@]}"; do
-
podman volume export "$vol" --output "${BACKUP_DIR}/${vol}-${DATE}.tar"
-
gzip "${BACKUP_DIR}/${vol}-${DATE}.tar"
-
done
-
-
find "$BACKUP_DIR" -maxdepth 1 -mtime +3 -type f -delete
-
-
exit 0
+146
blog2gemlog
···
+
#!/usr/bin/env -S uv run --script
# /// script
# dependencies = [
#     "feedgen",
#     "feedparser",
#     "md2gemini",
# ]
# ///

# blog2gemlog:
# 1. Take a markdown blog post as input, convert it to gemtext.
# 2. Update the gemlog index.
# 3. Update the gemlog Atom feed.
#
# Usage: blog2gemlog /path/to/blog/post.md "Blog Post Title"

import sys
from datetime import datetime
from pathlib import Path
from zoneinfo import ZoneInfo

import feedparser
from feedgen.feed import FeedGenerator
from md2gemini import md2gemini

# Guard against a missing argument. Not likely to happen, but still.
if len(sys.argv) != 3:
    print('Usage: blog2gemlog /path/to/blog/post.md "Blog Post Title"')
    sys.exit(1)

# Absolute path to the gemini content directory.
gemini_dir = Path.home().joinpath(
    "repos/tildegit.org/hyperreal/hyperreal.coffee/gemini"
)

# Current date in YYYY-MM-DD format, used in the post footer and index line.
date_now = datetime.now().strftime("%Y-%m-%d")

# The blog post path from sys.argv[1] must be absolute.
blog_post_path = Path(sys.argv[1])
if not blog_post_path.is_absolute():
    print("Supply absolute path to blog post.")
    sys.exit(1)

# The gemlog post title from sys.argv[2].
gemlog_post_title = str(sys.argv[2])

# Convert the markdown blog post to gemtext.
with open(blog_post_path, "r") as md_f:
    content = md2gemini(md_f.read(), frontmatter=True, links="paragraph", md_links=True)

# Destination path of the converted gemlog post.
gemlog_post_path = gemini_dir.joinpath(f"gemlog/{blog_post_path.stem}.gmi")

# Footer (END section) appended to every gemlog post.
gemlog_end = f"\n\n## END\nLast updated: {date_now}\n\n=> ../gemlog Gemlog archive\n=> ../ hyperreal.coffee"

# Write heading, converted body, and footer in a single pass instead of the
# old write / append / re-read / rewrite sequence; the resulting file
# contents are identical.
with open(gemlog_post_path, "w") as gmi_f:
    gmi_f.write(f"# {gemlog_post_title}\n\n{content}{gemlog_end}")

# Insert the new index entry as line 6 of the gemlog index (the first five
# lines are the index header).
index_path = gemini_dir.joinpath("gemlog/index.gmi")
with open(index_path, "r") as index_f:
    index_lines = index_f.readlines()
index_lines.insert(5, f"=> ./{gemlog_post_path.name} {date_now} {gemlog_post_title}\n")
with open(index_path, "w") as index_f:
    index_f.write("".join(index_lines))

# Timezone-aware timestamp of the present moment, formatted for the Atom
# <updated> elements. datetime.now(tz=...) replaces the old round-trip
# through datetime.timestamp()/fromtimestamp().
aware_ts = datetime.now(tz=ZoneInfo("America/Chicago"))
updated_ts = aware_ts.strftime("%Y-%m-%dT%H:%M:%S%z")

# Parse the existing Atom feed.
d = feedparser.parse(gemini_dir.joinpath("gemlog/atom.xml"))

# New Atom entry for the post we just published.
new_entry_dict = {
    "id": f"gemini://hyperreal.coffee/gemlog/{gemlog_post_path.name}",
    "title": gemlog_post_title,
    "updated": updated_ts,
    "links": [
        {
            "href": f"gemini://hyperreal.coffee/gemlog/{gemlog_post_path.name}",
            "rel": "alternate",
            "type": "text/gemini",
        }
    ],
}

# Prepend the new entry (feeds list newest first).
d["entries"].insert(0, new_entry_dict)

# Rebuild the feed. BUG FIX: the feed-level <updated> is now set from
# updated_ts directly; previously only d["updated"] was assigned, so
# fg.updated(d["feed"]["updated"]) re-emitted the feed's OLD timestamp.
fg = FeedGenerator()
fg.id(d["feed"]["id"])
fg.title(d["feed"]["title"])
fg.updated(updated_ts)
fg.link(d["feed"]["links"])

# Reverse so entries are added oldest-first and come out in the correct
# order in the rendered feed.
d["entries"].reverse()

for entry in d["entries"]:
    fe = fg.add_entry()
    fe.id(entry["id"])
    fe.title(entry["title"])
    fe.updated(entry["updated"])
    fe.link(entry["links"])

# Finally, render the FeedGenerator object as an Atom feed and write it to
# the atom.xml file.
fg.atom_file(gemini_dir.joinpath("gemlog/atom.xml"), pretty=True)

# vim: ai et ft=python sts=4 sw=4 ts=4
-19
check_updates
···
-
#!/usr/bin/env bash
-
-
updates=()
-
-
if [[ -f /etc/debian_version ]]; then
-
APT_UPDATES=$(sudo apt update 2>/dev/null | grep package | tail -1 | cut -d '.' -f 1 | awk '{print $1}')
-
if [ "$APT_UPDATES" = "All" ]; then
-
NUM_UPDATES=0
-
else
-
NUM_UPDATES="$APT_UPDATES"
-
fi
-
updates+=("APT: ${NUM_UPDATES}")
-
fi
-
-
if command -v flatpak >/dev/null; then
-
updates+=("Flatpak: $(flatpak remote-ls --updates | wc -l)")
-
fi
-
-
echo "${updates[*]}"
-28
encrypt_mail
···
-
#!/usr/bin/env bash
-
-
set -euxo pipefail
-
-
if test -f "${HOME}/.env_common"; then
-
source "${HOME}/.env_common"
-
fi
-
-
cleanup() {
-
echo "Cleaning up"
-
rm -rfv /tmp/mail*
-
}
-
-
trap cleanup 0 1 2 3 6
-
-
DATE=$(date '+%Y-%m-%d')
-
tar czf "/tmp/mail-${DATE}.tar.gz" /naspool/mail
-
age --recipient 'age12pcwr6d8w6wfh5ymarphypzlyqxza3c3xj7cseturzyu70s02umske6mt6' --output "/tmp/mail-${DATE}.tar.gz.age" "/tmp/mail-${DATE}.tar.gz"
-
scp "/tmp/mail-${DATE}.tar.gz.age" root@aux-remote.carp-wyvern.ts.net:/bpool/encrypted_mail
-
ssh root@aux-remote.carp-wyvern.ts.net -- find /bpool/encrypted_mail -maxdepth 1 -type f -mtime +7 -delete
-
-
curl \
-
-H prio:default \
-
-H tags:incoming_envelope \
-
-d "encrypt_mail: success" \
-
"${NTFY_SERVER}/backups"
-
-
# vim: ts=4 sts=4 sw=4 et ai ft=bash
+2 -1
feed_count
···
let mf_auth_token = (secret-tool lookup miniflux-auth-token hyperreal)
let mf_password = (secret-tool lookup miniflux-password hyperreal)
+
let mf_api_url = "http://moonshadow.carp-wyvern.ts.net:8080/v1/feeds/counters"
let unreads = (
( curl \
···
-H "Content-Type: application/json" \
-H $"X-Auth-Token: ($mf_auth_token)" \
-u $"hyperreal:($mf_password)" \
-
"https://mf.vern.cc/v1/feeds/counters"
+
($mf_api_url)
)
| from json
| get unreads
+55
git_backup
···
+
#!/usr/bin/env bash
#
# git_backup - mirror all tildegit.org repos plus a fixed list of knot repos
# into local backup directories under /naspool.
#
# Requires: GITEA_TOKEN (from ~/.env_common), curl, jq, git.

set -euxo pipefail

if [ -f "${HOME}/.env_common" ]; then
    source "${HOME}/.env_common"
else
    echo ".env_common not found"
    exit 1
fi

TILDEGIT_URL="https://tildegit.org"
TILDEGIT_CLONE_URL="git@tildegit.org:hyperreal"
TILDEGIT_BACKUP_DIR="/naspool/tildegit-backup"
KNOT_BACKUP_DIR="/naspool/knot-backup"
KNOT_CLONE_URL="git@knot.moonshadow.dev:hyperreal.bsky.moonshadow.dev"

# List repo names via the Gitea API. jq -r emits raw names (the old
# `tr -d '"'` would also have stripped quotes appearing inside a name).
# NOTE(review): only the first 100 repos (page=1) are fetched — bump the
# paging if the account ever exceeds that. Also, -k disables TLS certificate
# verification; confirm it is really needed for tildegit.org.
curl -s -k \
    -u "hyperreal:${GITEA_TOKEN}" \
    "${TILDEGIT_URL}/api/v1/user/repos?limit=100&page=1" |
    jq -r '.[].name | select(.!="keyoxide_proof")' |
    tee "${TILDEGIT_BACKUP_DIR}/repos.txt"

# Pull existing clones, clone missing ones; sleep between repos to be
# polite to the server.
while read -r line; do
    if [ -d "${TILDEGIT_BACKUP_DIR}/${line}" ]; then
        cd "${TILDEGIT_BACKUP_DIR}/${line}"
        git pull
    else
        cd "${TILDEGIT_BACKUP_DIR}"
        git clone "${TILDEGIT_CLONE_URL}/${line}.git"
    fi
    sleep 30
done <"${TILDEGIT_BACKUP_DIR}/repos.txt"

# The knot forge has no listing API used here, so the repo set is static.
knot_repos=(
    "ansible-homelab"
    "bin"
    "dotfiles"
    "hyperreal.coffee"
    "justfiles"
)

for repo in "${knot_repos[@]}"; do
    if [ -d "${KNOT_BACKUP_DIR}/${repo}" ]; then
        cd "${KNOT_BACKUP_DIR}/${repo}"
        git pull
    else
        cd "${KNOT_BACKUP_DIR}"
        git clone "${KNOT_CLONE_URL}/${repo}"
    fi
    sleep 30
done

# vim: ts=4 sw=4 sts=4 ai et ft=bash
+12 -12
hyperreal_backup
···
set -euxo pipefail
if [ ! -f "${HOME}/.env_common" ]; then
-
echo "ERROR: .env_common not found"
-
exit 1
+
echo "ERROR: .env_common not found"
+
exit 1
else
-
source "${HOME}/.env_common"
+
source "${HOME}/.env_common"
fi
curl --retry 3 "${HC_PING_URL}/start"
-
BORG_ARCHIVE=$(borg list ssh://root@hyperreal.carp-wyvern.ts.net/srv/borgbackup/hyperreal | tail -n 1 | awk '{print $1}')
+
BORG_ARCHIVE=$(borg list ssh://u511927@u511927.your-storagebox.de:23/home/borgbackup/hyperreal | tail -n 1 | awk '{print $1}')
ARCHIVE_BASENAME=$(echo "$BORG_ARCHIVE" | cut -d "T" -f 1)
if ! borg export-tar \
-
"ssh://root@hyperreal.carp-wyvern.ts.net/srv/borgbackup/hyperreal::${BORG_ARCHIVE}" \
-
"/naspool/hyperreal_backup/${ARCHIVE_BASENAME}.tar"; then
-
curl --retry 3 "${HC_PING_URL}/fail"
+
"ssh://u511927@u511927.your-storagebox.de:23/home/borgbackup/hyperreal::${BORG_ARCHIVE}" \
+
"/naspool/hyperreal_backup/${ARCHIVE_BASENAME}.tar"; then
+
curl --retry 3 "${HC_PING_URL}/fail"
fi
find /naspool/hyperreal_backup \
-
-maxdepth 1 \
-
-type f \
-
-mtime +7 \
-
-exec rm -fv {} \; ||
-
curl --retry 3 "${HC_PING_URL}/fail"
+
-maxdepth 1 \
+
-type f \
+
-mtime +7 \
+
-exec rm -fv {} \; ||
+
curl --retry 3 "${HC_PING_URL}/fail"
curl --retry 3 "$HC_PING_URL"
-26
nc_snap_create
···
-
#!/usr/bin/env nu
-
-
let refresh_token = (open ($env.HOME | path join .netcup_refresh_token.json) | get refresh_token)
-
-
def get_access_token [refresh_token] {
-
(curl -s "https://www.servercontrolpanel.de/realms/scp/protocol/openid-connect/token" -d "client_id=scp" -d $"refresh_token=($refresh_token)" -d "grant_type=refresh_token")
-
| from json
-
| get access_token
-
}
-
-
def get_servers [access_token] {
-
(curl -s -X "GET" "https://www.servercontrolpanel.de/scp-core/api/v1/servers" -H $"Authorization: Bearer ($access_token)" -H "accept: application/hal+json")
-
| from json
-
}
-
-
let access_token = (get_access_token ($refresh_token))
-
let servers = (get_servers ($access_token))
-
-
let date_now = (date now | format date "%Y%m%d")
-
-
($servers | get id) | each {
-
(curl -s -X "POST" $"https://www.servercontrolpanel.de/scp-core/api/v1/servers/($in)/snapshots" -H $"Authorization: Bearer ($access_token)" -H "accept: application/hal+json" -H "Content-Type: application/json" -d $'{"name": "($date_now)", "description": "via script", "diskName": "vda", "onlineSnapshot": false}')
-
| from json
-
}
-
-
# vim: sw=4 sts=4 ts=4 ai et ft=nu
-21
nc_snap_optimize
···
-
#!/usr/bin/env nu
-
-
let refresh_token = (open ($env.HOME | path join .netcup_refresh_token.json) | get refresh_token)
-
-
def get_access_token [refresh_token] {
-
(curl -s "https://www.servercontrolpanel.de/realms/scp/protocol/openid-connect/token" -d "client_id=scp" -d $"refresh_token=($refresh_token)" -d "grant_type=refresh_token")
-
| from json
-
| get access_token
-
}
-
-
def get_servers [access_token] {
-
(curl -s -X "GET" "https://www.servercontrolpanel.de/scp-core/api/v1/servers" -H $"Authorization: Bearer ($access_token)" -H "accept: application/hal+json")
-
| from json
-
}
-
-
let access_token = (get_access_token ($refresh_token))
-
let servers = (get_servers ($access_token))
-
-
($servers | get id) | par-each { |p| (curl -s -X "POST" $"https://www.servercontrolpanel.de/scp-core/api/v1/servers/($p)/storageoptimization?startAfterOptimization=true" -H $"Authorization: Bearer ($access_token)" -H "accept: application/hal+json") | from json }
-
-
# vim: sw=4 sts=4 ts=4 ai et ft=nu
-104
seed_armbian_torrents
···
-
#!/usr/bin/env -S uv run --script
-
# /// script
-
# dependencies = [
-
# "qbittorrent-api",
-
# "requests",
-
# "bs4",
-
# "docopt"
-
# ]
-
# ///
-
-
"""seed_armbian_torrents.py
-
-
Description:
-
Armbian torrents seed script
-
-
This script will scrape https://mirrors.jevincanders.net/armbian/dl/ for
-
torrent files and add them to a qBittorrent instance. If there are already
-
Armbian torrents in the qBittorrent instance, they will be removed, and new
-
ones will be added in their place. This script is intended to be run under
-
/etc/cron.weekly or used in a systemd timer.
-
-
Usage:
-
seed_armbian_torrents.py (HOSTNAME) (USERNAME) (PASSWORD)
-
seed_armbian_torrents.py -h
-
-
Examples:
-
seed_armbian_torrents.py "http://localhost:8080" "admin" "adminadmin"
-
seed_armbian_torrents.py "https://cat.seedhost.eu/lol/qbittorrent" "lol" "pw"
-
-
Options:
-
-h, --help show this help message and exit.
-
"""
-
-
import os
-
-
import qbittorrentapi
-
import requests
-
from bs4 import BeautifulSoup
-
from docopt import docopt
-
-
-
def add_torrents(args: dict):
-
base_url = "https://mirrors.jevincanders.net/armbian/dl"
-
ignore_dirs = ["/armbian/", "_patch/", "_toolchain/"]
-
archive_dir_urls = []
-
-
page = requests.get(base_url).text
-
soup = BeautifulSoup(page, "html.parser")
-
for node in soup.find_all("a"):
-
if node.get("href") is not None:
-
if node.get("href").endswith("/") and node.get("href") not in ignore_dirs:
-
archive_dir_urls.append(f"{base_url}/{node.get("href")}archive/")
-
-
torrent_urls = []
-
for url in archive_dir_urls:
-
response = requests.get(url, timeout=60)
-
soup = BeautifulSoup(response.content, "html.parser")
-
links = soup.find_all("a")
-
for link in links:
-
if link.text.endswith(".torrent"):
-
torrent_urls.append(url + link.text)
-
-
try:
-
qbt_client = qbittorrentapi.Client(
-
host=args["HOSTNAME"], username=args["USERNAME"], password=args["PASSWORD"]
-
)
-
qbt_client.auth_log_in()
-
-
torrent_count = 0
-
for url in torrent_urls:
-
torrent_count = torrent_count + 1
-
-
print(f"There are {torrent_count} torrents to add. This gonna take a while...")
-
-
for url in torrent_urls:
-
qbt_client.torrents_add(url, category="distro")
-
print(f"Added {os.path.basename(url)}")
-
qbt_client.auth_log_out()
-
except qbittorrentapi.LoginFailed as e:
-
print(e)
-
-
-
def remove_torrents(args: dict):
-
try:
-
qbt_client = qbittorrentapi.Client(
-
host=args["HOSTNAME"], username=args["USERNAME"], password=args["PASSWORD"]
-
)
-
qbt_client.auth_log_in()
-
-
for torrent in qbt_client.torrents_info():
-
if torrent.name.startswith("Armbian"):
-
torrent.delete(delete_files=True)
-
print(f"Removed {torrent.name}")
-
qbt_client.auth_log_out()
-
except qbittorrentapi.LoginFailed as e:
-
print(e)
-
-
-
if __name__ == "__main__":
-
args = docopt(__doc__) # type: ignore
-
remove_torrents(args)
-
add_torrents(args)
-
-
# vim: ts=4 sts=4 sw=4 et ai ft=python
-22
speedcheck
···
-
#!/usr/bin/env bash
-
-
set -euo pipefail
-
-
LOG_DIR="${HOME}/speedtest-logs"
-
DAY="$(date '+%Y-%m-%d')"
-
-
if [ ! -d "${LOG_DIR}" ]; then
-
mkdir -p "${LOG_DIR}"
-
fi
-
-
print_speed() {
-
_time=$(date '+%H:%M:%S')
-
_speedtest=$(speedtest++ --output text | tail -n 2)
-
_dl_speed=$(echo "$_speedtest" | head -n 1 | awk -F= '{print $2}')
-
_ul_speed=$(echo "$_speedtest" | tail -n 1 | awk -F= '{print $2}')
-
echo "${_time} [D: ${_dl_speed} MB/s] [U: ${_ul_speed} MB/s]"
-
}
-
-
print_speed >>"${LOG_DIR}/${DAY}.log"
-
-
# vim: sw=4 ts=4 sts=4 ai et ft=bash
-34
tildegit_backup
···
-
#!/usr/bin/env bash
-
-
set -euxo pipefail
-
-
if [ -f "${HOME}/.env_common" ]; then
-
source "${HOME}/.env_common"
-
else
-
echo ".env_common not found"
-
exit 1
-
fi
-
-
TILDEGIT_URL="https://tildegit.org"
-
TILDEGIT_CLONE_URL="git@tildegit.org:hyperreal"
-
BACKUP_DIR="/naspool/tildegit-backup"
-
-
curl -s -k \
-
-u "hyperreal:${GITEA_TOKEN}" \
-
"${TILDEGIT_URL}/api/v1/user/repos?limit=100&page=1" |
-
jq '.[].name | select(.!="keyoxide_proof")' |
-
tr -d '"' |
-
tee "${BACKUP_DIR}/repos.txt"
-
-
while read -r line; do
-
if [ -d "${BACKUP_DIR}/${line}" ]; then
-
cd "${BACKUP_DIR}/${line}"
-
git pull
-
else
-
cd "${BACKUP_DIR}"
-
git clone "${TILDEGIT_CLONE_URL}/${line}.git"
-
fi
-
sleep 30
-
done <"${BACKUP_DIR}/repos.txt"
-
-
# vim: ts=4 sw=4 sts=4 ai et ft=bash