Assorted shell and Python scripts


+21
.archived/backup_podvol
#!/usr/bin/env bash

set -euo pipefail

BACKUP_DIR="${HOME}/podman_volume_backups"
DATE=$(date '+%Y-%m-%d_%H%M%S')

# Require at least one volume name; under set -u, expanding an empty
# volumes array would fail on older bash versions anyway.
if [ "$#" -eq 0 ]; then
    echo "Usage: backup_podvol VOLUME [VOLUME ...]" >&2
    exit 1
fi

volumes=("$@")

mkdir -p "$BACKUP_DIR"

# Export each volume to a tarball, then compress it.
for vol in "${volumes[@]}"; do
    podman volume export "$vol" --output "${BACKUP_DIR}/${vol}-${DATE}.tar"
    gzip "${BACKUP_DIR}/${vol}-${DATE}.tar"
done

# Prune backups older than three days.
find "$BACKUP_DIR" -maxdepth 1 -mtime +3 -type f -delete

exit 0
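
A typical run, assuming Podman volumes named nextcloud_data and caddy_config exist (hypothetical names):

    backup_podvol nextcloud_data caddy_config

Each volume lands in ~/podman_volume_backups as VOLUME-DATE.tar.gz, and anything older than three days is pruned on the next run.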
+2 -6
.archived/check_updates
···
     else
         NUM_UPDATES="$APT_UPDATES"
     fi
-    updates+=("[ APT: ${NUM_UPDATES} ]")
+    updates+=("APT: ${NUM_UPDATES}")
 fi
-
-if [[ -f /etc/redhat-release ]]; then
-    updates+=("[ DNF: $(sudo dnf check-update | wc -l) ]")
-fi
 if command -v flatpak >/dev/null; then
-    updates+=("[ Flatpak: $(flatpak remote-ls --updates | wc -l) ]")
+    updates+=("Flatpak: $(flatpak remote-ls --updates | wc -l)")
 fi
 echo "${updates[*]}"
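
With four APT and two Flatpak updates pending (hypothetical counts), the output changes from

    [ APT: 4 ] [ Flatpak: 2 ]

to

    APT: 4 Flatpak: 2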
+28
.archived/encrypt_mail
#!/usr/bin/env bash

set -euxo pipefail

if test -f "${HOME}/.env_common"; then
    source "${HOME}/.env_common"
fi

cleanup() {
    echo "Cleaning up"
    rm -rfv /tmp/mail*
}

# Remove the temporary archives on exit, including after HUP, INT,
# QUIT, or ABRT.
trap cleanup EXIT HUP INT QUIT ABRT

DATE=$(date '+%Y-%m-%d')
tar czf "/tmp/mail-${DATE}.tar.gz" /naspool/mail
age --recipient 'age12pcwr6d8w6wfh5ymarphypzlyqxza3c3xj7cseturzyu70s02umske6mt6' --output "/tmp/mail-${DATE}.tar.gz.age" "/tmp/mail-${DATE}.tar.gz"
scp "/tmp/mail-${DATE}.tar.gz.age" root@aux-remote.carp-wyvern.ts.net:/bpool/encrypted_mail
ssh root@aux-remote.carp-wyvern.ts.net -- find /bpool/encrypted_mail -maxdepth 1 -type f -mtime +7 -delete

curl \
    -H prio:default \
    -H tags:incoming_envelope \
    -d "encrypt_mail: success" \
    "${NTFY_SERVER}/backups"

# vim: ts=4 sts=4 sw=4 et ai ft=bash
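
Restoring is the reverse; a minimal sketch, assuming the matching age identity file lives at ~/mail_key.txt (hypothetical path):

    age --decrypt --identity ~/mail_key.txt --output mail-2024-01-01.tar.gz mail-2024-01-01.tar.gz.age
    tar xzf mail-2024-01-01.tar.gz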
+26
.archived/nc_snap_create
#!/usr/bin/env nu

let refresh_token = (open ($env.HOME | path join .netcup_refresh_token.json) | get refresh_token)

def get_access_token [refresh_token] {
    (curl -s "https://www.servercontrolpanel.de/realms/scp/protocol/openid-connect/token" -d "client_id=scp" -d $"refresh_token=($refresh_token)" -d "grant_type=refresh_token")
    | from json
    | get access_token
}

def get_servers [access_token] {
    (curl -s -X "GET" "https://www.servercontrolpanel.de/scp-core/api/v1/servers" -H $"Authorization: Bearer ($access_token)" -H "accept: application/hal+json")
    | from json
}

let access_token = (get_access_token ($refresh_token))
let servers = (get_servers ($access_token))

let date_now = (date now | format date "%Y%m%d")

($servers | get id) | each {
    (curl -s -X "POST" $"https://www.servercontrolpanel.de/scp-core/api/v1/servers/($in)/snapshots" -H $"Authorization: Bearer ($access_token)" -H "accept: application/hal+json" -H "Content-Type: application/json" -d $'{"name": "($date_now)", "description": "via script", "diskName": "vda", "onlineSnapshot": false}')
    | from json
}

# vim: sw=4 sts=4 ts=4 ai et ft=nu
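
The script expects a JSON file at ~/.netcup_refresh_token.json containing at least a refresh_token key; the value below is a placeholder:

    {"refresh_token": "eyJhbGciOiJIUzI1NiJ9..."}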
+21
.archived/nc_snap_optimize
#!/usr/bin/env nu

let refresh_token = (open ($env.HOME | path join .netcup_refresh_token.json) | get refresh_token)

def get_access_token [refresh_token] {
    (curl -s "https://www.servercontrolpanel.de/realms/scp/protocol/openid-connect/token" -d "client_id=scp" -d $"refresh_token=($refresh_token)" -d "grant_type=refresh_token")
    | from json
    | get access_token
}

def get_servers [access_token] {
    (curl -s -X "GET" "https://www.servercontrolpanel.de/scp-core/api/v1/servers" -H $"Authorization: Bearer ($access_token)" -H "accept: application/hal+json")
    | from json
}

let access_token = (get_access_token ($refresh_token))
let servers = (get_servers ($access_token))

($servers | get id) | par-each { |p| (curl -s -X "POST" $"https://www.servercontrolpanel.de/scp-core/api/v1/servers/($p)/storageoptimization?startAfterOptimization=true" -H $"Authorization: Bearer ($access_token)" -H "accept: application/hal+json") | from json }

# vim: sw=4 sts=4 ts=4 ai et ft=nu
-39
.archived/quickinfo
#!/usr/bin/env bash

if ! command -v gum >/dev/null; then
    echo "Gum command not found"
    exit 0
fi

function gumstyle() {
    GUMSTYLE=$(gum style --foreground="#cba6f7" "$1")
    echo "${GUMSTYLE} : ${2}"
}

echo
source /etc/os-release
gum style --foreground="#f38ba8" "$(echo $PRETTY_NAME)"
echo
gumstyle "Kernel" "$(uname -sr)"
gumstyle "Uptime" "$(uptime -p)"
echo

if [[ -f /etc/debian_version ]]; then
    APT_PACKAGES=$(sudo apt update 2>/dev/null | grep packages | cut -d '.' -f 1 | awk '{print $1}')
    gumstyle "APT updates" "$APT_PACKAGES"
fi

if [[ -f /etc/redhat-release ]]; then
    DNF_PACKAGES=$(sudo dnf check-update | wc -l)
    gumstyle "DNF updates" "$DNF_PACKAGES"
fi

if command -v flatpak >/dev/null; then
    FLATPAK_PACKAGES=$(flatpak remote-ls --updates | wc -l)
    gumstyle "Flatpak updates" "$FLATPAK_PACKAGES"
fi

if [[ -d /home/linuxbrew/.linuxbrew ]]; then
    BREW_PACKAGES=$(cat /home/jas/.homebrew_updates)
    gumstyle "Homebrew updates" "$BREW_PACKAGES"
fi
+104
.archived/seed_armbian_torrents
#!/usr/bin/env -S uv run --script
# /// script
# dependencies = [
#     "qbittorrent-api",
#     "requests",
#     "bs4",
#     "docopt"
# ]
# ///

"""seed_armbian_torrents.py

Description:
    Armbian torrents seed script

    This script will scrape https://mirrors.jevincanders.net/armbian/dl/ for
    torrent files and add them to a qBittorrent instance. If there are already
    Armbian torrents in the qBittorrent instance, they will be removed, and new
    ones will be added in their place. This script is intended to be run under
    /etc/cron.weekly or used in a systemd timer.

Usage:
    seed_armbian_torrents.py (HOSTNAME) (USERNAME) (PASSWORD)
    seed_armbian_torrents.py -h

Examples:
    seed_armbian_torrents.py "http://localhost:8080" "admin" "adminadmin"
    seed_armbian_torrents.py "https://cat.seedhost.eu/lol/qbittorrent" "lol" "pw"

Options:
    -h, --help    show this help message and exit.
"""

import os

import qbittorrentapi
import requests
from bs4 import BeautifulSoup
from docopt import docopt


def add_torrents(args: dict):
    base_url = "https://mirrors.jevincanders.net/armbian/dl"
    ignore_dirs = ["/armbian/", "_patch/", "_toolchain/"]
    archive_dir_urls = []

    page = requests.get(base_url, timeout=60).text
    soup = BeautifulSoup(page, "html.parser")
    for node in soup.find_all("a"):
        href = node.get("href")
        if href is not None and href.endswith("/") and href not in ignore_dirs:
            archive_dir_urls.append(f"{base_url}/{href}archive/")

    torrent_urls = []
    for url in archive_dir_urls:
        response = requests.get(url, timeout=60)
        soup = BeautifulSoup(response.content, "html.parser")
        links = soup.find_all("a")
        for link in links:
            if link.text.endswith(".torrent"):
                torrent_urls.append(url + link.text)

    try:
        qbt_client = qbittorrentapi.Client(
            host=args["HOSTNAME"], username=args["USERNAME"], password=args["PASSWORD"]
        )
        qbt_client.auth_log_in()

        torrent_count = len(torrent_urls)
        print(f"There are {torrent_count} torrents to add. This gonna take a while...")

        for url in torrent_urls:
            qbt_client.torrents_add(url, category="distro")
            print(f"Added {os.path.basename(url)}")
        qbt_client.auth_log_out()
    except qbittorrentapi.LoginFailed as e:
        print(e)


def remove_torrents(args: dict):
    try:
        qbt_client = qbittorrentapi.Client(
            host=args["HOSTNAME"], username=args["USERNAME"], password=args["PASSWORD"]
        )
        qbt_client.auth_log_in()

        for torrent in qbt_client.torrents_info():
            if torrent.name.startswith("Armbian"):
                torrent.delete(delete_files=True)
                print(f"Removed {torrent.name}")
        qbt_client.auth_log_out()
    except qbittorrentapi.LoginFailed as e:
        print(e)


if __name__ == "__main__":
    args = docopt(__doc__)  # type: ignore
    remove_torrents(args)
    add_torrents(args)


# vim: ts=4 sts=4 sw=4 et ai ft=python
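
The docstring notes this is meant for /etc/cron.weekly or a systemd timer; a minimal timer sketch, assuming the script is installed at /usr/local/bin/seed_armbian_torrents.py (path and unit names are hypothetical, credentials taken from the docstring example):

    # /etc/systemd/system/seed-armbian.service
    [Unit]
    Description=Reseed Armbian torrents

    [Service]
    Type=oneshot
    ExecStart=/usr/local/bin/seed_armbian_torrents.py "http://localhost:8080" "admin" "adminadmin"

    # /etc/systemd/system/seed-armbian.timer
    [Unit]
    Description=Reseed Armbian torrents weekly

    [Timer]
    OnCalendar=weekly
    Persistent=true

    [Install]
    WantedBy=timers.target

Enable it with: systemctl enable --now seed-armbian.timer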
+22
.archived/speedcheck
#!/usr/bin/env bash

set -euo pipefail

LOG_DIR="${HOME}/speedtest-logs"
DAY="$(date '+%Y-%m-%d')"

if [ ! -d "${LOG_DIR}" ]; then
    mkdir -p "${LOG_DIR}"
fi

print_speed() {
    _time=$(date '+%H:%M:%S')
    _speedtest=$(speedtest++ --output text | tail -n 2)
    _dl_speed=$(echo "$_speedtest" | head -n 1 | awk -F= '{print $2}')
    _ul_speed=$(echo "$_speedtest" | tail -n 1 | awk -F= '{print $2}')
    echo "${_time} [D: ${_dl_speed} MB/s] [U: ${_ul_speed} MB/s]"
}

print_speed >>"${LOG_DIR}/${DAY}.log"

# vim: sw=4 ts=4 sts=4 ai et ft=bash
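
Each run appends a single line to ~/speedtest-logs/YYYY-MM-DD.log; an illustrative entry (speeds made up):

    14:30:02 [D: 94.1 MB/s] [U: 11.6 MB/s]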
+2 -16
archivebox_schedule
···
 exit 1
 fi
-if test -f "${HOME}/.env_common"; then
-    source "${HOME}/.env_common"
-fi
-
 # Go to archivebox directory and run scheduled command for supplied
-# feed URL. Send ntfy an error message if it fails.
+# feed URL.
 cd /naspool/archivebox
 if ! /home/jas/.local/bin/archivebox add --depth=1 "$1" \
     >>/naspool/archivebox/logs/schedule.log; then
-    curl \
-        -H prio:urgent \
-        -H tags:warning \
-        -d "Error running archivebox schedule for $1" \
-        "${NTFY_SERVER}/archivebox_schedule"
-else
-    curl \
-        -H prio:default \
-        -H tags:incoming_envelope \
-        -d "archivebox schedule succeeded: $1" \
-        "${NTFY_SERVER}/archivebox_schedule"
+    echo "$(date '+%Y-%m-%d %H:%M:%S') ERROR. Exiting."
 fi
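
A sketch of how this might be scheduled, assuming the script is on PATH; the cadence and feed URL are illustrative:

    # crontab entry: archive the feed every six hours
    0 */6 * * * archivebox_schedule "https://example.com/feed.xml"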
+4
awkuptime
#!/usr/bin/env sh

# Lifted from https://superuser.com/a/1783477/3091052
awk '{m=int($1/60%60);h=int($1/3600%24);d=int($1/86400);printf "%sd %sh %sm\n", d, h, m}' /proc/uptime
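
For example, if /proc/uptime reads "123456.78 987654.32" (the first field is uptime in seconds), the one-liner prints:

    1d 10h 17m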
-21
backup_podvol
···
+146
blog2gemlog
#!/usr/bin/env -S uv run --script
# /// script
# dependencies = [
#     "feedgen",
#     "feedparser",
#     "md2gemini",
# ]
# ///

# 1. Take a markdown blog post as input, convert it to gemtext.
# 2. Update the gemlog index.
# 3. Update the gemlog Atom feed.

import sys
from datetime import datetime
from pathlib import Path
from zoneinfo import ZoneInfo

import feedparser
from feedgen.feed import FeedGenerator
from md2gemini import md2gemini

# This is so that Python doesn't yell at me if I forget an argument. Not
# likely to happen, but still.
if len(sys.argv) != 3:
    print('Usage: blog2gemlog /path/to/blog/post.md "Blog Post Title"')
    sys.exit(1)

# Set the absolute path to the gemini content directory
gemini_dir = Path.home().joinpath(
    "repos/tildegit.org/hyperreal/hyperreal.coffee/gemini"
)

# Get the current date in YYYY-MM-DD format
date_now = datetime.now().strftime("%Y-%m-%d")

# Read the blog post path from sys.argv[1] and ensure it is an absolute path
blog_post_path = Path(sys.argv[1])
if not blog_post_path.is_absolute():
    print("Supply an absolute path to the blog post.")
    sys.exit(1)

# Convert the markdown blog post to gemtext
with open(blog_post_path, "r") as md_f:
    content = md2gemini(md_f.read(), frontmatter=True, links="paragraph", md_links=True)

# Set the absolute path to the gemlog post
gemlog_post_path = gemini_dir.joinpath(f"gemlog/{blog_post_path.stem}.gmi")

# Write the gemtext content to the gemlog post path
with open(gemlog_post_path, "w") as gmi_f:
    gmi_f.write(content)

# Set the string for the END section of the gemlog post
gemlog_end = f"\n\n## END\nLast updated: {date_now}\n\n=> ../gemlog Gemlog archive\n=> ../ hyperreal.coffee"

# Append gemlog_end to the end of the gemlog post
with open(gemlog_post_path, "a") as gmi_f:
    gmi_f.write(gemlog_end)

# Read the gemlog post file lines into a list
with open(gemlog_post_path, "r") as gmi_f:
    contents = gmi_f.readlines()

# Get the gemlog post title from sys.argv[2]
gemlog_post_title = str(sys.argv[2])

# Insert the gemlog post title as the level 1 heading on line 1
contents.insert(0, f"# {gemlog_post_title}\n\n")

# Write the new contents as a string to the gemlog file
with open(gemlog_post_path, "w") as gmi_f:
    gmi_f.write("".join(contents))

# Read the lines of the gemlog index into a list
with open(gemini_dir.joinpath("gemlog/index.gmi"), "r") as index_f:
    contents = index_f.readlines()

# Set the content of the gemlog index entry line
gemlog_index_line = f"=> ./{gemlog_post_path.name} {date_now} {gemlog_post_title}\n"

# Insert the new gemlog index line into the list on line 6
contents.insert(5, gemlog_index_line)

# Write the new contents as a string to the gemlog index file
with open(gemini_dir.joinpath("gemlog/index.gmi"), "w") as index_f:
    index_f.write("".join(contents))

# Get a timezone-aware datetime object for the present moment
aware_ts = datetime.fromtimestamp(
    datetime.timestamp(datetime.now()), tz=ZoneInfo("America/Chicago")
)

# Format the timezone-aware datetime object for the <updated> element of the
# Atom feed
updated_ts = aware_ts.strftime("%Y-%m-%dT%H:%M:%S%z")

# Parse the existing Atom feed into a FeedParserDict
d = feedparser.parse(gemini_dir.joinpath("gemlog/atom.xml"))

# Update the feed-level <updated> element's value to the current timestamp
d["feed"]["updated"] = updated_ts

# Define a dictionary for the new Atom feed entry
new_entry_dict = {
    "id": f"gemini://hyperreal.coffee/gemlog/{gemlog_post_path.name}",
    "title": gemlog_post_title,
    "updated": updated_ts,
    "links": [
        {
            "href": f"gemini://hyperreal.coffee/gemlog/{gemlog_post_path.name}",
            "rel": "alternate",
            "type": "text/gemini",
        }
    ],
}

# Insert the new Atom feed entry into the FeedParserDict
d["entries"].insert(0, new_entry_dict)

# Instantiate a FeedGenerator object and set the feed-level elements
fg = FeedGenerator()
fg.id(d["feed"]["id"])
fg.title(d["feed"]["title"])
fg.updated(d["feed"]["updated"])
fg.link(d["feed"]["links"])

# Reverse the order of d["entries"] so that they are written to the file in
# the correct order
d["entries"].reverse()

# For each entry, add a new entry to the FeedGenerator object
for entry in d["entries"]:
    fe = fg.add_entry()
    fe.id(entry["id"])
    fe.title(entry["title"])
    fe.updated(entry["updated"])
    fe.link(entry["links"])

# Finally, render the FeedGenerator object as an Atom feed and write it to
# the atom.xml file
fg.atom_file(gemini_dir.joinpath("gemlog/atom.xml"), pretty=True)

# vim: ai et ft=python sts=4 sw=4 ts=4
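
For a post saved as gemlog/my-post.gmi with the title "My Post" (hypothetical values), the line inserted into index.gmi looks like:

    => ./my-post.gmi 2024-01-01 My Post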
-28
encrypt_mail
···
+26
feed_count
#!/usr/bin/env nu

let mf_auth_token = (secret-tool lookup miniflux-auth-token hyperreal)
let mf_password = (secret-tool lookup miniflux-password hyperreal)
let mf_api_url = "http://moonshadow.carp-wyvern.ts.net:8080/v1/feeds/counters"

let unreads = (
    (curl \
        -s \
        -X GET \
        -H "Content-Type: application/json" \
        -H $"X-Auth-Token: ($mf_auth_token)" \
        -u $"hyperreal:($mf_password)" \
        ($mf_api_url)
    )
    | from json
    | get unreads
    | values
)

if ($unreads | is-empty) {
    "0"
} else {
    $unreads | math sum
}

# vim: sw=4 sts=4 ts=4 ai et ft=nu
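
Both secrets come from the system keyring via libsecret's secret-tool; they can be stored ahead of time like so (each command prompts for the secret value):

    secret-tool store --label="Miniflux auth token" miniflux-auth-token hyperreal
    secret-tool store --label="Miniflux password" miniflux-password hyperreal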
-6
gen_archive_listing
#!/usr/bin/env bash

cd /naspool/archives
fd | sort | tee /home/jas/digital_archive_listing.txt

exit 0
+55
git_backup
#!/usr/bin/env bash

set -euxo pipefail

if [ -f "${HOME}/.env_common" ]; then
    source "${HOME}/.env_common"
else
    echo ".env_common not found"
    exit 1
fi

TILDEGIT_URL="https://tildegit.org"
TILDEGIT_CLONE_URL="git@tildegit.org:hyperreal"
TILDEGIT_BACKUP_DIR="/naspool/tildegit-backup"
KNOT_BACKUP_DIR="/naspool/knot-backup"
KNOT_CLONE_URL="git@knot.moonshadow.dev:hyperreal.bsky.moonshadow.dev"

# Fetch the list of repository names from the Gitea API, skipping
# keyoxide_proof.
curl -s -k \
    -u "hyperreal:${GITEA_TOKEN}" \
    "${TILDEGIT_URL}/api/v1/user/repos?limit=100&page=1" |
    jq '.[].name | select(.!="keyoxide_proof")' |
    tr -d '"' |
    tee "${TILDEGIT_BACKUP_DIR}/repos.txt"

while read -r line; do
    if [ -d "${TILDEGIT_BACKUP_DIR}/${line}" ]; then
        cd "${TILDEGIT_BACKUP_DIR}/${line}"
        git pull
    else
        cd "${TILDEGIT_BACKUP_DIR}"
        git clone "${TILDEGIT_CLONE_URL}/${line}.git"
    fi
    sleep 30
done <"${TILDEGIT_BACKUP_DIR}/repos.txt"

knot_repos=(
    "ansible-homelab"
    "bin"
    "dotfiles"
    "hyperreal.coffee"
    "justfiles"
)

for repo in "${knot_repos[@]}"; do
    if [ -d "${KNOT_BACKUP_DIR}/${repo}" ]; then
        cd "${KNOT_BACKUP_DIR}/${repo}"
        git pull
    else
        cd "${KNOT_BACKUP_DIR}"
        git clone "${KNOT_CLONE_URL}/${repo}"
    fi
    sleep 30
done

# vim: ts=4 sw=4 sts=4 ai et ft=bash
+19 -11
hyperreal_backup
···
 set -euxo pipefail
-if test -f "${HOME}/.env_common"; then
+if [ ! -f "${HOME}/.env_common" ]; then
+    echo "ERROR: .env_common not found"
+    exit 1
+else
     source "${HOME}/.env_common"
 fi
-BORG_ARCHIVE=$(borg list ssh://root@hyperreal.carp-wyvern.ts.net/mnt/borgbackup/hyperreal | tail -n 1 | awk '{print $1}')
+curl --retry 3 "${HC_PING_URL}/start"
+
+BORG_ARCHIVE=$(borg list ssh://u511927@u511927.your-storagebox.de:23/home/borgbackup/hyperreal | tail -n 1 | awk '{print $1}')
 ARCHIVE_BASENAME=$(echo "$BORG_ARCHIVE" | cut -d "T" -f 1)
-borg export-tar \
-    "ssh://root@hyperreal.carp-wyvern.ts.net/mnt/borgbackup/hyperreal::${BORG_ARCHIVE}" \
-    "/naspool/hyperreal_backup/${ARCHIVE_BASENAME}.tar"
-find /naspool/hyperreal_backup -maxdepth 1 -type f -mtime +7 -exec rm -fv {} \;
-curl \
-    -H prio:default \
-    -H tags:incoming_envelope \
-    -d "hyperreal_backup: success" \
-    "${NTFY_SERVER}/backups"
+if ! borg export-tar \
+    "ssh://u511927@u511927.your-storagebox.de:23/home/borgbackup/hyperreal::${BORG_ARCHIVE}" \
+    "/naspool/hyperreal_backup/${ARCHIVE_BASENAME}.tar"; then
+    curl --retry 3 "${HC_PING_URL}/fail"
+fi
+find /naspool/hyperreal_backup \
+    -maxdepth 1 \
+    -type f \
+    -mtime +7 \
+    -exec rm -fv {} \; ||
+    curl --retry 3 "${HC_PING_URL}/fail"
+curl --retry 3 "$HC_PING_URL"
 # vim: ts=4 sts=4 sw=4 et ai ft=bash
+28
quickinfo
#!/usr/bin/env bash

set -euo pipefail

desktop_info="separator:os:kernel:uptime:packages:memory:initsystem:btrfs:separator"
laptop_info="separator:os:kernel:uptime:packages:memory:initsystem:btrfs:battery:separator"
server_info="separator:os:kernel:uptime:packages:memory:initsystem:separator"
fbsd_info="separator:os:kernel:uptime:packages:memory:initsystem:zpool:separator"
styled_unread=$(gum style --foreground="#f2cdcd" --bold "Unread feeds: ")

case "$(hostname)" in
"desktop")
    fastfetch -s "$desktop_info" -l none
    gum join "$styled_unread" "$("${HOME}"/bin/feed_count)"
    ;;
"laptop")
    fastfetch -s "$laptop_info" -l none
    gum join "$styled_unread" "$("${HOME}"/bin/feed_count)"
    ;;
"nas")
    fastfetch -s "$fbsd_info" -l none
    ;;
*)
    fastfetch -s "$server_info" -l none
    ;;
esac

# vim: sw=4 sts=4 ts=4 ai et ft=bash
-104
seed_armbian_torrents.py
···
+24
split_dict
#!/usr/bin/env bash

# Check if gum is available
if ! command -v gum >/dev/null; then
    echo "Missing dependency: gum"
    echo "See https://github.com/charmbracelet/gum"
    exit 1
fi

# Check if get-def is available
if ! command -v get-def >/dev/null; then
    echo "Missing dependency: get-def"
    echo "Run pipx install get-def"
    exit 1
fi

WORD=$(gum input --placeholder="word")

if [[ -n "$ZELLIJ" ]]; then
    zellij action new-pane -- get-def "$WORD"
else
    echo "No Zellij sessions detected."
    exit 1
fi
+1 -1
split_man
···
 #!/usr/bin/env bash
 # Check if gum is available
-if ! test -x "$(command -v gum)"; then
+if ! command -v gum >/dev/null; then
     echo "Missing dependency: gum"
     echo "See https://github.com/charmbracelet/gum"
     exit 1
···