commit b55882ecde (parent 65d134c4ff) on master
Jeffrey Paul, 1 year ago
12 changed files (lines changed):

  1. .gitignore (3)
  2. 2021-11-09-wireguard-config-renamer/run.sh (1)
  3. 2022-02-09-ambientweather/aw.mjs (46)
  4. 2022-02-09-ambientweather/weatherlog.sh (38)
  5. 2022-09-16.signalattachments/process.sh (51)
  6. archive-to-cloud/tocloud (140)
  7. golang-binaries-installer/install.sh (4)
  8. misc/photobackup-ext.Makefile (11)
  9. osmand-maps/Dockerfile (16)
 10. osmand-maps/Makefile (17)
 11. osmand-maps/gen.sh (114)
 12. osmand-maps/run.sh (10)

.gitignore (vendored)
@@ -1,5 +1,8 @@
.envrc
.DS_Store
*.pyc
.terraform/
terraform.tfstate
terraform.tfstate.backup
*.log
log.txt

2021-11-09-wireguard-config-renamer/run.sh
@@ -11,4 +11,5 @@ for LINE in $SERVERS ; do
    provider="$(echo $LINE | awk -F, '{print $4}')"
    short="$(echo $hostname | awk -F'-' '{print $1}')"
    echo mv mullvad-$short.conf $short-$city-$provider.conf
    echo mv $short-wireguard.conf $short-$city-$provider.conf
done

2022-02-09-ambientweather/aw.mjs
@@ -0,0 +1,46 @@
#!/usr/bin/env zx
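// Poll the public AmbientWeather device API every 30 minutes and archive
// each JSON response. $, sleep, and fs are zx globals; queryString is a
// URL-encoded $publicBox lat/lon bounding box plus a $limit of 500.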
const api = "https://lightning.ambientweather.net/devices";
const outputDir = `${process.env.HOME}/tmp/weatherfetch`;
const queryString =
    "%24publicBox%5B0%5D%5B0%5D=-115.37389456264378&%24publicBox%5B0%5D%5B1%5D=36.1098453902911&%24publicBox%5B1%5D%5B0%5D=-115.1709572152316&%24publicBox%5B1%5D%5B1%5D=36.24422733946878&%24limit=500";
const userAgent =
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/98.0.4758.80 Safari/537.36";

async function fetchWeather() {
    const cw = await $`curl ${api}?${queryString} \
        -H 'accept: application/json' \
        -H 'authority: lightning.ambientweather.net' \
        -H 'user-agent: '${userAgent} \
        -H 'origin: https://ambientweather.net' \
        -H 'sec-fetch-site: same-site' \
        -H 'sec-fetch-mode: cors' \
        -H 'sec-fetch-dest: empty' \
        -H 'referer: https://ambientweather.net/' \
        -H 'accept-language: en-US,en;q=0.9' \
        --compressed 2>/dev/null`;
    return JSON.parse(cw.stdout);
}

function minToMs(minutes) {
    return minutes * 60 * 1000;
}

async function main() {
    await $`test -d ${outputDir} || mkdir -p ${outputDir}`;
    while (true) {
        await oneLoop();
        await sleep(minToMs(30));
    }
}

async function oneLoop() {
    const now = await $`date -u +"%Y%m%dT%H%M%SZ"`;
    const data = await fetchWeather();
    fs.writeFileSync(outputDir + "/latest.json", JSON.stringify(data));
    await $`cp ${outputDir}/latest.json ${outputDir}/${now}.json`;
}

main();

2022-02-09-ambientweather/weatherlog.sh (deleted)
@@ -1,38 +0,0 @@
#!/bin/bash

API='https://lightning.ambientweather.net/devices'
OUTDIR="$HOME/tmp/weatherfetch"

function fetchWeather() {
    curl "$API"?'%24publicBox%5B0%5D%5B0%5D=-115.37389456264378&%24publicBox%5B0%5D%5B1%5D=36.1098453902911&%24publicBox%5B1%5D%5B0%5D=-115.1709572152316&%24publicBox%5B1%5D%5B1%5D=36.24422733946878&%24limit=500' \
        -H 'authority: lightning.ambientweather.net' \
        -H 'accept: application/json' \
        -H 'user-agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/98.0.4758.80 Safari/537.36' \
        -H 'origin: https://ambientweather.net' \
        -H 'sec-fetch-site: same-site' \
        -H 'sec-fetch-mode: cors' \
        -H 'sec-fetch-dest: empty' \
        -H 'referer: https://ambientweather.net/' \
        -H 'accept-language: en-US,en;q=0.9' \
        --compressed | jq .
}

function main() {
    if [[ ! -d $OUTDIR ]]; then
        mkdir -p $OUTDIR
    fi
    while sleep 1; do
        oneLoop
        sleep 3600
    done
}

function oneLoop() {
    NOW="$(date -u +"%Y%m%dT%H%M%SZ")"
    JSON="$(fetchWeather)"
    cat <<< "$JSON" > $OUTDIR/latest.json
    cp $OUTDIR/latest.json $OUTDIR/$NOW.json
}

main

2022-09-16.signalattachments/process.sh
@@ -0,0 +1,51 @@
#!/bin/bash
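# Sort extension-less attachment files into ../out/ by type as detected
# with file(1), appending a matching extension. Order matters: each loop
# only sees files the earlier loops did not move.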
for FN in $(file * | grep -i png | awk -F':' '{print $1}') ; do
    mv -v $FN ../out/$FN.png
done
for FN in $(file * | grep -i jpeg | awk -F':' '{print $1}') ; do
    mv -v $FN ../out/$FN.jpg
done
for FN in $(file * | grep -i gif | awk -F':' '{print $1}') ; do
    mv -v $FN ../out/$FN.gif
done
for FN in $(file * | grep -i 'Web/P' | awk -F':' '{print $1}') ; do
    mv -v $FN ../out/$FN.webp
done
for FN in $(file * | grep -i mp4 | awk -F':' '{print $1}') ; do
    mv -v $FN ../out/$FN.mp4
done
for FN in $(file * | grep -i pdf | awk -F':' '{print $1}') ; do
    mv -v $FN ../out/$FN.pdf
done
for FN in $(file * | grep -i "\.M4A" | awk -F':' '{print $1}') ; do
    mv -v $FN ../out/$FN.m4a
done
for FN in $(file * | grep "MPEG ADTS, AAC" | awk -F':' '{print $1}') ; do
    mv -v $FN ../out/$FN.aac
done
for FN in $(file * | grep "EPUB" | awk -F':' '{print $1}') ; do
    mv -v $FN ../out/$FN.epub
done
for FN in $(file * | grep "Zip archive" | awk -F':' '{print $1}') ; do
    mv -v $FN ../out/$FN.zip
done
for FN in $(file * | grep "Unicode text" | awk -F':' '{print $1}') ; do
    mv -v $FN ../out/$FN.txt
done
for FN in $(file * | grep "ASCII text" | awk -F':' '{print $1}') ; do
    mv -v $FN ../out/$FN.txt
done
for FN in $(file * | grep "empty" | awk -F':' '{print $1}') ; do
    rm -v $FN
done
for FN in $(file * | grep "data" | awk -F':' '{print $1}') ; do
    mv -v $FN ../out/$FN.dat
done
# nothing below this exit currently runs
exit 1
fdupes -d -q -N ../out
f2 -r '{{mtime.YYYY}}-{{mtime.MM}}/{{mtime.YYYY}}-{{mtime.MM}}-{{mtime.DD}}.{{f}}{{ext}}' -x

archive-to-cloud/tocloud
@@ -0,0 +1,140 @@
#!/bin/bash
set -o pipefail
set -e
#set -x
# decrypt like so:
#
# gpg -d ~/.paths/sneak-sync/secrets/backup-encryption-keys/2022-11-16.sneak-longterm-archive-age-key.gpg 2>/dev/null |
# age -d -i - priv.age | tail -1 2>/dev/null |
# age -d -i - archive.age
YYYYMMDD="$(date -u +%Y-%m-%d)"
YYYY="$(date -u +%Y)"
MM="$(date -u +%m)"
THIS="$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )"
MY_PGP="5539AD00DE4C42F3AFE11575052443F4DF2A55C2"
MY_LONGTERM_AGE_PUBKEY="age1278m9q7dp3chsh2dcy82qk27v047zywyvtxwnj4cvt0z65jw6a7q5dqhfj"
TD="$(mktemp -d)"
LOGDIR="$HOME/Documents/_SYSADMIN/$YYYY-$MM/$YYYYMMDD"
if [[ ! -d "$LOGDIR" ]]; then
mkdir -p "$LOGDIR"
fi
#exec > >(tee -a $LOGDIR/$YYYYMMDD.$(date -u +%s).tocloud-backup.log) 2>&1
function on_exit {
rm -rf "$TD"
}
function on_terminate {
echo "### Cleaning up..."
rm -rfv "$TD"
}
trap on_exit ERR EXIT
trap on_terminate SIGINT SIGTERM
function usage {
echo "usage: $0 <backupname> <dir>" > /dev/stderr
exit 1
}
#function getStorageBoxCredentials {
# gpg -d $HOME/.paths/sneak-sync/secrets/credentials/storagebox-offsite-backup-subaccount.json.gpg
#}
function main {
    if [[ $# -ne 2 ]]; then
        usage
    fi
    if [[ -z "$2" ]]; then
        usage
    fi
    if [[ -d "$2" ]]; then
        SRC="$(cd "$2" && pwd -P)"
    else
        SRC="$2"
    fi
    if [[ ! -r "$SRC" ]]; then
        usage
    fi
    BACKUPNAME="$YYYYMMDD.$1.$(date +%s)"
    time do_backup "$BACKUPNAME" "$SRC"
}
function do_backup {
    BACKUPNAME="$1"
    SRC="$2"
    cd "$TD"
    mkdir "$BACKUPNAME"
    cd "$TD/$BACKUPNAME"
    echo "### Beginning backup $BACKUPNAME"
    echo "### Temporary Working Directory: $TD"
    AGE_PRIV=$(age-keygen 2> ./pub.txt)
    age -r $MY_LONGTERM_AGE_PUBKEY <<< "$AGE_PRIV" > ./priv.age
    PUB="$(awk -F' ' '{print $3}' < ./pub.txt)"
    echo "### Backup Archive Session Pubkey: $PUB"
    echo "$PUB" > ./pub.txt # overwrite non-clean one
    gpg --trust-model always \
        --compress-algo none \
        -r $MY_PGP --encrypt \
        -a <<< "$AGE_PRIV" \
        > ./priv.sneak-pgp-DF2A55C2.asc
    echo "### Backup Source Size: $(du -sh "$SRC" | awk '{print $1}')"
    echo "### Indexing backup..."
    (find "$SRC" -type f \( -exec sha1sum {} \; \)) |
        tee /dev/stderr |
        age -r $PUB > "$TD/$BACKUPNAME/archive-sums.txt.age"
    echo "### Compressing backup..."
    tar -P -c "$SRC" |
        nice -n 20 zstd --compress -T0 -10 |
        pv --delay-start 3 --progress --eta --size $(du -sb "$SRC" | awk '{print $1}') |
        age -r $PUB |
        split -d -b 1G -a 4 - $TD/$BACKUPNAME/archive.tar.zst.age.
    COUNT="$(cd "$TD/$BACKUPNAME" && ls -1 archive.tar.zst.age.* | wc -l | awk '{print $1}')"
    if [[ "$COUNT" -eq 1 ]]; then
        mv "$TD/$BACKUPNAME/archive.tar.zst.age.0000" "$TD/$BACKUPNAME/archive.tar.zst.age"
    fi
    cd "$TD/$BACKUPNAME"
    echo "### Backup Compressed Archive Size: $(du -sh "$TD/$BACKUPNAME" | awk '{print $1}')"
    echo "### Creating Checksums..."
    shasum archive.tar.zst.age* archive-sums.txt.age | tee -a SHASUMS.txt
    echo "### Signing Checksums..."
    gpg --default-key $MY_PGP --output SHASUMS.txt.gpg --detach-sig SHASUMS.txt
    #tar -c . | pv --progress --eta --size $(du -sb "$TD/$BACKUPNAME" | awk '{print $1}') |
    #ssh fsn1-storagebox-10T "mkdir -p $BACKUPNAME ; cd $BACKUPNAME && tar xvf -"
    #while ! rsync -avvvcP --delete "$TD/$BACKUPNAME/" fsn1-storagebox-10T:"$BACKUPNAME"/
    #    sleep 1
    #done
    echo "### Uploading data..."
    # i want to use rsync here but rclone gives much better total
    # progress/ETA display.
    rclone sync \
        --retries 99999 \
        --progress \
        --progress-terminal-title \
        --stats-unit bits \
        "$TD/$BACKUPNAME" \
        fsn1-storagebox-10T:"$BACKUPNAME"/
    # belt and suspenders
    echo "### Verifying uploaded data checksums..."
    # NB: set -e is active, so a failing rsync would abort before a
    # separate $? check ever ran; test the exit status inline instead
    if rsync -acP "$TD/$BACKUPNAME/" fsn1-storagebox-10T:"$BACKUPNAME"/ ; then
        echo "### Backup successful."
        exit 0
    else
        echo "### Problem detected."
        exit 1
    fi
}
main "$@"

golang-binaries-installer/install.sh
@@ -23,3 +23,7 @@ fi
if ! which gofumpt >/dev/null 2>&1 ; then
    go install -v mvdan.cc/gofumpt@latest
fi
if ! which countdown >/dev/null 2>&1 ; then
    go install -v github.com/antonmedv/countdown@latest
fi

misc/photobackup-ext.Makefile
@@ -0,0 +1,11 @@
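# mirror the Syncthing-managed Lightroom masters folder into this
# directory, pruning anything deleted or excluded upstream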
default: sync

sync:
	rsync -avP \
		--delete \
		--delete-before \
		--delete-excluded \
		--exclude '.sync-conflict*DS_Store' \
		--exclude '.DS_Store' \
		$(HOME)/Library/Syncthing/folders/LightroomMasters-CurrentYear/ \
		./LightroomMasters-CurrentYear/

osmand-maps/Dockerfile
@@ -0,0 +1,16 @@
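# single-stage image: run.sh starts gen.sh in the background and serves
# the generated zips out of /work/webroot via nginx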
FROM ubuntu as builder
ENV DEBIAN_FRONTEND noninteractive
RUN apt-get update && apt-get install -y nginx unzip zip curl wget
RUN mkdir -p /work/webroot && mkdir -p /work/download && mkdir -p /work/bin
VOLUME /work/webroot
VOLUME /work/download
ADD gen.sh /work/bin
ADD run.sh /work/bin
CMD ["/bin/bash", "/work/bin/run.sh" ]

osmand-maps/Makefile
@@ -0,0 +1,17 @@
IMAGE := osmand-maps
default: build-and-run
build-and-run: build run
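# script(1) wraps each command so a full typescript of its output is
# appended to log.txt (BSD/macOS "script -a file command" syntax)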
build:
	script -a log.txt docker build -t $(IMAGE) .

run:
	-docker rm -f osmand-maps
	script -a log.txt docker run \
		-v /webroot:/work/webroot \
		-v /download:/work/download \
		-p 80:80 \
		--name osmand-maps \
		$(IMAGE)

osmand-maps/gen.sh
@@ -0,0 +1,114 @@
#!/bin/bash
set -x
export YYYYMMDD="$(date -u +%Y-%m-%d)"
export CONTINENTS="
europe
northamerica
southamerica
centralamerica
asia
africa
oceania
antarctica
world
voice
"
#https://download.osmand.net/download?standard=yes&file=$FN
#https://download.osmand.net/indexes.php
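# download every index file listed for a continent, resuming partial
# downloads (-c) into a .tmp name and renaming only on success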
function fetchContinent() {
    CONTINENT="$1"
    cd /work/download
    if [[ ! -d ./$CONTINENT ]]; then
        mkdir $CONTINENT
    fi
    cd $CONTINENT
    FILES="$(grep -i $CONTINENT ../listing.txt)"
    for FILE in $FILES ; do
        URL="https://download.osmand.net/download?standard=yes&file=$FILE"
        if [[ ! -e "$FILE" ]]; then
            echo "file $FILE is missing, downloading"
            wget --progress=dot:giga --report-speed=bits \
                -O "$FILE.tmp" \
                -c \
                "$URL" && mv "$FILE.tmp" "$FILE"
            rm -f ./*.tmp
        fi
        ls -tla
        du -sh .
        df -h .
    done
}
function zipContinent() {
    CONTINENT="$1"
    cd /work/download
    du -sh $CONTINENT
    df -h .
    cd $CONTINENT
    find . -type f -iname '*.zip' -print0 | xargs -0 -n 1 -P 8 unzip
    rm -fv *.zip *.tmp
    cd ..
    zip -9r $YYYYMMDD.$CONTINENT.zip $CONTINENT
    rm -rfv $CONTINENT
    mv $YYYYMMDD.$CONTINENT.zip /work/webroot
}
function fetch() {
    cd /work/download
    # srtmf files are 404
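    # scrape the .obf.zip filenames out of the indexes.php HTML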
    curl -sf https://download.osmand.net/indexes.php | tr "\"" "\n" |
        tr ">" "\n" | tr "<" "\n" | grep obf.zip |
        grep -v "srtmf" | sort > listing.txt
    cat > /work/webroot/index.html.new <<EOF
<html>
<head>
<title>files</title>
</head>
<body>
<h1>files</h1>
<ul>
EOF
    for CONTINENT in $CONTINENTS; do
        if [[ ! -e /work/webroot/$YYYYMMDD.$CONTINENT.zip ]]; then
            fetchContinent "$CONTINENT"
            zipContinent "$CONTINENT"
        fi
        cat >> /work/webroot/index.html.new <<EOF
<li>
<a href="/$YYYYMMDD.$CONTINENT.zip">$YYYYMMDD.$CONTINENT.zip</a>
</li>
EOF
    done
    cat >> /work/webroot/index.html.new <<EOF
</ul>
<pre>
$(date -u)
</pre>
</body>
</html>
EOF
    mv /work/webroot/index.html.new /work/webroot/index.html
}
function main() {
    fetch
}
main

osmand-maps/run.sh
@@ -0,0 +1,10 @@
#!/bin/bash
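# container entrypoint: run gen.sh in the background while nginx serves
# /work/webroot (symlinked to /var/www/html) in the foreground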
chmod a+rx /work/bin/*
bash -c "/work/bin/gen.sh" &
rm -rfv /var/www/html
ln -s /work/webroot /var/www/html
chmod -Rv a+rx /work/webroot/*
exec nginx -g "daemon off;"