init
This commit is contained in:
commit
017bf6e067
115 changed files with 19650 additions and 0 deletions
17
scripts/copy-sw.js
Normal file
17
scripts/copy-sw.js
Normal file
|
|
@ -0,0 +1,17 @@
|
|||
// Copies sw.js + workbox-*.js from dist/server/ to dist/client/ after build.
// @astrojs/node standalone only serves static files from dist/client/, but
// @vite-pwa/astro generates the service worker into dist/server/ during the
// SSR Vite build pass.
import { copyFile, mkdir, readdir } from "node:fs/promises";
import { join } from "node:path";

const serverDir = "dist/server";
const clientDir = "dist/client";

// A missing dist/server (e.g. build skipped) is not an error — nothing to copy.
const files = await readdir(serverDir).catch(() => []);
const workerFiles = files.filter(
  (file) => file === "sw.js" || file.startsWith("workbox-"),
);

if (workerFiles.length > 0) {
  // copyFile does not create directories — ensure the target exists first.
  await mkdir(clientDir, { recursive: true });
  for (const file of workerFiles) {
    await copyFile(join(serverDir, file), join(clientDir, file));
    console.log(`[copy-sw] ${file} → dist/client/`);
  }
}
|
||||
39
scripts/new-note-mdx-prompt.sh
Executable file
39
scripts/new-note-mdx-prompt.sh
Executable file
|
|
@ -0,0 +1,39 @@
|
|||
#!/usr/bin/env bash
# Standalone wrapper for Obsidian Script Runner — create a new note with a cover image (MDX).
# Prompts for a title via an AppleScript dialog, derives a URL slug, and writes
# a frontmatter skeleton into the vault under content/notes/YYYY/MM/DD/.
set -euo pipefail

VAULT='/Users/adrian/Obsidian/Web/adrian-altner-com'

# A cancelled dialog makes osascript fail — exit quietly in that case.
TITLE=$(osascript \
  -e 'Tell application "System Events" to display dialog "Note title (with cover):" default answer ""' \
  -e 'text returned of result' 2>/dev/null) || exit 0

if [[ -z "$TITLE" ]]; then exit 0; fi

# Slugify: lowercase, strip everything but [a-z0-9 -], collapse space runs to
# hyphens, trim leading/trailing hyphens. `sed -E` (ERE) is required: BSD sed
# on macOS treats BRE `\+` as a literal plus, so GNU-style `s/ \+/-/g` would
# never collapse spaces.
SLUG=$(printf '%s' "$TITLE" | tr '[:upper:]' '[:lower:]' | sed 's/[^a-z0-9 -]//g' | sed -E 's/ +/-/g' | sed -E 's/^-+//; s/-+$//')
DATE_FOLDER=$(date +%Y/%m/%d)
PUBLISH_DATE=$(date +%Y-%m-%d)
DIR="$VAULT/content/notes/$DATE_FOLDER"
FILE="$DIR/$SLUG.mdx"

mkdir -p "$DIR"

# Never overwrite an existing note — surface a macOS notification instead.
if [[ -f "$FILE" ]]; then
  osascript -e "display notification \"File already exists: $SLUG.mdx\" with title \"New Note\"" 2>/dev/null || true
  exit 1
fi

cat > "$FILE" << EOF
---
title: "$TITLE"
publishDate: $PUBLISH_DATE
description: ""
cover: "./$SLUG.jpg"
coverAlt: ""
tags: []
draft: false
syndication:
---
EOF

echo "Created: $FILE"
|
||||
37
scripts/new-note-prompt.sh
Executable file
37
scripts/new-note-prompt.sh
Executable file
|
|
@ -0,0 +1,37 @@
|
|||
#!/usr/bin/env bash
# Standalone wrapper for Obsidian Script Runner — create a new plain-Markdown note.
# Prompts for a title via an AppleScript dialog, derives a URL slug, and writes
# a frontmatter skeleton into the vault under content/notes/YYYY/MM/DD/.
set -euo pipefail

VAULT='/Users/adrian/Obsidian/Web/adrian-altner-com'

# A cancelled dialog makes osascript fail — exit quietly in that case.
TITLE=$(osascript \
  -e 'Tell application "System Events" to display dialog "Note title:" default answer ""' \
  -e 'text returned of result' 2>/dev/null) || exit 0

if [[ -z "$TITLE" ]]; then exit 0; fi

# Slugify: lowercase, strip everything but [a-z0-9 -], collapse space runs to
# hyphens, trim leading/trailing hyphens. `sed -E` (ERE) is required: BSD sed
# on macOS treats BRE `\+` as a literal plus, so GNU-style `s/ \+/-/g` would
# never collapse spaces.
SLUG=$(printf '%s' "$TITLE" | tr '[:upper:]' '[:lower:]' | sed 's/[^a-z0-9 -]//g' | sed -E 's/ +/-/g' | sed -E 's/^-+//; s/-+$//')
DATE_FOLDER=$(date +%Y/%m/%d)
PUBLISH_DATE=$(date +%Y-%m-%d)
DIR="$VAULT/content/notes/$DATE_FOLDER"
FILE="$DIR/$SLUG.md"

mkdir -p "$DIR"

# Never overwrite an existing note — surface a macOS notification instead.
if [[ -f "$FILE" ]]; then
  osascript -e "display notification \"File already exists: $SLUG.md\" with title \"New Note\"" 2>/dev/null || true
  exit 1
fi

cat > "$FILE" << EOF
---
title: "$TITLE"
publishDate: $PUBLISH_DATE
description: ""
tags: []
draft: false
syndication:
---
EOF

echo "Created: $FILE"
|
||||
66
scripts/new-note.sh
Executable file
66
scripts/new-note.sh
Executable file
|
|
@ -0,0 +1,66 @@
|
|||
#!/usr/bin/env bash
# Usage: new-note.sh "Note Title" [--mdx]
# Creates a new note in the Obsidian vault with correct frontmatter and folder structure.
# Use --mdx for notes that need a cover image or custom components (creates .mdx file).
set -euo pipefail

VAULT='/Users/adrian/Obsidian/Web/adrian-altner-com'

if [[ -z "${1:-}" ]]; then
  echo "Usage: new-note.sh \"Note Title\" [--mdx]" >&2
  exit 1
fi

TITLE="$1"
MDX=false
if [[ "${2:-}" == "--mdx" ]]; then
  MDX=true
fi

# Slugify: lowercase, strip everything but [a-z0-9 -], collapse space runs to
# hyphens, trim leading/trailing hyphens. `sed -E` (ERE) is required: BSD sed
# on macOS treats BRE `\+` as a literal plus, so GNU-style `s/ \+/-/g` would
# never collapse spaces.
SLUG=$(printf '%s' "$TITLE" | tr '[:upper:]' '[:lower:]' | sed 's/[^a-z0-9 -]//g' | sed -E 's/ +/-/g' | sed -E 's/^-+//; s/-+$//')
DATE_FOLDER=$(date +%Y/%m/%d)
PUBLISH_DATE=$(date +%Y-%m-%d)
DIR="$VAULT/content/notes/$DATE_FOLDER"

if $MDX; then
  EXT="mdx"
else
  EXT="md"
fi

FILE="$DIR/$SLUG.$EXT"

mkdir -p "$DIR"

if [[ -f "$FILE" ]]; then
  echo "File already exists: $FILE" >&2
  exit 1
fi

# MDX notes get cover/coverAlt frontmatter; plain notes do not.
if $MDX; then
  cat > "$FILE" << EOF
---
title: "$TITLE"
publishDate: $PUBLISH_DATE
description: ""
cover: "./$SLUG.jpg"
coverAlt: ""
tags: []
draft: false
syndication:
---
EOF
else
  cat > "$FILE" << EOF
---
title: "$TITLE"
publishDate: $PUBLISH_DATE
description: ""
tags: []
draft: false
syndication:
---
EOF
fi

echo "Created: $FILE"
|
||||
38
scripts/new-post-prompt.sh
Executable file
38
scripts/new-post-prompt.sh
Executable file
|
|
@ -0,0 +1,38 @@
|
|||
#!/usr/bin/env bash
# Standalone wrapper for Obsidian Script Runner — create a new blog post.
# Prompts for a title via an AppleScript dialog, derives a URL slug, and writes
# a frontmatter skeleton into the vault under content/blog/posts/YYYY/MM/DD/.
set -euo pipefail

VAULT='/Users/adrian/Obsidian/Web/adrian-altner-com'

# A cancelled dialog makes osascript fail — exit quietly in that case.
TITLE=$(osascript \
  -e 'Tell application "System Events" to display dialog "Post title:" default answer ""' \
  -e 'text returned of result' 2>/dev/null) || exit 0

if [[ -z "$TITLE" ]]; then exit 0; fi

# Slugify: lowercase, strip everything but [a-z0-9 -], collapse space runs to
# hyphens, trim leading/trailing hyphens. `sed -E` (ERE) is required: BSD sed
# on macOS treats BRE `\+` as a literal plus, so GNU-style `s/ \+/-/g` would
# never collapse spaces.
SLUG=$(printf '%s' "$TITLE" | tr '[:upper:]' '[:lower:]' | sed 's/[^a-z0-9 -]//g' | sed -E 's/ +/-/g' | sed -E 's/^-+//; s/-+$//')
DATE_FOLDER=$(date +%Y/%m/%d)
# Posts carry a full timestamp (with timezone) so same-day posts sort correctly.
PUBLISH_DATE=$(date +%Y-%m-%dT%H:%M:%S%z)
DIR="$VAULT/content/blog/posts/$DATE_FOLDER"
FILE="$DIR/$SLUG.md"

mkdir -p "$DIR"

# Never overwrite an existing post — surface a macOS notification instead.
if [[ -f "$FILE" ]]; then
  osascript -e "display notification \"File already exists: $SLUG.md\" with title \"New Post\"" 2>/dev/null || true
  exit 1
fi

cat > "$FILE" << EOF
---
title: "$TITLE"
description: ""
publishDate: $PUBLISH_DATE
tags: []
category: general
draft: true
syndication:
---
EOF

echo "Created: $FILE"
|
||||
39
scripts/new-post.sh
Executable file
39
scripts/new-post.sh
Executable file
|
|
@ -0,0 +1,39 @@
|
|||
#!/usr/bin/env bash
# Usage: new-post.sh "Post Title"
# Creates a new blog post in the Obsidian vault with correct frontmatter and folder structure.
set -euo pipefail

VAULT='/Users/adrian/Obsidian/Web/adrian-altner-com'

if [[ -z "${1:-}" ]]; then
  echo "Usage: new-post.sh \"Post Title\"" >&2
  exit 1
fi

TITLE="$1"
# Slugify: lowercase, strip everything but [a-z0-9 -], collapse space runs to
# hyphens, trim leading/trailing hyphens. `sed -E` (ERE) is required: BSD sed
# on macOS treats BRE `\+` as a literal plus, so GNU-style `s/ \+/-/g` would
# never collapse spaces.
SLUG=$(printf '%s' "$TITLE" | tr '[:upper:]' '[:lower:]' | sed 's/[^a-z0-9 -]//g' | sed -E 's/ +/-/g' | sed -E 's/^-+//; s/-+$//')
DATE_FOLDER=$(date +%Y/%m/%d)
# Posts carry a full timestamp (with timezone) so same-day posts sort correctly.
PUBLISH_DATE=$(date +%Y-%m-%dT%H:%M:%S%z)
DIR="$VAULT/content/blog/posts/$DATE_FOLDER"
FILE="$DIR/$SLUG.md"

mkdir -p "$DIR"

if [[ -f "$FILE" ]]; then
  echo "File already exists: $FILE" >&2
  exit 1
fi

cat > "$FILE" << EOF
---
title: "$TITLE"
description: ""
publishDate: $PUBLISH_DATE
tags: []
category: general
draft: true
syndication:
---
EOF

echo "Created: $FILE"
|
||||
67
scripts/publish-all.sh
Executable file
67
scripts/publish-all.sh
Executable file
|
|
@ -0,0 +1,67 @@
|
|||
#!/usr/bin/env bash
# Usage: publish-all.sh [vps-host] [branch]
# Syncs blog + photos content from the Obsidian vault to the VPS, rebuilds the
# site container, and triggers webmentions for all feeds.
set -euo pipefail

VAULT_BLOG='/Users/adrian/Obsidian/Web/adrian-altner-com/content/blog'
VAULT_PHOTOS='/Users/adrian/Obsidian/Web/adrian-altner-com/content/photos'
VPS="${1:-hetzner}"
REMOTE_BRANCH="${2:-main}"

REMOTE_BASE='/opt/websites/www.adrian-altner.com'
REMOTE_BLOG="${REMOTE_BASE}/src/content/blog"
REMOTE_PHOTOS="${REMOTE_BASE}/src/content/photos"

# --- 1. Sync vault to VPS ---
# Reset the remote checkout to origin first so rsync lands on a clean tree;
# the server-only .env files survive `git clean`.
ssh "$VPS" "
  set -euo pipefail
  cd '$REMOTE_BASE'
  git fetch --prune origin '$REMOTE_BRANCH'
  git checkout '$REMOTE_BRANCH'
  git reset --hard 'origin/$REMOTE_BRANCH'
  git clean -fd -e .env -e .env.production
  mkdir -p '$REMOTE_BLOG'
  mkdir -p '$REMOTE_PHOTOS'
"

# rsync filter order matters: directories and wanted extensions are included,
# then the trailing --exclude='*' drops everything else. The image patterns
# match publish-blog.sh so a combined publish also ships new post images.
rsync -az --delete \
  --include='*/' \
  --include='*.md' \
  --include='*.mdx' \
  --include='*.jpg' \
  --include='*.jpeg' \
  --include='*.JPG' \
  --include='*.JPEG' \
  --include='*.png' \
  --include='*.PNG' \
  --include='*.webp' \
  --include='*.gif' \
  --exclude='.DS_Store' \
  --exclude='*' \
  "$VAULT_BLOG/" "$VPS:$REMOTE_BLOG/"

rsync -az --delete \
  --include='*/' \
  --include='*.md' \
  --include='*.mdx' \
  --include='*.jpg' \
  --include='*.jpeg' \
  --include='*.JPG' \
  --include='*.JPEG' \
  --include='*.json' \
  --exclude='.DS_Store' \
  --exclude='*' \
  "$VAULT_PHOTOS/" "$VPS:$REMOTE_PHOTOS/"

# --- 2. Build + cleanup ---
ssh "$VPS" "
  set -euo pipefail
  cd '$REMOTE_BASE'
  podman-compose -f compose.yml up --build -d --force-recreate
  podman image prune -af
  podman builder prune -af
"

echo "Redeploy done via $VPS (branch: $REMOTE_BRANCH)."

# --- 3. Webmentions ---
# The token lives only on the VPS; read it out of .env.production over ssh.
WEBMENTION_APP_TOKEN="$(ssh "$VPS" "grep '^WEBMENTION_APP_TOKEN=' '$REMOTE_BASE/.env.production' | cut -d= -f2-" 2>/dev/null || true)"
if [[ -n "$WEBMENTION_APP_TOKEN" ]]; then
  echo "Sending webmentions via webmention.app..."
  for feed in rss/blog.xml rss/notes.xml rss/links.xml rss/photos.xml; do
    curl -s -X POST "https://webmention.app/check?url=https://adrian-altner.com/${feed}&token=${WEBMENTION_APP_TOKEN}" \
      | grep -o '"status":"[^"]*"' || true
  done
  echo "Webmentions triggered."
else
  echo "No WEBMENTION_APP_TOKEN in .env.production — skipping webmentions."
fi
|
||||
62
scripts/publish-blog.sh
Executable file
62
scripts/publish-blog.sh
Executable file
|
|
@ -0,0 +1,62 @@
|
|||
#!/usr/bin/env bash
# Usage: publish-blog.sh [vps-host] [branch]
# Can be called from any directory — no dependency on the repo being the working dir.
# Syncs blog content (posts + images) from the Obsidian vault to the VPS,
# rebuilds the site container, and pings webmention.app for the blog feed.
set -euo pipefail

VAULT_BLOG='/Users/adrian/Obsidian/Web/adrian-altner-com/content/blog'
VPS="${1:-hetzner}"
REMOTE_BRANCH="${2:-main}"

REMOTE_BASE='/opt/websites/www.adrian-altner.com'
REMOTE_BLOG="${REMOTE_BASE}/src/content/blog"

# --- 1. Sync vault to VPS ---
# Reset the remote checkout to origin so rsync lands on a clean tree; the
# server-only .env files are explicitly spared by `git clean -e`.
ssh "$VPS" "
  set -euo pipefail
  cd '$REMOTE_BASE'
  git fetch --prune origin '$REMOTE_BRANCH'
  git checkout '$REMOTE_BRANCH'
  git reset --hard 'origin/$REMOTE_BRANCH'
  git clean -fd -e .env -e .env.production
  mkdir -p '$REMOTE_BLOG'
"

# rsync filter order matters: directories and wanted extensions are included
# first; the trailing --exclude='*' drops everything else.
rsync -az --delete \
  --include='*/' \
  --include='*.md' \
  --include='*.mdx' \
  --include='*.jpg' \
  --include='*.jpeg' \
  --include='*.JPG' \
  --include='*.JPEG' \
  --include='*.png' \
  --include='*.PNG' \
  --include='*.webp' \
  --include='*.gif' \
  --exclude='.DS_Store' \
  --exclude='*' \
  "$VAULT_BLOG/" "$VPS:$REMOTE_BLOG/"

# --- 2. Build + cleanup ---
# Rebuild the container with the freshly synced content, then prune unused
# images/build cache so the VPS disk doesn't fill up over time.
ssh "$VPS" "
  set -euo pipefail
  cd '$REMOTE_BASE'

  podman-compose -f compose.yml up --build -d --force-recreate

  podman image prune -af
  podman builder prune -af
"

echo "Blog deploy done via $VPS (branch: $REMOTE_BRANCH)."

# --- 3. Webmentions ---
# The token lives only on the VPS; read it out of .env.production over ssh.
# Failures here are non-fatal (|| true) — webmentions are best-effort.
WEBMENTION_APP_TOKEN="$(ssh "$VPS" "grep '^WEBMENTION_APP_TOKEN=' '$REMOTE_BASE/.env.production' | cut -d= -f2-" 2>/dev/null || true)"
if [[ -n "$WEBMENTION_APP_TOKEN" ]]; then
  echo "Sending webmentions..."
  curl -s -X POST "https://webmention.app/check?url=https://adrian-altner.com/rss/blog.xml&token=${WEBMENTION_APP_TOKEN}" \
    | grep -o '"status":"[^"]*"' || true
  echo "Webmentions triggered."
else
  echo "No WEBMENTION_APP_TOKEN in .env.production — skipping webmentions."
fi
|
||||
52
scripts/publish-links.sh
Executable file
52
scripts/publish-links.sh
Executable file
|
|
@ -0,0 +1,52 @@
|
|||
#!/usr/bin/env bash
# Usage: publish-links.sh [vps-host] [branch]
# Can be called from any directory — no dependency on the repo being the working dir.
# Syncs link posts (markdown only) from the Obsidian vault to the VPS,
# rebuilds the site container, and pings webmention.app for the links feed.
set -euo pipefail

VAULT_LINKS='/Users/adrian/Obsidian/Web/adrian-altner-com/content/links'
VPS="${1:-hetzner}"
REMOTE_BRANCH="${2:-main}"

REMOTE_BASE='/opt/websites/www.adrian-altner.com'
REMOTE_LINKS="${REMOTE_BASE}/src/content/links"

# --- 1. Sync vault to VPS ---
# Reset the remote checkout to origin so rsync lands on a clean tree; the
# server-only .env files are explicitly spared by `git clean -e`.
ssh "$VPS" "
  set -euo pipefail
  cd '$REMOTE_BASE'
  git fetch --prune origin '$REMOTE_BRANCH'
  git checkout '$REMOTE_BRANCH'
  git reset --hard 'origin/$REMOTE_BRANCH'
  git clean -fd -e .env -e .env.production
  mkdir -p '$REMOTE_LINKS'
"

# rsync filter order matters: directories and markdown are included first;
# the trailing --exclude='*' drops everything else.
rsync -az --delete \
  --include='*/' \
  --include='*.md' \
  --include='*.mdx' \
  --exclude='.DS_Store' \
  --exclude='*' \
  "$VAULT_LINKS/" "$VPS:$REMOTE_LINKS/"

# --- 2. Build + cleanup ---
# Rebuild the container with the freshly synced content, then prune unused
# images/build cache so the VPS disk doesn't fill up over time.
ssh "$VPS" "
  set -euo pipefail
  cd '$REMOTE_BASE'
  podman-compose -f compose.yml up --build -d --force-recreate
  podman image prune -af
  podman builder prune -af
"

echo "Links deploy done via $VPS (branch: $REMOTE_BRANCH)."

# --- 3. Webmentions ---
# The token lives only on the VPS; read it out of .env.production over ssh.
# Failures here are non-fatal (|| true) — webmentions are best-effort.
WEBMENTION_APP_TOKEN="$(ssh "$VPS" "grep '^WEBMENTION_APP_TOKEN=' '$REMOTE_BASE/.env.production' | cut -d= -f2-" 2>/dev/null || true)"
if [[ -n "$WEBMENTION_APP_TOKEN" ]]; then
  echo "Sending webmentions..."
  curl -s -X POST "https://webmention.app/check?url=https://adrian-altner.com/rss/links.xml&token=${WEBMENTION_APP_TOKEN}" \
    | grep -o '"status":"[^"]*"' || true
  echo "Webmentions triggered."
else
  echo "No WEBMENTION_APP_TOKEN in .env.production — skipping webmentions."
fi
|
||||
58
scripts/publish-notes.sh
Executable file
58
scripts/publish-notes.sh
Executable file
|
|
@ -0,0 +1,58 @@
|
|||
#!/usr/bin/env bash
# Usage: publish-notes.sh [vps-host] [branch]
# Can be called from any directory — no dependency on the repo being the working dir.
# Syncs notes (markdown + jpeg cover images) from the Obsidian vault to the
# VPS, rebuilds the site container, and pings webmention.app for the notes feed.
set -euo pipefail

VAULT_NOTES='/Users/adrian/Obsidian/Web/adrian-altner-com/content/notes'
VPS="${1:-hetzner}"
REMOTE_BRANCH="${2:-main}"

REMOTE_BASE='/opt/websites/www.adrian-altner.com'
REMOTE_NOTES="${REMOTE_BASE}/src/content/notes"

# --- 1. Sync vault to VPS ---
# Reset the remote checkout to origin so rsync lands on a clean tree; the
# server-only .env files are explicitly spared by `git clean -e`.
ssh "$VPS" "
  set -euo pipefail
  cd '$REMOTE_BASE'
  git fetch --prune origin '$REMOTE_BRANCH'
  git checkout '$REMOTE_BRANCH'
  git reset --hard 'origin/$REMOTE_BRANCH'
  git clean -fd -e .env -e .env.production
  mkdir -p '$REMOTE_NOTES'
"

# rsync filter order matters: directories and wanted extensions are included
# first; the trailing --exclude='*' drops everything else.
rsync -az --delete \
  --include='*/' \
  --include='*.md' \
  --include='*.mdx' \
  --include='*.jpg' \
  --include='*.jpeg' \
  --include='*.JPG' \
  --include='*.JPEG' \
  --exclude='.DS_Store' \
  --exclude='*' \
  "$VAULT_NOTES/" "$VPS:$REMOTE_NOTES/"

# --- 2. Build + cleanup ---
# Rebuild the container with the freshly synced content, then prune unused
# images/build cache so the VPS disk doesn't fill up over time.
ssh "$VPS" "
  set -euo pipefail
  cd '$REMOTE_BASE'

  podman-compose -f compose.yml up --build -d --force-recreate

  podman image prune -af
  podman builder prune -af
"

echo "Notes deploy done via $VPS (branch: $REMOTE_BRANCH)."

# --- 3. Webmentions ---
# The token lives only on the VPS; read it out of .env.production over ssh.
# Failures here are non-fatal (|| true) — webmentions are best-effort.
WEBMENTION_APP_TOKEN="$(ssh "$VPS" "grep '^WEBMENTION_APP_TOKEN=' '$REMOTE_BASE/.env.production' | cut -d= -f2-" 2>/dev/null || true)"
if [[ -n "$WEBMENTION_APP_TOKEN" ]]; then
  echo "Sending webmentions..."
  curl -s -X POST "https://webmention.app/check?url=https://adrian-altner.com/rss/notes.xml&token=${WEBMENTION_APP_TOKEN}" \
    | grep -o '"status":"[^"]*"' || true
  echo "Webmentions triggered."
else
  echo "No WEBMENTION_APP_TOKEN in .env.production — skipping webmentions."
fi
|
||||
57
scripts/publish-photos.sh
Executable file
57
scripts/publish-photos.sh
Executable file
|
|
@ -0,0 +1,57 @@
|
|||
#!/usr/bin/env bash
# Usage: publish-photos.sh [vps-host] [branch]
# Can be called from any directory — no dependency on the repo being the working dir.
# Syncs photo albums (markdown, jpegs, and JSON metadata sidecars) from the
# Obsidian vault to the VPS, rebuilds the site container, and pings
# webmention.app for the photos feed.
set -euo pipefail

VAULT_PHOTOS='/Users/adrian/Obsidian/Web/adrian-altner-com/content/photos'
VPS="${1:-hetzner}"
REMOTE_BRANCH="${2:-main}"

REMOTE_BASE='/opt/websites/www.adrian-altner.com'
REMOTE_PHOTOS="${REMOTE_BASE}/src/content/photos"

# --- 1. Sync vault to VPS ---
# Reset the remote checkout to origin so rsync lands on a clean tree; the
# server-only .env files are explicitly spared by `git clean -e`.
ssh "$VPS" "
  set -euo pipefail
  cd '$REMOTE_BASE'
  git fetch --prune origin '$REMOTE_BRANCH'
  git checkout '$REMOTE_BRANCH'
  git reset --hard 'origin/$REMOTE_BRANCH'
  git clean -fd -e .env -e .env.production
  mkdir -p '$REMOTE_PHOTOS'
"

# rsync filter order matters: directories and wanted extensions are included
# first; the trailing --exclude='*' drops everything else. *.json covers the
# per-image metadata sidecars generated by scripts/vision.ts.
rsync -az --delete \
  --include='*/' \
  --include='*.md' \
  --include='*.mdx' \
  --include='*.jpg' \
  --include='*.jpeg' \
  --include='*.JPG' \
  --include='*.JPEG' \
  --include='*.json' \
  --exclude='.DS_Store' \
  --exclude='*' \
  "$VAULT_PHOTOS/" "$VPS:$REMOTE_PHOTOS/"

# --- 2. Build + cleanup ---
# Rebuild the container with the freshly synced content, then prune unused
# images/build cache so the VPS disk doesn't fill up over time.
ssh "$VPS" "
  set -euo pipefail
  cd '$REMOTE_BASE'
  podman-compose -f compose.yml up --build -d --force-recreate
  podman image prune -af
  podman builder prune -af
"

echo "Photos deploy done via $VPS (branch: $REMOTE_BRANCH)."

# --- 3. Webmentions ---
# The token lives only on the VPS; read it out of .env.production over ssh.
# Failures here are non-fatal (|| true) — webmentions are best-effort.
WEBMENTION_APP_TOKEN="$(ssh "$VPS" "grep '^WEBMENTION_APP_TOKEN=' '$REMOTE_BASE/.env.production' | cut -d= -f2-" 2>/dev/null || true)"
if [[ -n "$WEBMENTION_APP_TOKEN" ]]; then
  echo "Sending webmentions..."
  curl -s -X POST "https://webmention.app/check?url=https://adrian-altner.com/rss/photos.xml&token=${WEBMENTION_APP_TOKEN}" \
    | grep -o '"status":"[^"]*"' || true
  echo "Webmentions triggered."
else
  echo "No WEBMENTION_APP_TOKEN in .env.production — skipping webmentions."
fi
|
||||
49
scripts/publish-projects.sh
Executable file
49
scripts/publish-projects.sh
Executable file
|
|
@ -0,0 +1,49 @@
|
|||
#!/usr/bin/env bash
# Usage: publish-projects.sh [vps-host] [branch]
# Can be called from any directory — no dependency on the repo being the working dir.
# Syncs project pages (markdown + images) from the Obsidian vault to the VPS
# and rebuilds the site container. No webmentions step — projects have no feed
# trigger here.
set -euo pipefail

VAULT_PROJECTS='/Users/adrian/Obsidian/Web/adrian-altner-com/content/projects'
VPS="${1:-hetzner}"
REMOTE_BRANCH="${2:-main}"

REMOTE_BASE='/opt/websites/www.adrian-altner.com'
REMOTE_PROJECTS="${REMOTE_BASE}/src/content/projects"

# --- 1. Sync vault to VPS ---
# Reset the remote checkout to origin so rsync lands on a clean tree; the
# server-only .env files are explicitly spared by `git clean -e`.
ssh "$VPS" "
  set -euo pipefail
  cd '$REMOTE_BASE'
  git fetch --prune origin '$REMOTE_BRANCH'
  git checkout '$REMOTE_BRANCH'
  git reset --hard 'origin/$REMOTE_BRANCH'
  git clean -fd -e .env -e .env.production
  mkdir -p '$REMOTE_PROJECTS'
"

# rsync filter order matters: directories and wanted extensions are included
# first; the trailing --exclude='*' drops everything else.
rsync -az --delete \
  --include='*/' \
  --include='*.md' \
  --include='*.mdx' \
  --include='*.jpg' \
  --include='*.jpeg' \
  --include='*.JPG' \
  --include='*.JPEG' \
  --include='*.png' \
  --include='*.PNG' \
  --include='*.webp' \
  --include='*.gif' \
  --exclude='.DS_Store' \
  --exclude='*' \
  "$VAULT_PROJECTS/" "$VPS:$REMOTE_PROJECTS/"

# --- 2. Build + cleanup ---
# Rebuild the container with the freshly synced content, then prune unused
# images/build cache so the VPS disk doesn't fill up over time.
ssh "$VPS" "
  set -euo pipefail
  cd '$REMOTE_BASE'
  podman-compose -f compose.yml up --build -d --force-recreate
  podman image prune -af
  podman builder prune -af
"

echo "Projects deploy done via $VPS (branch: $REMOTE_BRANCH)."
|
||||
23
scripts/squash-history.sh
Executable file
23
scripts/squash-history.sh
Executable file
|
|
@ -0,0 +1,23 @@
|
|||
#!/usr/bin/env bash
# squash-history.sh — Replaces entire git history with a single "init" commit.
# WARNING: Destructive and irreversible. Force-pushes to remote.
# Usage: squash-history.sh [commit-msg] [remote]

set -euo pipefail

COMMIT_MSG="${1:-init}"
REMOTE="${2:-origin}"
BRANCH="main"
TEMP="temp-squash-$$"

# Fail fast when run outside a git work tree instead of dying mid-surgery.
git rev-parse --git-dir > /dev/null 2>&1 || { echo "Not a git repository." >&2; exit 1; }

echo "⚠️ This will destroy all git history and force-push to $REMOTE/$BRANCH."
read -r -p "Continue? [y/N] " confirm
[[ "$confirm" =~ ^[Yy]$ ]] || { echo "Aborted."; exit 0; }

# An orphan branch gives a fresh root commit holding the current tree with no
# parents; the old branch is then deleted and the orphan renamed over it.
git checkout --orphan "$TEMP"
git add -A
git commit -m "$COMMIT_MSG"
git branch -D "$BRANCH"
git branch -m "$TEMP" "$BRANCH"
git push --force "$REMOTE" "$BRANCH"

echo "Done. $(git log --oneline)"
|
||||
87
scripts/vision.spec.ts
Normal file
87
scripts/vision.spec.ts
Normal file
|
|
@ -0,0 +1,87 @@
|
|||
import assert from "node:assert/strict";
import { mkdir, mkdtemp, rm, writeFile } from "node:fs/promises";
import { tmpdir } from "node:os";
import { join } from "node:path";
import type {
  ExifMetadata,
  ImageMetadataSuggestion,
  VisionAIResult,
} from "./vision.ts";
import { getImagesToProcess, mergeMetaAndVisionData } from "./vision.ts";

// Expected output of mergeMetaAndVisionData for the fixtures below: EXIF
// fields mapped into the suggestion shape, combined with the AI-generated
// titles/description/tags.
const FINAL: ImageMetadataSuggestion = {
  id: "2R9A2805",
  title: [
    "Blossom and Buzz",
    "Spring's Gentle Awakening",
    "Cherry Blossom Haven",
    "Nature's Delicate Balance",
    "A Bee's Spring Feast",
  ],
  image: "./2R9A2805.jpg",
  alt: "Close-up of vibrant pink cherry blossoms on a branch with a honeybee collecting nectar. The bee's wings are slightly blurred, capturing its motion as it works. The background is a soft, dreamy pink hue, complementing the sharp details of the blossoms and the bee.",
  location: "48 deg 8' 37.56\" N, 11 deg 34' 13.32\" E",
  date: "2024-03-17",
  tags: ["nature", "cherryblossom", "bee", "spring", "floral"],
  exif: {
    camera: "Canon EOS R6m2",
    lens: "RF70-200mm F2.8 L IS USM",
    aperture: "2.8",
    iso: "125",
    focal_length: "200.0",
    shutter_speed: "1/1000",
  },
};

// Simulated AI analysis result (one half of the merge input).
const VISION_DATA: VisionAIResult = {
  title_ideas: [
    "Blossom and Buzz",
    "Spring's Gentle Awakening",
    "Cherry Blossom Haven",
    "Nature's Delicate Balance",
    "A Bee's Spring Feast",
  ],
  description:
    "Close-up of vibrant pink cherry blossoms on a branch with a honeybee collecting nectar. The bee's wings are slightly blurred, capturing its motion as it works. The background is a soft, dreamy pink hue, complementing the sharp details of the blossoms and the bee.",
  tags: ["nature", "cherryblossom", "bee", "spring", "floral"],
};

// Simulated exiftool output (the other half of the merge input). Shapes
// mirror `exiftool -j` JSON.
const EXIF_DATA: ExifMetadata = {
  SourceFile: "/Users/flori/Sites/flori-dev/src/content/grid/2R9A2805.jpg",
  FileName: "2R9A2805.jpg",
  Model: "Canon EOS R6m2",
  ExposureTime: "1/1000",
  FNumber: 2.8,
  ISO: 125,
  DateTimeOriginal: "2024:03:17 15:06:16",
  FocalLength: "200.0 mm",
  LensModel: "RF70-200mm F2.8 L IS USM",
  GPSPosition: "48 deg 8' 37.56\" N, 11 deg 34' 13.32\" E",
};

// Exercises mergeMetaAndVisionData against the fixtures, then checks
// getImagesToProcess image-selection behavior against a temp album:
// by default only images lacking a .json sidecar are returned; with
// { refresh: true } every image is returned.
async function main() {
  const tempRoot = await mkdtemp(join(tmpdir(), "vision-photos-"));

  try {
    assert.deepEqual(mergeMetaAndVisionData(EXIF_DATA, VISION_DATA), FINAL);

    const albumDirectory = join(tempRoot, "chiang-mai");
    const missingImage = join(albumDirectory, "2025-10-06-121017.jpg");
    const completeImage = join(albumDirectory, "2025-10-06-121212.jpg");

    await mkdir(albumDirectory, { recursive: true });
    await writeFile(missingImage, "");
    await writeFile(completeImage, "");
    // Sidecar only for the second image — the first should need processing.
    await writeFile(join(albumDirectory, "2025-10-06-121212.json"), "{}");

    assert.deepEqual(await getImagesToProcess(tempRoot), [missingImage]);
    assert.deepEqual(await getImagesToProcess(tempRoot, { refresh: true }), [
      missingImage,
      completeImage,
    ]);
  } finally {
    // Always remove the temp album, even when an assertion fails.
    await rm(tempRoot, { recursive: true, force: true });
  }
}

await main();
|
||||
522
scripts/vision.ts
Normal file
522
scripts/vision.ts
Normal file
|
|
@ -0,0 +1,522 @@
|
|||
#!/usr/bin/env -S node --experimental-strip-types
|
||||
|
||||
import { execFile } from "node:child_process";
|
||||
import { readFile, writeFile } from "node:fs/promises";
|
||||
import { relative, resolve } from "node:path";
|
||||
import { fileURLToPath } from "node:url";
|
||||
import { promisify } from "node:util";
|
||||
import Anthropic from "@anthropic-ai/sdk";
|
||||
import { consola } from "consola";
|
||||
import {
|
||||
getImagesMissingMetadata,
|
||||
getMetadataPathForImage,
|
||||
getPhotoAbsolutePath,
|
||||
getPhotoDirectories,
|
||||
PHOTOS_DIRECTORY,
|
||||
} from "../src/lib/photo-albums.ts";
|
||||
|
||||
const execFileAsync = promisify(execFile);
|
||||
|
||||
/**
|
||||
* Define the directory where the images are located.
|
||||
*/
|
||||
const PHOTOS_DIR = PHOTOS_DIRECTORY;
|
||||
|
||||
/**
|
||||
* Instantiate the Anthropic client.
|
||||
*/
|
||||
let anthropic: Anthropic | undefined;
|
||||
|
||||
function getAnthropicClient(): Anthropic {
|
||||
anthropic ??= new Anthropic({ maxRetries: 0 });
|
||||
return anthropic;
|
||||
}
|
||||
|
||||
function assertRequiredEnvironment(): void {
|
||||
if (!process.env.ANTHROPIC_API_KEY) {
|
||||
throw new Error(
|
||||
"Missing ANTHROPIC_API_KEY. `pnpm run vision` loads `.env.local` automatically. If you run the script directly, use `node --env-file=.env.local --experimental-strip-types scripts/vision.ts`.",
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Represents the metadata of an image in the Exif format.
 * Field names mirror the JSON emitted by `exiftool -j`.
 */
export interface ExifMetadata {
  SourceFile: string;
  FileName: string;
  Model: string; // camera body name
  FNumber: number; // aperture, e.g. 2.8
  FocalLength: string; // includes unit suffix, e.g. "200.0 mm" — see vision.spec.ts fixture
  ExposureTime: string; // e.g. "1/1000"
  ISO: number;
  DateTimeOriginal: string; // exiftool format "YYYY:MM:DD HH:MM:SS"
  LensModel: string;
  // GPS fields are optional — not every image carries location data.
  GPSPosition?: string;
  GPSLatitude?: string;
  GPSLongitude?: string;
}

/**
 * Represents the result of the AI analysis.
 */
export interface VisionAIResult {
  title_ideas: string[]; // candidate titles to pick from
  description: string; // used as the image alt text
  tags: string[];
}

/**
 * Represents the final metadata suggestion for an image, combining EXIF data
 * with the AI analysis. Written as the JSON sidecar next to the image.
 */
export interface ImageMetadataSuggestion {
  id: string; // image basename without extension
  title: string[];
  image: string; // relative path, e.g. "./2R9A2805.jpg"
  alt: string;
  location: string;
  date: string; // "YYYY-MM-DD"
  tags: string[];
  // All EXIF-derived values are normalized to strings here.
  exif: {
    camera: string;
    lens: string;
    aperture: string;
    iso: string;
    focal_length: string;
    shutter_speed: string;
  };
}

// Resolved CLI configuration; see parseCliOptions for flag/env precedence.
interface VisionCliOptions {
  refresh: boolean; // --refresh: reprocess images that already have sidecars
  photosDirectory?: string;
  visionConcurrency: number; // parallel API requests
  visionMaxRetries: number; // attempts per image on rate limiting
  visionBaseBackoffMs: number; // base delay for exponential backoff
}
|
||||
|
||||
function parseCliOptions(argv: string[]): VisionCliOptions {
|
||||
const getNumericOption = (name: string, fallback: number): number => {
|
||||
const prefix = `--${name}=`;
|
||||
const rawValue = argv
|
||||
.find((arg) => arg.startsWith(prefix))
|
||||
?.slice(prefix.length);
|
||||
const parsed = Number.parseInt(rawValue ?? "", 10);
|
||||
return Number.isFinite(parsed) && parsed > 0 ? parsed : fallback;
|
||||
};
|
||||
|
||||
const envConcurrency = Number.parseInt(
|
||||
process.env.VISION_CONCURRENCY ?? "",
|
||||
10,
|
||||
);
|
||||
const envMaxRetries = Number.parseInt(
|
||||
process.env.VISION_MAX_RETRIES ?? "",
|
||||
10,
|
||||
);
|
||||
const envBaseBackoffMs = Number.parseInt(
|
||||
process.env.VISION_BASE_BACKOFF_MS ?? "",
|
||||
10,
|
||||
);
|
||||
const nonFlagArgs = argv.filter((arg) => !arg.startsWith("--"));
|
||||
|
||||
return {
|
||||
refresh: argv.includes("--refresh"),
|
||||
photosDirectory: resolve(nonFlagArgs[0] ?? PHOTOS_DIR),
|
||||
visionConcurrency: getNumericOption(
|
||||
"concurrency",
|
||||
Number.isFinite(envConcurrency) && envConcurrency > 0
|
||||
? envConcurrency
|
||||
: 2,
|
||||
),
|
||||
visionMaxRetries: getNumericOption(
|
||||
"retries",
|
||||
Number.isFinite(envMaxRetries) && envMaxRetries > 0 ? envMaxRetries : 8,
|
||||
),
|
||||
visionBaseBackoffMs: getNumericOption(
|
||||
"backoff-ms",
|
||||
Number.isFinite(envBaseBackoffMs) && envBaseBackoffMs > 0
|
||||
? envBaseBackoffMs
|
||||
: 1500,
|
||||
),
|
||||
};
|
||||
}
|
||||
|
||||
function sleep(ms: number): Promise<void> {
|
||||
return new Promise((resolve) => setTimeout(resolve, ms));
|
||||
}
|
||||
|
||||
function isRateLimitError(error: unknown): boolean {
|
||||
return error instanceof Anthropic.RateLimitError;
|
||||
}
|
||||
|
||||
function extractRetryAfterMs(error: unknown): number | null {
|
||||
if (!(error instanceof Anthropic.RateLimitError)) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const retryAfter = error.headers?.get("retry-after");
|
||||
if (retryAfter) {
|
||||
const seconds = Number.parseFloat(retryAfter);
|
||||
if (Number.isFinite(seconds) && seconds > 0) {
|
||||
return Math.ceil(seconds * 1000);
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
async function mapWithConcurrency<T, R>(
|
||||
values: T[],
|
||||
concurrency: number,
|
||||
mapper: (value: T, index: number) => Promise<R>,
|
||||
): Promise<R[]> {
|
||||
if (values.length === 0) {
|
||||
return [];
|
||||
}
|
||||
|
||||
const results: R[] = new Array(values.length);
|
||||
const workerCount = Math.max(1, Math.min(concurrency, values.length));
|
||||
let cursor = 0;
|
||||
|
||||
const workers = Array.from({ length: workerCount }, async () => {
|
||||
while (true) {
|
||||
const currentIndex = cursor;
|
||||
cursor += 1;
|
||||
|
||||
if (currentIndex >= values.length) {
|
||||
return;
|
||||
}
|
||||
|
||||
const value = values[currentIndex];
|
||||
if (typeof value === "undefined") {
|
||||
continue;
|
||||
}
|
||||
|
||||
results[currentIndex] = await mapper(value, currentIndex);
|
||||
}
|
||||
});
|
||||
|
||||
await Promise.all(workers);
|
||||
return results;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get all images that don't have a JSON file and therefore need to be processed.
|
||||
*/
|
||||
export async function getImagesToProcess(
|
||||
photosDirectory = PHOTOS_DIR,
|
||||
options: Pick<VisionCliOptions, "refresh"> = { refresh: false },
|
||||
): Promise<string[]> {
|
||||
const relativeImagePaths = options.refresh
|
||||
? (await getPhotoDirectories(photosDirectory)).flatMap(
|
||||
(directory) => directory.imagePaths,
|
||||
)
|
||||
: await getImagesMissingMetadata(photosDirectory);
|
||||
|
||||
consola.info(
|
||||
options.refresh
|
||||
? `Refreshing ${relativeImagePaths.length} ${relativeImagePaths.length === 1 ? "image" : "images"} with metadata sidecars`
|
||||
: `Found ${relativeImagePaths.length} ${relativeImagePaths.length === 1 ? "image" : "images"} without metadata`,
|
||||
);
|
||||
|
||||
return relativeImagePaths.map((imagePath) =>
|
||||
getPhotoAbsolutePath(imagePath, photosDirectory),
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Extracts the EXIF metadata from an image file.
|
||||
* @param imagePath - The path to the image file.
|
||||
*
|
||||
* @returns A promise that resolves to the extracted EXIF metadata.
|
||||
*/
|
||||
export async function extractExifMetadata(
|
||||
imagePath: string,
|
||||
): Promise<ExifMetadata> {
|
||||
/// Check if `exiftool` is installed.
|
||||
try {
|
||||
await execFileAsync("exiftool", ["--version"]);
|
||||
} catch (_error) {
|
||||
consola.error(
|
||||
"exiftool is not installed. Please run `brew install exiftool`.",
|
||||
);
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
/// Extract the metadata
|
||||
const { stdout } = await execFileAsync("exiftool", ["-j", imagePath]);
|
||||
const output = JSON.parse(stdout) as ExifMetadata[];
|
||||
|
||||
if (!output[0]) {
|
||||
throw new Error(`No EXIF metadata found for ${imagePath}.`);
|
||||
}
|
||||
|
||||
return output[0];
|
||||
}
|
||||
|
||||
/**
|
||||
* Encodes an image file to base64.
|
||||
* @param imagePath - The path to the image file.
|
||||
* @returns A Promise that resolves to the base64 encoded image.
|
||||
*/
|
||||
async function base64EncodeImage(imagePath: string): Promise<string> {
|
||||
const buffer = await readFile(imagePath);
|
||||
return buffer.toString("base64");
|
||||
}
|
||||
|
||||
/// Anthropic tool definition used to force the model into returning
/// structured vision output (title ideas, alt-text description, tags)
/// instead of free-form prose.
const VISION_TOOL = {
  name: "vision_response",
  description: "Return the vision analysis of the image.",
  input_schema: {
    type: "object" as const,
    additionalProperties: false,
    properties: {
      /// Title suggestions (the prompt asks for 5).
      title_ideas: { type: "array", items: { type: "string" } },
      /// Alt-text-style description of the image.
      description: { type: "string" },
      /// Single-word tags; the prompt asks for the main subject first.
      tags: { type: "array", items: { type: "string" } },
    },
    required: ["title_ideas", "description", "tags"],
  },
};
|
||||
|
||||
/**
 * Generates image description, title suggestions and tags using AI.
 *
 * Retries with exponential backoff plus jitter when the API answers with a
 * rate-limit error; any other error, or exhausting the retries, rethrows the
 * last error seen.
 *
 * @param metadata - The metadata of the image.
 * @param options - Retry tuning: maximum retry count and base backoff in ms.
 * @returns A Promise that resolves to a VisionAIResult object containing the generated image description, title suggestions, and tags.
 */
async function generateImageDescriptionTitleSuggestionsAndTags(
  metadata: ExifMetadata,
  options: Pick<VisionCliOptions, "visionMaxRetries" | "visionBaseBackoffMs">,
): Promise<VisionAIResult> {
  /// Base64 encode the image in order to pass it to the API
  const encodedImage = await base64EncodeImage(metadata.SourceFile);

  const prompt =
    "Create an accurate and detailed description of this image that would also work as an alt text. The alt text should not contain words like image, photograph, illustration or such. Describe the scene as it is. Also come up with 5 title suggestions for this image. At last suggest 5 tags that suit the image description. These tags should be single words only. Identify the main subject or theme and make sure to put the according tag first. Return the description, the title suggestions and tags.";

  let lastError: unknown;

  for (let attempt = 0; attempt <= options.visionMaxRetries; attempt += 1) {
    try {
      /// Force a structured reply through the vision tool definition.
      const response = await getAnthropicClient().messages.create({
        model: "claude-opus-4-6",
        max_tokens: 2048,
        tools: [VISION_TOOL],
        tool_choice: { type: "tool", name: "vision_response" },
        messages: [
          {
            role: "user",
            content: [
              {
                type: "image",
                source: {
                  type: "base64",
                  /// NOTE(review): hard-coded to JPEG — assumes every photo
                  /// is a .jpg; confirm if other formats are ever added.
                  media_type: "image/jpeg",
                  data: encodedImage,
                },
              },
              { type: "text", text: prompt },
            ],
          },
        ],
      });

      const toolUseBlock = response.content.find((b) => b.type === "tool_use");
      if (!toolUseBlock || toolUseBlock.type !== "tool_use") {
        throw new Error(
          `No tool use response from AI for ${metadata.SourceFile}.`,
        );
      }

      const parsedResponse = toolUseBlock.input as VisionAIResult;

      /// Treat structurally valid but empty answers as failures too.
      if (
        parsedResponse.title_ideas.length === 0 ||
        parsedResponse.description.length === 0 ||
        parsedResponse.tags.length === 0
      ) {
        throw new Error(
          `Incomplete vision response for ${metadata.SourceFile}.`,
        );
      }

      return parsedResponse;
    } catch (error) {
      lastError = error;
      /// Only rate-limit errors are retried; anything else falls through to
      /// the final rethrow below.
      if (!isRateLimitError(error) || attempt >= options.visionMaxRetries) {
        break;
      }

      /// Wait the longer of the server-suggested retry-after and our own
      /// exponential backoff, plus random jitter to de-synchronize workers.
      const retryAfterMs = extractRetryAfterMs(error);
      const exponentialBackoffMs = options.visionBaseBackoffMs * 2 ** attempt;
      const jitterMs = Math.floor(Math.random() * 350);
      const waitMs =
        Math.max(retryAfterMs ?? 0, exponentialBackoffMs) + jitterMs;
      const relativeSourcePath = relative(process.cwd(), metadata.SourceFile);
      const nextAttempt = attempt + 1;
      consola.warn(
        `Rate limit for ${relativeSourcePath}. Retry ${nextAttempt}/${options.visionMaxRetries} in ${Math.ceil(waitMs / 1000)}s...`,
      );
      await sleep(waitMs);
    }
  }

  throw lastError;
}
|
||||
|
||||
function ensureVisionCanRun(imagesToProcess: string[]): void {
|
||||
if (imagesToProcess.length === 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
assertRequiredEnvironment();
|
||||
}
|
||||
|
||||
function getLocationFromExif(exifData: ExifMetadata): string {
|
||||
if (exifData.GPSPosition) {
|
||||
return exifData.GPSPosition;
|
||||
}
|
||||
|
||||
if (exifData.GPSLatitude && exifData.GPSLongitude) {
|
||||
return `${exifData.GPSLatitude}, ${exifData.GPSLongitude}`;
|
||||
}
|
||||
|
||||
return "";
|
||||
}
|
||||
|
||||
/**
|
||||
* Merges the metadata from EXIF data and vision data to create an ImageMetadataSuggestion object.
|
||||
* @param exifData - The EXIF metadata of the image.
|
||||
* @param visionData - The vision AI result data of the image.
|
||||
* @returns The merged ImageMetadataSuggestion object.
|
||||
*/
|
||||
export function mergeMetaAndVisionData(
|
||||
exifData: ExifMetadata,
|
||||
visionData: VisionAIResult,
|
||||
): ImageMetadataSuggestion {
|
||||
const [date] = exifData.DateTimeOriginal.split(" ");
|
||||
|
||||
if (!date) {
|
||||
throw new Error(`Missing original date for ${exifData.SourceFile}.`);
|
||||
}
|
||||
|
||||
return {
|
||||
id: exifData.FileName.replace(".jpg", ""),
|
||||
title: visionData.title_ideas,
|
||||
image: `./${exifData.FileName}`,
|
||||
alt: visionData.description,
|
||||
location: getLocationFromExif(exifData),
|
||||
date: date.replaceAll(":", "-"),
|
||||
tags: visionData.tags,
|
||||
exif: {
|
||||
camera: exifData.Model,
|
||||
lens: exifData.LensModel,
|
||||
aperture: exifData.FNumber.toString(),
|
||||
iso: exifData.ISO.toString(),
|
||||
focal_length: exifData.FocalLength.replace(" mm", ""),
|
||||
shutter_speed: exifData.ExposureTime,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Writes the given image metadata to a JSON file.
|
||||
* @param imageMetadata - The image metadata to be written.
|
||||
* @returns A Promise that resolves when the JSON file is written successfully.
|
||||
*/
|
||||
async function writeToJsonFile(
|
||||
imageMetadata: ImageMetadataSuggestion,
|
||||
imagePath: string,
|
||||
photosDirectory: string,
|
||||
): Promise<void> {
|
||||
const relativeImagePath = relative(photosDirectory, imagePath);
|
||||
const jsonPath = getMetadataPathForImage(relativeImagePath, photosDirectory);
|
||||
const json = JSON.stringify(imageMetadata, null, 2);
|
||||
await writeFile(jsonPath, json);
|
||||
}
|
||||
|
||||
/**
 * Main.
 *
 * Pipeline: find the images that need metadata (all images with `--refresh`),
 * extract their EXIF data, generate AI descriptions/titles/tags, merge both,
 * and write one JSON sidecar per image.
 */
async function main() {
  consola.start("Checking for images to process...");
  const cliOptions = parseCliOptions(process.argv.slice(2));
  const photosDirectory = cliOptions.photosDirectory ?? PHOTOS_DIR;

  /// Load all images that don't have a JSON file.
  const images = await getImagesToProcess(photosDirectory, cliOptions);

  if (images.length === 0) {
    consola.success(
      cliOptions.refresh
        ? "No images found to refresh."
        : "No images require metadata.",
    );
    return;
  }

  consola.info(
    `Vision settings: concurrency=${cliOptions.visionConcurrency}, retries=${cliOptions.visionMaxRetries}, backoff=${cliOptions.visionBaseBackoffMs}ms`,
  );

  /// Fail fast on missing environment before any expensive work starts.
  ensureVisionCanRun(images);

  /// Extract EXIF metadata from these images.
  /// Concurrency is fixed at 8 here — local exiftool work, no API limits.
  const exifData = await mapWithConcurrency(
    images,
    8,
    async (imagePath, index) => {
      consola.info(`Extracting EXIF ${index + 1}/${images.length}...`);
      return await extractExifMetadata(imagePath);
    },
  );

  /// Determine the image description, title suggestions and tags for each image with AI.
  /// Vision concurrency is user-configurable since the API is rate limited.
  const visionData = await mapWithConcurrency(
    exifData,
    cliOptions.visionConcurrency,
    async (exifEntry, index) => {
      consola.info(`Generating AI metadata ${index + 1}/${exifData.length}...`);
      return await generateImageDescriptionTitleSuggestionsAndTags(
        exifEntry,
        cliOptions,
      );
    },
  );

  /// Merge the EXIF and Vision data to create the final metadata suggestion.
  /// `exifData` and `visionData` are index-aligned by construction.
  const imageData = exifData.map((e, i) => {
    const currentVisionData = visionData[i];

    if (!currentVisionData) {
      throw new Error(`Missing vision data for ${e.SourceFile}.`);
    }

    return mergeMetaAndVisionData(e, currentVisionData);
  });

  /// Write the metadata to JSON files.
  /// The source file for each entry is looked up by position in `exifData`.
  await mapWithConcurrency(imageData, 8, async (imageMetadata, index) => {
    const sourceFile = exifData[index]?.SourceFile;

    if (!sourceFile) {
      throw new Error(`Missing source file for ${imageMetadata.id}.`);
    }

    await writeToJsonFile(imageMetadata, sourceFile, photosDirectory);
    consola.info(`Wrote metadata ${index + 1}/${imageData.length}.`);
  });

  consola.success("All images processed successfully.");
}
|
||||
|
||||
/// Run only when this module is executed directly (argv[1] matches this
/// file), not when it is imported by another module.
if (process.argv[1] && fileURLToPath(import.meta.url) === process.argv[1]) {
  try {
    await main();
  } catch (error) {
    /// Surface the failure and exit non-zero so CI / shell callers notice.
    consola.error(error);
    process.exit(1);
  }
}
|
||||
Loading…
Add table
Add a link
Reference in a new issue