- Created a new post on Image Voice Memos detailing a macOS app for browsing photos and recording voice memos with automatic transcription. - Added a guide for Initial VPS Setup on Debian covering system updates, user creation, and SSH hardening. - Introduced a post on caching webmention avatars locally at build time to enhance privacy and comply with CSP. - Documented the implementation of security headers for an Astro site behind Caddy, focusing on GDPR compliance and CSP. - Set up a Forgejo Actions runner for self-hosted CI/CD, detailing the installation and configuration process for automated deployments.
221 lines · 7.5 KiB · JavaScript
#!/usr/bin/env node
|
|
// One-shot migration of /Users/adrian/Developer/Websites/Content → src/content/posts/en/.
|
|
// Rewrites frontmatter to this project's schema and preserves year/month/day layout.
|
|
|
|
import {
  copyFileSync,
  existsSync,
  globSync, // NOTE(review): unused, and requires Node >= 22 — consider removing
  mkdirSync,
  readdirSync,
  readFileSync,
  statSync,
  writeFileSync,
} from 'node:fs';
import path from 'node:path';
|
|
|
|
// Source vault of markdown posts, laid out as year/month/day folders.
const SRC_ROOT = '/Users/adrian/Developer/Websites/Content';
// Destination inside this site's content collection (English posts).
const DEST_ROOT = '/Users/adrian/Developer/Websites/adrian-altner.de/src/content/posts/en';
|
|
|
|
// Skip list: starter duplicates and explicitly German .mdx posts.
// Entries are paths relative to SRC_ROOT with forward slashes, matching the
// `rel` computed in transform()/main().
const SKIP = new Set([
  '2022/07/08/first-post.mdx',
  '2022/07/15/second-post.mdx',
  '2022/07/22/third-post.mdx',
  '2024/07/01/using-mdx.mdx',
  '2024/07/19/markdown-style-guide.mdx',
  '2026/04/06/image-voice-memos.mdx', // superseded by .md sibling
  '2026/04/18/justified-layout.mdx', // German
  '2026/04/18/maple-mono.mdx', // German
  '2026/04/19/diigo-dsgvo-loeschung.mdx', // German
]);
|
|
|
|
// Category slug normalisation. Keys map from source `[[wiki-link]]` text (or
// plain category strings); values are this site's category slugs. Categories
// not listed here fall back to the lowercased source value (see transform()).
const CATEGORY_MAP = {
  'on-premises-private-cloud': 'on-premises-private-cloud',
  development: 'development',
  Development: 'development', // same slug; source data is inconsistently cased
  'in-eigener-sache': 'personal',
  projekte: 'projects',
};
|
|
|
|
// Walk recursively for .md and .mdx files under `dir`. Dotfiles and
// dot-directories are skipped. Matches accumulate into (and are returned
// via) `out`, so recursive calls share one result array.
// Fix: the original re-required node:fs via CJS `require` inside this ESM
// file; use the named imports instead.
function findContentFiles(dir, out = []) {
  for (const name of readdirSync(dir)) {
    if (name.startsWith('.')) continue;
    const full = path.join(dir, name);
    if (statSync(full).isDirectory()) {
      findContentFiles(full, out);
    } else if (/\.mdx?$/.test(name)) {
      out.push(full);
    }
  }
  return out;
}
|
|
|
|
// Minimal YAML frontmatter parser covering only the shapes present in the
// source files: scalar lines, inline arrays `[a, b]`, and block arrays
// (`key:` followed by indented `- item` lines). Returns `{ data, body }`;
// when no leading `---` fence is found, `data` is empty and `body` is the
// whole input unchanged.
function parseFrontmatter(raw) {
  const fence = raw.match(/^---\n([\s\S]*?)\n---\n?([\s\S]*)$/);
  if (!fence) return { data: {}, body: raw };
  const [, yamlBlock, body] = fence;

  const data = {};
  const rows = yamlBlock.split('\n');
  let idx = 0;
  while (idx < rows.length) {
    // Drop trailing "# comment" — only when preceded by whitespace, so a `#`
    // inside a quoted string survives. Heuristic, but matches our data.
    const withoutComment = rows[idx].replace(/\s+#\s.*$/, '');
    const pair = withoutComment.match(/^(\w+):\s*(.*)$/);
    if (!pair) {
      idx += 1;
      continue;
    }
    const key = pair[1];
    const scalar = pair[2].trim();

    // Block array: bare `key:` with `- item` lines following.
    const hasItemBelow = idx + 1 < rows.length && /^\s+-\s/.test(rows[idx + 1]);
    if (scalar === '' && hasItemBelow) {
      const items = [];
      idx += 1;
      while (idx < rows.length && /^\s+-\s/.test(rows[idx])) {
        items.push(rows[idx].replace(/^\s+-\s+/, '').replace(/^["']|["']$/g, ''));
        idx += 1;
      }
      data[key] = items;
      continue;
    }

    if (scalar.startsWith('[') && scalar.endsWith(']')) {
      // Inline array; naive comma split is fine for our data.
      data[key] = scalar
        .slice(1, -1)
        .split(',')
        .map((part) => part.trim().replace(/^["']|["']$/g, ''))
        .filter(Boolean);
    } else if (scalar === 'true' || scalar === 'false') {
      data[key] = scalar === 'true';
    } else if (/^-?\d+$/.test(scalar)) {
      data[key] = Number(scalar);
    } else {
      data[key] = scalar.replace(/^["']|["']$/g, '');
    }
    idx += 1;
  }
  return { data, body };
}
|
|
|
|
// Unwrap a `[[wiki-link]]` string to its inner text. Plain strings are
// returned unchanged; non-strings yield undefined.
function wikiLinkInner(value) {
  if (typeof value !== 'string') return undefined;
  const hit = /^\[\[([^\]]+)\]\]$/.exec(value);
  return hit === null ? value : hit[1];
}
|
|
|
|
// Single-quote a YAML scalar when emitting it bare would change its parse:
// indicator characters anywhere in the string, leading/trailing whitespace,
// or a leading `- ` (would read as a sequence entry). Internal single quotes
// are doubled per YAML single-quote escaping.
// Fix: the original character class missed the flow indicators `{`, `}`, `,`
// and the leading sequence dash, so values like `{x}` or `- item` were
// emitted unquoted and would misparse.
function quoteIfNeeded(s) {
  const needsQuoting =
    /[:#\[\]{},&*!|>'"%@`]/.test(s) ||
    /^\s|\s$/.test(s) ||
    /^-(\s|$)/.test(s);
  if (needsQuoting) {
    return `'${s.replace(/'/g, "''")}'`;
  }
  return s;
}
|
|
|
|
// Serialise the migrated frontmatter object back into a `---`-fenced YAML
// block, emitting keys in fixed schema order. Missing keys, null/undefined
// values, and empty arrays are omitted; arrays render as block sequences.
function formatFrontmatter(data) {
  const KEY_ORDER = [
    'title', 'description', 'pubDate', 'updatedDate',
    'heroImage', 'heroAlt', 'hideHero',
    'category',
    'tags',
    'seriesParent', 'seriesOrder',
    'url', 'repo', 'toc', 'draft',
    'translationKey',
  ];

  const out = ['---'];
  for (const key of KEY_ORDER) {
    if (!(key in data)) continue;
    const value = data[key];
    if (value === undefined || value === null) continue;

    if (Array.isArray(value)) {
      if (value.length === 0) continue;
      out.push(`${key}:`);
      value.forEach((item) => out.push(`  - ${quoteIfNeeded(String(item))}`));
    } else if (typeof value === 'boolean' || typeof value === 'number') {
      // Booleans/numbers are valid bare YAML scalars — no quoting.
      out.push(`${key}: ${value}`);
    } else {
      out.push(`${key}: ${quoteIfNeeded(String(value))}`);
    }
  }
  out.push('---');
  return `${out.join('\n')}\n`;
}
|
|
|
|
// Convert one source post into its destination path and rewritten content.
// Returns null for files on the SKIP list; otherwise an object carrying the
// destination file path, the full output content (new frontmatter + body),
// and the source/destination directories (used afterwards to copy assets).
function transform(srcFile) {
  const rel = path.relative(SRC_ROOT, srcFile).replace(/\\/g, '/');
  if (SKIP.has(rel)) return null;

  const { data, body } = parseFrontmatter(readFileSync(srcFile, 'utf8'));

  const out = {
    title: data.title,
    description: data.description,
  };

  // Date: prefer pubDate, fall back to publishDate. Values like
  // `2026-03-01T18:57:00+01:00`, `2026-03-22`, or `Jul 08 2022` pass through
  // unchanged; zod's z.coerce.date() parses them downstream.
  const rawDate = data.pubDate ?? data.publishDate;
  if (rawDate !== undefined) out.pubDate = String(rawDate);
  if (data.updatedDate) out.updatedDate = String(data.updatedDate);

  // Hero image: `heroImage` in the existing format, `cover` in the new one.
  const hero = data.heroImage || data.cover;
  if (hero) out.heroImage = hero;
  if (data.coverAlt) out.heroAlt = data.coverAlt;
  if (typeof data.hideHero === 'boolean') out.hideHero = data.hideHero;

  // Category: `[[wiki-link]]` → en/<mapped-slug>; plain strings (already used
  // in some existing posts) stay as en/<slug>.
  if (data.category) {
    const slug = wikiLinkInner(data.category);
    out.category = `en/${CATEGORY_MAP[slug] ?? slug.toLowerCase()}`;
  }

  // Series: `[[parent-slug]]` → bare parent-slug.
  if (data.seriesParent) out.seriesParent = wikiLinkInner(data.seriesParent);
  if (typeof data.seriesOrder === 'number') out.seriesOrder = data.seriesOrder;

  if (Array.isArray(data.tags) && data.tags.length > 0) out.tags = data.tags;

  // Remaining optional passthroughs.
  if (data.url) out.url = data.url;
  if (data.repo) out.repo = data.repo;
  if (typeof data.toc === 'boolean') out.toc = data.toc;
  if (typeof data.draft === 'boolean') out.draft = data.draft;
  if (data.translationKey) out.translationKey = data.translationKey;

  const destFile = path.join(DEST_ROOT, rel); // preserve year/month/day layout
  const content = `${formatFrontmatter(out)}\n${body.replace(/^\n+/, '')}`;
  return {
    destFile,
    content,
    srcDir: path.dirname(srcFile),
    destDir: path.dirname(destFile),
  };
}
|
|
|
|
// Drive the migration: write every transformed post under DEST_ROOT, then
// copy sibling asset files (images) from each source directory that produced
// a post. Prints a summary line with written/skipped/copied counts.
// Fixes: drops the CJS `require('node:fs')` shim (named imports are already
// available), drops the duplicate SKIP check (transform() returns null for
// skipped files, so the `skipped` count is unchanged), and replaces the
// fragile `srcDir + '|' + destDir` string-pair encoding — which would break
// on any path containing '|' — with a Map keyed by source directory.
function main() {
  let written = 0;
  let skipped = 0;
  let assetsCopied = 0;
  // srcDir → destDir for every directory that yielded at least one post.
  const dirPairs = new Map();

  for (const srcFile of findContentFiles(SRC_ROOT)) {
    const t = transform(srcFile); // null for SKIP-listed files
    if (!t) {
      skipped++;
      continue;
    }
    mkdirSync(t.destDir, { recursive: true });
    writeFileSync(t.destFile, t.content);
    written++;
    dirPairs.set(t.srcDir, t.destDir);
  }

  // Copy accompanying asset files (images) from each source dir, never
  // overwriting an asset that already exists at the destination.
  for (const [srcDir, destDir] of dirPairs) {
    for (const name of readdirSync(srcDir)) {
      if (name.startsWith('.')) continue;
      if (/\.(mdx?|DS_Store)$/i.test(name)) continue;
      const destAsset = path.join(destDir, name);
      if (!existsSync(destAsset)) {
        copyFileSync(path.join(srcDir, name), destAsset);
        assetsCopied++;
      }
    }
  }

  console.log(`wrote ${written} post files, skipped ${skipped}, copied ${assetsCopied} assets`);
}
|
|
|
|
// Node ESM doesn't provide require by default — fall back to createRequire.
// NOTE: ESM imports are hoisted, so this bottom-of-file import still runs
// first; the `const require` binding is initialised before main() is called
// on the next line, so the helpers that call require() are past the TDZ.
import { createRequire } from 'node:module';
const require = createRequire(import.meta.url);

main();
|