Monorepo for Wisp.place, a static site hosting service built on top of the AT Protocol.

fixes

+21 -11
hosting-service/src/lib/firehose.ts
···
try {
if (commit.operation === 'create' || commit.operation === 'update') {
- await this.handleCreateOrUpdate(did, commit.rkey, commit.record);
+ // Pass the CID from the event for verification
+ await this.handleCreateOrUpdate(did, commit.rkey, commit.record, commit.cid);
} else if (commit.operation === 'delete') {
await this.handleDelete(did, commit.rkey);
}
···
}
}
- private async handleCreateOrUpdate(did: string, site: string, record: any) {
+ private async handleCreateOrUpdate(did: string, site: string, record: any, eventCid?: string) {
this.log('Processing create/update', { did, site });
if (!this.validateRecord(record)) {
···
this.log('Resolved PDS', { did, pdsEndpoint });
- // Verify record exists on PDS
+ // Verify record exists on PDS and fetch its CID
+ let verifiedCid: string;
try {
- const recordUrl = `${pdsEndpoint}/xrpc/com.atproto.repo.getRecord?repo=${encodeURIComponent(did)}&collection=place.wisp.fs&rkey=${encodeURIComponent(site)}`;
- const recordRes = await safeFetch(recordUrl);
+ const result = await fetchSiteRecord(did, site);
- if (!recordRes.ok) {
- this.log('Record not found on PDS, skipping cache', {
+ if (!result) {
+ this.log('Record not found on PDS, skipping cache', { did, site });
+ return;
+ }
+
+ verifiedCid = result.cid;
+
+ // Verify event CID matches PDS CID (prevent cache poisoning)
+ if (eventCid && eventCid !== verifiedCid) {
+ this.log('CID mismatch detected - potential spoofed event', {
did,
site,
- status: recordRes.status,
+ eventCid,
+ verifiedCid
});
return;
}
- this.log('Record verified on PDS', { did, site });
+ this.log('Record verified on PDS', { did, site, cid: verifiedCid });
} catch (err) {
this.log('Failed to verify record on PDS', {
did,
···
return;
}
- // Cache the record
- await downloadAndCacheSite(did, site, fsRecord, pdsEndpoint);
+ // Cache the record with verified CID
+ await downloadAndCacheSite(did, site, fsRecord, pdsEndpoint, verifiedCid);
// Upsert site to database
await upsertSite(did, site, fsRecord.site);
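Note on the firehose change above: the CID carried by the event is now compared against the record's CID on the PDS before anything is cached, so a spoofed or stale Jetstream event cannot poison the cache. A minimal sketch of that gate, assuming a fetcher shaped like the fetchSiteRecord in this commit (the verifyEventAgainstPds name is hypothetical):

    // Sketch only, not part of the commit.
    type FetchRecord = (did: string, rkey: string) => Promise<{ record: unknown; cid: string } | null>;

    async function verifyEventAgainstPds(
      did: string,
      rkey: string,
      eventCid: string | undefined,
      fetchRecord: FetchRecord
    ): Promise<{ record: unknown; cid: string } | null> {
      const result = await fetchRecord(did, rkey);
      if (!result) return null;                  // record missing on the PDS: drop the event
      if (eventCid && eventCid !== result.cid) { // CID mismatch: likely spoofed or stale
        return null;
      }
      return result;                             // safe to cache
    }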
+7 -4
hosting-service/src/lib/html-rewriter.ts
···
let rewritten = html;
// Rewrite each attribute type
+ // Use more specific patterns to prevent ReDoS attacks
for (const attr of REWRITABLE_ATTRIBUTES) {
if (attr === 'srcset') {
- // Special handling for srcset
+ // Special handling for srcset - use possessive quantifiers via atomic grouping simulation
+ // Limit whitespace to reasonable amount (max 5 spaces) to prevent ReDoS
const srcsetRegex = new RegExp(
- `\\b${attr}\\s*=\\s*"([^"]*)"`,
+ `\\b${attr}[ \\t]{0,5}=[ \\t]{0,5}"([^"]*)"`,
'gi'
);
rewritten = rewritten.replace(srcsetRegex, (match, value) => {
···
});
} else {
// Regular attributes with quoted values
+ // Limit whitespace to prevent catastrophic backtracking
const doubleQuoteRegex = new RegExp(
- `\\b${attr}\\s*=\\s*"([^"]*)"`,
+ `\\b${attr}[ \\t]{0,5}=[ \\t]{0,5}"([^"]*)"`,
'gi'
);
const singleQuoteRegex = new RegExp(
- `\\b${attr}\\s*=\\s*'([^']*)'`,
+ `\\b${attr}[ \\t]{0,5}=[ \\t]{0,5}'([^']*)'`,
'gi'
);
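Note on the rewriter change above: the unbounded \s* around = is replaced with [ \t]{0,5}, which the commit treats as a ReDoS guard; a bounded quantifier keeps the match cost predictable on hostile input. A small standalone sketch of the bounded pattern (the attribute name here is only an example, not taken from REWRITABLE_ATTRIBUTES):

    // Sketch only. Bounded whitespace around '=' with a quoted-value capture.
    const attr = 'src'; // example attribute
    const bounded = new RegExp(`\\b${attr}[ \\t]{0,5}=[ \\t]{0,5}"([^"]*)"`, 'gi');
    const html = '<img src="/a.png">';
    console.log(html.replace(bounded, (_m, value) => `${attr}="/rewritten${value}"`));
    // -> <img src="/rewritten/a.png">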
+73 -5
hosting-service/src/lib/utils.ts
···
import { AtpAgent } from '@atproto/api';
import type { WispFsRecord, Directory, Entry, File } from './types';
- import { existsSync, mkdirSync } from 'fs';
- import { writeFile } from 'fs/promises';
+ import { existsSync, mkdirSync, readFileSync } from 'fs';
+ import { writeFile, readFile } from 'fs/promises';
import { safeFetchJson, safeFetchBlob } from './safe-fetch';
import { CID } from 'multiformats/cid';
+ import { createHash } from 'crypto';
const CACHE_DIR = './cache/sites';
+ const CACHE_TTL = 14 * 24 * 60 * 60 * 1000; // 14 days cache TTL
+
+ interface CacheMetadata {
+ recordCid: string;
+ cachedAt: number;
+ did: string;
+ rkey: string;
+ }
// Type guards for different blob reference formats
interface IpldLink {
···
}
}
- export async function fetchSiteRecord(did: string, rkey: string): Promise<WispFsRecord | null> {
+ export async function fetchSiteRecord(did: string, rkey: string): Promise<{ record: WispFsRecord; cid: string } | null> {
try {
const pdsEndpoint = await getPdsForDid(did);
if (!pdsEndpoint) return null;
const url = `${pdsEndpoint}/xrpc/com.atproto.repo.getRecord?repo=${encodeURIComponent(did)}&collection=place.wisp.fs&rkey=${encodeURIComponent(rkey)}`;
const data = await safeFetchJson(url);
- return data.value as WispFsRecord;
+
+ // Return both the record and its CID for verification
+ return {
+ record: data.value as WispFsRecord,
+ cid: data.cid || ''
+ };
} catch (err) {
console.error('Failed to fetch site record', did, rkey, err);
return null;
···
return null;
}
- export async function downloadAndCacheSite(did: string, rkey: string, record: WispFsRecord, pdsEndpoint: string): Promise<void> {
+ export async function downloadAndCacheSite(did: string, rkey: string, record: WispFsRecord, pdsEndpoint: string, recordCid: string): Promise<void> {
console.log('Caching site', did, rkey);
// Validate record structure
···
}
await cacheFiles(did, rkey, record.root.entries, pdsEndpoint, '');
+
+ // Save cache metadata with CID for verification
+ await saveCacheMetadata(did, rkey, recordCid);
}
async function cacheFiles(
···
export function isCached(did: string, site: string): boolean {
return existsSync(`${CACHE_DIR}/${did}/${site}`);
}
+
+ async function saveCacheMetadata(did: string, rkey: string, recordCid: string): Promise<void> {
+ const metadata: CacheMetadata = {
+ recordCid,
+ cachedAt: Date.now(),
+ did,
+ rkey
+ };
+
+ const metadataPath = `${CACHE_DIR}/${did}/${rkey}/.metadata.json`;
+ const metadataDir = metadataPath.substring(0, metadataPath.lastIndexOf('/'));
+
+ if (!existsSync(metadataDir)) {
+ mkdirSync(metadataDir, { recursive: true });
+ }
+
+ await writeFile(metadataPath, JSON.stringify(metadata, null, 2));
+ }
+
+ async function getCacheMetadata(did: string, rkey: string): Promise<CacheMetadata | null> {
+ try {
+ const metadataPath = `${CACHE_DIR}/${did}/${rkey}/.metadata.json`;
+ if (!existsSync(metadataPath)) return null;
+
+ const content = await readFile(metadataPath, 'utf-8');
+ return JSON.parse(content) as CacheMetadata;
+ } catch (err) {
+ console.error('Failed to read cache metadata', err);
+ return null;
+ }
+ }
+
+ export async function isCacheValid(did: string, rkey: string, currentRecordCid?: string): Promise<boolean> {
+ const metadata = await getCacheMetadata(did, rkey);
+ if (!metadata) return false;
+
+ // Check if cache has expired (14 days TTL)
+ const cacheAge = Date.now() - metadata.cachedAt;
+ if (cacheAge > CACHE_TTL) {
+ console.log('[Cache] Cache expired for', did, rkey);
+ return false;
+ }
+
+ // If current CID is provided, verify it matches
+ if (currentRecordCid && metadata.recordCid !== currentRecordCid) {
+ console.log('[Cache] CID mismatch for', did, rkey, 'cached:', metadata.recordCid, 'current:', currentRecordCid);
+ return false;
+ }
+
+ return true;
+ }
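Note on utils.ts: the new .metadata.json sidecar stores the record CID and the cache timestamp so isCacheValid can enforce the 14-day TTL and detect a changed record. A hedged sketch of a caller, assuming the exports above plus the getPdsForDid helper referenced inside fetchSiteRecord (the refreshIfStale wrapper itself is hypothetical):

    // Sketch only - hypothetical refresh path built on the helpers from this diff.
    async function refreshIfStale(did: string, rkey: string): Promise<boolean> {
      const latest = await fetchSiteRecord(did, rkey);            // { record, cid } | null
      if (!latest) return false;
      if (await isCacheValid(did, rkey, latest.cid)) return true; // TTL and CID both fine
      const pds = await getPdsForDid(did);                        // assumed helper in utils.ts
      if (!pds) return false;
      await downloadAndCacheSite(did, rkey, latest.record, pds, latest.cid);
      return true;
    }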
+42 -1
src/index.ts
···
import {
createClientMetadata,
getOAuthClient,
- getCurrentKeys
+ getCurrentKeys,
+ cleanupExpiredSessions,
+ rotateKeysIfNeeded
} from './lib/oauth-client'
import { authRoutes } from './routes/auth'
import { wispRoutes } from './routes/wisp'
···
const client = await getOAuthClient(config)
+ // Periodic maintenance: cleanup expired sessions and rotate keys
+ // Run every hour
+ const runMaintenance = async () => {
+ console.log('[Maintenance] Running periodic maintenance...')
+ await cleanupExpiredSessions()
+ await rotateKeysIfNeeded()
+ }
+
+ // Run maintenance on startup
+ runMaintenance()
+
+ // Schedule maintenance to run every hour
+ setInterval(runMaintenance, 60 * 60 * 1000)
+
export const app = new Elysia()
+ // Security headers middleware
+ .onAfterHandle(({ set }) => {
+ // Prevent clickjacking attacks
+ set.headers['X-Frame-Options'] = 'DENY'
+ // Prevent MIME type sniffing
+ set.headers['X-Content-Type-Options'] = 'nosniff'
+ // Strict Transport Security (HSTS) - enforce HTTPS
+ set.headers['Strict-Transport-Security'] = 'max-age=31536000; includeSubDomains'
+ // Referrer policy - limit referrer information
+ set.headers['Referrer-Policy'] = 'strict-origin-when-cross-origin'
+ // Content Security Policy
+ set.headers['Content-Security-Policy'] =
+ "default-src 'self'; " +
+ "script-src 'self' 'unsafe-inline' 'unsafe-eval'; " +
+ "style-src 'self' 'unsafe-inline'; " +
+ "img-src 'self' data: https:; " +
+ "font-src 'self' data:; " +
+ "connect-src 'self' https:; " +
+ "frame-ancestors 'none'; " +
+ "base-uri 'self'; " +
+ "form-action 'self'"
+ // Additional security headers
+ set.headers['X-XSS-Protection'] = '1; mode=block'
+ set.headers['Permissions-Policy'] = 'geolocation=(), microphone=(), camera=()'
+ })
.use(
openapi({
references: fromTypes()
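Note on src/index.ts: the onAfterHandle hook stamps the security headers onto every response, and the maintenance timer runs the cleanup and key-rotation exports hourly. A quick hedged smoke check for the headers, assuming the app is listening locally (the port is a placeholder):

    // Sketch only - manual check against a locally running instance.
    const res = await fetch('http://localhost:3000/');
    for (const name of ['x-frame-options', 'content-security-policy', 'strict-transport-security']) {
      console.log(name, '=>', res.headers.get(name));
    }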
+136 -16
src/lib/db.ts
···
CREATE TABLE IF NOT EXISTS oauth_sessions (
sub TEXT PRIMARY KEY,
data TEXT NOT NULL,
- updated_at BIGINT DEFAULT EXTRACT(EPOCH FROM NOW())
+ updated_at BIGINT DEFAULT EXTRACT(EPOCH FROM NOW()),
+ expires_at BIGINT NOT NULL DEFAULT EXTRACT(EPOCH FROM NOW()) + 2592000
)
`;
await db`
CREATE TABLE IF NOT EXISTS oauth_keys (
kid TEXT PRIMARY KEY,
- jwk TEXT NOT NULL
+ jwk TEXT NOT NULL,
+ created_at BIGINT DEFAULT EXTRACT(EPOCH FROM NOW())
)
`;
···
)
`;
- // Add rkey column if it doesn't exist (for existing databases)
+ // Add columns if they don't exist (for existing databases)
try {
await db`ALTER TABLE domains ADD COLUMN IF NOT EXISTS rkey TEXT`;
+ } catch (err) {
+ // Column might already exist, ignore
+ }
+
+ try {
+ await db`ALTER TABLE oauth_sessions ADD COLUMN IF NOT EXISTS expires_at BIGINT NOT NULL DEFAULT EXTRACT(EPOCH FROM NOW()) + 2592000`;
+ } catch (err) {
+ // Column might already exist, ignore
+ }
+
+ try {
+ await db`ALTER TABLE oauth_keys ADD COLUMN IF NOT EXISTS created_at BIGINT DEFAULT EXTRACT(EPOCH FROM NOW())`;
+ } catch (err) {
+ // Column might already exist, ignore
+ }
+
+ try {
+ await db`ALTER TABLE oauth_states ADD COLUMN IF NOT EXISTS expires_at BIGINT DEFAULT EXTRACT(EPOCH FROM NOW()) + 3600`;
} catch (err) {
// Column might already exist, ignore
}
···
return rows[0]?.rkey ?? null;
};
+ // Session timeout configuration (30 days in seconds)
+ const SESSION_TIMEOUT = 30 * 24 * 60 * 60; // 2592000 seconds
+ // OAuth state timeout (1 hour in seconds)
+ const STATE_TIMEOUT = 60 * 60; // 3600 seconds
+
const stateStore = {
async set(key: string, data: any) {
console.debug('[stateStore] set', key)
+ const expiresAt = Math.floor(Date.now() / 1000) + STATE_TIMEOUT;
await db`
- INSERT INTO oauth_states (key, data)
- VALUES (${key}, ${JSON.stringify(data)})
- ON CONFLICT (key) DO UPDATE SET data = EXCLUDED.data
+ INSERT INTO oauth_states (key, data, created_at, expires_at)
+ VALUES (${key}, ${JSON.stringify(data)}, EXTRACT(EPOCH FROM NOW()), ${expiresAt})
+ ON CONFLICT (key) DO UPDATE SET data = EXCLUDED.data, expires_at = ${expiresAt}
`;
},
async get(key: string) {
console.debug('[stateStore] get', key)
- const result = await db`SELECT data FROM oauth_states WHERE key = ${key}`;
- return result[0] ? JSON.parse(result[0].data) : undefined;
+ const now = Math.floor(Date.now() / 1000);
+ const result = await db`
+ SELECT data, expires_at
+ FROM oauth_states
+ WHERE key = ${key}
+ `;
+ if (!result[0]) return undefined;
+
+ // Check if expired
+ const expiresAt = Number(result[0].expires_at);
+ if (expiresAt && now > expiresAt) {
+ console.debug('[stateStore] State expired, deleting', key);
+ await db`DELETE FROM oauth_states WHERE key = ${key}`;
+ return undefined;
+ }
+
+ return JSON.parse(result[0].data);
},
async del(key: string) {
console.debug('[stateStore] del', key)
···
const sessionStore = {
async set(sub: string, data: any) {
console.debug('[sessionStore] set', sub)
+ const expiresAt = Math.floor(Date.now() / 1000) + SESSION_TIMEOUT;
await db`
- INSERT INTO oauth_sessions (sub, data)
- VALUES (${sub}, ${JSON.stringify(data)})
- ON CONFLICT (sub) DO UPDATE SET data = EXCLUDED.data, updated_at = EXTRACT(EPOCH FROM NOW())
+ INSERT INTO oauth_sessions (sub, data, updated_at, expires_at)
+ VALUES (${sub}, ${JSON.stringify(data)}, EXTRACT(EPOCH FROM NOW()), ${expiresAt})
+ ON CONFLICT (sub) DO UPDATE SET
+ data = EXCLUDED.data,
+ updated_at = EXTRACT(EPOCH FROM NOW()),
+ expires_at = ${expiresAt}
`;
},
async get(sub: string) {
console.debug('[sessionStore] get', sub)
- const result = await db`SELECT data FROM oauth_sessions WHERE sub = ${sub}`;
- return result[0] ? JSON.parse(result[0].data) : undefined;
+ const now = Math.floor(Date.now() / 1000);
+ const result = await db`
+ SELECT data, expires_at
+ FROM oauth_sessions
+ WHERE sub = ${sub}
+ `;
+ if (!result[0]) return undefined;
+
+ // Check if expired
+ const expiresAt = Number(result[0].expires_at);
+ if (expiresAt && now > expiresAt) {
+ console.log('[sessionStore] Session expired, deleting', sub);
+ await db`DELETE FROM oauth_sessions WHERE sub = ${sub}`;
+ return undefined;
+ }
+
+ return JSON.parse(result[0].data);
},
async del(sub: string) {
console.debug('[sessionStore] del', sub)
···
export { sessionStore };
+ // Cleanup expired sessions and states
+ export const cleanupExpiredSessions = async () => {
+ const now = Math.floor(Date.now() / 1000);
+ try {
+ const sessionsDeleted = await db`
+ DELETE FROM oauth_sessions WHERE expires_at < ${now}
+ `;
+ const statesDeleted = await db`
+ DELETE FROM oauth_states WHERE expires_at IS NOT NULL AND expires_at < ${now}
+ `;
+ console.log(`[Cleanup] Deleted ${sessionsDeleted.length} expired sessions and ${statesDeleted.length} expired states`);
+ return { sessions: sessionsDeleted.length, states: statesDeleted.length };
+ } catch (err) {
+ console.error('[Cleanup] Failed to cleanup expired data:', err);
+ return { sessions: 0, states: 0 };
+ }
+ };
+
export const createClientMetadata = (config: { domain: `https://${string}`, clientName: string }): ClientMetadata => ({
client_id: `${config.domain}/client-metadata.json`,
client_name: config.clientName,
···
if (!priv) return;
const kid = key.kid ?? crypto.randomUUID();
await db`
- INSERT INTO oauth_keys (kid, jwk)
- VALUES (${kid}, ${JSON.stringify(priv)})
+ INSERT INTO oauth_keys (kid, jwk, created_at)
+ VALUES (${kid}, ${JSON.stringify(priv)}, EXTRACT(EPOCH FROM NOW()))
ON CONFLICT (kid) DO UPDATE SET jwk = EXCLUDED.jwk
`;
};
const loadPersistedKeys = async (): Promise<JoseKey[]> => {
- const rows = await db`SELECT kid, jwk FROM oauth_keys ORDER BY kid`;
+ const rows = await db`SELECT kid, jwk, created_at FROM oauth_keys ORDER BY kid`;
const keys: JoseKey[] = [];
for (const row of rows) {
try {
···
let currentKeys: JoseKey[] = [];
export const getCurrentKeys = () => currentKeys;
+
+ // Key rotation - rotate keys older than 30 days (monthly rotation)
+ const KEY_MAX_AGE = 30 * 24 * 60 * 60; // 30 days in seconds
+
+ export const rotateKeysIfNeeded = async (): Promise<boolean> => {
+ const now = Math.floor(Date.now() / 1000);
+ const cutoffTime = now - KEY_MAX_AGE;
+
+ try {
+ // Find keys older than 30 days
+ const oldKeys = await db`
+ SELECT kid, created_at FROM oauth_keys
+ WHERE created_at IS NOT NULL AND created_at < ${cutoffTime}
+ ORDER BY created_at ASC
+ `;
+
+ if (oldKeys.length === 0) {
+ console.log('[KeyRotation] No keys need rotation');
+ return false;
+ }
+
+ console.log(`[KeyRotation] Found ${oldKeys.length} key(s) older than 30 days, rotating oldest key`);
+
+ // Rotate the oldest key
+ const oldestKey = oldKeys[0];
+ const oldKid = oldestKey.kid;
+
+ // Generate new key with same kid
+ const newKey = await JoseKey.generate(['ES256'], oldKid);
+ await persistKey(newKey);
+
+ console.log(`[KeyRotation] Rotated key ${oldKid}`);
+
+ // Reload keys into memory
+ currentKeys = await ensureKeys();
+
+ return true;
+ } catch (err) {
+ console.error('[KeyRotation] Failed to rotate keys:', err);
+ return false;
+ }
+ };
export const getOAuthClient = async (config: { domain: `https://${string}`, clientName: string }) => {
if (currentKeys.length === 0) {
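Note on db.ts: sessions and states now carry an expires_at column in epoch seconds; get() deletes lazily on read and cleanupExpiredSessions sweeps whatever is left. The expiry arithmetic in isolation, with the constant copied from the diff:

    // Sketch only - the same second-based arithmetic the stores use.
    const SESSION_TIMEOUT = 30 * 24 * 60 * 60; // 30 days
    const nowSec = () => Math.floor(Date.now() / 1000);
    const expiresAt = nowSec() + SESSION_TIMEOUT; // value written to oauth_sessions.expires_at
    const expired = nowSec() > expiresAt;         // the check get() performs before returning data
    console.log({ expiresAt, expired });          // expired stays false for 30 days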
+37
src/lib/logger.ts
···
+ // Secure logging utility - only verbose in development mode
+ const isDev = process.env.NODE_ENV !== 'production';
+
+ export const logger = {
+ // Always log these (safe for production)
+ info: (...args: any[]) => {
+ console.log(...args);
+ },
+
+ // Only log in development (may contain sensitive info)
+ debug: (...args: any[]) => {
+ if (isDev) {
+ console.debug(...args);
+ }
+ },
+
+ // Safe error logging - sanitizes in production
+ error: (message: string, error?: any) => {
+ if (isDev) {
+ // Development: log full error details
+ console.error(message, error);
+ } else {
+ // Production: log only the message, not error details
+ console.error(message);
+ }
+ },
+
+ // Log error with context but sanitize sensitive data in production
+ errorWithContext: (message: string, context?: Record<string, any>, error?: any) => {
+ if (isDev) {
+ console.error(message, context, error);
+ } else {
+ // In production, only log the message
+ console.error(message);
+ }
+ }
+ };
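Note on logger.ts: debug output and full error objects are only emitted when NODE_ENV is not 'production'. Typical call sites might look like the following (messages and import path are placeholders; the path depends on the caller):

    // Sketch only - example usage of the logger above.
    import { logger } from '../lib/logger';

    logger.info('[Server] listening');                                  // always printed
    logger.debug('[Auth] session payload', { sub: 'did:plc:example' }); // development only
    try {
      throw new Error('boom');
    } catch (err) {
      logger.error('[Worker] job failed', err);                         // message only in production
    }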
+115 -14
src/lib/oauth-client.ts
···
import { NodeOAuthClient, type ClientMetadata } from "@atproto/oauth-client-node";
import { JoseKey } from "@atproto/jwk-jose";
import { db } from "./db";
+ import { logger } from "./logger";
+
+ // Session timeout configuration (30 days in seconds)
+ const SESSION_TIMEOUT = 30 * 24 * 60 * 60; // 2592000 seconds
+ // OAuth state timeout (1 hour in seconds)
+ const STATE_TIMEOUT = 60 * 60; // 3600 seconds
const stateStore = {
async set(key: string, data: any) {
console.debug('[stateStore] set', key)
+ const expiresAt = Math.floor(Date.now() / 1000) + STATE_TIMEOUT;
await db`
- INSERT INTO oauth_states (key, data)
- VALUES (${key}, ${JSON.stringify(data)})
- ON CONFLICT (key) DO UPDATE SET data = EXCLUDED.data
+ INSERT INTO oauth_states (key, data, created_at, expires_at)
+ VALUES (${key}, ${JSON.stringify(data)}, EXTRACT(EPOCH FROM NOW()), ${expiresAt})
+ ON CONFLICT (key) DO UPDATE SET data = EXCLUDED.data, expires_at = ${expiresAt}
`;
},
async get(key: string) {
console.debug('[stateStore] get', key)
- const result = await db`SELECT data FROM oauth_states WHERE key = ${key}`;
- return result[0] ? JSON.parse(result[0].data) : undefined;
+ const now = Math.floor(Date.now() / 1000);
+ const result = await db`
+ SELECT data, expires_at
+ FROM oauth_states
+ WHERE key = ${key}
+ `;
+ if (!result[0]) return undefined;
+
+ // Check if expired
+ const expiresAt = Number(result[0].expires_at);
+ if (expiresAt && now > expiresAt) {
+ console.debug('[stateStore] State expired, deleting', key);
+ await db`DELETE FROM oauth_states WHERE key = ${key}`;
+ return undefined;
+ }
+
+ return JSON.parse(result[0].data);
},
async del(key: string) {
console.debug('[stateStore] del', key)
···
const sessionStore = {
async set(sub: string, data: any) {
console.debug('[sessionStore] set', sub)
+ const expiresAt = Math.floor(Date.now() / 1000) + SESSION_TIMEOUT;
await db`
- INSERT INTO oauth_sessions (sub, data)
- VALUES (${sub}, ${JSON.stringify(data)})
- ON CONFLICT (sub) DO UPDATE SET data = EXCLUDED.data, updated_at = EXTRACT(EPOCH FROM NOW())
+ INSERT INTO oauth_sessions (sub, data, updated_at, expires_at)
+ VALUES (${sub}, ${JSON.stringify(data)}, EXTRACT(EPOCH FROM NOW()), ${expiresAt})
+ ON CONFLICT (sub) DO UPDATE SET
+ data = EXCLUDED.data,
+ updated_at = EXTRACT(EPOCH FROM NOW()),
+ expires_at = ${expiresAt}
`;
},
async get(sub: string) {
console.debug('[sessionStore] get', sub)
- const result = await db`SELECT data FROM oauth_sessions WHERE sub = ${sub}`;
- return result[0] ? JSON.parse(result[0].data) : undefined;
+ const now = Math.floor(Date.now() / 1000);
+ const result = await db`
+ SELECT data, expires_at
+ FROM oauth_sessions
+ WHERE sub = ${sub}
+ `;
+ if (!result[0]) return undefined;
+
+ // Check if expired
+ const expiresAt = Number(result[0].expires_at);
+ if (expiresAt && now > expiresAt) {
+ logger.debug('[sessionStore] Session expired, deleting', sub);
+ await db`DELETE FROM oauth_sessions WHERE sub = ${sub}`;
+ return undefined;
+ }
+
+ return JSON.parse(result[0].data);
},
async del(sub: string) {
console.debug('[sessionStore] del', sub)
···
export { sessionStore };
+ // Cleanup expired sessions and states
+ export const cleanupExpiredSessions = async () => {
+ const now = Math.floor(Date.now() / 1000);
+ try {
+ const sessionsDeleted = await db`
+ DELETE FROM oauth_sessions WHERE expires_at < ${now}
+ `;
+ const statesDeleted = await db`
+ DELETE FROM oauth_states WHERE expires_at IS NOT NULL AND expires_at < ${now}
+ `;
+ logger.info(`[Cleanup] Deleted ${sessionsDeleted.length} expired sessions and ${statesDeleted.length} expired states`);
+ return { sessions: sessionsDeleted.length, states: statesDeleted.length };
+ } catch (err) {
+ logger.error('[Cleanup] Failed to cleanup expired data', err);
+ return { sessions: 0, states: 0 };
+ }
+ };
+
export const createClientMetadata = (config: { domain: `https://${string}`, clientName: string }): ClientMetadata => {
// Use editor.wisp.place for OAuth endpoints since that's where the API routes live
return {
···
if (!priv) return;
const kid = key.kid ?? crypto.randomUUID();
await db`
- INSERT INTO oauth_keys (kid, jwk)
- VALUES (${kid}, ${JSON.stringify(priv)})
+ INSERT INTO oauth_keys (kid, jwk, created_at)
+ VALUES (${kid}, ${JSON.stringify(priv)}, EXTRACT(EPOCH FROM NOW()))
ON CONFLICT (kid) DO UPDATE SET jwk = EXCLUDED.jwk
`;
};
const loadPersistedKeys = async (): Promise<JoseKey[]> => {
- const rows = await db`SELECT kid, jwk FROM oauth_keys ORDER BY kid`;
+ const rows = await db`SELECT kid, jwk, created_at FROM oauth_keys ORDER BY kid`;
const keys: JoseKey[] = [];
for (const row of rows) {
try {
···
const key = await JoseKey.fromImportable(obj as any, (obj as any).kid);
keys.push(key);
} catch (err) {
- console.error('Could not parse stored JWK', err);
+ logger.error('[OAuth] Could not parse stored JWK', err);
}
}
return keys;
···
let currentKeys: JoseKey[] = [];
export const getCurrentKeys = () => currentKeys;
+
+ // Key rotation - rotate keys older than 30 days (monthly rotation)
+ const KEY_MAX_AGE = 30 * 24 * 60 * 60; // 30 days in seconds
+
+ export const rotateKeysIfNeeded = async (): Promise<boolean> => {
+ const now = Math.floor(Date.now() / 1000);
+ const cutoffTime = now - KEY_MAX_AGE;
+
+ try {
+ // Find keys older than 30 days
+ const oldKeys = await db`
+ SELECT kid, created_at FROM oauth_keys
+ WHERE created_at IS NOT NULL AND created_at < ${cutoffTime}
+ ORDER BY created_at ASC
+ `;
+
+ if (oldKeys.length === 0) {
+ logger.debug('[KeyRotation] No keys need rotation');
+ return false;
+ }
+
+ logger.info(`[KeyRotation] Found ${oldKeys.length} key(s) older than 30 days, rotating oldest key`);
+
+ // Rotate the oldest key
+ const oldestKey = oldKeys[0];
+ const oldKid = oldestKey.kid;
+
+ // Generate new key with same kid
+ const newKey = await JoseKey.generate(['ES256'], oldKid);
+ await persistKey(newKey);
+
+ logger.info(`[KeyRotation] Rotated key ${oldKid}`);
+
+ // Reload keys into memory
+ currentKeys = await ensureKeys();
+
+ return true;
+ } catch (err) {
+ logger.error('[KeyRotation] Failed to rotate keys', err);
+ return false;
+ }
+ };
export const getOAuthClient = async (config: { domain: `https://${string}`, clientName: string }) => {
if (currentKeys.length === 0) {
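Note on oauth-client.ts: rotateKeysIfNeeded regenerates any key older than 30 days, reusing the existing kid, and then reloads the in-memory key set. A hedged sketch of generating a replacement key with the same call shape the diff uses (the kid value here is a placeholder):

    // Sketch only - same call shape as the rotation path: JoseKey.generate(['ES256'], kid).
    import { JoseKey } from '@atproto/jwk-jose';

    const kid = crypto.randomUUID();               // placeholder kid
    const replacement = await JoseKey.generate(['ES256'], kid);
    console.log('generated replacement key, kid =', replacement.kid ?? kid);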
+2 -1
src/lib/wisp-auth.ts
···
import { NodeOAuthClient } from "@atproto/oauth-client-node";
import type { OAuthSession } from "@atproto/oauth-client-node";
import { Cookie } from "elysia";
+ import { logger } from "./logger";
export interface AuthenticatedContext {
···
const session = await client.restore(did, "auto");
return session ? { did, session } : null;
} catch (err) {
- console.error('Authentication error:', err);
+ logger.error('[Auth] Authentication error', err);
return null;
}
};
+12 -11
src/routes/auth.ts
···
import { getSitesByDid, getDomainByDid } from '../lib/db'
import { syncSitesFromPDS } from '../lib/sync-sites'
import { authenticateRequest } from '../lib/wisp-auth'
+ import { logger } from '../lib/logger'
export const authRoutes = (client: NodeOAuthClient) => new Elysia()
.post('/api/auth/signin', async (c) => {
···
const url = await client.authorize(handle, { state })
return { url: url.toString() }
} catch (err) {
- console.error('Signin error', err)
+ logger.error('[Auth] Signin error', err)
return { error: 'Authentication failed' }
}
})
···
const { session } = await client.callback(params)
if (!session) {
- console.error('[Auth] OAuth callback failed: no session returned')
+ logger.error('[Auth] OAuth callback failed: no session returned')
return c.redirect('/?error=auth_failed')
}
···
cookieSession.did.value = session.did
// Sync sites from PDS to database cache
- console.log('[Auth] Syncing sites from PDS for', session.did)
+ logger.debug('[Auth] Syncing sites from PDS for', session.did)
try {
const syncResult = await syncSitesFromPDS(session.did, session)
- console.log(`[Auth] Sync complete: ${syncResult.synced} sites synced`)
+ logger.debug(`[Auth] Sync complete: ${syncResult.synced} sites synced`)
if (syncResult.errors.length > 0) {
- console.warn('[Auth] Sync errors:', syncResult.errors)
+ logger.debug('[Auth] Sync errors:', syncResult.errors)
}
} catch (err) {
- console.error('[Auth] Failed to sync sites:', err)
+ logger.error('[Auth] Failed to sync sites', err)
// Don't fail auth if sync fails, just log it
}
···
return c.redirect('/editor')
} catch (err) {
// This catches state validation failures and other OAuth errors
- console.error('[Auth] OAuth callback error:', err)
+ logger.error('[Auth] OAuth callback error', err)
return c.redirect('/?error=auth_failed')
}
})
···
if (did && typeof did === 'string') {
try {
await client.revoke(did)
- console.log('[Auth] Revoked OAuth session for', did)
+ logger.debug('[Auth] Revoked OAuth session for', did)
} catch (err) {
- console.error('[Auth] Failed to revoke session:', err)
+ logger.error('[Auth] Failed to revoke session', err)
// Continue with logout even if revoke fails
}
}
return { success: true }
} catch (err) {
- console.error('[Auth] Logout error:', err)
+ logger.error('[Auth] Logout error', err)
return { error: 'Logout failed' }
}
})
···
did: auth.did
}
} catch (err) {
- console.error('[Auth] Status check error:', err)
+ logger.error('[Auth] Status check error', err)
return { authenticated: false }
}
})
+11 -10
src/routes/domain.ts
···
} from '../lib/db'
import { createHash } from 'crypto'
import { verifyCustomDomain } from '../lib/dns-verify'
+ import { logger } from '../lib/logger'
export const domainRoutes = (client: NodeOAuthClient) =>
new Elysia({ prefix: '/api/domain' })
···
domain: toDomain(handle)
};
} catch (err) {
- console.error("domain/check error", err);
+ logger.error('[Domain] Check error', err);
return {
available: false
};
···
return { registered: false };
}
} catch (err) {
- console.error("domain/registered error", err);
+ logger.error('[Domain] Registered check error', err);
set.status = 500;
return { error: 'Failed to check domain' };
}
···
return { success: true, domain };
} catch (err) {
- console.error("domain/claim error", err);
+ logger.error('[Domain] Claim error', err);
throw new Error(`Failed to claim: ${err instanceof Error ? err.message : 'Unknown error'}`);
}
})
···
return { success: true, domain };
} catch (err) {
- console.error("domain/update error", err);
+ logger.error('[Domain] Update error', err);
throw new Error(`Failed to update: ${err instanceof Error ? err.message : 'Unknown error'}`);
}
})
···
verified: false
};
} catch (err) {
- console.error('custom domain add error', err);
+ logger.error('[Domain] Custom domain add error', err);
throw new Error(`Failed to add domain: ${err instanceof Error ? err.message : 'Unknown error'}`);
}
})
···
}
// Verify DNS records (TXT + CNAME)
- console.log(`Verifying custom domain: ${domainInfo.domain}`);
+ logger.debug(`[Domain] Verifying custom domain: ${domainInfo.domain}`);
const result = await verifyCustomDomain(domainInfo.domain, auth.did, id);
// Update verification status in database
···
found: result.found
};
} catch (err) {
- console.error('custom domain verify error', err);
+ logger.error('[Domain] Custom domain verify error', err);
throw new Error(`Failed to verify domain: ${err instanceof Error ? err.message : 'Unknown error'}`);
}
})
···
return { success: true };
} catch (err) {
- console.error('custom domain delete error', err);
+ logger.error('[Domain] Custom domain delete error', err);
throw new Error(`Failed to delete domain: ${err instanceof Error ? err.message : 'Unknown error'}`);
}
})
···
return { success: true };
} catch (err) {
- console.error('wisp domain map error', err);
+ logger.error('[Domain] Wisp domain map error', err);
throw new Error(`Failed to map site: ${err instanceof Error ? err.message : 'Unknown error'}`);
}
})
···
return { success: true };
} catch (err) {
- console.error('custom domain map error', err);
+ logger.error('[Domain] Custom domain map error', err);
throw new Error(`Failed to map site: ${err instanceof Error ? err.message : 'Unknown error'}`);
}
});
+8 -7
src/routes/user.ts
···
import { Agent } from '@atproto/api'
import { getSitesByDid, getDomainByDid, getCustomDomainsByDid, getWispDomainInfo } from '../lib/db'
import { syncSitesFromPDS } from '../lib/sync-sites'
+ import { logger } from '../lib/logger'
export const userRoutes = (client: NodeOAuthClient) =>
new Elysia({ prefix: '/api/user' })
···
sitesCount: sites.length
}
} catch (err) {
- console.error('user/status error', err)
+ logger.error('[User] Status error', err)
throw new Error('Failed to get user status')
}
})
···
const profile = await agent.getProfile({ actor: auth.did })
handle = profile.data.handle
} catch (err) {
- console.error('Failed to fetch profile:', err)
+ logger.error('[User] Failed to fetch profile', err)
}
return {
···
handle
}
} catch (err) {
- console.error('user/info error', err)
+ logger.error('[User] Info error', err)
throw new Error('Failed to get user info')
}
})
···
const sites = await getSitesByDid(auth.did)
return { sites }
} catch (err) {
- console.error('user/sites error', err)
+ logger.error('[User] Sites error', err)
throw new Error('Failed to get sites')
}
})
···
customDomains
}
} catch (err) {
- console.error('user/domains error', err)
+ logger.error('[User] Domains error', err)
throw new Error('Failed to get domains')
}
})
.post('/sync', async ({ auth }) => {
try {
- console.log('[User] Manual sync requested for', auth.did)
+ logger.debug('[User] Manual sync requested for', auth.did)
const result = await syncSitesFromPDS(auth.did, auth.session)
return {
···
errors: result.errors
}
} catch (err) {
- console.error('user/sync error', err)
+ logger.error('[User] Sync error', err)
throw new Error('Failed to sync sites')
}
})
+8 -15
src/routes/wisp.ts
···
updateFileBlobs
} from '../lib/wisp-utils'
import { upsertSite } from '../lib/db'
+ import { logger } from '../lib/logger'
- /**
- * Validate site name (rkey) according to AT Protocol specifications
- * - Must be 1-512 characters
- * - Can only contain: alphanumeric, dots, dashes, underscores, tildes, colons
- * - Cannot be just "." or ".."
- * - Cannot contain path traversal sequences
- */
function isValidSiteName(siteName: string): boolean {
if (!siteName || typeof siteName !== 'string') return false;
···
returnedMimeType: returnedBlobRef.mimeType
};
} catch (uploadError) {
- console.error(`❌ Upload failed for ${file.name}:`, uploadError);
+ logger.error('[Wisp] Upload failed for file', uploadError);
throw uploadError;
}
});
···
record: manifest
});
} catch (putRecordError: any) {
- console.error('\n❌ Failed to create record on PDS');
- console.error('Error:', putRecordError.message);
+ logger.error('[Wisp] Failed to create record on PDS');
+ logger.error('[Wisp] Record creation error', putRecordError);
throw putRecordError;
}
···
return result;
} catch (error) {
- console.error('❌ Upload error:', error);
- console.error('Error details:', {
+ logger.error('[Wisp] Upload error', error);
+ logger.errorWithContext('[Wisp] Upload error details', {
message: error instanceof Error ? error.message : 'Unknown error',
- stack: error instanceof Error ? error.stack : undefined,
name: error instanceof Error ? error.name : undefined
- });
+ }, error);
throw new Error(`Failed to upload files: ${error instanceof Error ? error.message : 'Unknown error'}`);
}
}
- )
+ )