Monorepo for wisp.place, a static site hosting service built on top of the AT Protocol.

Check the existing manifest and calculate CIDs, then compare them to decide which blobs need to be re-uploaded.
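
A minimal sketch of that flow, assuming the compressFile, computeCID and extractBlobMap helpers added below in src/lib/wisp-utils.ts; planUpload and its signature are hypothetical names used only for illustration, not code from this change:

import { Agent } from '@atproto/api';
import { compressFile, computeCID, extractBlobMap } from '../lib/wisp-utils';

// Decide, per file, whether the blob already on the PDS can be reused or a fresh upload is needed.
async function planUpload(
  agent: Agent,
  did: string,
  siteName: string,
  files: { name: string; content: Buffer }[]
) {
  // Index the existing manifest's blobs by path -> CID, if the record exists at all.
  let existing = new Map<string, { blobRef: any; cid: string }>();
  try {
    const record = await agent.com.atproto.repo.getRecord({
      repo: did,
      collection: 'place.wisp.fs',
      rkey: siteName
    });
    existing = extractBlobMap((record.data.value as any).root);
  } catch {
    // No record yet: every file is a first-time upload.
  }

  return files.map((file) => {
    // Same pipeline as the upload route: deterministic gzip, base64, then CID the exact bytes the PDS will store.
    const gzipped = compressFile(file.content);
    const payload = Buffer.from(gzipped.toString('base64'), 'binary');
    const cid = computeCID(payload);
    const prior = existing.get(file.name); // the real route also checks the path with its base folder stripped
    return prior && prior.cid === cid
      ? { path: file.name, action: 'reuse' as const, blobRef: prior.blobRef }
      : { path: file.name, action: 'upload' as const, payload };
  });
}

The actual route in src/routes/wisp.ts performs the same comparison inline, falling back to a full upload when no place.wisp.fs record exists yet.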

+18 -1
bun.lock
···
"elysia": "latest",
"iron-session": "^8.0.4",
"lucide-react": "^0.546.0",
"react": "^19.2.0",
"react-dom": "^19.2.0",
"react-shiki": "^0.9.0",
···
"ms": ["ms@2.0.0", "", {}, "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A=="],
-
"multiformats": ["multiformats@9.9.0", "", {}, "sha512-HoMUjhH9T8DDBNT+6xzkrd9ga/XiBI4xLr58LJACwK6G3HTOPeMz4nB4KJs33L2BelrIJa7P0VuNaVF3hMYfjg=="],
"negotiator": ["negotiator@0.6.3", "", {}, "sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg=="],
···
"zwitch": ["zwitch@2.0.4", "", {}, "sha512-bXE4cR/kVZhKZX/RjPEflHaKVhUVl85noU3v6b8apfQEc1x4A+zBxjZ4lN8LqGd6WZ3dl98pY4o717VFmoPp+A=="],
"@radix-ui/react-collection/@radix-ui/react-slot": ["@radix-ui/react-slot@1.2.3", "", { "dependencies": { "@radix-ui/react-compose-refs": "1.1.2" }, "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react"] }, "sha512-aeNmHnBxbi2St0au6VBVC7JXFlhLlOnvIIlePNniyUNAClzmtAUEY8/pBiK3iHjufOlwA+c20/8jngo7xcrg8A=="],
"@radix-ui/react-dialog/@radix-ui/react-slot": ["@radix-ui/react-slot@1.2.3", "", { "dependencies": { "@radix-ui/react-compose-refs": "1.1.2" }, "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react"] }, "sha512-aeNmHnBxbi2St0au6VBVC7JXFlhLlOnvIIlePNniyUNAClzmtAUEY8/pBiK3iHjufOlwA+c20/8jngo7xcrg8A=="],
···
"send/encodeurl": ["encodeurl@1.0.2", "", {}, "sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w=="],
"send/ms": ["ms@2.1.3", "", {}, "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA=="],
"@tokenizer/inflate/debug/ms": ["ms@2.1.3", "", {}, "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA=="],
···
"elysia": "latest",
"iron-session": "^8.0.4",
"lucide-react": "^0.546.0",
+
"multiformats": "^13.4.1",
"react": "^19.2.0",
"react-dom": "^19.2.0",
"react-shiki": "^0.9.0",
···
"ms": ["ms@2.0.0", "", {}, "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A=="],
+
"multiformats": ["multiformats@13.4.1", "", {}, "sha512-VqO6OSvLrFVAYYjgsr8tyv62/rCQhPgsZUXLTqoFLSgdkgiUYKYeArbt1uWLlEpkjxQe+P0+sHlbPEte1Bi06Q=="],
"negotiator": ["negotiator@0.6.3", "", {}, "sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg=="],
···
"zwitch": ["zwitch@2.0.4", "", {}, "sha512-bXE4cR/kVZhKZX/RjPEflHaKVhUVl85noU3v6b8apfQEc1x4A+zBxjZ4lN8LqGd6WZ3dl98pY4o717VFmoPp+A=="],
+
"@atproto/api/multiformats": ["multiformats@9.9.0", "", {}, "sha512-HoMUjhH9T8DDBNT+6xzkrd9ga/XiBI4xLr58LJACwK6G3HTOPeMz4nB4KJs33L2BelrIJa7P0VuNaVF3hMYfjg=="],
+
+
"@atproto/common/multiformats": ["multiformats@9.9.0", "", {}, "sha512-HoMUjhH9T8DDBNT+6xzkrd9ga/XiBI4xLr58LJACwK6G3HTOPeMz4nB4KJs33L2BelrIJa7P0VuNaVF3hMYfjg=="],
+
+
"@atproto/common-web/multiformats": ["multiformats@9.9.0", "", {}, "sha512-HoMUjhH9T8DDBNT+6xzkrd9ga/XiBI4xLr58LJACwK6G3HTOPeMz4nB4KJs33L2BelrIJa7P0VuNaVF3hMYfjg=="],
+
+
"@atproto/jwk/multiformats": ["multiformats@9.9.0", "", {}, "sha512-HoMUjhH9T8DDBNT+6xzkrd9ga/XiBI4xLr58LJACwK6G3HTOPeMz4nB4KJs33L2BelrIJa7P0VuNaVF3hMYfjg=="],
+
+
"@atproto/lexicon/multiformats": ["multiformats@9.9.0", "", {}, "sha512-HoMUjhH9T8DDBNT+6xzkrd9ga/XiBI4xLr58LJACwK6G3HTOPeMz4nB4KJs33L2BelrIJa7P0VuNaVF3hMYfjg=="],
+
+
"@atproto/oauth-client/multiformats": ["multiformats@9.9.0", "", {}, "sha512-HoMUjhH9T8DDBNT+6xzkrd9ga/XiBI4xLr58LJACwK6G3HTOPeMz4nB4KJs33L2BelrIJa7P0VuNaVF3hMYfjg=="],
+
+
"@ipld/dag-cbor/multiformats": ["multiformats@9.9.0", "", {}, "sha512-HoMUjhH9T8DDBNT+6xzkrd9ga/XiBI4xLr58LJACwK6G3HTOPeMz4nB4KJs33L2BelrIJa7P0VuNaVF3hMYfjg=="],
+
"@radix-ui/react-collection/@radix-ui/react-slot": ["@radix-ui/react-slot@1.2.3", "", { "dependencies": { "@radix-ui/react-compose-refs": "1.1.2" }, "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react"] }, "sha512-aeNmHnBxbi2St0au6VBVC7JXFlhLlOnvIIlePNniyUNAClzmtAUEY8/pBiK3iHjufOlwA+c20/8jngo7xcrg8A=="],
"@radix-ui/react-dialog/@radix-ui/react-slot": ["@radix-ui/react-slot@1.2.3", "", { "dependencies": { "@radix-ui/react-compose-refs": "1.1.2" }, "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react"] }, "sha512-aeNmHnBxbi2St0au6VBVC7JXFlhLlOnvIIlePNniyUNAClzmtAUEY8/pBiK3iHjufOlwA+c20/8jngo7xcrg8A=="],
···
"send/encodeurl": ["encodeurl@1.0.2", "", {}, "sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w=="],
"send/ms": ["ms@2.1.3", "", {}, "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA=="],
+
+
"uint8arrays/multiformats": ["multiformats@9.9.0", "", {}, "sha512-HoMUjhH9T8DDBNT+6xzkrd9ga/XiBI4xLr58LJACwK6G3HTOPeMz4nB4KJs33L2BelrIJa7P0VuNaVF3hMYfjg=="],
"@tokenizer/inflate/debug/ms": ["ms@2.1.3", "", {}, "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA=="],
+1
package.json
···
"elysia": "latest",
"iron-session": "^8.0.4",
"lucide-react": "^0.546.0",
"react": "^19.2.0",
"react-dom": "^19.2.0",
"react-shiki": "^0.9.0",
···
"elysia": "latest",
"iron-session": "^8.0.4",
"lucide-react": "^0.546.0",
+
"multiformats": "^13.4.1",
"react": "^19.2.0",
"react-dom": "^19.2.0",
"react-shiki": "^0.9.0",
+2 -2
public/editor/editor.tsx
···
<div className="p-4 bg-muted/30 rounded-lg border-l-4 border-yellow-500/50">
<div className="flex items-start gap-2">
-
<AlertCircle className="w-4 h-4 text-yellow-600 dark:text-yellow-400 mt-0.5 flex-shrink-0" />
<div className="flex-1 space-y-1">
<p className="text-xs font-semibold text-yellow-600 dark:text-yellow-400">
Note about sites.wisp.place URLs
···
{skippedFiles.length > 0 && (
<div className="p-4 bg-yellow-500/10 border border-yellow-500/20 rounded-lg">
<div className="flex items-start gap-2 text-yellow-600 dark:text-yellow-400 mb-2">
-
<AlertCircle className="w-4 h-4 mt-0.5 flex-shrink-0" />
<div className="flex-1">
<span className="font-medium">
{skippedFiles.length} file{skippedFiles.length > 1 ? 's' : ''} skipped
···
<div className="p-4 bg-muted/30 rounded-lg border-l-4 border-yellow-500/50">
<div className="flex items-start gap-2">
+
<AlertCircle className="w-4 h-4 text-yellow-600 dark:text-yellow-400 mt-0.5 shrink-0" />
<div className="flex-1 space-y-1">
<p className="text-xs font-semibold text-yellow-600 dark:text-yellow-400">
Note about sites.wisp.place URLs
···
{skippedFiles.length > 0 && (
<div className="p-4 bg-yellow-500/10 border border-yellow-500/20 rounded-lg">
<div className="flex items-start gap-2 text-yellow-600 dark:text-yellow-400 mb-2">
+
<AlertCircle className="w-4 h-4 mt-0.5 shrink-0" />
<div className="flex-1">
<span className="font-medium">
{skippedFiles.length} file{skippedFiles.length > 1 ? 's' : ''} skipped
-7
src/lib/db.ts
···
const stateStore = {
async set(key: string, data: any) {
-
console.debug('[stateStore] set', key)
const expiresAt = Math.floor(Date.now() / 1000) + STATE_TIMEOUT;
await db`
INSERT INTO oauth_states (key, data, created_at, expires_at)
···
`;
},
async get(key: string) {
-
console.debug('[stateStore] get', key)
const now = Math.floor(Date.now() / 1000);
const result = await db`
SELECT data, expires_at
···
// Check if expired
const expiresAt = Number(result[0].expires_at);
if (expiresAt && now > expiresAt) {
-
console.debug('[stateStore] State expired, deleting', key);
await db`DELETE FROM oauth_states WHERE key = ${key}`;
return undefined;
}
···
return JSON.parse(result[0].data);
},
async del(key: string) {
-
console.debug('[stateStore] del', key)
await db`DELETE FROM oauth_states WHERE key = ${key}`;
}
};
const sessionStore = {
async set(sub: string, data: any) {
-
console.debug('[sessionStore] set', sub)
const expiresAt = Math.floor(Date.now() / 1000) + SESSION_TIMEOUT;
await db`
INSERT INTO oauth_sessions (sub, data, updated_at, expires_at)
···
`;
},
async get(sub: string) {
-
console.debug('[sessionStore] get', sub)
const now = Math.floor(Date.now() / 1000);
const result = await db`
SELECT data, expires_at
···
return JSON.parse(result[0].data);
},
async del(sub: string) {
-
console.debug('[sessionStore] del', sub)
await db`DELETE FROM oauth_sessions WHERE sub = ${sub}`;
}
};
···
const stateStore = {
async set(key: string, data: any) {
const expiresAt = Math.floor(Date.now() / 1000) + STATE_TIMEOUT;
await db`
INSERT INTO oauth_states (key, data, created_at, expires_at)
···
`;
},
async get(key: string) {
const now = Math.floor(Date.now() / 1000);
const result = await db`
SELECT data, expires_at
···
// Check if expired
const expiresAt = Number(result[0].expires_at);
if (expiresAt && now > expiresAt) {
await db`DELETE FROM oauth_states WHERE key = ${key}`;
return undefined;
}
···
return JSON.parse(result[0].data);
},
async del(key: string) {
await db`DELETE FROM oauth_states WHERE key = ${key}`;
}
};
const sessionStore = {
async set(sub: string, data: any) {
const expiresAt = Math.floor(Date.now() / 1000) + SESSION_TIMEOUT;
await db`
INSERT INTO oauth_sessions (sub, data, updated_at, expires_at)
···
`;
},
async get(sub: string) {
const now = Math.floor(Date.now() / 1000);
const result = await db`
SELECT data, expires_at
···
return JSON.parse(result[0].data);
},
async del(sub: string) {
await db`DELETE FROM oauth_sessions WHERE sub = ${sub}`;
}
};
-1
src/lib/oauth-client.ts
···
`;
},
async get(sub: string) {
-
console.debug('[sessionStore] get', sub)
const now = Math.floor(Date.now() / 1000);
const result = await db`
SELECT data, expires_at
···
`;
},
async get(sub: string) {
const now = Math.floor(Date.now() / 1000);
const result = await db`
SELECT data, expires_at
+360
src/lib/wisp-utils.test.ts
···
processUploadedFiles,
createManifest,
updateFileBlobs,
type UploadedFile,
type FileUploadResult,
} from './wisp-utils'
···
}
})
})
···
processUploadedFiles,
createManifest,
updateFileBlobs,
+
computeCID,
+
extractBlobMap,
type UploadedFile,
type FileUploadResult,
} from './wisp-utils'
···
}
})
})
+
+
describe('computeCID', () => {
+
test('should compute CID for gzipped+base64 encoded content', () => {
+
// This simulates the actual flow: gzip -> base64 -> compute CID
+
const originalContent = Buffer.from('Hello, World!')
+
const gzipped = compressFile(originalContent)
+
const base64Content = Buffer.from(gzipped.toString('base64'), 'binary')
+
+
const cid = computeCID(base64Content)
+
+
// CID should be a valid CIDv1 string starting with 'bafkrei'
+
expect(cid).toMatch(/^bafkrei[a-z0-9]+$/)
+
expect(cid.length).toBeGreaterThan(10)
+
})
+
+
test('should compute deterministic CIDs for identical content', () => {
+
const content = Buffer.from('Test content for CID calculation')
+
const gzipped = compressFile(content)
+
const base64Content = Buffer.from(gzipped.toString('base64'), 'binary')
+
+
const cid1 = computeCID(base64Content)
+
const cid2 = computeCID(base64Content)
+
+
expect(cid1).toBe(cid2)
+
})
+
+
test('should compute different CIDs for different content', () => {
+
const content1 = Buffer.from('Content A')
+
const content2 = Buffer.from('Content B')
+
+
const gzipped1 = compressFile(content1)
+
const gzipped2 = compressFile(content2)
+
+
const base64Content1 = Buffer.from(gzipped1.toString('base64'), 'binary')
+
const base64Content2 = Buffer.from(gzipped2.toString('base64'), 'binary')
+
+
const cid1 = computeCID(base64Content1)
+
const cid2 = computeCID(base64Content2)
+
+
expect(cid1).not.toBe(cid2)
+
})
+
+
test('should handle empty content', () => {
+
const emptyContent = Buffer.from('')
+
const gzipped = compressFile(emptyContent)
+
const base64Content = Buffer.from(gzipped.toString('base64'), 'binary')
+
+
const cid = computeCID(base64Content)
+
+
expect(cid).toMatch(/^bafkrei[a-z0-9]+$/)
+
})
+
+
test('should compute same CID as PDS for base64-encoded content', () => {
+
// Test that binary encoding produces correct bytes for CID calculation
+
const testContent = Buffer.from('<!DOCTYPE html><html><body>Hello</body></html>')
+
const gzipped = compressFile(testContent)
+
const base64Content = Buffer.from(gzipped.toString('base64'), 'binary')
+
+
// Compute CID twice to ensure consistency
+
const cid1 = computeCID(base64Content)
+
const cid2 = computeCID(base64Content)
+
+
expect(cid1).toBe(cid2)
+
expect(cid1).toMatch(/^bafkrei/)
+
})
+
+
test('should use binary encoding for base64 strings', () => {
+
// This test verifies we're using the correct encoding method
+
// For base64 strings, 'binary' encoding ensures each character becomes exactly one byte
+
const content = Buffer.from('Test content')
+
const gzipped = compressFile(content)
+
const base64String = gzipped.toString('base64')
+
+
// Using binary encoding (what we use in production)
+
const base64Content = Buffer.from(base64String, 'binary')
+
+
// Verify the length matches the base64 string length
+
expect(base64Content.length).toBe(base64String.length)
+
+
// Verify CID is computed correctly
+
const cid = computeCID(base64Content)
+
expect(cid).toMatch(/^bafkrei/)
+
})
+
})
+
+
describe('extractBlobMap', () => {
+
test('should extract blob map from flat directory structure', () => {
+
const mockCid = CID.parse(TEST_CID_STRING)
+
const mockBlob = new BlobRef(mockCid, 'text/html', 100)
+
+
const directory: Directory = {
+
$type: 'place.wisp.fs#directory',
+
type: 'directory',
+
entries: [
+
{
+
name: 'index.html',
+
node: {
+
$type: 'place.wisp.fs#file',
+
type: 'file',
+
blob: mockBlob,
+
},
+
},
+
],
+
}
+
+
const blobMap = extractBlobMap(directory)
+
+
expect(blobMap.size).toBe(1)
+
expect(blobMap.has('index.html')).toBe(true)
+
+
const entry = blobMap.get('index.html')
+
expect(entry?.cid).toBe(TEST_CID_STRING)
+
expect(entry?.blobRef).toBe(mockBlob)
+
})
+
+
test('should extract blob map from nested directory structure', () => {
+
const mockCid1 = CID.parse(TEST_CID_STRING)
+
const mockCid2 = CID.parse('bafkreiabaduc3573q6snt2xgxzpglwuaojkzflocncrh2vj5j3jykdpqhi')
+
+
const mockBlob1 = new BlobRef(mockCid1, 'text/html', 100)
+
const mockBlob2 = new BlobRef(mockCid2, 'text/css', 50)
+
+
const directory: Directory = {
+
$type: 'place.wisp.fs#directory',
+
type: 'directory',
+
entries: [
+
{
+
name: 'index.html',
+
node: {
+
$type: 'place.wisp.fs#file',
+
type: 'file',
+
blob: mockBlob1,
+
},
+
},
+
{
+
name: 'assets',
+
node: {
+
$type: 'place.wisp.fs#directory',
+
type: 'directory',
+
entries: [
+
{
+
name: 'styles.css',
+
node: {
+
$type: 'place.wisp.fs#file',
+
type: 'file',
+
blob: mockBlob2,
+
},
+
},
+
],
+
},
+
},
+
],
+
}
+
+
const blobMap = extractBlobMap(directory)
+
+
expect(blobMap.size).toBe(2)
+
expect(blobMap.has('index.html')).toBe(true)
+
expect(blobMap.has('assets/styles.css')).toBe(true)
+
+
expect(blobMap.get('index.html')?.cid).toBe(TEST_CID_STRING)
+
expect(blobMap.get('assets/styles.css')?.cid).toBe('bafkreiabaduc3573q6snt2xgxzpglwuaojkzflocncrh2vj5j3jykdpqhi')
+
})
+
+
test('should handle deeply nested directory structures', () => {
+
const mockCid = CID.parse(TEST_CID_STRING)
+
const mockBlob = new BlobRef(mockCid, 'text/javascript', 200)
+
+
const directory: Directory = {
+
$type: 'place.wisp.fs#directory',
+
type: 'directory',
+
entries: [
+
{
+
name: 'src',
+
node: {
+
$type: 'place.wisp.fs#directory',
+
type: 'directory',
+
entries: [
+
{
+
name: 'lib',
+
node: {
+
$type: 'place.wisp.fs#directory',
+
type: 'directory',
+
entries: [
+
{
+
name: 'utils.js',
+
node: {
+
$type: 'place.wisp.fs#file',
+
type: 'file',
+
blob: mockBlob,
+
},
+
},
+
],
+
},
+
},
+
],
+
},
+
},
+
],
+
}
+
+
const blobMap = extractBlobMap(directory)
+
+
expect(blobMap.size).toBe(1)
+
expect(blobMap.has('src/lib/utils.js')).toBe(true)
+
expect(blobMap.get('src/lib/utils.js')?.cid).toBe(TEST_CID_STRING)
+
})
+
+
test('should handle empty directory', () => {
+
const directory: Directory = {
+
$type: 'place.wisp.fs#directory',
+
type: 'directory',
+
entries: [],
+
}
+
+
const blobMap = extractBlobMap(directory)
+
+
expect(blobMap.size).toBe(0)
+
})
+
+
test('should correctly extract CID from BlobRef instances (not plain objects)', () => {
+
// This test verifies the fix: AT Protocol SDK returns BlobRef instances,
+
// not plain objects with $type and $link properties
+
const mockCid = CID.parse(TEST_CID_STRING)
+
const mockBlob = new BlobRef(mockCid, 'application/octet-stream', 500)
+
+
const directory: Directory = {
+
$type: 'place.wisp.fs#directory',
+
type: 'directory',
+
entries: [
+
{
+
name: 'test.bin',
+
node: {
+
$type: 'place.wisp.fs#file',
+
type: 'file',
+
blob: mockBlob,
+
},
+
},
+
],
+
}
+
+
const blobMap = extractBlobMap(directory)
+
+
// The fix: we call .toString() on the CID instance instead of accessing $link
+
expect(blobMap.get('test.bin')?.cid).toBe(TEST_CID_STRING)
+
expect(blobMap.get('test.bin')?.blobRef.ref.toString()).toBe(TEST_CID_STRING)
+
})
+
+
test('should handle multiple files in same directory', () => {
+
const mockCid1 = CID.parse(TEST_CID_STRING)
+
const mockCid2 = CID.parse('bafkreiabaduc3573q6snt2xgxzpglwuaojkzflocncrh2vj5j3jykdpqhi')
+
const mockCid3 = CID.parse('bafkreieb3ixgchss44kw7xiavnkns47emdfsqbhcdfluo3p6n3o53fl3vq')
+
+
const mockBlob1 = new BlobRef(mockCid1, 'image/png', 1000)
+
const mockBlob2 = new BlobRef(mockCid2, 'image/png', 2000)
+
const mockBlob3 = new BlobRef(mockCid3, 'image/png', 3000)
+
+
const directory: Directory = {
+
$type: 'place.wisp.fs#directory',
+
type: 'directory',
+
entries: [
+
{
+
name: 'images',
+
node: {
+
$type: 'place.wisp.fs#directory',
+
type: 'directory',
+
entries: [
+
{
+
name: 'logo.png',
+
node: {
+
$type: 'place.wisp.fs#file',
+
type: 'file',
+
blob: mockBlob1,
+
},
+
},
+
{
+
name: 'banner.png',
+
node: {
+
$type: 'place.wisp.fs#file',
+
type: 'file',
+
blob: mockBlob2,
+
},
+
},
+
{
+
name: 'icon.png',
+
node: {
+
$type: 'place.wisp.fs#file',
+
type: 'file',
+
blob: mockBlob3,
+
},
+
},
+
],
+
},
+
},
+
],
+
}
+
+
const blobMap = extractBlobMap(directory)
+
+
expect(blobMap.size).toBe(3)
+
expect(blobMap.has('images/logo.png')).toBe(true)
+
expect(blobMap.has('images/banner.png')).toBe(true)
+
expect(blobMap.has('images/icon.png')).toBe(true)
+
})
+
+
test('should handle mixed directory and file structure', () => {
+
const mockCid1 = CID.parse(TEST_CID_STRING)
+
const mockCid2 = CID.parse('bafkreiabaduc3573q6snt2xgxzpglwuaojkzflocncrh2vj5j3jykdpqhi')
+
const mockCid3 = CID.parse('bafkreieb3ixgchss44kw7xiavnkns47emdfsqbhcdfluo3p6n3o53fl3vq')
+
+
const directory: Directory = {
+
$type: 'place.wisp.fs#directory',
+
type: 'directory',
+
entries: [
+
{
+
name: 'index.html',
+
node: {
+
$type: 'place.wisp.fs#file',
+
type: 'file',
+
blob: new BlobRef(mockCid1, 'text/html', 100),
+
},
+
},
+
{
+
name: 'assets',
+
node: {
+
$type: 'place.wisp.fs#directory',
+
type: 'directory',
+
entries: [
+
{
+
name: 'styles.css',
+
node: {
+
$type: 'place.wisp.fs#file',
+
type: 'file',
+
blob: new BlobRef(mockCid2, 'text/css', 50),
+
},
+
},
+
],
+
},
+
},
+
{
+
name: 'README.md',
+
node: {
+
$type: 'place.wisp.fs#file',
+
type: 'file',
+
blob: new BlobRef(mockCid3, 'text/markdown', 200),
+
},
+
},
+
],
+
}
+
+
const blobMap = extractBlobMap(directory)
+
+
expect(blobMap.size).toBe(3)
+
expect(blobMap.has('index.html')).toBe(true)
+
expect(blobMap.has('assets/styles.css')).toBe(true)
+
expect(blobMap.has('README.md')).toBe(true)
+
})
+
})
+65 -2
src/lib/wisp-utils.ts
···
import type { Record, Directory, File, Entry } from "../lexicons/types/place/wisp/fs";
import { validateRecord } from "../lexicons/types/place/wisp/fs";
import { gzipSync } from 'zlib';
export interface UploadedFile {
name: string;
···
}
/**
-
* Compress a file using gzip
*/
export function compressFile(content: Buffer): Buffer {
-
return gzipSync(content, { level: 9 });
}
/**
···
const directoryMap = new Map<string, UploadedFile[]>();
for (const file of files) {
// Remove any base folder name from the path
const normalizedPath = file.name.replace(/^[^\/]*\//, '');
const parts = normalizedPath.split('/');
···
return result;
}
···
import type { Record, Directory, File, Entry } from "../lexicons/types/place/wisp/fs";
import { validateRecord } from "../lexicons/types/place/wisp/fs";
import { gzipSync } from 'zlib';
+
import { CID } from 'multiformats/cid';
+
import { sha256 } from 'multiformats/hashes/sha2';
+
import * as raw from 'multiformats/codecs/raw';
+
import { createHash } from 'crypto';
+
import * as mf from 'multiformats';
export interface UploadedFile {
name: string;
···
}
/**
+
* Compress a file using gzip with deterministic output
+
* Sets mtime to 0 to ensure identical content produces identical compressed output
*/
export function compressFile(content: Buffer): Buffer {
+
return gzipSync(content, {
+
level: 9,
+
mtime: 0 // Fixed timestamp for deterministic compression
+
});
}
/**
···
const directoryMap = new Map<string, UploadedFile[]>();
for (const file of files) {
+
// Skip undefined/null files (defensive)
+
if (!file || !file.name) {
+
console.error('Skipping undefined or invalid file in processUploadedFiles');
+
continue;
+
}
+
// Remove any base folder name from the path
const normalizedPath = file.name.replace(/^[^\/]*\//, '');
const parts = normalizedPath.split('/');
···
return result;
}
+
+
/**
+
* Compute CID (Content Identifier) for blob content
+
* Uses the same algorithm as AT Protocol: CIDv1 with raw codec and SHA-256
+
* Based on @atproto/common/src/ipld.ts sha256RawToCid implementation
+
*/
+
export function computeCID(content: Buffer): string {
+
// Use node crypto to compute sha256 hash (same as AT Protocol)
+
const hash = createHash('sha256').update(content).digest();
+
// Create digest object from hash bytes
+
const digest = mf.digest.create(sha256.code, hash);
+
// Create CIDv1 with raw codec
+
const cid = CID.createV1(raw.code, digest);
+
return cid.toString();
+
}
+
+
/**
+
* Extract blob information from a directory tree
+
* Returns a map of file paths to their blob refs and CIDs
+
*/
+
export function extractBlobMap(
+
directory: Directory,
+
currentPath: string = ''
+
): Map<string, { blobRef: BlobRef; cid: string }> {
+
const blobMap = new Map<string, { blobRef: BlobRef; cid: string }>();
+
+
for (const entry of directory.entries) {
+
const fullPath = currentPath ? `${currentPath}/${entry.name}` : entry.name;
+
+
if ('type' in entry.node && entry.node.type === 'file') {
+
const fileNode = entry.node as File;
+
// AT Protocol SDK returns BlobRef class instances, not plain objects
+
// The ref is a CID instance that can be converted to string
+
if (fileNode.blob && fileNode.blob.ref) {
+
const cidString = fileNode.blob.ref.toString();
+
blobMap.set(fullPath, {
+
blobRef: fileNode.blob,
+
cid: cidString
+
});
+
}
+
} else if ('type' in entry.node && entry.node.type === 'directory') {
+
const subMap = extractBlobMap(entry.node as Directory, fullPath);
+
subMap.forEach((value, key) => blobMap.set(key, value));
+
}
+
}
+
+
return blobMap;
+
}
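
As a quick sanity check on the new computeCID helper, the synchronous node:crypto path it uses should agree with the CID the multiformats hasher produces for the same bytes. A small sketch, assuming multiformats ^13 as pinned in package.json (top-level await implies an ES module context):

import { CID } from 'multiformats/cid';
import { sha256 } from 'multiformats/hashes/sha2';
import * as raw from 'multiformats/codecs/raw';
import * as mf from 'multiformats';
import { createHash } from 'node:crypto';

const payload = Buffer.from('hello wisp');

// Synchronous path (what computeCID does): hash with node crypto, wrap as a multiformats digest.
const syncDigest = mf.digest.create(sha256.code, createHash('sha256').update(payload).digest());
const syncCid = CID.createV1(raw.code, syncDigest).toString();

// Reference path using the multiformats hasher directly.
const hasherCid = CID.createV1(raw.code, await sha256.digest(payload)).toString();

console.log(syncCid === hasherCid); // expected: true; both are CIDv1 with the raw codec and sha2-256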
+130 -10
src/routes/wisp.ts
···
createManifest,
updateFileBlobs,
shouldCompressFile,
-
compressFile
} from '../lib/wisp-utils'
import { upsertSite } from '../lib/db'
import { logger } from '../lib/observability'
···
siteName: string;
files: File | File[]
};
try {
if (!siteName) {
···
// Create agent with OAuth session
const agent = new Agent((url, init) => auth.session.fetchHandler(url, init))
// Convert File objects to UploadedFile format
// Elysia gives us File objects directly, handle both single file and array
···
const uploadedFiles: UploadedFile[] = [];
const skippedFiles: Array<{ name: string; reason: string }> = [];
-
for (let i = 0; i < fileArray.length; i++) {
const file = fileArray[i];
// Skip files that are too large (limit to 100MB per file)
const maxSize = MAX_FILE_SIZE; // 100MB
···
// Compress and base64 encode ALL files
const compressedContent = compressFile(originalContent);
// Base64 encode the gzipped content to prevent PDS content sniffing
-
const base64Content = Buffer.from(compressedContent.toString('base64'), 'utf-8');
const compressionRatio = (compressedContent.length / originalContent.length * 100).toFixed(1);
logger.info(`Compressing ${file.name}: ${originalContent.length} -> ${compressedContent.length} bytes (${compressionRatio}%), base64: ${base64Content.length} bytes`);
uploadedFiles.push({
name: file.name,
-
content: base64Content,
mimeType: originalMimeType,
size: base64Content.length,
compressed: true,
···
}
// Process files into directory structure
-
const { directory, fileCount } = processUploadedFiles(uploadedFiles);
-
// Upload files as blobs in parallel
// For compressed files, we upload as octet-stream and store the original MIME type in metadata
// For text/html files, we also use octet-stream as a workaround for PDS image pipeline issues
-
const uploadPromises = uploadedFiles.map(async (file, i) => {
try {
// If compressed, always upload as octet-stream
// Otherwise, workaround: PDS incorrectly processes text/html through image pipeline
const uploadMimeType = file.compressed || file.mimeType.startsWith('text/html')
···
: file.mimeType;
const compressionInfo = file.compressed ? ' (gzipped)' : '';
-
logger.info(`[File Upload] Uploading file: ${file.name} (original: ${file.mimeType}, sending as: ${uploadMimeType}, ${file.size} bytes${compressionInfo})`);
const uploadResult = await agent.com.atproto.repo.uploadBlob(
file.content,
···
},
filePath: file.name,
sentMimeType: file.mimeType,
-
returnedMimeType: returnedBlobRef.mimeType
};
} catch (uploadError) {
logger.error('Upload failed for file', uploadError);
···
// Wait for all uploads to complete
const uploadedBlobs = await Promise.all(uploadPromises);
// Extract results and file paths in correct order
const uploadResults: FileUploadResult[] = uploadedBlobs.map(blob => blob.result);
const filePaths: string[] = uploadedBlobs.map(blob => blob.filePath);
// Update directory with file blobs
const updatedDirectory = updateFileBlobs(directory, uploadResults, filePaths);
// Create manifest
const manifest = createManifest(siteName, updatedDirectory, fileCount);
// Use site name as rkey
const rkey = siteName;
let record;
try {
record = await agent.com.atproto.repo.putRecord({
repo: auth.did,
collection: 'place.wisp.fs',
rkey: rkey,
record: manifest
});
} catch (putRecordError: any) {
logger.error('Failed to create record on PDS', putRecordError);
throw putRecordError;
···
fileCount,
siteName,
skippedFiles,
-
uploadedCount: uploadedFiles.length
};
return result;
} catch (error) {
logger.error('Upload error', error, {
message: error instanceof Error ? error.message : 'Unknown error',
name: error instanceof Error ? error.name : undefined
···
createManifest,
updateFileBlobs,
shouldCompressFile,
+
compressFile,
+
computeCID,
+
extractBlobMap
} from '../lib/wisp-utils'
import { upsertSite } from '../lib/db'
import { logger } from '../lib/observability'
···
siteName: string;
files: File | File[]
};
+
+
console.log('=== UPLOAD FILES START ===');
+
console.log('Site name:', siteName);
+
console.log('Files received:', Array.isArray(files) ? files.length : 'single file');
try {
if (!siteName) {
···
// Create agent with OAuth session
const agent = new Agent((url, init) => auth.session.fetchHandler(url, init))
+
console.log('Agent created for DID:', auth.did);
+
+
// Try to fetch existing record to enable incremental updates
+
let existingBlobMap = new Map<string, { blobRef: any; cid: string }>();
+
console.log('Attempting to fetch existing record...');
+
try {
+
const rkey = siteName;
+
const existingRecord = await agent.com.atproto.repo.getRecord({
+
repo: auth.did,
+
collection: 'place.wisp.fs',
+
rkey: rkey
+
});
+
console.log('Existing record found!');
+
+
if (existingRecord.data.value && typeof existingRecord.data.value === 'object' && 'root' in existingRecord.data.value) {
+
const manifest = existingRecord.data.value as any;
+
existingBlobMap = extractBlobMap(manifest.root);
+
console.log(`Found existing manifest with ${existingBlobMap.size} files for incremental update`);
+
logger.info(`Found existing manifest with ${existingBlobMap.size} files for incremental update`);
+
}
+
} catch (error: any) {
+
console.log('No existing record found or error:', error?.message || error);
+
// Record doesn't exist yet, this is a new site
+
if (error?.status !== 400 && error?.error !== 'RecordNotFound') {
+
logger.warn('Failed to fetch existing record, proceeding with full upload', error);
+
}
+
}
// Convert File objects to UploadedFile format
// Elysia gives us File objects directly, handle both single file and array
···
const uploadedFiles: UploadedFile[] = [];
const skippedFiles: Array<{ name: string; reason: string }> = [];
+
console.log('Processing files, count:', fileArray.length);
for (let i = 0; i < fileArray.length; i++) {
const file = fileArray[i];
+
console.log(`Processing file ${i + 1}/${fileArray.length}:`, file.name, file.size, 'bytes');
// Skip files that are too large (limit to 100MB per file)
const maxSize = MAX_FILE_SIZE; // 100MB
···
// Compress and base64 encode ALL files
const compressedContent = compressFile(originalContent);
// Base64 encode the gzipped content to prevent PDS content sniffing
+
// Convert base64 string to bytes using binary encoding (each char becomes exactly one byte)
+
// This is what PDS receives and computes CID on
+
const base64Content = Buffer.from(compressedContent.toString('base64'), 'binary');
const compressionRatio = (compressedContent.length / originalContent.length * 100).toFixed(1);
+
console.log(`Compressing ${file.name}: ${originalContent.length} -> ${compressedContent.length} bytes (${compressionRatio}%), base64: ${base64Content.length} bytes`);
logger.info(`Compressing ${file.name}: ${originalContent.length} -> ${compressedContent.length} bytes (${compressionRatio}%), base64: ${base64Content.length} bytes`);
uploadedFiles.push({
name: file.name,
+
content: base64Content, // This is the gzipped+base64 content that will be uploaded and CID-computed
mimeType: originalMimeType,
size: base64Content.length,
compressed: true,
···
}
// Process files into directory structure
+
console.log('Processing uploaded files into directory structure...');
+
console.log('uploadedFiles array length:', uploadedFiles.length);
+
console.log('uploadedFiles contents:', uploadedFiles.map((f, i) => `${i}: ${f?.name || 'UNDEFINED'}`));
+
// Filter out any undefined/null/invalid entries (defensive)
+
const validUploadedFiles = uploadedFiles.filter((f, i) => {
+
if (!f) {
+
console.error(`Filtering out undefined/null file at index ${i}`);
+
return false;
+
}
+
if (!f.name) {
+
console.error(`Filtering out file with no name at index ${i}:`, f);
+
return false;
+
}
+
if (!f.content) {
+
console.error(`Filtering out file with no content at index ${i}:`, f.name);
+
return false;
+
}
+
return true;
+
});
+
if (validUploadedFiles.length !== uploadedFiles.length) {
+
console.warn(`Filtered out ${uploadedFiles.length - validUploadedFiles.length} invalid files`);
+
}
+
console.log('validUploadedFiles length:', validUploadedFiles.length);
+
+
const { directory, fileCount } = processUploadedFiles(validUploadedFiles);
+
console.log('Directory structure created, file count:', fileCount);
+
+
// Upload files as blobs in parallel (or reuse existing blobs with matching CIDs)
+
console.log('Starting blob upload/reuse phase...');
// For compressed files, we upload as octet-stream and store the original MIME type in metadata
// For text/html files, we also use octet-stream as a workaround for PDS image pipeline issues
+
const uploadPromises = validUploadedFiles.map(async (file, i) => {
try {
+
// Skip undefined files (shouldn't happen after filter, but defensive)
+
if (!file || !file.name) {
+
console.error(`ERROR: Undefined file at index ${i} in validUploadedFiles!`);
+
throw new Error(`Undefined file at index ${i}`);
+
}
+
+
// Compute CID for this file to check if it already exists
+
// Note: file.content is already gzipped+base64 encoded
+
const fileCID = computeCID(file.content);
+
+
// Normalize the file path for comparison (remove base folder prefix like "cobblemon/")
+
const normalizedPath = file.name.replace(/^[^\/]*\//, '');
+
+
// Check if we have an existing blob with the same CID
+
// Try both the normalized path and the full path
+
const existingBlob = existingBlobMap.get(normalizedPath) || existingBlobMap.get(file.name);
+
+
if (existingBlob && existingBlob.cid === fileCID) {
+
// Reuse existing blob - no need to upload
+
logger.info(`[File Upload] Reusing existing blob for: ${file.name} (CID: ${fileCID})`);
+
+
return {
+
result: {
+
hash: existingBlob.cid,
+
blobRef: existingBlob.blobRef,
+
...(file.compressed && {
+
encoding: 'gzip' as const,
+
mimeType: file.originalMimeType || file.mimeType,
+
base64: true
+
})
+
},
+
filePath: file.name,
+
sentMimeType: file.mimeType,
+
returnedMimeType: existingBlob.blobRef.mimeType,
+
reused: true
+
};
+
}
+
+
// File is new or changed - upload it
// If compressed, always upload as octet-stream
// Otherwise, workaround: PDS incorrectly processes text/html through image pipeline
const uploadMimeType = file.compressed || file.mimeType.startsWith('text/html')
···
: file.mimeType;
const compressionInfo = file.compressed ? ' (gzipped)' : '';
+
logger.info(`[File Upload] Uploading new/changed file: ${file.name} (original: ${file.mimeType}, sending as: ${uploadMimeType}, ${file.size} bytes${compressionInfo}, CID: ${fileCID})`);
const uploadResult = await agent.com.atproto.repo.uploadBlob(
file.content,
···
},
filePath: file.name,
sentMimeType: file.mimeType,
+
returnedMimeType: returnedBlobRef.mimeType,
+
reused: false
};
} catch (uploadError) {
logger.error('Upload failed for file', uploadError);
···
// Wait for all uploads to complete
const uploadedBlobs = await Promise.all(uploadPromises);
+
// Count reused vs uploaded blobs
+
const reusedCount = uploadedBlobs.filter(b => (b as any).reused).length;
+
const uploadedCount = uploadedBlobs.filter(b => !(b as any).reused).length;
+
console.log(`Blob statistics: ${reusedCount} reused, ${uploadedCount} uploaded, ${uploadedBlobs.length} total`);
+
logger.info(`Blob statistics: ${reusedCount} reused, ${uploadedCount} uploaded, ${uploadedBlobs.length} total`);
+
// Extract results and file paths in correct order
const uploadResults: FileUploadResult[] = uploadedBlobs.map(blob => blob.result);
const filePaths: string[] = uploadedBlobs.map(blob => blob.filePath);
// Update directory with file blobs
+
console.log('Updating directory with blob references...');
const updatedDirectory = updateFileBlobs(directory, uploadResults, filePaths);
// Create manifest
+
console.log('Creating manifest...');
const manifest = createManifest(siteName, updatedDirectory, fileCount);
+
console.log('Manifest created successfully');
// Use site name as rkey
const rkey = siteName;
let record;
try {
+
console.log('Putting record to PDS with rkey:', rkey);
record = await agent.com.atproto.repo.putRecord({
repo: auth.did,
collection: 'place.wisp.fs',
rkey: rkey,
record: manifest
});
+
console.log('Record successfully created on PDS:', record.data.uri);
} catch (putRecordError: any) {
+
console.error('FAILED to create record on PDS:', putRecordError);
logger.error('Failed to create record on PDS', putRecordError);
throw putRecordError;
···
fileCount,
siteName,
skippedFiles,
+
uploadedCount: validUploadedFiles.length
};
+
console.log('=== UPLOAD FILES COMPLETE ===');
return result;
} catch (error) {
+
console.error('=== UPLOAD ERROR ===');
+
console.error('Error details:', error);
+
console.error('Stack trace:', error instanceof Error ? error.stack : 'N/A');
logger.error('Upload error', error, {
message: error instanceof Error ? error.message : 'Unknown error',
name: error instanceof Error ? error.name : undefined