Monorepo for wisp.place, a static site hosting service built on top of the AT Protocol.
wisp.place
1import { Elysia } from 'elysia'
2import { requireAuth, type AuthenticatedContext } from '../lib/wisp-auth'
3import { NodeOAuthClient } from '@atproto/oauth-client-node'
4import { Agent } from '@atproto/api'
5import {
6 type UploadedFile,
7 type FileUploadResult,
8 processUploadedFiles,
9 createManifest,
10 updateFileBlobs,
11 shouldCompressFile,
12 compressFile,
13 computeCID,
14 extractBlobMap
15} from '../lib/wisp-utils'
16import { upsertSite } from '../lib/db'
17import { logger } from '../lib/observability'
18import { validateRecord } from '../lexicons/types/place/wisp/fs'
19import { MAX_SITE_SIZE, MAX_FILE_SIZE, MAX_FILE_COUNT } from '../lib/constants'
20
21function isValidSiteName(siteName: string): boolean {
22 if (!siteName || typeof siteName !== 'string') return false;
23
24 // Length check (AT Protocol rkey limit)
25 if (siteName.length < 1 || siteName.length > 512) return false;
26
27 // Check for path traversal
28 if (siteName === '.' || siteName === '..') return false;
29 if (siteName.includes('/') || siteName.includes('\\')) return false;
30 if (siteName.includes('\0')) return false;
31
32 // AT Protocol rkey format: alphanumeric, dots, dashes, underscores, tildes, colons
33 // Based on NSID format rules
34 const validRkeyPattern = /^[a-zA-Z0-9._~:-]+$/;
35 if (!validRkeyPattern.test(siteName)) return false;
36
37 return true;
38}
39
/**
 * Builds the `/wisp` route group: authenticated uploads that publish a
 * `place.wisp.fs` record (a manifest whose tree references uploaded blobs)
 * to the user's PDS, then caches the site in the local database.
 *
 * @param client - OAuth client used by `requireAuth` to restore the session.
 * @param cookieSecret - Secret used to sign/verify the `did` cookie.
 */
export const wispRoutes = (client: NodeOAuthClient, cookieSecret: string) =>
  new Elysia({
    prefix: '/wisp',
    cookie: {
      secrets: cookieSecret,
      sign: ['did'] // only the `did` cookie is signed
    }
  })
    // Authenticate every request in this group up front; `auth` exposes the
    // user's DID and an OAuth session whose fetchHandler signs PDS requests.
    .derive(async ({ cookie }) => {
      const auth = await requireAuth(client, cookie)
      return { auth }
    })
    .post(
      '/upload-files',
      async ({ body, auth }) => {
        // Elysia delivers either a single File or an array, depending on how
        // many parts the multipart form contained.
        const { siteName, files } = body as {
          siteName: string;
          files: File | File[]
        };

        console.log('=== UPLOAD FILES START ===');
        console.log('Site name:', siteName);
        console.log('Files received:', Array.isArray(files) ? files.length : 'single file');

        try {
          if (!siteName) {
            throw new Error('Site name is required')
          }

          // The site name doubles as the record key (rkey) on the PDS, so it
          // must satisfy AT Protocol rkey constraints.
          if (!isValidSiteName(siteName)) {
            throw new Error('Invalid site name: must be 1-512 characters and contain only alphanumeric, dots, dashes, underscores, tildes, and colons')
          }

          // Check if files were provided
          const hasFiles = files && (Array.isArray(files) ? files.length > 0 : !!files);

          if (!hasFiles) {
            // No files at all: publish an empty manifest so the site still
            // exists and can be filled in later.
            // NOTE(review): this branch duplicates the `uploadedFiles.length
            // === 0` branch further down — consider extracting a helper.
            // Create agent with OAuth session
            const agent = new Agent((url, init) => auth.session.fetchHandler(url, init))

            // Create empty manifest
            const emptyManifest = {
              $type: 'place.wisp.fs',
              site: siteName,
              root: {
                type: 'directory',
                entries: []
              },
              fileCount: 0,
              createdAt: new Date().toISOString()
            };

            // Validate the manifest against the place.wisp.fs lexicon before
            // writing it to the repo.
            const validationResult = validateRecord(emptyManifest);
            if (!validationResult.success) {
              throw new Error(`Invalid manifest: ${validationResult.error?.message || 'Validation failed'}`);
            }

            // Use site name as rkey
            const rkey = siteName;

            // putRecord upserts: it creates or overwrites the record at rkey.
            const record = await agent.com.atproto.repo.putRecord({
              repo: auth.did,
              collection: 'place.wisp.fs',
              rkey: rkey,
              record: emptyManifest
            });

            // Mirror the site into the local DB cache.
            await upsertSite(auth.did, rkey, siteName);

            return {
              success: true,
              uri: record.data.uri,
              cid: record.data.cid,
              fileCount: 0,
              siteName
            };
          }

          // Create agent with OAuth session
          const agent = new Agent((url, init) => auth.session.fetchHandler(url, init))
          console.log('Agent created for DID:', auth.did);

          // Try to fetch existing record to enable incremental updates:
          // unchanged files (same CID) can reuse their existing blob refs
          // instead of being re-uploaded.
          let existingBlobMap = new Map<string, { blobRef: any; cid: string }>();
          console.log('Attempting to fetch existing record...');
          try {
            const rkey = siteName;
            const existingRecord = await agent.com.atproto.repo.getRecord({
              repo: auth.did,
              collection: 'place.wisp.fs',
              rkey: rkey
            });
            console.log('Existing record found!');

            // Guard the shape before trusting it: only manifests with a
            // `root` directory are usable for incremental updates.
            if (existingRecord.data.value && typeof existingRecord.data.value === 'object' && 'root' in existingRecord.data.value) {
              const manifest = existingRecord.data.value as any;
              existingBlobMap = extractBlobMap(manifest.root);
              console.log(`Found existing manifest with ${existingBlobMap.size} files for incremental update`);
              logger.info(`Found existing manifest with ${existingBlobMap.size} files for incremental update`);
            }
          } catch (error: any) {
            console.log('No existing record found or error:', error?.message || error);
            // Record doesn't exist yet, this is a new site.
            // NOTE(review): the 400/RecordNotFound heuristic assumes the PDS
            // error shape — verify against the XRPC error contract. Any other
            // failure is logged but still falls through to a full upload.
            if (error?.status !== 400 && error?.error !== 'RecordNotFound') {
              logger.warn('Failed to fetch existing record, proceeding with full upload', error);
            }
          }

          // Convert File objects to UploadedFile format
          // Elysia gives us File objects directly, handle both single file and array
          const fileArray = Array.isArray(files) ? files : [files];
          const uploadedFiles: UploadedFile[] = [];
          // Files rejected client-visibly (too large); reported in the response.
          const skippedFiles: Array<{ name: string; reason: string }> = [];

          console.log('Processing files, count:', fileArray.length);

          for (let i = 0; i < fileArray.length; i++) {
            const file = fileArray[i];
            console.log(`Processing file ${i + 1}/${fileArray.length}:`, file.name, file.size, 'bytes');

            // Skip files that are too large (limit to 100MB per file)
            const maxSize = MAX_FILE_SIZE; // 100MB
            if (file.size > maxSize) {
              skippedFiles.push({
                name: file.name,
                reason: `file too large (${(file.size / 1024 / 1024).toFixed(2)}MB, max 100MB)`
              });
              continue;
            }

            const arrayBuffer = await file.arrayBuffer();
            const originalContent = Buffer.from(arrayBuffer);
            const originalMimeType = file.type || 'application/octet-stream';

            // Compress and base64 encode ALL files
            const compressedContent = compressFile(originalContent);
            // Base64 encode the gzipped content to prevent PDS content sniffing
            // Convert base64 string to bytes using binary encoding (each char becomes exactly one byte)
            // This is what PDS receives and computes CID on
            const base64Content = Buffer.from(compressedContent.toString('base64'), 'binary');
            const compressionRatio = (compressedContent.length / originalContent.length * 100).toFixed(1);
            console.log(`Compressing ${file.name}: ${originalContent.length} -> ${compressedContent.length} bytes (${compressionRatio}%), base64: ${base64Content.length} bytes`);
            logger.info(`Compressing ${file.name}: ${originalContent.length} -> ${compressedContent.length} bytes (${compressionRatio}%), base64: ${base64Content.length} bytes`);

            uploadedFiles.push({
              name: file.name,
              content: base64Content, // This is the gzipped+base64 content that will be uploaded and CID-computed
              mimeType: originalMimeType,
              size: base64Content.length,
              compressed: true,
              originalMimeType
            });
          }

          // Check total size limit (300MB). Note: sizes are post-compression
          // + base64, since that is what actually gets uploaded.
          const totalSize = uploadedFiles.reduce((sum, file) => sum + file.size, 0);
          const maxTotalSize = MAX_SITE_SIZE; // 300MB

          if (totalSize > maxTotalSize) {
            throw new Error(`Total upload size ${(totalSize / 1024 / 1024).toFixed(2)}MB exceeds 300MB limit`);
          }

          // Check file count limit (2000 files)
          if (uploadedFiles.length > MAX_FILE_COUNT) {
            throw new Error(`File count ${uploadedFiles.length} exceeds ${MAX_FILE_COUNT} files limit`);
          }

          if (uploadedFiles.length === 0) {
            // Everything was skipped (e.g. all files over the size limit):
            // still publish an empty manifest and report the skips.
            // NOTE(review): duplicates the `!hasFiles` branch above except for
            // `skippedFiles`/`message` in the response.

            // Create empty manifest
            const emptyManifest = {
              $type: 'place.wisp.fs',
              site: siteName,
              root: {
                type: 'directory',
                entries: []
              },
              fileCount: 0,
              createdAt: new Date().toISOString()
            };

            // Validate the manifest
            const validationResult = validateRecord(emptyManifest);
            if (!validationResult.success) {
              throw new Error(`Invalid manifest: ${validationResult.error?.message || 'Validation failed'}`);
            }

            // Use site name as rkey
            const rkey = siteName;

            const record = await agent.com.atproto.repo.putRecord({
              repo: auth.did,
              collection: 'place.wisp.fs',
              rkey: rkey,
              record: emptyManifest
            });

            await upsertSite(auth.did, rkey, siteName);

            return {
              success: true,
              uri: record.data.uri,
              cid: record.data.cid,
              fileCount: 0,
              siteName,
              skippedFiles,
              message: 'Site created but no valid web files were found to upload'
            };
          }

          // Process files into directory structure
          console.log('Processing uploaded files into directory structure...');
          console.log('uploadedFiles array length:', uploadedFiles.length);
          console.log('uploadedFiles contents:', uploadedFiles.map((f, i) => `${i}: ${f?.name || 'UNDEFINED'}`));

          // Filter out any undefined/null/invalid entries (defensive)
          const validUploadedFiles = uploadedFiles.filter((f, i) => {
            if (!f) {
              console.error(`Filtering out undefined/null file at index ${i}`);
              return false;
            }
            if (!f.name) {
              console.error(`Filtering out file with no name at index ${i}:`, f);
              return false;
            }
            if (!f.content) {
              console.error(`Filtering out file with no content at index ${i}:`, f.name);
              return false;
            }
            return true;
          });
          if (validUploadedFiles.length !== uploadedFiles.length) {
            console.warn(`Filtered out ${uploadedFiles.length - validUploadedFiles.length} invalid files`);
          }
          console.log('validUploadedFiles length:', validUploadedFiles.length);

          const { directory, fileCount } = processUploadedFiles(validUploadedFiles);
          console.log('Directory structure created, file count:', fileCount);

          // Upload files as blobs in parallel (or reuse existing blobs with matching CIDs)
          console.log('Starting blob upload/reuse phase...');
          // For compressed files, we upload as octet-stream and store the original MIME type in metadata
          // For text/html files, we also use octet-stream as a workaround for PDS image pipeline issues
          const uploadPromises = validUploadedFiles.map(async (file, i) => {
            try {
              // Skip undefined files (shouldn't happen after filter, but defensive)
              if (!file || !file.name) {
                console.error(`ERROR: Undefined file at index ${i} in validUploadedFiles!`);
                throw new Error(`Undefined file at index ${i}`);
              }

              // Compute CID for this file to check if it already exists
              // Note: file.content is already gzipped+base64 encoded
              const fileCID = computeCID(file.content);

              // Normalize the file path for comparison (remove base folder prefix like "cobblemon/")
              const normalizedPath = file.name.replace(/^[^\/]*\//, '');

              // Check if we have an existing blob with the same CID
              // Try both the normalized path and the full path
              const existingBlob = existingBlobMap.get(normalizedPath) || existingBlobMap.get(file.name);

              if (existingBlob && existingBlob.cid === fileCID) {
                // Reuse existing blob - no need to upload
                logger.info(`[File Upload] Reusing existing blob for: ${file.name} (CID: ${fileCID})`);

                return {
                  result: {
                    hash: existingBlob.cid,
                    blobRef: existingBlob.blobRef,
                    // Decompression metadata so the gateway can serve the
                    // original bytes/MIME type back out.
                    ...(file.compressed && {
                      encoding: 'gzip' as const,
                      mimeType: file.originalMimeType || file.mimeType,
                      base64: true
                    })
                  },
                  filePath: file.name,
                  sentMimeType: file.mimeType,
                  returnedMimeType: existingBlob.blobRef.mimeType,
                  reused: true
                };
              }

              // File is new or changed - upload it
              // If compressed, always upload as octet-stream
              // Otherwise, workaround: PDS incorrectly processes text/html through image pipeline
              const uploadMimeType = file.compressed || file.mimeType.startsWith('text/html')
                ? 'application/octet-stream'
                : file.mimeType;

              const compressionInfo = file.compressed ? ' (gzipped)' : '';
              logger.info(`[File Upload] Uploading new/changed file: ${file.name} (original: ${file.mimeType}, sending as: ${uploadMimeType}, ${file.size} bytes${compressionInfo}, CID: ${fileCID})`);

              const uploadResult = await agent.com.atproto.repo.uploadBlob(
                file.content,
                {
                  encoding: uploadMimeType
                }
              );

              const returnedBlobRef = uploadResult.data.blob;

              // Use the blob ref exactly as returned from PDS
              return {
                result: {
                  hash: returnedBlobRef.ref.toString(),
                  blobRef: returnedBlobRef,
                  ...(file.compressed && {
                    encoding: 'gzip' as const,
                    mimeType: file.originalMimeType || file.mimeType,
                    base64: true
                  })
                },
                filePath: file.name,
                sentMimeType: file.mimeType,
                returnedMimeType: returnedBlobRef.mimeType,
                reused: false
              };
            } catch (uploadError) {
              logger.error('Upload failed for file', uploadError);
              // Rethrow so Promise.all fails the whole request — a partial
              // manifest would reference blobs that were never uploaded.
              throw uploadError;
            }
          });

          // Wait for all uploads to complete
          const uploadedBlobs = await Promise.all(uploadPromises);

          // Count reused vs uploaded blobs
          const reusedCount = uploadedBlobs.filter(b => (b as any).reused).length;
          const uploadedCount = uploadedBlobs.filter(b => !(b as any).reused).length;
          console.log(`Blob statistics: ${reusedCount} reused, ${uploadedCount} uploaded, ${uploadedBlobs.length} total`);
          logger.info(`Blob statistics: ${reusedCount} reused, ${uploadedCount} uploaded, ${uploadedBlobs.length} total`);

          // Extract results and file paths in correct order (Promise.all
          // preserves input order, so results align with validUploadedFiles).
          const uploadResults: FileUploadResult[] = uploadedBlobs.map(blob => blob.result);
          const filePaths: string[] = uploadedBlobs.map(blob => blob.filePath);

          // Update directory with file blobs
          console.log('Updating directory with blob references...');
          const updatedDirectory = updateFileBlobs(directory, uploadResults, filePaths);

          // Create manifest
          console.log('Creating manifest...');
          const manifest = createManifest(siteName, updatedDirectory, fileCount);
          console.log('Manifest created successfully');

          // Use site name as rkey
          const rkey = siteName;

          let record;
          try {
            console.log('Putting record to PDS with rkey:', rkey);
            record = await agent.com.atproto.repo.putRecord({
              repo: auth.did,
              collection: 'place.wisp.fs',
              rkey: rkey,
              record: manifest
            });
            console.log('Record successfully created on PDS:', record.data.uri);
          } catch (putRecordError: any) {
            console.error('FAILED to create record on PDS:', putRecordError);
            logger.error('Failed to create record on PDS', putRecordError);

            throw putRecordError;
          }

          // Store site in database cache
          await upsertSite(auth.did, rkey, siteName);

          const result = {
            success: true,
            uri: record.data.uri,
            cid: record.data.cid,
            fileCount,
            siteName,
            skippedFiles,
            // NOTE(review): this counts all processed files (including reused
            // blobs), not just newly-uploaded ones — confirm intended meaning.
            uploadedCount: validUploadedFiles.length
          };

          console.log('=== UPLOAD FILES COMPLETE ===');
          return result;
        } catch (error) {
          console.error('=== UPLOAD ERROR ===');
          console.error('Error details:', error);
          console.error('Stack trace:', error instanceof Error ? error.stack : 'N/A');
          logger.error('Upload error', error, {
            message: error instanceof Error ? error.message : 'Unknown error',
            name: error instanceof Error ? error.name : undefined
          });
          // Surface a single wrapped error; Elysia turns the throw into an
          // error response for the client.
          throw new Error(`Failed to upload files: ${error instanceof Error ? error.message : 'Unknown error'}`);
        }
      }
    )