Monorepo for wisp.place, a static site hosting service built on top of the AT Protocol.
wisp.place
1import { Elysia } from 'elysia'
2import { requireAuth, type AuthenticatedContext } from '../lib/wisp-auth'
3import { NodeOAuthClient } from '@atproto/oauth-client-node'
4import { Agent } from '@atproto/api'
5import {
6 type UploadedFile,
7 type FileUploadResult,
8 processUploadedFiles,
9 createManifest,
10 updateFileBlobs,
11 shouldCompressFile,
12 compressFile,
13 computeCID,
14 extractBlobMap
15} from '../lib/wisp-utils'
16import { upsertSite } from '../lib/db'
17import { logger } from '../lib/observability'
18import { validateRecord } from '../lexicons/types/place/wisp/fs'
19import { MAX_SITE_SIZE, MAX_FILE_SIZE, MAX_FILE_COUNT } from '../lib/constants'
20
21function isValidSiteName(siteName: string): boolean {
22 if (!siteName || typeof siteName !== 'string') return false;
23
24 // Length check (AT Protocol rkey limit)
25 if (siteName.length < 1 || siteName.length > 512) return false;
26
27 // Check for path traversal
28 if (siteName === '.' || siteName === '..') return false;
29 if (siteName.includes('/') || siteName.includes('\\')) return false;
30 if (siteName.includes('\0')) return false;
31
32 // AT Protocol rkey format: alphanumeric, dots, dashes, underscores, tildes, colons
33 // Based on NSID format rules
34 const validRkeyPattern = /^[a-zA-Z0-9._~:-]+$/;
35 if (!validRkeyPattern.test(siteName)) return false;
36
37 return true;
38}
39
/**
 * Builds the `/wisp` route group.
 *
 * Every route derives an authenticated OAuth session (`auth`) from the request
 * cookies via `requireAuth`; unauthenticated requests fail inside `derive`.
 *
 * POST /wisp/upload-files — multipart upload of a full site:
 *  1. validates the site name (used verbatim as the AT Protocol rkey),
 *  2. fetches any existing `place.wisp.fs` record so unchanged files can reuse
 *     their existing blobs (incremental update by CID comparison),
 *  3. gzips + base64-encodes every file, uploads new/changed ones as blobs,
 *  4. writes the manifest record via putRecord and mirrors the site into the
 *     local DB cache (`upsertSite`).
 *
 * @param client OAuth client used to resolve the session from cookies
 * @returns an Elysia plugin instance mounted at the `/wisp` prefix
 */
export const wispRoutes = (client: NodeOAuthClient) =>
  new Elysia({ prefix: '/wisp' })
    // Resolve the OAuth session once per request; `auth` carries the DID and
    // a session whose fetchHandler signs outgoing PDS requests.
    .derive(async ({ cookie }) => {
      const auth = await requireAuth(client, cookie)
      return { auth }
    })
    .post(
      '/upload-files',
      async ({ body, auth }) => {
        // Elysia's multipart parsing yields a single File when exactly one
        // file field was sent, otherwise an array — both shapes handled below.
        const { siteName, files } = body as {
          siteName: string;
          files: File | File[]
        };

        console.log('=== UPLOAD FILES START ===');
        console.log('Site name:', siteName);
        console.log('Files received:', Array.isArray(files) ? files.length : 'single file');

        try {
          if (!siteName) {
            throw new Error('Site name is required')
          }

          // siteName becomes the record rkey, so it must satisfy rkey rules.
          if (!isValidSiteName(siteName)) {
            throw new Error('Invalid site name: must be 1-512 characters and contain only alphanumeric, dots, dashes, underscores, tildes, and colons')
          }

          // Check if files were provided
          const hasFiles = files && (Array.isArray(files) ? files.length > 0 : !!files);

          // NOTE(review): this empty-site branch is duplicated almost verbatim
          // at the `uploadedFiles.length === 0` check further down — consider
          // extracting a shared helper.
          if (!hasFiles) {
            // Create agent with OAuth session
            const agent = new Agent((url, init) => auth.session.fetchHandler(url, init))

            // Create empty manifest
            const emptyManifest = {
              $type: 'place.wisp.fs',
              site: siteName,
              root: {
                type: 'directory',
                entries: []
              },
              fileCount: 0,
              createdAt: new Date().toISOString()
            };

            // Validate the manifest
            const validationResult = validateRecord(emptyManifest);
            if (!validationResult.success) {
              throw new Error(`Invalid manifest: ${validationResult.error?.message || 'Validation failed'}`);
            }

            // Use site name as rkey
            const rkey = siteName;

            // putRecord upserts: creates the record or overwrites an existing
            // one at the same rkey.
            const record = await agent.com.atproto.repo.putRecord({
              repo: auth.did,
              collection: 'place.wisp.fs',
              rkey: rkey,
              record: emptyManifest
            });

            // Mirror into the local DB cache only after the PDS write succeeds.
            await upsertSite(auth.did, rkey, siteName);

            return {
              success: true,
              uri: record.data.uri,
              cid: record.data.cid,
              fileCount: 0,
              siteName
            };
          }

          // Create agent with OAuth session
          const agent = new Agent((url, init) => auth.session.fetchHandler(url, init))
          console.log('Agent created for DID:', auth.did);

          // Try to fetch existing record to enable incremental updates
          // Map of file path -> { blobRef, cid } extracted from the current
          // manifest, used below to skip re-uploading unchanged files.
          let existingBlobMap = new Map<string, { blobRef: any; cid: string }>();
          console.log('Attempting to fetch existing record...');
          try {
            const rkey = siteName;
            const existingRecord = await agent.com.atproto.repo.getRecord({
              repo: auth.did,
              collection: 'place.wisp.fs',
              rkey: rkey
            });
            console.log('Existing record found!');

            if (existingRecord.data.value && typeof existingRecord.data.value === 'object' && 'root' in existingRecord.data.value) {
              const manifest = existingRecord.data.value as any;
              existingBlobMap = extractBlobMap(manifest.root);
              console.log(`Found existing manifest with ${existingBlobMap.size} files for incremental update`);
              logger.info(`Found existing manifest with ${existingBlobMap.size} files for incremental update`);
            }
          } catch (error: any) {
            console.log('No existing record found or error:', error?.message || error);
            // Record doesn't exist yet, this is a new site
            // (PDS reports a missing record as status 400 / "RecordNotFound";
            // anything else is an unexpected failure worth logging.)
            if (error?.status !== 400 && error?.error !== 'RecordNotFound') {
              logger.warn('Failed to fetch existing record, proceeding with full upload', error);
            }
          }

          // Convert File objects to UploadedFile format
          // Elysia gives us File objects directly, handle both single file and array
          const fileArray = Array.isArray(files) ? files : [files];
          const uploadedFiles: UploadedFile[] = [];
          // Files rejected client-visibly (currently only oversize files);
          // returned to the caller so the UI can report them.
          const skippedFiles: Array<{ name: string; reason: string }> = [];

          console.log('Processing files, count:', fileArray.length);

          // Sequential (awaited) loop: reads and compresses one file at a time,
          // bounding peak memory for large uploads.
          for (let i = 0; i < fileArray.length; i++) {
            const file = fileArray[i];
            console.log(`Processing file ${i + 1}/${fileArray.length}:`, file.name, file.size, 'bytes');

            // Skip files that are too large (limit to 100MB per file)
            const maxSize = MAX_FILE_SIZE; // 100MB
            if (file.size > maxSize) {
              skippedFiles.push({
                name: file.name,
                reason: `file too large (${(file.size / 1024 / 1024).toFixed(2)}MB, max 100MB)`
              });
              continue;
            }

            const arrayBuffer = await file.arrayBuffer();
            const originalContent = Buffer.from(arrayBuffer);
            // Fall back to a generic type when the browser sent none.
            const originalMimeType = file.type || 'application/octet-stream';

            // Compress and base64 encode ALL files
            const compressedContent = compressFile(originalContent);
            // Base64 encode the gzipped content to prevent PDS content sniffing
            // Convert base64 string to bytes using binary encoding (each char becomes exactly one byte)
            // This is what PDS receives and computes CID on
            const base64Content = Buffer.from(compressedContent.toString('base64'), 'binary');
            const compressionRatio = (compressedContent.length / originalContent.length * 100).toFixed(1);
            console.log(`Compressing ${file.name}: ${originalContent.length} -> ${compressedContent.length} bytes (${compressionRatio}%), base64: ${base64Content.length} bytes`);
            logger.info(`Compressing ${file.name}: ${originalContent.length} -> ${compressedContent.length} bytes (${compressionRatio}%), base64: ${base64Content.length} bytes`);

            uploadedFiles.push({
              name: file.name,
              content: base64Content, // This is the gzipped+base64 content that will be uploaded and CID-computed
              mimeType: originalMimeType,
              // NOTE: size is the encoded (gzip+base64) byte count, not the
              // original file size — the limits below apply to encoded bytes.
              size: base64Content.length,
              compressed: true,
              originalMimeType
            });
          }

          // Check total size limit (300MB)
          const totalSize = uploadedFiles.reduce((sum, file) => sum + file.size, 0);
          const maxTotalSize = MAX_SITE_SIZE; // 300MB

          if (totalSize > maxTotalSize) {
            throw new Error(`Total upload size ${(totalSize / 1024 / 1024).toFixed(2)}MB exceeds 300MB limit`);
          }

          // Check file count limit (2000 files)
          if (uploadedFiles.length > MAX_FILE_COUNT) {
            throw new Error(`File count ${uploadedFiles.length} exceeds ${MAX_FILE_COUNT} files limit`);
          }

          // All provided files were skipped (e.g. every one oversize): still
          // create/overwrite the site with an empty manifest so the rkey exists.
          if (uploadedFiles.length === 0) {

            // Create empty manifest
            const emptyManifest = {
              $type: 'place.wisp.fs',
              site: siteName,
              root: {
                type: 'directory',
                entries: []
              },
              fileCount: 0,
              createdAt: new Date().toISOString()
            };

            // Validate the manifest
            const validationResult = validateRecord(emptyManifest);
            if (!validationResult.success) {
              throw new Error(`Invalid manifest: ${validationResult.error?.message || 'Validation failed'}`);
            }

            // Use site name as rkey
            const rkey = siteName;

            const record = await agent.com.atproto.repo.putRecord({
              repo: auth.did,
              collection: 'place.wisp.fs',
              rkey: rkey,
              record: emptyManifest
            });

            await upsertSite(auth.did, rkey, siteName);

            return {
              success: true,
              uri: record.data.uri,
              cid: record.data.cid,
              fileCount: 0,
              siteName,
              skippedFiles,
              message: 'Site created but no valid web files were found to upload'
            };
          }

          // Process files into directory structure
          console.log('Processing uploaded files into directory structure...');
          console.log('uploadedFiles array length:', uploadedFiles.length);
          console.log('uploadedFiles contents:', uploadedFiles.map((f, i) => `${i}: ${f?.name || 'UNDEFINED'}`));

          // Filter out any undefined/null/invalid entries (defensive)
          const validUploadedFiles = uploadedFiles.filter((f, i) => {
            if (!f) {
              console.error(`Filtering out undefined/null file at index ${i}`);
              return false;
            }
            if (!f.name) {
              console.error(`Filtering out file with no name at index ${i}:`, f);
              return false;
            }
            if (!f.content) {
              console.error(`Filtering out file with no content at index ${i}:`, f.name);
              return false;
            }
            return true;
          });
          if (validUploadedFiles.length !== uploadedFiles.length) {
            console.warn(`Filtered out ${uploadedFiles.length - validUploadedFiles.length} invalid files`);
          }
          console.log('validUploadedFiles length:', validUploadedFiles.length);

          const { directory, fileCount } = processUploadedFiles(validUploadedFiles);
          console.log('Directory structure created, file count:', fileCount);

          // Upload files as blobs in parallel (or reuse existing blobs with matching CIDs)
          console.log('Starting blob upload/reuse phase...');
          // For compressed files, we upload as octet-stream and store the original MIME type in metadata
          // For text/html files, we also use octet-stream as a workaround for PDS image pipeline issues
          const uploadPromises = validUploadedFiles.map(async (file, i) => {
            try {
              // Skip undefined files (shouldn't happen after filter, but defensive)
              if (!file || !file.name) {
                console.error(`ERROR: Undefined file at index ${i} in validUploadedFiles!`);
                throw new Error(`Undefined file at index ${i}`);
              }

              // Compute CID for this file to check if it already exists
              // Note: file.content is already gzipped+base64 encoded
              const fileCID = computeCID(file.content);

              // Normalize the file path for comparison (remove base folder prefix like "cobblemon/")
              const normalizedPath = file.name.replace(/^[^\/]*\//, '');

              // Check if we have an existing blob with the same CID
              // Try both the normalized path and the full path
              const existingBlob = existingBlobMap.get(normalizedPath) || existingBlobMap.get(file.name);

              if (existingBlob && existingBlob.cid === fileCID) {
                // Reuse existing blob - no need to upload
                logger.info(`[File Upload] Reusing existing blob for: ${file.name} (CID: ${fileCID})`);

                return {
                  result: {
                    hash: existingBlob.cid,
                    blobRef: existingBlob.blobRef,
                    // Compression metadata is only attached for compressed
                    // files (currently always true — see the upload loop).
                    ...(file.compressed && {
                      encoding: 'gzip' as const,
                      mimeType: file.originalMimeType || file.mimeType,
                      base64: true
                    })
                  },
                  filePath: file.name,
                  sentMimeType: file.mimeType,
                  returnedMimeType: existingBlob.blobRef.mimeType,
                  reused: true
                };
              }

              // File is new or changed - upload it
              // If compressed, always upload as octet-stream
              // Otherwise, workaround: PDS incorrectly processes text/html through image pipeline
              const uploadMimeType = file.compressed || file.mimeType.startsWith('text/html')
                ? 'application/octet-stream'
                : file.mimeType;

              const compressionInfo = file.compressed ? ' (gzipped)' : '';
              logger.info(`[File Upload] Uploading new/changed file: ${file.name} (original: ${file.mimeType}, sending as: ${uploadMimeType}, ${file.size} bytes${compressionInfo}, CID: ${fileCID})`);

              const uploadResult = await agent.com.atproto.repo.uploadBlob(
                file.content,
                {
                  encoding: uploadMimeType
                }
              );

              const returnedBlobRef = uploadResult.data.blob;

              // Use the blob ref exactly as returned from PDS
              return {
                result: {
                  hash: returnedBlobRef.ref.toString(),
                  blobRef: returnedBlobRef,
                  ...(file.compressed && {
                    encoding: 'gzip' as const,
                    mimeType: file.originalMimeType || file.mimeType,
                    base64: true
                  })
                },
                filePath: file.name,
                sentMimeType: file.mimeType,
                returnedMimeType: returnedBlobRef.mimeType,
                reused: false
              };
            } catch (uploadError) {
              logger.error('Upload failed for file', uploadError);
              // Rethrow so Promise.all below fails the whole request; partial
              // uploads are not committed to the manifest.
              throw uploadError;
            }
          });

          // Wait for all uploads to complete
          const uploadedBlobs = await Promise.all(uploadPromises);

          // Count reused vs uploaded blobs
          const reusedCount = uploadedBlobs.filter(b => (b as any).reused).length;
          const uploadedCount = uploadedBlobs.filter(b => !(b as any).reused).length;
          console.log(`Blob statistics: ${reusedCount} reused, ${uploadedCount} uploaded, ${uploadedBlobs.length} total`);
          logger.info(`Blob statistics: ${reusedCount} reused, ${uploadedCount} uploaded, ${uploadedBlobs.length} total`);

          // Extract results and file paths in correct order
          // (Promise.all preserves input order, so results align with paths.)
          const uploadResults: FileUploadResult[] = uploadedBlobs.map(blob => blob.result);
          const filePaths: string[] = uploadedBlobs.map(blob => blob.filePath);

          // Update directory with file blobs
          console.log('Updating directory with blob references...');
          const updatedDirectory = updateFileBlobs(directory, uploadResults, filePaths);

          // Create manifest
          console.log('Creating manifest...');
          const manifest = createManifest(siteName, updatedDirectory, fileCount);
          console.log('Manifest created successfully');

          // Use site name as rkey
          const rkey = siteName;

          let record;
          try {
            console.log('Putting record to PDS with rkey:', rkey);
            record = await agent.com.atproto.repo.putRecord({
              repo: auth.did,
              collection: 'place.wisp.fs',
              rkey: rkey,
              record: manifest
            });
            console.log('Record successfully created on PDS:', record.data.uri);
          } catch (putRecordError: any) {
            console.error('FAILED to create record on PDS:', putRecordError);
            logger.error('Failed to create record on PDS', putRecordError);

            throw putRecordError;
          }

          // Store site in database cache
          await upsertSite(auth.did, rkey, siteName);

          const result = {
            success: true,
            uri: record.data.uri,
            cid: record.data.cid,
            fileCount,
            siteName,
            skippedFiles,
            uploadedCount: validUploadedFiles.length
          };

          console.log('=== UPLOAD FILES COMPLETE ===');
          return result;
        } catch (error) {
          // Single funnel for all failures: log with context, then surface a
          // generic wrapped message to the client.
          console.error('=== UPLOAD ERROR ===');
          console.error('Error details:', error);
          console.error('Stack trace:', error instanceof Error ? error.stack : 'N/A');
          logger.error('Upload error', error, {
            message: error instanceof Error ? error.message : 'Unknown error',
            name: error instanceof Error ? error.name : undefined
          });
          throw new Error(`Failed to upload files: ${error instanceof Error ? error.message : 'Unknown error'}`);
        }
      }
    )