Monorepo for Wisp.place, a static site hosting service built on top of the AT Protocol.

wah

Changed files (+265 -160)
hosting-service/src/lib/db.ts (+8 -2)
···
  export async function upsertSite(did: string, rkey: string, displayName?: string) {
    try {
+     // Only set display_name if provided (not undefined/null/empty)
+     const cleanDisplayName = displayName && displayName.trim() ? displayName.trim() : null;
      await sql`
        INSERT INTO sites (did, rkey, display_name, created_at, updated_at)
-       VALUES (${did}, ${rkey}, ${displayName || null}, EXTRACT(EPOCH FROM NOW()), EXTRACT(EPOCH FROM NOW()))
+       VALUES (${did}, ${rkey}, ${cleanDisplayName}, EXTRACT(EPOCH FROM NOW()), EXTRACT(EPOCH FROM NOW()))
        ON CONFLICT (did, rkey)
        DO UPDATE SET
-         display_name = COALESCE(EXCLUDED.display_name, sites.display_name),
+         display_name = CASE
+           WHEN EXCLUDED.display_name IS NOT NULL THEN EXCLUDED.display_name
+           ELSE sites.display_name
+         END,
          updated_at = EXTRACT(EPOCH FROM NOW())
      `;
    } catch (err) {
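A minimal usage sketch of the upsert semantics this hunk is after (illustrative call sequence, not code from the PR): an empty or whitespace-only displayName is normalized to null before the INSERT, so the CASE in the conflict branch keeps the existing display_name instead of blanking it.

    await upsertSite(did, 'my-site', 'My Site')   // sets display_name = 'My Site'
    await upsertSite(did, 'my-site', '   ')       // cleanDisplayName -> null, 'My Site' is kept
    await upsertSite(did, 'my-site', 'Renamed')   // overwrites with 'Renamed'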
hosting-service/src/lib/utils.ts (+12)
···
  export async function downloadAndCacheSite(did: string, rkey: string, record: WispFsRecord, pdsEndpoint: string): Promise<void> {
    console.log('Caching site', did, rkey);
+
+   // Validate record structure
+   if (!record.root) {
+     console.error('Record missing root directory:', JSON.stringify(record, null, 2));
+     throw new Error('Invalid record structure: missing root directory');
+   }
+
+   if (!record.root.entries || !Array.isArray(record.root.entries)) {
+     console.error('Record root missing entries array:', JSON.stringify(record.root, null, 2));
+     throw new Error('Invalid record structure: root missing entries array');
+   }
+
    await cacheFiles(did, rkey, record.root.entries, pdsEndpoint, '');
  }
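For reference, a minimal record that passes the new validation; the shape is inferred from createManifest and the directory nodes later in this diff, so treat it as a sketch rather than the canonical WispFsRecord type:

    const record = {
      $type: 'place.wisp.fs',
      site: 'my-site',
      root: { $type: 'place.wisp.fs#directory', type: 'directory', entries: [] },
      fileCount: 0,
      createdAt: new Date().toISOString()
    }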
src/index.ts (+7 -1)
···
      })
    }
  })
- .use(cors())
+ .use(cors({
+   origin: config.domain,
+   credentials: true,
+   methods: ['GET', 'POST', 'DELETE', 'OPTIONS'],
+   allowedHeaders: ['Content-Type', 'Authorization'],
+   maxAge: 86400 // 24 hours
+ }))
  .listen(8000)
console.log(
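Since credentials: true pairs the session cookie with a single allowed origin, browser code calling the API cross-origin has to opt in on its side too; a hedged client-side sketch (the API host name is assumed, not from the PR):

    // from a page on config.domain, calling the API host:
    const res = await fetch('https://api.wisp.place/api/auth/status', {
      credentials: 'include'  // required for the did cookie to be sent cross-origin
    })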
src/lib/db.ts (+8 -2)
···
  export const upsertSite = async (did: string, rkey: string, displayName?: string) => {
    try {
+     // Only set display_name if provided (not undefined/null/empty)
+     const cleanDisplayName = displayName && displayName.trim() ? displayName.trim() : null;
      await db`
        INSERT INTO sites (did, rkey, display_name, created_at, updated_at)
-       VALUES (${did}, ${rkey}, ${displayName || null}, EXTRACT(EPOCH FROM NOW()), EXTRACT(EPOCH FROM NOW()))
+       VALUES (${did}, ${rkey}, ${cleanDisplayName}, EXTRACT(EPOCH FROM NOW()), EXTRACT(EPOCH FROM NOW()))
        ON CONFLICT (did, rkey)
        DO UPDATE SET
-         display_name = COALESCE(EXCLUDED.display_name, sites.display_name),
+         display_name = CASE
+           WHEN EXCLUDED.display_name IS NOT NULL THEN EXCLUDED.display_name
+           ELSE sites.display_name
+         END,
          updated_at = EXTRACT(EPOCH FROM NOW())
      `;
      return { success: true };
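One detail worth noting: created_at and updated_at are stored as epoch seconds (EXTRACT(EPOCH FROM NOW())), not milliseconds, so a reader converting back to a JS Date needs the ×1000 (sketch; row is a hypothetical query result):

    const updatedAt = new Date(row.updated_at * 1000)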
src/lib/wisp-utils.ts (+55 -31)
···
   * Process uploaded files into a directory structure
   */
  export function processUploadedFiles(files: UploadedFile[]): ProcessedDirectory {
-   console.log(`🏗️ Processing ${files.length} uploaded files`);
    const entries: Entry[] = [];
    let fileCount = 0;
···
    // Remove any base folder name from the path
    const normalizedPath = file.name.replace(/^[^\/]*\//, '');
    const parts = normalizedPath.split('/');
-
-   console.log(`📄 Processing file: ${file.name} -> normalized: ${normalizedPath}`);
    if (parts.length === 1) {
      // Root level file
-     console.log(`📝 Root level file: ${parts[0]}`);
      entries.push({
        name: parts[0],
        node: {
···
    } else {
      // File in subdirectory
      const dirPath = parts.slice(0, -1).join('/');
-     console.log(`📂 Subdirectory file: ${dirPath}/${parts[parts.length - 1]}`);
      if (!directoryMap.has(dirPath)) {
        directoryMap.set(dirPath, []);
-       console.log(`➕ Created directory: ${dirPath}`);
      }
      directoryMap.get(dirPath)!.push({
        ...file,
···
    }
    // Process subdirectories
-   console.log(`📂 Processing ${directoryMap.size} subdirectories`);
    for (const [dirPath, dirFiles] of directoryMap) {
-     console.log(`📁 Processing directory: ${dirPath} with ${dirFiles.length} files`);
      const dirEntries: Entry[] = [];
      for (const file of dirFiles) {
        const fileName = file.name.split('/').pop()!;
-       console.log(`  📄 Adding file to directory: ${fileName}`);
        dirEntries.push({
          name: fileName,
          node: {
···
      // Build nested directory structure
      const pathParts = dirPath.split('/');
      let currentEntries = entries;
-
-     console.log(`🏗️ Building nested structure for path: ${pathParts.join('/')}`);
      for (let i = 0; i < pathParts.length; i++) {
        const part = pathParts[i];
···
            node: newDir
          };
          currentEntries.push(existingEntry);
-         console.log(`  ➕ Created directory entry: ${part}`);
        } else if ('entries' in existingEntry.node && isLast) {
          (existingEntry.node as any).entries.push(...dirEntries);
-         console.log(`  📁 Added files to existing directory: ${part}`);
        }
        if (existingEntry && 'entries' in existingEntry.node) {
···
      }
    }
-   console.log(`✅ Directory structure completed with ${fileCount} total files`);
-
    const result = {
      directory: {
        $type: 'place.wisp.fs#directory' as const,
···
      fileCount
    };
-   console.log('📋 Final directory structure:', JSON.stringify(result, null, 2));
    return result;
  }
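The replace(/^[^\/]*\//, '') above strips a single leading folder segment, i.e. the base directory browsers prepend when a whole folder is uploaded; a quick illustration:

    'my-site/css/style.css'.replace(/^[^\/]*\//, '')  // 'css/style.css'
    'index.html'.replace(/^[^\/]*\//, '')             // 'index.html' (no slash, unchanged)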
···
    root: Directory,
    fileCount: number
  ): Record {
-   const manifest: Record = {
+   return {
      $type: 'place.wisp.fs' as const,
      site: siteName,
      root,
      fileCount,
      createdAt: new Date().toISOString()
    };
-
-   console.log(`📋 Created manifest for site "${siteName}" with ${fileCount} files`);
-   console.log('📄 Manifest structure:', JSON.stringify(manifest, null, 2));
-
-   return manifest;
  }
  /**
   * Update file blobs in directory structure after upload
+  * Uses path-based matching to correctly match files in nested directories
   */
  export function updateFileBlobs(
    directory: Directory,
    uploadResults: FileUploadResult[],
-   filePaths: string[]
+   filePaths: string[],
+   currentPath: string = ''
  ): Directory {
-   console.log(`🔄 Updating file blobs: ${uploadResults.length} results for ${filePaths.length} paths`);
+   const mimeTypeMismatches: string[] = [];
    const updatedEntries = directory.entries.map(entry => {
      if ('type' in entry.node && entry.node.type === 'file') {
-       const fileIndex = filePaths.findIndex(path => path.endsWith(entry.name));
+       // Build the full path for this file
+       const fullPath = currentPath ? `${currentPath}/${entry.name}` : entry.name;
+
+       // Find exact match in filePaths (need to handle normalized paths)
+       const fileIndex = filePaths.findIndex((path) => {
+         // Normalize both paths by removing leading base folder
+         const normalizedUploadPath = path.replace(/^[^\/]*\//, '');
+         const normalizedEntryPath = fullPath;
+         return normalizedUploadPath === normalizedEntryPath || path === fullPath;
+       });
+
        if (fileIndex !== -1 && uploadResults[fileIndex]) {
-         console.log(`  🔗 Updating blob for file: ${entry.name} -> ${uploadResults[fileIndex].hash}`);
+         const blobRef = uploadResults[fileIndex].blobRef;
+         const uploadedPath = filePaths[fileIndex];
+
+         // Check if MIME types make sense for this file extension
+         const expectedMime = getExpectedMimeType(entry.name);
+         if (expectedMime && blobRef.mimeType !== expectedMime && !blobRef.mimeType.startsWith(expectedMime)) {
+           mimeTypeMismatches.push(`${fullPath}: expected ${expectedMime}, got ${blobRef.mimeType} (from upload: ${uploadedPath})`);
+         }
+
          return {
            ...entry,
            node: {
              $type: 'place.wisp.fs#file' as const,
              type: 'file' as const,
-             blob: uploadResults[fileIndex].blobRef
+             blob: blobRef
            }
          };
        } else {
-         console.warn(`  ⚠️ Could not find upload result for file: ${entry.name}`);
+         console.error(`❌ BLOB MATCHING ERROR: Could not find blob for file: ${fullPath}`);
+         console.error(`   Available paths:`, filePaths.slice(0, 10), filePaths.length > 10 ? `... and ${filePaths.length - 10} more` : '');
        }
      } else if ('type' in entry.node && entry.node.type === 'directory') {
-       console.log(`  📂 Recursively updating directory: ${entry.name}`);
+       const dirPath = currentPath ? `${currentPath}/${entry.name}` : entry.name;
        return {
          ...entry,
-         node: updateFileBlobs(entry.node as Directory, uploadResults, filePaths)
+         node: updateFileBlobs(entry.node as Directory, uploadResults, filePaths, dirPath)
        };
      }
      return entry;
    }) as Entry[];
+   if (mimeTypeMismatches.length > 0) {
+     console.error('\n⚠️ MIME TYPE MISMATCHES DETECTED IN MANIFEST:');
+     mimeTypeMismatches.forEach(m => console.error(`   ${m}`));
+     console.error('');
+   }
+
    const result = {
      $type: 'place.wisp.fs#directory' as const,
      type: 'directory' as const,
      entries: updatedEntries
    };
-   console.log('✅ File blobs updated');
    return result;
  }
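The switch from suffix to full-path matching matters whenever the same file name appears at different depths; with entry.name = 'index.html', the old endsWith check matched the first path ending in that name, while the new comparison resolves each copy exactly:

    const filePaths = ['site/index.html', 'site/blog/index.html']
    // old: filePaths.findIndex(p => p.endsWith('index.html'))  -> 0 for both entries
    // new: normalized 'blog/index.html' === 'blog/index.html'  -> 1 for the nested file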
+
+ function getExpectedMimeType(filename: string): string | null {
+   const ext = filename.toLowerCase().split('.').pop();
+   const mimeMap: Record<string, string> = {
+     'html': 'text/html',
+     'htm': 'text/html',
+     'css': 'text/css',
+     'js': 'text/javascript',
+     'mjs': 'text/javascript',
+     'json': 'application/json',
+     'jpg': 'image/jpeg',
+     'jpeg': 'image/jpeg',
+     'png': 'image/png',
+     'gif': 'image/gif',
+     'webp': 'image/webp',
+     'svg': 'image/svg+xml',
+   };
+   return ext ? (mimeMap[ext] || null) : null;
+ }
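Extensions outside this map make getExpectedMimeType return null, so only the listed web types can ever produce a mismatch entry, e.g.:

    getExpectedMimeType('logo.svg')    // 'image/svg+xml'
    getExpectedMimeType('font.woff2')  // null -> never flagged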
src/routes/auth.ts (+78 -21)
···
import { NodeOAuthClient } from '@atproto/oauth-client-node'
import { getSitesByDid, getDomainByDid } from '../lib/db'
import { syncSitesFromPDS } from '../lib/sync-sites'
+ import { authenticateRequest } from '../lib/wisp-auth'
export const authRoutes = (client: NodeOAuthClient) => new Elysia()
.post('/api/auth/signin', async (c) => {
···
}
})
    .get('/api/auth/callback', async (c) => {
-     const params = new URLSearchParams(c.query)
-     const { session } = await client.callback(params)
-     if (!session) return { error: 'Authentication failed' }
+     try {
+       const params = new URLSearchParams(c.query)
+
+       // client.callback() validates the state parameter internally
+       // It will throw an error if state validation fails (CSRF protection)
+       const { session } = await client.callback(params)
+
+       if (!session) {
+         console.error('[Auth] OAuth callback failed: no session returned')
+         return c.redirect('/?error=auth_failed')
+       }
-     const cookieSession = c.cookie
-     cookieSession.did.value = session.did
+       const cookieSession = c.cookie
+       cookieSession.did.value = session.did
-     // Sync sites from PDS to database cache
-     console.log('[Auth] Syncing sites from PDS for', session.did)
+       // Sync sites from PDS to database cache
+       console.log('[Auth] Syncing sites from PDS for', session.did)
+       try {
+         const syncResult = await syncSitesFromPDS(session.did, session)
+         console.log(`[Auth] Sync complete: ${syncResult.synced} sites synced`)
+         if (syncResult.errors.length > 0) {
+           console.warn('[Auth] Sync errors:', syncResult.errors)
+         }
+       } catch (err) {
+         console.error('[Auth] Failed to sync sites:', err)
+         // Don't fail auth if sync fails, just log it
+       }
+
+       // Check if user has any sites or domain
+       const sites = await getSitesByDid(session.did)
+       const domain = await getDomainByDid(session.did)
+
+       // If no sites and no domain, redirect to onboarding
+       if (sites.length === 0 && !domain) {
+         return c.redirect('/onboarding')
+       }
+
+       return c.redirect('/editor')
+     } catch (err) {
+       // This catches state validation failures and other OAuth errors
+       console.error('[Auth] OAuth callback error:', err)
+       return c.redirect('/?error=auth_failed')
+     }
+   })
+   .post('/api/auth/logout', async (c) => {
      try {
-       const syncResult = await syncSitesFromPDS(session.did, session)
-       console.log(`[Auth] Sync complete: ${syncResult.synced} sites synced`)
-       if (syncResult.errors.length > 0) {
-         console.warn('[Auth] Sync errors:', syncResult.errors)
+       const cookieSession = c.cookie
+       const did = cookieSession.did?.value
+
+       // Clear the session cookie
+       cookieSession.did.value = ''
+       cookieSession.did.maxAge = 0
+
+       // If we have a DID, try to revoke the OAuth session
+       if (did && typeof did === 'string') {
+         try {
+           await client.revoke(did)
+           console.log('[Auth] Revoked OAuth session for', did)
+         } catch (err) {
+           console.error('[Auth] Failed to revoke session:', err)
+           // Continue with logout even if revoke fails
+         }
        }
+
+       return { success: true }
      } catch (err) {
-       console.error('[Auth] Failed to sync sites:', err)
-       // Don't fail auth if sync fails, just log it
+       console.error('[Auth] Logout error:', err)
+       return { error: 'Logout failed' }
      }
+   })
+   .get('/api/auth/status', async (c) => {
+     try {
+       const auth = await authenticateRequest(client, c.cookie)
-       // Check if user has any sites or domain
-       const sites = await getSitesByDid(session.did)
-       const domain = await getDomainByDid(session.did)
+       if (!auth) {
+         return { authenticated: false }
+       }
-       // If no sites and no domain, redirect to onboarding
-       if (sites.length === 0 && !domain) {
-         return c.redirect('/onboarding')
+       return {
+         authenticated: true,
+         did: auth.did
+       }
+     } catch (err) {
+       console.error('[Auth] Status check error:', err)
+       return { authenticated: false }
      }
-
-     return c.redirect('/editor')
    })
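The new status endpoint gives the frontend a stable contract; its response is one of two shapes (type written out here for clarity, not exported by the PR):

    type AuthStatus =
      | { authenticated: false }
      | { authenticated: true; did: string }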
src/routes/wisp.ts (+97 -103)
···
    files: File | File[]
  };
- console.log('🚀 Starting upload process', { siteName, fileCount: Array.isArray(files) ? files.length : 1 });
-
  try {
    if (!siteName) {
-     console.error('❌ Site name is required');
      throw new Error('Site name is required')
    }
    if (!isValidSiteName(siteName)) {
-     console.error('❌ Invalid site name format');
      throw new Error('Invalid site name: must be 1-512 characters and contain only alphanumeric, dots, dashes, underscores, tildes, and colons')
    }
-   console.log('✅ Initial validation passed');
-
    // Check if files were provided
    const hasFiles = files && (Array.isArray(files) ? files.length > 0 : !!files);
    if (!hasFiles) {
-     console.log('📝 Creating empty site (no files provided)');
-     // Create agent with OAuth session
-     console.log('🔐 Creating agent with OAuth session');
      const agent = new Agent((url, init) => auth.session.fetchHandler(url, init))
-     console.log('✅ Agent created successfully');
      // Create empty manifest
      const emptyManifest = {
···
      // Use site name as rkey
      const rkey = siteName;
-     // Create the record with explicit rkey
-     console.log(`📝 Creating empty site record in repo with rkey: ${rkey}`);
      const record = await agent.com.atproto.repo.putRecord({
        repo: auth.did,
        collection: 'place.wisp.fs',
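Because the rkey is the site name, com.atproto.repo.putRecord behaves as create-or-replace here: re-uploading a site rewrites the record at the same URI (sketch of the resulting AT URI, with placeholders):

    at://<auth.did>/place.wisp.fs/<siteName>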
···
        record: emptyManifest
      });
-     console.log('✅ Empty site record created successfully:', {
-       uri: record.data.uri,
-       cid: record.data.cid
-     });
-
-     // Store site in database cache
-     console.log('💾 Storing site in database cache');
      await upsertSite(auth.did, rkey, siteName);
-     console.log('✅ Site stored in database');
      return {
        success: true,
···
    }
    // Create agent with OAuth session
-   console.log('🔐 Creating agent with OAuth session');
    const agent = new Agent((url, init) => auth.session.fetchHandler(url, init))
-   console.log('✅ Agent created successfully');
    // Convert File objects to UploadedFile format
    // Elysia gives us File objects directly, handle both single file and array
    const fileArray = Array.isArray(files) ? files : [files];
-   console.log(`📁 Processing ${fileArray.length} files`);
    const uploadedFiles: UploadedFile[] = [];
    // Define allowed file extensions for static site hosting
···
    for (let i = 0; i < fileArray.length; i++) {
      const file = fileArray[i];
      const fileExtension = '.' + file.name.split('.').pop()?.toLowerCase();
-
-     console.log(`📄 Processing file ${i + 1}/${fileArray.length}: ${file.name} (${file.size} bytes, ${file.type})`);
-
+
      // Skip excluded files
      if (excludedFiles.has(fileExtension)) {
-       console.log(`⏭️ Skipping excluded file: ${file.name}`);
        continue;
      }
-
+
      // Skip files that aren't in allowed extensions
      if (!allowedExtensions.has(fileExtension)) {
-       console.log(`⏭️ Skipping non-web file: ${file.name} (${fileExtension})`);
        continue;
      }
-
+
      // Skip files that are too large (limit to 100MB per file)
      const maxSize = 100 * 1024 * 1024; // 100MB
      if (file.size > maxSize) {
-       console.log(`⏭️ Skipping large file: ${file.name} (${(file.size / 1024 / 1024).toFixed(2)}MB > 100MB limit)`);
        continue;
      }
-
-     console.log(`✅ Including file: ${file.name}`);
+
      const arrayBuffer = await file.arrayBuffer();
      uploadedFiles.push({
        name: file.name,
···
    // Check total size limit (300MB)
    const totalSize = uploadedFiles.reduce((sum, file) => sum + file.size, 0);
    const maxTotalSize = 300 * 1024 * 1024; // 300MB
-
-   console.log(`📊 Filtered to ${uploadedFiles.length} files from ${fileArray.length} total files`);
-   console.log(`📦 Total size: ${(totalSize / 1024 / 1024).toFixed(2)}MB (limit: 300MB)`);
    if (totalSize > maxTotalSize) {
      throw new Error(`Total upload size ${(totalSize / 1024 / 1024).toFixed(2)}MB exceeds 300MB limit`);
    }
    if (uploadedFiles.length === 0) {
-     console.log('⚠️ No valid web files found, creating empty site instead');
      // Create empty manifest
      const emptyManifest = {
···
      // Use site name as rkey
      const rkey = siteName;
-     // Create the record with explicit rkey
-     console.log(`📝 Creating empty site record in repo with rkey: ${rkey}`);
      const record = await agent.com.atproto.repo.putRecord({
        repo: auth.did,
        collection: 'place.wisp.fs',
···
        record: emptyManifest
      });
-     console.log('✅ Empty site record created successfully:', {
-       uri: record.data.uri,
-       cid: record.data.cid
-     });
-
-     // Store site in database cache
-     console.log('💾 Storing site in database cache');
      await upsertSite(auth.did, rkey, siteName);
-     console.log('✅ Site stored in database');
      return {
        success: true,
···
      };
    }
-   console.log('✅ File conversion completed');
-
    // Process files into directory structure
-   console.log('🏗️ Building directory structure');
    const { directory, fileCount } = processUploadedFiles(uploadedFiles);
-   console.log(`✅ Directory structure created with ${fileCount} files`);
-   // Upload files as blobs
-   const uploadResults: FileUploadResult[] = [];
-   const filePaths: string[] = [];
+   // Upload files as blobs in parallel
+   const mimeTypeMismatches: Array<{file: string, sent: string, returned: string}> = [];
-   console.log('⬆️ Starting blob upload process');
-   for (let i = 0; i < uploadedFiles.length; i++) {
-     const file = uploadedFiles[i];
-     console.log(`📤 Uploading blob ${i + 1}/${uploadedFiles.length}: ${file.name}`);
-
+   const uploadPromises = uploadedFiles.map(async (file, i) => {
      try {
-       console.log(`🔍 Upload details:`, {
-         fileName: file.name,
-         fileSize: file.size,
-         mimeType: file.mimeType,
-         contentLength: file.content.length
-       });
-
        const uploadResult = await agent.com.atproto.repo.uploadBlob(
          file.content,
          {
···
          }
        );
-       console.log(`✅ Upload successful for ${file.name}:`, {
-         hash: uploadResult.data.blob.ref.toString(),
-         mimeType: uploadResult.data.blob.mimeType,
-         size: uploadResult.data.blob.size
-       });
+       const sentMimeType = file.mimeType;
+       const returnedBlobRef = uploadResult.data.blob;
-       uploadResults.push({
-         hash: uploadResult.data.blob.ref.toString(),
-         blobRef: uploadResult.data.blob
-       });
+       // Track MIME type mismatches for summary
+       if (sentMimeType !== returnedBlobRef.mimeType) {
+         mimeTypeMismatches.push({
+           file: file.name,
+           sent: sentMimeType,
+           returned: returnedBlobRef.mimeType
+         });
+       }
-       filePaths.push(file.name);
+       // Use the blob ref exactly as returned from PDS
+       return {
+         result: {
+           hash: returnedBlobRef.ref.$link || returnedBlobRef.ref.toString(),
+           blobRef: returnedBlobRef
+         },
+         filePath: file.name,
+         sentMimeType,
+         returnedMimeType: returnedBlobRef.mimeType
+       };
      } catch (uploadError) {
-       console.error(`❌ Upload failed for file ${file.name}:`, uploadError);
-       console.error('Upload error details:', {
-         fileName: file.name,
-         fileSize: file.size,
-         mimeType: file.mimeType,
-         error: uploadError
-       });
+       console.error(`❌ Upload failed for ${file.name}:`, uploadError);
        throw uploadError;
      }
+   });
+
+   // Wait for all uploads to complete
+   const uploadedBlobs = await Promise.all(uploadPromises);
+
+   // Show MIME type mismatch summary
+   if (mimeTypeMismatches.length > 0) {
+     console.warn(`\n⚠️ PDS changed MIME types for ${mimeTypeMismatches.length} files:`);
+     mimeTypeMismatches.slice(0, 20).forEach(m => {
+       console.warn(`   ${m.file}: ${m.sent} → ${m.returned}`);
+     });
+     if (mimeTypeMismatches.length > 20) {
+       console.warn(`   ... and ${mimeTypeMismatches.length - 20} more`);
+     }
+     console.warn('');
    }
-   console.log('✅ All blobs uploaded successfully');
+   // CRITICAL: Find files uploaded as application/octet-stream
+   const octetStreamFiles = uploadedBlobs.filter(b => b.returnedMimeType === 'application/octet-stream');
+   if (octetStreamFiles.length > 0) {
+     console.error(`\n🚨 FILES UPLOADED AS application/octet-stream (${octetStreamFiles.length}):`);
+     octetStreamFiles.forEach(f => {
+       console.error(`   ${f.filePath}: sent=${f.sentMimeType}, returned=${f.returnedMimeType}`);
+     });
+     console.error('');
+   }
+
+   // Extract results and file paths in correct order
+   const uploadResults: FileUploadResult[] = uploadedBlobs.map(blob => blob.result);
+   const filePaths: string[] = uploadedBlobs.map(blob => blob.filePath);
    // Update directory with file blobs
-   console.log('🔄 Updating file blobs in directory structure');
    const updatedDirectory = updateFileBlobs(directory, uploadResults, filePaths);
-   console.log('✅ File blobs updated');
    // Create manifest
-   console.log('📋 Creating manifest');
    const manifest = createManifest(siteName, updatedDirectory, fileCount);
-   console.log('✅ Manifest created');
    // Use site name as rkey
    const rkey = siteName;
-   // Create the record with explicit rkey
-   console.log(`📝 Creating record in repo with rkey: ${rkey}`);
-   const record = await agent.com.atproto.repo.putRecord({
-     repo: auth.did,
-     collection: 'place.wisp.fs',
-     rkey: rkey,
-     record: manifest
-   });
+   let record;
+   try {
+     record = await agent.com.atproto.repo.putRecord({
+       repo: auth.did,
+       collection: 'place.wisp.fs',
+       rkey: rkey,
+       record: manifest
+     });
+   } catch (putRecordError: any) {
+     console.error('\n❌ Failed to create record on PDS');
+     console.error('Error:', putRecordError.message);
-     console.log('✅ Record created successfully:', {
-       uri: record.data.uri,
-       cid: record.data.cid
-     });
+     // Try to identify which file has the MIME type mismatch
+     if (putRecordError.message?.includes('Mimetype') || putRecordError.message?.includes('mimeType')) {
+       console.error('\n🔍 Analyzing manifest for MIME type issues...');
+
+       // Recursively check all blobs in manifest
+       const checkBlobs = (node: any, path: string = '') => {
+         if (node.type === 'file' && node.blob) {
+           const mimeType = node.blob.mimeType;
+           console.error(`  File: ${path} - MIME: ${mimeType}`);
+         } else if (node.type === 'directory' && node.entries) {
+           for (const entry of node.entries) {
+             const entryPath = path ? `${path}/${entry.name}` : entry.name;
+             checkBlobs(entry.node, entryPath);
+           }
+         }
+       };
+
+       checkBlobs(manifest.root, '');
+
+       console.error('\n📊 Blob upload summary:');
+       uploadedBlobs.slice(0, 20).forEach((b, i) => {
+         console.error(`  [${i}] ${b.filePath}: sent=${b.sentMimeType}, returned=${b.returnedMimeType}`);
+       });
+       if (uploadedBlobs.length > 20) {
+         console.error(`  ... and ${uploadedBlobs.length - 20} more`);
+       }
+     }
+
+     throw putRecordError;
+   }
    // Store site in database cache
-   console.log('💾 Storing site in database cache');
    await upsertSite(auth.did, rkey, siteName);
-   console.log('✅ Site stored in database');
    const result = {
      success: true,
···
      siteName
    };
-   console.log('🎉 Upload process completed successfully');
    return result;
  } catch (error) {
    console.error('❌ Upload error:', error);
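Two mechanics in the upload rewrite are easy to miss. Promise.all preserves input order, so uploadedBlobs[i] still lines up with uploadedFiles[i], which is what keeps uploadResults and filePaths index-aligned for updateFileBlobs. And the hash falls back from ref.$link to ref.toString() because the returned blob ref can be either a plain JSON object ({ $link: '...' }) or a CID instance, depending on how it was (de)serialized; a sketch of that accessor (hashOf is a hypothetical helper, not in the PR):

    const hashOf = (ref: any): string =>
      typeof ref?.$link === 'string' ? ref.$link : ref.toString()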