Monorepo for wisp.place — a static site hosting service built on top of the AT Protocol.

Better validation during file upload

Changed files
+399 -58
hosting-service
public
editor
src
+10 -4
hosting-service/src/lib/utils.ts
···
*/
export function shouldCompressMimeType(mimeType: string | undefined): boolean {
if (!mimeType) return false;
-
+
const mime = mimeType.toLowerCase();
-
-
// Text-based web assets that benefit from compression
+
+
// Text-based web assets and uncompressed audio that benefit from compression
const compressibleTypes = [
'text/html',
'text/css',
···
'application/json',
'text/plain',
'image/svg+xml',
+
// Uncompressed audio formats
+
'audio/wav',
+
'audio/wave',
+
'audio/x-wav',
+
'audio/aiff',
+
'audio/x-aiff',
];
-
+
if (compressibleTypes.some(type => mime === type || mime.startsWith(type))) {
return true;
}
+43 -12
hosting-service/src/server.ts
···
const shouldServeCompressed = shouldCompressMimeType(meta.mimeType);
if (!shouldServeCompressed) {
-
const { gunzipSync } = await import('zlib');
-
const decompressed = gunzipSync(content);
-
headers['Content-Type'] = meta.mimeType;
-
headers['Cache-Control'] = 'public, max-age=31536000, immutable';
-
return new Response(decompressed, { headers });
+
// Verify content is actually gzipped before attempting decompression
+
const isGzipped = content.length >= 2 && content[0] === 0x1f && content[1] === 0x8b;
+
if (isGzipped) {
+
const { gunzipSync } = await import('zlib');
+
const decompressed = gunzipSync(content);
+
headers['Content-Type'] = meta.mimeType;
+
headers['Cache-Control'] = 'public, max-age=31536000, immutable';
+
return new Response(decompressed, { headers });
+
} else {
+
// Meta says gzipped but content isn't - serve as-is
+
console.warn(`File ${filePath} has gzip encoding in meta but content lacks gzip magic bytes`);
+
headers['Content-Type'] = meta.mimeType;
+
headers['Cache-Control'] = 'public, max-age=31536000, immutable';
+
return new Response(content, { headers });
+
}
}
headers['Content-Type'] = meta.mimeType;
···
if (isHtmlContent(requestPath, mimeType)) {
let htmlContent: string;
if (isGzipped) {
-
const { gunzipSync } = await import('zlib');
-
htmlContent = gunzipSync(content).toString('utf-8');
+
// Verify content is actually gzipped
+
const hasGzipMagic = content.length >= 2 && content[0] === 0x1f && content[1] === 0x8b;
+
if (hasGzipMagic) {
+
const { gunzipSync } = await import('zlib');
+
htmlContent = gunzipSync(content).toString('utf-8');
+
} else {
+
console.warn(`File ${requestPath} marked as gzipped but lacks magic bytes, serving as-is`);
+
htmlContent = content.toString('utf-8');
+
}
} else {
htmlContent = content.toString('utf-8');
}
···
if (isGzipped) {
const shouldServeCompressed = shouldCompressMimeType(mimeType);
if (!shouldServeCompressed) {
-
const { gunzipSync } = await import('zlib');
-
const decompressed = gunzipSync(content);
-
return new Response(decompressed, { headers });
+
// Verify content is actually gzipped
+
const hasGzipMagic = content.length >= 2 && content[0] === 0x1f && content[1] === 0x8b;
+
if (hasGzipMagic) {
+
const { gunzipSync } = await import('zlib');
+
const decompressed = gunzipSync(content);
+
return new Response(decompressed, { headers });
+
} else {
+
console.warn(`File ${requestPath} marked as gzipped but lacks magic bytes, serving as-is`);
+
return new Response(content, { headers });
+
}
}
headers['Content-Encoding'] = 'gzip';
}
···
let htmlContent: string;
if (isGzipped) {
-
const { gunzipSync } = await import('zlib');
-
htmlContent = gunzipSync(indexContent).toString('utf-8');
+
// Verify content is actually gzipped
+
const hasGzipMagic = indexContent.length >= 2 && indexContent[0] === 0x1f && indexContent[1] === 0x8b;
+
if (hasGzipMagic) {
+
const { gunzipSync } = await import('zlib');
+
htmlContent = gunzipSync(indexContent).toString('utf-8');
+
} else {
+
console.warn(`Index file marked as gzipped but lacks magic bytes, serving as-is`);
+
htmlContent = indexContent.toString('utf-8');
+
}
} else {
htmlContent = indexContent.toString('utf-8');
}
+162 -8
public/editor/tabs/UploadTab.tsx
···
Globe,
Upload,
AlertCircle,
-
Loader2
+
Loader2,
+
ChevronDown,
+
ChevronUp,
+
CheckCircle2,
+
XCircle,
+
RefreshCw
} from 'lucide-react'
import type { SiteWithDomains } from '../hooks/useSiteData'
+
+
type FileStatus = 'pending' | 'checking' | 'uploading' | 'uploaded' | 'reused' | 'failed'
+
+
interface FileProgress {
+
name: string
+
status: FileStatus
+
error?: string
+
}
interface UploadTabProps {
sites: SiteWithDomains[]
···
const [isUploading, setIsUploading] = useState(false)
const [uploadProgress, setUploadProgress] = useState('')
const [skippedFiles, setSkippedFiles] = useState<Array<{ name: string; reason: string }>>([])
+
const [failedFiles, setFailedFiles] = useState<Array<{ name: string; index: number; error: string; size: number }>>([])
const [uploadedCount, setUploadedCount] = useState(0)
+
const [fileProgressList, setFileProgressList] = useState<FileProgress[]>([])
+
const [showFileProgress, setShowFileProgress] = useState(false)
// Keep SSE connection alive across tab switches
const eventSourceRef = useRef<EventSource | null>(null)
···
const progressData = JSON.parse(event.data)
const { progress, status } = progressData
+
// Update file progress list if we have current file info
+
if (progress.currentFile && progress.currentFileStatus) {
+
setFileProgressList(prev => {
+
const existing = prev.find(f => f.name === progress.currentFile)
+
if (existing) {
+
// Update existing file status
+
return prev.map(f =>
+
f.name === progress.currentFile
+
? { ...f, status: progress.currentFileStatus as FileStatus }
+
: f
+
)
+
} else {
+
// Add new file
+
return [...prev, {
+
name: progress.currentFile,
+
status: progress.currentFileStatus as FileStatus
+
}]
+
}
+
})
+
}
+
// Update progress message based on phase
let message = 'Processing...'
if (progress.phase === 'validating') {
···
eventSourceRef.current = null
currentJobIdRef.current = null
-
setUploadProgress('Upload complete!')
+
const hasIssues = (result.skippedFiles && result.skippedFiles.length > 0) ||
+
(result.failedFiles && result.failedFiles.length > 0)
+
+
// Update file progress list with failed files
+
if (result.failedFiles && result.failedFiles.length > 0) {
+
setFileProgressList(prev => {
+
const updated = [...prev]
+
result.failedFiles.forEach((failedFile: any) => {
+
const existing = updated.find(f => f.name === failedFile.name)
+
if (existing) {
+
existing.status = 'failed'
+
existing.error = failedFile.error
+
} else {
+
updated.push({
+
name: failedFile.name,
+
status: 'failed',
+
error: failedFile.error
+
})
+
}
+
})
+
return updated
+
})
+
}
+
+
setUploadProgress(hasIssues ? 'Upload completed with issues' : 'Upload complete!')
setSkippedFiles(result.skippedFiles || [])
+
setFailedFiles(result.failedFiles || [])
setUploadedCount(result.uploadedCount || result.fileCount || 0)
setSelectedSiteRkey('')
setNewSiteName('')
···
// Refresh sites list
onUploadComplete()
-
// Reset form
-
const resetDelay = result.skippedFiles && result.skippedFiles.length > 0 ? 4000 : 1500
+
// Reset form (wait longer if there are issues to show)
+
const resetDelay = hasIssues ? 6000 : 1500
setTimeout(() => {
setUploadProgress('')
setSkippedFiles([])
+
setFailedFiles([])
setUploadedCount(0)
+
setFileProgressList([])
setIsUploading(false)
}, resetDelay)
})
···
</div>
</div>
-
{skippedFiles.length > 0 && (
-
<div className="p-4 bg-yellow-500/10 border border-yellow-500/20 rounded-lg">
-
<div className="flex items-start gap-2 text-yellow-600 dark:text-yellow-400 mb-2">
+
{fileProgressList.length > 0 && (
+
<div className="border rounded-lg overflow-hidden">
+
<button
+
onClick={() => setShowFileProgress(!showFileProgress)}
+
className="w-full p-3 bg-muted/50 hover:bg-muted transition-colors flex items-center justify-between text-sm font-medium"
+
>
+
<span>
+
Processing files ({fileProgressList.filter(f => f.status === 'uploaded' || f.status === 'reused').length}/{fileProgressList.length})
+
</span>
+
{showFileProgress ? (
+
<ChevronUp className="w-4 h-4" />
+
) : (
+
<ChevronDown className="w-4 h-4" />
+
)}
+
</button>
+
{showFileProgress && (
+
<div className="max-h-64 overflow-y-auto p-3 space-y-1 bg-background">
+
{fileProgressList.map((file, idx) => (
+
<div
+
key={idx}
+
className="flex items-start gap-2 text-xs p-2 rounded hover:bg-muted/50 transition-colors"
+
>
+
{file.status === 'checking' && (
+
<Loader2 className="w-3 h-3 mt-0.5 animate-spin text-blue-500 shrink-0" />
+
)}
+
{file.status === 'uploading' && (
+
<Loader2 className="w-3 h-3 mt-0.5 animate-spin text-purple-500 shrink-0" />
+
)}
+
{file.status === 'uploaded' && (
+
<CheckCircle2 className="w-3 h-3 mt-0.5 text-green-500 shrink-0" />
+
)}
+
{file.status === 'reused' && (
+
<RefreshCw className="w-3 h-3 mt-0.5 text-cyan-500 shrink-0" />
+
)}
+
{file.status === 'failed' && (
+
<XCircle className="w-3 h-3 mt-0.5 text-red-500 shrink-0" />
+
)}
+
<div className="flex-1 min-w-0">
+
<div className="font-mono truncate">{file.name}</div>
+
{file.error && (
+
<div className="text-red-500 mt-0.5">
+
{file.error}
+
</div>
+
)}
+
{file.status === 'checking' && (
+
<div className="text-muted-foreground">Checking for changes...</div>
+
)}
+
{file.status === 'uploading' && (
+
<div className="text-muted-foreground">Uploading to PDS...</div>
+
)}
+
{file.status === 'reused' && (
+
<div className="text-muted-foreground">Reused (unchanged)</div>
+
)}
+
</div>
+
</div>
+
))}
+
</div>
+
)}
+
</div>
+
)}
+
+
{failedFiles.length > 0 && (
+
<div className="p-4 bg-red-500/10 border border-red-500/20 rounded-lg">
+
<div className="flex items-start gap-2 text-red-600 dark:text-red-400 mb-2">
<AlertCircle className="w-4 h-4 mt-0.5 shrink-0" />
<div className="flex-1">
<span className="font-medium">
-
{skippedFiles.length} file{skippedFiles.length > 1 ? 's' : ''} skipped
+
{failedFiles.length} file{failedFiles.length > 1 ? 's' : ''} failed to upload
</span>
{uploadedCount > 0 && (
<span className="text-sm ml-2">
({uploadedCount} uploaded successfully)
</span>
)}
+
</div>
+
</div>
+
<div className="ml-6 space-y-1 max-h-40 overflow-y-auto">
+
{failedFiles.slice(0, 10).map((file, idx) => (
+
<div key={idx} className="text-xs">
+
<div className="font-mono font-semibold">{file.name}</div>
+
<div className="text-muted-foreground ml-2">
+
Error: {file.error}
+
{file.size > 0 && ` (${(file.size / 1024).toFixed(1)} KB)`}
+
</div>
+
</div>
+
))}
+
{failedFiles.length > 10 && (
+
<div className="text-xs text-muted-foreground">
+
...and {failedFiles.length - 10} more
+
</div>
+
)}
+
</div>
+
</div>
+
)}
+
+
{skippedFiles.length > 0 && (
+
<div className="p-4 bg-yellow-500/10 border border-yellow-500/20 rounded-lg">
+
<div className="flex items-start gap-2 text-yellow-600 dark:text-yellow-400 mb-2">
+
<AlertCircle className="w-4 h-4 mt-0.5 shrink-0" />
+
<div className="flex-1">
+
<span className="font-medium">
+
{skippedFiles.length} file{skippedFiles.length > 1 ? 's' : ''} skipped
+
</span>
</div>
</div>
<div className="ml-6 space-y-1 max-h-32 overflow-y-auto">
+3
src/lib/upload-jobs.ts
···
filesUploaded: number;
filesReused: number;
currentFile?: string;
+
currentFileStatus?: 'checking' | 'uploading' | 'uploaded' | 'reused' | 'failed';
phase: 'validating' | 'compressing' | 'uploading' | 'creating_manifest' | 'finalizing' | 'done';
}
···
fileCount?: number;
siteName?: string;
skippedFiles?: Array<{ name: string; reason: string }>;
+
failedFiles?: Array<{ name: string; index: number; error: string; size: number }>;
uploadedCount?: number;
+
hasFailures?: boolean;
};
error?: string;
createdAt: number;
+9 -2
src/lib/wisp-utils.ts
···
mimeType: string;
size: number;
compressed?: boolean;
+
base64Encoded?: boolean;
originalMimeType?: string;
}
···
* Determine if a file should be gzip compressed based on its MIME type
*/
export function shouldCompressFile(mimeType: string): boolean {
-
// Compress text-based files
+
// Compress text-based files and uncompressed audio formats
const compressibleTypes = [
'text/html',
'text/css',
···
'text/xml',
'application/xml',
'text/plain',
-
'application/x-javascript'
+
'application/x-javascript',
+
// Uncompressed audio formats (WAV, AIFF, etc.)
+
'audio/wav',
+
'audio/wave',
+
'audio/x-wav',
+
'audio/aiff',
+
'audio/x-aiff'
];
// Check if mime type starts with any compressible type
+172 -32
src/routes/wisp.ts
···
for (let i = 0; i < fileArray.length; i++) {
const file = fileArray[i];
+
+
// Skip undefined/null files
+
if (!file || !file.name) {
+
console.log(`Skipping undefined file at index ${i}`);
+
skippedFiles.push({
+
name: `[undefined file at index ${i}]`,
+
reason: 'Invalid file object'
+
});
+
continue;
+
}
+
console.log(`Processing file ${i + 1}/${fileArray.length}:`, file.name, file.size, 'bytes');
updateJobProgress(jobId, {
filesProcessed: i + 1,
···
const originalContent = Buffer.from(arrayBuffer);
const originalMimeType = file.type || 'application/octet-stream';
-
// Compress and base64 encode ALL files
-
const compressedContent = compressFile(originalContent);
-
const base64Content = Buffer.from(compressedContent.toString('base64'), 'binary');
-
const compressionRatio = (compressedContent.length / originalContent.length * 100).toFixed(1);
-
console.log(`Compressing ${file.name}: ${originalContent.length} -> ${compressedContent.length} bytes (${compressionRatio}%), base64: ${base64Content.length} bytes`);
-
logger.info(`Compressing ${file.name}: ${originalContent.length} -> ${compressedContent.length} bytes (${compressionRatio}%), base64: ${base64Content.length} bytes`);
+
// Determine if file should be compressed
+
const shouldCompress = shouldCompressFile(originalMimeType);
+
+
// Text files (HTML/CSS/JS) need base64 encoding to prevent PDS content sniffing
+
// Audio files just need compression without base64
+
const needsBase64 = originalMimeType.startsWith('text/') ||
+
originalMimeType.includes('html') ||
+
originalMimeType.includes('javascript') ||
+
originalMimeType.includes('css') ||
+
originalMimeType.includes('json') ||
+
originalMimeType.includes('xml') ||
+
originalMimeType.includes('svg');
+
+
let finalContent: Buffer;
+
let compressed = false;
+
let base64Encoded = false;
+
+
if (shouldCompress) {
+
const compressedContent = compressFile(originalContent);
+
compressed = true;
+
+
if (needsBase64) {
+
// Text files: compress AND base64 encode
+
finalContent = Buffer.from(compressedContent.toString('base64'), 'binary');
+
base64Encoded = true;
+
const compressionRatio = (compressedContent.length / originalContent.length * 100).toFixed(1);
+
console.log(`Compressing+base64 ${file.name}: ${originalContent.length} -> ${compressedContent.length} bytes (${compressionRatio}%), base64: ${finalContent.length} bytes`);
+
logger.info(`Compressing+base64 ${file.name}: ${originalContent.length} -> ${compressedContent.length} bytes (${compressionRatio}%), base64: ${finalContent.length} bytes`);
+
} else {
+
// Audio files: just compress, no base64
+
finalContent = compressedContent;
+
const compressionRatio = (compressedContent.length / originalContent.length * 100).toFixed(1);
+
console.log(`Compressing ${file.name}: ${originalContent.length} -> ${compressedContent.length} bytes (${compressionRatio}%)`);
+
logger.info(`Compressing ${file.name}: ${originalContent.length} -> ${compressedContent.length} bytes (${compressionRatio}%)`);
+
}
+
} else {
+
// Binary files: upload directly
+
finalContent = originalContent;
+
console.log(`Uploading ${file.name} directly: ${originalContent.length} bytes (no compression)`);
+
logger.info(`Uploading ${file.name} directly: ${originalContent.length} bytes (binary)`);
+
}
uploadedFiles.push({
name: file.name,
-
content: base64Content,
+
content: finalContent,
mimeType: originalMimeType,
-
size: base64Content.length,
-
compressed: true,
+
size: finalContent.length,
+
compressed,
+
base64Encoded,
originalMimeType
});
}
···
console.log('Starting blob upload/reuse phase...');
updateJobProgress(jobId, { phase: 'uploading' });
-
// Helper function to upload blob with exponential backoff retry
+
// Helper function to upload blob with exponential backoff retry and timeout
const uploadBlobWithRetry = async (
agent: Agent,
content: Buffer,
mimeType: string,
fileName: string,
-
maxRetries = 3
+
maxRetries = 5
) => {
for (let attempt = 0; attempt < maxRetries; attempt++) {
try {
-
return await agent.com.atproto.repo.uploadBlob(content, { encoding: mimeType });
+
console.log(`[File Upload] Starting upload attempt ${attempt + 1}/${maxRetries} for ${fileName} (${content.length} bytes, ${mimeType})`);
+
+
// Add timeout wrapper to prevent hanging requests
+
const uploadPromise = agent.com.atproto.repo.uploadBlob(content, { encoding: mimeType });
+
const timeoutMs = 300000; // 5 minute timeout per upload
+
+
const timeoutPromise = new Promise((_, reject) => {
+
setTimeout(() => reject(new Error('Upload timeout')), timeoutMs);
+
});
+
+
const result = await Promise.race([uploadPromise, timeoutPromise]) as any;
+
console.log(`[File Upload] โœ… Successfully uploaded ${fileName} on attempt ${attempt + 1}`);
+
return result;
} catch (error: any) {
const isDPoPNonceError =
error?.message?.toLowerCase().includes('nonce') ||
error?.message?.toLowerCase().includes('dpop') ||
error?.status === 409;
-
if (isDPoPNonceError && attempt < maxRetries - 1) {
-
const backoffMs = 100 * Math.pow(2, attempt); // 100ms, 200ms, 400ms
-
logger.info(`[File Upload] ๐Ÿ”„ DPoP nonce conflict for ${fileName}, retrying in ${backoffMs}ms (attempt ${attempt + 1}/${maxRetries})`);
+
const isTimeout = error?.message === 'Upload timeout';
+
const isRateLimited = error?.status === 429 || error?.message?.toLowerCase().includes('rate');
+
+
// Retry on DPoP nonce conflicts, timeouts, or rate limits
+
if ((isDPoPNonceError || isTimeout || isRateLimited) && attempt < maxRetries - 1) {
+
let backoffMs: number;
+
if (isRateLimited) {
+
backoffMs = 2000 * Math.pow(2, attempt); // 2s, 4s, 8s, 16s for rate limits
+
} else if (isTimeout) {
+
backoffMs = 1000 * Math.pow(2, attempt); // 1s, 2s, 4s, 8s for timeouts
+
} else {
+
backoffMs = 100 * Math.pow(2, attempt); // 100ms, 200ms, 400ms for DPoP
+
}
+
+
const reason = isDPoPNonceError ? 'DPoP nonce conflict' : isTimeout ? 'timeout' : 'rate limit';
+
logger.info(`[File Upload] ๐Ÿ”„ ${reason} for ${fileName}, retrying in ${backoffMs}ms (attempt ${attempt + 1}/${maxRetries})`);
+
console.log(`[File Upload] ๐Ÿ”„ ${reason} for ${fileName}, retrying in ${backoffMs}ms`);
await new Promise(resolve => setTimeout(resolve, backoffMs));
continue;
}
+
+
// Log detailed error information before throwing
+
logger.error(`[File Upload] โŒ Upload failed for ${fileName} (size: ${content.length} bytes, mimeType: ${mimeType}, attempt: ${attempt + 1}/${maxRetries})`, {
+
error: error?.error || error?.message || 'Unknown error',
+
status: error?.status,
+
headers: error?.headers,
+
success: error?.success
+
});
+
console.error(`[File Upload] โŒ Upload failed for ${fileName}:`, {
+
error: error?.error || error?.message || 'Unknown error',
+
status: error?.status,
+
size: content.length,
+
mimeType,
+
attempt: attempt + 1
+
});
throw error;
}
}
···
};
// Use sliding window concurrency for maximum throughput
-
const CONCURRENCY_LIMIT = 50; // Maximum concurrent uploads with retry logic
+
const CONCURRENCY_LIMIT = 20; // Maximum concurrent uploads
const uploadedBlobs: Array<{
result: FileUploadResult;
filePath: string;
···
returnedMimeType: string;
reused: boolean;
}> = [];
+
const failedFiles: Array<{
+
name: string;
+
index: number;
+
error: string;
+
size: number;
+
}> = [];
// Process file with sliding window concurrency
const processFile = async (file: UploadedFile, index: number) => {
···
if (existingBlob && existingBlob.cid === fileCID) {
logger.info(`[File Upload] โ™ป๏ธ Reused: ${file.name} (unchanged, CID: ${fileCID})`);
-
updateJobProgress(jobId, { filesReused: (getUploadJob(jobId)?.progress.filesReused || 0) + 1 });
+
updateJobProgress(jobId, {
+
filesReused: (getUploadJob(jobId)?.progress.filesReused || 0) + 1
+
});
return {
result: {
···
...(file.compressed && {
encoding: 'gzip' as const,
mimeType: file.originalMimeType || file.mimeType,
-
base64: true
+
base64: file.base64Encoded || false
})
},
filePath: file.name,
···
);
const returnedBlobRef = uploadResult.data.blob;
-
updateJobProgress(jobId, { filesUploaded: (getUploadJob(jobId)?.progress.filesUploaded || 0) + 1 });
+
updateJobProgress(jobId, {
+
filesUploaded: (getUploadJob(jobId)?.progress.filesUploaded || 0) + 1
+
});
logger.info(`[File Upload] โœ… Uploaded: ${file.name} (CID: ${fileCID})`);
return {
···
...(file.compressed && {
encoding: 'gzip' as const,
mimeType: file.originalMimeType || file.mimeType,
-
base64: true
+
base64: file.base64Encoded || false
})
},
filePath: file.name,
···
reused: false
};
} catch (uploadError) {
-
logger.error('Upload failed for file', uploadError);
-
throw uploadError;
+
const fileName = file?.name || 'unknown';
+
const fileSize = file?.size || 0;
+
const errorMessage = uploadError instanceof Error ? uploadError.message : 'Unknown error';
+
const errorDetails = {
+
fileName,
+
fileSize,
+
index,
+
error: errorMessage,
+
stack: uploadError instanceof Error ? uploadError.stack : undefined
+
};
+
logger.error(`Upload failed for file: ${fileName} (${fileSize} bytes) at index ${index}`, errorDetails);
+
console.error(`Upload failed for file: ${fileName} (${fileSize} bytes) at index ${index}`, errorDetails);
+
+
// Track failed file but don't throw - continue with other files
+
failedFiles.push({
+
name: fileName,
+
index,
+
error: errorMessage,
+
size: fileSize
+
});
+
+
return null; // Return null to indicate failure
}
};
···
const processWithConcurrency = async () => {
const results: any[] = [];
let fileIndex = 0;
-
const executing = new Set<Promise<void>>();
+
const executing = new Map<Promise<void>, { index: number; name: string }>();
for (const file of validUploadedFiles) {
const currentIndex = fileIndex++;
···
const promise = processFile(file, currentIndex)
.then(result => {
results[currentIndex] = result;
+
console.log(`[Concurrency] File ${currentIndex} (${file.name}) completed successfully`);
})
.catch(error => {
-
logger.error(`Failed to process file at index ${currentIndex}`, error);
-
throw error; // Re-throw to fail the entire upload
+
// This shouldn't happen since processFile catches errors, but just in case
+
logger.error(`Unexpected error processing file at index ${currentIndex}`, error);
+
console.error(`[Concurrency] File ${currentIndex} (${file.name}) had unexpected error:`, error);
+
results[currentIndex] = null;
})
.finally(() => {
executing.delete(promise);
+
const remaining = Array.from(executing.values()).map(f => `${f.index}:${f.name}`);
+
console.log(`[Concurrency] File ${currentIndex} (${file.name}) removed. Remaining ${executing.size}: [${remaining.join(', ')}]`);
});
-
executing.add(promise);
+
executing.set(promise, { index: currentIndex, name: file.name });
+
const current = Array.from(executing.values()).map(f => `${f.index}:${f.name}`);
+
console.log(`[Concurrency] Added file ${currentIndex} (${file.name}). Total ${executing.size}: [${current.join(', ')}]`);
if (executing.size >= CONCURRENCY_LIMIT) {
-
await Promise.race(executing);
+
console.log(`[Concurrency] Hit limit (${CONCURRENCY_LIMIT}), waiting for one to complete...`);
+
await Promise.race(executing.keys());
+
console.log(`[Concurrency] One completed, continuing. Remaining: ${executing.size}`);
}
}
// Wait for remaining uploads
-
await Promise.all(executing);
-
return results.filter(r => r !== undefined); // Filter out any undefined entries
+
const remaining = Array.from(executing.values()).map(f => `${f.index}:${f.name}`);
+
console.log(`[Concurrency] Waiting for ${executing.size} remaining uploads: [${remaining.join(', ')}]`);
+
await Promise.all(executing.keys());
+
console.log(`[Concurrency] All uploads complete!`);
+
return results.filter(r => r !== undefined && r !== null); // Filter out null (failed) and undefined entries
};
const allResults = await processWithConcurrency();
···
const currentReused = uploadedBlobs.filter(b => b.reused).length;
const currentUploaded = uploadedBlobs.filter(b => !b.reused).length;
-
logger.info(`[File Upload] ๐ŸŽ‰ Upload complete โ†’ ${uploadedBlobs.length}/${validUploadedFiles.length} files (${currentUploaded} uploaded, ${currentReused} reused)`);
+
const successfulCount = uploadedBlobs.length;
+
const failedCount = failedFiles.length;
+
+
logger.info(`[File Upload] ๐ŸŽ‰ Upload complete โ†’ ${successfulCount}/${validUploadedFiles.length} files succeeded (${currentUploaded} uploaded, ${currentReused} reused), ${failedCount} failed`);
+
+
if (failedCount > 0) {
+
logger.warn(`[File Upload] โš ๏ธ Failed files:`, failedFiles);
+
console.warn(`[File Upload] โš ๏ธ ${failedCount} files failed to upload:`, failedFiles.map(f => f.name).join(', '));
+
}
const reusedCount = uploadedBlobs.filter(b => b.reused).length;
const uploadedCount = uploadedBlobs.filter(b => !b.reused).length;
-
logger.info(`[File Upload] ๐ŸŽ‰ Upload phase complete! Total: ${uploadedBlobs.length} files (${uploadedCount} uploaded, ${reusedCount} reused)`);
+
logger.info(`[File Upload] ๐ŸŽ‰ Upload phase complete! Total: ${successfulCount} files (${uploadedCount} uploaded, ${reusedCount} reused)`);
const uploadResults: FileUploadResult[] = uploadedBlobs.map(blob => blob.result);
const filePaths: string[] = uploadedBlobs.map(blob => blob.filePath);
···
fileCount,
siteName,
skippedFiles,
-
uploadedCount: validUploadedFiles.length
+
failedFiles,
+
uploadedCount: validUploadedFiles.length - failedFiles.length,
+
hasFailures: failedFiles.length > 0
});
console.log('=== UPLOAD FILES COMPLETE ===');