From 9a8033813d927f3db006be4ffd5be1216c781b0b Mon Sep 17 00:00:00 2001 From: "@nekomimi.pet" Date: Wed, 12 Nov 2025 17:28:10 -0500 Subject: [PATCH 1/6] init support for redirects file --- README.md | 32 +- hosting-service/EXAMPLE.md | 123 ------- hosting-service/example-_redirects | 134 +++++++ hosting-service/src/lib/redirects.test.ts | 215 +++++++++++ hosting-service/src/lib/redirects.ts | 413 ++++++++++++++++++++++ hosting-service/src/server.ts | 174 ++++++++- 6 files changed, 961 insertions(+), 130 deletions(-) delete mode 100644 hosting-service/EXAMPLE.md create mode 100644 hosting-service/example-_redirects create mode 100644 hosting-service/src/lib/redirects.test.ts create mode 100644 hosting-service/src/lib/redirects.ts diff --git a/README.md b/README.md index 0271aa3..211f12d 100644 --- a/README.md +++ b/README.md @@ -50,10 +50,40 @@ cd cli cargo build ``` +## Features + +### URL Redirects and Rewrites + +The hosting service supports Netlify-style `_redirects` files for managing URLs. Place a `_redirects` file in your site root to enable: + +- **301/302 Redirects**: Permanent and temporary URL redirects +- **200 Rewrites**: Serve different content without changing the URL +- **404 Custom Pages**: Custom error pages for specific paths +- **Splats & Placeholders**: Dynamic path matching (`/blog/:year/:month/:day`, `/news/*`) +- **Query Parameter Matching**: Redirect based on URL parameters +- **Conditional Redirects**: Route by country, language, or cookie presence +- **Force Redirects**: Override existing files with redirects + +Example `_redirects`: +``` +# Single-page app routing (React, Vue, etc.) +/* /index.html 200 + +# Simple redirects +/home / +/old-blog/* /blog/:splat + +# API proxy +/api/* https://api.example.com/:splat 200 + +# Country-based routing +/ /us/ 302 Country=us +/ /uk/ 302 Country=gb +``` + ## Limits - Max file size: 100MB (PDS limit) -- Max site size: 300MB - Max files: 2000 ## Tech Stack diff --git a/hosting-service/EXAMPLE.md b/hosting-service/EXAMPLE.md deleted file mode 100644 index adf7cd9..0000000 --- a/hosting-service/EXAMPLE.md +++ /dev/null @@ -1,123 +0,0 @@ -# HTML Path Rewriting Example - -This document demonstrates how HTML path rewriting works when serving sites via the `/s/:identifier/:site/*` route. - -## Problem - -When you create a static site with absolute paths like `/style.css` or `/images/logo.png`, these paths work fine when served from the root domain. However, when served from a subdirectory like `/s/alice.bsky.social/mysite/`, these absolute paths break because they resolve to the server root instead of the site root. - -## Solution - -The hosting service automatically rewrites absolute paths in HTML files to work correctly in the subdirectory context. - -## Example - -**Original HTML file (index.html):** -```html - - - - - My Site - - - - - -
-  [remainder of the original listing garbled in extraction: a header with a logo
-   image (/images/logo.png), a "Welcome" heading, a hero image, a link to
-   https://example.com and a "#top" anchor]
-```
-
-**When accessed via `/s/alice.bsky.social/mysite/`, the HTML is rewritten to:**
-```html
-  [same listing garbled in extraction: identical HTML with every absolute path
-   prefixed by /s/alice.bsky.social/mysite/]
-```
-
-## What's Preserved
-
-Notice that:
-- ✅ Absolute paths are rewritten: `/style.css` → `/s/alice.bsky.social/mysite/style.css`
-- ✅ External URLs are preserved: `https://example.com` stays the same
-- ✅ Anchors are preserved: `#top` stays the same
-- ✅ The rewriting is safe and won't break your site
-
-## Supported Attributes
-
-The rewriter handles these HTML attributes:
-- `src` - images, scripts, iframes, videos, audio
-- `href` - links, stylesheets
-- `action` - forms
-- `data` - objects
-- `poster` - video posters
-- `srcset` - responsive images
-
-## Testing Your Site
-
-To test if your site works with path rewriting:
-
-1. Upload your site to your PDS as a `place.wisp.fs` record
-2. Access it via: `https://hosting.wisp.place/s/YOUR_HANDLE/SITE_NAME/`
-3. Check that all resources load correctly
-
-If you're using relative paths already (like `./style.css` or `../images/logo.png`), they'll work without any rewriting.
diff --git a/hosting-service/example-_redirects b/hosting-service/example-_redirects
new file mode 100644
index 0000000..901c201
--- /dev/null
+++ b/hosting-service/example-_redirects
@@ -0,0 +1,134 @@
+# Example _redirects file for Wisp hosting
+# Place this file in the root directory of your site as "_redirects"
+# Lines starting with # are comments
+
+# ===================================
+# SIMPLE REDIRECTS
+# ===================================
+
+# Redirect home page
+# /home /
+
+# Redirect old URLs to new ones
+# /old-blog /blog
+# /about-us /about
+
+# ===================================
+# SPLAT REDIRECTS (WILDCARDS)
+# ===================================
+
+# Redirect entire directories
+# /news/* /blog/:splat
+# /old-site/* /new-site/:splat
+
+# ===================================
+# PLACEHOLDER REDIRECTS
+# ===================================
+
+# Restructure blog URLs
+# /blog/:year/:month/:day/:slug /posts/:year-:month-:day/:slug
+
+# Capture multiple parameters
+# /products/:category/:id /shop/:category/item/:id
+
+# ===================================
+# STATUS CODES
+# ===================================
+
+# Permanent redirect (301) - default if not specified
+# /permanent-move /new-location 301
+
+# Temporary redirect (302)
+# /temp-redirect /temp-location 302
+
+# Rewrite (200) - serves different content, URL stays the same
+# /api/* /functions/:splat 200
+
+# Custom 404 page
+# /shop/* /shop-closed.html 404
+
+# ===================================
+# FORCE REDIRECTS
+# ===================================
+
+# Force redirect even if file exists (note the ! after status code)
+# /override-file /other-file.html 200!
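+#
+# Another illustrative example (not in the original file): force a permanent
+# redirect even when /old-home.html exists in the deployed site
+# /old-home.html / 301!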
+ +# =================================== +# CONDITIONAL REDIRECTS +# =================================== + +# Country-based redirects (ISO 3166-1 alpha-2 codes) +# / /us/ 302 Country=us +# / /uk/ 302 Country=gb +# / /anz/ 302 Country=au,nz + +# Language-based redirects +# /products /en/products 301 Language=en +# /products /de/products 301 Language=de +# /products /fr/products 301 Language=fr + +# Cookie-based redirects (checks if cookie exists) +# /* /legacy/:splat 200 Cookie=is_legacy + +# =================================== +# QUERY PARAMETERS +# =================================== + +# Match specific query parameters +# /store id=:id /blog/:id 301 + +# Multiple parameters +# /search q=:query category=:cat /find/:cat/:query 301 + +# =================================== +# DOMAIN-LEVEL REDIRECTS +# =================================== + +# Redirect to different domain (must include protocol) +# /external https://example.com/path + +# Redirect entire subdomain +# http://blog.example.com/* https://example.com/blog/:splat 301! +# https://blog.example.com/* https://example.com/blog/:splat 301! + +# =================================== +# COMMON PATTERNS +# =================================== + +# Remove .html extensions +# /page.html /page + +# Add trailing slash +# /about /about/ + +# Single-page app fallback (serve index.html for all paths) +# /* /index.html 200 + +# API proxy +# /api/* https://api.example.com/:splat 200 + +# =================================== +# CUSTOM ERROR PAGES +# =================================== + +# Language-specific 404 pages +# /en/* /en/404.html 404 +# /de/* /de/404.html 404 + +# Section-specific 404 pages +# /shop/* /shop/not-found.html 404 +# /blog/* /blog/404.html 404 + +# =================================== +# NOTES +# =================================== +# +# - Rules are processed in order (first match wins) +# - More specific rules should come before general ones +# - Splats (*) can only be used at the end of a path +# - Query parameters are automatically preserved for 200, 301, 302 +# - Trailing slashes are normalized (/ and no / are treated the same) +# - Default status code is 301 if not specified +# + diff --git a/hosting-service/src/lib/redirects.test.ts b/hosting-service/src/lib/redirects.test.ts new file mode 100644 index 0000000..f61d5a3 --- /dev/null +++ b/hosting-service/src/lib/redirects.test.ts @@ -0,0 +1,215 @@ +import { describe, it, expect } from 'bun:test' +import { parseRedirectsFile, matchRedirectRule } from './redirects'; + +describe('parseRedirectsFile', () => { + it('should parse simple redirects', () => { + const content = ` +# Comment line +/old-path /new-path +/home / 301 +`; + const rules = parseRedirectsFile(content); + expect(rules).toHaveLength(2); + expect(rules[0]).toMatchObject({ + from: '/old-path', + to: '/new-path', + status: 301, + force: false, + }); + expect(rules[1]).toMatchObject({ + from: '/home', + to: '/', + status: 301, + force: false, + }); + }); + + it('should parse redirects with different status codes', () => { + const content = ` +/temp-redirect /target 302 +/rewrite /content 200 +/not-found /404 404 +`; + const rules = parseRedirectsFile(content); + expect(rules).toHaveLength(3); + expect(rules[0]?.status).toBe(302); + expect(rules[1]?.status).toBe(200); + expect(rules[2]?.status).toBe(404); + }); + + it('should parse force redirects', () => { + const content = `/force-path /target 301!`; + const rules = parseRedirectsFile(content); + expect(rules[0]?.force).toBe(true); + expect(rules[0]?.status).toBe(301); + 
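+    // Illustrative note (not part of the original test): the `!` composes with
+    // any status, e.g. `/spa/* /index.html 200!` parses to { status: 200, force: true }.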
}); + + it('should parse splat redirects', () => { + const content = `/news/* /blog/:splat`; + const rules = parseRedirectsFile(content); + expect(rules[0]?.from).toBe('/news/*'); + expect(rules[0]?.to).toBe('/blog/:splat'); + }); + + it('should parse placeholder redirects', () => { + const content = `/blog/:year/:month/:day /posts/:year-:month-:day`; + const rules = parseRedirectsFile(content); + expect(rules[0]?.from).toBe('/blog/:year/:month/:day'); + expect(rules[0]?.to).toBe('/posts/:year-:month-:day'); + }); + + it('should parse country-based redirects', () => { + const content = `/ /anz 302 Country=au,nz`; + const rules = parseRedirectsFile(content); + expect(rules[0]?.conditions?.country).toEqual(['au', 'nz']); + }); + + it('should parse language-based redirects', () => { + const content = `/products /en/products 301 Language=en`; + const rules = parseRedirectsFile(content); + expect(rules[0]?.conditions?.language).toEqual(['en']); + }); + + it('should parse cookie-based redirects', () => { + const content = `/* /legacy/:splat 200 Cookie=is_legacy,my_cookie`; + const rules = parseRedirectsFile(content); + expect(rules[0]?.conditions?.cookie).toEqual(['is_legacy', 'my_cookie']); + }); +}); + +describe('matchRedirectRule', () => { + it('should match exact paths', () => { + const rules = parseRedirectsFile('/old-path /new-path'); + const match = matchRedirectRule('/old-path', rules); + expect(match).toBeTruthy(); + expect(match?.targetPath).toBe('/new-path'); + expect(match?.status).toBe(301); + }); + + it('should match paths with trailing slash', () => { + const rules = parseRedirectsFile('/old-path /new-path'); + const match = matchRedirectRule('/old-path/', rules); + expect(match).toBeTruthy(); + expect(match?.targetPath).toBe('/new-path'); + }); + + it('should match splat patterns', () => { + const rules = parseRedirectsFile('/news/* /blog/:splat'); + const match = matchRedirectRule('/news/2024/01/15/my-post', rules); + expect(match).toBeTruthy(); + expect(match?.targetPath).toBe('/blog/2024/01/15/my-post'); + }); + + it('should match placeholder patterns', () => { + const rules = parseRedirectsFile('/blog/:year/:month/:day /posts/:year-:month-:day'); + const match = matchRedirectRule('/blog/2024/01/15', rules); + expect(match).toBeTruthy(); + expect(match?.targetPath).toBe('/posts/2024-01-15'); + }); + + it('should preserve query strings for 301/302 redirects', () => { + const rules = parseRedirectsFile('/old /new 301'); + const match = matchRedirectRule('/old', rules, { + queryParams: { foo: 'bar', baz: 'qux' }, + }); + expect(match?.targetPath).toContain('?'); + expect(match?.targetPath).toContain('foo=bar'); + expect(match?.targetPath).toContain('baz=qux'); + }); + + it('should match based on query parameters', () => { + const rules = parseRedirectsFile('/store id=:id /blog/:id 301'); + const match = matchRedirectRule('/store', rules, { + queryParams: { id: 'my-post' }, + }); + expect(match).toBeTruthy(); + expect(match?.targetPath).toContain('/blog/my-post'); + }); + + it('should not match when query params are missing', () => { + const rules = parseRedirectsFile('/store id=:id /blog/:id 301'); + const match = matchRedirectRule('/store', rules, { + queryParams: {}, + }); + expect(match).toBeNull(); + }); + + it('should match based on country header', () => { + const rules = parseRedirectsFile('/ /aus 302 Country=au'); + const match = matchRedirectRule('/', rules, { + headers: { 'cf-ipcountry': 'AU' }, + }); + expect(match).toBeTruthy(); + 
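+    // 'AU' matches the rule's lowercase 'au': matchRedirectRule lowercases the
+    // cf-ipcountry header before comparing it against Country= condition values.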
expect(match?.targetPath).toBe('/aus'); + }); + + it('should not match wrong country', () => { + const rules = parseRedirectsFile('/ /aus 302 Country=au'); + const match = matchRedirectRule('/', rules, { + headers: { 'cf-ipcountry': 'US' }, + }); + expect(match).toBeNull(); + }); + + it('should match based on language header', () => { + const rules = parseRedirectsFile('/products /en/products 301 Language=en'); + const match = matchRedirectRule('/products', rules, { + headers: { 'accept-language': 'en-US,en;q=0.9' }, + }); + expect(match).toBeTruthy(); + expect(match?.targetPath).toBe('/en/products'); + }); + + it('should match based on cookie presence', () => { + const rules = parseRedirectsFile('/* /legacy/:splat 200 Cookie=is_legacy'); + const match = matchRedirectRule('/some-path', rules, { + cookies: { is_legacy: 'true' }, + }); + expect(match).toBeTruthy(); + expect(match?.targetPath).toBe('/legacy/some-path'); + }); + + it('should return first matching rule', () => { + const content = ` +/path /first +/path /second +`; + const rules = parseRedirectsFile(content); + const match = matchRedirectRule('/path', rules); + expect(match?.targetPath).toBe('/first'); + }); + + it('should match more specific rules before general ones', () => { + const content = ` +/jobs/customer-ninja /careers/support +/jobs/* /careers/:splat +`; + const rules = parseRedirectsFile(content); + + const match1 = matchRedirectRule('/jobs/customer-ninja', rules); + expect(match1?.targetPath).toBe('/careers/support'); + + const match2 = matchRedirectRule('/jobs/developer', rules); + expect(match2?.targetPath).toBe('/careers/developer'); + }); + + it('should handle SPA routing pattern', () => { + const rules = parseRedirectsFile('/* /index.html 200'); + + // Should match any path + const match1 = matchRedirectRule('/about', rules); + expect(match1).toBeTruthy(); + expect(match1?.targetPath).toBe('/index.html'); + expect(match1?.status).toBe(200); + + const match2 = matchRedirectRule('/users/123/profile', rules); + expect(match2).toBeTruthy(); + expect(match2?.targetPath).toBe('/index.html'); + expect(match2?.status).toBe(200); + + const match3 = matchRedirectRule('/', rules); + expect(match3).toBeTruthy(); + expect(match3?.targetPath).toBe('/index.html'); + }); +}); + diff --git a/hosting-service/src/lib/redirects.ts b/hosting-service/src/lib/redirects.ts new file mode 100644 index 0000000..f3c5273 --- /dev/null +++ b/hosting-service/src/lib/redirects.ts @@ -0,0 +1,413 @@ +import { readFile } from 'fs/promises'; +import { existsSync } from 'fs'; + +export interface RedirectRule { + from: string; + to: string; + status: number; + force: boolean; + conditions?: { + country?: string[]; + language?: string[]; + role?: string[]; + cookie?: string[]; + }; + // For pattern matching + fromPattern?: RegExp; + fromParams?: string[]; // Named parameters from the pattern + queryParams?: Record; // Expected query parameters +} + +export interface RedirectMatch { + rule: RedirectRule; + targetPath: string; + status: number; +} + +/** + * Parse a _redirects file into an array of redirect rules + */ +export function parseRedirectsFile(content: string): RedirectRule[] { + const lines = content.split('\n'); + const rules: RedirectRule[] = []; + + for (let lineNum = 0; lineNum < lines.length; lineNum++) { + const lineRaw = lines[lineNum]; + if (!lineRaw) continue; + + const line = lineRaw.trim(); + + // Skip empty lines and comments + if (!line || line.startsWith('#')) { + continue; + } + + try { + const rule = parseRedirectLine(line); 
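+      // parseRedirectLine returns null for lines it can't understand;
+      // only rules that compiled to a usable path pattern are kept.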
+ if (rule && rule.fromPattern) { + rules.push(rule); + } + } catch (err) { + console.warn(`Failed to parse redirect rule on line ${lineNum + 1}: ${line}`, err); + } + } + + return rules; +} + +/** + * Parse a single redirect rule line + * Format: /from [query_params] /to [status] [conditions] + */ +function parseRedirectLine(line: string): RedirectRule | null { + // Split by whitespace, but respect quoted strings (though not commonly used) + const parts = line.split(/\s+/); + + if (parts.length < 2) { + return null; + } + + let idx = 0; + const from = parts[idx++]; + + if (!from) { + return null; + } + + let status = 301; // Default status + let force = false; + const conditions: NonNullable = {}; + const queryParams: Record = {}; + + // Parse query parameters that come before the destination path + // They look like: key=:value (and don't start with /) + while (idx < parts.length) { + const part = parts[idx]; + if (!part) { + idx++; + continue; + } + + // If it starts with / or http, it's the destination path + if (part.startsWith('/') || part.startsWith('http://') || part.startsWith('https://')) { + break; + } + + // If it contains = and comes before the destination, it's a query param + if (part.includes('=')) { + const splitIndex = part.indexOf('='); + const key = part.slice(0, splitIndex); + const value = part.slice(splitIndex + 1); + + if (key && value) { + queryParams[key] = value; + } + idx++; + } else { + // Not a query param, must be destination or something else + break; + } + } + + // Next part should be the destination + if (idx >= parts.length) { + return null; + } + + const to = parts[idx++]; + if (!to) { + return null; + } + + // Parse remaining parts for status code and conditions + for (let i = idx; i < parts.length; i++) { + const part = parts[i]; + + if (!part) continue; + + // Check for status code (with optional ! for force) + if (/^\d+!?$/.test(part)) { + if (part.endsWith('!')) { + force = true; + status = parseInt(part.slice(0, -1)); + } else { + status = parseInt(part); + } + continue; + } + + // Check for condition parameters (Country=, Language=, Role=, Cookie=) + if (part.includes('=')) { + const splitIndex = part.indexOf('='); + const key = part.slice(0, splitIndex); + const value = part.slice(splitIndex + 1); + + if (!key || !value) continue; + + const keyLower = key.toLowerCase(); + + if (keyLower === 'country') { + conditions.country = value.split(',').map(v => v.trim().toLowerCase()); + } else if (keyLower === 'language') { + conditions.language = value.split(',').map(v => v.trim().toLowerCase()); + } else if (keyLower === 'role') { + conditions.role = value.split(',').map(v => v.trim()); + } else if (keyLower === 'cookie') { + conditions.cookie = value.split(',').map(v => v.trim().toLowerCase()); + } + } + } + + // Parse the 'from' pattern + const { pattern, params } = convertPathToRegex(from); + + return { + from, + to, + status, + force, + conditions: Object.keys(conditions).length > 0 ? conditions : undefined, + queryParams: Object.keys(queryParams).length > 0 ? 
queryParams : undefined, + fromPattern: pattern, + fromParams: params, + }; +} + +/** + * Convert a path pattern with placeholders and splats to a regex + * Examples: + * /blog/:year/:month/:day -> captures year, month, day + * /news/* -> captures splat + */ +function convertPathToRegex(pattern: string): { pattern: RegExp; params: string[] } { + const params: string[] = []; + let regexStr = '^'; + + // Split by query string if present + const pathPart = pattern.split('?')[0] || pattern; + + // Escape special regex characters except * and : + let escaped = pathPart.replace(/[.+^${}()|[\]\\]/g, '\\$&'); + + // Replace :param with named capture groups + escaped = escaped.replace(/:([a-zA-Z_][a-zA-Z0-9_]*)/g, (match, paramName) => { + params.push(paramName); + // Match path segment (everything except / and ?) + return '([^/?]+)'; + }); + + // Replace * with splat capture (matches everything including /) + if (escaped.includes('*')) { + escaped = escaped.replace(/\*/g, '(.*)'); + params.push('splat'); + } + + regexStr += escaped; + + // Make trailing slash optional + if (!regexStr.endsWith('.*')) { + regexStr += '/?'; + } + + regexStr += '$'; + + return { + pattern: new RegExp(regexStr), + params, + }; +} + +/** + * Match a request path against redirect rules + */ +export function matchRedirectRule( + requestPath: string, + rules: RedirectRule[], + context?: { + queryParams?: Record; + headers?: Record; + cookies?: Record; + } +): RedirectMatch | null { + // Normalize path: ensure leading slash, remove trailing slash (except for root) + let normalizedPath = requestPath.startsWith('/') ? requestPath : `/${requestPath}`; + + for (const rule of rules) { + // Check query parameter conditions first (if any) + if (rule.queryParams) { + // If rule requires query params but none provided, skip this rule + if (!context?.queryParams) { + continue; + } + + const queryMatches = Object.entries(rule.queryParams).every(([key, value]) => { + const actualValue = context.queryParams?.[key]; + return actualValue !== undefined; + }); + + if (!queryMatches) { + continue; + } + } + + // Check conditional redirects (country, language, role, cookie) + if (rule.conditions) { + if (rule.conditions.country && context?.headers) { + const cfCountry = context.headers['cf-ipcountry']; + const xCountry = context.headers['x-country']; + const country = (cfCountry?.toLowerCase() || xCountry?.toLowerCase()); + if (!country || !rule.conditions.country.includes(country)) { + continue; + } + } + + if (rule.conditions.language && context?.headers) { + const acceptLang = context.headers['accept-language']; + if (!acceptLang) { + continue; + } + // Parse accept-language header (simplified) + const langs = acceptLang.split(',').map(l => { + const langPart = l.split(';')[0]; + return langPart ? 
langPart.trim().toLowerCase() : ''; + }).filter(l => l !== ''); + const hasMatch = rule.conditions.language.some(lang => + langs.some(l => l === lang || l.startsWith(lang + '-')) + ); + if (!hasMatch) { + continue; + } + } + + if (rule.conditions.cookie && context?.cookies) { + const hasCookie = rule.conditions.cookie.some(cookieName => + context.cookies && cookieName in context.cookies + ); + if (!hasCookie) { + continue; + } + } + + // Role-based redirects would need JWT verification - skip for now + if (rule.conditions.role) { + continue; + } + } + + // Match the path pattern + const match = rule.fromPattern?.exec(normalizedPath); + if (!match) { + continue; + } + + // Build the target path by replacing placeholders + let targetPath = rule.to; + + // Replace captured parameters + if (rule.fromParams && match.length > 1) { + for (let i = 0; i < rule.fromParams.length; i++) { + const paramName = rule.fromParams[i]; + const paramValue = match[i + 1]; + + if (!paramName || !paramValue) continue; + + if (paramName === 'splat') { + targetPath = targetPath.replace(':splat', paramValue); + } else { + targetPath = targetPath.replace(`:${paramName}`, paramValue); + } + } + } + + // Handle query parameter replacements + if (rule.queryParams && context?.queryParams) { + for (const [key, placeholder] of Object.entries(rule.queryParams)) { + const actualValue = context.queryParams[key]; + if (actualValue && placeholder && placeholder.startsWith(':')) { + const paramName = placeholder.slice(1); + if (paramName) { + targetPath = targetPath.replace(`:${paramName}`, actualValue); + } + } + } + } + + // Preserve query string for 200, 301, 302 redirects (unless target already has one) + if ([200, 301, 302].includes(rule.status) && context?.queryParams && !targetPath.includes('?')) { + const queryString = Object.entries(context.queryParams) + .map(([k, v]) => `${encodeURIComponent(k)}=${encodeURIComponent(v)}`) + .join('&'); + if (queryString) { + targetPath += `?${queryString}`; + } + } + + return { + rule, + targetPath, + status: rule.status, + }; + } + + return null; +} + +/** + * Load redirect rules from a cached site + */ +export async function loadRedirectRules(did: string, rkey: string): Promise { + const CACHE_DIR = process.env.CACHE_DIR || './cache/sites'; + const redirectsPath = `${CACHE_DIR}/${did}/${rkey}/_redirects`; + + if (!existsSync(redirectsPath)) { + return []; + } + + try { + const content = await readFile(redirectsPath, 'utf-8'); + return parseRedirectsFile(content); + } catch (err) { + console.error('Failed to load _redirects file', err); + return []; + } +} + +/** + * Parse cookies from Cookie header + */ +export function parseCookies(cookieHeader?: string): Record { + if (!cookieHeader) return {}; + + const cookies: Record = {}; + const parts = cookieHeader.split(';'); + + for (const part of parts) { + const [key, ...valueParts] = part.split('='); + if (key && valueParts.length > 0) { + cookies[key.trim()] = valueParts.join('=').trim(); + } + } + + return cookies; +} + +/** + * Parse query string into object + */ +export function parseQueryString(url: string): Record { + const queryStart = url.indexOf('?'); + if (queryStart === -1) return {}; + + const queryString = url.slice(queryStart + 1); + const params: Record = {}; + + for (const pair of queryString.split('&')) { + const [key, value] = pair.split('='); + if (key) { + params[decodeURIComponent(key)] = value ? 
decodeURIComponent(value) : ''; + } + } + + return params; +} + diff --git a/hosting-service/src/server.ts b/hosting-service/src/server.ts index 45971c1..a76a0c8 100644 --- a/hosting-service/src/server.ts +++ b/hosting-service/src/server.ts @@ -7,6 +7,7 @@ import { readFile, access } from 'fs/promises'; import { lookup } from 'mime-types'; import { logger, observabilityMiddleware, observabilityErrorHandler, logCollector, errorTracker, metricsCollector } from './lib/observability'; import { fileCache, metadataCache, rewrittenHtmlCache, getCacheKey, type FileMetadata } from './lib/cache'; +import { loadRedirectRules, matchRedirectRule, parseCookies, parseQueryString, type RedirectRule } from './lib/redirects'; const BASE_HOST = process.env.BASE_HOST || 'wisp.place'; @@ -35,8 +36,85 @@ async function fileExists(path: string): Promise { } } +// Cache for redirect rules (per site) +const redirectRulesCache = new Map(); + +/** + * Clear redirect rules cache for a specific site + * Should be called when a site is updated/recached + */ +export function clearRedirectRulesCache(did: string, rkey: string) { + const cacheKey = `${did}:${rkey}`; + redirectRulesCache.delete(cacheKey); +} + // Helper to serve files from cache -async function serveFromCache(did: string, rkey: string, filePath: string) { +async function serveFromCache( + did: string, + rkey: string, + filePath: string, + fullUrl?: string, + headers?: Record +) { + // Check for redirect rules first + const redirectCacheKey = `${did}:${rkey}`; + let redirectRules = redirectRulesCache.get(redirectCacheKey); + + if (redirectRules === undefined) { + // Load rules for the first time + redirectRules = await loadRedirectRules(did, rkey); + redirectRulesCache.set(redirectCacheKey, redirectRules); + } + + // Apply redirect rules if any exist + if (redirectRules.length > 0) { + const requestPath = '/' + (filePath || ''); + const queryParams = fullUrl ? parseQueryString(fullUrl) : {}; + const cookies = parseCookies(headers?.['cookie']); + + const redirectMatch = matchRedirectRule(requestPath, redirectRules, { + queryParams, + headers, + cookies, + }); + + if (redirectMatch) { + const { targetPath, status } = redirectMatch; + + // Handle different status codes + if (status === 200) { + // Rewrite: serve different content but keep URL the same + // Remove leading slash for internal path resolution + const rewritePath = targetPath.startsWith('/') ? targetPath.slice(1) : targetPath; + return serveFileInternal(did, rkey, rewritePath); + } else if (status === 301 || status === 302) { + // External redirect: change the URL + return new Response(null, { + status, + headers: { + 'Location': targetPath, + 'Cache-Control': status === 301 ? 'public, max-age=31536000' : 'public, max-age=0', + }, + }); + } else if (status === 404) { + // Custom 404 page + const custom404Path = targetPath.startsWith('/') ? 
targetPath.slice(1) : targetPath; + const response = await serveFileInternal(did, rkey, custom404Path); + // Override status to 404 + return new Response(response.body, { + status: 404, + headers: response.headers, + }); + } + } + } + + // No redirect matched, serve normally + return serveFileInternal(did, rkey, filePath); +} + +// Internal function to serve a file (used by both normal serving and rewrites) +async function serveFileInternal(did: string, rkey: string, filePath: string) { // Default to index.html if path is empty or ends with / let requestPath = filePath || 'index.html'; if (requestPath.endsWith('/')) { @@ -138,8 +216,74 @@ async function serveFromCacheWithRewrite( did: string, rkey: string, filePath: string, - basePath: string + basePath: string, + fullUrl?: string, + headers?: Record ) { + // Check for redirect rules first + const redirectCacheKey = `${did}:${rkey}`; + let redirectRules = redirectRulesCache.get(redirectCacheKey); + + if (redirectRules === undefined) { + // Load rules for the first time + redirectRules = await loadRedirectRules(did, rkey); + redirectRulesCache.set(redirectCacheKey, redirectRules); + } + + // Apply redirect rules if any exist + if (redirectRules.length > 0) { + const requestPath = '/' + (filePath || ''); + const queryParams = fullUrl ? parseQueryString(fullUrl) : {}; + const cookies = parseCookies(headers?.['cookie']); + + const redirectMatch = matchRedirectRule(requestPath, redirectRules, { + queryParams, + headers, + cookies, + }); + + if (redirectMatch) { + const { targetPath, status } = redirectMatch; + + // Handle different status codes + if (status === 200) { + // Rewrite: serve different content but keep URL the same + const rewritePath = targetPath.startsWith('/') ? targetPath.slice(1) : targetPath; + return serveFileInternalWithRewrite(did, rkey, rewritePath, basePath); + } else if (status === 301 || status === 302) { + // External redirect: change the URL + // For sites.wisp.place, we need to adjust the target path to include the base path + // unless it's an absolute URL + let redirectTarget = targetPath; + if (!targetPath.startsWith('http://') && !targetPath.startsWith('https://')) { + redirectTarget = basePath + (targetPath.startsWith('/') ? targetPath.slice(1) : targetPath); + } + return new Response(null, { + status, + headers: { + 'Location': redirectTarget, + 'Cache-Control': status === 301 ? 'public, max-age=31536000' : 'public, max-age=0', + }, + }); + } else if (status === 404) { + // Custom 404 page + const custom404Path = targetPath.startsWith('/') ? 
targetPath.slice(1) : targetPath; + const response = await serveFileInternalWithRewrite(did, rkey, custom404Path, basePath); + // Override status to 404 + return new Response(response.body, { + status: 404, + headers: response.headers, + }); + } + } + } + + // No redirect matched, serve normally + return serveFileInternalWithRewrite(did, rkey, filePath, basePath); +} + +// Internal function to serve a file with rewriting +async function serveFileInternalWithRewrite(did: string, rkey: string, filePath: string, basePath: string) { // Default to index.html if path is empty or ends with / let requestPath = filePath || 'index.html'; if (requestPath.endsWith('/')) { @@ -317,6 +461,8 @@ async function ensureSiteCached(did: string, rkey: string): Promise { try { await downloadAndCacheSite(did, rkey, siteData.record, pdsEndpoint, siteData.cid); + // Clear redirect rules cache since the site was updated + clearRedirectRulesCache(did, rkey); logger.info('Site cached successfully', { did, rkey }); return true; } catch (err) { @@ -384,7 +530,11 @@ app.get('/*', async (c) => { // Serve with HTML path rewriting to handle absolute paths const basePath = `/${identifier}/${site}/`; - return serveFromCacheWithRewrite(did, site, filePath, basePath); + const headers: Record = {}; + c.req.raw.headers.forEach((value, key) => { + headers[key.toLowerCase()] = value; + }); + return serveFromCacheWithRewrite(did, site, filePath, basePath, c.req.url, headers); } // Check if this is a DNS hash subdomain @@ -420,7 +570,11 @@ app.get('/*', async (c) => { return c.text('Site not found', 404); } - return serveFromCache(customDomain.did, rkey, path); + const headers: Record = {}; + c.req.raw.headers.forEach((value, key) => { + headers[key.toLowerCase()] = value; + }); + return serveFromCache(customDomain.did, rkey, path, c.req.url, headers); } // Route 2: Registered subdomains - /*.wisp.place/* @@ -444,7 +598,11 @@ app.get('/*', async (c) => { return c.text('Site not found', 404); } - return serveFromCache(domainInfo.did, rkey, path); + const headers: Record = {}; + c.req.raw.headers.forEach((value, key) => { + headers[key.toLowerCase()] = value; + }); + return serveFromCache(domainInfo.did, rkey, path, c.req.url, headers); } // Route 1: Custom domains - /* @@ -467,7 +625,11 @@ app.get('/*', async (c) => { return c.text('Site not found', 404); } - return serveFromCache(customDomain.did, rkey, path); + const headers: Record = {}; + c.req.raw.headers.forEach((value, key) => { + headers[key.toLowerCase()] = value; + }); + return serveFromCache(customDomain.did, rkey, path, c.req.url, headers); }); // Internal observability endpoints (for admin panel) -- 2.50.1 (Apple Git-155) From f1f70b3b22ddf300959c8855fb721e139b9ec8a6 Mon Sep 17 00:00:00 2001 From: "@nekomimi.pet" Date: Wed, 12 Nov 2025 18:33:31 -0500 Subject: [PATCH 2/6] Add support for existing blob reuse in deployment process --- cli/.gitignore | 1 + cli/Cargo.lock | 3 + cli/Cargo.toml | 3 + cli/src/blob_map.rs | 92 +++++++++++++++++++++++++ cli/src/cid.rs | 66 ++++++++++++++++++ cli/src/main.rs | 159 +++++++++++++++++++++++++++++++++----------- 6 files changed, 286 insertions(+), 38 deletions(-) create mode 100644 cli/src/blob_map.rs create mode 100644 cli/src/cid.rs diff --git a/cli/.gitignore b/cli/.gitignore index fcd9e40..15fe010 100644 --- a/cli/.gitignore +++ b/cli/.gitignore @@ -1,3 +1,4 @@ +test/ .DS_STORE jacquard/ binaries/ diff --git a/cli/Cargo.lock b/cli/Cargo.lock index 4b0ba8b..a100cf6 100644 --- a/cli/Cargo.lock +++ b/cli/Cargo.lock @@ -4385,10 +4385,13 
@@ dependencies = [ "jacquard-oauth", "miette", "mime_guess", + "multibase", + "multihash", "reqwest", "rustversion", "serde", "serde_json", + "sha2", "shellexpand", "tokio", "walkdir", diff --git a/cli/Cargo.toml b/cli/Cargo.toml index 99493fb..6e0d1e2 100644 --- a/cli/Cargo.toml +++ b/cli/Cargo.toml @@ -30,3 +30,6 @@ walkdir = "2.5" mime_guess = "2.0" bytes = "1.10" futures = "0.3.31" +multihash = "0.19.3" +multibase = "0.9" +sha2 = "0.10" diff --git a/cli/src/blob_map.rs b/cli/src/blob_map.rs new file mode 100644 index 0000000..93c86bd --- /dev/null +++ b/cli/src/blob_map.rs @@ -0,0 +1,92 @@ +use jacquard_common::types::blob::BlobRef; +use jacquard_common::IntoStatic; +use std::collections::HashMap; + +use crate::place_wisp::fs::{Directory, EntryNode}; + +/// Extract blob information from a directory tree +/// Returns a map of file paths to their blob refs and CIDs +/// +/// This mirrors the TypeScript implementation in src/lib/wisp-utils.ts lines 275-302 +pub fn extract_blob_map( + directory: &Directory, +) -> HashMap, String)> { + extract_blob_map_recursive(directory, String::new()) +} + +fn extract_blob_map_recursive( + directory: &Directory, + current_path: String, +) -> HashMap, String)> { + let mut blob_map = HashMap::new(); + + for entry in &directory.entries { + let full_path = if current_path.is_empty() { + entry.name.to_string() + } else { + format!("{}/{}", current_path, entry.name) + }; + + match &entry.node { + EntryNode::File(file_node) => { + // Extract CID from blob ref + // BlobRef is an enum with Blob variant, which has a ref field (CidLink) + let blob_ref = &file_node.blob; + let cid_string = blob_ref.blob().r#ref.to_string(); + + // Store both normalized and full paths + // Normalize by removing base folder prefix (e.g., "cobblemon/index.html" -> "index.html") + let normalized_path = normalize_path(&full_path); + + blob_map.insert( + normalized_path.clone(), + (blob_ref.clone().into_static(), cid_string.clone()) + ); + + // Also store the full path for matching + if normalized_path != full_path { + blob_map.insert( + full_path, + (blob_ref.clone().into_static(), cid_string) + ); + } + } + EntryNode::Directory(subdir) => { + let sub_map = extract_blob_map_recursive(subdir, full_path); + blob_map.extend(sub_map); + } + EntryNode::Unknown(_) => { + // Skip unknown node types + } + } + } + + blob_map +} + +/// Normalize file path by removing base folder prefix +/// Example: "cobblemon/index.html" -> "index.html" +/// +/// Mirrors TypeScript implementation at src/routes/wisp.ts line 291 +pub fn normalize_path(path: &str) -> String { + // Remove base folder prefix (everything before first /) + if let Some(idx) = path.find('/') { + path[idx + 1..].to_string() + } else { + path.to_string() + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_normalize_path() { + assert_eq!(normalize_path("index.html"), "index.html"); + assert_eq!(normalize_path("cobblemon/index.html"), "index.html"); + assert_eq!(normalize_path("folder/subfolder/file.txt"), "subfolder/file.txt"); + assert_eq!(normalize_path("a/b/c/d.txt"), "b/c/d.txt"); + } +} + diff --git a/cli/src/cid.rs b/cli/src/cid.rs new file mode 100644 index 0000000..5190d30 --- /dev/null +++ b/cli/src/cid.rs @@ -0,0 +1,66 @@ +use jacquard_common::types::cid::IpldCid; +use sha2::{Digest, Sha256}; + +/// Compute CID (Content Identifier) for blob content +/// Uses the same algorithm as AT Protocol: CIDv1 with raw codec (0x55) and SHA-256 +/// +/// CRITICAL: This must be called on BASE64-ENCODED GZIPPED content, not 
just gzipped content +/// +/// Based on @atproto/common/src/ipld.ts sha256RawToCid implementation +pub fn compute_cid(content: &[u8]) -> String { + // Use node crypto to compute sha256 hash (same as AT Protocol) + let hash = Sha256::digest(content); + + // Create multihash (code 0x12 = sha2-256) + let multihash = multihash::Multihash::wrap(0x12, &hash) + .expect("SHA-256 hash should always fit in multihash"); + + // Create CIDv1 with raw codec (0x55) + let cid = IpldCid::new_v1(0x55, multihash); + + // Convert to base32 string representation + cid.to_string_of_base(multibase::Base::Base32Lower) + .unwrap_or_else(|_| cid.to_string()) +} + +#[cfg(test)] +mod tests { + use super::*; + use base64::Engine; + + #[test] + fn test_compute_cid() { + // Test with a simple string: "hello" + let content = b"hello"; + let cid = compute_cid(content); + + // CID should start with 'baf' for raw codec base32 + assert!(cid.starts_with("baf")); + } + + #[test] + fn test_compute_cid_base64_encoded() { + // Simulate the actual use case: gzipped then base64 encoded + use flate2::write::GzEncoder; + use flate2::Compression; + use std::io::Write; + + let original = b"hello world"; + + // Gzip compress + let mut encoder = GzEncoder::new(Vec::new(), Compression::default()); + encoder.write_all(original).unwrap(); + let gzipped = encoder.finish().unwrap(); + + // Base64 encode the gzipped data + let base64_bytes = base64::prelude::BASE64_STANDARD.encode(&gzipped).into_bytes(); + + // Compute CID on the base64 bytes + let cid = compute_cid(&base64_bytes); + + // Should be a valid CID + assert!(cid.starts_with("baf")); + assert!(cid.len() > 10); + } +} + diff --git a/cli/src/main.rs b/cli/src/main.rs index cfeb908..db0e7cf 100644 --- a/cli/src/main.rs +++ b/cli/src/main.rs @@ -1,9 +1,11 @@ mod builder_types; mod place_wisp; +mod cid; +mod blob_map; use clap::Parser; use jacquard::CowStr; -use jacquard::client::{Agent, FileAuthStore, AgentSessionExt, MemoryCredentialSession}; +use jacquard::client::{Agent, FileAuthStore, AgentSessionExt, MemoryCredentialSession, AgentSession}; use jacquard::oauth::client::OAuthClient; use jacquard::oauth::loopback::LoopbackConfig; use jacquard::prelude::IdentityResolver; @@ -11,6 +13,7 @@ use jacquard_common::types::string::{Datetime, Rkey, RecordKey}; use jacquard_common::types::blob::MimeType; use miette::IntoDiagnostic; use std::path::{Path, PathBuf}; +use std::collections::HashMap; use flate2::Compression; use flate2::write::GzEncoder; use std::io::Write; @@ -107,17 +110,56 @@ async fn deploy_site( println!("Deploying site '{}'...", site_name); - // Build directory tree - let root_dir = build_directory(agent, &path).await?; + // Try to fetch existing manifest for incremental updates + let existing_blob_map: HashMap, String)> = { + use jacquard_common::types::string::AtUri; + + // Get the DID for this session + let session_info = agent.session_info().await; + if let Some((did, _)) = session_info { + // Construct the AT URI for the record + let uri_string = format!("at://{}/place.wisp.fs/{}", did, site_name); + if let Ok(uri) = AtUri::new(&uri_string) { + match agent.get_record::(&uri).await { + Ok(response) => { + match response.into_output() { + Ok(record_output) => { + let existing_manifest = record_output.value; + let blob_map = blob_map::extract_blob_map(&existing_manifest.root); + println!("Found existing manifest with {} files, checking for changes...", blob_map.len()); + blob_map + } + Err(_) => { + println!("No existing manifest found, uploading all files..."); + 
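+                                    // With an empty map there are no blobs to reuse, so every file is uploaded.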
HashMap::new() + } + } + } + Err(_) => { + // Record doesn't exist yet - this is a new site + println!("No existing manifest found, uploading all files..."); + HashMap::new() + } + } + } else { + println!("No existing manifest found (invalid URI), uploading all files..."); + HashMap::new() + } + } else { + println!("No existing manifest found (could not get DID), uploading all files..."); + HashMap::new() + } + }; - // Count total files - let file_count = count_files(&root_dir); + // Build directory tree + let (root_dir, total_files, reused_count) = build_directory(agent, &path, &existing_blob_map).await?; + let uploaded_count = total_files - reused_count; // Create the Fs record let fs_record = Fs::new() .site(CowStr::from(site_name.clone())) .root(root_dir) - .file_count(file_count as i64) + .file_count(total_files as i64) .created_at(Datetime::now()) .build(); @@ -132,8 +174,9 @@ async fn deploy_site( .and_then(|s| s.split('/').next()) .ok_or_else(|| miette::miette!("Failed to parse DID from URI"))?; - println!("Deployed site '{}': {}", site_name, output.uri); - println!("Available at: https://sites.wisp.place/{}/{}", did, site_name); + println!("\n✓ Deployed site '{}': {}", site_name, output.uri); + println!(" Total files: {} ({} reused, {} uploaded)", total_files, reused_count, uploaded_count); + println!(" Available at: https://sites.wisp.place/{}/{}", did, site_name); Ok(()) } @@ -142,7 +185,8 @@ async fn deploy_site( fn build_directory<'a>( agent: &'a Agent, dir_path: &'a Path, -) -> std::pin::Pin>> + 'a>> + existing_blobs: &'a HashMap, String)>, +) -> std::pin::Pin, usize, usize)>> + 'a>> { Box::pin(async move { // Collect all directory entries first @@ -177,46 +221,66 @@ fn build_directory<'a>( } // Process files concurrently with a limit of 5 - let file_entries: Vec = stream::iter(file_tasks) + let file_results: Vec<(Entry<'static>, bool)> = stream::iter(file_tasks) .map(|(name, path)| async move { - let file_node = process_file(agent, &path).await?; - Ok::<_, miette::Report>(Entry::new() + let (file_node, reused) = process_file(agent, &path, &name, existing_blobs).await?; + let entry = Entry::new() .name(CowStr::from(name)) .node(EntryNode::File(Box::new(file_node))) - .build()) + .build(); + Ok::<_, miette::Report>((entry, reused)) }) .buffer_unordered(5) .collect::>() .await .into_iter() .collect::>>()?; + + let mut file_entries = Vec::new(); + let mut reused_count = 0; + let mut total_files = 0; + + for (entry, reused) in file_results { + file_entries.push(entry); + total_files += 1; + if reused { + reused_count += 1; + } + } // Process directories recursively (sequentially to avoid too much nesting) let mut dir_entries = Vec::new(); for (name, path) in dir_tasks { - let subdir = build_directory(agent, &path).await?; + let (subdir, sub_total, sub_reused) = build_directory(agent, &path, existing_blobs).await?; dir_entries.push(Entry::new() .name(CowStr::from(name)) .node(EntryNode::Directory(Box::new(subdir))) .build()); + total_files += sub_total; + reused_count += sub_reused; } // Combine file and directory entries let mut entries = file_entries; entries.extend(dir_entries); - Ok(Directory::new() + let directory = Directory::new() .r#type(CowStr::from("directory")) .entries(entries) - .build()) + .build(); + + Ok((directory, total_files, reused_count)) }) } -/// Process a single file: gzip -> base64 -> upload blob +/// Process a single file: gzip -> base64 -> upload blob (or reuse existing) +/// Returns (File, reused: bool) async fn process_file( agent: &Agent, file_path: 
&Path, -) -> miette::Result> + file_name: &str, + existing_blobs: &HashMap, String)>, +) -> miette::Result<(File<'static>, bool)> { // Read file let file_data = std::fs::read(file_path).into_diagnostic()?; @@ -234,30 +298,49 @@ async fn process_file( // Base64 encode the gzipped data let base64_bytes = base64::prelude::BASE64_STANDARD.encode(&gzipped).into_bytes(); - // Upload blob as octet-stream + // Compute CID for this file (CRITICAL: on base64-encoded gzipped content) + let file_cid = cid::compute_cid(&base64_bytes); + + // Normalize the file path for comparison + let normalized_path = blob_map::normalize_path(file_name); + + // Check if we have an existing blob with the same CID + let existing_blob = existing_blobs.get(&normalized_path) + .or_else(|| existing_blobs.get(file_name)); + + if let Some((existing_blob_ref, existing_cid)) = existing_blob { + if existing_cid == &file_cid { + // CIDs match - reuse existing blob + println!(" ✓ Reusing blob for {} (CID: {})", file_name, file_cid); + return Ok(( + File::new() + .r#type(CowStr::from("file")) + .blob(existing_blob_ref.clone()) + .encoding(CowStr::from("gzip")) + .mime_type(CowStr::from(original_mime)) + .base64(true) + .build(), + true + )); + } + } + + // File is new or changed - upload it + println!(" ↑ Uploading {} ({} bytes, CID: {})", file_name, base64_bytes.len(), file_cid); let blob = agent.upload_blob( base64_bytes, MimeType::new_static("application/octet-stream"), ).await?; - Ok(File::new() - .r#type(CowStr::from("file")) - .blob(blob) - .encoding(CowStr::from("gzip")) - .mime_type(CowStr::from(original_mime)) - .base64(true) - .build()) + Ok(( + File::new() + .r#type(CowStr::from("file")) + .blob(blob) + .encoding(CowStr::from("gzip")) + .mime_type(CowStr::from(original_mime)) + .base64(true) + .build(), + false + )) } -/// Count total files in a directory tree -fn count_files(dir: &Directory) -> usize { - let mut count = 0; - for entry in &dir.entries { - match &entry.node { - EntryNode::File(_) => count += 1, - EntryNode::Directory(subdir) => count += count_files(subdir), - _ => {} // Unknown variants - } - } - count -} -- 2.50.1 (Apple Git-155) From 56b1ef45ccab3ffc0112d4895e2c31a0954d0199 Mon Sep 17 00:00:00 2001 From: "@nekomimi.pet" Date: Wed, 12 Nov 2025 20:28:44 -0500 Subject: [PATCH 3/6] dont normalize paths when comparing CIDs --- cli/src/blob_map.rs | 23 ++++++++--------------- cli/src/main.rs | 37 ++++++++++++++++++++++++------------- 2 files changed, 32 insertions(+), 28 deletions(-) diff --git a/cli/src/blob_map.rs b/cli/src/blob_map.rs index 93c86bd..de5f211 100644 --- a/cli/src/blob_map.rs +++ b/cli/src/blob_map.rs @@ -34,22 +34,11 @@ fn extract_blob_map_recursive( let blob_ref = &file_node.blob; let cid_string = blob_ref.blob().r#ref.to_string(); - // Store both normalized and full paths - // Normalize by removing base folder prefix (e.g., "cobblemon/index.html" -> "index.html") - let normalized_path = normalize_path(&full_path); - + // Store with full path (mirrors TypeScript implementation) blob_map.insert( - normalized_path.clone(), - (blob_ref.clone().into_static(), cid_string.clone()) + full_path, + (blob_ref.clone().into_static(), cid_string) ); - - // Also store the full path for matching - if normalized_path != full_path { - blob_map.insert( - full_path, - (blob_ref.clone().into_static(), cid_string) - ); - } } EntryNode::Directory(subdir) => { let sub_map = extract_blob_map_recursive(subdir, full_path); @@ -67,7 +56,11 @@ fn extract_blob_map_recursive( /// Normalize file path by removing base 
folder prefix /// Example: "cobblemon/index.html" -> "index.html" /// -/// Mirrors TypeScript implementation at src/routes/wisp.ts line 291 +/// Note: This function is kept for reference but is no longer used in production code. +/// The TypeScript server has a similar normalization (src/routes/wisp.ts line 291) to handle +/// uploads that include a base folder prefix, but our CLI doesn't need this since we +/// track full paths consistently. +#[allow(dead_code)] pub fn normalize_path(path: &str) -> String { // Remove base folder prefix (everything before first /) if let Some(idx) = path.find('/') { diff --git a/cli/src/main.rs b/cli/src/main.rs index db0e7cf..8db65f6 100644 --- a/cli/src/main.rs +++ b/cli/src/main.rs @@ -152,7 +152,7 @@ async fn deploy_site( }; // Build directory tree - let (root_dir, total_files, reused_count) = build_directory(agent, &path, &existing_blob_map).await?; + let (root_dir, total_files, reused_count) = build_directory(agent, &path, &existing_blob_map, String::new()).await?; let uploaded_count = total_files - reused_count; // Create the Fs record @@ -182,10 +182,12 @@ async fn deploy_site( } /// Recursively build a Directory from a filesystem path +/// current_path is the path from the root of the site (e.g., "" for root, "config" for config dir) fn build_directory<'a>( agent: &'a Agent, dir_path: &'a Path, existing_blobs: &'a HashMap, String)>, + current_path: String, ) -> std::pin::Pin, usize, usize)>> + 'a>> { Box::pin(async move { @@ -214,7 +216,13 @@ fn build_directory<'a>( let metadata = entry.metadata().into_diagnostic()?; if metadata.is_file() { - file_tasks.push((name_str, path)); + // Construct full path for this file (for blob map lookup) + let full_path = if current_path.is_empty() { + name_str.clone() + } else { + format!("{}/{}", current_path, name_str) + }; + file_tasks.push((name_str, path, full_path)); } else if metadata.is_dir() { dir_tasks.push((name_str, path)); } @@ -222,8 +230,8 @@ fn build_directory<'a>( // Process files concurrently with a limit of 5 let file_results: Vec<(Entry<'static>, bool)> = stream::iter(file_tasks) - .map(|(name, path)| async move { - let (file_node, reused) = process_file(agent, &path, &name, existing_blobs).await?; + .map(|(name, path, full_path)| async move { + let (file_node, reused) = process_file(agent, &path, &full_path, existing_blobs).await?; let entry = Entry::new() .name(CowStr::from(name)) .node(EntryNode::File(Box::new(file_node))) @@ -251,7 +259,13 @@ fn build_directory<'a>( // Process directories recursively (sequentially to avoid too much nesting) let mut dir_entries = Vec::new(); for (name, path) in dir_tasks { - let (subdir, sub_total, sub_reused) = build_directory(agent, &path, existing_blobs).await?; + // Construct full path for subdirectory + let subdir_path = if current_path.is_empty() { + name.clone() + } else { + format!("{}/{}", current_path, name) + }; + let (subdir, sub_total, sub_reused) = build_directory(agent, &path, existing_blobs, subdir_path).await?; dir_entries.push(Entry::new() .name(CowStr::from(name)) .node(EntryNode::Directory(Box::new(subdir))) @@ -275,10 +289,11 @@ fn build_directory<'a>( /// Process a single file: gzip -> base64 -> upload blob (or reuse existing) /// Returns (File, reused: bool) +/// file_path_key is the full path from the site root (e.g., "config/file.json") for blob map lookup async fn process_file( agent: &Agent, file_path: &Path, - file_name: &str, + file_path_key: &str, existing_blobs: &HashMap, String)>, ) -> miette::Result<(File<'static>, bool)> { 
@@ -301,17 +316,13 @@ async fn process_file( // Compute CID for this file (CRITICAL: on base64-encoded gzipped content) let file_cid = cid::compute_cid(&base64_bytes); - // Normalize the file path for comparison - let normalized_path = blob_map::normalize_path(file_name); - // Check if we have an existing blob with the same CID - let existing_blob = existing_blobs.get(&normalized_path) - .or_else(|| existing_blobs.get(file_name)); + let existing_blob = existing_blobs.get(file_path_key); if let Some((existing_blob_ref, existing_cid)) = existing_blob { if existing_cid == &file_cid { // CIDs match - reuse existing blob - println!(" ✓ Reusing blob for {} (CID: {})", file_name, file_cid); + println!(" ✓ Reusing blob for {} (CID: {})", file_path_key, file_cid); return Ok(( File::new() .r#type(CowStr::from("file")) @@ -326,7 +337,7 @@ async fn process_file( } // File is new or changed - upload it - println!(" ↑ Uploading {} ({} bytes, CID: {})", file_name, base64_bytes.len(), file_cid); + println!(" ↑ Uploading {} ({} bytes, CID: {})", file_path_key, base64_bytes.len(), file_cid); let blob = agent.upload_blob( base64_bytes, MimeType::new_static("application/octet-stream"), -- 2.50.1 (Apple Git-155) From 38b1c4c6f7cc6e8f298ef3af629d2761b5f3b908 Mon Sep 17 00:00:00 2001 From: "@nekomimi.pet" Date: Wed, 12 Nov 2025 23:57:22 -0500 Subject: [PATCH 4/6] add pull and serve to cli --- cli/Cargo.lock | 560 +++++++++++++++++++++++++++++++++++++++++++- cli/Cargo.toml | 8 +- cli/src/download.rs | 71 ++++++ cli/src/main.rs | 125 ++++++++-- cli/src/metadata.rs | 46 ++++ cli/src/pull.rs | 305 ++++++++++++++++++++++++ cli/src/serve.rs | 202 ++++++++++++++++ 7 files changed, 1295 insertions(+), 22 deletions(-) create mode 100644 cli/src/download.rs create mode 100644 cli/src/metadata.rs create mode 100644 cli/src/pull.rs create mode 100644 cli/src/serve.rs diff --git a/cli/Cargo.lock b/cli/Cargo.lock index a100cf6..5fa5a99 100644 --- a/cli/Cargo.lock +++ b/cli/Cargo.lock @@ -173,6 +173,61 @@ version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8" +[[package]] +name = "axum" +version = "0.7.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "edca88bc138befd0323b20752846e6587272d3b03b0343c8ea28a6f819e6e71f" +dependencies = [ + "async-trait", + "axum-core", + "bytes", + "futures-util", + "http", + "http-body", + "http-body-util", + "hyper", + "hyper-util", + "itoa", + "matchit", + "memchr", + "mime", + "percent-encoding", + "pin-project-lite", + "rustversion", + "serde", + "serde_json", + "serde_path_to_error", + "serde_urlencoded", + "sync_wrapper", + "tokio", + "tower 0.5.2", + "tower-layer", + "tower-service", + "tracing", +] + +[[package]] +name = "axum-core" +version = "0.4.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09f2bd6146b97ae3359fa0cc6d6b376d9539582c7b4220f041a33ec24c226199" +dependencies = [ + "async-trait", + "bytes", + "futures-util", + "http", + "http-body", + "http-body-util", + "mime", + "pin-project-lite", + "rustversion", + "sync_wrapper", + "tower-layer", + "tower-service", + "tracing", +] + [[package]] name = "backtrace" version = "0.3.76" @@ -347,6 +402,12 @@ version = "3.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "46c5e41b57b8bba42a04676d81cb89e9ee8e859a1a66f80a5a72e1cb76b34d43" +[[package]] +name = "byteorder" +version = "1.5.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" + [[package]] name = "bytes" version = "1.10.1" @@ -548,6 +609,16 @@ version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2f421161cb492475f1661ddc9815a745a1c894592070661180fdec3d4872e9c3" +[[package]] +name = "cordyceps" +version = "0.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "688d7fbb8092b8de775ef2536f36c8c31f2bc4006ece2e8d8ad2d17d00ce0a2a" +dependencies = [ + "loom", + "tracing", +] + [[package]] name = "core-foundation" version = "0.9.4" @@ -750,6 +821,33 @@ dependencies = [ "serde_core", ] +[[package]] +name = "derive_more" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4a9b99b9cbbe49445b21764dc0625032a89b145a2642e67603e1c936f5458d05" +dependencies = [ + "derive_more-impl", +] + +[[package]] +name = "derive_more-impl" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cb7330aeadfbe296029522e6c40f315320aba36fc43a5b3632f3795348f3bd22" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.108", + "unicode-xid", +] + +[[package]] +name = "diatomic-waker" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ab03c107fafeb3ee9f5925686dbb7a73bc76e3932abb0d2b365cb64b169cf04c" + [[package]] name = "digest" version = "0.10.7" @@ -955,6 +1053,19 @@ dependencies = [ "futures-util", ] +[[package]] +name = "futures-buffered" +version = "0.2.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a8e0e1f38ec07ba4abbde21eed377082f17ccb988be9d988a5adbf4bafc118fd" +dependencies = [ + "cordyceps", + "diatomic-waker", + "futures-core", + "pin-project-lite", + "spin 0.10.0", +] + [[package]] name = "futures-channel" version = "0.3.31" @@ -988,6 +1099,19 @@ version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9e5c1b78ca4aae1ac06c48a526a655760685149f0d465d21f37abfe57ce075c6" +[[package]] +name = "futures-lite" +version = "2.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f78e10609fe0e0b3f4157ffab1876319b5b0db102a2c60dc4626306dc46b44ad" +dependencies = [ + "fastrand", + "futures-core", + "futures-io", + "parking", + "pin-project-lite", +] + [[package]] name = "futures-macro" version = "0.3.31" @@ -1029,6 +1153,20 @@ dependencies = [ "slab", ] +[[package]] +name = "generator" +version = "0.8.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "605183a538e3e2a9c1038635cc5c2d194e2ee8fd0d1b66b8349fad7dbacce5a2" +dependencies = [ + "cc", + "cfg-if", + "libc", + "log", + "rustversion", + "windows", +] + [[package]] name = "generic-array" version = "0.14.9" @@ -1273,6 +1411,12 @@ dependencies = [ "pin-project-lite", ] +[[package]] +name = "http-range-header" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9171a2ea8a68358193d15dd5d70c1c10a2afc3e7e4c5bc92bc9f025cebd7359c" + [[package]] name = "httparse" version = "1.10.1" @@ -1299,6 +1443,7 @@ dependencies = [ "http", "http-body", "httparse", + "httpdate", "itoa", "pin-project-lite", "pin-utils", @@ -1362,7 +1507,7 @@ dependencies = [ "js-sys", "log", "wasm-bindgen", - "windows-core", + "windows-core 0.62.2", ] [[package]] @@ -1635,7 +1780,9 @@ dependencies = [ "bon", "bytes", "chrono", + "ciborium", "cid", + "futures", "getrandom 
0.2.16", "getrandom 0.3.4", "http", @@ -1645,6 +1792,7 @@ dependencies = [ "miette", "multibase", "multihash", + "n0-future", "ouroboros", "p256", "rand 0.9.2", @@ -1658,6 +1806,7 @@ dependencies = [ "smol_str", "thiserror 2.0.17", "tokio", + "tokio-tungstenite-wasm", "tokio-util", "trait-variant", "url", @@ -1856,7 +2005,7 @@ version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" dependencies = [ - "spin", + "spin 0.9.8", ] [[package]] @@ -1915,6 +2064,19 @@ version = "0.4.28" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "34080505efa8e45a4b816c349525ebe327ceaa8559756f0356cba97ef3bf7432" +[[package]] +name = "loom" +version = "0.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "419e0dc8046cb947daa77eb95ae174acfbddb7673b4151f56d1eed8e93fbfaca" +dependencies = [ + "cfg-if", + "generator", + "scoped-tls", + "tracing", + "tracing-subscriber", +] + [[package]] name = "lru-cache" version = "0.1.2" @@ -1973,6 +2135,21 @@ dependencies = [ "syn 1.0.109", ] +[[package]] +name = "matchers" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d1525a2a28c7f4fa0fc98bb91ae755d1e2d1505079e05539e35bc876b5d65ae9" +dependencies = [ + "regex-automata", +] + +[[package]] +name = "matchit" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0e7465ac9959cc2b1404e8e2367b43684a6d13790fe23056cc8c6c5a6b7bcb94" + [[package]] name = "memchr" version = "2.7.6" @@ -2107,6 +2284,27 @@ dependencies = [ "twoway", ] +[[package]] +name = "n0-future" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7bb0e5d99e681ab3c938842b96fcb41bf8a7bb4bfdb11ccbd653a7e83e06c794" +dependencies = [ + "cfg_aliases", + "derive_more", + "futures-buffered", + "futures-lite", + "futures-util", + "js-sys", + "pin-project", + "send_wrapper", + "tokio", + "tokio-util", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-time", +] + [[package]] name = "ndk-context" version = "0.1.1" @@ -2129,6 +2327,15 @@ dependencies = [ "minimal-lexical", ] +[[package]] +name = "nu-ansi-term" +version = "0.50.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7957b9740744892f114936ab4a57b3f487491bbeafaf8083688b16841a4240e5" +dependencies = [ + "windows-sys 0.61.2", +] + [[package]] name = "num-bigint-dig" version = "0.8.5" @@ -2246,6 +2453,12 @@ version = "1.70.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "384b8ab6d37215f3c5301a95a4accb5d64aa607f1fcb26a11b5303878451b4fe" +[[package]] +name = "openssl-probe" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d05e27ee213611ffe7d6348b942e8f942b37114c00cc03cec254295a4a17852e" + [[package]] name = "option-ext" version = "0.2.0" @@ -2304,6 +2517,12 @@ dependencies = [ "primeorder", ] +[[package]] +name = "parking" +version = "2.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f38d5652c16fde515bb1ecef450ab0f6a219d619a7274976324d5e377f7dceba" + [[package]] name = "parking_lot" version = "0.12.5" @@ -2380,6 +2599,26 @@ dependencies = [ "siphasher", ] +[[package]] +name = "pin-project" +version = "1.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "677f1add503faace112b9f1373e43e9e054bfdd22ff1a63c1bc485eaec6a6a8a" +dependencies = [ + 
"pin-project-internal", +] + +[[package]] +name = "pin-project-internal" +version = "1.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e918e4ff8c4549eb882f14b3a4bc8c8bc93de829416eacf579f1207a8fbf861" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.108", +] + [[package]] name = "pin-project-lite" version = "0.2.16" @@ -2752,8 +2991,8 @@ dependencies = [ "tokio", "tokio-rustls", "tokio-util", - "tower", - "tower-http", + "tower 0.5.2", + "tower-http 0.6.6", "tower-service", "url", "wasm-bindgen", @@ -2876,6 +3115,18 @@ dependencies = [ "zeroize", ] +[[package]] +name = "rustls-native-certs" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9980d917ebb0c0536119ba501e90834767bffc3d60641457fd84a1f3fd337923" +dependencies = [ + "openssl-probe", + "rustls-pki-types", + "schannel", + "security-framework", +] + [[package]] name = "rustls-pki-types" version = "1.13.0" @@ -2924,6 +3175,15 @@ dependencies = [ "winapi-util", ] +[[package]] +name = "schannel" +version = "0.1.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "891d81b926048e76efe18581bf793546b4c0eaf8448d72be8de2bbee5fd166e1" +dependencies = [ + "windows-sys 0.61.2", +] + [[package]] name = "schemars" version = "0.9.0" @@ -2948,6 +3208,12 @@ dependencies = [ "serde_json", ] +[[package]] +name = "scoped-tls" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e1cf6437eb19a8f4a6cc0f7dca544973b0b78843adbfeb3683d1a94a0024a294" + [[package]] name = "scopeguard" version = "1.2.0" @@ -2968,6 +3234,35 @@ dependencies = [ "zeroize", ] +[[package]] +name = "security-framework" +version = "3.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b3297343eaf830f66ede390ea39da1d462b6b0c1b000f420d0a83f898bbbe6ef" +dependencies = [ + "bitflags", + "core-foundation 0.10.1", + "core-foundation-sys", + "libc", + "security-framework-sys", +] + +[[package]] +name = "security-framework-sys" +version = "2.15.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cc1f0cbffaac4852523ce30d8bd3c5cdc873501d96ff467ca09b6767bb8cd5c0" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "send_wrapper" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cd0b0ec5f1c1ca621c432a25813d8d60c88abe6d3e08a3eb9cf37d97a0fe3d73" + [[package]] name = "serde" version = "1.0.228" @@ -3046,6 +3341,17 @@ dependencies = [ "serde_core", ] +[[package]] +name = "serde_path_to_error" +version = "0.1.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "10a9ff822e371bb5403e391ecd83e182e0e77ba7f6fe0160b795797109d1b457" +dependencies = [ + "itoa", + "serde", + "serde_core", +] + [[package]] name = "serde_repr" version = "0.1.20" @@ -3100,6 +3406,17 @@ dependencies = [ "syn 2.0.108", ] +[[package]] +name = "sha1" +version = "0.10.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba" +dependencies = [ + "cfg-if", + "cpufeatures", + "digest", +] + [[package]] name = "sha1_smol" version = "1.0.1" @@ -3117,6 +3434,15 @@ dependencies = [ "digest", ] +[[package]] +name = "sharded-slab" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f40ca3c46823713e0d4209592e8d6e826aa57e928f09752619fc696c499637f6" +dependencies = [ + "lazy_static", +] 
+ [[package]] name = "shellexpand" version = "3.1.1" @@ -3211,6 +3537,12 @@ version = "0.9.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6980e8d7511241f8acf4aebddbb1ff938df5eebe98691418c4468d0b72a96a67" +[[package]] +name = "spin" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d5fe4ccb98d9c292d56fec89a5e07da7fc4cf0dc11e156b41793132775d3e591" + [[package]] name = "spki" version = "0.7.3" @@ -3464,6 +3796,15 @@ dependencies = [ "syn 2.0.108", ] +[[package]] +name = "thread_local" +version = "1.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f60246a4944f24f6e018aa17cdeffb7818b76356965d03b07d6a9886e8962185" +dependencies = [ + "cfg-if", +] + [[package]] name = "threadpool" version = "1.8.1" @@ -3581,6 +3922,41 @@ dependencies = [ "tokio", ] +[[package]] +name = "tokio-tungstenite" +version = "0.24.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "edc5f74e248dc973e0dbb7b74c7e0d6fcc301c694ff50049504004ef4d0cdcd9" +dependencies = [ + "futures-util", + "log", + "rustls", + "rustls-native-certs", + "rustls-pki-types", + "tokio", + "tokio-rustls", + "tungstenite", +] + +[[package]] +name = "tokio-tungstenite-wasm" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e21a5c399399c3db9f08d8297ac12b500e86bca82e930253fdc62eaf9c0de6ae" +dependencies = [ + "futures-channel", + "futures-util", + "http", + "httparse", + "js-sys", + "rustls", + "thiserror 1.0.69", + "tokio", + "tokio-tungstenite", + "wasm-bindgen", + "web-sys", +] + [[package]] name = "tokio-util" version = "0.7.16" @@ -3590,10 +3966,22 @@ dependencies = [ "bytes", "futures-core", "futures-sink", + "futures-util", "pin-project-lite", "tokio", ] +[[package]] +name = "tower" +version = "0.4.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8fa9be0de6cf49e536ce1851f987bd21a43b771b09473c3549a6c853db37c1c" +dependencies = [ + "tower-layer", + "tower-service", + "tracing", +] + [[package]] name = "tower" version = "0.5.2" @@ -3607,6 +3995,34 @@ dependencies = [ "tokio", "tower-layer", "tower-service", + "tracing", +] + +[[package]] +name = "tower-http" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e9cd434a998747dd2c4276bc96ee2e0c7a2eadf3cae88e52be55a05fa9053f5" +dependencies = [ + "async-compression", + "bitflags", + "bytes", + "futures-core", + "futures-util", + "http", + "http-body", + "http-body-util", + "http-range-header", + "httpdate", + "mime", + "mime_guess", + "percent-encoding", + "pin-project-lite", + "tokio", + "tokio-util", + "tower-layer", + "tower-service", + "tracing", ] [[package]] @@ -3622,7 +4038,7 @@ dependencies = [ "http-body", "iri-string", "pin-project-lite", - "tower", + "tower 0.5.2", "tower-layer", "tower-service", ] @@ -3645,6 +4061,7 @@ version = "0.1.41" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "784e0ac535deb450455cbfa28a6f0df145ea1bb7ae51b821cf5e7927fdcfbdd0" dependencies = [ + "log", "pin-project-lite", "tracing-attributes", "tracing-core", @@ -3668,6 +4085,36 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b9d12581f227e93f094d3af2ae690a574abb8a2b9b7a96e7cfe9647b2b617678" dependencies = [ "once_cell", + "valuable", +] + +[[package]] +name = "tracing-log" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"ee855f1f400bd0e5c02d150ae5de3840039a3f54b025156404e34c23c03f47c3" +dependencies = [ + "log", + "once_cell", + "tracing-core", +] + +[[package]] +name = "tracing-subscriber" +version = "0.3.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2054a14f5307d601f88daf0553e1cbf472acc4f2c51afab632431cdcd72124d5" +dependencies = [ + "matchers", + "nu-ansi-term", + "once_cell", + "regex-automata", + "sharded-slab", + "smallvec", + "thread_local", + "tracing", + "tracing-core", + "tracing-log", ] [[package]] @@ -3693,6 +4140,26 @@ version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" +[[package]] +name = "tungstenite" +version = "0.24.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "18e5b8366ee7a95b16d32197d0b2604b43a0be89dc5fac9f8e96ccafbaedda8a" +dependencies = [ + "byteorder", + "bytes", + "data-encoding", + "http", + "httparse", + "log", + "rand 0.8.5", + "rustls", + "rustls-pki-types", + "sha1", + "thiserror 1.0.69", + "utf-8", +] + [[package]] name = "twoway" version = "0.1.8" @@ -3744,6 +4211,12 @@ version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b4ac048d71ede7ee76d585517add45da530660ef4390e49b098733c6e897f254" +[[package]] +name = "unicode-xid" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ebc1c04c71510c7f702b52b7c350734c9ff1295c464a03335b00bb84fc54f853" + [[package]] name = "unsigned-varint" version = "0.8.0" @@ -3792,6 +4265,12 @@ version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" +[[package]] +name = "valuable" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba73ea9cf16a25df0c8caa16c51acb937d5712a8429db78a3ee29d5dcacd3a65" + [[package]] name = "version_check" version = "0.9.5" @@ -3975,6 +4454,41 @@ dependencies = [ "windows-sys 0.61.2", ] +[[package]] +name = "windows" +version = "0.61.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9babd3a767a4c1aef6900409f85f5d53ce2544ccdfaa86dad48c91782c6d6893" +dependencies = [ + "windows-collections", + "windows-core 0.61.2", + "windows-future", + "windows-link 0.1.3", + "windows-numerics", +] + +[[package]] +name = "windows-collections" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3beeceb5e5cfd9eb1d76b381630e82c4241ccd0d27f1a39ed41b2760b255c5e8" +dependencies = [ + "windows-core 0.61.2", +] + +[[package]] +name = "windows-core" +version = "0.61.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c0fdd3ddb90610c7638aa2b3a3ab2904fb9e5cdbecc643ddb3647212781c4ae3" +dependencies = [ + "windows-implement", + "windows-interface", + "windows-link 0.1.3", + "windows-result 0.3.4", + "windows-strings 0.4.2", +] + [[package]] name = "windows-core" version = "0.62.2" @@ -3988,6 +4502,17 @@ dependencies = [ "windows-strings 0.5.1", ] +[[package]] +name = "windows-future" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fc6a41e98427b19fe4b73c550f060b59fa592d7d686537eebf9385621bfbad8e" +dependencies = [ + "windows-core 0.61.2", + "windows-link 0.1.3", + "windows-threading", +] + [[package]] name = "windows-implement" version = "0.60.2" @@ -4022,6 +4547,16 @@ version = "0.2.1" 
source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f0805222e57f7521d6a62e36fa9163bc891acd422f971defe97d64e70d0a4fe5" +[[package]] +name = "windows-numerics" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9150af68066c4c5c07ddc0ce30421554771e528bde427614c61038bc2c92c2b1" +dependencies = [ + "windows-core 0.61.2", + "windows-link 0.1.3", +] + [[package]] name = "windows-registry" version = "0.5.3" @@ -4177,6 +4712,15 @@ dependencies = [ "windows_x86_64_msvc 0.53.1", ] +[[package]] +name = "windows-threading" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b66463ad2e0ea3bbf808b7f1d371311c80e115c0b71d60efc142cafbcfb057a6" +dependencies = [ + "windows-link 0.1.3", +] + [[package]] name = "windows_aarch64_gnullvm" version = "0.42.2" @@ -4371,8 +4915,10 @@ dependencies = [ name = "wisp-cli" version = "0.1.0" dependencies = [ + "axum", "base64 0.22.1", "bytes", + "chrono", "clap", "flate2", "futures", @@ -4387,6 +4933,7 @@ dependencies = [ "mime_guess", "multibase", "multihash", + "n0-future", "reqwest", "rustversion", "serde", @@ -4394,6 +4941,9 @@ dependencies = [ "sha2", "shellexpand", "tokio", + "tower 0.4.13", + "tower-http 0.5.2", + "url", "walkdir", ] diff --git a/cli/Cargo.toml b/cli/Cargo.toml index 6e0d1e2..c3eb22c 100644 --- a/cli/Cargo.toml +++ b/cli/Cargo.toml @@ -11,7 +11,7 @@ place_wisp = [] jacquard = { git = "https://tangled.org/@nonbinary.computer/jacquard", features = ["loopback"] } jacquard-oauth = { git = "https://tangled.org/@nonbinary.computer/jacquard" } jacquard-api = { git = "https://tangled.org/@nonbinary.computer/jacquard" } -jacquard-common = { git = "https://tangled.org/@nonbinary.computer/jacquard" } +jacquard-common = { git = "https://tangled.org/@nonbinary.computer/jacquard", features = ["websocket"] } jacquard-identity = { git = "https://tangled.org/@nonbinary.computer/jacquard", features = ["dns"] } jacquard-derive = { git = "https://tangled.org/@nonbinary.computer/jacquard" } jacquard-lexicon = { git = "https://tangled.org/@nonbinary.computer/jacquard" } @@ -33,3 +33,9 @@ futures = "0.3.31" multihash = "0.19.3" multibase = "0.9" sha2 = "0.10" +axum = "0.7" +tower-http = { version = "0.5", features = ["fs", "compression-gzip"] } +tower = "0.4" +n0-future = "0.1" +chrono = "0.4" +url = "2.5" diff --git a/cli/src/download.rs b/cli/src/download.rs new file mode 100644 index 0000000..a88a065 --- /dev/null +++ b/cli/src/download.rs @@ -0,0 +1,71 @@ +use base64::Engine; +use bytes::Bytes; +use flate2::read::GzDecoder; +use jacquard_common::types::blob::BlobRef; +use miette::IntoDiagnostic; +use std::io::Read; +use url::Url; + +/// Download a blob from the PDS +pub async fn download_blob(pds_url: &Url, blob_ref: &BlobRef<'_>, did: &str) -> miette::Result { + // Extract CID from blob ref + let cid = blob_ref.blob().r#ref.to_string(); + + // Construct blob download URL + // The correct endpoint is: /xrpc/com.atproto.sync.getBlob?did={did}&cid={cid} + let blob_url = pds_url + .join(&format!("/xrpc/com.atproto.sync.getBlob?did={}&cid={}", did, cid)) + .into_diagnostic()?; + + let client = reqwest::Client::new(); + let response = client + .get(blob_url) + .send() + .await + .into_diagnostic()?; + + if !response.status().is_success() { + return Err(miette::miette!( + "Failed to download blob: {}", + response.status() + )); + } + + let bytes = response.bytes().await.into_diagnostic()?; + Ok(bytes) +} + +/// Decompress and decode a blob (base64 + gzip) 
+pub fn decompress_blob(data: &[u8], is_base64: bool, is_gzipped: bool) -> miette::Result> { + let mut current_data = data.to_vec(); + + // First, decode base64 if needed + if is_base64 { + current_data = base64::prelude::BASE64_STANDARD + .decode(¤t_data) + .into_diagnostic()?; + } + + // Then, decompress gzip if needed + if is_gzipped { + let mut decoder = GzDecoder::new(¤t_data[..]); + let mut decompressed = Vec::new(); + decoder.read_to_end(&mut decompressed).into_diagnostic()?; + current_data = decompressed; + } + + Ok(current_data) +} + +/// Download and decompress a blob +pub async fn download_and_decompress_blob( + pds_url: &Url, + blob_ref: &BlobRef<'_>, + did: &str, + is_base64: bool, + is_gzipped: bool, +) -> miette::Result> { + let data = download_blob(pds_url, blob_ref, did).await?; + decompress_blob(&data, is_base64, is_gzipped) +} + diff --git a/cli/src/main.rs b/cli/src/main.rs index 8db65f6..46ce4bc 100644 --- a/cli/src/main.rs +++ b/cli/src/main.rs @@ -2,8 +2,12 @@ mod builder_types; mod place_wisp; mod cid; mod blob_map; +mod metadata; +mod download; +mod pull; +mod serve; -use clap::Parser; +use clap::{Parser, Subcommand}; use jacquard::CowStr; use jacquard::client::{Agent, FileAuthStore, AgentSessionExt, MemoryCredentialSession, AgentSession}; use jacquard::oauth::client::OAuthClient; @@ -23,37 +27,126 @@ use futures::stream::{self, StreamExt}; use place_wisp::fs::*; #[derive(Parser, Debug)] -#[command(author, version, about = "Deploy a static site to wisp.place")] +#[command(author, version, about = "wisp.place CLI tool")] struct Args { + #[command(subcommand)] + command: Option, + + // Deploy arguments (when no subcommand is specified) /// Handle (e.g., alice.bsky.social), DID, or PDS URL - input: CowStr<'static>, + #[arg(global = true, conflicts_with = "command")] + input: Option>, /// Path to the directory containing your static site - #[arg(short, long, default_value = ".")] - path: PathBuf, + #[arg(short, long, global = true, conflicts_with = "command")] + path: Option, /// Site name (defaults to directory name) - #[arg(short, long)] + #[arg(short, long, global = true, conflicts_with = "command")] site: Option, - /// Path to auth store file (will be created if missing, only used with OAuth) - #[arg(long, default_value = "/tmp/wisp-oauth-session.json")] - store: String, + /// Path to auth store file + #[arg(long, global = true, conflicts_with = "command")] + store: Option, - /// App Password for authentication (alternative to OAuth) - #[arg(long)] + /// App Password for authentication + #[arg(long, global = true, conflicts_with = "command")] password: Option>, } +#[derive(Subcommand, Debug)] +enum Commands { + /// Deploy a static site to wisp.place (default command) + Deploy { + /// Handle (e.g., alice.bsky.social), DID, or PDS URL + input: CowStr<'static>, + + /// Path to the directory containing your static site + #[arg(short, long, default_value = ".")] + path: PathBuf, + + /// Site name (defaults to directory name) + #[arg(short, long)] + site: Option, + + /// Path to auth store file (will be created if missing, only used with OAuth) + #[arg(long, default_value = "/tmp/wisp-oauth-session.json")] + store: String, + + /// App Password for authentication (alternative to OAuth) + #[arg(long)] + password: Option>, + }, + /// Pull a site from the PDS to a local directory + Pull { + /// Handle (e.g., alice.bsky.social) or DID + input: CowStr<'static>, + + /// Site name (record key) + #[arg(short, long)] + site: String, + + /// Output directory for the downloaded site 
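+        /// Existing contents are replaced by the pulled site.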
+ #[arg(short, long, default_value = ".")] + output: PathBuf, + }, + /// Serve a site locally with real-time firehose updates + Serve { + /// Handle (e.g., alice.bsky.social) or DID + input: CowStr<'static>, + + /// Site name (record key) + #[arg(short, long)] + site: String, + + /// Output directory for the site files + #[arg(short, long, default_value = ".")] + output: PathBuf, + + /// Port to serve on + #[arg(short, long, default_value = "8080")] + port: u16, + }, +} + #[tokio::main] async fn main() -> miette::Result<()> { let args = Args::parse(); - // Dispatch to appropriate authentication method - if let Some(password) = args.password { - run_with_app_password(args.input, password, args.path, args.site).await - } else { - run_with_oauth(args.input, args.store, args.path, args.site).await + match args.command { + Some(Commands::Deploy { input, path, site, store, password }) => { + // Dispatch to appropriate authentication method + if let Some(password) = password { + run_with_app_password(input, password, path, site).await + } else { + run_with_oauth(input, store, path, site).await + } + } + Some(Commands::Pull { input, site, output }) => { + pull::pull_site(input, CowStr::from(site), output).await + } + Some(Commands::Serve { input, site, output, port }) => { + serve::serve_site(input, CowStr::from(site), output, port).await + } + None => { + // Legacy mode: if input is provided, assume deploy command + if let Some(input) = args.input { + let path = args.path.unwrap_or_else(|| PathBuf::from(".")); + let store = args.store.unwrap_or_else(|| "/tmp/wisp-oauth-session.json".to_string()); + + // Dispatch to appropriate authentication method + if let Some(password) = args.password { + run_with_app_password(input, password, path, args.site).await + } else { + run_with_oauth(input, store, path, args.site).await + } + } else { + // No command and no input, show help + use clap::CommandFactory; + Args::command().print_help().into_diagnostic()?; + Ok(()) + } + } } } diff --git a/cli/src/metadata.rs b/cli/src/metadata.rs new file mode 100644 index 0000000..843831b --- /dev/null +++ b/cli/src/metadata.rs @@ -0,0 +1,46 @@ +use serde::{Deserialize, Serialize}; +use std::collections::HashMap; +use std::path::Path; +use miette::IntoDiagnostic; + +/// Metadata tracking file CIDs for incremental updates +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct SiteMetadata { + /// Record CID from the PDS + pub record_cid: String, + /// Map of file paths to their blob CIDs + pub file_cids: HashMap, + /// Timestamp when the site was last synced + pub last_sync: i64, +} + +impl SiteMetadata { + pub fn new(record_cid: String, file_cids: HashMap) -> Self { + Self { + record_cid, + file_cids, + last_sync: chrono::Utc::now().timestamp(), + } + } + + /// Load metadata from a directory + pub fn load(dir: &Path) -> miette::Result> { + let metadata_path = dir.join(".wisp-metadata.json"); + if !metadata_path.exists() { + return Ok(None); + } + + let contents = std::fs::read_to_string(&metadata_path).into_diagnostic()?; + let metadata: SiteMetadata = serde_json::from_str(&contents).into_diagnostic()?; + Ok(Some(metadata)) + } + + /// Save metadata to a directory + pub fn save(&self, dir: &Path) -> miette::Result<()> { + let metadata_path = dir.join(".wisp-metadata.json"); + let contents = serde_json::to_string_pretty(self).into_diagnostic()?; + std::fs::write(&metadata_path, contents).into_diagnostic()?; + Ok(()) + } +} + diff --git a/cli/src/pull.rs b/cli/src/pull.rs new file mode 100644 index 0000000..01cfaf5 
--- /dev/null +++ b/cli/src/pull.rs @@ -0,0 +1,305 @@ +use crate::blob_map; +use crate::download; +use crate::metadata::SiteMetadata; +use crate::place_wisp::fs::*; +use jacquard::CowStr; +use jacquard::prelude::IdentityResolver; +use jacquard_common::types::string::Did; +use jacquard_common::xrpc::XrpcExt; +use jacquard_identity::PublicResolver; +use miette::IntoDiagnostic; +use std::collections::HashMap; +use std::path::{Path, PathBuf}; +use url::Url; + +/// Pull a site from the PDS to a local directory +pub async fn pull_site( + input: CowStr<'static>, + rkey: CowStr<'static>, + output_dir: PathBuf, +) -> miette::Result<()> { + println!("Pulling site {} from {}...", rkey, input); + + // Resolve handle to DID if needed + let resolver = PublicResolver::default(); + let did = if input.starts_with("did:") { + Did::new(&input).into_diagnostic()? + } else { + // It's a handle, resolve it + let handle = jacquard_common::types::string::Handle::new(&input).into_diagnostic()?; + resolver.resolve_handle(&handle).await.into_diagnostic()? + }; + + // Resolve PDS endpoint for the DID + let pds_url = resolver.pds_for_did(&did).await.into_diagnostic()?; + println!("Resolved PDS: {}", pds_url); + + // Fetch the place.wisp.fs record + + println!("Fetching record from PDS..."); + let client = reqwest::Client::new(); + + // Use com.atproto.repo.getRecord + use jacquard::api::com_atproto::repo::get_record::GetRecord; + use jacquard_common::types::string::Rkey as RkeyType; + let rkey_parsed = RkeyType::new(&rkey).into_diagnostic()?; + + use jacquard_common::types::ident::AtIdentifier; + use jacquard_common::types::string::RecordKey; + let request = GetRecord::new() + .repo(AtIdentifier::Did(did.clone())) + .collection(CowStr::from("place.wisp.fs")) + .rkey(RecordKey::from(rkey_parsed)) + .build(); + + let response = client + .xrpc(pds_url.clone()) + .send(&request) + .await + .into_diagnostic()?; + + let record_output = response.into_output().into_diagnostic()?; + let record_cid = record_output.cid.as_ref().map(|c| c.to_string()).unwrap_or_default(); + + // Parse the record value as Fs + use jacquard_common::types::value::from_data; + let fs_record: Fs = from_data(&record_output.value).into_diagnostic()?; + + let file_count = fs_record.file_count.map(|c| c.to_string()).unwrap_or_else(|| "?".to_string()); + println!("Found site '{}' with {} files", fs_record.site, file_count); + + // Load existing metadata for incremental updates + let existing_metadata = SiteMetadata::load(&output_dir)?; + let existing_file_cids = existing_metadata + .as_ref() + .map(|m| m.file_cids.clone()) + .unwrap_or_default(); + + // Extract blob map from the new manifest + let new_blob_map = blob_map::extract_blob_map(&fs_record.root); + let new_file_cids: HashMap = new_blob_map + .iter() + .map(|(path, (_blob_ref, cid))| (path.clone(), cid.clone())) + .collect(); + + // Clean up any leftover temp directories from previous failed attempts + let parent = output_dir.parent().unwrap_or_else(|| std::path::Path::new(".")); + let output_name = output_dir.file_name().unwrap_or_else(|| std::ffi::OsStr::new("site")).to_string_lossy(); + let temp_prefix = format!(".tmp-{}-", output_name); + + if let Ok(entries) = parent.read_dir() { + for entry in entries.flatten() { + let name = entry.file_name(); + if name.to_string_lossy().starts_with(&temp_prefix) { + let _ = std::fs::remove_dir_all(entry.path()); + } + } + } + + // Check if we need to update (but only if output directory actually exists with files) + if let Some(metadata) = 
&existing_metadata { + if metadata.record_cid == record_cid { + // Verify that the output directory actually exists and has content + let has_content = output_dir.exists() && + output_dir.read_dir() + .map(|mut entries| entries.any(|e| { + if let Ok(entry) = e { + !entry.file_name().to_string_lossy().starts_with(".wisp-metadata") + } else { + false + } + })) + .unwrap_or(false); + + if has_content { + println!("Site is already up to date!"); + return Ok(()); + } + } + } + + // Create temporary directory for atomic update + // Place temp dir in parent directory to avoid issues with non-existent output_dir + let parent = output_dir.parent().unwrap_or_else(|| std::path::Path::new(".")); + let temp_dir_name = format!( + ".tmp-{}-{}", + output_dir.file_name().unwrap_or_else(|| std::ffi::OsStr::new("site")).to_string_lossy(), + chrono::Utc::now().timestamp() + ); + let temp_dir = parent.join(temp_dir_name); + std::fs::create_dir_all(&temp_dir).into_diagnostic()?; + + println!("Downloading files..."); + let mut downloaded = 0; + let mut reused = 0; + + // Download files recursively + let download_result = download_directory( + &fs_record.root, + &temp_dir, + &pds_url, + did.as_str(), + &new_blob_map, + &existing_file_cids, + &output_dir, + String::new(), + &mut downloaded, + &mut reused, + ) + .await; + + // If download failed, clean up temp directory + if let Err(e) = download_result { + let _ = std::fs::remove_dir_all(&temp_dir); + return Err(e); + } + + println!( + "Downloaded {} files, reused {} files", + downloaded, reused + ); + + // Save metadata + let metadata = SiteMetadata::new(record_cid, new_file_cids); + metadata.save(&temp_dir)?; + + // Move files from temp to output directory + let output_abs = std::fs::canonicalize(&output_dir).unwrap_or_else(|_| output_dir.clone()); + let current_dir = std::env::current_dir().into_diagnostic()?; + + // Special handling for pulling to current directory + if output_abs == current_dir { + // Move files from temp to current directory + for entry in std::fs::read_dir(&temp_dir).into_diagnostic()? 
{ + let entry = entry.into_diagnostic()?; + let dest = current_dir.join(entry.file_name()); + + // Remove existing file/dir if it exists + if dest.exists() { + if dest.is_dir() { + std::fs::remove_dir_all(&dest).into_diagnostic()?; + } else { + std::fs::remove_file(&dest).into_diagnostic()?; + } + } + + // Move from temp to current dir + std::fs::rename(entry.path(), dest).into_diagnostic()?; + } + + // Clean up temp directory + std::fs::remove_dir_all(&temp_dir).into_diagnostic()?; + } else { + // If output directory exists and has content, remove it first + if output_dir.exists() { + std::fs::remove_dir_all(&output_dir).into_diagnostic()?; + } + + // Ensure parent directory exists + if let Some(parent) = output_dir.parent() { + if !parent.as_os_str().is_empty() && !parent.exists() { + std::fs::create_dir_all(parent).into_diagnostic()?; + } + } + + // Rename temp to final location + match std::fs::rename(&temp_dir, &output_dir) { + Ok(_) => {}, + Err(e) => { + // Clean up temp directory on failure + let _ = std::fs::remove_dir_all(&temp_dir); + return Err(miette::miette!("Failed to move temp directory: {}", e)); + } + } + } + + println!("✓ Site pulled successfully to {}", output_dir.display()); + + Ok(()) +} + +/// Recursively download a directory +fn download_directory<'a>( + dir: &'a Directory<'_>, + output_dir: &'a Path, + pds_url: &'a Url, + did: &'a str, + new_blob_map: &'a HashMap, String)>, + existing_file_cids: &'a HashMap, + existing_output_dir: &'a Path, + path_prefix: String, + downloaded: &'a mut usize, + reused: &'a mut usize, +) -> std::pin::Pin> + Send + 'a>> { + Box::pin(async move { + for entry in &dir.entries { + let entry_name = entry.name.as_str(); + let current_path = if path_prefix.is_empty() { + entry_name.to_string() + } else { + format!("{}/{}", path_prefix, entry_name) + }; + + match &entry.node { + EntryNode::File(file) => { + let output_path = output_dir.join(entry_name); + + // Check if file CID matches existing + if let Some((_blob_ref, new_cid)) = new_blob_map.get(¤t_path) { + if let Some(existing_cid) = existing_file_cids.get(¤t_path) { + if existing_cid == new_cid { + // File unchanged, copy from existing directory + let existing_path = existing_output_dir.join(¤t_path); + if existing_path.exists() { + std::fs::copy(&existing_path, &output_path).into_diagnostic()?; + *reused += 1; + println!(" ✓ Reused {}", current_path); + continue; + } + } + } + } + + // File is new or changed, download it + println!(" ↓ Downloading {}", current_path); + let data = download::download_and_decompress_blob( + pds_url, + &file.blob, + did, + file.base64.unwrap_or(false), + file.encoding.as_ref().map(|e| e.as_str() == "gzip").unwrap_or(false), + ) + .await?; + + std::fs::write(&output_path, data).into_diagnostic()?; + *downloaded += 1; + } + EntryNode::Directory(subdir) => { + let subdir_path = output_dir.join(entry_name); + std::fs::create_dir_all(&subdir_path).into_diagnostic()?; + + download_directory( + subdir, + &subdir_path, + pds_url, + did, + new_blob_map, + existing_file_cids, + existing_output_dir, + current_path, + downloaded, + reused, + ) + .await?; + } + EntryNode::Unknown(_) => { + // Skip unknown node types + println!(" ⚠ Skipping unknown node type for {}", current_path); + } + } + } + + Ok(()) + }) +} + diff --git a/cli/src/serve.rs b/cli/src/serve.rs new file mode 100644 index 0000000..240bf93 --- /dev/null +++ b/cli/src/serve.rs @@ -0,0 +1,202 @@ +use crate::pull::pull_site; +use axum::Router; +use jacquard::CowStr; +use 
jacquard_common::jetstream::{CommitOperation, JetstreamMessage, JetstreamParams}; +use jacquard_common::types::string::Did; +use jacquard_common::xrpc::{SubscriptionClient, TungsteniteSubscriptionClient}; +use miette::IntoDiagnostic; +use n0_future::StreamExt; +use std::path::PathBuf; +use std::sync::Arc; +use tokio::sync::RwLock; +use tower_http::compression::CompressionLayer; +use tower_http::services::ServeDir; +use url::Url; + +/// Shared state for the server +#[derive(Clone)] +struct ServerState { + did: CowStr<'static>, + rkey: CowStr<'static>, + output_dir: PathBuf, + last_cid: Arc>>, +} + +/// Serve a site locally with real-time firehose updates +pub async fn serve_site( + input: CowStr<'static>, + rkey: CowStr<'static>, + output_dir: PathBuf, + port: u16, +) -> miette::Result<()> { + println!("Serving site {} from {} on port {}...", rkey, input, port); + + // Resolve handle to DID if needed + use jacquard_identity::PublicResolver; + use jacquard::prelude::IdentityResolver; + + let resolver = PublicResolver::default(); + let did = if input.starts_with("did:") { + Did::new(&input).into_diagnostic()? + } else { + // It's a handle, resolve it + let handle = jacquard_common::types::string::Handle::new(&input).into_diagnostic()?; + resolver.resolve_handle(&handle).await.into_diagnostic()? + }; + + println!("Resolved to DID: {}", did.as_str()); + + // Create output directory if it doesn't exist + std::fs::create_dir_all(&output_dir).into_diagnostic()?; + + // Initial pull of the site + println!("Performing initial pull..."); + let did_str = CowStr::from(did.as_str().to_string()); + pull_site(did_str.clone(), rkey.clone(), output_dir.clone()).await?; + + // Create shared state + let state = ServerState { + did: did_str.clone(), + rkey: rkey.clone(), + output_dir: output_dir.clone(), + last_cid: Arc::new(RwLock::new(None)), + }; + + // Start firehose listener in background + let firehose_state = state.clone(); + tokio::spawn(async move { + if let Err(e) = watch_firehose(firehose_state).await { + eprintln!("Firehose error: {}", e); + } + }); + + // Create HTTP server with gzip compression + let app = Router::new() + .fallback_service( + ServeDir::new(&output_dir) + .precompressed_gzip() + ) + .layer(CompressionLayer::new()) + .with_state(state); + + let addr = format!("0.0.0.0:{}", port); + let listener = tokio::net::TcpListener::bind(&addr) + .await + .into_diagnostic()?; + + println!("\n✓ Server running at http://localhost:{}", port); + println!(" Watching for updates on the firehose...\n"); + + axum::serve(listener, app).await.into_diagnostic()?; + + Ok(()) +} + +/// Watch the firehose for updates to the specific site +fn watch_firehose(state: ServerState) -> std::pin::Pin> + Send>> { + Box::pin(async move { + let jetstream_url = Url::parse("wss://jetstream1.us-east.fire.hose.cam") + .into_diagnostic()?; + + println!("[Firehose] Connecting to Jetstream..."); + + // Create subscription client + let client = TungsteniteSubscriptionClient::from_base_uri(jetstream_url); + + // Subscribe with no filters (we'll filter manually) + // Jetstream doesn't support filtering by collection in the params builder + let params = JetstreamParams::new().build(); + + let stream = client.subscribe(¶ms).await.into_diagnostic()?; + println!("[Firehose] Connected! 
Watching for updates..."); + + // Convert to typed message stream + let (_sink, mut messages) = stream.into_stream(); + + loop { + match messages.next().await { + Some(Ok(msg)) => { + if let Err(e) = handle_firehose_message(&state, msg).await { + eprintln!("[Firehose] Error handling message: {}", e); + } + } + Some(Err(e)) => { + eprintln!("[Firehose] Stream error: {}", e); + // Try to reconnect after a delay + tokio::time::sleep(tokio::time::Duration::from_secs(5)).await; + return Box::pin(watch_firehose(state)).await; + } + None => { + println!("[Firehose] Stream ended, reconnecting..."); + tokio::time::sleep(tokio::time::Duration::from_secs(5)).await; + return Box::pin(watch_firehose(state)).await; + } + } + } + }) +} + +/// Handle a firehose message +async fn handle_firehose_message( + state: &ServerState, + msg: JetstreamMessage<'_>, +) -> miette::Result<()> { + match msg { + JetstreamMessage::Commit { + did, + commit, + .. + } => { + // Check if this is our site + if did.as_str() == state.did.as_str() + && commit.collection.as_str() == "place.wisp.fs" + && commit.rkey.as_str() == state.rkey.as_str() + { + match commit.operation { + CommitOperation::Create | CommitOperation::Update => { + let new_cid = commit.cid.as_ref().map(|c| c.to_string()); + + // Check if CID changed + let should_update = { + let last_cid = state.last_cid.read().await; + new_cid != *last_cid + }; + + if should_update { + println!("\n[Update] Detected change to site {} (CID: {:?})", state.rkey, new_cid); + println!("[Update] Pulling latest version..."); + + // Pull the updated site + match pull_site( + state.did.clone(), + state.rkey.clone(), + state.output_dir.clone(), + ) + .await + { + Ok(_) => { + // Update last CID + let mut last_cid = state.last_cid.write().await; + *last_cid = new_cid; + println!("[Update] ✓ Site updated successfully!\n"); + } + Err(e) => { + eprintln!("[Update] Failed to pull site: {}", e); + } + } + } + } + CommitOperation::Delete => { + println!("\n[Update] Site {} was deleted", state.rkey); + } + } + } + } + _ => { + // Ignore identity and account messages + } + } + + Ok(()) +} + -- 2.50.1 (Apple Git-155) From 436d7a062732626f17d71b91adccc8492e4d1977 Mon Sep 17 00:00:00 2001 From: "@nekomimi.pet" Date: Thu, 13 Nov 2025 00:32:52 -0500 Subject: [PATCH 5/6] remove jacquard submodule --- .gitmodules | 3 -- cli/Cargo.lock | 136 ++++++++++++++++++++++++------------------------- cli/jacquard | 1 - 3 files changed, 68 insertions(+), 72 deletions(-) delete mode 100644 .gitmodules delete mode 160000 cli/jacquard diff --git a/.gitmodules b/.gitmodules deleted file mode 100644 index 784460f..0000000 --- a/.gitmodules +++ /dev/null @@ -1,3 +0,0 @@ -[submodule "cli/jacquard"] - path = cli/jacquard - url = https://tangled.org/@nonbinary.computer/jacquard diff --git a/cli/Cargo.lock b/cli/Cargo.lock index 5fa5a99..8c1748e 100644 --- a/cli/Cargo.lock +++ b/cli/Cargo.lock @@ -139,9 +139,9 @@ checksum = "d92bec98840b8f03a5ff5413de5293bfcd8bf96467cf5452609f939ec6f5de16" [[package]] name = "async-compression" -version = "0.4.32" +version = "0.4.33" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a89bce6054c720275ac2432fbba080a66a2106a44a1b804553930ca6909f4e0" +checksum = "93c1f86859c1af3d514fa19e8323147ff10ea98684e6c7b307912509f50e67b2" dependencies = [ "compression-codecs", "compression-core", @@ -158,7 +158,7 @@ checksum = "9035ad2d096bed7955a320ee7e2230574d28fd3c3a0f186cbea1ff3c7eed5dbb" dependencies = [ "proc-macro2", "quote", - "syn 2.0.108", + "syn 2.0.110", ] 
[[package]] @@ -329,7 +329,7 @@ dependencies = [ "proc-macro2", "quote", "rustversion", - "syn 2.0.108", + "syn 2.0.110", ] [[package]] @@ -428,9 +428,9 @@ dependencies = [ [[package]] name = "cc" -version = "1.2.44" +version = "1.2.45" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "37521ac7aabe3d13122dc382493e20c9416f299d2ccd5b3a5340a2570cdeb0f3" +checksum = "35900b6c8d709fb1d854671ae27aeaa9eec2f8b01b364e1619a40da3e6fe2afe" dependencies = [ "find-msvc-tools", "shlex", @@ -555,7 +555,7 @@ dependencies = [ "heck 0.5.0", "proc-macro2", "quote", - "syn 2.0.108", + "syn 2.0.110", ] [[package]] @@ -582,9 +582,9 @@ dependencies = [ [[package]] name = "compression-codecs" -version = "0.4.31" +version = "0.4.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ef8a506ec4b81c460798f572caead636d57d3d7e940f998160f52bd254bf2d23" +checksum = "680dc087785c5230f8e8843e2e57ac7c1c90488b6a91b88caa265410568f441b" dependencies = [ "compression-core", "flate2", @@ -593,9 +593,9 @@ dependencies = [ [[package]] name = "compression-core" -version = "0.4.29" +version = "0.4.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e47641d3deaf41fb1538ac1f54735925e275eaf3bf4d55c81b137fba797e5cbb" +checksum = "3a9b614a5787ef0c8802a55766480563cb3a93b435898c422ed2a359cf811582" [[package]] name = "const-oid" @@ -736,7 +736,7 @@ dependencies = [ "proc-macro2", "quote", "strsim", - "syn 2.0.108", + "syn 2.0.110", ] [[package]] @@ -747,7 +747,7 @@ checksum = "d38308df82d1080de0afee5d069fa14b0326a88c14f15c5ccda35b4a6c414c81" dependencies = [ "darling_core", "quote", - "syn 2.0.108", + "syn 2.0.110", ] [[package]] @@ -787,7 +787,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8d162beedaa69905488a8da94f5ac3edb4dd4788b732fadb7bd120b2625c1976" dependencies = [ "data-encoding", - "syn 2.0.108", + "syn 2.0.110", ] [[package]] @@ -838,7 +838,7 @@ checksum = "cb7330aeadfbe296029522e6c40f315320aba36fc43a5b3632f3795348f3bd22" dependencies = [ "proc-macro2", "quote", - "syn 2.0.108", + "syn 2.0.110", "unicode-xid", ] @@ -889,7 +889,7 @@ checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.108", + "syn 2.0.110", ] [[package]] @@ -950,7 +950,7 @@ dependencies = [ "heck 0.5.0", "proc-macro2", "quote", - "syn 2.0.108", + "syn 2.0.110", ] [[package]] @@ -1120,7 +1120,7 @@ checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650" dependencies = [ "proc-macro2", "quote", - "syn 2.0.108", + "syn 2.0.110", ] [[package]] @@ -1374,7 +1374,7 @@ dependencies = [ "markup5ever", "proc-macro2", "quote", - "syn 2.0.108", + "syn 2.0.110", ] [[package]] @@ -1431,9 +1431,9 @@ checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9" [[package]] name = "hyper" -version = "1.7.0" +version = "1.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eb3aa54a13a0dfe7fbe3a59e0c76093041720fdc77b110cc0fc260fafb4dc51e" +checksum = "1744436df46f0bde35af3eda22aeaba453aada65d8f1c171cd8a5f59030bd69f" dependencies = [ "atomic-waker", "bytes", @@ -1699,9 +1699,9 @@ checksum = "469fb0b9cefa57e3ef31275ee7cacb78f2fdca44e4765491884a2b119d4eb130" [[package]] name = "iri-string" -version = "0.7.8" +version = "0.7.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dbc5ebe9c3a1a7a5127f920a418f7585e9e758e911d0466ed004f393b0e380b2" +checksum = 
"4f867b9d1d896b67beb18518eda36fdb77a32ea590de864f1325b294a6d14397" dependencies = [ "memchr", "serde", @@ -1728,7 +1728,7 @@ checksum = "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c" [[package]] name = "jacquard" version = "0.9.0" -source = "git+https://tangled.org/@nonbinary.computer/jacquard#b5cc9b35e38e24e1890ae55e700dcfad0d6d433a" +source = "git+https://tangled.org/@nonbinary.computer/jacquard#5c79bb76de544cbd4fa8d5d8b01ba6e828f8ba65" dependencies = [ "bytes", "getrandom 0.2.16", @@ -1756,7 +1756,7 @@ dependencies = [ [[package]] name = "jacquard-api" version = "0.9.0" -source = "git+https://tangled.org/@nonbinary.computer/jacquard#b5cc9b35e38e24e1890ae55e700dcfad0d6d433a" +source = "git+https://tangled.org/@nonbinary.computer/jacquard#5c79bb76de544cbd4fa8d5d8b01ba6e828f8ba65" dependencies = [ "bon", "bytes", @@ -1774,7 +1774,7 @@ dependencies = [ [[package]] name = "jacquard-common" version = "0.9.0" -source = "git+https://tangled.org/@nonbinary.computer/jacquard#b5cc9b35e38e24e1890ae55e700dcfad0d6d433a" +source = "git+https://tangled.org/@nonbinary.computer/jacquard#5c79bb76de544cbd4fa8d5d8b01ba6e828f8ba65" dependencies = [ "base64 0.22.1", "bon", @@ -1815,19 +1815,19 @@ dependencies = [ [[package]] name = "jacquard-derive" version = "0.9.0" -source = "git+https://tangled.org/@nonbinary.computer/jacquard#b5cc9b35e38e24e1890ae55e700dcfad0d6d433a" +source = "git+https://tangled.org/@nonbinary.computer/jacquard#5c79bb76de544cbd4fa8d5d8b01ba6e828f8ba65" dependencies = [ "heck 0.5.0", "jacquard-lexicon", "proc-macro2", "quote", - "syn 2.0.108", + "syn 2.0.110", ] [[package]] name = "jacquard-identity" version = "0.9.1" -source = "git+https://tangled.org/@nonbinary.computer/jacquard#b5cc9b35e38e24e1890ae55e700dcfad0d6d433a" +source = "git+https://tangled.org/@nonbinary.computer/jacquard#5c79bb76de544cbd4fa8d5d8b01ba6e828f8ba65" dependencies = [ "bon", "bytes", @@ -1853,7 +1853,7 @@ dependencies = [ [[package]] name = "jacquard-lexicon" version = "0.9.1" -source = "git+https://tangled.org/@nonbinary.computer/jacquard#b5cc9b35e38e24e1890ae55e700dcfad0d6d433a" +source = "git+https://tangled.org/@nonbinary.computer/jacquard#5c79bb76de544cbd4fa8d5d8b01ba6e828f8ba65" dependencies = [ "cid", "dashmap", @@ -1871,7 +1871,7 @@ dependencies = [ "serde_repr", "serde_with", "sha2", - "syn 2.0.108", + "syn 2.0.110", "thiserror 2.0.17", "unicode-segmentation", ] @@ -1879,7 +1879,7 @@ dependencies = [ [[package]] name = "jacquard-oauth" version = "0.9.0" -source = "git+https://tangled.org/@nonbinary.computer/jacquard#b5cc9b35e38e24e1890ae55e700dcfad0d6d433a" +source = "git+https://tangled.org/@nonbinary.computer/jacquard#5c79bb76de544cbd4fa8d5d8b01ba6e828f8ba65" dependencies = [ "base64 0.22.1", "bytes", @@ -2183,7 +2183,7 @@ checksum = "db5b29714e950dbb20d5e6f74f9dcec4edbcc1067bb7f8ed198c097b8c1a818b" dependencies = [ "proc-macro2", "quote", - "syn 2.0.108", + "syn 2.0.110", ] [[package]] @@ -2338,9 +2338,9 @@ dependencies = [ [[package]] name = "num-bigint-dig" -version = "0.8.5" +version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "82c79c15c05d4bf82b6f5ef163104cc81a760d8e874d38ac50ab67c8877b647b" +checksum = "e661dda6640fad38e827a6d4a310ff4763082116fe217f279885c97f511bb0b7" dependencies = [ "lazy_static", "libm", @@ -2486,7 +2486,7 @@ dependencies = [ "proc-macro2", "proc-macro2-diagnostics", "quote", - "syn 2.0.108", + "syn 2.0.110", ] [[package]] @@ -2616,7 +2616,7 @@ checksum = 
"6e918e4ff8c4549eb882f14b3a4bc8c8bc93de829416eacf579f1207a8fbf861" dependencies = [ "proc-macro2", "quote", - "syn 2.0.108", + "syn 2.0.110", ] [[package]] @@ -2689,7 +2689,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "479ca8adacdd7ce8f1fb39ce9ecccbfe93a3f1344b3d0d97f20bc0196208f62b" dependencies = [ "proc-macro2", - "syn 2.0.108", + "syn 2.0.110", ] [[package]] @@ -2742,7 +2742,7 @@ checksum = "af066a9c399a26e020ada66a034357a868728e72cd426f3adcd35f80d88d88c8" dependencies = [ "proc-macro2", "quote", - "syn 2.0.108", + "syn 2.0.110", "version_check", "yansi", ] @@ -2810,9 +2810,9 @@ dependencies = [ [[package]] name = "quote" -version = "1.0.41" +version = "1.0.42" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ce25767e7b499d1b604768e7cde645d14cc8584231ea6b295e9c9eb22c02e1d1" +checksum = "a338cc41d27e6cc6dce6cefc13a0729dfbb81c262b1f519331575dd80ef3067f" dependencies = [ "proc-macro2", ] @@ -2925,7 +2925,7 @@ checksum = "b7186006dcb21920990093f30e3dea63b7d6e977bf1256be20c3563a5db070da" dependencies = [ "proc-macro2", "quote", - "syn 2.0.108", + "syn 2.0.110", ] [[package]] @@ -3103,9 +3103,9 @@ dependencies = [ [[package]] name = "rustls" -version = "0.23.34" +version = "0.23.35" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6a9586e9ee2b4f8fab52a0048ca7334d7024eef48e2cb9407e3497bb7cab7fa7" +checksum = "533f54bc6a7d4f647e46ad909549eda97bf5afc1585190ef692b4286b198bd8f" dependencies = [ "once_cell", "ring", @@ -3198,9 +3198,9 @@ dependencies = [ [[package]] name = "schemars" -version = "1.0.4" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "82d20c4491bc164fa2f6c5d44565947a52ad80b9505d8e36f8d54c27c739fcd0" +checksum = "9558e172d4e8533736ba97870c4b2cd63f84b382a3d6eb063da41b91cce17289" dependencies = [ "dyn-clone", "ref-cast", @@ -3300,7 +3300,7 @@ checksum = "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79" dependencies = [ "proc-macro2", "quote", - "syn 2.0.108", + "syn 2.0.110", ] [[package]] @@ -3360,7 +3360,7 @@ checksum = "175ee3e80ae9982737ca543e96133087cbd9a485eecc3bc4de9c1a37b47ea59c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.108", + "syn 2.0.110", ] [[package]] @@ -3387,7 +3387,7 @@ dependencies = [ "indexmap 1.9.3", "indexmap 2.12.0", "schemars 0.9.0", - "schemars 1.0.4", + "schemars 1.1.0", "serde_core", "serde_json", "serde_with_macros", @@ -3403,7 +3403,7 @@ dependencies = [ "darling", "proc-macro2", "quote", - "syn 2.0.108", + "syn 2.0.110", ] [[package]] @@ -3575,7 +3575,7 @@ dependencies = [ "quote", "serde", "sha2", - "syn 2.0.108", + "syn 2.0.110", "thiserror 1.0.69", ] @@ -3656,9 +3656,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.108" +version = "2.0.110" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da58917d35242480a05c2897064da0a80589a2a0476c9a3f2fdc83b53502e917" +checksum = "a99801b5bd34ede4cf3fc688c5919368fea4e4814a4664359503e6015b280aea" dependencies = [ "proc-macro2", "quote", @@ -3682,7 +3682,7 @@ checksum = "728a70f3dbaf5bab7f0c4b1ac8d7ae5ea60a4b5549c8a5914361c99147a709d2" dependencies = [ "proc-macro2", "quote", - "syn 2.0.108", + "syn 2.0.110", ] [[package]] @@ -3782,7 +3782,7 @@ checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" dependencies = [ "proc-macro2", "quote", - "syn 2.0.108", + "syn 2.0.110", ] [[package]] @@ -3793,7 +3793,7 @@ checksum = "3ff15c8ecd7de3849db632e14d18d2571fa09dfc5ed93479bc4485c7a517c913" 
dependencies = [ "proc-macro2", "quote", - "syn 2.0.108", + "syn 2.0.110", ] [[package]] @@ -3909,7 +3909,7 @@ checksum = "af407857209536a95c8e56f8231ef2c2e2aff839b22e07a1ffcbc617e9db9fa5" dependencies = [ "proc-macro2", "quote", - "syn 2.0.108", + "syn 2.0.110", ] [[package]] @@ -3959,9 +3959,9 @@ dependencies = [ [[package]] name = "tokio-util" -version = "0.7.16" +version = "0.7.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "14307c986784f72ef81c89db7d9e28d6ac26d16213b109ea501696195e6e3ce5" +checksum = "2efa149fe76073d6e8fd97ef4f4eca7b67f599660115591483572e406e165594" dependencies = [ "bytes", "futures-core", @@ -4075,7 +4075,7 @@ checksum = "81383ab64e72a7a8b8e13130c49e3dab29def6d0c7d76a03087b3cf71c5c6903" dependencies = [ "proc-macro2", "quote", - "syn 2.0.108", + "syn 2.0.110", ] [[package]] @@ -4125,7 +4125,7 @@ checksum = "70977707304198400eb4835a78f6a9f928bf41bba420deb8fdb175cd965d77a7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.108", + "syn 2.0.110", ] [[package]] @@ -4356,7 +4356,7 @@ dependencies = [ "bumpalo", "proc-macro2", "quote", - "syn 2.0.108", + "syn 2.0.110", "wasm-bindgen-shared", ] @@ -4521,7 +4521,7 @@ checksum = "053e2e040ab57b9dc951b72c264860db7eb3b0200ba345b4e4c3b14f67855ddf" dependencies = [ "proc-macro2", "quote", - "syn 2.0.108", + "syn 2.0.110", ] [[package]] @@ -4532,7 +4532,7 @@ checksum = "3f316c4a2570ba26bbec722032c4099d8c8bc095efccdc15688708623367e358" dependencies = [ "proc-macro2", "quote", - "syn 2.0.108", + "syn 2.0.110", ] [[package]] @@ -4995,7 +4995,7 @@ checksum = "b659052874eb698efe5b9e8cf382204678a0086ebf46982b79d6ca3182927e5d" dependencies = [ "proc-macro2", "quote", - "syn 2.0.108", + "syn 2.0.110", "synstructure", ] @@ -5016,7 +5016,7 @@ checksum = "88d2b8d9c68ad2b9e4340d7832716a4d21a22a1154777ad56ea55c51a9cf3831" dependencies = [ "proc-macro2", "quote", - "syn 2.0.108", + "syn 2.0.110", ] [[package]] @@ -5036,7 +5036,7 @@ checksum = "d71e5d6e06ab090c67b5e44993ec16b72dcbaabc526db883a360057678b48502" dependencies = [ "proc-macro2", "quote", - "syn 2.0.108", + "syn 2.0.110", "synstructure", ] @@ -5079,5 +5079,5 @@ checksum = "eadce39539ca5cb3985590102671f2567e659fca9666581ad3411d59207951f3" dependencies = [ "proc-macro2", "quote", - "syn 2.0.108", + "syn 2.0.110", ] diff --git a/cli/jacquard b/cli/jacquard deleted file mode 160000 index d533482..0000000 --- a/cli/jacquard +++ /dev/null @@ -1 +0,0 @@ -Subproject commit d533482a61f540586b1eea620b8e9a01a59d5650 -- 2.50.1 (Apple Git-155) From 122e18dd70661462c0e9e325636a820367f0d893 Mon Sep 17 00:00:00 2001 From: "@nekomimi.pet" Date: Thu, 13 Nov 2025 02:31:33 -0500 Subject: [PATCH 6/6] update flake --- cli/Cargo.lock | 2 +- cli/Cargo.toml | 2 +- crates.nix | 29 ++++++++++++++++++++++++++++- flake.nix | 19 +++++++++++++++++-- 4 files changed, 47 insertions(+), 5 deletions(-) diff --git a/cli/Cargo.lock b/cli/Cargo.lock index 8c1748e..c553def 100644 --- a/cli/Cargo.lock +++ b/cli/Cargo.lock @@ -4913,7 +4913,7 @@ dependencies = [ [[package]] name = "wisp-cli" -version = "0.1.0" +version = "0.2.0" dependencies = [ "axum", "base64 0.22.1", diff --git a/cli/Cargo.toml b/cli/Cargo.toml index c3eb22c..af3bc4b 100644 --- a/cli/Cargo.toml +++ b/cli/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "wisp-cli" -version = "0.1.0" +version = "0.2.0" edition = "2024" [features] diff --git a/crates.nix b/crates.nix index 9dbb8e0..21fea62 100644 --- a/crates.nix +++ b/crates.nix @@ -19,6 +19,7 @@ targets.x86_64-pc-windows-gnu.latest.rust-std 
targets.x86_64-unknown-linux-gnu.latest.rust-std targets.aarch64-apple-darwin.latest.rust-std + targets.aarch64-unknown-linux-gnu.latest.rust-std ]; # configure crates nci.crates."wisp-cli" = { @@ -26,8 +27,20 @@ dev.runTests = false; release.runTests = false; }; - targets."x86_64-unknown-linux-gnu" = { + targets."x86_64-unknown-linux-gnu" = let + targetPkgs = pkgs.pkgsCross.gnu64; + targetCC = targetPkgs.stdenv.cc; + targetCargoEnvVarTarget = targetPkgs.stdenv.hostPlatform.rust.cargoEnvVarTarget; + in rec { default = true; + depsDrvConfig.mkDerivation = { + nativeBuildInputs = [targetCC]; + }; + depsDrvConfig.env = rec { + TARGET_CC = "${targetCC.targetPrefix}cc"; + "CARGO_TARGET_${targetCargoEnvVarTarget}_LINKER" = TARGET_CC; + }; + drvConfig = depsDrvConfig; }; targets."x86_64-pc-windows-gnu" = let targetPkgs = pkgs.pkgsCross.mingwW64; @@ -58,6 +71,20 @@ }; drvConfig = depsDrvConfig; }; + targets."aarch64-unknown-linux-gnu" = let + targetPkgs = pkgs.pkgsCross.aarch64-multiplatform; + targetCC = targetPkgs.stdenv.cc; + targetCargoEnvVarTarget = targetPkgs.stdenv.hostPlatform.rust.cargoEnvVarTarget; + in rec { + depsDrvConfig.mkDerivation = { + nativeBuildInputs = [targetCC]; + }; + depsDrvConfig.env = rec { + TARGET_CC = "${targetCC.targetPrefix}cc"; + "CARGO_TARGET_${targetCargoEnvVarTarget}_LINKER" = TARGET_CC; + }; + drvConfig = depsDrvConfig; + }; }; }; } diff --git a/flake.nix b/flake.nix index 1870e01..a8f33e2 100644 --- a/flake.nix +++ b/flake.nix @@ -26,11 +26,26 @@ ... }: let crateOutputs = config.nci.outputs."wisp-cli"; + mkRenamedPackage = name: pkg: pkgs.runCommand name {} '' + mkdir -p $out/bin + cp ${pkg}/bin/wisp-cli $out/bin/${name} + ''; in { devShells.default = crateOutputs.devShell; packages.default = crateOutputs.packages.release; - packages.wisp-cli-windows = crateOutputs.allTargets."x86_64-pc-windows-gnu".packages.release; - packages.wisp-cli-darwin = crateOutputs.allTargets."aarch64-apple-darwin".packages.release; + packages.wisp-cli-x86_64-linux = mkRenamedPackage "wisp-cli-x86_64-linux" crateOutputs.packages.release; + packages.wisp-cli-aarch64-linux = mkRenamedPackage "wisp-cli-aarch64-linux" crateOutputs.allTargets."aarch64-unknown-linux-gnu".packages.release; + packages.wisp-cli-x86_64-windows = mkRenamedPackage "wisp-cli-x86_64-windows.exe" crateOutputs.allTargets."x86_64-pc-windows-gnu".packages.release; + packages.wisp-cli-aarch64-darwin = mkRenamedPackage "wisp-cli-aarch64-darwin" crateOutputs.allTargets."aarch64-apple-darwin".packages.release; + packages.all = pkgs.symlinkJoin { + name = "wisp-cli-all"; + paths = [ + config.packages.wisp-cli-x86_64-linux + config.packages.wisp-cli-aarch64-linux + config.packages.wisp-cli-x86_64-windows + config.packages.wisp-cli-aarch64-darwin + ]; + }; }; }; } -- 2.50.1 (Apple Git-155)