A community-based topic aggregation platform built on atproto

feat: add production deployment infrastructure

- Docker configuration (Dockerfile, docker-compose.prod.yml)
- Caddy reverse proxy with HSTS, CSP, wildcard SSL
- Deployment scripts (deploy.sh, setup-production.sh, backup.sh)
- DID key generation script
- OAuth callback with XSS protection
- Environment template (.env.prod.example)
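
A typical first deployment with the pieces in this commit might look like the sketch below (one reasonable order, run from the repo root; adjust to taste):

    cp .env.prod.example .env.prod     # then fill in the secrets described in the template
    ./scripts/generate-did-keys.sh     # writes static/.well-known/did.json and prints PDS_ROTATION_KEY
    ./scripts/setup-production.sh      # pulls images, builds the AppView, starts the stack
    ./scripts/deploy.sh appview        # later updates: rebuild and redeploy just the AppView
    ./scripts/backup.sh                # ad-hoc (or scheduled) database backups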

.env.prod.example (+57)
# Coves Production Environment Variables
# Copy to .env.prod and fill in real values
# NEVER commit .env.prod to git!

# =============================================================================
# PostgreSQL (AppView Database)
# =============================================================================
POSTGRES_DB=coves_prod
POSTGRES_USER=coves_user
POSTGRES_PASSWORD=CHANGE_ME_SECURE_PASSWORD_HERE

# =============================================================================
# PDS (Personal Data Server)
# =============================================================================
# Generate with: openssl rand -hex 32
PDS_JWT_SECRET=CHANGE_ME_64_HEX_CHARS

# Admin password for PDS management
PDS_ADMIN_PASSWORD=CHANGE_ME_SECURE_ADMIN_PASSWORD

# K256 private key for DID rotation
# Generate with: openssl rand -hex 32
PDS_ROTATION_KEY=CHANGE_ME_64_HEX_CHARS

# Optional: Email configuration for account recovery
# PDS_EMAIL_SMTP_URL=smtp://user:pass@smtp.example.com:587
# PDS_EMAIL_FROM_ADDRESS=noreply@coves.me

# =============================================================================
# AppView OAuth (for mobile app authentication)
# =============================================================================
OAUTH_CLIENT_ID=https://coves.social/client-metadata.json
OAUTH_REDIRECT_URI=https://coves.social/oauth/callback

# Generate EC P-256 private key in JWK format
# See: https://atproto.com/specs/oauth#client-metadata
OAUTH_PRIVATE_JWK={"kty":"EC","crv":"P-256","x":"...","y":"...","d":"..."}

# =============================================================================
# AppView Encryption
# =============================================================================
# For encrypting community credentials in database
# Generate with: openssl rand -base64 32
ENCRYPTION_KEY=CHANGE_ME_BASE64_ENCODED_KEY

# =============================================================================
# Cloudflare (for wildcard SSL certificates)
# =============================================================================
# Required for *.coves.social wildcard certificate
# Create at: Cloudflare Dashboard → My Profile → API Tokens → Create Token
# Template: "Edit zone DNS" with permissions for coves.social zone
CLOUDFLARE_API_TOKEN=CHANGE_ME_CLOUDFLARE_TOKEN

# =============================================================================
# Optional: Versioning
# =============================================================================
VERSION=latest
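
The template's own comments name the generator for each secret; filling them in from a shell is roughly (placeholders only, no real values):

    openssl rand -hex 32      # PDS_JWT_SECRET; scripts/generate-did-keys.sh prints a proper PDS_ROTATION_KEY
    openssl rand -base64 32   # ENCRYPTION_KEY
    # OAUTH_PRIVATE_JWK: an EC P-256 private key in JWK form; see the atproto OAuth spec linked above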
.gitignore (+1)
.env.local
.env.development
.env.production
+.env.prod
.env.test
# IDE
Caddyfile (+139)
# Coves Production Caddyfile
# Handles HTTPS for both coves.social (AppView) and coves.me (PDS)
#
# Domain architecture:
# - coves.social: AppView (API, web app)
# - *.coves.social: Community handles (route atproto-did to PDS)
# - coves.me: PDS (data storage)

# Community handle subdomains (e.g., gaming.coves.social)
# These need to route /.well-known/atproto-did to PDS for handle resolution
#
# NOTE: Wildcard certs require DNS challenge. For Cloudflare:
# 1. Create API token with Zone:DNS:Edit permissions
# 2. Set CLOUDFLARE_API_TOKEN environment variable
# 3. Use caddy-dns/cloudflare plugin (see docker-compose.prod.yml)
*.coves.social {
    tls {
        dns cloudflare {env.CLOUDFLARE_API_TOKEN}
    }
    # Handle resolution - proxy to PDS
    handle /.well-known/atproto-did {
        reverse_proxy pds:3000
    }

    # OAuth well-known endpoints - proxy to PDS
    handle /.well-known/oauth-protected-resource {
        reverse_proxy pds:3000
    }

    handle /.well-known/oauth-authorization-server {
        reverse_proxy pds:3000
    }

    # All other requests return 404 (subdomains only exist for handle resolution)
    handle {
        respond "Not Found" 404
    }

    # Security headers
    header {
        Strict-Transport-Security "max-age=31536000; includeSubDomains; preload"
        X-Content-Type-Options "nosniff"
        -Server
    }
}

# AppView Domain (root)
coves.social {
    # Serve .well-known files for DID verification
    handle /.well-known/* {
        root * /srv
        file_server
    }

    # Serve OAuth client metadata
    handle /client-metadata.json {
        root * /srv
        file_server
    }

    # Serve OAuth callback page
    handle /oauth/callback {
        root * /srv
        rewrite * /oauth/callback.html
        file_server
    }

    # Proxy all other requests to AppView
    handle {
        reverse_proxy appview:8080 {
            # Health check
            health_uri /xrpc/_health
            health_interval 30s
            health_timeout 5s

            # Headers
            header_up X-Real-IP {remote_host}
            header_up X-Forwarded-For {remote_host}
            header_up X-Forwarded-Proto {scheme}
        }
    }

    # Logging (Docker captures stdout/stderr)
    log {
        output stdout
        format json
    }

    # Security headers
    header {
        Strict-Transport-Security "max-age=31536000; includeSubDomains; preload"
        X-Content-Type-Options "nosniff"
        X-Frame-Options "DENY"
        Referrer-Policy "strict-origin-when-cross-origin"
        Content-Security-Policy "default-src 'self'; script-src 'self' 'unsafe-inline'; style-src 'self' 'unsafe-inline'; connect-src 'self' https://*.bsky.network wss://*.bsky.network"
        # Remove Server header
        -Server
    }

    # Enable compression
    encode gzip zstd
}

# PDS Domain
coves.me {
    reverse_proxy pds:3000 {
        # Health check
        health_uri /xrpc/_health
        health_interval 30s
        health_timeout 5s

        # Headers for proper client IP handling
        header_up X-Real-IP {remote_host}
        header_up X-Forwarded-For {remote_host}
        header_up X-Forwarded-Proto {scheme}

        # WebSocket support for firehose
        header_up Connection {>Connection}
        header_up Upgrade {>Upgrade}
    }

    # Logging (Docker captures stdout/stderr)
    log {
        output stdout
        format json
    }

    # Security headers
    header {
        Strict-Transport-Security "max-age=31536000; includeSubDomains; preload"
        X-Content-Type-Options "nosniff"
        X-Frame-Options "DENY"
        Referrer-Policy "strict-origin-when-cross-origin"
        -Server
    }

    # Enable compression
    encode gzip zstd
}
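
Before reloading the proxy it can be worth validating this file with the same image the compose stack uses (a sketch; caddy validate parses and provisions the config without serving traffic):

    docker run --rm \
      -v "$PWD/Caddyfile:/etc/caddy/Caddyfile:ro" \
      -e CLOUDFLARE_API_TOKEN="$CLOUDFLARE_API_TOKEN" \
      ghcr.io/slothcroissant/caddy-cloudflaredns:latest \
      caddy validate --config /etc/caddy/Caddyfile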
Dockerfile (+61)
# Coves AppView - Multi-stage Dockerfile
# Builds a minimal production image for the Go server

# Stage 1: Build
FROM golang:1.23-alpine AS builder

# Install build dependencies
RUN apk add --no-cache git ca-certificates tzdata

# Set working directory
WORKDIR /build

# Copy go mod files first (better caching)
COPY go.mod go.sum ./
RUN go mod download

# Copy source code
COPY . .

# Build the binary
# CGO_ENABLED=0 for static binary (no libc dependency)
# -ldflags="-s -w" strips debug info for smaller binary
RUN CGO_ENABLED=0 GOOS=linux GOARCH=amd64 go build \
    -ldflags="-s -w" \
    -o /build/coves-server \
    ./cmd/server

# Stage 2: Runtime
FROM alpine:3.19

# Install runtime dependencies
RUN apk add --no-cache ca-certificates tzdata

# Create non-root user for security
RUN addgroup -g 1000 coves && \
    adduser -u 1000 -G coves -s /bin/sh -D coves

# Set working directory
WORKDIR /app

# Copy binary from builder
COPY --from=builder /build/coves-server /app/coves-server

# Copy migrations (needed for goose)
COPY --from=builder /build/internal/db/migrations /app/migrations

# Set ownership
RUN chown -R coves:coves /app

# Switch to non-root user
USER coves

# Expose port
EXPOSE 8080

# Health check
HEALTHCHECK --interval=30s --timeout=5s --start-period=10s --retries=3 \
    CMD wget --spider -q http://localhost:8080/xrpc/_health || exit 1

# Run the server
ENTRYPOINT ["/app/coves-server"]
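
A quick local smoke test of the image, independent of the compose stack (a sketch; the container still needs DATABASE_URL and the other environment variables before it will fully start):

    docker build -t coves/appview:dev .
    docker image ls coves/appview:dev   # multi-stage build: the final layer is alpine plus one static binary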
docker-compose.prod.yml (+205)
# Coves Production Stack
#
# Architecture:
# - coves.social: AppView domain (API, frontend, .well-known/did.json)
# - coves.me: PDS domain (must be separate from AppView)
#
# Hardware: AMD Epyc 7351p (16c/32t), 256GB RAM, 2x500GB NVMe RAID
#
# Usage:
#   docker-compose -f docker-compose.prod.yml up -d
#
# Prerequisites:
# 1. DNS configured for both domains
# 2. SSL certificates (Caddy handles this automatically)
# 3. .env.prod file with secrets
# 4. .well-known/did.json deployed to coves.social

services:
  # PostgreSQL Database for AppView
  postgres:
    image: postgres:15
    container_name: coves-prod-postgres
    restart: unless-stopped
    environment:
      POSTGRES_DB: ${POSTGRES_DB}
      POSTGRES_USER: ${POSTGRES_USER}
      POSTGRES_PASSWORD: ${POSTGRES_PASSWORD}
    volumes:
      - postgres-data:/var/lib/postgresql/data
      # Mount backup directory for pg_dump
      - ./backups:/backups
    networks:
      - coves-internal
    healthcheck:
      test: ["CMD-SHELL", "pg_isready -U ${POSTGRES_USER} -d ${POSTGRES_DB}"]
      interval: 10s
      timeout: 5s
      retries: 5
    # Generous limits for 256GB server
    deploy:
      resources:
        limits:
          memory: 32G
        reservations:
          memory: 4G

  # Coves AppView (Go Server)
  appview:
    build:
      context: .
      dockerfile: Dockerfile
    image: coves/appview:${VERSION:-latest}
    container_name: coves-prod-appview
    restart: unless-stopped
    ports:
      - "127.0.0.1:8080:8080"  # Only expose to localhost (Caddy proxies)
    environment:
      # Database
      DATABASE_URL: postgresql://${POSTGRES_USER}:${POSTGRES_PASSWORD}@postgres:5432/${POSTGRES_DB}?sslmode=disable

      # Instance identity
      INSTANCE_DID: did:web:coves.social
      INSTANCE_DOMAIN: coves.social

      # PDS connection (separate domain!)
      PDS_URL: https://coves.me

      # Jetstream (Bluesky production firehose)
      JETSTREAM_URL: wss://jetstream2.us-east.bsky.network/subscribe

      # Security - MUST be false in production
      AUTH_SKIP_VERIFY: "false"
      SKIP_DID_WEB_VERIFICATION: "false"

      # OAuth (for community account provisioning)
      OAUTH_CLIENT_ID: ${OAUTH_CLIENT_ID}
      OAUTH_REDIRECT_URI: ${OAUTH_REDIRECT_URI}
      OAUTH_PRIVATE_JWK: ${OAUTH_PRIVATE_JWK}

      # Application settings
      PORT: 8080
      ENV: production
      LOG_LEVEL: info

      # Encryption key for community credentials
      ENCRYPTION_KEY: ${ENCRYPTION_KEY}
    networks:
      - coves-internal
    depends_on:
      postgres:
        condition: service_healthy
    healthcheck:
      test: ["CMD", "wget", "--spider", "-q", "http://localhost:8080/xrpc/_health"]
      interval: 30s
      timeout: 5s
      retries: 3
      start_period: 10s
    # Go is memory-efficient, but give it room for connection pools
    deploy:
      resources:
        limits:
          memory: 8G
        reservations:
          memory: 512M

  # Bluesky PDS (Personal Data Server)
  # Handles community accounts and their repositories
  pds:
    image: ghcr.io/bluesky-social/pds:latest
    container_name: coves-prod-pds
    restart: unless-stopped
    ports:
      - "127.0.0.1:3000:3000"  # Only expose to localhost (Caddy proxies)
    environment:
      # PDS identity
      PDS_HOSTNAME: coves.me
      PDS_PORT: 3000
      PDS_DATA_DIRECTORY: /pds
      PDS_BLOBSTORE_DISK_LOCATION: /pds/blocks

      # PLC Directory (production)
      PDS_DID_PLC_URL: https://plc.directory

      # Handle domains
      # Community handles use @community.coves.social (AppView domain)
      PDS_SERVICE_HANDLE_DOMAINS: .coves.social

      # Security (set real values in .env.prod)
      PDS_JWT_SECRET: ${PDS_JWT_SECRET}
      PDS_ADMIN_PASSWORD: ${PDS_ADMIN_PASSWORD}
      PDS_PLC_ROTATION_KEY_K256_PRIVATE_KEY_HEX: ${PDS_ROTATION_KEY}

      # Email (optional, for account recovery)
      PDS_EMAIL_SMTP_URL: ${PDS_EMAIL_SMTP_URL:-}
      PDS_EMAIL_FROM_ADDRESS: ${PDS_EMAIL_FROM_ADDRESS:-noreply@coves.me}

      # Production mode
      PDS_DEV_MODE: "false"
      PDS_INVITE_REQUIRED: "false"  # Set to true if you want invite-only

      # Logging
      NODE_ENV: production
      LOG_ENABLED: "true"
      LOG_LEVEL: info
    volumes:
      - pds-data:/pds
    networks:
      - coves-internal
    healthcheck:
      test: ["CMD", "wget", "--spider", "-q", "http://localhost:3000/xrpc/_health"]
      interval: 30s
      timeout: 5s
      retries: 5
    # PDS (Node.js) needs memory for blob handling
    deploy:
      resources:
        limits:
          memory: 16G
        reservations:
          memory: 1G

  # Caddy Reverse Proxy
  # Handles HTTPS automatically via Let's Encrypt
  # Uses Cloudflare plugin for wildcard SSL certificates (*.coves.social)
  caddy:
    # Pre-built Caddy with Cloudflare DNS plugin
    # Updates automatically with docker-compose pull
    # Alternative: build your own with Dockerfile.caddy
    image: ghcr.io/slothcroissant/caddy-cloudflaredns:latest
    container_name: coves-prod-caddy
    restart: unless-stopped
    ports:
      - "80:80"
      - "443:443"
    environment:
      # Required for wildcard SSL via DNS challenge
      # Create at: Cloudflare Dashboard → My Profile → API Tokens → Create Token
      # Permissions: Zone:DNS:Edit for coves.social zone
      CLOUDFLARE_API_TOKEN: ${CLOUDFLARE_API_TOKEN}
    volumes:
      - ./Caddyfile:/etc/caddy/Caddyfile:ro
      - caddy-data:/data
      - caddy-config:/config
      # Static files (.well-known, client-metadata.json, oauth callback)
      - ./static:/srv:ro
    networks:
      - coves-internal
    depends_on:
      - appview
      - pds

networks:
  coves-internal:
    driver: bridge
    name: coves-prod-network

volumes:
  postgres-data:
    name: coves-prod-postgres-data
  pds-data:
    name: coves-prod-pds-data
  caddy-data:
    name: coves-prod-caddy-data
  caddy-config:
    name: coves-prod-caddy-config
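
Compose only auto-loads a file literally named .env, which is why the scripts export .env.prod with set -a before calling it; when invoking compose by hand the same effect comes from --env-file. A quick way to confirm every ${...} reference resolves (a sketch):

    docker compose -f docker-compose.prod.yml --env-file .env.prod config | less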
scripts/backup.sh (+57)
#!/bin/bash
# Coves Database Backup Script
# Usage: ./scripts/backup.sh
#
# Creates timestamped PostgreSQL backups in ./backups/
# Retention: Keeps last 30 days of backups

# pipefail so a failed pg_dump aborts the script instead of silently writing an empty archive
set -eo pipefail

SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
PROJECT_DIR="$(dirname "$SCRIPT_DIR")"
BACKUP_DIR="$PROJECT_DIR/backups"
COMPOSE_FILE="$PROJECT_DIR/docker-compose.prod.yml"

# Load environment
set -a
source "$PROJECT_DIR/.env.prod"
set +a

# Colors
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
NC='\033[0m'

log() { echo -e "${GREEN}[BACKUP]${NC} $1"; }
warn() { echo -e "${YELLOW}[WARN]${NC} $1"; }

# Create backup directory
mkdir -p "$BACKUP_DIR"

# Generate timestamp
TIMESTAMP=$(date +%Y%m%d_%H%M%S)
BACKUP_FILE="$BACKUP_DIR/coves_${TIMESTAMP}.sql.gz"

log "Starting backup..."

# Run pg_dump inside container
docker compose -f "$COMPOSE_FILE" exec -T postgres \
    pg_dump -U "$POSTGRES_USER" -d "$POSTGRES_DB" --clean --if-exists \
    | gzip > "$BACKUP_FILE"

# Get file size
SIZE=$(du -h "$BACKUP_FILE" | cut -f1)

log "✅ Backup complete: $BACKUP_FILE ($SIZE)"

# Cleanup old backups (keep last 30 days)
log "Cleaning up backups older than 30 days..."
find "$BACKUP_DIR" -name "coves_*.sql.gz" -mtime +30 -delete

# List recent backups
log ""
log "Recent backups:"
ls -lh "$BACKUP_DIR"/*.sql.gz 2>/dev/null | tail -5

log ""
log "To restore: gunzip -c $BACKUP_FILE | docker compose -f docker-compose.prod.yml exec -T postgres psql -U $POSTGRES_USER -d $POSTGRES_DB"
scripts/deploy.sh (+133)
#!/bin/bash
# Coves Deployment Script
# Usage: ./scripts/deploy.sh [service]
#
# Examples:
#   ./scripts/deploy.sh            # Deploy all services
#   ./scripts/deploy.sh appview    # Deploy only AppView
#   ./scripts/deploy.sh --pull     # Pull from git first, then deploy

set -e

SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
PROJECT_DIR="$(dirname "$SCRIPT_DIR")"
COMPOSE_FILE="$PROJECT_DIR/docker-compose.prod.yml"

# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
NC='\033[0m' # No Color

log() {
    echo -e "${GREEN}[DEPLOY]${NC} $1"
}

warn() {
    echo -e "${YELLOW}[WARN]${NC} $1"
}

error() {
    echo -e "${RED}[ERROR]${NC} $1"
    exit 1
}

# Parse arguments
PULL_GIT=false
SERVICE=""

for arg in "$@"; do
    case $arg in
        --pull)
            PULL_GIT=true
            ;;
        *)
            SERVICE="$arg"
            ;;
    esac
done

cd "$PROJECT_DIR"

# Load environment variables
if [ ! -f ".env.prod" ]; then
    error ".env.prod not found! Copy from .env.prod.example and configure secrets."
fi

log "Loading environment from .env.prod..."
set -a
source .env.prod
set +a

# Optional: Pull from git
if [ "$PULL_GIT" = true ]; then
    log "Pulling latest code from git..."
    git fetch origin
    git pull origin main
fi

# Check database connectivity before deployment
log "Checking database connectivity..."
if docker compose -f "$COMPOSE_FILE" exec -T postgres pg_isready -U "$POSTGRES_USER" -d "$POSTGRES_DB" > /dev/null 2>&1; then
    log "Database is ready"
else
    warn "Database not ready yet - it will start with the deployment"
fi

# Build and deploy
if [ -n "$SERVICE" ]; then
    log "Building $SERVICE..."
    docker compose -f "$COMPOSE_FILE" build --no-cache "$SERVICE"

    log "Deploying $SERVICE..."
    docker compose -f "$COMPOSE_FILE" up -d "$SERVICE"
else
    log "Building all services..."
    docker compose -f "$COMPOSE_FILE" build --no-cache

    log "Deploying all services..."
    docker compose -f "$COMPOSE_FILE" up -d
fi

# Health check
log "Waiting for services to be healthy..."
sleep 10

# Wait for database to be ready before running migrations
log "Waiting for database..."
for i in {1..30}; do
    if docker compose -f "$COMPOSE_FILE" exec -T postgres pg_isready -U "$POSTGRES_USER" -d "$POSTGRES_DB" > /dev/null 2>&1; then
        break
    fi
    sleep 1
done

# Run database migrations
# The AppView runs migrations on startup, but we can also trigger them explicitly
log "Running database migrations..."
if docker compose -f "$COMPOSE_FILE" exec -T appview /app/coves-server migrate 2>/dev/null; then
    log "✅ Migrations completed"
else
    warn "⚠️ Migration command not available or failed - AppView will run migrations on startup"
fi

# Check AppView health
if docker compose -f "$COMPOSE_FILE" exec -T appview wget --spider -q http://localhost:8080/xrpc/_health 2>/dev/null; then
    log "✅ AppView is healthy"
else
    warn "⚠️ AppView health check failed - check logs with: docker compose -f docker-compose.prod.yml logs appview"
fi

# Check PDS health
if docker compose -f "$COMPOSE_FILE" exec -T pds wget --spider -q http://localhost:3000/xrpc/_health 2>/dev/null; then
    log "✅ PDS is healthy"
else
    warn "⚠️ PDS health check failed - check logs with: docker compose -f docker-compose.prod.yml logs pds"
fi

log "Deployment complete!"
log ""
log "Useful commands:"
log "  View logs:    docker compose -f docker-compose.prod.yml logs -f"
log "  Check status: docker compose -f docker-compose.prod.yml ps"
log "  Rollback:     docker compose -f docker-compose.prod.yml down && git checkout HEAD~1 && ./scripts/deploy.sh"
scripts/generate-did-keys.sh (+149)
#!/bin/bash
# Generate cryptographic keys for Coves did:web DID document
#
# This script generates a secp256k1 (K-256) key pair as required by atproto.
# Reference: https://atproto.com/specs/cryptography
#
# Key format:
# - Curve: secp256k1 (K-256) - same as Bitcoin/Ethereum
# - Type: Multikey
# - Encoding: publicKeyMultibase with base58btc ('z' prefix)
# - Multicodec: 0xe7 for secp256k1 compressed public key
#
# Output:
# - Private key (hex) for PDS_PLC_ROTATION_KEY_K256_PRIVATE_KEY_HEX
# - Public key (multibase) for did.json publicKeyMultibase field
# - Complete did.json file

set -e

SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
PROJECT_DIR="$(dirname "$SCRIPT_DIR")"
OUTPUT_DIR="$PROJECT_DIR/static/.well-known"

# Colors
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
RED='\033[0;31m'
NC='\033[0m'

log() { echo -e "${GREEN}[KEYGEN]${NC} $1"; }
warn() { echo -e "${YELLOW}[WARN]${NC} $1"; }
error() { echo -e "${RED}[ERROR]${NC} $1"; exit 1; }

# Check for required tools
if ! command -v openssl &> /dev/null; then
    error "openssl is required but not installed"
fi

if ! command -v python3 &> /dev/null; then
    error "python3 is required for base58 encoding"
fi

# Check for base58 library
if ! python3 -c "import base58" 2>/dev/null; then
    warn "Installing base58 Python library..."
    pip3 install base58 || error "Failed to install base58. Run: pip3 install base58"
fi

log "Generating secp256k1 key pair for did:web..."

# Generate private key
PRIVATE_KEY_PEM=$(mktemp)
openssl ecparam -name secp256k1 -genkey -noout -out "$PRIVATE_KEY_PEM" 2>/dev/null

# Extract private key as hex (for PDS config)
PRIVATE_KEY_HEX=$(openssl ec -in "$PRIVATE_KEY_PEM" -text -noout 2>/dev/null | \
    grep -A 3 "priv:" | tail -n 3 | tr -d ' :\n' | tr -d '\r')

# Extract public key as compressed format
# OpenSSL outputs the public key, we need to get the compressed form
PUBLIC_KEY_HEX=$(openssl ec -in "$PRIVATE_KEY_PEM" -pubout -conv_form compressed -outform DER 2>/dev/null | \
    tail -c 33 | xxd -p | tr -d '\n')

# Clean up temp file
rm -f "$PRIVATE_KEY_PEM"

# Encode public key as multibase with multicodec
# Multicodec 0xe7 = secp256k1 compressed public key
# Then base58btc encode with 'z' prefix
PUBLIC_KEY_MULTIBASE=$(python3 << EOF
import base58

# Compressed public key bytes
pub_hex = "$PUBLIC_KEY_HEX"
pub_bytes = bytes.fromhex(pub_hex)

# Prepend multicodec 0xe7 for secp256k1-pub
# 0xe7 is >= 0x80, so its unsigned varint encoding takes two bytes: 0xe7 0x01
multicodec = bytes([0xe7, 0x01])  # varint(0xe7) for secp256k1-pub compressed
key_with_codec = multicodec + pub_bytes

# Base58btc encode
encoded = base58.b58encode(key_with_codec).decode('ascii')

# Add 'z' prefix for multibase
print('z' + encoded)
EOF
)

log "Keys generated successfully!"
echo ""
echo "============================================"
echo "  PRIVATE KEY (keep secret!)"
echo "============================================"
echo ""
echo "Add this to your .env.prod file:"
echo ""
echo "PDS_ROTATION_KEY=$PRIVATE_KEY_HEX"
echo ""
echo "============================================"
echo "  PUBLIC KEY (for did.json)"
echo "============================================"
echo ""
echo "publicKeyMultibase: $PUBLIC_KEY_MULTIBASE"
echo ""

# Generate the did.json file
log "Generating did.json..."

mkdir -p "$OUTPUT_DIR"

cat > "$OUTPUT_DIR/did.json" << EOF
{
  "id": "did:web:coves.social",
  "alsoKnownAs": ["at://coves.social"],
  "verificationMethod": [
    {
      "id": "did:web:coves.social#atproto",
      "type": "Multikey",
      "controller": "did:web:coves.social",
      "publicKeyMultibase": "$PUBLIC_KEY_MULTIBASE"
    }
  ],
  "service": [
    {
      "id": "#atproto_pds",
      "type": "AtprotoPersonalDataServer",
      "serviceEndpoint": "https://coves.me"
    }
  ]
}
EOF

log "Created: $OUTPUT_DIR/did.json"
echo ""
echo "============================================"
echo "  NEXT STEPS"
echo "============================================"
echo ""
echo "1. Copy the PDS_ROTATION_KEY value to your .env.prod file"
echo ""
echo "2. Verify the did.json looks correct:"
echo "   cat $OUTPUT_DIR/did.json"
echo ""
echo "3. After deployment, verify it's accessible:"
echo "   curl https://coves.social/.well-known/did.json"
echo ""
warn "IMPORTANT: Keep the private key secret! Only share the public key."
warn "The did.json file with the public key IS safe to commit to git."
scripts/setup-production.sh (+106)
#!/bin/bash
# Coves Production Setup Script
# Run this once on a fresh server to set up everything
#
# Prerequisites:
# - Docker and docker-compose installed
# - Git installed
# - .env.prod file configured

set -e

SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
PROJECT_DIR="$(dirname "$SCRIPT_DIR")"

# Colors
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
RED='\033[0;31m'
NC='\033[0m'

log() { echo -e "${GREEN}[SETUP]${NC} $1"; }
warn() { echo -e "${YELLOW}[WARN]${NC} $1"; }
error() { echo -e "${RED}[ERROR]${NC} $1"; exit 1; }

cd "$PROJECT_DIR"

# Check prerequisites
log "Checking prerequisites..."

if ! command -v docker &> /dev/null; then
    error "Docker is not installed. Install with: curl -fsSL https://get.docker.com | sh"
fi

if ! docker compose version &> /dev/null; then
    error "docker compose is not available. Install with: apt install docker-compose-plugin"
fi

# Check for .env.prod
if [ ! -f ".env.prod" ]; then
    error ".env.prod not found! Copy from .env.prod.example and configure secrets."
fi

# Load environment
set -a
source .env.prod
set +a

# Create required directories
log "Creating directories..."
mkdir -p backups
mkdir -p static/.well-known

# Check for did.json
if [ ! -f "static/.well-known/did.json" ]; then
    warn "static/.well-known/did.json not found!"
    warn "Run ./scripts/generate-did-keys.sh to create it."
fi

# Note: Caddy logs are written to Docker volume (caddy-data)
# If you need host-accessible logs, uncomment and run as root:
# mkdir -p /var/log/caddy && chown 1000:1000 /var/log/caddy

# Pull Docker images
log "Pulling Docker images..."
docker compose -f docker-compose.prod.yml pull postgres pds caddy

# Build AppView
log "Building AppView..."
docker compose -f docker-compose.prod.yml build appview

# Start services
log "Starting services..."
docker compose -f docker-compose.prod.yml up -d

# Wait for PostgreSQL
log "Waiting for PostgreSQL to be ready..."
until docker compose -f docker-compose.prod.yml exec -T postgres pg_isready -U "$POSTGRES_USER" -d "$POSTGRES_DB" > /dev/null 2>&1; do
    sleep 2
done
log "PostgreSQL is ready!"

# Migrations
log "Database migrations run automatically when the AppView starts."
# To trigger them manually:
# docker compose -f docker-compose.prod.yml exec appview /app/coves-server migrate

# Final status
log ""
log "============================================"
log "  Coves Production Setup Complete!"
log "============================================"
log ""
log "Services running:"
docker compose -f docker-compose.prod.yml ps
log ""
log "Next steps:"
log "  1. Configure DNS for coves.social and coves.me"
log "  2. Run ./scripts/generate-did-keys.sh to create DID keys"
log "  3. Test health endpoints:"
log "     curl https://coves.social/xrpc/_health"
log "     curl https://coves.me/xrpc/_health"
log ""
log "Useful commands:"
log "  View logs:     docker compose -f docker-compose.prod.yml logs -f"
log "  Deploy update: ./scripts/deploy.sh appview"
log "  Backup DB:     ./scripts/backup.sh"
static/.well-known/did.json.template (+19)
{
  "id": "did:web:coves.social",
  "alsoKnownAs": ["at://coves.social"],
  "verificationMethod": [
    {
      "id": "did:web:coves.social#atproto",
      "type": "Multikey",
      "controller": "did:web:coves.social",
      "publicKeyMultibase": "REPLACE_WITH_YOUR_PUBLIC_KEY"
    }
  ],
  "service": [
    {
      "id": "#atproto_pds",
      "type": "AtprotoPersonalDataServer",
      "serviceEndpoint": "https://coves.me"
    }
  ]
}
static/client-metadata.json (+18)
{
  "client_id": "https://coves.social/client-metadata.json",
  "client_name": "Coves",
  "client_uri": "https://coves.social",
  "logo_uri": "https://coves.social/logo.png",
  "tos_uri": "https://coves.social/terms",
  "policy_uri": "https://coves.social/privacy",
  "redirect_uris": [
    "https://coves.social/oauth/callback",
    "social.coves:/oauth/callback"
  ],
  "scope": "atproto transition:generic",
  "grant_types": ["authorization_code", "refresh_token"],
  "response_types": ["code"],
  "application_type": "native",
  "token_endpoint_auth_method": "none",
  "dpop_bound_access_tokens": true
}
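
Once Caddy is up and serving ./static, both OAuth artifacts can be spot-checked from outside (a sketch; run after DNS and certificates are in place):

    curl -sf https://coves.social/client-metadata.json | head -3
    curl -sfI https://coves.social/oauth/callback   # should return HTTP 200 (the callback page)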
static/oauth/callback.html (+97)
<!DOCTYPE html>
<html>
<head>
  <meta charset="utf-8">
  <meta name="viewport" content="width=device-width, initial-scale=1">
  <meta http-equiv="Content-Security-Policy" content="default-src 'self'; script-src 'unsafe-inline'; style-src 'unsafe-inline'">
  <title>Authorization Successful - Coves</title>
  <style>
    body {
      font-family: system-ui, -apple-system, sans-serif;
      display: flex;
      align-items: center;
      justify-content: center;
      min-height: 100vh;
      margin: 0;
      background: #f5f5f5;
    }
    .container {
      text-align: center;
      padding: 2rem;
      background: white;
      border-radius: 8px;
      box-shadow: 0 2px 8px rgba(0,0,0,0.1);
      max-width: 400px;
    }
    .success { color: #22c55e; font-size: 3rem; margin-bottom: 1rem; }
    h1 { margin: 0 0 0.5rem; color: #1f2937; font-size: 1.5rem; }
    p { color: #6b7280; margin: 0.5rem 0; }
    a {
      display: inline-block;
      margin-top: 1rem;
      padding: 0.75rem 1.5rem;
      background: #3b82f6;
      color: white;
      text-decoration: none;
      border-radius: 6px;
      font-weight: 500;
    }
    a:hover { background: #2563eb; }
  </style>
</head>
<body>
  <div class="container">
    <div class="success">✓</div>
    <h1>Authorization Successful!</h1>
    <p id="status">Returning to Coves...</p>
    <a href="#" id="manualLink">Open Coves</a>
  </div>
  <script>
    (function() {
      // Parse and sanitize query params - only allow expected OAuth parameters
      const urlParams = new URLSearchParams(window.location.search);
      const safeParams = new URLSearchParams();

      // Whitelist only expected OAuth callback parameters
      const code = urlParams.get('code');
      const state = urlParams.get('state');
      const error = urlParams.get('error');
      const errorDescription = urlParams.get('error_description');
      const iss = urlParams.get('iss');

      if (code) safeParams.set('code', code);
      if (state) safeParams.set('state', state);
      if (error) safeParams.set('error', error);
      if (errorDescription) safeParams.set('error_description', errorDescription);
      if (iss) safeParams.set('iss', iss);

      const sanitizedQuery = safeParams.toString() ? '?' + safeParams.toString() : '';

      const userAgent = navigator.userAgent || '';
      const isAndroid = /Android/i.test(userAgent);

      // Build deep link based on platform
      let deepLink;
      if (isAndroid) {
        // Android: Intent URL format
        const pathAndQuery = '/oauth/callback' + sanitizedQuery;
        deepLink = 'intent:/' + pathAndQuery + '#Intent;scheme=social.coves;package=social.coves;end';
      } else {
        // iOS: Custom scheme
        deepLink = 'social.coves:/oauth/callback' + sanitizedQuery;
      }

      // Update manual link
      document.getElementById('manualLink').href = deepLink;

      // Attempt automatic redirect
      window.location.href = deepLink;

      // Update status after 2 seconds if redirect didn't work
      setTimeout(function() {
        document.getElementById('status').textContent = 'Click the button above to continue';
      }, 2000);
    })();
  </script>
</body>
</html>