A community-based topic aggregation platform built on atproto

docs: update aggregator client, backlog, and development scripts

Update Kagi News aggregator client for nested external embed structure,
add unfurling roadmap items, and include utility scripts for testing.

**Kagi News Aggregator Updates:**
- Update client to use nested "external" structure (social.coves.embed.external)
- Update tests for new embed format
- Update README with latest API requirements
- Fix E2E tests for lexicon schema changes

**PRD Backlog Additions:**
60 new lines documenting the unfurling feature:
- URL metadata enrichment via oEmbed/OpenGraph
- Blob upload for thumbnails
- Cache strategy (24h TTL)
- Circuit breaker for provider failures
- Supported providers (Streamable, YouTube, Reddit, Kagi)

**.env.dev Updates:**
- Add unfurl service configuration
- Add blob upload settings
- Update PDS URLs for testing

**Development Scripts:**
- generate_test_comments.go: Generate test comment data
- post_streamable.py: Test Streamable unfurling E2E

**Summary:**
- Aggregator client: Updated for nested embed structure
- Documentation: Added unfurling feature to backlog
- Scripts: Testing utilities for development

All aggregator tests updated to match new lexicon schema.
Ready for deployment with backward-compatible migration path.

+3
.env.dev
···
PDS_INSTANCE_HANDLE=testuser123.local.coves.dev
PDS_INSTANCE_PASSWORD=test-password-123
+
# Kagi News Aggregator DID (for trusted thumbnail URLs)
+
KAGI_AGGREGATOR_DID=did:plc:yyf34padpfjknejyutxtionr
+
# =============================================================================
# Development Settings
# =============================================================================
+1 -1
aggregators/kagi-news/README.md
···
- Fetches RSS feeds from Kagi News daily via CRON
- Parses HTML descriptions to extract structured content (highlights, perspectives, sources)
- Formats posts using Coves rich text with facets (bold, italic, links)
-
- Hot-links images from Kagi's proxy (no blob upload)
+
- Thumbnails are automatically extracted by the server's unfurl service
- Posts to configured communities via XRPC
## Project Structure
+13 -14
aggregators/kagi-news/src/coves_client.py
···
content: str,
facets: List[Dict],
title: Optional[str] = None,
-
embed: Optional[Dict] = None
+
embed: Optional[Dict] = None,
+
thumbnail_url: Optional[str] = None
) -> str:
"""
Create a post in a community.
···
facets: Rich text facets (formatting, links)
title: Optional post title
embed: Optional external embed
+
thumbnail_url: Optional thumbnail URL (for trusted aggregators only)
Returns:
AT Proto URI of created post (e.g., "at://did:plc:.../social.coves.post/...")
···
if embed:
post_data["embed"] = embed
+
# Add thumbnail URL at top level if provided (for trusted aggregators)
+
if thumbnail_url:
+
post_data["thumbnailUrl"] = thumbnail_url
+
# Use Coves-specific endpoint (not direct PDS write)
# This provides validation, authorization, and business logic
logger.info(f"Creating post in community: {community_handle}")
···
self,
uri: str,
title: str,
-
description: str,
-
thumb: Optional[str] = None
+
description: str
) -> Dict:
"""
Create external embed object for hot-linked content.
Args:
-
uri: External URL (story link)
-
title: Story title
-
description: Story description/summary
-
thumb: Optional thumbnail image URL
+
uri: URL of the external content
+
title: Title of the content
+
description: Description/summary
Returns:
-
External embed dictionary
+
Embed dictionary ready for post creation
"""
-
embed = {
+
return {
"$type": "social.coves.embed.external",
"external": {
"uri": uri,
···
"description": description
}
}
-
-
if thumb:
-
embed["external"]["thumb"] = thumb
-
-
return embed
def _get_timestamp(self) -> str:
"""
+4 -3
aggregators/kagi-news/src/main.py
···
embed = self.coves_client.create_external_embed(
uri=story.link,
title=story.title,
-
description=story.summary[:200] if len(story.summary) > 200 else story.summary,
-
thumb=story.image_url
+
description=story.summary[:200] if len(story.summary) > 200 else story.summary
)
# Post to community
+
# Pass thumbnail URL from RSS feed at top level for trusted aggregator upload
try:
post_uri = self.coves_client.create_post(
community_handle=feed_config.community_handle,
title=story.title,
content=rich_text["content"],
facets=rich_text["facets"],
-
embed=embed
+
embed=embed,
+
thumbnail_url=story.image_url # From RSS feed - server will validate and upload
)
# Mark as posted (only if successful)
+5 -14
aggregators/kagi-news/tests/test_e2e.py
···
Verifies:
- Embed structure matches social.coves.embed.external
- All required fields are present
-
- Optional thumbnail is included when provided
+
- Thumbnails are handled by server's unfurl service (not included in client)
"""
handle, password = aggregator_credentials
···
password=password
)
-
# Test with thumbnail
+
# Create external embed (server will handle thumbnail extraction)
embed = client.create_external_embed(
uri="https://example.com/story",
title="Test Story",
-
description="Test description",
-
thumb="https://example.com/image.jpg"
+
description="Test description"
)
assert embed["$type"] == "social.coves.embed.external"
assert embed["external"]["uri"] == "https://example.com/story"
assert embed["external"]["title"] == "Test Story"
assert embed["external"]["description"] == "Test description"
-
assert embed["external"]["thumb"] == "https://example.com/image.jpg"
-
-
# Test without thumbnail
-
embed_no_thumb = client.create_external_embed(
-
uri="https://example.com/story2",
-
title="Test Story 2",
-
description="Test description 2"
-
)
-
-
assert "thumb" not in embed_no_thumb["external"]
+
# Thumbnail is not included - server's unfurl service handles it
+
assert "thumb" not in embed["external"]
print("\n✅ External embed format correct")
+4 -3
aggregators/kagi-news/tests/test_main.py
···
mock_client.create_post.return_value = "at://did:plc:test/social.coves.post/abc123"
# Mock create_external_embed to return proper embed structure
+
# Note: Thumbnails are handled by server's unfurl service, not client
mock_client.create_external_embed.return_value = {
"$type": "social.coves.embed.external",
"external": {
"uri": sample_story.link,
"title": sample_story.title,
-
"description": sample_story.summary,
-
"thumb": sample_story.image_url
+
"description": sample_story.summary
}
}
···
assert call_kwargs["embed"]["$type"] == "social.coves.embed.external"
assert call_kwargs["embed"]["external"]["uri"] == sample_story.link
assert call_kwargs["embed"]["external"]["title"] == sample_story.title
-
assert call_kwargs["embed"]["external"]["thumb"] == sample_story.image_url
+
# Thumbnail is not included - server's unfurl service handles it
+
assert "thumb" not in call_kwargs["embed"]["external"]
+60
docs/PRD_BACKLOG.md
···
---
+
### Unfurl Cache Cleanup Background Job
+
**Added:** 2025-11-07 | **Effort:** 2-3 hours | **Priority:** Performance/Maintenance
+
+
**Problem:** The `unfurl_cache` table will grow indefinitely as expired entries are not deleted. While the cache uses lazy expiration (checking `expires_at` on read), old records remain in the database consuming disk space.
+
+
**Impact:**
+
- 📊 ~1KB per cached URL
+
- 📈 At 10K cached URLs = ~10MB (negligible for alpha)
+
- ⚠️ At 1M cached URLs = ~1GB (potential issue at scale)
+
- 🐌 Table bloat can slow down queries over time
+
+
**Current Mitigation:**
+
- ✅ Lazy expiration: Cache hits check `expires_at` and refetch if expired
+
- ✅ Indexed on `expires_at` for efficient expiration queries
+
- ✅ Not critical for alpha (growth is gradual)
+
+
**Solution (Beta/Production):**
+
Implement background cleanup job to delete expired entries:
+
+
```go
+
// Periodic cleanup (run daily or weekly)
+
func (r *unfurlRepository) CleanupExpired(ctx context.Context) (int64, error) {
+
query := `DELETE FROM unfurl_cache WHERE expires_at < NOW()`
+
result, err := r.db.ExecContext(ctx, query)
+
if err != nil {
+
return 0, err
+
}
+
return result.RowsAffected()
+
}
+
```
+
+
**Implementation Options:**
+
1. **Cron job**: Separate process runs cleanup on schedule
+
2. **Background goroutine**: Service-level background task with configurable interval
+
3. **PostgreSQL pg_cron extension**: Database-level scheduled cleanup
+
+
**Recommended Approach:**
+
- Phase 1 (Beta): Background goroutine running weekly cleanup
+
- Phase 2 (Production): Migrate to pg_cron or external cron for reliability
+
+
**Configuration:**
+
```bash
+
UNFURL_CACHE_CLEANUP_ENABLED=true
+
UNFURL_CACHE_CLEANUP_INTERVAL=168h # 7 days
+
```
+
+
**Monitoring:**
+
- Log cleanup operations: `[UNFURL-CACHE-CLEANUP] Deleted 1234 expired entries`
+
- Track table size growth over time
+
- Alert if table exceeds threshold (e.g., 100MB)
+
+
**Files to Create:**
+
- `internal/core/unfurl/cleanup.go` - Background cleanup service
+
+
**Related:**
+
- Implemented in oEmbed unfurling feature (2025-11-07)
+
- Cache table: [migration XXX_create_unfurl_cache.sql](../internal/db/migrations/)
+
+
---
+
## 🔵 P3: Technical Debt
### Consolidate Environment Variable Validation
+348
scripts/generate_test_comments.go
···
+
package main
+
+
import (
+
"database/sql"
+
"fmt"
+
"log"
+
"math/rand"
+
"time"
+
+
_ "github.com/lib/pq"
+
)
+
+
// Target post and community for the generated test data.
//
// Post URI:         at://did:plc:hcuo3qx2lr7h7dquusbeobht/social.coves.community.post/3m4yohkzbkc2b
// Community DID:    did:plc:hcuo3qx2lr7h7dquusbeobht
// Community Handle: test-usnews.community.coves.social
const (
	postURI      = "at://did:plc:hcuo3qx2lr7h7dquusbeobht/social.coves.community.post/3m4yohkzbkc2b"
	postCID      = "bafyzohran123"
	communityDID = "did:plc:hcuo3qx2lr7h7dquusbeobht"
)

// User is a minimal test account row for the users table.
type User struct {
	DID    string
	Handle string
	Name   string
}

// Comment mirrors a row in the comments table, including the threading
// pointers (root = the post, parent = post or another comment).
type Comment struct {
	URI       string
	CID       string
	RKey      string
	DID       string
	RootURI   string
	RootCID   string
	ParentURI string
	ParentCID string
	Content   string
	CreatedAt time.Time
}

// Fake handles used to mint test accounts; order matters, since users are
// assigned to comments by index.
var userNames = []string{
	"sarah_jenkins", "michael_chen", "jessica_rodriguez", "david_nguyen",
	"emily_williams", "james_patel", "ashley_garcia", "robert_kim",
	"jennifer_lee", "william_martinez", "amanda_johnson", "daniel_brown",
	"melissa_davis", "christopher_wilson", "rebecca_anderson", "matthew_taylor",
	"laura_thomas", "anthony_moore", "stephanie_jackson", "joshua_white",
	"nicole_harris", "ryan_martin", "rachel_thompson", "kevin_garcia",
	"michelle_robinson", "brandon_clark", "samantha_lewis", "justin_walker",
	"kimberly_hall", "tyler_allen", "brittany_young", "andrew_king",
}

// Canned top-level comments (picked by index, so order matters).
var positiveComments = []string{
	"This is such fantastic news! Zohran represents real progressive values and I couldn't be happier with this outcome!",
	"Finally! A mayor who actually understands the needs of working families. This is a historic moment for NYC!",
	"What an incredible victory! Zohran's grassroots campaign shows that people power still matters in politics.",
	"I'm so proud of our city today. This win gives me hope for the future of progressive politics!",
	"This is exactly what NYC needed. Zohran's policies on housing and healthcare are going to transform our city!",
	"Congratulations to Zohran! His commitment to affordable housing is going to make such a difference.",
	"I've been following his campaign since day one and I'm thrilled to see him win. He truly deserves this!",
	"This victory is proof that authentic progressive candidates can win. So excited for what's ahead!",
	"Zohran's dedication to public transit and climate action is exactly what we need. Great day for NYC!",
	"What a momentous occasion! His policies on education are going to help so many families.",
	"I'm emotional reading this! Zohran gives me so much hope for the direction of our city.",
	"This is the change we've been waiting for! Can't wait to see his vision become reality.",
	"His campaign was inspiring from start to finish. This win is well-deserved!",
	"Finally, a mayor who will prioritize working people over corporate interests!",
	"The grassroots organizing that made this happen was incredible to witness. Democracy in action!",
	"Zohran's focus on social justice is refreshing. This is a win for all New Yorkers!",
	"I volunteered for his campaign and this victory means everything. So proud!",
	"This gives me faith in our democratic process. People-powered campaigns can still win!",
	"His policies on criminal justice reform are exactly what NYC needs right now.",
	"What an amazing day for progressive politics! Zohran is going to do great things.",
}

// Canned first-level replies (picked at random).
var replyComments = []string{
	"Absolutely agree! This is going to be transformative.",
	"Couldn't have said it better myself!",
	"Yes! This is exactly right.",
	"100% this! So well said.",
	"This perfectly captures how I feel too!",
	"Exactly my thoughts! Great perspective.",
	"So true! I'm equally excited.",
	"Well put! I share your optimism.",
	"This! Absolutely this!",
	"I feel the same way! Great comment.",
	"You took the words right out of my mouth!",
	"Perfectly stated! I agree completely.",
	"Yes yes yes! This is it exactly.",
	"This is spot on! Thank you for sharing.",
	"I couldn't agree more with this take!",
	"Exactly what I was thinking! Well said.",
	"This captures it perfectly!",
	"So much this! Great comment.",
	"You nailed it! I feel exactly the same.",
	"This is the best take I've seen! Agreed!",
}

// Canned second-level replies used to exercise nested threading.
var deepReplyComments = []string{
	"And it's not just about the policies, it's about the movement he's building!",
	"This thread is giving me life! So glad to see so many people excited.",
	"I love seeing all this positive energy! We're going to change NYC together.",
	"Reading these comments makes me even more hopeful. We did this!",
	"The solidarity in this thread is beautiful. This is what democracy looks like!",
	"I'm so grateful to be part of this community right now. Historic moment!",
	"This conversation shows how ready people are for real change.",
	"Seeing this support gives me so much hope for what's possible.",
	"This is the kind of energy we need to keep up! Let's go!",
	"I'm saving this thread to look back on this incredible moment.",
}
+
+
// generateTID returns a unique-looking record key for test data: the current
// timestamp in microseconds followed by a zero-padded 4-digit random suffix.
// NOTE: this is not a real atproto TID; it is only collision-resistant enough
// for a local test-data script.
func generateTID() string {
	micros := time.Now().UnixMicro()
	suffix := rand.Intn(10000)
	return fmt.Sprintf("%d%04d", micros, suffix)
}
+
+
func createUser(db *sql.DB, handle, name string, idx int) (*User, error) {
+
did := fmt.Sprintf("did:plc:testuser%d%d", time.Now().Unix(), idx)
+
user := &User{
+
DID: did,
+
Handle: handle,
+
Name: name,
+
}
+
+
query := `
+
INSERT INTO users (did, handle, pds_url, created_at, updated_at)
+
VALUES ($1, $2, $3, NOW(), NOW())
+
ON CONFLICT (did) DO NOTHING
+
`
+
+
_, err := db.Exec(query, user.DID, user.Handle, "http://localhost:3001")
+
if err != nil {
+
return nil, fmt.Errorf("failed to create user: %w", err)
+
}
+
+
log.Printf("Created user: %s (%s)", user.Handle, user.DID)
+
return user, nil
+
}
+
+
func createComment(db *sql.DB, user *User, content, parentURI, parentCID string, createdAt time.Time) (*Comment, error) {
+
rkey := generateTID()
+
uri := fmt.Sprintf("at://%s/social.coves.feed.comment/%s", user.DID, rkey)
+
cid := fmt.Sprintf("bafy%s", rkey)
+
+
comment := &Comment{
+
URI: uri,
+
CID: cid,
+
RKey: rkey,
+
DID: user.DID,
+
RootURI: postURI,
+
RootCID: postCID,
+
ParentURI: parentURI,
+
ParentCID: parentCID,
+
Content: content,
+
CreatedAt: createdAt,
+
}
+
+
query := `
+
INSERT INTO comments (
+
uri, cid, rkey, commenter_did, root_uri, root_cid,
+
parent_uri, parent_cid, content, created_at, indexed_at
+
) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, NOW())
+
ON CONFLICT (uri) DO NOTHING
+
RETURNING id
+
`
+
+
var id int64
+
err := db.QueryRow(query,
+
comment.URI, comment.CID, comment.RKey, comment.DID,
+
comment.RootURI, comment.RootCID, comment.ParentURI, comment.ParentCID,
+
comment.Content, comment.CreatedAt,
+
).Scan(&id)
+
if err != nil {
+
return nil, fmt.Errorf("failed to create comment: %w", err)
+
}
+
+
log.Printf("Created comment by %s: %.50s...", user.Handle, content)
+
return comment, nil
+
}
+
+
// updateCommentCount bumps the denormalized counter on the parent record:
// posts.comment_count when isPost is true, comments.reply_count otherwise.
func updateCommentCount(db *sql.DB, parentURI string, isPost bool) error {
	stmt := `
		UPDATE comments
		SET reply_count = reply_count + 1
		WHERE uri = $1
	`
	if isPost {
		stmt = `
		UPDATE posts
		SET comment_count = comment_count + 1
		WHERE uri = $1
	`
	}
	_, err := db.Exec(stmt, parentURI)
	return err
}
+
+
func main() {
+
// Connect to dev database
+
dbURL := "postgres://dev_user:dev_password@localhost:5435/coves_dev?sslmode=disable"
+
db, err := sql.Open("postgres", dbURL)
+
if err != nil {
+
log.Fatalf("Failed to connect to database: %v", err)
+
}
+
defer db.Close()
+
+
if err := db.Ping(); err != nil {
+
log.Fatalf("Failed to ping database: %v", err)
+
}
+
+
log.Println("Connected to database successfully!")
+
log.Printf("Post URI: %s", postURI)
+
log.Println("Starting to generate test data...")
+
+
rand.Seed(time.Now().UnixNano())
+
+
// Create users
+
log.Println("\n=== Creating Users ===")
+
users := make([]*User, 0, len(userNames))
+
for i, name := range userNames {
+
handle := fmt.Sprintf("%s.bsky.social", name)
+
user, err := createUser(db, handle, name, i)
+
if err != nil {
+
log.Printf("Warning: Failed to create user %s: %v", name, err)
+
continue
+
}
+
users = append(users, user)
+
}
+
+
log.Printf("\nCreated %d users", len(users))
+
+
// Generate comments with varied timing
+
log.Println("\n=== Creating Top-Level Comments ===")
+
baseTime := time.Now().Add(-2 * time.Hour) // Comments from 2 hours ago
+
topLevelComments := make([]*Comment, 0)
+
+
// Create 15-20 top-level comments
+
numTopLevel := 15 + rand.Intn(6)
+
for i := 0; i < numTopLevel && i < len(users); i++ {
+
user := users[i]
+
content := positiveComments[i%len(positiveComments)]
+
createdAt := baseTime.Add(time.Duration(i*5+rand.Intn(3)) * time.Minute)
+
+
comment, err := createComment(db, user, content, postURI, postCID, createdAt)
+
if err != nil {
+
log.Printf("Warning: Failed to create top-level comment: %v", err)
+
continue
+
}
+
+
topLevelComments = append(topLevelComments, comment)
+
+
// Update post comment count
+
if err := updateCommentCount(db, postURI, true); err != nil {
+
log.Printf("Warning: Failed to update post comment count: %v", err)
+
}
+
+
// Small delay to avoid timestamp collisions
+
time.Sleep(10 * time.Millisecond)
+
}
+
+
log.Printf("Created %d top-level comments", len(topLevelComments))
+
+
// Create first-level replies (replies to top-level comments)
+
log.Println("\n=== Creating First-Level Replies ===")
+
firstLevelReplies := make([]*Comment, 0)
+
+
for i, parentComment := range topLevelComments {
+
// 60% chance of having replies
+
if rand.Float64() > 0.6 {
+
continue
+
}
+
+
// 1-3 replies per comment
+
numReplies := 1 + rand.Intn(3)
+
for j := 0; j < numReplies; j++ {
+
userIdx := (i*3 + j + len(topLevelComments)) % len(users)
+
user := users[userIdx]
+
content := replyComments[rand.Intn(len(replyComments))]
+
createdAt := parentComment.CreatedAt.Add(time.Duration(5+rand.Intn(10)) * time.Minute)
+
+
comment, err := createComment(db, user, content, parentComment.URI, parentComment.CID, createdAt)
+
if err != nil {
+
log.Printf("Warning: Failed to create first-level reply: %v", err)
+
continue
+
}
+
+
firstLevelReplies = append(firstLevelReplies, comment)
+
+
// Update parent comment reply count
+
if err := updateCommentCount(db, parentComment.URI, false); err != nil {
+
log.Printf("Warning: Failed to update comment reply count: %v", err)
+
}
+
+
time.Sleep(10 * time.Millisecond)
+
}
+
}
+
+
log.Printf("Created %d first-level replies", len(firstLevelReplies))
+
+
// Create second-level replies (replies to replies) - testing nested threading
+
log.Println("\n=== Creating Second-Level Replies ===")
+
secondLevelCount := 0
+
+
for i, parentComment := range firstLevelReplies {
+
// 40% chance of having deep replies
+
if rand.Float64() > 0.4 {
+
continue
+
}
+
+
// 1-2 deep replies
+
numReplies := 1 + rand.Intn(2)
+
for j := 0; j < numReplies; j++ {
+
userIdx := (i*2 + j + len(topLevelComments) + len(firstLevelReplies)) % len(users)
+
user := users[userIdx]
+
content := deepReplyComments[rand.Intn(len(deepReplyComments))]
+
createdAt := parentComment.CreatedAt.Add(time.Duration(3+rand.Intn(7)) * time.Minute)
+
+
_, err := createComment(db, user, content, parentComment.URI, parentComment.CID, createdAt)
+
if err != nil {
+
log.Printf("Warning: Failed to create second-level reply: %v", err)
+
continue
+
}
+
+
secondLevelCount++
+
+
// Update parent comment reply count
+
if err := updateCommentCount(db, parentComment.URI, false); err != nil {
+
log.Printf("Warning: Failed to update comment reply count: %v", err)
+
}
+
+
time.Sleep(10 * time.Millisecond)
+
}
+
}
+
+
log.Printf("Created %d second-level replies", secondLevelCount)
+
+
// Print summary
+
totalComments := len(topLevelComments) + len(firstLevelReplies) + secondLevelCount
+
log.Println("\n=== Summary ===")
+
log.Printf("Total users created: %d", len(users))
+
log.Printf("Total comments created: %d", totalComments)
+
log.Printf(" - Top-level comments: %d", len(topLevelComments))
+
log.Printf(" - First-level replies: %d", len(firstLevelReplies))
+
log.Printf(" - Second-level replies: %d", secondLevelCount)
+
log.Println("\nDone! Check the post at !test-usnews for the comments.")
+
}
+134
scripts/post_streamable.py
···
+
#!/usr/bin/env python3
+
"""
+
Quick script to post a Streamable video to test-usnews community.
+
Uses the kagi-news CovesClient infrastructure.
+
"""
+
+
import sys
+
import os
+
+
# Add kagi-news src to path to use CovesClient
+
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '../aggregators/kagi-news'))
+
+
from src.coves_client import CovesClient
+
+
def main():
+
# Configuration
+
COVES_API_URL = "http://localhost:8081"
+
PDS_URL = "http://localhost:3001"
+
+
# Use PDS instance credentials (from .env.dev)
+
HANDLE = "testuser123.local.coves.dev"
+
PASSWORD = "test-password-123"
+
+
# Post details
+
COMMUNITY_HANDLE = "test-usnews.community.coves.social"
+
+
# Post 1: Streamable video
+
STREAMABLE_URL = "https://streamable.com/7kpdft"
+
STREAMABLE_TITLE = "NBACentral - \"Your son don't wanna be here, we know it's your last weekend. Enjoy ..."
+
+
# Post 2: Reddit highlight
+
REDDIT_URL = "https://www.reddit.com/r/nba/comments/1orfsgm/highlight_giannis_antetokounmpo_41_pts_15_reb_9/"
+
REDDIT_TITLE = "[Highlight] Giannis Antetokounmpo (41 PTS, 15 REB, 9 AST) tallies his 56th career regular season game of 40+ points, passing Kareem Abdul-Jabbar for the most such games in franchise history. Milwaukee defeats Chicago 126-110 to win their NBA Cup opener."
+
+
# Initialize client
+
print(f"Initializing Coves client...")
+
print(f" API URL: {COVES_API_URL}")
+
print(f" PDS URL: {PDS_URL}")
+
print(f" Handle: {HANDLE}")
+
+
client = CovesClient(
+
api_url=COVES_API_URL,
+
handle=HANDLE,
+
password=PASSWORD,
+
pds_url=PDS_URL
+
)
+
+
# Authenticate
+
print("\nAuthenticating...")
+
try:
+
client.authenticate()
+
print(f"✓ Authenticated as {client.did}")
+
except Exception as e:
+
print(f"✗ Authentication failed: {e}")
+
return 1
+
+
# Post 1: Streamable video
+
print("\n" + "="*60)
+
print("POST 1: STREAMABLE VIDEO")
+
print("="*60)
+
+
print("\nCreating minimal external embed (URI only)...")
+
streamable_embed = {
+
"$type": "social.coves.embed.external",
+
"external": {
+
"uri": STREAMABLE_URL
+
}
+
}
+
print(f"✓ Embed created with URI only (unfurl service should enrich)")
+
+
print(f"\nPosting to {COMMUNITY_HANDLE}...")
+
print(f" Title: {STREAMABLE_TITLE}")
+
print(f" Video: {STREAMABLE_URL}")
+
+
try:
+
post_uri = client.create_post(
+
community_handle=COMMUNITY_HANDLE,
+
title=STREAMABLE_TITLE,
+
content="",
+
facets=[],
+
embed=streamable_embed
+
)
+
+
print(f"\n✓ Streamable post created successfully!")
+
print(f" URI: {post_uri}")
+
+
except Exception as e:
+
print(f"\n✗ Streamable post creation failed: {e}")
+
import traceback
+
traceback.print_exc()
+
return 1
+
+
# Post 2: Reddit highlight
+
print("\n" + "="*60)
+
print("POST 2: REDDIT HIGHLIGHT")
+
print("="*60)
+
+
print("\nCreating minimal external embed (URI only)...")
+
reddit_embed = {
+
"$type": "social.coves.embed.external",
+
"external": {
+
"uri": REDDIT_URL
+
}
+
}
+
print(f"✓ Embed created with URI only (unfurl service should enrich)")
+
+
print(f"\nPosting to {COMMUNITY_HANDLE}...")
+
print(f" Title: {REDDIT_TITLE}")
+
print(f" URL: {REDDIT_URL}")
+
+
try:
+
post_uri = client.create_post(
+
community_handle=COMMUNITY_HANDLE,
+
title=REDDIT_TITLE,
+
content="",
+
facets=[],
+
embed=reddit_embed
+
)
+
+
print(f"\n✓ Reddit post created successfully!")
+
print(f" URI: {post_uri}")
+
print(f"\n" + "="*60)
+
print("Both posts created! Check them out at !test-usnews")
+
print("="*60)
+
return 0
+
+
except Exception as e:
+
print(f"\n✗ Reddit post creation failed: {e}")
+
import traceback
+
traceback.print_exc()
+
return 1
+
+
if __name__ == "__main__":
+
sys.exit(main())