Manage Atom feeds in a persistent git repository

Refactor CLI commands and update dependencies

- Remove textual and flask dependencies from main deps
- Remove coverage options from pytest config
- Add pytest to dev dependency group
- Remove links_cmd and threads_cmd commands
- Update command imports and exports
- Apply code formatting improvements across all files
- Streamline dependency management for a CLI-focused tool

🤖 Generated with [Claude Code](https://claude.ai/code)

Co-Authored-By: Claude <noreply@anthropic.com>

+5 -6
pyproject.toml
···
"platformdirs>=4.0.0",
"pyyaml>=6.0.0",
"email_validator",
-
"textual>=4.0.0",
-
"flask>=3.1.1",
]
[project.optional-dependencies]
···
"-ra",
"--strict-markers",
"--strict-config",
-
"--cov=src/thicket",
-
"--cov-report=term-missing",
-
"--cov-report=html",
-
"--cov-report=xml",
]
filterwarnings = [
"error",
···
"class .*\\bProtocol\\):",
"@(abc\\.)?abstractmethod",
]
+
+
[dependency-groups]
+
dev = [
+
"pytest>=8.4.1",
+
]
+2 -2
src/thicket/cli/commands/__init__.py
···
"""CLI commands for thicket."""
# Import all commands to register them with the main app
-
from . import add, duplicates, info_cmd, init, links_cmd, list_cmd, sync, threads_cmd
+
from . import add, duplicates, info_cmd, init, list_cmd, sync
-
__all__ = ["add", "duplicates", "info_cmd", "init", "links_cmd", "list_cmd", "sync", "threads_cmd"]
+
__all__ = ["add", "duplicates", "info_cmd", "init", "list_cmd", "sync"]
+44 -9
src/thicket/cli/commands/add.py
···
def add_command(
subcommand: str = typer.Argument(..., help="Subcommand: 'user' or 'feed'"),
username: str = typer.Argument(..., help="Username"),
-
feed_url: Optional[str] = typer.Argument(None, help="Feed URL (required for 'user' command)"),
+
feed_url: Optional[str] = typer.Argument(
+
None, help="Feed URL (required for 'user' command)"
+
),
email: Optional[str] = typer.Option(None, "--email", "-e", help="User email"),
-
homepage: Optional[str] = typer.Option(None, "--homepage", "-h", help="User homepage"),
+
homepage: Optional[str] = typer.Option(
+
None, "--homepage", "-h", help="User homepage"
+
),
icon: Optional[str] = typer.Option(None, "--icon", "-i", help="User icon URL"),
-
display_name: Optional[str] = typer.Option(None, "--display-name", "-d", help="User display name"),
+
display_name: Optional[str] = typer.Option(
+
None, "--display-name", "-d", help="User display name"
+
),
config_file: Optional[Path] = typer.Option(
Path("thicket.yaml"), "--config", help="Configuration file path"
),
auto_discover: bool = typer.Option(
-
True, "--auto-discover/--no-auto-discover", help="Auto-discover user metadata from feed"
+
True,
+
"--auto-discover/--no-auto-discover",
+
help="Auto-discover user metadata from feed",
),
) -> None:
"""Add a user or feed to thicket."""
if subcommand == "user":
-
add_user(username, feed_url, email, homepage, icon, display_name, config_file, auto_discover)
+
add_user(
+
username,
+
feed_url,
+
email,
+
homepage,
+
icon,
+
display_name,
+
config_file,
+
auto_discover,
+
)
elif subcommand == "feed":
add_feed(username, feed_url, config_file)
else:
···
discovered_metadata = asyncio.run(discover_feed_metadata(validated_feed_url))
# Prepare user data with manual overrides taking precedence
-
user_display_name = display_name or (discovered_metadata.author_name or discovered_metadata.title if discovered_metadata else None)
-
user_email = email or (discovered_metadata.author_email if discovered_metadata else None)
-
user_homepage = homepage or (str(discovered_metadata.author_uri or discovered_metadata.link) if discovered_metadata else None)
-
user_icon = icon or (str(discovered_metadata.logo or discovered_metadata.icon or discovered_metadata.image_url) if discovered_metadata else None)
+
user_display_name = display_name or (
+
discovered_metadata.author_name or discovered_metadata.title
+
if discovered_metadata
+
else None
+
)
+
user_email = email or (
+
discovered_metadata.author_email if discovered_metadata else None
+
)
+
user_homepage = homepage or (
+
str(discovered_metadata.author_uri or discovered_metadata.link)
+
if discovered_metadata
+
else None
+
)
+
user_icon = icon or (
+
str(
+
discovered_metadata.logo
+
or discovered_metadata.icon
+
or discovered_metadata.image_url
+
)
+
if discovered_metadata
+
else None
+
)
# Add user to Git store
git_store.add_user(
+7 -3
src/thicket/cli/commands/duplicates.py
···
from ..main import app
from ..utils import (
console,
+
get_tsv_mode,
load_config,
print_error,
print_info,
print_success,
-
get_tsv_mode,
)
···
print_info(f"Total duplicates: {len(duplicates.duplicates)}")
-
def add_duplicate(git_store: GitStore, duplicate_id: Optional[str], canonical_id: Optional[str]) -> None:
+
def add_duplicate(
+
git_store: GitStore, duplicate_id: Optional[str], canonical_id: Optional[str]
+
) -> None:
"""Add a duplicate mapping."""
if not duplicate_id:
print_error("Duplicate ID is required")
···
# Remove the mapping
if git_store.remove_duplicate(duplicate_id):
# Commit changes
-
git_store.commit_changes(f"Remove duplicate mapping: {duplicate_id} -> {canonical_id}")
+
git_store.commit_changes(
+
f"Remove duplicate mapping: {duplicate_id} -> {canonical_id}"
+
)
print_success(f"Removed duplicate mapping: {duplicate_id} -> {canonical_id}")
else:
print_error(f"Failed to remove duplicate mapping: {duplicate_id}")
+78 -72
src/thicket/cli/commands/info_cmd.py
···
from rich.console import Console
from rich.panel import Panel
from rich.table import Table
-
from rich.text import Text
from ...core.git_store import GitStore
from ..main import app
-
from ..utils import load_config, get_tsv_mode
+
from ..utils import get_tsv_mode, load_config
console = Console()
···
@app.command()
def info(
identifier: str = typer.Argument(
-
...,
-
help="The atom ID or URL of the entry to display information about"
+
..., help="The atom ID or URL of the entry to display information about"
),
username: Optional[str] = typer.Option(
None,
"--username",
"-u",
-
help="Username to search for the entry (if not provided, searches all users)"
+
help="Username to search for the entry (if not provided, searches all users)",
),
config_file: Optional[Path] = typer.Option(
Path("thicket.yaml"),
···
help="Path to configuration file",
),
show_content: bool = typer.Option(
-
False,
-
"--content",
-
help="Include the full content of the entry in the output"
+
False, "--content", help="Include the full content of the entry in the output"
),
) -> None:
"""Display detailed information about a specific atom entry.
-
+
You can specify the entry using either its atom ID or URL.
Shows all metadata for the given entry, including title, dates, categories,
and summarizes all inbound and outbound links to/from other posts.
···
try:
# Load configuration
config = load_config(config_file)
-
+
# Initialize Git store
git_store = GitStore(config.git_store)
-
+
# Find the entry
entry = None
found_username = None
-
+
# Check if identifier looks like a URL
-
is_url = identifier.startswith(('http://', 'https://'))
-
+
is_url = identifier.startswith(("http://", "https://"))
+
if username:
# Search specific username
if is_url:
···
if entry:
found_username = user
break
-
+
if not entry or not found_username:
if username:
-
console.print(f"[red]Entry with {'URL' if is_url else 'atom ID'} '{identifier}' not found for user '{username}'[/red]")
+
console.print(
+
f"[red]Entry with {'URL' if is_url else 'atom ID'} '{identifier}' not found for user '{username}'[/red]"
+
)
else:
-
console.print(f"[red]Entry with {'URL' if is_url else 'atom ID'} '{identifier}' not found in any user's entries[/red]")
+
console.print(
+
f"[red]Entry with {'URL' if is_url else 'atom ID'} '{identifier}' not found in any user's entries[/red]"
+
)
raise typer.Exit(1)
-
+
# Display information
if get_tsv_mode():
_display_entry_info_tsv(entry, found_username, show_content)
else:
_display_entry_info(entry, found_username)
-
+
# Display links and backlinks from entry fields
_display_link_info(entry, found_username, git_store)
-
+
# Optionally display content
if show_content and entry.content:
_display_content(entry.content)
-
+
except Exception as e:
console.print(f"[red]Error displaying entry info: {e}[/red]")
raise typer.Exit(1)
···
def _display_entry_info(entry, username: str) -> None:
"""Display basic entry information in a structured format."""
-
+
# Create main info panel
info_table = Table.grid(padding=(0, 2))
info_table.add_column("Field", style="cyan bold", width=15)
info_table.add_column("Value", style="white")
-
+
info_table.add_row("User", f"[green]{username}[/green]")
info_table.add_row("Atom ID", f"[blue]{entry.id}[/blue]")
info_table.add_row("Title", entry.title)
info_table.add_row("Link", str(entry.link))
-
+
if entry.published:
-
info_table.add_row("Published", entry.published.strftime("%Y-%m-%d %H:%M:%S UTC"))
-
+
info_table.add_row(
+
"Published", entry.published.strftime("%Y-%m-%d %H:%M:%S UTC")
+
)
+
info_table.add_row("Updated", entry.updated.strftime("%Y-%m-%d %H:%M:%S UTC"))
-
+
if entry.summary:
# Truncate long summaries
-
summary = entry.summary[:200] + "..." if len(entry.summary) > 200 else entry.summary
+
summary = (
+
entry.summary[:200] + "..." if len(entry.summary) > 200 else entry.summary
+
)
info_table.add_row("Summary", summary)
-
+
if entry.categories:
categories_text = ", ".join(entry.categories)
info_table.add_row("Categories", categories_text)
-
+
if entry.author:
author_info = []
if "name" in entry.author:
···
author_info.append(f"<{entry.author['email']}>")
if author_info:
info_table.add_row("Author", " ".join(author_info))
-
+
if entry.content_type:
info_table.add_row("Content Type", entry.content_type)
-
+
if entry.rights:
info_table.add_row("Rights", entry.rights)
-
+
if entry.source:
info_table.add_row("Source Feed", entry.source)
-
+
panel = Panel(
-
info_table,
-
title=f"[bold]Entry Information[/bold]",
-
border_style="blue"
+
info_table, title="[bold]Entry Information[/bold]", border_style="blue"
)
-
+
console.print(panel)
def _display_link_info(entry, username: str, git_store: GitStore) -> None:
"""Display inbound and outbound link information."""
-
+
# Get links from entry fields
-
outbound_links = getattr(entry, 'links', [])
-
backlinks = getattr(entry, 'backlinks', [])
-
+
outbound_links = getattr(entry, "links", [])
+
backlinks = getattr(entry, "backlinks", [])
+
if not outbound_links and not backlinks:
console.print("\n[dim]No cross-references found for this entry.[/dim]")
return
-
+
# Create links table
links_table = Table(title="Cross-References")
links_table.add_column("Direction", style="cyan", width=10)
links_table.add_column("Target/Source", style="green", width=30)
links_table.add_column("URL/ID", style="blue", width=60)
-
+
# Add outbound links
for link in outbound_links:
links_table.add_row("→ Out", "External/Other", link)
-
+
# Add backlinks (inbound references)
for backlink_id in backlinks:
# Try to find which user this entry belongs to
source_info = backlink_id
# Could enhance this by looking up the actual entry to get username
links_table.add_row("← In", "Entry", source_info)
-
+
console.print()
console.print(links_table)
-
+
# Summary
-
console.print(f"\n[bold]Summary:[/bold] {len(outbound_links)} outbound links, {len(backlinks)} inbound backlinks")
+
console.print(
+
f"\n[bold]Summary:[/bold] {len(outbound_links)} outbound links, {len(backlinks)} inbound backlinks"
+
)
def _display_content(content: str) -> None:
"""Display the full content of the entry."""
-
+
# Truncate very long content
display_content = content
if len(content) > 5000:
display_content = content[:5000] + "\n\n[... content truncated ...]"
-
+
panel = Panel(
display_content,
title="[bold]Entry Content[/bold]",
border_style="green",
-
expand=False
+
expand=False,
)
-
+
console.print()
console.print(panel)
def _display_entry_info_tsv(entry, username: str, show_content: bool) -> None:
"""Display entry information in TSV format."""
-
+
# Basic info
print("Field\tValue")
print(f"User\t{username}")
print(f"Atom ID\t{entry.id}")
-
print(f"Title\t{entry.title.replace(chr(9), ' ').replace(chr(10), ' ').replace(chr(13), ' ')}")
+
print(
+
f"Title\t{entry.title.replace(chr(9), ' ').replace(chr(10), ' ').replace(chr(13), ' ')}"
+
)
print(f"Link\t{entry.link}")
-
+
if entry.published:
print(f"Published\t{entry.published.strftime('%Y-%m-%d %H:%M:%S UTC')}")
-
+
print(f"Updated\t{entry.updated.strftime('%Y-%m-%d %H:%M:%S UTC')}")
-
+
if entry.summary:
# Escape tabs and newlines in summary
-
summary = entry.summary.replace('\t', ' ').replace('\n', ' ').replace('\r', ' ')
+
summary = entry.summary.replace("\t", " ").replace("\n", " ").replace("\r", " ")
print(f"Summary\t{summary}")
-
+
if entry.categories:
print(f"Categories\t{', '.join(entry.categories)}")
-
+
if entry.author:
author_info = []
if "name" in entry.author:
···
author_info.append(f"<{entry.author['email']}>")
if author_info:
print(f"Author\t{' '.join(author_info)}")
-
+
if entry.content_type:
print(f"Content Type\t{entry.content_type}")
-
+
if entry.rights:
print(f"Rights\t{entry.rights}")
-
+
if entry.source:
print(f"Source Feed\t{entry.source}")
-
+
# Add links info from entry fields
-
outbound_links = getattr(entry, 'links', [])
-
backlinks = getattr(entry, 'backlinks', [])
-
+
outbound_links = getattr(entry, "links", [])
+
backlinks = getattr(entry, "backlinks", [])
+
if outbound_links or backlinks:
print(f"Outbound Links\t{len(outbound_links)}")
print(f"Backlinks\t{len(backlinks)}")
-
+
# Show each link
for link in outbound_links:
print(f"→ Link\t{link}")
-
+
for backlink_id in backlinks:
print(f"← Backlink\t{backlink_id}")
-
+
# Show content if requested
if show_content and entry.content:
# Escape tabs and newlines in content
-
content = entry.content.replace('\t', ' ').replace('\n', ' ').replace('\r', ' ')
-
print(f"Content\t{content}")
+
content = entry.content.replace("\t", " ").replace("\n", " ").replace("\r", " ")
+
print(f"Content\t{content}")
+5 -6
src/thicket/cli/commands/init.py
···
@app.command()
def init(
-
git_store: Path = typer.Argument(..., help="Path to Git repository for storing feeds"),
+
git_store: Path = typer.Argument(
+
..., help="Path to Git repository for storing feeds"
+
),
cache_dir: Optional[Path] = typer.Option(
None, "--cache-dir", "-c", help="Cache directory (default: ~/.cache/thicket)"
),
···
# Set default paths
if cache_dir is None:
from platformdirs import user_cache_dir
+
cache_dir = Path(user_cache_dir("thicket"))
if config_file is None:
···
# Create configuration
try:
-
config = ThicketConfig(
-
git_store=git_store,
-
cache_dir=cache_dir,
-
users=[]
-
)
+
config = ThicketConfig(git_store=git_store, cache_dir=cache_dir, users=[])
save_config(config, config_file)
print_success(f"Created configuration file: {config_file}")
-331
src/thicket/cli/commands/links_cmd.py
···
-
"""CLI command for extracting and categorizing all outbound links from blog entries."""
-
-
import re
-
from pathlib import Path
-
from typing import Any, Optional
-
from urllib.parse import urljoin, urlparse
-
-
import typer
-
from rich.console import Console
-
from rich.progress import (
-
BarColumn,
-
Progress,
-
SpinnerColumn,
-
TaskProgressColumn,
-
TextColumn,
-
)
-
from rich.table import Table
-
-
from ...core.git_store import GitStore
-
from ..main import app
-
from ..utils import get_tsv_mode, load_config
-
-
console = Console()
-
-
-
class LinkExtractor:
-
"""Extracts and resolves links from blog entries."""
-
-
def __init__(self) -> None:
-
# Pattern for extracting links from HTML
-
self.link_pattern = re.compile(
-
r'<a[^>]+href="([^"]+)"[^>]*>(.*?)</a>', re.IGNORECASE | re.DOTALL
-
)
-
self.url_pattern = re.compile(r'https?://[^\s<>"]+')
-
-
def extract_links_from_html(
-
self, html_content: str, base_url: str
-
) -> list[tuple[str, str]]:
-
"""Extract all links from HTML content and resolve them against base URL."""
-
links = []
-
-
# Extract links from <a> tags
-
for match in self.link_pattern.finditer(html_content):
-
url = match.group(1)
-
text = re.sub(
-
r"<[^>]+>", "", match.group(2)
-
).strip() # Remove HTML tags from link text
-
-
# Resolve relative URLs against base URL
-
resolved_url = urljoin(base_url, url)
-
links.append((resolved_url, text))
-
-
return links
-
-
def extract_links_from_entry(
-
self, entry: Any, username: str, base_url: str
-
) -> list[str]:
-
"""Extract all links from a blog entry."""
-
links = []
-
-
# Combine all text content for analysis
-
content_to_search = []
-
if entry.content:
-
content_to_search.append(entry.content)
-
if entry.summary:
-
content_to_search.append(entry.summary)
-
-
for content in content_to_search:
-
extracted_links = self.extract_links_from_html(content, base_url)
-
-
for url, _link_text in extracted_links:
-
# Skip empty URLs
-
if not url or url.startswith("#"):
-
continue
-
-
links.append(url)
-
-
return links
-
-
-
@app.command()
-
def links(
-
config_file: Optional[Path] = typer.Option(
-
Path("thicket.yaml"),
-
"--config",
-
"-c",
-
help="Path to configuration file",
-
),
-
verbose: bool = typer.Option(
-
False,
-
"--verbose",
-
"-v",
-
help="Show detailed progress information",
-
),
-
) -> None:
-
"""Extract and store outbound links and backlinks in blog entry JSON files.
-
-
This command analyzes all blog entries to extract outbound links,
-
resolve them properly with respect to the feed's base URL, and
-
store them in each entry's JSON file in a 'links' field.
-
-
It also builds backlinks by finding all entries that link to each entry
-
and storing the referring entry IDs in a 'backlinks' field.
-
"""
-
try:
-
# Load configuration
-
config = load_config(config_file)
-
-
# Initialize Git store
-
git_store = GitStore(config.git_store)
-
-
# Build user domain mapping
-
if verbose:
-
console.print("Building user domain mapping...")
-
-
index = git_store._load_index()
-
user_domains = {}
-
-
for username, user_metadata in index.users.items():
-
domains = set()
-
-
# Add domains from feeds
-
for feed_url in user_metadata.feeds:
-
domain = urlparse(feed_url).netloc.lower()
-
if domain:
-
domains.add(domain)
-
-
# Add domain from homepage
-
if user_metadata.homepage:
-
domain = urlparse(str(user_metadata.homepage)).netloc.lower()
-
if domain:
-
domains.add(domain)
-
-
user_domains[username] = domains
-
-
if verbose:
-
console.print(
-
f"Found {len(user_domains)} users with {sum(len(d) for d in user_domains.values())} total domains"
-
)
-
-
# Initialize components
-
link_extractor = LinkExtractor()
-
-
# Get all users
-
users = list(index.users.keys())
-
-
if not users:
-
console.print("[yellow]No users found in Git store[/yellow]")
-
raise typer.Exit(0)
-
-
# First pass: collect all entries and build URL mapping
-
all_entries = [] # List of (username, entry) tuples
-
url_to_entry = {} # Map URL to (username, entry_id)
-
-
with Progress(
-
SpinnerColumn(),
-
TextColumn("[progress.description]{task.description}"),
-
BarColumn(),
-
TaskProgressColumn(),
-
console=console,
-
) as progress:
-
# Count total entries first
-
counting_task = progress.add_task("Counting entries...", total=len(users))
-
total_entries = 0
-
-
for username in users:
-
entries = git_store.list_entries(username)
-
total_entries += len(entries)
-
for entry in entries:
-
all_entries.append((username, entry))
-
# Map entry's permalink URL to the entry
-
if hasattr(entry, "link") and entry.link:
-
url_to_entry[str(entry.link)] = (username, entry.id)
-
progress.advance(counting_task)
-
-
progress.remove_task(counting_task)
-
-
if verbose:
-
console.print(f"Built URL mapping for {len(url_to_entry)} entry URLs")
-
-
# Second pass: process links for each entry
-
processing_task = progress.add_task(
-
f"Processing {total_entries} entries for links...", total=total_entries
-
)
-
-
updated_entries = 0
-
total_links_found = 0
-
link_mapping = {} # entry_id -> list of linked URLs
-
-
for username, entry in all_entries:
-
user_metadata = index.users[username]
-
-
# Get base URL for this user (use first feed URL)
-
base_url = (
-
str(user_metadata.feeds[0])
-
if user_metadata.feeds
-
else "https://example.com"
-
)
-
-
# Extract links from this entry
-
extracted_urls = link_extractor.extract_links_from_entry(
-
entry, username, base_url
-
)
-
-
# Remove duplicates while preserving order
-
unique_urls = []
-
seen_urls = set()
-
-
for url in extracted_urls:
-
if url not in seen_urls:
-
unique_urls.append(url)
-
seen_urls.add(url)
-
-
# Store the links in our mapping for backlink processing
-
link_mapping[entry.id] = unique_urls
-
-
# Update entry with links
-
entry.links = unique_urls
-
if unique_urls:
-
updated_entries += 1
-
total_links_found += len(unique_urls)
-
-
if verbose:
-
console.print(
-
f" Updated {username}:{entry.title[:50]}... with {len(unique_urls)} links"
-
)
-
-
progress.advance(processing_task)
-
-
progress.remove_task(processing_task)
-
-
# Third pass: process backlinks
-
backlink_task = progress.add_task(
-
f"Processing backlinks for {total_entries} entries...",
-
total=total_entries,
-
)
-
-
# Initialize backlinks for all entries
-
for _username, entry in all_entries:
-
entry.backlinks = []
-
-
# Build backlinks by examining all the links
-
for _username, entry in all_entries:
-
source_entry_id = entry.id
-
for linked_url in link_mapping.get(source_entry_id, []):
-
# Check if this URL corresponds to an entry we know about
-
if linked_url in url_to_entry:
-
target_username, target_entry_id = url_to_entry[linked_url]
-
# Find the target entry and add this source as a backlink
-
for target_user, target_entry in all_entries:
-
if (
-
target_user == target_username
-
and target_entry.id == target_entry_id
-
):
-
if source_entry_id not in target_entry.backlinks:
-
target_entry.backlinks.append(source_entry_id)
-
break
-
-
progress.advance(backlink_task)
-
-
progress.remove_task(backlink_task)
-
-
# Final pass: save all updated entries
-
saving_task = progress.add_task(
-
f"Saving {total_entries} updated entries...", total=total_entries
-
)
-
-
backlinks_updated = 0
-
for username, entry in all_entries:
-
# Count entries that have backlinks
-
if entry.backlinks:
-
backlinks_updated += 1
-
-
# Store the updated entry back to the git store
-
git_store.store_entry(username, entry)
-
progress.advance(saving_task)
-
-
# Show summary
-
if not get_tsv_mode():
-
console.print("\n[green]✓ Links extraction completed successfully[/green]")
-
-
# Create summary table or TSV output
-
if get_tsv_mode():
-
print("Metric\tValue\tDescription")
-
print(
-
f"Entries Updated\t{updated_entries}\tBlog entries updated with links"
-
)
-
print(
-
f"Total Links\t{total_links_found}\tTotal unique links found across all entries"
-
)
-
print(
-
f"Entries with Backlinks\t{backlinks_updated}\tEntries that have backlinks from other entries"
-
)
-
print(f"Total Entries\t{total_entries}\tTotal entries processed")
-
else:
-
table = Table(title="Links & Backlinks Processing Summary")
-
table.add_column("Metric", style="cyan")
-
table.add_column("Value", style="green")
-
table.add_column("Description", style="white")
-
-
table.add_row(
-
"Entries Updated",
-
str(updated_entries),
-
"Blog entries updated with links",
-
)
-
table.add_row(
-
"Total Links",
-
str(total_links_found),
-
"Total unique links found across all entries",
-
)
-
table.add_row(
-
"Entries with Backlinks",
-
str(backlinks_updated),
-
"Entries that have backlinks from other entries",
-
)
-
table.add_row(
-
"Total Entries", str(total_entries), "Total entries processed"
-
)
-
-
console.print(table)
-
-
if not get_tsv_mode():
-
console.print(
-
"\nLinks and backlinks have been stored in individual entry JSON files in the git repository."
-
)
-
-
except Exception as e:
-
console.print(f"[red]Error extracting links: {e}[/red]")
-
if verbose:
-
console.print_exception()
-
raise typer.Exit(1) from e
+11 -11
src/thicket/cli/commands/list_cmd.py
···
from ..main import app
from ..utils import (
console,
+
get_tsv_mode,
load_config,
+
print_entries_tsv,
print_error,
-
print_feeds_table,
print_feeds_table_from_git,
print_info,
-
print_users_table,
print_users_table_from_git,
-
print_entries_tsv,
-
get_tsv_mode,
)
···
"""List all users."""
index = git_store._load_index()
users = list(index.users.values())
-
+
if not users:
print_info("No users configured")
return
···
print_feeds_table_from_git(git_store, username)
-
def list_entries(git_store: GitStore, username: Optional[str] = None, limit: Optional[int] = None) -> None:
+
def list_entries(
+
git_store: GitStore, username: Optional[str] = None, limit: Optional[int] = None
+
) -> None:
"""List entries, optionally filtered by user."""
if username:
···
"""Clean HTML content for display in table."""
if not content:
return ""
-
+
# Remove HTML tags
-
clean_text = re.sub(r'<[^>]+>', ' ', content)
+
clean_text = re.sub(r"<[^>]+>", " ", content)
# Replace multiple whitespace with single space
-
clean_text = re.sub(r'\s+', ' ', clean_text)
+
clean_text = re.sub(r"\s+", " ", clean_text)
# Strip and limit length
clean_text = clean_text.strip()
if len(clean_text) > 100:
clean_text = clean_text[:97] + "..."
-
+
return clean_text
···
if get_tsv_mode():
print_entries_tsv(entries_by_user, usernames)
return
-
+
table = Table(title="Feed Entries")
table.add_column("User", style="cyan", no_wrap=True)
table.add_column("Title", style="bold")
+15 -5
src/thicket/cli/commands/sync.py
···
user_updated_entries = 0
# Sync each feed for the user
-
for feed_url in track(user_metadata.feeds, description=f"Syncing {user_metadata.username}'s feeds"):
+
for feed_url in track(
+
user_metadata.feeds, description=f"Syncing {user_metadata.username}'s feeds"
+
):
try:
new_entries, updated_entries = asyncio.run(
sync_feed(git_store, user_metadata.username, feed_url, dry_run)
···
print_error(f"Failed to sync feed {feed_url}: {e}")
continue
-
print_info(f"User {user_metadata.username}: {user_new_entries} new, {user_updated_entries} updated")
+
print_info(
+
f"User {user_metadata.username}: {user_new_entries} new, {user_updated_entries} updated"
+
)
total_new_entries += user_new_entries
total_updated_entries += user_updated_entries
···
# Summary
if dry_run:
-
print_info(f"Dry run complete: would sync {total_new_entries} new entries, {total_updated_entries} updated")
+
print_info(
+
f"Dry run complete: would sync {total_new_entries} new entries, {total_updated_entries} updated"
+
)
else:
-
print_success(f"Sync complete: {total_new_entries} new entries, {total_updated_entries} updated")
+
print_success(
+
f"Sync complete: {total_new_entries} new entries, {total_updated_entries} updated"
+
)
-
async def sync_feed(git_store: GitStore, username: str, feed_url, dry_run: bool) -> tuple[int, int]:
+
async def sync_feed(
+
git_store: GitStore, username: str, feed_url, dry_run: bool
+
) -> tuple[int, int]:
"""Sync a single feed for a user."""
parser = FeedParser()
-1111
src/thicket/cli/commands/threads_cmd.py
···
-
"""CLI command for displaying and browsing thread-graphs of blog posts."""
-
-
from dataclasses import dataclass, field
-
from datetime import datetime
-
from enum import Enum
-
from pathlib import Path
-
from typing import Dict, List, Optional, Set, Tuple
-
-
import typer
-
from rich.console import Console
-
import json
-
import webbrowser
-
import threading
-
import time
-
from flask import Flask, render_template_string, jsonify
-
from textual import events
-
from textual.app import App, ComposeResult
-
from textual.containers import Container, Horizontal, Vertical
-
from textual.reactive import reactive
-
from textual.widget import Widget
-
from textual.widgets import Footer, Header, Label, Static
-
-
from ...core.git_store import GitStore
-
from ...models import AtomEntry
-
from ..main import app
-
from ..utils import get_tsv_mode, load_config
-
-
console = Console()
-
-
-
class LinkType(Enum):
-
"""Types of links between entries."""
-
-
SELF_REFERENCE = "self" # Link to same user's content
-
USER_REFERENCE = "user" # Link to another tracked user
-
EXTERNAL = "external" # Link to external content
-
-
-
@dataclass
-
class ThreadNode:
-
"""Represents a node in the thread graph."""
-
-
entry_id: str
-
username: str
-
entry: AtomEntry
-
outbound_links: List[Tuple[str, LinkType]] = field(
-
default_factory=list
-
) # (url, type)
-
inbound_backlinks: List[str] = field(default_factory=list) # entry_ids
-
-
@property
-
def published_date(self) -> datetime:
-
"""Get the published or updated date for sorting."""
-
return self.entry.published or self.entry.updated
-
-
@property
-
def title(self) -> str:
-
"""Get the entry title."""
-
return self.entry.title
-
-
@property
-
def summary(self) -> str:
-
"""Get a short summary of the entry."""
-
if self.entry.summary:
-
return (
-
self.entry.summary[:100] + "..."
-
if len(self.entry.summary) > 100
-
else self.entry.summary
-
)
-
return ""
-
-
-
@dataclass
-
class ThreadGraph:
-
"""Represents the full thread graph of interconnected posts."""
-
-
nodes: Dict[str, ThreadNode] = field(default_factory=dict) # entry_id -> ThreadNode
-
user_entries: Dict[str, List[str]] = field(
-
default_factory=dict
-
) # username -> [entry_ids]
-
url_to_entry: Dict[str, str] = field(default_factory=dict) # url -> entry_id
-
-
def add_node(self, node: ThreadNode) -> None:
-
"""Add a node to the graph."""
-
self.nodes[node.entry_id] = node
-
-
# Update user entries index
-
if node.username not in self.user_entries:
-
self.user_entries[node.username] = []
-
self.user_entries[node.username].append(node.entry_id)
-
-
# Update URL mapping
-
if node.entry.link:
-
self.url_to_entry[str(node.entry.link)] = node.entry_id
-
-
def get_connected_components(self) -> List[Set[str]]:
-
"""Find all connected components in the graph (threads)."""
-
visited: Set[str] = set()
-
components: List[Set[str]] = []
-
-
for entry_id in self.nodes:
-
if entry_id not in visited:
-
component: Set[str] = set()
-
self._dfs(entry_id, visited, component)
-
components.append(component)
-
-
return components
-
-
def _dfs(self, entry_id: str, visited: Set[str], component: Set[str]) -> None:
-
"""Depth-first search to find connected components."""
-
if entry_id in visited:
-
return
-
-
visited.add(entry_id)
-
component.add(entry_id)
-
-
node = self.nodes.get(entry_id)
-
if not node:
-
return
-
-
# Follow outbound links
-
for url, link_type in node.outbound_links:
-
if url in self.url_to_entry:
-
target_id = self.url_to_entry[url]
-
self._dfs(target_id, visited, component)
-
-
# Follow backlinks
-
for backlink_id in node.inbound_backlinks:
-
self._dfs(backlink_id, visited, component)
-
-
def get_standalone_entries(self) -> List[str]:
-
"""Get entries with no connections."""
-
standalone = []
-
for entry_id, node in self.nodes.items():
-
if not node.outbound_links and not node.inbound_backlinks:
-
standalone.append(entry_id)
-
return standalone
-
-
def sort_component_chronologically(self, component: Set[str]) -> List[str]:
-
"""Sort a component by published date."""
-
nodes = [
-
self.nodes[entry_id] for entry_id in component if entry_id in self.nodes
-
]
-
nodes.sort(key=lambda n: n.published_date)
-
return [n.entry_id for n in nodes]
-
-
-
def build_thread_graph(git_store: GitStore) -> ThreadGraph:
-
"""Build the thread graph from all entries in the git store."""
-
graph = ThreadGraph()
-
-
# Get all users from index
-
index = git_store._load_index()
-
user_domains = {}
-
-
# Build user domain mapping
-
for username, user_metadata in index.users.items():
-
domains = set()
-
-
# Add domains from feeds
-
for feed_url in user_metadata.feeds:
-
from urllib.parse import urlparse
-
-
domain = urlparse(str(feed_url)).netloc.lower()
-
if domain:
-
domains.add(domain)
-
-
# Add domain from homepage
-
if user_metadata.homepage:
-
domain = urlparse(str(user_metadata.homepage)).netloc.lower()
-
if domain:
-
domains.add(domain)
-
-
user_domains[username] = domains
-
-
# Process all entries
-
for username in index.users:
-
entries = git_store.list_entries(username)
-
-
for entry in entries:
-
# Create node
-
node = ThreadNode(entry_id=entry.id, username=username, entry=entry)
-
-
# Process outbound links
-
for link in getattr(entry, "links", []):
-
link_type = categorize_link(link, username, user_domains)
-
node.outbound_links.append((link, link_type))
-
-
# Copy backlinks
-
node.inbound_backlinks = getattr(entry, "backlinks", [])
-
-
# Add to graph
-
graph.add_node(node)
-
-
return graph
-
-
-
def categorize_link(
-
url: str, source_username: str, user_domains: Dict[str, Set[str]]
-
) -> LinkType:
-
"""Categorize a link as self-reference, user reference, or external."""
-
from urllib.parse import urlparse
-
-
try:
-
parsed = urlparse(url)
-
domain = parsed.netloc.lower()
-
-
# Check if it's a self-reference
-
if domain in user_domains.get(source_username, set()):
-
return LinkType.SELF_REFERENCE
-
-
# Check if it's a reference to another tracked user
-
for username, domains in user_domains.items():
-
if username != source_username and domain in domains:
-
return LinkType.USER_REFERENCE
-
-
# Otherwise it's external
-
return LinkType.EXTERNAL
-
-
except Exception:
-
return LinkType.EXTERNAL
-
-
-
class ThreadTreeWidget(Static):
-
"""Widget for displaying a thread as a tree."""
-
-
def __init__(self, component: Set[str], graph: ThreadGraph, **kwargs):
-
super().__init__(**kwargs)
-
self.component = component
-
self.graph = graph
-
-
def compose(self) -> ComposeResult:
-
"""Create the tree display."""
-
# Sort entries chronologically
-
sorted_ids = self.graph.sort_component_chronologically(self.component)
-
-
# Build tree structure as text
-
content_lines = ["Thread:"]
-
added_nodes: Set[str] = set()
-
-
# Add nodes in chronological order, showing connections
-
for entry_id in sorted_ids:
-
if entry_id not in added_nodes:
-
self._add_node_to_text(content_lines, entry_id, added_nodes, 0)
-
-
# Join all lines into content
-
content = "\n".join(content_lines)
-
-
# Create a Static widget with the content
-
yield Static(content, id="thread-content")
-
-
def _add_node_to_text(
-
self, content_lines: List[str], entry_id: str, added_nodes: Set[str], indent: int = 0
-
):
-
"""Recursively add nodes to the text display."""
-
if entry_id in added_nodes:
-
# Show cycle reference
-
node = self.graph.nodes.get(entry_id)
-
if node:
-
prefix = " " * indent
-
content_lines.append(f"{prefix}↻ {node.username}: {node.title}")
-
return
-
-
added_nodes.add(entry_id)
-
node = self.graph.nodes.get(entry_id)
-
if not node:
-
return
-
-
# Format node display
-
prefix = " " * indent
-
date_str = node.published_date.strftime("%Y-%m-%d")
-
node_label = f"{prefix}• {node.username}: {node.title} ({date_str})"
-
content_lines.append(node_label)
-
-
# Add connections info
-
if node.outbound_links:
-
links_by_type: Dict[LinkType, List[str]] = {}
-
for url, link_type in node.outbound_links:
-
if link_type not in links_by_type:
-
links_by_type[link_type] = []
-
links_by_type[link_type].append(url)
-
-
for link_type, urls in links_by_type.items():
-
type_label = f"{prefix} → {link_type.value}: {len(urls)} link(s)"
-
content_lines.append(type_label)
-
-
if node.inbound_backlinks:
-
backlink_label = f"{prefix} ← backlinks: {len(node.inbound_backlinks)}"
-
content_lines.append(backlink_label)
-
-
-
class ThreadBrowserApp(App):
-
"""Terminal UI for browsing threads."""
-
-
CSS = """
-
ThreadBrowserApp {
-
background: $surface;
-
}
-
-
#thread-list {
-
width: 1fr;
-
height: 1fr;
-
border: solid $primary;
-
overflow-y: scroll;
-
}
-
-
#entry-detail {
-
width: 1fr;
-
height: 1fr;
-
border: solid $secondary;
-
overflow-y: scroll;
-
padding: 1;
-
}
-
"""
-
-
BINDINGS = [
-
("q", "quit", "Quit"),
-
("j", "next_thread", "Next Thread"),
-
("k", "prev_thread", "Previous Thread"),
-
("enter", "select_thread", "View Thread"),
-
]
-
-
def __init__(self, graph: ThreadGraph):
-
super().__init__()
-
self.graph = graph
-
self.threads = []
-
self.current_thread_index = 0
-
self._build_thread_list()
-
-
def _build_thread_list(self):
-
"""Build the list of threads to display."""
-
# Get connected components (actual threads)
-
components = self.graph.get_connected_components()
-
-
# Sort components by the earliest date in each
-
sorted_components = []
-
for component in components:
-
if len(component) > 1: # Only show actual threads
-
sorted_ids = self.graph.sort_component_chronologically(component)
-
if sorted_ids:
-
first_node = self.graph.nodes.get(sorted_ids[0])
-
if first_node:
-
sorted_components.append((first_node.published_date, component))
-
-
sorted_components.sort(key=lambda x: x[0], reverse=True)
-
self.threads = [comp for _, comp in sorted_components]
-
-
def compose(self) -> ComposeResult:
-
"""Create the UI layout."""
-
yield Header()
-
-
with Horizontal():
-
with Vertical(id="thread-list"):
-
yield Label("Threads", classes="title")
-
for i, thread in enumerate(self.threads):
-
# Get thread summary
-
sorted_ids = self.graph.sort_component_chronologically(thread)
-
if sorted_ids:
-
first_node = self.graph.nodes.get(sorted_ids[0])
-
if first_node:
-
label = f"{i + 1}. {first_node.title} ({len(thread)} posts)"
-
yield Label(label, classes="thread-item")
-
-
with Vertical(id="entry-detail"):
-
if self.threads:
-
yield ThreadTreeWidget(self.threads[0], self.graph)
-
-
yield Footer()
-
-
def action_next_thread(self) -> None:
-
"""Move to next thread."""
-
if self.current_thread_index < len(self.threads) - 1:
-
self.current_thread_index += 1
-
self.update_display()
-
-
def action_prev_thread(self) -> None:
-
"""Move to previous thread."""
-
if self.current_thread_index > 0:
-
self.current_thread_index -= 1
-
self.update_display()
-
-
def action_select_thread(self) -> None:
-
"""View detailed thread."""
-
# In a real implementation, this could show more detail
-
pass
-
-
def update_display(self) -> None:
-
"""Update the thread display."""
-
detail_view = self.query_one("#entry-detail")
-
detail_view.remove_children()
-
-
if self.threads and self.current_thread_index < len(self.threads):
-
widget = ThreadTreeWidget(
-
self.threads[self.current_thread_index], self.graph
-
)
-
detail_view.mount(widget)
-
-
-
@app.command()
-
def threads(
-
config_file: Optional[Path] = typer.Option(
-
Path("thicket.yaml"),
-
"--config",
-
"-c",
-
help="Path to configuration file",
-
),
-
interactive: bool = typer.Option(
-
True,
-
"--interactive/--no-interactive",
-
"-i/-n",
-
help="Launch interactive terminal UI",
-
),
-
web: bool = typer.Option(
-
False,
-
"--web",
-
"-w",
-
help="Launch web server with D3 force graph visualization",
-
),
-
port: int = typer.Option(
-
8080,
-
"--port",
-
"-p",
-
help="Port for web server",
-
),
-
) -> None:
-
"""Browse and visualize thread-graphs of interconnected blog posts.
-
-
This command analyzes all blog entries and their links/backlinks to build
-
a graph of conversations and references between posts. Threads are displayed
-
as connected components in the link graph.
-
"""
-
try:
-
# Load configuration
-
config = load_config(config_file)
-
-
# Initialize Git store
-
git_store = GitStore(config.git_store)
-
-
# Build thread graph
-
console.print("Building thread graph...")
-
graph = build_thread_graph(git_store)
-
-
# Get statistics
-
components = graph.get_connected_components()
-
threads = [c for c in components if len(c) > 1]
-
standalone = graph.get_standalone_entries()
-
-
console.print(
-
f"\n[green]Found {len(threads)} threads and {len(standalone)} standalone posts[/green]"
-
)
-
-
if web:
-
# Launch web server with D3 visualization
-
_launch_web_server(graph, port)
-
elif interactive and threads:
-
# Launch terminal UI
-
app = ThreadBrowserApp(graph)
-
app.run()
-
else:
-
# Display in console
-
if get_tsv_mode():
-
_display_threads_tsv(graph, threads)
-
else:
-
_display_threads_rich(graph, threads)
-
-
except Exception as e:
-
console.print(f"[red]Error building threads: {e}[/red]")
-
raise typer.Exit(1)
-
-
-
def _display_threads_rich(graph: ThreadGraph, threads: List[Set[str]]) -> None:
-
"""Display threads using rich formatting."""
-
for i, thread in enumerate(threads[:10]): # Show first 10 threads
-
sorted_ids = graph.sort_component_chronologically(thread)
-
-
console.print(f"\n[bold]Thread {i + 1}[/bold] ({len(thread)} posts)")
-
-
for j, entry_id in enumerate(sorted_ids):
-
node = graph.nodes.get(entry_id)
-
if node:
-
date_str = node.published_date.strftime("%Y-%m-%d")
-
indent = " " * min(j, 3) # Max 3 levels of indent
-
console.print(f"{indent}• [{node.username}] {node.title} ({date_str})")
-
-
# Show link types
-
if node.outbound_links:
-
link_summary = {}
-
for _, link_type in node.outbound_links:
-
link_summary[link_type] = link_summary.get(link_type, 0) + 1
-
-
link_str = ", ".join(
-
[f"{t.value}:{c}" for t, c in link_summary.items()]
-
)
-
console.print(f"{indent} → Links: {link_str}")
-
-
-
def _display_threads_tsv(graph: ThreadGraph, threads: List[Set[str]]) -> None:
-
"""Display threads in TSV format."""
-
print("Thread\tSize\tFirst Post\tLast Post\tUsers")
-
-
for i, thread in enumerate(threads):
-
sorted_ids = graph.sort_component_chronologically(thread)
-
-
if sorted_ids:
-
first_node = graph.nodes.get(sorted_ids[0])
-
last_node = graph.nodes.get(sorted_ids[-1])
-
-
users = set()
-
for entry_id in thread:
-
node = graph.nodes.get(entry_id)
-
if node:
-
users.add(node.username)
-
-
if first_node and last_node:
-
print(
-
f"{i + 1}\t{len(thread)}\t{first_node.published_date.strftime('%Y-%m-%d')}\t{last_node.published_date.strftime('%Y-%m-%d')}\t{','.join(users)}"
-
)
-
-
-
def _build_graph_json(graph: ThreadGraph) -> dict:
-
"""Convert ThreadGraph to JSON format for D3 visualization."""
-
nodes = []
-
links = []
-
-
# Color mapping for different users
-
user_colors = {}
-
colors = [
-
"#1f77b4", "#ff7f0e", "#2ca02c", "#d62728", "#9467bd",
-
"#8c564b", "#e377c2", "#7f7f7f", "#bcbd22", "#17becf",
-
"#aec7e8", "#ffbb78", "#98df8a", "#ff9896", "#c5b0d5"
-
]
-
-
# Assign colors to users
-
for i, username in enumerate(set(node.username for node in graph.nodes.values())):
-
user_colors[username] = colors[i % len(colors)]
-
-
# Create nodes
-
for entry_id, node in graph.nodes.items():
-
nodes.append({
-
"id": entry_id,
-
"title": node.title,
-
"username": node.username,
-
"date": node.published_date.strftime("%Y-%m-%d"),
-
"summary": node.summary,
-
"color": user_colors[node.username],
-
"outbound_count": len(node.outbound_links),
-
"backlink_count": len(node.inbound_backlinks),
-
"link_types": {
-
"self": len([l for l in node.outbound_links if l[1] == LinkType.SELF_REFERENCE]),
-
"user": len([l for l in node.outbound_links if l[1] == LinkType.USER_REFERENCE]),
-
"external": len([l for l in node.outbound_links if l[1] == LinkType.EXTERNAL])
-
}
-
})
-
-
# Create links (only for links between tracked entries)
-
for entry_id, node in graph.nodes.items():
-
for url, link_type in node.outbound_links:
-
if url in graph.url_to_entry:
-
target_id = graph.url_to_entry[url]
-
if target_id in graph.nodes:
-
links.append({
-
"source": entry_id,
-
"target": target_id,
-
"type": link_type.value,
-
"url": url
-
})
-
-
return {
-
"nodes": nodes,
-
"links": links,
-
"stats": {
-
"total_nodes": len(nodes),
-
"total_links": len(links),
-
"users": list(user_colors.keys()),
-
"user_colors": user_colors
-
}
-
}
-
-
-
def _launch_web_server(graph: ThreadGraph, port: int) -> None:
-
"""Launch Flask web server with D3 force graph visualization."""
-
flask_app = Flask(__name__)
-
-
# Store graph data globally for the Flask app
-
graph_data = _build_graph_json(graph)
-
-
@flask_app.route('/')
-
def index():
-
"""Serve the main visualization page."""
-
return render_template_string(HTML_TEMPLATE, port=port)
-
-
@flask_app.route('/api/graph')
-
def api_graph():
-
"""API endpoint to serve graph data as JSON."""
-
return jsonify(graph_data)
-
-
# Disable Flask logging in development mode
-
import logging
-
log = logging.getLogger('werkzeug')
-
log.setLevel(logging.ERROR)
-
-
def open_browser():
-
"""Open browser after a short delay."""
-
time.sleep(1.5)
-
webbrowser.open(f'http://localhost:{port}')
-
-
# Start browser in a separate thread
-
browser_thread = threading.Thread(target=open_browser)
-
browser_thread.daemon = True
-
browser_thread.start()
-
-
console.print(f"\n[green]Starting web server at http://localhost:{port}[/green]")
-
console.print("[yellow]Press Ctrl+C to stop the server[/yellow]")
-
-
try:
-
flask_app.run(host='0.0.0.0', port=port, debug=False)
-
except KeyboardInterrupt:
-
console.print("\n[green]Server stopped[/green]")
-
-
-
# HTML template for D3 force graph visualization
-
HTML_TEMPLATE = """
-
<!DOCTYPE html>
-
<html lang="en">
-
<head>
-
<meta charset="UTF-8">
-
<meta name="viewport" content="width=device-width, initial-scale=1.0">
-
<title>Thicket Thread Graph Visualization</title>
-
<script src="https://d3js.org/d3.v7.min.js"></script>
-
<style>
-
body {
-
font-family: 'Segoe UI', Tahoma, Geneva, Verdana, sans-serif;
-
margin: 0;
-
padding: 20px;
-
background-color: #f5f5f5;
-
}
-
-
.header {
-
text-align: center;
-
margin-bottom: 20px;
-
}
-
-
h1 {
-
color: #333;
-
margin-bottom: 10px;
-
}
-
-
.controls {
-
display: flex;
-
justify-content: center;
-
gap: 15px;
-
margin-bottom: 20px;
-
flex-wrap: wrap;
-
}
-
-
.control-group {
-
display: flex;
-
align-items: center;
-
gap: 5px;
-
}
-
-
select, input[type="range"] {
-
padding: 5px;
-
border: 1px solid #ddd;
-
border-radius: 4px;
-
}
-
-
.stats {
-
display: flex;
-
justify-content: center;
-
gap: 20px;
-
margin-bottom: 20px;
-
font-size: 14px;
-
color: #666;
-
}
-
-
.stat-item {
-
background: white;
-
padding: 10px 15px;
-
border-radius: 6px;
-
box-shadow: 0 2px 4px rgba(0,0,0,0.1);
-
}
-
-
#graph-container {
-
background: white;
-
border-radius: 8px;
-
box-shadow: 0 4px 6px rgba(0,0,0,0.1);
-
overflow: hidden;
-
}
-
-
#graph {
-
cursor: grab;
-
}
-
-
#graph:active {
-
cursor: grabbing;
-
}
-
-
.node {
-
stroke: #fff;
-
stroke-width: 1.5px;
-
cursor: pointer;
-
}
-
-
.node:hover {
-
stroke: #333;
-
stroke-width: 2px;
-
}
-
-
.link {
-
stroke: #999;
-
stroke-opacity: 0.6;
-
stroke-width: 1px;
-
}
-
-
.link.self-link {
-
stroke: #2ca02c;
-
}
-
-
.link.user-link {
-
stroke: #ff7f0e;
-
}
-
-
.link.external-link {
-
stroke: #d62728;
-
}
-
-
.tooltip {
-
position: absolute;
-
background: rgba(0, 0, 0, 0.9);
-
color: white;
-
padding: 10px;
-
border-radius: 4px;
-
font-size: 12px;
-
line-height: 1.4;
-
pointer-events: none;
-
z-index: 1000;
-
max-width: 300px;
-
}
-
-
.legend {
-
position: fixed;
-
top: 20px;
-
right: 20px;
-
background: white;
-
padding: 15px;
-
border-radius: 6px;
-
box-shadow: 0 2px 8px rgba(0,0,0,0.15);
-
font-size: 12px;
-
z-index: 100;
-
}
-
-
.legend h3 {
-
margin: 0 0 10px 0;
-
font-size: 14px;
-
color: #333;
-
}
-
-
.legend-item {
-
display: flex;
-
align-items: center;
-
margin-bottom: 5px;
-
}
-
-
.legend-color {
-
width: 12px;
-
height: 12px;
-
margin-right: 8px;
-
border-radius: 2px;
-
}
-
-
.legend-line {
-
width: 20px;
-
height: 2px;
-
margin-right: 8px;
-
}
-
</style>
-
</head>
-
<body>
-
<div class="header">
-
<h1>Thicket Thread Graph Visualization</h1>
-
<p>Interactive visualization of blog post connections and conversations</p>
-
</div>
-
-
<div class="controls">
-
<div class="control-group">
-
<label for="userFilter">Filter by user:</label>
-
<select id="userFilter">
-
<option value="all">All Users</option>
-
</select>
-
</div>
-
-
<div class="control-group">
-
<label for="linkFilter">Show links:</label>
-
<select id="linkFilter">
-
<option value="all">All Links</option>
-
<option value="user">User Links Only</option>
-
<option value="self">Self Links Only</option>
-
<option value="external">External Links Only</option>
-
</select>
-
</div>
-
-
<div class="control-group">
-
<label for="forceStrength">Force Strength:</label>
-
<input type="range" id="forceStrength" min="0.1" max="2" step="0.1" value="0.3">
-
</div>
-
-
<div class="control-group">
-
<label for="nodeSize">Node Size:</label>
-
<input type="range" id="nodeSize" min="3" max="15" step="1" value="6">
-
</div>
-
</div>
-
-
<div class="stats" id="stats"></div>
-
-
<div id="graph-container">
-
<svg id="graph"></svg>
-
</div>
-
-
<div class="legend">
-
<h3>Link Types</h3>
-
<div class="legend-item">
-
<div class="legend-line" style="background: #2ca02c;"></div>
-
<span>Self References</span>
-
</div>
-
<div class="legend-item">
-
<div class="legend-line" style="background: #ff7f0e;"></div>
-
<span>User References</span>
-
</div>
-
<div class="legend-item">
-
<div class="legend-line" style="background: #d62728;"></div>
-
<span>External References</span>
-
</div>
-
-
<h3 style="margin-top: 15px;">Interactions</h3>
-
<div style="font-size: 11px; color: #666;">
-
• Hover: Show details<br>
-
• Click: Pin/unpin node<br>
-
• Drag: Move nodes<br>
-
• Zoom: Mouse wheel
-
</div>
-
</div>
-
-
<div class="tooltip" id="tooltip" style="display: none;"></div>
-
-
<script>
-
let graphData;
-
let simulation;
-
let svg, g, link, node;
-
let width = window.innerWidth - 40;
-
let height = window.innerHeight - 200;
-
-
// Initialize the visualization
-
async function init() {
-
// Fetch graph data
-
const response = await fetch('/api/graph');
-
graphData = await response.json();
-
-
// Set up SVG
-
svg = d3.select("#graph")
-
.attr("width", width)
-
.attr("height", height);
-
-
// Add zoom behavior
-
const zoom = d3.zoom()
-
.scaleExtent([0.1, 4])
-
.on("zoom", (event) => {
-
g.attr("transform", event.transform);
-
});
-
-
svg.call(zoom);
-
-
// Create main group for all elements
-
g = svg.append("g");
-
-
// Set up controls
-
setupControls();
-
-
// Initial render
-
updateVisualization();
-
-
// Update stats
-
updateStats();
-
-
// Handle window resize
-
window.addEventListener('resize', () => {
-
width = window.innerWidth - 40;
-
height = window.innerHeight - 200;
-
svg.attr("width", width).attr("height", height);
-
simulation.force("center", d3.forceCenter(width / 2, height / 2));
-
simulation.restart();
-
});
-
}
-
-
function setupControls() {
-
// Populate user filter
-
const userFilter = d3.select("#userFilter");
-
graphData.stats.users.forEach(user => {
-
userFilter.append("option").attr("value", user).text(user);
-
});
-
-
// Add event listeners
-
d3.select("#userFilter").on("change", updateVisualization);
-
d3.select("#linkFilter").on("change", updateVisualization);
-
d3.select("#forceStrength").on("input", updateForces);
-
d3.select("#nodeSize").on("input", updateNodeSizes);
-
}
-
-
function updateVisualization() {
-
// Filter data based on controls
-
const userFilter = d3.select("#userFilter").property("value");
-
const linkFilter = d3.select("#linkFilter").property("value");
-
-
let filteredNodes = graphData.nodes;
-
let filteredLinks = graphData.links;
-
-
if (userFilter !== "all") {
-
filteredNodes = graphData.nodes.filter(n => n.username === userFilter);
-
const nodeIds = new Set(filteredNodes.map(n => n.id));
-
filteredLinks = graphData.links.filter(l =>
-
nodeIds.has(l.source.id || l.source) && nodeIds.has(l.target.id || l.target)
-
);
-
}
-
-
if (linkFilter !== "all") {
-
filteredLinks = filteredLinks.filter(l => l.type === linkFilter);
-
}
-
-
// Clear existing elements
-
g.selectAll(".link").remove();
-
g.selectAll(".node").remove();
-
-
// Create force simulation
-
simulation = d3.forceSimulation(filteredNodes)
-
.force("link", d3.forceLink(filteredLinks).id(d => d.id)
-
.distance(d => {
-
// Get source and target nodes
-
const sourceNode = filteredNodes.find(n => n.id === (d.source.id || d.source));
-
const targetNode = filteredNodes.find(n => n.id === (d.target.id || d.target));
-
-
// If nodes are from different users, make them attract more (shorter distance)
-
if (sourceNode && targetNode && sourceNode.username !== targetNode.username) {
-
return 30; // Shorter distance = stronger attraction
-
}
-
-
// Same user posts have normal distance
-
return 60;
-
})
-
.strength(d => {
-
// Get source and target nodes
-
const sourceNode = filteredNodes.find(n => n.id === (d.source.id || d.source));
-
const targetNode = filteredNodes.find(n => n.id === (d.target.id || d.target));
-
-
// If nodes are from different users, make the link stronger
-
if (sourceNode && targetNode && sourceNode.username !== targetNode.username) {
-
return 1.5; // Stronger link force
-
}
-
-
// Same user posts have normal strength
-
return 1.0;
-
}))
-
.force("charge", d3.forceManyBody().strength(-200))
-
.force("center", d3.forceCenter(width / 2, height / 2))
-
.force("collision", d3.forceCollide().radius(15));
-
-
// Create links
-
link = g.append("g")
-
.selectAll(".link")
-
.data(filteredLinks)
-
.enter().append("line")
-
.attr("class", d => `link ${d.type}-link`)
-
.attr("stroke-width", d => {
-
// Get source and target nodes
-
const sourceNode = filteredNodes.find(n => n.id === (d.source.id || d.source));
-
const targetNode = filteredNodes.find(n => n.id === (d.target.id || d.target));
-
-
// If nodes are from different users, make the line thicker
-
if (sourceNode && targetNode && sourceNode.username !== targetNode.username) {
-
return 2.5; // Thicker line for cross-user connections
-
}
-
-
// Same user posts have normal thickness
-
return 1;
-
});
-
-
// Create nodes
-
node = g.append("g")
-
.selectAll(".node")
-
.data(filteredNodes)
-
.enter().append("circle")
-
.attr("class", "node")
-
.attr("r", d => Math.max(4, Math.log(d.outbound_count + d.backlink_count + 1) * 3))
-
.attr("fill", d => d.color)
-
.call(d3.drag()
-
.on("start", dragstarted)
-
.on("drag", dragged)
-
.on("end", dragended))
-
.on("mouseover", showTooltip)
-
.on("mouseout", hideTooltip)
-
.on("click", togglePin);
-
-
// Update force simulation
-
simulation.on("tick", () => {
-
link
-
.attr("x1", d => d.source.x)
-
.attr("y1", d => d.source.y)
-
.attr("x2", d => d.target.x)
-
.attr("y2", d => d.target.y);
-
-
node
-
.attr("cx", d => d.x)
-
.attr("cy", d => d.y);
-
});
-
-
updateStats(filteredNodes, filteredLinks);
-
}
-
-
function updateForces() {
-
const strength = +d3.select("#forceStrength").property("value");
-
if (simulation) {
-
simulation.force("charge").strength(-200 * strength);
-
simulation.alpha(0.3).restart();
-
}
-
}
-
-
function updateNodeSizes() {
-
const size = +d3.select("#nodeSize").property("value");
-
if (node) {
-
node.attr("r", d => Math.max(size * 0.5, Math.log(d.outbound_count + d.backlink_count + 1) * size * 0.5));
-
}
-
}
-
-
function dragstarted(event, d) {
-
if (!event.active) simulation.alphaTarget(0.3).restart();
-
d.fx = d.x;
-
d.fy = d.y;
-
}
-
-
function dragged(event, d) {
-
d.fx = event.x;
-
d.fy = event.y;
-
}
-
-
function dragended(event, d) {
-
if (!event.active) simulation.alphaTarget(0);
-
if (!d.pinned) {
-
d.fx = null;
-
d.fy = null;
-
}
-
}
-
-
function togglePin(event, d) {
-
d.pinned = !d.pinned;
-
if (d.pinned) {
-
d.fx = d.x;
-
d.fy = d.y;
-
} else {
-
d.fx = null;
-
d.fy = null;
-
}
-
}
-
-
function showTooltip(event, d) {
-
const tooltip = d3.select("#tooltip");
-
tooltip.style("display", "block")
-
.html(`
-
<strong>${d.title}</strong><br>
-
<strong>User:</strong> ${d.username}<br>
-
<strong>Date:</strong> ${d.date}<br>
-
<strong>Outbound Links:</strong> ${d.outbound_count}<br>
-
<strong>Backlinks:</strong> ${d.backlink_count}<br>
-
<strong>Link Types:</strong> Self: ${d.link_types.self}, User: ${d.link_types.user}, External: ${d.link_types.external}
-
${d.summary ? '<br><br>' + d.summary : ''}
-
`)
-
.style("left", (event.pageX + 10) + "px")
-
.style("top", (event.pageY - 10) + "px");
-
}
-
-
function hideTooltip() {
-
d3.select("#tooltip").style("display", "none");
-
}
-
-
function updateStats(nodes = graphData.nodes, links = graphData.links) {
-
const stats = d3.select("#stats");
-
const userCounts = {};
-
nodes.forEach(n => {
-
userCounts[n.username] = (userCounts[n.username] || 0) + 1;
-
});
-
-
stats.html(`
-
<div class="stat-item">
-
<strong>${nodes.length}</strong> Nodes
-
</div>
-
<div class="stat-item">
-
<strong>${links.length}</strong> Links
-
</div>
-
<div class="stat-item">
-
<strong>${Object.keys(userCounts).length}</strong> Users
-
</div>
-
<div class="stat-item">
-
Users: ${Object.entries(userCounts).map(([user, count]) => `${user} (${count})`).join(', ')}
-
</div>
-
`);
-
}
-
-
// Initialize when page loads
-
init();
-
</script>
-
</body>
-
</html>
-
"""
+1 -1
src/thicket/cli/main.py
···
# Import commands to register them
-
from .commands import add, duplicates, info_cmd, init, links_cmd, list_cmd, sync, threads_cmd
+
from .commands import add, duplicates, info_cmd, init, list_cmd, sync # noqa: F401
if __name__ == "__main__":
app()
+32 -20
src/thicket/cli/utils.py
···
from rich.progress import Progress, SpinnerColumn, TextColumn
from rich.table import Table
-
from ..models import ThicketConfig, UserMetadata
from ..core.git_store import GitStore
+
from ..models import ThicketConfig, UserMetadata
console = Console()
···
def get_tsv_mode() -> bool:
"""Get the global TSV mode setting."""
from .main import tsv_mode
+
return tsv_mode
···
default_config = Path("thicket.yaml")
if default_config.exists():
import yaml
+
with open(default_config) as f:
config_data = yaml.safe_load(f)
return ThicketConfig(**config_data)
-
+
# Fall back to environment variables
return ThicketConfig()
except Exception as e:
console.print(f"[red]Error loading configuration: {e}[/red]")
-
console.print("[yellow]Run 'thicket init' to create a new configuration.[/yellow]")
+
console.print(
+
"[yellow]Run 'thicket init' to create a new configuration.[/yellow]"
+
)
raise typer.Exit(1) from e
···
if get_tsv_mode():
print_users_tsv(config)
return
-
+
table = Table(title="Users and Feeds")
table.add_column("Username", style="cyan", no_wrap=True)
table.add_column("Display Name", style="magenta")
···
if get_tsv_mode():
print_feeds_tsv(config, username)
return
-
+
table = Table(title=f"Feeds{f' for {username}' if username else ''}")
table.add_column("Username", style="cyan", no_wrap=True)
table.add_column("Feed URL", style="blue")
···
if get_tsv_mode():
print_users_tsv_from_git(users)
return
-
+
table = Table(title="Users and Feeds")
table.add_column("Username", style="cyan", no_wrap=True)
table.add_column("Display Name", style="magenta")
···
console.print(table)
-
def print_feeds_table_from_git(git_store: GitStore, username: Optional[str] = None) -> None:
+
def print_feeds_table_from_git(
+
git_store: GitStore, username: Optional[str] = None
+
) -> None:
"""Print a table of feeds from git repository."""
if get_tsv_mode():
print_feeds_tsv_from_git(git_store, username)
return
-
+
table = Table(title=f"Feeds{f' for {username}' if username else ''}")
table.add_column("Username", style="cyan", no_wrap=True)
table.add_column("Feed URL", style="blue")
···
print("Username\tDisplay Name\tEmail\tHomepage\tFeeds")
for user in config.users:
feeds_str = ",".join(str(feed) for feed in user.feeds)
-
print(f"{user.username}\t{user.display_name or ''}\t{user.email or ''}\t{user.homepage or ''}\t{feeds_str}")
+
print(
+
f"{user.username}\t{user.display_name or ''}\t{user.email or ''}\t{user.homepage or ''}\t{feeds_str}"
+
)
def print_users_tsv_from_git(users: list[UserMetadata]) -> None:
···
print("Username\tDisplay Name\tEmail\tHomepage\tFeeds")
for user in users:
feeds_str = ",".join(user.feeds)
-
print(f"{user.username}\t{user.display_name or ''}\t{user.email or ''}\t{user.homepage or ''}\t{feeds_str}")
+
print(
+
f"{user.username}\t{user.display_name or ''}\t{user.email or ''}\t{user.homepage or ''}\t{feeds_str}"
+
)
def print_feeds_tsv(config: ThicketConfig, username: Optional[str] = None) -> None:
···
print("Username\tFeed URL\tStatus")
users = [config.find_user(username)] if username else config.users
users = [u for u in users if u is not None]
-
+
for user in users:
for feed in user.feeds:
print(f"{user.username}\t{feed}\tActive")
-
def print_feeds_tsv_from_git(git_store: GitStore, username: Optional[str] = None) -> None:
+
def print_feeds_tsv_from_git(
+
git_store: GitStore, username: Optional[str] = None
+
) -> None:
"""Print feeds from git repository in TSV format."""
print("Username\tFeed URL\tStatus")
-
+
if username:
user = git_store.get_user(username)
users = [user] if user else []
else:
index = git_store._load_index()
users = list(index.users.values())
-
+
for user in users:
for feed in user.feeds:
print(f"{user.username}\t{feed}\tActive")
···
def print_entries_tsv(entries_by_user: list[list], usernames: list[str]) -> None:
"""Print entries in TSV format."""
print("User\tAtom ID\tTitle\tUpdated\tURL")
-
+
# Combine all entries with usernames
all_entries = []
for entries, username in zip(entries_by_user, usernames):
for entry in entries:
all_entries.append((username, entry))
-
+
# Sort by updated time (newest first)
all_entries.sort(key=lambda x: x[1].updated, reverse=True)
-
+
for username, entry in all_entries:
# Format updated time
updated_str = entry.updated.strftime("%Y-%m-%d %H:%M")
-
+
# Escape tabs and newlines in title to preserve TSV format
-
title = entry.title.replace('\t', ' ').replace('\n', ' ').replace('\r', ' ')
-
+
title = entry.title.replace("\t", " ").replace("\n", " ").replace("\r", " ")
+
print(f"{username}\t{entry.id}\t{title}\t{updated_str}\t{entry.link}")
+84 -55
src/thicket/core/feed_parser.py
···
"""Initialize the feed parser."""
self.user_agent = user_agent
self.allowed_tags = [
-
"a", "abbr", "acronym", "b", "blockquote", "br", "code", "em",
-
"i", "li", "ol", "p", "pre", "strong", "ul", "h1", "h2", "h3",
-
"h4", "h5", "h6", "img", "div", "span",
+
"a",
+
"abbr",
+
"acronym",
+
"b",
+
"blockquote",
+
"br",
+
"code",
+
"em",
+
"i",
+
"li",
+
"ol",
+
"p",
+
"pre",
+
"strong",
+
"ul",
+
"h1",
+
"h2",
+
"h3",
+
"h4",
+
"h5",
+
"h6",
+
"img",
+
"div",
+
"span",
]
self.allowed_attributes = {
"a": ["href", "title"],
···
response.raise_for_status()
return response.text
-
def parse_feed(self, content: str, source_url: Optional[HttpUrl] = None) -> tuple[FeedMetadata, list[AtomEntry]]:
+
def parse_feed(
+
self, content: str, source_url: Optional[HttpUrl] = None
+
) -> tuple[FeedMetadata, list[AtomEntry]]:
"""Parse feed content and return metadata and entries."""
parsed = feedparser.parse(content)
···
author_email = None
author_uri = None
-
if hasattr(feed, 'author_detail'):
-
author_name = feed.author_detail.get('name')
-
author_email = feed.author_detail.get('email')
-
author_uri = feed.author_detail.get('href')
-
elif hasattr(feed, 'author'):
+
if hasattr(feed, "author_detail"):
+
author_name = feed.author_detail.get("name")
+
author_email = feed.author_detail.get("email")
+
author_uri = feed.author_detail.get("href")
+
elif hasattr(feed, "author"):
author_name = feed.author
# Parse managing editor for RSS feeds
-
if not author_email and hasattr(feed, 'managingEditor'):
+
if not author_email and hasattr(feed, "managingEditor"):
author_email = feed.managingEditor
# Parse feed link
feed_link = None
-
if hasattr(feed, 'link'):
+
if hasattr(feed, "link"):
try:
feed_link = HttpUrl(feed.link)
except ValidationError:
···
icon = None
image_url = None
-
if hasattr(feed, 'image'):
+
if hasattr(feed, "image"):
try:
-
image_url = HttpUrl(feed.image.get('href', feed.image.get('url', '')))
+
image_url = HttpUrl(feed.image.get("href", feed.image.get("url", "")))
except (ValidationError, AttributeError):
pass
-
if hasattr(feed, 'icon'):
+
if hasattr(feed, "icon"):
try:
icon = HttpUrl(feed.icon)
except ValidationError:
pass
-
if hasattr(feed, 'logo'):
+
if hasattr(feed, "logo"):
try:
logo = HttpUrl(feed.logo)
except ValidationError:
pass
return FeedMetadata(
-
title=getattr(feed, 'title', None),
+
title=getattr(feed, "title", None),
author_name=author_name,
author_email=author_email,
author_uri=HttpUrl(author_uri) if author_uri else None,
···
logo=logo,
icon=icon,
image_url=image_url,
-
description=getattr(feed, 'description', None),
+
description=getattr(feed, "description", None),
)
-
def _normalize_entry(self, entry: feedparser.FeedParserDict, source_url: Optional[HttpUrl] = None) -> AtomEntry:
+
def _normalize_entry(
+
self, entry: feedparser.FeedParserDict, source_url: Optional[HttpUrl] = None
+
) -> AtomEntry:
"""Normalize an entry to Atom format."""
# Parse timestamps
-
updated = self._parse_timestamp(entry.get('updated_parsed') or entry.get('published_parsed'))
-
published = self._parse_timestamp(entry.get('published_parsed'))
+
updated = self._parse_timestamp(
+
entry.get("updated_parsed") or entry.get("published_parsed")
+
)
+
published = self._parse_timestamp(entry.get("published_parsed"))
# Parse content
content = self._extract_content(entry)
···
# Parse categories/tags
categories = []
-
if hasattr(entry, 'tags'):
-
categories = [tag.get('term', '') for tag in entry.tags if tag.get('term')]
+
if hasattr(entry, "tags"):
+
categories = [tag.get("term", "") for tag in entry.tags if tag.get("term")]
# Sanitize HTML content
if content:
content = self._sanitize_html(content)
-
summary = entry.get('summary', '')
+
summary = entry.get("summary", "")
if summary:
summary = self._sanitize_html(summary)
return AtomEntry(
-
id=entry.get('id', entry.get('link', '')),
-
title=entry.get('title', ''),
-
link=HttpUrl(entry.get('link', '')),
+
id=entry.get("id", entry.get("link", "")),
+
title=entry.get("title", ""),
+
link=HttpUrl(entry.get("link", "")),
updated=updated,
published=published,
summary=summary or None,
···
content_type=content_type,
author=author,
categories=categories,
-
rights=entry.get('rights', None),
+
rights=entry.get("rights", None),
source=str(source_url) if source_url else None,
)
···
def _extract_content(self, entry: feedparser.FeedParserDict) -> Optional[str]:
"""Extract the best content from an entry."""
# Prefer content over summary
-
if hasattr(entry, 'content') and entry.content:
+
if hasattr(entry, "content") and entry.content:
# Find the best content (prefer text/html, then text/plain)
for content_item in entry.content:
-
if content_item.get('type') in ['text/html', 'html']:
-
return content_item.get('value', '')
-
elif content_item.get('type') in ['text/plain', 'text']:
-
return content_item.get('value', '')
+
if content_item.get("type") in ["text/html", "html"]:
+
return content_item.get("value", "")
+
elif content_item.get("type") in ["text/plain", "text"]:
+
return content_item.get("value", "")
# Fallback to first content item
-
return entry.content[0].get('value', '')
+
return entry.content[0].get("value", "")
# Fallback to summary
-
return entry.get('summary', '')
+
return entry.get("summary", "")
def _extract_content_type(self, entry: feedparser.FeedParserDict) -> str:
"""Extract content type from entry."""
-
if hasattr(entry, 'content') and entry.content:
-
content_type = entry.content[0].get('type', 'html')
+
if hasattr(entry, "content") and entry.content:
+
content_type = entry.content[0].get("type", "html")
# Normalize content type
-
if content_type in ['text/html', 'html']:
-
return 'html'
-
elif content_type in ['text/plain', 'text']:
-
return 'text'
-
elif content_type == 'xhtml':
-
return 'xhtml'
-
return 'html'
+
if content_type in ["text/html", "html"]:
+
return "html"
+
elif content_type in ["text/plain", "text"]:
+
return "text"
+
elif content_type == "xhtml":
+
return "xhtml"
+
return "html"
def _extract_author(self, entry: feedparser.FeedParserDict) -> Optional[dict]:
"""Extract author information from entry."""
author = {}
-
if hasattr(entry, 'author_detail'):
-
author.update({
-
'name': entry.author_detail.get('name'),
-
'email': entry.author_detail.get('email'),
-
'uri': entry.author_detail.get('href'),
-
})
-
elif hasattr(entry, 'author'):
-
author['name'] = entry.author
+
if hasattr(entry, "author_detail"):
+
author.update(
+
{
+
"name": entry.author_detail.get("name"),
+
"email": entry.author_detail.get("email"),
+
"uri": entry.author_detail.get("href"),
+
}
+
)
+
elif hasattr(entry, "author"):
+
author["name"] = entry.author
return author if author else None
···
# Start with the path component
if parsed.path:
# Remove leading slash and replace problematic characters
-
safe_id = parsed.path.lstrip('/').replace('/', '_').replace('\\', '_')
+
safe_id = parsed.path.lstrip("/").replace("/", "_").replace("\\", "_")
else:
# Use the entire ID as fallback
safe_id = entry_id
···
# Replace problematic characters
safe_chars = []
for char in safe_id:
-
if char.isalnum() or char in '-_.':
+
if char.isalnum() or char in "-_.":
safe_chars.append(char)
else:
-
safe_chars.append('_')
+
safe_chars.append("_")
-
safe_id = ''.join(safe_chars)
+
safe_id = "".join(safe_chars)
# Ensure it's not too long (max 200 chars)
if len(safe_id) > 200:
+45 -18
src/thicket/core/git_store.py
···
"""Save the index to index.json."""
index_path = self.repo_path / "index.json"
with open(index_path, "w") as f:
-
json.dump(index.model_dump(mode="json", exclude_none=True), f, indent=2, default=str)
+
json.dump(
+
index.model_dump(mode="json", exclude_none=True),
+
f,
+
indent=2,
+
default=str,
+
)
def _load_index(self) -> GitStoreIndex:
"""Load the index from index.json."""
···
return DuplicateMap(**data)
-
def add_user(self, username: str, display_name: Optional[str] = None,
-
email: Optional[str] = None, homepage: Optional[str] = None,
-
icon: Optional[str] = None, feeds: Optional[list[str]] = None) -> UserMetadata:
+
def add_user(
+
self,
+
username: str,
+
display_name: Optional[str] = None,
+
email: Optional[str] = None,
+
homepage: Optional[str] = None,
+
icon: Optional[str] = None,
+
feeds: Optional[list[str]] = None,
+
) -> UserMetadata:
"""Add a new user to the Git store."""
index = self._load_index()
···
created=datetime.now(),
last_updated=datetime.now(),
)
-
# Update index
index.add_user(user_metadata)
···
user.update_timestamp()
-
# Update index
index.add_user(user)
self._save_index(index)
···
# Sanitize entry ID for filename
from .feed_parser import FeedParser
+
parser = FeedParser()
safe_id = parser.sanitize_entry_id(entry.id)
···
# Save entry
with open(entry_path, "w") as f:
-
json.dump(entry.model_dump(mode="json", exclude_none=True), f, indent=2, default=str)
+
json.dump(
+
entry.model_dump(mode="json", exclude_none=True),
+
f,
+
indent=2,
+
default=str,
+
)
# Update user metadata if new entry
if not entry_exists:
···
# Sanitize entry ID
from .feed_parser import FeedParser
+
parser = FeedParser()
safe_id = parser.sanitize_entry_id(entry_id)
···
return AtomEntry(**data)
-
def list_entries(self, username: str, limit: Optional[int] = None) -> list[AtomEntry]:
+
def list_entries(
+
self, username: str, limit: Optional[int] = None
+
) -> list[AtomEntry]:
"""List entries for a user."""
user = self.get_user(username)
if not user:
···
return []
entries = []
-
entry_files = sorted(user_dir.glob("*.json"), key=lambda p: p.stat().st_mtime, reverse=True)
-
+
entry_files = sorted(
+
user_dir.glob("*.json"), key=lambda p: p.stat().st_mtime, reverse=True
+
)
if limit:
entry_files = entry_files[:limit]
···
"total_entries": index.total_entries,
"total_duplicates": len(duplicates.duplicates),
"last_updated": index.last_updated,
-
"repository_size": sum(f.stat().st_size for f in self.repo_path.rglob("*") if f.is_file()),
+
"repository_size": sum(
+
f.stat().st_size for f in self.repo_path.rglob("*") if f.is_file()
+
),
}
-
def search_entries(self, query: str, username: Optional[str] = None,
-
limit: Optional[int] = None) -> list[tuple[str, AtomEntry]]:
+
def search_entries(
+
self, query: str, username: Optional[str] = None, limit: Optional[int] = None
+
) -> list[tuple[str, AtomEntry]]:
"""Search entries by content."""
results = []
···
entry = AtomEntry(**data)
# Simple text search in title, summary, and content
-
searchable_text = " ".join(filter(None, [
-
entry.title,
-
entry.summary or "",
-
entry.content or "",
-
])).lower()
+
searchable_text = " ".join(
+
filter(
+
None,
+
[
+
entry.title,
+
entry.summary or "",
+
entry.content or "",
+
],
+
)
+
).lower()
if query.lower() in searchable_text:
results.append((user.username, entry))
+24
src/thicket/models/config.py
···
git_store: Path
cache_dir: Path
users: list[UserConfig] = []
+
+
def find_user(self, username: str) -> Optional[UserConfig]:
+
"""Find a user by username."""
+
for user in self.users:
+
if user.username == username:
+
return user
+
return None
+
+
def add_user(self, user: UserConfig) -> bool:
+
"""Add a user to the configuration. Returns True if added, False if already exists."""
+
if self.find_user(user.username) is not None:
+
return False
+
self.users.append(user)
+
return True
+
+
def add_feed_to_user(self, username: str, feed_url: HttpUrl) -> bool:
+
"""Add a feed to an existing user. Returns True if added, False if user not found or feed already exists."""
+
user = self.find_user(username)
+
if user is None:
+
return False
+
if feed_url in user.feeds:
+
return False
+
user.feeds.append(feed_url)
+
return True
-2
src/thicket/models/feed.py
···
categories: list[str] = []
rights: Optional[str] = None # Copyright info
source: Optional[str] = None # Source feed URL
-
links: list[str] = [] # URLs mentioned in this entry
-
backlinks: list[str] = [] # Entry IDs that link to this entry
class FeedMetadata(BaseModel):
+1 -3
src/thicket/models/user.py
···
class GitStoreIndex(BaseModel):
"""Index of all users and their directories in the Git store."""
-
model_config = ConfigDict(
-
json_encoders={datetime: lambda v: v.isoformat()}
-
)
+
model_config = ConfigDict(json_encoders={datetime: lambda v: v.isoformat()})
users: dict[str, UserMetadata] = {} # username -> UserMetadata
created: datetime
+9 -211
uv.lock
···
version = 1
-
revision = 2
+
revision = 3
requires-python = ">=3.9"
resolution-markers = [
"python_full_version >= '3.10'",
···
sdist = { url = "https://files.pythonhosted.org/packages/76/9a/0e33f5054c54d349ea62c277191c020c2d6ef1d65ab2cb1993f91ec846d1/bleach-6.2.0.tar.gz", hash = "sha256:123e894118b8a599fd80d3ec1a6d4cc7ce4e5882b1317a7e1ba69b56e95f991f", size = 203083, upload-time = "2024-10-29T18:30:40.477Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/fc/55/96142937f66150805c25c4d0f31ee4132fd33497753400734f9dfdcbdc66/bleach-6.2.0-py3-none-any.whl", hash = "sha256:117d9c6097a7c3d22fd578fcd8d35ff1e125df6736f554da4e432fdd63f31e5e", size = 163406, upload-time = "2024-10-29T18:30:38.186Z" },
-
]
-
-
[[package]]
-
name = "blinker"
-
version = "1.9.0"
-
source = { registry = "https://pypi.org/simple" }
-
sdist = { url = "https://files.pythonhosted.org/packages/21/28/9b3f50ce0e048515135495f198351908d99540d69bfdc8c1d15b73dc55ce/blinker-1.9.0.tar.gz", hash = "sha256:b4ce2265a7abece45e7cc896e98dbebe6cead56bcf805a3d23136d145f5445bf", size = 22460, upload-time = "2024-11-08T17:25:47.436Z" }
-
wheels = [
-
{ url = "https://files.pythonhosted.org/packages/10/cb/f2ad4230dc2eb1a74edf38f1a38b9b52277f75bef262d8908e60d957e13c/blinker-1.9.0-py3-none-any.whl", hash = "sha256:ba0efaa9080b619ff2f3459d1d500c57bddea4a6b424b60a91141db6fd2f08bc", size = 8458, upload-time = "2024-11-08T17:25:46.184Z" },
]
[[package]]
···
]
[[package]]
-
name = "flask"
-
version = "3.1.1"
-
source = { registry = "https://pypi.org/simple" }
-
dependencies = [
-
{ name = "blinker" },
-
{ name = "click", version = "8.1.8", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" },
-
{ name = "click", version = "8.2.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" },
-
{ name = "importlib-metadata", marker = "python_full_version < '3.10'" },
-
{ name = "itsdangerous" },
-
{ name = "jinja2" },
-
{ name = "markupsafe" },
-
{ name = "werkzeug" },
-
]
-
sdist = { url = "https://files.pythonhosted.org/packages/c0/de/e47735752347f4128bcf354e0da07ef311a78244eba9e3dc1d4a5ab21a98/flask-3.1.1.tar.gz", hash = "sha256:284c7b8f2f58cb737f0cf1c30fd7eaf0ccfcde196099d24ecede3fc2005aa59e", size = 753440, upload-time = "2025-05-13T15:01:17.447Z" }
-
wheels = [
-
{ url = "https://files.pythonhosted.org/packages/3d/68/9d4508e893976286d2ead7f8f571314af6c2037af34853a30fd769c02e9d/flask-3.1.1-py3-none-any.whl", hash = "sha256:07aae2bb5eaf77993ef57e357491839f5fd9f4dc281593a81a9e4d79a24f295c", size = 103305, upload-time = "2025-05-13T15:01:15.591Z" },
-
]
-
-
[[package]]
name = "gitdb"
version = "4.0.12"
source = { registry = "https://pypi.org/simple" }
···
]
[[package]]
-
name = "importlib-metadata"
-
version = "8.7.0"
-
source = { registry = "https://pypi.org/simple" }
-
dependencies = [
-
{ name = "zipp", marker = "python_full_version < '3.10'" },
-
]
-
sdist = { url = "https://files.pythonhosted.org/packages/76/66/650a33bd90f786193e4de4b3ad86ea60b53c89b669a5c7be931fac31cdb0/importlib_metadata-8.7.0.tar.gz", hash = "sha256:d13b81ad223b890aa16c5471f2ac3056cf76c5f10f82d6f9292f0b415f389000", size = 56641, upload-time = "2025-04-27T15:29:01.736Z" }
-
wheels = [
-
{ url = "https://files.pythonhosted.org/packages/20/b0/36bd937216ec521246249be3bf9855081de4c5e06a0c9b4219dbeda50373/importlib_metadata-8.7.0-py3-none-any.whl", hash = "sha256:e5dd1551894c77868a30651cef00984d50e1002d06942a7101d34870c5f02afd", size = 27656, upload-time = "2025-04-27T15:29:00.214Z" },
-
]
-
-
[[package]]
name = "iniconfig"
version = "2.1.0"
source = { registry = "https://pypi.org/simple" }
···
]
[[package]]
-
name = "itsdangerous"
-
version = "2.2.0"
-
source = { registry = "https://pypi.org/simple" }
-
sdist = { url = "https://files.pythonhosted.org/packages/9c/cb/8ac0172223afbccb63986cc25049b154ecfb5e85932587206f42317be31d/itsdangerous-2.2.0.tar.gz", hash = "sha256:e0050c0b7da1eea53ffaf149c0cfbb5c6e2e2b69c4bef22c81fa6eb73e5f6173", size = 54410, upload-time = "2024-04-16T21:28:15.614Z" }
-
wheels = [
-
{ url = "https://files.pythonhosted.org/packages/04/96/92447566d16df59b2a776c0fb82dbc4d9e07cd95062562af01e408583fc4/itsdangerous-2.2.0-py3-none-any.whl", hash = "sha256:c6242fc49e35958c8b15141343aa660db5fc54d4f13a1db01a3f5891b98700ef", size = 16234, upload-time = "2024-04-16T21:28:14.499Z" },
-
]
-
-
[[package]]
-
name = "jinja2"
-
version = "3.1.6"
-
source = { registry = "https://pypi.org/simple" }
-
dependencies = [
-
{ name = "markupsafe" },
-
]
-
sdist = { url = "https://files.pythonhosted.org/packages/df/bf/f7da0350254c0ed7c72f3e33cef02e048281fec7ecec5f032d4aac52226b/jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d", size = 245115, upload-time = "2025-03-05T20:05:02.478Z" }
-
wheels = [
-
{ url = "https://files.pythonhosted.org/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67", size = 134899, upload-time = "2025-03-05T20:05:00.369Z" },
-
]
-
-
[[package]]
-
name = "linkify-it-py"
-
version = "2.0.3"
-
source = { registry = "https://pypi.org/simple" }
-
dependencies = [
-
{ name = "uc-micro-py" },
-
]
-
sdist = { url = "https://files.pythonhosted.org/packages/2a/ae/bb56c6828e4797ba5a4821eec7c43b8bf40f69cda4d4f5f8c8a2810ec96a/linkify-it-py-2.0.3.tar.gz", hash = "sha256:68cda27e162e9215c17d786649d1da0021a451bdc436ef9e0fa0ba5234b9b048", size = 27946, upload-time = "2024-02-04T14:48:04.179Z" }
-
wheels = [
-
{ url = "https://files.pythonhosted.org/packages/04/1e/b832de447dee8b582cac175871d2f6c3d5077cc56d5575cadba1fd1cccfa/linkify_it_py-2.0.3-py3-none-any.whl", hash = "sha256:6bcbc417b0ac14323382aef5c5192c0075bf8a9d6b41820a2b66371eac6b6d79", size = 19820, upload-time = "2024-02-04T14:48:02.496Z" },
-
]
-
-
[[package]]
name = "markdown-it-py"
version = "3.0.0"
source = { registry = "https://pypi.org/simple" }
···
sdist = { url = "https://files.pythonhosted.org/packages/38/71/3b932df36c1a044d397a1f92d1cf91ee0a503d91e470cbd670aa66b07ed0/markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb", size = 74596, upload-time = "2023-06-03T06:41:14.443Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/42/d7/1ec15b46af6af88f19b8e5ffea08fa375d433c998b8a7639e76935c14f1f/markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1", size = 87528, upload-time = "2023-06-03T06:41:11.019Z" },
-
]
-
-
[package.optional-dependencies]
-
linkify = [
-
{ name = "linkify-it-py" },
-
]
-
plugins = [
-
{ name = "mdit-py-plugins" },
-
]
-
-
[[package]]
-
name = "markupsafe"
-
version = "3.0.2"
-
source = { registry = "https://pypi.org/simple" }
-
sdist = { url = "https://files.pythonhosted.org/packages/b2/97/5d42485e71dfc078108a86d6de8fa46db44a1a9295e89c5d6d4a06e23a62/markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0", size = 20537, upload-time = "2024-10-18T15:21:54.129Z" }
-
wheels = [
-
{ url = "https://files.pythonhosted.org/packages/04/90/d08277ce111dd22f77149fd1a5d4653eeb3b3eaacbdfcbae5afb2600eebd/MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8", size = 14357, upload-time = "2024-10-18T15:20:51.44Z" },
-
{ url = "https://files.pythonhosted.org/packages/04/e1/6e2194baeae0bca1fae6629dc0cbbb968d4d941469cbab11a3872edff374/MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158", size = 12393, upload-time = "2024-10-18T15:20:52.426Z" },
-
{ url = "https://files.pythonhosted.org/packages/1d/69/35fa85a8ece0a437493dc61ce0bb6d459dcba482c34197e3efc829aa357f/MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38a9ef736c01fccdd6600705b09dc574584b89bea478200c5fbf112a6b0d5579", size = 21732, upload-time = "2024-10-18T15:20:53.578Z" },
-
{ url = "https://files.pythonhosted.org/packages/22/35/137da042dfb4720b638d2937c38a9c2df83fe32d20e8c8f3185dbfef05f7/MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbcb445fa71794da8f178f0f6d66789a28d7319071af7a496d4d507ed566270d", size = 20866, upload-time = "2024-10-18T15:20:55.06Z" },
-
{ url = "https://files.pythonhosted.org/packages/29/28/6d029a903727a1b62edb51863232152fd335d602def598dade38996887f0/MarkupSafe-3.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57cb5a3cf367aeb1d316576250f65edec5bb3be939e9247ae594b4bcbc317dfb", size = 20964, upload-time = "2024-10-18T15:20:55.906Z" },
-
{ url = "https://files.pythonhosted.org/packages/cc/cd/07438f95f83e8bc028279909d9c9bd39e24149b0d60053a97b2bc4f8aa51/MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3809ede931876f5b2ec92eef964286840ed3540dadf803dd570c3b7e13141a3b", size = 21977, upload-time = "2024-10-18T15:20:57.189Z" },
-
{ url = "https://files.pythonhosted.org/packages/29/01/84b57395b4cc062f9c4c55ce0df7d3108ca32397299d9df00fedd9117d3d/MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e07c3764494e3776c602c1e78e298937c3315ccc9043ead7e685b7f2b8d47b3c", size = 21366, upload-time = "2024-10-18T15:20:58.235Z" },
-
{ url = "https://files.pythonhosted.org/packages/bd/6e/61ebf08d8940553afff20d1fb1ba7294b6f8d279df9fd0c0db911b4bbcfd/MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b424c77b206d63d500bcb69fa55ed8d0e6a3774056bdc4839fc9298a7edca171", size = 21091, upload-time = "2024-10-18T15:20:59.235Z" },
-
{ url = "https://files.pythonhosted.org/packages/11/23/ffbf53694e8c94ebd1e7e491de185124277964344733c45481f32ede2499/MarkupSafe-3.0.2-cp310-cp310-win32.whl", hash = "sha256:fcabf5ff6eea076f859677f5f0b6b5c1a51e70a376b0579e0eadef8db48c6b50", size = 15065, upload-time = "2024-10-18T15:21:00.307Z" },
-
{ url = "https://files.pythonhosted.org/packages/44/06/e7175d06dd6e9172d4a69a72592cb3f7a996a9c396eee29082826449bbc3/MarkupSafe-3.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:6af100e168aa82a50e186c82875a5893c5597a0c1ccdb0d8b40240b1f28b969a", size = 15514, upload-time = "2024-10-18T15:21:01.122Z" },
-
{ url = "https://files.pythonhosted.org/packages/6b/28/bbf83e3f76936960b850435576dd5e67034e200469571be53f69174a2dfd/MarkupSafe-3.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d", size = 14353, upload-time = "2024-10-18T15:21:02.187Z" },
-
{ url = "https://files.pythonhosted.org/packages/6c/30/316d194b093cde57d448a4c3209f22e3046c5bb2fb0820b118292b334be7/MarkupSafe-3.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93", size = 12392, upload-time = "2024-10-18T15:21:02.941Z" },
-
{ url = "https://files.pythonhosted.org/packages/f2/96/9cdafba8445d3a53cae530aaf83c38ec64c4d5427d975c974084af5bc5d2/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832", size = 23984, upload-time = "2024-10-18T15:21:03.953Z" },
-
{ url = "https://files.pythonhosted.org/packages/f1/a4/aefb044a2cd8d7334c8a47d3fb2c9f328ac48cb349468cc31c20b539305f/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84", size = 23120, upload-time = "2024-10-18T15:21:06.495Z" },
-
{ url = "https://files.pythonhosted.org/packages/8d/21/5e4851379f88f3fad1de30361db501300d4f07bcad047d3cb0449fc51f8c/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca", size = 23032, upload-time = "2024-10-18T15:21:07.295Z" },
-
{ url = "https://files.pythonhosted.org/packages/00/7b/e92c64e079b2d0d7ddf69899c98842f3f9a60a1ae72657c89ce2655c999d/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798", size = 24057, upload-time = "2024-10-18T15:21:08.073Z" },
-
{ url = "https://files.pythonhosted.org/packages/f9/ac/46f960ca323037caa0a10662ef97d0a4728e890334fc156b9f9e52bcc4ca/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e", size = 23359, upload-time = "2024-10-18T15:21:09.318Z" },
-
{ url = "https://files.pythonhosted.org/packages/69/84/83439e16197337b8b14b6a5b9c2105fff81d42c2a7c5b58ac7b62ee2c3b1/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4", size = 23306, upload-time = "2024-10-18T15:21:10.185Z" },
-
{ url = "https://files.pythonhosted.org/packages/9a/34/a15aa69f01e2181ed8d2b685c0d2f6655d5cca2c4db0ddea775e631918cd/MarkupSafe-3.0.2-cp311-cp311-win32.whl", hash = "sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d", size = 15094, upload-time = "2024-10-18T15:21:11.005Z" },
-
{ url = "https://files.pythonhosted.org/packages/da/b8/3a3bd761922d416f3dc5d00bfbed11f66b1ab89a0c2b6e887240a30b0f6b/MarkupSafe-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b", size = 15521, upload-time = "2024-10-18T15:21:12.911Z" },
-
{ url = "https://files.pythonhosted.org/packages/22/09/d1f21434c97fc42f09d290cbb6350d44eb12f09cc62c9476effdb33a18aa/MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf", size = 14274, upload-time = "2024-10-18T15:21:13.777Z" },
-
{ url = "https://files.pythonhosted.org/packages/6b/b0/18f76bba336fa5aecf79d45dcd6c806c280ec44538b3c13671d49099fdd0/MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225", size = 12348, upload-time = "2024-10-18T15:21:14.822Z" },
-
{ url = "https://files.pythonhosted.org/packages/e0/25/dd5c0f6ac1311e9b40f4af06c78efde0f3b5cbf02502f8ef9501294c425b/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028", size = 24149, upload-time = "2024-10-18T15:21:15.642Z" },
-
{ url = "https://files.pythonhosted.org/packages/f3/f0/89e7aadfb3749d0f52234a0c8c7867877876e0a20b60e2188e9850794c17/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8", size = 23118, upload-time = "2024-10-18T15:21:17.133Z" },
-
{ url = "https://files.pythonhosted.org/packages/d5/da/f2eeb64c723f5e3777bc081da884b414671982008c47dcc1873d81f625b6/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c", size = 22993, upload-time = "2024-10-18T15:21:18.064Z" },
-
{ url = "https://files.pythonhosted.org/packages/da/0e/1f32af846df486dce7c227fe0f2398dc7e2e51d4a370508281f3c1c5cddc/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557", size = 24178, upload-time = "2024-10-18T15:21:18.859Z" },
-
{ url = "https://files.pythonhosted.org/packages/c4/f6/bb3ca0532de8086cbff5f06d137064c8410d10779c4c127e0e47d17c0b71/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22", size = 23319, upload-time = "2024-10-18T15:21:19.671Z" },
-
{ url = "https://files.pythonhosted.org/packages/a2/82/8be4c96ffee03c5b4a034e60a31294daf481e12c7c43ab8e34a1453ee48b/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48", size = 23352, upload-time = "2024-10-18T15:21:20.971Z" },
-
{ url = "https://files.pythonhosted.org/packages/51/ae/97827349d3fcffee7e184bdf7f41cd6b88d9919c80f0263ba7acd1bbcb18/MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = "sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30", size = 15097, upload-time = "2024-10-18T15:21:22.646Z" },
-
{ url = "https://files.pythonhosted.org/packages/c1/80/a61f99dc3a936413c3ee4e1eecac96c0da5ed07ad56fd975f1a9da5bc630/MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87", size = 15601, upload-time = "2024-10-18T15:21:23.499Z" },
-
{ url = "https://files.pythonhosted.org/packages/83/0e/67eb10a7ecc77a0c2bbe2b0235765b98d164d81600746914bebada795e97/MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd", size = 14274, upload-time = "2024-10-18T15:21:24.577Z" },
-
{ url = "https://files.pythonhosted.org/packages/2b/6d/9409f3684d3335375d04e5f05744dfe7e9f120062c9857df4ab490a1031a/MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430", size = 12352, upload-time = "2024-10-18T15:21:25.382Z" },
-
{ url = "https://files.pythonhosted.org/packages/d2/f5/6eadfcd3885ea85fe2a7c128315cc1bb7241e1987443d78c8fe712d03091/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094", size = 24122, upload-time = "2024-10-18T15:21:26.199Z" },
-
{ url = "https://files.pythonhosted.org/packages/0c/91/96cf928db8236f1bfab6ce15ad070dfdd02ed88261c2afafd4b43575e9e9/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396", size = 23085, upload-time = "2024-10-18T15:21:27.029Z" },
-
{ url = "https://files.pythonhosted.org/packages/c2/cf/c9d56af24d56ea04daae7ac0940232d31d5a8354f2b457c6d856b2057d69/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79", size = 22978, upload-time = "2024-10-18T15:21:27.846Z" },
-
{ url = "https://files.pythonhosted.org/packages/2a/9f/8619835cd6a711d6272d62abb78c033bda638fdc54c4e7f4272cf1c0962b/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a", size = 24208, upload-time = "2024-10-18T15:21:28.744Z" },
-
{ url = "https://files.pythonhosted.org/packages/f9/bf/176950a1792b2cd2102b8ffeb5133e1ed984547b75db47c25a67d3359f77/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca", size = 23357, upload-time = "2024-10-18T15:21:29.545Z" },
-
{ url = "https://files.pythonhosted.org/packages/ce/4f/9a02c1d335caabe5c4efb90e1b6e8ee944aa245c1aaaab8e8a618987d816/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c", size = 23344, upload-time = "2024-10-18T15:21:30.366Z" },
-
{ url = "https://files.pythonhosted.org/packages/ee/55/c271b57db36f748f0e04a759ace9f8f759ccf22b4960c270c78a394f58be/MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1", size = 15101, upload-time = "2024-10-18T15:21:31.207Z" },
-
{ url = "https://files.pythonhosted.org/packages/29/88/07df22d2dd4df40aba9f3e402e6dc1b8ee86297dddbad4872bd5e7b0094f/MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f", size = 15603, upload-time = "2024-10-18T15:21:32.032Z" },
-
{ url = "https://files.pythonhosted.org/packages/62/6a/8b89d24db2d32d433dffcd6a8779159da109842434f1dd2f6e71f32f738c/MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c", size = 14510, upload-time = "2024-10-18T15:21:33.625Z" },
-
{ url = "https://files.pythonhosted.org/packages/7a/06/a10f955f70a2e5a9bf78d11a161029d278eeacbd35ef806c3fd17b13060d/MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb", size = 12486, upload-time = "2024-10-18T15:21:34.611Z" },
-
{ url = "https://files.pythonhosted.org/packages/34/cf/65d4a571869a1a9078198ca28f39fba5fbb910f952f9dbc5220afff9f5e6/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c", size = 25480, upload-time = "2024-10-18T15:21:35.398Z" },
-
{ url = "https://files.pythonhosted.org/packages/0c/e3/90e9651924c430b885468b56b3d597cabf6d72be4b24a0acd1fa0e12af67/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d", size = 23914, upload-time = "2024-10-18T15:21:36.231Z" },
-
{ url = "https://files.pythonhosted.org/packages/66/8c/6c7cf61f95d63bb866db39085150df1f2a5bd3335298f14a66b48e92659c/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe", size = 23796, upload-time = "2024-10-18T15:21:37.073Z" },
-
{ url = "https://files.pythonhosted.org/packages/bb/35/cbe9238ec3f47ac9a7c8b3df7a808e7cb50fe149dc7039f5f454b3fba218/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5", size = 25473, upload-time = "2024-10-18T15:21:37.932Z" },
-
{ url = "https://files.pythonhosted.org/packages/e6/32/7621a4382488aa283cc05e8984a9c219abad3bca087be9ec77e89939ded9/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a", size = 24114, upload-time = "2024-10-18T15:21:39.799Z" },
-
{ url = "https://files.pythonhosted.org/packages/0d/80/0985960e4b89922cb5a0bac0ed39c5b96cbc1a536a99f30e8c220a996ed9/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9", size = 24098, upload-time = "2024-10-18T15:21:40.813Z" },
-
{ url = "https://files.pythonhosted.org/packages/82/78/fedb03c7d5380df2427038ec8d973587e90561b2d90cd472ce9254cf348b/MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6", size = 15208, upload-time = "2024-10-18T15:21:41.814Z" },
-
{ url = "https://files.pythonhosted.org/packages/4f/65/6079a46068dfceaeabb5dcad6d674f5f5c61a6fa5673746f42a9f4c233b3/MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f", size = 15739, upload-time = "2024-10-18T15:21:42.784Z" },
-
{ url = "https://files.pythonhosted.org/packages/a7/ea/9b1530c3fdeeca613faeb0fb5cbcf2389d816072fab72a71b45749ef6062/MarkupSafe-3.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:eaa0a10b7f72326f1372a713e73c3f739b524b3af41feb43e4921cb529f5929a", size = 14344, upload-time = "2024-10-18T15:21:43.721Z" },
-
{ url = "https://files.pythonhosted.org/packages/4b/c2/fbdbfe48848e7112ab05e627e718e854d20192b674952d9042ebd8c9e5de/MarkupSafe-3.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:48032821bbdf20f5799ff537c7ac3d1fba0ba032cfc06194faffa8cda8b560ff", size = 12389, upload-time = "2024-10-18T15:21:44.666Z" },
-
{ url = "https://files.pythonhosted.org/packages/f0/25/7a7c6e4dbd4f867d95d94ca15449e91e52856f6ed1905d58ef1de5e211d0/MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a9d3f5f0901fdec14d8d2f66ef7d035f2157240a433441719ac9a3fba440b13", size = 21607, upload-time = "2024-10-18T15:21:45.452Z" },
-
{ url = "https://files.pythonhosted.org/packages/53/8f/f339c98a178f3c1e545622206b40986a4c3307fe39f70ccd3d9df9a9e425/MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88b49a3b9ff31e19998750c38e030fc7bb937398b1f78cfa599aaef92d693144", size = 20728, upload-time = "2024-10-18T15:21:46.295Z" },
-
{ url = "https://files.pythonhosted.org/packages/1a/03/8496a1a78308456dbd50b23a385c69b41f2e9661c67ea1329849a598a8f9/MarkupSafe-3.0.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cfad01eed2c2e0c01fd0ecd2ef42c492f7f93902e39a42fc9ee1692961443a29", size = 20826, upload-time = "2024-10-18T15:21:47.134Z" },
-
{ url = "https://files.pythonhosted.org/packages/e6/cf/0a490a4bd363048c3022f2f475c8c05582179bb179defcee4766fb3dcc18/MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1225beacc926f536dc82e45f8a4d68502949dc67eea90eab715dea3a21c1b5f0", size = 21843, upload-time = "2024-10-18T15:21:48.334Z" },
-
{ url = "https://files.pythonhosted.org/packages/19/a3/34187a78613920dfd3cdf68ef6ce5e99c4f3417f035694074beb8848cd77/MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:3169b1eefae027567d1ce6ee7cae382c57fe26e82775f460f0b2778beaad66c0", size = 21219, upload-time = "2024-10-18T15:21:49.587Z" },
-
{ url = "https://files.pythonhosted.org/packages/17/d8/5811082f85bb88410ad7e452263af048d685669bbbfb7b595e8689152498/MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:eb7972a85c54febfb25b5c4b4f3af4dcc731994c7da0d8a0b4a6eb0640e1d178", size = 20946, upload-time = "2024-10-18T15:21:50.441Z" },
-
{ url = "https://files.pythonhosted.org/packages/7c/31/bd635fb5989440d9365c5e3c47556cfea121c7803f5034ac843e8f37c2f2/MarkupSafe-3.0.2-cp39-cp39-win32.whl", hash = "sha256:8c4e8c3ce11e1f92f6536ff07154f9d49677ebaaafc32db9db4620bc11ed480f", size = 15063, upload-time = "2024-10-18T15:21:51.385Z" },
-
{ url = "https://files.pythonhosted.org/packages/b3/73/085399401383ce949f727afec55ec3abd76648d04b9f22e1c0e99cb4bec3/MarkupSafe-3.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:6e296a513ca3d94054c2c881cc913116e90fd030ad1c656b3869762b754f5f8a", size = 15506, upload-time = "2024-10-18T15:21:52.974Z" },
-
]
-
-
[[package]]
-
name = "mdit-py-plugins"
-
version = "0.4.2"
-
source = { registry = "https://pypi.org/simple" }
-
dependencies = [
-
{ name = "markdown-it-py" },
-
]
-
sdist = { url = "https://files.pythonhosted.org/packages/19/03/a2ecab526543b152300717cf232bb4bb8605b6edb946c845016fa9c9c9fd/mdit_py_plugins-0.4.2.tar.gz", hash = "sha256:5f2cd1fdb606ddf152d37ec30e46101a60512bc0e5fa1a7002c36647b09e26b5", size = 43542, upload-time = "2024-09-09T20:27:49.564Z" }
-
wheels = [
-
{ url = "https://files.pythonhosted.org/packages/a7/f7/7782a043553ee469c1ff49cfa1cdace2d6bf99a1f333cf38676b3ddf30da/mdit_py_plugins-0.4.2-py3-none-any.whl", hash = "sha256:0c673c3f889399a33b95e88d2f0d111b4447bdfea7f237dab2d488f459835636", size = 55316, upload-time = "2024-09-09T20:27:48.397Z" },
]
[[package]]
···
]
[[package]]
-
name = "textual"
-
version = "4.0.0"
-
source = { registry = "https://pypi.org/simple" }
-
dependencies = [
-
{ name = "markdown-it-py", extra = ["linkify", "plugins"] },
-
{ name = "platformdirs" },
-
{ name = "rich" },
-
{ name = "typing-extensions" },
-
]
-
sdist = { url = "https://files.pythonhosted.org/packages/f1/22/a2812ab1e5b0cb3a327a4ea79b430234c2271ba13462b989f435b40a247d/textual-4.0.0.tar.gz", hash = "sha256:1cab4ea3cfc0e47ae773405cdd6bc2a17ed76ff7b648379ac8017ea89c5ad28c", size = 1606128, upload-time = "2025-07-12T09:41:20.812Z" }
-
wheels = [
-
{ url = "https://files.pythonhosted.org/packages/d8/e4/ebe27c54d2534cc41d00ea1d78b783763f97abf3e3d6dd41e5536daa52a5/textual-4.0.0-py3-none-any.whl", hash = "sha256:214051640f890676a670aa7d29cd2a37d27cfe6b2cf866e9d5abc3b6c89c5800", size = 692382, upload-time = "2025-07-12T09:41:18.828Z" },
-
]
-
-
[[package]]
name = "thicket"
source = { editable = "." }
dependencies = [
{ name = "bleach" },
{ name = "email-validator" },
{ name = "feedparser" },
-
{ name = "flask" },
{ name = "gitpython" },
{ name = "httpx" },
{ name = "pendulum" },
···
{ name = "pydantic-settings" },
{ name = "pyyaml" },
{ name = "rich" },
-
{ name = "textual" },
{ name = "typer" },
]
···
{ name = "types-pyyaml" },
]
+
[package.dev-dependencies]
+
dev = [
+
{ name = "pytest" },
+
]
+
[package.metadata]
requires-dist = [
{ name = "black", marker = "extra == 'dev'", specifier = ">=24.0.0" },
{ name = "bleach", specifier = ">=6.0.0" },
{ name = "email-validator" },
{ name = "feedparser", specifier = ">=6.0.11" },
-
{ name = "flask", specifier = ">=3.1.1" },
{ name = "gitpython", specifier = ">=3.1.40" },
{ name = "httpx", specifier = ">=0.28.0" },
{ name = "mypy", marker = "extra == 'dev'", specifier = ">=1.13.0" },
···
{ name = "pyyaml", specifier = ">=6.0.0" },
{ name = "rich", specifier = ">=13.0.0" },
{ name = "ruff", marker = "extra == 'dev'", specifier = ">=0.8.0" },
-
{ name = "textual", specifier = ">=4.0.0" },
{ name = "typer", specifier = ">=0.15.0" },
{ name = "types-pyyaml", marker = "extra == 'dev'", specifier = ">=6.0.0" },
]
provides-extras = ["dev"]
+
+
[package.metadata.requires-dev]
+
dev = [{ name = "pytest", specifier = ">=8.4.1" }]
[[package]]
name = "tomli"
···
[[package]]
-
name = "uc-micro-py"
-
version = "1.0.3"
-
source = { registry = "https://pypi.org/simple" }
-
sdist = { url = "https://files.pythonhosted.org/packages/91/7a/146a99696aee0609e3712f2b44c6274566bc368dfe8375191278045186b8/uc-micro-py-1.0.3.tar.gz", hash = "sha256:d321b92cff673ec58027c04015fcaa8bb1e005478643ff4a500882eaab88c48a", size = 6043, upload-time = "2024-02-09T16:52:01.654Z" }
-
wheels = [
-
{ url = "https://files.pythonhosted.org/packages/37/87/1f677586e8ac487e29672e4b17455758fce261de06a0d086167bb760361a/uc_micro_py-1.0.3-py3-none-any.whl", hash = "sha256:db1dffff340817673d7b466ec86114a9dc0e9d4d9b5ba229d9d60e5c12600cd5", size = 6229, upload-time = "2024-02-09T16:52:00.371Z" },
-
]
-
-
[[package]]
name = "webencodings"
version = "0.5.1"
source = { registry = "https://pypi.org/simple" }
···
wheels = [
{ url = "https://files.pythonhosted.org/packages/f4/24/2a3e3df732393fed8b3ebf2ec078f05546de641fe1b667ee316ec1dcf3b7/webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78", size = 11774, upload-time = "2017-04-05T20:21:32.581Z" },
-
-
[[package]]
-
name = "werkzeug"
-
version = "3.1.3"
-
source = { registry = "https://pypi.org/simple" }
-
dependencies = [
-
{ name = "markupsafe" },
-
]
-
sdist = { url = "https://files.pythonhosted.org/packages/9f/69/83029f1f6300c5fb2471d621ab06f6ec6b3324685a2ce0f9777fd4a8b71e/werkzeug-3.1.3.tar.gz", hash = "sha256:60723ce945c19328679790e3282cc758aa4a6040e4bb330f53d30fa546d44746", size = 806925, upload-time = "2024-11-08T15:52:18.093Z" }
-
wheels = [
-
{ url = "https://files.pythonhosted.org/packages/52/24/ab44c871b0f07f491e5d2ad12c9bd7358e527510618cb1b803a88e986db1/werkzeug-3.1.3-py3-none-any.whl", hash = "sha256:54b78bf3716d19a65be4fceccc0d1d7b89e608834989dfae50ea87564639213e", size = 224498, upload-time = "2024-11-08T15:52:16.132Z" },
-
]
-
-
[[package]]
-
name = "zipp"
-
version = "3.23.0"
-
source = { registry = "https://pypi.org/simple" }
-
sdist = { url = "https://files.pythonhosted.org/packages/e3/02/0f2892c661036d50ede074e376733dca2ae7c6eb617489437771209d4180/zipp-3.23.0.tar.gz", hash = "sha256:a07157588a12518c9d4034df3fbbee09c814741a33ff63c05fa29d26a2404166", size = 25547, upload-time = "2025-06-08T17:06:39.4Z" }
-
wheels = [
-
{ url = "https://files.pythonhosted.org/packages/2e/54/647ade08bf0db230bfea292f893923872fd20be6ac6f53b2b936ba839d75/zipp-3.23.0-py3-none-any.whl", hash = "sha256:071652d6115ed432f5ce1d34c336c0adfd6a884660d1e9712a256d3d3bd4b14e", size = 10276, upload-time = "2025-06-08T17:06:38.034Z" },
-
]