Manage Atom feeds in a persistent git repository

Add thread visualization with interactive D3 force graph

Major refactoring of the links system to store links/backlinks directly in each
entry's JSON file instead of in a separate metadata file. Replaced the old index
command with a new threads command that visualizes conversation threads using
both terminal and web interfaces.

Key changes:
- Add links and backlinks fields to AtomEntry model
- Rewrite links command to update individual entry files with extracted URLs
- Implement bidirectional link tracking (outbound links and inbound backlinks)
- Remove old index command and reference_parser module
- Create new threads command with:
  - Thread detection using a connected-components algorithm (see the sketch after this list)
  - Link type categorization (self/user/external references)
  - Textual-based terminal UI for browsing threads
  - Flask web server with D3.js force-directed graph visualization
  - Enhanced cross-user attraction in the force simulation for clearer conversation threads
- Add Flask and textual as dependencies
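
A minimal, standalone sketch of the connected-components idea behind thread detection (toy entry IDs; this does not use the actual ThreadGraph class from threads_cmd.py):

```python
from collections import defaultdict

# Hypothetical resolved links: entry_id -> entry_ids it points at.
links = {
    "alice/1": ["bob/1"],
    "bob/1": [],
    "carol/1": ["alice/1"],
    "dave/1": [],  # standalone post with no connections
}

# Treat links as undirected edges: a link in either direction joins a thread.
adj = defaultdict(set)
for src, targets in links.items():
    adj[src]  # ensure isolated entries still appear as nodes
    for dst in targets:
        adj[src].add(dst)
        adj[dst].add(src)

def find_threads(adj):
    """Return connected components; each component is one conversation thread."""
    seen, threads = set(), []
    for start in adj:
        if start in seen:
            continue
        stack, component = [start], set()
        while stack:
            node = stack.pop()
            if node in seen:
                continue
            seen.add(node)
            component.add(node)
            stack.extend(adj.get(node, set()) - seen)
        threads.append(component)
    return threads

# Two threads: {alice/1, bob/1, carol/1} and the standalone {dave/1}.
print(find_threads(adj))
```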

The web visualization includes user/link filtering, interactive tooltips, drag/zoom support,
and visual emphasis on cross-user conversations through stronger attraction forces and
thicker link lines.

🤖 Generated with [Claude Code](https://claude.ai/code)

Co-Authored-By: Claude <noreply@anthropic.com>

+3 -1
pyproject.toml
···
"bleach>=6.0.0",
"platformdirs>=4.0.0",
"pyyaml>=6.0.0",
- "email_validator"
+ "email_validator",
+ "textual>=4.0.0",
+ "flask>=3.1.1",
]
[project.optional-dependencies]
+2 -2
src/thicket/cli/commands/__init__.py
···
"""CLI commands for thicket."""
# Import all commands to register them with the main app
- from . import add, duplicates, index_cmd, info_cmd, init, links_cmd, list_cmd, sync
+ from . import add, duplicates, info_cmd, init, links_cmd, list_cmd, sync, threads_cmd
- __all__ = ["add", "duplicates", "index_cmd", "info_cmd", "init", "links_cmd", "list_cmd", "sync"]
+ __all__ = ["add", "duplicates", "info_cmd", "init", "links_cmd", "list_cmd", "sync", "threads_cmd"]
-427
src/thicket/cli/commands/index_cmd.py
···
-
"""CLI command for building reference index from blog entries."""
-
-
import json
-
from pathlib import Path
-
from typing import Optional
-
-
import typer
-
from rich.console import Console
-
from rich.progress import (
-
BarColumn,
-
Progress,
-
SpinnerColumn,
-
TaskProgressColumn,
-
TextColumn,
-
)
-
from rich.table import Table
-
-
from ...core.git_store import GitStore
-
from ...core.reference_parser import ReferenceIndex, ReferenceParser
-
from ..main import app
-
from ..utils import get_tsv_mode, load_config
-
-
console = Console()
-
-
-
@app.command()
-
def index(
-
config_file: Optional[Path] = typer.Option(
-
None,
-
"--config",
-
"-c",
-
help="Path to configuration file",
-
),
-
output_file: Optional[Path] = typer.Option(
-
None,
-
"--output",
-
"-o",
-
help="Path to output index file (default: updates links.json in git store)",
-
),
-
verbose: bool = typer.Option(
-
False,
-
"--verbose",
-
"-v",
-
help="Show detailed progress information",
-
),
-
) -> None:
-
"""Build a reference index showing which blog entries reference others.
-
-
This command analyzes all blog entries to detect cross-references between
-
different blogs, creating an index that can be used to build threaded
-
views of related content.
-
-
Updates the unified links.json file with reference data.
-
"""
-
try:
-
# Load configuration
-
config = load_config(config_file)
-
-
# Initialize Git store
-
git_store = GitStore(config.git_store)
-
-
# Initialize reference parser
-
parser = ReferenceParser()
-
-
# Build user domain mapping
-
if verbose:
-
console.print("Building user domain mapping...")
-
user_domains = parser.build_user_domain_mapping(git_store)
-
-
if verbose:
-
console.print(f"Found {len(user_domains)} users with {sum(len(d) for d in user_domains.values())} total domains")
-
-
# Initialize reference index
-
ref_index = ReferenceIndex()
-
ref_index.user_domains = user_domains
-
-
# Get all users
-
index = git_store._load_index()
-
users = list(index.users.keys())
-
-
if not users:
-
console.print("[yellow]No users found in Git store[/yellow]")
-
raise typer.Exit(0)
-
-
# Process all entries
-
total_entries = 0
-
total_references = 0
-
all_references = []
-
-
with Progress(
-
SpinnerColumn(),
-
TextColumn("[progress.description]{task.description}"),
-
BarColumn(),
-
TaskProgressColumn(),
-
console=console,
-
) as progress:
-
-
# Count total entries first
-
counting_task = progress.add_task("Counting entries...", total=len(users))
-
entry_counts = {}
-
for username in users:
-
entries = git_store.list_entries(username)
-
entry_counts[username] = len(entries)
-
total_entries += len(entries)
-
progress.advance(counting_task)
-
-
progress.remove_task(counting_task)
-
-
# Process entries - extract references
-
processing_task = progress.add_task(
-
f"Extracting references from {total_entries} entries...",
-
total=total_entries
-
)
-
-
for username in users:
-
entries = git_store.list_entries(username)
-
-
for entry in entries:
-
# Extract references from this entry
-
references = parser.extract_references(entry, username, user_domains)
-
all_references.extend(references)
-
-
progress.advance(processing_task)
-
-
if verbose and references:
-
console.print(f" Found {len(references)} references in {username}:{entry.title[:50]}...")
-
-
progress.remove_task(processing_task)
-
-
# Resolve target_entry_ids for references
-
if all_references:
-
resolve_task = progress.add_task(
-
f"Resolving {len(all_references)} references...",
-
total=len(all_references)
-
)
-
-
if verbose:
-
console.print(f"Resolving target entry IDs for {len(all_references)} references...")
-
-
resolved_references = parser.resolve_target_entry_ids(all_references, git_store)
-
-
# Count resolved references
-
resolved_count = sum(1 for ref in resolved_references if ref.target_entry_id is not None)
-
if verbose:
-
console.print(f"Resolved {resolved_count} out of {len(all_references)} references")
-
-
# Add resolved references to index
-
for ref in resolved_references:
-
ref_index.add_reference(ref)
-
total_references += 1
-
progress.advance(resolve_task)
-
-
progress.remove_task(resolve_task)
-
-
# Determine output path
-
if output_file:
-
output_path = output_file
-
else:
-
output_path = config.git_store / "links.json"
-
-
# Load existing links data or create new structure
-
if output_path.exists() and not output_file:
-
# Load existing unified structure
-
with open(output_path) as f:
-
existing_data = json.load(f)
-
else:
-
# Create new structure
-
existing_data = {
-
"links": {},
-
"reverse_mapping": {},
-
"user_domains": {}
-
}
-
-
# Update with reference data
-
existing_data["references"] = ref_index.to_dict()["references"]
-
existing_data["user_domains"] = {k: list(v) for k, v in user_domains.items()}
-
-
# Save updated structure
-
with open(output_path, "w") as f:
-
json.dump(existing_data, f, indent=2, default=str)
-
-
# Show summary
-
if not get_tsv_mode():
-
console.print("\n[green]✓ Reference index built successfully[/green]")
-
-
# Create summary table or TSV output
-
if get_tsv_mode():
-
print("Metric\tCount")
-
print(f"Total Users\t{len(users)}")
-
print(f"Total Entries\t{total_entries}")
-
print(f"Total References\t{total_references}")
-
print(f"Outbound Refs\t{len(ref_index.outbound_refs)}")
-
print(f"Inbound Refs\t{len(ref_index.inbound_refs)}")
-
print(f"Output File\t{output_path}")
-
else:
-
table = Table(title="Reference Index Summary")
-
table.add_column("Metric", style="cyan")
-
table.add_column("Count", style="green")
-
-
table.add_row("Total Users", str(len(users)))
-
table.add_row("Total Entries", str(total_entries))
-
table.add_row("Total References", str(total_references))
-
table.add_row("Outbound Refs", str(len(ref_index.outbound_refs)))
-
table.add_row("Inbound Refs", str(len(ref_index.inbound_refs)))
-
table.add_row("Output File", str(output_path))
-
-
console.print(table)
-
-
# Show some interesting statistics
-
if total_references > 0:
-
if not get_tsv_mode():
-
console.print("\n[bold]Reference Statistics:[/bold]")
-
-
# Most referenced users
-
target_counts = {}
-
unresolved_domains = set()
-
-
for ref in ref_index.references:
-
if ref.target_username:
-
target_counts[ref.target_username] = target_counts.get(ref.target_username, 0) + 1
-
else:
-
# Track unresolved domains
-
from urllib.parse import urlparse
-
domain = urlparse(ref.target_url).netloc.lower()
-
unresolved_domains.add(domain)
-
-
if target_counts:
-
if get_tsv_mode():
-
print("Referenced User\tReference Count")
-
for username, count in sorted(target_counts.items(), key=lambda x: x[1], reverse=True)[:5]:
-
print(f"{username}\t{count}")
-
else:
-
console.print("\nMost referenced users:")
-
for username, count in sorted(target_counts.items(), key=lambda x: x[1], reverse=True)[:5]:
-
console.print(f" {username}: {count} references")
-
-
if unresolved_domains and verbose:
-
if get_tsv_mode():
-
print("Unresolved Domain\tCount")
-
for domain in sorted(list(unresolved_domains)[:10]):
-
print(f"{domain}\t1")
-
if len(unresolved_domains) > 10:
-
print(f"... and {len(unresolved_domains) - 10} more\t...")
-
else:
-
console.print(f"\nUnresolved domains: {len(unresolved_domains)}")
-
for domain in sorted(list(unresolved_domains)[:10]):
-
console.print(f" {domain}")
-
if len(unresolved_domains) > 10:
-
console.print(f" ... and {len(unresolved_domains) - 10} more")
-
-
except Exception as e:
-
console.print(f"[red]Error building reference index: {e}[/red]")
-
if verbose:
-
console.print_exception()
-
raise typer.Exit(1)
-
-
-
@app.command()
-
def threads(
-
config_file: Optional[Path] = typer.Option(
-
None,
-
"--config",
-
"-c",
-
help="Path to configuration file",
-
),
-
index_file: Optional[Path] = typer.Option(
-
None,
-
"--index",
-
"-i",
-
help="Path to reference index file (default: links.json in git store)",
-
),
-
username: Optional[str] = typer.Option(
-
None,
-
"--username",
-
"-u",
-
help="Show threads for specific username only",
-
),
-
entry_id: Optional[str] = typer.Option(
-
None,
-
"--entry",
-
"-e",
-
help="Show thread for specific entry ID",
-
),
-
min_size: int = typer.Option(
-
2,
-
"--min-size",
-
"-m",
-
help="Minimum thread size to display",
-
),
-
) -> None:
-
"""Show threaded view of related blog entries.
-
-
This command uses the reference index to show which blog entries
-
are connected through cross-references, creating an email-style
-
threaded view of the conversation.
-
-
Reads reference data from the unified links.json file.
-
"""
-
try:
-
# Load configuration
-
config = load_config(config_file)
-
-
# Determine index file path
-
if index_file:
-
index_path = index_file
-
else:
-
index_path = config.git_store / "links.json"
-
-
if not index_path.exists():
-
console.print(f"[red]Links file not found: {index_path}[/red]")
-
console.print("Run 'thicket links' and 'thicket index' first to build the reference index")
-
raise typer.Exit(1)
-
-
# Load unified data
-
with open(index_path) as f:
-
unified_data = json.load(f)
-
-
# Check if references exist in the unified structure
-
if "references" not in unified_data:
-
console.print(f"[red]No references found in {index_path}[/red]")
-
console.print("Run 'thicket index' first to build the reference index")
-
raise typer.Exit(1)
-
-
# Extract reference data and reconstruct ReferenceIndex
-
ref_index = ReferenceIndex.from_dict({
-
"references": unified_data["references"],
-
"user_domains": unified_data.get("user_domains", {})
-
})
-
-
# Initialize Git store to get entry details
-
git_store = GitStore(config.git_store)
-
-
if entry_id and username:
-
# Show specific thread
-
thread_members = ref_index.get_thread_members(username, entry_id)
-
_display_thread(thread_members, ref_index, git_store, f"Thread for {username}:{entry_id}")
-
-
elif username:
-
# Show all threads involving this user
-
user_index = git_store._load_index()
-
user = user_index.get_user(username)
-
if not user:
-
console.print(f"[red]User not found: {username}[/red]")
-
raise typer.Exit(1)
-
-
entries = git_store.list_entries(username)
-
threads_found = set()
-
-
console.print(f"[bold]Threads involving {username}:[/bold]\n")
-
-
for entry in entries:
-
thread_members = ref_index.get_thread_members(username, entry.id)
-
if len(thread_members) >= min_size:
-
thread_key = tuple(sorted(thread_members))
-
if thread_key not in threads_found:
-
threads_found.add(thread_key)
-
_display_thread(thread_members, ref_index, git_store, f"Thread #{len(threads_found)}")
-
-
else:
-
# Show all threads
-
console.print("[bold]All conversation threads:[/bold]\n")
-
-
all_threads = set()
-
processed_entries = set()
-
-
# Get all entries
-
user_index = git_store._load_index()
-
for username in user_index.users.keys():
-
entries = git_store.list_entries(username)
-
for entry in entries:
-
entry_key = (username, entry.id)
-
if entry_key in processed_entries:
-
continue
-
-
thread_members = ref_index.get_thread_members(username, entry.id)
-
if len(thread_members) >= min_size:
-
thread_key = tuple(sorted(thread_members))
-
if thread_key not in all_threads:
-
all_threads.add(thread_key)
-
_display_thread(thread_members, ref_index, git_store, f"Thread #{len(all_threads)}")
-
-
# Mark all members as processed
-
for member in thread_members:
-
processed_entries.add(member)
-
-
if not all_threads:
-
console.print("[yellow]No conversation threads found[/yellow]")
-
console.print(f"(minimum thread size: {min_size})")
-
-
except Exception as e:
-
console.print(f"[red]Error showing threads: {e}[/red]")
-
raise typer.Exit(1)
-
-
-
def _display_thread(thread_members, ref_index, git_store, title):
-
"""Display a single conversation thread."""
-
console.print(f"[bold cyan]{title}[/bold cyan]")
-
console.print(f"Thread size: {len(thread_members)} entries")
-
-
# Get entry details for each member
-
thread_entries = []
-
for username, entry_id in thread_members:
-
entry = git_store.get_entry(username, entry_id)
-
if entry:
-
thread_entries.append((username, entry))
-
-
# Sort by publication date
-
thread_entries.sort(key=lambda x: x[1].published or x[1].updated)
-
-
# Display entries
-
for i, (username, entry) in enumerate(thread_entries):
-
prefix = "├─" if i < len(thread_entries) - 1 else "└─"
-
-
# Get references for this entry
-
outbound = ref_index.get_outbound_refs(username, entry.id)
-
inbound = ref_index.get_inbound_refs(username, entry.id)
-
-
ref_info = ""
-
if outbound or inbound:
-
ref_info = f" ({len(outbound)} out, {len(inbound)} in)"
-
-
console.print(f" {prefix} [{username}] {entry.title[:60]}...{ref_info}")
-
-
if entry.published:
-
console.print(f" Published: {entry.published.strftime('%Y-%m-%d')}")
-
-
console.print() # Empty line after each thread
+33 -52
src/thicket/cli/commands/info_cmd.py
···
"""CLI command for displaying detailed information about a specific atom entry."""
-
import json
from pathlib import Path
from typing import Optional
···
from rich.text import Text
from ...core.git_store import GitStore
-
from ...core.reference_parser import ReferenceIndex
from ..main import app
from ..utils import load_config, get_tsv_mode
···
console.print(f"[red]Entry with {'URL' if is_url else 'atom ID'} '{identifier}' not found in any user's entries[/red]")
raise typer.Exit(1)
-
# Load reference index if available
-
links_path = config.git_store / "links.json"
-
ref_index = None
-
if links_path.exists():
-
with open(links_path) as f:
-
unified_data = json.load(f)
-
-
# Check if references exist in the unified structure
-
if "references" in unified_data:
-
ref_index = ReferenceIndex.from_dict({
-
"references": unified_data["references"],
-
"user_domains": unified_data.get("user_domains", {})
-
})
-
# Display information
if get_tsv_mode():
-
_display_entry_info_tsv(entry, found_username, ref_index, show_content)
+
_display_entry_info_tsv(entry, found_username, show_content)
else:
_display_entry_info(entry, found_username)
-
if ref_index:
-
_display_link_info(entry, found_username, ref_index)
-
else:
-
console.print("\n[yellow]No reference index found. Run 'thicket links' and 'thicket index' to build cross-reference data.[/yellow]")
+
# Display links and backlinks from entry fields
+
_display_link_info(entry, found_username, git_store)
# Optionally display content
if show_content and entry.content:
···
console.print(panel)
-
def _display_link_info(entry, username: str, ref_index: ReferenceIndex) -> None:
+
def _display_link_info(entry, username: str, git_store: GitStore) -> None:
"""Display inbound and outbound link information."""
-
# Get links
-
outbound_refs = ref_index.get_outbound_refs(username, entry.id)
-
inbound_refs = ref_index.get_inbound_refs(username, entry.id)
+
# Get links from entry fields
+
outbound_links = getattr(entry, 'links', [])
+
backlinks = getattr(entry, 'backlinks', [])
-
if not outbound_refs and not inbound_refs:
+
if not outbound_links and not backlinks:
console.print("\n[dim]No cross-references found for this entry.[/dim]")
return
# Create links table
links_table = Table(title="Cross-References")
links_table.add_column("Direction", style="cyan", width=10)
-
links_table.add_column("Target/Source", style="green", width=20)
-
links_table.add_column("URL", style="blue", width=50)
+
links_table.add_column("Target/Source", style="green", width=30)
+
links_table.add_column("URL/ID", style="blue", width=60)
-
# Add outbound references
-
for ref in outbound_refs:
-
target_info = f"{ref.target_username}:{ref.target_entry_id}" if ref.target_username and ref.target_entry_id else "External"
-
links_table.add_row("→ Out", target_info, ref.target_url)
+
# Add outbound links
+
for link in outbound_links:
+
links_table.add_row("→ Out", "External/Other", link)
-
# Add inbound references
-
for ref in inbound_refs:
-
source_info = f"{ref.source_username}:{ref.source_entry_id}"
-
links_table.add_row("← In", source_info, ref.target_url)
+
# Add backlinks (inbound references)
+
for backlink_id in backlinks:
+
# Try to find which user this entry belongs to
+
source_info = backlink_id
+
# Could enhance this by looking up the actual entry to get username
+
links_table.add_row("← In", "Entry", source_info)
console.print()
console.print(links_table)
# Summary
-
console.print(f"\n[bold]Summary:[/bold] {len(outbound_refs)} outbound, {len(inbound_refs)} inbound references")
+
console.print(f"\n[bold]Summary:[/bold] {len(outbound_links)} outbound links, {len(backlinks)} inbound backlinks")
def _display_content(content: str) -> None:
···
console.print(panel)
-
def _display_entry_info_tsv(entry, username: str, ref_index: Optional[ReferenceIndex], show_content: bool) -> None:
+
def _display_entry_info_tsv(entry, username: str, show_content: bool) -> None:
"""Display entry information in TSV format."""
# Basic info
···
if entry.source:
print(f"Source Feed\t{entry.source}")
-
# Add reference info if available
-
if ref_index:
-
outbound_refs = ref_index.get_outbound_refs(username, entry.id)
-
inbound_refs = ref_index.get_inbound_refs(username, entry.id)
-
-
print(f"Outbound References\t{len(outbound_refs)}")
-
print(f"Inbound References\t{len(inbound_refs)}")
+
# Add links info from entry fields
+
outbound_links = getattr(entry, 'links', [])
+
backlinks = getattr(entry, 'backlinks', [])
+
+
if outbound_links or backlinks:
+
print(f"Outbound Links\t{len(outbound_links)}")
+
print(f"Backlinks\t{len(backlinks)}")
-
# Show each reference
-
for ref in outbound_refs:
-
target_info = f"{ref.target_username}:{ref.target_entry_id}" if ref.target_username and ref.target_entry_id else "External"
-
print(f"Outbound Reference\t{target_info}\t{ref.target_url}")
+
# Show each link
+
for link in outbound_links:
+
print(f"→ Link\t{link}")
-
for ref in inbound_refs:
-
source_info = f"{ref.source_username}:{ref.source_entry_id}"
-
print(f"Inbound Reference\t{source_info}\t{ref.target_url}")
+
for backlink_id in backlinks:
+
print(f"← Backlink\t{backlink_id}")
# Show content if requested
if show_content and entry.content:
+1111
src/thicket/cli/commands/threads_cmd.py
···
+
"""CLI command for displaying and browsing thread-graphs of blog posts."""
+
+
from dataclasses import dataclass, field
+
from datetime import datetime
+
from enum import Enum
+
from pathlib import Path
+
from typing import Dict, List, Optional, Set, Tuple
+
+
import typer
+
from rich.console import Console
+
import json
+
import webbrowser
+
import threading
+
import time
+
from flask import Flask, render_template_string, jsonify
+
from textual import events
+
from textual.app import App, ComposeResult
+
from textual.containers import Container, Horizontal, Vertical
+
from textual.reactive import reactive
+
from textual.widget import Widget
+
from textual.widgets import Footer, Header, Label, Static
+
+
from ...core.git_store import GitStore
+
from ...models import AtomEntry
+
from ..main import app
+
from ..utils import get_tsv_mode, load_config
+
+
console = Console()
+
+
+
class LinkType(Enum):
+
"""Types of links between entries."""
+
+
SELF_REFERENCE = "self" # Link to same user's content
+
USER_REFERENCE = "user" # Link to another tracked user
+
EXTERNAL = "external" # Link to external content
+
+
+
@dataclass
+
class ThreadNode:
+
"""Represents a node in the thread graph."""
+
+
entry_id: str
+
username: str
+
entry: AtomEntry
+
outbound_links: List[Tuple[str, LinkType]] = field(
+
default_factory=list
+
) # (url, type)
+
inbound_backlinks: List[str] = field(default_factory=list) # entry_ids
+
+
@property
+
def published_date(self) -> datetime:
+
"""Get the published or updated date for sorting."""
+
return self.entry.published or self.entry.updated
+
+
@property
+
def title(self) -> str:
+
"""Get the entry title."""
+
return self.entry.title
+
+
@property
+
def summary(self) -> str:
+
"""Get a short summary of the entry."""
+
if self.entry.summary:
+
return (
+
self.entry.summary[:100] + "..."
+
if len(self.entry.summary) > 100
+
else self.entry.summary
+
)
+
return ""
+
+
+
@dataclass
+
class ThreadGraph:
+
"""Represents the full thread graph of interconnected posts."""
+
+
nodes: Dict[str, ThreadNode] = field(default_factory=dict) # entry_id -> ThreadNode
+
user_entries: Dict[str, List[str]] = field(
+
default_factory=dict
+
) # username -> [entry_ids]
+
url_to_entry: Dict[str, str] = field(default_factory=dict) # url -> entry_id
+
+
def add_node(self, node: ThreadNode) -> None:
+
"""Add a node to the graph."""
+
self.nodes[node.entry_id] = node
+
+
# Update user entries index
+
if node.username not in self.user_entries:
+
self.user_entries[node.username] = []
+
self.user_entries[node.username].append(node.entry_id)
+
+
# Update URL mapping
+
if node.entry.link:
+
self.url_to_entry[str(node.entry.link)] = node.entry_id
+
+
def get_connected_components(self) -> List[Set[str]]:
+
"""Find all connected components in the graph (threads)."""
+
visited: Set[str] = set()
+
components: List[Set[str]] = []
+
+
for entry_id in self.nodes:
+
if entry_id not in visited:
+
component: Set[str] = set()
+
self._dfs(entry_id, visited, component)
+
components.append(component)
+
+
return components
+
+
def _dfs(self, entry_id: str, visited: Set[str], component: Set[str]) -> None:
+
"""Depth-first search to find connected components."""
+
if entry_id in visited:
+
return
+
+
visited.add(entry_id)
+
component.add(entry_id)
+
+
node = self.nodes.get(entry_id)
+
if not node:
+
return
+
+
# Follow outbound links
+
for url, link_type in node.outbound_links:
+
if url in self.url_to_entry:
+
target_id = self.url_to_entry[url]
+
self._dfs(target_id, visited, component)
+
+
# Follow backlinks
+
for backlink_id in node.inbound_backlinks:
+
self._dfs(backlink_id, visited, component)
+
+
def get_standalone_entries(self) -> List[str]:
+
"""Get entries with no connections."""
+
standalone = []
+
for entry_id, node in self.nodes.items():
+
if not node.outbound_links and not node.inbound_backlinks:
+
standalone.append(entry_id)
+
return standalone
+
+
def sort_component_chronologically(self, component: Set[str]) -> List[str]:
+
"""Sort a component by published date."""
+
nodes = [
+
self.nodes[entry_id] for entry_id in component if entry_id in self.nodes
+
]
+
nodes.sort(key=lambda n: n.published_date)
+
return [n.entry_id for n in nodes]
+
+
+
def build_thread_graph(git_store: GitStore) -> ThreadGraph:
+
"""Build the thread graph from all entries in the git store."""
+
graph = ThreadGraph()
+
+
# Get all users from index
+
index = git_store._load_index()
+
user_domains = {}
+
+
# Build user domain mapping
+
for username, user_metadata in index.users.items():
+
domains = set()
+
+
# Add domains from feeds
+
for feed_url in user_metadata.feeds:
+
from urllib.parse import urlparse
+
+
domain = urlparse(str(feed_url)).netloc.lower()
+
if domain:
+
domains.add(domain)
+
+
# Add domain from homepage
+
if user_metadata.homepage:
+
domain = urlparse(str(user_metadata.homepage)).netloc.lower()
+
if domain:
+
domains.add(domain)
+
+
user_domains[username] = domains
+
+
# Process all entries
+
for username in index.users:
+
entries = git_store.list_entries(username)
+
+
for entry in entries:
+
# Create node
+
node = ThreadNode(entry_id=entry.id, username=username, entry=entry)
+
+
# Process outbound links
+
for link in getattr(entry, "links", []):
+
link_type = categorize_link(link, username, user_domains)
+
node.outbound_links.append((link, link_type))
+
+
# Copy backlinks
+
node.inbound_backlinks = getattr(entry, "backlinks", [])
+
+
# Add to graph
+
graph.add_node(node)
+
+
return graph
+
+
+
def categorize_link(
+
url: str, source_username: str, user_domains: Dict[str, Set[str]]
+
) -> LinkType:
+
"""Categorize a link as self-reference, user reference, or external."""
+
from urllib.parse import urlparse
+
+
try:
+
parsed = urlparse(url)
+
domain = parsed.netloc.lower()
+
+
# Check if it's a self-reference
+
if domain in user_domains.get(source_username, set()):
+
return LinkType.SELF_REFERENCE
+
+
# Check if it's a reference to another tracked user
+
for username, domains in user_domains.items():
+
if username != source_username and domain in domains:
+
return LinkType.USER_REFERENCE
+
+
# Otherwise it's external
+
return LinkType.EXTERNAL
+
+
except Exception:
+
return LinkType.EXTERNAL
+
+
+
class ThreadTreeWidget(Static):
+
"""Widget for displaying a thread as a tree."""
+
+
def __init__(self, component: Set[str], graph: ThreadGraph, **kwargs):
+
super().__init__(**kwargs)
+
self.component = component
+
self.graph = graph
+
+
def compose(self) -> ComposeResult:
+
"""Create the tree display."""
+
# Sort entries chronologically
+
sorted_ids = self.graph.sort_component_chronologically(self.component)
+
+
# Build tree structure as text
+
content_lines = ["Thread:"]
+
added_nodes: Set[str] = set()
+
+
# Add nodes in chronological order, showing connections
+
for entry_id in sorted_ids:
+
if entry_id not in added_nodes:
+
self._add_node_to_text(content_lines, entry_id, added_nodes, 0)
+
+
# Join all lines into content
+
content = "\n".join(content_lines)
+
+
# Create a Static widget with the content
+
yield Static(content, id="thread-content")
+
+
def _add_node_to_text(
+
self, content_lines: List[str], entry_id: str, added_nodes: Set[str], indent: int = 0
+
):
+
"""Recursively add nodes to the text display."""
+
if entry_id in added_nodes:
+
# Show cycle reference
+
node = self.graph.nodes.get(entry_id)
+
if node:
+
prefix = " " * indent
+
content_lines.append(f"{prefix}↻ {node.username}: {node.title}")
+
return
+
+
added_nodes.add(entry_id)
+
node = self.graph.nodes.get(entry_id)
+
if not node:
+
return
+
+
# Format node display
+
prefix = " " * indent
+
date_str = node.published_date.strftime("%Y-%m-%d")
+
node_label = f"{prefix}• {node.username}: {node.title} ({date_str})"
+
content_lines.append(node_label)
+
+
# Add connections info
+
if node.outbound_links:
+
links_by_type: Dict[LinkType, List[str]] = {}
+
for url, link_type in node.outbound_links:
+
if link_type not in links_by_type:
+
links_by_type[link_type] = []
+
links_by_type[link_type].append(url)
+
+
for link_type, urls in links_by_type.items():
+
type_label = f"{prefix} → {link_type.value}: {len(urls)} link(s)"
+
content_lines.append(type_label)
+
+
if node.inbound_backlinks:
+
backlink_label = f"{prefix} ← backlinks: {len(node.inbound_backlinks)}"
+
content_lines.append(backlink_label)
+
+
+
class ThreadBrowserApp(App):
+
"""Terminal UI for browsing threads."""
+
+
CSS = """
+
ThreadBrowserApp {
+
background: $surface;
+
}
+
+
#thread-list {
+
width: 1fr;
+
height: 1fr;
+
border: solid $primary;
+
overflow-y: scroll;
+
}
+
+
#entry-detail {
+
width: 1fr;
+
height: 1fr;
+
border: solid $secondary;
+
overflow-y: scroll;
+
padding: 1;
+
}
+
"""
+
+
BINDINGS = [
+
("q", "quit", "Quit"),
+
("j", "next_thread", "Next Thread"),
+
("k", "prev_thread", "Previous Thread"),
+
("enter", "select_thread", "View Thread"),
+
]
+
+
def __init__(self, graph: ThreadGraph):
+
super().__init__()
+
self.graph = graph
+
self.threads = []
+
self.current_thread_index = 0
+
self._build_thread_list()
+
+
def _build_thread_list(self):
+
"""Build the list of threads to display."""
+
# Get connected components (actual threads)
+
components = self.graph.get_connected_components()
+
+
# Sort components by the earliest date in each
+
sorted_components = []
+
for component in components:
+
if len(component) > 1: # Only show actual threads
+
sorted_ids = self.graph.sort_component_chronologically(component)
+
if sorted_ids:
+
first_node = self.graph.nodes.get(sorted_ids[0])
+
if first_node:
+
sorted_components.append((first_node.published_date, component))
+
+
sorted_components.sort(key=lambda x: x[0], reverse=True)
+
self.threads = [comp for _, comp in sorted_components]
+
+
def compose(self) -> ComposeResult:
+
"""Create the UI layout."""
+
yield Header()
+
+
with Horizontal():
+
with Vertical(id="thread-list"):
+
yield Label("Threads", classes="title")
+
for i, thread in enumerate(self.threads):
+
# Get thread summary
+
sorted_ids = self.graph.sort_component_chronologically(thread)
+
if sorted_ids:
+
first_node = self.graph.nodes.get(sorted_ids[0])
+
if first_node:
+
label = f"{i + 1}. {first_node.title} ({len(thread)} posts)"
+
yield Label(label, classes="thread-item")
+
+
with Vertical(id="entry-detail"):
+
if self.threads:
+
yield ThreadTreeWidget(self.threads[0], self.graph)
+
+
yield Footer()
+
+
def action_next_thread(self) -> None:
+
"""Move to next thread."""
+
if self.current_thread_index < len(self.threads) - 1:
+
self.current_thread_index += 1
+
self.update_display()
+
+
def action_prev_thread(self) -> None:
+
"""Move to previous thread."""
+
if self.current_thread_index > 0:
+
self.current_thread_index -= 1
+
self.update_display()
+
+
def action_select_thread(self) -> None:
+
"""View detailed thread."""
+
# In a real implementation, this could show more detail
+
pass
+
+
def update_display(self) -> None:
+
"""Update the thread display."""
+
detail_view = self.query_one("#entry-detail")
+
detail_view.remove_children()
+
+
if self.threads and self.current_thread_index < len(self.threads):
+
widget = ThreadTreeWidget(
+
self.threads[self.current_thread_index], self.graph
+
)
+
detail_view.mount(widget)
+
+
+
@app.command()
+
def threads(
+
config_file: Optional[Path] = typer.Option(
+
Path("thicket.yaml"),
+
"--config",
+
"-c",
+
help="Path to configuration file",
+
),
+
interactive: bool = typer.Option(
+
True,
+
"--interactive/--no-interactive",
+
"-i/-n",
+
help="Launch interactive terminal UI",
+
),
+
web: bool = typer.Option(
+
False,
+
"--web",
+
"-w",
+
help="Launch web server with D3 force graph visualization",
+
),
+
port: int = typer.Option(
+
8080,
+
"--port",
+
"-p",
+
help="Port for web server",
+
),
+
) -> None:
+
"""Browse and visualize thread-graphs of interconnected blog posts.
+
+
This command analyzes all blog entries and their links/backlinks to build
+
a graph of conversations and references between posts. Threads are displayed
+
as connected components in the link graph.
+
"""
+
try:
+
# Load configuration
+
config = load_config(config_file)
+
+
# Initialize Git store
+
git_store = GitStore(config.git_store)
+
+
# Build thread graph
+
console.print("Building thread graph...")
+
graph = build_thread_graph(git_store)
+
+
# Get statistics
+
components = graph.get_connected_components()
+
threads = [c for c in components if len(c) > 1]
+
standalone = graph.get_standalone_entries()
+
+
console.print(
+
f"\n[green]Found {len(threads)} threads and {len(standalone)} standalone posts[/green]"
+
)
+
+
if web:
+
# Launch web server with D3 visualization
+
_launch_web_server(graph, port)
+
elif interactive and threads:
+
# Launch terminal UI
+
app = ThreadBrowserApp(graph)
+
app.run()
+
else:
+
# Display in console
+
if get_tsv_mode():
+
_display_threads_tsv(graph, threads)
+
else:
+
_display_threads_rich(graph, threads)
+
+
except Exception as e:
+
console.print(f"[red]Error building threads: {e}[/red]")
+
raise typer.Exit(1)
+
+
+
def _display_threads_rich(graph: ThreadGraph, threads: List[Set[str]]) -> None:
+
"""Display threads using rich formatting."""
+
for i, thread in enumerate(threads[:10]): # Show first 10 threads
+
sorted_ids = graph.sort_component_chronologically(thread)
+
+
console.print(f"\n[bold]Thread {i + 1}[/bold] ({len(thread)} posts)")
+
+
for j, entry_id in enumerate(sorted_ids):
+
node = graph.nodes.get(entry_id)
+
if node:
+
date_str = node.published_date.strftime("%Y-%m-%d")
+
indent = " " * min(j, 3) # Max 3 levels of indent
+
console.print(f"{indent}• [{node.username}] {node.title} ({date_str})")
+
+
# Show link types
+
if node.outbound_links:
+
link_summary = {}
+
for _, link_type in node.outbound_links:
+
link_summary[link_type] = link_summary.get(link_type, 0) + 1
+
+
link_str = ", ".join(
+
[f"{t.value}:{c}" for t, c in link_summary.items()]
+
)
+
console.print(f"{indent} → Links: {link_str}")
+
+
+
def _display_threads_tsv(graph: ThreadGraph, threads: List[Set[str]]) -> None:
+
"""Display threads in TSV format."""
+
print("Thread\tSize\tFirst Post\tLast Post\tUsers")
+
+
for i, thread in enumerate(threads):
+
sorted_ids = graph.sort_component_chronologically(thread)
+
+
if sorted_ids:
+
first_node = graph.nodes.get(sorted_ids[0])
+
last_node = graph.nodes.get(sorted_ids[-1])
+
+
users = set()
+
for entry_id in thread:
+
node = graph.nodes.get(entry_id)
+
if node:
+
users.add(node.username)
+
+
if first_node and last_node:
+
print(
+
f"{i + 1}\t{len(thread)}\t{first_node.published_date.strftime('%Y-%m-%d')}\t{last_node.published_date.strftime('%Y-%m-%d')}\t{','.join(users)}"
+
)
+
+
+
def _build_graph_json(graph: ThreadGraph) -> dict:
+
"""Convert ThreadGraph to JSON format for D3 visualization."""
+
nodes = []
+
links = []
+
+
# Color mapping for different users
+
user_colors = {}
+
colors = [
+
"#1f77b4", "#ff7f0e", "#2ca02c", "#d62728", "#9467bd",
+
"#8c564b", "#e377c2", "#7f7f7f", "#bcbd22", "#17becf",
+
"#aec7e8", "#ffbb78", "#98df8a", "#ff9896", "#c5b0d5"
+
]
+
+
# Assign colors to users
+
for i, username in enumerate(set(node.username for node in graph.nodes.values())):
+
user_colors[username] = colors[i % len(colors)]
+
+
# Create nodes
+
for entry_id, node in graph.nodes.items():
+
nodes.append({
+
"id": entry_id,
+
"title": node.title,
+
"username": node.username,
+
"date": node.published_date.strftime("%Y-%m-%d"),
+
"summary": node.summary,
+
"color": user_colors[node.username],
+
"outbound_count": len(node.outbound_links),
+
"backlink_count": len(node.inbound_backlinks),
+
"link_types": {
+
"self": len([l for l in node.outbound_links if l[1] == LinkType.SELF_REFERENCE]),
+
"user": len([l for l in node.outbound_links if l[1] == LinkType.USER_REFERENCE]),
+
"external": len([l for l in node.outbound_links if l[1] == LinkType.EXTERNAL])
+
}
+
})
+
+
# Create links (only for links between tracked entries)
+
for entry_id, node in graph.nodes.items():
+
for url, link_type in node.outbound_links:
+
if url in graph.url_to_entry:
+
target_id = graph.url_to_entry[url]
+
if target_id in graph.nodes:
+
links.append({
+
"source": entry_id,
+
"target": target_id,
+
"type": link_type.value,
+
"url": url
+
})
+
+
return {
+
"nodes": nodes,
+
"links": links,
+
"stats": {
+
"total_nodes": len(nodes),
+
"total_links": len(links),
+
"users": list(user_colors.keys()),
+
"user_colors": user_colors
+
}
+
}
+
+
+
def _launch_web_server(graph: ThreadGraph, port: int) -> None:
+
"""Launch Flask web server with D3 force graph visualization."""
+
flask_app = Flask(__name__)
+
+
# Store graph data globally for the Flask app
+
graph_data = _build_graph_json(graph)
+
+
@flask_app.route('/')
+
def index():
+
"""Serve the main visualization page."""
+
return render_template_string(HTML_TEMPLATE, port=port)
+
+
@flask_app.route('/api/graph')
+
def api_graph():
+
"""API endpoint to serve graph data as JSON."""
+
return jsonify(graph_data)
+
+
# Disable Flask logging in development mode
+
import logging
+
log = logging.getLogger('werkzeug')
+
log.setLevel(logging.ERROR)
+
+
def open_browser():
+
"""Open browser after a short delay."""
+
time.sleep(1.5)
+
webbrowser.open(f'http://localhost:{port}')
+
+
# Start browser in a separate thread
+
browser_thread = threading.Thread(target=open_browser)
+
browser_thread.daemon = True
+
browser_thread.start()
+
+
console.print(f"\n[green]Starting web server at http://localhost:{port}[/green]")
+
console.print("[yellow]Press Ctrl+C to stop the server[/yellow]")
+
+
try:
+
flask_app.run(host='0.0.0.0', port=port, debug=False)
+
except KeyboardInterrupt:
+
console.print("\n[green]Server stopped[/green]")
+
+
+
# HTML template for D3 force graph visualization
+
HTML_TEMPLATE = """
+
<!DOCTYPE html>
+
<html lang="en">
+
<head>
+
<meta charset="UTF-8">
+
<meta name="viewport" content="width=device-width, initial-scale=1.0">
+
<title>Thicket Thread Graph Visualization</title>
+
<script src="https://d3js.org/d3.v7.min.js"></script>
+
<style>
+
body {
+
font-family: 'Segoe UI', Tahoma, Geneva, Verdana, sans-serif;
+
margin: 0;
+
padding: 20px;
+
background-color: #f5f5f5;
+
}
+
+
.header {
+
text-align: center;
+
margin-bottom: 20px;
+
}
+
+
h1 {
+
color: #333;
+
margin-bottom: 10px;
+
}
+
+
.controls {
+
display: flex;
+
justify-content: center;
+
gap: 15px;
+
margin-bottom: 20px;
+
flex-wrap: wrap;
+
}
+
+
.control-group {
+
display: flex;
+
align-items: center;
+
gap: 5px;
+
}
+
+
select, input[type="range"] {
+
padding: 5px;
+
border: 1px solid #ddd;
+
border-radius: 4px;
+
}
+
+
.stats {
+
display: flex;
+
justify-content: center;
+
gap: 20px;
+
margin-bottom: 20px;
+
font-size: 14px;
+
color: #666;
+
}
+
+
.stat-item {
+
background: white;
+
padding: 10px 15px;
+
border-radius: 6px;
+
box-shadow: 0 2px 4px rgba(0,0,0,0.1);
+
}
+
+
#graph-container {
+
background: white;
+
border-radius: 8px;
+
box-shadow: 0 4px 6px rgba(0,0,0,0.1);
+
overflow: hidden;
+
}
+
+
#graph {
+
cursor: grab;
+
}
+
+
#graph:active {
+
cursor: grabbing;
+
}
+
+
.node {
+
stroke: #fff;
+
stroke-width: 1.5px;
+
cursor: pointer;
+
}
+
+
.node:hover {
+
stroke: #333;
+
stroke-width: 2px;
+
}
+
+
.link {
+
stroke: #999;
+
stroke-opacity: 0.6;
+
stroke-width: 1px;
+
}
+
+
.link.self-link {
+
stroke: #2ca02c;
+
}
+
+
.link.user-link {
+
stroke: #ff7f0e;
+
}
+
+
.link.external-link {
+
stroke: #d62728;
+
}
+
+
.tooltip {
+
position: absolute;
+
background: rgba(0, 0, 0, 0.9);
+
color: white;
+
padding: 10px;
+
border-radius: 4px;
+
font-size: 12px;
+
line-height: 1.4;
+
pointer-events: none;
+
z-index: 1000;
+
max-width: 300px;
+
}
+
+
.legend {
+
position: fixed;
+
top: 20px;
+
right: 20px;
+
background: white;
+
padding: 15px;
+
border-radius: 6px;
+
box-shadow: 0 2px 8px rgba(0,0,0,0.15);
+
font-size: 12px;
+
z-index: 100;
+
}
+
+
.legend h3 {
+
margin: 0 0 10px 0;
+
font-size: 14px;
+
color: #333;
+
}
+
+
.legend-item {
+
display: flex;
+
align-items: center;
+
margin-bottom: 5px;
+
}
+
+
.legend-color {
+
width: 12px;
+
height: 12px;
+
margin-right: 8px;
+
border-radius: 2px;
+
}
+
+
.legend-line {
+
width: 20px;
+
height: 2px;
+
margin-right: 8px;
+
}
+
</style>
+
</head>
+
<body>
+
<div class="header">
+
<h1>Thicket Thread Graph Visualization</h1>
+
<p>Interactive visualization of blog post connections and conversations</p>
+
</div>
+
+
<div class="controls">
+
<div class="control-group">
+
<label for="userFilter">Filter by user:</label>
+
<select id="userFilter">
+
<option value="all">All Users</option>
+
</select>
+
</div>
+
+
<div class="control-group">
+
<label for="linkFilter">Show links:</label>
+
<select id="linkFilter">
+
<option value="all">All Links</option>
+
<option value="user">User Links Only</option>
+
<option value="self">Self Links Only</option>
+
<option value="external">External Links Only</option>
+
</select>
+
</div>
+
+
<div class="control-group">
+
<label for="forceStrength">Force Strength:</label>
+
<input type="range" id="forceStrength" min="0.1" max="2" step="0.1" value="0.3">
+
</div>
+
+
<div class="control-group">
+
<label for="nodeSize">Node Size:</label>
+
<input type="range" id="nodeSize" min="3" max="15" step="1" value="6">
+
</div>
+
</div>
+
+
<div class="stats" id="stats"></div>
+
+
<div id="graph-container">
+
<svg id="graph"></svg>
+
</div>
+
+
<div class="legend">
+
<h3>Link Types</h3>
+
<div class="legend-item">
+
<div class="legend-line" style="background: #2ca02c;"></div>
+
<span>Self References</span>
+
</div>
+
<div class="legend-item">
+
<div class="legend-line" style="background: #ff7f0e;"></div>
+
<span>User References</span>
+
</div>
+
<div class="legend-item">
+
<div class="legend-line" style="background: #d62728;"></div>
+
<span>External References</span>
+
</div>
+
+
<h3 style="margin-top: 15px;">Interactions</h3>
+
<div style="font-size: 11px; color: #666;">
+
• Hover: Show details<br>
+
• Click: Pin/unpin node<br>
+
• Drag: Move nodes<br>
+
• Zoom: Mouse wheel
+
</div>
+
</div>
+
+
<div class="tooltip" id="tooltip" style="display: none;"></div>
+
+
<script>
+
let graphData;
+
let simulation;
+
let svg, g, link, node;
+
let width = window.innerWidth - 40;
+
let height = window.innerHeight - 200;
+
+
// Initialize the visualization
+
async function init() {
+
// Fetch graph data
+
const response = await fetch('/api/graph');
+
graphData = await response.json();
+
+
// Set up SVG
+
svg = d3.select("#graph")
+
.attr("width", width)
+
.attr("height", height);
+
+
// Add zoom behavior
+
const zoom = d3.zoom()
+
.scaleExtent([0.1, 4])
+
.on("zoom", (event) => {
+
g.attr("transform", event.transform);
+
});
+
+
svg.call(zoom);
+
+
// Create main group for all elements
+
g = svg.append("g");
+
+
// Set up controls
+
setupControls();
+
+
// Initial render
+
updateVisualization();
+
+
// Update stats
+
updateStats();
+
+
// Handle window resize
+
window.addEventListener('resize', () => {
+
width = window.innerWidth - 40;
+
height = window.innerHeight - 200;
+
svg.attr("width", width).attr("height", height);
+
simulation.force("center", d3.forceCenter(width / 2, height / 2));
+
simulation.restart();
+
});
+
}
+
+
function setupControls() {
+
// Populate user filter
+
const userFilter = d3.select("#userFilter");
+
graphData.stats.users.forEach(user => {
+
userFilter.append("option").attr("value", user).text(user);
+
});
+
+
// Add event listeners
+
d3.select("#userFilter").on("change", updateVisualization);
+
d3.select("#linkFilter").on("change", updateVisualization);
+
d3.select("#forceStrength").on("input", updateForces);
+
d3.select("#nodeSize").on("input", updateNodeSizes);
+
}
+
+
function updateVisualization() {
+
// Filter data based on controls
+
const userFilter = d3.select("#userFilter").property("value");
+
const linkFilter = d3.select("#linkFilter").property("value");
+
+
let filteredNodes = graphData.nodes;
+
let filteredLinks = graphData.links;
+
+
if (userFilter !== "all") {
+
filteredNodes = graphData.nodes.filter(n => n.username === userFilter);
+
const nodeIds = new Set(filteredNodes.map(n => n.id));
+
filteredLinks = graphData.links.filter(l =>
+
nodeIds.has(l.source.id || l.source) && nodeIds.has(l.target.id || l.target)
+
);
+
}
+
+
if (linkFilter !== "all") {
+
filteredLinks = filteredLinks.filter(l => l.type === linkFilter);
+
}
+
+
// Clear existing elements
+
g.selectAll(".link").remove();
+
g.selectAll(".node").remove();
+
+
// Create force simulation
+
simulation = d3.forceSimulation(filteredNodes)
+
.force("link", d3.forceLink(filteredLinks).id(d => d.id)
+
.distance(d => {
+
// Get source and target nodes
+
const sourceNode = filteredNodes.find(n => n.id === (d.source.id || d.source));
+
const targetNode = filteredNodes.find(n => n.id === (d.target.id || d.target));
+
+
// If nodes are from different users, make them attract more (shorter distance)
+
if (sourceNode && targetNode && sourceNode.username !== targetNode.username) {
+
return 30; // Shorter distance = stronger attraction
+
}
+
+
// Same user posts have normal distance
+
return 60;
+
})
+
.strength(d => {
+
// Get source and target nodes
+
const sourceNode = filteredNodes.find(n => n.id === (d.source.id || d.source));
+
const targetNode = filteredNodes.find(n => n.id === (d.target.id || d.target));
+
+
// If nodes are from different users, make the link stronger
+
if (sourceNode && targetNode && sourceNode.username !== targetNode.username) {
+
return 1.5; // Stronger link force
+
}
+
+
// Same user posts have normal strength
+
return 1.0;
+
}))
+
.force("charge", d3.forceManyBody().strength(-200))
+
.force("center", d3.forceCenter(width / 2, height / 2))
+
.force("collision", d3.forceCollide().radius(15));
+
+
// Create links
+
link = g.append("g")
+
.selectAll(".link")
+
.data(filteredLinks)
+
.enter().append("line")
+
.attr("class", d => `link ${d.type}-link`)
+
.attr("stroke-width", d => {
+
// Get source and target nodes
+
const sourceNode = filteredNodes.find(n => n.id === (d.source.id || d.source));
+
const targetNode = filteredNodes.find(n => n.id === (d.target.id || d.target));
+
+
// If nodes are from different users, make the line thicker
+
if (sourceNode && targetNode && sourceNode.username !== targetNode.username) {
+
return 2.5; // Thicker line for cross-user connections
+
}
+
+
// Same user posts have normal thickness
+
return 1;
+
});
+
+
// Create nodes
+
node = g.append("g")
+
.selectAll(".node")
+
.data(filteredNodes)
+
.enter().append("circle")
+
.attr("class", "node")
+
.attr("r", d => Math.max(4, Math.log(d.outbound_count + d.backlink_count + 1) * 3))
+
.attr("fill", d => d.color)
+
.call(d3.drag()
+
.on("start", dragstarted)
+
.on("drag", dragged)
+
.on("end", dragended))
+
.on("mouseover", showTooltip)
+
.on("mouseout", hideTooltip)
+
.on("click", togglePin);
+
+
// Update force simulation
+
simulation.on("tick", () => {
+
link
+
.attr("x1", d => d.source.x)
+
.attr("y1", d => d.source.y)
+
.attr("x2", d => d.target.x)
+
.attr("y2", d => d.target.y);
+
+
node
+
.attr("cx", d => d.x)
+
.attr("cy", d => d.y);
+
});
+
+
updateStats(filteredNodes, filteredLinks);
+
}
+
+
function updateForces() {
+
const strength = +d3.select("#forceStrength").property("value");
+
if (simulation) {
+
simulation.force("charge").strength(-200 * strength);
+
simulation.alpha(0.3).restart();
+
}
+
}
+
+
function updateNodeSizes() {
+
const size = +d3.select("#nodeSize").property("value");
+
if (node) {
+
node.attr("r", d => Math.max(size * 0.5, Math.log(d.outbound_count + d.backlink_count + 1) * size * 0.5));
+
}
+
}
+
+
function dragstarted(event, d) {
+
if (!event.active) simulation.alphaTarget(0.3).restart();
+
d.fx = d.x;
+
d.fy = d.y;
+
}
+
+
function dragged(event, d) {
+
d.fx = event.x;
+
d.fy = event.y;
+
}
+
+
function dragended(event, d) {
+
if (!event.active) simulation.alphaTarget(0);
+
if (!d.pinned) {
+
d.fx = null;
+
d.fy = null;
+
}
+
}
+
+
function togglePin(event, d) {
+
d.pinned = !d.pinned;
+
if (d.pinned) {
+
d.fx = d.x;
+
d.fy = d.y;
+
} else {
+
d.fx = null;
+
d.fy = null;
+
}
+
}
+
+
function showTooltip(event, d) {
+
const tooltip = d3.select("#tooltip");
+
tooltip.style("display", "block")
+
.html(`
+
<strong>${d.title}</strong><br>
+
<strong>User:</strong> ${d.username}<br>
+
<strong>Date:</strong> ${d.date}<br>
+
<strong>Outbound Links:</strong> ${d.outbound_count}<br>
+
<strong>Backlinks:</strong> ${d.backlink_count}<br>
+
<strong>Link Types:</strong> Self: ${d.link_types.self}, User: ${d.link_types.user}, External: ${d.link_types.external}
+
${d.summary ? '<br><br>' + d.summary : ''}
+
`)
+
.style("left", (event.pageX + 10) + "px")
+
.style("top", (event.pageY - 10) + "px");
+
}
+
+
function hideTooltip() {
+
d3.select("#tooltip").style("display", "none");
+
}
+
+
function updateStats(nodes = graphData.nodes, links = graphData.links) {
+
const stats = d3.select("#stats");
+
const userCounts = {};
+
nodes.forEach(n => {
+
userCounts[n.username] = (userCounts[n.username] || 0) + 1;
+
});
+
+
stats.html(`
+
<div class="stat-item">
+
<strong>${nodes.length}</strong> Nodes
+
</div>
+
<div class="stat-item">
+
<strong>${links.length}</strong> Links
+
</div>
+
<div class="stat-item">
+
<strong>${Object.keys(userCounts).length}</strong> Users
+
</div>
+
<div class="stat-item">
+
Users: ${Object.entries(userCounts).map(([user, count]) => `${user} (${count})`).join(', ')}
+
</div>
+
`);
+
}
+
+
// Initialize when page loads
+
init();
+
</script>
+
</body>
+
</html>
+
"""
+1 -1
src/thicket/cli/main.py
···
# Import commands to register them
- from .commands import add, duplicates, index_cmd, info_cmd, init, links_cmd, list_cmd, sync
+ from .commands import add, duplicates, info_cmd, init, links_cmd, list_cmd, sync, threads_cmd
if __name__ == "__main__":
app()
-301
src/thicket/core/reference_parser.py
···
-
"""Reference detection and parsing for blog entries."""
-
-
import re
-
from typing import Optional
-
from urllib.parse import urlparse
-
-
from ..models import AtomEntry
-
-
-
class BlogReference:
-
"""Represents a reference from one blog entry to another."""
-
-
def __init__(
-
self,
-
source_entry_id: str,
-
source_username: str,
-
target_url: str,
-
target_username: Optional[str] = None,
-
target_entry_id: Optional[str] = None,
-
):
-
self.source_entry_id = source_entry_id
-
self.source_username = source_username
-
self.target_url = target_url
-
self.target_username = target_username
-
self.target_entry_id = target_entry_id
-
-
def to_dict(self) -> dict:
-
"""Convert to dictionary for JSON serialization."""
-
result = {
-
"source_entry_id": self.source_entry_id,
-
"source_username": self.source_username,
-
"target_url": self.target_url,
-
}
-
-
# Only include optional fields if they are not None
-
if self.target_username is not None:
-
result["target_username"] = self.target_username
-
if self.target_entry_id is not None:
-
result["target_entry_id"] = self.target_entry_id
-
-
return result
-
-
@classmethod
-
def from_dict(cls, data: dict) -> "BlogReference":
-
"""Create from dictionary."""
-
return cls(
-
source_entry_id=data["source_entry_id"],
-
source_username=data["source_username"],
-
target_url=data["target_url"],
-
target_username=data.get("target_username"),
-
target_entry_id=data.get("target_entry_id"),
-
)
-
-
-
class ReferenceIndex:
-
"""Index of blog-to-blog references for creating threaded views."""
-
-
def __init__(self):
-
self.references: list[BlogReference] = []
-
self.outbound_refs: dict[
-
str, list[BlogReference]
-
] = {} # entry_id -> outbound refs
-
self.inbound_refs: dict[
-
str, list[BlogReference]
-
] = {} # entry_id -> inbound refs
-
self.user_domains: dict[str, set[str]] = {} # username -> set of domains
-
-
def add_reference(self, ref: BlogReference) -> None:
-
"""Add a reference to the index."""
-
self.references.append(ref)
-
-
# Update outbound references
-
source_key = f"{ref.source_username}:{ref.source_entry_id}"
-
if source_key not in self.outbound_refs:
-
self.outbound_refs[source_key] = []
-
self.outbound_refs[source_key].append(ref)
-
-
# Update inbound references if we can identify the target
-
if ref.target_username and ref.target_entry_id:
-
target_key = f"{ref.target_username}:{ref.target_entry_id}"
-
if target_key not in self.inbound_refs:
-
self.inbound_refs[target_key] = []
-
self.inbound_refs[target_key].append(ref)
-
-
def get_outbound_refs(self, username: str, entry_id: str) -> list[BlogReference]:
-
"""Get all outbound references from an entry."""
-
key = f"{username}:{entry_id}"
-
return self.outbound_refs.get(key, [])
-
-
def get_inbound_refs(self, username: str, entry_id: str) -> list[BlogReference]:
-
"""Get all inbound references to an entry."""
-
key = f"{username}:{entry_id}"
-
return self.inbound_refs.get(key, [])
-
-
def get_thread_members(self, username: str, entry_id: str) -> set[tuple[str, str]]:
-
"""Get all entries that are part of the same thread."""
-
visited = set()
-
to_visit = [(username, entry_id)]
-
thread_members = set()
-
-
while to_visit:
-
current_user, current_entry = to_visit.pop()
-
if (current_user, current_entry) in visited:
-
continue
-
-
visited.add((current_user, current_entry))
-
thread_members.add((current_user, current_entry))
-
-
# Add outbound references
-
for ref in self.get_outbound_refs(current_user, current_entry):
-
if ref.target_username and ref.target_entry_id:
-
to_visit.append((ref.target_username, ref.target_entry_id))
-
-
# Add inbound references
-
for ref in self.get_inbound_refs(current_user, current_entry):
-
to_visit.append((ref.source_username, ref.source_entry_id))
-
-
return thread_members
-
-
def to_dict(self) -> dict:
-
"""Convert to dictionary for JSON serialization."""
-
return {
-
"references": [ref.to_dict() for ref in self.references],
-
"user_domains": {k: list(v) for k, v in self.user_domains.items()},
-
}
-
-
@classmethod
-
def from_dict(cls, data: dict) -> "ReferenceIndex":
-
"""Create from dictionary."""
-
index = cls()
-
for ref_data in data.get("references", []):
-
ref = BlogReference.from_dict(ref_data)
-
index.add_reference(ref)
-
-
for username, domains in data.get("user_domains", {}).items():
-
index.user_domains[username] = set(domains)
-
-
return index
-
-
-
class ReferenceParser:
-
"""Parses blog entries to detect references to other blogs."""
-
-
def __init__(self):
-
# Common blog platforms and patterns
-
self.blog_patterns = [
-
r"https?://[^/]+\.(?:org|com|net|io|dev|me|co\.uk)/.*", # Common blog domains
-
r"https?://[^/]+\.github\.io/.*", # GitHub Pages
-
r"https?://[^/]+\.substack\.com/.*", # Substack
-
r"https?://medium\.com/.*", # Medium
-
r"https?://[^/]+\.wordpress\.com/.*", # WordPress.com
-
r"https?://[^/]+\.blogspot\.com/.*", # Blogger
-
]
-
-
# Compile regex patterns
-
self.link_pattern = re.compile(
-
r'<a[^>]+href="([^"]+)"[^>]*>(.*?)</a>', re.IGNORECASE | re.DOTALL
-
)
-
self.url_pattern = re.compile(r'https?://[^\s<>"]+')
-
-
def extract_links_from_html(self, html_content: str) -> list[tuple[str, str]]:
-
"""Extract all links from HTML content."""
-
links = []
-
-
# Extract links from <a> tags
-
for match in self.link_pattern.finditer(html_content):
-
url = match.group(1)
-
text = re.sub(
-
r"<[^>]+>", "", match.group(2)
-
).strip() # Remove HTML tags from link text
-
links.append((url, text))
-
-
return links
-
-
def is_blog_url(self, url: str) -> bool:
-
"""Check if a URL likely points to a blog post."""
-
for pattern in self.blog_patterns:
-
if re.match(pattern, url):
-
return True
-
return False
-
-
def resolve_target_user(
-
self, url: str, user_domains: dict[str, set[str]]
-
) -> Optional[str]:
-
"""Try to resolve a URL to a known user based on domain mapping."""
-
parsed_url = urlparse(url)
-
domain = parsed_url.netloc.lower()
-
-
for username, domains in user_domains.items():
-
if domain in domains:
-
return username
-
-
return None
-
-
def extract_references(
-
self, entry: AtomEntry, username: str, user_domains: dict[str, set[str]]
-
) -> list[BlogReference]:
-
"""Extract all blog references from an entry."""
-
references = []
-
-
# Combine all text content for analysis
-
content_to_search = []
-
if entry.content:
-
content_to_search.append(entry.content)
-
if entry.summary:
-
content_to_search.append(entry.summary)
-
-
for content in content_to_search:
-
links = self.extract_links_from_html(content)
-
-
for url, _link_text in links:
-
# Skip internal links (same domain as the entry)
-
entry_domain = (
-
urlparse(str(entry.link)).netloc.lower() if entry.link else ""
-
)
-
link_domain = urlparse(url).netloc.lower()
-
-
if link_domain == entry_domain:
-
continue
-
-
# Check if this looks like a blog URL
-
if not self.is_blog_url(url):
-
continue
-
-
# Try to resolve to a known user
-
target_username = self.resolve_target_user(url, user_domains)
-
-
ref = BlogReference(
-
source_entry_id=entry.id,
-
source_username=username,
-
target_url=url,
-
target_username=target_username,
-
target_entry_id=None, # Will be resolved later if possible
-
)
-
-
references.append(ref)
-
-
return references
-
-
def build_user_domain_mapping(self, git_store: "GitStore") -> dict[str, set[str]]:
-
"""Build mapping of usernames to their known domains."""
-
user_domains = {}
-
index = git_store._load_index()
-
-
for username, user_metadata in index.users.items():
-
domains = set()
-
-
# Add domains from feeds
-
for feed_url in user_metadata.feeds:
-
domain = urlparse(feed_url).netloc.lower()
-
if domain:
-
domains.add(domain)
-
-
# Add domain from homepage
-
if user_metadata.homepage:
-
domain = urlparse(str(user_metadata.homepage)).netloc.lower()
-
if domain:
-
domains.add(domain)
-
-
user_domains[username] = domains
-
-
return user_domains
-
-
def resolve_target_entry_ids(
-
self, references: list[BlogReference], git_store: "GitStore"
-
) -> list[BlogReference]:
-
"""Resolve target_entry_id for references that have target_username but no target_entry_id."""
-
resolved_refs = []
-
-
for ref in references:
-
# If we already have a target_entry_id, keep the reference as-is
-
if ref.target_entry_id is not None:
-
resolved_refs.append(ref)
-
continue
-
-
# If we don't have a target_username, we can't resolve it
-
if ref.target_username is None:
-
resolved_refs.append(ref)
-
continue
-
-
# Try to find the entry by matching the URL
-
entries = git_store.list_entries(ref.target_username)
-
resolved_entry_id = None
-
-
for entry in entries:
-
# Check if the entry's link matches the target URL
-
if entry.link and str(entry.link) == ref.target_url:
-
resolved_entry_id = entry.id
-
break
-
-
# Create a new reference with the resolved target_entry_id
-
resolved_ref = BlogReference(
-
source_entry_id=ref.source_entry_id,
-
source_username=ref.source_username,
-
target_url=ref.target_url,
-
target_username=ref.target_username,
-
target_entry_id=resolved_entry_id,
-
)
-
resolved_refs.append(resolved_ref)
-
-
return resolved_refs
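For context, the href-extraction logic removed above reduces to the anchor-tag regex built in `ReferenceParser.__init__`. A minimal standalone sketch of that same pattern (the sample HTML and printed output are illustrative only, not taken from the codebase):

import re

# Same anchor-tag regex as the removed ReferenceParser.link_pattern.
link_pattern = re.compile(
    r'<a[^>]+href="([^"]+)"[^>]*>(.*?)</a>', re.IGNORECASE | re.DOTALL
)

html = '<p>See <a href="https://example.org/post">this <em>post</em></a>.</p>'
for match in link_pattern.finditer(html):
    url = match.group(1)
    # Strip nested HTML tags from the link text, as the removed parser did.
    text = re.sub(r"<[^>]+>", "", match.group(2)).strip()
    print(url, text)  # -> https://example.org/post this post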
+4 -2
src/thicket/models/feed.py
···
"""Feed and entry models for thicket."""
from datetime import datetime
-
from typing import TYPE_CHECKING, Optional
+
from typing import TYPE_CHECKING, Any, Optional
from pydantic import BaseModel, ConfigDict, EmailStr, HttpUrl
···
summary: Optional[str] = None
content: Optional[str] = None # Full body content from Atom entry
content_type: Optional[str] = "html" # text, html, xhtml
-
author: Optional[dict] = None
+
author: Optional[dict[str, Any]] = None
categories: list[str] = []
rights: Optional[str] = None # Copyright info
source: Optional[str] = None # Source feed URL
+
links: list[str] = [] # URLs mentioned in this entry
+
backlinks: list[str] = [] # Entry IDs that link to this entry
class FeedMetadata(BaseModel):
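The `links` and `backlinks` fields added to the entry model above keep link data in the entry JSON itself: `links` holds outbound URLs mentioned in the entry, `backlinks` holds the IDs of entries that link back to it. A minimal sketch of how such an entry serializes, using a stand-in pydantic model rather than the real `AtomEntry` (the model name and example values are illustrative assumptions):

from typing import Optional

from pydantic import BaseModel


class EntrySketch(BaseModel):
    # Stand-in for AtomEntry with only the fields needed for this example.
    id: str
    content: Optional[str] = None
    links: list[str] = []      # URLs mentioned in this entry
    backlinks: list[str] = []  # entry IDs that link to this entry


entry = EntrySketch(
    id="urn:example:alice-reply",
    content='<p>Replying to <a href="https://bob.example.org/post">Bob</a>.</p>',
    links=["https://bob.example.org/post"],
    backlinks=["urn:example:bob-followup"],
)
print(entry.model_dump_json(indent=2))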
+210
uv.lock
···
]
[[package]]
+
name = "blinker"
+
version = "1.9.0"
+
source = { registry = "https://pypi.org/simple" }
+
sdist = { url = "https://files.pythonhosted.org/packages/21/28/9b3f50ce0e048515135495f198351908d99540d69bfdc8c1d15b73dc55ce/blinker-1.9.0.tar.gz", hash = "sha256:b4ce2265a7abece45e7cc896e98dbebe6cead56bcf805a3d23136d145f5445bf", size = 22460, upload-time = "2024-11-08T17:25:47.436Z" }
+
wheels = [
+
{ url = "https://files.pythonhosted.org/packages/10/cb/f2ad4230dc2eb1a74edf38f1a38b9b52277f75bef262d8908e60d957e13c/blinker-1.9.0-py3-none-any.whl", hash = "sha256:ba0efaa9080b619ff2f3459d1d500c57bddea4a6b424b60a91141db6fd2f08bc", size = 8458, upload-time = "2024-11-08T17:25:46.184Z" },
+
]
+
+
[[package]]
name = "certifi"
version = "2025.7.14"
source = { registry = "https://pypi.org/simple" }
···
]
[[package]]
+
name = "flask"
+
version = "3.1.1"
+
source = { registry = "https://pypi.org/simple" }
+
dependencies = [
+
{ name = "blinker" },
+
{ name = "click", version = "8.1.8", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" },
+
{ name = "click", version = "8.2.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" },
+
{ name = "importlib-metadata", marker = "python_full_version < '3.10'" },
+
{ name = "itsdangerous" },
+
{ name = "jinja2" },
+
{ name = "markupsafe" },
+
{ name = "werkzeug" },
+
]
+
sdist = { url = "https://files.pythonhosted.org/packages/c0/de/e47735752347f4128bcf354e0da07ef311a78244eba9e3dc1d4a5ab21a98/flask-3.1.1.tar.gz", hash = "sha256:284c7b8f2f58cb737f0cf1c30fd7eaf0ccfcde196099d24ecede3fc2005aa59e", size = 753440, upload-time = "2025-05-13T15:01:17.447Z" }
+
wheels = [
+
{ url = "https://files.pythonhosted.org/packages/3d/68/9d4508e893976286d2ead7f8f571314af6c2037af34853a30fd769c02e9d/flask-3.1.1-py3-none-any.whl", hash = "sha256:07aae2bb5eaf77993ef57e357491839f5fd9f4dc281593a81a9e4d79a24f295c", size = 103305, upload-time = "2025-05-13T15:01:15.591Z" },
+
]
+
+
[[package]]
name = "gitdb"
version = "4.0.12"
source = { registry = "https://pypi.org/simple" }
···
]
[[package]]
+
name = "importlib-metadata"
+
version = "8.7.0"
+
source = { registry = "https://pypi.org/simple" }
+
dependencies = [
+
{ name = "zipp", marker = "python_full_version < '3.10'" },
+
]
+
sdist = { url = "https://files.pythonhosted.org/packages/76/66/650a33bd90f786193e4de4b3ad86ea60b53c89b669a5c7be931fac31cdb0/importlib_metadata-8.7.0.tar.gz", hash = "sha256:d13b81ad223b890aa16c5471f2ac3056cf76c5f10f82d6f9292f0b415f389000", size = 56641, upload-time = "2025-04-27T15:29:01.736Z" }
+
wheels = [
+
{ url = "https://files.pythonhosted.org/packages/20/b0/36bd937216ec521246249be3bf9855081de4c5e06a0c9b4219dbeda50373/importlib_metadata-8.7.0-py3-none-any.whl", hash = "sha256:e5dd1551894c77868a30651cef00984d50e1002d06942a7101d34870c5f02afd", size = 27656, upload-time = "2025-04-27T15:29:00.214Z" },
+
]
+
+
[[package]]
name = "iniconfig"
version = "2.1.0"
source = { registry = "https://pypi.org/simple" }
···
]
[[package]]
+
name = "itsdangerous"
+
version = "2.2.0"
+
source = { registry = "https://pypi.org/simple" }
+
sdist = { url = "https://files.pythonhosted.org/packages/9c/cb/8ac0172223afbccb63986cc25049b154ecfb5e85932587206f42317be31d/itsdangerous-2.2.0.tar.gz", hash = "sha256:e0050c0b7da1eea53ffaf149c0cfbb5c6e2e2b69c4bef22c81fa6eb73e5f6173", size = 54410, upload-time = "2024-04-16T21:28:15.614Z" }
+
wheels = [
+
{ url = "https://files.pythonhosted.org/packages/04/96/92447566d16df59b2a776c0fb82dbc4d9e07cd95062562af01e408583fc4/itsdangerous-2.2.0-py3-none-any.whl", hash = "sha256:c6242fc49e35958c8b15141343aa660db5fc54d4f13a1db01a3f5891b98700ef", size = 16234, upload-time = "2024-04-16T21:28:14.499Z" },
+
]
+
+
[[package]]
+
name = "jinja2"
+
version = "3.1.6"
+
source = { registry = "https://pypi.org/simple" }
+
dependencies = [
+
{ name = "markupsafe" },
+
]
+
sdist = { url = "https://files.pythonhosted.org/packages/df/bf/f7da0350254c0ed7c72f3e33cef02e048281fec7ecec5f032d4aac52226b/jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d", size = 245115, upload-time = "2025-03-05T20:05:02.478Z" }
+
wheels = [
+
{ url = "https://files.pythonhosted.org/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67", size = 134899, upload-time = "2025-03-05T20:05:00.369Z" },
+
]
+
+
[[package]]
+
name = "linkify-it-py"
+
version = "2.0.3"
+
source = { registry = "https://pypi.org/simple" }
+
dependencies = [
+
{ name = "uc-micro-py" },
+
]
+
sdist = { url = "https://files.pythonhosted.org/packages/2a/ae/bb56c6828e4797ba5a4821eec7c43b8bf40f69cda4d4f5f8c8a2810ec96a/linkify-it-py-2.0.3.tar.gz", hash = "sha256:68cda27e162e9215c17d786649d1da0021a451bdc436ef9e0fa0ba5234b9b048", size = 27946, upload-time = "2024-02-04T14:48:04.179Z" }
+
wheels = [
+
{ url = "https://files.pythonhosted.org/packages/04/1e/b832de447dee8b582cac175871d2f6c3d5077cc56d5575cadba1fd1cccfa/linkify_it_py-2.0.3-py3-none-any.whl", hash = "sha256:6bcbc417b0ac14323382aef5c5192c0075bf8a9d6b41820a2b66371eac6b6d79", size = 19820, upload-time = "2024-02-04T14:48:02.496Z" },
+
]
+
+
[[package]]
name = "markdown-it-py"
version = "3.0.0"
source = { registry = "https://pypi.org/simple" }
···
{ url = "https://files.pythonhosted.org/packages/42/d7/1ec15b46af6af88f19b8e5ffea08fa375d433c998b8a7639e76935c14f1f/markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1", size = 87528, upload-time = "2023-06-03T06:41:11.019Z" },
]
+
[package.optional-dependencies]
+
linkify = [
+
{ name = "linkify-it-py" },
+
]
+
plugins = [
+
{ name = "mdit-py-plugins" },
+
]
+
+
[[package]]
+
name = "markupsafe"
+
version = "3.0.2"
+
source = { registry = "https://pypi.org/simple" }
+
sdist = { url = "https://files.pythonhosted.org/packages/b2/97/5d42485e71dfc078108a86d6de8fa46db44a1a9295e89c5d6d4a06e23a62/markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0", size = 20537, upload-time = "2024-10-18T15:21:54.129Z" }
+
wheels = [
+
{ url = "https://files.pythonhosted.org/packages/04/90/d08277ce111dd22f77149fd1a5d4653eeb3b3eaacbdfcbae5afb2600eebd/MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8", size = 14357, upload-time = "2024-10-18T15:20:51.44Z" },
+
{ url = "https://files.pythonhosted.org/packages/04/e1/6e2194baeae0bca1fae6629dc0cbbb968d4d941469cbab11a3872edff374/MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158", size = 12393, upload-time = "2024-10-18T15:20:52.426Z" },
+
{ url = "https://files.pythonhosted.org/packages/1d/69/35fa85a8ece0a437493dc61ce0bb6d459dcba482c34197e3efc829aa357f/MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38a9ef736c01fccdd6600705b09dc574584b89bea478200c5fbf112a6b0d5579", size = 21732, upload-time = "2024-10-18T15:20:53.578Z" },
+
{ url = "https://files.pythonhosted.org/packages/22/35/137da042dfb4720b638d2937c38a9c2df83fe32d20e8c8f3185dbfef05f7/MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbcb445fa71794da8f178f0f6d66789a28d7319071af7a496d4d507ed566270d", size = 20866, upload-time = "2024-10-18T15:20:55.06Z" },
+
{ url = "https://files.pythonhosted.org/packages/29/28/6d029a903727a1b62edb51863232152fd335d602def598dade38996887f0/MarkupSafe-3.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57cb5a3cf367aeb1d316576250f65edec5bb3be939e9247ae594b4bcbc317dfb", size = 20964, upload-time = "2024-10-18T15:20:55.906Z" },
+
{ url = "https://files.pythonhosted.org/packages/cc/cd/07438f95f83e8bc028279909d9c9bd39e24149b0d60053a97b2bc4f8aa51/MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3809ede931876f5b2ec92eef964286840ed3540dadf803dd570c3b7e13141a3b", size = 21977, upload-time = "2024-10-18T15:20:57.189Z" },
+
{ url = "https://files.pythonhosted.org/packages/29/01/84b57395b4cc062f9c4c55ce0df7d3108ca32397299d9df00fedd9117d3d/MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e07c3764494e3776c602c1e78e298937c3315ccc9043ead7e685b7f2b8d47b3c", size = 21366, upload-time = "2024-10-18T15:20:58.235Z" },
+
{ url = "https://files.pythonhosted.org/packages/bd/6e/61ebf08d8940553afff20d1fb1ba7294b6f8d279df9fd0c0db911b4bbcfd/MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b424c77b206d63d500bcb69fa55ed8d0e6a3774056bdc4839fc9298a7edca171", size = 21091, upload-time = "2024-10-18T15:20:59.235Z" },
+
{ url = "https://files.pythonhosted.org/packages/11/23/ffbf53694e8c94ebd1e7e491de185124277964344733c45481f32ede2499/MarkupSafe-3.0.2-cp310-cp310-win32.whl", hash = "sha256:fcabf5ff6eea076f859677f5f0b6b5c1a51e70a376b0579e0eadef8db48c6b50", size = 15065, upload-time = "2024-10-18T15:21:00.307Z" },
+
{ url = "https://files.pythonhosted.org/packages/44/06/e7175d06dd6e9172d4a69a72592cb3f7a996a9c396eee29082826449bbc3/MarkupSafe-3.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:6af100e168aa82a50e186c82875a5893c5597a0c1ccdb0d8b40240b1f28b969a", size = 15514, upload-time = "2024-10-18T15:21:01.122Z" },
+
{ url = "https://files.pythonhosted.org/packages/6b/28/bbf83e3f76936960b850435576dd5e67034e200469571be53f69174a2dfd/MarkupSafe-3.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d", size = 14353, upload-time = "2024-10-18T15:21:02.187Z" },
+
{ url = "https://files.pythonhosted.org/packages/6c/30/316d194b093cde57d448a4c3209f22e3046c5bb2fb0820b118292b334be7/MarkupSafe-3.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93", size = 12392, upload-time = "2024-10-18T15:21:02.941Z" },
+
{ url = "https://files.pythonhosted.org/packages/f2/96/9cdafba8445d3a53cae530aaf83c38ec64c4d5427d975c974084af5bc5d2/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832", size = 23984, upload-time = "2024-10-18T15:21:03.953Z" },
+
{ url = "https://files.pythonhosted.org/packages/f1/a4/aefb044a2cd8d7334c8a47d3fb2c9f328ac48cb349468cc31c20b539305f/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84", size = 23120, upload-time = "2024-10-18T15:21:06.495Z" },
+
{ url = "https://files.pythonhosted.org/packages/8d/21/5e4851379f88f3fad1de30361db501300d4f07bcad047d3cb0449fc51f8c/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca", size = 23032, upload-time = "2024-10-18T15:21:07.295Z" },
+
{ url = "https://files.pythonhosted.org/packages/00/7b/e92c64e079b2d0d7ddf69899c98842f3f9a60a1ae72657c89ce2655c999d/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798", size = 24057, upload-time = "2024-10-18T15:21:08.073Z" },
+
{ url = "https://files.pythonhosted.org/packages/f9/ac/46f960ca323037caa0a10662ef97d0a4728e890334fc156b9f9e52bcc4ca/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e", size = 23359, upload-time = "2024-10-18T15:21:09.318Z" },
+
{ url = "https://files.pythonhosted.org/packages/69/84/83439e16197337b8b14b6a5b9c2105fff81d42c2a7c5b58ac7b62ee2c3b1/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4", size = 23306, upload-time = "2024-10-18T15:21:10.185Z" },
+
{ url = "https://files.pythonhosted.org/packages/9a/34/a15aa69f01e2181ed8d2b685c0d2f6655d5cca2c4db0ddea775e631918cd/MarkupSafe-3.0.2-cp311-cp311-win32.whl", hash = "sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d", size = 15094, upload-time = "2024-10-18T15:21:11.005Z" },
+
{ url = "https://files.pythonhosted.org/packages/da/b8/3a3bd761922d416f3dc5d00bfbed11f66b1ab89a0c2b6e887240a30b0f6b/MarkupSafe-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b", size = 15521, upload-time = "2024-10-18T15:21:12.911Z" },
+
{ url = "https://files.pythonhosted.org/packages/22/09/d1f21434c97fc42f09d290cbb6350d44eb12f09cc62c9476effdb33a18aa/MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf", size = 14274, upload-time = "2024-10-18T15:21:13.777Z" },
+
{ url = "https://files.pythonhosted.org/packages/6b/b0/18f76bba336fa5aecf79d45dcd6c806c280ec44538b3c13671d49099fdd0/MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225", size = 12348, upload-time = "2024-10-18T15:21:14.822Z" },
+
{ url = "https://files.pythonhosted.org/packages/e0/25/dd5c0f6ac1311e9b40f4af06c78efde0f3b5cbf02502f8ef9501294c425b/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028", size = 24149, upload-time = "2024-10-18T15:21:15.642Z" },
+
{ url = "https://files.pythonhosted.org/packages/f3/f0/89e7aadfb3749d0f52234a0c8c7867877876e0a20b60e2188e9850794c17/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8", size = 23118, upload-time = "2024-10-18T15:21:17.133Z" },
+
{ url = "https://files.pythonhosted.org/packages/d5/da/f2eeb64c723f5e3777bc081da884b414671982008c47dcc1873d81f625b6/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c", size = 22993, upload-time = "2024-10-18T15:21:18.064Z" },
+
{ url = "https://files.pythonhosted.org/packages/da/0e/1f32af846df486dce7c227fe0f2398dc7e2e51d4a370508281f3c1c5cddc/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557", size = 24178, upload-time = "2024-10-18T15:21:18.859Z" },
+
{ url = "https://files.pythonhosted.org/packages/c4/f6/bb3ca0532de8086cbff5f06d137064c8410d10779c4c127e0e47d17c0b71/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22", size = 23319, upload-time = "2024-10-18T15:21:19.671Z" },
+
{ url = "https://files.pythonhosted.org/packages/a2/82/8be4c96ffee03c5b4a034e60a31294daf481e12c7c43ab8e34a1453ee48b/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48", size = 23352, upload-time = "2024-10-18T15:21:20.971Z" },
+
{ url = "https://files.pythonhosted.org/packages/51/ae/97827349d3fcffee7e184bdf7f41cd6b88d9919c80f0263ba7acd1bbcb18/MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = "sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30", size = 15097, upload-time = "2024-10-18T15:21:22.646Z" },
+
{ url = "https://files.pythonhosted.org/packages/c1/80/a61f99dc3a936413c3ee4e1eecac96c0da5ed07ad56fd975f1a9da5bc630/MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87", size = 15601, upload-time = "2024-10-18T15:21:23.499Z" },
+
{ url = "https://files.pythonhosted.org/packages/83/0e/67eb10a7ecc77a0c2bbe2b0235765b98d164d81600746914bebada795e97/MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd", size = 14274, upload-time = "2024-10-18T15:21:24.577Z" },
+
{ url = "https://files.pythonhosted.org/packages/2b/6d/9409f3684d3335375d04e5f05744dfe7e9f120062c9857df4ab490a1031a/MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430", size = 12352, upload-time = "2024-10-18T15:21:25.382Z" },
+
{ url = "https://files.pythonhosted.org/packages/d2/f5/6eadfcd3885ea85fe2a7c128315cc1bb7241e1987443d78c8fe712d03091/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094", size = 24122, upload-time = "2024-10-18T15:21:26.199Z" },
+
{ url = "https://files.pythonhosted.org/packages/0c/91/96cf928db8236f1bfab6ce15ad070dfdd02ed88261c2afafd4b43575e9e9/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396", size = 23085, upload-time = "2024-10-18T15:21:27.029Z" },
+
{ url = "https://files.pythonhosted.org/packages/c2/cf/c9d56af24d56ea04daae7ac0940232d31d5a8354f2b457c6d856b2057d69/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79", size = 22978, upload-time = "2024-10-18T15:21:27.846Z" },
+
{ url = "https://files.pythonhosted.org/packages/2a/9f/8619835cd6a711d6272d62abb78c033bda638fdc54c4e7f4272cf1c0962b/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a", size = 24208, upload-time = "2024-10-18T15:21:28.744Z" },
+
{ url = "https://files.pythonhosted.org/packages/f9/bf/176950a1792b2cd2102b8ffeb5133e1ed984547b75db47c25a67d3359f77/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca", size = 23357, upload-time = "2024-10-18T15:21:29.545Z" },
+
{ url = "https://files.pythonhosted.org/packages/ce/4f/9a02c1d335caabe5c4efb90e1b6e8ee944aa245c1aaaab8e8a618987d816/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c", size = 23344, upload-time = "2024-10-18T15:21:30.366Z" },
+
{ url = "https://files.pythonhosted.org/packages/ee/55/c271b57db36f748f0e04a759ace9f8f759ccf22b4960c270c78a394f58be/MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1", size = 15101, upload-time = "2024-10-18T15:21:31.207Z" },
+
{ url = "https://files.pythonhosted.org/packages/29/88/07df22d2dd4df40aba9f3e402e6dc1b8ee86297dddbad4872bd5e7b0094f/MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f", size = 15603, upload-time = "2024-10-18T15:21:32.032Z" },
+
{ url = "https://files.pythonhosted.org/packages/62/6a/8b89d24db2d32d433dffcd6a8779159da109842434f1dd2f6e71f32f738c/MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c", size = 14510, upload-time = "2024-10-18T15:21:33.625Z" },
+
{ url = "https://files.pythonhosted.org/packages/7a/06/a10f955f70a2e5a9bf78d11a161029d278eeacbd35ef806c3fd17b13060d/MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb", size = 12486, upload-time = "2024-10-18T15:21:34.611Z" },
+
{ url = "https://files.pythonhosted.org/packages/34/cf/65d4a571869a1a9078198ca28f39fba5fbb910f952f9dbc5220afff9f5e6/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c", size = 25480, upload-time = "2024-10-18T15:21:35.398Z" },
+
{ url = "https://files.pythonhosted.org/packages/0c/e3/90e9651924c430b885468b56b3d597cabf6d72be4b24a0acd1fa0e12af67/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d", size = 23914, upload-time = "2024-10-18T15:21:36.231Z" },
+
{ url = "https://files.pythonhosted.org/packages/66/8c/6c7cf61f95d63bb866db39085150df1f2a5bd3335298f14a66b48e92659c/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe", size = 23796, upload-time = "2024-10-18T15:21:37.073Z" },
+
{ url = "https://files.pythonhosted.org/packages/bb/35/cbe9238ec3f47ac9a7c8b3df7a808e7cb50fe149dc7039f5f454b3fba218/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5", size = 25473, upload-time = "2024-10-18T15:21:37.932Z" },
+
{ url = "https://files.pythonhosted.org/packages/e6/32/7621a4382488aa283cc05e8984a9c219abad3bca087be9ec77e89939ded9/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a", size = 24114, upload-time = "2024-10-18T15:21:39.799Z" },
+
{ url = "https://files.pythonhosted.org/packages/0d/80/0985960e4b89922cb5a0bac0ed39c5b96cbc1a536a99f30e8c220a996ed9/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9", size = 24098, upload-time = "2024-10-18T15:21:40.813Z" },
+
{ url = "https://files.pythonhosted.org/packages/82/78/fedb03c7d5380df2427038ec8d973587e90561b2d90cd472ce9254cf348b/MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6", size = 15208, upload-time = "2024-10-18T15:21:41.814Z" },
+
{ url = "https://files.pythonhosted.org/packages/4f/65/6079a46068dfceaeabb5dcad6d674f5f5c61a6fa5673746f42a9f4c233b3/MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f", size = 15739, upload-time = "2024-10-18T15:21:42.784Z" },
+
{ url = "https://files.pythonhosted.org/packages/a7/ea/9b1530c3fdeeca613faeb0fb5cbcf2389d816072fab72a71b45749ef6062/MarkupSafe-3.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:eaa0a10b7f72326f1372a713e73c3f739b524b3af41feb43e4921cb529f5929a", size = 14344, upload-time = "2024-10-18T15:21:43.721Z" },
+
{ url = "https://files.pythonhosted.org/packages/4b/c2/fbdbfe48848e7112ab05e627e718e854d20192b674952d9042ebd8c9e5de/MarkupSafe-3.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:48032821bbdf20f5799ff537c7ac3d1fba0ba032cfc06194faffa8cda8b560ff", size = 12389, upload-time = "2024-10-18T15:21:44.666Z" },
+
{ url = "https://files.pythonhosted.org/packages/f0/25/7a7c6e4dbd4f867d95d94ca15449e91e52856f6ed1905d58ef1de5e211d0/MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a9d3f5f0901fdec14d8d2f66ef7d035f2157240a433441719ac9a3fba440b13", size = 21607, upload-time = "2024-10-18T15:21:45.452Z" },
+
{ url = "https://files.pythonhosted.org/packages/53/8f/f339c98a178f3c1e545622206b40986a4c3307fe39f70ccd3d9df9a9e425/MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88b49a3b9ff31e19998750c38e030fc7bb937398b1f78cfa599aaef92d693144", size = 20728, upload-time = "2024-10-18T15:21:46.295Z" },
+
{ url = "https://files.pythonhosted.org/packages/1a/03/8496a1a78308456dbd50b23a385c69b41f2e9661c67ea1329849a598a8f9/MarkupSafe-3.0.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cfad01eed2c2e0c01fd0ecd2ef42c492f7f93902e39a42fc9ee1692961443a29", size = 20826, upload-time = "2024-10-18T15:21:47.134Z" },
+
{ url = "https://files.pythonhosted.org/packages/e6/cf/0a490a4bd363048c3022f2f475c8c05582179bb179defcee4766fb3dcc18/MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1225beacc926f536dc82e45f8a4d68502949dc67eea90eab715dea3a21c1b5f0", size = 21843, upload-time = "2024-10-18T15:21:48.334Z" },
+
{ url = "https://files.pythonhosted.org/packages/19/a3/34187a78613920dfd3cdf68ef6ce5e99c4f3417f035694074beb8848cd77/MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:3169b1eefae027567d1ce6ee7cae382c57fe26e82775f460f0b2778beaad66c0", size = 21219, upload-time = "2024-10-18T15:21:49.587Z" },
+
{ url = "https://files.pythonhosted.org/packages/17/d8/5811082f85bb88410ad7e452263af048d685669bbbfb7b595e8689152498/MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:eb7972a85c54febfb25b5c4b4f3af4dcc731994c7da0d8a0b4a6eb0640e1d178", size = 20946, upload-time = "2024-10-18T15:21:50.441Z" },
+
{ url = "https://files.pythonhosted.org/packages/7c/31/bd635fb5989440d9365c5e3c47556cfea121c7803f5034ac843e8f37c2f2/MarkupSafe-3.0.2-cp39-cp39-win32.whl", hash = "sha256:8c4e8c3ce11e1f92f6536ff07154f9d49677ebaaafc32db9db4620bc11ed480f", size = 15063, upload-time = "2024-10-18T15:21:51.385Z" },
+
{ url = "https://files.pythonhosted.org/packages/b3/73/085399401383ce949f727afec55ec3abd76648d04b9f22e1c0e99cb4bec3/MarkupSafe-3.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:6e296a513ca3d94054c2c881cc913116e90fd030ad1c656b3869762b754f5f8a", size = 15506, upload-time = "2024-10-18T15:21:52.974Z" },
+
]
+
+
[[package]]
+
name = "mdit-py-plugins"
+
version = "0.4.2"
+
source = { registry = "https://pypi.org/simple" }
+
dependencies = [
+
{ name = "markdown-it-py" },
+
]
+
sdist = { url = "https://files.pythonhosted.org/packages/19/03/a2ecab526543b152300717cf232bb4bb8605b6edb946c845016fa9c9c9fd/mdit_py_plugins-0.4.2.tar.gz", hash = "sha256:5f2cd1fdb606ddf152d37ec30e46101a60512bc0e5fa1a7002c36647b09e26b5", size = 43542, upload-time = "2024-09-09T20:27:49.564Z" }
+
wheels = [
+
{ url = "https://files.pythonhosted.org/packages/a7/f7/7782a043553ee469c1ff49cfa1cdace2d6bf99a1f333cf38676b3ddf30da/mdit_py_plugins-0.4.2-py3-none-any.whl", hash = "sha256:0c673c3f889399a33b95e88d2f0d111b4447bdfea7f237dab2d488f459835636", size = 55316, upload-time = "2024-09-09T20:27:48.397Z" },
+
]
+
[[package]]
name = "mdurl"
version = "0.1.2"
···
]
[[package]]
+
name = "textual"
+
version = "4.0.0"
+
source = { registry = "https://pypi.org/simple" }
+
dependencies = [
+
{ name = "markdown-it-py", extra = ["linkify", "plugins"] },
+
{ name = "platformdirs" },
+
{ name = "rich" },
+
{ name = "typing-extensions" },
+
]
+
sdist = { url = "https://files.pythonhosted.org/packages/f1/22/a2812ab1e5b0cb3a327a4ea79b430234c2271ba13462b989f435b40a247d/textual-4.0.0.tar.gz", hash = "sha256:1cab4ea3cfc0e47ae773405cdd6bc2a17ed76ff7b648379ac8017ea89c5ad28c", size = 1606128, upload-time = "2025-07-12T09:41:20.812Z" }
+
wheels = [
+
{ url = "https://files.pythonhosted.org/packages/d8/e4/ebe27c54d2534cc41d00ea1d78b783763f97abf3e3d6dd41e5536daa52a5/textual-4.0.0-py3-none-any.whl", hash = "sha256:214051640f890676a670aa7d29cd2a37d27cfe6b2cf866e9d5abc3b6c89c5800", size = 692382, upload-time = "2025-07-12T09:41:18.828Z" },
+
]
+
+
[[package]]
name = "thicket"
source = { editable = "." }
dependencies = [
{ name = "bleach" },
{ name = "email-validator" },
{ name = "feedparser" },
+
{ name = "flask" },
{ name = "gitpython" },
{ name = "httpx" },
{ name = "pendulum" },
···
{ name = "pydantic-settings" },
{ name = "pyyaml" },
{ name = "rich" },
+
{ name = "textual" },
{ name = "typer" },
]
···
{ name = "bleach", specifier = ">=6.0.0" },
{ name = "email-validator" },
{ name = "feedparser", specifier = ">=6.0.11" },
+
{ name = "flask", specifier = ">=3.1.1" },
{ name = "gitpython", specifier = ">=3.1.40" },
{ name = "httpx", specifier = ">=0.28.0" },
{ name = "mypy", marker = "extra == 'dev'", specifier = ">=1.13.0" },
···
{ name = "pyyaml", specifier = ">=6.0.0" },
{ name = "rich", specifier = ">=13.0.0" },
{ name = "ruff", marker = "extra == 'dev'", specifier = ">=0.8.0" },
+
{ name = "textual", specifier = ">=4.0.0" },
{ name = "typer", specifier = ">=0.15.0" },
{ name = "types-pyyaml", marker = "extra == 'dev'", specifier = ">=6.0.0" },
]
···
[[package]]
+
name = "uc-micro-py"
+
version = "1.0.3"
+
source = { registry = "https://pypi.org/simple" }
+
sdist = { url = "https://files.pythonhosted.org/packages/91/7a/146a99696aee0609e3712f2b44c6274566bc368dfe8375191278045186b8/uc-micro-py-1.0.3.tar.gz", hash = "sha256:d321b92cff673ec58027c04015fcaa8bb1e005478643ff4a500882eaab88c48a", size = 6043, upload-time = "2024-02-09T16:52:01.654Z" }
+
wheels = [
+
{ url = "https://files.pythonhosted.org/packages/37/87/1f677586e8ac487e29672e4b17455758fce261de06a0d086167bb760361a/uc_micro_py-1.0.3-py3-none-any.whl", hash = "sha256:db1dffff340817673d7b466ec86114a9dc0e9d4d9b5ba229d9d60e5c12600cd5", size = 6229, upload-time = "2024-02-09T16:52:00.371Z" },
+
]
+
+
[[package]]
name = "webencodings"
version = "0.5.1"
source = { registry = "https://pypi.org/simple" }
···
wheels = [
{ url = "https://files.pythonhosted.org/packages/f4/24/2a3e3df732393fed8b3ebf2ec078f05546de641fe1b667ee316ec1dcf3b7/webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78", size = 11774, upload-time = "2017-04-05T20:21:32.581Z" },
+
+
[[package]]
+
name = "werkzeug"
+
version = "3.1.3"
+
source = { registry = "https://pypi.org/simple" }
+
dependencies = [
+
{ name = "markupsafe" },
+
]
+
sdist = { url = "https://files.pythonhosted.org/packages/9f/69/83029f1f6300c5fb2471d621ab06f6ec6b3324685a2ce0f9777fd4a8b71e/werkzeug-3.1.3.tar.gz", hash = "sha256:60723ce945c19328679790e3282cc758aa4a6040e4bb330f53d30fa546d44746", size = 806925, upload-time = "2024-11-08T15:52:18.093Z" }
+
wheels = [
+
{ url = "https://files.pythonhosted.org/packages/52/24/ab44c871b0f07f491e5d2ad12c9bd7358e527510618cb1b803a88e986db1/werkzeug-3.1.3-py3-none-any.whl", hash = "sha256:54b78bf3716d19a65be4fceccc0d1d7b89e608834989dfae50ea87564639213e", size = 224498, upload-time = "2024-11-08T15:52:16.132Z" },
+
]
+
+
[[package]]
+
name = "zipp"
+
version = "3.23.0"
+
source = { registry = "https://pypi.org/simple" }
+
sdist = { url = "https://files.pythonhosted.org/packages/e3/02/0f2892c661036d50ede074e376733dca2ae7c6eb617489437771209d4180/zipp-3.23.0.tar.gz", hash = "sha256:a07157588a12518c9d4034df3fbbee09c814741a33ff63c05fa29d26a2404166", size = 25547, upload-time = "2025-06-08T17:06:39.4Z" }
+
wheels = [
+
{ url = "https://files.pythonhosted.org/packages/2e/54/647ade08bf0db230bfea292f893923872fd20be6ac6f53b2b936ba839d75/zipp-3.23.0-py3-none-any.whl", hash = "sha256:071652d6115ed432f5ce1d34c336c0adfd6a884660d1e9712a256d3d3bd4b14e", size = 10276, upload-time = "2025-06-08T17:06:38.034Z" },
+
]