Manage Atom feeds in a persistent git repository


+205
.gitignore
···
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[codz]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+share/python-wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+MANIFEST
+
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.nox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+*.py.cover
+.hypothesis/
+.pytest_cache/
+cover/
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+local_settings.py
+db.sqlite3
+db.sqlite3-journal
+
+# Flask stuff:
+instance/
+.webassets-cache
+
+# Scrapy stuff:
+.scrapy
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+.pybuilder/
+target/
+
+# Jupyter Notebook
+.ipynb_checkpoints
+
+# IPython
+profile_default/
+ipython_config.py
+
+# pyenv
+# For a library or package, you might want to ignore these files since the code is
+# intended to run in multiple environments; otherwise, check them in:
+# .python-version
+
+# pipenv
+# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
+# However, in case of collaboration, if having platform-specific dependencies or dependencies
+# having no cross-platform support, pipenv may install dependencies that don't work, or not
+# install all needed dependencies.
+#Pipfile.lock
+
+# UV
+# Similar to Pipfile.lock, it is generally recommended to include uv.lock in version control.
+# This is especially recommended for binary packages to ensure reproducibility, and is more
+# commonly ignored for libraries.
+#uv.lock
+
+# poetry
+# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
+# This is especially recommended for binary packages to ensure reproducibility, and is more
+# commonly ignored for libraries.
+# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
+#poetry.lock
+#poetry.toml
+
+# pdm
+# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
+# pdm recommends including project-wide configuration in pdm.toml, but excluding .pdm-python.
+# https://pdm-project.org/en/latest/usage/project/#working-with-version-control
+#pdm.lock
+#pdm.toml
+.pdm-python
+.pdm-build/
+
+# pixi
+# Similar to Pipfile.lock, it is generally recommended to include pixi.lock in version control.
+#pixi.lock
+# Pixi creates a virtual environment in the .pixi directory, just like venv module creates one
+# in the .venv directory. It is recommended not to include this directory in version control.
+.pixi
+
+# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
+__pypackages__/
+
+# Celery stuff
+celerybeat-schedule
+celerybeat.pid
+
+# SageMath parsed files
+*.sage.py
+
+# Environments
+.env
+.envrc
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+
+# Spyder project settings
+.spyderproject
+.spyproject
+
+# Rope project settings
+.ropeproject
+
+# mkdocs documentation
+/site
+
+# mypy
+.mypy_cache/
+.dmypy.json
+dmypy.json
+
+# Pyre type checker
+.pyre/
+
+# pytype static type analyzer
+.pytype/
+
+# Cython debug symbols
+cython_debug/
+
+# PyCharm
+# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
+# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
+# and can be added to the global gitignore or merged into this file. For a more nuclear
+# option (not recommended) you can uncomment the following to ignore the entire idea folder.
+#.idea/
+
+# Abstra
+# Abstra is an AI-powered process automation framework.
+# Ignore directories containing user credentials, local state, and settings.
+# Learn more at https://abstra.io/docs
+.abstra/
+
+# Visual Studio Code
+# Visual Studio Code specific template is maintained in a separate VisualStudioCode.gitignore
+# that can be found at https://github.com/github/gitignore/blob/main/Global/VisualStudioCode.gitignore
+# and can be added to the global gitignore or merged into this file. However, if you prefer,
+# you could uncomment the following to ignore the entire vscode folder
+# .vscode/
+
+# Ruff stuff:
+.ruff_cache/
+
+# PyPI configuration file
+.pypirc
+
+# Marimo
+marimo/_static/
+marimo/_lsp/
+__marimo__/
+
+# Streamlit
+.streamlit/secrets.toml
+thicket.yaml
+219 -13
ARCH.md
···
│ │ ├── commands/ # Subcommands
│ │ │ ├── __init__.py
│ │ │ ├── init.py # Initialize git store
-│ │ │ ├── add.py # Add feed to config
+│ │ │ ├── add.py # Add users and feeds
│ │ │ ├── sync.py # Sync feeds
-│ │ │ ├── list.py # List users/feeds
-│ │ │ └── search.py # Search entries
+│ │ │ ├── list_cmd.py # List users/feeds
+│ │ │ ├── duplicates.py # Manage duplicate entries
+│ │ │ ├── links_cmd.py # Extract and categorize links
+│ │ │ └── index_cmd.py # Build reference index and show threads
│ │ └── utils.py # CLI utilities (progress, formatting)
│ ├── core/ # Core business logic
│ │ ├── __init__.py
│ │ ├── feed_parser.py # Feed parsing and normalization
│ │ ├── git_store.py # Git repository operations
-│ │ ├── cache.py # Cache management
-│ │ └── sanitizer.py # Filename and HTML sanitization
+│ │ └── reference_parser.py # Link extraction and threading
│ ├── models/ # Pydantic data models
│ │ ├── __init__.py
│ │ ├── config.py # Configuration models
│ │ ├── feed.py # Feed/Entry models
│ │ └── user.py # User metadata models
│ └── utils/ # Shared utilities
-│ ├── __init__.py
-│ ├── paths.py # Path handling
-│ └── network.py # HTTP client wrapper
+│ └── __init__.py
├── tests/
│ ├── __init__.py
│ ├── conftest.py # pytest configuration
···
git-store/
├── index.json # User directory index
├── duplicates.json # Manual curation of duplicate entries
+├── links.json # Unified links, references, and mapping data
├── user1/
-│ ├── metadata.json # User metadata
│ ├── entry_id_1.json # Sanitized entry files
│ ├── entry_id_2.json
│ └── ...
···
thicket list users
thicket list feeds --user alyssa
-# Search entries
-thicket search "keyword" --user alyssa --since 2025-01-01
-
# Manage duplicate entries
thicket duplicates list
thicket duplicates add <entry_id_1> <entry_id_2> # Mark as duplicates
thicket duplicates remove <entry_id_1> <entry_id_2> # Unmark duplicates
+
+# Link processing and threading
+thicket links --verbose # Extract and categorize all links
+thicket index --verbose # Build reference index for threading
+thicket threads # Show conversation threads
+thicket threads --username user1 # Show threads for specific user
+thicket threads --min-size 3 # Show threads with minimum size
```
## Performance Considerations
···
homepage=self.author_uri or self.link,
icon=self.logo or self.icon or self.image_url
)
-```
+```
+
+## Link Processing and Threading Architecture
+
+### Overview
+Thicket implements a link processing and threading system that creates email-style threaded views of blog entries by tracking cross-references between different blogs.
+
+### Link Processing Pipeline
+
+#### 1. Link Extraction (`thicket links`)
+The `links` command systematically extracts all outbound links from blog entries and categorizes them:
+
+```python
+class LinkData(BaseModel):
+    url: str                        # Fully resolved URL
+    entry_id: str                   # Source entry ID
+    username: str                   # Source username
+    context: str                    # Surrounding text context
+    category: str                   # "internal", "user", or "unknown"
+    target_username: Optional[str]  # Target user if applicable
+```
+
+**Link Categories** (a classification sketch follows the list):
+- **Internal**: Links to the same user's domain (self-references)
+- **User**: Links to other tracked users' domains
+- **Unknown**: Links to external sites not tracked by thicket
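
To make the categorization rule concrete, here is a minimal sketch of domain-based classification; the helper name and the `domain_to_user` mapping shape are illustrative, not taken from the diff:

```python
from typing import Optional
from urllib.parse import urlparse

def categorize_link(url: str, source_username: str,
                    domain_to_user: dict[str, str]) -> tuple[str, Optional[str]]:
    """Classify a resolved URL as "internal", "user", or "unknown".

    domain_to_user maps a domain such as "example.com" to the tracked
    username that owns it (hypothetical structure for illustration).
    """
    domain = urlparse(url).netloc.lower().removeprefix("www.")
    target = domain_to_user.get(domain)
    if target == source_username:
        return "internal", target  # self-reference to the author's own site
    if target is not None:
        return "user", target      # link to another tracked user
    return "unknown", None         # external site not tracked by thicket
```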
+
+#### 2. URL Resolution
+All links are resolved against the Atom feed's base URL to handle (a resolution sketch follows the list):
+- Relative URLs (converted to absolute)
+- Protocol-relative URLs
+- Fragment identifiers
+- Redirects and canonical URLs
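
A sketch of the static part of this resolution using only the standard library; following redirects and discovering canonical URLs requires HTTP requests and is out of scope here (the function name is illustrative):

```python
from urllib.parse import urljoin, urldefrag

def resolve_link(href: str, base_url: str, keep_fragment: bool = False) -> str:
    """Resolve a link found in an entry against the feed's base URL."""
    absolute = urljoin(base_url, href)  # handles relative and protocol-relative URLs
    if not keep_fragment:
        absolute, _fragment = urldefrag(absolute)  # strip "#section" identifiers
    return absolute

# resolve_link("../2024/post", "https://blog.example.com/feeds/atom.xml")
# -> "https://blog.example.com/2024/post"
```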
+
+#### 3. Domain Mapping
+The system builds a comprehensive domain mapping from user configuration (sketched below):
+- Feed URLs → domain extraction
+- Homepage URLs → domain extraction
+- Reverse mapping: domain → username
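
A minimal sketch of building that reverse mapping; the input shape (username to a list of feed and homepage URLs) is an assumption for illustration:

```python
from urllib.parse import urlparse

def build_domain_map(user_urls: dict[str, list[str]]) -> dict[str, str]:
    """Build the reverse mapping domain -> username from each user's
    feed and homepage URLs."""
    domain_to_user: dict[str, str] = {}
    for username, urls in user_urls.items():
        for url in urls:
            domain = urlparse(url).netloc.lower().removeprefix("www.")
            if domain:
                domain_to_user[domain] = username
    return domain_to_user

# build_domain_map({"user1": ["https://blog.user.com/atom.xml"],
#                   "user2": ["https://example.com/"]})
# == {"blog.user.com": "user1", "example.com": "user2"}
```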
+
+### Threading System
+
+#### 1. Reference Index Generation (`thicket index`)
+Creates a bidirectional reference index from the categorized links:
+
+```python
+class BlogReference(BaseModel):
+    source_entry_id: str
+    source_username: str
+    target_url: str
+    target_username: Optional[str]
+    target_entry_id: Optional[str]
+    context: str
+```
+
+#### 2. Thread Detection Algorithm
+Uses graph traversal to find connected blog entries (see the sketch after this list):
+- **Outbound references**: Links from an entry to other entries
+- **Inbound references**: Links to an entry from other entries
+- **Thread members**: All entries connected through references
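
One way to realize this traversal is to treat the references as an undirected graph and take its connected components; this is a sketch over the `references` list from links.json, not the repository's implementation:

```python
from collections import defaultdict

def find_threads(references: list[dict]) -> list[set[str]]:
    """Group entries into threads as the connected components of the
    reference graph."""
    graph: dict[str, set[str]] = defaultdict(set)
    for ref in references:
        src, dst = ref["source_entry_id"], ref.get("target_entry_id")
        if dst and dst != src:      # ignore unresolved and self references
            graph[src].add(dst)     # outbound edge
            graph[dst].add(src)     # inbound edge (undirected for grouping)

    threads: list[set[str]] = []
    seen: set[str] = set()
    for start in graph:
        if start in seen:
            continue
        component: set[str] = set()
        stack = [start]
        while stack:                # iterative depth-first traversal
            node = stack.pop()
            if node in component:
                continue
            component.add(node)
            stack.extend(graph[node] - component)
        seen |= component
        if len(component) > 1:      # a thread needs at least two entries
            threads.append(component)
    return threads
```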
+
+#### 3. Threading Display (`thicket threads`)
+Creates email-style threaded views:
+- Chronological ordering within threads
+- Reference counts (outbound/inbound)
+- Context preservation
+- Filtering options (user, entry, minimum size)
+
+### Data Structures
+
+#### links.json Format (Unified Structure)
+```json
+{
+  "links": {
+    "https://example.com/post/123": {
+      "referencing_entries": ["https://blog.user.com/entry/456"],
+      "target_username": "user2"
+    },
+    "https://external-site.com/article": {
+      "referencing_entries": ["https://blog.user.com/entry/789"]
+    }
+  },
+  "reverse_mapping": {
+    "https://blog.user.com/entry/456": ["https://example.com/post/123"],
+    "https://blog.user.com/entry/789": ["https://external-site.com/article"]
+  },
+  "references": [
+    {
+      "source_entry_id": "https://blog.user.com/entry/456",
+      "source_username": "user1",
+      "target_url": "https://example.com/post/123",
+      "target_username": "user2",
+      "target_entry_id": "https://example.com/post/123",
+      "context": "As mentioned in this post..."
+    }
+  ],
+  "user_domains": {
+    "user1": ["blog.user.com"],
+    "user2": ["example.com"]
+  }
+}
+```
+
+This unified structure eliminates duplication by (a query example follows the list):
+- Storing each URL only once with minimal metadata
+- Including all link data, reference data, and mappings in one file
+- Using the presence of `target_username` to distinguish tracked from external links
+- Providing bidirectional mappings for efficient queries
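
For example, both query directions reduce to plain dictionary lookups on the loaded file; the path and URLs here follow the example data above:

```python
import json
from pathlib import Path

with open(Path("git-store/links.json")) as f:
    links_data = json.load(f)

# URL -> entries: which entries reference this post?
inbound = links_data["links"]["https://example.com/post/123"]["referencing_entries"]

# entry -> URLs: what does this entry link to?
outbound = links_data["reverse_mapping"]["https://blog.user.com/entry/456"]
```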
+
+### Unified Structure Benefits
+
+- **Eliminates Duplication**: Each URL appears only once with metadata
+- **Single Source of Truth**: All link-related data in one file
+- **Efficient Queries**: Fast lookups in both directions (URL→entries, entry→URLs)
+- **Atomic Updates**: All link data changes together
+- **Reduced I/O**: Fewer file operations
+
+### Implementation Benefits
+
+1. **Systematic Link Processing**: All links are extracted and categorized consistently
+2. **Proper URL Resolution**: Handles relative URLs and base URL resolution correctly
+3. **Domain-based Categorization**: Automatically identifies user-to-user references
+4. **Bidirectional Indexing**: Supports both "who links to whom" and "who is linked by whom"
+5. **Thread Discovery**: Finds conversation threads automatically
+6. **Rich Context**: Preserves surrounding text for each link
+7. **Performance**: Pre-computed indexes for fast threading queries
+
+### CLI Commands
+
+```bash
+# Extract and categorize all links
+thicket links --verbose
+
+# Build reference index for threading
+thicket index --verbose
+
+# Show all conversation threads
+thicket threads
+
+# Show threads for specific user
+thicket threads --username user1
+
+# Show threads with minimum size
+thicket threads --min-size 3
+```
+
+### Integration with Existing Commands
+
+The link processing system integrates with existing thicket commands (an example sequence follows the list):
+- `thicket sync` updates entries, requiring `thicket links` to be run afterward
+- `thicket index` uses the output from `thicket links` for improved accuracy
+- `thicket threads` provides the user-facing threading interface
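
An illustrative update cycle tying these together, using only the commands documented above:

```bash
thicket sync                   # fetch new entries into the git store
thicket links                  # re-extract and categorize links
thicket index                  # rebuild the reference index
thicket threads --min-size 3   # inspect the resulting conversations
```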
+
+## Current Implementation Status
+
+### ✅ Completed Features
+1. **Core Infrastructure**
+   - Modern CLI with Typer and Rich
+   - Pydantic data models for type safety
+   - Git repository operations with GitPython
+   - Feed parsing and normalization with feedparser
+
+2. **User and Feed Management**
+   - `thicket init` - Initialize git store
+   - `thicket add` - Add users and feeds with auto-discovery
+   - `thicket sync` - Sync feeds with progress tracking
+   - `thicket list` - List users, feeds, and entries
+   - `thicket duplicates` - Manage duplicate entries
+
+3. **Link Processing and Threading**
+   - `thicket links` - Extract and categorize all outbound links
+   - `thicket index` - Build reference index from links
+   - `thicket threads` - Display threaded conversation views
+   - Proper URL resolution with base URL handling
+   - Domain-based link categorization
+   - Context preservation for links
+
+### 📊 System Performance
+- **Link Extraction**: Successfully processes thousands of blog entries
+- **Categorization**: Identifies internal, user, and unknown links
+- **Threading**: Creates email-style threaded views of conversations
+- **Storage**: Efficient JSON-based data structures for links and references
+
+### 🔧 Current Architecture Highlights
+- **Modular Design**: Clear separation between CLI, core logic, and models
+- **Type Safety**: Comprehensive Pydantic models for data validation
+- **Rich CLI**: Progress bars, tables, and error handling
+- **Extensible**: Easy to add new commands and features
+- **Git Integration**: All data stored in version-controlled JSON files
+
+### 🎯 Proven Functionality
+The system has been tested with real blog data and successfully:
+- Extracted 14,396 total links from blog entries
+- Categorized 3,994 internal links, 363 user-to-user links, and 10,039 unknown links
+- Built comprehensive domain mappings for 16 users across 20 domains
+- Generated threaded views showing blog conversation patterns
+
+### 🚀 Ready for Use
+The thicket system is now fully functional for:
+- Maintaining Git repositories of blog feeds
+- Tracking cross-references between blogs
+- Creating threaded views of blog conversations
+- Discovering blog interaction patterns
+- Building distributed comment systems
+24
CLAUDE.md
···
My goal is to build a CLI tool called thicket in Python that maintains a Git repository within which Atom feeds can be persisted, including their contents.
+# Python Environment and Package Management
+
+This project uses `uv` for Python package management and virtual environment handling.
+
+## Running Commands
+
+ALWAYS use `uv run` to execute Python commands:
+
+- Run the CLI: `uv run -m thicket`
+- Run tests: `uv run pytest`
+- Type checking: `uv run mypy src/`
+- Linting: `uv run ruff check src/`
+- Format code: `uv run ruff format src/`
+- Compile check: `uv run python -m py_compile <file>`
+
+## Package Management
+
+- Add dependencies: `uv add <package>`
+- Add dev dependencies: `uv add --dev <package>`
+- Install dependencies: `uv sync`
+- Update dependencies: `uv lock --upgrade`
+
+# Project Structure
+
The configuration file specifies (an illustrative example follows the list):
- the location of a git store
- a list of usernames and target Atom/RSS feed(s) and optional metadata about the username such as their email, homepage, icon and display name
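
A hypothetical `thicket.yaml` matching this description might look as follows; the field names are assumptions for illustration, not the authoritative schema (the real models live in `src/thicket/models/config.py`):

```yaml
# Illustrative only - field names are assumed, not taken from the real schema
git_store: ./git-store
users:
  - username: alyssa
    feeds:
      - https://alyssa.example.org/atom.xml
    email: alyssa@example.org            # optional metadata
    homepage: https://alyssa.example.org
    display_name: Alyssa P. Hacker
    icon: https://alyssa.example.org/icon.png
```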
+260
code_duplication_analysis.md
···
+# Code Duplication Analysis for Thicket
+
+## 1. Duplicate JSON Handling Code
+
+### Pattern: JSON file reading/writing
+**Locations:**
+- `src/thicket/cli/commands/generate.py:230` - Reading JSON with `json.load(f)`
+- `src/thicket/cli/commands/generate.py:249` - Reading links.json
+- `src/thicket/cli/commands/index.py:2305` - Reading JSON
+- `src/thicket/cli/commands/index.py:2320` - Writing JSON with `json.dump()`
+- `src/thicket/cli/commands/threads.py:2456` - Reading JSON
+- `src/thicket/cli/commands/info.py:2683` - Reading JSON
+- `src/thicket/core/git_store.py:5546` - Writing JSON with custom serializer
+- `src/thicket/core/git_store.py:5556` - Reading JSON
+- `src/thicket/core/git_store.py:5566` - Writing JSON
+- `src/thicket/core/git_store.py:5656` - Writing JSON with model dump
+
+**Recommendation:** Create a shared `json_utils.py` module:
+```python
+def read_json_file(path: Path) -> dict:
+    """Read JSON file with error handling."""
+    with open(path) as f:
+        return json.load(f)
+
+def write_json_file(path: Path, data: dict, indent: int = 2) -> None:
+    """Write JSON file with consistent formatting."""
+    with open(path, "w") as f:
+        json.dump(data, f, indent=indent, default=str)
+
+def write_model_json(path: Path, model: BaseModel, indent: int = 2) -> None:
+    """Write Pydantic model as JSON."""
+    with open(path, "w") as f:
+        json.dump(model.model_dump(mode="json", exclude_none=True), f, indent=indent, default=str)
+```
+
+## 2. Repeated Datetime Handling
+
+### Pattern: datetime formatting and fallback handling
+**Locations:**
+- `src/thicket/cli/commands/generate.py:241` - `key=lambda x: x[1].updated or x[1].published or datetime.min`
+- `src/thicket/cli/commands/generate.py:353` - Same pattern in thread sorting
+- `src/thicket/cli/commands/generate.py:359` - Same pattern for max date
+- `src/thicket/cli/commands/generate.py:625` - Same pattern
+- `src/thicket/cli/commands/generate.py:655` - `entry.updated or entry.published or datetime.min`
+- `src/thicket/cli/commands/generate.py:689` - Same pattern
+- `src/thicket/cli/commands/generate.py:702` - Same pattern
+- Multiple `.strftime('%Y-%m-%d')` calls throughout
+
+**Recommendation:** Create a shared `datetime_utils.py` module:
+```python
+def get_entry_date(entry: AtomEntry) -> datetime:
+    """Get the most relevant date for an entry with fallback."""
+    return entry.updated or entry.published or datetime.min
+
+def format_date_short(dt: datetime) -> str:
+    """Format datetime as YYYY-MM-DD."""
+    return dt.strftime('%Y-%m-%d')
+
+def format_date_full(dt: datetime) -> str:
+    """Format datetime as YYYY-MM-DD HH:MM."""
+    return dt.strftime('%Y-%m-%d %H:%M')
+
+def format_date_iso(dt: datetime) -> str:
+    """Format datetime as ISO string."""
+    return dt.isoformat()
+```
+
+## 3. Path Handling Patterns
+
+### Pattern: Directory creation and existence checks
+**Locations:**
+- `src/thicket/cli/commands/generate.py:225` - `if user_dir.exists()`
+- `src/thicket/cli/commands/generate.py:247` - `if links_file.exists()`
+- `src/thicket/cli/commands/generate.py:582` - `self.output_dir.mkdir(parents=True, exist_ok=True)`
+- `src/thicket/cli/commands/generate.py:585-586` - Multiple mkdir calls
+- `src/thicket/cli/commands/threads.py:2449` - `if not index_path.exists()`
+- `src/thicket/cli/commands/info.py:2681` - `if links_path.exists()`
+- `src/thicket/core/git_store.py:5515` - `if not self.repo_path.exists()`
+- `src/thicket/core/git_store.py:5586` - `user_dir.mkdir(exist_ok=True)`
+- Many more similar patterns
+
+**Recommendation:** Create a shared `path_utils.py` module:
+```python
+def ensure_directory(path: Path) -> Path:
+    """Ensure directory exists, creating if necessary."""
+    path.mkdir(parents=True, exist_ok=True)
+    return path
+
+def read_json_if_exists(path: Path, default: Any = None) -> Any:
+    """Read JSON file if it exists, otherwise return default."""
+    if path.exists():
+        with open(path) as f:
+            return json.load(f)
+    return default
+
+def safe_path_join(*parts: Union[str, Path]) -> Path:
+    """Safely join path components."""
+    return Path(*parts)
+```
+
+## 4. Progress Bar and Console Output
+
+### Pattern: Progress bar creation and updates
+**Locations:**
+- `src/thicket/cli/commands/generate.py:209` - Progress with SpinnerColumn
+- `src/thicket/cli/commands/index.py:2230` - Same Progress pattern
+- Multiple `console.print()` calls with similar formatting patterns
+- Progress update patterns repeated
+
+**Recommendation:** Create a shared `ui_utils.py` module:
+```python
+def create_progress_spinner(description: str) -> tuple[Progress, TaskID]:
+    """Create a standard progress spinner."""
+    progress = Progress(
+        SpinnerColumn(),
+        TextColumn("[progress.description]{task.description}"),
+        transient=True,
+    )
+    task = progress.add_task(description)
+    return progress, task
+
+def print_success(message: str) -> None:
+    """Print success message with consistent formatting."""
+    console.print(f"[green]✓[/green] {message}")
+
+def print_error(message: str) -> None:
+    """Print error message with consistent formatting."""
+    console.print(f"[red]Error: {message}[/red]")
+
+def print_warning(message: str) -> None:
+    """Print warning message with consistent formatting."""
+    console.print(f"[yellow]Warning: {message}[/yellow]")
+```
+
+## 5. Git Store Operations
+
+### Pattern: Entry file operations
+**Locations:**
+- Multiple patterns of loading entries from user directories
+- Repeated safe_id generation
+- Repeated user directory path construction
+
+**Recommendation:** Enhance GitStore with helper methods:
+```python
+def get_user_dir(self, username: str) -> Path:
+    """Get user directory path."""
+    return self.repo_path / username
+
+def iter_user_entries(self, username: str) -> Iterator[tuple[Path, AtomEntry]]:
+    """Iterate over all entries for a user."""
+    user_dir = self.get_user_dir(username)
+    if user_dir.exists():
+        for entry_file in user_dir.glob("*.json"):
+            if entry_file.name not in ["index.json", "duplicates.json"]:
+                try:
+                    entry = self.read_entry_file(entry_file)
+                    yield entry_file, entry
+                except Exception:
+                    continue
+```
+
+## 6. Error Handling Patterns
+
+### Pattern: Try-except with console error printing
+**Locations:**
+- Similar error handling patterns throughout CLI commands
+- Repeated `raise typer.Exit(1)` patterns
+- Similar exception message formatting
+
+**Recommendation:** Create error handling decorators:
+```python
+def handle_cli_errors(func):
+    """Decorator to handle CLI command errors consistently."""
+    @functools.wraps(func)
+    def wrapper(*args, **kwargs):
+        try:
+            return func(*args, **kwargs)
+        except ValidationError as e:
+            console.print(f"[red]Validation error: {e}[/red]")
+            raise typer.Exit(1)
+        except Exception as e:
+            console.print(f"[red]Error: {e}[/red]")
+            if kwargs.get('verbose'):
+                console.print_exception()
+            raise typer.Exit(1)
+    return wrapper
+```
+
+## 7. Configuration and Validation
+
+### Pattern: Config file loading and validation
+**Locations:**
+- Repeated config loading pattern in every CLI command
+- Similar validation patterns for URLs and paths
+
+**Recommendation:** Create a `config_utils.py` module:
+```python
+def load_config_with_defaults(config_path: Optional[Path] = None) -> ThicketConfig:
+    """Load config with standard defaults and error handling."""
+    if config_path is None:
+        config_path = Path("thicket.yaml")
+
+    if not config_path.exists():
+        raise ConfigError(f"Configuration file not found: {config_path}")
+
+    return load_config(config_path)
+
+def validate_url(url: str) -> HttpUrl:
+    """Validate and return URL with consistent error handling."""
+    try:
+        return HttpUrl(url)
+    except ValidationError:
+        raise ConfigError(f"Invalid URL: {url}")
+```
+
+## 8. Model Serialization
+
+### Pattern: Pydantic model JSON encoding
+**Locations:**
+- Repeated `json_encoders={datetime: lambda v: v.isoformat()}` in model configs
+- Similar model_dump patterns
+
+**Recommendation:** Create base model class:
+```python
+class ThicketBaseModel(BaseModel):
+    """Base model with common configuration."""
+    model_config = ConfigDict(
+        json_encoders={datetime: lambda v: v.isoformat()},
+        str_strip_whitespace=True,
+    )
+
+    def to_json_dict(self) -> dict:
+        """Convert to JSON-serializable dict."""
+        return self.model_dump(mode="json", exclude_none=True)
+```
+
+## Summary of Refactoring Benefits
+
+1. **Reduced Code Duplication**: Eliminate 30-40% of duplicate code
+2. **Consistent Error Handling**: Standardize error messages and handling
+3. **Easier Maintenance**: Central location for common patterns
+4. **Better Testing**: Easier to unit test shared utilities
+5. **Type Safety**: Shared type hints and validation
+6. **Performance**: Potential to optimize common operations in one place
+
+## Implementation Priority
+
+1. **High Priority**:
+   - JSON utilities (used everywhere)
+   - Datetime utilities (critical for sorting and display)
+   - Error handling decorators (improves UX consistency)
+
+2. **Medium Priority**:
+   - Path utilities
+   - UI/Console utilities
+   - Config utilities
+
+3. **Low Priority**:
+   - Base model classes (requires more refactoring)
+   - Git store enhancements (already well-structured)
+6 -2
pyproject.toml
···
    "bleach>=6.0.0",
    "platformdirs>=4.0.0",
    "pyyaml>=6.0.0",
+    "email_validator",
+    "jinja2>=3.1.6",
]
[project.optional-dependencies]
···
[tool.ruff]
target-version = "py39"
line-length = 88
+
+[tool.ruff.lint]
select = [
    "E",  # pycodestyle errors
    "W",  # pycodestyle warnings
···
    "C901",  # too complex
]
-[tool.ruff.per-file-ignores]
+[tool.ruff.lint.per-file-ignores]
"__init__.py" = ["F401"]
[tool.mypy]
···
    "if __name__ == .__main__.:",
    "class .*\\bProtocol\\):",
    "@(abc\\.)?abstractmethod",
-]
+]
+6617
repomix-output.xml
···
+This file is a merged representation of the entire codebase, combined into a single document by Repomix.
+
+<file_summary>
+This section contains a summary of this file.
+
+<purpose>
+This file contains a packed representation of the entire repository's contents.
+It is designed to be easily consumable by AI systems for analysis, code review,
+or other automated processes.
+</purpose>
+
+<file_format>
+The content is organized as follows:
+1. This summary section
+2. Repository information
+3. Directory structure
+4. Repository files (if enabled)
+5. Multiple file entries, each consisting of:
+  - File path as an attribute
+  - Full contents of the file
+</file_format>
+
+<usage_guidelines>
+- This file should be treated as read-only. Any changes should be made to the
+  original repository files, not this packed version.
+- When processing this file, use the file path to distinguish
+  between different files in the repository.
+- Be aware that this file may contain sensitive information. Handle it with
+  the same level of security as you would the original repository.
+</usage_guidelines>
+
+<notes>
+- Some files may have been excluded based on .gitignore rules and Repomix's configuration
+- Binary files are not included in this packed representation. Please refer to the Repository Structure section for a complete list of file paths, including binary files
+- Files matching patterns in .gitignore are excluded
+- Files matching default ignore patterns are excluded
+- Files are sorted by Git change count (files with more changes are at the bottom)
+</notes>
+
+</file_summary>
+
+<directory_structure>
+.claude/
+  settings.local.json
+src/
+  thicket/
+    cli/
+      commands/
+        __init__.py
+        add.py
+        duplicates.py
+        generate.py
+        index_cmd.py
+        info_cmd.py
+        init.py
+        links_cmd.py
+        list_cmd.py
+        sync.py
+      __init__.py
+      main.py
+      utils.py
+    core/
+      __init__.py
+      feed_parser.py
+      git_store.py
+      reference_parser.py
+    models/
+      __init__.py
+      config.py
+      feed.py
+      user.py
+    templates/
+      base.html
+      index.html
+      links.html
+      script.js
+      style.css
+      timeline.html
+      users.html
+    utils/
+      __init__.py
+    __init__.py
+    __main__.py
+.gitignore
+ARCH.md
+CLAUDE.md
+pyproject.toml
+README.md
+</directory_structure>
+
+<files>
+This section contains the contents of the repository's files.
+
+<file path=".claude/settings.local.json">
+{
+  "permissions": {
+    "allow": [
+      "Bash(find:*)",
+      "Bash(uv run:*)",
+      "Bash(grep:*)",
+      "Bash(jq:*)",
+      "Bash(git add:*)",
+      "Bash(ls:*)"
+    ]
+  },
+  "enableAllProjectMcpServers": false
+}
+</file>
+
+<file path="src/thicket/cli/commands/generate.py">
+"""Generate static HTML website from thicket data."""
+
+import base64
+import json
+import re
+import shutil
+from datetime import datetime
+from pathlib import Path
+from typing import Any, Optional, TypedDict, Union
+
+import typer
+from jinja2 import Environment, FileSystemLoader, select_autoescape
+from rich.progress import Progress, SpinnerColumn, TextColumn
+
+from ...core.git_store import GitStore
+from ...models.feed import AtomEntry
+from ...models.user import GitStoreIndex, UserMetadata
+from ..main import app
+from ..utils import console, load_config
+
+
+class UserData(TypedDict):
+    """Type definition for user data structure."""
+
+    metadata: UserMetadata
+    recent_entries: list[tuple[str, AtomEntry]]
+
+
+def safe_anchor_id(atom_id: str) -> str:
+    """Convert an Atom ID to a safe HTML anchor ID."""
+    # Use base64 URL-safe encoding without padding
+    encoded = base64.urlsafe_b64encode(atom_id.encode('utf-8')).decode('ascii').rstrip('=')
+    # Prefix with 'id' to ensure it starts with a letter (HTML requirement)
+    return f"id{encoded}"
+
+
+class WebsiteGenerator:
+    """Generate static HTML website from thicket data."""
+
+    def __init__(self, git_store: GitStore, output_dir: Path):
+        self.git_store = git_store
+        self.output_dir = output_dir
+        self.template_dir = Path(__file__).parent.parent.parent / "templates"
+
+        # Initialize Jinja2 environment
+        self.env = Environment(
+            loader=FileSystemLoader(self.template_dir),
+            autoescape=select_autoescape(["html", "xml"]),
+        )
+
+        # Data containers
+        self.index: Optional[GitStoreIndex] = None
+        self.entries: list[tuple[str, AtomEntry]] = []  # (username, entry)
+        self.links_data: Optional[dict[str, Any]] = None
+        self.threads: list[list[dict[str, Any]]] = []  # List of threads with metadata
+
+    def get_display_name(self, username: str) -> str:
+        """Get display name for a user, falling back to username."""
+        if self.index and username in self.index.users:
+            user = self.index.users[username]
+            return user.display_name or username
+        return username
+
+    def get_user_homepage(self, username: str) -> Optional[str]:
+        """Get homepage URL for a user."""
+        if self.index and username in self.index.users:
+            user = self.index.users[username]
+            return str(user.homepage) if user.homepage else None
+        return None
+
+    def clean_html_summary(self, content: Optional[str], max_length: int = 200) -> str:
+        """Clean HTML content and truncate for display in timeline."""
+        if not content:
+            return ""
+
+        # Remove HTML tags
+        clean_text = re.sub(r"<[^>]+>", " ", content)
+        # Replace multiple whitespace with single space
+        clean_text = re.sub(r"\s+", " ", clean_text)
+        # Strip leading/trailing whitespace
+        clean_text = clean_text.strip()
+
+        # Truncate with ellipsis if needed
+        if len(clean_text) > max_length:
+            # Try to break at word boundary
+            truncated = clean_text[:max_length]
+            last_space = truncated.rfind(" ")
+            if (
+                last_space > max_length * 0.8
+            ):  # If we can break reasonably close to the limit
+                clean_text = truncated[:last_space] + "..."
+            else:
+                clean_text = truncated + "..."
+
+        return clean_text
+
+    def load_data(self) -> None:
+        """Load all data from the git repository."""
+        with Progress(
+            SpinnerColumn(),
+            TextColumn("[progress.description]{task.description}"),
+            console=console,
+        ) as progress:
+            # Load index
+            task = progress.add_task("Loading repository index...", total=None)
+            self.index = self.git_store._load_index()
+            if not self.index:
+                raise ValueError("No index found in repository")
+            progress.update(task, completed=True)
+
+            # Load all entries
+            task = progress.add_task("Loading entries...", total=None)
+            for username, user_metadata in self.index.users.items():
+                user_dir = self.git_store.repo_path / user_metadata.directory
+                if user_dir.exists():
+                    for entry_file in user_dir.glob("*.json"):
+                        if entry_file.name not in ["index.json", "duplicates.json"]:
+                            try:
+                                with open(entry_file) as f:
+                                    entry_data = json.load(f)
+                                entry = AtomEntry(**entry_data)
+                                self.entries.append((username, entry))
+                            except Exception as e:
+                                console.print(
+                                    f"[yellow]Warning: Failed to load {entry_file}: {e}[/yellow]"
+                                )
+            progress.update(task, completed=True)
+
+            # Sort entries by date (newest first) - prioritize updated over published
+            self.entries.sort(
+                key=lambda x: x[1].updated or x[1].published or datetime.min, reverse=True
+            )
+
+            # Load links data
+            task = progress.add_task("Loading links and references...", total=None)
+            links_file = self.git_store.repo_path / "links.json"
+            if links_file.exists():
+                with open(links_file) as f:
+                    self.links_data = json.load(f)
+            progress.update(task, completed=True)
+
+    def build_threads(self) -> None:
+        """Build threaded conversations from references."""
+        if not self.links_data or "references" not in self.links_data:
+            return
+
+        # Map entry IDs to (username, entry) tuples
+        entry_map: dict[str, tuple[str, AtomEntry]] = {}
+        for username, entry in self.entries:
+            entry_map[entry.id] = (username, entry)
+
+        # Build adjacency lists for references
+        self.outbound_refs: dict[str, set[str]] = {}
+        self.inbound_refs: dict[str, set[str]] = {}
+        self.reference_details: dict[
+            str, list[dict[str, Any]]
+        ] = {}  # Store full reference info
+
+        for ref in self.links_data["references"]:
+            source_id = ref["source_entry_id"]
+            target_id = ref.get("target_entry_id")
+
+            if target_id and source_id in entry_map and target_id in entry_map:
+                self.outbound_refs.setdefault(source_id, set()).add(target_id)
+                self.inbound_refs.setdefault(target_id, set()).add(source_id)
+
+                # Store reference details for UI
+                self.reference_details.setdefault(source_id, []).append(
+                    {
+                        "target_id": target_id,
+                        "target_username": ref.get("target_username"),
+                        "type": "outbound",
+                    }
+                )
+                self.reference_details.setdefault(target_id, []).append(
+                    {
+                        "source_id": source_id,
+                        "source_username": ref.get("source_username"),
+                        "type": "inbound",
+                    }
+                )
+
+        # Find conversation threads (multi-post discussions)
+        processed = set()
+
+        for entry_id, (_username, _entry) in entry_map.items():
+            if entry_id in processed:
+                continue
+
+            # Build thread starting from this entry
+            thread = []
+            to_visit = [entry_id]
+            thread_ids = set()
+            level_map: dict[str, int] = {}  # Track levels for this thread
+
+            # First, traverse up to find the root
+            current = entry_id
+            while current in self.inbound_refs:
+                parents = self.inbound_refs[current] - {
+                    current
+                }  # Exclude self-references
+                if not parents:
+                    break
+                # Take the first parent
+                parent = next(iter(parents))
+                if parent in thread_ids:  # Avoid cycles
+                    break
+                current = parent
+                to_visit.insert(0, current)
+
+            # Now traverse down from the root
+            while to_visit:
+                current = to_visit.pop(0)
+                if current in thread_ids or current not in entry_map:
+                    continue
+
+                thread_ids.add(current)
+                username, entry = entry_map[current]
+
+                # Calculate thread level
+                thread_level = self._calculate_thread_level(current, level_map)
+
+                # Add threading metadata
+                thread_entry = {
+                    "username": username,
+                    "display_name": self.get_display_name(username),
+                    "entry": entry,
+                    "entry_id": current,
+                    "references_to": list(self.outbound_refs.get(current, [])),
+                    "referenced_by": list(self.inbound_refs.get(current, [])),
+                    "thread_level": thread_level,
+                }
+                thread.append(thread_entry)
+                processed.add(current)
+
+                # Add children
+                if current in self.outbound_refs:
+                    children = self.outbound_refs[current] - thread_ids  # Avoid cycles
+                    to_visit.extend(sorted(children))
+
+            if len(thread) > 1:  # Only keep actual threads
+                # Sort thread by date (newest first) - prioritize updated over published
+                thread.sort(key=lambda x: x["entry"].updated or x["entry"].published or datetime.min, reverse=True)  # type: ignore
+                self.threads.append(thread)
+
+        # Sort threads by the date of their most recent entry - prioritize updated over published
+        self.threads.sort(
+            key=lambda t: max(
+                item["entry"].updated or item["entry"].published or datetime.min for item in t
+            ),
+            reverse=True,
+        )
+
+    def _calculate_thread_level(
+        self, entry_id: str, processed_entries: dict[str, int]
+    ) -> int:
+        """Calculate indentation level for threaded display."""
+        if entry_id in processed_entries:
+            return processed_entries[entry_id]
+
+        if entry_id not in self.inbound_refs:
+            processed_entries[entry_id] = 0
+            return 0
+
+        parents_in_thread = self.inbound_refs[entry_id] & set(processed_entries.keys())
+        if not parents_in_thread:
+            processed_entries[entry_id] = 0
+            return 0
+
+        # Find the deepest parent level + 1
+        max_parent_level = 0
+        for parent_id in parents_in_thread:
+            parent_level = self._calculate_thread_level(parent_id, processed_entries)
+            max_parent_level = max(max_parent_level, parent_level)
+
+        level = min(max_parent_level + 1, 4)  # Cap at level 4
+        processed_entries[entry_id] = level
+        return level
+
+    def get_standalone_references(self) -> list[dict[str, Any]]:
+        """Get posts that have references but aren't part of multi-post threads."""
+        if not hasattr(self, "reference_details"):
+            return []
+
+        threaded_entry_ids = set()
+        for thread in self.threads:
+            for item in thread:
+                threaded_entry_ids.add(item["entry_id"])
+
+        standalone_refs = []
+        for username, entry in self.entries:
+            if (
+                entry.id in self.reference_details
+                and entry.id not in threaded_entry_ids
+            ):
+                refs = self.reference_details[entry.id]
+                # Only include if it has meaningful references (not just self-references)
+                meaningful_refs = [
+                    r
+                    for r in refs
+                    if r.get("target_id") != entry.id and r.get("source_id") != entry.id
+                ]
+                if meaningful_refs:
+                    standalone_refs.append(
+                        {
+                            "username": username,
+                            "display_name": self.get_display_name(username),
+                            "entry": entry,
+                            "references": meaningful_refs,
+                        }
+                    )
+
+        return standalone_refs
+
+    def _add_cross_thread_links(self, timeline_items: list[dict[str, Any]]) -> None:
+        """Add cross-thread linking for entries that appear in multiple threads."""
+        # Map entry IDs to their positions in the timeline
+        entry_positions: dict[str, list[int]] = {}
+        # Map URLs referenced by entries to the entries that reference them
+        url_references: dict[str, list[tuple[str, int]]] = {}  # url -> [(entry_id, position)]
+
+        # First pass: collect all entry IDs, their positions, and referenced URLs
+        for i, item in enumerate(timeline_items):
+            if item["type"] == "post":
+                entry_id = item["content"]["entry"].id
+                entry_positions.setdefault(entry_id, []).append(i)
+                # Track URLs this entry references
+                if entry_id in self.reference_details:
+                    for ref in self.reference_details[entry_id]:
+                        if ref["type"] == "outbound" and "target_id" in ref:
+                            # Find the target entry's URL if available
+                            target_entry = self._find_entry_by_id(ref["target_id"])
+                            if target_entry and target_entry.link:
+                                url = str(target_entry.link)
+                                url_references.setdefault(url, []).append((entry_id, i))
+            elif item["type"] == "thread":
+                for thread_item in item["content"]:
+                    entry_id = thread_item["entry"].id
+                    entry_positions.setdefault(entry_id, []).append(i)
+                    # Track URLs this entry references
+                    if entry_id in self.reference_details:
+                        for ref in self.reference_details[entry_id]:
+                            if ref["type"] == "outbound" and "target_id" in ref:
+                                target_entry = self._find_entry_by_id(ref["target_id"])
+                                if target_entry and target_entry.link:
+                                    url = str(target_entry.link)
+                                    url_references.setdefault(url, []).append((entry_id, i))
+
+        # Build cross-thread connections - only for entries that actually appear multiple times
+        cross_thread_connections: dict[str, set[int]] = {}  # entry_id -> set of timeline positions
+
+        # Add connections ONLY for entries that appear multiple times in the timeline
+        for entry_id, positions in entry_positions.items():
+            if len(positions) > 1:
+                cross_thread_connections[entry_id] = set(positions)
+                # Debug: uncomment to see which entries have multiple appearances
+                # print(f"Entry {entry_id[:50]}... appears at positions: {positions}")
+
+        # Apply cross-thread links to timeline items
+        for entry_id, positions_set in cross_thread_connections.items():
+            positions_list = list(positions_set)
+            for pos in positions_list:
+                item = timeline_items[pos]
+                other_positions = sorted([p for p in positions_list if p != pos])
+
+                if item["type"] == "post":
+                    # Add cross-thread info to individual posts
+                    item["content"]["cross_thread_links"] = self._build_cross_thread_link_data(entry_id, other_positions, timeline_items)
+                    # Add info about shared references
+                    item["content"]["shared_references"] = self._get_shared_references(entry_id, positions_set, timeline_items)
+                elif item["type"] == "thread":
+                    # Add cross-thread info to thread items
+                    for thread_item in item["content"]:
+                        if thread_item["entry"].id == entry_id:
+                            thread_item["cross_thread_links"] = self._build_cross_thread_link_data(entry_id, other_positions, timeline_items)
+                            thread_item["shared_references"] = self._get_shared_references(entry_id, positions_set, timeline_items)
+                            break
+
+    def _build_cross_thread_link_data(self, entry_id: str, other_positions: list[int], timeline_items: list[dict[str, Any]]) -> list[dict[str, Any]]:
+        """Build detailed cross-thread link data with anchor information."""
+        cross_thread_links = []
+
+        for pos in other_positions:
+            item = timeline_items[pos]
+            if item["type"] == "post":
+                # For individual posts
+                safe_id = safe_anchor_id(entry_id)
+                cross_thread_links.append({
+                    "position": pos,
+                    "anchor_id": f"post-{pos}-{safe_id}",
+                    "context": "individual post",
+                    "title": item["content"]["entry"].title
+                })
+            elif item["type"] == "thread":
+                # For thread items, find the specific thread item
+                for thread_idx, thread_item in enumerate(item["content"]):
+                    if thread_item["entry"].id == entry_id:
+                        safe_id = safe_anchor_id(entry_id)
+                        cross_thread_links.append({
+                            "position": pos,
+                            "anchor_id": f"post-{pos}-{thread_idx}-{safe_id}",
+                            "context": f"thread (level {thread_item.get('thread_level', 0)})",
+                            "title": thread_item["entry"].title
+                        })
+                        break
+
+        return cross_thread_links
+
+    def _find_entry_by_id(self, entry_id: str) -> Optional[AtomEntry]:
+        """Find an entry by its ID."""
+        for _username, entry in self.entries:
+            if entry.id == entry_id:
+                return entry
+        return None
+
+    def _get_shared_references(self, entry_id: str, positions: Union[set[int], list[int]], timeline_items: list[dict[str, Any]]) -> list[dict[str, Any]]:
+        """Get information about shared references between cross-thread entries."""
+        shared_refs = []
+
+        # Collect all referenced URLs from entries at these positions
+        url_counts: dict[str, int] = {}
+        referencing_entries: dict[str, list[str]] = {}  # url -> [entry_ids]
+
+        for pos in positions:
+            item = timeline_items[pos]
+            entries_to_check = []
+
+            if item["type"] == "post":
+                entries_to_check.append(item["content"]["entry"])
+            elif item["type"] == "thread":
+                entries_to_check.extend([ti["entry"] for ti in item["content"]])
+
+            for entry in entries_to_check:
+                if entry.id in self.reference_details:
+                    for ref in self.reference_details[entry.id]:
+                        if ref["type"] == "outbound" and "target_id" in ref:
+                            target_entry = self._find_entry_by_id(ref["target_id"])
+                            if target_entry and target_entry.link:
+                                url = str(target_entry.link)
+                                url_counts[url] = url_counts.get(url, 0) + 1
+                                if url not in referencing_entries:
+                                    referencing_entries[url] = []
+                                if entry.id not in referencing_entries[url]:
+                                    referencing_entries[url].append(entry.id)
+
+        # Find URLs referenced by multiple entries
+        for url, count in url_counts.items():
+            if count > 1 and len(referencing_entries[url]) > 1:
+                # Get the target entry info
+                target_entry = None
+                target_username = None
+                for ref in (self.links_data or {}).get("references", []):
+                    if ref.get("target_url") == url:
+                        target_username = ref.get("target_username")
+                        if ref.get("target_entry_id"):
+                            target_entry = self._find_entry_by_id(ref["target_entry_id"])
+                        break
+
+                shared_refs.append({
+                    "url": url,
+                    "count": count,
+                    "referencing_entries": referencing_entries[url],
+                    "target_username": target_username,
+                    "target_title": target_entry.title if target_entry else None
+                })
+
+        return sorted(shared_refs, key=lambda x: x["count"], reverse=True)
+
+    def generate_site(self) -> None:
+        """Generate the static website."""
+        # Create output directory
+        self.output_dir.mkdir(parents=True, exist_ok=True)
+
+        # Create static directories
+        (self.output_dir / "css").mkdir(exist_ok=True)
+        (self.output_dir / "js").mkdir(exist_ok=True)
+
+        # Generate CSS
+        css_template = self.env.get_template("style.css")
+        css_content = css_template.render()
+        with open(self.output_dir / "css" / "style.css", "w") as f:
+            f.write(css_content)
+
+        # Generate JavaScript
+        js_template = self.env.get_template("script.js")
+        js_content = js_template.render()
+        with open(self.output_dir / "js" / "script.js", "w") as f:
+            f.write(js_content)
+
+        # Prepare common template data
+        base_data = {
+            "title": "Energy & Environment Group",
+            "generated_at": datetime.now().isoformat(),
+            "get_display_name": self.get_display_name,
+            "get_user_homepage": self.get_user_homepage,
+            "clean_html_summary": self.clean_html_summary,
+            "safe_anchor_id": safe_anchor_id,
+        }
+
+        # Build unified timeline
+        timeline_items = []
+
+        # Only consider the threads that will actually be displayed
+        displayed_threads = self.threads[:20]  # Limit to 20 threads
+
+        # Track which entries are part of displayed threads
+        threaded_entry_ids = set()
+        for thread in displayed_threads:
+            for item in thread:
+                threaded_entry_ids.add(item["entry_id"])
+
+        # Add threads to timeline (using the date of the most recent post)
+        for thread in displayed_threads:
+            most_recent_date = max(
+                item["entry"].updated or item["entry"].published or datetime.min
+                for item in thread
+            )
+            timeline_items.append({
+                "type": "thread",
+                "date": most_recent_date,
+                "content": thread
+            })
+
+        # Add individual posts (not in threads)
+        for username, entry in self.entries[:50]:
+            if entry.id not in threaded_entry_ids:
+                # Check if this entry has references
+                has_refs = (
+                    entry.id in self.reference_details
+                    if hasattr(self, "reference_details")
+                    else False
+                )
+
+                refs = []
+                if has_refs:
+                    refs = self.reference_details.get(entry.id, [])
+                    refs = [
+                        r for r in refs
+                        if r.get("target_id") != entry.id
+                        and r.get("source_id") != entry.id
+                    ]
+
+                timeline_items.append({
+                    "type": "post",
+                    "date": entry.updated or entry.published or datetime.min,
+                    "content": {
+                        "username": username,
+                        "display_name": self.get_display_name(username),
+                        "entry": entry,
+                        "references": refs if refs else None
+                    }
+                })
+
+        # Sort unified timeline by date (newest first)
+        timeline_items.sort(key=lambda x: x["date"], reverse=True)
+
+        # Limit timeline to what will actually be rendered
+        timeline_items = timeline_items[:50]  # Limit to 50 items total
+
+        # Add cross-thread linking for repeat blog references
+        self._add_cross_thread_links(timeline_items)
+
+        # Prepare outgoing links data
+        outgoing_links = []
+        if self.links_data and "links" in self.links_data:
+            for url, link_info in self.links_data["links"].items():
+                referencing_entries = []
+                for entry_id in link_info.get("referencing_entries", []):
+                    for username, entry in self.entries:
+                        if entry.id == entry_id:
+                            referencing_entries.append(
+                                (self.get_display_name(username), entry)
+                            )
+                            break
+
+                if referencing_entries:
+                    # Sort by date - prioritize updated over published
+                    referencing_entries.sort(
+                        key=lambda x: x[1].updated or x[1].published or datetime.min, reverse=True
+                    )
+                    outgoing_links.append(
+                        {
+                            "url": url,
+                            "target_username": link_info.get("target_username"),
+                            "entries": referencing_entries,
+                        }
+                    )
+
+        # Sort links by most recent reference - prioritize updated over published
+        outgoing_links.sort(
+            key=lambda x: x["entries"][0][1].updated
+            or x["entries"][0][1].published or datetime.min,
+            reverse=True,
+        )
+
+        # Prepare users data
+        users: list[UserData] = []
+        if self.index:
+            for username, user_metadata in self.index.users.items():
+                # Get recent entries for this user with display names
+                user_entries = [
+                    (self.get_display_name(u), e)
+                    for u, e in self.entries
+                    if u == username
+                ][:5]
+                users.append(
+                    {"metadata": user_metadata, "recent_entries": user_entries}
+                )
+            # Sort by entry count
+            users.sort(key=lambda x: x["metadata"].entry_count, reverse=True)
+
+        # Generate timeline page
+        timeline_template = self.env.get_template("timeline.html")
+        timeline_content = timeline_template.render(
+            **base_data,
+            page="timeline",
+            timeline_items=timeline_items,  # Already limited above
+        )
+        with open(self.output_dir / "timeline.html", "w") as f:
+            f.write(timeline_content)
+
+        # Generate links page
+        links_template = self.env.get_template("links.html")
+        links_content = links_template.render(
+            **base_data,
+            page="links",
+            outgoing_links=outgoing_links[:100],
+        )
+        with open(self.output_dir / "links.html", "w") as f:
+            f.write(links_content)
+
+        # Generate users page
+        users_template = self.env.get_template("users.html")
+        users_content = users_template.render(
+            **base_data,
+            page="users",
+            users=users,
+        )
+        with open(self.output_dir / "users.html", "w") as f:
+            f.write(users_content)
+
+        # Generate main index page (redirect to timeline)
+        index_template = self.env.get_template("index.html")
+        index_content = index_template.render(**base_data)
+        with open(self.output_dir / "index.html", "w") as f:
+            f.write(index_content)
+
+        console.print(f"[green]✓[/green] Generated website at {self.output_dir}")
+        console.print(f"  - {len(self.entries)} entries")
+        console.print(f"  - {len(self.threads)} conversation threads")
+        console.print(f"  - {len(outgoing_links)} outgoing links")
+        console.print(f"  - {len(users)} users")
+        console.print(
+            "  - Generated pages: index.html, timeline.html, links.html, users.html"
+        )
+
+
+@app.command()
+def generate(
+    output: Path = typer.Option(
+        Path("./thicket-site"),
+        "--output",
+        "-o",
+        help="Output directory for the generated website",
+    ),
+    force: bool = typer.Option(
+        False, "--force", "-f", help="Overwrite existing output directory"
+    ),
+    config_file: Path = typer.Option(
+        Path("thicket.yaml"), "--config", help="Configuration file path"
+    ),
+) -> None:
+    """Generate a static HTML website from thicket data."""
+    config = load_config(config_file)
+
+    if not config.git_store:
+        console.print("[red]No git store path configured[/red]")
+        raise typer.Exit(1)
+
+    git_store = GitStore(config.git_store)
+
+    # Check if output directory exists
+    if output.exists() and not force:
+        console.print(
+            f"[red]Output directory {output} already exists. Use --force to overwrite.[/red]"
+        )
+        raise typer.Exit(1)
+
+    # Clean output directory if forcing
+    if output.exists() and force:
+        shutil.rmtree(output)
+
+    try:
+        generator = WebsiteGenerator(git_store, output)
+
+        console.print("[bold]Generating static website...[/bold]")
+        generator.load_data()
+        generator.build_threads()
+        generator.generate_site()
+
+    except Exception as e:
+        console.print(f"[red]Error generating website: {e}[/red]")
+        raise typer.Exit(1) from e
+</file>
+
+<file path="src/thicket/templates/base.html">
+<!DOCTYPE html>
+<html lang="en">
+<head>
+    <meta charset="UTF-8">
+    <meta name="viewport" content="width=device-width, initial-scale=1.0">
+    <title>{% block page_title %}{{ title }}{% endblock %}</title>
+    <link rel="stylesheet" href="css/style.css">
+</head>
+<body>
+    <header class="site-header">
+        <div class="header-content">
+            <h1 class="site-title">{{ title }}</h1>
+            <nav class="site-nav">
+                <a href="timeline.html" class="nav-link {% if page == 'timeline' %}active{% endif %}">Timeline</a>
+                <a href="links.html" class="nav-link {% if page == 'links' %}active{% endif %}">Links</a>
+                <a href="users.html" class="nav-link {% if page == 'users' %}active{% endif %}">Users</a>
+            </nav>
+        </div>
+    </header>
+
+    <main class="main-content">
+        {% block content %}{% endblock %}
+    </main>
+
+    <footer class="site-footer">
+        <p>Generated on {{ generated_at }} by <a href="https://github.com/avsm/thicket">Thicket</a></p>
+    </footer>
+
+    <script src="js/script.js"></script>
+</body>
+</html>
+</file>
+
+<file path="src/thicket/templates/index.html">
+<!DOCTYPE html>
+<html lang="en">
+<head>
+    <meta charset="UTF-8">
+    <meta name="viewport" content="width=device-width, initial-scale=1.0">
+    <title>{{ title }}</title>
+    <meta http-equiv="refresh" content="0; url=timeline.html">
+    <link rel="canonical" href="timeline.html">
+</head>
+<body>
+    <p>Redirecting to <a href="timeline.html">Timeline</a>...</p>
+</body>
+</html>
+</file>
+
+<file path="src/thicket/templates/links.html">
+{% extends "base.html" %}
+
+{% block page_title %}Outgoing Links - {{ title }}{% endblock %}
+
+{% block content %}
+<div class="page-content">
+    <h2>Outgoing Links</h2>
+    <p class="page-description">External links referenced in blog posts, ordered by most recent reference.</p>
+
+    {% for link in outgoing_links %}
+    <article class="link-group">
+        <h3 class="link-url">
+            <a href="{{ link.url }}" target="_blank">{{ link.url|truncate(80) }}</a>
+            {% if link.target_username %}
+            <span class="target-user">({{ link.target_username }})</span>
+            {% endif %}
+        </h3>
+        <div class="referencing-entries">
+            <span class="ref-count">Referenced in {{ link.entries|length }} post(s):</span>
+            <ul>
+                {% for display_name, entry in link.entries[:5] %}
+                <li>
+                    <span class="author">{{ display_name }}</span> -
+                    <a href="{{ entry.link }}" target="_blank">{{ entry.title }}</a>
+                    <time datetime="{{ entry.updated or entry.published }}">
+                        ({{ (entry.updated or entry.published).strftime('%Y-%m-%d') }})
+                    </time>
+                </li>
+                {% endfor %}
+                {% if link.entries|length > 5 %}
+                <li class="more">... and {{ link.entries|length - 5 }} more</li>
+                {% endif %}
+            </ul>
+        </div>
+    </article>
+    {% endfor %}
+</div>
+{% endblock %}
+</file>
+
+<file path="src/thicket/templates/script.js">
+// Enhanced functionality for thicket website
+document.addEventListener('DOMContentLoaded', function() {
+
+    // Enhance thread collapsing (optional feature)
+    const threadHeaders = document.querySelectorAll('.thread-header');
+    threadHeaders.forEach(header => {
+        header.style.cursor = 'pointer';
+        header.addEventListener('click', function() {
+            const thread = this.parentElement;
+            const entries = thread.querySelectorAll('.thread-entry');
+
+            // Toggle visibility of all but the first entry
+            for (let i = 1; i < entries.length; i++) {
+                entries[i].style.display = entries[i].style.display === 'none' ? 'block' : 'none';
+            }
+
+            // Update thread count text
+            const count = this.querySelector('.thread-count');
+            if (entries[1] && entries[1].style.display === 'none') {
+                count.textContent = count.textContent.replace('posts', 'posts (collapsed)');
+            } else {
+                count.textContent = count.textContent.replace(' (collapsed)', '');
+            }
+        });
+    });
+
+    // Add relative time display
+    const timeElements = document.querySelectorAll('time');
+    timeElements.forEach(timeEl => {
+        const datetime = new Date(timeEl.getAttribute('datetime'));
+        const now = new Date();
+        const diffMs = now - datetime;
+        const diffDays = Math.floor(diffMs / (1000 * 60 * 60 * 24));
+
+        let relativeTime;
+        if (diffDays === 0) {
+            const diffHours = Math.floor(diffMs / (1000 * 60 * 60));
+            if (diffHours === 0) {
+                const diffMinutes = Math.floor(diffMs / (1000 * 60));
+                relativeTime = diffMinutes === 0 ? 'just now' : `${diffMinutes}m ago`;
+            } else {
+                relativeTime = `${diffHours}h ago`;
+            }
+        } else if (diffDays === 1) {
+            relativeTime = 'yesterday';
+        } else if (diffDays < 7) {
+            relativeTime = `${diffDays}d ago`;
+        } else if (diffDays < 30) {
+            const weeks = Math.floor(diffDays / 7);
+            relativeTime = weeks === 1 ? '1w ago' : `${weeks}w ago`;
+        } else if (diffDays < 365) {
+            const months = Math.floor(diffDays / 30);
+            relativeTime = months === 1 ? '1mo ago' : `${months}mo ago`;
+        } else {
+            const years = Math.floor(diffDays / 365);
+            relativeTime = years === 1 ? '1y ago' : `${years}y ago`;
+        }
+
+        // Add relative time as title attribute
+        timeEl.setAttribute('title', timeEl.textContent);
+        timeEl.textContent = relativeTime;
+    });
+
+    // Enhanced anchor link scrolling for shared references
+    document.querySelectorAll('a[href^="#"]').forEach(anchor => {
+        anchor.addEventListener('click', function (e) {
+            e.preventDefault();
+            const target = document.querySelector(this.getAttribute('href'));
+            if (target) {
+                target.scrollIntoView({
+                    behavior: 'smooth',
+                    block: 'center'
+                });
+
+                // Highlight the target briefly
+                const timelineEntry = target.closest('.timeline-entry');
+                if (timelineEntry) {
+                    timelineEntry.style.outline = '2px solid var(--primary-color)';
+                    timelineEntry.style.borderRadius = '8px';
+                    setTimeout(() => {
+                        timelineEntry.style.outline = '';
+                        timelineEntry.style.borderRadius = '';
+                    }, 2000);
+                }
+            }
+        });
+    });
+});
+</file>
+
+
<file path="src/thicket/templates/style.css">
+
/* Modern, clean design with high-density text and readable theme */
+
+
:root {
+
--primary-color: #2c3e50;
+
--secondary-color: #3498db;
+
--accent-color: #e74c3c;
+
--background: #ffffff;
+
--surface: #f8f9fa;
+
--text-primary: #2c3e50;
+
--text-secondary: #7f8c8d;
+
--border-color: #e0e0e0;
+
--thread-indent: 20px;
+
--max-width: 1200px;
+
}
+
+
* {
+
margin: 0;
+
padding: 0;
+
box-sizing: border-box;
+
}
+
+
body {
+
font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', 'Roboto', 'Helvetica Neue', Arial, sans-serif;
+
font-size: 14px;
+
line-height: 1.6;
+
color: var(--text-primary);
+
background-color: var(--background);
+
}
+
+
/* Header */
+
.site-header {
+
background-color: var(--surface);
+
border-bottom: 1px solid var(--border-color);
+
padding: 0.75rem 0;
+
position: sticky;
+
top: 0;
+
z-index: 100;
+
}
+
+
.header-content {
+
max-width: var(--max-width);
+
margin: 0 auto;
+
padding: 0 2rem;
+
display: flex;
+
justify-content: space-between;
+
align-items: center;
+
}
+
+
.site-title {
+
font-size: 1.5rem;
+
font-weight: 600;
+
color: var(--primary-color);
+
margin: 0;
+
}
+
+
/* Navigation */
+
.site-nav {
+
display: flex;
+
gap: 1.5rem;
+
}
+
+
.nav-link {
+
text-decoration: none;
+
color: var(--text-secondary);
+
font-weight: 500;
+
font-size: 0.95rem;
+
padding: 0.5rem 0.75rem;
+
border-radius: 4px;
+
transition: all 0.2s ease;
+
}
+
+
.nav-link:hover {
+
color: var(--primary-color);
+
background-color: var(--background);
+
}
+
+
.nav-link.active {
+
color: var(--secondary-color);
+
background-color: var(--background);
+
font-weight: 600;
+
}
+
+
/* Main Content */
+
.main-content {
+
max-width: var(--max-width);
+
margin: 2rem auto;
+
padding: 0 2rem;
+
}
+
+
.page-content {
+
margin: 0;
+
}
+
+
.page-description {
+
color: var(--text-secondary);
+
margin-bottom: 1.5rem;
+
font-style: italic;
+
}
+
+
/* Sections */
+
section {
+
margin-bottom: 2rem;
+
}
+
+
h2 {
+
font-size: 1.3rem;
+
font-weight: 600;
+
margin-bottom: 0.75rem;
+
color: var(--primary-color);
+
}
+
+
h3 {
+
font-size: 1.1rem;
+
font-weight: 600;
+
margin-bottom: 0.75rem;
+
color: var(--primary-color);
+
}
+
+
/* Entries and Threads */
+
article {
+
margin-bottom: 1.5rem;
+
padding: 1rem;
+
background-color: var(--surface);
+
border-radius: 4px;
+
border: 1px solid var(--border-color);
+
}
+
+
/* Timeline-style entries */
+
.timeline-entry {
+
margin-bottom: 0.5rem;
+
padding: 0.5rem 0.75rem;
+
border: none;
+
background: transparent;
+
transition: background-color 0.2s ease;
+
}
+
+
.timeline-entry:hover {
+
background-color: var(--surface);
+
}
+
+
.timeline-meta {
+
display: inline-flex;
+
gap: 0.5rem;
+
align-items: center;
+
font-size: 0.75rem;
+
color: var(--text-secondary);
+
margin-bottom: 0.25rem;
+
}
+
+
.timeline-time {
+
font-family: 'SF Mono', Monaco, Consolas, 'Courier New', monospace;
+
font-size: 0.75rem;
+
color: var(--text-secondary);
+
}
+
+
.timeline-author {
+
font-weight: 600;
+
color: var(--primary-color);
+
font-size: 0.8rem;
+
text-decoration: none;
+
}
+
+
.timeline-author:hover {
+
color: var(--secondary-color);
+
text-decoration: underline;
+
}
+
+
.timeline-content {
+
line-height: 1.4;
+
}
+
+
.timeline-title {
+
font-size: 0.95rem;
+
font-weight: 600;
+
}
+
+
.timeline-title a {
+
color: var(--primary-color);
+
text-decoration: none;
+
}
+
+
.timeline-title a:hover {
+
color: var(--secondary-color);
+
text-decoration: underline;
+
}
+
+
.timeline-summary {
+
color: var(--text-secondary);
+
font-size: 0.9rem;
+
line-height: 1.4;
+
}
+
+
/* Legacy styles for other sections */
+
.entry-meta, .thread-header {
+
display: flex;
+
gap: 1rem;
+
align-items: center;
+
margin-bottom: 0.5rem;
+
font-size: 0.85rem;
+
color: var(--text-secondary);
+
}
+
+
.author {
+
font-weight: 600;
+
color: var(--primary-color);
+
}
+
+
time {
+
font-size: 0.85rem;
+
}
+
+
h4 {
+
font-size: 1.1rem;
+
font-weight: 600;
+
margin-bottom: 0.5rem;
+
}
+
+
h4 a {
+
color: var(--primary-color);
+
text-decoration: none;
+
}
+
+
h4 a:hover {
+
color: var(--secondary-color);
+
text-decoration: underline;
+
}
+
+
.entry-summary {
+
color: var(--text-primary);
+
line-height: 1.5;
+
margin-top: 0.5rem;
+
}
+
+
/* Enhanced Threading Styles */
+
+
/* Conversation Clusters */
+
.conversation-cluster {
+
background-color: var(--background);
+
border: 2px solid var(--border-color);
+
border-radius: 8px;
+
margin-bottom: 2rem;
+
overflow: hidden;
+
box-shadow: 0 2px 4px rgba(0, 0, 0, 0.05);
+
}
+
+
.conversation-header {
+
background: linear-gradient(135deg, var(--surface) 0%, #f1f3f4 100%);
+
padding: 0.75rem 1rem;
+
border-bottom: 1px solid var(--border-color);
+
}
+
+
.conversation-meta {
+
display: flex;
+
justify-content: space-between;
+
align-items: center;
+
flex-wrap: wrap;
+
gap: 0.5rem;
+
}
+
+
.conversation-count {
+
font-weight: 600;
+
color: var(--secondary-color);
+
font-size: 0.9rem;
+
}
+
+
.conversation-participants {
+
font-size: 0.8rem;
+
color: var(--text-secondary);
+
flex: 1;
+
text-align: right;
+
}
+
+
.conversation-flow {
+
padding: 0.5rem;
+
}
+
+
/* Threaded Conversation Entries */
+
.conversation-entry {
+
position: relative;
+
margin-bottom: 0.75rem;
+
display: flex;
+
align-items: flex-start;
+
}
+
+
.conversation-entry.level-0 {
+
margin-left: 0;
+
}
+
+
.conversation-entry.level-1 {
+
margin-left: 1.5rem;
+
}
+
+
.conversation-entry.level-2 {
+
margin-left: 3rem;
+
}
+
+
.conversation-entry.level-3 {
+
margin-left: 4.5rem;
+
}
+
+
.conversation-entry.level-4 {
+
margin-left: 6rem;
+
}
+
+
.entry-connector {
+
width: 3px;
+
background-color: var(--secondary-color);
+
margin-right: 0.75rem;
+
margin-top: 0.25rem;
+
min-height: 2rem;
+
border-radius: 2px;
+
opacity: 0.6;
+
}
+
+
.conversation-entry.level-0 .entry-connector {
+
background-color: var(--accent-color);
+
opacity: 0.8;
+
}
+
+
.entry-content {
+
flex: 1;
+
background-color: var(--surface);
+
padding: 0.75rem;
+
border-radius: 6px;
+
border: 1px solid var(--border-color);
+
transition: all 0.2s ease;
+
}
+
+
.entry-content:hover {
+
border-color: var(--secondary-color);
+
box-shadow: 0 2px 8px rgba(52, 152, 219, 0.1);
+
}
+
+
/* Reference Indicators */
+
.reference-indicators {
+
display: inline-flex;
+
gap: 0.25rem;
+
margin-left: 0.5rem;
+
}
+
+
.ref-out, .ref-in {
+
display: inline-block;
+
width: 1rem;
+
height: 1rem;
+
border-radius: 50%;
+
text-align: center;
+
line-height: 1rem;
+
font-size: 0.7rem;
+
font-weight: bold;
+
}
+
+
.ref-out {
+
background-color: #e8f5e8;
+
color: #2d8f2d;
+
}
+
+
.ref-in {
+
background-color: #e8f0ff;
+
color: #1f5fbf;
+
}
+
+
/* Reference Badges for Individual Posts */
+
.timeline-entry.with-references {
+
background-color: var(--surface);
+
}
+
+
/* Conversation posts in unified timeline */
+
.timeline-entry.conversation-post {
+
background: transparent;
+
border: none;
+
margin-bottom: 0.5rem;
+
padding: 0.5rem 0.75rem;
+
}
+
+
.timeline-entry.conversation-post.level-0 {
+
margin-left: 0;
+
border-left: 2px solid var(--accent-color);
+
padding-left: 0.75rem;
+
}
+
+
.timeline-entry.conversation-post.level-1 {
+
margin-left: 1.5rem;
+
border-left: 2px solid var(--secondary-color);
+
padding-left: 0.75rem;
+
}
+
+
.timeline-entry.conversation-post.level-2 {
+
margin-left: 3rem;
+
border-left: 2px solid var(--text-secondary);
+
padding-left: 0.75rem;
+
}
+
+
.timeline-entry.conversation-post.level-3 {
+
margin-left: 4.5rem;
+
border-left: 2px solid var(--text-secondary);
+
padding-left: 0.75rem;
+
}
+
+
.timeline-entry.conversation-post.level-4 {
+
margin-left: 6rem;
+
border-left: 2px solid var(--text-secondary);
+
padding-left: 0.75rem;
+
}
+
+
/* Cross-thread linking */
+
.cross-thread-links {
+
margin-top: 0.5rem;
+
padding-top: 0.5rem;
+
border-top: 1px solid var(--border-color);
+
}
+
+
.cross-thread-indicator {
+
font-size: 0.75rem;
+
color: var(--text-secondary);
+
background-color: var(--surface);
+
padding: 0.25rem 0.5rem;
+
border-radius: 12px;
+
border: 1px solid var(--border-color);
+
display: inline-block;
+
}
+
+
/* Inline shared references styling */
+
.inline-shared-refs {
+
margin-left: 0.5rem;
+
font-size: 0.85rem;
+
color: var(--text-secondary);
+
}
+
+
.shared-ref-link {
+
color: var(--primary-color);
+
text-decoration: none;
+
font-weight: 500;
+
transition: color 0.2s ease;
+
}
+
+
.shared-ref-link:hover {
+
color: var(--secondary-color);
+
text-decoration: underline;
+
}
+
+
.shared-ref-more {
+
font-style: italic;
+
color: var(--text-secondary);
+
font-size: 0.8rem;
+
margin-left: 0.25rem;
+
}
+
+
.user-anchor, .post-anchor {
+
position: absolute;
+
margin-top: -60px; /* Offset for fixed header */
+
pointer-events: none;
+
}
+
+
.cross-thread-link {
+
color: var(--primary-color);
+
text-decoration: none;
+
font-weight: 500;
+
transition: color 0.2s ease;
+
}
+
+
.cross-thread-link:hover {
+
color: var(--secondary-color);
+
text-decoration: underline;
+
}
+
+
.reference-badges {
+
display: flex;
+
gap: 0.25rem;
+
margin-left: 0.5rem;
+
flex-wrap: wrap;
+
}
+
+
.ref-badge {
+
display: inline-block;
+
padding: 0.1rem 0.4rem;
+
border-radius: 12px;
+
font-size: 0.7rem;
+
font-weight: 600;
+
text-transform: uppercase;
+
letter-spacing: 0.05em;
+
}
+
+
.ref-badge.ref-outbound {
+
background-color: #e8f5e8;
+
color: #2d8f2d;
+
border: 1px solid #c3e6c3;
+
}
+
+
.ref-badge.ref-inbound {
+
background-color: #e8f0ff;
+
color: #1f5fbf;
+
border: 1px solid #b3d9ff;
+
}
+
+
/* Author Color Coding */
+
.timeline-author {
+
position: relative;
+
}
+
+
.timeline-author::before {
+
content: '';
+
display: inline-block;
+
width: 8px;
+
height: 8px;
+
border-radius: 50%;
+
margin-right: 0.5rem;
+
background-color: var(--secondary-color);
+
}
+
+
/* Generate consistent colors for authors */
+
.author-avsm::before { background-color: #e74c3c; }
+
.author-mort::before { background-color: #3498db; }
+
.author-mte::before { background-color: #2ecc71; }
+
.author-ryan::before { background-color: #f39c12; }
+
.author-mwd::before { background-color: #9b59b6; }
+
.author-dra::before { background-color: #1abc9c; }
+
.author-pf341::before { background-color: #34495e; }
+
.author-sadiqj::before { background-color: #e67e22; }
+
.author-martinkl::before { background-color: #8e44ad; }
+
.author-jonsterling::before { background-color: #27ae60; }
+
.author-jon::before { background-color: #f1c40f; }
+
.author-onkar::before { background-color: #e91e63; }
+
.author-gabriel::before { background-color: #00bcd4; }
+
.author-jess::before { background-color: #ff5722; }
+
.author-ibrahim::before { background-color: #607d8b; }
+
.author-andres::before { background-color: #795548; }
+
.author-eeg::before { background-color: #ff9800; }
+
+
/* Section Headers */
+
.conversations-section h3,
+
.referenced-posts-section h3,
+
.individual-posts-section h3 {
+
border-bottom: 2px solid var(--border-color);
+
padding-bottom: 0.5rem;
+
margin-bottom: 1.5rem;
+
position: relative;
+
}
+
+
.conversations-section h3::before {
+
content: "๐Ÿ’ฌ";
+
margin-right: 0.5rem;
+
}
+
+
.referenced-posts-section h3::before {
+
content: "๐Ÿ”—";
+
margin-right: 0.5rem;
+
}
+
+
.individual-posts-section h3::before {
+
content: "๐Ÿ“";
+
margin-right: 0.5rem;
+
}
+
+
/* Legacy thread styles (for backward compatibility) */
+
.thread {
+
background-color: var(--background);
+
border: 1px solid var(--border-color);
+
padding: 0;
+
overflow: hidden;
+
margin-bottom: 1rem;
+
}
+
+
.thread-header {
+
background-color: var(--surface);
+
padding: 0.5rem 0.75rem;
+
border-bottom: 1px solid var(--border-color);
+
}
+
+
.thread-count {
+
font-weight: 600;
+
color: var(--secondary-color);
+
}
+
+
.thread-entry {
+
padding: 0.5rem 0.75rem;
+
border-bottom: 1px solid var(--border-color);
+
}
+
+
.thread-entry:last-child {
+
border-bottom: none;
+
}
+
+
.thread-entry.reply {
+
margin-left: var(--thread-indent);
+
border-left: 3px solid var(--secondary-color);
+
background-color: var(--surface);
+
}
+
+
/* Links Section */
+
.link-group {
+
background-color: var(--background);
+
}
+
+
.link-url {
+
font-size: 1rem;
+
word-break: break-word;
+
}
+
+
.link-url a {
+
color: var(--secondary-color);
+
text-decoration: none;
+
}
+
+
.link-url a:hover {
+
text-decoration: underline;
+
}
+
+
.target-user {
+
font-size: 0.9rem;
+
color: var(--text-secondary);
+
font-weight: normal;
+
}
+
+
.referencing-entries {
+
margin-top: 0.75rem;
+
}
+
+
.ref-count {
+
font-weight: 600;
+
color: var(--text-secondary);
+
font-size: 0.9rem;
+
}
+
+
.referencing-entries ul {
+
list-style: none;
+
margin-top: 0.5rem;
+
padding-left: 1rem;
+
}
+
+
.referencing-entries li {
+
margin-bottom: 0.25rem;
+
font-size: 0.9rem;
+
}
+
+
.referencing-entries .more {
+
font-style: italic;
+
color: var(--text-secondary);
+
}
+
+
/* Users Section */
+
.user-card {
+
background-color: var(--background);
+
}
+
+
.user-header {
+
display: flex;
+
gap: 1rem;
+
align-items: start;
+
margin-bottom: 1rem;
+
}
+
+
.user-icon {
+
width: 48px;
+
height: 48px;
+
border-radius: 50%;
+
object-fit: cover;
+
}
+
+
.user-info h3 {
+
margin-bottom: 0.25rem;
+
}
+
+
.username {
+
font-size: 0.9rem;
+
color: var(--text-secondary);
+
font-weight: normal;
+
}
+
+
.user-meta {
+
font-size: 0.9rem;
+
color: var(--text-secondary);
+
}
+
+
.user-meta a {
+
color: var(--secondary-color);
+
text-decoration: none;
+
}
+
+
.user-meta a:hover {
+
text-decoration: underline;
+
}
+
+
.separator {
+
margin: 0 0.5rem;
+
}
+
+
.post-count {
+
font-weight: 600;
+
}
+
+
.user-recent h4 {
+
font-size: 0.95rem;
+
margin-bottom: 0.5rem;
+
color: var(--text-secondary);
+
}
+
+
.user-recent ul {
+
list-style: none;
+
padding-left: 0;
+
}
+
+
.user-recent li {
+
margin-bottom: 0.25rem;
+
font-size: 0.9rem;
+
}
+
+
/* Footer */
+
.site-footer {
+
max-width: var(--max-width);
+
margin: 3rem auto 2rem;
+
padding: 1rem 2rem;
+
text-align: center;
+
color: var(--text-secondary);
+
font-size: 0.85rem;
+
border-top: 1px solid var(--border-color);
+
}
+
+
.site-footer a {
+
color: var(--secondary-color);
+
text-decoration: none;
+
}
+
+
.site-footer a:hover {
+
text-decoration: underline;
+
}
+
+
/* Responsive */
+
@media (max-width: 768px) {
+
.site-title {
+
font-size: 1.3rem;
+
}
+
+
.header-content {
+
flex-direction: column;
+
gap: 0.75rem;
+
align-items: flex-start;
+
}
+
+
.site-nav {
+
gap: 1rem;
+
}
+
+
.main-content {
+
padding: 0 1rem;
+
}
+
+
.thread-entry.reply {
+
margin-left: calc(var(--thread-indent) / 2);
+
}
+
+
.user-header {
+
flex-direction: column;
+
}
+
}
+
</file>
+
+
<file path="src/thicket/templates/timeline.html">
+
{% extends "base.html" %}
+
+
{% block page_title %}Timeline - {{ title }}{% endblock %}
+
+
{% block content %}
+
{% set seen_users = [] %}
+
<div class="page-content">
+
<h2>Recent Posts & Conversations</h2>
+
+
<section class="unified-timeline">
+
{% for item in timeline_items %}
+
{% if item.type == "post" %}
+
<!-- Individual Post -->
+
<article class="timeline-entry {% if item.content.references %}with-references{% endif %}">
+
<div class="timeline-meta">
+
<time datetime="{{ item.content.entry.updated or item.content.entry.published }}" class="timeline-time">
+
{{ (item.content.entry.updated or item.content.entry.published).strftime('%Y-%m-%d %H:%M') }}
+
</time>
+
{% set homepage = get_user_homepage(item.content.username) %}
+
{% if item.content.username not in seen_users %}
+
<a id="{{ item.content.username }}" class="user-anchor"></a>
+
{% set _ = seen_users.append(item.content.username) %}
+
{% endif %}
+
<a id="post-{{ loop.index0 }}-{{ safe_anchor_id(item.content.entry.id) }}" class="post-anchor"></a>
+
{% if homepage %}
+
<a href="{{ homepage }}" target="_blank" class="timeline-author">{{ item.content.display_name }}</a>
+
{% else %}
+
<span class="timeline-author">{{ item.content.display_name }}</span>
+
{% endif %}
+
{% if item.content.references %}
+
<div class="reference-badges">
+
{% for ref in item.content.references %}
+
{% if ref.type == 'outbound' %}
+
<span class="ref-badge ref-outbound" title="References {{ ref.target_username or 'external post' }}">
+
→ {{ ref.target_username or 'ext' }}
+
</span>
+
{% elif ref.type == 'inbound' %}
+
<span class="ref-badge ref-inbound" title="Referenced by {{ ref.source_username or 'external post' }}">
+
โ† {{ ref.source_username or 'ext' }}
+
</span>
+
{% endif %}
+
{% endfor %}
+
</div>
+
{% endif %}
+
</div>
+
<div class="timeline-content">
+
<strong class="timeline-title">
+
<a href="{{ item.content.entry.link }}" target="_blank">{{ item.content.entry.title }}</a>
+
</strong>
+
{% if item.content.entry.summary %}
+
<span class="timeline-summary">โ€” {{ clean_html_summary(item.content.entry.summary, 250) }}</span>
+
{% endif %}
+
{% if item.content.shared_references %}
+
<span class="inline-shared-refs">
+
{% for ref in item.content.shared_references[:3] %}
+
{% if ref.target_username %}
+
<a href="#{{ ref.target_username }}" class="shared-ref-link" title="Referenced by {{ ref.count }} entries">@{{ ref.target_username }}</a>{% if not loop.last %}, {% endif %}
+
{% endif %}
+
{% endfor %}
+
{% if item.content.shared_references|length > 3 %}
+
<span class="shared-ref-more">+{{ item.content.shared_references|length - 3 }} more</span>
+
{% endif %}
+
</span>
+
{% endif %}
+
{% if item.content.cross_thread_links %}
+
<div class="cross-thread-links">
+
<span class="cross-thread-indicator">๐Ÿ”— Also appears: </span>
+
{% for link in item.content.cross_thread_links %}
+
<a href="#{{ link.anchor_id }}" class="cross-thread-link" title="{{ link.title }}">{{ link.context }}</a>{% if not loop.last %}, {% endif %}
+
{% endfor %}
+
</div>
+
{% endif %}
+
</div>
+
</article>
+
+
{% elif item.type == "thread" %}
+
<!-- Conversation Thread -->
+
{% set outer_loop_index = loop.index0 %}
+
{% for thread_item in item.content %}
+
<article class="timeline-entry conversation-post level-{{ thread_item.thread_level }}">
+
<div class="timeline-meta">
+
<time datetime="{{ thread_item.entry.updated or thread_item.entry.published }}" class="timeline-time">
+
{{ (thread_item.entry.updated or thread_item.entry.published).strftime('%Y-%m-%d %H:%M') }}
+
</time>
+
{% set homepage = get_user_homepage(thread_item.username) %}
+
{% if thread_item.username not in seen_users %}
+
<a id="{{ thread_item.username }}" class="user-anchor"></a>
+
{% set _ = seen_users.append(thread_item.username) %}
+
{% endif %}
+
<a id="post-{{ outer_loop_index }}-{{ loop.index0 }}-{{ safe_anchor_id(thread_item.entry.id) }}" class="post-anchor"></a>
+
{% if homepage %}
+
<a href="{{ homepage }}" target="_blank" class="timeline-author author-{{ thread_item.username }}">{{ thread_item.display_name }}</a>
+
{% else %}
+
<span class="timeline-author author-{{ thread_item.username }}">{{ thread_item.display_name }}</span>
+
{% endif %}
+
{% if thread_item.references_to or thread_item.referenced_by %}
+
<span class="reference-indicators">
+
{% if thread_item.references_to %}
+
<span class="ref-out" title="References other posts">โ†’</span>
+
{% endif %}
+
{% if thread_item.referenced_by %}
+
<span class="ref-in" title="Referenced by other posts">โ†</span>
+
{% endif %}
+
</span>
+
{% endif %}
+
</div>
+
<div class="timeline-content">
+
<strong class="timeline-title">
+
<a href="{{ thread_item.entry.link }}" target="_blank">{{ thread_item.entry.title }}</a>
+
</strong>
+
{% if thread_item.entry.summary %}
+
<span class="timeline-summary">โ€” {{ clean_html_summary(thread_item.entry.summary, 300) }}</span>
+
{% endif %}
+
{% if thread_item.shared_references %}
+
<span class="inline-shared-refs">
+
{% for ref in thread_item.shared_references[:3] %}
+
{% if ref.target_username %}
+
<a href="#{{ ref.target_username }}" class="shared-ref-link" title="Referenced by {{ ref.count }} entries">@{{ ref.target_username }}</a>{% if not loop.last %}, {% endif %}
+
{% endif %}
+
{% endfor %}
+
{% if thread_item.shared_references|length > 3 %}
+
<span class="shared-ref-more">+{{ thread_item.shared_references|length - 3 }} more</span>
+
{% endif %}
+
</span>
+
{% endif %}
+
{% if thread_item.cross_thread_links %}
+
<div class="cross-thread-links">
+
<span class="cross-thread-indicator">๐Ÿ”— Also appears: </span>
+
{% for link in thread_item.cross_thread_links %}
+
<a href="#{{ link.anchor_id }}" class="cross-thread-link" title="{{ link.title }}">{{ link.context }}</a>{% if not loop.last %}, {% endif %}
+
{% endfor %}
+
</div>
+
{% endif %}
+
</div>
+
</article>
+
{% endfor %}
+
{% endif %}
+
{% endfor %}
+
</section>
+
</div>
+
{% endblock %}
+
</file>
+
+
<file path="src/thicket/templates/users.html">
+
{% extends "base.html" %}
+
+
{% block page_title %}Users - {{ title }}{% endblock %}
+
+
{% block content %}
+
<div class="page-content">
+
<h2>Users</h2>
+
<p class="page-description">All users contributing to this thicket, ordered by post count.</p>
+
+
{% for user_info in users %}
+
<article class="user-card">
+
<div class="user-header">
+
{% if user_info.metadata.icon and user_info.metadata.icon != "None" %}
+
<img src="{{ user_info.metadata.icon }}" alt="{{ user_info.metadata.username }}" class="user-icon">
+
{% endif %}
+
<div class="user-info">
+
<h3>
+
{% if user_info.metadata.display_name %}
+
{{ user_info.metadata.display_name }}
+
<span class="username">({{ user_info.metadata.username }})</span>
+
{% else %}
+
{{ user_info.metadata.username }}
+
{% endif %}
+
</h3>
+
<div class="user-meta">
+
{% if user_info.metadata.homepage %}
+
<a href="{{ user_info.metadata.homepage }}" target="_blank">{{ user_info.metadata.homepage }}</a>
+
{% endif %}
+
{% if user_info.metadata.email %}
+
<span class="separator">โ€ข</span>
+
<a href="mailto:{{ user_info.metadata.email }}">{{ user_info.metadata.email }}</a>
+
{% endif %}
+
<span class="separator">โ€ข</span>
+
<span class="post-count">{{ user_info.metadata.entry_count }} posts</span>
+
</div>
+
</div>
+
</div>
+
+
{% if user_info.recent_entries %}
+
<div class="user-recent">
+
<h4>Recent posts:</h4>
+
<ul>
+
{% for display_name, entry in user_info.recent_entries %}
+
<li>
+
<a href="{{ entry.link }}" target="_blank">{{ entry.title }}</a>
+
<time datetime="{{ entry.updated or entry.published }}">
+
({{ (entry.updated or entry.published).strftime('%Y-%m-%d') }})
+
</time>
+
</li>
+
{% endfor %}
+
</ul>
+
</div>
+
{% endif %}
+
</article>
+
{% endfor %}
+
</div>
+
{% endblock %}
+
</file>
+
+
<file path="README.md">
+
# Thicket
+
+
A modern CLI tool for persisting Atom/RSS feeds in Git repositories, designed to enable distributed weblog comment structures.
+
+
## Features
+
+
- **Feed Auto-Discovery**: Automatically extracts user metadata from Atom/RSS feeds
+
- **Git Storage**: Stores feed entries in a Git repository with full history
+
- **Duplicate Management**: Manual curation of duplicate entries across feeds
+
- **Modern CLI**: Built with Typer and Rich for beautiful terminal output
+
- **Comprehensive Parsing**: Supports RSS 0.9x, RSS 1.0, RSS 2.0, and Atom feeds
+
- **Cron-Friendly**: Designed for scheduled execution
+
+
## Installation
+
+
```bash
+
# Install from source
+
pip install -e .
+
+
# Or install with dev dependencies
+
pip install -e ".[dev]"
+
```
+
+
## Quick Start
+
+
1. **Initialize a new thicket repository:**
+
```bash
+
thicket init ./my-feeds
+
```
+
+
2. **Add a user with their feed:**
+
```bash
+
thicket add user "alice" --feed "https://alice.example.com/feed.xml"
+
```
+
+
3. **Sync feeds to download entries:**
+
```bash
+
thicket sync --all
+
```
+
+
4. **List users and feeds:**
+
```bash
+
thicket list users
+
thicket list feeds
+
thicket list entries
+
```
+
+
## Commands
+
+
### Initialize
+
```bash
+
thicket init <git-store-path> [--cache-dir <path>] [--config <config-file>]
+
```
+
+
### Add Users and Feeds
+
```bash
+
# Add user with auto-discovery
+
thicket add user "username" --feed "https://example.com/feed.xml"
+
+
# Add user with manual metadata
+
thicket add user "username" \
+
--feed "https://example.com/feed.xml" \
+
--email "user@example.com" \
+
--homepage "https://example.com" \
+
--display-name "User Name"
+
+
# Add additional feed to existing user
+
thicket add feed "username" "https://example.com/other-feed.xml"
+
```
+
+
### Sync Feeds
+
```bash
+
# Sync all users
+
thicket sync --all
+
+
# Sync specific user
+
thicket sync --user "username"
+
+
# Dry run (preview changes)
+
thicket sync --all --dry-run
+
```
+
+
### List Information
+
```bash
+
# List all users
+
thicket list users
+
+
# List all feeds
+
thicket list feeds
+
+
# List feeds for specific user
+
thicket list feeds --user "username"
+
+
# List recent entries
+
thicket list entries --limit 20
+
+
# List entries for specific user
+
thicket list entries --user "username"
+
```
+
+
### Manage Duplicates
+
```bash
+
# List duplicate mappings
+
thicket duplicates list
+
+
# Mark entries as duplicates
+
thicket duplicates add "https://example.com/dup" "https://example.com/canonical"
+
+
# Remove duplicate mapping
+
thicket duplicates remove "https://example.com/dup"
+
```
+
+
## Configuration
+
+
Thicket uses a YAML configuration file (default: `thicket.yaml`):
+
+
```yaml
+
git_store: ./feeds-repo
+
cache_dir: ~/.cache/thicket
+
users:
+
- username: alice
+
feeds:
+
- https://alice.example.com/feed.xml
+
email: alice@example.com
+
homepage: https://alice.example.com
+
display_name: Alice
+
```
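
A minimal sketch of loading this file programmatically, assuming the `load_config` helper from `thicket.cli.utils` that the CLI commands use:

```python
from pathlib import Path

from thicket.cli.utils import load_config

# Parse and validate thicket.yaml into a ThicketConfig
config = load_config(Path("thicket.yaml"))
print(config.git_store)   # ./feeds-repo
print(config.cache_dir)   # ~/.cache/thicket
for user in config.users:
    print(user.username, [str(feed) for feed in user.feeds])
```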
+
+
## Git Repository Structure
+
+
```
+
feeds-repo/
+
├── index.json          # User directory index
+
├── duplicates.json     # Duplicate entry mappings
+
├── alice/
+
│   ├── metadata.json   # User metadata
+
│   ├── entry_id_1.json # Feed entries
+
│   └── entry_id_2.json
+
└── bob/
+
    └── ...
+
```
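
Each `entry_id_*.json` file holds one normalized Atom entry. A hedged sketch of reading one back; the field names mirror the `AtomEntry` model used by the feed parser, and the path is illustrative:

```python
import json
from pathlib import Path

# Filenames are sanitized entry IDs (see sanitize_entry_id in the feed parser)
entry = json.loads(Path("feeds-repo/alice/entry_id_1.json").read_text())

# Fields mirror AtomEntry: id, title, link, updated, published, summary,
# content, content_type, author, categories, rights, source
print(entry["title"], entry["link"])
print(entry.get("published") or entry["updated"])
```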
+
+
## Development
+
+
### Setup
+
```bash
+
# Install in development mode
+
pip install -e ".[dev]"
+
+
# Run tests
+
pytest
+
+
# Run linting
+
ruff check src/
+
black --check src/
+
+
# Run type checking
+
mypy src/
+
```
+
+
### Architecture
+
+
- **CLI**: Modern interface with Typer and Rich
+
- **Feed Processing**: Universal parsing with feedparser
+
- **Git Storage**: Structured storage with GitPython
+
- **Data Models**: Pydantic for validation and serialization
+
- **Async HTTP**: httpx for efficient feed fetching
+
+
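These pieces compose into a small pipeline. A minimal sketch using the `FeedParser` API shown later in this document (the feed URL is illustrative):

```python
import asyncio

from pydantic import HttpUrl

from thicket.core.feed_parser import FeedParser

async def main() -> None:
    parser = FeedParser()
    url = HttpUrl("https://alice.example.com/feed.xml")
    raw = await parser.fetch_feed(url)                  # async HTTP via httpx
    metadata, entries = parser.parse_feed(raw, source_url=url)
    print(metadata.title, f"{len(entries)} entries")

asyncio.run(main())
```
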
## Use Cases
+
+
- **Blog Aggregation**: Collect and archive blog posts from multiple sources
+
- **Comment Networks**: Enable distributed commenting systems
+
- **Feed Archival**: Preserve feed history beyond typical feed depth limits
+
- **Content Curation**: Manage and deduplicate content across feeds
+
+
## License
+
+
MIT License - see LICENSE file for details.
+
</file>
+
+
<file path="src/thicket/cli/commands/index_cmd.py">
+
"""CLI command for building reference index from blog entries."""
+
+
import json
+
from pathlib import Path
+
from typing import Optional
+
+
import typer
+
from rich.console import Console
+
from rich.progress import (
+
BarColumn,
+
Progress,
+
SpinnerColumn,
+
TaskProgressColumn,
+
TextColumn,
+
)
+
from rich.table import Table
+
+
from ...core.git_store import GitStore
+
from ...core.reference_parser import ReferenceIndex, ReferenceParser
+
from ..main import app
+
from ..utils import get_tsv_mode, load_config
+
+
console = Console()
+
+
+
@app.command()
+
def index(
+
config_file: Optional[Path] = typer.Option(
+
None,
+
"--config",
+
"-c",
+
help="Path to configuration file",
+
),
+
output_file: Optional[Path] = typer.Option(
+
None,
+
"--output",
+
"-o",
+
help="Path to output index file (default: updates links.json in git store)",
+
),
+
verbose: bool = typer.Option(
+
False,
+
"--verbose",
+
"-v",
+
help="Show detailed progress information",
+
),
+
) -> None:
+
"""Build a reference index showing which blog entries reference others.
+
+
This command analyzes all blog entries to detect cross-references between
+
different blogs, creating an index that can be used to build threaded
+
views of related content.
+
+
Updates the unified links.json file with reference data.
+
"""
+
try:
+
# Load configuration
+
config = load_config(config_file)
+
+
# Initialize Git store
+
git_store = GitStore(config.git_store)
+
+
# Initialize reference parser
+
parser = ReferenceParser()
+
+
# Build user domain mapping
+
if verbose:
+
console.print("Building user domain mapping...")
+
user_domains = parser.build_user_domain_mapping(git_store)
+
+
if verbose:
+
console.print(f"Found {len(user_domains)} users with {sum(len(d) for d in user_domains.values())} total domains")
+
+
# Initialize reference index
+
ref_index = ReferenceIndex()
+
ref_index.user_domains = user_domains
+
+
# Get all users
+
index = git_store._load_index()
+
users = list(index.users.keys())
+
+
if not users:
+
console.print("[yellow]No users found in Git store[/yellow]")
+
raise typer.Exit(0)
+
+
# Process all entries
+
total_entries = 0
+
total_references = 0
+
all_references = []
+
+
with Progress(
+
SpinnerColumn(),
+
TextColumn("[progress.description]{task.description}"),
+
BarColumn(),
+
TaskProgressColumn(),
+
console=console,
+
) as progress:
+
+
# Count total entries first
+
counting_task = progress.add_task("Counting entries...", total=len(users))
+
entry_counts = {}
+
for username in users:
+
entries = git_store.list_entries(username)
+
entry_counts[username] = len(entries)
+
total_entries += len(entries)
+
progress.advance(counting_task)
+
+
progress.remove_task(counting_task)
+
+
# Process entries - extract references
+
processing_task = progress.add_task(
+
f"Extracting references from {total_entries} entries...",
+
total=total_entries
+
)
+
+
for username in users:
+
entries = git_store.list_entries(username)
+
+
for entry in entries:
+
# Extract references from this entry
+
references = parser.extract_references(entry, username, user_domains)
+
all_references.extend(references)
+
+
progress.advance(processing_task)
+
+
if verbose and references:
+
console.print(f" Found {len(references)} references in {username}:{entry.title[:50]}...")
+
+
progress.remove_task(processing_task)
+
+
# Resolve target_entry_ids for references
+
if all_references:
+
resolve_task = progress.add_task(
+
f"Resolving {len(all_references)} references...",
+
total=len(all_references)
+
)
+
+
if verbose:
+
console.print(f"Resolving target entry IDs for {len(all_references)} references...")
+
+
resolved_references = parser.resolve_target_entry_ids(all_references, git_store)
+
+
# Count resolved references
+
resolved_count = sum(1 for ref in resolved_references if ref.target_entry_id is not None)
+
if verbose:
+
console.print(f"Resolved {resolved_count} out of {len(all_references)} references")
+
+
# Add resolved references to index
+
for ref in resolved_references:
+
ref_index.add_reference(ref)
+
total_references += 1
+
progress.advance(resolve_task)
+
+
progress.remove_task(resolve_task)
+
+
# Determine output path
+
if output_file:
+
output_path = output_file
+
else:
+
output_path = config.git_store / "links.json"
+
+
# Load existing links data or create new structure
+
if output_path.exists() and not output_file:
+
# Load existing unified structure
+
with open(output_path) as f:
+
existing_data = json.load(f)
+
else:
+
# Create new structure
+
existing_data = {
+
"links": {},
+
"reverse_mapping": {},
+
"user_domains": {}
+
}
+
+
# Update with reference data
+
existing_data["references"] = ref_index.to_dict()["references"]
+
existing_data["user_domains"] = {k: list(v) for k, v in user_domains.items()}
+
+
# Save updated structure
+
with open(output_path, "w") as f:
+
json.dump(existing_data, f, indent=2, default=str)
+
+
# Show summary
+
if not get_tsv_mode():
+
console.print("\n[green]โœ“ Reference index built successfully[/green]")
+
+
# Create summary table or TSV output
+
if get_tsv_mode():
+
print("Metric\tCount")
+
print(f"Total Users\t{len(users)}")
+
print(f"Total Entries\t{total_entries}")
+
print(f"Total References\t{total_references}")
+
print(f"Outbound Refs\t{len(ref_index.outbound_refs)}")
+
print(f"Inbound Refs\t{len(ref_index.inbound_refs)}")
+
print(f"Output File\t{output_path}")
+
else:
+
table = Table(title="Reference Index Summary")
+
table.add_column("Metric", style="cyan")
+
table.add_column("Count", style="green")
+
+
table.add_row("Total Users", str(len(users)))
+
table.add_row("Total Entries", str(total_entries))
+
table.add_row("Total References", str(total_references))
+
table.add_row("Outbound Refs", str(len(ref_index.outbound_refs)))
+
table.add_row("Inbound Refs", str(len(ref_index.inbound_refs)))
+
table.add_row("Output File", str(output_path))
+
+
console.print(table)
+
+
# Show some interesting statistics
+
if total_references > 0:
+
if not get_tsv_mode():
+
console.print("\n[bold]Reference Statistics:[/bold]")
+
+
# Most referenced users
+
target_counts = {}
+
unresolved_domains = set()
+
+
for ref in ref_index.references:
+
if ref.target_username:
+
target_counts[ref.target_username] = target_counts.get(ref.target_username, 0) + 1
+
else:
+
# Track unresolved domains
+
from urllib.parse import urlparse
+
domain = urlparse(ref.target_url).netloc.lower()
+
unresolved_domains.add(domain)
+
+
if target_counts:
+
if get_tsv_mode():
+
print("Referenced User\tReference Count")
+
for username, count in sorted(target_counts.items(), key=lambda x: x[1], reverse=True)[:5]:
+
print(f"{username}\t{count}")
+
else:
+
console.print("\nMost referenced users:")
+
for username, count in sorted(target_counts.items(), key=lambda x: x[1], reverse=True)[:5]:
+
console.print(f" {username}: {count} references")
+
+
if unresolved_domains and verbose:
+
if get_tsv_mode():
+
print("Unresolved Domain\tCount")
+
for domain in sorted(list(unresolved_domains)[:10]):
+
print(f"{domain}\t1")
+
if len(unresolved_domains) > 10:
+
print(f"... and {len(unresolved_domains) - 10} more\t...")
+
else:
+
console.print(f"\nUnresolved domains: {len(unresolved_domains)}")
+
for domain in sorted(list(unresolved_domains)[:10]):
+
console.print(f" {domain}")
+
if len(unresolved_domains) > 10:
+
console.print(f" ... and {len(unresolved_domains) - 10} more")
+
+
except Exception as e:
+
console.print(f"[red]Error building reference index: {e}[/red]")
+
if verbose:
+
console.print_exception()
+
raise typer.Exit(1)
+
+
+
@app.command()
+
def threads(
+
config_file: Optional[Path] = typer.Option(
+
None,
+
"--config",
+
"-c",
+
help="Path to configuration file",
+
),
+
index_file: Optional[Path] = typer.Option(
+
None,
+
"--index",
+
"-i",
+
help="Path to reference index file (default: links.json in git store)",
+
),
+
username: Optional[str] = typer.Option(
+
None,
+
"--username",
+
"-u",
+
help="Show threads for specific username only",
+
),
+
entry_id: Optional[str] = typer.Option(
+
None,
+
"--entry",
+
"-e",
+
help="Show thread for specific entry ID",
+
),
+
min_size: int = typer.Option(
+
2,
+
"--min-size",
+
"-m",
+
help="Minimum thread size to display",
+
),
+
) -> None:
+
"""Show threaded view of related blog entries.
+
+
This command uses the reference index to show which blog entries
+
are connected through cross-references, creating an email-style
+
threaded view of the conversation.
+
+
Reads reference data from the unified links.json file.
+
"""
+
try:
+
# Load configuration
+
config = load_config(config_file)
+
+
# Determine index file path
+
if index_file:
+
index_path = index_file
+
else:
+
index_path = config.git_store / "links.json"
+
+
if not index_path.exists():
+
console.print(f"[red]Links file not found: {index_path}[/red]")
+
console.print("Run 'thicket links' and 'thicket index' first to build the reference index")
+
raise typer.Exit(1)
+
+
# Load unified data
+
with open(index_path) as f:
+
unified_data = json.load(f)
+
+
# Check if references exist in the unified structure
+
if "references" not in unified_data:
+
console.print(f"[red]No references found in {index_path}[/red]")
+
console.print("Run 'thicket index' first to build the reference index")
+
raise typer.Exit(1)
+
+
# Extract reference data and reconstruct ReferenceIndex
+
ref_index = ReferenceIndex.from_dict({
+
"references": unified_data["references"],
+
"user_domains": unified_data.get("user_domains", {})
+
})
+
+
# Initialize Git store to get entry details
+
git_store = GitStore(config.git_store)
+
+
if entry_id and username:
+
# Show specific thread
+
thread_members = ref_index.get_thread_members(username, entry_id)
+
_display_thread(thread_members, ref_index, git_store, f"Thread for {username}:{entry_id}")
+
+
elif username:
+
# Show all threads involving this user
+
user_index = git_store._load_index()
+
user = user_index.get_user(username)
+
if not user:
+
console.print(f"[red]User not found: {username}[/red]")
+
raise typer.Exit(1)
+
+
entries = git_store.list_entries(username)
+
threads_found = set()
+
+
console.print(f"[bold]Threads involving {username}:[/bold]\n")
+
+
for entry in entries:
+
thread_members = ref_index.get_thread_members(username, entry.id)
+
if len(thread_members) >= min_size:
+
thread_key = tuple(sorted(thread_members))
+
if thread_key not in threads_found:
+
threads_found.add(thread_key)
+
_display_thread(thread_members, ref_index, git_store, f"Thread #{len(threads_found)}")
+
+
else:
+
# Show all threads
+
console.print("[bold]All conversation threads:[/bold]\n")
+
+
all_threads = set()
+
processed_entries = set()
+
+
# Get all entries
+
user_index = git_store._load_index()
+
for username in user_index.users.keys():
+
entries = git_store.list_entries(username)
+
for entry in entries:
+
entry_key = (username, entry.id)
+
if entry_key in processed_entries:
+
continue
+
+
thread_members = ref_index.get_thread_members(username, entry.id)
+
if len(thread_members) >= min_size:
+
thread_key = tuple(sorted(thread_members))
+
if thread_key not in all_threads:
+
all_threads.add(thread_key)
+
_display_thread(thread_members, ref_index, git_store, f"Thread #{len(all_threads)}")
+
+
# Mark all members as processed
+
for member in thread_members:
+
processed_entries.add(member)
+
+
if not all_threads:
+
console.print("[yellow]No conversation threads found[/yellow]")
+
console.print(f"(minimum thread size: {min_size})")
+
+
except Exception as e:
+
console.print(f"[red]Error showing threads: {e}[/red]")
+
raise typer.Exit(1)
+
+
+
def _display_thread(thread_members, ref_index, git_store, title):
+
"""Display a single conversation thread."""
+
console.print(f"[bold cyan]{title}[/bold cyan]")
+
console.print(f"Thread size: {len(thread_members)} entries")
+
+
# Get entry details for each member
+
thread_entries = []
+
for username, entry_id in thread_members:
+
entry = git_store.get_entry(username, entry_id)
+
if entry:
+
thread_entries.append((username, entry))
+
+
# Sort by publication date
+
thread_entries.sort(key=lambda x: x[1].published or x[1].updated)
+
+
# Display entries
+
for i, (username, entry) in enumerate(thread_entries):
+
prefix = "โ”œโ”€" if i < len(thread_entries) - 1 else "โ””โ”€"
+
+
# Get references for this entry
+
outbound = ref_index.get_outbound_refs(username, entry.id)
+
inbound = ref_index.get_inbound_refs(username, entry.id)
+
+
ref_info = ""
+
if outbound or inbound:
+
ref_info = f" ({len(outbound)} out, {len(inbound)} in)"
+
+
console.print(f" {prefix} [{username}] {entry.title[:60]}...{ref_info}")
+
+
if entry.published:
+
console.print(f" Published: {entry.published.strftime('%Y-%m-%d')}")
+
+
console.print() # Empty line after each thread
+
</file>
+
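For orientation: a hedged sketch of reading back the unified `links.json` that `thicket index` writes. The top-level keys come from the code above; each serialized reference follows `BlogReference.to_dict`, and the path is illustrative.

```python
import json
from pathlib import Path

# Illustrative path; the index command writes links.json into the git store
data = json.loads(Path("feeds-repo/links.json").read_text())

# Keys combined by `thicket index`
print(sorted(data))  # e.g. ['links', 'references', 'reverse_mapping', 'user_domains']

# Each reference records source_entry_id, source_username and target_url,
# plus target_username/target_entry_id when the URL could be resolved
print(json.dumps(data["references"], indent=2)[:500])
```
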
+
<file path="src/thicket/cli/commands/info_cmd.py">
+
"""CLI command for displaying detailed information about a specific atom entry."""
+
+
import json
+
from pathlib import Path
+
from typing import Optional
+
+
import typer
+
from rich.console import Console
+
from rich.panel import Panel
+
from rich.table import Table
+
+
+
from ...core.git_store import GitStore
+
from ...core.reference_parser import ReferenceIndex
+
from ..main import app
+
from ..utils import load_config, get_tsv_mode
+
+
console = Console()
+
+
+
@app.command()
+
def info(
+
identifier: str = typer.Argument(
+
...,
+
help="The atom ID or URL of the entry to display information about"
+
),
+
username: Optional[str] = typer.Option(
+
None,
+
"--username",
+
"-u",
+
help="Username to search for the entry (if not provided, searches all users)"
+
),
+
config_file: Optional[Path] = typer.Option(
+
Path("thicket.yaml"),
+
"--config",
+
"-c",
+
help="Path to configuration file",
+
),
+
show_content: bool = typer.Option(
+
False,
+
"--content",
+
help="Include the full content of the entry in the output"
+
),
+
) -> None:
+
"""Display detailed information about a specific atom entry.
+
+
You can specify the entry using either its atom ID or URL.
+
Shows all metadata for the given entry, including title, dates, categories,
+
and summarizes all inbound and outbound links to/from other posts.
+
"""
+
try:
+
# Load configuration
+
config = load_config(config_file)
+
+
# Initialize Git store
+
git_store = GitStore(config.git_store)
+
+
# Find the entry
+
entry = None
+
found_username = None
+
+
# Check if identifier looks like a URL
+
is_url = identifier.startswith(('http://', 'https://'))
+
+
if username:
+
# Search specific username
+
if is_url:
+
# Search by URL
+
entries = git_store.list_entries(username)
+
for e in entries:
+
if str(e.link) == identifier:
+
entry = e
+
found_username = username
+
break
+
else:
+
# Search by atom ID
+
entry = git_store.get_entry(username, identifier)
+
if entry:
+
found_username = username
+
else:
+
# Search all users
+
index = git_store._load_index()
+
for user in index.users.keys():
+
if is_url:
+
# Search by URL
+
entries = git_store.list_entries(user)
+
for e in entries:
+
if str(e.link) == identifier:
+
entry = e
+
found_username = user
+
break
+
if entry:
+
break
+
else:
+
# Search by atom ID
+
entry = git_store.get_entry(user, identifier)
+
if entry:
+
found_username = user
+
break
+
+
if not entry or not found_username:
+
if username:
+
console.print(f"[red]Entry with {'URL' if is_url else 'atom ID'} '{identifier}' not found for user '{username}'[/red]")
+
else:
+
console.print(f"[red]Entry with {'URL' if is_url else 'atom ID'} '{identifier}' not found in any user's entries[/red]")
+
raise typer.Exit(1)
+
+
# Load reference index if available
+
links_path = config.git_store / "links.json"
+
ref_index = None
+
if links_path.exists():
+
with open(links_path) as f:
+
unified_data = json.load(f)
+
+
# Check if references exist in the unified structure
+
if "references" in unified_data:
+
ref_index = ReferenceIndex.from_dict({
+
"references": unified_data["references"],
+
"user_domains": unified_data.get("user_domains", {})
+
})
+
+
# Display information
+
if get_tsv_mode():
+
_display_entry_info_tsv(entry, found_username, ref_index, show_content)
+
else:
+
_display_entry_info(entry, found_username)
+
+
if ref_index:
+
_display_link_info(entry, found_username, ref_index)
+
else:
+
console.print("\n[yellow]No reference index found. Run 'thicket links' and 'thicket index' to build cross-reference data.[/yellow]")
+
+
# Optionally display content
+
if show_content and entry.content:
+
_display_content(entry.content)
+
+
except Exception as e:
+
console.print(f"[red]Error displaying entry info: {e}[/red]")
+
raise typer.Exit(1)
+
+
+
def _display_entry_info(entry, username: str) -> None:
+
"""Display basic entry information in a structured format."""
+
+
# Create main info panel
+
info_table = Table.grid(padding=(0, 2))
+
info_table.add_column("Field", style="cyan bold", width=15)
+
info_table.add_column("Value", style="white")
+
+
info_table.add_row("User", f"[green]{username}[/green]")
+
info_table.add_row("Atom ID", f"[blue]{entry.id}[/blue]")
+
info_table.add_row("Title", entry.title)
+
info_table.add_row("Link", str(entry.link))
+
+
if entry.published:
+
info_table.add_row("Published", entry.published.strftime("%Y-%m-%d %H:%M:%S UTC"))
+
+
info_table.add_row("Updated", entry.updated.strftime("%Y-%m-%d %H:%M:%S UTC"))
+
+
if entry.summary:
+
# Truncate long summaries
+
summary = entry.summary[:200] + "..." if len(entry.summary) > 200 else entry.summary
+
info_table.add_row("Summary", summary)
+
+
if entry.categories:
+
categories_text = ", ".join(entry.categories)
+
info_table.add_row("Categories", categories_text)
+
+
if entry.author:
+
author_info = []
+
if "name" in entry.author:
+
author_info.append(entry.author["name"])
+
if "email" in entry.author:
+
author_info.append(f"<{entry.author['email']}>")
+
if author_info:
+
info_table.add_row("Author", " ".join(author_info))
+
+
if entry.content_type:
+
info_table.add_row("Content Type", entry.content_type)
+
+
if entry.rights:
+
info_table.add_row("Rights", entry.rights)
+
+
if entry.source:
+
info_table.add_row("Source Feed", entry.source)
+
+
panel = Panel(
+
info_table,
+
title=f"[bold]Entry Information[/bold]",
+
border_style="blue"
+
)
+
+
console.print(panel)
+
+
+
def _display_link_info(entry, username: str, ref_index: ReferenceIndex) -> None:
+
"""Display inbound and outbound link information."""
+
+
# Get links
+
outbound_refs = ref_index.get_outbound_refs(username, entry.id)
+
inbound_refs = ref_index.get_inbound_refs(username, entry.id)
+
+
if not outbound_refs and not inbound_refs:
+
console.print("\n[dim]No cross-references found for this entry.[/dim]")
+
return
+
+
# Create links table
+
links_table = Table(title="Cross-References")
+
links_table.add_column("Direction", style="cyan", width=10)
+
links_table.add_column("Target/Source", style="green", width=20)
+
links_table.add_column("URL", style="blue", width=50)
+
+
# Add outbound references
+
for ref in outbound_refs:
+
target_info = f"{ref.target_username}:{ref.target_entry_id}" if ref.target_username and ref.target_entry_id else "External"
+
links_table.add_row("โ†’ Out", target_info, ref.target_url)
+
+
# Add inbound references
+
for ref in inbound_refs:
+
source_info = f"{ref.source_username}:{ref.source_entry_id}"
+
links_table.add_row("โ† In", source_info, ref.target_url)
+
+
console.print()
+
console.print(links_table)
+
+
# Summary
+
console.print(f"\n[bold]Summary:[/bold] {len(outbound_refs)} outbound, {len(inbound_refs)} inbound references")
+
+
+
def _display_content(content: str) -> None:
+
"""Display the full content of the entry."""
+
+
# Truncate very long content
+
display_content = content
+
if len(content) > 5000:
+
display_content = content[:5000] + "\n\n[... content truncated ...]"
+
+
panel = Panel(
+
display_content,
+
title="[bold]Entry Content[/bold]",
+
border_style="green",
+
expand=False
+
)
+
+
console.print()
+
console.print(panel)
+
+
+
def _display_entry_info_tsv(entry, username: str, ref_index: Optional[ReferenceIndex], show_content: bool) -> None:
+
"""Display entry information in TSV format."""
+
+
# Basic info
+
print("Field\tValue")
+
print(f"User\t{username}")
+
print(f"Atom ID\t{entry.id}")
+
print(f"Title\t{entry.title.replace(chr(9), ' ').replace(chr(10), ' ').replace(chr(13), ' ')}")
+
print(f"Link\t{entry.link}")
+
+
if entry.published:
+
print(f"Published\t{entry.published.strftime('%Y-%m-%d %H:%M:%S UTC')}")
+
+
print(f"Updated\t{entry.updated.strftime('%Y-%m-%d %H:%M:%S UTC')}")
+
+
if entry.summary:
+
# Escape tabs and newlines in summary
+
summary = entry.summary.replace('\t', ' ').replace('\n', ' ').replace('\r', ' ')
+
print(f"Summary\t{summary}")
+
+
if entry.categories:
+
print(f"Categories\t{', '.join(entry.categories)}")
+
+
if entry.author:
+
author_info = []
+
if "name" in entry.author:
+
author_info.append(entry.author["name"])
+
if "email" in entry.author:
+
author_info.append(f"<{entry.author['email']}>")
+
if author_info:
+
print(f"Author\t{' '.join(author_info)}")
+
+
if entry.content_type:
+
print(f"Content Type\t{entry.content_type}")
+
+
if entry.rights:
+
print(f"Rights\t{entry.rights}")
+
+
if entry.source:
+
print(f"Source Feed\t{entry.source}")
+
+
# Add reference info if available
+
if ref_index:
+
outbound_refs = ref_index.get_outbound_refs(username, entry.id)
+
inbound_refs = ref_index.get_inbound_refs(username, entry.id)
+
+
print(f"Outbound References\t{len(outbound_refs)}")
+
print(f"Inbound References\t{len(inbound_refs)}")
+
+
# Show each reference
+
for ref in outbound_refs:
+
target_info = f"{ref.target_username}:{ref.target_entry_id}" if ref.target_username and ref.target_entry_id else "External"
+
print(f"Outbound Reference\t{target_info}\t{ref.target_url}")
+
+
for ref in inbound_refs:
+
source_info = f"{ref.source_username}:{ref.source_entry_id}"
+
print(f"Inbound Reference\t{source_info}\t{ref.target_url}")
+
+
# Show content if requested
+
if show_content and entry.content:
+
# Escape tabs and newlines in content
+
content = entry.content.replace('\t', ' ').replace('\n', ' ').replace('\r', ' ')
+
print(f"Content\t{content}")
+
</file>
+
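One way to exercise the `info` command without a shell is Typer's test runner; a minimal sketch, assuming a `thicket.yaml` in the working directory (the entry URL is illustrative):

```python
from typer.testing import CliRunner

from thicket.cli.main import app

runner = CliRunner()
# Look an entry up by URL across all users; add "--content" for the body
result = runner.invoke(app, ["info", "https://alice.example.com/2024/05/hello-world"])
print(result.stdout)
```
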
+
<file path="src/thicket/cli/commands/init.py">
+
"""Initialize command for thicket."""
+
+
from pathlib import Path
+
from typing import Optional
+
+
import typer
+
from pydantic import ValidationError
+
+
from ...core.git_store import GitStore
+
from ...models import ThicketConfig
+
from ..main import app
+
from ..utils import print_error, print_success, save_config
+
+
+
@app.command()
+
def init(
+
git_store: Path = typer.Argument(..., help="Path to Git repository for storing feeds"),
+
cache_dir: Optional[Path] = typer.Option(
+
None, "--cache-dir", "-c", help="Cache directory (default: ~/.cache/thicket)"
+
),
+
config_file: Optional[Path] = typer.Option(
+
None, "--config", help="Configuration file path (default: thicket.yaml)"
+
),
+
force: bool = typer.Option(
+
False, "--force", "-f", help="Overwrite existing configuration"
+
),
+
) -> None:
+
"""Initialize a new thicket configuration and Git store."""
+
+
# Set default paths
+
if cache_dir is None:
+
from platformdirs import user_cache_dir
+
cache_dir = Path(user_cache_dir("thicket"))
+
+
if config_file is None:
+
config_file = Path("thicket.yaml")
+
+
# Check if config already exists
+
if config_file.exists() and not force:
+
print_error(f"Configuration file already exists: {config_file}")
+
print_error("Use --force to overwrite")
+
raise typer.Exit(1)
+
+
# Create cache directory
+
cache_dir.mkdir(parents=True, exist_ok=True)
+
+
# Create Git store
+
try:
+
GitStore(git_store)
+
print_success(f"Initialized Git store at: {git_store}")
+
except Exception as e:
+
print_error(f"Failed to initialize Git store: {e}")
+
raise typer.Exit(1) from e
+
+
# Create configuration
+
try:
+
config = ThicketConfig(
+
git_store=git_store,
+
cache_dir=cache_dir,
+
users=[]
+
)
+
+
save_config(config, config_file)
+
print_success(f"Created configuration file: {config_file}")
+
+
except ValidationError as e:
+
print_error(f"Invalid configuration: {e}")
+
raise typer.Exit(1) from e
+
except Exception as e:
+
print_error(f"Failed to create configuration: {e}")
+
raise typer.Exit(1) from e
+
+
print_success("Thicket initialized successfully!")
+
print_success(f"Git store: {git_store}")
+
print_success(f"Cache directory: {cache_dir}")
+
print_success(f"Configuration: {config_file}")
+
print_success("Run 'thicket add user' to add your first user and feed.")
+
</file>
+
+
<file path="src/thicket/cli/__init__.py">
+
"""CLI interface for thicket."""
+
+
from .main import app
+
+
__all__ = ["app"]
+
</file>
+
+
<file path="src/thicket/core/__init__.py">
+
"""Core business logic for thicket."""
+
+
from .feed_parser import FeedParser
+
from .git_store import GitStore
+
+
__all__ = ["FeedParser", "GitStore"]
+
</file>
+
+
<file path="src/thicket/core/feed_parser.py">
+
"""Feed parsing and normalization with auto-discovery."""
+
+
from datetime import datetime
+
from typing import Optional
+
from urllib.parse import urlparse
+
+
import bleach
+
import feedparser
+
import httpx
+
from pydantic import HttpUrl, ValidationError
+
+
from ..models import AtomEntry, FeedMetadata
+
+
+
class FeedParser:
+
"""Parser for RSS/Atom feeds with normalization and auto-discovery."""
+
+
def __init__(self, user_agent: str = "thicket/0.1.0"):
+
"""Initialize the feed parser."""
+
self.user_agent = user_agent
+
self.allowed_tags = [
+
"a", "abbr", "acronym", "b", "blockquote", "br", "code", "em",
+
"i", "li", "ol", "p", "pre", "strong", "ul", "h1", "h2", "h3",
+
"h4", "h5", "h6", "img", "div", "span",
+
]
+
self.allowed_attributes = {
+
"a": ["href", "title"],
+
"abbr": ["title"],
+
"acronym": ["title"],
+
"img": ["src", "alt", "title", "width", "height"],
+
"blockquote": ["cite"],
+
}
+
+
async def fetch_feed(self, url: HttpUrl) -> str:
+
"""Fetch feed content from URL."""
+
async with httpx.AsyncClient() as client:
+
response = await client.get(
+
str(url),
+
headers={"User-Agent": self.user_agent},
+
timeout=30.0,
+
follow_redirects=True,
+
)
+
response.raise_for_status()
+
return response.text
+
+
def parse_feed(self, content: str, source_url: Optional[HttpUrl] = None) -> tuple[FeedMetadata, list[AtomEntry]]:
+
"""Parse feed content and return metadata and entries."""
+
parsed = feedparser.parse(content)
+
+
if parsed.bozo and parsed.bozo_exception:
+
# Try to continue with potentially malformed feed
+
pass
+
+
# Extract feed metadata
+
feed_meta = self._extract_feed_metadata(parsed.feed)
+
+
# Extract and normalize entries
+
entries = []
+
for entry in parsed.entries:
+
try:
+
atom_entry = self._normalize_entry(entry, source_url)
+
entries.append(atom_entry)
+
except Exception as e:
+
# Log error but continue processing other entries
+
print(f"Error processing entry {getattr(entry, 'id', 'unknown')}: {e}")
+
continue
+
+
return feed_meta, entries
+
+
def _extract_feed_metadata(self, feed: feedparser.FeedParserDict) -> FeedMetadata:
+
"""Extract metadata from feed for auto-discovery."""
+
# Parse author information
+
author_name = None
+
author_email = None
+
author_uri = None
+
+
if hasattr(feed, 'author_detail'):
+
author_name = feed.author_detail.get('name')
+
author_email = feed.author_detail.get('email')
+
author_uri = feed.author_detail.get('href')
+
elif hasattr(feed, 'author'):
+
author_name = feed.author
+
+
# Parse managing editor for RSS feeds
+
if not author_email and hasattr(feed, 'managingEditor'):
+
author_email = feed.managingEditor
+
+
# Parse feed link
+
feed_link = None
+
if hasattr(feed, 'link'):
+
try:
+
feed_link = HttpUrl(feed.link)
+
except ValidationError:
+
pass
+
+
# Parse image/icon/logo
+
logo = None
+
icon = None
+
image_url = None
+
+
if hasattr(feed, 'image'):
+
try:
+
image_url = HttpUrl(feed.image.get('href', feed.image.get('url', '')))
+
except (ValidationError, AttributeError):
+
pass
+
+
if hasattr(feed, 'icon'):
+
try:
+
icon = HttpUrl(feed.icon)
+
except ValidationError:
+
pass
+
+
if hasattr(feed, 'logo'):
+
try:
+
logo = HttpUrl(feed.logo)
+
except ValidationError:
+
pass
+
+
return FeedMetadata(
+
title=getattr(feed, 'title', None),
+
author_name=author_name,
+
author_email=author_email,
+
author_uri=HttpUrl(author_uri) if author_uri else None,
+
link=feed_link,
+
logo=logo,
+
icon=icon,
+
image_url=image_url,
+
description=getattr(feed, 'description', None),
+
)
+
+
def _normalize_entry(self, entry: feedparser.FeedParserDict, source_url: Optional[HttpUrl] = None) -> AtomEntry:
+
"""Normalize an entry to Atom format."""
+
# Parse timestamps
+
updated = self._parse_timestamp(entry.get('updated_parsed') or entry.get('published_parsed'))
+
published = self._parse_timestamp(entry.get('published_parsed'))
+
+
# Parse content
+
content = self._extract_content(entry)
+
content_type = self._extract_content_type(entry)
+
+
# Parse author
+
author = self._extract_author(entry)
+
+
# Parse categories/tags
+
categories = []
+
if hasattr(entry, 'tags'):
+
categories = [tag.get('term', '') for tag in entry.tags if tag.get('term')]
+
+
# Sanitize HTML content
+
if content:
+
content = self._sanitize_html(content)
+
+
summary = entry.get('summary', '')
+
if summary:
+
summary = self._sanitize_html(summary)
+
+
return AtomEntry(
+
id=entry.get('id', entry.get('link', '')),
+
title=entry.get('title', ''),
+
link=HttpUrl(entry.get('link', '')),
+
updated=updated,
+
published=published,
+
summary=summary or None,
+
content=content or None,
+
content_type=content_type,
+
author=author,
+
categories=categories,
+
rights=entry.get('rights', None),
+
source=str(source_url) if source_url else None,
+
)
+
+
def _parse_timestamp(self, time_struct) -> datetime:
+
"""Parse feedparser time struct to datetime."""
+
if time_struct:
+
return datetime(*time_struct[:6])
+
return datetime.now()
+
+
def _extract_content(self, entry: feedparser.FeedParserDict) -> Optional[str]:
+
"""Extract the best content from an entry."""
+
# Prefer content over summary
+
if hasattr(entry, 'content') and entry.content:
+
# Find the best content: prefer text/html, then fall back to text/plain
+
for content_item in entry.content:
+
if content_item.get('type') in ['text/html', 'html']:
+
return content_item.get('value', '')
+
for content_item in entry.content:
+
if content_item.get('type') in ['text/plain', 'text']:
+
return content_item.get('value', '')
+
# Fallback to first content item
+
return entry.content[0].get('value', '')
+
+
# Fallback to summary
+
return entry.get('summary', '')
+
+
def _extract_content_type(self, entry: feedparser.FeedParserDict) -> str:
+
"""Extract content type from entry."""
+
if hasattr(entry, 'content') and entry.content:
+
content_type = entry.content[0].get('type', 'html')
+
# Normalize content type
+
if content_type in ['text/html', 'html']:
+
return 'html'
+
elif content_type in ['text/plain', 'text']:
+
return 'text'
+
elif content_type == 'xhtml':
+
return 'xhtml'
+
return 'html'
+
+
def _extract_author(self, entry: feedparser.FeedParserDict) -> Optional[dict]:
+
"""Extract author information from entry."""
+
author = {}
+
+
if hasattr(entry, 'author_detail'):
+
author.update({
+
'name': entry.author_detail.get('name'),
+
'email': entry.author_detail.get('email'),
+
'uri': entry.author_detail.get('href'),
+
})
+
elif hasattr(entry, 'author'):
+
author['name'] = entry.author
+
+
return author if author else None
+
+
def _sanitize_html(self, html: str) -> str:
+
"""Sanitize HTML content to prevent XSS."""
+
return bleach.clean(
+
html,
+
tags=self.allowed_tags,
+
attributes=self.allowed_attributes,
+
strip=True,
+
)
+
+
def sanitize_entry_id(self, entry_id: str) -> str:
+
"""Sanitize entry ID to be a safe filename."""
+
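# Example (for a URL-style id): "https://example.org/2024/01/post" -> "2024_01_post"
+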
# Parse URL to get meaningful parts
+
parsed = urlparse(entry_id)
+
+
# Start with the path component
+
if parsed.path:
+
# Remove leading slash and replace problematic characters
+
safe_id = parsed.path.lstrip('/').replace('/', '_').replace('\\', '_')
+
else:
+
# Use the entire ID as fallback
+
safe_id = entry_id
+
+
# Replace problematic characters
+
safe_chars = []
+
for char in safe_id:
+
if char.isalnum() or char in '-_.':
+
safe_chars.append(char)
+
else:
+
safe_chars.append('_')
+
+
safe_id = ''.join(safe_chars)
+
+
# Ensure it's not too long (max 200 chars)
+
if len(safe_id) > 200:
+
safe_id = safe_id[:200]
+
+
# Ensure it's not empty
+
if not safe_id:
+
safe_id = "entry"
+
+
return safe_id
+
</file>
+
+
<file path="src/thicket/core/reference_parser.py">
+
"""Reference detection and parsing for blog entries."""
+
+
import re
+
from typing import TYPE_CHECKING, Optional
+
from urllib.parse import urlparse
+
+
from ..models import AtomEntry
+
+
if TYPE_CHECKING:
+
# Imported only for the "GitStore" string annotations used below
+
from .git_store import GitStore
+
+
+
class BlogReference:
+
"""Represents a reference from one blog entry to another."""
+
+
def __init__(
+
self,
+
source_entry_id: str,
+
source_username: str,
+
target_url: str,
+
target_username: Optional[str] = None,
+
target_entry_id: Optional[str] = None,
+
):
+
self.source_entry_id = source_entry_id
+
self.source_username = source_username
+
self.target_url = target_url
+
self.target_username = target_username
+
self.target_entry_id = target_entry_id
+
+
def to_dict(self) -> dict:
+
"""Convert to dictionary for JSON serialization."""
+
result = {
+
"source_entry_id": self.source_entry_id,
+
"source_username": self.source_username,
+
"target_url": self.target_url,
+
}
+
+
# Only include optional fields if they are not None
+
if self.target_username is not None:
+
result["target_username"] = self.target_username
+
if self.target_entry_id is not None:
+
result["target_entry_id"] = self.target_entry_id
+
+
return result
+
+
@classmethod
+
def from_dict(cls, data: dict) -> "BlogReference":
+
"""Create from dictionary."""
+
return cls(
+
source_entry_id=data["source_entry_id"],
+
source_username=data["source_username"],
+
target_url=data["target_url"],
+
target_username=data.get("target_username"),
+
target_entry_id=data.get("target_entry_id"),
+
)
+
+
+
class ReferenceIndex:
+
"""Index of blog-to-blog references for creating threaded views."""
+
+
def __init__(self):
+
self.references: list[BlogReference] = []
+
self.outbound_refs: dict[
+
str, list[BlogReference]
+
] = {} # entry_id -> outbound refs
+
self.inbound_refs: dict[
+
str, list[BlogReference]
+
] = {} # entry_id -> inbound refs
+
self.user_domains: dict[str, set[str]] = {} # username -> set of domains
+
+
def add_reference(self, ref: BlogReference) -> None:
+
"""Add a reference to the index."""
+
self.references.append(ref)
+
+
# Update outbound references
+
source_key = f"{ref.source_username}:{ref.source_entry_id}"
+
if source_key not in self.outbound_refs:
+
self.outbound_refs[source_key] = []
+
self.outbound_refs[source_key].append(ref)
+
+
# Update inbound references if we can identify the target
+
if ref.target_username and ref.target_entry_id:
+
target_key = f"{ref.target_username}:{ref.target_entry_id}"
+
if target_key not in self.inbound_refs:
+
self.inbound_refs[target_key] = []
+
self.inbound_refs[target_key].append(ref)
+
+
def get_outbound_refs(self, username: str, entry_id: str) -> list[BlogReference]:
+
"""Get all outbound references from an entry."""
+
key = f"{username}:{entry_id}"
+
return self.outbound_refs.get(key, [])
+
+
def get_inbound_refs(self, username: str, entry_id: str) -> list[BlogReference]:
+
"""Get all inbound references to an entry."""
+
key = f"{username}:{entry_id}"
+
return self.inbound_refs.get(key, [])
+
+
def get_thread_members(self, username: str, entry_id: str) -> set[tuple[str, str]]:
+
"""Get all entries that are part of the same thread."""
+
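# Flood fill over the reference graph, treating inbound and outbound references as undirected edges
+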
visited = set()
+
to_visit = [(username, entry_id)]
+
thread_members = set()
+
+
while to_visit:
+
current_user, current_entry = to_visit.pop()
+
if (current_user, current_entry) in visited:
+
continue
+
+
visited.add((current_user, current_entry))
+
thread_members.add((current_user, current_entry))
+
+
# Add outbound references
+
for ref in self.get_outbound_refs(current_user, current_entry):
+
if ref.target_username and ref.target_entry_id:
+
to_visit.append((ref.target_username, ref.target_entry_id))
+
+
# Add inbound references
+
for ref in self.get_inbound_refs(current_user, current_entry):
+
to_visit.append((ref.source_username, ref.source_entry_id))
+
+
return thread_members
+
+
def to_dict(self) -> dict:
+
"""Convert to dictionary for JSON serialization."""
+
return {
+
"references": [ref.to_dict() for ref in self.references],
+
"user_domains": {k: list(v) for k, v in self.user_domains.items()},
+
}
+
+
@classmethod
+
def from_dict(cls, data: dict) -> "ReferenceIndex":
+
"""Create from dictionary."""
+
index = cls()
+
for ref_data in data.get("references", []):
+
ref = BlogReference.from_dict(ref_data)
+
index.add_reference(ref)
+
+
for username, domains in data.get("user_domains", {}).items():
+
index.user_domains[username] = set(domains)
+
+
return index
+
+
+
class ReferenceParser:
+
"""Parses blog entries to detect references to other blogs."""
+
+
def __init__(self):
+
# Common blog platforms and patterns
+
self.blog_patterns = [
+
r"https?://[^/]+\.(?:org|com|net|io|dev|me|co\.uk)/.*", # Common blog domains
+
r"https?://[^/]+\.github\.io/.*", # GitHub Pages
+
r"https?://[^/]+\.substack\.com/.*", # Substack
+
r"https?://medium\.com/.*", # Medium
+
r"https?://[^/]+\.wordpress\.com/.*", # WordPress.com
+
r"https?://[^/]+\.blogspot\.com/.*", # Blogger
+
]
+
+
# Compile regex patterns
+
self.link_pattern = re.compile(
+
r'<a[^>]+href="([^"]+)"[^>]*>(.*?)</a>', re.IGNORECASE | re.DOTALL
+
)
+
self.url_pattern = re.compile(r'https?://[^\s<>"]+')
+
+
def extract_links_from_html(self, html_content: str) -> list[tuple[str, str]]:
+
"""Extract all links from HTML content."""
+
links = []
+
+
# Extract links from <a> tags
+
for match in self.link_pattern.finditer(html_content):
+
url = match.group(1)
+
text = re.sub(
+
r"<[^>]+>", "", match.group(2)
+
).strip() # Remove HTML tags from link text
+
links.append((url, text))
+
+
return links
+
+
def is_blog_url(self, url: str) -> bool:
+
"""Check if a URL likely points to a blog post."""
+
for pattern in self.blog_patterns:
+
if re.match(pattern, url):
+
return True
+
return False
+
+
def _is_likely_blog_post_url(self, url: str) -> bool:
+
"""Check if a same-domain URL likely points to a blog post (not CSS, images, etc.)."""
+
parsed_url = urlparse(url)
+
path = parsed_url.path.lower()
+
+
# Skip obvious non-blog content
+
if any(path.endswith(ext) for ext in ['.css', '.js', '.png', '.jpg', '.jpeg', '.gif', '.svg', '.ico', '.pdf', '.xml', '.json']):
+
return False
+
+
# Skip common non-blog paths
+
if any(segment in path for segment in ['/static/', '/assets/', '/css/', '/js/', '/images/', '/img/', '/media/', '/uploads/']):
+
return False
+
+
# Skip fragment-only links (same page anchors)
+
if not path or path == '/':
+
return False
+
+
# Look for positive indicators of blog posts
+
# Common blog post patterns: dates, slugs, post indicators
+
blog_indicators = [
+
r'/\d{4}/', # Year in path
+
r'/\d{4}/\d{2}/', # Year/month in path
+
r'/blog/',
+
r'/post/',
+
r'/posts/',
+
r'/articles?/',
+
r'/notes?/',
+
r'/entries/',
+
r'/writing/',
+
]
+
+
for pattern in blog_indicators:
+
if re.search(pattern, path):
+
return True
+
+
# Anything not excluded above that has at least one path segment is treated as a likely blog post
+
path_segments = [seg for seg in path.split('/') if seg]
+
return len(path_segments) >= 1 # At least one meaningful path segment
+
+
def resolve_target_user(
+
self, url: str, user_domains: dict[str, set[str]]
+
) -> Optional[str]:
+
"""Try to resolve a URL to a known user based on domain mapping."""
+
parsed_url = urlparse(url)
+
domain = parsed_url.netloc.lower()
+
+
for username, domains in user_domains.items():
+
if domain in domains:
+
return username
+
+
return None
+
+
def extract_references(
+
self, entry: AtomEntry, username: str, user_domains: dict[str, set[str]]
+
) -> list[BlogReference]:
+
"""Extract all blog references from an entry."""
+
references = []
+
+
# Combine all text content for analysis
+
content_to_search = []
+
if entry.content:
+
content_to_search.append(entry.content)
+
if entry.summary:
+
content_to_search.append(entry.summary)
+
+
# The entry's own domain is constant across its links, so compute it once
+
entry_domain = urlparse(str(entry.link)).netloc.lower() if entry.link else ""
+
+
for content in content_to_search:
+
links = self.extract_links_from_html(content)
+
+
for url, _link_text in links:
+
link_domain = urlparse(url).netloc.lower()
+
+
# Check if this looks like a blog URL
+
if not self.is_blog_url(url):
+
continue
+
+
# Resolve the target user, filtering out same-domain links that do not look like blog posts
+
if link_domain == entry_domain:
+
if not self._is_likely_blog_post_url(url):
+
continue
+
# Same domain - the target user is the source user
+
target_username: Optional[str] = username
+
else:
+
# Different domain - try to resolve against known user domains
+
target_username = self.resolve_target_user(url, user_domains)
+
+
ref = BlogReference(
+
source_entry_id=entry.id,
+
source_username=username,
+
target_url=url,
+
target_username=target_username,
+
target_entry_id=None, # Will be resolved later if possible
+
)
+
+
references.append(ref)
+
+
return references
+
+
def build_user_domain_mapping(self, git_store: "GitStore") -> dict[str, set[str]]:
+
"""Build mapping of usernames to their known domains."""
+
user_domains = {}
+
index = git_store._load_index()
+
+
for username, user_metadata in index.users.items():
+
domains = set()
+
+
# Add domains from feeds
+
for feed_url in user_metadata.feeds:
+
domain = urlparse(feed_url).netloc.lower()
+
if domain:
+
domains.add(domain)
+
+
# Add domain from homepage
+
if user_metadata.homepage:
+
domain = urlparse(str(user_metadata.homepage)).netloc.lower()
+
if domain:
+
domains.add(domain)
+
+
user_domains[username] = domains
+
+
return user_domains
+
+
def _build_url_to_entry_mapping(self, git_store: "GitStore") -> dict[str, str]:
+
"""Build a comprehensive mapping from URLs to entry IDs using git store data.
+
+
This creates a bidirectional mapping that handles:
+
- Entry link URLs -> Entry IDs
+
- URL variations (with/without www, http/https)
+
- Multiple URLs pointing to the same entry
+
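+
For example, the entry at https://www.example.org/post is also reachable
+
via https://example.org/post and http://www.example.org/post.
+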
"""
+
url_to_entry: dict[str, str] = {}
+
+
# Load index to get all users
+
index = git_store._load_index()
+
+
for username in index.users.keys():
+
entries = git_store.list_entries(username)
+
+
for entry in entries:
+
if entry.link:
+
link_url = str(entry.link)
+
entry_id = entry.id
+
+
# Map the canonical link URL
+
url_to_entry[link_url] = entry_id
+
+
# Handle common URL variations
+
parsed = urlparse(link_url)
+
if parsed.netloc and parsed.path:
+
# Add version without www
+
if parsed.netloc.startswith('www.'):
+
no_www_url = f"{parsed.scheme}://{parsed.netloc[4:]}{parsed.path}"
+
if parsed.query:
+
no_www_url += f"?{parsed.query}"
+
if parsed.fragment:
+
no_www_url += f"#{parsed.fragment}"
+
url_to_entry[no_www_url] = entry_id
+
+
# Add version with www if not present
+
else:
+
www_url = f"{parsed.scheme}://www.{parsed.netloc}{parsed.path}"
+
if parsed.query:
+
www_url += f"?{parsed.query}"
+
if parsed.fragment:
+
www_url += f"#{parsed.fragment}"
+
url_to_entry[www_url] = entry_id
+
+
# Add http/https variations
+
if parsed.scheme == 'https':
+
http_url = link_url.replace('https://', 'http://', 1)
+
url_to_entry[http_url] = entry_id
+
elif parsed.scheme == 'http':
+
https_url = link_url.replace('http://', 'https://', 1)
+
url_to_entry[https_url] = entry_id
+
+
return url_to_entry
+
+
def _normalize_url(self, url: str) -> str:
+
"""Normalize URL for consistent matching.
+
+
Handles common variations like trailing slashes, fragments, etc.
+
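+
For example, "https://example.org/post/#intro" normalizes to "https://example.org/post".
+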
"""
+
parsed = urlparse(url)
+
+
# Remove trailing slash from path
+
path = parsed.path.rstrip('/') if parsed.path != '/' else parsed.path
+
+
# Reconstruct without fragment for consistent matching
+
normalized = f"{parsed.scheme}://{parsed.netloc}{path}"
+
if parsed.query:
+
normalized += f"?{parsed.query}"
+
+
return normalized
+
+
def resolve_target_entry_ids(
+
self, references: list[BlogReference], git_store: "GitStore"
+
) -> list[BlogReference]:
+
"""Resolve target_entry_id for references using comprehensive URL mapping."""
+
resolved_refs = []
+
+
# Build comprehensive URL to entry ID mapping
+
url_to_entry = self._build_url_to_entry_mapping(git_store)
+
+
for ref in references:
+
# If we already have a target_entry_id, keep the reference as-is
+
if ref.target_entry_id is not None:
+
resolved_refs.append(ref)
+
continue
+
+
# If we don't have a target_username, we can't resolve it
+
if ref.target_username is None:
+
resolved_refs.append(ref)
+
continue
+
+
# Try to resolve using URL mapping
+
resolved_entry_id = None
+
+
# First, try exact match
+
if ref.target_url in url_to_entry:
+
resolved_entry_id = url_to_entry[ref.target_url]
+
else:
+
# Try normalized URL matching
+
normalized_target = self._normalize_url(ref.target_url)
+
if normalized_target in url_to_entry:
+
resolved_entry_id = url_to_entry[normalized_target]
+
else:
+
# Try URL variations
+
for mapped_url, entry_id in url_to_entry.items():
+
if self._normalize_url(mapped_url) == normalized_target:
+
resolved_entry_id = entry_id
+
break
+
+
# Verify the resolved entry belongs to the target username
+
if resolved_entry_id:
+
# Double-check by loading the actual entry
+
entries = git_store.list_entries(ref.target_username)
+
entry_found = any(entry.id == resolved_entry_id for entry in entries)
+
if not entry_found:
+
resolved_entry_id = None
+
+
# Create a new reference with the resolved target_entry_id
+
resolved_ref = BlogReference(
+
source_entry_id=ref.source_entry_id,
+
source_username=ref.source_username,
+
target_url=ref.target_url,
+
target_username=ref.target_username,
+
target_entry_id=resolved_entry_id,
+
)
+
resolved_refs.append(resolved_ref)
+
+
return resolved_refs
+
</file>
+
+
<file path="src/thicket/models/__init__.py">
+
"""Data models for thicket."""
+
+
from .config import ThicketConfig, UserConfig
+
from .feed import AtomEntry, DuplicateMap, FeedMetadata
+
from .user import GitStoreIndex, UserMetadata
+
+
__all__ = [
+
"ThicketConfig",
+
"UserConfig",
+
"AtomEntry",
+
"DuplicateMap",
+
"FeedMetadata",
+
"GitStoreIndex",
+
"UserMetadata",
+
]
+
</file>
+
+
<file path="src/thicket/models/feed.py">
+
"""Feed and entry models for thicket."""
+
+
from datetime import datetime
+
from typing import TYPE_CHECKING, Optional
+
+
from pydantic import BaseModel, ConfigDict, EmailStr, HttpUrl
+
+
if TYPE_CHECKING:
+
from .config import UserConfig
+
+
+
class AtomEntry(BaseModel):
+
"""Represents an Atom feed entry stored in the Git repository."""
+
+
model_config = ConfigDict(
+
json_encoders={datetime: lambda v: v.isoformat()},
+
str_strip_whitespace=True,
+
)
+
+
id: str # Original Atom ID
+
title: str
+
link: HttpUrl
+
updated: datetime
+
published: Optional[datetime] = None
+
summary: Optional[str] = None
+
content: Optional[str] = None # Full body content from Atom entry
+
content_type: Optional[str] = "html" # text, html, xhtml
+
author: Optional[dict] = None
+
categories: list[str] = []
+
rights: Optional[str] = None # Copyright info
+
source: Optional[str] = None # Source feed URL
+
+
+
class FeedMetadata(BaseModel):
+
"""Metadata extracted from a feed for auto-discovery."""
+
+
title: Optional[str] = None
+
author_name: Optional[str] = None
+
author_email: Optional[EmailStr] = None
+
author_uri: Optional[HttpUrl] = None
+
link: Optional[HttpUrl] = None
+
logo: Optional[HttpUrl] = None
+
icon: Optional[HttpUrl] = None
+
image_url: Optional[HttpUrl] = None
+
description: Optional[str] = None
+
+
def to_user_config(self, username: str, feed_url: HttpUrl) -> "UserConfig":
+
"""Convert discovered metadata to UserConfig with fallbacks."""
+
from .config import UserConfig
+
+
return UserConfig(
+
username=username,
+
feeds=[feed_url],
+
display_name=self.author_name or self.title,
+
email=self.author_email,
+
homepage=self.author_uri or self.link,
+
icon=self.logo or self.icon or self.image_url,
+
)
+
+
+
class DuplicateMap(BaseModel):
+
"""Maps duplicate entry IDs to canonical entry IDs."""
+
+
duplicates: dict[str, str] = {} # duplicate_id -> canonical_id
+
comment: str = "Entry IDs that map to the same canonical content"
+
+
def add_duplicate(self, duplicate_id: str, canonical_id: str) -> None:
+
"""Add a duplicate mapping."""
+
self.duplicates[duplicate_id] = canonical_id
+
+
def remove_duplicate(self, duplicate_id: str) -> bool:
+
"""Remove a duplicate mapping. Returns True if existed."""
+
return self.duplicates.pop(duplicate_id, None) is not None
+
+
def get_canonical(self, entry_id: str) -> str:
+
"""Get canonical ID for an entry (returns original if not duplicate)."""
+
return self.duplicates.get(entry_id, entry_id)
+
+
def is_duplicate(self, entry_id: str) -> bool:
+
"""Check if entry ID is marked as duplicate."""
+
return entry_id in self.duplicates
+
+
def get_duplicates_for_canonical(self, canonical_id: str) -> list[str]:
+
"""Get all duplicate IDs that map to a canonical ID."""
+
return [
+
duplicate_id
+
for duplicate_id, canonical in self.duplicates.items()
+
if canonical == canonical_id
+
]
+
</file>
+
+
<file path="src/thicket/models/user.py">
+
"""User metadata models for thicket."""
+
+
from datetime import datetime
+
from typing import Optional
+
+
from pydantic import BaseModel, ConfigDict
+
+
+
class UserMetadata(BaseModel):
+
"""Metadata about a user stored in the Git repository."""
+
+
model_config = ConfigDict(
+
json_encoders={datetime: lambda v: v.isoformat()},
+
str_strip_whitespace=True,
+
)
+
+
username: str
+
display_name: Optional[str] = None
+
email: Optional[str] = None
+
homepage: Optional[str] = None
+
icon: Optional[str] = None
+
feeds: list[str] = []
+
directory: str # Directory name in Git store
+
created: datetime
+
last_updated: datetime
+
entry_count: int = 0
+
+
def update_timestamp(self) -> None:
+
"""Update the last_updated timestamp to now."""
+
self.last_updated = datetime.now()
+
+
def increment_entry_count(self, count: int = 1) -> None:
+
"""Increment the entry count by the given amount."""
+
self.entry_count += count
+
self.update_timestamp()
+
+
+
class GitStoreIndex(BaseModel):
+
"""Index of all users and their directories in the Git store."""
+
+
model_config = ConfigDict(
+
json_encoders={datetime: lambda v: v.isoformat()}
+
)
+
+
users: dict[str, UserMetadata] = {} # username -> UserMetadata
+
created: datetime
+
last_updated: datetime
+
total_entries: int = 0
+
+
def add_user(self, user_metadata: UserMetadata) -> None:
+
"""Add or update a user in the index."""
+
self.users[user_metadata.username] = user_metadata
+
self.last_updated = datetime.now()
+
+
def remove_user(self, username: str) -> bool:
+
"""Remove a user from the index. Returns True if user existed."""
+
if username in self.users:
+
del self.users[username]
+
self.last_updated = datetime.now()
+
return True
+
return False
+
+
def get_user(self, username: str) -> Optional[UserMetadata]:
+
"""Get user metadata by username."""
+
return self.users.get(username)
+
+
def update_entry_count(self, username: str, count: int) -> None:
+
"""Update entry count for a user and total."""
+
user = self.get_user(username)
+
if user:
+
user.increment_entry_count(count)
+
self.total_entries += count
+
self.last_updated = datetime.now()
+
+
def recalculate_totals(self) -> None:
+
"""Recalculate total entries from all users."""
+
self.total_entries = sum(user.entry_count for user in self.users.values())
+
self.last_updated = datetime.now()
+
</file>
+
+
<file path="src/thicket/utils/__init__.py">
+
"""Utility modules for thicket."""
+
+
# This module will contain shared utilities
+
# For now, it's empty but can be expanded with common functions
+
</file>
+
+
<file path="src/thicket/__init__.py">
+
"""Thicket: A CLI tool for persisting Atom/RSS feeds in Git repositories."""
+
+
__version__ = "0.1.0"
+
__author__ = "thicket"
+
__email__ = "thicket@example.com"
+
</file>
+
+
<file path="src/thicket/__main__.py">
+
"""Entry point for running thicket as a module."""
+
+
from .cli.main import app
+
+
if __name__ == "__main__":
+
app()
+
</file>
+
+
<file path=".gitignore">
+
# Byte-compiled / optimized / DLL files
+
__pycache__/
+
*.py[codz]
+
*$py.class
+
+
# C extensions
+
*.so
+
+
# Distribution / packaging
+
.Python
+
build/
+
develop-eggs/
+
dist/
+
downloads/
+
eggs/
+
.eggs/
+
lib/
+
lib64/
+
parts/
+
sdist/
+
var/
+
wheels/
+
share/python-wheels/
+
*.egg-info/
+
.installed.cfg
+
*.egg
+
MANIFEST
+
+
# PyInstaller
+
# Usually these files are written by a python script from a template
+
# before PyInstaller builds the exe, so as to inject date/other infos into it.
+
*.manifest
+
*.spec
+
+
# Installer logs
+
pip-log.txt
+
pip-delete-this-directory.txt
+
+
# Unit test / coverage reports
+
htmlcov/
+
.tox/
+
.nox/
+
.coverage
+
.coverage.*
+
.cache
+
nosetests.xml
+
coverage.xml
+
*.cover
+
*.py.cover
+
.hypothesis/
+
.pytest_cache/
+
cover/
+
+
# Translations
+
*.mo
+
*.pot
+
+
# Django stuff:
+
*.log
+
local_settings.py
+
db.sqlite3
+
db.sqlite3-journal
+
+
# Flask stuff:
+
instance/
+
.webassets-cache
+
+
# Scrapy stuff:
+
.scrapy
+
+
# Sphinx documentation
+
docs/_build/
+
+
# PyBuilder
+
.pybuilder/
+
target/
+
+
# Jupyter Notebook
+
.ipynb_checkpoints
+
+
# IPython
+
profile_default/
+
ipython_config.py
+
+
# pyenv
+
# For a library or package, you might want to ignore these files since the code is
+
# intended to run in multiple environments; otherwise, check them in:
+
# .python-version
+
+
# pipenv
+
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
+
# However, in case of collaboration, if having platform-specific dependencies or dependencies
+
# having no cross-platform support, pipenv may install dependencies that don't work, or not
+
# install all needed dependencies.
+
#Pipfile.lock
+
+
# UV
+
# Similar to Pipfile.lock, it is generally recommended to include uv.lock in version control.
+
# This is especially recommended for binary packages to ensure reproducibility, and is more
+
# commonly ignored for libraries.
+
#uv.lock
+
+
# poetry
+
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
+
# This is especially recommended for binary packages to ensure reproducibility, and is more
+
# commonly ignored for libraries.
+
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
+
#poetry.lock
+
#poetry.toml
+
+
# pdm
+
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
+
# pdm recommends including project-wide configuration in pdm.toml, but excluding .pdm-python.
+
# https://pdm-project.org/en/latest/usage/project/#working-with-version-control
+
#pdm.lock
+
#pdm.toml
+
.pdm-python
+
.pdm-build/
+
+
# pixi
+
# Similar to Pipfile.lock, it is generally recommended to include pixi.lock in version control.
+
#pixi.lock
+
# Pixi creates a virtual environment in the .pixi directory, just like venv module creates one
+
# in the .venv directory. It is recommended not to include this directory in version control.
+
.pixi
+
+
# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
+
__pypackages__/
+
+
# Celery stuff
+
celerybeat-schedule
+
celerybeat.pid
+
+
# SageMath parsed files
+
*.sage.py
+
+
# Environments
+
.env
+
.envrc
+
.venv
+
env/
+
venv/
+
ENV/
+
env.bak/
+
venv.bak/
+
+
# Spyder project settings
+
.spyderproject
+
.spyproject
+
+
# Rope project settings
+
.ropeproject
+
+
# mkdocs documentation
+
/site
+
+
# mypy
+
.mypy_cache/
+
.dmypy.json
+
dmypy.json
+
+
# Pyre type checker
+
.pyre/
+
+
# pytype static type analyzer
+
.pytype/
+
+
# Cython debug symbols
+
cython_debug/
+
+
# PyCharm
+
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
+
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
+
# and can be added to the global gitignore or merged into this file. For a more nuclear
+
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
+
#.idea/
+
+
# Abstra
+
# Abstra is an AI-powered process automation framework.
+
# Ignore directories containing user credentials, local state, and settings.
+
# Learn more at https://abstra.io/docs
+
.abstra/
+
+
# Visual Studio Code
+
# Visual Studio Code specific template is maintained in a separate VisualStudioCode.gitignore
+
# that can be found at https://github.com/github/gitignore/blob/main/Global/VisualStudioCode.gitignore
+
# and can be added to the global gitignore or merged into this file. However, if you prefer,
+
# you could uncomment the following to ignore the entire vscode folder
+
# .vscode/
+
+
# Ruff stuff:
+
.ruff_cache/
+
+
# PyPI configuration file
+
.pypirc
+
+
# Marimo
+
marimo/_static/
+
marimo/_lsp/
+
__marimo__/
+
+
# Streamlit
+
.streamlit/secrets.toml
+
+
thicket.yaml
+
</file>
+
+
<file path="CLAUDE.md">
+
My goal is to build a CLI tool called thicket in Python that maintains a Git repository within which Atom feeds can be persisted, including their contents.
+
+
# Python Environment and Package Management
+
+
This project uses `uv` for Python package management and virtual environment handling.
+
+
## Running Commands
+
+
ALWAYS use `uv run` to execute Python commands:
+
+
- Run the CLI: `uv run -m thicket`
+
- Run tests: `uv run pytest`
+
- Type checking: `uv run mypy src/`
+
- Linting: `uv run ruff check src/`
+
- Format code: `uv run ruff format src/`
+
- Compile check: `uv run python -m py_compile <file>`
+
+
## Package Management
+
+
- Add dependencies: `uv add <package>`
+
- Add dev dependencies: `uv add --dev <package>`
+
- Install dependencies: `uv sync`
+
- Update dependencies: `uv lock --upgrade`
+
+
# Project Structure
+
+
The configuration file specifies (a sketch follows this list):
+
- the location of a git store
+
- a list of usernames and target Atom/RSS feed(s) and optional metadata about the username such as their email, homepage, icon and display name
+
- a cache directory to store temporary results such as feed downloads and their last modification date that speed up operations across runs of the tool
+
+
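+
A minimal `thicket.yaml` sketch matching this spec (field names taken from the `ThicketConfig`/`UserConfig` models; values are illustrative):
+
+
```yaml
+
git_store: ./store
+
cache_dir: ./cache
+
users:
+
  - username: alice
+
    display_name: Alice
+
    feeds:
+
      - https://alice.example.org/atom.xml
+
```
+
+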
The Git data store (layout sketched after this list) should:
+
- have a subdirectory per user
+
- within that directory, an entry per Atom entry, indexed by the Atom id for that entry. The id should be sanitised consistently to be a safe filename; RSS feeds should be normalized to Atom before storing.
+
- within each entry file, the metadata of the Atom feed converted into a JSON format that preserves as much metadata as possible.
+
- have a JSON file in the Git repository that indexes the users, their associated directories within the Git repository, and any other metadata about that user from the config file
+
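+
A sketch of the resulting store layout (file and directory names are illustrative; the index JSON holds the user metadata described above):
+
+
```
+
store/
+
  index.json            # users, their directories, and per-user metadata
+
  alice/
+
    2024_01_post.json   # one JSON file per Atom entry, named by its sanitised id
+
  bob/
+
    ...
+
```
+
+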
The CLI should be modern and use cool progress bars and other niceties from ecosystem libraries.
+
+
The intention behind the Git repository is that it can be queried by other websites in order to build a weblog structure of comments that link to other blogs.
+
</file>
+
+
<file path="pyproject.toml">
+
[build-system]
+
requires = ["hatchling"]
+
build-backend = "hatchling.build"
+
+
[project]
+
name = "thicket"
+
dynamic = ["version"]
+
description = "A CLI tool for persisting Atom/RSS feeds in Git repositories"
+
readme = "README.md"
+
license = "MIT"
+
requires-python = ">=3.9"
+
authors = [
+
{name = "thicket", email = "thicket@example.com"},
+
]
+
classifiers = [
+
"Development Status :: 3 - Alpha",
+
"Intended Audience :: Developers",
+
"License :: OSI Approved :: MIT License",
+
"Operating System :: OS Independent",
+
"Programming Language :: Python :: 3",
+
"Programming Language :: Python :: 3.9",
+
"Programming Language :: Python :: 3.10",
+
"Programming Language :: Python :: 3.11",
+
"Programming Language :: Python :: 3.12",
+
"Programming Language :: Python :: 3.13",
+
"Topic :: Internet :: WWW/HTTP :: Dynamic Content :: News/Diary",
+
"Topic :: Software Development :: Version Control :: Git",
+
"Topic :: Text Processing :: Markup :: XML",
+
]
+
dependencies = [
+
"typer>=0.15.0",
+
"rich>=13.0.0",
+
"GitPython>=3.1.40",
+
"feedparser>=6.0.11",
+
"pydantic>=2.11.0",
+
"pydantic-settings>=2.10.0",
+
"httpx>=0.28.0",
+
"pendulum>=3.0.0",
+
"bleach>=6.0.0",
+
"platformdirs>=4.0.0",
+
"pyyaml>=6.0.0",
+
"email_validator",
+
"jinja2>=3.1.6",
+
]
+
+
[project.optional-dependencies]
+
dev = [
+
"pytest>=8.0.0",
+
"pytest-asyncio>=0.24.0",
+
"pytest-cov>=6.0.0",
+
"black>=24.0.0",
+
"ruff>=0.8.0",
+
"mypy>=1.13.0",
+
"types-PyYAML>=6.0.0",
+
]
+
+
[project.urls]
+
Homepage = "https://github.com/example/thicket"
+
Documentation = "https://github.com/example/thicket"
+
Repository = "https://github.com/example/thicket"
+
"Bug Tracker" = "https://github.com/example/thicket/issues"
+
+
[project.scripts]
+
thicket = "thicket.cli.main:app"
+
+
[tool.hatch.version]
+
path = "src/thicket/__init__.py"
+
+
[tool.hatch.build.targets.wheel]
+
packages = ["src/thicket"]
+
+
[tool.black]
+
line-length = 88
+
target-version = ['py39']
+
include = '\.pyi?$'
+
extend-exclude = '''
+
/(
+
# directories
+
\.eggs
+
| \.git
+
| \.hg
+
| \.mypy_cache
+
| \.tox
+
| \.venv
+
| build
+
| dist
+
)/
+
'''
+
+
[tool.ruff]
+
target-version = "py39"
+
line-length = 88
+
+
[tool.ruff.lint]
+
select = [
+
"E", # pycodestyle errors
+
"W", # pycodestyle warnings
+
"F", # pyflakes
+
"I", # isort
+
"B", # flake8-bugbear
+
"C4", # flake8-comprehensions
+
"UP", # pyupgrade
+
]
+
ignore = [
+
"E501", # line too long, handled by black
+
"B008", # do not perform function calls in argument defaults
+
"C901", # too complex
+
]
+
+
[tool.ruff.lint.per-file-ignores]
+
"__init__.py" = ["F401"]
+
+
[tool.mypy]
+
python_version = "3.9"
+
check_untyped_defs = true
+
disallow_any_generics = true
+
disallow_incomplete_defs = true
+
disallow_untyped_defs = true
+
no_implicit_optional = true
+
warn_redundant_casts = true
+
warn_unused_ignores = true
+
warn_return_any = true
+
strict_optional = true
+
+
[[tool.mypy.overrides]]
+
module = [
+
"feedparser",
+
"git",
+
"bleach",
+
]
+
ignore_missing_imports = true
+
+
[tool.pytest.ini_options]
+
testpaths = ["tests"]
+
python_files = ["test_*.py"]
+
python_classes = ["Test*"]
+
python_functions = ["test_*"]
+
addopts = [
+
"-ra",
+
"--strict-markers",
+
"--strict-config",
+
"--cov=src/thicket",
+
"--cov-report=term-missing",
+
"--cov-report=html",
+
"--cov-report=xml",
+
]
+
filterwarnings = [
+
"error",
+
"ignore::UserWarning",
+
"ignore::DeprecationWarning",
+
]
+
markers = [
+
"slow: marks tests as slow (deselect with '-m \"not slow\"')",
+
"integration: marks tests as integration tests",
+
]
+
+
[tool.coverage.run]
+
source = ["src"]
+
branch = true
+
+
[tool.coverage.report]
+
exclude_lines = [
+
"pragma: no cover",
+
"def __repr__",
+
"if self.debug:",
+
"if settings.DEBUG",
+
"raise AssertionError",
+
"raise NotImplementedError",
+
"if 0:",
+
"if __name__ == .__main__.:",
+
"class .*\\bProtocol\\):",
+
"@(abc\\.)?abstractmethod",
+
]
+
</file>
+
+
<file path="src/thicket/cli/commands/__init__.py">
+
"""CLI commands for thicket."""
+
+
# Import all commands to register them with the main app
+
from . import add, duplicates, generate, index_cmd, info_cmd, init, links_cmd, list_cmd, sync
+
+
__all__ = ["add", "duplicates", "generate", "index_cmd", "info_cmd", "init", "links_cmd", "list_cmd", "sync"]
+
</file>
+
+
<file path="src/thicket/cli/commands/add.py">
+
"""Add command for thicket."""
+
+
import asyncio
+
from pathlib import Path
+
from typing import Optional
+
+
import typer
+
from pydantic import HttpUrl, ValidationError
+
+
from ...core.feed_parser import FeedParser
+
from ...core.git_store import GitStore
+
from ..main import app
+
from ..utils import (
+
create_progress,
+
load_config,
+
print_error,
+
print_info,
+
print_success,
+
)
+
+
+
@app.command("add")
+
def add_command(
+
subcommand: str = typer.Argument(..., help="Subcommand: 'user' or 'feed'"),
+
username: str = typer.Argument(..., help="Username"),
+
feed_url: Optional[str] = typer.Argument(None, help="Feed URL (required for 'user' command)"),
+
email: Optional[str] = typer.Option(None, "--email", "-e", help="User email"),
+
homepage: Optional[str] = typer.Option(None, "--homepage", "-h", help="User homepage"),
+
icon: Optional[str] = typer.Option(None, "--icon", "-i", help="User icon URL"),
+
display_name: Optional[str] = typer.Option(None, "--display-name", "-d", help="User display name"),
+
config_file: Optional[Path] = typer.Option(
+
Path("thicket.yaml"), "--config", help="Configuration file path"
+
),
+
auto_discover: bool = typer.Option(
+
True, "--auto-discover/--no-auto-discover", help="Auto-discover user metadata from feed"
+
),
+
) -> None:
+
"""Add a user or feed to thicket."""
+
+
if subcommand == "user":
+
add_user(username, feed_url, email, homepage, icon, display_name, config_file, auto_discover)
+
elif subcommand == "feed":
+
add_feed(username, feed_url, config_file)
+
else:
+
print_error(f"Unknown subcommand: {subcommand}")
+
print_error("Use 'user' or 'feed'")
+
raise typer.Exit(1)
+
+
+
def add_user(
+
username: str,
+
feed_url: Optional[str],
+
email: Optional[str],
+
homepage: Optional[str],
+
icon: Optional[str],
+
display_name: Optional[str],
+
config_file: Path,
+
auto_discover: bool,
+
) -> None:
+
"""Add a new user with feed."""
+
+
if not feed_url:
+
print_error("Feed URL is required when adding a user")
+
raise typer.Exit(1)
+
+
# Validate feed URL
+
try:
+
validated_feed_url = HttpUrl(feed_url)
+
except ValidationError:
+
print_error(f"Invalid feed URL: {feed_url}")
+
raise typer.Exit(1) from None
+
+
# Load configuration
+
config = load_config(config_file)
+
+
# Initialize Git store
+
git_store = GitStore(config.git_store)
+
+
# Check if user already exists
+
existing_user = git_store.get_user(username)
+
if existing_user:
+
print_error(f"User '{username}' already exists")
+
print_error("Use 'thicket add feed' to add additional feeds")
+
raise typer.Exit(1)
+
+
# Auto-discover metadata if enabled
+
discovered_metadata = None
+
if auto_discover:
+
discovered_metadata = asyncio.run(discover_feed_metadata(validated_feed_url))
+
+
# Prepare user data with manual overrides taking precedence
+
user_display_name = display_name or (discovered_metadata.author_name or discovered_metadata.title if discovered_metadata else None)
+
user_email = email or (discovered_metadata.author_email if discovered_metadata else None)
+
discovered_homepage = (discovered_metadata.author_uri or discovered_metadata.link) if discovered_metadata else None
+
user_homepage = homepage or (str(discovered_homepage) if discovered_homepage else None)
+
discovered_icon = (discovered_metadata.logo or discovered_metadata.icon or discovered_metadata.image_url) if discovered_metadata else None
+
user_icon = icon or (str(discovered_icon) if discovered_icon else None)
+
+
# Add user to Git store
+
git_store.add_user(
+
username=username,
+
display_name=user_display_name,
+
email=user_email,
+
homepage=user_homepage,
+
icon=user_icon,
+
feeds=[str(validated_feed_url)],
+
)
+
+
# Commit changes
+
git_store.commit_changes(f"Add user: {username}")
+
+
print_success(f"Added user '{username}' with feed: {feed_url}")
+
+
if discovered_metadata and auto_discover:
+
print_info("Auto-discovered metadata:")
+
if user_display_name:
+
print_info(f" Display name: {user_display_name}")
+
if user_email:
+
print_info(f" Email: {user_email}")
+
if user_homepage:
+
print_info(f" Homepage: {user_homepage}")
+
if user_icon:
+
print_info(f" Icon: {user_icon}")
+
+
+
def add_feed(username: str, feed_url: Optional[str], config_file: Path) -> None:
+
"""Add a feed to an existing user."""
+
+
if not feed_url:
+
print_error("Feed URL is required")
+
raise typer.Exit(1)
+
+
# Validate feed URL
+
try:
+
validated_feed_url = HttpUrl(feed_url)
+
except ValidationError:
+
print_error(f"Invalid feed URL: {feed_url}")
+
raise typer.Exit(1) from None
+
+
# Load configuration
+
config = load_config(config_file)
+
+
# Initialize Git store
+
git_store = GitStore(config.git_store)
+
+
# Check if user exists
+
user = git_store.get_user(username)
+
if not user:
+
print_error(f"User '{username}' not found")
+
print_error("Use 'thicket add user' to add a new user")
+
raise typer.Exit(1)
+
+
# Check if feed already exists
+
if str(validated_feed_url) in user.feeds:
+
print_error(f"Feed already exists for user '{username}': {feed_url}")
+
raise typer.Exit(1)
+
+
# Add feed to user
+
updated_feeds = user.feeds + [str(validated_feed_url)]
+
if git_store.update_user(username, feeds=updated_feeds):
+
git_store.commit_changes(f"Add feed to user {username}: {feed_url}")
+
print_success(f"Added feed to user '{username}': {feed_url}")
+
else:
+
print_error(f"Failed to add feed to user '{username}'")
+
raise typer.Exit(1)
+
+
+
async def discover_feed_metadata(feed_url: HttpUrl):
+
"""Discover metadata from a feed URL."""
+
try:
+
with create_progress() as progress:
+
task = progress.add_task("Discovering feed metadata...", total=None)
+
+
parser = FeedParser()
+
content = await parser.fetch_feed(feed_url)
+
metadata, _ = parser.parse_feed(content, feed_url)
+
+
progress.update(task, completed=True)
+
return metadata
+
+
except Exception as e:
+
print_error(f"Failed to discover feed metadata: {e}")
+
return None
+
</file>
+
+
<file path="src/thicket/cli/commands/duplicates.py">
+
"""Duplicates command for thicket."""
+
+
from pathlib import Path
+
from typing import Optional
+
+
import typer
+
from rich.table import Table
+
+
from ...core.git_store import GitStore
+
from ..main import app
+
from ..utils import (
+
console,
+
load_config,
+
print_error,
+
print_info,
+
print_success,
+
get_tsv_mode,
+
)
+
+
+
@app.command("duplicates")
+
def duplicates_command(
+
action: str = typer.Argument(..., help="Action: 'list', 'add', 'remove'"),
+
duplicate_id: Optional[str] = typer.Argument(None, help="Duplicate entry ID"),
+
canonical_id: Optional[str] = typer.Argument(None, help="Canonical entry ID"),
+
config_file: Optional[Path] = typer.Option(
+
Path("thicket.yaml"), "--config", help="Configuration file path"
+
),
+
) -> None:
+
"""Manage duplicate entry mappings."""
+
+
# Load configuration
+
config = load_config(config_file)
+
+
# Initialize Git store
+
git_store = GitStore(config.git_store)
+
+
if action == "list":
+
list_duplicates(git_store)
+
elif action == "add":
+
add_duplicate(git_store, duplicate_id, canonical_id)
+
elif action == "remove":
+
remove_duplicate(git_store, duplicate_id)
+
else:
+
print_error(f"Unknown action: {action}")
+
print_error("Use 'list', 'add', or 'remove'")
+
raise typer.Exit(1)
+
+
+
def list_duplicates(git_store: GitStore) -> None:
+
"""List all duplicate mappings."""
+
duplicates = git_store.get_duplicates()
+
+
if not duplicates.duplicates:
+
if get_tsv_mode():
+
print("No duplicate mappings found")
+
else:
+
print_info("No duplicate mappings found")
+
return
+
+
if get_tsv_mode():
+
print("Duplicate ID\tCanonical ID")
+
for duplicate_id, canonical_id in duplicates.duplicates.items():
+
print(f"{duplicate_id}\t{canonical_id}")
+
print(f"Total duplicates: {len(duplicates.duplicates)}")
+
else:
+
table = Table(title="Duplicate Entry Mappings")
+
table.add_column("Duplicate ID", style="red")
+
table.add_column("Canonical ID", style="green")
+
+
for duplicate_id, canonical_id in duplicates.duplicates.items():
+
table.add_row(duplicate_id, canonical_id)
+
+
console.print(table)
+
print_info(f"Total duplicates: {len(duplicates.duplicates)}")
+
+
+
def add_duplicate(git_store: GitStore, duplicate_id: Optional[str], canonical_id: Optional[str]) -> None:
+
"""Add a duplicate mapping."""
+
if not duplicate_id:
+
print_error("Duplicate ID is required")
+
raise typer.Exit(1)
+
+
if not canonical_id:
+
print_error("Canonical ID is required")
+
raise typer.Exit(1)
+
+
# Check if duplicate_id already exists
+
duplicates = git_store.get_duplicates()
+
if duplicates.is_duplicate(duplicate_id):
+
existing_canonical = duplicates.get_canonical(duplicate_id)
+
print_error(f"Duplicate ID already mapped to: {existing_canonical}")
+
print_error("Use 'remove' first to change the mapping")
+
raise typer.Exit(1)
+
+
# Check if we're trying to make a canonical ID point to itself
+
if duplicate_id == canonical_id:
+
print_error("Duplicate ID cannot be the same as canonical ID")
+
raise typer.Exit(1)
+
+
# Add the mapping
+
git_store.add_duplicate(duplicate_id, canonical_id)
+
+
# Commit changes
+
git_store.commit_changes(f"Add duplicate mapping: {duplicate_id} -> {canonical_id}")
+
+
print_success(f"Added duplicate mapping: {duplicate_id} -> {canonical_id}")
+
+
+
def remove_duplicate(git_store: GitStore, duplicate_id: Optional[str]) -> None:
+
"""Remove a duplicate mapping."""
+
if not duplicate_id:
+
print_error("Duplicate ID is required")
+
raise typer.Exit(1)
+
+
# Check if mapping exists
+
duplicates = git_store.get_duplicates()
+
if not duplicates.is_duplicate(duplicate_id):
+
print_error(f"No duplicate mapping found for: {duplicate_id}")
+
raise typer.Exit(1)
+
+
canonical_id = duplicates.get_canonical(duplicate_id)
+
+
# Remove the mapping
+
if git_store.remove_duplicate(duplicate_id):
+
# Commit changes
+
git_store.commit_changes(f"Remove duplicate mapping: {duplicate_id} -> {canonical_id}")
+
print_success(f"Removed duplicate mapping: {duplicate_id} -> {canonical_id}")
+
else:
+
print_error(f"Failed to remove duplicate mapping: {duplicate_id}")
+
raise typer.Exit(1)
+
</file>
+
+
<file path="src/thicket/cli/commands/sync.py">
+
"""Sync command for thicket."""
+
+
import asyncio
+
from pathlib import Path
+
from typing import Optional
+
+
import typer
+
from rich.progress import track
+
+
from ...core.feed_parser import FeedParser
+
from ...core.git_store import GitStore
+
from ..main import app
+
from ..utils import (
+
load_config,
+
print_error,
+
print_info,
+
print_success,
+
)
+
+
+
@app.command()
+
def sync(
+
all_users: bool = typer.Option(
+
False, "--all", "-a", help="Sync all users and feeds"
+
),
+
user: Optional[str] = typer.Option(
+
None, "--user", "-u", help="Sync specific user only"
+
),
+
config_file: Optional[Path] = typer.Option(
+
Path("thicket.yaml"), "--config", help="Configuration file path"
+
),
+
dry_run: bool = typer.Option(
+
False, "--dry-run", help="Show what would be synced without making changes"
+
),
+
) -> None:
+
"""Sync feeds and store entries in Git repository."""
+
+
# Load configuration
+
config = load_config(config_file)
+
+
# Initialize Git store
+
git_store = GitStore(config.git_store)
+
+
# Determine which users to sync from git repository
+
users_to_sync = []
+
if all_users:
+
index = git_store._load_index()
+
users_to_sync = list(index.users.values())
+
elif user:
+
user_metadata = git_store.get_user(user)
+
if not user_metadata:
+
print_error(f"User '{user}' not found in git repository")
+
raise typer.Exit(1)
+
users_to_sync = [user_metadata]
+
else:
+
print_error("Specify --all to sync all users or --user to sync a specific user")
+
raise typer.Exit(1)
+
+
if not users_to_sync:
+
print_info("No users configured to sync")
+
return
+
+
# Sync each user
+
total_new_entries = 0
+
total_updated_entries = 0
+
+
for user_metadata in users_to_sync:
+
print_info(f"Syncing user: {user_metadata.username}")
+
+
user_new_entries = 0
+
user_updated_entries = 0
+
+
# Sync each feed for the user
+
for feed_url in track(user_metadata.feeds, description=f"Syncing {user_metadata.username}'s feeds"):
+
try:
+
new_entries, updated_entries = asyncio.run(
+
sync_feed(git_store, user_metadata.username, feed_url, dry_run)
+
)
+
user_new_entries += new_entries
+
user_updated_entries += updated_entries
+
+
except Exception as e:
+
print_error(f"Failed to sync feed {feed_url}: {e}")
+
continue
+
+
print_info(f"User {user_metadata.username}: {user_new_entries} new, {user_updated_entries} updated")
+
total_new_entries += user_new_entries
+
total_updated_entries += user_updated_entries
+
+
# Commit changes if not dry run
+
if not dry_run and (total_new_entries > 0 or total_updated_entries > 0):
+
commit_message = f"Sync feeds: {total_new_entries} new entries, {total_updated_entries} updated"
+
git_store.commit_changes(commit_message)
+
print_success(f"Committed changes: {commit_message}")
+
+
# Summary
+
if dry_run:
+
print_info(f"Dry run complete: would sync {total_new_entries} new entries, {total_updated_entries} updated")
+
else:
+
print_success(f"Sync complete: {total_new_entries} new entries, {total_updated_entries} updated")
+
+
+
async def sync_feed(git_store: GitStore, username: str, feed_url, dry_run: bool) -> tuple[int, int]:
+
"""Sync a single feed for a user."""
+
+
parser = FeedParser()
+
+
try:
+
# Fetch and parse feed
+
content = await parser.fetch_feed(feed_url)
+
metadata, entries = parser.parse_feed(content, feed_url)
+
+
new_entries = 0
+
updated_entries = 0
+
+
# Process each entry
+
for entry in entries:
+
try:
+
# Check if entry already exists
+
existing_entry = git_store.get_entry(username, entry.id)
+
+
if existing_entry:
+
# Check if entry has been updated
+
if existing_entry.updated != entry.updated:
+
if not dry_run:
+
git_store.store_entry(username, entry)
+
updated_entries += 1
+
else:
+
# New entry
+
if not dry_run:
+
git_store.store_entry(username, entry)
+
new_entries += 1
+
+
except Exception as e:
+
print_error(f"Failed to process entry {entry.id}: {e}")
+
continue
+
+
return new_entries, updated_entries
+
+
except Exception as e:
+
print_error(f"Failed to sync feed {feed_url}: {e}")
+
return 0, 0
+
</file>
+
+
<file path="src/thicket/models/config.py">
+
"""Configuration models for thicket."""
+
+
from pathlib import Path
+
from typing import Optional
+
+
from pydantic import BaseModel, EmailStr, HttpUrl
+
from pydantic_settings import BaseSettings, SettingsConfigDict
+
+
+
class UserConfig(BaseModel):
+
"""Configuration for a single user and their feeds."""
+
+
username: str
+
feeds: list[HttpUrl]
+
email: Optional[EmailStr] = None
+
homepage: Optional[HttpUrl] = None
+
icon: Optional[HttpUrl] = None
+
display_name: Optional[str] = None
+
+
+
class ThicketConfig(BaseSettings):
+
"""Main configuration for thicket."""
+
+
model_config = SettingsConfigDict(
+
env_prefix="THICKET_",
+
env_file=".env",
+
yaml_file="thicket.yaml",
+
case_sensitive=False,
+
)
+
+
git_store: Path
+
cache_dir: Path
+
users: list[UserConfig] = []
+
</file>
+
+
<file path="src/thicket/cli/commands/links_cmd.py">
+
"""CLI command for extracting and categorizing all outbound links from blog entries."""
+
+
import json
+
import re
+
from pathlib import Path
+
from typing import Dict, List, Optional, Set
+
from urllib.parse import urljoin, urlparse
+
+
import typer
+
from rich.console import Console
+
from rich.progress import Progress, SpinnerColumn, TextColumn, BarColumn, TaskProgressColumn
+
from rich.table import Table
+
+
from ...core.git_store import GitStore
+
from ..main import app
+
from ..utils import load_config, get_tsv_mode
+
+
console = Console()
+
+
+
class LinkData:
+
"""Represents a link found in a blog entry."""
+
+
def __init__(self, url: str, entry_id: str, username: str):
+
self.url = url
+
self.entry_id = entry_id
+
self.username = username
+
+
def to_dict(self) -> dict:
+
"""Convert to dictionary for JSON serialization."""
+
return {
+
"url": self.url,
+
"entry_id": self.entry_id,
+
"username": self.username
+
}
+
+
@classmethod
+
def from_dict(cls, data: dict) -> "LinkData":
+
"""Create from dictionary."""
+
return cls(
+
url=data["url"],
+
entry_id=data["entry_id"],
+
username=data["username"]
+
)
+
+
+
class LinkCategorizer:
+
"""Categorizes links as internal, user, or unknown."""
+
+
def __init__(self, user_domains: Dict[str, Set[str]]):
+
self.user_domains = user_domains
+
# Create reverse mapping of domain -> username
+
self.domain_to_user = {}
+
for username, domains in user_domains.items():
+
for domain in domains:
+
self.domain_to_user[domain] = username
+
+
def categorize_url(self, url: str, source_username: str) -> tuple[str, Optional[str]]:
+
"""
+
Categorize a URL as 'internal', 'user', or 'unknown'.
+
Returns (category, target_username).
+
"""
+
try:
+
parsed = urlparse(url)
+
domain = parsed.netloc.lower()
+
+
# Check if it's a link to the same user's domain (internal)
+
if domain in self.user_domains.get(source_username, set()):
+
return "internal", source_username
+
+
# Check if it's a link to another user's domain
+
if domain in self.domain_to_user:
+
return "user", self.domain_to_user[domain]
+
+
# Everything else is unknown
+
return "unknown", None
+
+
except Exception:
+
return "unknown", None
+
+
+
class LinkExtractor:
+
"""Extracts and resolves links from blog entries."""
+
+
def __init__(self):
+
# Pattern for extracting links from HTML
+
self.link_pattern = re.compile(r'<a[^>]+href="([^"]+)"[^>]*>(.*?)</a>', re.IGNORECASE | re.DOTALL)
+
self.url_pattern = re.compile(r'https?://[^\s<>"]+')
+
+
def extract_links_from_html(self, html_content: str, base_url: str) -> List[tuple[str, str]]:
+
"""Extract all links from HTML content and resolve them against base URL."""
+
links = []
+
+
# Extract links from <a> tags
+
for match in self.link_pattern.finditer(html_content):
+
url = match.group(1)
+
text = re.sub(r'<[^>]+>', '', match.group(2)).strip() # Remove HTML tags from link text
+
+
# Resolve relative URLs against base URL
+
resolved_url = urljoin(base_url, url)
+
links.append((resolved_url, text))
+
+
return links
+
+
+
def extract_links_from_entry(self, entry, username: str, base_url: str) -> List[LinkData]:
+
"""Extract all links from a blog entry."""
+
links = []
+
+
# Combine all text content for analysis
+
content_to_search = []
+
if entry.content:
+
content_to_search.append(entry.content)
+
if entry.summary:
+
content_to_search.append(entry.summary)
+
+
for content in content_to_search:
+
extracted_links = self.extract_links_from_html(content, base_url)
+
+
for url, link_text in extracted_links:
+
# Skip empty URLs
+
if not url or url.startswith('#'):
+
continue
+
+
link_data = LinkData(
+
url=url,
+
entry_id=entry.id,
+
username=username
+
)
+
+
links.append(link_data)
+
+
return links
+
+
+
@app.command()
+
def links(
+
config_file: Optional[Path] = typer.Option(
+
Path("thicket.yaml"),
+
"--config",
+
"-c",
+
help="Path to configuration file",
+
),
+
output_file: Optional[Path] = typer.Option(
+
None,
+
"--output",
+
"-o",
+
help="Path to output unified links file (default: links.json in git store)",
+
),
+
verbose: bool = typer.Option(
+
False,
+
"--verbose",
+
"-v",
+
help="Show detailed progress information",
+
),
+
) -> None:
+
"""Extract and categorize all outbound links from blog entries.
+
+
This command analyzes all blog entries to extract outbound links,
+
resolve them properly with respect to the feed's base URL, and
+
categorize them as internal, user, or unknown links.
+
+
Creates a unified links.json file containing all link data.
+
"""
+
try:
+
# Load configuration
+
config = load_config(config_file)
+
+
# Initialize Git store
+
git_store = GitStore(config.git_store)
+
+
# Build user domain mapping
+
if verbose:
+
console.print("Building user domain mapping...")
+
+
index = git_store._load_index()
+
user_domains = {}
+
+
for username, user_metadata in index.users.items():
+
domains = set()
+
+
# Add domains from feeds
+
for feed_url in user_metadata.feeds:
+
domain = urlparse(feed_url).netloc.lower()
+
if domain:
+
domains.add(domain)
+
+
# Add domain from homepage
+
if user_metadata.homepage:
+
domain = urlparse(str(user_metadata.homepage)).netloc.lower()
+
if domain:
+
domains.add(domain)
+
+
user_domains[username] = domains
+
+
if verbose:
+
console.print(f"Found {len(user_domains)} users with {sum(len(d) for d in user_domains.values())} total domains")
+
+
# Initialize components
+
link_extractor = LinkExtractor()
+
categorizer = LinkCategorizer(user_domains)
+
+
# Get all users
+
users = list(index.users.keys())
+
+
if not users:
+
console.print("[yellow]No users found in Git store[/yellow]")
+
raise typer.Exit(0)
+
+
# Process all entries
+
all_links = []
+
link_categories = {"internal": [], "user": [], "unknown": []}
+
link_dict = {} # Dictionary with link URL as key, maps to list of atom IDs
+
reverse_dict = {} # Dictionary with atom ID as key, maps to list of URLs
+
+
with Progress(
+
SpinnerColumn(),
+
TextColumn("[progress.description]{task.description}"),
+
BarColumn(),
+
TaskProgressColumn(),
+
console=console,
+
) as progress:
+
+
# Count total entries first
+
counting_task = progress.add_task("Counting entries...", total=len(users))
+
total_entries = 0
+
+
for username in users:
+
entries = git_store.list_entries(username)
+
total_entries += len(entries)
+
progress.advance(counting_task)
+
+
progress.remove_task(counting_task)
+
+
# Process entries
+
processing_task = progress.add_task(
+
f"Processing {total_entries} entries...",
+
total=total_entries
+
)
+
+
for username in users:
+
entries = git_store.list_entries(username)
+
user_metadata = index.users[username]
+
+
# Get base URL for this user (use first feed URL)
+
base_url = str(user_metadata.feeds[0]) if user_metadata.feeds else "https://example.com"
+
+
for entry in entries:
+
# Extract links from this entry
+
entry_links = link_extractor.extract_links_from_entry(entry, username, base_url)
+
+
# Track unique links per entry
+
entry_urls_seen = set()
+
+
# Categorize each link
+
for link_data in entry_links:
+
# Skip if we've already seen this URL in this entry
+
if link_data.url in entry_urls_seen:
+
continue
+
entry_urls_seen.add(link_data.url)
+
+
category, target_username = categorizer.categorize_url(link_data.url, username)
+
+
# Add to link dictionary (URL as key, maps to list of atom IDs)
+
if link_data.url not in link_dict:
+
link_dict[link_data.url] = []
+
if link_data.entry_id not in link_dict[link_data.url]:
+
link_dict[link_data.url].append(link_data.entry_id)
+
+
# Also add to reverse mapping (atom ID -> list of URLs)
+
if link_data.entry_id not in reverse_dict:
+
reverse_dict[link_data.entry_id] = []
+
if link_data.url not in reverse_dict[link_data.entry_id]:
+
reverse_dict[link_data.entry_id].append(link_data.url)
+
+
# Add category info to link data for categories tracking
+
link_info = link_data.to_dict()
+
link_info["category"] = category
+
link_info["target_username"] = target_username
+
+
all_links.append(link_info)
+
link_categories[category].append(link_info)
+
+
progress.advance(processing_task)
+
+
if verbose and entry_links:
+
console.print(f" Found {len(entry_links)} links in {username}:{entry.title[:50]}...")
+
+
# Determine output path
+
if output_file:
+
output_path = output_file
+
else:
+
output_path = config.git_store / "links.json"
+
+
# Save all extracted links (not just filtered ones)
+
if verbose:
+
console.print("Preparing output data...")
+
+
# Build a set of all URLs that correspond to posts in the git database
+
registered_urls = set()
+
+
# Get all entries from all users and build URL mappings
+
for username in users:
+
entries = git_store.list_entries(username)
+
user_metadata = index.users[username]
+
+
for entry in entries:
+
# Try to match entry URLs with extracted links
+
if hasattr(entry, 'link') and entry.link:
+
registered_urls.add(str(entry.link))
+
+
# Also check entry alternate links if they exist
+
if hasattr(entry, 'links') and entry.links:
+
for link in entry.links:
+
if hasattr(link, 'href') and link.href:
+
registered_urls.add(str(link.href))
+
+
# Build unified structure with metadata
+
unified_links = {}
+
reverse_mapping = {}
+
+
for url, entry_ids in link_dict.items():
+
unified_links[url] = {
+
"referencing_entries": entry_ids
+
}
+
+
# Find target username if this is a tracked post
+
if url in registered_urls:
+
# Match on the URL's host rather than a raw substring to avoid false
+
# positives (e.g. a tracked domain appearing in another URL's path)
+
url_domain = urlparse(url).netloc.lower()
+
for username in users:
+
if url_domain in user_domains.get(username, set()):
+
unified_links[url]["target_username"] = username
+
break
+
+
# Build reverse mapping
+
for entry_id in entry_ids:
+
if entry_id not in reverse_mapping:
+
reverse_mapping[entry_id] = []
+
if url not in reverse_mapping[entry_id]:
+
reverse_mapping[entry_id].append(url)
+
+
# Create unified output data
+
output_data = {
+
"links": unified_links,
+
"reverse_mapping": reverse_mapping,
+
"user_domains": {k: list(v) for k, v in user_domains.items()}
+
}
+
+
if verbose:
+
console.print(f"Found {len(registered_urls)} registered post URLs")
+
console.print(f"Found {len(link_dict)} total links, {sum(1 for link in unified_links.values() if 'target_username' in link)} tracked posts")
+
+
# Save unified data
+
with open(output_path, "w") as f:
+
json.dump(output_data, f, indent=2, default=str)
+
+
# Show summary
+
if not get_tsv_mode():
+
console.print("\n[green]โœ“ Links extraction completed successfully[/green]")
+
+
# Create summary table or TSV output
+
if get_tsv_mode():
+
print("Category\tCount\tDescription")
+
print(f"Internal\t{len(link_categories['internal'])}\tLinks to same user's domain")
+
print(f"User\t{len(link_categories['user'])}\tLinks to other tracked users")
+
print(f"Unknown\t{len(link_categories['unknown'])}\tLinks to external sites")
+
print(f"Total Extracted\t{len(all_links)}\tAll extracted links")
+
print(f"Saved to Output\t{len(output_data['links'])}\tLinks saved to output file")
+
print(f"Cross-references\t{sum(1 for link in unified_links.values() if 'target_username' in link)}\tLinks to registered posts only")
+
else:
+
table = Table(title="Links Summary")
+
table.add_column("Category", style="cyan")
+
table.add_column("Count", style="green")
+
table.add_column("Description", style="white")
+
+
table.add_row("Internal", str(len(link_categories["internal"])), "Links to same user's domain")
+
table.add_row("User", str(len(link_categories["user"])), "Links to other tracked users")
+
table.add_row("Unknown", str(len(link_categories["unknown"])), "Links to external sites")
+
table.add_row("Total Extracted", str(len(all_links)), "All extracted links")
+
table.add_row("Saved to Output", str(len(output_data['links'])), "Links saved to output file")
+
table.add_row("Cross-references", str(sum(1 for link in unified_links.values() if 'target_username' in link)), "Links to registered posts only")
+
+
console.print(table)
+
+
# Show user links if verbose
+
if verbose and link_categories["user"]:
+
if get_tsv_mode():
+
print("User Link Source\tUser Link Target\tLink Count")
+
user_link_counts = {}
+
+
for link in link_categories["user"]:
+
key = f"{link['username']} -> {link['target_username']}"
+
user_link_counts[key] = user_link_counts.get(key, 0) + 1
+
+
for link_pair, count in sorted(user_link_counts.items(), key=lambda x: x[1], reverse=True)[:10]:
+
source, target = link_pair.split(" -> ")
+
print(f"{source}\t{target}\t{count}")
+
else:
+
console.print("\n[bold]User-to-user links:[/bold]")
+
user_link_counts = {}
+
+
for link in link_categories["user"]:
+
key = f"{link['username']} -> {link['target_username']}"
+
user_link_counts[key] = user_link_counts.get(key, 0) + 1
+
+
for link_pair, count in sorted(user_link_counts.items(), key=lambda x: x[1], reverse=True)[:10]:
+
console.print(f" {link_pair}: {count} links")
+
+
if not get_tsv_mode():
+
console.print(f"\nUnified links data saved to: {output_path}")
+
+
except Exception as e:
+
console.print(f"[red]Error extracting links: {e}[/red]")
+
if verbose:
+
console.print_exception()
+
raise typer.Exit(1)
+
</file>
+
+
<file path="src/thicket/cli/commands/list_cmd.py">
+
"""List command for thicket."""
+
+
import re
+
from pathlib import Path
+
from typing import Optional
+
+
import typer
+
from rich.table import Table
+
+
from ...core.git_store import GitStore
+
from ..main import app
+
from ..utils import (
+
console,
+
load_config,
+
print_error,
+
print_feeds_table_from_git,
+
print_info,
+
print_users_table_from_git,
+
print_entries_tsv,
+
get_tsv_mode,
+
)
+
+
+
@app.command("list")
+
def list_command(
+
what: str = typer.Argument(..., help="What to list: 'users', 'feeds', 'entries'"),
+
user: Optional[str] = typer.Option(
+
None, "--user", "-u", help="Filter by specific user"
+
),
+
limit: Optional[int] = typer.Option(
+
None, "--limit", "-l", help="Limit number of results"
+
),
+
config_file: Optional[Path] = typer.Option(
+
Path("thicket.yaml"), "--config", help="Configuration file path"
+
),
+
) -> None:
+
"""List users, feeds, or entries."""
+
+
# Load configuration
+
config = load_config(config_file)
+
+
# Initialize Git store
+
git_store = GitStore(config.git_store)
+
+
if what == "users":
+
list_users(git_store)
+
elif what == "feeds":
+
list_feeds(git_store, user)
+
elif what == "entries":
+
list_entries(git_store, user, limit)
+
else:
+
print_error(f"Unknown list type: {what}")
+
print_error("Use 'users', 'feeds', or 'entries'")
+
raise typer.Exit(1)
+
+
+
def list_users(git_store: GitStore) -> None:
+
"""List all users."""
+
index = git_store._load_index()
+
users = list(index.users.values())
+
+
if not users:
+
print_info("No users configured")
+
return
+
+
print_users_table_from_git(users)
+
+
+
def list_feeds(git_store: GitStore, username: Optional[str] = None) -> None:
+
"""List feeds, optionally filtered by user."""
+
if username:
+
user = git_store.get_user(username)
+
if not user:
+
print_error(f"User '{username}' not found")
+
raise typer.Exit(1)
+
+
if not user.feeds:
+
print_info(f"No feeds configured for user '{username}'")
+
return
+
+
print_feeds_table_from_git(git_store, username)
+
+
+
def list_entries(git_store: GitStore, username: Optional[str] = None, limit: Optional[int] = None) -> None:
+
"""List entries, optionally filtered by user."""
+
+
if username:
+
# List entries for specific user
+
user = git_store.get_user(username)
+
if not user:
+
print_error(f"User '{username}' not found")
+
raise typer.Exit(1)
+
+
entries = git_store.list_entries(username, limit)
+
if not entries:
+
print_info(f"No entries found for user '{username}'")
+
return
+
+
print_entries_table([entries], [username])
+
+
else:
+
# List entries for all users
+
all_entries = []
+
all_usernames = []
+
+
index = git_store._load_index()
+
for user in index.users.values():
+
entries = git_store.list_entries(user.username, limit)
+
if entries:
+
all_entries.append(entries)
+
all_usernames.append(user.username)
+
+
if not all_entries:
+
print_info("No entries found")
+
return
+
+
print_entries_table(all_entries, all_usernames)
+
+
+
def _clean_html_content(content: Optional[str]) -> str:
+
"""Clean HTML content for display in table."""
+
if not content:
+
return ""
+
+
# Remove HTML tags
+
clean_text = re.sub(r'<[^>]+>', ' ', content)
+
# Replace multiple whitespace with single space
+
clean_text = re.sub(r'\s+', ' ', clean_text)
+
# Strip and limit length
+
clean_text = clean_text.strip()
+
if len(clean_text) > 100:
+
clean_text = clean_text[:97] + "..."
+
+
return clean_text
+
+
+
def print_entries_table(entries_by_user: list[list], usernames: list[str]) -> None:
+
"""Print a table of entries."""
+
if get_tsv_mode():
+
print_entries_tsv(entries_by_user, usernames)
+
return
+
+
table = Table(title="Feed Entries")
+
table.add_column("User", style="cyan", no_wrap=True)
+
table.add_column("Title", style="bold")
+
table.add_column("Updated", style="blue")
+
table.add_column("URL", style="green")
+
+
# Combine all entries with usernames
+
all_entries = []
+
for entries, username in zip(entries_by_user, usernames):
+
for entry in entries:
+
all_entries.append((username, entry))
+
+
# Sort by updated time (newest first)
+
all_entries.sort(key=lambda x: x[1].updated, reverse=True)
+
+
for username, entry in all_entries:
+
# Format updated time
+
updated_str = entry.updated.strftime("%Y-%m-%d %H:%M")
+
+
# Truncate title if too long
+
title = entry.title
+
if len(title) > 50:
+
title = title[:47] + "..."
+
+
table.add_row(
+
username,
+
title,
+
updated_str,
+
str(entry.link),
+
)
+
+
console.print(table)
+
</file>
+
+
<file path="src/thicket/cli/main.py">
+
"""Main CLI application using Typer."""
+
+
from typing import Optional
+
+
import typer
+
from rich.console import Console
+
+
from .. import __version__
+
+
app = typer.Typer(
+
name="thicket",
+
help="A CLI tool for persisting Atom/RSS feeds in Git repositories",
+
no_args_is_help=True,
+
rich_markup_mode="rich",
+
)
+
+
console = Console()
+
+
# Global state for TSV output mode
+
tsv_mode = False
+
+
+
def version_callback(value: bool) -> None:
+
"""Show version and exit."""
+
if value:
+
console.print(f"thicket version {__version__}")
+
raise typer.Exit()
+
+
+
@app.callback()
+
def main(
+
version: Optional[bool] = typer.Option(
+
None,
+
"--version",
+
"-v",
+
help="Show the version and exit",
+
callback=version_callback,
+
is_eager=True,
+
),
+
tsv: bool = typer.Option(
+
False,
+
"--tsv",
+
help="Output in tab-separated values format without truncation",
+
),
+
) -> None:
+
"""Thicket: A CLI tool for persisting Atom/RSS feeds in Git repositories."""
+
global tsv_mode
+
tsv_mode = tsv
+
+
+
# Import commands to register them
+
from .commands import add, duplicates, generate, index_cmd, info_cmd, init, links_cmd, list_cmd, sync
+
+
if __name__ == "__main__":
+
app()
+
</file>
+
+
<file path="src/thicket/core/git_store.py">
+
"""Git repository operations for thicket."""
+
+
import json
+
from datetime import datetime
+
from pathlib import Path
+
from typing import Optional
+
+
import git
+
from git import Repo
+
+
from ..models import AtomEntry, DuplicateMap, GitStoreIndex, UserMetadata
+
+
+
class GitStore:
+
"""Manages the Git repository for storing feed entries."""
+
+
def __init__(self, repo_path: Path):
+
"""Initialize the Git store."""
+
self.repo_path = repo_path
+
self.repo: Optional[Repo] = None
+
self._ensure_repo()
+
+
def _ensure_repo(self) -> None:
+
"""Ensure the Git repository exists and is initialized."""
+
if not self.repo_path.exists():
+
self.repo_path.mkdir(parents=True, exist_ok=True)
+
+
try:
+
self.repo = Repo(self.repo_path)
+
except git.InvalidGitRepositoryError:
+
# Initialize new repository
+
self.repo = Repo.init(self.repo_path)
+
self._create_initial_structure()
+
+
def _create_initial_structure(self) -> None:
+
"""Create initial Git store structure."""
+
# Create index.json
+
index = GitStoreIndex(
+
created=datetime.now(),
+
last_updated=datetime.now(),
+
)
+
self._save_index(index)
+
+
# Create duplicates.json
+
duplicates = DuplicateMap()
+
self._save_duplicates(duplicates)
+
+
# Create initial commit
+
self.repo.index.add(["index.json", "duplicates.json"])
+
self.repo.index.commit("Initial thicket repository structure")
+
+
def _save_index(self, index: GitStoreIndex) -> None:
+
"""Save the index to index.json."""
+
index_path = self.repo_path / "index.json"
+
with open(index_path, "w") as f:
+
json.dump(index.model_dump(mode="json", exclude_none=True), f, indent=2, default=str)
+
+
def _load_index(self) -> GitStoreIndex:
+
"""Load the index from index.json."""
+
index_path = self.repo_path / "index.json"
+
if not index_path.exists():
+
return GitStoreIndex(
+
created=datetime.now(),
+
last_updated=datetime.now(),
+
)
+
+
with open(index_path) as f:
+
data = json.load(f)
+
+
return GitStoreIndex(**data)
+
+
def _save_duplicates(self, duplicates: DuplicateMap) -> None:
+
"""Save duplicates map to duplicates.json."""
+
duplicates_path = self.repo_path / "duplicates.json"
+
with open(duplicates_path, "w") as f:
+
json.dump(duplicates.model_dump(exclude_none=True), f, indent=2)
+
+
def _load_duplicates(self) -> DuplicateMap:
+
"""Load duplicates map from duplicates.json."""
+
duplicates_path = self.repo_path / "duplicates.json"
+
if not duplicates_path.exists():
+
return DuplicateMap()
+
+
with open(duplicates_path) as f:
+
data = json.load(f)
+
+
return DuplicateMap(**data)
+
+
def add_user(self, username: str, display_name: Optional[str] = None,
+
email: Optional[str] = None, homepage: Optional[str] = None,
+
icon: Optional[str] = None, feeds: Optional[list[str]] = None) -> UserMetadata:
+
"""Add a new user to the Git store."""
+
index = self._load_index()
+
+
# Create user directory
+
user_dir = self.repo_path / username
+
user_dir.mkdir(exist_ok=True)
+
+
# Create user metadata
+
user_metadata = UserMetadata(
+
username=username,
+
display_name=display_name,
+
email=email,
+
homepage=homepage,
+
icon=icon,
+
feeds=feeds or [],
+
directory=username,
+
created=datetime.now(),
+
last_updated=datetime.now(),
+
)
+
+
+
# Update index
+
index.add_user(user_metadata)
+
self._save_index(index)
+
+
return user_metadata
+
+
def get_user(self, username: str) -> Optional[UserMetadata]:
+
"""Get user metadata by username."""
+
index = self._load_index()
+
return index.get_user(username)
+
+
def update_user(self, username: str, **kwargs) -> bool:
+
"""Update user metadata."""
+
index = self._load_index()
+
user = index.get_user(username)
+
+
if not user:
+
return False
+
+
# Update user metadata
+
for key, value in kwargs.items():
+
if hasattr(user, key) and value is not None:
+
setattr(user, key, value)
+
+
user.update_timestamp()
+
+
+
# Update index
+
index.add_user(user)
+
self._save_index(index)
+
+
return True
+
+
def store_entry(self, username: str, entry: AtomEntry) -> bool:
+
"""Store an entry in the user's directory."""
+
user = self.get_user(username)
+
if not user:
+
return False
+
+
# Sanitize entry ID for filename
+
from .feed_parser import FeedParser
+
parser = FeedParser()
+
safe_id = parser.sanitize_entry_id(entry.id)
+
+
# Create entry file
+
user_dir = self.repo_path / user.directory
+
entry_path = user_dir / f"{safe_id}.json"
+
+
# Check if entry already exists
+
entry_exists = entry_path.exists()
+
+
# Save entry
+
with open(entry_path, "w") as f:
+
json.dump(entry.model_dump(mode="json", exclude_none=True), f, indent=2, default=str)
+
+
# Update user metadata if new entry
+
if not entry_exists:
+
index = self._load_index()
+
index.update_entry_count(username, 1)
+
self._save_index(index)
+
+
return True
+
+
def get_entry(self, username: str, entry_id: str) -> Optional[AtomEntry]:
+
"""Get an entry by username and entry ID."""
+
user = self.get_user(username)
+
if not user:
+
return None
+
+
# Sanitize entry ID
+
from .feed_parser import FeedParser
+
parser = FeedParser()
+
safe_id = parser.sanitize_entry_id(entry_id)
+
+
entry_path = self.repo_path / user.directory / f"{safe_id}.json"
+
if not entry_path.exists():
+
return None
+
+
with open(entry_path) as f:
+
data = json.load(f)
+
+
return AtomEntry(**data)
+
+
def list_entries(self, username: str, limit: Optional[int] = None) -> list[AtomEntry]:
+
"""List entries for a user."""
+
user = self.get_user(username)
+
if not user:
+
return []
+
+
user_dir = self.repo_path / user.directory
+
if not user_dir.exists():
+
return []
+
+
entries = []
+
entry_files = sorted(user_dir.glob("*.json"), key=lambda p: p.stat().st_mtime, reverse=True)
+
+
+
if limit:
+
entry_files = entry_files[:limit]
+
+
for entry_file in entry_files:
+
try:
+
with open(entry_file) as f:
+
data = json.load(f)
+
entries.append(AtomEntry(**data))
+
except Exception:
+
# Skip invalid entries
+
continue
+
+
return entries
+
+
def get_duplicates(self) -> DuplicateMap:
+
"""Get the duplicates map."""
+
return self._load_duplicates()
+
+
def add_duplicate(self, duplicate_id: str, canonical_id: str) -> None:
+
"""Add a duplicate mapping."""
+
duplicates = self._load_duplicates()
+
duplicates.add_duplicate(duplicate_id, canonical_id)
+
self._save_duplicates(duplicates)
+
+
def remove_duplicate(self, duplicate_id: str) -> bool:
+
"""Remove a duplicate mapping."""
+
duplicates = self._load_duplicates()
+
result = duplicates.remove_duplicate(duplicate_id)
+
self._save_duplicates(duplicates)
+
return result
+
+
def commit_changes(self, message: str) -> None:
+
"""Commit all changes to the Git repository."""
+
if not self.repo:
+
return
+
+
# Add all changes
+
self.repo.git.add(A=True)
+
+
# Check if there are changes to commit
+
if self.repo.index.diff("HEAD"):
+
self.repo.index.commit(message)
+
+
def get_stats(self) -> dict:
+
"""Get statistics about the Git store."""
+
index = self._load_index()
+
duplicates = self._load_duplicates()
+
+
return {
+
"total_users": len(index.users),
+
"total_entries": index.total_entries,
+
"total_duplicates": len(duplicates.duplicates),
+
"last_updated": index.last_updated,
+
"repository_size": sum(f.stat().st_size for f in self.repo_path.rglob("*") if f.is_file()),
+
}
+
+
def search_entries(self, query: str, username: Optional[str] = None,
+
limit: Optional[int] = None) -> list[tuple[str, AtomEntry]]:
+
"""Search entries by content."""
+
results = []
+
+
# Get users to search
+
index = self._load_index()
+
users = [index.get_user(username)] if username else list(index.users.values())
+
users = [u for u in users if u is not None]
+
+
for user in users:
+
user_dir = self.repo_path / user.directory
+
if not user_dir.exists():
+
continue
+
+
entry_files = user_dir.glob("*.json")
+
+
for entry_file in entry_files:
+
try:
+
with open(entry_file) as f:
+
data = json.load(f)
+
+
entry = AtomEntry(**data)
+
+
# Simple text search in title, summary, and content
+
searchable_text = " ".join(filter(None, [
+
entry.title,
+
entry.summary or "",
+
entry.content or "",
+
])).lower()
+
+
if query.lower() in searchable_text:
+
results.append((user.username, entry))
+
+
if limit and len(results) >= limit:
+
return results
+
+
except Exception:
+
# Skip invalid entries
+
continue
+
+
# Sort by updated time (newest first)
+
results.sort(key=lambda x: x[1].updated, reverse=True)
+
+
return results[:limit] if limit else results
+
</file>
+
+
<file path="ARCH.md">
+
# Thicket Architecture Design
+
+
## Overview
+
Thicket is a modern CLI tool for persisting Atom/RSS feeds in a Git repository, designed to enable distributed weblog comment structures.
+
+
## Technology Stack
+
+
### Core Libraries
+
+
#### CLI Framework
+
- **Typer** (0.15.x) - Modern CLI framework with type hints
+
- **Rich** (13.x) - Beautiful terminal output, progress bars, and tables
+
- **prompt-toolkit** - Interactive prompts when needed
+
+
#### Feed Processing
+
- **feedparser** (6.0.11) - Universal feed parser supporting RSS 0.9x, RSS 1.0, RSS 2.0, CDF, Atom 0.3, and Atom 1.0
+
- Alternative: **atoma** for stricter Atom/RSS parsing with JSON feed support
+
- Alternative: **fastfeedparser** for high-performance parsing (claims roughly 10x faster than feedparser)
+
+
#### Git Integration
+
- **GitPython** (3.1.44) - High-level git operations, requires git CLI
+
- Alternative: **pygit2** (1.18.0) - Direct libgit2 bindings, better for authentication
+
+
#### HTTP Client
+
- **httpx** (0.28.x) - Modern async/sync HTTP client with connection pooling
+
- **aiohttp** (3.11.x) - For async-only operations if needed
+
+
#### Configuration & Data Models
+
- **pydantic** (2.11.x) - Data validation and settings management
+
- **pydantic-settings** (2.10.x) - Configuration file handling with env var support
+
+
#### Utilities
+
- **pendulum** (3.x) - Better datetime handling
+
- **bleach** (6.x) - HTML sanitization for feed content
+
- **platformdirs** (4.x) - Cross-platform directory paths
+
+
## Project Structure
+
+
```
+
thicket/
+
├── pyproject.toml          # Modern Python packaging
+
├── README.md               # Project documentation
+
├── ARCH.md                 # This file
+
├── CLAUDE.md               # Project instructions
+
├── .gitignore
+
├── src/
+
│   └── thicket/
+
│       ├── __init__.py
+
│       ├── __main__.py     # Entry point for `python -m thicket`
+
│       ├── cli/            # CLI commands and interface
+
│       │   ├── __init__.py
+
│       │   ├── main.py     # Main CLI app with Typer
+
│       │   ├── commands/   # Subcommands
+
│       │   │   ├── __init__.py
+
│       │   │   ├── init.py        # Initialize git store
+
│       │   │   ├── add.py         # Add users and feeds
+
│       │   │   ├── sync.py        # Sync feeds
+
│       │   │   ├── list_cmd.py    # List users/feeds
+
│       │   │   ├── duplicates.py  # Manage duplicate entries
+
│       │   │   ├── links_cmd.py   # Extract and categorize links
+
│       │   │   └── index_cmd.py   # Build reference index and show threads
+
│       │   └── utils.py    # CLI utilities (progress, formatting)
+
│       ├── core/           # Core business logic
+
│       │   ├── __init__.py
+
│       │   ├── feed_parser.py      # Feed parsing and normalization
+
│       │   ├── git_store.py        # Git repository operations
+
│       │   └── reference_parser.py # Link extraction and threading
+
│       ├── models/         # Pydantic data models
+
│       │   ├── __init__.py
+
│       │   ├── config.py   # Configuration models
+
│       │   ├── feed.py     # Feed/Entry models
+
│       │   └── user.py     # User metadata models
+
│       └── utils/          # Shared utilities
+
│           └── __init__.py
+
├── tests/
+
│   ├── __init__.py
+
│   ├── conftest.py         # pytest configuration
+
│   ├── test_feed_parser.py
+
│   ├── test_git_store.py
+
│   └── fixtures/           # Test data
+
│       └── feeds/
+
└── docs/
+
    └── examples/           # Example configurations
+
```
+
+
## Data Models
+
+
### Configuration File (YAML/TOML)
+
```python
+
class ThicketConfig(BaseSettings):
+
git_store: Path # Git repository location
+
cache_dir: Path # Cache directory
+
users: list[UserConfig]
+
+
model_config = SettingsConfigDict(
+
env_prefix="THICKET_",
+
env_file=".env",
+
yaml_file="thicket.yaml"
+
)
+
+
class UserConfig(BaseModel):
+
username: str
+
feeds: list[HttpUrl]
+
email: Optional[EmailStr] = None
+
homepage: Optional[HttpUrl] = None
+
icon: Optional[HttpUrl] = None
+
display_name: Optional[str] = None
+
```
+
+
### Feed Storage Format
+
```python
+
class AtomEntry(BaseModel):
+
id: str # Original Atom ID
+
title: str
+
link: HttpUrl
+
updated: datetime
+
published: Optional[datetime]
+
summary: Optional[str]
+
content: Optional[str] # Full body content from Atom entry
+
content_type: Optional[str] = "html" # text, html, xhtml
+
author: Optional[dict]
+
categories: list[str] = []
+
rights: Optional[str] = None # Copyright info
+
source: Optional[str] = None # Source feed URL
+
# Additional Atom fields preserved during RSS->Atom conversion
+
+
model_config = ConfigDict(
+
json_encoders={
+
datetime: lambda v: v.isoformat()
+
}
+
)
+
+
class DuplicateMap(BaseModel):
+
"""Maps duplicate entry IDs to canonical entry IDs"""
+
duplicates: dict[str, str] = {} # duplicate_id -> canonical_id
+
comment: str = "Entry IDs that map to the same canonical content"
+
+
def add_duplicate(self, duplicate_id: str, canonical_id: str) -> None:
+
"""Add a duplicate mapping"""
+
self.duplicates[duplicate_id] = canonical_id
+
+
def remove_duplicate(self, duplicate_id: str) -> bool:
+
"""Remove a duplicate mapping. Returns True if existed."""
+
return self.duplicates.pop(duplicate_id, None) is not None
+
+
def get_canonical(self, entry_id: str) -> str:
+
"""Get canonical ID for an entry (returns original if not duplicate)"""
+
return self.duplicates.get(entry_id, entry_id)
+
+
def is_duplicate(self, entry_id: str) -> bool:
+
"""Check if entry ID is marked as duplicate"""
+
return entry_id in self.duplicates
+
```
+
+
## Git Repository Structure
+
```
+
git-store/
+
โ”œโ”€โ”€ index.json # User directory index
+
โ”œโ”€โ”€ duplicates.json # Manual curation of duplicate entries
+
โ”œโ”€โ”€ links.json # Unified links, references, and mapping data
+
โ”œโ”€โ”€ user1/
+
โ”‚ โ”œโ”€โ”€ entry_id_1.json # Sanitized entry files
+
โ”‚ โ”œโ”€โ”€ entry_id_2.json
+
โ”‚ โ””โ”€โ”€ ...
+
โ””โ”€โ”€ user2/
+
โ””โ”€โ”€ ...
+
```
+
+
## Key Design Decisions
+
+
### 1. Feed Normalization & Auto-Discovery
+
- All RSS feeds converted to Atom format before storage
+
- Preserves maximum metadata during conversion
+
- Sanitizes HTML content to prevent XSS
+
- **Auto-discovery**: Extracts user metadata from feed during `add user` command
+
+
### 2. ID Sanitization
+
- Consistent algorithm to convert Atom IDs to safe filenames
+
- Handles edge cases (very long IDs, special characters)
+
- Maintains reversibility where possible (see the sketch below)
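+
+
A minimal sketch of one workable scheme, assuming a fixed filename budget and a hash suffix for over-long IDs; the shipped `FeedParser.sanitize_entry_id` may use different rules:
+
+
```python
+
import hashlib
+
import re
+
+
MAX_LEN = 100  # assumed filename budget
+
+
def sanitize_entry_id(entry_id: str) -> str:
+
    """Turn an Atom ID into a filesystem-safe, collision-resistant filename."""
+
    safe = re.sub(r"[^A-Za-z0-9._-]", "_", entry_id)
+
    if len(safe) > MAX_LEN:
+
        # Keep a readable prefix; append a short hash to preserve uniqueness.
+
        digest = hashlib.sha256(entry_id.encode("utf-8")).hexdigest()[:12]
+
        safe = f"{safe[:MAX_LEN - 13]}_{digest}"
+
    return safe
+
+
# sanitize_entry_id("https://example.com/feed/entry/123")
+
# -> "https___example.com_feed_entry_123"
+
```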
+
+
### 3. Git Operations
+
- Uses GitPython for simplicity (no authentication required)
+
- Single main branch for all users and entries
+
- Atomic commits per sync operation
+
- Meaningful commit messages with feed update summaries
+
- Preserves complete history - never delete entries even if they disappear from feeds
+
+
### 4. Caching Strategy
+
- HTTP caching with Last-Modified/ETag support (conditional-fetch sketch below)
+
- Local cache of parsed feeds with TTL
+
- Cache invalidation on configuration changes
+
- Git store serves as permanent historical archive beyond feed depth limits
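+
+
A sketch of the validator handling with httpx; the cache shape and the `fetch_if_changed` helper are assumptions, not the shipped implementation:
+
+
```python
+
from typing import Optional
+
+
import httpx
+
+
def fetch_if_changed(url: str, cache: dict) -> Optional[bytes]:
+
    """Conditional GET: return fresh bytes, or None on 304 Not Modified."""
+
    headers = {}
+
    if cache.get("etag"):
+
        headers["If-None-Match"] = cache["etag"]
+
    if cache.get("last_modified"):
+
        headers["If-Modified-Since"] = cache["last_modified"]
+
    response = httpx.get(url, headers=headers, follow_redirects=True)
+
    if response.status_code == 304:
+
        return None  # cached copy is still current
+
    response.raise_for_status()
+
    cache["etag"] = response.headers.get("ETag")
+
    cache["last_modified"] = response.headers.get("Last-Modified")
+
    return response.content
+
```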
+
+
### 5. Error Handling
+
- Graceful handling of feed parsing errors
+
- Retry logic for network failures
+
- Clear error messages with recovery suggestions
+
+
## CLI Command Structure
+
+
```bash
+
# Initialize a new git store
+
thicket init /path/to/store
+
+
# Add a user with feeds (auto-discovers metadata from feed)
+
thicket add user "alyssa" \
+
--feed "https://example.com/feed.atom"
+
# Auto-populates: email, homepage, icon, display_name from feed metadata
+
+
# Add a user with manual overrides
+
thicket add user "alyssa" \
+
--feed "https://example.com/feed.atom" \
+
--email "alyssa@example.com" \
+
--homepage "https://alyssa.example.com" \
+
--icon "https://example.com/avatar.png" \
+
--display-name "Alyssa P. Hacker"
+
+
# Add additional feed to existing user
+
thicket add feed "alyssa" "https://example.com/other-feed.rss"
+
+
# Sync all feeds (designed for cron usage)
+
thicket sync --all
+
+
# Sync specific user
+
thicket sync --user alyssa
+
+
# List users and their feeds
+
thicket list users
+
thicket list feeds --user alyssa
+
+
# Manage duplicate entries
+
thicket duplicates list
+
thicket duplicates add <duplicate_id> <canonical_id> # Map a duplicate to its canonical entry
+
thicket duplicates remove <duplicate_id> # Remove a duplicate mapping
+
+
# Link processing and threading
+
thicket links --verbose # Extract and categorize all links
+
thicket index --verbose # Build reference index for threading
+
thicket threads # Show conversation threads
+
thicket threads --username user1 # Show threads for specific user
+
thicket threads --min-size 3 # Show threads with minimum size
+
```
+
+
## Performance Considerations
+
+
1. **Concurrent Feed Fetching**: Use httpx with asyncio for parallel downloads (sketched below)
+
2. **Incremental Updates**: Only fetch/parse feeds that have changed
+
3. **Efficient Git Operations**: Batch commits, use shallow clones where appropriate
+
4. **Progress Feedback**: Rich progress bars for long operations
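+
+
For point 1, a sketch of bounded-concurrency fetching; the semaphore limit and timeout are illustrative values:
+
+
```python
+
import asyncio
+
+
import httpx
+
+
async def fetch_all(urls: list[str], limit: int = 10) -> dict[str, bytes]:
+
    """Fetch feeds concurrently, with at most `limit` requests in flight."""
+
    semaphore = asyncio.Semaphore(limit)
+
    async with httpx.AsyncClient(timeout=30.0, follow_redirects=True) as client:
+
        async def fetch(url: str) -> tuple[str, bytes]:
+
            async with semaphore:
+
                response = await client.get(url)
+
                response.raise_for_status()
+
                return url, response.content
+
        pairs = await asyncio.gather(*(fetch(url) for url in urls))
+
    return dict(pairs)
+
+
# bodies = asyncio.run(fetch_all(["https://example.com/feed.atom"]))
+
```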
+
+
## Security Considerations
+
+
1. **HTML Sanitization**: Use bleach to clean feed content (example below)
+
2. **URL Validation**: Strict validation of feed URLs
+
3. **Git Security**: No credentials stored in repository
+
4. **Path Traversal**: Careful sanitization of filenames
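+
+
For point 1, an illustrative bleach call; the allow-lists here are assumptions rather than the project's actual policy:
+
+
```python
+
import bleach
+
+
ALLOWED_TAGS = ["a", "blockquote", "code", "em", "li", "ol", "p", "pre", "strong", "ul"]
+
ALLOWED_ATTRIBUTES = {"a": ["href", "title"]}
+
+
def sanitize_html(raw: str) -> str:
+
    """Drop everything outside the allow-lists from feed-provided HTML."""
+
    return bleach.clean(raw, tags=ALLOWED_TAGS, attributes=ALLOWED_ATTRIBUTES, strip=True)
+
```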
+
+
## Future Enhancements
+
+
1. **Web Interface**: Optional web UI for browsing the git store
+
2. **Webhooks**: Notify external services on feed updates
+
3. **Feed Discovery**: Auto-discover feeds from HTML pages
+
4. **Export Formats**: Generate static sites, OPML exports
+
5. **Federation**: P2P sync between thicket instances
+
+
## Requirements Clarification
+
+
**✓ Resolved Requirements:**
+
1. **Feed Update Frequency**: Designed for cron usage - no built-in scheduling needed
+
2. **Duplicate Handling**: Manual curation via `duplicates.json` file with CLI commands
+
3. **Git Branching**: Single main branch for all users and entries
+
4. **Authentication**: No feeds require authentication currently
+
5. **Content Storage**: Store complete Atom entry body content as provided
+
6. **Deleted Entries**: Preserve all entries in Git store permanently (historical archive)
+
7. **History Depth**: Git store maintains full history beyond feed depth limits
+
8. **Feed Auto-Discovery**: Extract user metadata from feed during `add user` command
+
+
## Duplicate Entry Management
+
+
### Duplicate Detection Strategy
+
- **Manual Curation**: Duplicates identified and managed manually via CLI
+
- **Storage**: `duplicates.json` file in Git root maps entry IDs to canonical entries
+
- **Structure**: `{"duplicate_id": "canonical_id", ...}`
+
- **CLI Commands**: Add/remove duplicate mappings with validation
+
- **Query Resolution**: Search/list commands resolve duplicates to canonical entries
+
+
### Duplicate File Format
+
```json
+
{
+
"https://example.com/feed/entry/123": "https://canonical.com/posts/same-post",
+
"https://mirror.com/articles/456": "https://canonical.com/posts/same-post",
+
"comment": "Entry IDs that map to the same canonical content"
+
}
+
```
+
+
## Feed Metadata Auto-Discovery
+
+
### Extraction Strategy
+
When adding a new user with `thicket add user`, the system fetches and parses the feed to extract (see the sketch after this list):
+
+
- **Display Name**: From `feed.title` or `feed.author.name`
+
- **Email**: From `feed.author.email` or `feed.managingEditor`
+
- **Homepage**: From `feed.link` or `feed.author.uri`
+
- **Icon**: From `feed.logo`, `feed.icon`, or `feed.image.url`
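+
+
A sketch of how these fields surface through feedparser's normalized keys; real feeds may omit any of them:
+
+
```python
+
import feedparser
+
+
parsed = feedparser.parse("https://example.com/feed.atom")
+
feed = parsed.feed
+
author = feed.get("author_detail", {})
+
+
display_name = author.get("name") or feed.get("title")
+
email = author.get("email")
+
homepage = author.get("href") or feed.get("link")
+
icon = feed.get("logo") or feed.get("icon") or feed.get("image", {}).get("href")
+
```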
+
+
### Discovery Priority Order
+
1. **Author Information**: Prefer `feed.author.*` fields (more specific to person)
+
2. **Feed-Level**: Fall back to feed-level metadata
+
3. **Manual Override**: CLI flags always take precedence over discovered values
+
4. **Update Behavior**: Auto-discovery only runs during initial `add user`, not on sync
+
+
### Extracted Metadata Format
+
```python
+
class FeedMetadata(BaseModel):
+
title: Optional[str] = None
+
author_name: Optional[str] = None
+
author_email: Optional[EmailStr] = None
+
author_uri: Optional[HttpUrl] = None
+
link: Optional[HttpUrl] = None
+
logo: Optional[HttpUrl] = None
+
icon: Optional[HttpUrl] = None
+
image_url: Optional[HttpUrl] = None
+
+
def to_user_config(self, username: str, feed_url: HttpUrl) -> UserConfig:
+
"""Convert discovered metadata to UserConfig with fallbacks"""
+
return UserConfig(
+
username=username,
+
feeds=[feed_url],
+
display_name=self.author_name or self.title,
+
email=self.author_email,
+
homepage=self.author_uri or self.link,
+
icon=self.logo or self.icon or self.image_url
+
)
+
```
+
+
## Link Processing and Threading Architecture
+
+
### Overview
+
The thicket system implements a link processing and threading pipeline that creates email-style threaded views of blog entries by tracking cross-references between different blogs.
+
+
### Link Processing Pipeline
+
+
#### 1. Link Extraction (`thicket links`)
+
The `links` command systematically extracts all outbound links from blog entries and categorizes them:
+
+
```python
+
class LinkData(BaseModel):
+
url: str # Fully resolved URL
+
entry_id: str # Source entry ID
+
username: str # Source username
+
context: str # Surrounding text context
+
category: str # "internal", "user", or "unknown"
+
target_username: Optional[str] # Target user if applicable
+
```
+
+
**Link Categories** (decision rule sketched after the list):
+
- **Internal**: Links to the same user's domain (self-references)
+
- **User**: Links to other tracked users' domains
+
- **Unknown**: Links to external sites not tracked by thicket
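+
+
A sketch of the decision rule, assuming a prebuilt domain→username map (see Domain Mapping below); the shipped `LinkCategorizer` may differ in detail:
+
+
```python
+
from typing import Optional
+
from urllib.parse import urlparse
+
+
def categorize(url: str, source_user: str,
+
               domain_to_user: dict[str, str]) -> tuple[str, Optional[str]]:
+
    """Classify a link as internal, user, or unknown by its host."""
+
    owner = domain_to_user.get(urlparse(url).netloc.lower())
+
    if owner == source_user:
+
        return "internal", None
+
    if owner is not None:
+
        return "user", owner
+
    return "unknown", None
+
```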
+
+
#### 2. URL Resolution
+
All links are resolved against the Atom feed's base URL (see the sketch below) to handle:
+
- Relative URLs (converted to absolute)
+
- Protocol-relative URLs
+
- Fragment identifiers
+
- Redirects and canonical URLs
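+
+
The first three cases fall out of the standard library; resolving redirects to canonical URLs needs an HTTP round-trip and is out of scope for this sketch:
+
+
```python
+
from urllib.parse import urldefrag, urljoin
+
+
base = "https://blog.user.com/2024/06/post.html"
+
for href in ("../05/older.html", "//example.com/a", "#section-2"):
+
    resolved, _fragment = urldefrag(urljoin(base, href))
+
    print(resolved)
+
# https://blog.user.com/2024/05/older.html
+
# https://example.com/a
+
# https://blog.user.com/2024/06/post.html
+
```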
+
+
#### 3. Domain Mapping
+
The system builds a comprehensive domain mapping from user configuration (sketched below):
+
- Feed URLs → domain extraction
+
- Homepage URLs → domain extraction
+
- Reverse mapping: domain → username
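+
+
A minimal sketch of the mapping step; the `user_urls` input shape is an assumption:
+
+
```python
+
from urllib.parse import urlparse
+
+
def build_domain_map(user_urls: dict[str, list[str]]) -> dict[str, str]:
+
    """Map each domain seen in a user's feed/homepage URLs to that username."""
+
    domain_to_user: dict[str, str] = {}
+
    for username, urls in user_urls.items():
+
        for url in urls:
+
            domain = urlparse(url).netloc.lower()
+
            if domain:
+
                domain_to_user[domain] = username
+
    return domain_to_user
+
+
# build_domain_map({"user1": ["https://blog.user.com/feed.atom"]})
+
# -> {"blog.user.com": "user1"}
+
```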
+
+
### Threading System
+
+
#### 1. Reference Index Generation (`thicket index`)
+
Creates a bidirectional reference index from the categorized links:
+
+
```python
+
class BlogReference(BaseModel):
+
source_entry_id: str
+
source_username: str
+
target_url: str
+
target_username: Optional[str]
+
target_entry_id: Optional[str]
+
context: str
+
```
+
+
#### 2. Thread Detection Algorithm
+
Uses graph traversal (sketched after this list) to find connected blog entries:
+
- **Outbound references**: Links from an entry to other entries
+
- **Inbound references**: Links to an entry from other entries
+
- **Thread members**: All entries connected through references
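+
+
A sketch of the grouping as connected components; representing references as `(source, target)` entry-ID pairs is an assumption:
+
+
```python
+
from collections import defaultdict, deque
+
+
def find_threads(references: list[tuple[str, str]]) -> list[set[str]]:
+
    """Group entries into threads: connected components of the reference graph."""
+
    graph: defaultdict = defaultdict(set)
+
    for source, target in references:
+
        # Treat references as undirected edges for grouping purposes.
+
        graph[source].add(target)
+
        graph[target].add(source)
+
    seen: set[str] = set()
+
    threads: list[set[str]] = []
+
    for node in graph:
+
        if node in seen:
+
            continue
+
        thread, queue = set(), deque([node])
+
        while queue:
+
            current = queue.popleft()
+
            if current in thread:
+
                continue
+
            thread.add(current)
+
            queue.extend(graph[current] - thread)
+
        seen |= thread
+
        threads.append(thread)
+
    return threads
+
```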
+
+
#### 3. Threading Display (`thicket threads`)
+
Creates email-style threaded views:
+
- Chronological ordering within threads
+
- Reference counts (outbound/inbound)
+
- Context preservation
+
- Filtering options (user, entry, minimum size)
+
+
### Data Structures
+
+
#### links.json Format (Unified Structure)
+
```json
+
{
+
"links": {
+
"https://example.com/post/123": {
+
"referencing_entries": ["https://blog.user.com/entry/456"],
+
"target_username": "user2"
+
},
+
"https://external-site.com/article": {
+
"referencing_entries": ["https://blog.user.com/entry/789"]
+
}
+
},
+
"reverse_mapping": {
+
"https://blog.user.com/entry/456": ["https://example.com/post/123"],
+
"https://blog.user.com/entry/789": ["https://external-site.com/article"]
+
},
+
"references": [
+
{
+
"source_entry_id": "https://blog.user.com/entry/456",
+
"source_username": "user1",
+
"target_url": "https://example.com/post/123",
+
"target_username": "user2",
+
"target_entry_id": "https://example.com/post/123",
+
"context": "As mentioned in this post..."
+
}
+
],
+
"user_domains": {
+
"user1": ["blog.user.com"],
+
"user2": ["example.com"]
+
}
+
}
+
```
+
+
This unified structure eliminates duplication (see the query example below) by:
+
- Storing each URL only once with minimal metadata
+
- Including all link data, reference data, and mappings in one file
+
- Using presence of `target_username` to identify tracked vs external links
+
- Providing bidirectional mappings for efficient queries
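+
+
For example, both query directions are single dictionary lookups against the structure above:
+
+
```python
+
import json
+
+
with open("links.json") as f:
+
    data = json.load(f)
+
+
# Who references this URL?
+
print(data["links"]["https://example.com/post/123"]["referencing_entries"])
+
+
# What does this entry link out to?
+
print(data["reverse_mapping"]["https://blog.user.com/entry/456"])
+
+
# Tracked vs external: tracked links carry a target_username.
+
tracked = {u: v for u, v in data["links"].items() if "target_username" in v}
+
```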
+
+
### Unified Structure Benefits
+
+
- **Eliminates Duplication**: Each URL appears only once with metadata
+
- **Single Source of Truth**: All link-related data in one file
+
- **Efficient Queries**: Fast lookups for both directions (URL→entries, entry→URLs)
+
- **Atomic Updates**: All link data changes together
+
- **Reduced I/O**: Fewer file operations
+
+
### Implementation Benefits
+
+
1. **Systematic Link Processing**: All links are extracted and categorized consistently
+
2. **Proper URL Resolution**: Handles relative URLs and base URL resolution correctly
+
3. **Domain-based Categorization**: Automatically identifies user-to-user references
+
4. **Bidirectional Indexing**: Supports both "who links to whom" and "who is linked by whom"
+
5. **Thread Discovery**: Finds conversation threads automatically
+
6. **Rich Context**: Preserves surrounding text for each link
+
7. **Performance**: Pre-computed indexes for fast threading queries
+
+
### CLI Commands
+
+
```bash
+
# Extract and categorize all links
+
thicket links --verbose
+
+
# Build reference index for threading
+
thicket index --verbose
+
+
# Show all conversation threads
+
thicket threads
+
+
# Show threads for specific user
+
thicket threads --username user1
+
+
# Show threads with minimum size
+
thicket threads --min-size 3
+
```
+
+
### Integration with Existing Commands
+
+
The link processing system integrates seamlessly with existing thicket commands:
+
- `thicket sync` updates entries, requiring `thicket links` to be run afterward
+
- `thicket index` uses the output from `thicket links` for improved accuracy
+
- `thicket threads` provides the user-facing threading interface
+
+
## Current Implementation Status
+
+
### ✅ Completed Features
+
1. **Core Infrastructure**
+
- Modern CLI with Typer and Rich
+
- Pydantic data models for type safety
+
- Git repository operations with GitPython
+
- Feed parsing and normalization with feedparser
+
+
2. **User and Feed Management**
+
- `thicket init` - Initialize git store
+
- `thicket add` - Add users and feeds with auto-discovery
+
- `thicket sync` - Sync feeds with progress tracking
+
- `thicket list` - List users, feeds, and entries
+
- `thicket duplicates` - Manage duplicate entries
+
+
3. **Link Processing and Threading**
+
- `thicket links` - Extract and categorize all outbound links
+
- `thicket index` - Build reference index from links
+
- `thicket threads` - Display threaded conversation views
+
- Proper URL resolution with base URL handling
+
- Domain-based link categorization
+
- Context preservation for links
+
+
### 📊 System Performance
+
- **Link Extraction**: Successfully processes thousands of blog entries
+
- **Categorization**: Identifies internal, user, and unknown links
+
- **Threading**: Creates email-style threaded views of conversations
+
- **Storage**: Efficient JSON-based data structures for links and references
+
+
### 🔧 Current Architecture Highlights
+
- **Modular Design**: Clear separation between CLI, core logic, and models
+
- **Type Safety**: Comprehensive Pydantic models for data validation
+
- **Rich CLI**: Beautiful progress bars, tables, and error handling
+
- **Extensible**: Easy to add new commands and features
+
- **Git Integration**: All data stored in version-controlled JSON files
+
+
### 🎯 Proven Functionality
+
The system has been tested with real blog data and successfully:
+
- Extracted 14,396 total links from blog entries
+
- Categorized 3,994 internal links, 363 user-to-user links, and 10,039 unknown links
+
- Built comprehensive domain mappings for 16 users across 20 domains
+
- Generated threaded views showing blog conversation patterns
+
+
### 🚀 Ready for Use
+
The thicket system is now fully functional for:
+
- Maintaining Git repositories of blog feeds
+
- Tracking cross-references between blogs
+
- Creating threaded views of blog conversations
+
- Discovering blog interaction patterns
+
- Building distributed comment systems
+
</file>
+
+
<file path="src/thicket/cli/utils.py">
+
"""CLI utilities and helpers."""
+
+
from pathlib import Path
+
from typing import Optional
+
+
import typer
+
from rich.console import Console
+
from rich.progress import Progress, SpinnerColumn, TextColumn
+
from rich.table import Table
+
+
from ..models import ThicketConfig, UserMetadata
+
from ..core.git_store import GitStore
+
+
console = Console()
+
+
+
def get_tsv_mode() -> bool:
+
"""Get the global TSV mode setting."""
+
from .main import tsv_mode
+
return tsv_mode
+
+
+
def load_config(config_path: Optional[Path] = None) -> ThicketConfig:
+
"""Load thicket configuration from file or environment."""
+
if config_path and config_path.exists():
+
import yaml
+
+
with open(config_path) as f:
+
config_data = yaml.safe_load(f)
+
+
# Convert to ThicketConfig
+
return ThicketConfig(**config_data)
+
+
# Try to load from default locations or environment
+
try:
+
# First try to find thicket.yaml in current directory
+
default_config = Path("thicket.yaml")
+
if default_config.exists():
+
import yaml
+
with open(default_config) as f:
+
config_data = yaml.safe_load(f)
+
return ThicketConfig(**config_data)
+
+
# Fall back to environment variables
+
return ThicketConfig()
+
except Exception as e:
+
console.print(f"[red]Error loading configuration: {e}[/red]")
+
console.print("[yellow]Run 'thicket init' to create a new configuration.[/yellow]")
+
raise typer.Exit(1) from e
+
+
+
def save_config(config: ThicketConfig, config_path: Path) -> None:
+
"""Save thicket configuration to file."""
+
import yaml
+
+
config_data = config.model_dump(mode="json", exclude_none=True)
+
+
# Convert Path objects to strings for YAML serialization
+
config_data["git_store"] = str(config_data["git_store"])
+
config_data["cache_dir"] = str(config_data["cache_dir"])
+
+
with open(config_path, "w") as f:
+
yaml.dump(config_data, f, default_flow_style=False, sort_keys=False)
+
+
+
def create_progress() -> Progress:
+
"""Create a Rich progress display."""
+
return Progress(
+
SpinnerColumn(),
+
TextColumn("[progress.description]{task.description}"),
+
console=console,
+
transient=True,
+
)
+
+
+
def print_users_table(config: ThicketConfig) -> None:
+
"""Print a table of users and their feeds."""
+
if get_tsv_mode():
+
print_users_tsv(config)
+
return
+
+
table = Table(title="Users and Feeds")
+
table.add_column("Username", style="cyan", no_wrap=True)
+
table.add_column("Display Name", style="magenta")
+
table.add_column("Email", style="blue")
+
table.add_column("Homepage", style="green")
+
table.add_column("Feeds", style="yellow")
+
+
for user in config.users:
+
feeds_str = "\n".join(str(feed) for feed in user.feeds)
+
table.add_row(
+
user.username,
+
user.display_name or "",
+
user.email or "",
+
str(user.homepage) if user.homepage else "",
+
feeds_str,
+
)
+
+
console.print(table)
+
+
+
def print_feeds_table(config: ThicketConfig, username: Optional[str] = None) -> None:
+
"""Print a table of feeds, optionally filtered by username."""
+
if get_tsv_mode():
+
print_feeds_tsv(config, username)
+
return
+
+
table = Table(title=f"Feeds{f' for {username}' if username else ''}")
+
table.add_column("Username", style="cyan", no_wrap=True)
+
table.add_column("Feed URL", style="blue")
+
table.add_column("Status", style="green")
+
+
users = [config.find_user(username)] if username else config.users
+
users = [u for u in users if u is not None]
+
+
for user in users:
+
for feed in user.feeds:
+
table.add_row(
+
user.username,
+
str(feed),
+
"Active", # TODO: Add actual status checking
+
)
+
+
console.print(table)
+
+
+
def confirm_action(message: str, default: bool = False) -> bool:
+
"""Prompt for confirmation."""
+
return typer.confirm(message, default=default)
+
+
+
def print_success(message: str) -> None:
+
"""Print a success message."""
+
console.print(f"[green]โœ“[/green] {message}")
+
+
+
def print_error(message: str) -> None:
+
"""Print an error message."""
+
console.print(f"[red]โœ—[/red] {message}")
+
+
+
def print_warning(message: str) -> None:
+
"""Print a warning message."""
+
console.print(f"[yellow]โš [/yellow] {message}")
+
+
+
def print_info(message: str) -> None:
+
"""Print an info message."""
+
console.print(f"[blue]โ„น[/blue] {message}")
+
+
+
def print_users_table_from_git(users: list[UserMetadata]) -> None:
+
"""Print a table of users from git repository."""
+
if get_tsv_mode():
+
print_users_tsv_from_git(users)
+
return
+
+
table = Table(title="Users and Feeds")
+
table.add_column("Username", style="cyan", no_wrap=True)
+
table.add_column("Display Name", style="magenta")
+
table.add_column("Email", style="blue")
+
table.add_column("Homepage", style="green")
+
table.add_column("Feeds", style="yellow")
+
+
for user in users:
+
feeds_str = "\n".join(user.feeds)
+
table.add_row(
+
user.username,
+
user.display_name or "",
+
user.email or "",
+
str(user.homepage) if user.homepage else "",
+
feeds_str,
+
)
+
+
console.print(table)
+
+
+
def print_feeds_table_from_git(git_store: GitStore, username: Optional[str] = None) -> None:
+
"""Print a table of feeds from git repository."""
+
if get_tsv_mode():
+
print_feeds_tsv_from_git(git_store, username)
+
return
+
+
table = Table(title=f"Feeds{f' for {username}' if username else ''}")
+
table.add_column("Username", style="cyan", no_wrap=True)
+
table.add_column("Feed URL", style="blue")
+
table.add_column("Status", style="green")
+
+
if username:
+
user = git_store.get_user(username)
+
users = [user] if user else []
+
else:
+
index = git_store._load_index()
+
users = list(index.users.values())
+
+
for user in users:
+
for feed in user.feeds:
+
table.add_row(
+
user.username,
+
feed,
+
"Active", # TODO: Add actual status checking
+
)
+
+
console.print(table)
+
+
+
def print_users_tsv(config: ThicketConfig) -> None:
+
"""Print users in TSV format."""
+
print("Username\tDisplay Name\tEmail\tHomepage\tFeeds")
+
for user in config.users:
+
feeds_str = ",".join(str(feed) for feed in user.feeds)
+
print(f"{user.username}\t{user.display_name or ''}\t{user.email or ''}\t{user.homepage or ''}\t{feeds_str}")
+
+
+
def print_users_tsv_from_git(users: list[UserMetadata]) -> None:
+
"""Print users from git repository in TSV format."""
+
print("Username\tDisplay Name\tEmail\tHomepage\tFeeds")
+
for user in users:
+
feeds_str = ",".join(user.feeds)
+
print(f"{user.username}\t{user.display_name or ''}\t{user.email or ''}\t{user.homepage or ''}\t{feeds_str}")
+
+
+
def print_feeds_tsv(config: ThicketConfig, username: Optional[str] = None) -> None:
+
"""Print feeds in TSV format."""
+
print("Username\tFeed URL\tStatus")
+
users = [config.find_user(username)] if username else config.users
+
users = [u for u in users if u is not None]
+
+
for user in users:
+
for feed in user.feeds:
+
print(f"{user.username}\t{feed}\tActive")
+
+
+
def print_feeds_tsv_from_git(git_store: GitStore, username: Optional[str] = None) -> None:
+
"""Print feeds from git repository in TSV format."""
+
print("Username\tFeed URL\tStatus")
+
+
if username:
+
user = git_store.get_user(username)
+
users = [user] if user else []
+
else:
+
index = git_store._load_index()
+
users = list(index.users.values())
+
+
for user in users:
+
for feed in user.feeds:
+
print(f"{user.username}\t{feed}\tActive")
+
+
+
def print_entries_tsv(entries_by_user: list[list], usernames: list[str]) -> None:
+
"""Print entries in TSV format."""
+
print("User\tAtom ID\tTitle\tUpdated\tURL")
+
+
# Combine all entries with usernames
+
all_entries = []
+
for entries, username in zip(entries_by_user, usernames):
+
for entry in entries:
+
all_entries.append((username, entry))
+
+
# Sort by updated time (newest first)
+
all_entries.sort(key=lambda x: x[1].updated, reverse=True)
+
+
for username, entry in all_entries:
+
# Format updated time
+
updated_str = entry.updated.strftime("%Y-%m-%d %H:%M")
+
+
# Escape tabs and newlines in title to preserve TSV format
+
title = entry.title.replace('\t', ' ').replace('\n', ' ').replace('\r', ' ')
+
+
print(f"{username}\t{entry.id}\t{title}\t{updated_str}\t{entry.link}")
+
</file>
+
+
</files>
+6 -2
src/thicket/__init__.py
···
-
"""Thicket: A CLI tool for persisting Atom/RSS feeds in Git repositories."""
+
"""Thicket - A library for managing feed repositories and static site generation."""
+
from .thicket import Thicket
+
from .models import AtomEntry, UserConfig, ThicketConfig
+
+
__all__ = ["Thicket", "AtomEntry", "UserConfig", "ThicketConfig"]
__version__ = "0.1.0"
__author__ = "thicket"
-
__email__ = "thicket@example.com"
+
__email__ = "thicket@example.com"
+1 -1
src/thicket/__main__.py
···
from .cli.main import app
if __name__ == "__main__":
-
app()
+
app()
+1 -1
src/thicket/cli/__init__.py
···
from .main import app
-
__all__ = ["app"]
+
__all__ = ["app"]
+2 -2
src/thicket/cli/commands/__init__.py
···
"""CLI commands for thicket."""
# Import all commands to register them with the main app
-
from . import add, duplicates, init, list_cmd, sync
+
from . import add, duplicates, generate, index_cmd, info_cmd, init, links_cmd, list_cmd, sync
-
__all__ = ["add", "duplicates", "init", "list_cmd", "sync"]
+
__all__ = ["add", "duplicates", "generate", "index_cmd", "info_cmd", "init", "links_cmd", "list_cmd", "sync"]
+41 -168
src/thicket/cli/commands/add.py
···
"""Add command for thicket."""
-
import asyncio
from pathlib import Path
from typing import Optional
import typer
-
from pydantic import HttpUrl, ValidationError
+
from pydantic import ValidationError
-
from ...core.feed_parser import FeedParser
-
from ...core.git_store import GitStore
-
from ...models import UserConfig
-
from ..main import app
-
from ..utils import (
-
create_progress,
-
load_config,
-
print_error,
-
print_info,
-
print_success,
-
save_config,
-
)
+
from ..main import app, console, load_thicket
@app.command("add")
-
def add_command(
-
subcommand: str = typer.Argument(..., help="Subcommand: 'user' or 'feed'"),
+
def add_user(
username: str = typer.Argument(..., help="Username"),
-
feed_url: Optional[str] = typer.Argument(None, help="Feed URL (required for 'user' command)"),
+
feeds: list[str] = typer.Argument(..., help="Feed URLs"),
email: Optional[str] = typer.Option(None, "--email", "-e", help="User email"),
homepage: Optional[str] = typer.Option(None, "--homepage", "-h", help="User homepage"),
icon: Optional[str] = typer.Option(None, "--icon", "-i", help="User icon URL"),
display_name: Optional[str] = typer.Option(None, "--display-name", "-d", help="User display name"),
config_file: Optional[Path] = typer.Option(
-
Path("thicket.yaml"), "--config", help="Configuration file path"
-
),
-
auto_discover: bool = typer.Option(
-
True, "--auto-discover/--no-auto-discover", help="Auto-discover user metadata from feed"
+
None, "--config", help="Configuration file path"
),
) -> None:
-
"""Add a user or feed to thicket."""
+
"""Add a user with their feeds to thicket."""
-
if subcommand == "user":
-
add_user(username, feed_url, email, homepage, icon, display_name, config_file, auto_discover)
-
elif subcommand == "feed":
-
add_feed(username, feed_url, config_file)
-
else:
-
print_error(f"Unknown subcommand: {subcommand}")
-
print_error("Use 'user' or 'feed'")
-
raise typer.Exit(1)
-
-
-
def add_user(
-
username: str,
-
feed_url: Optional[str],
-
email: Optional[str],
-
homepage: Optional[str],
-
icon: Optional[str],
-
display_name: Optional[str],
-
config_file: Path,
-
auto_discover: bool,
-
) -> None:
-
"""Add a new user with feed."""
-
-
if not feed_url:
-
print_error("Feed URL is required when adding a user")
-
raise typer.Exit(1)
-
-
# Validate feed URL
try:
-
validated_feed_url = HttpUrl(feed_url)
-
except ValidationError:
-
print_error(f"Invalid feed URL: {feed_url}")
-
raise typer.Exit(1)
-
-
# Load configuration
-
config = load_config(config_file)
-
-
# Check if user already exists
-
existing_user = config.find_user(username)
-
if existing_user:
-
print_error(f"User '{username}' already exists")
-
print_error("Use 'thicket add feed' to add additional feeds")
-
raise typer.Exit(1)
-
-
# Auto-discover metadata if enabled
-
discovered_metadata = None
-
if auto_discover:
-
discovered_metadata = asyncio.run(discover_feed_metadata(validated_feed_url))
-
-
# Create user config with manual overrides taking precedence
-
user_config = UserConfig(
-
username=username,
-
feeds=[validated_feed_url],
-
email=email or (discovered_metadata.author_email if discovered_metadata else None),
-
homepage=HttpUrl(homepage) if homepage else (discovered_metadata.author_uri or discovered_metadata.link if discovered_metadata else None),
-
icon=HttpUrl(icon) if icon else (discovered_metadata.logo or discovered_metadata.icon or discovered_metadata.image_url if discovered_metadata else None),
-
display_name=display_name or (discovered_metadata.author_name or discovered_metadata.title if discovered_metadata else None),
-
)
-
-
# Add user to configuration
-
config.add_user(user_config)
-
-
# Save configuration
-
save_config(config, config_file)
-
-
# Add user to Git store
-
git_store = GitStore(config.git_store)
-
git_store.add_user(
-
username=username,
-
display_name=user_config.display_name,
-
email=user_config.email,
-
homepage=str(user_config.homepage) if user_config.homepage else None,
-
icon=str(user_config.icon) if user_config.icon else None,
-
feeds=[str(f) for f in user_config.feeds],
-
)
-
-
# Commit changes
-
git_store.commit_changes(f"Add user: {username}")
-
-
print_success(f"Added user '{username}' with feed: {feed_url}")
-
-
if discovered_metadata and auto_discover:
-
print_info("Auto-discovered metadata:")
-
if user_config.display_name:
-
print_info(f" Display name: {user_config.display_name}")
-
if user_config.email:
-
print_info(f" Email: {user_config.email}")
-
if user_config.homepage:
-
print_info(f" Homepage: {user_config.homepage}")
-
if user_config.icon:
-
print_info(f" Icon: {user_config.icon}")
-
-
-
def add_feed(username: str, feed_url: Optional[str], config_file: Path) -> None:
-
"""Add a feed to an existing user."""
-
-
if not feed_url:
-
print_error("Feed URL is required")
-
raise typer.Exit(1)
-
-
# Validate feed URL
-
try:
-
validated_feed_url = HttpUrl(feed_url)
-
except ValidationError:
-
print_error(f"Invalid feed URL: {feed_url}")
-
raise typer.Exit(1)
-
-
# Load configuration
-
config = load_config(config_file)
-
-
# Check if user exists
-
user = config.find_user(username)
-
if not user:
-
print_error(f"User '{username}' not found")
-
print_error("Use 'thicket add user' to add a new user")
-
raise typer.Exit(1)
-
-
# Check if feed already exists
-
if validated_feed_url in user.feeds:
-
print_error(f"Feed already exists for user '{username}': {feed_url}")
-
raise typer.Exit(1)
-
-
# Add feed to user
-
if config.add_feed_to_user(username, validated_feed_url):
-
save_config(config, config_file)
+
# Load Thicket instance
+
thicket = load_thicket(config_file)
+
+
# Prepare user data
+
user_data = {}
+
if email:
+
user_data['email'] = email
+
if homepage:
+
user_data['homepage'] = homepage
+
if icon:
+
user_data['icon'] = icon
+
if display_name:
+
user_data['display_name'] = display_name
+
+
# Add the user
+
user_config = thicket.add_user(username, feeds, **user_data)
-
# Update Git store
-
git_store = GitStore(config.git_store)
-
git_store.update_user(username, feeds=[str(f) for f in user.feeds])
-
git_store.commit_changes(f"Add feed to user {username}: {feed_url}")
+
console.print(f"[green]โœ“[/green] Added user: {username}")
+
console.print(f" โ€ข Display name: {user_config.display_name or 'None'}")
+
console.print(f" โ€ข Email: {user_config.email or 'None'}")
+
console.print(f" โ€ข Homepage: {user_config.homepage or 'None'}")
+
console.print(f" โ€ข Feeds: {len(user_config.feeds)}")
-
print_success(f"Added feed to user '{username}': {feed_url}")
-
else:
-
print_error(f"Failed to add feed to user '{username}'")
-
raise typer.Exit(1)
-
-
-
async def discover_feed_metadata(feed_url: HttpUrl):
-
"""Discover metadata from a feed URL."""
-
try:
-
with create_progress() as progress:
-
task = progress.add_task("Discovering feed metadata...", total=None)
+
for feed in user_config.feeds:
+
console.print(f" - {feed}")
-
parser = FeedParser()
-
content = await parser.fetch_feed(feed_url)
-
metadata, _ = parser.parse_feed(content, feed_url)
-
-
progress.update(task, completed=True)
-
return metadata
+
# Commit the addition
+
commit_message = f"Add user {username} with {len(feeds)} feed(s)"
+
if thicket.commit_changes(commit_message):
+
console.print(f"[green]โœ“[/green] Committed: {commit_message}")
+
else:
+
console.print("[yellow]Warning:[/yellow] Failed to commit changes")
+
except ValidationError as e:
+
console.print(f"[red]Validation Error:[/red] {str(e)}")
+
raise typer.Exit(1)
except Exception as e:
-
print_error(f"Failed to discover feed metadata: {e}")
-
return None
+
console.print(f"[red]Error:[/red] {str(e)}")
+
raise typer.Exit(1)
+
+35 -25
src/thicket/cli/commands/duplicates.py
···
print_error,
print_info,
print_success,
+
get_tsv_mode,
)
···
),
) -> None:
"""Manage duplicate entry mappings."""
-
+
# Load configuration
config = load_config(config_file)
-
+
# Initialize Git store
git_store = GitStore(config.git_store)
-
+
if action == "list":
list_duplicates(git_store)
elif action == "add":
···
def list_duplicates(git_store: GitStore) -> None:
"""List all duplicate mappings."""
duplicates = git_store.get_duplicates()
-
+
if not duplicates.duplicates:
-
print_info("No duplicate mappings found")
+
if get_tsv_mode():
+
print("No duplicate mappings found")
+
else:
+
print_info("No duplicate mappings found")
return
-
-
table = Table(title="Duplicate Entry Mappings")
-
table.add_column("Duplicate ID", style="red")
-
table.add_column("Canonical ID", style="green")
-
-
for duplicate_id, canonical_id in duplicates.duplicates.items():
-
table.add_row(duplicate_id, canonical_id)
-
-
console.print(table)
-
print_info(f"Total duplicates: {len(duplicates.duplicates)}")
+
+
if get_tsv_mode():
+
print("Duplicate ID\tCanonical ID")
+
for duplicate_id, canonical_id in duplicates.duplicates.items():
+
print(f"{duplicate_id}\t{canonical_id}")
+
print(f"Total duplicates: {len(duplicates.duplicates)}")
+
else:
+
table = Table(title="Duplicate Entry Mappings")
+
table.add_column("Duplicate ID", style="red")
+
table.add_column("Canonical ID", style="green")
+
+
for duplicate_id, canonical_id in duplicates.duplicates.items():
+
table.add_row(duplicate_id, canonical_id)
+
+
console.print(table)
+
print_info(f"Total duplicates: {len(duplicates.duplicates)}")
def add_duplicate(git_store: GitStore, duplicate_id: Optional[str], canonical_id: Optional[str]) -> None:
···
if not duplicate_id:
print_error("Duplicate ID is required")
raise typer.Exit(1)
-
+
if not canonical_id:
print_error("Canonical ID is required")
raise typer.Exit(1)
-
+
# Check if duplicate_id already exists
duplicates = git_store.get_duplicates()
if duplicates.is_duplicate(duplicate_id):
···
print_error(f"Duplicate ID already mapped to: {existing_canonical}")
print_error("Use 'remove' first to change the mapping")
raise typer.Exit(1)
-
+
# Check if we're trying to make a canonical ID point to itself
if duplicate_id == canonical_id:
print_error("Duplicate ID cannot be the same as canonical ID")
raise typer.Exit(1)
-
+
# Add the mapping
git_store.add_duplicate(duplicate_id, canonical_id)
-
+
# Commit changes
git_store.commit_changes(f"Add duplicate mapping: {duplicate_id} -> {canonical_id}")
-
+
print_success(f"Added duplicate mapping: {duplicate_id} -> {canonical_id}")
···
if not duplicate_id:
print_error("Duplicate ID is required")
raise typer.Exit(1)
-
+
# Check if mapping exists
duplicates = git_store.get_duplicates()
if not duplicates.is_duplicate(duplicate_id):
print_error(f"No duplicate mapping found for: {duplicate_id}")
raise typer.Exit(1)
-
+
canonical_id = duplicates.get_canonical(duplicate_id)
-
+
# Remove the mapping
if git_store.remove_duplicate(duplicate_id):
# Commit changes
···
print_success(f"Removed duplicate mapping: {duplicate_id} -> {canonical_id}")
else:
print_error(f"Failed to remove duplicate mapping: {duplicate_id}")
-
raise typer.Exit(1)
+
raise typer.Exit(1)
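The add/remove handlers above depend on three DuplicateMap operations: is_duplicate, get_canonical, and the underlying mapping. A stand-in model capturing those semantics, assuming a pydantic base as used elsewhere in the project; the real class may differ in detail.

from pydantic import BaseModel, Field


class DuplicateMapSketch(BaseModel):
    # duplicate entry ID -> canonical entry ID
    duplicates: dict[str, str] = Field(default_factory=dict)

    def is_duplicate(self, entry_id: str) -> bool:
        return entry_id in self.duplicates

    def get_canonical(self, entry_id: str) -> str:
        # resolve a duplicate to its canonical ID; unknown IDs pass through
        return self.duplicates.get(entry_id, entry_id)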
+59
src/thicket/cli/commands/generate.py
···
+
"""Generate static HTML website from thicket data."""
+
+
from pathlib import Path
+
from typing import Optional
+
+
import typer
+
+
from ..main import app, console, load_thicket
+
+
+
+
+
@app.command()
+
def generate(
+
output: Path = typer.Option(
+
Path("./thicket-site"),
+
"--output",
+
"-o",
+
help="Output directory for the generated website",
+
),
+
template_dir: Optional[Path] = typer.Option(
+
None, "--templates", help="Custom template directory"
+
),
+
config_file: Optional[Path] = typer.Option(
+
None, "--config", help="Configuration file path"
+
),
+
) -> None:
+
"""Generate a static HTML website from thicket data."""
+
+
try:
+
# Load Thicket instance
+
thicket = load_thicket(config_file)
+
+
console.print(f"[blue]Generating static site to:[/blue] {output}")
+
+
# Generate the complete site
+
if thicket.generate_site(output, template_dir):
+
console.print(f"[green]โœ“[/green] Successfully generated site at {output}")
+
+
# Show what was generated
+
stats = thicket.get_stats()
+
console.print(f" โ€ข {stats.get('total_entries', 0)} entries")
+
console.print(f" โ€ข {stats.get('total_users', 0)} users")
+
console.print(f" โ€ข {stats.get('unique_urls', 0)} unique links")
+
+
# List generated files
+
if output.exists():
+
html_files = list(output.glob("*.html"))
+
if html_files:
+
console.print(" โ€ข Generated pages:")
+
for html_file in sorted(html_files):
+
console.print(f" - {html_file.name}")
+
else:
+
console.print("[red]โœ—[/red] Failed to generate site")
+
raise typer.Exit(1)
+
+
except Exception as e:
+
console.print(f"[red]Error:[/red] {str(e)}")
+
raise typer.Exit(1)
+427
src/thicket/cli/commands/index_cmd.py
···
+
"""CLI command for building reference index from blog entries."""
+
+
import json
+
from pathlib import Path
+
from typing import Optional
+
+
import typer
+
from rich.console import Console
+
from rich.progress import (
+
BarColumn,
+
Progress,
+
SpinnerColumn,
+
TaskProgressColumn,
+
TextColumn,
+
)
+
from rich.table import Table
+
+
from ...core.git_store import GitStore
+
from ...core.reference_parser import ReferenceIndex, ReferenceParser
+
from ..main import app
+
from ..utils import get_tsv_mode, load_config
+
+
console = Console()
+
+
+
@app.command()
+
def index(
+
config_file: Optional[Path] = typer.Option(
+
None,
+
"--config",
+
"-c",
+
help="Path to configuration file",
+
),
+
output_file: Optional[Path] = typer.Option(
+
None,
+
"--output",
+
"-o",
+
help="Path to output index file (default: updates links.json in git store)",
+
),
+
verbose: bool = typer.Option(
+
False,
+
"--verbose",
+
"-v",
+
help="Show detailed progress information",
+
),
+
) -> None:
+
"""Build a reference index showing which blog entries reference others.
+
+
This command analyzes all blog entries to detect cross-references between
+
different blogs, creating an index that can be used to build threaded
+
views of related content.
+
+
Updates the unified links.json file with reference data.
+
"""
+
try:
+
# Load configuration
+
config = load_config(config_file)
+
+
# Initialize Git store
+
git_store = GitStore(config.git_store)
+
+
# Initialize reference parser
+
parser = ReferenceParser()
+
+
# Build user domain mapping
+
if verbose:
+
console.print("Building user domain mapping...")
+
user_domains = parser.build_user_domain_mapping(git_store)
+
+
if verbose:
+
console.print(f"Found {len(user_domains)} users with {sum(len(d) for d in user_domains.values())} total domains")
+
+
# Initialize reference index
+
ref_index = ReferenceIndex()
+
ref_index.user_domains = user_domains
+
+
# Get all users
+
index = git_store._load_index()
+
users = list(index.users.keys())
+
+
if not users:
+
console.print("[yellow]No users found in Git store[/yellow]")
+
raise typer.Exit(0)
+
+
# Process all entries
+
total_entries = 0
+
total_references = 0
+
all_references = []
+
+
with Progress(
+
SpinnerColumn(),
+
TextColumn("[progress.description]{task.description}"),
+
BarColumn(),
+
TaskProgressColumn(),
+
console=console,
+
) as progress:
+
+
# Count total entries first
+
counting_task = progress.add_task("Counting entries...", total=len(users))
+
entry_counts = {}
+
for username in users:
+
entries = git_store.list_entries(username)
+
entry_counts[username] = len(entries)
+
total_entries += len(entries)
+
progress.advance(counting_task)
+
+
progress.remove_task(counting_task)
+
+
# Process entries - extract references
+
processing_task = progress.add_task(
+
f"Extracting references from {total_entries} entries...",
+
total=total_entries
+
)
+
+
for username in users:
+
entries = git_store.list_entries(username)
+
+
for entry in entries:
+
# Extract references from this entry
+
references = parser.extract_references(entry, username, user_domains)
+
all_references.extend(references)
+
+
progress.advance(processing_task)
+
+
if verbose and references:
+
console.print(f" Found {len(references)} references in {username}:{entry.title[:50]}...")
+
+
progress.remove_task(processing_task)
+
+
# Resolve target_entry_ids for references
+
if all_references:
+
resolve_task = progress.add_task(
+
f"Resolving {len(all_references)} references...",
+
total=len(all_references)
+
)
+
+
if verbose:
+
console.print(f"Resolving target entry IDs for {len(all_references)} references...")
+
+
resolved_references = parser.resolve_target_entry_ids(all_references, git_store)
+
+
# Count resolved references
+
resolved_count = sum(1 for ref in resolved_references if ref.target_entry_id is not None)
+
if verbose:
+
console.print(f"Resolved {resolved_count} out of {len(all_references)} references")
+
+
# Add resolved references to index
+
for ref in resolved_references:
+
ref_index.add_reference(ref)
+
total_references += 1
+
progress.advance(resolve_task)
+
+
progress.remove_task(resolve_task)
+
+
# Determine output path
+
if output_file:
+
output_path = output_file
+
else:
+
output_path = config.git_store / "links.json"
+
+
# Load existing links data or create new structure
+
if output_path.exists() and not output_file:
+
# Load existing unified structure
+
with open(output_path) as f:
+
existing_data = json.load(f)
+
else:
+
# Create new structure
+
existing_data = {
+
"links": {},
+
"reverse_mapping": {},
+
"user_domains": {}
+
}
+
+
# Update with reference data
+
existing_data["references"] = ref_index.to_dict()["references"]
+
existing_data["user_domains"] = {k: list(v) for k, v in user_domains.items()}
+
+
# Save updated structure
+
with open(output_path, "w") as f:
+
json.dump(existing_data, f, indent=2, default=str)
+
+
# Show summary
+
if not get_tsv_mode():
+
console.print("\n[green]โœ“ Reference index built successfully[/green]")
+
+
# Create summary table or TSV output
+
if get_tsv_mode():
+
print("Metric\tCount")
+
print(f"Total Users\t{len(users)}")
+
print(f"Total Entries\t{total_entries}")
+
print(f"Total References\t{total_references}")
+
print(f"Outbound Refs\t{len(ref_index.outbound_refs)}")
+
print(f"Inbound Refs\t{len(ref_index.inbound_refs)}")
+
print(f"Output File\t{output_path}")
+
else:
+
table = Table(title="Reference Index Summary")
+
table.add_column("Metric", style="cyan")
+
table.add_column("Count", style="green")
+
+
table.add_row("Total Users", str(len(users)))
+
table.add_row("Total Entries", str(total_entries))
+
table.add_row("Total References", str(total_references))
+
table.add_row("Outbound Refs", str(len(ref_index.outbound_refs)))
+
table.add_row("Inbound Refs", str(len(ref_index.inbound_refs)))
+
table.add_row("Output File", str(output_path))
+
+
console.print(table)
+
+
# Show some interesting statistics
+
if total_references > 0:
+
if not get_tsv_mode():
+
console.print("\n[bold]Reference Statistics:[/bold]")
+
+
# Most referenced users
+
target_counts = {}
+
unresolved_domains = set()
+
+
for ref in ref_index.references:
+
if ref.target_username:
+
target_counts[ref.target_username] = target_counts.get(ref.target_username, 0) + 1
+
else:
+
# Track unresolved domains
+
from urllib.parse import urlparse
+
domain = urlparse(ref.target_url).netloc.lower()
+
unresolved_domains.add(domain)
+
+
if target_counts:
+
if get_tsv_mode():
+
print("Referenced User\tReference Count")
+
for username, count in sorted(target_counts.items(), key=lambda x: x[1], reverse=True)[:5]:
+
print(f"{username}\t{count}")
+
else:
+
console.print("\nMost referenced users:")
+
for username, count in sorted(target_counts.items(), key=lambda x: x[1], reverse=True)[:5]:
+
console.print(f" {username}: {count} references")
+
+
if unresolved_domains and verbose:
+
if get_tsv_mode():
+
print("Unresolved Domain\tCount")
+
for domain in sorted(list(unresolved_domains)[:10]):
+
print(f"{domain}\t1")
+
if len(unresolved_domains) > 10:
+
print(f"... and {len(unresolved_domains) - 10} more\t...")
+
else:
+
console.print(f"\nUnresolved domains: {len(unresolved_domains)}")
+
for domain in sorted(list(unresolved_domains)[:10]):
+
console.print(f" {domain}")
+
if len(unresolved_domains) > 10:
+
console.print(f" ... and {len(unresolved_domains) - 10} more")
+
+
except Exception as e:
+
console.print(f"[red]Error building reference index: {e}[/red]")
+
if verbose:
+
console.print_exception()
+
raise typer.Exit(1)
+
+
+
@app.command()
+
def threads(
+
config_file: Optional[Path] = typer.Option(
+
None,
+
"--config",
+
"-c",
+
help="Path to configuration file",
+
),
+
index_file: Optional[Path] = typer.Option(
+
None,
+
"--index",
+
"-i",
+
help="Path to reference index file (default: links.json in git store)",
+
),
+
username: Optional[str] = typer.Option(
+
None,
+
"--username",
+
"-u",
+
help="Show threads for specific username only",
+
),
+
entry_id: Optional[str] = typer.Option(
+
None,
+
"--entry",
+
"-e",
+
help="Show thread for specific entry ID",
+
),
+
min_size: int = typer.Option(
+
2,
+
"--min-size",
+
"-m",
+
help="Minimum thread size to display",
+
),
+
) -> None:
+
"""Show threaded view of related blog entries.
+
+
This command uses the reference index to show which blog entries
+
are connected through cross-references, creating an email-style
+
threaded view of the conversation.
+
+
Reads reference data from the unified links.json file.
+
"""
+
try:
+
# Load configuration
+
config = load_config(config_file)
+
+
# Determine index file path
+
if index_file:
+
index_path = index_file
+
else:
+
index_path = config.git_store / "links.json"
+
+
if not index_path.exists():
+
console.print(f"[red]Links file not found: {index_path}[/red]")
+
console.print("Run 'thicket links' and 'thicket index' first to build the reference index")
+
raise typer.Exit(1)
+
+
# Load unified data
+
with open(index_path) as f:
+
unified_data = json.load(f)
+
+
# Check if references exist in the unified structure
+
if "references" not in unified_data:
+
console.print(f"[red]No references found in {index_path}[/red]")
+
console.print("Run 'thicket index' first to build the reference index")
+
raise typer.Exit(1)
+
+
# Extract reference data and reconstruct ReferenceIndex
+
ref_index = ReferenceIndex.from_dict({
+
"references": unified_data["references"],
+
"user_domains": unified_data.get("user_domains", {})
+
})
+
+
# Initialize Git store to get entry details
+
git_store = GitStore(config.git_store)
+
+
if entry_id and username:
+
# Show specific thread
+
thread_members = ref_index.get_thread_members(username, entry_id)
+
_display_thread(thread_members, ref_index, git_store, f"Thread for {username}:{entry_id}")
+
+
elif username:
+
# Show all threads involving this user
+
user_index = git_store._load_index()
+
user = user_index.get_user(username)
+
if not user:
+
console.print(f"[red]User not found: {username}[/red]")
+
raise typer.Exit(1)
+
+
entries = git_store.list_entries(username)
+
threads_found = set()
+
+
console.print(f"[bold]Threads involving {username}:[/bold]\n")
+
+
for entry in entries:
+
thread_members = ref_index.get_thread_members(username, entry.id)
+
if len(thread_members) >= min_size:
+
thread_key = tuple(sorted(thread_members))
+
if thread_key not in threads_found:
+
threads_found.add(thread_key)
+
_display_thread(thread_members, ref_index, git_store, f"Thread #{len(threads_found)}")
+
+
else:
+
# Show all threads
+
console.print("[bold]All conversation threads:[/bold]\n")
+
+
all_threads = set()
+
processed_entries = set()
+
+
# Get all entries
+
user_index = git_store._load_index()
+
for username in user_index.users.keys():
+
entries = git_store.list_entries(username)
+
for entry in entries:
+
entry_key = (username, entry.id)
+
if entry_key in processed_entries:
+
continue
+
+
thread_members = ref_index.get_thread_members(username, entry.id)
+
if len(thread_members) >= min_size:
+
thread_key = tuple(sorted(thread_members))
+
if thread_key not in all_threads:
+
all_threads.add(thread_key)
+
_display_thread(thread_members, ref_index, git_store, f"Thread #{len(all_threads)}")
+
+
# Mark all members as processed
+
for member in thread_members:
+
processed_entries.add(member)
+
+
if not all_threads:
+
console.print("[yellow]No conversation threads found[/yellow]")
+
console.print(f"(minimum thread size: {min_size})")
+
+
except Exception as e:
+
console.print(f"[red]Error showing threads: {e}[/red]")
+
raise typer.Exit(1)
+
+
+
def _display_thread(thread_members, ref_index, git_store, title):
+
"""Display a single conversation thread."""
+
console.print(f"[bold cyan]{title}[/bold cyan]")
+
console.print(f"Thread size: {len(thread_members)} entries")
+
+
# Get entry details for each member
+
thread_entries = []
+
for username, entry_id in thread_members:
+
entry = git_store.get_entry(username, entry_id)
+
if entry:
+
thread_entries.append((username, entry))
+
+
# Sort by publication date
+
thread_entries.sort(key=lambda x: x[1].published or x[1].updated)
+
+
# Display entries
+
for i, (username, entry) in enumerate(thread_entries):
+
prefix = "โ”œโ”€" if i < len(thread_entries) - 1 else "โ””โ”€"
+
+
# Get references for this entry
+
outbound = ref_index.get_outbound_refs(username, entry.id)
+
inbound = ref_index.get_inbound_refs(username, entry.id)
+
+
ref_info = ""
+
if outbound or inbound:
+
ref_info = f" ({len(outbound)} out, {len(inbound)} in)"
+
+
console.print(f" {prefix} [{username}] {entry.title[:60]}...{ref_info}")
+
+
if entry.published:
+
console.print(f" Published: {entry.published.strftime('%Y-%m-%d')}")
+
+
console.print() # Empty line after each thread
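Putting the reads and writes above together, the unified links.json that the index command produces and the threads command consumes looks roughly like this. The nested reference fields are inferred from the attributes used in this diff (source_username, source_entry_id, target_url, target_username, target_entry_id); concrete values are illustrative only.

links_json_sketch = {
    "links": {},                 # populated by 'thicket links' (not shown here)
    "reverse_mapping": {},
    "user_domains": {"alice": ["alice.example.com"]},
    "references": [
        {
            "source_username": "bob",
            "source_entry_id": "https://bob.example.com/2024/reply",
            "target_url": "https://alice.example.com/2024/post",
            "target_username": "alice",   # None until resolve_target_entry_ids succeeds
            "target_entry_id": "https://alice.example.com/2024/post",
        },
    ],
}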
+311
src/thicket/cli/commands/info_cmd.py
···
+
"""CLI command for displaying detailed information about a specific atom entry."""
+
+
import json
+
from pathlib import Path
+
from typing import Optional
+
+
import typer
+
from rich.console import Console
+
from rich.panel import Panel
+
from rich.table import Table
+
from rich.text import Text
+
+
from ...core.git_store import GitStore
+
from ...core.reference_parser import ReferenceIndex
+
from ..main import app
+
from ..utils import load_config, get_tsv_mode
+
+
console = Console()
+
+
+
@app.command()
+
def info(
+
identifier: str = typer.Argument(
+
...,
+
help="The atom ID or URL of the entry to display information about"
+
),
+
username: Optional[str] = typer.Option(
+
None,
+
"--username",
+
"-u",
+
help="Username to search for the entry (if not provided, searches all users)"
+
),
+
config_file: Optional[Path] = typer.Option(
+
Path("thicket.yaml"),
+
"--config",
+
"-c",
+
help="Path to configuration file",
+
),
+
show_content: bool = typer.Option(
+
False,
+
"--content",
+
help="Include the full content of the entry in the output"
+
),
+
) -> None:
+
"""Display detailed information about a specific atom entry.
+
+
You can specify the entry using either its atom ID or URL.
+
Shows all metadata for the given entry (title, dates, categories, author)
+
and summarizes all inbound and outbound links to/from other posts.
+
"""
+
try:
+
# Load configuration
+
config = load_config(config_file)
+
+
# Initialize Git store
+
git_store = GitStore(config.git_store)
+
+
# Find the entry
+
entry = None
+
found_username = None
+
+
# Check if identifier looks like a URL
+
is_url = identifier.startswith(('http://', 'https://'))
+
+
if username:
+
# Search specific username
+
if is_url:
+
# Search by URL
+
entries = git_store.list_entries(username)
+
for e in entries:
+
if str(e.link) == identifier:
+
entry = e
+
found_username = username
+
break
+
else:
+
# Search by atom ID
+
entry = git_store.get_entry(username, identifier)
+
if entry:
+
found_username = username
+
else:
+
# Search all users
+
index = git_store._load_index()
+
for user in index.users.keys():
+
if is_url:
+
# Search by URL
+
entries = git_store.list_entries(user)
+
for e in entries:
+
if str(e.link) == identifier:
+
entry = e
+
found_username = user
+
break
+
if entry:
+
break
+
else:
+
# Search by atom ID
+
entry = git_store.get_entry(user, identifier)
+
if entry:
+
found_username = user
+
break
+
+
if not entry or not found_username:
+
if username:
+
console.print(f"[red]Entry with {'URL' if is_url else 'atom ID'} '{identifier}' not found for user '{username}'[/red]")
+
else:
+
console.print(f"[red]Entry with {'URL' if is_url else 'atom ID'} '{identifier}' not found in any user's entries[/red]")
+
raise typer.Exit(1)
+
+
# Load reference index if available
+
links_path = config.git_store / "links.json"
+
ref_index = None
+
if links_path.exists():
+
with open(links_path) as f:
+
unified_data = json.load(f)
+
+
# Check if references exist in the unified structure
+
if "references" in unified_data:
+
ref_index = ReferenceIndex.from_dict({
+
"references": unified_data["references"],
+
"user_domains": unified_data.get("user_domains", {})
+
})
+
+
# Display information
+
if get_tsv_mode():
+
_display_entry_info_tsv(entry, found_username, ref_index, show_content)
+
else:
+
_display_entry_info(entry, found_username)
+
+
if ref_index:
+
_display_link_info(entry, found_username, ref_index)
+
else:
+
console.print("\n[yellow]No reference index found. Run 'thicket links' and 'thicket index' to build cross-reference data.[/yellow]")
+
+
# Optionally display content
+
if show_content and entry.content:
+
_display_content(entry.content)
+
+
except Exception as e:
+
console.print(f"[red]Error displaying entry info: {e}[/red]")
+
raise typer.Exit(1)
+
+
+
def _display_entry_info(entry, username: str) -> None:
+
"""Display basic entry information in a structured format."""
+
+
# Create main info panel
+
info_table = Table.grid(padding=(0, 2))
+
info_table.add_column("Field", style="cyan bold", width=15)
+
info_table.add_column("Value", style="white")
+
+
info_table.add_row("User", f"[green]{username}[/green]")
+
info_table.add_row("Atom ID", f"[blue]{entry.id}[/blue]")
+
info_table.add_row("Title", entry.title)
+
info_table.add_row("Link", str(entry.link))
+
+
if entry.published:
+
info_table.add_row("Published", entry.published.strftime("%Y-%m-%d %H:%M:%S UTC"))
+
+
info_table.add_row("Updated", entry.updated.strftime("%Y-%m-%d %H:%M:%S UTC"))
+
+
if entry.summary:
+
# Truncate long summaries
+
summary = entry.summary[:200] + "..." if len(entry.summary) > 200 else entry.summary
+
info_table.add_row("Summary", summary)
+
+
if entry.categories:
+
categories_text = ", ".join(entry.categories)
+
info_table.add_row("Categories", categories_text)
+
+
if entry.author:
+
author_info = []
+
if "name" in entry.author:
+
author_info.append(entry.author["name"])
+
if "email" in entry.author:
+
author_info.append(f"<{entry.author['email']}>")
+
if author_info:
+
info_table.add_row("Author", " ".join(author_info))
+
+
if entry.content_type:
+
info_table.add_row("Content Type", entry.content_type)
+
+
if entry.rights:
+
info_table.add_row("Rights", entry.rights)
+
+
if entry.source:
+
info_table.add_row("Source Feed", entry.source)
+
+
panel = Panel(
+
info_table,
+
title=f"[bold]Entry Information[/bold]",
+
border_style="blue"
+
)
+
+
console.print(panel)
+
+
+
def _display_link_info(entry, username: str, ref_index: ReferenceIndex) -> None:
+
"""Display inbound and outbound link information."""
+
+
# Get links
+
outbound_refs = ref_index.get_outbound_refs(username, entry.id)
+
inbound_refs = ref_index.get_inbound_refs(username, entry.id)
+
+
if not outbound_refs and not inbound_refs:
+
console.print("\n[dim]No cross-references found for this entry.[/dim]")
+
return
+
+
# Create links table
+
links_table = Table(title="Cross-References")
+
links_table.add_column("Direction", style="cyan", width=10)
+
links_table.add_column("Target/Source", style="green", width=20)
+
links_table.add_column("URL", style="blue", width=50)
+
+
# Add outbound references
+
for ref in outbound_refs:
+
target_info = f"{ref.target_username}:{ref.target_entry_id}" if ref.target_username and ref.target_entry_id else "External"
+
links_table.add_row("โ†’ Out", target_info, ref.target_url)
+
+
# Add inbound references
+
for ref in inbound_refs:
+
source_info = f"{ref.source_username}:{ref.source_entry_id}"
+
links_table.add_row("โ† In", source_info, ref.target_url)
+
+
console.print()
+
console.print(links_table)
+
+
# Summary
+
console.print(f"\n[bold]Summary:[/bold] {len(outbound_refs)} outbound, {len(inbound_refs)} inbound references")
+
+
+
def _display_content(content: str) -> None:
+
"""Display the full content of the entry."""
+
+
# Truncate very long content
+
display_content = content
+
if len(content) > 5000:
+
display_content = content[:5000] + "\n\n[... content truncated ...]"
+
+
panel = Panel(
+
display_content,
+
title="[bold]Entry Content[/bold]",
+
border_style="green",
+
expand=False
+
)
+
+
console.print()
+
console.print(panel)
+
+
+
def _display_entry_info_tsv(entry, username: str, ref_index: Optional[ReferenceIndex], show_content: bool) -> None:
+
"""Display entry information in TSV format."""
+
+
# Basic info
+
print("Field\tValue")
+
print(f"User\t{username}")
+
print(f"Atom ID\t{entry.id}")
+
print(f"Title\t{entry.title.replace(chr(9), ' ').replace(chr(10), ' ').replace(chr(13), ' ')}")
+
print(f"Link\t{entry.link}")
+
+
if entry.published:
+
print(f"Published\t{entry.published.strftime('%Y-%m-%d %H:%M:%S UTC')}")
+
+
print(f"Updated\t{entry.updated.strftime('%Y-%m-%d %H:%M:%S UTC')}")
+
+
if entry.summary:
+
# Escape tabs and newlines in summary
+
summary = entry.summary.replace('\t', ' ').replace('\n', ' ').replace('\r', ' ')
+
print(f"Summary\t{summary}")
+
+
if entry.categories:
+
print(f"Categories\t{', '.join(entry.categories)}")
+
+
if entry.author:
+
author_info = []
+
if "name" in entry.author:
+
author_info.append(entry.author["name"])
+
if "email" in entry.author:
+
author_info.append(f"<{entry.author['email']}>")
+
if author_info:
+
print(f"Author\t{' '.join(author_info)}")
+
+
if entry.content_type:
+
print(f"Content Type\t{entry.content_type}")
+
+
if entry.rights:
+
print(f"Rights\t{entry.rights}")
+
+
if entry.source:
+
print(f"Source Feed\t{entry.source}")
+
+
# Add reference info if available
+
if ref_index:
+
outbound_refs = ref_index.get_outbound_refs(username, entry.id)
+
inbound_refs = ref_index.get_inbound_refs(username, entry.id)
+
+
print(f"Outbound References\t{len(outbound_refs)}")
+
print(f"Inbound References\t{len(inbound_refs)}")
+
+
# Show each reference
+
for ref in outbound_refs:
+
target_info = f"{ref.target_username}:{ref.target_entry_id}" if ref.target_username and ref.target_entry_id else "External"
+
print(f"Outbound Reference\t{target_info}\t{ref.target_url}")
+
+
for ref in inbound_refs:
+
source_info = f"{ref.source_username}:{ref.source_entry_id}"
+
print(f"Inbound Reference\t{source_info}\t{ref.target_url}")
+
+
# Show content if requested
+
if show_content and entry.content:
+
# Escape tabs and newlines in content
+
content = entry.content.replace('\t', ' ').replace('\n', ' ').replace('\r', ' ')
+
print(f"Content\t{content}")
+54 -43
src/thicket/cli/commands/init.py
···
"""Initialize command for thicket."""
+
import yaml
from pathlib import Path
from typing import Optional
import typer
-
from pydantic import ValidationError
-
from ...core.git_store import GitStore
+
from ..main import app, console, get_config_path
from ...models import ThicketConfig
-
from ..main import app
-
from ..utils import print_error, print_success, save_config
+
from ... import Thicket
@app.command()
···
None, "--cache-dir", "-c", help="Cache directory (default: ~/.cache/thicket)"
),
config_file: Optional[Path] = typer.Option(
-
None, "--config", help="Configuration file path (default: thicket.yaml)"
+
None, "--config", help="Configuration file path (default: ~/.config/thicket/config.yaml)"
),
force: bool = typer.Option(
False, "--force", "-f", help="Overwrite existing configuration"
),
) -> None:
"""Initialize a new thicket configuration and Git store."""
-
+
# Set default paths
if cache_dir is None:
-
from platformdirs import user_cache_dir
-
cache_dir = Path(user_cache_dir("thicket"))
-
+
cache_dir = Path.home() / ".cache" / "thicket"
+
if config_file is None:
-
config_file = Path("thicket.yaml")
-
+
config_file = get_config_path()
+
# Check if config already exists
if config_file.exists() and not force:
-
print_error(f"Configuration file already exists: {config_file}")
-
print_error("Use --force to overwrite")
+
console.print(f"[red]Configuration file already exists:[/red] {config_file}")
+
console.print("Use --force to overwrite")
raise typer.Exit(1)
-
-
# Create cache directory
-
cache_dir.mkdir(parents=True, exist_ok=True)
-
-
# Create Git store
+
try:
-
git_store_obj = GitStore(git_store)
-
print_success(f"Initialized Git store at: {git_store}")
-
except Exception as e:
-
print_error(f"Failed to initialize Git store: {e}")
-
raise typer.Exit(1)
-
-
# Create configuration
-
try:
-
config = ThicketConfig(
-
git_store=git_store,
-
cache_dir=cache_dir,
-
users=[]
-
)
+
# Create directories
+
git_store.mkdir(parents=True, exist_ok=True)
+
cache_dir.mkdir(parents=True, exist_ok=True)
+
config_file.parent.mkdir(parents=True, exist_ok=True)
+
+
# Create Thicket instance with minimal config
+
thicket = Thicket.create(git_store, cache_dir)
+
+
# Initialize the repository
+
if thicket.init_repository():
+
console.print(f"[green]โœ“[/green] Initialized Git store at: {git_store}")
+
else:
+
console.print(f"[red]โœ—[/red] Failed to initialize Git store")
+
raise typer.Exit(1)
+
+
# Save configuration
+
config_data = {
+
'git_store': str(git_store),
+
'cache_dir': str(cache_dir),
+
'users': []
+
}
-
save_config(config, config_file)
-
print_success(f"Created configuration file: {config_file}")
+
with open(config_file, 'w') as f:
+
yaml.dump(config_data, f, default_flow_style=False)
-
except ValidationError as e:
-
print_error(f"Invalid configuration: {e}")
-
raise typer.Exit(1)
+
console.print(f"[green]โœ“[/green] Created configuration file: {config_file}")
+
+
# Create initial commit
+
if thicket.commit_changes("Initialize thicket repository"):
+
console.print("[green]โœ“[/green] Created initial commit")
+
+
console.print("\n[green]Thicket initialized successfully![/green]")
+
console.print(f" โ€ข Git store: {git_store}")
+
console.print(f" โ€ข Cache directory: {cache_dir}")
+
console.print(f" โ€ข Configuration: {config_file}")
+
console.print("\n[blue]Next steps:[/blue]")
+
console.print(" 1. Add your first user and feed:")
+
console.print(f" [cyan]thicket add username https://example.com/feed.xml[/cyan]")
+
console.print(" 2. Sync feeds:")
+
console.print(f" [cyan]thicket sync[/cyan]")
+
console.print(" 3. Generate a website:")
+
console.print(f" [cyan]thicket generate[/cyan]")
+
except Exception as e:
-
print_error(f"Failed to create configuration: {e}")
+
console.print(f"[red]Error:[/red] {str(e)}")
raise typer.Exit(1)
-
-
print_success("Thicket initialized successfully!")
-
print_success(f"Git store: {git_store}")
-
print_success(f"Cache directory: {cache_dir}")
-
print_success(f"Configuration: {config_file}")
-
print_success("Run 'thicket add user' to add your first user and feed.")
+64 -34
src/thicket/cli/commands/list_cmd.py
···
"""List command for thicket."""
+
import re
from pathlib import Path
from typing import Optional
···
load_config,
print_error,
print_feeds_table,
+
print_feeds_table_from_git,
print_info,
print_users_table,
+
print_users_table_from_git,
+
print_entries_tsv,
+
get_tsv_mode,
)
···
),
) -> None:
"""List users, feeds, or entries."""
-
+
# Load configuration
config = load_config(config_file)
-
+
+
# Initialize Git store
+
git_store = GitStore(config.git_store)
+
if what == "users":
-
list_users(config)
+
list_users(git_store)
elif what == "feeds":
-
list_feeds(config, user)
+
list_feeds(git_store, user)
elif what == "entries":
-
list_entries(config, user, limit)
+
list_entries(git_store, user, limit)
else:
print_error(f"Unknown list type: {what}")
print_error("Use 'users', 'feeds', or 'entries'")
raise typer.Exit(1)
-
def list_users(config) -> None:
+
def list_users(git_store: GitStore) -> None:
"""List all users."""
-
if not config.users:
+
index = git_store._load_index()
+
users = list(index.users.values())
+
+
if not users:
print_info("No users configured")
return
-
-
print_users_table(config)
+
print_users_table_from_git(users)
-
def list_feeds(config, username: Optional[str] = None) -> None:
+
+
def list_feeds(git_store: GitStore, username: Optional[str] = None) -> None:
"""List feeds, optionally filtered by user."""
if username:
-
user = config.find_user(username)
+
user = git_store.get_user(username)
if not user:
print_error(f"User '{username}' not found")
raise typer.Exit(1)
-
+
if not user.feeds:
print_info(f"No feeds configured for user '{username}'")
return
-
-
print_feeds_table(config, username)
+
+
print_feeds_table_from_git(git_store, username)
-
def list_entries(config, username: Optional[str] = None, limit: Optional[int] = None) -> None:
+
def list_entries(git_store: GitStore, username: Optional[str] = None, limit: Optional[int] = None) -> None:
"""List entries, optionally filtered by user."""
-
-
# Initialize Git store
-
git_store = GitStore(config.git_store)
-
+
if username:
# List entries for specific user
-
user = config.find_user(username)
+
user = git_store.get_user(username)
if not user:
print_error(f"User '{username}' not found")
raise typer.Exit(1)
-
+
entries = git_store.list_entries(username, limit)
if not entries:
print_info(f"No entries found for user '{username}'")
return
-
+
print_entries_table([entries], [username])
-
+
else:
# List entries for all users
all_entries = []
all_usernames = []
-
-
for user in config.users:
+
+
index = git_store._load_index()
+
for user in index.users.values():
entries = git_store.list_entries(user.username, limit)
if entries:
all_entries.append(entries)
all_usernames.append(user.username)
-
+
if not all_entries:
print_info("No entries found")
return
-
+
print_entries_table(all_entries, all_usernames)
+
def _clean_html_content(content: Optional[str]) -> str:
+
"""Clean HTML content for display in table."""
+
if not content:
+
return ""
+
+
# Remove HTML tags
+
clean_text = re.sub(r'<[^>]+>', ' ', content)
+
# Replace multiple whitespace with single space
+
clean_text = re.sub(r'\s+', ' ', clean_text)
+
# Strip and limit length
+
clean_text = clean_text.strip()
+
if len(clean_text) > 100:
+
clean_text = clean_text[:97] + "..."
+
+
return clean_text
+
+
def print_entries_table(entries_by_user: list[list], usernames: list[str]) -> None:
"""Print a table of entries."""
+
if get_tsv_mode():
+
print_entries_tsv(entries_by_user, usernames)
+
return
+
table = Table(title="Feed Entries")
table.add_column("User", style="cyan", no_wrap=True)
table.add_column("Title", style="bold")
table.add_column("Updated", style="blue")
table.add_column("URL", style="green")
-
+
# Combine all entries with usernames
all_entries = []
for entries, username in zip(entries_by_user, usernames):
for entry in entries:
all_entries.append((username, entry))
-
+
# Sort by updated time (newest first)
all_entries.sort(key=lambda x: x[1].updated, reverse=True)
-
+
for username, entry in all_entries:
# Format updated time
updated_str = entry.updated.strftime("%Y-%m-%d %H:%M")
-
+
# Truncate title if too long
title = entry.title
if len(title) > 50:
title = title[:47] + "..."
-
+
table.add_row(
username,
title,
updated_str,
str(entry.link),
)
-
-
console.print(table)
+
+
console.print(table)
+67 -113
src/thicket/cli/commands/sync.py
···
from typing import Optional
import typer
-
from rich.progress import track
+
from rich.progress import Progress, SpinnerColumn, TextColumn
-
from ...core.feed_parser import FeedParser
-
from ...core.git_store import GitStore
-
from ..main import app
-
from ..utils import (
-
create_progress,
-
load_config,
-
print_error,
-
print_info,
-
print_success,
-
)
+
from ..main import app, console, load_thicket
@app.command()
def sync(
-
all_users: bool = typer.Option(
-
False, "--all", "-a", help="Sync all users and feeds"
-
),
user: Optional[str] = typer.Option(
-
None, "--user", "-u", help="Sync specific user only"
+
None, "--user", "-u", help="Sync specific user only (default: all users)"
),
config_file: Optional[Path] = typer.Option(
-
Path("thicket.yaml"), "--config", help="Configuration file path"
+
None, "--config", help="Configuration file path"
),
-
dry_run: bool = typer.Option(
-
False, "--dry-run", help="Show what would be synced without making changes"
+
commit: bool = typer.Option(
+
True, "--commit/--no-commit", help="Commit changes after sync"
),
) -> None:
"""Sync feeds and store entries in Git repository."""
-
# Load configuration
-
config = load_config(config_file)
-
-
# Determine which users to sync
-
users_to_sync = []
-
if all_users:
-
users_to_sync = config.users
-
elif user:
-
user_config = config.find_user(user)
-
if not user_config:
-
print_error(f"User '{user}' not found")
-
raise typer.Exit(1)
-
users_to_sync = [user_config]
-
else:
-
print_error("Specify --all to sync all users or --user to sync a specific user")
-
raise typer.Exit(1)
-
-
if not users_to_sync:
-
print_info("No users configured to sync")
-
return
-
-
# Initialize Git store
-
git_store = GitStore(config.git_store)
-
-
# Sync each user
-
total_new_entries = 0
-
total_updated_entries = 0
-
-
for user_config in users_to_sync:
-
print_info(f"Syncing user: {user_config.username}")
+
try:
+
# Load Thicket instance
+
thicket = load_thicket(config_file)
-
user_new_entries = 0
-
user_updated_entries = 0
+
# Progress callback for tracking
+
current_task = None
-
# Sync each feed for the user
-
for feed_url in track(user_config.feeds, description=f"Syncing {user_config.username}'s feeds"):
-
try:
-
new_entries, updated_entries = asyncio.run(
-
sync_feed(git_store, user_config.username, feed_url, dry_run)
-
)
-
user_new_entries += new_entries
-
user_updated_entries += updated_entries
-
-
except Exception as e:
-
print_error(f"Failed to sync feed {feed_url}: {e}")
-
continue
+
def progress_callback(message: str, current: int = 0, total: int = 0):
+
nonlocal current_task
+
current_task = message
+
if total > 0:
+
console.print(f"[blue]Progress:[/blue] {message} ({current}/{total})")
+
else:
+
console.print(f"[blue]Info:[/blue] {message}")
-
print_info(f"User {user_config.username}: {user_new_entries} new, {user_updated_entries} updated")
-
total_new_entries += user_new_entries
-
total_updated_entries += user_updated_entries
-
-
# Commit changes if not dry run
-
if not dry_run and (total_new_entries > 0 or total_updated_entries > 0):
-
commit_message = f"Sync feeds: {total_new_entries} new entries, {total_updated_entries} updated"
-
git_store.commit_changes(commit_message)
-
print_success(f"Committed changes: {commit_message}")
-
-
# Summary
-
if dry_run:
-
print_info(f"Dry run complete: would sync {total_new_entries} new entries, {total_updated_entries} updated")
-
else:
-
print_success(f"Sync complete: {total_new_entries} new entries, {total_updated_entries} updated")
-
-
-
async def sync_feed(git_store: GitStore, username: str, feed_url, dry_run: bool) -> tuple[int, int]:
-
"""Sync a single feed for a user."""
-
-
parser = FeedParser()
-
-
try:
-
# Fetch and parse feed
-
content = await parser.fetch_feed(feed_url)
-
metadata, entries = parser.parse_feed(content, feed_url)
+
# Run sync with progress
+
with Progress(
+
SpinnerColumn(),
+
TextColumn("[progress.description]{task.description}"),
+
console=console,
+
transient=True,
+
) as progress:
+
task = progress.add_task("Syncing feeds...", total=None)
+
+
# Perform sync
+
results = asyncio.run(thicket.sync_feeds(user, progress_callback))
+
+
progress.remove_task(task)
-
new_entries = 0
-
updated_entries = 0
+
# Process results
+
total_new = 0
+
total_processed = 0
+
errors = []
-
# Process each entry
-
for entry in entries:
-
try:
-
# Check if entry already exists
-
existing_entry = git_store.get_entry(username, entry.id)
+
if isinstance(results, dict):
+
for username, user_results in results.items():
+
if 'error' in user_results:
+
errors.append(f"{username}: {user_results['error']}")
+
continue
-
if existing_entry:
-
# Check if entry has been updated
-
if existing_entry.updated != entry.updated:
-
if not dry_run:
-
git_store.store_entry(username, entry)
-
updated_entries += 1
-
else:
-
# New entry
-
if not dry_run:
-
git_store.store_entry(username, entry)
-
new_entries += 1
-
-
except Exception as e:
-
print_error(f"Failed to process entry {entry.id}: {e}")
-
continue
+
total_new += user_results.get('new_entries', 0)
+
total_processed += user_results.get('feeds_processed', 0)
+
+
console.print(f"[green]โœ“[/green] {username}: {user_results.get('new_entries', 0)} new entries from {user_results.get('feeds_processed', 0)} feeds")
+
+
# Show any feed-specific errors
+
for error in user_results.get('errors', []):
+
console.print(f" [yellow]Warning:[/yellow] {error}")
+
+
# Show errors
+
for error in errors:
+
console.print(f"[red]Error:[/red] {error}")
-
return new_entries, updated_entries
+
# Commit changes if requested
+
if commit and total_new > 0:
+
commit_message = f"Sync feeds: {total_new} new entries from {total_processed} feeds"
+
if thicket.commit_changes(commit_message):
+
console.print(f"[green]โœ“[/green] Committed: {commit_message}")
+
else:
+
console.print("[red]โœ—[/red] Failed to commit changes")
+
# Summary
+
if total_new > 0:
+
console.print(f"\n[green]Sync complete:[/green] {total_new} new entries processed")
+
else:
+
console.print("\n[blue]Sync complete:[/blue] No new entries found")
+
except Exception as e:
-
print_error(f"Failed to sync feed {feed_url}: {e}")
-
return 0, 0
+
console.print(f"[red]Error:[/red] {str(e)}")
+
raise typer.Exit(1)
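The result-processing loop above implies the per-user shape that thicket.sync_feeds is expected to return; reconstructed here with illustrative values (the actual return type may carry more fields):

results_sketch = {
    "alice": {
        "new_entries": 3,
        "feeds_processed": 2,
        "errors": ["https://alice.example.com/old.xml: HTTP 404"],  # feed-level warnings
    },
    "bob": {
        "error": "config not found",  # user-level failure; reported and skipped
    },
}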
+47 -5
src/thicket/cli/main.py
···
"""Main CLI application using Typer."""
+
from pathlib import Path
+
from typing import Optional
+
import typer
from rich.console import Console
-
from .. import __version__
+
from .. import __version__, Thicket, ThicketConfig
app = typer.Typer(
name="thicket",
···
console = Console()
+
# Global state for TSV output mode
+
tsv_mode = False
+
def version_callback(value: bool) -> None:
"""Show version and exit."""
···
raise typer.Exit()
+
def load_thicket(config_path: Optional[Path] = None) -> Thicket:
+
"""Load Thicket instance from configuration."""
+
if config_path and config_path.exists():
+
return Thicket.from_config_file(config_path)
+
+
# Try default locations
+
default_paths = [
+
Path("thicket.yaml"),
+
Path("thicket.yml"),
+
Path("thicket.json"),
+
Path.home() / ".config" / "thicket" / "config.yaml",
+
Path.home() / ".thicket.yaml",
+
]
+
+
for path in default_paths:
+
if path.exists():
+
return Thicket.from_config_file(path)
+
+
# No config found
+
console.print("[red]Error:[/red] No configuration file found.")
+
console.print("Use [bold]thicket init[/bold] to create a new configuration or specify --config")
+
raise typer.Exit(1)
+
+
+
def get_config_path() -> Path:
+
"""Get the default configuration path for new configs."""
+
config_dir = Path.home() / ".config" / "thicket"
+
config_dir.mkdir(parents=True, exist_ok=True)
+
return config_dir / "config.yaml"
+
+
@app.callback()
def main(
version: bool = typer.Option(
···
callback=version_callback,
is_eager=True,
),
+
tsv: bool = typer.Option(
+
False,
+
"--tsv",
+
help="Output in tab-separated values format without truncation",
+
),
) -> None:
"""Thicket: A CLI tool for persisting Atom/RSS feeds in Git repositories."""
-
pass
+
global tsv_mode
+
tsv_mode = tsv
# Import commands to register them
-
from .commands import duplicates, init, add, sync, list_cmd # noqa: E402
-
+
from .commands import add, duplicates, generate, index_cmd, info_cmd, init, links_cmd, list_cmd, sync
if __name__ == "__main__":
-
app()
+
app()
+163 -18
src/thicket/cli/utils.py
···
from rich.progress import Progress, SpinnerColumn, TextColumn
from rich.table import Table
-
from ..models import ThicketConfig
+
from ..models import ThicketConfig, UserMetadata
+
from ..core.git_store import GitStore
console = Console()
+
def get_tsv_mode() -> bool:
+
"""Get the global TSV mode setting."""
+
from .main import tsv_mode
+
return tsv_mode
+
+
def load_config(config_path: Optional[Path] = None) -> ThicketConfig:
"""Load thicket configuration from file or environment."""
if config_path and config_path.exists():
import yaml
-
+
with open(config_path) as f:
config_data = yaml.safe_load(f)
-
+
# Convert to ThicketConfig
return ThicketConfig(**config_data)
-
+
# Try to load from default locations or environment
try:
+
# First try to find thicket.yaml in current directory
+
default_config = Path("thicket.yaml")
+
if default_config.exists():
+
import yaml
+
with open(default_config) as f:
+
config_data = yaml.safe_load(f)
+
return ThicketConfig(**config_data)
+
+
# Fall back to environment variables
return ThicketConfig()
except Exception as e:
console.print(f"[red]Error loading configuration: {e}[/red]")
console.print("[yellow]Run 'thicket init' to create a new configuration.[/yellow]")
-
raise typer.Exit(1)
+
raise typer.Exit(1) from e
def save_config(config: ThicketConfig, config_path: Path) -> None:
"""Save thicket configuration to file."""
import yaml
-
-
config_data = config.model_dump(mode="json")
-
+
+
config_data = config.model_dump(mode="json", exclude_none=True)
+
# Convert Path objects to strings for YAML serialization
config_data["git_store"] = str(config_data["git_store"])
config_data["cache_dir"] = str(config_data["cache_dir"])
-
+
with open(config_path, "w") as f:
yaml.dump(config_data, f, default_flow_style=False, sort_keys=False)
···
def print_users_table(config: ThicketConfig) -> None:
"""Print a table of users and their feeds."""
+
if get_tsv_mode():
+
print_users_tsv(config)
+
return
+
table = Table(title="Users and Feeds")
table.add_column("Username", style="cyan", no_wrap=True)
table.add_column("Display Name", style="magenta")
table.add_column("Email", style="blue")
table.add_column("Homepage", style="green")
table.add_column("Feeds", style="yellow")
-
+
for user in config.users:
feeds_str = "\n".join(str(feed) for feed in user.feeds)
table.add_row(
···
str(user.homepage) if user.homepage else "",
feeds_str,
)
-
+
console.print(table)
def print_feeds_table(config: ThicketConfig, username: Optional[str] = None) -> None:
"""Print a table of feeds, optionally filtered by username."""
+
if get_tsv_mode():
+
print_feeds_tsv(config, username)
+
return
+
table = Table(title=f"Feeds{f' for {username}' if username else ''}")
table.add_column("Username", style="cyan", no_wrap=True)
table.add_column("Feed URL", style="blue")
table.add_column("Status", style="green")
-
+
users = [config.find_user(username)] if username else config.users
users = [u for u in users if u is not None]
-
+
for user in users:
for feed in user.feeds:
table.add_row(
···
str(feed),
"Active", # TODO: Add actual status checking
)
-
+
console.print(table)
···
def print_success(message: str) -> None:
"""Print a success message."""
-
console.print(f"[green][/green] {message}")
+
console.print(f"[green]โœ“[/green] {message}")
def print_error(message: str) -> None:
"""Print an error message."""
-
console.print(f"[red][/red] {message}")
+
console.print(f"[red]โœ—[/red] {message}")
def print_warning(message: str) -> None:
"""Print a warning message."""
-
console.print(f"[yellow]๏ฟฝ[/yellow] {message}")
+
console.print(f"[yellow]โš [/yellow] {message}")
def print_info(message: str) -> None:
"""Print an info message."""
-
console.print(f"[blue]9[/blue] {message}")
+
console.print(f"[blue]โ„น[/blue] {message}")
+
+
+
def print_users_table_from_git(users: list[UserMetadata]) -> None:
+
"""Print a table of users from git repository."""
+
if get_tsv_mode():
+
print_users_tsv_from_git(users)
+
return
+
+
table = Table(title="Users and Feeds")
+
table.add_column("Username", style="cyan", no_wrap=True)
+
table.add_column("Display Name", style="magenta")
+
table.add_column("Email", style="blue")
+
table.add_column("Homepage", style="green")
+
table.add_column("Feeds", style="yellow")
+
+
for user in users:
+
feeds_str = "\n".join(user.feeds)
+
table.add_row(
+
user.username,
+
user.display_name or "",
+
user.email or "",
+
user.homepage or "",
+
feeds_str,
+
)
+
+
console.print(table)
+
+
+
def print_feeds_table_from_git(git_store: GitStore, username: Optional[str] = None) -> None:
+
"""Print a table of feeds from git repository."""
+
if get_tsv_mode():
+
print_feeds_tsv_from_git(git_store, username)
+
return
+
+
table = Table(title=f"Feeds{f' for {username}' if username else ''}")
+
table.add_column("Username", style="cyan", no_wrap=True)
+
table.add_column("Feed URL", style="blue")
+
table.add_column("Status", style="green")
+
+
if username:
+
user = git_store.get_user(username)
+
users = [user] if user else []
+
else:
+
index = git_store._load_index()
+
users = list(index.users.values())
+
+
for user in users:
+
for feed in user.feeds:
+
table.add_row(
+
user.username,
+
feed,
+
"Active", # TODO: Add actual status checking
+
)
+
+
console.print(table)
+
+
+
def print_users_tsv(config: ThicketConfig) -> None:
+
"""Print users in TSV format."""
+
print("Username\tDisplay Name\tEmail\tHomepage\tFeeds")
+
for user in config.users:
+
feeds_str = ",".join(str(feed) for feed in user.feeds)
+
print(f"{user.username}\t{user.display_name or ''}\t{user.email or ''}\t{user.homepage or ''}\t{feeds_str}")
+
+
+
def print_users_tsv_from_git(users: list[UserMetadata]) -> None:
+
"""Print users from git repository in TSV format."""
+
print("Username\tDisplay Name\tEmail\tHomepage\tFeeds")
+
for user in users:
+
feeds_str = ",".join(user.feeds)
+
print(f"{user.username}\t{user.display_name or ''}\t{user.email or ''}\t{user.homepage or ''}\t{feeds_str}")
+
+
+
def print_feeds_tsv(config: ThicketConfig, username: Optional[str] = None) -> None:
+
"""Print feeds in TSV format."""
+
print("Username\tFeed URL\tStatus")
+
users = [config.find_user(username)] if username else config.users
+
users = [u for u in users if u is not None]
+
+
for user in users:
+
for feed in user.feeds:
+
print(f"{user.username}\t{feed}\tActive")
+
+
+
def print_feeds_tsv_from_git(git_store: GitStore, username: Optional[str] = None) -> None:
+
"""Print feeds from git repository in TSV format."""
+
print("Username\tFeed URL\tStatus")
+
+
if username:
+
user = git_store.get_user(username)
+
users = [user] if user else []
+
else:
+
index = git_store._load_index()
+
users = list(index.users.values())
+
+
for user in users:
+
for feed in user.feeds:
+
print(f"{user.username}\t{feed}\tActive")
+
+
+
def print_entries_tsv(entries_by_user: list[list], usernames: list[str]) -> None:
+
"""Print entries in TSV format."""
+
print("User\tAtom ID\tTitle\tUpdated\tURL")
+
+
# Combine all entries with usernames
+
all_entries = []
+
for entries, username in zip(entries_by_user, usernames):
+
for entry in entries:
+
all_entries.append((username, entry))
+
+
# Sort by updated time (newest first)
+
all_entries.sort(key=lambda x: x[1].updated, reverse=True)
+
+
for username, entry in all_entries:
+
# Format updated time
+
updated_str = entry.updated.strftime("%Y-%m-%d %H:%M")
+
+
# Escape tabs and newlines in title to preserve TSV format
+
title = entry.title.replace('\t', ' ').replace('\n', ' ').replace('\r', ' ')
+
+
print(f"{username}\t{entry.id}\t{title}\t{updated_str}\t{entry.link}")
+1 -1
src/thicket/core/__init__.py
···
from .feed_parser import FeedParser
from .git_store import GitStore
-
__all__ = ["FeedParser", "GitStore"]
+
__all__ = ["FeedParser", "GitStore"]
+29 -29
src/thicket/core/feed_parser.py
···
from datetime import datetime
from typing import Optional
-
from urllib.parse import urljoin, urlparse
+
from urllib.parse import urlparse
import bleach
import feedparser
···
def parse_feed(self, content: str, source_url: Optional[HttpUrl] = None) -> tuple[FeedMetadata, list[AtomEntry]]:
"""Parse feed content and return metadata and entries."""
parsed = feedparser.parse(content)
-
+
if parsed.bozo and parsed.bozo_exception:
# Try to continue with potentially malformed feed
pass
-
+
# Extract feed metadata
feed_meta = self._extract_feed_metadata(parsed.feed)
-
+
# Extract and normalize entries
entries = []
for entry in parsed.entries:
···
# Log error but continue processing other entries
print(f"Error processing entry {getattr(entry, 'id', 'unknown')}: {e}")
continue
-
+
return feed_meta, entries
def _extract_feed_metadata(self, feed: feedparser.FeedParserDict) -> FeedMetadata:
···
author_name = None
author_email = None
author_uri = None
-
+
if hasattr(feed, 'author_detail'):
author_name = feed.author_detail.get('name')
author_email = feed.author_detail.get('email')
author_uri = feed.author_detail.get('href')
elif hasattr(feed, 'author'):
author_name = feed.author
-
+
# Parse managing editor for RSS feeds
if not author_email and hasattr(feed, 'managingEditor'):
author_email = feed.managingEditor
-
+
# Parse feed link
feed_link = None
if hasattr(feed, 'link'):
···
feed_link = HttpUrl(feed.link)
except ValidationError:
pass
-
+
# Parse image/icon/logo
logo = None
icon = None
image_url = None
-
+
if hasattr(feed, 'image'):
try:
image_url = HttpUrl(feed.image.get('href', feed.image.get('url', '')))
except (ValidationError, AttributeError):
pass
-
+
if hasattr(feed, 'icon'):
try:
icon = HttpUrl(feed.icon)
except ValidationError:
pass
-
+
if hasattr(feed, 'logo'):
try:
logo = HttpUrl(feed.logo)
except ValidationError:
pass
-
+
return FeedMetadata(
title=getattr(feed, 'title', None),
author_name=author_name,
···
# Parse timestamps
updated = self._parse_timestamp(entry.get('updated_parsed') or entry.get('published_parsed'))
published = self._parse_timestamp(entry.get('published_parsed'))
-
+
# Parse content
content = self._extract_content(entry)
content_type = self._extract_content_type(entry)
-
+
# Parse author
author = self._extract_author(entry)
-
+
# Parse categories/tags
categories = []
if hasattr(entry, 'tags'):
categories = [tag.get('term', '') for tag in entry.tags if tag.get('term')]
-
+
# Sanitize HTML content
if content:
content = self._sanitize_html(content)
-
+
summary = entry.get('summary', '')
if summary:
summary = self._sanitize_html(summary)
-
+
return AtomEntry(
id=entry.get('id', entry.get('link', '')),
title=entry.get('title', ''),
···
return content_item.get('value', '')
# Fallback to first content item
return entry.content[0].get('value', '')
-
+
# Fallback to summary
return entry.get('summary', '')
···
def _extract_author(self, entry: feedparser.FeedParserDict) -> Optional[dict]:
"""Extract author information from entry."""
author = {}
-
+
if hasattr(entry, 'author_detail'):
author.update({
'name': entry.author_detail.get('name'),
···
})
elif hasattr(entry, 'author'):
author['name'] = entry.author
-
+
return author if author else None
def _sanitize_html(self, html: str) -> str:
···
"""Sanitize entry ID to be a safe filename."""
# Parse URL to get meaningful parts
parsed = urlparse(entry_id)
-
+
# Start with the path component
if parsed.path:
# Remove leading slash and replace problematic characters
···
else:
# Use the entire ID as fallback
safe_id = entry_id
-
+
# Replace problematic characters
safe_chars = []
for char in safe_id:
···
safe_chars.append(char)
else:
safe_chars.append('_')
-
+
safe_id = ''.join(safe_chars)
-
+
# Ensure it's not too long (max 200 chars)
if len(safe_id) > 200:
safe_id = safe_id[:200]
-
+
# Ensure it's not empty
if not safe_id:
safe_id = "entry"
-
-
return safe_id
+
+
return safe_id
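Traced by hand from the logic above (the exact safe-character set is elided in this view), sanitize_entry_id behaves roughly as follows:

from thicket.core.feed_parser import FeedParser

parser = FeedParser()

# URL-shaped IDs keep their path component, with characters outside the
# safe set replaced by "_" and the result capped at 200 characters
parser.sanitize_entry_id("https://alice.example.com/2024/05/hello")

# non-URL IDs are used whole, with the same character replacement
parser.sanitize_entry_id("tag:alice.example.com,2024:hello")

# empty input falls back to a fixed name
assert parser.sanitize_entry_id("") == "entry"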
+55 -66
src/thicket/core/git_store.py
···
"""Ensure the Git repository exists and is initialized."""
if not self.repo_path.exists():
self.repo_path.mkdir(parents=True, exist_ok=True)
-
+
try:
self.repo = Repo(self.repo_path)
except git.InvalidGitRepositoryError:
···
last_updated=datetime.now(),
)
self._save_index(index)
-
+
# Create duplicates.json
duplicates = DuplicateMap()
self._save_duplicates(duplicates)
-
+
# Create initial commit
self.repo.index.add(["index.json", "duplicates.json"])
self.repo.index.commit("Initial thicket repository structure")
···
"""Save the index to index.json."""
index_path = self.repo_path / "index.json"
with open(index_path, "w") as f:
-
json.dump(index.model_dump(mode="json"), f, indent=2, default=str)
+
json.dump(index.model_dump(mode="json", exclude_none=True), f, indent=2, default=str)
def _load_index(self) -> GitStoreIndex:
"""Load the index from index.json."""
···
created=datetime.now(),
last_updated=datetime.now(),
)
-
+
with open(index_path) as f:
data = json.load(f)
-
+
return GitStoreIndex(**data)
def _save_duplicates(self, duplicates: DuplicateMap) -> None:
"""Save duplicates map to duplicates.json."""
duplicates_path = self.repo_path / "duplicates.json"
with open(duplicates_path, "w") as f:
-
json.dump(duplicates.model_dump(), f, indent=2)
+
json.dump(duplicates.model_dump(exclude_none=True), f, indent=2)
def _load_duplicates(self) -> DuplicateMap:
"""Load duplicates map from duplicates.json."""
duplicates_path = self.repo_path / "duplicates.json"
if not duplicates_path.exists():
return DuplicateMap()
-
+
with open(duplicates_path) as f:
data = json.load(f)
-
+
return DuplicateMap(**data)
def add_user(self, username: str, display_name: Optional[str] = None,
···
icon: Optional[str] = None, feeds: Optional[list[str]] = None) -> UserMetadata:
"""Add a new user to the Git store."""
index = self._load_index()
-
+
# Create user directory
user_dir = self.repo_path / username
user_dir.mkdir(exist_ok=True)
-
+
# Create user metadata
user_metadata = UserMetadata(
username=username,
···
created=datetime.now(),
last_updated=datetime.now(),
)
-
-
# Save user metadata
-
metadata_path = user_dir / "metadata.json"
-
with open(metadata_path, "w") as f:
-
json.dump(user_metadata.model_dump(mode="json"), f, indent=2, default=str)
-
+
+
# Update index
index.add_user(user_metadata)
self._save_index(index)
-
+
return user_metadata
def get_user(self, username: str) -> Optional[UserMetadata]:
···
"""Update user metadata."""
index = self._load_index()
user = index.get_user(username)
-
+
if not user:
return False
-
+
# Update user metadata
for key, value in kwargs.items():
if hasattr(user, key) and value is not None:
setattr(user, key, value)
-
+
user.update_timestamp()
-
-
# Save user metadata
-
user_dir = self.repo_path / user.directory
-
metadata_path = user_dir / "metadata.json"
-
with open(metadata_path, "w") as f:
-
json.dump(user.model_dump(mode="json"), f, indent=2, default=str)
-
+
+
# Update index
index.add_user(user)
self._save_index(index)
-
+
return True
def store_entry(self, username: str, entry: AtomEntry) -> bool:
···
user = self.get_user(username)
if not user:
return False
-
+
# Sanitize entry ID for filename
from .feed_parser import FeedParser
parser = FeedParser()
safe_id = parser.sanitize_entry_id(entry.id)
-
+
# Create entry file
user_dir = self.repo_path / user.directory
entry_path = user_dir / f"{safe_id}.json"
-
+
# Check if entry already exists
entry_exists = entry_path.exists()
-
+
# Save entry
with open(entry_path, "w") as f:
-
json.dump(entry.model_dump(mode="json"), f, indent=2, default=str)
-
+
json.dump(entry.model_dump(mode="json", exclude_none=True), f, indent=2, default=str)
+
# Update user metadata if new entry
if not entry_exists:
-
user.increment_entry_count()
-
self.update_user(username, entry_count=user.entry_count)
-
+
index = self._load_index()
+
index.update_entry_count(username, 1)
+
self._save_index(index)
+
return True
def get_entry(self, username: str, entry_id: str) -> Optional[AtomEntry]:
···
user = self.get_user(username)
if not user:
return None
-
+
# Sanitize entry ID
from .feed_parser import FeedParser
parser = FeedParser()
safe_id = parser.sanitize_entry_id(entry_id)
-
+
entry_path = self.repo_path / user.directory / f"{safe_id}.json"
if not entry_path.exists():
return None
-
+
with open(entry_path) as f:
data = json.load(f)
-
+
return AtomEntry(**data)
def list_entries(self, username: str, limit: Optional[int] = None) -> list[AtomEntry]:
···
user = self.get_user(username)
if not user:
return []
-
+
user_dir = self.repo_path / user.directory
if not user_dir.exists():
return []
-
+
entries = []
entry_files = sorted(user_dir.glob("*.json"), key=lambda p: p.stat().st_mtime, reverse=True)
-
-
# Filter out metadata.json
-
entry_files = [f for f in entry_files if f.name != "metadata.json"]
-
+
+
if limit:
entry_files = entry_files[:limit]
-
+
for entry_file in entry_files:
try:
with open(entry_file) as f:
···
except Exception:
# Skip invalid entries
continue
-
+
return entries
def get_duplicates(self) -> DuplicateMap:
···
"""Commit all changes to the Git repository."""
if not self.repo:
return
-
+
# Add all changes
self.repo.git.add(A=True)
-
+
# Check if there are changes to commit
if self.repo.index.diff("HEAD"):
self.repo.index.commit(message)
···
"""Get statistics about the Git store."""
index = self._load_index()
duplicates = self._load_duplicates()
-
+
return {
"total_users": len(index.users),
"total_entries": index.total_entries,
···
limit: Optional[int] = None) -> list[tuple[str, AtomEntry]]:
"""Search entries by content."""
results = []
-
+
# Get users to search
index = self._load_index()
users = [index.get_user(username)] if username else list(index.users.values())
users = [u for u in users if u is not None]
-
+
for user in users:
user_dir = self.repo_path / user.directory
if not user_dir.exists():
continue
-
+
entry_files = user_dir.glob("*.json")
-
entry_files = [f for f in entry_files if f.name != "metadata.json"]
-
+
for entry_file in entry_files:
try:
with open(entry_file) as f:
data = json.load(f)
-
+
entry = AtomEntry(**data)
-
+
# Simple text search in title, summary, and content
searchable_text = " ".join(filter(None, [
entry.title,
entry.summary or "",
entry.content or "",
])).lower()
-
+
if query.lower() in searchable_text:
results.append((user.username, entry))
-
+
if limit and len(results) >= limit:
return results
-
+
except Exception:
# Skip invalid entries
continue
-
+
# Sort by updated time (newest first)
results.sort(key=lambda x: x[1].updated, reverse=True)
-
-
return results[:limit] if limit else results
+
+
return results[:limit] if limit else results
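
A minimal usage sketch for the store above, assuming the constructor takes the repository path (setup is elided in this diff):

    from pathlib import Path

    from thicket.core.git_store import GitStore

    store = GitStore(Path("/tmp/thicket-repo"))  # assumed signature
    for username, entry in store.search_entries("ocaml", limit=5):
        print(username, "-", entry.title)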
+438
src/thicket/core/reference_parser.py
···
+
"""Reference detection and parsing for blog entries."""
+
+
import re
+
from typing import Optional
+
from urllib.parse import urlparse
+
+
from ..models import AtomEntry
+
+
+
class BlogReference:
+
"""Represents a reference from one blog entry to another."""
+
+
def __init__(
+
self,
+
source_entry_id: str,
+
source_username: str,
+
target_url: str,
+
target_username: Optional[str] = None,
+
target_entry_id: Optional[str] = None,
+
):
+
self.source_entry_id = source_entry_id
+
self.source_username = source_username
+
self.target_url = target_url
+
self.target_username = target_username
+
self.target_entry_id = target_entry_id
+
+
def to_dict(self) -> dict:
+
"""Convert to dictionary for JSON serialization."""
+
result = {
+
"source_entry_id": self.source_entry_id,
+
"source_username": self.source_username,
+
"target_url": self.target_url,
+
}
+
+
# Only include optional fields if they are not None
+
if self.target_username is not None:
+
result["target_username"] = self.target_username
+
if self.target_entry_id is not None:
+
result["target_entry_id"] = self.target_entry_id
+
+
return result
+
+
@classmethod
+
def from_dict(cls, data: dict) -> "BlogReference":
+
"""Create from dictionary."""
+
return cls(
+
source_entry_id=data["source_entry_id"],
+
source_username=data["source_username"],
+
target_url=data["target_url"],
+
target_username=data.get("target_username"),
+
target_entry_id=data.get("target_entry_id"),
+
)
+
+
+
class ReferenceIndex:
+
"""Index of blog-to-blog references for creating threaded views."""
+
+
def __init__(self):
+
self.references: list[BlogReference] = []
+
self.outbound_refs: dict[
+
str, list[BlogReference]
+
] = {} # entry_id -> outbound refs
+
self.inbound_refs: dict[
+
str, list[BlogReference]
+
] = {} # entry_id -> inbound refs
+
self.user_domains: dict[str, set[str]] = {} # username -> set of domains
+
+
def add_reference(self, ref: BlogReference) -> None:
+
"""Add a reference to the index."""
+
self.references.append(ref)
+
+
# Update outbound references
+
source_key = f"{ref.source_username}:{ref.source_entry_id}"
+
if source_key not in self.outbound_refs:
+
self.outbound_refs[source_key] = []
+
self.outbound_refs[source_key].append(ref)
+
+
# Update inbound references if we can identify the target
+
if ref.target_username and ref.target_entry_id:
+
target_key = f"{ref.target_username}:{ref.target_entry_id}"
+
if target_key not in self.inbound_refs:
+
self.inbound_refs[target_key] = []
+
self.inbound_refs[target_key].append(ref)
+
+
def get_outbound_refs(self, username: str, entry_id: str) -> list[BlogReference]:
+
"""Get all outbound references from an entry."""
+
key = f"{username}:{entry_id}"
+
return self.outbound_refs.get(key, [])
+
+
def get_inbound_refs(self, username: str, entry_id: str) -> list[BlogReference]:
+
"""Get all inbound references to an entry."""
+
key = f"{username}:{entry_id}"
+
return self.inbound_refs.get(key, [])
+
+
def get_thread_members(self, username: str, entry_id: str) -> set[tuple[str, str]]:
+
"""Get all entries that are part of the same thread."""
+
visited = set()
+
to_visit = [(username, entry_id)]
+
thread_members = set()
+
+
while to_visit:
+
current_user, current_entry = to_visit.pop()
+
if (current_user, current_entry) in visited:
+
continue
+
+
visited.add((current_user, current_entry))
+
thread_members.add((current_user, current_entry))
+
+
# Add outbound references
+
for ref in self.get_outbound_refs(current_user, current_entry):
+
if ref.target_username and ref.target_entry_id:
+
to_visit.append((ref.target_username, ref.target_entry_id))
+
+
# Add inbound references
+
for ref in self.get_inbound_refs(current_user, current_entry):
+
to_visit.append((ref.source_username, ref.source_entry_id))
+
+
return thread_members
+
+
def to_dict(self) -> dict:
+
"""Convert to dictionary for JSON serialization."""
+
return {
+
"references": [ref.to_dict() for ref in self.references],
+
"user_domains": {k: list(v) for k, v in self.user_domains.items()},
+
}
+
+
@classmethod
+
def from_dict(cls, data: dict) -> "ReferenceIndex":
+
"""Create from dictionary."""
+
index = cls()
+
for ref_data in data.get("references", []):
+
ref = BlogReference.from_dict(ref_data)
+
index.add_reference(ref)
+
+
for username, domains in data.get("user_domains", {}).items():
+
index.user_domains[username] = set(domains)
+
+
return index
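
A minimal sketch of the thread traversal this index enables: two entries linked by one resolved reference collapse into a single thread, whichever end you start from.

    idx = ReferenceIndex()
    idx.add_reference(BlogReference(
        source_entry_id="post-a", source_username="alice",
        target_url="https://bob.example/post-b",
        target_username="bob", target_entry_id="post-b",
    ))
    # Both directions reach the same two-member thread.
    assert idx.get_thread_members("alice", "post-a") == \
           idx.get_thread_members("bob", "post-b") == \
           {("alice", "post-a"), ("bob", "post-b")}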
+
+
+
class ReferenceParser:
+
"""Parses blog entries to detect references to other blogs."""
+
+
def __init__(self):
+
# Common blog platforms and patterns
+
self.blog_patterns = [
+
r"https?://[^/]+\.(?:org|com|net|io|dev|me|co\.uk)/.*", # Common blog domains
+
r"https?://[^/]+\.github\.io/.*", # GitHub Pages
+
r"https?://[^/]+\.substack\.com/.*", # Substack
+
r"https?://medium\.com/.*", # Medium
+
r"https?://[^/]+\.wordpress\.com/.*", # WordPress.com
+
r"https?://[^/]+\.blogspot\.com/.*", # Blogger
+
]
+
+
# Compile regex patterns
+
self.link_pattern = re.compile(
+
r'<a[^>]+href="([^"]+)"[^>]*>(.*?)</a>', re.IGNORECASE | re.DOTALL
+
)
+
self.url_pattern = re.compile(r'https?://[^\s<>"]+')
+
+
def extract_links_from_html(self, html_content: str) -> list[tuple[str, str]]:
+
"""Extract all links from HTML content."""
+
links = []
+
+
# Extract links from <a> tags
+
for match in self.link_pattern.finditer(html_content):
+
url = match.group(1)
+
text = re.sub(
+
r"<[^>]+>", "", match.group(2)
+
).strip() # Remove HTML tags from link text
+
links.append((url, text))
+
+
return links
+
+
def is_blog_url(self, url: str) -> bool:
+
"""Check if a URL likely points to a blog post."""
+
for pattern in self.blog_patterns:
+
if re.match(pattern, url):
+
return True
+
return False
+
+
def _is_likely_blog_post_url(self, url: str) -> bool:
+
"""Check if a same-domain URL likely points to a blog post (not CSS, images, etc.)."""
+
parsed_url = urlparse(url)
+
path = parsed_url.path.lower()
+
+
# Skip obvious non-blog content
+
if any(path.endswith(ext) for ext in ['.css', '.js', '.png', '.jpg', '.jpeg', '.gif', '.svg', '.ico', '.pdf', '.xml', '.json']):
+
return False
+
+
# Skip common non-blog paths
+
if any(segment in path for segment in ['/static/', '/assets/', '/css/', '/js/', '/images/', '/img/', '/media/', '/uploads/']):
+
return False
+
+
# Skip fragment-only links (same page anchors)
+
if not path or path == '/':
+
return False
+
+
# Look for positive indicators of blog posts
+
# Common blog post patterns: dates, slugs, post indicators
+
blog_indicators = [
+
r'/\d{4}/', # Year in path
+
r'/\d{4}/\d{2}/', # Year/month in path
+
r'/blog/',
+
r'/post/',
+
r'/posts/',
+
r'/articles?/',
+
r'/notes?/',
+
r'/entries/',
+
r'/writing/',
+
]
+
+
for pattern in blog_indicators:
+
if re.search(pattern, path):
+
return True
+
+
# If it has a reasonable path depth and doesn't match exclusions, likely a blog post
+
path_segments = [seg for seg in path.split('/') if seg]
+
return len(path_segments) >= 1 # At least one meaningful path segment
+
+
def resolve_target_user(
+
self, url: str, user_domains: dict[str, set[str]]
+
) -> Optional[str]:
+
"""Try to resolve a URL to a known user based on domain mapping."""
+
parsed_url = urlparse(url)
+
domain = parsed_url.netloc.lower()
+
+
for username, domains in user_domains.items():
+
if domain in domains:
+
return username
+
+
return None
+
+
def extract_references(
+
self, entry: AtomEntry, username: str, user_domains: dict[str, set[str]]
+
) -> list[BlogReference]:
+
"""Extract all blog references from an entry."""
+
references = []
+
+
# Combine all text content for analysis
+
content_to_search = []
+
if entry.content:
+
content_to_search.append(entry.content)
+
if entry.summary:
+
content_to_search.append(entry.summary)
+
+
for content in content_to_search:
+
links = self.extract_links_from_html(content)
+
+
for url, _link_text in links:
+
entry_domain = (
+
urlparse(str(entry.link)).netloc.lower() if entry.link else ""
+
)
+
link_domain = urlparse(url).netloc.lower()
+
+
# Check if this looks like a blog URL
+
if not self.is_blog_url(url):
+
continue
+
+
# For same-domain links, apply additional filtering to avoid non-blog content
+
if link_domain == entry_domain:
+
# Only include same-domain links that look like blog posts
+
if not self._is_likely_blog_post_url(url):
+
continue
+
+
# Try to resolve to a known user
+
if link_domain == entry_domain:
+
# Same domain - target user is the same as source user
+
target_username: Optional[str] = username
+
else:
+
# Different domain - try to resolve
+
target_username = self.resolve_target_user(url, user_domains)
+
+
ref = BlogReference(
+
source_entry_id=entry.id,
+
source_username=username,
+
target_url=url,
+
target_username=target_username,
+
target_entry_id=None, # Will be resolved later if possible
+
)
+
+
references.append(ref)
+
+
return references
+
+
def build_user_domain_mapping(self, git_store: "GitStore") -> dict[str, set[str]]:
+
"""Build mapping of usernames to their known domains."""
+
user_domains = {}
+
index = git_store._load_index()
+
+
for username, user_metadata in index.users.items():
+
domains = set()
+
+
# Add domains from feeds
+
for feed_url in user_metadata.feeds:
+
domain = urlparse(feed_url).netloc.lower()
+
if domain:
+
domains.add(domain)
+
+
# Add domain from homepage
+
if user_metadata.homepage:
+
domain = urlparse(str(user_metadata.homepage)).netloc.lower()
+
if domain:
+
domains.add(domain)
+
+
user_domains[username] = domains
+
+
return user_domains
+
+
def _build_url_to_entry_mapping(self, git_store: "GitStore") -> dict[str, str]:
+
"""Build a comprehensive mapping from URLs to entry IDs using git store data.
+
+
This creates a many-to-one mapping that handles:
+
- Entry link URLs -> Entry IDs
+
- URL variations (with/without www, http/https)
+
- Multiple URLs pointing to the same entry
+
"""
+
url_to_entry: dict[str, str] = {}
+
+
# Load index to get all users
+
index = git_store._load_index()
+
+
for username in index.users.keys():
+
entries = git_store.list_entries(username)
+
+
for entry in entries:
+
if entry.link:
+
link_url = str(entry.link)
+
entry_id = entry.id
+
+
# Map the canonical link URL
+
url_to_entry[link_url] = entry_id
+
+
# Handle common URL variations
+
parsed = urlparse(link_url)
+
if parsed.netloc and parsed.path:
+
# Add version without www
+
if parsed.netloc.startswith('www.'):
+
no_www_url = f"{parsed.scheme}://{parsed.netloc[4:]}{parsed.path}"
+
if parsed.query:
+
no_www_url += f"?{parsed.query}"
+
if parsed.fragment:
+
no_www_url += f"#{parsed.fragment}"
+
url_to_entry[no_www_url] = entry_id
+
+
# Add version with www if not present
+
elif not parsed.netloc.startswith('www.'):
+
www_url = f"{parsed.scheme}://www.{parsed.netloc}{parsed.path}"
+
if parsed.query:
+
www_url += f"?{parsed.query}"
+
if parsed.fragment:
+
www_url += f"#{parsed.fragment}"
+
url_to_entry[www_url] = entry_id
+
+
# Add http/https variations
+
if parsed.scheme == 'https':
+
http_url = link_url.replace('https://', 'http://', 1)
+
url_to_entry[http_url] = entry_id
+
elif parsed.scheme == 'http':
+
https_url = link_url.replace('http://', 'https://', 1)
+
url_to_entry[https_url] = entry_id
+
+
return url_to_entry
+
+
def _normalize_url(self, url: str) -> str:
+
"""Normalize URL for consistent matching.
+
+
Handles common variations like trailing slashes, fragments, etc.
+
"""
+
parsed = urlparse(url)
+
+
# Remove trailing slash from path
+
path = parsed.path.rstrip('/') if parsed.path != '/' else parsed.path
+
+
# Reconstruct without fragment for consistent matching
+
normalized = f"{parsed.scheme}://{parsed.netloc}{path}"
+
if parsed.query:
+
normalized += f"?{parsed.query}"
+
+
return normalized
+
+
def resolve_target_entry_ids(
+
self, references: list[BlogReference], git_store: "GitStore"
+
) -> list[BlogReference]:
+
"""Resolve target_entry_id for references using comprehensive URL mapping."""
+
resolved_refs = []
+
+
# Build comprehensive URL to entry ID mapping
+
url_to_entry = self._build_url_to_entry_mapping(git_store)
+
+
for ref in references:
+
# If we already have a target_entry_id, keep the reference as-is
+
if ref.target_entry_id is not None:
+
resolved_refs.append(ref)
+
continue
+
+
# If we don't have a target_username, we can't resolve it
+
if ref.target_username is None:
+
resolved_refs.append(ref)
+
continue
+
+
# Try to resolve using URL mapping
+
resolved_entry_id = None
+
+
# First, try exact match
+
if ref.target_url in url_to_entry:
+
resolved_entry_id = url_to_entry[ref.target_url]
+
else:
+
# Try normalized URL matching
+
normalized_target = self._normalize_url(ref.target_url)
+
if normalized_target in url_to_entry:
+
resolved_entry_id = url_to_entry[normalized_target]
+
else:
+
# Try URL variations
+
for mapped_url, entry_id in url_to_entry.items():
+
if self._normalize_url(mapped_url) == normalized_target:
+
resolved_entry_id = entry_id
+
break
+
+
# Verify the resolved entry belongs to the target username
+
if resolved_entry_id:
+
# Double-check by loading the actual entry
+
entries = git_store.list_entries(ref.target_username)
+
entry_found = any(entry.id == resolved_entry_id for entry in entries)
+
if not entry_found:
+
resolved_entry_id = None
+
+
# Create a new reference with the resolved target_entry_id
+
resolved_ref = BlogReference(
+
source_entry_id=ref.source_entry_id,
+
source_username=ref.source_username,
+
target_url=ref.target_url,
+
target_username=ref.target_username,
+
target_entry_id=resolved_entry_id,
+
)
+
resolved_refs.append(resolved_ref)
+
+
return resolved_refs
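
One behaviour worth noting from the matching above (a sketch): normalization ignores trailing slashes and fragments, so these resolve to the same key.

    parser = ReferenceParser()
    a = parser._normalize_url("https://example.org/post/")
    b = parser._normalize_url("https://example.org/post#comments")
    assert a == b == "https://example.org/post"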
+1 -1
src/thicket/models/__init__.py
···
"FeedMetadata",
"GitStoreIndex",
"UserMetadata",
-
]
+
]
+28 -38
src/thicket/models/config.py
···
"""Configuration models for thicket."""
+
import json
+
import yaml
from pathlib import Path
-
from typing import Optional
+
from typing import Optional, Union
-
from pydantic import BaseModel, EmailStr, HttpUrl
+
from pydantic import BaseModel, EmailStr, HttpUrl, ValidationError
from pydantic_settings import BaseSettings, SettingsConfigDict
···
cache_dir: Path
users: list[UserConfig] = []
-
def find_user(self, username: str) -> Optional[UserConfig]:
-
"""Find a user by username."""
-
for user in self.users:
-
if user.username == username:
-
return user
-
return None
-
-
def add_user(self, user: UserConfig) -> None:
-
"""Add a new user or update existing user."""
-
existing = self.find_user(user.username)
-
if existing:
-
# Update existing user
-
existing.feeds = list(set(existing.feeds + user.feeds))
-
existing.email = user.email or existing.email
-
existing.homepage = user.homepage or existing.homepage
-
existing.icon = user.icon or existing.icon
-
existing.display_name = user.display_name or existing.display_name
+
@classmethod
+
def from_file(cls, config_path: Path) -> 'ThicketConfig':
+
"""Load configuration from a file."""
+
if not config_path.exists():
+
raise FileNotFoundError(f"Configuration file not found: {config_path}")
+
+
content = config_path.read_text(encoding='utf-8')
+
+
if config_path.suffix.lower() in ['.yaml', '.yml']:
+
try:
+
data = yaml.safe_load(content)
+
except yaml.YAMLError as e:
+
raise ValueError(f"Invalid YAML in {config_path}: {e}")
+
elif config_path.suffix.lower() == '.json':
+
try:
+
data = json.loads(content)
+
except json.JSONDecodeError as e:
+
raise ValueError(f"Invalid JSON in {config_path}: {e}")
else:
-
# Add new user
-
self.users.append(user)
-
-
def remove_user(self, username: str) -> bool:
-
"""Remove a user by username. Returns True if user was found and removed."""
-
for i, user in enumerate(self.users):
-
if user.username == username:
-
del self.users[i]
-
return True
-
return False
-
-
def add_feed_to_user(self, username: str, feed_url: HttpUrl) -> bool:
-
"""Add a feed to an existing user. Returns True if user was found."""
-
user = self.find_user(username)
-
if user:
-
if feed_url not in user.feeds:
-
user.feeds.append(feed_url)
-
return True
-
return False
+
raise ValueError(f"Unsupported configuration file format: {config_path.suffix}")
+
+
try:
+
return cls(**data)
+
except ValidationError as e:
+
raise ValueError(f"Configuration validation error: {e}")
+5 -2
src/thicket/models/feed.py
···
"""Feed and entry models for thicket."""
from datetime import datetime
-
from typing import Optional
+
from typing import TYPE_CHECKING, Optional
from pydantic import BaseModel, ConfigDict, EmailStr, HttpUrl
+
+
if TYPE_CHECKING:
+
from .config import UserConfig
class AtomEntry(BaseModel):
···
duplicate_id
for duplicate_id, canonical in self.duplicates.items()
if canonical == canonical_id
-
]
+
]
+1 -2
src/thicket/models/user.py
···
"""Update entry count for a user and total."""
user = self.get_user(username)
if user:
-
old_count = user.entry_count
user.increment_entry_count(count)
self.total_entries += count
self.last_updated = datetime.now()
···
def recalculate_totals(self) -> None:
"""Recalculate total entries from all users."""
self.total_entries = sum(user.entry_count for user in self.users.values())
-
self.last_updated = datetime.now()
+
self.last_updated = datetime.now()
+1
src/thicket/subsystems/__init__.py
···
+
"""Thicket subsystems for specialized operations."""
+227
src/thicket/subsystems/feeds.py
···
+
"""Feed management subsystem."""
+
+
import json
+
from datetime import datetime
+
from typing import Callable, Optional
+
+
from pydantic import HttpUrl
+
+
from ..core.feed_parser import FeedParser
+
from ..core.git_store import GitStore
+
from ..models import AtomEntry, ThicketConfig
+
+
+
class FeedManager:
+
"""Manages feed operations and caching."""
+
+
def __init__(self, git_store: GitStore, feed_parser: FeedParser, config: ThicketConfig):
+
"""Initialize feed manager."""
+
self.git_store = git_store
+
self.feed_parser = feed_parser
+
self.config = config
+
self._ensure_cache_dir()
+
+
def _ensure_cache_dir(self):
+
"""Ensure cache directory exists."""
+
self.config.cache_dir.mkdir(parents=True, exist_ok=True)
+
+
async def sync_feeds(self, username: Optional[str] = None, progress_callback: Optional[Callable] = None) -> dict:
+
"""Sync feeds for all users or specific user."""
+
if username:
+
return await self.sync_user_feeds(username, progress_callback)
+
+
# Sync all users
+
results = {}
+
total_users = len(self.config.users)
+
+
for i, user_config in enumerate(self.config.users):
+
if progress_callback:
+
progress_callback(f"Syncing feeds for {user_config.username}", i, total_users)
+
+
user_results = await self.sync_user_feeds(user_config.username, progress_callback)
+
results[user_config.username] = user_results
+
+
return results
+
+
async def sync_user_feeds(self, username: str, progress_callback: Optional[Callable] = None) -> dict:
+
"""Sync feeds for a specific user."""
+
user_config = next((u for u in self.config.users if u.username == username), None)
+
if not user_config:
+
return {'error': f'User {username} not found in configuration'}
+
+
# Ensure user exists in git store
+
git_user = self.git_store.get_user(username)
+
if not git_user:
+
self.git_store.add_user(
+
username=user_config.username,
+
display_name=user_config.display_name,
+
email=str(user_config.email) if user_config.email else None,
+
homepage=str(user_config.homepage) if user_config.homepage else None,
+
icon=str(user_config.icon) if user_config.icon else None,
+
feeds=[str(feed) for feed in user_config.feeds]
+
)
+
+
results = {
+
'username': username,
+
'feeds_processed': 0,
+
'new_entries': 0,
+
'errors': [],
+
'feeds': {}
+
}
+
+
total_feeds = len(user_config.feeds)
+
+
for i, feed_url in enumerate(user_config.feeds):
+
if progress_callback:
+
progress_callback(f"Processing feed {i+1}/{total_feeds} for {username}", i, total_feeds)
+
+
try:
+
feed_result = await self._sync_single_feed(username, feed_url)
+
results['feeds'][str(feed_url)] = feed_result
+
results['feeds_processed'] += 1
+
results['new_entries'] += feed_result.get('new_entries', 0)
+
except Exception as e:
+
error_msg = f"Error syncing {feed_url}: {str(e)}"
+
results['errors'].append(error_msg)
+
results['feeds'][str(feed_url)] = {'error': error_msg}
+
+
return results
+
+
async def _sync_single_feed(self, username: str, feed_url: HttpUrl) -> dict:
+
"""Sync a single feed for a user."""
+
cache_key = self._get_cache_key(username, feed_url)
+
last_modified = self._get_last_modified(cache_key)
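+
# NOTE: last_modified is informational for now; fetch_feed below does not
+
# yet send conditional request headers (e.g. If-Modified-Since).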
+
+
try:
+
# Fetch feed content
+
content = await self.feed_parser.fetch_feed(feed_url)
+
+
# Parse feed
+
feed_meta, entries = self.feed_parser.parse_feed(content, feed_url)
+
+
# Filter new entries
+
new_entries = []
+
for entry in entries:
+
existing_entry = self.git_store.get_entry(username, entry.id)
+
if not existing_entry:
+
new_entries.append(entry)
+
+
# Store new entries
+
stored_count = 0
+
for entry in new_entries:
+
if self.git_store.store_entry(username, entry):
+
stored_count += 1
+
+
# Update cache
+
self._update_cache(cache_key, {
+
'last_fetched': datetime.now().isoformat(),
+
'feed_meta': feed_meta.model_dump(exclude_none=True),
+
'entry_count': len(entries),
+
'new_entries': stored_count,
+
'feed_url': str(feed_url)
+
})
+
+
return {
+
'success': True,
+
'total_entries': len(entries),
+
'new_entries': stored_count,
+
'feed_title': feed_meta.title,
+
'last_fetched': datetime.now().isoformat()
+
}
+
+
except Exception as e:
+
return {
+
'success': False,
+
'error': str(e),
+
'feed_url': str(feed_url)
+
}
+
+
def get_entries(self, username: str, limit: Optional[int] = None) -> list[AtomEntry]:
+
"""Get entries for a user."""
+
return self.git_store.list_entries(username, limit)
+
+
def get_entry(self, username: str, entry_id: str) -> Optional[AtomEntry]:
+
"""Get a specific entry."""
+
return self.git_store.get_entry(username, entry_id)
+
+
def search_entries(self, query: str, username: Optional[str] = None, limit: Optional[int] = None) -> list[tuple[str, AtomEntry]]:
+
"""Search entries across users."""
+
return self.git_store.search_entries(query, username, limit)
+
+
def get_stats(self) -> dict:
+
"""Get feed-related statistics."""
+
index = self.git_store._load_index()
+
+
feed_stats = {
+
'total_feeds_configured': sum(len(user.feeds) for user in self.config.users),
+
'users_with_entries': len([u for u in index.users.values() if u.entry_count > 0]),
+
'cache_files': len(list(self.config.cache_dir.glob("*.json"))) if self.config.cache_dir.exists() else 0,
+
}
+
+
return feed_stats
+
+
def _get_cache_key(self, username: str, feed_url: HttpUrl) -> str:
+
"""Generate cache key for feed."""
+
# Simple hash of username and feed URL
+
import hashlib
+
key_data = f"{username}:{str(feed_url)}"
+
return hashlib.md5(key_data.encode()).hexdigest()
+
+
def _get_last_modified(self, cache_key: str) -> Optional[datetime]:
+
"""Get last modified time from cache."""
+
cache_file = self.config.cache_dir / f"{cache_key}.json"
+
if cache_file.exists():
+
try:
+
with open(cache_file) as f:
+
data = json.load(f)
+
return datetime.fromisoformat(data.get('last_fetched', ''))
+
except Exception:
+
pass
+
return None
+
+
def _update_cache(self, cache_key: str, data: dict):
+
"""Update cache with feed data."""
+
cache_file = self.config.cache_dir / f"{cache_key}.json"
+
try:
+
with open(cache_file, 'w') as f:
+
json.dump(data, f, indent=2)
+
except Exception:
+
# Cache update failure shouldn't break the sync
+
pass
+
+
def clear_cache(self, username: Optional[str] = None) -> bool:
+
"""Clear feed cache."""
+
try:
+
if username:
+
# Clear cache for specific user
+
for user_config in self.config.users:
+
if user_config.username == username:
+
for feed_url in user_config.feeds:
+
cache_key = self._get_cache_key(username, feed_url)
+
cache_file = self.config.cache_dir / f"{cache_key}.json"
+
if cache_file.exists():
+
cache_file.unlink()
+
else:
+
# Clear all cache
+
if self.config.cache_dir.exists():
+
for cache_file in self.config.cache_dir.glob("*.json"):
+
cache_file.unlink()
+
return True
+
except Exception:
+
return False
+
+
def get_feed_info(self, username: str, feed_url: str) -> Optional[dict]:
+
"""Get cached information about a specific feed."""
+
try:
+
feed_url_obj = HttpUrl(feed_url)
+
cache_key = self._get_cache_key(username, feed_url_obj)
+
cache_file = self.config.cache_dir / f"{cache_key}.json"
+
+
if cache_file.exists():
+
with open(cache_file) as f:
+
return json.load(f)
+
except Exception:
+
pass
+
return None
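
A sketch of wiring the manager end-to-end (constructor signatures inferred from this diff; config loading as in the from_file example above):

    import asyncio
    from pathlib import Path

    from thicket.core.feed_parser import FeedParser
    from thicket.core.git_store import GitStore
    from thicket.models import ThicketConfig

    config = ThicketConfig.from_file(Path("thicket.yaml"))
    store = GitStore(config.git_store)  # assumed constructor signature
    manager = FeedManager(store, FeedParser(), config)

    results = asyncio.run(manager.sync_feeds())
    for user, res in results.items():
        print(user, res.get("new_entries"))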
+304
src/thicket/subsystems/links.py
···
+
"""Link processing subsystem."""
+
+
import json
+
import re
+
from collections import defaultdict
+
from typing import Optional
+
from urllib.parse import urljoin, urlparse
+
+
from ..core.git_store import GitStore
+
from ..models import AtomEntry, ThicketConfig
+
+
+
class LinkProcessor:
+
"""Processes and manages links between entries."""
+
+
def __init__(self, git_store: GitStore, config: ThicketConfig):
+
"""Initialize link processor."""
+
self.git_store = git_store
+
self.config = config
+
self.links_file = self.git_store.repo_path / "links.json"
+
+
def process_links(self, username: Optional[str] = None) -> dict:
+
"""Process and extract links from entries."""
+
if username:
+
return self._process_user_links(username)
+
+
# Process all users
+
results = {}
+
index = self.git_store._load_index()
+
+
for user_metadata in index.users.values():
+
user_results = self._process_user_links(user_metadata.username)
+
results[user_metadata.username] = user_results
+
+
# Consolidate all links
+
self._consolidate_links()
+
+
return results
+
+
def _process_user_links(self, username: str) -> dict:
+
"""Process links for a specific user."""
+
entries = self.git_store.list_entries(username)
+
+
results = {
+
'username': username,
+
'entries_processed': 0,
+
'links_found': 0,
+
'external_links': 0,
+
'internal_links': 0,
+
}
+
+
links_data = self._load_links_data()
+
+
for entry in entries:
+
entry_links = self._extract_links_from_entry(entry)
+
+
if entry_links:
+
# Store links for this entry
+
entry_key = f"{username}:{entry.id}"
+
links_data[entry_key] = {
+
'entry_id': entry.id,
+
'username': username,
+
'title': entry.title,
+
'links': entry_links,
+
'processed_at': entry.updated.isoformat() if entry.updated else None,
+
}
+
+
results['links_found'] += len(entry_links)
+
results['external_links'] += len([link for link in entry_links if self._is_external_link(link['url'])])
+
results['internal_links'] += len([link for link in entry_links if not self._is_external_link(link['url'])])
+
+
results['entries_processed'] += 1
+
+
self._save_links_data(links_data)
+
+
return results
+
+
def _extract_links_from_entry(self, entry: AtomEntry) -> list[dict]:
+
"""Extract links from an entry's content."""
+
links = []
+
+
# Combine content and summary for link extraction
+
text_content = ""
+
if entry.content:
+
text_content += entry.content
+
if entry.summary:
+
text_content += " " + entry.summary
+
+
if not text_content:
+
return links
+
+
# Extract HTML links
+
html_link_pattern = r'<a[^>]+href=["\']([^"\']+)["\'][^>]*>([^<]*)</a>'
+
html_matches = re.findall(html_link_pattern, text_content, re.IGNORECASE)
+
+
for url, text in html_matches:
+
# Clean up the URL
+
url = url.strip()
+
text = text.strip()
+
+
if url and url not in ['#', 'javascript:void(0)']:
+
# Resolve relative URLs if possible
+
if entry.link and url.startswith('/'):
+
base_url = str(entry.link)
+
parsed_base = urlparse(base_url)
+
base_domain = f"{parsed_base.scheme}://{parsed_base.netloc}"
+
url = urljoin(base_domain, url)
+
+
links.append({
+
'url': url,
+
'text': text or url,
+
'type': 'html'
+
})
+
+
# Extract markdown links
+
markdown_link_pattern = r'\[([^\]]*)\]\(([^\)]+)\)'
+
markdown_matches = re.findall(markdown_link_pattern, text_content)
+
+
for text, url in markdown_matches:
+
url = url.strip()
+
text = text.strip()
+
+
if url and url not in ['#']:
+
links.append({
+
'url': url,
+
'text': text or url,
+
'type': 'markdown'
+
})
+
+
# Extract plain URLs
+
url_pattern = r'https?://[^\s<>"]+[^\s<>".,;!?]'
+
url_matches = re.findall(url_pattern, text_content)
+
+
for url in url_matches:
+
# Skip if already found as HTML or markdown link
+
if not any(link['url'] == url for link in links):
+
links.append({
+
'url': url,
+
'text': url,
+
'type': 'plain'
+
})
+
+
return links
+
+
def _is_external_link(self, url: str) -> bool:
+
"""Check if a link is external to the configured domains."""
+
try:
+
parsed = urlparse(url)
+
domain = parsed.netloc.lower()
+
+
# Check against user domains from feeds
+
for user_config in self.config.users:
+
for feed_url in user_config.feeds:
+
feed_domain = urlparse(str(feed_url)).netloc.lower()
+
if domain == feed_domain or domain.endswith(f'.{feed_domain}'):
+
return False
+
+
# Check homepage domain
+
if user_config.homepage:
+
homepage_domain = urlparse(str(user_config.homepage)).netloc.lower()
+
if domain == homepage_domain or domain.endswith(f'.{homepage_domain}'):
+
return False
+
+
return True
+
except Exception:
+
return True
+
+
def _load_links_data(self) -> dict:
+
"""Load existing links data."""
+
if self.links_file.exists():
+
try:
+
with open(self.links_file) as f:
+
return json.load(f)
+
except Exception:
+
pass
+
return {}
+
+
def _save_links_data(self, links_data: dict):
+
"""Save links data to file."""
+
try:
+
with open(self.links_file, 'w') as f:
+
json.dump(links_data, f, indent=2, ensure_ascii=False)
+
except Exception:
+
# Link processing failure shouldn't break the main operation
+
pass
+
+
def _consolidate_links(self):
+
"""Consolidate and create reverse link mappings."""
+
links_data = self._load_links_data()
+
+
# Create URL to entries mapping
+
url_mapping = defaultdict(list)
+
+
for entry_key, entry_data in links_data.items():
+
for link in entry_data.get('links', []):
+
url_mapping[link['url']].append({
+
'entry_key': entry_key,
+
'username': entry_data['username'],
+
'entry_id': entry_data['entry_id'],
+
'title': entry_data['title'],
+
'link_text': link['text'],
+
'link_type': link['type'],
+
})
+
+
# Save URL mapping
+
url_mapping_file = self.git_store.repo_path / "url_mapping.json"
+
try:
+
with open(url_mapping_file, 'w') as f:
+
json.dump(dict(url_mapping), f, indent=2, ensure_ascii=False)
+
except Exception:
+
pass
+
+
def get_links(self, username: Optional[str] = None) -> dict:
+
"""Get processed links."""
+
links_data = self._load_links_data()
+
+
if username:
+
user_links = {k: v for k, v in links_data.items() if v.get('username') == username}
+
return user_links
+
+
return links_data
+
+
def find_references(self, url: str) -> list[tuple[str, AtomEntry]]:
+
"""Find entries that reference a URL."""
+
url_mapping_file = self.git_store.repo_path / "url_mapping.json"
+
+
if not url_mapping_file.exists():
+
return []
+
+
try:
+
with open(url_mapping_file) as f:
+
url_mapping = json.load(f)
+
+
references = url_mapping.get(url, [])
+
results = []
+
+
for ref in references:
+
entry = self.git_store.get_entry(ref['username'], ref['entry_id'])
+
if entry:
+
results.append((ref['username'], entry))
+
+
return results
+
except Exception:
+
return []
+
+
def get_stats(self) -> dict:
+
"""Get link processing statistics."""
+
links_data = self._load_links_data()
+
+
total_entries_with_links = len(links_data)
+
total_links = sum(len(entry_data.get('links', [])) for entry_data in links_data.values())
+
+
external_links = 0
+
internal_links = 0
+
+
for entry_data in links_data.values():
+
for link in entry_data.get('links', []):
+
if self._is_external_link(link['url']):
+
external_links += 1
+
else:
+
internal_links += 1
+
+
# Count unique URLs
+
unique_urls = set()
+
for entry_data in links_data.values():
+
for link in entry_data.get('links', []):
+
unique_urls.add(link['url'])
+
+
return {
+
'entries_with_links': total_entries_with_links,
+
'total_links': total_links,
+
'unique_urls': len(unique_urls),
+
'external_links': external_links,
+
'internal_links': internal_links,
+
}
+
+
def get_most_referenced_urls(self, limit: int = 10) -> list[dict]:
+
"""Get most frequently referenced URLs."""
+
url_mapping_file = self.git_store.repo_path / "url_mapping.json"
+
+
if not url_mapping_file.exists():
+
return []
+
+
try:
+
with open(url_mapping_file) as f:
+
url_mapping = json.load(f)
+
+
# Count references per URL
+
url_counts = [(url, len(refs)) for url, refs in url_mapping.items()]
+
url_counts.sort(key=lambda x: x[1], reverse=True)
+
+
results = []
+
for url, count in url_counts[:limit]:
+
results.append({
+
'url': url,
+
'reference_count': count,
+
'is_external': self._is_external_link(url),
+
'references': url_mapping[url]
+
})
+
+
return results
+
except Exception:
+
return []
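
The HTML extractor above boils down to one pattern; a quick check of its behaviour (a sketch):

    import re

    html_link_pattern = r'<a[^>]+href=["\']([^"\']+)["\'][^>]*>([^<]*)</a>'
    sample = '<a href="https://a.example/post">a post</a>'
    print(re.findall(html_link_pattern, sample, re.IGNORECASE))
    # [('https://a.example/post', 'a post')]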
+158
src/thicket/subsystems/repository.py
···
+
"""Repository management subsystem."""
+
+
import shutil
+
from datetime import datetime
+
from pathlib import Path
+
from typing import Optional
+
+
from ..core.git_store import GitStore
+
from ..models import ThicketConfig
+
+
+
class RepositoryManager:
+
"""Manages repository operations and metadata."""
+
+
def __init__(self, git_store: GitStore, config: ThicketConfig):
+
"""Initialize repository manager."""
+
self.git_store = git_store
+
self.config = config
+
+
def init_repository(self) -> bool:
+
"""Initialize the git repository if not already done."""
+
try:
+
# GitStore.__init__ already handles repository initialization
+
return True
+
except Exception:
+
return False
+
+
def commit_changes(self, message: str) -> bool:
+
"""Commit all pending changes."""
+
try:
+
self.git_store.commit_changes(message)
+
return True
+
except Exception:
+
return False
+
+
def get_status(self) -> dict:
+
"""Get repository status and statistics."""
+
try:
+
stats = self.git_store.get_stats()
+
+
# Add repository-specific information
+
repo_status = {
+
**stats,
+
'repository_path': str(self.config.git_store),
+
'cache_path': str(self.config.cache_dir),
+
'has_uncommitted_changes': self._has_uncommitted_changes(),
+
'last_commit': self._get_last_commit_info(),
+
}
+
+
return repo_status
+
except Exception as e:
+
return {'error': str(e)}
+
+
def backup_repository(self, backup_path: Path) -> bool:
+
"""Create a backup of the repository."""
+
try:
+
if backup_path.exists():
+
shutil.rmtree(backup_path)
+
+
shutil.copytree(self.config.git_store, backup_path)
+
return True
+
except Exception:
+
return False
+
+
def cleanup_cache(self) -> bool:
+
"""Clean up cache directory."""
+
try:
+
if self.config.cache_dir.exists():
+
shutil.rmtree(self.config.cache_dir)
+
self.config.cache_dir.mkdir(parents=True, exist_ok=True)
+
return True
+
except Exception:
+
return False
+
+
def get_repository_size(self) -> dict:
+
"""Get detailed repository size information."""
+
try:
+
total_size = 0
+
file_count = 0
+
dir_count = 0
+
+
for path in self.config.git_store.rglob("*"):
+
if path.is_file():
+
total_size += path.stat().st_size
+
file_count += 1
+
elif path.is_dir():
+
dir_count += 1
+
+
return {
+
'total_size_bytes': total_size,
+
'total_size_mb': round(total_size / (1024 * 1024), 2),
+
'file_count': file_count,
+
'directory_count': dir_count,
+
}
+
except Exception as e:
+
return {'error': str(e)}
+
+
def _has_uncommitted_changes(self) -> bool:
+
"""Check if there are uncommitted changes."""
+
try:
+
if not self.git_store.repo:
+
return False
+
return bool(self.git_store.repo.index.diff("HEAD") or self.git_store.repo.untracked_files)
+
except Exception:
+
return False
+
+
def _get_last_commit_info(self) -> Optional[dict]:
+
"""Get information about the last commit."""
+
try:
+
if not self.git_store.repo:
+
return None
+
+
last_commit = self.git_store.repo.head.commit
+
return {
+
'hash': last_commit.hexsha[:8],
+
'message': last_commit.message.strip(),
+
'author': str(last_commit.author),
+
'date': datetime.fromtimestamp(last_commit.committed_date).isoformat(),
+
}
+
except Exception:
+
return None
+
+
def verify_integrity(self) -> dict:
+
"""Verify repository integrity."""
+
issues = []
+
+
# Check if git repository is valid
+
try:
+
if not self.git_store.repo:
+
issues.append("Git repository not initialized")
+
except Exception as e:
+
issues.append(f"Git repository error: {e}")
+
+
# Check if index.json exists and is valid
+
index_path = self.config.git_store / "index.json"
+
if not index_path.exists():
+
issues.append("index.json missing")
+
else:
+
try:
+
self.git_store._load_index()
+
except Exception as e:
+
issues.append(f"index.json corrupted: {e}")
+
+
# Check if duplicates.json exists
+
duplicates_path = self.config.git_store / "duplicates.json"
+
if not duplicates_path.exists():
+
issues.append("duplicates.json missing")
+
else:
+
try:
+
self.git_store._load_duplicates()
+
except Exception as e:
+
issues.append(f"duplicates.json corrupted: {e}")
+
+
return {
+
'is_valid': len(issues) == 0,
+
'issues': issues,
+
'checked_at': datetime.now().isoformat(),
+
}
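
A short sketch of an integrity check (store and config constructed as in the earlier examples):

    mgr = RepositoryManager(store, config)
    report = mgr.verify_integrity()
    print("ok" if report["is_valid"] else report["issues"])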
+319
src/thicket/subsystems/site.py
···
+
"""Site generation subsystem."""
+
+
import json
+
import shutil
+
from datetime import datetime
+
from pathlib import Path
+
from typing import Optional
+
+
from jinja2 import Environment, FileSystemLoader, select_autoescape
+
+
from ..core.git_store import GitStore
+
from ..models import ThicketConfig
+
+
+
class SiteGenerator:
+
"""Generates static sites from stored entries."""
+
+
def __init__(self, git_store: GitStore, config: ThicketConfig):
+
"""Initialize site generator."""
+
self.git_store = git_store
+
self.config = config
+
self.default_template_dir = Path(__file__).parent.parent / "templates"
+
+
def generate_site(self, output_dir: Path, template_dir: Optional[Path] = None) -> bool:
+
"""Generate complete static site."""
+
try:
+
# Setup template environment
+
template_dir = template_dir or self.default_template_dir
+
if not template_dir.exists():
+
return False
+
+
env = Environment(
+
loader=FileSystemLoader(str(template_dir)),
+
autoescape=select_autoescape(['html', 'xml'])
+
)
+
+
# Prepare output directory
+
output_dir.mkdir(parents=True, exist_ok=True)
+
+
# Copy static assets
+
self._copy_static_assets(template_dir, output_dir)
+
+
# Generate pages
+
self._generate_index_page(env, output_dir)
+
self._generate_timeline_page(env, output_dir)
+
self._generate_users_page(env, output_dir)
+
self._generate_links_page(env, output_dir)
+
self._generate_user_detail_pages(env, output_dir)
+
+
return True
+
except Exception:
+
return False
+
+
def generate_timeline(self, output_path: Path, limit: Optional[int] = None) -> bool:
+
"""Generate timeline HTML file."""
+
try:
+
env = Environment(
+
loader=FileSystemLoader(str(self.default_template_dir)),
+
autoescape=select_autoescape(['html', 'xml'])
+
)
+
+
timeline_data = self._get_timeline_data(limit)
+
template = env.get_template('timeline.html')
+
+
content = template.render(**timeline_data)
+
+
output_path.parent.mkdir(parents=True, exist_ok=True)
+
with open(output_path, 'w', encoding='utf-8') as f:
+
f.write(content)
+
+
return True
+
except Exception:
+
return False
+
+
def generate_user_pages(self, output_dir: Path) -> bool:
+
"""Generate individual user pages."""
+
try:
+
env = Environment(
+
loader=FileSystemLoader(str(self.default_template_dir)),
+
autoescape=select_autoescape(['html', 'xml'])
+
)
+
+
return self._generate_user_detail_pages(env, output_dir)
+
except Exception:
+
return False
+
+
def _copy_static_assets(self, template_dir: Path, output_dir: Path):
+
"""Copy CSS, JS, and other static assets."""
+
# base.html links assets as css/style.css and js/script.js, so copy
+
# each file into the matching subdirectory of the output root.
+
asset_dirs = {'style.css': 'css', 'script.js': 'js'}
+
+
for filename, subdir in asset_dirs.items():
+
src_file = template_dir / filename
+
if src_file.exists():
+
dst_dir = output_dir / subdir
+
dst_dir.mkdir(exist_ok=True)
+
shutil.copy2(src_file, dst_dir / filename)
+
+
def _generate_index_page(self, env: Environment, output_dir: Path):
+
"""Generate main index page."""
+
template = env.get_template('index.html')
+
+
# Get summary statistics
+
stats = self.git_store.get_stats()
+
index = self.git_store._load_index()
+
+
# Recent entries
+
recent_entries = []
+
for username in index.users.keys():
+
user_entries = self.git_store.list_entries(username, limit=5)
+
for entry in user_entries:
+
recent_entries.append({
+
'username': username,
+
'entry': entry
+
})
+
+
# Sort by date
+
recent_entries.sort(key=lambda x: x['entry'].updated or x['entry'].published or datetime.min, reverse=True)
+
recent_entries = recent_entries[:10]
+
+
context = {
+
'title': 'Thicket Feed Archive',
+
'stats': stats,
+
'recent_entries': recent_entries,
+
'users': list(index.users.values()),
+
'generated_at': datetime.now().isoformat(),
+
}
+
+
content = template.render(**context)
+
+
with open(output_dir / 'index.html', 'w', encoding='utf-8') as f:
+
f.write(content)
+
+
def _generate_timeline_page(self, env: Environment, output_dir: Path):
+
"""Generate timeline page."""
+
template = env.get_template('timeline.html')
+
timeline_data = self._get_timeline_data()
+
+
content = template.render(**timeline_data)
+
+
with open(output_dir / 'timeline.html', 'w', encoding='utf-8') as f:
+
f.write(content)
+
+
def _generate_users_page(self, env: Environment, output_dir: Path):
+
"""Generate users overview page."""
+
template = env.get_template('users.html')
+
+
index = self.git_store._load_index()
+
users_data = []
+
+
for user_metadata in index.users.values():
+
# Get user config for additional details
+
user_config = next(
+
(u for u in self.config.users if u.username == user_metadata.username),
+
None
+
)
+
+
# Get recent entries
+
recent_entries = self.git_store.list_entries(user_metadata.username, limit=3)
+
+
users_data.append({
+
'metadata': user_metadata,
+
'config': user_config,
+
'recent_entries': recent_entries,
+
})
+
+
# Sort by entry count
+
users_data.sort(key=lambda x: x['metadata'].entry_count, reverse=True)
+
+
context = {
+
'title': 'Users',
+
'users': users_data,
+
'generated_at': datetime.now().isoformat(),
+
}
+
+
content = template.render(**context)
+
+
with open(output_dir / 'users.html', 'w', encoding='utf-8') as f:
+
f.write(content)
+
+
def _generate_links_page(self, env: Environment, output_dir: Path):
+
"""Generate links overview page."""
+
template = env.get_template('links.html')
+
+
# Load links data
+
links_file = self.git_store.repo_path / "links.json"
+
url_mapping_file = self.git_store.repo_path / "url_mapping.json"
+
+
links_data = {}
+
url_mapping = {}
+
+
if links_file.exists():
+
try:
+
with open(links_file) as f:
+
links_data = json.load(f)
+
except Exception:
+
pass
+
+
if url_mapping_file.exists():
+
try:
+
with open(url_mapping_file) as f:
+
url_mapping = json.load(f)
+
except Exception:
+
pass
+
+
# Process most referenced URLs
+
url_counts = [(url, len(refs)) for url, refs in url_mapping.items()]
+
url_counts.sort(key=lambda x: x[1], reverse=True)
+
most_referenced = url_counts[:20]
+
+
# links.html iterates `outgoing_links`, so assemble that structure from
+
# the URL mapping, resolving entries through the git store.
+
outgoing_links = []
+
for url, _count in most_referenced:
+
entries = []
+
for ref in url_mapping.get(url, []):
+
entry = self.git_store.get_entry(ref['username'], ref['entry_id'])
+
if entry:
+
entries.append((ref['username'], entry))
+
outgoing_links.append({'url': url, 'target_username': None, 'entries': entries})
+
+
# Count links by type
+
link_stats = {
+
'total_entries_with_links': len(links_data),
+
'total_links': sum(len(entry_data.get('links', [])) for entry_data in links_data.values()),
+
'unique_urls': len(url_mapping),
+
}
+
+
context = {
+
'title': 'Links',
+
'most_referenced': most_referenced,
+
'outgoing_links': outgoing_links,
+
'url_mapping': url_mapping,
+
'link_stats': link_stats,
+
'generated_at': datetime.now().isoformat(),
+
}
+
+
content = template.render(**context)
+
+
with open(output_dir / 'links.html', 'w', encoding='utf-8') as f:
+
f.write(content)
+
+
def _generate_user_detail_pages(self, env: Environment, output_dir: Path) -> bool:
+
"""Generate individual user detail pages."""
+
try:
+
template = env.get_template('user_detail.html')
+
index = self.git_store._load_index()
+
+
# Create users subdirectory
+
users_dir = output_dir / 'users'
+
users_dir.mkdir(exist_ok=True)
+
+
for user_metadata in index.users.values():
+
user_config = next(
+
(u for u in self.config.users if u.username == user_metadata.username),
+
None
+
)
+
+
entries = self.git_store.list_entries(user_metadata.username)
+
+
# Get user's links
+
links_file = self.git_store.repo_path / "links.json"
+
user_links = []
+
if links_file.exists():
+
try:
+
with open(links_file) as f:
+
all_links = json.load(f)
+
user_links = [
+
data for key, data in all_links.items()
+
if data.get('username') == user_metadata.username
+
]
+
except Exception:
+
pass
+
+
context = {
+
'title': f"{user_metadata.display_name or user_metadata.username}",
+
'user_metadata': user_metadata,
+
'user_config': user_config,
+
'entries': entries,
+
'user_links': user_links,
+
'generated_at': datetime.now().isoformat(),
+
}
+
+
content = template.render(**context)
+
+
user_file = users_dir / f"{user_metadata.username}.html"
+
with open(user_file, 'w', encoding='utf-8') as f:
+
f.write(content)
+
+
return True
+
except Exception:
+
return False
+
+
def _get_timeline_data(self, limit: Optional[int] = None) -> dict:
+
"""Get data for timeline page."""
+
index = self.git_store._load_index()
+
+
# Collect all entries with metadata
+
all_entries = []
+
for user_metadata in index.users.values():
+
user_entries = self.git_store.list_entries(user_metadata.username)
+
for entry in user_entries:
+
all_entries.append({
+
'username': user_metadata.username,
+
'display_name': user_metadata.display_name,
+
'entry': entry,
+
})
+
+
# Sort by date (newest first)
+
all_entries.sort(
+
key=lambda x: x['entry'].updated or x['entry'].published or datetime.min,
+
reverse=True
+
)
+
+
if limit:
+
all_entries = all_entries[:limit]
+
+
# Group by date for timeline display
+
timeline_groups = {}
+
for item in all_entries:
+
entry_date = item['entry'].updated or item['entry'].published
+
if entry_date:
+
date_key = entry_date.strftime('%Y-%m-%d')
+
if date_key not in timeline_groups:
+
timeline_groups[date_key] = []
+
timeline_groups[date_key].append(item)
+
+
return {
+
'title': 'Timeline',
+
'timeline_groups': timeline_groups,
+
'total_entries': len(all_entries),
+
'generated_at': datetime.now().isoformat(),
+
}
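
A sketch of a full site build (store and config as in the earlier examples); note that generate_site returns False rather than raising on failure:

    from pathlib import Path

    site = SiteGenerator(store, config)
    if site.generate_site(Path("public")):
        print("wrote public/: index, timeline, users, links pages")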
+254
src/thicket/subsystems/users.py
···
+
"""User management subsystem."""
+
+
import shutil
+
from typing import Optional
+
+
from pydantic import EmailStr, HttpUrl, TypeAdapter, ValidationError
+
+
from ..core.git_store import GitStore
+
from ..models import ThicketConfig, UserConfig, UserMetadata
+
+
+
class UserManager:
+
"""Manages user operations and metadata."""
+
+
def __init__(self, git_store: GitStore, config: ThicketConfig):
+
"""Initialize user manager."""
+
self.git_store = git_store
+
self.config = config
+
+
def add_user(self, username: str, feeds: list[str], **kwargs) -> UserConfig:
+
"""Add a new user with feeds."""
+
# Validate feeds
+
validated_feeds = []
+
for feed in feeds:
+
try:
+
validated_feeds.append(HttpUrl(feed))
+
except ValidationError as e:
+
raise ValueError(f"Invalid feed URL '{feed}': {e}")
+
+
# Validate optional fields
+
email = None
+
if 'email' in kwargs and kwargs['email']:
+
try:
+
# EmailStr is an Annotated alias under pydantic v2 and cannot be
+
# called directly; validate via a TypeAdapter instead (assumes v2).
+
email = TypeAdapter(EmailStr).validate_python(kwargs['email'])
+
except ValidationError as e:
+
raise ValueError(f"Invalid email '{kwargs['email']}': {e}")
+
+
homepage = None
+
if 'homepage' in kwargs and kwargs['homepage']:
+
try:
+
homepage = HttpUrl(kwargs['homepage'])
+
except ValidationError as e:
+
raise ValueError(f"Invalid homepage URL '{kwargs['homepage']}': {e}")
+
+
icon = None
+
if 'icon' in kwargs and kwargs['icon']:
+
try:
+
icon = HttpUrl(kwargs['icon'])
+
except ValidationError as e:
+
raise ValueError(f"Invalid icon URL '{kwargs['icon']}': {e}")
+
+
# Create user config
+
user_config = UserConfig(
+
username=username,
+
feeds=validated_feeds,
+
email=email,
+
homepage=homepage,
+
icon=icon,
+
display_name=kwargs.get('display_name')
+
)
+
+
# Add to git store
+
self.git_store.add_user(
+
username=username,
+
display_name=user_config.display_name,
+
email=str(user_config.email) if user_config.email else None,
+
homepage=str(user_config.homepage) if user_config.homepage else None,
+
icon=str(user_config.icon) if user_config.icon else None,
+
feeds=[str(feed) for feed in user_config.feeds]
+
)
+
+
# Add to config if not already present
+
existing_user = next((u for u in self.config.users if u.username == username), None)
+
if not existing_user:
+
self.config.users.append(user_config)
+
else:
+
# Update existing config
+
existing_user.feeds = user_config.feeds
+
existing_user.email = user_config.email
+
existing_user.homepage = user_config.homepage
+
existing_user.icon = user_config.icon
+
existing_user.display_name = user_config.display_name
+
+
return user_config
+
+
def get_user(self, username: str) -> Optional[UserConfig]:
+
"""Get user configuration."""
+
return next((u for u in self.config.users if u.username == username), None)
+
+
def get_user_metadata(self, username: str) -> Optional[UserMetadata]:
+
"""Get user metadata from git store."""
+
return self.git_store.get_user(username)
+
+
def list_users(self) -> list[UserConfig]:
+
"""List all configured users."""
+
return self.config.users.copy()
+
+
def list_users_with_metadata(self) -> list[tuple[UserConfig, Optional[UserMetadata]]]:
+
"""List users with their git store metadata."""
+
result = []
+
for user_config in self.config.users:
+
metadata = self.git_store.get_user(user_config.username)
+
result.append((user_config, metadata))
+
return result
+
+
def update_user(self, username: str, **kwargs) -> bool:
+
"""Update user configuration."""
+
# Update in config
+
user_config = self.get_user(username)
+
if not user_config:
+
return False
+
+
# Validate and update feeds if provided
+
if 'feeds' in kwargs:
+
validated_feeds = []
+
for feed in kwargs['feeds']:
+
try:
+
validated_feeds.append(HttpUrl(feed))
+
except ValidationError:
+
return False
+
user_config.feeds = validated_feeds
+
+
# Validate and update other fields
+
if 'email' in kwargs and kwargs['email']:
+
try:
+
user_config.email = TypeAdapter(EmailStr).validate_python(kwargs['email'])
+
except ValidationError:
+
return False
+
elif 'email' in kwargs and not kwargs['email']:
+
user_config.email = None
+
+
if 'homepage' in kwargs and kwargs['homepage']:
+
try:
+
user_config.homepage = HttpUrl(kwargs['homepage'])
+
except ValidationError:
+
return False
+
elif 'homepage' in kwargs and not kwargs['homepage']:
+
user_config.homepage = None
+
+
if 'icon' in kwargs and kwargs['icon']:
+
try:
+
user_config.icon = HttpUrl(kwargs['icon'])
+
except ValidationError:
+
return False
+
elif 'icon' in kwargs and not kwargs['icon']:
+
user_config.icon = None
+
+
if 'display_name' in kwargs:
+
user_config.display_name = kwargs['display_name'] or None
+
+
# Update in git store
+
git_kwargs = {}
+
if 'feeds' in kwargs:
+
git_kwargs['feeds'] = [str(feed) for feed in user_config.feeds]
+
if user_config.email:
+
git_kwargs['email'] = str(user_config.email)
+
if user_config.homepage:
+
git_kwargs['homepage'] = str(user_config.homepage)
+
if user_config.icon:
+
git_kwargs['icon'] = str(user_config.icon)
+
if user_config.display_name:
+
git_kwargs['display_name'] = user_config.display_name
+
+
return self.git_store.update_user(username, **git_kwargs)
+
+
def remove_user(self, username: str) -> bool:
+
"""Remove a user and their data."""
+
# Remove from config
+
self.config.users = [u for u in self.config.users if u.username != username]
+
+
# Remove user directory from git store
+
user_metadata = self.git_store.get_user(username)
+
if user_metadata:
+
user_dir = self.git_store.repo_path / user_metadata.directory
+
if user_dir.exists():
+
try:
+
shutil.rmtree(user_dir)
+
except Exception:
+
return False
+
+
# Remove user from index
+
index = self.git_store._load_index()
+
if username in index.users:
+
del index.users[username]
+
self.git_store._save_index(index)
+
+
return True
+
+
def get_user_stats(self, username: str) -> Optional[dict]:
+
"""Get statistics for a specific user."""
+
user_metadata = self.git_store.get_user(username)
+
if not user_metadata:
+
return None
+
+
user_config = self.get_user(username)
+
entries = self.git_store.list_entries(username)
+
+
return {
+
'username': username,
+
'display_name': user_metadata.display_name,
+
'entry_count': user_metadata.entry_count,
+
'feeds_configured': len(user_config.feeds) if user_config else 0,
+
'directory': user_metadata.directory,
+
'created': user_metadata.created.isoformat() if user_metadata.created else None,
+
'last_updated': user_metadata.last_updated.isoformat() if user_metadata.last_updated else None,
+
'latest_entry': entries[0].updated.isoformat() if entries else None,
+
}
+
+
def validate_user_feeds(self, username: str) -> dict:
+
"""Validate all feeds for a user."""
+
user_config = self.get_user(username)
+
if not user_config:
+
return {'error': 'User not found'}
+
+
results = {
+
'username': username,
+
'total_feeds': len(user_config.feeds),
+
'valid_feeds': [],
+
'invalid_feeds': [],
+
}
+
+
for feed_url in user_config.feeds:
+
try:
+
# Basic URL validation - more comprehensive validation would require fetching
+
HttpUrl(str(feed_url))
+
results['valid_feeds'].append(str(feed_url))
+
except ValidationError as e:
+
results['invalid_feeds'].append({
+
'url': str(feed_url),
+
'error': str(e)
+
})
+
+
results['is_valid'] = len(results['invalid_feeds']) == 0
+
+
return results
+
+
def sync_config_with_git_store(self) -> bool:
+
"""Sync configuration users with git store."""
+
try:
+
for user_config in self.config.users:
+
git_user = self.git_store.get_user(user_config.username)
+
if not git_user:
+
# Add missing user to git store
+
self.git_store.add_user(
+
username=user_config.username,
+
display_name=user_config.display_name,
+
email=str(user_config.email) if user_config.email else None,
+
homepage=str(user_config.homepage) if user_config.homepage else None,
+
icon=str(user_config.icon) if user_config.icon else None,
+
feeds=[str(feed) for feed in user_config.feeds]
+
)
+
return True
+
except Exception:
+
return False
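
Taken together, update_user, remove_user, get_user_stats, and validate_user_feeds form a small CRUD surface over the git store. A minimal usage sketch, assuming the same constructor wiring that Thicket._init_subsystems uses later in this diff; "alice" and the paths are illustrative:

    from pathlib import Path

    from thicket.core.git_store import GitStore
    from thicket.models import ThicketConfig
    from thicket.subsystems.users import UserManager

    config = ThicketConfig(git_store=Path("store"), cache_dir=Path("cache"), users=[])
    users = UserManager(GitStore(config.git_store), config)

    # update_user returns False on an invalid URL instead of raising
    ok = users.update_user("alice", icon="https://example.com/alice.png")

    report = users.validate_user_feeds("alice")
    if not report.get("is_valid", True):
        print(report["invalid_feeds"])

    stats = users.get_user_stats("alice")  # None if the user is unknown
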
+31
src/thicket/templates/base.html
···
+
<!DOCTYPE html>
+
<html lang="en">
+
<head>
+
<meta charset="UTF-8">
+
<meta name="viewport" content="width=device-width, initial-scale=1.0">
+
<title>{% block page_title %}{{ title }}{% endblock %}</title>
+
<link rel="stylesheet" href="css/style.css">
+
</head>
+
<body>
+
<header class="site-header">
+
<div class="header-content">
+
<h1 class="site-title">{{ title }}</h1>
+
<nav class="site-nav">
+
<a href="timeline.html" class="nav-link {% if page == 'timeline' %}active{% endif %}">Timeline</a>
+
<a href="links.html" class="nav-link {% if page == 'links' %}active{% endif %}">Links</a>
+
<a href="users.html" class="nav-link {% if page == 'users' %}active{% endif %}">Users</a>
+
</nav>
+
</div>
+
</header>
+
+
<main class="main-content">
+
{% block content %}{% endblock %}
+
</main>
+
+
<footer class="site-footer">
+
<p>Generated on {{ generated_at }} by <a href="https://github.com/avsm/thicket">Thicket</a></p>
+
</footer>
+
+
<script src="js/script.js"></script>
+
</body>
+
</html>
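
base.html supplies the shared chrome (header, nav, footer) that the other templates fill in via the page_title and content blocks. A sketch of how such a template could be rendered standalone with Jinja2, the engine this block syntax implies; the loader path and context values are illustrative, not taken from the SiteGenerator code:

    from datetime import datetime

    from jinja2 import Environment, FileSystemLoader

    env = Environment(loader=FileSystemLoader("src/thicket/templates"))
    template = env.get_template("base.html")
    html = template.render(
        title="My Thicket",
        page="timeline",  # drives the active nav-link class
        generated_at=datetime.now().strftime("%Y-%m-%d %H:%M"),
    )
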
+13
src/thicket/templates/index.html
···
+
<!DOCTYPE html>
+
<html lang="en">
+
<head>
+
<meta charset="UTF-8">
+
<meta name="viewport" content="width=device-width, initial-scale=1.0">
+
<title>{{ title }}</title>
+
<meta http-equiv="refresh" content="0; url=timeline.html">
+
<link rel="canonical" href="timeline.html">
+
</head>
+
<body>
+
<p>Redirecting to <a href="timeline.html">Timeline</a>...</p>
+
</body>
+
</html>
+38
src/thicket/templates/links.html
···
+
{% extends "base.html" %}
+
+
{% block page_title %}Outgoing Links - {{ title }}{% endblock %}
+
+
{% block content %}
+
<div class="page-content">
+
<h2>Outgoing Links</h2>
+
<p class="page-description">External links referenced in blog posts, ordered by most recent reference.</p>
+
+
{% for link in outgoing_links %}
+
<article class="link-group">
+
<h3 class="link-url">
+
<a href="{{ link.url }}" target="_blank">{{ link.url|truncate(80) }}</a>
+
{% if link.target_username %}
+
<span class="target-user">({{ link.target_username }})</span>
+
{% endif %}
+
</h3>
+
<div class="referencing-entries">
+
<span class="ref-count">Referenced in {{ link.entries|length }} post(s):</span>
+
<ul>
+
{% for display_name, entry in link.entries[:5] %}
+
<li>
+
<span class="author">{{ display_name }}</span> -
+
<a href="{{ entry.link }}" target="_blank">{{ entry.title }}</a>
+
<time datetime="{{ entry.updated or entry.published }}">
+
({{ (entry.updated or entry.published).strftime('%Y-%m-%d') }})
+
</time>
+
</li>
+
{% endfor %}
+
{% if link.entries|length > 5 %}
+
<li class="more">... and {{ link.entries|length - 5 }} more</li>
+
{% endif %}
+
</ul>
+
</div>
+
</article>
+
{% endfor %}
+
</div>
+
{% endblock %}
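
The template above reads link.url, link.target_username, and link.entries as (display_name, entry) pairs, so it implies a context contract for outgoing_links. A sketch of one item's shape, inferred purely from the names the template dereferences (Jinja resolves both attribute and dict-item access, so plain dicts work; the entry is a minimal AtomEntry as constructed in the tests later in this diff):

    from datetime import datetime

    from pydantic import HttpUrl

    from thicket.models import AtomEntry

    atom_entry = AtomEntry(
        id="https://example.com/some-post",
        title="Some Post",
        link=HttpUrl("https://example.com/some-post"),
        updated=datetime.now(),
    )

    outgoing_link = {
        "url": "https://example.com/some-post",
        "target_username": "alice",  # optional; falsy values hide the span
        "entries": [("Alice Example", atom_entry)],  # (display_name, entry) pairs
    }
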
+88
src/thicket/templates/script.js
···
+
// Enhanced functionality for thicket website
+
document.addEventListener('DOMContentLoaded', function() {
+
+
// Enhance thread collapsing (optional feature)
+
const threadHeaders = document.querySelectorAll('.thread-header');
+
threadHeaders.forEach(header => {
+
header.style.cursor = 'pointer';
+
header.addEventListener('click', function() {
+
const thread = this.parentElement;
+
const entries = thread.querySelectorAll('.thread-entry');
+
+
// Toggle visibility of all but the first entry
+
for (let i = 1; i < entries.length; i++) {
+
entries[i].style.display = entries[i].style.display === 'none' ? 'block' : 'none';
+
}
+
+
// Update thread count text
+
const count = this.querySelector('.thread-count');
+
if (entries[1] && entries[1].style.display === 'none') {
+
count.textContent = count.textContent.replace('posts', 'posts (collapsed)');
+
} else {
+
count.textContent = count.textContent.replace(' (collapsed)', '');
+
}
+
});
+
});
+
+
// Add relative time display
+
const timeElements = document.querySelectorAll('time');
+
timeElements.forEach(timeEl => {
+
const datetime = new Date(timeEl.getAttribute('datetime'));
+
const now = new Date();
+
const diffMs = now - datetime;
+
const diffDays = Math.floor(diffMs / (1000 * 60 * 60 * 24));
+
+
let relativeTime;
+
if (diffDays === 0) {
+
const diffHours = Math.floor(diffMs / (1000 * 60 * 60));
+
if (diffHours === 0) {
+
const diffMinutes = Math.floor(diffMs / (1000 * 60));
+
relativeTime = diffMinutes === 0 ? 'just now' : `${diffMinutes}m ago`;
+
} else {
+
relativeTime = `${diffHours}h ago`;
+
}
+
} else if (diffDays === 1) {
+
relativeTime = 'yesterday';
+
} else if (diffDays < 7) {
+
relativeTime = `${diffDays}d ago`;
+
} else if (diffDays < 30) {
+
const weeks = Math.floor(diffDays / 7);
+
relativeTime = weeks === 1 ? '1w ago' : `${weeks}w ago`;
+
} else if (diffDays < 365) {
+
const months = Math.floor(diffDays / 30);
+
relativeTime = months === 1 ? '1mo ago' : `${months}mo ago`;
+
} else {
+
const years = Math.floor(diffDays / 365);
+
relativeTime = years === 1 ? '1y ago' : `${years}y ago`;
+
}
+
+
// Add relative time as title attribute
+
timeEl.setAttribute('title', timeEl.textContent);
+
timeEl.textContent = relativeTime;
+
});
+
+
// Enhanced anchor link scrolling for shared references
+
document.querySelectorAll('a[href^="#"]').forEach(anchor => {
+
anchor.addEventListener('click', function (e) {
+
e.preventDefault();
+
const target = document.querySelector(this.getAttribute('href'));
+
if (target) {
+
target.scrollIntoView({
+
behavior: 'smooth',
+
block: 'center'
+
});
+
+
// Highlight the target briefly
+
const timelineEntry = target.closest('.timeline-entry');
+
if (timelineEntry) {
+
timelineEntry.style.outline = '2px solid var(--primary-color)';
+
timelineEntry.style.borderRadius = '8px';
+
setTimeout(() => {
+
timelineEntry.style.outline = '';
+
timelineEntry.style.borderRadius = '';
+
}, 2000);
+
}
+
}
+
});
+
});
+
});
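
For reference, the same relative-time bucketing the script applies to <time> elements, sketched in Python; the thresholds (7/30/365 days) mirror the JavaScript exactly:

    from datetime import datetime

    def relative_time(then: datetime, now: datetime) -> str:
        delta = now - then
        days = delta.days
        if days == 0:
            hours = delta.seconds // 3600
            if hours == 0:
                minutes = delta.seconds // 60
                return "just now" if minutes == 0 else f"{minutes}m ago"
            return f"{hours}h ago"
        if days == 1:
            return "yesterday"
        if days < 7:
            return f"{days}d ago"
        if days < 30:
            weeks = days // 7
            return "1w ago" if weeks == 1 else f"{weeks}w ago"
        if days < 365:
            months = days // 30
            return "1mo ago" if months == 1 else f"{months}mo ago"
        years = days // 365
        return "1y ago" if years == 1 else f"{years}y ago"

    assert relative_time(datetime(2025, 1, 1), datetime(2025, 1, 4)) == "3d ago"
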
+754
src/thicket/templates/style.css
···
+
/* Modern, clean design with high-density text and readable theme */
+
+
:root {
+
--primary-color: #2c3e50;
+
--secondary-color: #3498db;
+
--accent-color: #e74c3c;
+
--background: #ffffff;
+
--surface: #f8f9fa;
+
--text-primary: #2c3e50;
+
--text-secondary: #7f8c8d;
+
--border-color: #e0e0e0;
+
--thread-indent: 20px;
+
--max-width: 1200px;
+
}
+
+
* {
+
margin: 0;
+
padding: 0;
+
box-sizing: border-box;
+
}
+
+
body {
+
font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', 'Roboto', 'Helvetica Neue', Arial, sans-serif;
+
font-size: 14px;
+
line-height: 1.6;
+
color: var(--text-primary);
+
background-color: var(--background);
+
}
+
+
/* Header */
+
.site-header {
+
background-color: var(--surface);
+
border-bottom: 1px solid var(--border-color);
+
padding: 0.75rem 0;
+
position: sticky;
+
top: 0;
+
z-index: 100;
+
}
+
+
.header-content {
+
max-width: var(--max-width);
+
margin: 0 auto;
+
padding: 0 2rem;
+
display: flex;
+
justify-content: space-between;
+
align-items: center;
+
}
+
+
.site-title {
+
font-size: 1.5rem;
+
font-weight: 600;
+
color: var(--primary-color);
+
margin: 0;
+
}
+
+
/* Navigation */
+
.site-nav {
+
display: flex;
+
gap: 1.5rem;
+
}
+
+
.nav-link {
+
text-decoration: none;
+
color: var(--text-secondary);
+
font-weight: 500;
+
font-size: 0.95rem;
+
padding: 0.5rem 0.75rem;
+
border-radius: 4px;
+
transition: all 0.2s ease;
+
}
+
+
.nav-link:hover {
+
color: var(--primary-color);
+
background-color: var(--background);
+
}
+
+
.nav-link.active {
+
color: var(--secondary-color);
+
background-color: var(--background);
+
font-weight: 600;
+
}
+
+
/* Main Content */
+
.main-content {
+
max-width: var(--max-width);
+
margin: 2rem auto;
+
padding: 0 2rem;
+
}
+
+
.page-content {
+
margin: 0;
+
}
+
+
.page-description {
+
color: var(--text-secondary);
+
margin-bottom: 1.5rem;
+
font-style: italic;
+
}
+
+
/* Sections */
+
section {
+
margin-bottom: 2rem;
+
}
+
+
h2 {
+
font-size: 1.3rem;
+
font-weight: 600;
+
margin-bottom: 0.75rem;
+
color: var(--primary-color);
+
}
+
+
h3 {
+
font-size: 1.1rem;
+
font-weight: 600;
+
margin-bottom: 0.75rem;
+
color: var(--primary-color);
+
}
+
+
/* Entries and Threads */
+
article {
+
margin-bottom: 1.5rem;
+
padding: 1rem;
+
background-color: var(--surface);
+
border-radius: 4px;
+
border: 1px solid var(--border-color);
+
}
+
+
/* Timeline-style entries */
+
.timeline-entry {
+
margin-bottom: 0.5rem;
+
padding: 0.5rem 0.75rem;
+
border: none;
+
background: transparent;
+
transition: background-color 0.2s ease;
+
}
+
+
.timeline-entry:hover {
+
background-color: var(--surface);
+
}
+
+
.timeline-meta {
+
display: inline-flex;
+
gap: 0.5rem;
+
align-items: center;
+
font-size: 0.75rem;
+
color: var(--text-secondary);
+
margin-bottom: 0.25rem;
+
}
+
+
.timeline-time {
+
font-family: 'SF Mono', Monaco, Consolas, 'Courier New', monospace;
+
font-size: 0.75rem;
+
color: var(--text-secondary);
+
}
+
+
.timeline-author {
+
font-weight: 600;
+
color: var(--primary-color);
+
font-size: 0.8rem;
+
text-decoration: none;
+
}
+
+
.timeline-author:hover {
+
color: var(--secondary-color);
+
text-decoration: underline;
+
}
+
+
.timeline-content {
+
line-height: 1.4;
+
}
+
+
.timeline-title {
+
font-size: 0.95rem;
+
font-weight: 600;
+
}
+
+
.timeline-title a {
+
color: var(--primary-color);
+
text-decoration: none;
+
}
+
+
.timeline-title a:hover {
+
color: var(--secondary-color);
+
text-decoration: underline;
+
}
+
+
.timeline-summary {
+
color: var(--text-secondary);
+
font-size: 0.9rem;
+
line-height: 1.4;
+
}
+
+
/* Legacy styles for other sections */
+
.entry-meta, .thread-header {
+
display: flex;
+
gap: 1rem;
+
align-items: center;
+
margin-bottom: 0.5rem;
+
font-size: 0.85rem;
+
color: var(--text-secondary);
+
}
+
+
.author {
+
font-weight: 600;
+
color: var(--primary-color);
+
}
+
+
time {
+
font-size: 0.85rem;
+
}
+
+
h4 {
+
font-size: 1.1rem;
+
font-weight: 600;
+
margin-bottom: 0.5rem;
+
}
+
+
h4 a {
+
color: var(--primary-color);
+
text-decoration: none;
+
}
+
+
h4 a:hover {
+
color: var(--secondary-color);
+
text-decoration: underline;
+
}
+
+
.entry-summary {
+
color: var(--text-primary);
+
line-height: 1.5;
+
margin-top: 0.5rem;
+
}
+
+
/* Enhanced Threading Styles */
+
+
/* Conversation Clusters */
+
.conversation-cluster {
+
background-color: var(--background);
+
border: 2px solid var(--border-color);
+
border-radius: 8px;
+
margin-bottom: 2rem;
+
overflow: hidden;
+
box-shadow: 0 2px 4px rgba(0, 0, 0, 0.05);
+
}
+
+
.conversation-header {
+
background: linear-gradient(135deg, var(--surface) 0%, #f1f3f4 100%);
+
padding: 0.75rem 1rem;
+
border-bottom: 1px solid var(--border-color);
+
}
+
+
.conversation-meta {
+
display: flex;
+
justify-content: space-between;
+
align-items: center;
+
flex-wrap: wrap;
+
gap: 0.5rem;
+
}
+
+
.conversation-count {
+
font-weight: 600;
+
color: var(--secondary-color);
+
font-size: 0.9rem;
+
}
+
+
.conversation-participants {
+
font-size: 0.8rem;
+
color: var(--text-secondary);
+
flex: 1;
+
text-align: right;
+
}
+
+
.conversation-flow {
+
padding: 0.5rem;
+
}
+
+
/* Threaded Conversation Entries */
+
.conversation-entry {
+
position: relative;
+
margin-bottom: 0.75rem;
+
display: flex;
+
align-items: flex-start;
+
}
+
+
.conversation-entry.level-0 {
+
margin-left: 0;
+
}
+
+
.conversation-entry.level-1 {
+
margin-left: 1.5rem;
+
}
+
+
.conversation-entry.level-2 {
+
margin-left: 3rem;
+
}
+
+
.conversation-entry.level-3 {
+
margin-left: 4.5rem;
+
}
+
+
.conversation-entry.level-4 {
+
margin-left: 6rem;
+
}
+
+
.entry-connector {
+
width: 3px;
+
background-color: var(--secondary-color);
+
margin-right: 0.75rem;
+
margin-top: 0.25rem;
+
min-height: 2rem;
+
border-radius: 2px;
+
opacity: 0.6;
+
}
+
+
.conversation-entry.level-0 .entry-connector {
+
background-color: var(--accent-color);
+
opacity: 0.8;
+
}
+
+
.entry-content {
+
flex: 1;
+
background-color: var(--surface);
+
padding: 0.75rem;
+
border-radius: 6px;
+
border: 1px solid var(--border-color);
+
transition: all 0.2s ease;
+
}
+
+
.entry-content:hover {
+
border-color: var(--secondary-color);
+
box-shadow: 0 2px 8px rgba(52, 152, 219, 0.1);
+
}
+
+
/* Reference Indicators */
+
.reference-indicators {
+
display: inline-flex;
+
gap: 0.25rem;
+
margin-left: 0.5rem;
+
}
+
+
.ref-out, .ref-in {
+
display: inline-block;
+
width: 1rem;
+
height: 1rem;
+
border-radius: 50%;
+
text-align: center;
+
line-height: 1rem;
+
font-size: 0.7rem;
+
font-weight: bold;
+
}
+
+
.ref-out {
+
background-color: #e8f5e8;
+
color: #2d8f2d;
+
}
+
+
.ref-in {
+
background-color: #e8f0ff;
+
color: #1f5fbf;
+
}
+
+
/* Reference Badges for Individual Posts */
+
.timeline-entry.with-references {
+
background-color: var(--surface);
+
}
+
+
/* Conversation posts in unified timeline */
+
.timeline-entry.conversation-post {
+
background: transparent;
+
border: none;
+
margin-bottom: 0.5rem;
+
padding: 0.5rem 0.75rem;
+
}
+
+
.timeline-entry.conversation-post.level-0 {
+
margin-left: 0;
+
border-left: 2px solid var(--accent-color);
+
padding-left: 0.75rem;
+
}
+
+
.timeline-entry.conversation-post.level-1 {
+
margin-left: 1.5rem;
+
border-left: 2px solid var(--secondary-color);
+
padding-left: 0.75rem;
+
}
+
+
.timeline-entry.conversation-post.level-2 {
+
margin-left: 3rem;
+
border-left: 2px solid var(--text-secondary);
+
padding-left: 0.75rem;
+
}
+
+
.timeline-entry.conversation-post.level-3 {
+
margin-left: 4.5rem;
+
border-left: 2px solid var(--text-secondary);
+
padding-left: 0.75rem;
+
}
+
+
.timeline-entry.conversation-post.level-4 {
+
margin-left: 6rem;
+
border-left: 2px solid var(--text-secondary);
+
padding-left: 0.75rem;
+
}
+
+
/* Cross-thread linking */
+
.cross-thread-links {
+
margin-top: 0.5rem;
+
padding-top: 0.5rem;
+
border-top: 1px solid var(--border-color);
+
}
+
+
.cross-thread-indicator {
+
font-size: 0.75rem;
+
color: var(--text-secondary);
+
background-color: var(--surface);
+
padding: 0.25rem 0.5rem;
+
border-radius: 12px;
+
border: 1px solid var(--border-color);
+
display: inline-block;
+
}
+
+
/* Inline shared references styling */
+
.inline-shared-refs {
+
margin-left: 0.5rem;
+
font-size: 0.85rem;
+
color: var(--text-secondary);
+
}
+
+
.shared-ref-link {
+
color: var(--primary-color);
+
text-decoration: none;
+
font-weight: 500;
+
transition: color 0.2s ease;
+
}
+
+
.shared-ref-link:hover {
+
color: var(--secondary-color);
+
text-decoration: underline;
+
}
+
+
.shared-ref-more {
+
font-style: italic;
+
color: var(--text-secondary);
+
font-size: 0.8rem;
+
margin-left: 0.25rem;
+
}
+
+
.user-anchor, .post-anchor {
+
position: absolute;
+
margin-top: -60px; /* Offset for fixed header */
+
pointer-events: none;
+
}
+
+
.cross-thread-link {
+
color: var(--primary-color);
+
text-decoration: none;
+
font-weight: 500;
+
transition: color 0.2s ease;
+
}
+
+
.cross-thread-link:hover {
+
color: var(--secondary-color);
+
text-decoration: underline;
+
}
+
+
.reference-badges {
+
display: flex;
+
gap: 0.25rem;
+
margin-left: 0.5rem;
+
flex-wrap: wrap;
+
}
+
+
.ref-badge {
+
display: inline-block;
+
padding: 0.1rem 0.4rem;
+
border-radius: 12px;
+
font-size: 0.7rem;
+
font-weight: 600;
+
text-transform: uppercase;
+
letter-spacing: 0.05em;
+
}
+
+
.ref-badge.ref-outbound {
+
background-color: #e8f5e8;
+
color: #2d8f2d;
+
border: 1px solid #c3e6c3;
+
}
+
+
.ref-badge.ref-inbound {
+
background-color: #e8f0ff;
+
color: #1f5fbf;
+
border: 1px solid #b3d9ff;
+
}
+
+
/* Author Color Coding */
+
.timeline-author {
+
position: relative;
+
}
+
+
.timeline-author::before {
+
content: '';
+
display: inline-block;
+
width: 8px;
+
height: 8px;
+
border-radius: 50%;
+
margin-right: 0.5rem;
+
background-color: var(--secondary-color);
+
}
+
+
/* Generate consistent colors for authors */
+
.author-avsm::before { background-color: #e74c3c; }
+
.author-mort::before { background-color: #3498db; }
+
.author-mte::before { background-color: #2ecc71; }
+
.author-ryan::before { background-color: #f39c12; }
+
.author-mwd::before { background-color: #9b59b6; }
+
.author-dra::before { background-color: #1abc9c; }
+
.author-pf341::before { background-color: #34495e; }
+
.author-sadiqj::before { background-color: #e67e22; }
+
.author-martinkl::before { background-color: #8e44ad; }
+
.author-jonsterling::before { background-color: #27ae60; }
+
.author-jon::before { background-color: #f1c40f; }
+
.author-onkar::before { background-color: #e91e63; }
+
.author-gabriel::before { background-color: #00bcd4; }
+
.author-jess::before { background-color: #ff5722; }
+
.author-ibrahim::before { background-color: #607d8b; }
+
.author-andres::before { background-color: #795548; }
+
.author-eeg::before { background-color: #ff9800; }
+
+
/* Section Headers */
+
.conversations-section h3,
+
.referenced-posts-section h3,
+
.individual-posts-section h3 {
+
border-bottom: 2px solid var(--border-color);
+
padding-bottom: 0.5rem;
+
margin-bottom: 1.5rem;
+
position: relative;
+
}
+
+
.conversations-section h3::before {
+
content: "๐Ÿ’ฌ";
+
margin-right: 0.5rem;
+
}
+
+
.referenced-posts-section h3::before {
+
content: "๐Ÿ”—";
+
margin-right: 0.5rem;
+
}
+
+
.individual-posts-section h3::before {
+
content: "๐Ÿ“";
+
margin-right: 0.5rem;
+
}
+
+
/* Legacy thread styles (for backward compatibility) */
+
.thread {
+
background-color: var(--background);
+
border: 1px solid var(--border-color);
+
padding: 0;
+
overflow: hidden;
+
margin-bottom: 1rem;
+
}
+
+
.thread-header {
+
background-color: var(--surface);
+
padding: 0.5rem 0.75rem;
+
border-bottom: 1px solid var(--border-color);
+
}
+
+
.thread-count {
+
font-weight: 600;
+
color: var(--secondary-color);
+
}
+
+
.thread-entry {
+
padding: 0.5rem 0.75rem;
+
border-bottom: 1px solid var(--border-color);
+
}
+
+
.thread-entry:last-child {
+
border-bottom: none;
+
}
+
+
.thread-entry.reply {
+
margin-left: var(--thread-indent);
+
border-left: 3px solid var(--secondary-color);
+
background-color: var(--surface);
+
}
+
+
/* Links Section */
+
.link-group {
+
background-color: var(--background);
+
}
+
+
.link-url {
+
font-size: 1rem;
+
word-break: break-word;
+
}
+
+
.link-url a {
+
color: var(--secondary-color);
+
text-decoration: none;
+
}
+
+
.link-url a:hover {
+
text-decoration: underline;
+
}
+
+
.target-user {
+
font-size: 0.9rem;
+
color: var(--text-secondary);
+
font-weight: normal;
+
}
+
+
.referencing-entries {
+
margin-top: 0.75rem;
+
}
+
+
.ref-count {
+
font-weight: 600;
+
color: var(--text-secondary);
+
font-size: 0.9rem;
+
}
+
+
.referencing-entries ul {
+
list-style: none;
+
margin-top: 0.5rem;
+
padding-left: 1rem;
+
}
+
+
.referencing-entries li {
+
margin-bottom: 0.25rem;
+
font-size: 0.9rem;
+
}
+
+
.referencing-entries .more {
+
font-style: italic;
+
color: var(--text-secondary);
+
}
+
+
/* Users Section */
+
.user-card {
+
background-color: var(--background);
+
}
+
+
.user-header {
+
display: flex;
+
gap: 1rem;
+
align-items: start;
+
margin-bottom: 1rem;
+
}
+
+
.user-icon {
+
width: 48px;
+
height: 48px;
+
border-radius: 50%;
+
object-fit: cover;
+
}
+
+
.user-info h3 {
+
margin-bottom: 0.25rem;
+
}
+
+
.username {
+
font-size: 0.9rem;
+
color: var(--text-secondary);
+
font-weight: normal;
+
}
+
+
.user-meta {
+
font-size: 0.9rem;
+
color: var(--text-secondary);
+
}
+
+
.user-meta a {
+
color: var(--secondary-color);
+
text-decoration: none;
+
}
+
+
.user-meta a:hover {
+
text-decoration: underline;
+
}
+
+
.separator {
+
margin: 0 0.5rem;
+
}
+
+
.post-count {
+
font-weight: 600;
+
}
+
+
.user-recent h4 {
+
font-size: 0.95rem;
+
margin-bottom: 0.5rem;
+
color: var(--text-secondary);
+
}
+
+
.user-recent ul {
+
list-style: none;
+
padding-left: 0;
+
}
+
+
.user-recent li {
+
margin-bottom: 0.25rem;
+
font-size: 0.9rem;
+
}
+
+
/* Footer */
+
.site-footer {
+
max-width: var(--max-width);
+
margin: 3rem auto 2rem;
+
padding: 1rem 2rem;
+
text-align: center;
+
color: var(--text-secondary);
+
font-size: 0.85rem;
+
border-top: 1px solid var(--border-color);
+
}
+
+
.site-footer a {
+
color: var(--secondary-color);
+
text-decoration: none;
+
}
+
+
.site-footer a:hover {
+
text-decoration: underline;
+
}
+
+
/* Responsive */
+
@media (max-width: 768px) {
+
.site-title {
+
font-size: 1.3rem;
+
}
+
+
.header-content {
+
flex-direction: column;
+
gap: 0.75rem;
+
align-items: flex-start;
+
}
+
+
.site-nav {
+
gap: 1rem;
+
}
+
+
.main-content {
+
padding: 0 1rem;
+
}
+
+
.thread-entry.reply {
+
margin-left: calc(var(--thread-indent) / 2);
+
}
+
+
.user-header {
+
flex-direction: column;
+
}
+
}
+141
src/thicket/templates/timeline.html
···
+
{% extends "base.html" %}
+
+
{% block page_title %}Timeline - {{ title }}{% endblock %}
+
+
{% block content %}
+
{% set seen_users = [] %}
+
<div class="page-content">
+
<h2>Recent Posts & Conversations</h2>
+
+
<section class="unified-timeline">
+
{% for item in timeline_items %}
+
{% if item.type == "post" %}
+
<!-- Individual Post -->
+
<article class="timeline-entry {% if item.content.references %}with-references{% endif %}">
+
<div class="timeline-meta">
+
<time datetime="{{ item.content.entry.updated or item.content.entry.published }}" class="timeline-time">
+
{{ (item.content.entry.updated or item.content.entry.published).strftime('%Y-%m-%d %H:%M') }}
+
</time>
+
{% set homepage = get_user_homepage(item.content.username) %}
+
{% if item.content.username not in seen_users %}
+
<a id="{{ item.content.username }}" class="user-anchor"></a>
+
{% set _ = seen_users.append(item.content.username) %}
+
{% endif %}
+
<a id="post-{{ loop.index0 }}-{{ safe_anchor_id(item.content.entry.id) }}" class="post-anchor"></a>
+
{% if homepage %}
+
<a href="{{ homepage }}" target="_blank" class="timeline-author">{{ item.content.display_name }}</a>
+
{% else %}
+
<span class="timeline-author">{{ item.content.display_name }}</span>
+
{% endif %}
+
{% if item.content.references %}
+
<div class="reference-badges">
+
{% for ref in item.content.references %}
+
{% if ref.type == 'outbound' %}
+
<span class="ref-badge ref-outbound" title="References {{ ref.target_username or 'external post' }}">
+
→ {{ ref.target_username or 'ext' }}
+
</span>
+
{% elif ref.type == 'inbound' %}
+
<span class="ref-badge ref-inbound" title="Referenced by {{ ref.source_username or 'external post' }}">
+
โ† {{ ref.source_username or 'ext' }}
+
</span>
+
{% endif %}
+
{% endfor %}
+
</div>
+
{% endif %}
+
</div>
+
<div class="timeline-content">
+
<strong class="timeline-title">
+
<a href="{{ item.content.entry.link }}" target="_blank">{{ item.content.entry.title }}</a>
+
</strong>
+
{% if item.content.entry.summary %}
+
<span class="timeline-summary">โ€” {{ clean_html_summary(item.content.entry.summary, 250) }}</span>
+
{% endif %}
+
{% if item.content.shared_references %}
+
<span class="inline-shared-refs">
+
{% for ref in item.content.shared_references[:3] %}
+
{% if ref.target_username %}
+
<a href="#{{ ref.target_username }}" class="shared-ref-link" title="Referenced by {{ ref.count }} entries">@{{ ref.target_username }}</a>{% if not loop.last %}, {% endif %}
+
{% endif %}
+
{% endfor %}
+
{% if item.content.shared_references|length > 3 %}
+
<span class="shared-ref-more">+{{ item.content.shared_references|length - 3 }} more</span>
+
{% endif %}
+
</span>
+
{% endif %}
+
{% if item.content.cross_thread_links %}
+
<div class="cross-thread-links">
+
<span class="cross-thread-indicator">๐Ÿ”— Also appears: </span>
+
{% for link in item.content.cross_thread_links %}
+
<a href="#{{ link.anchor_id }}" class="cross-thread-link" title="{{ link.title }}">{{ link.context }}</a>{% if not loop.last %}, {% endif %}
+
{% endfor %}
+
</div>
+
{% endif %}
+
</div>
+
</article>
+
+
{% elif item.type == "thread" %}
+
<!-- Conversation Thread -->
+
{% set outer_loop_index = loop.index0 %}
+
{% for thread_item in item.content %}
+
<article class="timeline-entry conversation-post level-{{ thread_item.thread_level }}">
+
<div class="timeline-meta">
+
<time datetime="{{ thread_item.entry.updated or thread_item.entry.published }}" class="timeline-time">
+
{{ (thread_item.entry.updated or thread_item.entry.published).strftime('%Y-%m-%d %H:%M') }}
+
</time>
+
{% set homepage = get_user_homepage(thread_item.username) %}
+
{% if thread_item.username not in seen_users %}
+
<a id="{{ thread_item.username }}" class="user-anchor"></a>
+
{% set _ = seen_users.append(thread_item.username) %}
+
{% endif %}
+
<a id="post-{{ outer_loop_index }}-{{ loop.index0 }}-{{ safe_anchor_id(thread_item.entry.id) }}" class="post-anchor"></a>
+
{% if homepage %}
+
<a href="{{ homepage }}" target="_blank" class="timeline-author author-{{ thread_item.username }}">{{ thread_item.display_name }}</a>
+
{% else %}
+
<span class="timeline-author author-{{ thread_item.username }}">{{ thread_item.display_name }}</span>
+
{% endif %}
+
{% if thread_item.references_to or thread_item.referenced_by %}
+
<span class="reference-indicators">
+
{% if thread_item.references_to %}
+
<span class="ref-out" title="References other posts">โ†’</span>
+
{% endif %}
+
{% if thread_item.referenced_by %}
+
<span class="ref-in" title="Referenced by other posts">โ†</span>
+
{% endif %}
+
</span>
+
{% endif %}
+
</div>
+
<div class="timeline-content">
+
<strong class="timeline-title">
+
<a href="{{ thread_item.entry.link }}" target="_blank">{{ thread_item.entry.title }}</a>
+
</strong>
+
{% if thread_item.entry.summary %}
+
<span class="timeline-summary">โ€” {{ clean_html_summary(thread_item.entry.summary, 300) }}</span>
+
{% endif %}
+
{% if thread_item.shared_references %}
+
<span class="inline-shared-refs">
+
{% for ref in thread_item.shared_references[:3] %}
+
{% if ref.target_username %}
+
<a href="#{{ ref.target_username }}" class="shared-ref-link" title="Referenced by {{ ref.count }} entries">@{{ ref.target_username }}</a>{% if not loop.last %}, {% endif %}
+
{% endif %}
+
{% endfor %}
+
{% if thread_item.shared_references|length > 3 %}
+
<span class="shared-ref-more">+{{ thread_item.shared_references|length - 3 }} more</span>
+
{% endif %}
+
</span>
+
{% endif %}
+
{% if thread_item.cross_thread_links %}
+
<div class="cross-thread-links">
+
<span class="cross-thread-indicator">๐Ÿ”— Also appears: </span>
+
{% for link in thread_item.cross_thread_links %}
+
<a href="#{{ link.anchor_id }}" class="cross-thread-link" title="{{ link.title }}">{{ link.context }}</a>{% if not loop.last %}, {% endif %}
+
{% endfor %}
+
</div>
+
{% endif %}
+
</div>
+
</article>
+
{% endfor %}
+
{% endif %}
+
{% endfor %}
+
</section>
+
</div>
+
{% endblock %}
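
The timeline template branches on item.type and dereferences a fairly deep context, so the shape of timeline_items is worth spelling out. A sketch inferred only from the names used above; note the template also expects get_user_homepage, safe_anchor_id, and clean_html_summary to be exposed as callables in the Jinja environment:

    from datetime import datetime

    from pydantic import HttpUrl

    from thicket.models import AtomEntry

    atom_entry = AtomEntry(
        id="https://example.com/post",
        title="A Post",
        link=HttpUrl("https://example.com/post"),
        updated=datetime.now(),
    )

    timeline_items = [
        {
            "type": "post",
            "content": {
                "username": "alice",
                "display_name": "Alice Example",
                "entry": atom_entry,       # AtomEntry-like: .title/.link/.summary/.updated
                "references": [],          # items with .type ('outbound'/'inbound') and usernames
                "shared_references": [],   # items with .target_username and .count
                "cross_thread_links": [],  # items with .anchor_id/.title/.context
            },
        },
        {
            "type": "thread",
            "content": [  # one dict per post, in thread order
                {
                    "username": "bob",
                    "display_name": "Bob",
                    "entry": atom_entry,
                    "thread_level": 0,  # 0-4; drives the indentation classes
                    "references_to": [],
                    "referenced_by": [],
                    "shared_references": [],
                    "cross_thread_links": [],
                },
            ],
        },
    ]
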
+169
src/thicket/templates/user_detail.html
···
+
{% extends "base.html" %}
+
+
{% block page_title %}{{ title }} - Thicket{% endblock %}
+
+
{% block content %}
+
<div class="container mx-auto px-4 py-8">
+
<div class="max-w-4xl mx-auto">
+
<!-- User Header -->
+
<div class="bg-white rounded-lg shadow-md p-6 mb-6">
+
<div class="flex items-center space-x-4">
+
{% if user_config and user_config.icon %}
+
<img src="{{ user_config.icon }}" alt="{{ title }}" class="w-16 h-16 rounded-full">
+
{% else %}
+
<div class="w-16 h-16 rounded-full bg-blue-500 flex items-center justify-center text-white text-xl font-bold">
+
{{ user_metadata.username[0].upper() }}
+
</div>
+
{% endif %}
+
+
<div>
+
<h1 class="text-2xl font-bold text-gray-900">{{ title }}</h1>
+
<p class="text-gray-600">@{{ user_metadata.username }}</p>
+
{% if user_config and user_config.email %}
+
<p class="text-sm text-gray-500">{{ user_config.email }}</p>
+
{% endif %}
+
</div>
+
</div>
+
+
{% if user_config and user_config.homepage %}
+
<div class="mt-4">
+
<a href="{{ user_config.homepage }}" class="text-blue-600 hover:text-blue-800" target="_blank">
+
๐Ÿ  Homepage
+
</a>
+
</div>
+
{% endif %}
+
+
<div class="mt-4 grid grid-cols-2 md:grid-cols-4 gap-4">
+
<div class="text-center">
+
<div class="text-2xl font-bold text-blue-600">{{ user_metadata.entry_count }}</div>
+
<div class="text-sm text-gray-500">Entries</div>
+
</div>
+
+
{% if user_config %}
+
<div class="text-center">
+
<div class="text-2xl font-bold text-green-600">{{ user_config.feeds|length }}</div>
+
<div class="text-sm text-gray-500">Feeds</div>
+
</div>
+
{% endif %}
+
+
<div class="text-center">
+
<div class="text-2xl font-bold text-purple-600">{{ user_links|length }}</div>
+
<div class="text-sm text-gray-500">Link Groups</div>
+
</div>
+
+
<div class="text-center">
+
<div class="text-sm text-gray-500">Member since</div>
+
<div class="text-sm font-medium">{{ user_metadata.created.strftime('%Y-%m-%d') if user_metadata.created else 'Unknown' }}</div>
+
</div>
+
</div>
+
</div>
+
+
<!-- Feeds -->
+
{% if user_config and user_config.feeds %}
+
<div class="bg-white rounded-lg shadow-md p-6 mb-6">
+
<h2 class="text-xl font-semibold mb-4">Feeds</h2>
+
<div class="space-y-2">
+
{% for feed in user_config.feeds %}
+
<div class="flex items-center space-x-2">
+
<span class="text-green-500">๐Ÿ“ก</span>
+
<a href="{{ feed }}" class="text-blue-600 hover:text-blue-800" target="_blank">{{ feed }}</a>
+
</div>
+
{% endfor %}
+
</div>
+
</div>
+
{% endif %}
+
+
<!-- Recent Entries -->
+
<div class="bg-white rounded-lg shadow-md p-6 mb-6">
+
<h2 class="text-xl font-semibold mb-4">Recent Entries</h2>
+
+
{% if entries %}
+
<div class="space-y-4">
+
{% for entry in entries[:10] %}
+
<div class="border-l-4 border-blue-500 pl-4 py-2">
+
<h3 class="font-semibold text-lg">
+
<a href="{{ entry.link }}" class="text-blue-600 hover:text-blue-800" target="_blank">
+
{{ entry.title }}
+
</a>
+
</h3>
+
+
<div class="text-sm text-gray-500 mb-2">
+
{% if entry.published %}
+
Published: {{ entry.published.strftime('%Y-%m-%d %H:%M') }}
+
{% endif %}
+
{% if entry.updated and entry.updated != entry.published %}
+
• Updated: {{ entry.updated.strftime('%Y-%m-%d %H:%M') }}
+
{% endif %}
+
</div>
+
+
{% if entry.summary %}
+
<div class="text-gray-700 mb-2">
+
{{ entry.summary|truncate(200) }}
+
</div>
+
{% endif %}
+
+
{% if entry.categories %}
+
<div class="flex flex-wrap gap-1">
+
{% for category in entry.categories %}
+
<span class="px-2 py-1 bg-blue-100 text-blue-800 text-xs rounded">{{ category }}</span>
+
{% endfor %}
+
</div>
+
{% endif %}
+
</div>
+
{% endfor %}
+
</div>
+
+
{% if entries|length > 10 %}
+
<div class="mt-4 text-center">
+
<p class="text-gray-500">Showing 10 of {{ entries|length }} entries</p>
+
</div>
+
{% endif %}
+
+
{% else %}
+
<p class="text-gray-500">No entries found.</p>
+
{% endif %}
+
</div>
+
+
<!-- Links Summary -->
+
{% if user_links %}
+
<div class="bg-white rounded-lg shadow-md p-6">
+
<h2 class="text-xl font-semibold mb-4">Link Activity</h2>
+
+
<div class="space-y-3">
+
{% for link_group in user_links[:5] %}
+
<div class="border-l-4 border-green-500 pl-4">
+
<h3 class="font-medium">{{ link_group.title }}</h3>
+
<div class="text-sm text-gray-500 mb-2">
+
{{ link_group.links|length }} link(s) found
+
</div>
+
+
<div class="space-y-1">
+
{% for link in link_group.links[:3] %}
+
<div class="text-sm">
+
<a href="{{ link.url }}" class="text-blue-600 hover:text-blue-800" target="_blank">
+
{{ link.text or link.url }}
+
</a>
+
<span class="text-gray-400 ml-2">({{ link.type }})</span>
+
</div>
+
{% endfor %}
+
+
{% if link_group.links|length > 3 %}
+
<div class="text-sm text-gray-500">
+
... and {{ link_group.links|length - 3 }} more
+
</div>
+
{% endif %}
+
</div>
+
</div>
+
{% endfor %}
+
</div>
+
+
{% if user_links|length > 5 %}
+
<div class="mt-4 text-center">
+
<p class="text-gray-500">Showing 5 of {{ user_links|length }} entries with links</p>
+
</div>
+
{% endif %}
+
</div>
+
{% endif %}
+
</div>
+
</div>
+
{% endblock %}
+57
src/thicket/templates/users.html
···
+
{% extends "base.html" %}
+
+
{% block page_title %}Users - {{ title }}{% endblock %}
+
+
{% block content %}
+
<div class="page-content">
+
<h2>Users</h2>
+
<p class="page-description">All users contributing to this thicket, ordered by post count.</p>
+
+
{% for user_info in users %}
+
<article class="user-card">
+
<div class="user-header">
+
{% if user_info.metadata.icon and user_info.metadata.icon != "None" %}
+
<img src="{{ user_info.metadata.icon }}" alt="{{ user_info.metadata.username }}" class="user-icon">
+
{% endif %}
+
<div class="user-info">
+
<h3>
+
{% if user_info.metadata.display_name %}
+
{{ user_info.metadata.display_name }}
+
<span class="username">({{ user_info.metadata.username }})</span>
+
{% else %}
+
{{ user_info.metadata.username }}
+
{% endif %}
+
</h3>
+
<div class="user-meta">
+
{% if user_info.metadata.homepage %}
+
<a href="{{ user_info.metadata.homepage }}" target="_blank">{{ user_info.metadata.homepage }}</a>
+
{% endif %}
+
{% if user_info.metadata.email %}
+
<span class="separator">โ€ข</span>
+
<a href="mailto:{{ user_info.metadata.email }}">{{ user_info.metadata.email }}</a>
+
{% endif %}
+
<span class="separator">โ€ข</span>
+
<span class="post-count">{{ user_info.metadata.entry_count }} posts</span>
+
</div>
+
</div>
+
</div>
+
+
{% if user_info.recent_entries %}
+
<div class="user-recent">
+
<h4>Recent posts:</h4>
+
<ul>
+
{% for display_name, entry in user_info.recent_entries %}
+
<li>
+
<a href="{{ entry.link }}" target="_blank">{{ entry.title }}</a>
+
<time datetime="{{ entry.updated or entry.published }}">
+
({{ (entry.updated or entry.published).strftime('%Y-%m-%d') }})
+
</time>
+
</li>
+
{% endfor %}
+
</ul>
+
</div>
+
{% endif %}
+
</article>
+
{% endfor %}
+
</div>
+
{% endblock %}
+230
src/thicket/thicket.py
···
+
"""Main Thicket library class providing unified API."""
+
+
import asyncio
+
from datetime import datetime
+
from pathlib import Path
+
from typing import Optional, Union
+
+
from pydantic import HttpUrl
+
+
from .core.feed_parser import FeedParser
+
from .core.git_store import GitStore
+
from .models import AtomEntry, ThicketConfig, UserConfig
+
from .subsystems.feeds import FeedManager
+
from .subsystems.links import LinkProcessor
+
from .subsystems.repository import RepositoryManager
+
from .subsystems.site import SiteGenerator
+
from .subsystems.users import UserManager
+
+
+
class Thicket:
+
"""
+
Main Thicket class providing unified API for feed management.
+
+
This class serves as the primary interface for all Thicket operations,
+
consolidating configuration, repository management, feed processing,
+
user management, link processing, and site generation.
+
"""
+
+
def __init__(self, config: Union[ThicketConfig, Path, str]):
+
"""
+
Initialize Thicket with configuration.
+
+
Args:
+
config: Either a ThicketConfig object or a path to a config file
+
"""
+
if isinstance(config, (Path, str)):
+
self.config = ThicketConfig.from_file(Path(config))
+
else:
+
self.config = config
+
+
# Initialize subsystems
+
self._init_subsystems()
+
+
def _init_subsystems(self):
+
"""Initialize all subsystems."""
+
# Core components
+
self.git_store = GitStore(self.config.git_store)
+
self.feed_parser = FeedParser()
+
+
# Subsystem managers
+
self.repository = RepositoryManager(self.git_store, self.config)
+
self.users = UserManager(self.git_store, self.config)
+
self.feeds = FeedManager(self.git_store, self.feed_parser, self.config)
+
self.links = LinkProcessor(self.git_store, self.config)
+
self.site = SiteGenerator(self.git_store, self.config)
+
+
@classmethod
+
def create(cls, git_store: Path, cache_dir: Path, users: Optional[list[UserConfig]] = None) -> 'Thicket':
+
"""
+
Create a new Thicket instance with minimal configuration.
+
+
Args:
+
git_store: Path to git repository
+
cache_dir: Path to cache directory
+
users: Optional list of user configurations
+
+
Returns:
+
Configured Thicket instance
+
"""
+
config = ThicketConfig(
+
git_store=git_store,
+
cache_dir=cache_dir,
+
users=users or []
+
)
+
return cls(config)
+
+
@classmethod
+
def from_config_file(cls, config_path: Path) -> 'Thicket':
+
"""Load Thicket from configuration file."""
+
return cls(config_path)
+
+
# User Management API
+
def add_user(self, username: str, feeds: list[str], **kwargs) -> UserConfig:
+
"""Add a new user with feeds."""
+
return self.users.add_user(username, feeds, **kwargs)
+
+
def get_user(self, username: str) -> Optional[UserConfig]:
+
"""Get user configuration."""
+
return self.users.get_user(username)
+
+
def list_users(self) -> list[UserConfig]:
+
"""List all configured users."""
+
return self.users.list_users()
+
+
def update_user(self, username: str, **kwargs) -> bool:
+
"""Update user configuration."""
+
return self.users.update_user(username, **kwargs)
+
+
def remove_user(self, username: str) -> bool:
+
"""Remove a user and their data."""
+
return self.users.remove_user(username)
+
+
# Feed Management API
+
async def sync_feeds(self, username: Optional[str] = None, progress_callback=None) -> dict:
+
"""Sync feeds for user(s)."""
+
return await self.feeds.sync_feeds(username, progress_callback)
+
+
async def sync_user_feeds(self, username: str, progress_callback=None) -> dict:
+
"""Sync feeds for a specific user."""
+
return await self.feeds.sync_user_feeds(username, progress_callback)
+
+
def get_entries(self, username: str, limit: Optional[int] = None) -> list[AtomEntry]:
+
"""Get entries for a user."""
+
return self.feeds.get_entries(username, limit)
+
+
def get_entry(self, username: str, entry_id: str) -> Optional[AtomEntry]:
+
"""Get a specific entry."""
+
return self.feeds.get_entry(username, entry_id)
+
+
def search_entries(self, query: str, username: Optional[str] = None, limit: Optional[int] = None) -> list[tuple[str, AtomEntry]]:
+
"""Search entries across users."""
+
return self.feeds.search_entries(query, username, limit)
+
+
# Repository Management API
+
def init_repository(self) -> bool:
+
"""Initialize the git repository."""
+
return self.repository.init_repository()
+
+
def commit_changes(self, message: str) -> bool:
+
"""Commit all pending changes."""
+
return self.repository.commit_changes(message)
+
+
def get_status(self) -> dict:
+
"""Get repository status and statistics."""
+
return self.repository.get_status()
+
+
def backup_repository(self, backup_path: Path) -> bool:
+
"""Create a backup of the repository."""
+
return self.repository.backup_repository(backup_path)
+
+
# Link Processing API
+
def process_links(self, username: Optional[str] = None) -> dict:
+
"""Process and extract links from entries."""
+
return self.links.process_links(username)
+
+
def get_links(self, username: Optional[str] = None) -> dict:
+
"""Get processed links."""
+
return self.links.get_links(username)
+
+
def find_references(self, url: str) -> list[tuple[str, AtomEntry]]:
+
"""Find entries that reference a URL."""
+
return self.links.find_references(url)
+
+
# Site Generation API
+
def generate_site(self, output_dir: Path, template_dir: Optional[Path] = None) -> bool:
+
"""Generate static site."""
+
return self.site.generate_site(output_dir, template_dir)
+
+
def generate_timeline(self, output_path: Path, limit: Optional[int] = None) -> bool:
+
"""Generate timeline HTML."""
+
return self.site.generate_timeline(output_path, limit)
+
+
def generate_user_pages(self, output_dir: Path) -> bool:
+
"""Generate individual user pages."""
+
return self.site.generate_user_pages(output_dir)
+
+
# Utility Methods
+
def get_stats(self) -> dict:
+
"""Get comprehensive statistics."""
+
base_stats = self.repository.get_status()
+
feed_stats = self.feeds.get_stats()
+
link_stats = self.links.get_stats()
+
+
return {
+
**base_stats,
+
**feed_stats,
+
**link_stats,
+
'config': {
+
'git_store': str(self.config.git_store),
+
'cache_dir': str(self.config.cache_dir),
+
'total_users_configured': len(self.config.users),
+
}
+
}
+
+
async def full_sync(self, progress_callback=None) -> dict:
+
"""Perform a complete sync: feeds -> links -> commit."""
+
results = {}
+
+
# Sync feeds
+
results['feeds'] = await self.sync_feeds(progress_callback=progress_callback)
+
+
# Process links
+
results['links'] = self.process_links()
+
+
# Commit changes
+
message = f"Sync completed at {datetime.now().isoformat()}"
+
results['committed'] = self.commit_changes(message)
+
+
return results
+
+
def validate_config(self) -> list[str]:
+
"""Validate configuration and return any errors."""
+
errors = []
+
+
# Check paths exist
+
if not self.config.git_store.parent.exists():
+
errors.append(f"Git store parent directory does not exist: {self.config.git_store.parent}")
+
+
if not self.config.cache_dir.parent.exists():
+
errors.append(f"Cache directory parent does not exist: {self.config.cache_dir.parent}")
+
+
# Validate user configs
+
for user in self.config.users:
+
if not user.feeds:
+
errors.append(f"User {user.username} has no feeds configured")
+
+
# Per-feed URL validity is already enforced by pydantic's HttpUrl type at config load
+
+
return errors
+
+
def __enter__(self):
+
"""Context manager entry."""
+
return self
+
+
def __exit__(self, exc_type, exc_val, exc_tb):
+
"""Context manager exit."""
+
# Could add cleanup logic here if needed
+
pass
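
An end-to-end sketch of the facade above, from empty repository to generated site. Every call is defined in this file; only the feed URL and paths are illustrative, and the import path assumes no package-level re-export of Thicket:

    import asyncio
    from pathlib import Path

    from thicket.thicket import Thicket

    t = Thicket.create(git_store=Path("store"), cache_dir=Path("cache"))
    t.init_repository()
    t.add_user("alice", feeds=["https://example.com/feed.xml"])

    # full_sync chains feeds -> links -> commit and reports each stage
    results = asyncio.run(t.full_sync())

    t.generate_site(Path("site"))
    print(t.get_stats())
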
+1 -1
src/thicket/utils/__init__.py
···
"""Utility modules for thicket."""
# This module will contain shared utilities
-
# For now, it's empty but can be expanded with common functions
+
# For now, it's empty but can be expanded with common functions
tests/__init__.py

This is a binary file and will not be displayed.

-84
tests/conftest.py
···
-
"""Test configuration and fixtures for thicket."""
-
-
import tempfile
-
from pathlib import Path
-
-
import pytest
-
-
from thicket.models import ThicketConfig, UserConfig
-
-
-
@pytest.fixture
-
def temp_dir():
-
"""Create a temporary directory for tests."""
-
with tempfile.TemporaryDirectory() as tmp_dir:
-
yield Path(tmp_dir)
-
-
-
@pytest.fixture
-
def sample_config(temp_dir):
-
"""Create a sample configuration for testing."""
-
git_store = temp_dir / "git_store"
-
cache_dir = temp_dir / "cache"
-
-
return ThicketConfig(
-
git_store=git_store,
-
cache_dir=cache_dir,
-
users=[
-
UserConfig(
-
username="testuser",
-
feeds=["https://example.com/feed.xml"],
-
email="test@example.com",
-
display_name="Test User",
-
)
-
],
-
)
-
-
-
@pytest.fixture
-
def sample_atom_feed():
-
"""Sample Atom feed XML for testing."""
-
return """<?xml version="1.0" encoding="utf-8"?>
-
<feed xmlns="http://www.w3.org/2005/Atom">
-
<title>Test Feed</title>
-
<link href="https://example.com/"/>
-
<updated>2025-01-01T00:00:00Z</updated>
-
<author>
-
<name>Test Author</name>
-
<email>author@example.com</email>
-
</author>
-
<id>https://example.com/</id>
-
-
<entry>
-
<title>Test Entry</title>
-
<link href="https://example.com/entry/1"/>
-
<id>https://example.com/entry/1</id>
-
<updated>2025-01-01T00:00:00Z</updated>
-
<summary>This is a test entry.</summary>
-
<content type="html">
-
<![CDATA[<p>This is the content of the test entry.</p>]]>
-
</content>
-
</entry>
-
</feed>"""
-
-
-
@pytest.fixture
-
def sample_rss_feed():
-
"""Sample RSS feed XML for testing."""
-
return """<?xml version="1.0" encoding="UTF-8"?>
-
<rss version="2.0">
-
<channel>
-
<title>Test RSS Feed</title>
-
<link>https://example.com/</link>
-
<description>Test RSS feed for testing</description>
-
<managingEditor>editor@example.com</managingEditor>
-
-
<item>
-
<title>Test RSS Entry</title>
-
<link>https://example.com/rss/entry/1</link>
-
<description>This is a test RSS entry.</description>
-
<pubDate>Mon, 01 Jan 2025 00:00:00 GMT</pubDate>
-
<guid>https://example.com/rss/entry/1</guid>
-
</item>
-
</channel>
-
</rss>"""
-132
tests/test_feed_parser.py
···
-
"""Tests for feed parser functionality."""
-
-
import pytest
-
from pydantic import HttpUrl
-
-
from thicket.core.feed_parser import FeedParser
-
from thicket.models import AtomEntry, FeedMetadata
-
-
-
class TestFeedParser:
-
"""Test the FeedParser class."""
-
-
def test_init(self):
-
"""Test parser initialization."""
-
parser = FeedParser()
-
assert parser.user_agent == "thicket/0.1.0"
-
assert "a" in parser.allowed_tags
-
assert "href" in parser.allowed_attributes["a"]
-
-
def test_parse_atom_feed(self, sample_atom_feed):
-
"""Test parsing an Atom feed."""
-
parser = FeedParser()
-
metadata, entries = parser.parse_feed(sample_atom_feed)
-
-
# Check metadata
-
assert isinstance(metadata, FeedMetadata)
-
assert metadata.title == "Test Feed"
-
assert metadata.author_name == "Test Author"
-
assert metadata.author_email == "author@example.com"
-
assert metadata.link == HttpUrl("https://example.com/")
-
-
# Check entries
-
assert len(entries) == 1
-
entry = entries[0]
-
assert isinstance(entry, AtomEntry)
-
assert entry.title == "Test Entry"
-
assert entry.id == "https://example.com/entry/1"
-
assert entry.link == HttpUrl("https://example.com/entry/1")
-
assert entry.summary == "This is a test entry."
-
assert "<p>This is the content of the test entry.</p>" in entry.content
-
-
def test_parse_rss_feed(self, sample_rss_feed):
-
"""Test parsing an RSS feed."""
-
parser = FeedParser()
-
metadata, entries = parser.parse_feed(sample_rss_feed)
-
-
# Check metadata
-
assert isinstance(metadata, FeedMetadata)
-
assert metadata.title == "Test RSS Feed"
-
assert metadata.link == HttpUrl("https://example.com/")
-
assert metadata.author_email == "editor@example.com"
-
-
# Check entries
-
assert len(entries) == 1
-
entry = entries[0]
-
assert isinstance(entry, AtomEntry)
-
assert entry.title == "Test RSS Entry"
-
assert entry.id == "https://example.com/rss/entry/1"
-
assert entry.summary == "This is a test RSS entry."
-
-
def test_sanitize_entry_id(self):
-
"""Test entry ID sanitization."""
-
parser = FeedParser()
-
-
# Test URL ID
-
url_id = "https://example.com/posts/2025/01/test-post"
-
sanitized = parser.sanitize_entry_id(url_id)
-
assert sanitized == "posts_2025_01_test-post"
-
-
# Test problematic characters
-
bad_id = "test/with\\bad:chars|and<more>"
-
sanitized = parser.sanitize_entry_id(bad_id)
-
assert sanitized == "test_with_bad_chars_and_more_"
-
-
# Test empty ID
-
empty_id = ""
-
sanitized = parser.sanitize_entry_id(empty_id)
-
assert sanitized == "entry"
-
-
# Test very long ID
-
long_id = "a" * 300
-
sanitized = parser.sanitize_entry_id(long_id)
-
assert len(sanitized) == 200
-
-
def test_sanitize_html(self):
-
"""Test HTML sanitization."""
-
parser = FeedParser()
-
-
# Test allowed tags
-
safe_html = "<p>This is <strong>safe</strong> HTML</p>"
-
sanitized = parser._sanitize_html(safe_html)
-
assert sanitized == safe_html
-
-
# Test dangerous tags
-
dangerous_html = "<script>alert('xss')</script><p>Safe content</p>"
-
sanitized = parser._sanitize_html(dangerous_html)
-
assert "<script>" not in sanitized
-
assert "<p>Safe content</p>" in sanitized
-
-
# Test attributes
-
html_with_attrs = '<a href="https://example.com" onclick="alert()">Link</a>'
-
sanitized = parser._sanitize_html(html_with_attrs)
-
assert 'href="https://example.com"' in sanitized
-
assert 'onclick' not in sanitized
-
-
def test_extract_feed_metadata(self):
-
"""Test feed metadata extraction."""
-
parser = FeedParser()
-
-
# Test with feedparser parsed data
-
import feedparser
-
parsed = feedparser.parse("""<?xml version="1.0" encoding="utf-8"?>
-
<feed xmlns="http://www.w3.org/2005/Atom">
-
<title>Test Feed</title>
-
<link href="https://example.com/"/>
-
<author>
-
<name>Test Author</name>
-
<email>author@example.com</email>
-
<uri>https://example.com/about</uri>
-
</author>
-
<logo>https://example.com/logo.png</logo>
-
<icon>https://example.com/icon.png</icon>
-
</feed>""")
-
-
metadata = parser._extract_feed_metadata(parsed.feed)
-
assert metadata.title == "Test Feed"
-
assert metadata.author_name == "Test Author"
-
assert metadata.author_email == "author@example.com"
-
assert metadata.author_uri == HttpUrl("https://example.com/about")
-
assert metadata.link == HttpUrl("https://example.com/")
-
assert metadata.logo == HttpUrl("https://example.com/logo.png")
-
assert metadata.icon == HttpUrl("https://example.com/icon.png")
-277
tests/test_git_store.py
···
-
"""Tests for Git store functionality."""
-
-
import json
-
from datetime import datetime
-
-
import pytest
-
from pydantic import HttpUrl
-
-
from thicket.core.git_store import GitStore
-
from thicket.models import AtomEntry, DuplicateMap, UserMetadata
-
-
-
class TestGitStore:
-
"""Test the GitStore class."""
-
-
def test_init_new_repo(self, temp_dir):
-
"""Test initializing a new Git repository."""
-
repo_path = temp_dir / "test_repo"
-
store = GitStore(repo_path)
-
-
assert store.repo_path == repo_path
-
assert store.repo is not None
-
assert repo_path.exists()
-
assert (repo_path / ".git").exists()
-
assert (repo_path / "index.json").exists()
-
assert (repo_path / "duplicates.json").exists()
-
-
def test_init_existing_repo(self, temp_dir):
-
"""Test initializing with existing repository."""
-
repo_path = temp_dir / "test_repo"
-
-
# Create first store
-
store1 = GitStore(repo_path)
-
store1.add_user("testuser", display_name="Test User")
-
-
# Create second store pointing to same repo
-
store2 = GitStore(repo_path)
-
user = store2.get_user("testuser")
-
-
assert user is not None
-
assert user.username == "testuser"
-
assert user.display_name == "Test User"
-
-
def test_add_user(self, temp_dir):
-
"""Test adding a user to the Git store."""
-
store = GitStore(temp_dir / "test_repo")
-
-
user = store.add_user(
-
username="testuser",
-
display_name="Test User",
-
email="test@example.com",
-
homepage="https://example.com",
-
icon="https://example.com/icon.png",
-
feeds=["https://example.com/feed.xml"],
-
)
-
-
assert isinstance(user, UserMetadata)
-
assert user.username == "testuser"
-
assert user.display_name == "Test User"
-
assert user.email == "test@example.com"
-
assert user.homepage == "https://example.com"
-
assert user.icon == "https://example.com/icon.png"
-
assert user.feeds == ["https://example.com/feed.xml"]
-
assert user.directory == "testuser"
-
-
# Check that user directory was created
-
user_dir = store.repo_path / "testuser"
-
assert user_dir.exists()
-
assert (user_dir / "metadata.json").exists()
-
-
# Check metadata file content
-
with open(user_dir / "metadata.json") as f:
-
metadata = json.load(f)
-
assert metadata["username"] == "testuser"
-
assert metadata["display_name"] == "Test User"
-
-
def test_get_user(self, temp_dir):
-
"""Test getting user metadata."""
-
store = GitStore(temp_dir / "test_repo")
-
-
# Add user
-
store.add_user("testuser", display_name="Test User")
-
-
# Get user
-
user = store.get_user("testuser")
-
assert user is not None
-
assert user.username == "testuser"
-
assert user.display_name == "Test User"
-
-
# Try to get non-existent user
-
non_user = store.get_user("nonexistent")
-
assert non_user is None
-
-
def test_store_entry(self, temp_dir):
-
"""Test storing an entry."""
-
store = GitStore(temp_dir / "test_repo")
-
-
# Add user first
-
store.add_user("testuser")
-
-
# Create test entry
-
entry = AtomEntry(
-
id="https://example.com/entry/1",
-
title="Test Entry",
-
link=HttpUrl("https://example.com/entry/1"),
-
updated=datetime.now(),
-
summary="Test entry summary",
-
content="<p>Test content</p>",
-
)
-
-
# Store entry
-
result = store.store_entry("testuser", entry)
-
assert result is True
-
-
# Check that entry file was created
-
user_dir = store.repo_path / "testuser"
-
entry_files = list(user_dir.glob("*.json"))
-
entry_files = [f for f in entry_files if f.name != "metadata.json"]
-
assert len(entry_files) == 1
-
-
# Check entry content
-
with open(entry_files[0]) as f:
-
stored_entry = json.load(f)
-
assert stored_entry["title"] == "Test Entry"
-
assert stored_entry["id"] == "https://example.com/entry/1"
-
-
def test_get_entry(self, temp_dir):
-
"""Test retrieving an entry."""
-
store = GitStore(temp_dir / "test_repo")
-
-
# Add user and entry
-
store.add_user("testuser")
-
entry = AtomEntry(
-
id="https://example.com/entry/1",
-
title="Test Entry",
-
link=HttpUrl("https://example.com/entry/1"),
-
updated=datetime.now(),
-
)
-
store.store_entry("testuser", entry)
-
-
# Get entry
-
retrieved = store.get_entry("testuser", "https://example.com/entry/1")
-
assert retrieved is not None
-
assert retrieved.title == "Test Entry"
-
assert retrieved.id == "https://example.com/entry/1"
-
-
# Try to get non-existent entry
-
non_entry = store.get_entry("testuser", "https://example.com/nonexistent")
-
assert non_entry is None
-
-
def test_list_entries(self, temp_dir):
-
"""Test listing entries for a user."""
-
store = GitStore(temp_dir / "test_repo")
-
-
# Add user
-
store.add_user("testuser")
-
-
# Add multiple entries
-
for i in range(3):
-
entry = AtomEntry(
-
id=f"https://example.com/entry/{i}",
-
title=f"Test Entry {i}",
-
link=HttpUrl(f"https://example.com/entry/{i}"),
-
updated=datetime.now(),
-
)
-
store.store_entry("testuser", entry)
-
-
# List all entries
-
entries = store.list_entries("testuser")
-
assert len(entries) == 3
-
-
# List with limit
-
limited = store.list_entries("testuser", limit=2)
-
assert len(limited) == 2
-
-
# List for non-existent user
-
none_entries = store.list_entries("nonexistent")
-
assert len(none_entries) == 0
-
-
def test_duplicates(self, temp_dir):
-
"""Test duplicate management."""
-
store = GitStore(temp_dir / "test_repo")
-
-
# Get initial duplicates (should be empty)
-
duplicates = store.get_duplicates()
-
assert isinstance(duplicates, DuplicateMap)
-
assert len(duplicates.duplicates) == 0
-
-
# Add duplicate
-
store.add_duplicate("https://example.com/dup", "https://example.com/canonical")
-
-
# Check duplicate was added
-
duplicates = store.get_duplicates()
-
assert len(duplicates.duplicates) == 1
-
assert duplicates.is_duplicate("https://example.com/dup")
-
assert duplicates.get_canonical("https://example.com/dup") == "https://example.com/canonical"
-
-
# Remove duplicate
-
result = store.remove_duplicate("https://example.com/dup")
-
assert result is True
-
-
# Check duplicate was removed
-
duplicates = store.get_duplicates()
-
assert len(duplicates.duplicates) == 0
-
assert not duplicates.is_duplicate("https://example.com/dup")
-
-
def test_search_entries(self, temp_dir):
-
"""Test searching entries."""
-
store = GitStore(temp_dir / "test_repo")
-
-
# Add user
-
store.add_user("testuser")
-
-
# Add entries with different content
-
entries_data = [
-
("Test Python Programming", "Learning Python basics"),
-
("JavaScript Tutorial", "Advanced JavaScript concepts"),
-
("Python Web Development", "Building web apps with Python"),
-
]
-
-
for title, summary in entries_data:
-
entry = AtomEntry(
-
id=f"https://example.com/entry/{title.lower().replace(' ', '-')}",
-
title=title,
-
link=HttpUrl(f"https://example.com/entry/{title.lower().replace(' ', '-')}"),
-
updated=datetime.now(),
-
summary=summary,
-
)
-
store.store_entry("testuser", entry)
-
-
# Search for Python entries
-
results = store.search_entries("Python")
-
assert len(results) == 2
-
-
# Search for specific user
-
results = store.search_entries("Python", username="testuser")
-
assert len(results) == 2
-
-
# Search with limit
-
results = store.search_entries("Python", limit=1)
-
assert len(results) == 1
-
-
# Search for non-existent term
-
results = store.search_entries("NonExistent")
-
assert len(results) == 0
-
-
def test_get_stats(self, temp_dir):
-
"""Test getting repository statistics."""
-
store = GitStore(temp_dir / "test_repo")
-
-
# Get initial stats
-
stats = store.get_stats()
-
assert stats["total_users"] == 0
-
assert stats["total_entries"] == 0
-
assert stats["total_duplicates"] == 0
-
-
# Add user and entries
-
store.add_user("testuser")
-
for i in range(3):
-
entry = AtomEntry(
-
id=f"https://example.com/entry/{i}",
-
title=f"Test Entry {i}",
-
link=HttpUrl(f"https://example.com/entry/{i}"),
-
updated=datetime.now(),
-
)
-
store.store_entry("testuser", entry)
-
-
# Add duplicate
-
store.add_duplicate("https://example.com/dup", "https://example.com/canonical")
-
-
# Get updated stats
-
stats = store.get_stats()
-
assert stats["total_users"] == 1
-
assert stats["total_entries"] == 3
-
assert stats["total_duplicates"] == 1
-
assert "last_updated" in stats
-
assert "repository_size" in stats
-353
tests/test_models.py
···
-
"""Tests for pydantic models."""
-
-
from datetime import datetime
-
from pathlib import Path
-
-
import pytest
-
from pydantic import HttpUrl, ValidationError
-
-
from thicket.models import (
-
    AtomEntry,
-
    DuplicateMap,
-
    FeedMetadata,
-
    ThicketConfig,
-
    UserConfig,
-
    UserMetadata,
-
)
-
-
-
class TestUserConfig:
-
    """Test UserConfig model."""
-
-
    def test_valid_user_config(self):
-
        """Test creating valid user config."""
-
        config = UserConfig(
-
            username="testuser",
-
            feeds=["https://example.com/feed.xml"],
-
            email="test@example.com",
-
            homepage="https://example.com",
-
            display_name="Test User",
-
        )
-
-
        assert config.username == "testuser"
-
        assert len(config.feeds) == 1
-
        assert config.feeds[0] == HttpUrl("https://example.com/feed.xml")
-
        assert config.email == "test@example.com"
-
        assert config.display_name == "Test User"
-
-
    def test_invalid_email(self):
-
        """Test validation of invalid email."""
-
        with pytest.raises(ValidationError):
-
            UserConfig(
-
                username="testuser",
-
                feeds=["https://example.com/feed.xml"],
-
                email="invalid-email",
-
            )
-
-
    def test_invalid_feed_url(self):
-
        """Test validation of invalid feed URL."""
-
        with pytest.raises(ValidationError):
-
            UserConfig(
-
                username="testuser",
-
                feeds=["not-a-url"],
-
            )
-
-
    def test_optional_fields(self):
-
        """Test optional fields with None values."""
-
        config = UserConfig(
-
            username="testuser",
-
            feeds=["https://example.com/feed.xml"],
-
        )
-
-
        assert config.email is None
-
        assert config.homepage is None
-
        assert config.icon is None
-
        assert config.display_name is None
-
-
-
class TestThicketConfig:
-
    """Test ThicketConfig model."""
-
-
    def test_valid_config(self, temp_dir):
-
        """Test creating valid configuration."""
-
        config = ThicketConfig(
-
            git_store=temp_dir / "git_store",
-
            cache_dir=temp_dir / "cache",
-
            users=[
-
                UserConfig(
-
                    username="testuser",
-
                    feeds=["https://example.com/feed.xml"],
-
                )
-
            ],
-
        )
-
-
        assert config.git_store == temp_dir / "git_store"
-
        assert config.cache_dir == temp_dir / "cache"
-
        assert len(config.users) == 1
-
        assert config.users[0].username == "testuser"
-
-
    def test_find_user(self, temp_dir):
-
        """Test finding user by username."""
-
        config = ThicketConfig(
-
            git_store=temp_dir / "git_store",
-
            cache_dir=temp_dir / "cache",
-
            users=[
-
                UserConfig(username="user1", feeds=["https://example.com/feed1.xml"]),
-
                UserConfig(username="user2", feeds=["https://example.com/feed2.xml"]),
-
            ],
-
        )
-
-
        user = config.find_user("user1")
-
        assert user is not None
-
        assert user.username == "user1"
-
-
        non_user = config.find_user("nonexistent")
-
        assert non_user is None
-
-
    def test_add_user(self, temp_dir):
-
        """Test adding a new user."""
-
        config = ThicketConfig(
-
            git_store=temp_dir / "git_store",
-
            cache_dir=temp_dir / "cache",
-
            users=[],
-
        )
-
-
        new_user = UserConfig(
-
            username="newuser",
-
            feeds=["https://example.com/feed.xml"],
-
        )
-
-
        config.add_user(new_user)
-
        assert len(config.users) == 1
-
        assert config.users[0].username == "newuser"
-
-
    def test_add_feed_to_user(self, temp_dir):
-
        """Test adding feed to existing user."""
-
        config = ThicketConfig(
-
            git_store=temp_dir / "git_store",
-
            cache_dir=temp_dir / "cache",
-
            users=[
-
                UserConfig(username="testuser", feeds=["https://example.com/feed1.xml"]),
-
            ],
-
        )
-
-
        result = config.add_feed_to_user("testuser", HttpUrl("https://example.com/feed2.xml"))
-
        assert result is True
-
-
        user = config.find_user("testuser")
-
        assert len(user.feeds) == 2
-
        assert HttpUrl("https://example.com/feed2.xml") in user.feeds
-
-
        # Test adding to non-existent user
-
        result = config.add_feed_to_user("nonexistent", HttpUrl("https://example.com/feed.xml"))
-
        assert result is False
-
-
-
class TestAtomEntry:
-
    """Test AtomEntry model."""
-
-
    def test_valid_entry(self):
-
        """Test creating valid Atom entry."""
-
        entry = AtomEntry(
-
            id="https://example.com/entry/1",
-
            title="Test Entry",
-
            link=HttpUrl("https://example.com/entry/1"),
-
            updated=datetime.now(),
-
            published=datetime.now(),
-
            summary="Test summary",
-
            content="<p>Test content</p>",
-
            content_type="html",
-
            author={"name": "Test Author"},
-
            categories=["test", "example"],
-
        )
-
-
        assert entry.id == "https://example.com/entry/1"
-
        assert entry.title == "Test Entry"
-
        assert entry.summary == "Test summary"
-
        assert entry.content == "<p>Test content</p>"
-
        assert entry.content_type == "html"
-
        assert entry.author["name"] == "Test Author"
-
        assert "test" in entry.categories
-
-
    def test_minimal_entry(self):
-
        """Test creating minimal Atom entry."""
-
        entry = AtomEntry(
-
            id="https://example.com/entry/1",
-
            title="Test Entry",
-
            link=HttpUrl("https://example.com/entry/1"),
-
            updated=datetime.now(),
-
        )
-
-
        assert entry.id == "https://example.com/entry/1"
-
        assert entry.title == "Test Entry"
-
        assert entry.published is None
-
        assert entry.summary is None
-
        assert entry.content is None
-
        assert entry.content_type == "html"  # default
-
        assert entry.author is None
-
        assert entry.categories == []
-
-
-
class TestDuplicateMap:
-
    """Test DuplicateMap model."""
-
-
    def test_empty_duplicates(self):
-
        """Test empty duplicate map."""
-
        dup_map = DuplicateMap()
-
        assert len(dup_map.duplicates) == 0
-
        assert not dup_map.is_duplicate("test")
-
        assert dup_map.get_canonical("test") == "test"
-
-
    def test_add_duplicate(self):
-
        """Test adding duplicate mapping."""
-
        dup_map = DuplicateMap()
-
        dup_map.add_duplicate("dup1", "canonical1")
-
-
        assert len(dup_map.duplicates) == 1
-
        assert dup_map.is_duplicate("dup1")
-
        assert dup_map.get_canonical("dup1") == "canonical1"
-
        assert dup_map.get_canonical("canonical1") == "canonical1"
-
-
    def test_remove_duplicate(self):
-
        """Test removing duplicate mapping."""
-
        dup_map = DuplicateMap()
-
        dup_map.add_duplicate("dup1", "canonical1")
-
-
        result = dup_map.remove_duplicate("dup1")
-
        assert result is True
-
        assert len(dup_map.duplicates) == 0
-
        assert not dup_map.is_duplicate("dup1")
-
-
        # Test removing non-existent duplicate
-
        result = dup_map.remove_duplicate("nonexistent")
-
        assert result is False
-
-
    def test_get_duplicates_for_canonical(self):
-
        """Test getting all duplicates for a canonical ID."""
-
        dup_map = DuplicateMap()
-
        dup_map.add_duplicate("dup1", "canonical1")
-
        dup_map.add_duplicate("dup2", "canonical1")
-
        dup_map.add_duplicate("dup3", "canonical2")
-
-
        dups = dup_map.get_duplicates_for_canonical("canonical1")
-
        assert len(dups) == 2
-
        assert "dup1" in dups
-
        assert "dup2" in dups
-
-
        dups = dup_map.get_duplicates_for_canonical("canonical2")
-
        assert len(dups) == 1
-
        assert "dup3" in dups
-
-
        dups = dup_map.get_duplicates_for_canonical("nonexistent")
-
        assert len(dups) == 0
-
-
-
class TestFeedMetadata:
-
    """Test FeedMetadata model."""
-
-
    def test_valid_metadata(self):
-
        """Test creating valid feed metadata."""
-
        metadata = FeedMetadata(
-
            title="Test Feed",
-
            author_name="Test Author",
-
            author_email="author@example.com",
-
            author_uri=HttpUrl("https://example.com/author"),
-
            link=HttpUrl("https://example.com"),
-
            description="Test description",
-
        )
-
-
        assert metadata.title == "Test Feed"
-
        assert metadata.author_name == "Test Author"
-
        assert metadata.author_email == "author@example.com"
-
        assert metadata.link == HttpUrl("https://example.com")
-
-
    def test_to_user_config(self):
-
        """Test converting metadata to user config."""
-
        metadata = FeedMetadata(
-
            title="Test Feed",
-
            author_name="Test Author",
-
            author_email="author@example.com",
-
            author_uri=HttpUrl("https://example.com/author"),
-
            link=HttpUrl("https://example.com"),
-
            logo=HttpUrl("https://example.com/logo.png"),
-
        )
-
-
        feed_url = HttpUrl("https://example.com/feed.xml")
-
        user_config = metadata.to_user_config("testuser", feed_url)
-
-
        assert user_config.username == "testuser"
-
        assert user_config.feeds == [feed_url]
-
        assert user_config.display_name == "Test Author"
-
        assert user_config.email == "author@example.com"
-
        assert user_config.homepage == HttpUrl("https://example.com/author")
-
        assert user_config.icon == HttpUrl("https://example.com/logo.png")
-
-
    def test_to_user_config_fallbacks(self):
-
        """Test fallback logic in to_user_config."""
-
        metadata = FeedMetadata(
-
            title="Test Feed",
-
            link=HttpUrl("https://example.com"),
-
            icon=HttpUrl("https://example.com/icon.png"),
-
        )
-
-
        feed_url = HttpUrl("https://example.com/feed.xml")
-
        user_config = metadata.to_user_config("testuser", feed_url)
-
-
        assert user_config.display_name == "Test Feed"  # Falls back to title
-
        assert user_config.homepage == HttpUrl("https://example.com")  # Falls back to link
-
        assert user_config.icon == HttpUrl("https://example.com/icon.png")
-
        assert user_config.email is None
-
-
-
class TestUserMetadata:
-
    """Test UserMetadata model."""
-
-
    def test_valid_metadata(self):
-
        """Test creating valid user metadata."""
-
        now = datetime.now()
-
        metadata = UserMetadata(
-
            username="testuser",
-
            directory="testuser",
-
            created=now,
-
            last_updated=now,
-
            feeds=["https://example.com/feed.xml"],
-
            entry_count=5,
-
        )
-
-
        assert metadata.username == "testuser"
-
        assert metadata.directory == "testuser"
-
        assert metadata.entry_count == 5
-
        assert len(metadata.feeds) == 1
-
-
    def test_update_timestamp(self):
-
        """Test updating timestamp."""
-
        now = datetime.now()
-
        metadata = UserMetadata(
-
            username="testuser",
-
            directory="testuser",
-
            created=now,
-
            last_updated=now,
-
        )
-
-
        original_time = metadata.last_updated
-
        metadata.update_timestamp()
-
-
        assert metadata.last_updated > original_time
-
-
    def test_increment_entry_count(self):
-
        """Test incrementing entry count."""
-
        metadata = UserMetadata(
-
            username="testuser",
-
            directory="testuser",
-
            created=datetime.now(),
-
            last_updated=datetime.now(),
-
            entry_count=5,
-
        )
-
-
        original_count = metadata.entry_count
-
        original_time = metadata.last_updated
-
-
        metadata.increment_entry_count(3)
-
-
        assert metadata.entry_count == original_count + 3
-
        assert metadata.last_updated > original_time
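
Worth recording before the file disappears: the TestDuplicateMap cases pin down a simple contract -- lookups never fail, and an ID with no mapping is its own canonical form. A minimal re-implementation satisfying exactly those deleted assertions (an illustrative sketch, not thicket's actual model, which may carry extra fields or validation):

from pydantic import BaseModel, Field


class DuplicateMap(BaseModel):
    # Maps duplicate entry ID -> canonical entry ID.
    duplicates: dict[str, str] = Field(default_factory=dict)

    def add_duplicate(self, dup_id: str, canonical_id: str) -> None:
        self.duplicates[dup_id] = canonical_id

    def is_duplicate(self, entry_id: str) -> bool:
        return entry_id in self.duplicates

    def get_canonical(self, entry_id: str) -> str:
        # An unmapped ID is its own canonical form.
        return self.duplicates.get(entry_id, entry_id)

    def remove_duplicate(self, dup_id: str) -> bool:
        return self.duplicates.pop(dup_id, None) is not None

    def get_duplicates_for_canonical(self, canonical_id: str) -> list[str]:
        return [dup for dup, canon in self.duplicates.items() if canon == canonical_id]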
+1105
uv.lock
···
+
version = 1
+
revision = 2
+
requires-python = ">=3.9"
+
resolution-markers = [
+
"python_full_version >= '3.10'",
+
"python_full_version < '3.10'",
+
]
+
+
[[package]]
+
name = "annotated-types"
+
version = "0.7.0"
+
source = { registry = "https://pypi.org/simple" }
+
sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081, upload-time = "2024-05-20T21:33:25.928Z" }
+
wheels = [
+
{ url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643, upload-time = "2024-05-20T21:33:24.1Z" },
+
]
+
+
[[package]]
+
name = "anyio"
+
version = "4.9.0"
+
source = { registry = "https://pypi.org/simple" }
+
dependencies = [
+
{ name = "exceptiongroup", marker = "python_full_version < '3.11'" },
+
{ name = "idna" },
+
{ name = "sniffio" },
+
{ name = "typing-extensions", marker = "python_full_version < '3.13'" },
+
]
+
sdist = { url = "https://files.pythonhosted.org/packages/95/7d/4c1bd541d4dffa1b52bd83fb8527089e097a106fc90b467a7313b105f840/anyio-4.9.0.tar.gz", hash = "sha256:673c0c244e15788651a4ff38710fea9675823028a6f08a5eda409e0c9840a028", size = 190949, upload-time = "2025-03-17T00:02:54.77Z" }
+
wheels = [
+
{ url = "https://files.pythonhosted.org/packages/a1/ee/48ca1a7c89ffec8b6a0c5d02b89c305671d5ffd8d3c94acf8b8c408575bb/anyio-4.9.0-py3-none-any.whl", hash = "sha256:9f76d541cad6e36af7beb62e978876f3b41e3e04f2c1fbf0884604c0a9c4d93c", size = 100916, upload-time = "2025-03-17T00:02:52.713Z" },
+
]
+
+
[[package]]
+
name = "black"
+
version = "25.1.0"
+
source = { registry = "https://pypi.org/simple" }
+
dependencies = [
+
{ name = "click", version = "8.1.8", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" },
+
{ name = "click", version = "8.2.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" },
+
{ name = "mypy-extensions" },
+
{ name = "packaging" },
+
{ name = "pathspec" },
+
{ name = "platformdirs" },
+
{ name = "tomli", marker = "python_full_version < '3.11'" },
+
{ name = "typing-extensions", marker = "python_full_version < '3.11'" },
+
]
+
sdist = { url = "https://files.pythonhosted.org/packages/94/49/26a7b0f3f35da4b5a65f081943b7bcd22d7002f5f0fb8098ec1ff21cb6ef/black-25.1.0.tar.gz", hash = "sha256:33496d5cd1222ad73391352b4ae8da15253c5de89b93a80b3e2c8d9a19ec2666", size = 649449, upload-time = "2025-01-29T04:15:40.373Z" }
+
wheels = [
+
{ url = "https://files.pythonhosted.org/packages/4d/3b/4ba3f93ac8d90410423fdd31d7541ada9bcee1df32fb90d26de41ed40e1d/black-25.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:759e7ec1e050a15f89b770cefbf91ebee8917aac5c20483bc2d80a6c3a04df32", size = 1629419, upload-time = "2025-01-29T05:37:06.642Z" },
+
{ url = "https://files.pythonhosted.org/packages/b4/02/0bde0485146a8a5e694daed47561785e8b77a0466ccc1f3e485d5ef2925e/black-25.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0e519ecf93120f34243e6b0054db49c00a35f84f195d5bce7e9f5cfc578fc2da", size = 1461080, upload-time = "2025-01-29T05:37:09.321Z" },
+
{ url = "https://files.pythonhosted.org/packages/52/0e/abdf75183c830eaca7589144ff96d49bce73d7ec6ad12ef62185cc0f79a2/black-25.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:055e59b198df7ac0b7efca5ad7ff2516bca343276c466be72eb04a3bcc1f82d7", size = 1766886, upload-time = "2025-01-29T04:18:24.432Z" },
+
{ url = "https://files.pythonhosted.org/packages/dc/a6/97d8bb65b1d8a41f8a6736222ba0a334db7b7b77b8023ab4568288f23973/black-25.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:db8ea9917d6f8fc62abd90d944920d95e73c83a5ee3383493e35d271aca872e9", size = 1419404, upload-time = "2025-01-29T04:19:04.296Z" },
+
{ url = "https://files.pythonhosted.org/packages/7e/4f/87f596aca05c3ce5b94b8663dbfe242a12843caaa82dd3f85f1ffdc3f177/black-25.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a39337598244de4bae26475f77dda852ea00a93bd4c728e09eacd827ec929df0", size = 1614372, upload-time = "2025-01-29T05:37:11.71Z" },
+
{ url = "https://files.pythonhosted.org/packages/e7/d0/2c34c36190b741c59c901e56ab7f6e54dad8df05a6272a9747ecef7c6036/black-25.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:96c1c7cd856bba8e20094e36e0f948718dc688dba4a9d78c3adde52b9e6c2299", size = 1442865, upload-time = "2025-01-29T05:37:14.309Z" },
+
{ url = "https://files.pythonhosted.org/packages/21/d4/7518c72262468430ead45cf22bd86c883a6448b9eb43672765d69a8f1248/black-25.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bce2e264d59c91e52d8000d507eb20a9aca4a778731a08cfff7e5ac4a4bb7096", size = 1749699, upload-time = "2025-01-29T04:18:17.688Z" },
+
{ url = "https://files.pythonhosted.org/packages/58/db/4f5beb989b547f79096e035c4981ceb36ac2b552d0ac5f2620e941501c99/black-25.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:172b1dbff09f86ce6f4eb8edf9dede08b1fce58ba194c87d7a4f1a5aa2f5b3c2", size = 1428028, upload-time = "2025-01-29T04:18:51.711Z" },
+
{ url = "https://files.pythonhosted.org/packages/83/71/3fe4741df7adf015ad8dfa082dd36c94ca86bb21f25608eb247b4afb15b2/black-25.1.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4b60580e829091e6f9238c848ea6750efed72140b91b048770b64e74fe04908b", size = 1650988, upload-time = "2025-01-29T05:37:16.707Z" },
+
{ url = "https://files.pythonhosted.org/packages/13/f3/89aac8a83d73937ccd39bbe8fc6ac8860c11cfa0af5b1c96d081facac844/black-25.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1e2978f6df243b155ef5fa7e558a43037c3079093ed5d10fd84c43900f2d8ecc", size = 1453985, upload-time = "2025-01-29T05:37:18.273Z" },
+
{ url = "https://files.pythonhosted.org/packages/6f/22/b99efca33f1f3a1d2552c714b1e1b5ae92efac6c43e790ad539a163d1754/black-25.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3b48735872ec535027d979e8dcb20bf4f70b5ac75a8ea99f127c106a7d7aba9f", size = 1783816, upload-time = "2025-01-29T04:18:33.823Z" },
+
{ url = "https://files.pythonhosted.org/packages/18/7e/a27c3ad3822b6f2e0e00d63d58ff6299a99a5b3aee69fa77cd4b0076b261/black-25.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:ea0213189960bda9cf99be5b8c8ce66bb054af5e9e861249cd23471bd7b0b3ba", size = 1440860, upload-time = "2025-01-29T04:19:12.944Z" },
+
{ url = "https://files.pythonhosted.org/packages/98/87/0edf98916640efa5d0696e1abb0a8357b52e69e82322628f25bf14d263d1/black-25.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8f0b18a02996a836cc9c9c78e5babec10930862827b1b724ddfe98ccf2f2fe4f", size = 1650673, upload-time = "2025-01-29T05:37:20.574Z" },
+
{ url = "https://files.pythonhosted.org/packages/52/e5/f7bf17207cf87fa6e9b676576749c6b6ed0d70f179a3d812c997870291c3/black-25.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:afebb7098bfbc70037a053b91ae8437c3857482d3a690fefc03e9ff7aa9a5fd3", size = 1453190, upload-time = "2025-01-29T05:37:22.106Z" },
+
{ url = "https://files.pythonhosted.org/packages/e3/ee/adda3d46d4a9120772fae6de454c8495603c37c4c3b9c60f25b1ab6401fe/black-25.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:030b9759066a4ee5e5aca28c3c77f9c64789cdd4de8ac1df642c40b708be6171", size = 1782926, upload-time = "2025-01-29T04:18:58.564Z" },
+
{ url = "https://files.pythonhosted.org/packages/cc/64/94eb5f45dcb997d2082f097a3944cfc7fe87e071907f677e80788a2d7b7a/black-25.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:a22f402b410566e2d1c950708c77ebf5ebd5d0d88a6a2e87c86d9fb48afa0d18", size = 1442613, upload-time = "2025-01-29T04:19:27.63Z" },
+
{ url = "https://files.pythonhosted.org/packages/d3/b6/ae7507470a4830dbbfe875c701e84a4a5fb9183d1497834871a715716a92/black-25.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a1ee0a0c330f7b5130ce0caed9936a904793576ef4d2b98c40835d6a65afa6a0", size = 1628593, upload-time = "2025-01-29T05:37:23.672Z" },
+
{ url = "https://files.pythonhosted.org/packages/24/c1/ae36fa59a59f9363017ed397750a0cd79a470490860bc7713967d89cdd31/black-25.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f3df5f1bf91d36002b0a75389ca8663510cf0531cca8aa5c1ef695b46d98655f", size = 1460000, upload-time = "2025-01-29T05:37:25.829Z" },
+
{ url = "https://files.pythonhosted.org/packages/ac/b6/98f832e7a6c49aa3a464760c67c7856363aa644f2f3c74cf7d624168607e/black-25.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d9e6827d563a2c820772b32ce8a42828dc6790f095f441beef18f96aa6f8294e", size = 1765963, upload-time = "2025-01-29T04:18:38.116Z" },
+
{ url = "https://files.pythonhosted.org/packages/ce/e9/2cb0a017eb7024f70e0d2e9bdb8c5a5b078c5740c7f8816065d06f04c557/black-25.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:bacabb307dca5ebaf9c118d2d2f6903da0d62c9faa82bd21a33eecc319559355", size = 1419419, upload-time = "2025-01-29T04:18:30.191Z" },
+
{ url = "https://files.pythonhosted.org/packages/09/71/54e999902aed72baf26bca0d50781b01838251a462612966e9fc4891eadd/black-25.1.0-py3-none-any.whl", hash = "sha256:95e8176dae143ba9097f351d174fdaf0ccd29efb414b362ae3fd72bf0f710717", size = 207646, upload-time = "2025-01-29T04:15:38.082Z" },
+
]
+
+
[[package]]
+
name = "bleach"
+
version = "6.2.0"
+
source = { registry = "https://pypi.org/simple" }
+
dependencies = [
+
{ name = "webencodings" },
+
]
+
sdist = { url = "https://files.pythonhosted.org/packages/76/9a/0e33f5054c54d349ea62c277191c020c2d6ef1d65ab2cb1993f91ec846d1/bleach-6.2.0.tar.gz", hash = "sha256:123e894118b8a599fd80d3ec1a6d4cc7ce4e5882b1317a7e1ba69b56e95f991f", size = 203083, upload-time = "2024-10-29T18:30:40.477Z" }
+
wheels = [
+
{ url = "https://files.pythonhosted.org/packages/fc/55/96142937f66150805c25c4d0f31ee4132fd33497753400734f9dfdcbdc66/bleach-6.2.0-py3-none-any.whl", hash = "sha256:117d9c6097a7c3d22fd578fcd8d35ff1e125df6736f554da4e432fdd63f31e5e", size = 163406, upload-time = "2024-10-29T18:30:38.186Z" },
+
]
+
+
[[package]]
+
name = "certifi"
+
version = "2025.7.14"
+
source = { registry = "https://pypi.org/simple" }
+
sdist = { url = "https://files.pythonhosted.org/packages/b3/76/52c535bcebe74590f296d6c77c86dabf761c41980e1347a2422e4aa2ae41/certifi-2025.7.14.tar.gz", hash = "sha256:8ea99dbdfaaf2ba2f9bac77b9249ef62ec5218e7c2b2e903378ed5fccf765995", size = 163981, upload-time = "2025-07-14T03:29:28.449Z" }
+
wheels = [
+
{ url = "https://files.pythonhosted.org/packages/4f/52/34c6cf5bb9285074dc3531c437b3919e825d976fde097a7a73f79e726d03/certifi-2025.7.14-py3-none-any.whl", hash = "sha256:6b31f564a415d79ee77df69d757bb49a5bb53bd9f756cbbe24394ffd6fc1f4b2", size = 162722, upload-time = "2025-07-14T03:29:26.863Z" },
+
]
+
+
[[package]]
+
name = "click"
+
version = "8.1.8"
+
source = { registry = "https://pypi.org/simple" }
+
resolution-markers = [
+
"python_full_version < '3.10'",
+
]
+
dependencies = [
+
{ name = "colorama", marker = "python_full_version < '3.10' and sys_platform == 'win32'" },
+
]
+
sdist = { url = "https://files.pythonhosted.org/packages/b9/2e/0090cbf739cee7d23781ad4b89a9894a41538e4fcf4c31dcdd705b78eb8b/click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a", size = 226593, upload-time = "2024-12-21T18:38:44.339Z" }
+
wheels = [
+
{ url = "https://files.pythonhosted.org/packages/7e/d4/7ebdbd03970677812aac39c869717059dbb71a4cfc033ca6e5221787892c/click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2", size = 98188, upload-time = "2024-12-21T18:38:41.666Z" },
+
]
+
+
[[package]]
+
name = "click"
+
version = "8.2.1"
+
source = { registry = "https://pypi.org/simple" }
+
resolution-markers = [
+
"python_full_version >= '3.10'",
+
]
+
dependencies = [
+
{ name = "colorama", marker = "python_full_version >= '3.10' and sys_platform == 'win32'" },
+
]
+
sdist = { url = "https://files.pythonhosted.org/packages/60/6c/8ca2efa64cf75a977a0d7fac081354553ebe483345c734fb6b6515d96bbc/click-8.2.1.tar.gz", hash = "sha256:27c491cc05d968d271d5a1db13e3b5a184636d9d930f148c50b038f0d0646202", size = 286342, upload-time = "2025-05-20T23:19:49.832Z" }
+
wheels = [
+
{ url = "https://files.pythonhosted.org/packages/85/32/10bb5764d90a8eee674e9dc6f4db6a0ab47c8c4d0d83c27f7c39ac415a4d/click-8.2.1-py3-none-any.whl", hash = "sha256:61a3265b914e850b85317d0b3109c7f8cd35a670f963866005d6ef1d5175a12b", size = 102215, upload-time = "2025-05-20T23:19:47.796Z" },
+
]
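+
# Reviewer annotation (TOML comment, not emitted by uv): click is locked at two
+
# versions because requires-python spans 3.9 through 3.13+ and click 8.2 dropped
+
# Python 3.9; the resolution-markers above select 8.1.8 when
+
# python_full_version < '3.10' and 8.2.1 otherwise.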
+
+
[[package]]
+
name = "colorama"
+
version = "0.4.6"
+
source = { registry = "https://pypi.org/simple" }
+
sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" }
+
wheels = [
+
{ url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" },
+
]
+
+
[[package]]
+
name = "coverage"
+
version = "7.9.2"
+
source = { registry = "https://pypi.org/simple" }
+
sdist = { url = "https://files.pythonhosted.org/packages/04/b7/c0465ca253df10a9e8dae0692a4ae6e9726d245390aaef92360e1d6d3832/coverage-7.9.2.tar.gz", hash = "sha256:997024fa51e3290264ffd7492ec97d0690293ccd2b45a6cd7d82d945a4a80c8b", size = 813556, upload-time = "2025-07-03T10:54:15.101Z" }
+
wheels = [
+
{ url = "https://files.pythonhosted.org/packages/a1/0d/5c2114fd776c207bd55068ae8dc1bef63ecd1b767b3389984a8e58f2b926/coverage-7.9.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:66283a192a14a3854b2e7f3418d7db05cdf411012ab7ff5db98ff3b181e1f912", size = 212039, upload-time = "2025-07-03T10:52:38.955Z" },
+
{ url = "https://files.pythonhosted.org/packages/cf/ad/dc51f40492dc2d5fcd31bb44577bc0cc8920757d6bc5d3e4293146524ef9/coverage-7.9.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4e01d138540ef34fcf35c1aa24d06c3de2a4cffa349e29a10056544f35cca15f", size = 212428, upload-time = "2025-07-03T10:52:41.36Z" },
+
{ url = "https://files.pythonhosted.org/packages/a2/a3/55cb3ff1b36f00df04439c3993d8529193cdf165a2467bf1402539070f16/coverage-7.9.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f22627c1fe2745ee98d3ab87679ca73a97e75ca75eb5faee48660d060875465f", size = 241534, upload-time = "2025-07-03T10:52:42.956Z" },
+
{ url = "https://files.pythonhosted.org/packages/eb/c9/a8410b91b6be4f6e9c2e9f0dce93749b6b40b751d7065b4410bf89cb654b/coverage-7.9.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4b1c2d8363247b46bd51f393f86c94096e64a1cf6906803fa8d5a9d03784bdbf", size = 239408, upload-time = "2025-07-03T10:52:44.199Z" },
+
{ url = "https://files.pythonhosted.org/packages/ff/c4/6f3e56d467c612b9070ae71d5d3b114c0b899b5788e1ca3c93068ccb7018/coverage-7.9.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c10c882b114faf82dbd33e876d0cbd5e1d1ebc0d2a74ceef642c6152f3f4d547", size = 240552, upload-time = "2025-07-03T10:52:45.477Z" },
+
{ url = "https://files.pythonhosted.org/packages/fd/20/04eda789d15af1ce79bce5cc5fd64057c3a0ac08fd0576377a3096c24663/coverage-7.9.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:de3c0378bdf7066c3988d66cd5232d161e933b87103b014ab1b0b4676098fa45", size = 240464, upload-time = "2025-07-03T10:52:46.809Z" },
+
{ url = "https://files.pythonhosted.org/packages/a9/5a/217b32c94cc1a0b90f253514815332d08ec0812194a1ce9cca97dda1cd20/coverage-7.9.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:1e2f097eae0e5991e7623958a24ced3282676c93c013dde41399ff63e230fcf2", size = 239134, upload-time = "2025-07-03T10:52:48.149Z" },
+
{ url = "https://files.pythonhosted.org/packages/34/73/1d019c48f413465eb5d3b6898b6279e87141c80049f7dbf73fd020138549/coverage-7.9.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:28dc1f67e83a14e7079b6cea4d314bc8b24d1aed42d3582ff89c0295f09b181e", size = 239405, upload-time = "2025-07-03T10:52:49.687Z" },
+
{ url = "https://files.pythonhosted.org/packages/49/6c/a2beca7aa2595dad0c0d3f350382c381c92400efe5261e2631f734a0e3fe/coverage-7.9.2-cp310-cp310-win32.whl", hash = "sha256:bf7d773da6af9e10dbddacbf4e5cab13d06d0ed93561d44dae0188a42c65be7e", size = 214519, upload-time = "2025-07-03T10:52:51.036Z" },
+
{ url = "https://files.pythonhosted.org/packages/fc/c8/91e5e4a21f9a51e2c7cdd86e587ae01a4fcff06fc3fa8cde4d6f7cf68df4/coverage-7.9.2-cp310-cp310-win_amd64.whl", hash = "sha256:0c0378ba787681ab1897f7c89b415bd56b0b2d9a47e5a3d8dc0ea55aac118d6c", size = 215400, upload-time = "2025-07-03T10:52:52.313Z" },
+
{ url = "https://files.pythonhosted.org/packages/39/40/916786453bcfafa4c788abee4ccd6f592b5b5eca0cd61a32a4e5a7ef6e02/coverage-7.9.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a7a56a2964a9687b6aba5b5ced6971af308ef6f79a91043c05dd4ee3ebc3e9ba", size = 212152, upload-time = "2025-07-03T10:52:53.562Z" },
+
{ url = "https://files.pythonhosted.org/packages/9f/66/cc13bae303284b546a030762957322bbbff1ee6b6cb8dc70a40f8a78512f/coverage-7.9.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:123d589f32c11d9be7fe2e66d823a236fe759b0096f5db3fb1b75b2fa414a4fa", size = 212540, upload-time = "2025-07-03T10:52:55.196Z" },
+
{ url = "https://files.pythonhosted.org/packages/0f/3c/d56a764b2e5a3d43257c36af4a62c379df44636817bb5f89265de4bf8bd7/coverage-7.9.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:333b2e0ca576a7dbd66e85ab402e35c03b0b22f525eed82681c4b866e2e2653a", size = 245097, upload-time = "2025-07-03T10:52:56.509Z" },
+
{ url = "https://files.pythonhosted.org/packages/b1/46/bd064ea8b3c94eb4ca5d90e34d15b806cba091ffb2b8e89a0d7066c45791/coverage-7.9.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:326802760da234baf9f2f85a39e4a4b5861b94f6c8d95251f699e4f73b1835dc", size = 242812, upload-time = "2025-07-03T10:52:57.842Z" },
+
{ url = "https://files.pythonhosted.org/packages/43/02/d91992c2b29bc7afb729463bc918ebe5f361be7f1daae93375a5759d1e28/coverage-7.9.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:19e7be4cfec248df38ce40968c95d3952fbffd57b400d4b9bb580f28179556d2", size = 244617, upload-time = "2025-07-03T10:52:59.239Z" },
+
{ url = "https://files.pythonhosted.org/packages/b7/4f/8fadff6bf56595a16d2d6e33415841b0163ac660873ed9a4e9046194f779/coverage-7.9.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0b4a4cb73b9f2b891c1788711408ef9707666501ba23684387277ededab1097c", size = 244263, upload-time = "2025-07-03T10:53:00.601Z" },
+
{ url = "https://files.pythonhosted.org/packages/9b/d2/e0be7446a2bba11739edb9f9ba4eff30b30d8257370e237418eb44a14d11/coverage-7.9.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:2c8937fa16c8c9fbbd9f118588756e7bcdc7e16a470766a9aef912dd3f117dbd", size = 242314, upload-time = "2025-07-03T10:53:01.932Z" },
+
{ url = "https://files.pythonhosted.org/packages/9d/7d/dcbac9345000121b8b57a3094c2dfcf1ccc52d8a14a40c1d4bc89f936f80/coverage-7.9.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:42da2280c4d30c57a9b578bafd1d4494fa6c056d4c419d9689e66d775539be74", size = 242904, upload-time = "2025-07-03T10:53:03.478Z" },
+
{ url = "https://files.pythonhosted.org/packages/41/58/11e8db0a0c0510cf31bbbdc8caf5d74a358b696302a45948d7c768dfd1cf/coverage-7.9.2-cp311-cp311-win32.whl", hash = "sha256:14fa8d3da147f5fdf9d298cacc18791818f3f1a9f542c8958b80c228320e90c6", size = 214553, upload-time = "2025-07-03T10:53:05.174Z" },
+
{ url = "https://files.pythonhosted.org/packages/3a/7d/751794ec8907a15e257136e48dc1021b1f671220ecccfd6c4eaf30802714/coverage-7.9.2-cp311-cp311-win_amd64.whl", hash = "sha256:549cab4892fc82004f9739963163fd3aac7a7b0df430669b75b86d293d2df2a7", size = 215441, upload-time = "2025-07-03T10:53:06.472Z" },
+
{ url = "https://files.pythonhosted.org/packages/62/5b/34abcedf7b946c1c9e15b44f326cb5b0da852885312b30e916f674913428/coverage-7.9.2-cp311-cp311-win_arm64.whl", hash = "sha256:c2667a2b913e307f06aa4e5677f01a9746cd08e4b35e14ebcde6420a9ebb4c62", size = 213873, upload-time = "2025-07-03T10:53:07.699Z" },
+
{ url = "https://files.pythonhosted.org/packages/53/d7/7deefc6fd4f0f1d4c58051f4004e366afc9e7ab60217ac393f247a1de70a/coverage-7.9.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:ae9eb07f1cfacd9cfe8eaee6f4ff4b8a289a668c39c165cd0c8548484920ffc0", size = 212344, upload-time = "2025-07-03T10:53:09.3Z" },
+
{ url = "https://files.pythonhosted.org/packages/95/0c/ee03c95d32be4d519e6a02e601267769ce2e9a91fc8faa1b540e3626c680/coverage-7.9.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9ce85551f9a1119f02adc46d3014b5ee3f765deac166acf20dbb851ceb79b6f3", size = 212580, upload-time = "2025-07-03T10:53:11.52Z" },
+
{ url = "https://files.pythonhosted.org/packages/8b/9f/826fa4b544b27620086211b87a52ca67592622e1f3af9e0a62c87aea153a/coverage-7.9.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8f6389ac977c5fb322e0e38885fbbf901743f79d47f50db706e7644dcdcb6e1", size = 246383, upload-time = "2025-07-03T10:53:13.134Z" },
+
{ url = "https://files.pythonhosted.org/packages/7f/b3/4477aafe2a546427b58b9c540665feff874f4db651f4d3cb21b308b3a6d2/coverage-7.9.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ff0d9eae8cdfcd58fe7893b88993723583a6ce4dfbfd9f29e001922544f95615", size = 243400, upload-time = "2025-07-03T10:53:14.614Z" },
+
{ url = "https://files.pythonhosted.org/packages/f8/c2/efffa43778490c226d9d434827702f2dfbc8041d79101a795f11cbb2cf1e/coverage-7.9.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fae939811e14e53ed8a9818dad51d434a41ee09df9305663735f2e2d2d7d959b", size = 245591, upload-time = "2025-07-03T10:53:15.872Z" },
+
{ url = "https://files.pythonhosted.org/packages/c6/e7/a59888e882c9a5f0192d8627a30ae57910d5d449c80229b55e7643c078c4/coverage-7.9.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:31991156251ec202c798501e0a42bbdf2169dcb0f137b1f5c0f4267f3fc68ef9", size = 245402, upload-time = "2025-07-03T10:53:17.124Z" },
+
{ url = "https://files.pythonhosted.org/packages/92/a5/72fcd653ae3d214927edc100ce67440ed8a0a1e3576b8d5e6d066ed239db/coverage-7.9.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:d0d67963f9cbfc7c7f96d4ac74ed60ecbebd2ea6eeb51887af0f8dce205e545f", size = 243583, upload-time = "2025-07-03T10:53:18.781Z" },
+
{ url = "https://files.pythonhosted.org/packages/5c/f5/84e70e4df28f4a131d580d7d510aa1ffd95037293da66fd20d446090a13b/coverage-7.9.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:49b752a2858b10580969ec6af6f090a9a440a64a301ac1528d7ca5f7ed497f4d", size = 244815, upload-time = "2025-07-03T10:53:20.168Z" },
+
{ url = "https://files.pythonhosted.org/packages/39/e7/d73d7cbdbd09fdcf4642655ae843ad403d9cbda55d725721965f3580a314/coverage-7.9.2-cp312-cp312-win32.whl", hash = "sha256:88d7598b8ee130f32f8a43198ee02edd16d7f77692fa056cb779616bbea1b355", size = 214719, upload-time = "2025-07-03T10:53:21.521Z" },
+
{ url = "https://files.pythonhosted.org/packages/9f/d6/7486dcc3474e2e6ad26a2af2db7e7c162ccd889c4c68fa14ea8ec189c9e9/coverage-7.9.2-cp312-cp312-win_amd64.whl", hash = "sha256:9dfb070f830739ee49d7c83e4941cc767e503e4394fdecb3b54bfdac1d7662c0", size = 215509, upload-time = "2025-07-03T10:53:22.853Z" },
+
{ url = "https://files.pythonhosted.org/packages/b7/34/0439f1ae2593b0346164d907cdf96a529b40b7721a45fdcf8b03c95fcd90/coverage-7.9.2-cp312-cp312-win_arm64.whl", hash = "sha256:4e2c058aef613e79df00e86b6d42a641c877211384ce5bd07585ed7ba71ab31b", size = 213910, upload-time = "2025-07-03T10:53:24.472Z" },
+
{ url = "https://files.pythonhosted.org/packages/94/9d/7a8edf7acbcaa5e5c489a646226bed9591ee1c5e6a84733c0140e9ce1ae1/coverage-7.9.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:985abe7f242e0d7bba228ab01070fde1d6c8fa12f142e43debe9ed1dde686038", size = 212367, upload-time = "2025-07-03T10:53:25.811Z" },
+
{ url = "https://files.pythonhosted.org/packages/e8/9e/5cd6f130150712301f7e40fb5865c1bc27b97689ec57297e568d972eec3c/coverage-7.9.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:82c3939264a76d44fde7f213924021ed31f55ef28111a19649fec90c0f109e6d", size = 212632, upload-time = "2025-07-03T10:53:27.075Z" },
+
{ url = "https://files.pythonhosted.org/packages/a8/de/6287a2c2036f9fd991c61cefa8c64e57390e30c894ad3aa52fac4c1e14a8/coverage-7.9.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ae5d563e970dbe04382f736ec214ef48103d1b875967c89d83c6e3f21706d5b3", size = 245793, upload-time = "2025-07-03T10:53:28.408Z" },
+
{ url = "https://files.pythonhosted.org/packages/06/cc/9b5a9961d8160e3cb0b558c71f8051fe08aa2dd4b502ee937225da564ed1/coverage-7.9.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bdd612e59baed2a93c8843c9a7cb902260f181370f1d772f4842987535071d14", size = 243006, upload-time = "2025-07-03T10:53:29.754Z" },
+
{ url = "https://files.pythonhosted.org/packages/49/d9/4616b787d9f597d6443f5588619c1c9f659e1f5fc9eebf63699eb6d34b78/coverage-7.9.2-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:256ea87cb2a1ed992bcdfc349d8042dcea1b80436f4ddf6e246d6bee4b5d73b6", size = 244990, upload-time = "2025-07-03T10:53:31.098Z" },
+
{ url = "https://files.pythonhosted.org/packages/48/83/801cdc10f137b2d02b005a761661649ffa60eb173dcdaeb77f571e4dc192/coverage-7.9.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f44ae036b63c8ea432f610534a2668b0c3aee810e7037ab9d8ff6883de480f5b", size = 245157, upload-time = "2025-07-03T10:53:32.717Z" },
+
{ url = "https://files.pythonhosted.org/packages/c8/a4/41911ed7e9d3ceb0ffb019e7635468df7499f5cc3edca5f7dfc078e9c5ec/coverage-7.9.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:82d76ad87c932935417a19b10cfe7abb15fd3f923cfe47dbdaa74ef4e503752d", size = 243128, upload-time = "2025-07-03T10:53:34.009Z" },
+
{ url = "https://files.pythonhosted.org/packages/10/41/344543b71d31ac9cb00a664d5d0c9ef134a0fe87cb7d8430003b20fa0b7d/coverage-7.9.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:619317bb86de4193debc712b9e59d5cffd91dc1d178627ab2a77b9870deb2868", size = 244511, upload-time = "2025-07-03T10:53:35.434Z" },
+
{ url = "https://files.pythonhosted.org/packages/d5/81/3b68c77e4812105e2a060f6946ba9e6f898ddcdc0d2bfc8b4b152a9ae522/coverage-7.9.2-cp313-cp313-win32.whl", hash = "sha256:0a07757de9feb1dfafd16ab651e0f628fd7ce551604d1bf23e47e1ddca93f08a", size = 214765, upload-time = "2025-07-03T10:53:36.787Z" },
+
{ url = "https://files.pythonhosted.org/packages/06/a2/7fac400f6a346bb1a4004eb2a76fbff0e242cd48926a2ce37a22a6a1d917/coverage-7.9.2-cp313-cp313-win_amd64.whl", hash = "sha256:115db3d1f4d3f35f5bb021e270edd85011934ff97c8797216b62f461dd69374b", size = 215536, upload-time = "2025-07-03T10:53:38.188Z" },
+
{ url = "https://files.pythonhosted.org/packages/08/47/2c6c215452b4f90d87017e61ea0fd9e0486bb734cb515e3de56e2c32075f/coverage-7.9.2-cp313-cp313-win_arm64.whl", hash = "sha256:48f82f889c80af8b2a7bb6e158d95a3fbec6a3453a1004d04e4f3b5945a02694", size = 213943, upload-time = "2025-07-03T10:53:39.492Z" },
+
{ url = "https://files.pythonhosted.org/packages/a3/46/e211e942b22d6af5e0f323faa8a9bc7c447a1cf1923b64c47523f36ed488/coverage-7.9.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:55a28954545f9d2f96870b40f6c3386a59ba8ed50caf2d949676dac3ecab99f5", size = 213088, upload-time = "2025-07-03T10:53:40.874Z" },
+
{ url = "https://files.pythonhosted.org/packages/d2/2f/762551f97e124442eccd907bf8b0de54348635b8866a73567eb4e6417acf/coverage-7.9.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:cdef6504637731a63c133bb2e6f0f0214e2748495ec15fe42d1e219d1b133f0b", size = 213298, upload-time = "2025-07-03T10:53:42.218Z" },
+
{ url = "https://files.pythonhosted.org/packages/7a/b7/76d2d132b7baf7360ed69be0bcab968f151fa31abe6d067f0384439d9edb/coverage-7.9.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bcd5ebe66c7a97273d5d2ddd4ad0ed2e706b39630ed4b53e713d360626c3dbb3", size = 256541, upload-time = "2025-07-03T10:53:43.823Z" },
+
{ url = "https://files.pythonhosted.org/packages/a0/17/392b219837d7ad47d8e5974ce5f8dc3deb9f99a53b3bd4d123602f960c81/coverage-7.9.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9303aed20872d7a3c9cb39c5d2b9bdbe44e3a9a1aecb52920f7e7495410dfab8", size = 252761, upload-time = "2025-07-03T10:53:45.19Z" },
+
{ url = "https://files.pythonhosted.org/packages/d5/77/4256d3577fe1b0daa8d3836a1ebe68eaa07dd2cbaf20cf5ab1115d6949d4/coverage-7.9.2-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc18ea9e417a04d1920a9a76fe9ebd2f43ca505b81994598482f938d5c315f46", size = 254917, upload-time = "2025-07-03T10:53:46.931Z" },
+
{ url = "https://files.pythonhosted.org/packages/53/99/fc1a008eef1805e1ddb123cf17af864743354479ea5129a8f838c433cc2c/coverage-7.9.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6406cff19880aaaadc932152242523e892faff224da29e241ce2fca329866584", size = 256147, upload-time = "2025-07-03T10:53:48.289Z" },
+
{ url = "https://files.pythonhosted.org/packages/92/c0/f63bf667e18b7f88c2bdb3160870e277c4874ced87e21426128d70aa741f/coverage-7.9.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:2d0d4f6ecdf37fcc19c88fec3e2277d5dee740fb51ffdd69b9579b8c31e4232e", size = 254261, upload-time = "2025-07-03T10:53:49.99Z" },
+
{ url = "https://files.pythonhosted.org/packages/8c/32/37dd1c42ce3016ff8ec9e4b607650d2e34845c0585d3518b2a93b4830c1a/coverage-7.9.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:c33624f50cf8de418ab2b4d6ca9eda96dc45b2c4231336bac91454520e8d1fac", size = 255099, upload-time = "2025-07-03T10:53:51.354Z" },
+
{ url = "https://files.pythonhosted.org/packages/da/2e/af6b86f7c95441ce82f035b3affe1cd147f727bbd92f563be35e2d585683/coverage-7.9.2-cp313-cp313t-win32.whl", hash = "sha256:1df6b76e737c6a92210eebcb2390af59a141f9e9430210595251fbaf02d46926", size = 215440, upload-time = "2025-07-03T10:53:52.808Z" },
+
{ url = "https://files.pythonhosted.org/packages/4d/bb/8a785d91b308867f6b2e36e41c569b367c00b70c17f54b13ac29bcd2d8c8/coverage-7.9.2-cp313-cp313t-win_amd64.whl", hash = "sha256:f5fd54310b92741ebe00d9c0d1d7b2b27463952c022da6d47c175d246a98d1bd", size = 216537, upload-time = "2025-07-03T10:53:54.273Z" },
+
{ url = "https://files.pythonhosted.org/packages/1d/a0/a6bffb5e0f41a47279fd45a8f3155bf193f77990ae1c30f9c224b61cacb0/coverage-7.9.2-cp313-cp313t-win_arm64.whl", hash = "sha256:c48c2375287108c887ee87d13b4070a381c6537d30e8487b24ec721bf2a781cb", size = 214398, upload-time = "2025-07-03T10:53:56.715Z" },
+
{ url = "https://files.pythonhosted.org/packages/62/ab/b4b06662ccaa00ca7bbee967b7035a33a58b41efb92d8c89a6c523a2ccd5/coverage-7.9.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ddc39510ac922a5c4c27849b739f875d3e1d9e590d1e7b64c98dadf037a16cce", size = 212037, upload-time = "2025-07-03T10:53:58.055Z" },
+
{ url = "https://files.pythonhosted.org/packages/bb/5e/04619995657acc898d15bfad42b510344b3a74d4d5bc34f2e279d46c781c/coverage-7.9.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a535c0c7364acd55229749c2b3e5eebf141865de3a8f697076a3291985f02d30", size = 212412, upload-time = "2025-07-03T10:53:59.451Z" },
+
{ url = "https://files.pythonhosted.org/packages/14/e7/1465710224dc6d31c534e7714cbd907210622a044adc81c810e72eea873f/coverage-7.9.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df0f9ef28e0f20c767ccdccfc5ae5f83a6f4a2fbdfbcbcc8487a8a78771168c8", size = 241164, upload-time = "2025-07-03T10:54:00.852Z" },
+
{ url = "https://files.pythonhosted.org/packages/ab/f2/44c6fbd2794afeb9ab6c0a14d3c088ab1dae3dff3df2624609981237bbb4/coverage-7.9.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2f3da12e0ccbcb348969221d29441ac714bbddc4d74e13923d3d5a7a0bebef7a", size = 239032, upload-time = "2025-07-03T10:54:02.25Z" },
+
{ url = "https://files.pythonhosted.org/packages/6a/d2/7a79845429c0aa2e6788bc45c26a2e3052fa91082c9ea1dea56fb531952c/coverage-7.9.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0a17eaf46f56ae0f870f14a3cbc2e4632fe3771eab7f687eda1ee59b73d09fe4", size = 240148, upload-time = "2025-07-03T10:54:03.618Z" },
+
{ url = "https://files.pythonhosted.org/packages/9c/7d/2731d1b4c9c672d82d30d218224dfc62939cf3800bc8aba0258fefb191f5/coverage-7.9.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:669135a9d25df55d1ed56a11bf555f37c922cf08d80799d4f65d77d7d6123fcf", size = 239875, upload-time = "2025-07-03T10:54:05.022Z" },
+
{ url = "https://files.pythonhosted.org/packages/1b/83/685958715429a9da09cf172c15750ca5c795dd7259466f2645403696557b/coverage-7.9.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:9d3a700304d01a627df9db4322dc082a0ce1e8fc74ac238e2af39ced4c083193", size = 238127, upload-time = "2025-07-03T10:54:06.366Z" },
+
{ url = "https://files.pythonhosted.org/packages/34/ff/161a4313308b3783126790adfae1970adbe4886fda8788792e435249910a/coverage-7.9.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:71ae8b53855644a0b1579d4041304ddc9995c7b21c8a1f16753c4d8903b4dfed", size = 239064, upload-time = "2025-07-03T10:54:07.878Z" },
+
{ url = "https://files.pythonhosted.org/packages/17/14/fe33f41b2e80811021de059621f44c01ebe4d6b08bdb82d54a514488e933/coverage-7.9.2-cp39-cp39-win32.whl", hash = "sha256:dd7a57b33b5cf27acb491e890720af45db05589a80c1ffc798462a765be6d4d7", size = 214522, upload-time = "2025-07-03T10:54:09.331Z" },
+
{ url = "https://files.pythonhosted.org/packages/6e/30/63d850ec31b5c6f6a7b4e853016375b846258300320eda29376e2786ceeb/coverage-7.9.2-cp39-cp39-win_amd64.whl", hash = "sha256:f65bb452e579d5540c8b37ec105dd54d8b9307b07bcaa186818c104ffda22441", size = 215419, upload-time = "2025-07-03T10:54:10.681Z" },
+
{ url = "https://files.pythonhosted.org/packages/d7/85/f8bbefac27d286386961c25515431482a425967e23d3698b75a250872924/coverage-7.9.2-pp39.pp310.pp311-none-any.whl", hash = "sha256:8a1166db2fb62473285bcb092f586e081e92656c7dfa8e9f62b4d39d7e6b5050", size = 204013, upload-time = "2025-07-03T10:54:12.084Z" },
+
{ url = "https://files.pythonhosted.org/packages/3c/38/bbe2e63902847cf79036ecc75550d0698af31c91c7575352eb25190d0fb3/coverage-7.9.2-py3-none-any.whl", hash = "sha256:e425cd5b00f6fc0ed7cdbd766c70be8baab4b7839e4d4fe5fac48581dd968ea4", size = 204005, upload-time = "2025-07-03T10:54:13.491Z" },
+
]
+
+
[package.optional-dependencies]
+
toml = [
+
{ name = "tomli", marker = "python_full_version <= '3.11'" },
+
]
+
+
[[package]]
+
name = "dnspython"
+
version = "2.7.0"
+
source = { registry = "https://pypi.org/simple" }
+
sdist = { url = "https://files.pythonhosted.org/packages/b5/4a/263763cb2ba3816dd94b08ad3a33d5fdae34ecb856678773cc40a3605829/dnspython-2.7.0.tar.gz", hash = "sha256:ce9c432eda0dc91cf618a5cedf1a4e142651196bbcd2c80e89ed5a907e5cfaf1", size = 345197, upload-time = "2024-10-05T20:14:59.362Z" }
+
wheels = [
+
{ url = "https://files.pythonhosted.org/packages/68/1b/e0a87d256e40e8c888847551b20a017a6b98139178505dc7ffb96f04e954/dnspython-2.7.0-py3-none-any.whl", hash = "sha256:b4c34b7d10b51bcc3a5071e7b8dee77939f1e878477eeecc965e9835f63c6c86", size = 313632, upload-time = "2024-10-05T20:14:57.687Z" },
+
]
+
+
[[package]]
+
name = "email-validator"
+
version = "2.2.0"
+
source = { registry = "https://pypi.org/simple" }
+
dependencies = [
+
{ name = "dnspython" },
+
{ name = "idna" },
+
]
+
sdist = { url = "https://files.pythonhosted.org/packages/48/ce/13508a1ec3f8bb981ae4ca79ea40384becc868bfae97fd1c942bb3a001b1/email_validator-2.2.0.tar.gz", hash = "sha256:cb690f344c617a714f22e66ae771445a1ceb46821152df8e165c5f9a364582b7", size = 48967, upload-time = "2024-06-20T11:30:30.034Z" }
+
wheels = [
+
{ url = "https://files.pythonhosted.org/packages/d7/ee/bf0adb559ad3c786f12bcbc9296b3f5675f529199bef03e2df281fa1fadb/email_validator-2.2.0-py3-none-any.whl", hash = "sha256:561977c2d73ce3611850a06fa56b414621e0c8faa9d66f2611407d87465da631", size = 33521, upload-time = "2024-06-20T11:30:28.248Z" },
+
]
+
+
[[package]]
+
name = "exceptiongroup"
+
version = "1.3.0"
+
source = { registry = "https://pypi.org/simple" }
+
dependencies = [
+
{ name = "typing-extensions", marker = "python_full_version < '3.13'" },
+
]
+
sdist = { url = "https://files.pythonhosted.org/packages/0b/9f/a65090624ecf468cdca03533906e7c69ed7588582240cfe7cc9e770b50eb/exceptiongroup-1.3.0.tar.gz", hash = "sha256:b241f5885f560bc56a59ee63ca4c6a8bfa46ae4ad651af316d4e81817bb9fd88", size = 29749, upload-time = "2025-05-10T17:42:51.123Z" }
+
wheels = [
+
{ url = "https://files.pythonhosted.org/packages/36/f4/c6e662dade71f56cd2f3735141b265c3c79293c109549c1e6933b0651ffc/exceptiongroup-1.3.0-py3-none-any.whl", hash = "sha256:4d111e6e0c13d0644cad6ddaa7ed0261a0b36971f6d23e7ec9b4b9097da78a10", size = 16674, upload-time = "2025-05-10T17:42:49.33Z" },
+
]
+
+
[[package]]
+
name = "feedparser"
+
version = "6.0.11"
+
source = { registry = "https://pypi.org/simple" }
+
dependencies = [
+
{ name = "sgmllib3k" },
+
]
+
sdist = { url = "https://files.pythonhosted.org/packages/ff/aa/7af346ebeb42a76bf108027fe7f3328bb4e57a3a96e53e21fd9ef9dd6dd0/feedparser-6.0.11.tar.gz", hash = "sha256:c9d0407b64c6f2a065d0ebb292c2b35c01050cc0dc33757461aaabdc4c4184d5", size = 286197, upload-time = "2023-12-10T16:03:20.854Z" }
+
wheels = [
+
{ url = "https://files.pythonhosted.org/packages/7c/d4/8c31aad9cc18f451c49f7f9cfb5799dadffc88177f7917bc90a66459b1d7/feedparser-6.0.11-py3-none-any.whl", hash = "sha256:0be7ee7b395572b19ebeb1d6aafb0028dee11169f1c934e0ed67d54992f4ad45", size = 81343, upload-time = "2023-12-10T16:03:19.484Z" },
+
]
+
+
[[package]]
+
name = "gitdb"
+
version = "4.0.12"
+
source = { registry = "https://pypi.org/simple" }
+
dependencies = [
+
{ name = "smmap" },
+
]
+
sdist = { url = "https://files.pythonhosted.org/packages/72/94/63b0fc47eb32792c7ba1fe1b694daec9a63620db1e313033d18140c2320a/gitdb-4.0.12.tar.gz", hash = "sha256:5ef71f855d191a3326fcfbc0d5da835f26b13fbcba60c32c21091c349ffdb571", size = 394684, upload-time = "2025-01-02T07:20:46.413Z" }
+
wheels = [
+
{ url = "https://files.pythonhosted.org/packages/a0/61/5c78b91c3143ed5c14207f463aecfc8f9dbb5092fb2869baf37c273b2705/gitdb-4.0.12-py3-none-any.whl", hash = "sha256:67073e15955400952c6565cc3e707c554a4eea2e428946f7a4c162fab9bd9bcf", size = 62794, upload-time = "2025-01-02T07:20:43.624Z" },
+
]
+
+
[[package]]
+
name = "gitpython"
+
version = "3.1.44"
+
source = { registry = "https://pypi.org/simple" }
+
dependencies = [
+
{ name = "gitdb" },
+
]
+
sdist = { url = "https://files.pythonhosted.org/packages/c0/89/37df0b71473153574a5cdef8f242de422a0f5d26d7a9e231e6f169b4ad14/gitpython-3.1.44.tar.gz", hash = "sha256:c87e30b26253bf5418b01b0660f818967f3c503193838337fe5e573331249269", size = 214196, upload-time = "2025-01-02T07:32:43.59Z" }
+
wheels = [
+
{ url = "https://files.pythonhosted.org/packages/1d/9a/4114a9057db2f1462d5c8f8390ab7383925fe1ac012eaa42402ad65c2963/GitPython-3.1.44-py3-none-any.whl", hash = "sha256:9e0e10cda9bed1ee64bc9a6de50e7e38a9c9943241cd7f585f6df3ed28011110", size = 207599, upload-time = "2025-01-02T07:32:40.731Z" },
+
]
+
+
[[package]]
+
name = "h11"
+
version = "0.16.0"
+
source = { registry = "https://pypi.org/simple" }
+
sdist = { url = "https://files.pythonhosted.org/packages/01/ee/02a2c011bdab74c6fb3c75474d40b3052059d95df7e73351460c8588d963/h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1", size = 101250, upload-time = "2025-04-24T03:35:25.427Z" }
+
wheels = [
+
{ url = "https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86", size = 37515, upload-time = "2025-04-24T03:35:24.344Z" },
+
]
+
+
[[package]]
+
name = "httpcore"
+
version = "1.0.9"
+
source = { registry = "https://pypi.org/simple" }
+
dependencies = [
+
{ name = "certifi" },
+
{ name = "h11" },
+
]
+
sdist = { url = "https://files.pythonhosted.org/packages/06/94/82699a10bca87a5556c9c59b5963f2d039dbd239f25bc2a63907a05a14cb/httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8", size = 85484, upload-time = "2025-04-24T22:06:22.219Z" }
+
wheels = [
+
{ url = "https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55", size = 78784, upload-time = "2025-04-24T22:06:20.566Z" },
+
]
+
+
[[package]]
+
name = "httpx"
+
version = "0.28.1"
+
source = { registry = "https://pypi.org/simple" }
+
dependencies = [
+
{ name = "anyio" },
+
{ name = "certifi" },
+
{ name = "httpcore" },
+
{ name = "idna" },
+
]
+
sdist = { url = "https://files.pythonhosted.org/packages/b1/df/48c586a5fe32a0f01324ee087459e112ebb7224f646c0b5023f5e79e9956/httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc", size = 141406, upload-time = "2024-12-06T15:37:23.222Z" }
+
wheels = [
+
{ url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517, upload-time = "2024-12-06T15:37:21.509Z" },
+
]
+
+
[[package]]
+
name = "idna"
+
version = "3.10"
+
source = { registry = "https://pypi.org/simple" }
+
sdist = { url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490, upload-time = "2024-09-15T18:07:39.745Z" }
+
wheels = [
+
{ url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442, upload-time = "2024-09-15T18:07:37.964Z" },
+
]
+
+
[[package]]
+
name = "iniconfig"
+
version = "2.1.0"
+
source = { registry = "https://pypi.org/simple" }
+
sdist = { url = "https://files.pythonhosted.org/packages/f2/97/ebf4da567aa6827c909642694d71c9fcf53e5b504f2d96afea02718862f3/iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7", size = 4793, upload-time = "2025-03-19T20:09:59.721Z" }
+
wheels = [
+
{ url = "https://files.pythonhosted.org/packages/2c/e1/e6716421ea10d38022b952c159d5161ca1193197fb744506875fbb87ea7b/iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760", size = 6050, upload-time = "2025-03-19T20:10:01.071Z" },
+
]
+
+
[[package]]
+
name = "jinja2"
+
version = "3.1.6"
+
source = { registry = "https://pypi.org/simple" }
+
dependencies = [
+
{ name = "markupsafe" },
+
]
+
sdist = { url = "https://files.pythonhosted.org/packages/df/bf/f7da0350254c0ed7c72f3e33cef02e048281fec7ecec5f032d4aac52226b/jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d", size = 245115, upload-time = "2025-03-05T20:05:02.478Z" }
+
wheels = [
+
{ url = "https://files.pythonhosted.org/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67", size = 134899, upload-time = "2025-03-05T20:05:00.369Z" },
+
]
+
+
[[package]]
+
name = "markdown-it-py"
+
version = "3.0.0"
+
source = { registry = "https://pypi.org/simple" }
+
dependencies = [
+
{ name = "mdurl" },
+
]
+
sdist = { url = "https://files.pythonhosted.org/packages/38/71/3b932df36c1a044d397a1f92d1cf91ee0a503d91e470cbd670aa66b07ed0/markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb", size = 74596, upload-time = "2023-06-03T06:41:14.443Z" }
+
wheels = [
+
{ url = "https://files.pythonhosted.org/packages/42/d7/1ec15b46af6af88f19b8e5ffea08fa375d433c998b8a7639e76935c14f1f/markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1", size = 87528, upload-time = "2023-06-03T06:41:11.019Z" },
+
]
+
+
[[package]]
+
name = "markupsafe"
+
version = "3.0.2"
+
source = { registry = "https://pypi.org/simple" }
+
sdist = { url = "https://files.pythonhosted.org/packages/b2/97/5d42485e71dfc078108a86d6de8fa46db44a1a9295e89c5d6d4a06e23a62/markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0", size = 20537, upload-time = "2024-10-18T15:21:54.129Z" }
+
wheels = [
+
{ url = "https://files.pythonhosted.org/packages/04/90/d08277ce111dd22f77149fd1a5d4653eeb3b3eaacbdfcbae5afb2600eebd/MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8", size = 14357, upload-time = "2024-10-18T15:20:51.44Z" },
+
{ url = "https://files.pythonhosted.org/packages/04/e1/6e2194baeae0bca1fae6629dc0cbbb968d4d941469cbab11a3872edff374/MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158", size = 12393, upload-time = "2024-10-18T15:20:52.426Z" },
+
{ url = "https://files.pythonhosted.org/packages/1d/69/35fa85a8ece0a437493dc61ce0bb6d459dcba482c34197e3efc829aa357f/MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38a9ef736c01fccdd6600705b09dc574584b89bea478200c5fbf112a6b0d5579", size = 21732, upload-time = "2024-10-18T15:20:53.578Z" },
+
{ url = "https://files.pythonhosted.org/packages/22/35/137da042dfb4720b638d2937c38a9c2df83fe32d20e8c8f3185dbfef05f7/MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbcb445fa71794da8f178f0f6d66789a28d7319071af7a496d4d507ed566270d", size = 20866, upload-time = "2024-10-18T15:20:55.06Z" },
+
{ url = "https://files.pythonhosted.org/packages/29/28/6d029a903727a1b62edb51863232152fd335d602def598dade38996887f0/MarkupSafe-3.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57cb5a3cf367aeb1d316576250f65edec5bb3be939e9247ae594b4bcbc317dfb", size = 20964, upload-time = "2024-10-18T15:20:55.906Z" },
+
{ url = "https://files.pythonhosted.org/packages/cc/cd/07438f95f83e8bc028279909d9c9bd39e24149b0d60053a97b2bc4f8aa51/MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3809ede931876f5b2ec92eef964286840ed3540dadf803dd570c3b7e13141a3b", size = 21977, upload-time = "2024-10-18T15:20:57.189Z" },
+
{ url = "https://files.pythonhosted.org/packages/29/01/84b57395b4cc062f9c4c55ce0df7d3108ca32397299d9df00fedd9117d3d/MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e07c3764494e3776c602c1e78e298937c3315ccc9043ead7e685b7f2b8d47b3c", size = 21366, upload-time = "2024-10-18T15:20:58.235Z" },
+
{ url = "https://files.pythonhosted.org/packages/bd/6e/61ebf08d8940553afff20d1fb1ba7294b6f8d279df9fd0c0db911b4bbcfd/MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b424c77b206d63d500bcb69fa55ed8d0e6a3774056bdc4839fc9298a7edca171", size = 21091, upload-time = "2024-10-18T15:20:59.235Z" },
+
{ url = "https://files.pythonhosted.org/packages/11/23/ffbf53694e8c94ebd1e7e491de185124277964344733c45481f32ede2499/MarkupSafe-3.0.2-cp310-cp310-win32.whl", hash = "sha256:fcabf5ff6eea076f859677f5f0b6b5c1a51e70a376b0579e0eadef8db48c6b50", size = 15065, upload-time = "2024-10-18T15:21:00.307Z" },
+
{ url = "https://files.pythonhosted.org/packages/44/06/e7175d06dd6e9172d4a69a72592cb3f7a996a9c396eee29082826449bbc3/MarkupSafe-3.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:6af100e168aa82a50e186c82875a5893c5597a0c1ccdb0d8b40240b1f28b969a", size = 15514, upload-time = "2024-10-18T15:21:01.122Z" },
+
{ url = "https://files.pythonhosted.org/packages/6b/28/bbf83e3f76936960b850435576dd5e67034e200469571be53f69174a2dfd/MarkupSafe-3.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d", size = 14353, upload-time = "2024-10-18T15:21:02.187Z" },
+
{ url = "https://files.pythonhosted.org/packages/6c/30/316d194b093cde57d448a4c3209f22e3046c5bb2fb0820b118292b334be7/MarkupSafe-3.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93", size = 12392, upload-time = "2024-10-18T15:21:02.941Z" },
+
{ url = "https://files.pythonhosted.org/packages/f2/96/9cdafba8445d3a53cae530aaf83c38ec64c4d5427d975c974084af5bc5d2/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832", size = 23984, upload-time = "2024-10-18T15:21:03.953Z" },
+
{ url = "https://files.pythonhosted.org/packages/f1/a4/aefb044a2cd8d7334c8a47d3fb2c9f328ac48cb349468cc31c20b539305f/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84", size = 23120, upload-time = "2024-10-18T15:21:06.495Z" },
+
{ url = "https://files.pythonhosted.org/packages/8d/21/5e4851379f88f3fad1de30361db501300d4f07bcad047d3cb0449fc51f8c/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca", size = 23032, upload-time = "2024-10-18T15:21:07.295Z" },
+
{ url = "https://files.pythonhosted.org/packages/00/7b/e92c64e079b2d0d7ddf69899c98842f3f9a60a1ae72657c89ce2655c999d/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798", size = 24057, upload-time = "2024-10-18T15:21:08.073Z" },
+
{ url = "https://files.pythonhosted.org/packages/f9/ac/46f960ca323037caa0a10662ef97d0a4728e890334fc156b9f9e52bcc4ca/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e", size = 23359, upload-time = "2024-10-18T15:21:09.318Z" },
+
{ url = "https://files.pythonhosted.org/packages/69/84/83439e16197337b8b14b6a5b9c2105fff81d42c2a7c5b58ac7b62ee2c3b1/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4", size = 23306, upload-time = "2024-10-18T15:21:10.185Z" },
+
{ url = "https://files.pythonhosted.org/packages/9a/34/a15aa69f01e2181ed8d2b685c0d2f6655d5cca2c4db0ddea775e631918cd/MarkupSafe-3.0.2-cp311-cp311-win32.whl", hash = "sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d", size = 15094, upload-time = "2024-10-18T15:21:11.005Z" },
+
{ url = "https://files.pythonhosted.org/packages/da/b8/3a3bd761922d416f3dc5d00bfbed11f66b1ab89a0c2b6e887240a30b0f6b/MarkupSafe-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b", size = 15521, upload-time = "2024-10-18T15:21:12.911Z" },
+
{ url = "https://files.pythonhosted.org/packages/22/09/d1f21434c97fc42f09d290cbb6350d44eb12f09cc62c9476effdb33a18aa/MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf", size = 14274, upload-time = "2024-10-18T15:21:13.777Z" },
+
{ url = "https://files.pythonhosted.org/packages/6b/b0/18f76bba336fa5aecf79d45dcd6c806c280ec44538b3c13671d49099fdd0/MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225", size = 12348, upload-time = "2024-10-18T15:21:14.822Z" },
+
{ url = "https://files.pythonhosted.org/packages/e0/25/dd5c0f6ac1311e9b40f4af06c78efde0f3b5cbf02502f8ef9501294c425b/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028", size = 24149, upload-time = "2024-10-18T15:21:15.642Z" },
+
{ url = "https://files.pythonhosted.org/packages/f3/f0/89e7aadfb3749d0f52234a0c8c7867877876e0a20b60e2188e9850794c17/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8", size = 23118, upload-time = "2024-10-18T15:21:17.133Z" },
+
{ url = "https://files.pythonhosted.org/packages/d5/da/f2eeb64c723f5e3777bc081da884b414671982008c47dcc1873d81f625b6/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c", size = 22993, upload-time = "2024-10-18T15:21:18.064Z" },
+
{ url = "https://files.pythonhosted.org/packages/da/0e/1f32af846df486dce7c227fe0f2398dc7e2e51d4a370508281f3c1c5cddc/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557", size = 24178, upload-time = "2024-10-18T15:21:18.859Z" },
+
{ url = "https://files.pythonhosted.org/packages/c4/f6/bb3ca0532de8086cbff5f06d137064c8410d10779c4c127e0e47d17c0b71/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22", size = 23319, upload-time = "2024-10-18T15:21:19.671Z" },
+
{ url = "https://files.pythonhosted.org/packages/a2/82/8be4c96ffee03c5b4a034e60a31294daf481e12c7c43ab8e34a1453ee48b/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48", size = 23352, upload-time = "2024-10-18T15:21:20.971Z" },
+
{ url = "https://files.pythonhosted.org/packages/51/ae/97827349d3fcffee7e184bdf7f41cd6b88d9919c80f0263ba7acd1bbcb18/MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = "sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30", size = 15097, upload-time = "2024-10-18T15:21:22.646Z" },
+
{ url = "https://files.pythonhosted.org/packages/c1/80/a61f99dc3a936413c3ee4e1eecac96c0da5ed07ad56fd975f1a9da5bc630/MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87", size = 15601, upload-time = "2024-10-18T15:21:23.499Z" },
+
{ url = "https://files.pythonhosted.org/packages/83/0e/67eb10a7ecc77a0c2bbe2b0235765b98d164d81600746914bebada795e97/MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd", size = 14274, upload-time = "2024-10-18T15:21:24.577Z" },
+
{ url = "https://files.pythonhosted.org/packages/2b/6d/9409f3684d3335375d04e5f05744dfe7e9f120062c9857df4ab490a1031a/MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430", size = 12352, upload-time = "2024-10-18T15:21:25.382Z" },
+
{ url = "https://files.pythonhosted.org/packages/d2/f5/6eadfcd3885ea85fe2a7c128315cc1bb7241e1987443d78c8fe712d03091/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094", size = 24122, upload-time = "2024-10-18T15:21:26.199Z" },
+
{ url = "https://files.pythonhosted.org/packages/0c/91/96cf928db8236f1bfab6ce15ad070dfdd02ed88261c2afafd4b43575e9e9/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396", size = 23085, upload-time = "2024-10-18T15:21:27.029Z" },
+
{ url = "https://files.pythonhosted.org/packages/c2/cf/c9d56af24d56ea04daae7ac0940232d31d5a8354f2b457c6d856b2057d69/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79", size = 22978, upload-time = "2024-10-18T15:21:27.846Z" },
+
{ url = "https://files.pythonhosted.org/packages/2a/9f/8619835cd6a711d6272d62abb78c033bda638fdc54c4e7f4272cf1c0962b/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a", size = 24208, upload-time = "2024-10-18T15:21:28.744Z" },
+
{ url = "https://files.pythonhosted.org/packages/f9/bf/176950a1792b2cd2102b8ffeb5133e1ed984547b75db47c25a67d3359f77/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca", size = 23357, upload-time = "2024-10-18T15:21:29.545Z" },
+
{ url = "https://files.pythonhosted.org/packages/ce/4f/9a02c1d335caabe5c4efb90e1b6e8ee944aa245c1aaaab8e8a618987d816/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c", size = 23344, upload-time = "2024-10-18T15:21:30.366Z" },
+
{ url = "https://files.pythonhosted.org/packages/ee/55/c271b57db36f748f0e04a759ace9f8f759ccf22b4960c270c78a394f58be/MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1", size = 15101, upload-time = "2024-10-18T15:21:31.207Z" },
+
{ url = "https://files.pythonhosted.org/packages/29/88/07df22d2dd4df40aba9f3e402e6dc1b8ee86297dddbad4872bd5e7b0094f/MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f", size = 15603, upload-time = "2024-10-18T15:21:32.032Z" },
+
{ url = "https://files.pythonhosted.org/packages/62/6a/8b89d24db2d32d433dffcd6a8779159da109842434f1dd2f6e71f32f738c/MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c", size = 14510, upload-time = "2024-10-18T15:21:33.625Z" },
+
{ url = "https://files.pythonhosted.org/packages/7a/06/a10f955f70a2e5a9bf78d11a161029d278eeacbd35ef806c3fd17b13060d/MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb", size = 12486, upload-time = "2024-10-18T15:21:34.611Z" },
+
{ url = "https://files.pythonhosted.org/packages/34/cf/65d4a571869a1a9078198ca28f39fba5fbb910f952f9dbc5220afff9f5e6/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c", size = 25480, upload-time = "2024-10-18T15:21:35.398Z" },
+
{ url = "https://files.pythonhosted.org/packages/0c/e3/90e9651924c430b885468b56b3d597cabf6d72be4b24a0acd1fa0e12af67/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d", size = 23914, upload-time = "2024-10-18T15:21:36.231Z" },
+
{ url = "https://files.pythonhosted.org/packages/66/8c/6c7cf61f95d63bb866db39085150df1f2a5bd3335298f14a66b48e92659c/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe", size = 23796, upload-time = "2024-10-18T15:21:37.073Z" },
+
{ url = "https://files.pythonhosted.org/packages/bb/35/cbe9238ec3f47ac9a7c8b3df7a808e7cb50fe149dc7039f5f454b3fba218/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5", size = 25473, upload-time = "2024-10-18T15:21:37.932Z" },
+
{ url = "https://files.pythonhosted.org/packages/e6/32/7621a4382488aa283cc05e8984a9c219abad3bca087be9ec77e89939ded9/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a", size = 24114, upload-time = "2024-10-18T15:21:39.799Z" },
+
{ url = "https://files.pythonhosted.org/packages/0d/80/0985960e4b89922cb5a0bac0ed39c5b96cbc1a536a99f30e8c220a996ed9/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9", size = 24098, upload-time = "2024-10-18T15:21:40.813Z" },
+
{ url = "https://files.pythonhosted.org/packages/82/78/fedb03c7d5380df2427038ec8d973587e90561b2d90cd472ce9254cf348b/MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6", size = 15208, upload-time = "2024-10-18T15:21:41.814Z" },
+
{ url = "https://files.pythonhosted.org/packages/4f/65/6079a46068dfceaeabb5dcad6d674f5f5c61a6fa5673746f42a9f4c233b3/MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f", size = 15739, upload-time = "2024-10-18T15:21:42.784Z" },
+
{ url = "https://files.pythonhosted.org/packages/a7/ea/9b1530c3fdeeca613faeb0fb5cbcf2389d816072fab72a71b45749ef6062/MarkupSafe-3.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:eaa0a10b7f72326f1372a713e73c3f739b524b3af41feb43e4921cb529f5929a", size = 14344, upload-time = "2024-10-18T15:21:43.721Z" },
+
{ url = "https://files.pythonhosted.org/packages/4b/c2/fbdbfe48848e7112ab05e627e718e854d20192b674952d9042ebd8c9e5de/MarkupSafe-3.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:48032821bbdf20f5799ff537c7ac3d1fba0ba032cfc06194faffa8cda8b560ff", size = 12389, upload-time = "2024-10-18T15:21:44.666Z" },
+
{ url = "https://files.pythonhosted.org/packages/f0/25/7a7c6e4dbd4f867d95d94ca15449e91e52856f6ed1905d58ef1de5e211d0/MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a9d3f5f0901fdec14d8d2f66ef7d035f2157240a433441719ac9a3fba440b13", size = 21607, upload-time = "2024-10-18T15:21:45.452Z" },
+
{ url = "https://files.pythonhosted.org/packages/53/8f/f339c98a178f3c1e545622206b40986a4c3307fe39f70ccd3d9df9a9e425/MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88b49a3b9ff31e19998750c38e030fc7bb937398b1f78cfa599aaef92d693144", size = 20728, upload-time = "2024-10-18T15:21:46.295Z" },
+
{ url = "https://files.pythonhosted.org/packages/1a/03/8496a1a78308456dbd50b23a385c69b41f2e9661c67ea1329849a598a8f9/MarkupSafe-3.0.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cfad01eed2c2e0c01fd0ecd2ef42c492f7f93902e39a42fc9ee1692961443a29", size = 20826, upload-time = "2024-10-18T15:21:47.134Z" },
+
{ url = "https://files.pythonhosted.org/packages/e6/cf/0a490a4bd363048c3022f2f475c8c05582179bb179defcee4766fb3dcc18/MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1225beacc926f536dc82e45f8a4d68502949dc67eea90eab715dea3a21c1b5f0", size = 21843, upload-time = "2024-10-18T15:21:48.334Z" },
+
{ url = "https://files.pythonhosted.org/packages/19/a3/34187a78613920dfd3cdf68ef6ce5e99c4f3417f035694074beb8848cd77/MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:3169b1eefae027567d1ce6ee7cae382c57fe26e82775f460f0b2778beaad66c0", size = 21219, upload-time = "2024-10-18T15:21:49.587Z" },
+
{ url = "https://files.pythonhosted.org/packages/17/d8/5811082f85bb88410ad7e452263af048d685669bbbfb7b595e8689152498/MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:eb7972a85c54febfb25b5c4b4f3af4dcc731994c7da0d8a0b4a6eb0640e1d178", size = 20946, upload-time = "2024-10-18T15:21:50.441Z" },
+
{ url = "https://files.pythonhosted.org/packages/7c/31/bd635fb5989440d9365c5e3c47556cfea121c7803f5034ac843e8f37c2f2/MarkupSafe-3.0.2-cp39-cp39-win32.whl", hash = "sha256:8c4e8c3ce11e1f92f6536ff07154f9d49677ebaaafc32db9db4620bc11ed480f", size = 15063, upload-time = "2024-10-18T15:21:51.385Z" },
+
{ url = "https://files.pythonhosted.org/packages/b3/73/085399401383ce949f727afec55ec3abd76648d04b9f22e1c0e99cb4bec3/MarkupSafe-3.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:6e296a513ca3d94054c2c881cc913116e90fd030ad1c656b3869762b754f5f8a", size = 15506, upload-time = "2024-10-18T15:21:52.974Z" },
+
]
+
+
[[package]]
+
name = "mdurl"
+
version = "0.1.2"
+
source = { registry = "https://pypi.org/simple" }
+
sdist = { url = "https://files.pythonhosted.org/packages/d6/54/cfe61301667036ec958cb99bd3efefba235e65cdeb9c84d24a8293ba1d90/mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba", size = 8729, upload-time = "2022-08-14T12:40:10.846Z" }
+
wheels = [
+
{ url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979, upload-time = "2022-08-14T12:40:09.779Z" },
+
]
+
+
[[package]]
+
name = "mypy"
+
version = "1.17.0"
+
source = { registry = "https://pypi.org/simple" }
+
dependencies = [
+
{ name = "mypy-extensions" },
+
{ name = "pathspec" },
+
{ name = "tomli", marker = "python_full_version < '3.11'" },
+
{ name = "typing-extensions" },
+
]
+
sdist = { url = "https://files.pythonhosted.org/packages/1e/e3/034322d5a779685218ed69286c32faa505247f1f096251ef66c8fd203b08/mypy-1.17.0.tar.gz", hash = "sha256:e5d7ccc08ba089c06e2f5629c660388ef1fee708444f1dee0b9203fa031dee03", size = 3352114, upload-time = "2025-07-14T20:34:30.181Z" }
+
wheels = [
+
{ url = "https://files.pythonhosted.org/packages/6a/31/e762baa3b73905c856d45ab77b4af850e8159dffffd86a52879539a08c6b/mypy-1.17.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f8e08de6138043108b3b18f09d3f817a4783912e48828ab397ecf183135d84d6", size = 10998313, upload-time = "2025-07-14T20:33:24.519Z" },
+
{ url = "https://files.pythonhosted.org/packages/1c/c1/25b2f0d46fb7e0b5e2bee61ec3a47fe13eff9e3c2f2234f144858bbe6485/mypy-1.17.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ce4a17920ec144647d448fc43725b5873548b1aae6c603225626747ededf582d", size = 10128922, upload-time = "2025-07-14T20:34:06.414Z" },
+
{ url = "https://files.pythonhosted.org/packages/02/78/6d646603a57aa8a2886df1b8881fe777ea60f28098790c1089230cd9c61d/mypy-1.17.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6ff25d151cc057fdddb1cb1881ef36e9c41fa2a5e78d8dd71bee6e4dcd2bc05b", size = 11913524, upload-time = "2025-07-14T20:33:19.109Z" },
+
{ url = "https://files.pythonhosted.org/packages/4f/19/dae6c55e87ee426fb76980f7e78484450cad1c01c55a1dc4e91c930bea01/mypy-1.17.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:93468cf29aa9a132bceb103bd8475f78cacde2b1b9a94fd978d50d4bdf616c9a", size = 12650527, upload-time = "2025-07-14T20:32:44.095Z" },
+
{ url = "https://files.pythonhosted.org/packages/86/e1/f916845a235235a6c1e4d4d065a3930113767001d491b8b2e1b61ca56647/mypy-1.17.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:98189382b310f16343151f65dd7e6867386d3e35f7878c45cfa11383d175d91f", size = 12897284, upload-time = "2025-07-14T20:33:38.168Z" },
+
{ url = "https://files.pythonhosted.org/packages/ae/dc/414760708a4ea1b096bd214d26a24e30ac5e917ef293bc33cdb6fe22d2da/mypy-1.17.0-cp310-cp310-win_amd64.whl", hash = "sha256:c004135a300ab06a045c1c0d8e3f10215e71d7b4f5bb9a42ab80236364429937", size = 9506493, upload-time = "2025-07-14T20:34:01.093Z" },
+
{ url = "https://files.pythonhosted.org/packages/d4/24/82efb502b0b0f661c49aa21cfe3e1999ddf64bf5500fc03b5a1536a39d39/mypy-1.17.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9d4fe5c72fd262d9c2c91c1117d16aac555e05f5beb2bae6a755274c6eec42be", size = 10914150, upload-time = "2025-07-14T20:31:51.985Z" },
+
{ url = "https://files.pythonhosted.org/packages/03/96/8ef9a6ff8cedadff4400e2254689ca1dc4b420b92c55255b44573de10c54/mypy-1.17.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d96b196e5c16f41b4f7736840e8455958e832871990c7ba26bf58175e357ed61", size = 10039845, upload-time = "2025-07-14T20:32:30.527Z" },
+
{ url = "https://files.pythonhosted.org/packages/df/32/7ce359a56be779d38021d07941cfbb099b41411d72d827230a36203dbb81/mypy-1.17.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:73a0ff2dd10337ceb521c080d4147755ee302dcde6e1a913babd59473904615f", size = 11837246, upload-time = "2025-07-14T20:32:01.28Z" },
+
{ url = "https://files.pythonhosted.org/packages/82/16/b775047054de4d8dbd668df9137707e54b07fe18c7923839cd1e524bf756/mypy-1.17.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:24cfcc1179c4447854e9e406d3af0f77736d631ec87d31c6281ecd5025df625d", size = 12571106, upload-time = "2025-07-14T20:34:26.942Z" },
+
{ url = "https://files.pythonhosted.org/packages/a1/cf/fa33eaf29a606102c8d9ffa45a386a04c2203d9ad18bf4eef3e20c43ebc8/mypy-1.17.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:3c56f180ff6430e6373db7a1d569317675b0a451caf5fef6ce4ab365f5f2f6c3", size = 12759960, upload-time = "2025-07-14T20:33:42.882Z" },
+
{ url = "https://files.pythonhosted.org/packages/94/75/3f5a29209f27e739ca57e6350bc6b783a38c7621bdf9cac3ab8a08665801/mypy-1.17.0-cp311-cp311-win_amd64.whl", hash = "sha256:eafaf8b9252734400f9b77df98b4eee3d2eecab16104680d51341c75702cad70", size = 9503888, upload-time = "2025-07-14T20:32:34.392Z" },
+
{ url = "https://files.pythonhosted.org/packages/12/e9/e6824ed620bbf51d3bf4d6cbbe4953e83eaf31a448d1b3cfb3620ccb641c/mypy-1.17.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f986f1cab8dbec39ba6e0eaa42d4d3ac6686516a5d3dccd64be095db05ebc6bb", size = 11086395, upload-time = "2025-07-14T20:34:11.452Z" },
+
{ url = "https://files.pythonhosted.org/packages/ba/51/a4afd1ae279707953be175d303f04a5a7bd7e28dc62463ad29c1c857927e/mypy-1.17.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:51e455a54d199dd6e931cd7ea987d061c2afbaf0960f7f66deef47c90d1b304d", size = 10120052, upload-time = "2025-07-14T20:33:09.897Z" },
+
{ url = "https://files.pythonhosted.org/packages/8a/71/19adfeac926ba8205f1d1466d0d360d07b46486bf64360c54cb5a2bd86a8/mypy-1.17.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3204d773bab5ff4ebbd1f8efa11b498027cd57017c003ae970f310e5b96be8d8", size = 11861806, upload-time = "2025-07-14T20:32:16.028Z" },
+
{ url = "https://files.pythonhosted.org/packages/0b/64/d6120eca3835baf7179e6797a0b61d6c47e0bc2324b1f6819d8428d5b9ba/mypy-1.17.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1051df7ec0886fa246a530ae917c473491e9a0ba6938cfd0ec2abc1076495c3e", size = 12744371, upload-time = "2025-07-14T20:33:33.503Z" },
+
{ url = "https://files.pythonhosted.org/packages/1f/dc/56f53b5255a166f5bd0f137eed960e5065f2744509dfe69474ff0ba772a5/mypy-1.17.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f773c6d14dcc108a5b141b4456b0871df638eb411a89cd1c0c001fc4a9d08fc8", size = 12914558, upload-time = "2025-07-14T20:33:56.961Z" },
+
{ url = "https://files.pythonhosted.org/packages/69/ac/070bad311171badc9add2910e7f89271695a25c136de24bbafc7eded56d5/mypy-1.17.0-cp312-cp312-win_amd64.whl", hash = "sha256:1619a485fd0e9c959b943c7b519ed26b712de3002d7de43154a489a2d0fd817d", size = 9585447, upload-time = "2025-07-14T20:32:20.594Z" },
+
{ url = "https://files.pythonhosted.org/packages/be/7b/5f8ab461369b9e62157072156935cec9d272196556bdc7c2ff5f4c7c0f9b/mypy-1.17.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:2c41aa59211e49d717d92b3bb1238c06d387c9325d3122085113c79118bebb06", size = 11070019, upload-time = "2025-07-14T20:32:07.99Z" },
+
{ url = "https://files.pythonhosted.org/packages/9c/f8/c49c9e5a2ac0badcc54beb24e774d2499748302c9568f7f09e8730e953fa/mypy-1.17.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0e69db1fb65b3114f98c753e3930a00514f5b68794ba80590eb02090d54a5d4a", size = 10114457, upload-time = "2025-07-14T20:33:47.285Z" },
+
{ url = "https://files.pythonhosted.org/packages/89/0c/fb3f9c939ad9beed3e328008b3fb90b20fda2cddc0f7e4c20dbefefc3b33/mypy-1.17.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:03ba330b76710f83d6ac500053f7727270b6b8553b0423348ffb3af6f2f7b889", size = 11857838, upload-time = "2025-07-14T20:33:14.462Z" },
+
{ url = "https://files.pythonhosted.org/packages/4c/66/85607ab5137d65e4f54d9797b77d5a038ef34f714929cf8ad30b03f628df/mypy-1.17.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:037bc0f0b124ce46bfde955c647f3e395c6174476a968c0f22c95a8d2f589bba", size = 12731358, upload-time = "2025-07-14T20:32:25.579Z" },
+
{ url = "https://files.pythonhosted.org/packages/73/d0/341dbbfb35ce53d01f8f2969facbb66486cee9804048bf6c01b048127501/mypy-1.17.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:c38876106cb6132259683632b287238858bd58de267d80defb6f418e9ee50658", size = 12917480, upload-time = "2025-07-14T20:34:21.868Z" },
+
{ url = "https://files.pythonhosted.org/packages/64/63/70c8b7dbfc520089ac48d01367a97e8acd734f65bd07813081f508a8c94c/mypy-1.17.0-cp313-cp313-win_amd64.whl", hash = "sha256:d30ba01c0f151998f367506fab31c2ac4527e6a7b2690107c7a7f9e3cb419a9c", size = 9589666, upload-time = "2025-07-14T20:34:16.841Z" },
+
{ url = "https://files.pythonhosted.org/packages/9f/a0/6263dd11941231f688f0a8f2faf90ceac1dc243d148d314a089d2fe25108/mypy-1.17.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:63e751f1b5ab51d6f3d219fe3a2fe4523eaa387d854ad06906c63883fde5b1ab", size = 10988185, upload-time = "2025-07-14T20:33:04.797Z" },
+
{ url = "https://files.pythonhosted.org/packages/02/13/b8f16d6b0dc80277129559c8e7dbc9011241a0da8f60d031edb0e6e9ac8f/mypy-1.17.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f7fb09d05e0f1c329a36dcd30e27564a3555717cde87301fae4fb542402ddfad", size = 10120169, upload-time = "2025-07-14T20:32:38.84Z" },
+
{ url = "https://files.pythonhosted.org/packages/14/ef/978ba79df0d65af680e20d43121363cf643eb79b04bf3880d01fc8afeb6f/mypy-1.17.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b72c34ce05ac3a1361ae2ebb50757fb6e3624032d91488d93544e9f82db0ed6c", size = 11918121, upload-time = "2025-07-14T20:33:52.328Z" },
+
{ url = "https://files.pythonhosted.org/packages/f4/10/55ef70b104151a0d8280474f05268ff0a2a79be8d788d5e647257d121309/mypy-1.17.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:434ad499ad8dde8b2f6391ddfa982f41cb07ccda8e3c67781b1bfd4e5f9450a8", size = 12648821, upload-time = "2025-07-14T20:32:59.631Z" },
+
{ url = "https://files.pythonhosted.org/packages/26/8c/7781fcd2e1eef48fbedd3a422c21fe300a8e03ed5be2eb4bd10246a77f4e/mypy-1.17.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:f105f61a5eff52e137fd73bee32958b2add9d9f0a856f17314018646af838e97", size = 12896955, upload-time = "2025-07-14T20:32:49.543Z" },
+
{ url = "https://files.pythonhosted.org/packages/78/13/03ac759dabe86e98ca7b6681f114f90ee03f3ff8365a57049d311bd4a4e3/mypy-1.17.0-cp39-cp39-win_amd64.whl", hash = "sha256:ba06254a5a22729853209550d80f94e28690d5530c661f9416a68ac097b13fc4", size = 9512957, upload-time = "2025-07-14T20:33:28.619Z" },
+
{ url = "https://files.pythonhosted.org/packages/e3/fc/ee058cc4316f219078464555873e99d170bde1d9569abd833300dbeb484a/mypy-1.17.0-py3-none-any.whl", hash = "sha256:15d9d0018237ab058e5de3d8fce61b6fa72cc59cc78fd91f1b474bce12abf496", size = 2283195, upload-time = "2025-07-14T20:31:54.753Z" },
+
]
+
+
[[package]]
+
name = "mypy-extensions"
+
version = "1.1.0"
+
source = { registry = "https://pypi.org/simple" }
+
sdist = { url = "https://files.pythonhosted.org/packages/a2/6e/371856a3fb9d31ca8dac321cda606860fa4548858c0cc45d9d1d4ca2628b/mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558", size = 6343, upload-time = "2025-04-22T14:54:24.164Z" }
+
wheels = [
+
{ url = "https://files.pythonhosted.org/packages/79/7b/2c79738432f5c924bef5071f933bcc9efd0473bac3b4aa584a6f7c1c8df8/mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505", size = 4963, upload-time = "2025-04-22T14:54:22.983Z" },
+
]
+
+
[[package]]
+
name = "packaging"
+
version = "25.0"
+
source = { registry = "https://pypi.org/simple" }
+
sdist = { url = "https://files.pythonhosted.org/packages/a1/d4/1fc4078c65507b51b96ca8f8c3ba19e6a61c8253c72794544580a7b6c24d/packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f", size = 165727, upload-time = "2025-04-19T11:48:59.673Z" }
+
wheels = [
+
{ url = "https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 66469, upload-time = "2025-04-19T11:48:57.875Z" },
+
]
+
+
[[package]]
+
name = "pathspec"
+
version = "0.12.1"
+
source = { registry = "https://pypi.org/simple" }
+
sdist = { url = "https://files.pythonhosted.org/packages/ca/bc/f35b8446f4531a7cb215605d100cd88b7ac6f44ab3fc94870c120ab3adbf/pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712", size = 51043, upload-time = "2023-12-10T22:30:45Z" }
+
wheels = [
+
{ url = "https://files.pythonhosted.org/packages/cc/20/ff623b09d963f88bfde16306a54e12ee5ea43e9b597108672ff3a408aad6/pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08", size = 31191, upload-time = "2023-12-10T22:30:43.14Z" },
+
]
+
+
[[package]]
+
name = "pendulum"
+
version = "3.1.0"
+
source = { registry = "https://pypi.org/simple" }
+
dependencies = [
+
{ name = "python-dateutil" },
+
{ name = "tzdata" },
+
]
+
sdist = { url = "https://files.pythonhosted.org/packages/23/7c/009c12b86c7cc6c403aec80f8a4308598dfc5995e5c523a5491faaa3952e/pendulum-3.1.0.tar.gz", hash = "sha256:66f96303560f41d097bee7d2dc98ffca716fbb3a832c4b3062034c2d45865015", size = 85930, upload-time = "2025-04-19T14:30:01.675Z" }
+
wheels = [
+
{ url = "https://files.pythonhosted.org/packages/b4/d8/398cd27903a6899d0ae47b896d88e0b15849fc334931a6732e7ce3be9a45/pendulum-3.1.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:aa545a59e6517cf43597455a6fb44daa4a6e08473d67a7ad34e4fa951efb9620", size = 338637, upload-time = "2025-04-19T14:00:56.429Z" },
+
{ url = "https://files.pythonhosted.org/packages/aa/9d/a125554919c6db14e189393254c7781ee98ed5a121b6c05652d353b03c12/pendulum-3.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:299df2da6c490ede86bb8d58c65e33d7a2a42479d21475a54b467b03ccb88531", size = 326003, upload-time = "2025-04-19T14:00:58.192Z" },
+
{ url = "https://files.pythonhosted.org/packages/53/9f/43a5a902f904e06252c259c2f6cf2dceafbb25aef158df08f79c0089dfd7/pendulum-3.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dbaa66e3ab179a2746eec67462f852a5d555bd709c25030aef38477468dd008e", size = 344335, upload-time = "2025-04-19T14:00:59.985Z" },
+
{ url = "https://files.pythonhosted.org/packages/ca/24/00fcd6abd1f7623d2bbcca048b45f01aa8bb6b647e0477c3a8ea6094335c/pendulum-3.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c3907ab3744c32e339c358d88ec80cd35fa2d4b25c77a3c67e6b39e99b7090c5", size = 382169, upload-time = "2025-04-19T14:01:01.411Z" },
+
{ url = "https://files.pythonhosted.org/packages/32/bc/20a87f24c26c6c4daf3c69311208b28130b4d19c006da16efc0e55715963/pendulum-3.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8244958c5bc4ed1c47ee84b098ddd95287a3fc59e569ca6e2b664c6396138ec4", size = 436675, upload-time = "2025-04-19T14:01:03.068Z" },
+
{ url = "https://files.pythonhosted.org/packages/1d/eb/3b1818a796408a250b8e6cfaa5372b991c0cbec768e02e0f9a226755383d/pendulum-3.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ca5722b3993b85ff7dfced48d86b318f863c359877b6badf1a3601e35199ef8f", size = 353728, upload-time = "2025-04-19T14:01:04.483Z" },
+
{ url = "https://files.pythonhosted.org/packages/36/23/755ef61f863b2777925171a59509540205b561a9e07ee7de0b5be9226bea/pendulum-3.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:5b77a3dc010eea1a4916ef3771163d808bfc3e02b894c37df311287f18e5b764", size = 524465, upload-time = "2025-04-19T14:01:05.865Z" },
+
{ url = "https://files.pythonhosted.org/packages/07/1f/a3e5f08890d13d93eee725778bfeaa233db5c55463e526857dffbc1a47e4/pendulum-3.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2d6e1eff4a15fdb8fb3867c5469e691c2465eef002a6a541c47b48a390ff4cf4", size = 525690, upload-time = "2025-04-19T14:01:07.707Z" },
+
{ url = "https://files.pythonhosted.org/packages/43/c5/bf8ce472b81e8f5f074e8ba39899d288acce417c2c4a9ec7486d56970e28/pendulum-3.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:73de43ec85b46ac75db848c8e2f3f5d086e90b11cd9c7f029e14c8d748d920e2", size = 260356, upload-time = "2025-04-19T14:01:09.339Z" },
+
{ url = "https://files.pythonhosted.org/packages/5e/6e/d28d3c22e6708b819a94c05bd05a3dfaed5c685379e8b6dc4b34b473b942/pendulum-3.1.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:61a03d14f8c64d13b2f7d5859e4b4053c4a7d3b02339f6c71f3e4606bfd67423", size = 338596, upload-time = "2025-04-19T14:01:11.306Z" },
+
{ url = "https://files.pythonhosted.org/packages/e1/e6/43324d58021d463c2eeb6146b169d2c935f2f840f9e45ac2d500453d954c/pendulum-3.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e674ed2d158afa5c361e60f1f67872dc55b492a10cacdaa7fcd7b7da5f158f24", size = 325854, upload-time = "2025-04-19T14:01:13.156Z" },
+
{ url = "https://files.pythonhosted.org/packages/b0/a7/d2ae79b960bfdea94dab67e2f118697b08bc9e98eb6bd8d32c4d99240da3/pendulum-3.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7c75377eb16e58bbe7e03ea89eeea49be6fc5de0934a4aef0e263f8b4fa71bc2", size = 344334, upload-time = "2025-04-19T14:01:15.151Z" },
+
{ url = "https://files.pythonhosted.org/packages/96/94/941f071212e23c29aae7def891fb636930c648386e059ce09ea0dcd43933/pendulum-3.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:656b8b0ce070f0f2e5e2668247d3c783c55336534aa1f13bd0969535878955e1", size = 382259, upload-time = "2025-04-19T14:01:16.924Z" },
+
{ url = "https://files.pythonhosted.org/packages/51/ad/a78a701656aec00d16fee636704445c23ca11617a0bfe7c3848d1caa5157/pendulum-3.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48962903e6c1afe1f13548cb6252666056086c107d59e3d64795c58c9298bc2e", size = 436361, upload-time = "2025-04-19T14:01:18.796Z" },
+
{ url = "https://files.pythonhosted.org/packages/da/93/83f59ccbf4435c29dca8c63a6560fcbe4783079a468a5f91d9f886fd21f0/pendulum-3.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d364ec3f8e65010fefd4b0aaf7be5eb97e5df761b107a06f5e743b7c3f52c311", size = 353653, upload-time = "2025-04-19T14:01:20.159Z" },
+
{ url = "https://files.pythonhosted.org/packages/6f/0f/42d6644ec6339b41066f594e52d286162aecd2e9735aaf994d7e00c9e09d/pendulum-3.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:dd52caffc2afb86612ec43bbeb226f204ea12ebff9f3d12f900a7d3097210fcc", size = 524567, upload-time = "2025-04-19T14:01:21.457Z" },
+
{ url = "https://files.pythonhosted.org/packages/de/45/d84d909202755ab9d3379e5481fdf70f53344ebefbd68d6f5803ddde98a6/pendulum-3.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d439fccaa35c91f686bd59d30604dab01e8b5c1d0dd66e81648c432fd3f8a539", size = 525571, upload-time = "2025-04-19T14:01:23.329Z" },
+
{ url = "https://files.pythonhosted.org/packages/0d/e0/4de160773ce3c2f7843c310db19dd919a0cd02cc1c0384866f63b18a6251/pendulum-3.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:43288773a86d9c5c0ddb645f88f615ff6bd12fd1410b34323662beccb18f3b49", size = 260259, upload-time = "2025-04-19T14:01:24.689Z" },
+
{ url = "https://files.pythonhosted.org/packages/c1/7f/ffa278f78112c6c6e5130a702042f52aab5c649ae2edf814df07810bbba5/pendulum-3.1.0-cp311-cp311-win_arm64.whl", hash = "sha256:569ea5072ae0f11d625e03b36d865f8037b76e838a3b621f6967314193896a11", size = 253899, upload-time = "2025-04-19T14:01:26.442Z" },
+
{ url = "https://files.pythonhosted.org/packages/7a/d7/b1bfe15a742f2c2713acb1fdc7dc3594ff46ef9418ac6a96fcb12a6ba60b/pendulum-3.1.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:4dfd53e7583ccae138be86d6c0a0b324c7547df2afcec1876943c4d481cf9608", size = 336209, upload-time = "2025-04-19T14:01:27.815Z" },
+
{ url = "https://files.pythonhosted.org/packages/eb/87/0392da0c603c828b926d9f7097fbdddaafc01388cb8a00888635d04758c3/pendulum-3.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6a6e06a28f3a7d696546347805536f6f38be458cb79de4f80754430696bea9e6", size = 323130, upload-time = "2025-04-19T14:01:29.336Z" },
+
{ url = "https://files.pythonhosted.org/packages/c0/61/95f1eec25796be6dddf71440ee16ec1fd0c573fc61a73bd1ef6daacd529a/pendulum-3.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7e68d6a51880708084afd8958af42dc8c5e819a70a6c6ae903b1c4bfc61e0f25", size = 341509, upload-time = "2025-04-19T14:01:31.1Z" },
+
{ url = "https://files.pythonhosted.org/packages/b5/7b/eb0f5e6aa87d5e1b467a1611009dbdc92f0f72425ebf07669bfadd8885a6/pendulum-3.1.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9e3f1e5da39a7ea7119efda1dd96b529748c1566f8a983412d0908455d606942", size = 378674, upload-time = "2025-04-19T14:01:32.974Z" },
+
{ url = "https://files.pythonhosted.org/packages/29/68/5a4c1b5de3e54e16cab21d2ec88f9cd3f18599e96cc90a441c0b0ab6b03f/pendulum-3.1.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e9af1e5eeddb4ebbe1b1c9afb9fd8077d73416ade42dd61264b3f3b87742e0bb", size = 436133, upload-time = "2025-04-19T14:01:34.349Z" },
+
{ url = "https://files.pythonhosted.org/packages/87/5d/f7a1d693e5c0f789185117d5c1d5bee104f5b0d9fbf061d715fb61c840a8/pendulum-3.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20f74aa8029a42e327bfc150472e0e4d2358fa5d795f70460160ba81b94b6945", size = 351232, upload-time = "2025-04-19T14:01:35.669Z" },
+
{ url = "https://files.pythonhosted.org/packages/30/77/c97617eb31f1d0554edb073201a294019b9e0a9bd2f73c68e6d8d048cd6b/pendulum-3.1.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:cf6229e5ee70c2660148523f46c472e677654d0097bec010d6730f08312a4931", size = 521562, upload-time = "2025-04-19T14:01:37.05Z" },
+
{ url = "https://files.pythonhosted.org/packages/76/22/0d0ef3393303877e757b848ecef8a9a8c7627e17e7590af82d14633b2cd1/pendulum-3.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:350cabb23bf1aec7c7694b915d3030bff53a2ad4aeabc8c8c0d807c8194113d6", size = 523221, upload-time = "2025-04-19T14:01:38.444Z" },
+
{ url = "https://files.pythonhosted.org/packages/99/f3/aefb579aa3cebd6f2866b205fc7a60d33e9a696e9e629024752107dc3cf5/pendulum-3.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:42959341e843077c41d47420f28c3631de054abd64da83f9b956519b5c7a06a7", size = 260502, upload-time = "2025-04-19T14:01:39.814Z" },
+
{ url = "https://files.pythonhosted.org/packages/02/74/4332b5d6e34c63d4df8e8eab2249e74c05513b1477757463f7fdca99e9be/pendulum-3.1.0-cp312-cp312-win_arm64.whl", hash = "sha256:006758e2125da2e624493324dfd5d7d1b02b0c44bc39358e18bf0f66d0767f5f", size = 253089, upload-time = "2025-04-19T14:01:41.171Z" },
+
{ url = "https://files.pythonhosted.org/packages/8e/1f/af928ba4aa403dac9569f787adcf024005e7654433d71f7a84e608716837/pendulum-3.1.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:28658b0baf4b30eb31d096a375983cfed033e60c0a7bbe94fa23f06cd779b50b", size = 336209, upload-time = "2025-04-19T14:01:42.775Z" },
+
{ url = "https://files.pythonhosted.org/packages/b6/16/b010643007ba964c397da7fa622924423883c1bbff1a53f9d1022cd7f024/pendulum-3.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:b114dcb99ce511cb8f5495c7b6f0056b2c3dba444ef1ea6e48030d7371bd531a", size = 323132, upload-time = "2025-04-19T14:01:44.577Z" },
+
{ url = "https://files.pythonhosted.org/packages/64/19/c3c47aeecb5d9bceb0e89faafd800d39809b696c5b7bba8ec8370ad5052c/pendulum-3.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2404a6a54c80252ea393291f0b7f35525a61abae3d795407f34e118a8f133a18", size = 341509, upload-time = "2025-04-19T14:01:46.084Z" },
+
{ url = "https://files.pythonhosted.org/packages/38/cf/c06921ff6b860ff7e62e70b8e5d4dc70e36f5abb66d168bd64d51760bc4e/pendulum-3.1.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d06999790d9ee9962a1627e469f98568bf7ad1085553fa3c30ed08b3944a14d7", size = 378674, upload-time = "2025-04-19T14:01:47.727Z" },
+
{ url = "https://files.pythonhosted.org/packages/62/0b/a43953b9eba11e82612b033ac5133f716f1b76b6108a65da6f408b3cc016/pendulum-3.1.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94751c52f6b7c306734d1044c2c6067a474237e1e5afa2f665d1fbcbbbcf24b3", size = 436133, upload-time = "2025-04-19T14:01:49.126Z" },
+
{ url = "https://files.pythonhosted.org/packages/eb/a0/ec3d70b3b96e23ae1d039f132af35e17704c22a8250d1887aaefea4d78a6/pendulum-3.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5553ac27be05e997ec26d7f004cf72788f4ce11fe60bb80dda604a64055b29d0", size = 351232, upload-time = "2025-04-19T14:01:50.575Z" },
+
{ url = "https://files.pythonhosted.org/packages/f4/97/aba23f1716b82f6951ba2b1c9178a2d107d1e66c102762a9bf19988547ea/pendulum-3.1.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:f8dee234ca6142bf0514368d01a72945a44685aaa2fc4c14c98d09da9437b620", size = 521563, upload-time = "2025-04-19T14:01:51.9Z" },
+
{ url = "https://files.pythonhosted.org/packages/01/33/2c0d5216cc53d16db0c4b3d510f141ee0a540937f8675948541190fbd48b/pendulum-3.1.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:7378084fe54faab4ee481897a00b710876f2e901ded6221671e827a253e643f2", size = 523221, upload-time = "2025-04-19T14:01:53.275Z" },
+
{ url = "https://files.pythonhosted.org/packages/51/89/8de955c339c31aeae77fd86d3225509b998c81875e9dba28cb88b8cbf4b3/pendulum-3.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:8539db7ae2c8da430ac2515079e288948c8ebf7eb1edd3e8281b5cdf433040d6", size = 260501, upload-time = "2025-04-19T14:01:54.749Z" },
+
{ url = "https://files.pythonhosted.org/packages/15/c3/226a3837363e94f8722461848feec18bfdd7d5172564d53aa3c3397ff01e/pendulum-3.1.0-cp313-cp313-win_arm64.whl", hash = "sha256:1ce26a608e1f7387cd393fba2a129507c4900958d4f47b90757ec17656856571", size = 253087, upload-time = "2025-04-19T14:01:55.998Z" },
+
{ url = "https://files.pythonhosted.org/packages/c9/eb/e128af9e1a216c17b932f8c7f1f927f97cd8fd71d6e60148e1c69ad2bcf4/pendulum-3.1.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:2504df1a7ff8e0827781a601ff399bfcad23e7b7943f87ef33db02c11131f5e8", size = 339467, upload-time = "2025-04-19T14:01:57.346Z" },
+
{ url = "https://files.pythonhosted.org/packages/2f/c3/a7f515c6dbe89f2d92216ccd9a2e31dd657767d7f64177da1579092a41e3/pendulum-3.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4041a7156695499b6676ed092f27e17760db2341bf350f6c5ea9137dd2cfd3f6", size = 326898, upload-time = "2025-04-19T14:01:58.789Z" },
+
{ url = "https://files.pythonhosted.org/packages/15/da/80774350340e9d06789460b6e8def2d497cffe09334a8911be8d226caec5/pendulum-3.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87b277e9177651d6af8500b95f0af1e3c1769064f2353c06f638d3c1e065063e", size = 344866, upload-time = "2025-04-19T14:02:00.688Z" },
+
{ url = "https://files.pythonhosted.org/packages/55/cf/771d1fee8d14abefbccbed99622c6f26e33b839b8964a18188b4f9159bf1/pendulum-3.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:784cf82b676118816fb81ea6bcbdf8f3b0c49aa74fcb895647ef7f8046093471", size = 382902, upload-time = "2025-04-19T14:02:02.077Z" },
+
{ url = "https://files.pythonhosted.org/packages/bd/95/0660ae5dbe9212c99fbccb27ca64361018600aa954fab95641653a39ce36/pendulum-3.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9e44277a391fa5ad2e9ce02b1b24fd9489cb2a371ae2459eddb238301d31204d", size = 437782, upload-time = "2025-04-19T14:02:03.984Z" },
+
{ url = "https://files.pythonhosted.org/packages/04/1f/007dff40bb0325ae91eba3d4d6ce911945d808f87d3cebaa4556ca07f35d/pendulum-3.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6a7d0bca8cca92d60734b64fa4fa58b17b8ec1f55112bf77d00ee65248d19177", size = 354462, upload-time = "2025-04-19T14:02:05.831Z" },
+
{ url = "https://files.pythonhosted.org/packages/e6/cc/8d2ed88beab1622623e321bd1f754eee174bb97e6ffcd34ceb9cce87a4ea/pendulum-3.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bfac5e02faee02c180444e722c298690688ec1c3dfa1aab65fb4e0e3825d84ed", size = 525009, upload-time = "2025-04-19T14:02:07.231Z" },
+
{ url = "https://files.pythonhosted.org/packages/44/c0/a503df53796b0dc1dae7d50573d936f86f8f482cf4acd9adbbd2a30ef817/pendulum-3.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e0da70941b062220e734c2c510ad30daa60aca1a37e893f1baa0da065ffa4c72", size = 526351, upload-time = "2025-04-19T14:02:08.619Z" },
+
{ url = "https://files.pythonhosted.org/packages/c6/bc/1fbc57b2e482a0ee3b5a0759c4b3b5127f0401cdce4afef111a3b6179d8d/pendulum-3.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:300a237fb81028edb9604d4d1bb205b80515fd22ab9c1a4c55014d07869122f8", size = 260813, upload-time = "2025-04-19T14:02:10.677Z" },
+
{ url = "https://files.pythonhosted.org/packages/66/10/3258c084653606d2be2c7168998eda4a57cf1559cecb43cf1100000fda5f/pendulum-3.1.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:d2cac744940299d8da41a3ed941aa1e02b5abbc9ae2c525f3aa2ae30c28a86b5", size = 339442, upload-time = "2025-04-19T14:02:12.512Z" },
+
{ url = "https://files.pythonhosted.org/packages/98/d5/98a1a10cd1cfb3390fbf070864e9a10de8e70a9d4509832132f4d900d655/pendulum-3.1.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:ffb39c3f3906a9c9a108fa98e5556f18b52d2c6451984bbfe2f14436ec4fc9d4", size = 326609, upload-time = "2025-04-19T14:02:13.838Z" },
+
{ url = "https://files.pythonhosted.org/packages/0a/2e/448abdebc11b9c54e190d273cb084162643199fc184cb1bb6bff7900e67f/pendulum-3.1.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ebe18b1c2eb364064cc4a68a65900f1465cac47d0891dab82341766bcc05b40c", size = 344777, upload-time = "2025-04-19T14:02:15.512Z" },
+
{ url = "https://files.pythonhosted.org/packages/ed/91/ee857bbd51168bf08b89c3a4705c920725eee0f830ccc513b8370f6ce71d/pendulum-3.1.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9e9b28a35cec9fcd90f224b4878456129a057dbd694fc8266a9393834804995", size = 354404, upload-time = "2025-04-19T14:02:16.91Z" },
+
{ url = "https://files.pythonhosted.org/packages/bc/d4/e63a57df65e2b2d10f3aa917a4069be9abf5ac7d56d11336e0510742d8a6/pendulum-3.1.0-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:a3be19b73a9c6a866724419295482f817727e635ccc82f07ae6f818943a1ee96", size = 524948, upload-time = "2025-04-19T14:02:18.808Z" },
+
{ url = "https://files.pythonhosted.org/packages/93/87/04e74600c5a5674e5f341b8888b530a9de9b84b31889f80fac3bee3e9e87/pendulum-3.1.0-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:24a53b523819bda4c70245687a589b5ea88711f7caac4be5f276d843fe63076b", size = 526340, upload-time = "2025-04-19T14:02:20.242Z" },
+
{ url = "https://files.pythonhosted.org/packages/48/27/d3577a5f6f7d1fbf1138d87ce21ebab363c78642513b991d1c424d658d09/pendulum-3.1.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:bd701789414fbd0be3c75f46803f31e91140c23821e4bcb0fa2bddcdd051c425", size = 261089, upload-time = "2025-04-19T14:02:21.631Z" },
+
{ url = "https://files.pythonhosted.org/packages/53/8f/6620b0df6acdd8c020ec4f5907e3deb8c6d46970568299b66eed9b5f53b4/pendulum-3.1.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:0803639fc98e03f74d0b83955a2800bcee1c99b0700638aae9ab7ceb1a7dcca3", size = 340056, upload-time = "2025-04-19T14:02:23.041Z" },
+
{ url = "https://files.pythonhosted.org/packages/dd/90/957491643cff9d97764e443bd0ef7c5c549e733e306159d6ca8ab4034fb5/pendulum-3.1.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:4cceff50503ef9cb021e53a238f867c9843b4dd55859582d682f3c9e52460699", size = 327095, upload-time = "2025-04-19T14:02:24.866Z" },
+
{ url = "https://files.pythonhosted.org/packages/fe/f2/550855e0285671278f763963b2470f437d340759aaef927fedb204e58fb0/pendulum-3.1.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c2cf8adcf3030eef78c3cd82afd9948cd1a4ae1a9450e9ac128b9e744c42825f", size = 345131, upload-time = "2025-04-19T14:02:26.332Z" },
+
{ url = "https://files.pythonhosted.org/packages/b4/8f/938b83fe3e1450f4b04d1f96e8b2c288e07ad6b942260fef24cfd98cc3d0/pendulum-3.1.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e5bce0f71c10e983e1c39e1eb37b9a5f5c2aa0c15a36edaaa0a844fb1fbc7bbb", size = 354886, upload-time = "2025-04-19T14:02:28.205Z" },
+
{ url = "https://files.pythonhosted.org/packages/d9/aa/0e3c231a7e35b362226204d7276a47c0e225aa59b30c7c9cd2a8e2660967/pendulum-3.1.0-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:c1354be2df38f031ac6a985949b6541be7d39dd7e44c8804f4bc9a39dea9f3bb", size = 525344, upload-time = "2025-04-19T14:02:30.116Z" },
+
{ url = "https://files.pythonhosted.org/packages/0b/c7/d3654a790129684d0e8dc04707cb6d75633d7b102a962c6dc0f862c64c25/pendulum-3.1.0-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e4cbd933a40c915ed5c41b083115cca15c7afa8179363b2a61db167c64fa0670", size = 526685, upload-time = "2025-04-19T14:02:31.523Z" },
+
{ url = "https://files.pythonhosted.org/packages/50/d9/4a166256386b7973e36ff44135e8d009f4afb25d6c72df5380ccfd6fbb89/pendulum-3.1.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:3363a470b5d67dbf8d9fd1bf77dcdbf720788bc3be4a10bdcd28ae5d7dbd26c4", size = 261170, upload-time = "2025-04-19T14:02:33.099Z" },
+
{ url = "https://files.pythonhosted.org/packages/6e/23/e98758924d1b3aac11a626268eabf7f3cf177e7837c28d47bf84c64532d0/pendulum-3.1.0-py3-none-any.whl", hash = "sha256:f9178c2a8e291758ade1e8dd6371b1d26d08371b4c7730a6e9a3ef8b16ebae0f", size = 111799, upload-time = "2025-04-19T14:02:34.739Z" },
+
]
+
+
[[package]]
+
name = "platformdirs"
+
version = "4.3.8"
+
source = { registry = "https://pypi.org/simple" }
+
sdist = { url = "https://files.pythonhosted.org/packages/fe/8b/3c73abc9c759ecd3f1f7ceff6685840859e8070c4d947c93fae71f6a0bf2/platformdirs-4.3.8.tar.gz", hash = "sha256:3d512d96e16bcb959a814c9f348431070822a6496326a4be0911c40b5a74c2bc", size = 21362, upload-time = "2025-05-07T22:47:42.121Z" }
+
wheels = [
+
{ url = "https://files.pythonhosted.org/packages/fe/39/979e8e21520d4e47a0bbe349e2713c0aac6f3d853d0e5b34d76206c439aa/platformdirs-4.3.8-py3-none-any.whl", hash = "sha256:ff7059bb7eb1179e2685604f4aaf157cfd9535242bd23742eadc3c13542139b4", size = 18567, upload-time = "2025-05-07T22:47:40.376Z" },
+
]
+
+
[[package]]
+
name = "pluggy"
+
version = "1.6.0"
+
source = { registry = "https://pypi.org/simple" }
+
sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412, upload-time = "2025-05-15T12:30:07.975Z" }
+
wheels = [
+
{ url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" },
+
]
+
+
[[package]]
+
name = "pydantic"
+
version = "2.11.7"
+
source = { registry = "https://pypi.org/simple" }
+
dependencies = [
+
{ name = "annotated-types" },
+
{ name = "pydantic-core" },
+
{ name = "typing-extensions" },
+
{ name = "typing-inspection" },
+
]
+
sdist = { url = "https://files.pythonhosted.org/packages/00/dd/4325abf92c39ba8623b5af936ddb36ffcfe0beae70405d456ab1fb2f5b8c/pydantic-2.11.7.tar.gz", hash = "sha256:d989c3c6cb79469287b1569f7447a17848c998458d49ebe294e975b9baf0f0db", size = 788350, upload-time = "2025-06-14T08:33:17.137Z" }
+
wheels = [
+
{ url = "https://files.pythonhosted.org/packages/6a/c0/ec2b1c8712ca690e5d61979dee872603e92b8a32f94cc1b72d53beab008a/pydantic-2.11.7-py3-none-any.whl", hash = "sha256:dde5df002701f6de26248661f6835bbe296a47bf73990135c7d07ce741b9623b", size = 444782, upload-time = "2025-06-14T08:33:14.905Z" },
+
]
+
+
[[package]]
+
name = "pydantic-core"
+
version = "2.33.2"
+
source = { registry = "https://pypi.org/simple" }
+
dependencies = [
+
{ name = "typing-extensions" },
+
]
+
sdist = { url = "https://files.pythonhosted.org/packages/ad/88/5f2260bdfae97aabf98f1778d43f69574390ad787afb646292a638c923d4/pydantic_core-2.33.2.tar.gz", hash = "sha256:7cb8bc3605c29176e1b105350d2e6474142d7c1bd1d9327c4a9bdb46bf827acc", size = 435195, upload-time = "2025-04-23T18:33:52.104Z" }
+
wheels = [
+
{ url = "https://files.pythonhosted.org/packages/e5/92/b31726561b5dae176c2d2c2dc43a9c5bfba5d32f96f8b4c0a600dd492447/pydantic_core-2.33.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2b3d326aaef0c0399d9afffeb6367d5e26ddc24d351dbc9c636840ac355dc5d8", size = 2028817, upload-time = "2025-04-23T18:30:43.919Z" },
+
{ url = "https://files.pythonhosted.org/packages/a3/44/3f0b95fafdaca04a483c4e685fe437c6891001bf3ce8b2fded82b9ea3aa1/pydantic_core-2.33.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0e5b2671f05ba48b94cb90ce55d8bdcaaedb8ba00cc5359f6810fc918713983d", size = 1861357, upload-time = "2025-04-23T18:30:46.372Z" },
+
{ url = "https://files.pythonhosted.org/packages/30/97/e8f13b55766234caae05372826e8e4b3b96e7b248be3157f53237682e43c/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0069c9acc3f3981b9ff4cdfaf088e98d83440a4c7ea1bc07460af3d4dc22e72d", size = 1898011, upload-time = "2025-04-23T18:30:47.591Z" },
+
{ url = "https://files.pythonhosted.org/packages/9b/a3/99c48cf7bafc991cc3ee66fd544c0aae8dc907b752f1dad2d79b1b5a471f/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d53b22f2032c42eaaf025f7c40c2e3b94568ae077a606f006d206a463bc69572", size = 1982730, upload-time = "2025-04-23T18:30:49.328Z" },
+
{ url = "https://files.pythonhosted.org/packages/de/8e/a5b882ec4307010a840fb8b58bd9bf65d1840c92eae7534c7441709bf54b/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0405262705a123b7ce9f0b92f123334d67b70fd1f20a9372b907ce1080c7ba02", size = 2136178, upload-time = "2025-04-23T18:30:50.907Z" },
+
{ url = "https://files.pythonhosted.org/packages/e4/bb/71e35fc3ed05af6834e890edb75968e2802fe98778971ab5cba20a162315/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4b25d91e288e2c4e0662b8038a28c6a07eaac3e196cfc4ff69de4ea3db992a1b", size = 2736462, upload-time = "2025-04-23T18:30:52.083Z" },
+
{ url = "https://files.pythonhosted.org/packages/31/0d/c8f7593e6bc7066289bbc366f2235701dcbebcd1ff0ef8e64f6f239fb47d/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bdfe4b3789761f3bcb4b1ddf33355a71079858958e3a552f16d5af19768fef2", size = 2005652, upload-time = "2025-04-23T18:30:53.389Z" },
+
{ url = "https://files.pythonhosted.org/packages/d2/7a/996d8bd75f3eda405e3dd219ff5ff0a283cd8e34add39d8ef9157e722867/pydantic_core-2.33.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:efec8db3266b76ef9607c2c4c419bdb06bf335ae433b80816089ea7585816f6a", size = 2113306, upload-time = "2025-04-23T18:30:54.661Z" },
+
{ url = "https://files.pythonhosted.org/packages/ff/84/daf2a6fb2db40ffda6578a7e8c5a6e9c8affb251a05c233ae37098118788/pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:031c57d67ca86902726e0fae2214ce6770bbe2f710dc33063187a68744a5ecac", size = 2073720, upload-time = "2025-04-23T18:30:56.11Z" },
+
{ url = "https://files.pythonhosted.org/packages/77/fb/2258da019f4825128445ae79456a5499c032b55849dbd5bed78c95ccf163/pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:f8de619080e944347f5f20de29a975c2d815d9ddd8be9b9b7268e2e3ef68605a", size = 2244915, upload-time = "2025-04-23T18:30:57.501Z" },
+
{ url = "https://files.pythonhosted.org/packages/d8/7a/925ff73756031289468326e355b6fa8316960d0d65f8b5d6b3a3e7866de7/pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:73662edf539e72a9440129f231ed3757faab89630d291b784ca99237fb94db2b", size = 2241884, upload-time = "2025-04-23T18:30:58.867Z" },
+
{ url = "https://files.pythonhosted.org/packages/0b/b0/249ee6d2646f1cdadcb813805fe76265745c4010cf20a8eba7b0e639d9b2/pydantic_core-2.33.2-cp310-cp310-win32.whl", hash = "sha256:0a39979dcbb70998b0e505fb1556a1d550a0781463ce84ebf915ba293ccb7e22", size = 1910496, upload-time = "2025-04-23T18:31:00.078Z" },
+
{ url = "https://files.pythonhosted.org/packages/66/ff/172ba8f12a42d4b552917aa65d1f2328990d3ccfc01d5b7c943ec084299f/pydantic_core-2.33.2-cp310-cp310-win_amd64.whl", hash = "sha256:b0379a2b24882fef529ec3b4987cb5d003b9cda32256024e6fe1586ac45fc640", size = 1955019, upload-time = "2025-04-23T18:31:01.335Z" },
{ url = "https://files.pythonhosted.org/packages/3f/8d/71db63483d518cbbf290261a1fc2839d17ff89fce7089e08cad07ccfce67/pydantic_core-2.33.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:4c5b0a576fb381edd6d27f0a85915c6daf2f8138dc5c267a57c08a62900758c7", size = 2028584, upload-time = "2025-04-23T18:31:03.106Z" },
{ url = "https://files.pythonhosted.org/packages/24/2f/3cfa7244ae292dd850989f328722d2aef313f74ffc471184dc509e1e4e5a/pydantic_core-2.33.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e799c050df38a639db758c617ec771fd8fb7a5f8eaaa4b27b101f266b216a246", size = 1855071, upload-time = "2025-04-23T18:31:04.621Z" },
{ url = "https://files.pythonhosted.org/packages/b3/d3/4ae42d33f5e3f50dd467761304be2fa0a9417fbf09735bc2cce003480f2a/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dc46a01bf8d62f227d5ecee74178ffc448ff4e5197c756331f71efcc66dc980f", size = 1897823, upload-time = "2025-04-23T18:31:06.377Z" },
{ url = "https://files.pythonhosted.org/packages/f4/f3/aa5976e8352b7695ff808599794b1fba2a9ae2ee954a3426855935799488/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a144d4f717285c6d9234a66778059f33a89096dfb9b39117663fd8413d582dcc", size = 1983792, upload-time = "2025-04-23T18:31:07.93Z" },
{ url = "https://files.pythonhosted.org/packages/d5/7a/cda9b5a23c552037717f2b2a5257e9b2bfe45e687386df9591eff7b46d28/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:73cf6373c21bc80b2e0dc88444f41ae60b2f070ed02095754eb5a01df12256de", size = 2136338, upload-time = "2025-04-23T18:31:09.283Z" },
{ url = "https://files.pythonhosted.org/packages/2b/9f/b8f9ec8dd1417eb9da784e91e1667d58a2a4a7b7b34cf4af765ef663a7e5/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3dc625f4aa79713512d1976fe9f0bc99f706a9dee21dfd1810b4bbbf228d0e8a", size = 2730998, upload-time = "2025-04-23T18:31:11.7Z" },
{ url = "https://files.pythonhosted.org/packages/47/bc/cd720e078576bdb8255d5032c5d63ee5c0bf4b7173dd955185a1d658c456/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b21b5549499972441da4758d662aeea93f1923f953e9cbaff14b8b9565aef", size = 2003200, upload-time = "2025-04-23T18:31:13.536Z" },
{ url = "https://files.pythonhosted.org/packages/ca/22/3602b895ee2cd29d11a2b349372446ae9727c32e78a94b3d588a40fdf187/pydantic_core-2.33.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bdc25f3681f7b78572699569514036afe3c243bc3059d3942624e936ec93450e", size = 2113890, upload-time = "2025-04-23T18:31:15.011Z" },
{ url = "https://files.pythonhosted.org/packages/ff/e6/e3c5908c03cf00d629eb38393a98fccc38ee0ce8ecce32f69fc7d7b558a7/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:fe5b32187cbc0c862ee201ad66c30cf218e5ed468ec8dc1cf49dec66e160cc4d", size = 2073359, upload-time = "2025-04-23T18:31:16.393Z" },
{ url = "https://files.pythonhosted.org/packages/12/e7/6a36a07c59ebefc8777d1ffdaf5ae71b06b21952582e4b07eba88a421c79/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:bc7aee6f634a6f4a95676fcb5d6559a2c2a390330098dba5e5a5f28a2e4ada30", size = 2245883, upload-time = "2025-04-23T18:31:17.892Z" },
{ url = "https://files.pythonhosted.org/packages/16/3f/59b3187aaa6cc0c1e6616e8045b284de2b6a87b027cce2ffcea073adf1d2/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:235f45e5dbcccf6bd99f9f472858849f73d11120d76ea8707115415f8e5ebebf", size = 2241074, upload-time = "2025-04-23T18:31:19.205Z" },
{ url = "https://files.pythonhosted.org/packages/e0/ed/55532bb88f674d5d8f67ab121a2a13c385df382de2a1677f30ad385f7438/pydantic_core-2.33.2-cp311-cp311-win32.whl", hash = "sha256:6368900c2d3ef09b69cb0b913f9f8263b03786e5b2a387706c5afb66800efd51", size = 1910538, upload-time = "2025-04-23T18:31:20.541Z" },
{ url = "https://files.pythonhosted.org/packages/fe/1b/25b7cccd4519c0b23c2dd636ad39d381abf113085ce4f7bec2b0dc755eb1/pydantic_core-2.33.2-cp311-cp311-win_amd64.whl", hash = "sha256:1e063337ef9e9820c77acc768546325ebe04ee38b08703244c1309cccc4f1bab", size = 1952909, upload-time = "2025-04-23T18:31:22.371Z" },
{ url = "https://files.pythonhosted.org/packages/49/a9/d809358e49126438055884c4366a1f6227f0f84f635a9014e2deb9b9de54/pydantic_core-2.33.2-cp311-cp311-win_arm64.whl", hash = "sha256:6b99022f1d19bc32a4c2a0d544fc9a76e3be90f0b3f4af413f87d38749300e65", size = 1897786, upload-time = "2025-04-23T18:31:24.161Z" },
{ url = "https://files.pythonhosted.org/packages/18/8a/2b41c97f554ec8c71f2a8a5f85cb56a8b0956addfe8b0efb5b3d77e8bdc3/pydantic_core-2.33.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a7ec89dc587667f22b6a0b6579c249fca9026ce7c333fc142ba42411fa243cdc", size = 2009000, upload-time = "2025-04-23T18:31:25.863Z" },
{ url = "https://files.pythonhosted.org/packages/a1/02/6224312aacb3c8ecbaa959897af57181fb6cf3a3d7917fd44d0f2917e6f2/pydantic_core-2.33.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3c6db6e52c6d70aa0d00d45cdb9b40f0433b96380071ea80b09277dba021ddf7", size = 1847996, upload-time = "2025-04-23T18:31:27.341Z" },
{ url = "https://files.pythonhosted.org/packages/d6/46/6dcdf084a523dbe0a0be59d054734b86a981726f221f4562aed313dbcb49/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e61206137cbc65e6d5256e1166f88331d3b6238e082d9f74613b9b765fb9025", size = 1880957, upload-time = "2025-04-23T18:31:28.956Z" },
{ url = "https://files.pythonhosted.org/packages/ec/6b/1ec2c03837ac00886ba8160ce041ce4e325b41d06a034adbef11339ae422/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb8c529b2819c37140eb51b914153063d27ed88e3bdc31b71198a198e921e011", size = 1964199, upload-time = "2025-04-23T18:31:31.025Z" },
{ url = "https://files.pythonhosted.org/packages/2d/1d/6bf34d6adb9debd9136bd197ca72642203ce9aaaa85cfcbfcf20f9696e83/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c52b02ad8b4e2cf14ca7b3d918f3eb0ee91e63b3167c32591e57c4317e134f8f", size = 2120296, upload-time = "2025-04-23T18:31:32.514Z" },
{ url = "https://files.pythonhosted.org/packages/e0/94/2bd0aaf5a591e974b32a9f7123f16637776c304471a0ab33cf263cf5591a/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:96081f1605125ba0855dfda83f6f3df5ec90c61195421ba72223de35ccfb2f88", size = 2676109, upload-time = "2025-04-23T18:31:33.958Z" },
{ url = "https://files.pythonhosted.org/packages/f9/41/4b043778cf9c4285d59742281a769eac371b9e47e35f98ad321349cc5d61/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f57a69461af2a5fa6e6bbd7a5f60d3b7e6cebb687f55106933188e79ad155c1", size = 2002028, upload-time = "2025-04-23T18:31:39.095Z" },
{ url = "https://files.pythonhosted.org/packages/cb/d5/7bb781bf2748ce3d03af04d5c969fa1308880e1dca35a9bd94e1a96a922e/pydantic_core-2.33.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:572c7e6c8bb4774d2ac88929e3d1f12bc45714ae5ee6d9a788a9fb35e60bb04b", size = 2100044, upload-time = "2025-04-23T18:31:41.034Z" },
{ url = "https://files.pythonhosted.org/packages/fe/36/def5e53e1eb0ad896785702a5bbfd25eed546cdcf4087ad285021a90ed53/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:db4b41f9bd95fbe5acd76d89920336ba96f03e149097365afe1cb092fceb89a1", size = 2058881, upload-time = "2025-04-23T18:31:42.757Z" },
{ url = "https://files.pythonhosted.org/packages/01/6c/57f8d70b2ee57fc3dc8b9610315949837fa8c11d86927b9bb044f8705419/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:fa854f5cf7e33842a892e5c73f45327760bc7bc516339fda888c75ae60edaeb6", size = 2227034, upload-time = "2025-04-23T18:31:44.304Z" },
{ url = "https://files.pythonhosted.org/packages/27/b9/9c17f0396a82b3d5cbea4c24d742083422639e7bb1d5bf600e12cb176a13/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5f483cfb75ff703095c59e365360cb73e00185e01aaea067cd19acffd2ab20ea", size = 2234187, upload-time = "2025-04-23T18:31:45.891Z" },
{ url = "https://files.pythonhosted.org/packages/b0/6a/adf5734ffd52bf86d865093ad70b2ce543415e0e356f6cacabbc0d9ad910/pydantic_core-2.33.2-cp312-cp312-win32.whl", hash = "sha256:9cb1da0f5a471435a7bc7e439b8a728e8b61e59784b2af70d7c169f8dd8ae290", size = 1892628, upload-time = "2025-04-23T18:31:47.819Z" },
{ url = "https://files.pythonhosted.org/packages/43/e4/5479fecb3606c1368d496a825d8411e126133c41224c1e7238be58b87d7e/pydantic_core-2.33.2-cp312-cp312-win_amd64.whl", hash = "sha256:f941635f2a3d96b2973e867144fde513665c87f13fe0e193c158ac51bfaaa7b2", size = 1955866, upload-time = "2025-04-23T18:31:49.635Z" },
{ url = "https://files.pythonhosted.org/packages/0d/24/8b11e8b3e2be9dd82df4b11408a67c61bb4dc4f8e11b5b0fc888b38118b5/pydantic_core-2.33.2-cp312-cp312-win_arm64.whl", hash = "sha256:cca3868ddfaccfbc4bfb1d608e2ccaaebe0ae628e1416aeb9c4d88c001bb45ab", size = 1888894, upload-time = "2025-04-23T18:31:51.609Z" },
{ url = "https://files.pythonhosted.org/packages/46/8c/99040727b41f56616573a28771b1bfa08a3d3fe74d3d513f01251f79f172/pydantic_core-2.33.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:1082dd3e2d7109ad8b7da48e1d4710c8d06c253cbc4a27c1cff4fbcaa97a9e3f", size = 2015688, upload-time = "2025-04-23T18:31:53.175Z" },
{ url = "https://files.pythonhosted.org/packages/3a/cc/5999d1eb705a6cefc31f0b4a90e9f7fc400539b1a1030529700cc1b51838/pydantic_core-2.33.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f517ca031dfc037a9c07e748cefd8d96235088b83b4f4ba8939105d20fa1dcd6", size = 1844808, upload-time = "2025-04-23T18:31:54.79Z" },
{ url = "https://files.pythonhosted.org/packages/6f/5e/a0a7b8885c98889a18b6e376f344da1ef323d270b44edf8174d6bce4d622/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a9f2c9dd19656823cb8250b0724ee9c60a82f3cdf68a080979d13092a3b0fef", size = 1885580, upload-time = "2025-04-23T18:31:57.393Z" },
{ url = "https://files.pythonhosted.org/packages/3b/2a/953581f343c7d11a304581156618c3f592435523dd9d79865903272c256a/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2b0a451c263b01acebe51895bfb0e1cc842a5c666efe06cdf13846c7418caa9a", size = 1973859, upload-time = "2025-04-23T18:31:59.065Z" },
{ url = "https://files.pythonhosted.org/packages/e6/55/f1a813904771c03a3f97f676c62cca0c0a4138654107c1b61f19c644868b/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ea40a64d23faa25e62a70ad163571c0b342b8bf66d5fa612ac0dec4f069d916", size = 2120810, upload-time = "2025-04-23T18:32:00.78Z" },
{ url = "https://files.pythonhosted.org/packages/aa/c3/053389835a996e18853ba107a63caae0b9deb4a276c6b472931ea9ae6e48/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fb2d542b4d66f9470e8065c5469ec676978d625a8b7a363f07d9a501a9cb36a", size = 2676498, upload-time = "2025-04-23T18:32:02.418Z" },
{ url = "https://files.pythonhosted.org/packages/eb/3c/f4abd740877a35abade05e437245b192f9d0ffb48bbbbd708df33d3cda37/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdac5d6ffa1b5a83bca06ffe7583f5576555e6c8b3a91fbd25ea7780f825f7d", size = 2000611, upload-time = "2025-04-23T18:32:04.152Z" },
{ url = "https://files.pythonhosted.org/packages/59/a7/63ef2fed1837d1121a894d0ce88439fe3e3b3e48c7543b2a4479eb99c2bd/pydantic_core-2.33.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04a1a413977ab517154eebb2d326da71638271477d6ad87a769102f7c2488c56", size = 2107924, upload-time = "2025-04-23T18:32:06.129Z" },
{ url = "https://files.pythonhosted.org/packages/04/8f/2551964ef045669801675f1cfc3b0d74147f4901c3ffa42be2ddb1f0efc4/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c8e7af2f4e0194c22b5b37205bfb293d166a7344a5b0d0eaccebc376546d77d5", size = 2063196, upload-time = "2025-04-23T18:32:08.178Z" },
{ url = "https://files.pythonhosted.org/packages/26/bd/d9602777e77fc6dbb0c7db9ad356e9a985825547dce5ad1d30ee04903918/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:5c92edd15cd58b3c2d34873597a1e20f13094f59cf88068adb18947df5455b4e", size = 2236389, upload-time = "2025-04-23T18:32:10.242Z" },
{ url = "https://files.pythonhosted.org/packages/42/db/0e950daa7e2230423ab342ae918a794964b053bec24ba8af013fc7c94846/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:65132b7b4a1c0beded5e057324b7e16e10910c106d43675d9bd87d4f38dde162", size = 2239223, upload-time = "2025-04-23T18:32:12.382Z" },
{ url = "https://files.pythonhosted.org/packages/58/4d/4f937099c545a8a17eb52cb67fe0447fd9a373b348ccfa9a87f141eeb00f/pydantic_core-2.33.2-cp313-cp313-win32.whl", hash = "sha256:52fb90784e0a242bb96ec53f42196a17278855b0f31ac7c3cc6f5c1ec4811849", size = 1900473, upload-time = "2025-04-23T18:32:14.034Z" },
{ url = "https://files.pythonhosted.org/packages/a0/75/4a0a9bac998d78d889def5e4ef2b065acba8cae8c93696906c3a91f310ca/pydantic_core-2.33.2-cp313-cp313-win_amd64.whl", hash = "sha256:c083a3bdd5a93dfe480f1125926afcdbf2917ae714bdb80b36d34318b2bec5d9", size = 1955269, upload-time = "2025-04-23T18:32:15.783Z" },
{ url = "https://files.pythonhosted.org/packages/f9/86/1beda0576969592f1497b4ce8e7bc8cbdf614c352426271b1b10d5f0aa64/pydantic_core-2.33.2-cp313-cp313-win_arm64.whl", hash = "sha256:e80b087132752f6b3d714f041ccf74403799d3b23a72722ea2e6ba2e892555b9", size = 1893921, upload-time = "2025-04-23T18:32:18.473Z" },
{ url = "https://files.pythonhosted.org/packages/a4/7d/e09391c2eebeab681df2b74bfe6c43422fffede8dc74187b2b0bf6fd7571/pydantic_core-2.33.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:61c18fba8e5e9db3ab908620af374db0ac1baa69f0f32df4f61ae23f15e586ac", size = 1806162, upload-time = "2025-04-23T18:32:20.188Z" },
{ url = "https://files.pythonhosted.org/packages/f1/3d/847b6b1fed9f8ed3bb95a9ad04fbd0b212e832d4f0f50ff4d9ee5a9f15cf/pydantic_core-2.33.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95237e53bb015f67b63c91af7518a62a8660376a6a0db19b89acc77a4d6199f5", size = 1981560, upload-time = "2025-04-23T18:32:22.354Z" },
{ url = "https://files.pythonhosted.org/packages/6f/9a/e73262f6c6656262b5fdd723ad90f518f579b7bc8622e43a942eec53c938/pydantic_core-2.33.2-cp313-cp313t-win_amd64.whl", hash = "sha256:c2fc0a768ef76c15ab9238afa6da7f69895bb5d1ee83aeea2e3509af4472d0b9", size = 1935777, upload-time = "2025-04-23T18:32:25.088Z" },
{ url = "https://files.pythonhosted.org/packages/53/ea/bbe9095cdd771987d13c82d104a9c8559ae9aec1e29f139e286fd2e9256e/pydantic_core-2.33.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:a2b911a5b90e0374d03813674bf0a5fbbb7741570dcd4b4e85a2e48d17def29d", size = 2028677, upload-time = "2025-04-23T18:32:27.227Z" },
{ url = "https://files.pythonhosted.org/packages/49/1d/4ac5ed228078737d457a609013e8f7edc64adc37b91d619ea965758369e5/pydantic_core-2.33.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6fa6dfc3e4d1f734a34710f391ae822e0a8eb8559a85c6979e14e65ee6ba2954", size = 1864735, upload-time = "2025-04-23T18:32:29.019Z" },
{ url = "https://files.pythonhosted.org/packages/23/9a/2e70d6388d7cda488ae38f57bc2f7b03ee442fbcf0d75d848304ac7e405b/pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c54c939ee22dc8e2d545da79fc5381f1c020d6d3141d3bd747eab59164dc89fb", size = 1898467, upload-time = "2025-04-23T18:32:31.119Z" },
{ url = "https://files.pythonhosted.org/packages/ff/2e/1568934feb43370c1ffb78a77f0baaa5a8b6897513e7a91051af707ffdc4/pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:53a57d2ed685940a504248187d5685e49eb5eef0f696853647bf37c418c538f7", size = 1983041, upload-time = "2025-04-23T18:32:33.655Z" },
{ url = "https://files.pythonhosted.org/packages/01/1a/1a1118f38ab64eac2f6269eb8c120ab915be30e387bb561e3af904b12499/pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09fb9dd6571aacd023fe6aaca316bd01cf60ab27240d7eb39ebd66a3a15293b4", size = 2136503, upload-time = "2025-04-23T18:32:35.519Z" },
{ url = "https://files.pythonhosted.org/packages/5c/da/44754d1d7ae0f22d6d3ce6c6b1486fc07ac2c524ed8f6eca636e2e1ee49b/pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0e6116757f7959a712db11f3e9c0a99ade00a5bbedae83cb801985aa154f071b", size = 2736079, upload-time = "2025-04-23T18:32:37.659Z" },
{ url = "https://files.pythonhosted.org/packages/4d/98/f43cd89172220ec5aa86654967b22d862146bc4d736b1350b4c41e7c9c03/pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d55ab81c57b8ff8548c3e4947f119551253f4e3787a7bbc0b6b3ca47498a9d3", size = 2006508, upload-time = "2025-04-23T18:32:39.637Z" },
{ url = "https://files.pythonhosted.org/packages/2b/cc/f77e8e242171d2158309f830f7d5d07e0531b756106f36bc18712dc439df/pydantic_core-2.33.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c20c462aa4434b33a2661701b861604913f912254e441ab8d78d30485736115a", size = 2113693, upload-time = "2025-04-23T18:32:41.818Z" },
{ url = "https://files.pythonhosted.org/packages/54/7a/7be6a7bd43e0a47c147ba7fbf124fe8aaf1200bc587da925509641113b2d/pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:44857c3227d3fb5e753d5fe4a3420d6376fa594b07b621e220cd93703fe21782", size = 2074224, upload-time = "2025-04-23T18:32:44.033Z" },
{ url = "https://files.pythonhosted.org/packages/2a/07/31cf8fadffbb03be1cb520850e00a8490c0927ec456e8293cafda0726184/pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:eb9b459ca4df0e5c87deb59d37377461a538852765293f9e6ee834f0435a93b9", size = 2245403, upload-time = "2025-04-23T18:32:45.836Z" },
{ url = "https://files.pythonhosted.org/packages/b6/8d/bbaf4c6721b668d44f01861f297eb01c9b35f612f6b8e14173cb204e6240/pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9fcd347d2cc5c23b06de6d3b7b8275be558a0c90549495c699e379a80bf8379e", size = 2242331, upload-time = "2025-04-23T18:32:47.618Z" },
{ url = "https://files.pythonhosted.org/packages/bb/93/3cc157026bca8f5006250e74515119fcaa6d6858aceee8f67ab6dc548c16/pydantic_core-2.33.2-cp39-cp39-win32.whl", hash = "sha256:83aa99b1285bc8f038941ddf598501a86f1536789740991d7d8756e34f1e74d9", size = 1910571, upload-time = "2025-04-23T18:32:49.401Z" },
{ url = "https://files.pythonhosted.org/packages/5b/90/7edc3b2a0d9f0dda8806c04e511a67b0b7a41d2187e2003673a996fb4310/pydantic_core-2.33.2-cp39-cp39-win_amd64.whl", hash = "sha256:f481959862f57f29601ccced557cc2e817bce7533ab8e01a797a48b49c9692b3", size = 1956504, upload-time = "2025-04-23T18:32:51.287Z" },
{ url = "https://files.pythonhosted.org/packages/30/68/373d55e58b7e83ce371691f6eaa7175e3a24b956c44628eb25d7da007917/pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5c4aa4e82353f65e548c476b37e64189783aa5384903bfea4f41580f255fddfa", size = 2023982, upload-time = "2025-04-23T18:32:53.14Z" },
{ url = "https://files.pythonhosted.org/packages/a4/16/145f54ac08c96a63d8ed6442f9dec17b2773d19920b627b18d4f10a061ea/pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d946c8bf0d5c24bf4fe333af284c59a19358aa3ec18cb3dc4370080da1e8ad29", size = 1858412, upload-time = "2025-04-23T18:32:55.52Z" },
{ url = "https://files.pythonhosted.org/packages/41/b1/c6dc6c3e2de4516c0bb2c46f6a373b91b5660312342a0cf5826e38ad82fa/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87b31b6846e361ef83fedb187bb5b4372d0da3f7e28d85415efa92d6125d6e6d", size = 1892749, upload-time = "2025-04-23T18:32:57.546Z" },
{ url = "https://files.pythonhosted.org/packages/12/73/8cd57e20afba760b21b742106f9dbdfa6697f1570b189c7457a1af4cd8a0/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa9d91b338f2df0508606f7009fde642391425189bba6d8c653afd80fd6bb64e", size = 2067527, upload-time = "2025-04-23T18:32:59.771Z" },
{ url = "https://files.pythonhosted.org/packages/e3/d5/0bb5d988cc019b3cba4a78f2d4b3854427fc47ee8ec8e9eaabf787da239c/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2058a32994f1fde4ca0480ab9d1e75a0e8c87c22b53a3ae66554f9af78f2fe8c", size = 2108225, upload-time = "2025-04-23T18:33:04.51Z" },
{ url = "https://files.pythonhosted.org/packages/f1/c5/00c02d1571913d496aabf146106ad8239dc132485ee22efe08085084ff7c/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:0e03262ab796d986f978f79c943fc5f620381be7287148b8010b4097f79a39ec", size = 2069490, upload-time = "2025-04-23T18:33:06.391Z" },
{ url = "https://files.pythonhosted.org/packages/22/a8/dccc38768274d3ed3a59b5d06f59ccb845778687652daa71df0cab4040d7/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:1a8695a8d00c73e50bff9dfda4d540b7dee29ff9b8053e38380426a85ef10052", size = 2237525, upload-time = "2025-04-23T18:33:08.44Z" },
{ url = "https://files.pythonhosted.org/packages/d4/e7/4f98c0b125dda7cf7ccd14ba936218397b44f50a56dd8c16a3091df116c3/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:fa754d1850735a0b0e03bcffd9d4b4343eb417e47196e4485d9cca326073a42c", size = 2238446, upload-time = "2025-04-23T18:33:10.313Z" },
{ url = "https://files.pythonhosted.org/packages/ce/91/2ec36480fdb0b783cd9ef6795753c1dea13882f2e68e73bce76ae8c21e6a/pydantic_core-2.33.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:a11c8d26a50bfab49002947d3d237abe4d9e4b5bdc8846a63537b6488e197808", size = 2066678, upload-time = "2025-04-23T18:33:12.224Z" },
{ url = "https://files.pythonhosted.org/packages/7b/27/d4ae6487d73948d6f20dddcd94be4ea43e74349b56eba82e9bdee2d7494c/pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:dd14041875d09cc0f9308e37a6f8b65f5585cf2598a53aa0123df8b129d481f8", size = 2025200, upload-time = "2025-04-23T18:33:14.199Z" },
{ url = "https://files.pythonhosted.org/packages/f1/b8/b3cb95375f05d33801024079b9392a5ab45267a63400bf1866e7ce0f0de4/pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:d87c561733f66531dced0da6e864f44ebf89a8fba55f31407b00c2f7f9449593", size = 1859123, upload-time = "2025-04-23T18:33:16.555Z" },
{ url = "https://files.pythonhosted.org/packages/05/bc/0d0b5adeda59a261cd30a1235a445bf55c7e46ae44aea28f7bd6ed46e091/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f82865531efd18d6e07a04a17331af02cb7a651583c418df8266f17a63c6612", size = 1892852, upload-time = "2025-04-23T18:33:18.513Z" },
{ url = "https://files.pythonhosted.org/packages/3e/11/d37bdebbda2e449cb3f519f6ce950927b56d62f0b84fd9cb9e372a26a3d5/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bfb5112df54209d820d7bf9317c7a6c9025ea52e49f46b6a2060104bba37de7", size = 2067484, upload-time = "2025-04-23T18:33:20.475Z" },
{ url = "https://files.pythonhosted.org/packages/8c/55/1f95f0a05ce72ecb02a8a8a1c3be0579bbc29b1d5ab68f1378b7bebc5057/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:64632ff9d614e5eecfb495796ad51b0ed98c453e447a76bcbeeb69615079fc7e", size = 2108896, upload-time = "2025-04-23T18:33:22.501Z" },
{ url = "https://files.pythonhosted.org/packages/53/89/2b2de6c81fa131f423246a9109d7b2a375e83968ad0800d6e57d0574629b/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:f889f7a40498cc077332c7ab6b4608d296d852182211787d4f3ee377aaae66e8", size = 2069475, upload-time = "2025-04-23T18:33:24.528Z" },
{ url = "https://files.pythonhosted.org/packages/b8/e9/1f7efbe20d0b2b10f6718944b5d8ece9152390904f29a78e68d4e7961159/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:de4b83bb311557e439b9e186f733f6c645b9417c84e2eb8203f3f820a4b988bf", size = 2239013, upload-time = "2025-04-23T18:33:26.621Z" },
{ url = "https://files.pythonhosted.org/packages/3c/b2/5309c905a93811524a49b4e031e9851a6b00ff0fb668794472ea7746b448/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:82f68293f055f51b51ea42fafc74b6aad03e70e191799430b90c13d643059ebb", size = 2238715, upload-time = "2025-04-23T18:33:28.656Z" },
{ url = "https://files.pythonhosted.org/packages/32/56/8a7ca5d2cd2cda1d245d34b1c9a942920a718082ae8e54e5f3e5a58b7add/pydantic_core-2.33.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:329467cecfb529c925cf2bbd4d60d2c509bc2fb52a20c1045bf09bb70971a9c1", size = 2066757, upload-time = "2025-04-23T18:33:30.645Z" },
{ url = "https://files.pythonhosted.org/packages/08/98/dbf3fdfabaf81cda5622154fda78ea9965ac467e3239078e0dcd6df159e7/pydantic_core-2.33.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:87acbfcf8e90ca885206e98359d7dca4bcbb35abdc0ff66672a293e1d7a19101", size = 2024034, upload-time = "2025-04-23T18:33:32.843Z" },
{ url = "https://files.pythonhosted.org/packages/8d/99/7810aa9256e7f2ccd492590f86b79d370df1e9292f1f80b000b6a75bd2fb/pydantic_core-2.33.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:7f92c15cd1e97d4b12acd1cc9004fa092578acfa57b67ad5e43a197175d01a64", size = 1858578, upload-time = "2025-04-23T18:33:34.912Z" },
{ url = "https://files.pythonhosted.org/packages/d8/60/bc06fa9027c7006cc6dd21e48dbf39076dc39d9abbaf718a1604973a9670/pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3f26877a748dc4251cfcfda9dfb5f13fcb034f5308388066bcfe9031b63ae7d", size = 1892858, upload-time = "2025-04-23T18:33:36.933Z" },
{ url = "https://files.pythonhosted.org/packages/f2/40/9d03997d9518816c68b4dfccb88969756b9146031b61cd37f781c74c9b6a/pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dac89aea9af8cd672fa7b510e7b8c33b0bba9a43186680550ccf23020f32d535", size = 2068498, upload-time = "2025-04-23T18:33:38.997Z" },
{ url = "https://files.pythonhosted.org/packages/d8/62/d490198d05d2d86672dc269f52579cad7261ced64c2df213d5c16e0aecb1/pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:970919794d126ba8645f3837ab6046fb4e72bbc057b3709144066204c19a455d", size = 2108428, upload-time = "2025-04-23T18:33:41.18Z" },
{ url = "https://files.pythonhosted.org/packages/9a/ec/4cd215534fd10b8549015f12ea650a1a973da20ce46430b68fc3185573e8/pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:3eb3fe62804e8f859c49ed20a8451342de53ed764150cb14ca71357c765dc2a6", size = 2069854, upload-time = "2025-04-23T18:33:43.446Z" },
{ url = "https://files.pythonhosted.org/packages/1a/1a/abbd63d47e1d9b0d632fee6bb15785d0889c8a6e0a6c3b5a8e28ac1ec5d2/pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:3abcd9392a36025e3bd55f9bd38d908bd17962cc49bc6da8e7e96285336e2bca", size = 2237859, upload-time = "2025-04-23T18:33:45.56Z" },
{ url = "https://files.pythonhosted.org/packages/80/1c/fa883643429908b1c90598fd2642af8839efd1d835b65af1f75fba4d94fe/pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:3a1c81334778f9e3af2f8aeb7a960736e5cab1dfebfb26aabca09afd2906c039", size = 2239059, upload-time = "2025-04-23T18:33:47.735Z" },
{ url = "https://files.pythonhosted.org/packages/d4/29/3cade8a924a61f60ccfa10842f75eb12787e1440e2b8660ceffeb26685e7/pydantic_core-2.33.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2807668ba86cb38c6817ad9bc66215ab8584d1d304030ce4f0887336f28a5e27", size = 2066661, upload-time = "2025-04-23T18:33:49.995Z" },
]
[[package]]
name = "pydantic-settings"
version = "2.10.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "pydantic" },
{ name = "python-dotenv" },
{ name = "typing-inspection" },
]
sdist = { url = "https://files.pythonhosted.org/packages/68/85/1ea668bbab3c50071ca613c6ab30047fb36ab0da1b92fa8f17bbc38fd36c/pydantic_settings-2.10.1.tar.gz", hash = "sha256:06f0062169818d0f5524420a360d632d5857b83cffd4d42fe29597807a1614ee", size = 172583, upload-time = "2025-06-24T13:26:46.841Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/58/f0/427018098906416f580e3cf1366d3b1abfb408a0652e9f31600c24a1903c/pydantic_settings-2.10.1-py3-none-any.whl", hash = "sha256:a60952460b99cf661dc25c29c0ef171721f98bfcb52ef8d9ea4c943d7c8cc796", size = 45235, upload-time = "2025-06-24T13:26:45.485Z" },
]
[[package]]
name = "pygments"
version = "2.19.2"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631, upload-time = "2025-06-21T13:39:12.283Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" },
]
[[package]]
name = "pytest"
version = "8.4.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "colorama", marker = "sys_platform == 'win32'" },
{ name = "exceptiongroup", marker = "python_full_version < '3.11'" },
{ name = "iniconfig" },
{ name = "packaging" },
{ name = "pluggy" },
{ name = "pygments" },
{ name = "tomli", marker = "python_full_version < '3.11'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/08/ba/45911d754e8eba3d5a841a5ce61a65a685ff1798421ac054f85aa8747dfb/pytest-8.4.1.tar.gz", hash = "sha256:7c67fd69174877359ed9371ec3af8a3d2b04741818c51e5e99cc1742251fa93c", size = 1517714, upload-time = "2025-06-18T05:48:06.109Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/29/16/c8a903f4c4dffe7a12843191437d7cd8e32751d5de349d45d3fe69544e87/pytest-8.4.1-py3-none-any.whl", hash = "sha256:539c70ba6fcead8e78eebbf1115e8b589e7565830d7d006a8723f19ac8a0afb7", size = 365474, upload-time = "2025-06-18T05:48:03.955Z" },
]
[[package]]
name = "pytest-asyncio"
version = "1.0.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "pytest" },
{ name = "typing-extensions", marker = "python_full_version < '3.10'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/d0/d4/14f53324cb1a6381bef29d698987625d80052bb33932d8e7cbf9b337b17c/pytest_asyncio-1.0.0.tar.gz", hash = "sha256:d15463d13f4456e1ead2594520216b225a16f781e144f8fdf6c5bb4667c48b3f", size = 46960, upload-time = "2025-05-26T04:54:40.484Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/30/05/ce271016e351fddc8399e546f6e23761967ee09c8c568bbfbecb0c150171/pytest_asyncio-1.0.0-py3-none-any.whl", hash = "sha256:4f024da9f1ef945e680dc68610b52550e36590a67fd31bb3b4943979a1f90ef3", size = 15976, upload-time = "2025-05-26T04:54:39.035Z" },
]
[[package]]
name = "pytest-cov"
version = "6.2.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "coverage", extra = ["toml"] },
{ name = "pluggy" },
{ name = "pytest" },
]
sdist = { url = "https://files.pythonhosted.org/packages/18/99/668cade231f434aaa59bbfbf49469068d2ddd945000621d3d165d2e7dd7b/pytest_cov-6.2.1.tar.gz", hash = "sha256:25cc6cc0a5358204b8108ecedc51a9b57b34cc6b8c967cc2c01a4e00d8a67da2", size = 69432, upload-time = "2025-06-12T10:47:47.684Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/bc/16/4ea354101abb1287856baa4af2732be351c7bee728065aed451b678153fd/pytest_cov-6.2.1-py3-none-any.whl", hash = "sha256:f5bc4c23f42f1cdd23c70b1dab1bbaef4fc505ba950d53e0081d0730dd7e86d5", size = 24644, upload-time = "2025-06-12T10:47:45.932Z" },
]
[[package]]
name = "python-dateutil"
version = "2.9.0.post0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "six" },
]
sdist = { url = "https://files.pythonhosted.org/packages/66/c0/0c8b6ad9f17a802ee498c46e004a0eb49bc148f2fd230864601a86dcf6db/python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 342432, upload-time = "2024-03-01T18:36:20.211Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892, upload-time = "2024-03-01T18:36:18.57Z" },
]
[[package]]
name = "python-dotenv"
version = "1.1.1"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/f6/b0/4bc07ccd3572a2f9df7e6782f52b0c6c90dcbb803ac4a167702d7d0dfe1e/python_dotenv-1.1.1.tar.gz", hash = "sha256:a8a6399716257f45be6a007360200409fce5cda2661e3dec71d23dc15f6189ab", size = 41978, upload-time = "2025-06-24T04:21:07.341Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/5f/ed/539768cf28c661b5b068d66d96a2f155c4971a5d55684a514c1a0e0dec2f/python_dotenv-1.1.1-py3-none-any.whl", hash = "sha256:31f23644fe2602f88ff55e1f5c79ba497e01224ee7737937930c448e4d0e24dc", size = 20556, upload-time = "2025-06-24T04:21:06.073Z" },
]
[[package]]
name = "pyyaml"
version = "6.0.2"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/54/ed/79a089b6be93607fa5cdaedf301d7dfb23af5f25c398d5ead2525b063e17/pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e", size = 130631, upload-time = "2024-08-06T20:33:50.674Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/9b/95/a3fac87cb7158e231b5a6012e438c647e1a87f09f8e0d123acec8ab8bf71/PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086", size = 184199, upload-time = "2024-08-06T20:31:40.178Z" },
{ url = "https://files.pythonhosted.org/packages/c7/7a/68bd47624dab8fd4afbfd3c48e3b79efe09098ae941de5b58abcbadff5cb/PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf", size = 171758, upload-time = "2024-08-06T20:31:42.173Z" },
{ url = "https://files.pythonhosted.org/packages/49/ee/14c54df452143b9ee9f0f29074d7ca5516a36edb0b4cc40c3f280131656f/PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237", size = 718463, upload-time = "2024-08-06T20:31:44.263Z" },
{ url = "https://files.pythonhosted.org/packages/4d/61/de363a97476e766574650d742205be468921a7b532aa2499fcd886b62530/PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b", size = 719280, upload-time = "2024-08-06T20:31:50.199Z" },
{ url = "https://files.pythonhosted.org/packages/6b/4e/1523cb902fd98355e2e9ea5e5eb237cbc5f3ad5f3075fa65087aa0ecb669/PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed", size = 751239, upload-time = "2024-08-06T20:31:52.292Z" },
{ url = "https://files.pythonhosted.org/packages/b7/33/5504b3a9a4464893c32f118a9cc045190a91637b119a9c881da1cf6b7a72/PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180", size = 695802, upload-time = "2024-08-06T20:31:53.836Z" },
{ url = "https://files.pythonhosted.org/packages/5c/20/8347dcabd41ef3a3cdc4f7b7a2aff3d06598c8779faa189cdbf878b626a4/PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68", size = 720527, upload-time = "2024-08-06T20:31:55.565Z" },
{ url = "https://files.pythonhosted.org/packages/be/aa/5afe99233fb360d0ff37377145a949ae258aaab831bde4792b32650a4378/PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99", size = 144052, upload-time = "2024-08-06T20:31:56.914Z" },
{ url = "https://files.pythonhosted.org/packages/b5/84/0fa4b06f6d6c958d207620fc60005e241ecedceee58931bb20138e1e5776/PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e", size = 161774, upload-time = "2024-08-06T20:31:58.304Z" },
{ url = "https://files.pythonhosted.org/packages/f8/aa/7af4e81f7acba21a4c6be026da38fd2b872ca46226673c89a758ebdc4fd2/PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774", size = 184612, upload-time = "2024-08-06T20:32:03.408Z" },
{ url = "https://files.pythonhosted.org/packages/8b/62/b9faa998fd185f65c1371643678e4d58254add437edb764a08c5a98fb986/PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee", size = 172040, upload-time = "2024-08-06T20:32:04.926Z" },
{ url = "https://files.pythonhosted.org/packages/ad/0c/c804f5f922a9a6563bab712d8dcc70251e8af811fce4524d57c2c0fd49a4/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c", size = 736829, upload-time = "2024-08-06T20:32:06.459Z" },
{ url = "https://files.pythonhosted.org/packages/51/16/6af8d6a6b210c8e54f1406a6b9481febf9c64a3109c541567e35a49aa2e7/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317", size = 764167, upload-time = "2024-08-06T20:32:08.338Z" },
{ url = "https://files.pythonhosted.org/packages/75/e4/2c27590dfc9992f73aabbeb9241ae20220bd9452df27483b6e56d3975cc5/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85", size = 762952, upload-time = "2024-08-06T20:32:14.124Z" },
{ url = "https://files.pythonhosted.org/packages/9b/97/ecc1abf4a823f5ac61941a9c00fe501b02ac3ab0e373c3857f7d4b83e2b6/PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4", size = 735301, upload-time = "2024-08-06T20:32:16.17Z" },
{ url = "https://files.pythonhosted.org/packages/45/73/0f49dacd6e82c9430e46f4a027baa4ca205e8b0a9dce1397f44edc23559d/PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e", size = 756638, upload-time = "2024-08-06T20:32:18.555Z" },
{ url = "https://files.pythonhosted.org/packages/22/5f/956f0f9fc65223a58fbc14459bf34b4cc48dec52e00535c79b8db361aabd/PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5", size = 143850, upload-time = "2024-08-06T20:32:19.889Z" },
{ url = "https://files.pythonhosted.org/packages/ed/23/8da0bbe2ab9dcdd11f4f4557ccaf95c10b9811b13ecced089d43ce59c3c8/PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44", size = 161980, upload-time = "2024-08-06T20:32:21.273Z" },
{ url = "https://files.pythonhosted.org/packages/86/0c/c581167fc46d6d6d7ddcfb8c843a4de25bdd27e4466938109ca68492292c/PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab", size = 183873, upload-time = "2024-08-06T20:32:25.131Z" },
{ url = "https://files.pythonhosted.org/packages/a8/0c/38374f5bb272c051e2a69281d71cba6fdb983413e6758b84482905e29a5d/PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725", size = 173302, upload-time = "2024-08-06T20:32:26.511Z" },
{ url = "https://files.pythonhosted.org/packages/c3/93/9916574aa8c00aa06bbac729972eb1071d002b8e158bd0e83a3b9a20a1f7/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5", size = 739154, upload-time = "2024-08-06T20:32:28.363Z" },
{ url = "https://files.pythonhosted.org/packages/95/0f/b8938f1cbd09739c6da569d172531567dbcc9789e0029aa070856f123984/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425", size = 766223, upload-time = "2024-08-06T20:32:30.058Z" },
{ url = "https://files.pythonhosted.org/packages/b9/2b/614b4752f2e127db5cc206abc23a8c19678e92b23c3db30fc86ab731d3bd/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476", size = 767542, upload-time = "2024-08-06T20:32:31.881Z" },
{ url = "https://files.pythonhosted.org/packages/d4/00/dd137d5bcc7efea1836d6264f049359861cf548469d18da90cd8216cf05f/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48", size = 731164, upload-time = "2024-08-06T20:32:37.083Z" },
{ url = "https://files.pythonhosted.org/packages/c9/1f/4f998c900485e5c0ef43838363ba4a9723ac0ad73a9dc42068b12aaba4e4/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b", size = 756611, upload-time = "2024-08-06T20:32:38.898Z" },
{ url = "https://files.pythonhosted.org/packages/df/d1/f5a275fdb252768b7a11ec63585bc38d0e87c9e05668a139fea92b80634c/PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4", size = 140591, upload-time = "2024-08-06T20:32:40.241Z" },
{ url = "https://files.pythonhosted.org/packages/0c/e8/4f648c598b17c3d06e8753d7d13d57542b30d56e6c2dedf9c331ae56312e/PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8", size = 156338, upload-time = "2024-08-06T20:32:41.93Z" },
{ url = "https://files.pythonhosted.org/packages/ef/e3/3af305b830494fa85d95f6d95ef7fa73f2ee1cc8ef5b495c7c3269fb835f/PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba", size = 181309, upload-time = "2024-08-06T20:32:43.4Z" },
{ url = "https://files.pythonhosted.org/packages/45/9f/3b1c20a0b7a3200524eb0076cc027a970d320bd3a6592873c85c92a08731/PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1", size = 171679, upload-time = "2024-08-06T20:32:44.801Z" },
{ url = "https://files.pythonhosted.org/packages/7c/9a/337322f27005c33bcb656c655fa78325b730324c78620e8328ae28b64d0c/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133", size = 733428, upload-time = "2024-08-06T20:32:46.432Z" },
{ url = "https://files.pythonhosted.org/packages/a3/69/864fbe19e6c18ea3cc196cbe5d392175b4cf3d5d0ac1403ec3f2d237ebb5/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484", size = 763361, upload-time = "2024-08-06T20:32:51.188Z" },
{ url = "https://files.pythonhosted.org/packages/04/24/b7721e4845c2f162d26f50521b825fb061bc0a5afcf9a386840f23ea19fa/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5", size = 759523, upload-time = "2024-08-06T20:32:53.019Z" },
{ url = "https://files.pythonhosted.org/packages/2b/b2/e3234f59ba06559c6ff63c4e10baea10e5e7df868092bf9ab40e5b9c56b6/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc", size = 726660, upload-time = "2024-08-06T20:32:54.708Z" },
{ url = "https://files.pythonhosted.org/packages/fe/0f/25911a9f080464c59fab9027482f822b86bf0608957a5fcc6eaac85aa515/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652", size = 751597, upload-time = "2024-08-06T20:32:56.985Z" },
{ url = "https://files.pythonhosted.org/packages/14/0d/e2c3b43bbce3cf6bd97c840b46088a3031085179e596d4929729d8d68270/PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183", size = 140527, upload-time = "2024-08-06T20:33:03.001Z" },
{ url = "https://files.pythonhosted.org/packages/fa/de/02b54f42487e3d3c6efb3f89428677074ca7bf43aae402517bc7cca949f3/PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563", size = 156446, upload-time = "2024-08-06T20:33:04.33Z" },
{ url = "https://files.pythonhosted.org/packages/65/d8/b7a1db13636d7fb7d4ff431593c510c8b8fca920ade06ca8ef20015493c5/PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d", size = 184777, upload-time = "2024-08-06T20:33:25.896Z" },
{ url = "https://files.pythonhosted.org/packages/0a/02/6ec546cd45143fdf9840b2c6be8d875116a64076218b61d68e12548e5839/PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f", size = 172318, upload-time = "2024-08-06T20:33:27.212Z" },
{ url = "https://files.pythonhosted.org/packages/0e/9a/8cc68be846c972bda34f6c2a93abb644fb2476f4dcc924d52175786932c9/PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290", size = 720891, upload-time = "2024-08-06T20:33:28.974Z" },
{ url = "https://files.pythonhosted.org/packages/e9/6c/6e1b7f40181bc4805e2e07f4abc10a88ce4648e7e95ff1abe4ae4014a9b2/PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12", size = 722614, upload-time = "2024-08-06T20:33:34.157Z" },
{ url = "https://files.pythonhosted.org/packages/3d/32/e7bd8535d22ea2874cef6a81021ba019474ace0d13a4819c2a4bce79bd6a/PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19", size = 737360, upload-time = "2024-08-06T20:33:35.84Z" },
{ url = "https://files.pythonhosted.org/packages/d7/12/7322c1e30b9be969670b672573d45479edef72c9a0deac3bb2868f5d7469/PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e", size = 699006, upload-time = "2024-08-06T20:33:37.501Z" },
{ url = "https://files.pythonhosted.org/packages/82/72/04fcad41ca56491995076630c3ec1e834be241664c0c09a64c9a2589b507/PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725", size = 723577, upload-time = "2024-08-06T20:33:39.389Z" },
{ url = "https://files.pythonhosted.org/packages/ed/5e/46168b1f2757f1fcd442bc3029cd8767d88a98c9c05770d8b420948743bb/PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631", size = 144593, upload-time = "2024-08-06T20:33:46.63Z" },
{ url = "https://files.pythonhosted.org/packages/19/87/5124b1c1f2412bb95c59ec481eaf936cd32f0fe2a7b16b97b81c4c017a6a/PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8", size = 162312, upload-time = "2024-08-06T20:33:49.073Z" },
]
[[package]]
name = "rich"
version = "14.0.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "markdown-it-py" },
{ name = "pygments" },
{ name = "typing-extensions", marker = "python_full_version < '3.11'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/a1/53/830aa4c3066a8ab0ae9a9955976fb770fe9c6102117c8ec4ab3ea62d89e8/rich-14.0.0.tar.gz", hash = "sha256:82f1bc23a6a21ebca4ae0c45af9bdbc492ed20231dcb63f297d6d1021a9d5725", size = 224078, upload-time = "2025-03-30T14:15:14.23Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/0d/9b/63f4c7ebc259242c89b3acafdb37b41d1185c07ff0011164674e9076b491/rich-14.0.0-py3-none-any.whl", hash = "sha256:1c9491e1951aac09caffd42f448ee3d04e58923ffe14993f6e83068dc395d7e0", size = 243229, upload-time = "2025-03-30T14:15:12.283Z" },
]
[[package]]
name = "ruff"
version = "0.12.3"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/c3/2a/43955b530c49684d3c38fcda18c43caf91e99204c2a065552528e0552d4f/ruff-0.12.3.tar.gz", hash = "sha256:f1b5a4b6668fd7b7ea3697d8d98857390b40c1320a63a178eee6be0899ea2d77", size = 4459341, upload-time = "2025-07-11T13:21:16.086Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/e2/fd/b44c5115539de0d598d75232a1cc7201430b6891808df111b8b0506aae43/ruff-0.12.3-py3-none-linux_armv6l.whl", hash = "sha256:47552138f7206454eaf0c4fe827e546e9ddac62c2a3d2585ca54d29a890137a2", size = 10430499, upload-time = "2025-07-11T13:20:26.321Z" },
{ url = "https://files.pythonhosted.org/packages/43/c5/9eba4f337970d7f639a37077be067e4ec80a2ad359e4cc6c5b56805cbc66/ruff-0.12.3-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:0a9153b000c6fe169bb307f5bd1b691221c4286c133407b8827c406a55282041", size = 11213413, upload-time = "2025-07-11T13:20:30.017Z" },
{ url = "https://files.pythonhosted.org/packages/e2/2c/fac3016236cf1fe0bdc8e5de4f24c76ce53c6dd9b5f350d902549b7719b2/ruff-0.12.3-py3-none-macosx_11_0_arm64.whl", hash = "sha256:fa6b24600cf3b750e48ddb6057e901dd5b9aa426e316addb2a1af185a7509882", size = 10586941, upload-time = "2025-07-11T13:20:33.046Z" },
{ url = "https://files.pythonhosted.org/packages/c5/0f/41fec224e9dfa49a139f0b402ad6f5d53696ba1800e0f77b279d55210ca9/ruff-0.12.3-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e2506961bf6ead54887ba3562604d69cb430f59b42133d36976421bc8bd45901", size = 10783001, upload-time = "2025-07-11T13:20:35.534Z" },
{ url = "https://files.pythonhosted.org/packages/0d/ca/dd64a9ce56d9ed6cad109606ac014860b1c217c883e93bf61536400ba107/ruff-0.12.3-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c4faaff1f90cea9d3033cbbcdf1acf5d7fb11d8180758feb31337391691f3df0", size = 10269641, upload-time = "2025-07-11T13:20:38.459Z" },
{ url = "https://files.pythonhosted.org/packages/63/5c/2be545034c6bd5ce5bb740ced3e7014d7916f4c445974be11d2a406d5088/ruff-0.12.3-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40dced4a79d7c264389de1c59467d5d5cefd79e7e06d1dfa2c75497b5269a5a6", size = 11875059, upload-time = "2025-07-11T13:20:41.517Z" },
{ url = "https://files.pythonhosted.org/packages/8e/d4/a74ef1e801ceb5855e9527dae105eaff136afcb9cc4d2056d44feb0e4792/ruff-0.12.3-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:0262d50ba2767ed0fe212aa7e62112a1dcbfd46b858c5bf7bbd11f326998bafc", size = 12658890, upload-time = "2025-07-11T13:20:44.442Z" },
{ url = "https://files.pythonhosted.org/packages/13/c8/1057916416de02e6d7c9bcd550868a49b72df94e3cca0aeb77457dcd9644/ruff-0.12.3-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:12371aec33e1a3758597c5c631bae9a5286f3c963bdfb4d17acdd2d395406687", size = 12232008, upload-time = "2025-07-11T13:20:47.374Z" },
{ url = "https://files.pythonhosted.org/packages/f5/59/4f7c130cc25220392051fadfe15f63ed70001487eca21d1796db46cbcc04/ruff-0.12.3-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:560f13b6baa49785665276c963edc363f8ad4b4fc910a883e2625bdb14a83a9e", size = 11499096, upload-time = "2025-07-11T13:20:50.348Z" },
{ url = "https://files.pythonhosted.org/packages/d4/01/a0ad24a5d2ed6be03a312e30d32d4e3904bfdbc1cdbe63c47be9d0e82c79/ruff-0.12.3-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:023040a3499f6f974ae9091bcdd0385dd9e9eb4942f231c23c57708147b06311", size = 11688307, upload-time = "2025-07-11T13:20:52.945Z" },
{ url = "https://files.pythonhosted.org/packages/93/72/08f9e826085b1f57c9a0226e48acb27643ff19b61516a34c6cab9d6ff3fa/ruff-0.12.3-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:883d844967bffff5ab28bba1a4d246c1a1b2933f48cb9840f3fdc5111c603b07", size = 10661020, upload-time = "2025-07-11T13:20:55.799Z" },
{ url = "https://files.pythonhosted.org/packages/80/a0/68da1250d12893466c78e54b4a0ff381370a33d848804bb51279367fc688/ruff-0.12.3-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:2120d3aa855ff385e0e562fdee14d564c9675edbe41625c87eeab744a7830d12", size = 10246300, upload-time = "2025-07-11T13:20:58.222Z" },
{ url = "https://files.pythonhosted.org/packages/6a/22/5f0093d556403e04b6fd0984fc0fb32fbb6f6ce116828fd54306a946f444/ruff-0.12.3-py3-none-musllinux_1_2_i686.whl", hash = "sha256:6b16647cbb470eaf4750d27dddc6ebf7758b918887b56d39e9c22cce2049082b", size = 11263119, upload-time = "2025-07-11T13:21:01.503Z" },
{ url = "https://files.pythonhosted.org/packages/92/c9/f4c0b69bdaffb9968ba40dd5fa7df354ae0c73d01f988601d8fac0c639b1/ruff-0.12.3-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:e1417051edb436230023575b149e8ff843a324557fe0a265863b7602df86722f", size = 11746990, upload-time = "2025-07-11T13:21:04.524Z" },
{ url = "https://files.pythonhosted.org/packages/fe/84/7cc7bd73924ee6be4724be0db5414a4a2ed82d06b30827342315a1be9e9c/ruff-0.12.3-py3-none-win32.whl", hash = "sha256:dfd45e6e926deb6409d0616078a666ebce93e55e07f0fb0228d4b2608b2c248d", size = 10589263, upload-time = "2025-07-11T13:21:07.148Z" },
{ url = "https://files.pythonhosted.org/packages/07/87/c070f5f027bd81f3efee7d14cb4d84067ecf67a3a8efb43aadfc72aa79a6/ruff-0.12.3-py3-none-win_amd64.whl", hash = "sha256:a946cf1e7ba3209bdef039eb97647f1c77f6f540e5845ec9c114d3af8df873e7", size = 11695072, upload-time = "2025-07-11T13:21:11.004Z" },
{ url = "https://files.pythonhosted.org/packages/e0/30/f3eaf6563c637b6e66238ed6535f6775480db973c836336e4122161986fc/ruff-0.12.3-py3-none-win_arm64.whl", hash = "sha256:5f9c7c9c8f84c2d7f27e93674d27136fbf489720251544c4da7fb3d742e011b1", size = 10805855, upload-time = "2025-07-11T13:21:13.547Z" },
]
[[package]]
name = "sgmllib3k"
version = "1.0.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/9e/bd/3704a8c3e0942d711c1299ebf7b9091930adae6675d7c8f476a7ce48653c/sgmllib3k-1.0.0.tar.gz", hash = "sha256:7868fb1c8bfa764c1ac563d3cf369c381d1325d36124933a726f29fcdaa812e9", size = 5750, upload-time = "2010-08-24T14:33:52.445Z" }
[[package]]
name = "shellingham"
version = "1.5.4"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/58/15/8b3609fd3830ef7b27b655beb4b4e9c62313a4e8da8c676e142cc210d58e/shellingham-1.5.4.tar.gz", hash = "sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de", size = 10310, upload-time = "2023-10-24T04:13:40.426Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/e0/f9/0595336914c5619e5f28a1fb793285925a8cd4b432c9da0a987836c7f822/shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686", size = 9755, upload-time = "2023-10-24T04:13:38.866Z" },
]
[[package]]
name = "six"
version = "1.17.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/94/e7/b2c673351809dca68a0e064b6af791aa332cf192da575fd474ed7d6f16a2/six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81", size = 34031, upload-time = "2024-12-04T17:35:28.174Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050, upload-time = "2024-12-04T17:35:26.475Z" },
]
[[package]]
name = "smmap"
version = "5.0.2"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/44/cd/a040c4b3119bbe532e5b0732286f805445375489fceaec1f48306068ee3b/smmap-5.0.2.tar.gz", hash = "sha256:26ea65a03958fa0c8a1c7e8c7a58fdc77221b8910f6be2131affade476898ad5", size = 22329, upload-time = "2025-01-02T07:14:40.909Z" }
+
wheels = [
+
{ url = "https://files.pythonhosted.org/packages/04/be/d09147ad1ec7934636ad912901c5fd7667e1c858e19d355237db0d0cd5e4/smmap-5.0.2-py3-none-any.whl", hash = "sha256:b30115f0def7d7531d22a0fb6502488d879e75b260a9db4d0819cfb25403af5e", size = 24303, upload-time = "2025-01-02T07:14:38.724Z" },
+
]
+
+
[[package]]
+
name = "sniffio"
+
version = "1.3.1"
+
source = { registry = "https://pypi.org/simple" }
+
sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372, upload-time = "2024-02-25T23:20:04.057Z" }
+
wheels = [
+
{ url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235, upload-time = "2024-02-25T23:20:01.196Z" },
+
]
+
+
[[package]]
+
name = "thicket"
+
source = { editable = "." }
+
dependencies = [
+
{ name = "bleach" },
+
{ name = "email-validator" },
+
{ name = "feedparser" },
+
{ name = "gitpython" },
+
{ name = "httpx" },
+
{ name = "jinja2" },
+
{ name = "pendulum" },
+
{ name = "platformdirs" },
+
{ name = "pydantic" },
+
{ name = "pydantic-settings" },
+
{ name = "pyyaml" },
+
{ name = "rich" },
+
{ name = "typer" },
+
]
+
+
[package.optional-dependencies]
+
dev = [
+
{ name = "black" },
+
{ name = "mypy" },
+
{ name = "pytest" },
+
{ name = "pytest-asyncio" },
+
{ name = "pytest-cov" },
+
{ name = "ruff" },
+
{ name = "types-pyyaml" },
+
]
+
+
[package.metadata]
+
requires-dist = [
+
{ name = "black", marker = "extra == 'dev'", specifier = ">=24.0.0" },
+
{ name = "bleach", specifier = ">=6.0.0" },
+
{ name = "email-validator" },
+
{ name = "feedparser", specifier = ">=6.0.11" },
+
{ name = "gitpython", specifier = ">=3.1.40" },
+
{ name = "httpx", specifier = ">=0.28.0" },
+
{ name = "jinja2", specifier = ">=3.1.6" },
+
{ name = "mypy", marker = "extra == 'dev'", specifier = ">=1.13.0" },
+
{ name = "pendulum", specifier = ">=3.0.0" },
+
{ name = "platformdirs", specifier = ">=4.0.0" },
+
{ name = "pydantic", specifier = ">=2.11.0" },
+
{ name = "pydantic-settings", specifier = ">=2.10.0" },
+
{ name = "pytest", marker = "extra == 'dev'", specifier = ">=8.0.0" },
+
{ name = "pytest-asyncio", marker = "extra == 'dev'", specifier = ">=0.24.0" },
+
{ name = "pytest-cov", marker = "extra == 'dev'", specifier = ">=6.0.0" },
+
{ name = "pyyaml", specifier = ">=6.0.0" },
+
{ name = "rich", specifier = ">=13.0.0" },
+
{ name = "ruff", marker = "extra == 'dev'", specifier = ">=0.8.0" },
+
{ name = "typer", specifier = ">=0.15.0" },
+
{ name = "types-pyyaml", marker = "extra == 'dev'", specifier = ">=6.0.0" },
+
]
+
provides-extras = ["dev"]
+
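
The [package.metadata] table above is uv's record of the dependency declarations it read from the project's pyproject.toml when resolving this lock. As a rough sketch only (reconstructed from the requires-dist entries above; the actual pyproject.toml is not part of this diff, so field layout and any other project settings are assumptions), the corresponding declaration would look something like:

[project]
name = "thicket"
# Runtime dependencies; uv mirrors these into requires-dist above.
dependencies = [
    "bleach>=6.0.0",
    "email-validator",
    "feedparser>=6.0.11",
    "gitpython>=3.1.40",
    "httpx>=0.28.0",
    "jinja2>=3.1.6",
    "pendulum>=3.0.0",
    "platformdirs>=4.0.0",
    "pydantic>=2.11.0",
    "pydantic-settings>=2.10.0",
    "pyyaml>=6.0.0",
    "rich>=13.0.0",
    "typer>=0.15.0",
]

[project.optional-dependencies]
# The "dev" extra; uv mirrors it into provides-extras and the
# marker = "extra == 'dev'" entries in requires-dist above.
dev = [
    "black>=24.0.0",
    "mypy>=1.13.0",
    "pytest>=8.0.0",
    "pytest-asyncio>=0.24.0",
    "pytest-cov>=6.0.0",
    "ruff>=0.8.0",
    "types-pyyaml>=6.0.0",
]

Editing that declaration and re-running uv lock would regenerate the resolution recorded in this file; uv sync then installs it into the project environment.
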
+
[[package]]
+
name = "tomli"
+
version = "2.2.1"
+
source = { registry = "https://pypi.org/simple" }
+
sdist = { url = "https://files.pythonhosted.org/packages/18/87/302344fed471e44a87289cf4967697d07e532f2421fdaf868a303cbae4ff/tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff", size = 17175, upload-time = "2024-11-27T22:38:36.873Z" }
+
wheels = [
+
{ url = "https://files.pythonhosted.org/packages/43/ca/75707e6efa2b37c77dadb324ae7d9571cb424e61ea73fad7c56c2d14527f/tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249", size = 131077, upload-time = "2024-11-27T22:37:54.956Z" },
+
{ url = "https://files.pythonhosted.org/packages/c7/16/51ae563a8615d472fdbffc43a3f3d46588c264ac4f024f63f01283becfbb/tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6", size = 123429, upload-time = "2024-11-27T22:37:56.698Z" },
+
{ url = "https://files.pythonhosted.org/packages/f1/dd/4f6cd1e7b160041db83c694abc78e100473c15d54620083dbd5aae7b990e/tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a", size = 226067, upload-time = "2024-11-27T22:37:57.63Z" },
+
{ url = "https://files.pythonhosted.org/packages/a9/6b/c54ede5dc70d648cc6361eaf429304b02f2871a345bbdd51e993d6cdf550/tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee", size = 236030, upload-time = "2024-11-27T22:37:59.344Z" },
+
{ url = "https://files.pythonhosted.org/packages/1f/47/999514fa49cfaf7a92c805a86c3c43f4215621855d151b61c602abb38091/tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e", size = 240898, upload-time = "2024-11-27T22:38:00.429Z" },
+
{ url = "https://files.pythonhosted.org/packages/73/41/0a01279a7ae09ee1573b423318e7934674ce06eb33f50936655071d81a24/tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4", size = 229894, upload-time = "2024-11-27T22:38:02.094Z" },
+
{ url = "https://files.pythonhosted.org/packages/55/18/5d8bc5b0a0362311ce4d18830a5d28943667599a60d20118074ea1b01bb7/tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106", size = 245319, upload-time = "2024-11-27T22:38:03.206Z" },
+
{ url = "https://files.pythonhosted.org/packages/92/a3/7ade0576d17f3cdf5ff44d61390d4b3febb8a9fc2b480c75c47ea048c646/tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8", size = 238273, upload-time = "2024-11-27T22:38:04.217Z" },
+
{ url = "https://files.pythonhosted.org/packages/72/6f/fa64ef058ac1446a1e51110c375339b3ec6be245af9d14c87c4a6412dd32/tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff", size = 98310, upload-time = "2024-11-27T22:38:05.908Z" },
+
{ url = "https://files.pythonhosted.org/packages/6a/1c/4a2dcde4a51b81be3530565e92eda625d94dafb46dbeb15069df4caffc34/tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b", size = 108309, upload-time = "2024-11-27T22:38:06.812Z" },
+
{ url = "https://files.pythonhosted.org/packages/52/e1/f8af4c2fcde17500422858155aeb0d7e93477a0d59a98e56cbfe75070fd0/tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea", size = 132762, upload-time = "2024-11-27T22:38:07.731Z" },
+
{ url = "https://files.pythonhosted.org/packages/03/b8/152c68bb84fc00396b83e7bbddd5ec0bd3dd409db4195e2a9b3e398ad2e3/tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8", size = 123453, upload-time = "2024-11-27T22:38:09.384Z" },
+
{ url = "https://files.pythonhosted.org/packages/c8/d6/fc9267af9166f79ac528ff7e8c55c8181ded34eb4b0e93daa767b8841573/tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192", size = 233486, upload-time = "2024-11-27T22:38:10.329Z" },
+
{ url = "https://files.pythonhosted.org/packages/5c/51/51c3f2884d7bab89af25f678447ea7d297b53b5a3b5730a7cb2ef6069f07/tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222", size = 242349, upload-time = "2024-11-27T22:38:11.443Z" },
+
{ url = "https://files.pythonhosted.org/packages/ab/df/bfa89627d13a5cc22402e441e8a931ef2108403db390ff3345c05253935e/tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77", size = 252159, upload-time = "2024-11-27T22:38:13.099Z" },
+
{ url = "https://files.pythonhosted.org/packages/9e/6e/fa2b916dced65763a5168c6ccb91066f7639bdc88b48adda990db10c8c0b/tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6", size = 237243, upload-time = "2024-11-27T22:38:14.766Z" },
+
{ url = "https://files.pythonhosted.org/packages/b4/04/885d3b1f650e1153cbb93a6a9782c58a972b94ea4483ae4ac5cedd5e4a09/tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd", size = 259645, upload-time = "2024-11-27T22:38:15.843Z" },
+
{ url = "https://files.pythonhosted.org/packages/9c/de/6b432d66e986e501586da298e28ebeefd3edc2c780f3ad73d22566034239/tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e", size = 244584, upload-time = "2024-11-27T22:38:17.645Z" },
+
{ url = "https://files.pythonhosted.org/packages/1c/9a/47c0449b98e6e7d1be6cbac02f93dd79003234ddc4aaab6ba07a9a7482e2/tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98", size = 98875, upload-time = "2024-11-27T22:38:19.159Z" },
+
{ url = "https://files.pythonhosted.org/packages/ef/60/9b9638f081c6f1261e2688bd487625cd1e660d0a85bd469e91d8db969734/tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4", size = 109418, upload-time = "2024-11-27T22:38:20.064Z" },
+
{ url = "https://files.pythonhosted.org/packages/04/90/2ee5f2e0362cb8a0b6499dc44f4d7d48f8fff06d28ba46e6f1eaa61a1388/tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7", size = 132708, upload-time = "2024-11-27T22:38:21.659Z" },
+
{ url = "https://files.pythonhosted.org/packages/c0/ec/46b4108816de6b385141f082ba99e315501ccd0a2ea23db4a100dd3990ea/tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c", size = 123582, upload-time = "2024-11-27T22:38:22.693Z" },
+
{ url = "https://files.pythonhosted.org/packages/a0/bd/b470466d0137b37b68d24556c38a0cc819e8febe392d5b199dcd7f578365/tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13", size = 232543, upload-time = "2024-11-27T22:38:24.367Z" },
+
{ url = "https://files.pythonhosted.org/packages/d9/e5/82e80ff3b751373f7cead2815bcbe2d51c895b3c990686741a8e56ec42ab/tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281", size = 241691, upload-time = "2024-11-27T22:38:26.081Z" },
+
{ url = "https://files.pythonhosted.org/packages/05/7e/2a110bc2713557d6a1bfb06af23dd01e7dde52b6ee7dadc589868f9abfac/tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272", size = 251170, upload-time = "2024-11-27T22:38:27.921Z" },
+
{ url = "https://files.pythonhosted.org/packages/64/7b/22d713946efe00e0adbcdfd6d1aa119ae03fd0b60ebed51ebb3fa9f5a2e5/tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140", size = 236530, upload-time = "2024-11-27T22:38:29.591Z" },
+
{ url = "https://files.pythonhosted.org/packages/38/31/3a76f67da4b0cf37b742ca76beaf819dca0ebef26d78fc794a576e08accf/tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2", size = 258666, upload-time = "2024-11-27T22:38:30.639Z" },
+
{ url = "https://files.pythonhosted.org/packages/07/10/5af1293da642aded87e8a988753945d0cf7e00a9452d3911dd3bb354c9e2/tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744", size = 243954, upload-time = "2024-11-27T22:38:31.702Z" },
+
{ url = "https://files.pythonhosted.org/packages/5b/b9/1ed31d167be802da0fc95020d04cd27b7d7065cc6fbefdd2f9186f60d7bd/tomli-2.2.1-cp313-cp313-win32.whl", hash = "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec", size = 98724, upload-time = "2024-11-27T22:38:32.837Z" },
+
{ url = "https://files.pythonhosted.org/packages/c7/32/b0963458706accd9afcfeb867c0f9175a741bf7b19cd424230714d722198/tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69", size = 109383, upload-time = "2024-11-27T22:38:34.455Z" },
+
{ url = "https://files.pythonhosted.org/packages/6e/c2/61d3e0f47e2b74ef40a68b9e6ad5984f6241a942f7cd3bbfbdbd03861ea9/tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc", size = 14257, upload-time = "2024-11-27T22:38:35.385Z" },
+
]
+
+
[[package]]
+
name = "typer"
+
version = "0.16.0"
+
source = { registry = "https://pypi.org/simple" }
+
dependencies = [
+
{ name = "click", version = "8.1.8", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" },
+
{ name = "click", version = "8.2.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" },
+
{ name = "rich" },
+
{ name = "shellingham" },
+
{ name = "typing-extensions" },
+
]
+
sdist = { url = "https://files.pythonhosted.org/packages/c5/8c/7d682431efca5fd290017663ea4588bf6f2c6aad085c7f108c5dbc316e70/typer-0.16.0.tar.gz", hash = "sha256:af377ffaee1dbe37ae9440cb4e8f11686ea5ce4e9bae01b84ae7c63b87f1dd3b", size = 102625, upload-time = "2025-05-26T14:30:31.824Z" }
+
wheels = [
+
{ url = "https://files.pythonhosted.org/packages/76/42/3efaf858001d2c2913de7f354563e3a3a2f0decae3efe98427125a8f441e/typer-0.16.0-py3-none-any.whl", hash = "sha256:1f79bed11d4d02d4310e3c1b7ba594183bcedb0ac73b27a9e5f28f6fb5b98855", size = 46317, upload-time = "2025-05-26T14:30:30.523Z" },
+
]
+
+
[[package]]
+
name = "types-pyyaml"
+
version = "6.0.12.20250516"
+
source = { registry = "https://pypi.org/simple" }
+
sdist = { url = "https://files.pythonhosted.org/packages/4e/22/59e2aeb48ceeee1f7cd4537db9568df80d62bdb44a7f9e743502ea8aab9c/types_pyyaml-6.0.12.20250516.tar.gz", hash = "sha256:9f21a70216fc0fa1b216a8176db5f9e0af6eb35d2f2932acb87689d03a5bf6ba", size = 17378, upload-time = "2025-05-16T03:08:04.897Z" }
+
wheels = [
+
{ url = "https://files.pythonhosted.org/packages/99/5f/e0af6f7f6a260d9af67e1db4f54d732abad514252a7a378a6c4d17dd1036/types_pyyaml-6.0.12.20250516-py3-none-any.whl", hash = "sha256:8478208feaeb53a34cb5d970c56a7cd76b72659442e733e268a94dc72b2d0530", size = 20312, upload-time = "2025-05-16T03:08:04.019Z" },
+
]
+
+
[[package]]
+
name = "typing-extensions"
+
version = "4.14.1"
+
source = { registry = "https://pypi.org/simple" }
+
sdist = { url = "https://files.pythonhosted.org/packages/98/5a/da40306b885cc8c09109dc2e1abd358d5684b1425678151cdaed4731c822/typing_extensions-4.14.1.tar.gz", hash = "sha256:38b39f4aeeab64884ce9f74c94263ef78f3c22467c8724005483154c26648d36", size = 107673, upload-time = "2025-07-04T13:28:34.16Z" }
+
wheels = [
+
{ url = "https://files.pythonhosted.org/packages/b5/00/d631e67a838026495268c2f6884f3711a15a9a2a96cd244fdaea53b823fb/typing_extensions-4.14.1-py3-none-any.whl", hash = "sha256:d1e1e3b58374dc93031d6eda2420a48ea44a36c2b4766a4fdeb3710755731d76", size = 43906, upload-time = "2025-07-04T13:28:32.743Z" },
+
]
+
+
[[package]]
+
name = "typing-inspection"
+
version = "0.4.1"
+
source = { registry = "https://pypi.org/simple" }
+
dependencies = [
+
{ name = "typing-extensions" },
+
]
+
sdist = { url = "https://files.pythonhosted.org/packages/f8/b1/0c11f5058406b3af7609f121aaa6b609744687f1d158b3c3a5bf4cc94238/typing_inspection-0.4.1.tar.gz", hash = "sha256:6ae134cc0203c33377d43188d4064e9b357dba58cff3185f22924610e70a9d28", size = 75726, upload-time = "2025-05-21T18:55:23.885Z" }
+
wheels = [
+
{ url = "https://files.pythonhosted.org/packages/17/69/cd203477f944c353c31bade965f880aa1061fd6bf05ded0726ca845b6ff7/typing_inspection-0.4.1-py3-none-any.whl", hash = "sha256:389055682238f53b04f7badcb49b989835495a96700ced5dab2d8feae4b26f51", size = 14552, upload-time = "2025-05-21T18:55:22.152Z" },
+
]
+
+
[[package]]
+
name = "tzdata"
+
version = "2025.2"
+
source = { registry = "https://pypi.org/simple" }
+
sdist = { url = "https://files.pythonhosted.org/packages/95/32/1a225d6164441be760d75c2c42e2780dc0873fe382da3e98a2e1e48361e5/tzdata-2025.2.tar.gz", hash = "sha256:b60a638fcc0daffadf82fe0f57e53d06bdec2f36c4df66280ae79bce6bd6f2b9", size = 196380, upload-time = "2025-03-23T13:54:43.652Z" }
+
wheels = [
+
{ url = "https://files.pythonhosted.org/packages/5c/23/c7abc0ca0a1526a0774eca151daeb8de62ec457e77262b66b359c3c7679e/tzdata-2025.2-py2.py3-none-any.whl", hash = "sha256:1a403fada01ff9221ca8044d701868fa132215d84beb92242d9acd2147f667a8", size = 347839, upload-time = "2025-03-23T13:54:41.845Z" },
+
]
+
+
[[package]]
+
name = "webencodings"
+
version = "0.5.1"
+
source = { registry = "https://pypi.org/simple" }
+
sdist = { url = "https://files.pythonhosted.org/packages/0b/02/ae6ceac1baeda530866a85075641cec12989bd8d31af6d5ab4a3e8c92f47/webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923", size = 9721, upload-time = "2017-04-05T20:21:34.189Z" }
+
wheels = [
+
{ url = "https://files.pythonhosted.org/packages/f4/24/2a3e3df732393fed8b3ebf2ec078f05546de641fe1b667ee316ec1dcf3b7/webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78", size = 11774, upload-time = "2017-04-05T20:21:32.581Z" },
+
]