Compare changes

Choose any two refs to compare.

+4
src/atpasser/nsid/__init__.py
···
Initializes an NSID object.
Parameters:
+
domainAuthority (list): Domain Authority as list.
+
domainAuthorityAsText (str): Domain Authority as text.
+
name (str): The name.
+
fragment (str | None): The fragment.
nsid (str): The NSID.
"""
+5 -5
docs/roadmap.md
···
|Data Model|**Will be rewritten**|
|Lexicon|**Will be rewritten with Data Model part**|
|Cryptography||
-
|Accounts||
+
|Accounts|**Will be implemented in Server and Client library**|
|Repository||
|Blobs|`blob` Naming blobs' CID only|
|Labels||
-
|HTTP API||
-
|OAuth||
-
|Event Stream||
-
|Sync||
+
|HTTP API|**Will be implemented in Server and Client library**|
+
|OAuth|**Will be implemented in Server and Client library**|
+
|Event Stream|**Will be implemented in Server and Client library**|
+
|Sync|**Will be implemented in Server and Client library**|
|DID|`did` Almost done|
|Handle|`handle` Almost done|
|NSID|`nsid` Almost done|
-54
src/atpasser/did/__init__.py
···
-
import re
-
from pyld import jsonld
-
-
-
class DID:
    """A class representing a DID (Decentralized Identifier).

    Attributes:
        uri (str): The validated DID URI.
    """

    def __init__(self, uri: str) -> None:
        """Initializes a DID object, validating the URI syntax.

        Parameters:
            uri (str): The DID URI, e.g. "did:plc:abc123".

        Raises:
            ValueError: If the URI does not match DID syntax or exceeds
                2048 characters.
        """
        # Method segment is lowercase letters; the identifier allows
        # [a-zA-Z0-9._:%-] but must not end with ':' or '%'.
        pattern = re.compile(r"^did:[a-z]+:[a-zA-Z0-9._:%-]*[a-zA-Z0-9._-]$")
        patternMatch = pattern.match(uri)
        if patternMatch and len(uri) <= 2048:
            self.uri = patternMatch[0]
        else:
            # Was a bare `raise ValueError`; include the offending value.
            raise ValueError(f"invalid DID URI: {uri!r}")

    def __str__(self) -> str:
        """Convert the DID to a string by returning its URI."""
        return self.uri

    def __eq__(self, value: object, /) -> bool:
        """Two DIDs are equal exactly when their URIs are identical."""
        if isinstance(value, DID):
            return str(self) == str(value)
        return False

    def __hash__(self):
        """Hash consistent with __eq__ (defining __eq__ alone would have
        made instances unhashable)."""
        return hash(self.uri)

    def fetch(self):
        """Fetch the metadata (DID document) of the DID.

        Returns:
            list | None: the expanded JSON-LD document as returned by the
            `PyLD` library, or None for unsupported DID methods.
        """
        if self.uri.startswith("did:plc:"):
            return jsonld.expand(f"https://plc.directory/{self.uri}")
        # BUG FIX: was startswith("did:web"), which also matched unrelated
        # methods such as "did:webfoo"; require the full "did:web:" prefix.
        if self.uri.startswith("did:web:"):
            # Extracted to a local to avoid nesting double quotes inside a
            # double-quoted f-string (valid only on Python 3.12+).
            host = self.uri.replace("did:web:", "")
            return jsonld.expand(f"https://{host}/.well-known/did.json")
        return None  # explicit: unsupported DID method
-96
src/atpasser/handle/__init__.py
···
-
import dns.resolver, requests
-
-
from atpasser import did
-
-
-
class Handle:
    """A class representing an AT Protocol handle (a DNS-style name).

    Attributes:
        handle (str): The handle exactly as passed to the constructor
            (original letter case is preserved; validation is
            case-insensitive).
    """

    def __init__(self, handle: str) -> None:
        """Initializes a Handle object, validating its syntax.

        Rules enforced: at most 253 characters; at least two dot-separated
        segments; each segment is 1-63 characters drawn from [a-z0-9-]
        (checked on the lowercased handle); segments may not start or end
        with a hyphen; the TLD may not start with a digit.

        Parameters:
            handle (str): The handle to validate.

        Raises:
            ValueError: If the handle violates any syntax rule.
        """
        if len(handle) > 253:
            raise ValueError("handle is more than 253 chars")
        labels = handle.lower().split(".")
        if len(labels) < 2:
            raise ValueError("handle must contain at least two segments")
        if labels[0] == "" or labels[-1] == "":
            raise ValueError("leading or trailing ASCII periods")
        validset = set("abcdefghijklmnopqrstuvwxyz0123456789-")
        for label in labels:
            if len(label) not in range(1, 64):
                raise ValueError("two periods or segment longer than 63 chars")
            if not set(label).issubset(validset):
                raise ValueError("invalid char used in segment")
            if label.startswith("-") or label.endswith("-"):
                raise ValueError("segment starts or ends with hyphen")
        if labels[-1][0] in "0123456789":
            raise ValueError("tld starts with digit")
        self.handle = handle

    def __str__(self) -> str:
        """Convert the Handle to a string by returning the handle text."""
        return self.handle

    def __eq__(self, value: object, /) -> bool:
        """Two Handles are equal exactly when their text is identical."""
        if isinstance(value, Handle):
            return str(self) == str(value)
        return False

    def __hash__(self):
        """Hash consistent with __eq__ (defining __eq__ alone would have
        made instances unhashable)."""
        return hash(self.handle)

    def toTID(self):
        """Resolve the handle to a DID.

        NOTE: despite the historical name, this returns a DID object. It
        tries the `_atproto.<handle>` DNS TXT record first, then the
        `/.well-known/atproto-did` HTTPS endpoint.

        Returns:
            A DID object, or `None` if the handle cannot be resolved.
        """
        try:
            answers = dns.resolver.resolve("_atproto." + self.handle, "TXT")
        except Exception:
            # DNS lookup failed entirely; fall through to the HTTPS method.
            answers = []
        for answer in answers:
            if str(answer).startswith('"did='):
                try:
                    # Strip the leading '"did=' and the trailing quote.
                    return did.DID(str(answer)[5:-1])
                except ValueError:
                    pass  # malformed record; keep trying other methods
        # NOTE(review): no timeout is set; a slow host can block here.
        response = requests.get(f"https://{self.handle}/.well-known/atproto-did")
        if response.status_code // 100 != 2:
            return None
        if response.headers.get("Content-Type") != "text/plain":
            # Some sites (e.g. Neocities) serve a wrong Content-Type;
            # tolerate it rather than failing.
            pass
        try:
            # .strip() tolerates a trailing newline in the response body,
            # matching the behavior of the newer uri/handle.py resolver.
            return did.DID(response.text.strip())
        except ValueError:
            return None
+61 -17
src/atpasser/uri/handle.py
···
"""
if len(handle) > 253:
-
raise InvalidHandleError(handle, "exceeds maximum length", f"Handle length {len(handle)} exceeds maximum allowed length of 253 characters")
+
raise InvalidHandleError(
+
handle,
+
"exceeds maximum length",
+
f"Handle length {len(handle)} exceeds maximum allowed length of 253 characters",
+
)
labels = handle.lower().split(".")
if len(labels) < 2:
-
raise InvalidHandleError(handle, "invalid format", "Handle must contain at least one dot separator, e.g., 'example.com'")
+
raise InvalidHandleError(
+
handle,
+
"invalid format",
+
"Handle must contain at least one dot separator, e.g., 'example.com'",
+
)
if labels[0] == "" or labels[-1] == "":
-
raise InvalidHandleError(handle, "invalid format", "Handle cannot start or end with a dot")
+
raise InvalidHandleError(
+
handle, "invalid format", "Handle cannot start or end with a dot"
+
)
for i, label in enumerate(labels):
if len(label) not in range(1, 64):
-
raise InvalidHandleError(handle, "segment length error", f"Handle segment {i+1} length {len(label)} is not in the 1-63 character range")
+
raise InvalidHandleError(
+
handle,
+
"segment length error",
+
f"Handle segment {i+1} length {len(label)} is not in the 1-63 character range",
+
)
charset = set(label)
validset = set("abcdefghijklmnopqrstuvwxyz0123456789-")
if not charset.issubset(validset):
invalid_chars = charset - validset
-
raise InvalidHandleError(handle, "contains invalid characters", f"Handle segment {i+1} contains invalid characters: {', '.join(invalid_chars)}")
+
raise InvalidHandleError(
+
handle,
+
"contains invalid characters",
+
f"Handle segment {i+1} contains invalid characters: {', '.join(invalid_chars)}",
+
)
if label.startswith("-") or label.endswith("-"):
-
raise InvalidHandleError(handle, "invalid format", f"Handle segment {i+1} cannot start or end with a hyphen")
+
raise InvalidHandleError(
+
handle,
+
"invalid format",
+
f"Handle segment {i+1} cannot start or end with a hyphen",
+
)
tld = labels[-1]
if tld[0] in "0123456789":
-
raise InvalidHandleError(handle, "invalid format", "Handle's top-level domain cannot start with a digit")
+
raise InvalidHandleError(
+
handle,
+
"invalid format",
+
"Handle's top-level domain cannot start with a digit",
+
)
self.handle = handle
···
answers = dns.resolver.resolve("_atproto." + self.handle, "TXT")
except Exception as e:
answers = []
-
+
for answer in answers:
answer_str = str(answer)
if answer_str.startswith('"did='):
···
# Continue trying other records or methods
continue
except Exception as e:
-
raise ResolutionError(self.handle, "DNS resolution", f"Error parsing DNS TXT record: {str(e)}")
-
+
raise ResolutionError(
+
self.handle,
+
"DNS resolution",
+
f"Error parsing DNS TXT record: {str(e)}",
+
)
+
# If DNS resolution fails, try HTTP method
try:
-
response = requests.get(f"https://{self.handle}/.well-known/atproto-did")
+
response = requests.get(
+
f"https://{self.handle}/.well-known/atproto-did"
+
)
if response.status_code // 100 != 2:
return None
-
+
# Some websites may return incorrect Content-Type, so here we only warn without throwing an exception
content_type = response.headers.get("Content-Type")
if content_type != "text/plain" and content_type:
# Log warning but don't block processing
pass
-
+
try:
return DID(response.text.strip())
except InvalidDIDError:
return None
except requests.RequestException as e:
-
raise ResolutionError(self.handle, "HTTP request", f"Error requesting well-known endpoint: {str(e)}")
+
raise ResolutionError(
+
self.handle,
+
"HTTP request",
+
f"Error requesting well-known endpoint: {str(e)}",
+
)
except Exception as e:
-
raise ResolutionError(self.handle, "HTTP parsing", f"Error parsing HTTP response: {str(e)}")
-
+
raise ResolutionError(
+
self.handle,
+
"HTTP parsing",
+
f"Error parsing HTTP response: {str(e)}",
+
)
+
except Exception as e:
if isinstance(e, ResolutionError):
raise
-
raise ResolutionError(self.handle, "resolution", f"Unknown error occurred while resolving Handle: {str(e)}")
+
raise ResolutionError(
+
self.handle,
+
"resolution",
+
f"Unknown error occurred while resolving Handle: {str(e)}",
+
)
+82 -20
src/atpasser/uri/nsid.py
···
if "#" in nsid:
parts = nsid.split("#", 1)
if len(parts) != 2:
-
raise InvalidNSIDError(nsid, "invalid format", "NSID fragment format is incorrect")
+
raise InvalidNSIDError(
+
nsid, "invalid format", "NSID fragment format is incorrect"
+
)
nsidWithoutFragment, fragment = parts
else:
nsidWithoutFragment, fragment = nsid, None
···
if not set([x for x in nsidWithoutFragment]).issubset(
set([chr(i) for i in range(0x80)])
):
-
raise InvalidNSIDError(nsid, "contains invalid characters", "NSID must only contain ASCII characters")
+
raise InvalidNSIDError(
+
nsid,
+
"contains invalid characters",
+
"NSID must only contain ASCII characters",
+
)
# Check length
if len(nsidWithoutFragment) > 317:
-
raise InvalidNSIDError(nsid, "exceeds maximum length", f"NSID length {len(nsidWithoutFragment)} exceeds maximum allowed length of 317 characters")
+
raise InvalidNSIDError(
+
nsid,
+
"exceeds maximum length",
+
f"NSID length {len(nsidWithoutFragment)} exceeds maximum allowed length of 317 characters",
+
)
segments = nsidWithoutFragment.split(".")
# Check for leading or trailing dots
if nsidWithoutFragment.startswith(".") or nsidWithoutFragment.endswith("."):
-
raise InvalidNSIDError(nsid, "invalid format", "NSID cannot start or end with a dot")
+
raise InvalidNSIDError(
+
nsid, "invalid format", "NSID cannot start or end with a dot"
+
)
# Check segment count
if len(segments) < 3:
-
raise InvalidNSIDError(nsid, "invalid format", f"NSID must contain at least 3 segments, currently has {len(segments)}")
+
raise InvalidNSIDError(
+
nsid,
+
"invalid format",
+
f"NSID must contain at least 3 segments, currently has {len(segments)}",
+
)
domainAuthority = [segment.lower() for segment in segments[0:-1]]
# Check domain authority length
if len(".".join(domainAuthority)) > 253:
-
raise InvalidNSIDError(nsid, "domain authority length exceeds limit", "Domain authority part length exceeds 253 characters")
+
raise InvalidNSIDError(
+
nsid,
+
"domain authority length exceeds limit",
+
"Domain authority part length exceeds 253 characters",
+
)
# Check each domain segment
for i, segment in enumerate(domainAuthority):
if len(segment) > 63 or segment == "":
-
raise InvalidNSIDError(nsid, "segment length error", f"Domain authority segment {i+1} length is not in the 1-63 character range")
+
raise InvalidNSIDError(
+
nsid,
+
"segment length error",
+
f"Domain authority segment {i+1} length is not in the 1-63 character range",
+
)
if not set(segment).issubset(set("abcdefghijklmnopqrstuvwxyz0123456789-")):
-
invalid_chars = set(segment) - set("abcdefghijklmnopqrstuvwxyz0123456789-")
-
raise InvalidNSIDError(nsid, "contains invalid characters", f"Domain authority segment {i+1} contains invalid characters: {', '.join(invalid_chars)}")
+
invalid_chars = set(segment) - set(
+
"abcdefghijklmnopqrstuvwxyz0123456789-"
+
)
+
raise InvalidNSIDError(
+
nsid,
+
"contains invalid characters",
+
f"Domain authority segment {i+1} contains invalid characters: {', '.join(invalid_chars)}",
+
)
if segment.startswith("-") or segment.endswith("-"):
-
raise InvalidNSIDError(nsid, "invalid format", f"Domain authority segment {i+1} cannot start or end with a hyphen")
-
+
raise InvalidNSIDError(
+
nsid,
+
"invalid format",
+
f"Domain authority segment {i+1} cannot start or end with a hyphen",
+
)
+
# Check if top-level domain starts with a digit
if segments[0][0] in "0123456789":
-
raise InvalidNSIDError(nsid, "invalid format", "NSID's top-level domain cannot start with a digit")
+
raise InvalidNSIDError(
+
nsid,
+
"invalid format",
+
"NSID's top-level domain cannot start with a digit",
+
)
self.domainAuthority = domainAuthority
self.domainAuthorityAsText = ".".join(domainAuthority)
···
# Check name
if name == "" or len(name) > 63:
-
raise InvalidNSIDError(nsid, "name length error", "NSID name cannot be empty and length cannot exceed 63 characters")
+
raise InvalidNSIDError(
+
nsid,
+
"name length error",
+
"NSID name cannot be empty and length cannot exceed 63 characters",
+
)
if not set(name).issubset(
set("ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789")
):
-
invalid_chars = set(name) - set("ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789")
-
raise InvalidNSIDError(nsid, "contains invalid characters", f"NSID name contains invalid characters: {', '.join(invalid_chars)}")
+
invalid_chars = set(name) - set(
+
"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"
+
)
+
raise InvalidNSIDError(
+
nsid,
+
"contains invalid characters",
+
f"NSID name contains invalid characters: {', '.join(invalid_chars)}",
+
)
if name[0] in "0123456789":
-
raise InvalidNSIDError(nsid, "invalid format", "NSID name cannot start with a digit")
+
raise InvalidNSIDError(
+
nsid, "invalid format", "NSID name cannot start with a digit"
+
)
self.name = name
# Check fragment
if fragment != None:
if fragment == "" or len(fragment) > 63:
-
raise InvalidNSIDError(nsid, "fragment length error", "NSID fragment cannot be empty and length cannot exceed 63 characters")
+
raise InvalidNSIDError(
+
nsid,
+
"fragment length error",
+
"NSID fragment cannot be empty and length cannot exceed 63 characters",
+
)
if not set(fragment).issubset(
set("ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789")
):
-
invalid_chars = set(fragment) - set("ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789")
-
raise InvalidNSIDError(nsid, "contains invalid characters", f"NSID fragment contains invalid characters: {', '.join(invalid_chars)}")
+
invalid_chars = set(fragment) - set(
+
"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"
+
)
+
raise InvalidNSIDError(
+
nsid,
+
"contains invalid characters",
+
f"NSID fragment contains invalid characters: {', '.join(invalid_chars)}",
+
)
if fragment[0] in "0123456789":
-
raise InvalidNSIDError(nsid, "invalid format", "NSID fragment cannot start with a digit")
+
raise InvalidNSIDError(
+
nsid, "invalid format", "NSID fragment cannot start with a digit"
+
)
self.fragment = fragment
-92
src/atpasser/uri/restricted.py
···
-
from . import handle, nsid
-
from . import rkey as rKey
-
from . import URI
-
from .exceptions import InvalidRestrictedURIError, InvalidURIError
-
-
-
class RestrictedURI(URI):
    """An AT Protocol URI restricted to addressing records.

    A restricted URI has the shape 'at://authority/collection/rkey': the
    authority (a DID or handle) names a repository, the optional collection
    (an NSID) names a record type, and the optional rkey names a specific
    record instance. Query parameters and fragments are not permitted. This
    form is the one commonly used to reference posts, profiles, and other
    user-generated records.

    Attributes:
        collection (atpasser.uri.NSID): The record collection identified by NSID.
        rkey (atpasser.uri.RKey): The record key identifying a specific record instance.
    """

    def __init__(self, uri: str) -> None:
        """Parse and validate *uri* as a restricted AT Protocol URI.

        Args:
            uri (str): The AT Protocol URI string to parse as a restricted URI.

        Raises:
            InvalidRestrictedURIError: If the URI carries query parameters or
                fragments, lacks a valid authority, or has more than two path
                segments.
            InvalidURIError: If the underlying URI validation fails.
        """
        # Delegate base parsing; wrap anything unexpected in InvalidURIError.
        try:
            super().__init__(uri)
        except InvalidURIError:
            raise
        except Exception as e:
            raise InvalidURIError(
                uri, "base URI validation failed", f"Failed to parse base URI: {str(e)}"
            )

        if self.query is not None:
            raise InvalidRestrictedURIError(
                uri,
                "query parameters not supported",
                "Restricted URI cannot contain query parameters",
            )
        if self.fragment is not None:
            raise InvalidRestrictedURIError(
                uri,
                "fragments not supported",
                "Restricted URI cannot contain fragments",
            )
        if self.authority is None:
            raise InvalidRestrictedURIError(
                uri,
                "invalid authority",
                "Restricted URI must contain a valid DID or Handle",
            )

        # At most two path segments: [collection[, rkey]]; missing ones stay None.
        try:
            segmentCount = len(self.path)
            if segmentCount > 2:
                raise InvalidRestrictedURIError(
                    uri,
                    "too many path segments",
                    f"Restricted URI can have at most 2 path segments, currently has {len(self.path)}",
                )
            self.collection = nsid.NSID(self.path[0]) if segmentCount >= 1 else None
            self.rkey = rKey.RKey(self.path[1]) if segmentCount == 2 else None
        except (InvalidRestrictedURIError, InvalidURIError):
            raise
        except Exception as e:
            raise InvalidRestrictedURIError(
                uri,
                "parsing error",
                f"Error occurred while parsing Restricted URI: {str(e)}",
            )
-136
src/atpasser/uri/tid.py
···
-
import datetime, random
-
-
-
class TID:
-
"""A class representing a TID (Time-based Identifier) in the AT Protocol.
-
-
TIDs are time-based identifiers used for ordering and uniquely identifying
-
records in the AT Protocol. They combine a microsecond-precision timestamp
-
with a clock identifier to ensure uniqueness even when multiple records
-
are created in the same microsecond. TIDs are sortable and provide both
-
temporal ordering and uniqueness guarantees.
-
-
Attributes:
-
timestamp (datetime.datetime): The timestamp component of the TID.
-
clockIdentifier (int): Clock identifier (0-1023) for disambiguation.
-
"""
-
-
def __init__(
-
self, time: datetime.datetime | None = None, clockIdentifier: int | None = None
-
) -> None:
-
"""Initializes a TID object with timestamp and clock identifier.
-
-
Creates a new TID with the specified timestamp and clock identifier.
-
If no timestamp is provided, uses the current time. If no clock identifier
-
is provided, generates a random value between 0-1023.
-
-
Args:
-
time (datetime.datetime, optional): The timestamp for the TID.
-
Defaults to current time if not provided.
-
clockIdentifier (int, optional): Clock identifier (0-1023) for
-
disambiguation. Defaults to random value if not provided.
-
"""
-
if time == None:
-
self.timestamp = datetime.datetime.now()
-
else:
-
self.timestamp = time
-
if clockIdentifier == None:
-
self.clockIdentifier = random.randrange(0, 1024)
-
else:
-
self.clockIdentifier = clockIdentifier
-
-
def __int__(self):
-
"""Convert the TID to its integer representation.
-
-
Combines the timestamp (in microseconds) and clock identifier into
-
a single 64-bit integer where the high bits represent the timestamp
-
and the low 10 bits represent the clock identifier.
-
-
Returns:
-
int: The integer representation of the TID.
-
"""
-
timestamp = int(self.timestamp.timestamp() * 1000000)
-
return timestamp * 1024 + self.clockIdentifier
-
-
def __str__(self):
-
"""Convert the TID to a base32-sortable string representation.
-
-
Encodes the TID as a base32 string using a custom character set that
-
maintains lexicographical sort order corresponding to temporal order.
-
This format is commonly used in the AT Protocol for compact,
-
sortable identifiers.
-
-
Returns:
-
str: The base32 string representation of the TID.
-
"""
-
integer = int(self)
-
binary = f"{integer:065b}"
-
return "".join(
-
[
-
"234567abcdefghijklmnopqrstuvwxyz"[int(binary[i : i + 5], base=2)]
-
for i in range(0, len(binary), 5)
-
]
-
)
-
-
def __eq__(self, value: object, /) -> bool:
-
"""Check if two TID objects represent the same identifier.
-
-
Args:
-
value (object): The object to compare with.
-
-
Returns:
-
bool: True if the objects represent the same TID, False otherwise.
-
"""
-
if isinstance(value, TID):
-
return int(self) == int(value)
-
else:
-
return False
-
-
-
def importTIDfromInteger(value: int | None = None):
    """Create a TID object from its integer representation.

    The low 10 bits of the value hold the clock identifier; the remaining
    high bits hold the timestamp in microseconds.

    Args:
        value (int, optional): The integer value to convert to a TID.
            Defaults to the integer form of a TID for the current time.

    Returns:
        TID: The TID object created from the integer value.
    """
    if value is None:  # `is None` instead of `== None` (PEP 8)
        value = int(TID())
    clockIdentifier = value % 1024
    # BUG FIX: the original computed (value >> 10) / 1000000 and passed the
    # float to fromtimestamp(); for large microsecond counts that division
    # can round away a microsecond. Integer divmod is exact.
    seconds, microseconds = divmod(value >> 10, 1000000)
    timestamp = datetime.datetime.fromtimestamp(seconds) + datetime.timedelta(
        microseconds=microseconds
    )
    return TID(timestamp, clockIdentifier)
-
-
-
def importTIDfromBase32(value: str | None = None):
    """Create a TID object from a base32-sortable string representation.

    Decodes the string to its integer representation and delegates to
    importTIDfromInteger().

    Args:
        value (str, optional): The base32 string to convert to a TID.
            Defaults to the string form of a TID for the current time.

    Returns:
        TID: The TID object created from the base32 string.

    Raises:
        ValueError: If the string contains a character outside the
            base32-sortable alphabet.
    """
    if value is None:
        value = str(TID())
    b32s = "234567abcdefghijklmnopqrstuvwxyz"
    integer = 0
    for char in value:
        index = b32s.find(char)
        if index < 0:
            # BUG FIX: str.find() returns -1 for unknown characters, which
            # the original silently summed into a corrupt TID.
            raise ValueError(f"invalid base32-sortable character: {char!r}")
        integer = integer * 32 + index
    return importTIDfromInteger(integer)
+3
src/atpasser/uri/__init__.py
···
"JSONPath parsing failed",
f"Failed to parse JSONPath fragment '{fragment}': {str(e)}",
)
+
else:
+
self.fragment = None
+
self.fragmentAsText = None
if query != None:
try:
-182
src/atpasser/data/decoder.py
···
-
"""
-
JSON decoder for ATProto data model.
-
-
This module provides a JSON decoder that handles ATProto-specific data types,
-
including bytes, CID links, and typed objects.
-
"""
-
-
import base64
-
import json
-
from typing import Any, Callable, Dict, Optional
-
from cid import CIDv0, CIDv1, make_cid
-
-
-
class JsonDecoder(json.JSONDecoder):
    """A JSON decoder that supports ATProto data types.

    This decoder extends the standard JSON decoder to handle ATProto-specific
    data types, including bytes ($bytes), CID links ($link), and typed
    objects ($type).

    Attributes:
        type_hook_registry: Registry for type-specific hooks.
        encoding: The encoding to use for string deserialization.
    """

    def __init__(
        self,
        *,
        object_hook: Optional[Callable[[Dict[str, Any]], Any]] = None,
        type_hook_registry: Optional[Any] = None,
        type_processor_registry: Optional[Any] = None,
        encoding: str = "utf-8",
        **kwargs: Any,
    ) -> None:
        """Initialize the JSON decoder.

        Args:
            object_hook: Optional function to call with each decoded object;
                it runs after the ATProto-specific decoding.
            type_hook_registry: Registry for type-specific hooks.
            type_processor_registry: Registry for type-specific processors.
                Takes precedence over type_hook_registry when both are given.
            encoding: The encoding to use for string deserialization.
            **kwargs: Additional keyword arguments to pass to the parent class.
        """
        # Use the type processor registry if provided, otherwise use the type hook registry
        if type_processor_registry is not None:
            type_hook_registry = type_processor_registry.to_hook_registry()
        elif type_hook_registry is None:
            # Deferred import — presumably to avoid an import cycle with
            # .hooks at module load time; TODO confirm.
            from .hooks import get_global_registry

            type_hook_registry = get_global_registry()

        # Create a combined object hook that calls both the custom hook and our hook
        combined_hook = self._create_combined_hook(object_hook, type_hook_registry)

        super().__init__(object_hook=combined_hook, **kwargs)
        self.type_hook_registry = type_hook_registry
        self.type_processor_registry = type_processor_registry
        self.encoding = encoding

    def _create_combined_hook(
        self,
        custom_hook: Optional[Callable[[Dict[str, Any]], Any]],
        type_hook_registry: Optional[Any],
    ) -> Callable[[Dict[str, Any]], Any]:
        """Create a combined object hook function.

        Args:
            custom_hook: Optional custom object hook function.
            type_hook_registry: Registry for type-specific hooks.

        Returns:
            A combined object hook function that applies the ATProto-specific
            decoding first, then the caller-supplied hook on the result.
        """

        def combined_hook(obj: Dict[str, Any]) -> Any:
            # First, apply our ATProto-specific decoding
            decoded_obj = self._atproto_object_hook(obj)

            # Then, apply the custom hook if provided
            if custom_hook is not None:
                decoded_obj = custom_hook(decoded_obj)

            return decoded_obj

        return combined_hook

    def _atproto_object_hook(self, obj: Dict[str, Any]) -> Any:
        """Handle ATProto-specific object decoding.

        Invoked (via combined_hook) by the JSON parser for every object,
        bottom-up, so nested objects were already transformed when this runs.

        Args:
            obj: The object to decode.

        Returns:
            The decoded object: bytes for $bytes, a CID for $link, a handler
            result or TypedObject for $type, otherwise the dict itself.

        Raises:
            ValueError: If a $bytes or $link object carries extra keys.
        """
        # Handle $bytes key (RFC-4648 base64 decoding)
        if "$bytes" in obj:
            if len(obj) != 1:
                # If there are other keys, this is invalid
                raise ValueError(f"Invalid $bytes object: {obj}")
            # NOTE(review): b64decode expects padded base64, while ATProto's
            # data model specifies $bytes without padding — confirm which
            # form callers actually supply.
            return base64.b64decode(obj["$bytes"].encode(self.encoding))

        # Handle $link key (CID parsing)
        elif "$link" in obj:
            if len(obj) != 1:
                # If there are other keys, this is invalid
                raise ValueError(f"Invalid $link object: {obj}")
            return make_cid(obj["$link"])

        # Handle $type key (typed objects)
        elif "$type" in obj:
            type_value = obj["$type"]
            remaining_obj = {k: v for k, v in obj.items() if k != "$type"}

            # Check if there's a registered type handler
            if self.type_hook_registry is not None:
                handler = self.type_hook_registry.get_handler(type_value)
                if handler is not None:
                    return handler(remaining_obj)

            # If no handler is registered, return a typed object
            return TypedObject(type_value, remaining_obj)

        # Handle nested objects recursively
        elif isinstance(obj, dict):
            # NOTE(review): the parser already applied this hook to nested
            # objects bottom-up, so this re-walk appears redundant (it is
            # idempotent for plain dicts) — kept as-is.
            return {
                k: self._atproto_object_hook(v) if isinstance(v, dict) else v
                for k, v in obj.items()
            }

        return obj
-
-
-
class TypedObject:
    """An object carrying a $type discriminator in the ATProto data model.

    Represents a decoded JSON object whose $type field had no registered
    handler: the discriminator and the remaining payload are kept side by
    side so the object can be re-encoded losslessly.

    Attributes:
        type_name: The value of the $type field.
        data: The remaining key/value payload.
    """

    def __init__(self, type_name: str, data: Dict[str, Any]) -> None:
        """Store the type discriminator and its payload.

        Args:
            type_name: The type of the object.
            data: The data associated with the object.
        """
        self.type_name = type_name
        self.data = data

    def __repr__(self) -> str:
        """Debug representation showing both the type name and the payload."""
        return f"TypedObject(type_name={self.type_name!r}, data={self.data!r})"

    def __eq__(self, other: Any) -> bool:
        """Equal exactly when *other* is a TypedObject with the same
        type name and the same payload."""
        return (
            isinstance(other, TypedObject)
            and self.type_name == other.type_name
            and self.data == other.data
        )

    def __atproto_json_encode__(self) -> Dict[str, Any]:
        """Re-assemble the JSON form: the $type field followed by the
        payload keys.

        Returns:
            A JSON-serializable dict representation of the typed object.
        """
        return {"$type": self.type_name, **self.data}
-82
src/atpasser/data/encoder.py
···
-
"""
-
JSON encoder for ATProto data model.
-
-
This module provides a JSON encoder that handles ATProto-specific data types,
-
including bytes, CID links, and typed objects.
-
"""
-
-
import base64
-
import json
-
from typing import Any, Optional
-
from cid import CIDv0, CIDv1
-
-
-
class JsonEncoder(json.JSONEncoder):
    """A JSON encoder that supports ATProto data types.

    This encoder extends the standard JSON encoder to handle ATProto-specific
    data types: raw bytes (emitted as {"$bytes": ...}), CID links (emitted as
    {"$link": ...}), objects exposing __atproto_json_encode__, and types
    registered in an optional processor registry.

    Attributes:
        encoding (str): The encoding used to turn base64 output into str.
        type_processor_registry: Registry for type-specific processors.
    """

    def __init__(
        self,
        *,
        encoding: str = "utf-8",
        type_processor_registry: Optional[Any] = None,
        **kwargs: Any,
    ) -> None:
        """Initialize the JSON encoder.

        Args:
            encoding: The encoding to use for string serialization.
            type_processor_registry: Registry for type-specific processors.
            **kwargs: Additional keyword arguments passed to json.JSONEncoder.
        """
        super().__init__(**kwargs)
        self.encoding = encoding
        self.type_processor_registry = type_processor_registry

    def _coerce(self, value: Any) -> Any:
        """Return *value* unchanged when it is JSON-native, else encode it.

        Helper for the recursive container branches of default(); passing a
        JSON-native value straight to default() would raise TypeError.
        """
        if value is None or isinstance(value, (bool, int, float, str)):
            return value
        return self.default(value)

    def default(self, o: Any) -> Any:
        """Convert an object the base encoder cannot serialize natively.

        Args:
            o: The object to serialize.

        Returns:
            A serializable representation of the object.

        Raises:
            TypeError: If the object is not serializable.
        """
        if isinstance(o, bytes):
            # Handle bytes using RFC-4648 base64 encoding
            return {"$bytes": base64.b64encode(o).decode(self.encoding)}
        if isinstance(o, (CIDv0, CIDv1)):
            # Handle CID objects
            return {"$link": str(o)}
        if hasattr(o, "__atproto_json_encode__"):
            # Handle objects with custom ATProto encoding
            return o.__atproto_json_encode__()
        if self.type_processor_registry is not None:
            # Try to find a type processor for this object
            obj_type_name = type(o).__name__
            encoder = self.type_processor_registry.get_encoder(obj_type_name)
            if encoder is not None:
                result = encoder(o)
                # Add $type field if not already present
                if isinstance(result, dict) and "$type" not in result:
                    result["$type"] = obj_type_name
                return result
            # BUG FIX: a registry miss previously fell out of the elif chain
            # and implicitly returned None, silently serializing the object
            # as JSON null. Fall through to the remaining checks instead.
        if isinstance(o, dict):
            # Handle dictionaries recursively; _coerce passes JSON-native
            # values through (the old code called default() on every value,
            # which raised TypeError for plain ints/strings).
            return {k: self._coerce(v) for k, v in o.items()}
        if isinstance(o, (list, tuple)):
            # Handle lists and tuples recursively
            return [self._coerce(item) for item in o]
        # Use the parent class for other types (raises TypeError)
        return super().default(o)
-227
src/atpasser/data/hooks.py
···
-
"""
-
Type hook system for ATProto JSON decoder.
-
-
This module provides a decorator-based system for registering custom type handlers
-
for objects with $type keys in the ATProto data model.
-
"""
-
-
import functools
-
from typing import Any, Callable, Dict, Optional, TypeVar, Union
-
-
# Type variable for the decorated function
-
F = TypeVar("F", bound=Callable[..., Any])
-
-
-
class TypeHookRegistry:
    """Registry mapping $type names to decoder hook functions.

    The ATProto JSON decoder consults an instance of this class when it
    encounters an object carrying a $type key, dispatching to the handler
    registered for that type name.

    Attributes:
        _handlers: Dictionary mapping type names to handler functions.
    """

    def __init__(self) -> None:
        """Start with no registered handlers."""
        self._handlers: Dict[str, Callable[[Dict[str, Any]], Any]] = {}

    def register(self, type_name: str) -> Callable[[F], F]:
        """Decorator form of handler registration.

        Args:
            type_name: The name of the type to handle.

        Returns:
            A decorator that registers the wrapped function as the handler
            for *type_name* and returns it unchanged.

        Example:
            >>> registry = TypeHookRegistry()
            >>>
            >>> @registry.register("app.bsky.feed.post")
            ... def handle_post(data: Dict[str, Any]) -> Any:
            ...     return Post(**data)
        """

        def decorator(func: F) -> F:
            self.register_handler(type_name, func)
            return func

        return decorator

    def register_handler(
        self, type_name: str, handler: Callable[[Dict[str, Any]], Any]
    ) -> None:
        """Register a handler for *type_name* directly (non-decorator form).

        Args:
            type_name: The name of the type to handle.
            handler: The function to call when decoding objects of this type.
        """
        self._handlers[type_name] = handler

    def unregister(self, type_name: str) -> None:
        """Remove the handler for *type_name*; a no-op when none is registered.

        Args:
            type_name: The name of the type to unregister.
        """
        self._handlers.pop(type_name, None)

    def get_handler(self, type_name: str) -> Optional[Callable[[Dict[str, Any]], Any]]:
        """Return the handler registered for *type_name*, or None.

        Args:
            type_name: The name of the type to get the handler for.
        """
        return self._handlers.get(type_name)

    def has_handler(self, type_name: str) -> bool:
        """Whether a handler is registered for *type_name*.

        Args:
            type_name: The name of the type to check.
        """
        return type_name in self._handlers

    def clear(self) -> None:
        """Drop every registered handler."""
        self._handlers.clear()

    def get_registered_types(self) -> set:
        """Return the set of all registered type names."""
        return set(self._handlers)
-
-
-
# Global registry instance
-
_global_registry = TypeHookRegistry()
-
-
-
def type_handler(type_name: str) -> Callable[[F], F]:
-
"""Register a global type handler function.
-
-
This decorator registers a function as a global handler for a specific type
-
in the ATProto data model.
-
-
Args:
-
type_name: The name of the type to handle.
-
-
Returns:
-
A decorator function that registers the decorated function as a handler.
-
-
Example:
-
>>> @type_handler("app.bsky.feed.post")
-
... def handle_post(data: Dict[str, Any]) -> Any:
-
... return Post(**data)
-
"""
-
return _global_registry.register(type_name)
-
-
-
def get_global_registry() -> TypeHookRegistry:
-
"""Get the global type hook registry.
-
-
Returns:
-
The global TypeHookRegistry instance.
-
"""
-
return _global_registry
-
-
-
def register_type_handler(
-
type_name: str, handler: Callable[[Dict[str, Any]], Any]
-
) -> None:
-
"""Register a global type handler function directly.
-
-
Args:
-
type_name: The name of the type to handle.
-
handler: The function to call when decoding objects of this type.
-
-
Example:
-
>>> def handle_post(data: Dict[str, Any]) -> Any:
-
... return Post(**data)
-
>>>
-
>>> register_type_handler("app.bsky.feed.post", handle_post)
-
"""
-
_global_registry.register_handler(type_name, handler)
-
-
-
def unregister_type_handler(type_name: str) -> None:
-
"""Unregister a global type handler function.
-
-
Args:
-
type_name: The name of the type to unregister.
-
"""
-
_global_registry.unregister(type_name)
-
-
-
def get_type_handler(type_name: str) -> Optional[Callable[[Dict[str, Any]], Any]]:
-
"""Get the global handler function for a specific type.
-
-
Args:
-
type_name: The name of the type to get the handler for.
-
-
Returns:
-
The handler function for the specified type, or None if no handler
-
is registered.
-
"""
-
return _global_registry.get_handler(type_name)
-
-
-
def has_type_handler(type_name: str) -> bool:
-
"""Check if a global handler is registered for a specific type.
-
-
Args:
-
type_name: The name of the type to check.
-
-
Returns:
-
True if a handler is registered for the specified type, False otherwise.
-
"""
-
return _global_registry.has_handler(type_name)
-
-
-
def clear_type_handlers() -> None:
-
"""Clear all globally registered handlers."""
-
_global_registry.clear()
-
-
-
def get_registered_types() -> set:
-
"""Get the set of all globally registered type names.
-
-
Returns:
-
A set of all registered type names.
-
"""
-
return _global_registry.get_registered_types()
-
-
-
def create_registry() -> TypeHookRegistry:
-
"""Create a new type hook registry.
-
-
This function creates a new, independent registry that can be used
-
instead of the global registry.
-
-
Returns:
-
A new TypeHookRegistry instance.
-
"""
-
return TypeHookRegistry()
-510
src/atpasser/data/types.py
···
-
"""
-
Type processor system for ATProto JSON decoder.
-
-
This module provides an advanced type processor system that allows users to
-
register custom type converters for objects with $type keys in the ATProto data model.
-
"""
-
-
import inspect
-
from typing import Any, Callable, Dict, List, Optional, Type, TypeVar, Union
-
from .hooks import TypeHookRegistry
-
-
# Type variable for the decorated class
-
T = TypeVar("T")
-
-
-
class TypeProcessor:
-
"""A type processor for ATProto JSON objects.
-
-
This class represents a processor for a specific type in the ATProto data model.
-
It contains information about how to convert JSON data to Python objects and
-
vice versa.
-
-
Attributes:
-
type_name: The name of the type this processor handles.
-
decoder: The function to decode JSON data to a Python object.
-
encoder: The function to encode a Python object to JSON data.
-
priority: The priority of this processor (higher values = higher priority).
-
"""
-
-
def __init__(
-
self,
-
type_name: str,
-
decoder: Optional[Callable[[Dict[str, Any]], Any]] = None,
-
encoder: Optional[Callable[[Any], Dict[str, Any]]] = None,
-
priority: int = 0,
-
) -> None:
-
"""Initialize a type processor.
-
-
Args:
-
type_name: The name of the type this processor handles.
-
decoder: The function to decode JSON data to a Python object.
-
encoder: The function to encode a Python object to JSON data.
-
priority: The priority of this processor (higher values = higher priority).
-
"""
-
self.type_name = type_name
-
self.decoder = decoder
-
self.encoder = encoder
-
self.priority = priority
-
-
def decode(self, data: Dict[str, Any]) -> Any:
-
"""Decode JSON data to a Python object.
-
-
Args:
-
data: The JSON data to decode.
-
-
Returns:
-
The decoded Python object.
-
-
Raises:
-
ValueError: If no decoder is registered.
-
"""
-
if self.decoder is None:
-
raise ValueError(f"No decoder registered for type {self.type_name}")
-
return self.decoder(data)
-
-
def encode(self, obj: Any) -> Dict[str, Any]:
-
"""Encode a Python object to JSON data.
-
-
Args:
-
obj: The Python object to encode.
-
-
Returns:
-
The encoded JSON data.
-
-
Raises:
-
ValueError: If no encoder is registered.
-
"""
-
if self.encoder is None:
-
raise ValueError(f"No encoder registered for type {self.type_name}")
-
return self.encoder(obj)
-
-
-
class TypeProcessorRegistry:
-
"""Registry for type processors in the ATProto JSON decoder.
-
-
This class maintains a registry of type processors that can be used
-
to customize the encoding and decoding of objects with $type keys in
-
the ATProto data model.
-
-
Attributes:
-
_processors: Dictionary mapping type names to processor lists.
-
"""
-
-
def __init__(self) -> None:
-
"""Initialize the type processor registry."""
-
self._processors: Dict[str, List[TypeProcessor]] = {}
-
-
def register_processor(self, processor: TypeProcessor) -> None:
-
"""Register a type processor.
-
-
Args:
-
processor: The type processor to register.
-
"""
-
if processor.type_name not in self._processors:
-
self._processors[processor.type_name] = []
-
-
self._processors[processor.type_name].append(processor)
-
# Sort processors by priority (descending)
-
self._processors[processor.type_name].sort(
-
key=lambda p: p.priority, reverse=True
-
)
-
-
def register(
-
self, type_name: str, priority: int = 0
-
) -> Callable[[Callable[[Dict[str, Any]], Any]], Callable[[Dict[str, Any]], Any]]:
-
"""Register a type decoder function.
-
-
This method can be used as a decorator to register a function as a decoder
-
for a specific type.
-
-
Args:
-
type_name: The name of the type to handle.
-
priority: The priority of this processor (higher values = higher priority).
-
-
Returns:
-
A decorator function that registers the decorated function as a decoder.
-
-
Example:
-
>>> registry = TypeProcessorRegistry()
-
>>>
-
>>> @registry.register("app.bsky.feed.post", priority=10)
-
... def decode_post(data: Dict[str, Any]) -> Any:
-
... return Post(**data)
-
"""
-
-
def decorator(
-
func: Callable[[Dict[str, Any]], Any],
-
) -> Callable[[Dict[str, Any]], Any]:
-
processor = TypeProcessor(type_name, decoder=func, priority=priority)
-
self.register_processor(processor)
-
return func
-
-
return decorator
-
-
def register_encoder(
-
self, type_name: str, priority: int = 0
-
) -> Callable[[Callable[[Any], Dict[str, Any]]], Callable[[Any], Dict[str, Any]]]:
-
"""Register a type encoder function.
-
-
This method can be used as a decorator to register a function as an encoder
-
for a specific type.
-
-
Args:
-
type_name: The name of the type to handle.
-
priority: The priority of this processor (higher values = higher priority).
-
-
Returns:
-
A decorator function that registers the decorated function as an encoder.
-
-
Example:
-
>>> registry = TypeProcessorRegistry()
-
>>>
-
>>> @registry.register_encoder("app.bsky.feed.post", priority=10)
-
... def encode_post(post: Post) -> Dict[str, Any]:
-
... return {"text": post.text, "createdAt": post.created_at}
-
"""
-
-
def decorator(
-
func: Callable[[Any], Dict[str, Any]],
-
) -> Callable[[Any], Dict[str, Any]]:
-
# Check if a processor for this type already exists
-
if type_name in self._processors:
-
for processor in self._processors[type_name]:
-
if processor.decoder is not None:
-
# Update the existing processor with the encoder
-
processor.encoder = func
-
break
-
else:
-
# No decoder found, create a new processor
-
processor = TypeProcessor(
-
type_name, encoder=func, priority=priority
-
)
-
self.register_processor(processor)
-
else:
-
# No processor exists, create a new one
-
processor = TypeProcessor(type_name, encoder=func, priority=priority)
-
self.register_processor(processor)
-
-
return func
-
-
return decorator
-
-
def register_class(
-
self, type_name: str, priority: int = 0
-
) -> Callable[[Type[T]], Type[T]]:
-
"""Register a class for both encoding and decoding.
-
-
This method can be used as a decorator to register a class for both
-
encoding and decoding of a specific type.
-
-
The class must have a class method `from_json` that takes a dictionary
-
and returns an instance of the class, and an instance method `to_json`
-
that returns a dictionary.
-
-
Args:
-
type_name: The name of the type to handle.
-
priority: The priority of this processor (higher values = higher priority).
-
-
Returns:
-
A decorator function that registers the decorated class.
-
-
Example:
-
>>> registry = TypeProcessorRegistry()
-
>>>
-
>>> @registry.register_class("app.bsky.feed.post", priority=10)
-
... class Post:
-
... def __init__(self, text: str, created_at: str) -> None:
-
... self.text = text
-
... self.created_at = created_at
-
...
-
... @classmethod
-
... def from_json(cls, data: Dict[str, Any]) -> "Post":
-
... return cls(data["text"], data["createdAt"])
-
...
-
... def to_json(self) -> Dict[str, Any]:
-
... return {"text": self.text, "createdAt": self.created_at}
-
"""
-
-
def decorator(cls: Type[T]) -> Type[T]:
-
# Create decoder from class method
-
if hasattr(cls, "from_json"):
-
decoder = lambda data: getattr(cls, "from_json")(data)
-
else:
-
# Try to create a decoder from the constructor
-
init_signature = inspect.signature(cls.__init__)
-
if init_signature.parameters:
-
# Create a decoder that passes the data as keyword arguments
-
decoder = lambda data: cls(**data)
-
else:
-
raise ValueError(
-
f"Class {cls.__name__} has no from_json method or compatible __init__"
-
)
-
-
# Create encoder from instance method
-
if hasattr(cls, "to_json"):
-
encoder = lambda obj: obj.to_json()
-
else:
-
raise ValueError(f"Class {cls.__name__} has no to_json method")
-
-
# Register the processor
-
processor = TypeProcessor(
-
type_name, decoder=decoder, encoder=encoder, priority=priority
-
)
-
self.register_processor(processor)
-
-
return cls
-
-
return decorator
-
-
def unregister(self, type_name: str, priority: Optional[int] = None) -> None:
-
"""Unregister type processors.
-
-
Args:
-
type_name: The name of the type to unregister.
-
priority: If specified, only unregister processors with this priority.
-
"""
-
if type_name in self._processors:
-
if priority is not None:
-
# Remove processors with the specified priority
-
self._processors[type_name] = [
-
p for p in self._processors[type_name] if p.priority != priority
-
]
-
else:
-
# Remove all processors for this type
-
del self._processors[type_name]
-
-
def get_decoder(self, type_name: str) -> Optional[Callable[[Dict[str, Any]], Any]]:
-
"""Get the decoder function for a specific type.
-
-
Args:
-
type_name: The name of the type to get the decoder for.
-
-
Returns:
-
The decoder function for the specified type, or None if no decoder
-
is registered.
-
"""
-
if type_name in self._processors and self._processors[type_name]:
-
# Return the decoder of the highest priority processor
-
return self._processors[type_name][0].decoder
-
return None
-
-
def get_encoder(self, type_name: str) -> Optional[Callable[[Any], Dict[str, Any]]]:
-
"""Get the encoder function for a specific type.
-
-
Args:
-
type_name: The name of the type to get the encoder for.
-
-
Returns:
-
The encoder function for the specified type, or None if no encoder
-
is registered.
-
"""
-
if type_name in self._processors and self._processors[type_name]:
-
# Return the encoder of the highest priority processor
-
return self._processors[type_name][0].encoder
-
return None
-
-
def has_processor(self, type_name: str) -> bool:
-
"""Check if a processor is registered for a specific type.
-
-
Args:
-
type_name: The name of the type to check.
-
-
Returns:
-
True if a processor is registered for the specified type, False otherwise.
-
"""
-
return type_name in self._processors and bool(self._processors[type_name])
-
-
def clear(self) -> None:
-
"""Clear all registered processors."""
-
self._processors.clear()
-
-
def get_registered_types(self) -> set:
-
"""Get the set of all registered type names.
-
-
Returns:
-
A set of all registered type names.
-
"""
-
return set(self._processors.keys())
-
-
def to_hook_registry(self) -> TypeHookRegistry:
-
"""Convert this processor registry to a hook registry.
-
-
This method creates a TypeHookRegistry that uses the decoders from
-
this processor registry.
-
-
Returns:
-
A TypeHookRegistry with the same decoders as this processor registry.
-
"""
-
hook_registry = TypeHookRegistry()
-
-
for type_name, processors in self._processors.items():
-
if processors and processors[0].decoder is not None:
-
hook_registry.register_handler(type_name, processors[0].decoder)
-
-
return hook_registry
-
-
-
# Global registry instance
-
_global_processor_registry = TypeProcessorRegistry()
-
-
-
def register_type(
-
type_name: str, priority: int = 0
-
) -> Callable[[Callable[[Dict[str, Any]], Any]], Callable[[Dict[str, Any]], Any]]:
-
"""Register a global type decoder function.
-
-
This decorator registers a function as a global decoder for a specific type
-
in the ATProto data model.
-
-
Args:
-
type_name: The name of the type to handle.
-
priority: The priority of this processor (higher values = higher priority).
-
-
Returns:
-
A decorator function that registers the decorated function as a decoder.
-
-
Example:
-
>>> @register_type("app.bsky.feed.post", priority=10)
-
... def decode_post(data: Dict[str, Any]) -> Any:
-
... return Post(**data)
-
"""
-
return _global_processor_registry.register(type_name, priority)
-
-
-
def get_global_processor_registry() -> TypeProcessorRegistry:
-
"""Get the global type processor registry.
-
-
Returns:
-
The global TypeProcessorRegistry instance.
-
"""
-
return _global_processor_registry
-
-
-
def register_type_encoder(
-
type_name: str, priority: int = 0
-
) -> Callable[[Callable[[Any], Dict[str, Any]]], Callable[[Any], Dict[str, Any]]]:
-
"""Register a global type encoder function.
-
-
This decorator registers a function as a global encoder for a specific type
-
in the ATProto data model.
-
-
Args:
-
type_name: The name of the type to handle.
-
priority: The priority of this processor (higher values = higher priority).
-
-
Returns:
-
A decorator function that registers the decorated function as an encoder.
-
-
Example:
-
>>> @register_type_encoder("app.bsky.feed.post", priority=10)
-
... def encode_post(post: Post) -> Dict[str, Any]:
-
... return {"text": post.text, "createdAt": post.created_at}
-
"""
-
return _global_processor_registry.register_encoder(type_name, priority)
-
-
-
def register_type_class(
-
type_name: str, priority: int = 0
-
) -> Callable[[Type[T]], Type[T]]:
-
"""Register a class for both global encoding and decoding.
-
-
This decorator registers a class for both encoding and decoding of a specific type
-
in the ATProto data model.
-
-
Args:
-
type_name: The name of the type to handle.
-
priority: The priority of this processor (higher values = higher priority).
-
-
Returns:
-
A decorator function that registers the decorated class.
-
-
Example:
-
>>> @register_type_class("app.bsky.feed.post", priority=10)
-
... class Post:
-
... def __init__(self, text: str, created_at: str) -> None:
-
... self.text = text
-
... self.created_at = created_at
-
...
-
... @classmethod
-
... def from_json(cls, data: Dict[str, Any]) -> "Post":
-
... return cls(data["text"], data["createdAt"])
-
...
-
... def to_json(self) -> Dict[str, Any]:
-
... return {"text": self.text, "createdAt": self.created_at}
-
"""
-
return _global_processor_registry.register_class(type_name, priority)
-
-
-
def unregister_type(type_name: str, priority: Optional[int] = None) -> None:
-
"""Unregister global type processors.
-
-
Args:
-
type_name: The name of the type to unregister.
-
priority: If specified, only unregister processors with this priority.
-
"""
-
_global_processor_registry.unregister(type_name, priority)
-
-
-
def get_type_decoder(type_name: str) -> Optional[Callable[[Dict[str, Any]], Any]]:
-
"""Get the global decoder function for a specific type.
-
-
Args:
-
type_name: The name of the type to get the decoder for.
-
-
Returns:
-
The decoder function for the specified type, or None if no decoder
-
is registered.
-
"""
-
return _global_processor_registry.get_decoder(type_name)
-
-
-
def get_type_encoder(type_name: str) -> Optional[Callable[[Any], Dict[str, Any]]]:
-
"""Get the global encoder function for a specific type.
-
-
Args:
-
type_name: The name of the type to get the encoder for.
-
-
Returns:
-
The encoder function for the specified type, or None if no encoder
-
is registered.
-
"""
-
return _global_processor_registry.get_encoder(type_name)
-
-
-
def has_type_processor(type_name: str) -> bool:
-
"""Check if a global processor is registered for a specific type.
-
-
Args:
-
type_name: The name of the type to check.
-
-
Returns:
-
True if a processor is registered for the specified type, False otherwise.
-
"""
-
return _global_processor_registry.has_processor(type_name)
-
-
-
def clear_type_processors() -> None:
-
"""Clear all globally registered processors."""
-
_global_processor_registry.clear()
-
-
-
def get_registered_types() -> set:
-
"""Get the set of all globally registered type names.
-
-
Returns:
-
A set of all registered type names.
-
"""
-
return _global_processor_registry.get_registered_types()
-
-
-
def create_processor_registry() -> TypeProcessorRegistry:
-
"""Create a new type processor registry.
-
-
This function creates a new, independent registry that can be used
-
instead of the global registry.
-
-
Returns:
-
A new TypeProcessorRegistry instance.
-
"""
-
return TypeProcessorRegistry()
-346
src/atpasser/data/wrapper.py
···
-
"""
-
JSON wrapper functions for ATProto data model.
-
-
This module provides wrapper functions that mirror the standard json module
-
but with support for ATProto-specific data types.
-
"""
-
-
import json
-
import io
-
from typing import Any, Callable, Dict, Optional, TextIO, Union
-
from .encoder import JsonEncoder
-
from .decoder import JsonDecoder
-
from .hooks import TypeHookRegistry
-
from .types import TypeProcessorRegistry
-
-
-
def dump(
-
obj: Any,
-
fp: TextIO,
-
*,
-
skipkeys: bool = False,
-
ensure_ascii: bool = True,
-
check_circular: bool = True,
-
allow_nan: bool = True,
-
cls: Optional[type[JsonEncoder]] = None,
-
indent: Optional[Union[int, str]] = None,
-
separators: Optional[tuple[str, str]] = None,
-
default: Optional[Callable[[Any], Any]] = None,
-
sort_keys: bool = False,
-
encoding: str = "utf-8",
-
type_processor_registry: Optional[TypeProcessorRegistry] = None,
-
**kwargs: Any,
-
) -> None:
-
"""Serialize obj as a JSON formatted stream to fp.
-
-
This function is similar to json.dump() but supports ATProto-specific
-
data types, including bytes, CID links, and typed objects.
-
-
Args:
-
obj: The object to serialize.
-
fp: A file-like object with a write() method.
-
skipkeys: If True, dict keys that are not basic types (str, int, float,
-
bool, None) will be skipped instead of raising a TypeError.
-
ensure_ascii: If True, the output is guaranteed to have all incoming
-
non-ASCII characters escaped. If False, these characters will be
-
output as-is.
-
check_circular: If True, circular references will be checked and
-
a CircularReferenceError will be raised if one is found.
-
allow_nan: If True, NaN, Infinity, and -Infinity will be encoded as
-
such. This behavior is not JSON specification compliant, but it
-
is consistent with most JavaScript based encoders and decoders.
-
Otherwise, it will raise a ValueError.
-
cls: A custom JSONEncoder subclass. If not specified, JsonEncoder is used.
-
indent: If indent is a non-negative integer or string, then JSON array
-
elements and object members will be pretty-printed with that indent
-
level. An indent level of 0, negative, or "" will only insert newlines.
-
None (the default) selects the most compact representation.
-
separators: If specified, separators should be an (item_separator, key_separator)
-
tuple. The default is (', ', ': ') if indent is None and (',', ': ') otherwise.
-
To get the most compact JSON representation, you should specify (',', ':')
-
to eliminate whitespace.
-
default: If specified, default should be a function that gets called for
-
objects that can't otherwise be serialized. It should return a JSON
-
encodable version of the object or raise a TypeError.
-
sort_keys: If sort_keys is True, then the output of dictionaries will be
-
sorted by key.
-
encoding: The encoding to use for string serialization.
-
type_processor_registry: Registry for type-specific processors.
-
**kwargs: Additional keyword arguments to pass to the JSON encoder.
-
"""
-
if cls is None:
-
cls = JsonEncoder
-
-
# Use the global type processor registry if none is provided
-
if type_processor_registry is None:
-
from .types import get_global_processor_registry
-
-
type_processor_registry = get_global_processor_registry()
-
-
# Create an encoder instance with the specified encoding and type processor registry
-
encoder = cls(
-
encoding=encoding, type_processor_registry=type_processor_registry, **kwargs
-
)
-
-
# Use the standard json.dump with our custom encoder
-
json.dump(
-
obj,
-
fp,
-
skipkeys=skipkeys,
-
ensure_ascii=ensure_ascii,
-
check_circular=check_circular,
-
allow_nan=allow_nan,
-
cls=cls,
-
indent=indent,
-
separators=separators,
-
default=default,
-
sort_keys=sort_keys,
-
**kwargs,
-
)
-
-
-
def dumps(
-
obj: Any,
-
*,
-
skipkeys: bool = False,
-
ensure_ascii: bool = True,
-
check_circular: bool = True,
-
allow_nan: bool = True,
-
cls: Optional[type[JsonEncoder]] = None,
-
indent: Optional[Union[int, str]] = None,
-
separators: Optional[tuple[str, str]] = None,
-
default: Optional[Callable[[Any], Any]] = None,
-
sort_keys: bool = False,
-
encoding: str = "utf-8",
-
type_processor_registry: Optional[TypeProcessorRegistry] = None,
-
**kwargs: Any,
-
) -> str:
-
"""Serialize obj to a JSON formatted string.
-
-
This function is similar to json.dumps() but supports ATProto-specific
-
data types, including bytes, CID links, and typed objects.
-
-
Args:
-
obj: The object to serialize.
-
skipkeys: If True, dict keys that are not basic types (str, int, float,
-
bool, None) will be skipped instead of raising a TypeError.
-
ensure_ascii: If True, the output is guaranteed to have all incoming
-
non-ASCII characters escaped. If False, these characters will be
-
output as-is.
-
check_circular: If True, circular references will be checked and
-
a CircularReferenceError will be raised if one is found.
-
allow_nan: If True, NaN, Infinity, and -Infinity will be encoded as
-
such. This behavior is not JSON specification compliant, but it
-
is consistent with most JavaScript based encoders and decoders.
-
Otherwise, it will raise a ValueError.
-
cls: A custom JSONEncoder subclass. If not specified, JsonEncoder is used.
-
indent: If indent is a non-negative integer or string, then JSON array
-
elements and object members will be pretty-printed with that indent
-
level. An indent level of 0, negative, or "" will only insert newlines.
-
None (the default) selects the most compact representation.
-
separators: If specified, separators should be an (item_separator, key_separator)
-
tuple. The default is (', ', ': ') if indent is None and (',', ': ') otherwise.
-
To get the most compact JSON representation, you should specify (',', ':')
-
to eliminate whitespace.
-
default: If specified, default should be a function that gets called for
-
objects that can't otherwise be serialized. It should return a JSON
-
encodable version of the object or raise a TypeError.
-
sort_keys: If sort_keys is True, then the output of dictionaries will be
-
sorted by key.
-
encoding: The encoding to use for string serialization.
-
type_processor_registry: Registry for type-specific processors.
-
**kwargs: Additional keyword arguments to pass to the JSON encoder.
-
-
Returns:
-
A JSON formatted string.
-
"""
-
if cls is None:
-
cls = JsonEncoder
-
-
# Create an encoder instance with the specified encoding and type processor registry
-
encoder = cls(
-
encoding=encoding, type_processor_registry=type_processor_registry, **kwargs
-
)
-
-
# Use the standard json.dumps with our custom encoder
-
return json.dumps(
-
obj,
-
skipkeys=skipkeys,
-
ensure_ascii=ensure_ascii,
-
check_circular=check_circular,
-
allow_nan=allow_nan,
-
cls=cls,
-
indent=indent,
-
separators=separators,
-
default=default,
-
sort_keys=sort_keys,
-
**kwargs,
-
)
-
-
-
def load(
-
fp: TextIO,
-
*,
-
cls: Optional[type[JsonDecoder]] = None,
-
object_hook: Optional[Callable[[Dict[str, Any]], Any]] = None,
-
parse_float: Optional[Callable[[str], Any]] = None,
-
parse_int: Optional[Callable[[str], Any]] = None,
-
parse_constant: Optional[Callable[[str], Any]] = None,
-
object_pairs_hook: Optional[Callable[[list[tuple[str, Any]]], Any]] = None,
-
type_hook_registry: Optional[TypeHookRegistry] = None,
-
type_processor_registry: Optional[TypeProcessorRegistry] = None,
-
encoding: str = "utf-8",
-
**kwargs: Any,
-
) -> Any:
-
"""Deserialize fp (a .read()-supporting text file or binary file containing
-
a JSON document) to a Python object.
-
-
This function is similar to json.load() but supports ATProto-specific
-
data types, including bytes, CID links, and typed objects.
-
-
Args:
-
fp: A .read()-supporting text file or binary file containing a JSON document.
-
cls: A custom JSONDecoder subclass. If not specified, JsonDecoder is used.
-
object_hook: Optional function that will be called with the result of
-
every JSON object decoded and its return value will be used in place
-
of the given dict.
-
parse_float: Optional function that will be called with the string of
-
every JSON float to be decoded. By default, this is equivalent to
-
float(num_str). This can be used to use another datatype or parser
-
for JSON floats (e.g. decimal.Decimal).
-
parse_int: Optional function that will be called with the string of
-
every JSON int to be decoded. By default, this is equivalent to
-
int(num_str). This can be used to use another datatype or parser
-
for JSON integers (e.g. float).
-
parse_constant: Optional function that will be called with the string of
-
every JSON constant to be decoded. By default, this is equivalent to
-
constant_mapping[constant_str]. This can be used to use another
-
datatype or parser for JSON constants (e.g. decimal.Decimal).
-
object_pairs_hook: Optional function that will be called with the result
-
of every JSON object decoded with an ordered list of pairs. The return
-
value of object_pairs_hook will be used instead of the dict. This
-
feature can be used to implement custom decoders. If object_hook is
-
also defined, the object_pairs_hook takes priority.
-
type_hook_registry: Registry for type-specific hooks.
-
type_processor_registry: Registry for type-specific processors.
-
encoding: The encoding to use for string deserialization.
-
**kwargs: Additional keyword arguments to pass to the JSON decoder.
-
-
Returns:
-
A Python object.
-
"""
-
if cls is None:
-
cls = JsonDecoder
-
-
# Use the global type hook registry if none is provided
-
if type_hook_registry is None and type_processor_registry is None:
-
from .hooks import get_global_registry
-
-
type_hook_registry = get_global_registry()
-
elif type_processor_registry is not None:
-
# Convert the type processor registry to a hook registry
-
type_hook_registry = type_processor_registry.to_hook_registry()
-
-
# Create a decoder instance with the specified parameters
-
decoder = cls(
-
object_hook=object_hook,
-
type_hook_registry=type_hook_registry,
-
encoding=encoding,
-
**kwargs,
-
)
-
-
# Use the standard json.load with our custom decoder
-
return json.load(
-
fp,
-
cls=cls,
-
object_hook=object_hook,
-
parse_float=parse_float,
-
parse_int=parse_int,
-
parse_constant=parse_constant,
-
object_pairs_hook=object_pairs_hook,
-
**kwargs,
-
)
-
-
-
def loads(
-
s: Union[str, bytes],
-
*,
-
cls: Optional[type[JsonDecoder]] = None,
-
object_hook: Optional[Callable[[Dict[str, Any]], Any]] = None,
-
parse_float: Optional[Callable[[str], Any]] = None,
-
parse_int: Optional[Callable[[str], Any]] = None,
-
parse_constant: Optional[Callable[[str], Any]] = None,
-
object_pairs_hook: Optional[Callable[[list[tuple[str, Any]]], Any]] = None,
-
type_hook_registry: Optional[TypeHookRegistry] = None,
-
type_processor_registry: Optional[TypeProcessorRegistry] = None,
-
encoding: str = "utf-8",
-
**kwargs: Any,
-
) -> Any:
-
"""Deserialize s (a str, bytes or bytearray instance containing a JSON document)
-
to a Python object.
-
-
This function is similar to json.loads() but supports ATProto-specific
-
data types, including bytes, CID links, and typed objects.
-
-
Args:
-
s: A str, bytes or bytearray instance containing a JSON document.
-
cls: A custom JSONDecoder subclass. If not specified, JsonDecoder is used.
-
object_hook: Optional function that will be called with the result of
-
every JSON object decoded and its return value will be used in place
-
of the given dict.
-
parse_float: Optional function that will be called with the string of
-
every JSON float to be decoded. By default, this is equivalent to
-
float(num_str). This can be used to use another datatype or parser
-
for JSON floats (e.g. decimal.Decimal).
-
parse_int: Optional function that will be called with the string of
-
every JSON int to be decoded. By default, this is equivalent to
-
int(num_str). This can be used to use another datatype or parser
-
for JSON integers (e.g. float).
-
parse_constant: Optional function that will be called with the string of
-
every JSON constant to be decoded. By default, this is equivalent to
-
constant_mapping[constant_str]. This can be used to use another
-
datatype or parser for JSON constants (e.g. decimal.Decimal).
-
object_pairs_hook: Optional function that will be called with the result
-
of every JSON object decoded with an ordered list of pairs. The return
-
value of object_pairs_hook will be used instead of the dict. This
-
feature can be used to implement custom decoders. If object_hook is
-
also defined, the object_pairs_hook takes priority.
-
type_hook_registry: Registry for type-specific hooks.
-
type_processor_registry: Registry for type-specific processors.
-
encoding: The encoding to use for string deserialization.
-
**kwargs: Additional keyword arguments to pass to the JSON decoder.
-
-
Returns:
-
A Python object.
-
"""
-
if cls is None:
-
cls = JsonDecoder
-
-
# Use the global type hook registry if none is provided
-
if type_hook_registry is None and type_processor_registry is None:
-
from .hooks import get_global_registry
-
-
type_hook_registry = get_global_registry()
-
elif type_processor_registry is not None:
-
# Convert the type processor registry to a hook registry
-
type_hook_registry = type_processor_registry.to_hook_registry()
-
-
# Create a decoder instance with the specified parameters
-
decoder = cls(
-
object_hook=object_hook,
-
type_hook_registry=type_hook_registry,
-
encoding=encoding,
-
**kwargs,
-
)
-
-
# Use the standard json.loads with our custom decoder
-
return json.loads(
-
s,
-
cls=cls,
-
object_hook=object_hook,
-
parse_float=parse_float,
-
parse_int=parse_int,
-
parse_constant=parse_constant,
-
object_pairs_hook=object_pairs_hook,
-
**kwargs,
-
)
+6 -1
.gitignore
···
__marimo__/
# Streamlit
-
.streamlit/secrets.toml
+
.streamlit/secrets.toml
+
+
#####
+
+
# Added by DWN - temp dir
+
tmp/
+10 -2
README.md
···
-
# ATPasser โ•ฌ<!
+
# ATPasser!
A simple library for the [Authenticated Transfer Protocol](https://atproto.com/specs/atp) (AT Protocol or atproto for short).
···
---
-
[See the roadmap](docs/roadmap.md)
+
## Other ATProto libraries
+
+
[There's already an ATProto SDK by MarshalX (used by lots of projects),](https://github.com/MarshalX/atproto) so why does this project exist?
+
+
The first reason is that I'm reviving the now-closed [Tietiequan](https://tangled.org/@dwn.dwnfonts.cc/bluesky-circle) app, and I found that some APIs have changed, so I have to rewrite it in vanilla JS.
+
+
The second reason is that I'm a newbie to ATProto: I want to learn how ATProto works and how it can be represented in Python.
+
+
The architecture will be small, only containing the data model and the client.
---
-16
src/atpasser/blob/__init__.py
···
-
import cid
-
import multihash, hashlib
-
-
-
def generateCID(file):
-
hasher = hashlib.new("sha-256")
-
while True:
-
chunk = file.read(8192)
-
if not chunk:
-
break
-
hasher.update(chunk)
-
-
digest = hasher.digest
-
mh = multihash.encode(digest, "sha-256")
-
-
return cid.CIDv1(codec="raw", multihash=mh)
-76
src/atpasser/data/_data.py
···
-
import base64
-
from cid import CIDv0, CIDv1, cid, make_cid
-
import json
-
-
-
class Data:
-
"""
-
A class representing data with "$type" key.
-
-
Attributes:
-
type (str): The type of the data.
-
json (str): Original object in JSON format.
-
"""
-
-
def __init__(self, dataType: str, json: str = "{}") -> None:
-
"""
-
Initalizes data object.
-
-
Parameters:
-
type (str): The type of the data.
-
json (str): Original object in JSON format.
-
"""
-
self.type = dataType
-
self.json = json
-
-
def data(self):
-
"""
-
Loads data as a Python-friendly format.
-
-
Returns:
-
dict: Converted data from JSON object.
-
"""
-
return json.loads(self.json, object_hook=dataHook)
-
-
-
def dataHook(data: dict):
-
"""
-
Treated as `JSONDecoder`'s `object_hook`
-
-
Parameters:
-
data: data in format that `JSONDecoder` like ;)
-
"""
-
if "$bytes" in data:
-
return base64.b64decode(data["$bytes"])
-
elif "$link" in data:
-
return make_cid(data["$link"])
-
elif "$type" in data:
-
dataType = data["$type"]
-
del data["$type"]
-
return Data(dataType, json.dumps(data))
-
else:
-
return data
-
-
-
def _convertDataToFakeJSON(data):
-
if isinstance(data, bytes):
-
return {"$bytes": base64.b64encode(data)}
-
elif isinstance(data, (CIDv0, CIDv1)):
-
return {"link": data.encode()}
-
elif isinstance(data, dict):
-
for item in data:
-
data[item] = _convertDataToFakeJSON(data[item])
-
elif isinstance(data, (tuple, list, set)):
-
return [_convertDataToFakeJSON(item) for item in data]
-
else:
-
return data
-
-
-
class DataEncoder(json.JSONEncoder):
-
"""
-
A superset of `JSONEncoder` to support ATProto data.
-
"""
-
-
def default(self, o):
-
result = _convertDataToFakeJSON(o)
-
return super().default(result)
-61
src/atpasser/data/_wrapper.py
···
-
from json import loads
-
from typing import Callable, Any
-
from ._data import *
-
import functools
-
-
# Pyright did the whole job. Thank it.
-
-
-
class DataDecoder(json.JSONDecoder):
-
"""
-
A superset of `JSONDecoder` to support ATProto data.
-
"""
-
-
def __init__(
-
self,
-
*,
-
object_hook: Callable[[dict[str, Any]], Any] | None = dataHook,
-
parse_float: Callable[[str], Any] | None = None,
-
parse_int: Callable[[str], Any] | None = None,
-
parse_constant: Callable[[str], Any] | None = None,
-
strict: bool = True,
-
object_pairs_hook: Callable[[list[tuple[str, Any]]], Any] | None = None,
-
) -> None:
-
super().__init__(
-
object_hook=object_hook,
-
parse_float=parse_float,
-
parse_int=parse_int,
-
parse_constant=parse_constant,
-
strict=strict,
-
object_pairs_hook=object_pairs_hook,
-
)
-
-
-
# Screw it. I have to make 4 `json`-like functions.
-
-
-
def _dataDecoratorForDump(func):
-
@functools.wraps(func)
-
def wrapper(obj, *args, **kwargs):
-
kwargs.setdefault("cls", DataEncoder)
-
return func(obj, *args, **kwargs)
-
-
return wrapper
-
-
-
def _dataDecoratorForLoad(func):
-
@functools.wraps(func)
-
def wrapper(obj, *args, **kwargs):
-
kwargs.setdefault("cls", DataDecoder)
-
return func(obj, *args, **kwargs)
-
-
return wrapper
-
-
-
dump = _dataDecoratorForDump(json.dump)
-
dumps = _dataDecoratorForDump(json.dumps)
-
load = _dataDecoratorForLoad(json.load)
-
loads = _dataDecoratorForLoad(json.loads)
-
"""
-
Wrapper of the JSON functions to support ATProto data.
-
"""
-137
src/atpasser/data/cbor.py
···
-
from datetime import tzinfo
-
import typing
-
import cbor2
-
import cid
-
-
from .data import dataHook, Data
-
-
-
def tagHook(decoder: cbor2.CBORDecoder, tag: cbor2.CBORTag, shareable_index=None):
-
"""
-
A simple tag hook for CID support.
-
"""
-
return cid.from_bytes(tag.value) if tag.tag == 42 else tag
-
-
-
class CBOREncoder(cbor2.CBOREncoder):
-
"""
-
Wrapper of cbor2.CBOREncoder.
-
"""
-
-
def __init__(
-
self,
-
fp: typing.IO[bytes],
-
datetime_as_timestamp: bool = False,
-
timezone: tzinfo | None = None,
-
value_sharing: bool = False,
-
default: (
-
typing.Callable[[cbor2.CBOREncoder, typing.Any], typing.Any] | None
-
) = None,
-
canonical: bool = False,
-
date_as_datetime: bool = False,
-
string_referencing: bool = False,
-
indefinite_containers: bool = False,
-
):
-
super().__init__(
-
fp,
-
datetime_as_timestamp,
-
timezone,
-
value_sharing,
-
default,
-
canonical,
-
date_as_datetime,
-
string_referencing,
-
indefinite_containers,
-
)
-
-
@cbor2.shareable_encoder
-
def cidOrDataEncoder(self: cbor2.CBOREncoder, value: cid.CIDv0 | cid.CIDv1 | Data):
-
"""
-
Encode CID or Data to CBOR Tag.
-
"""
-
if isinstance(value, (cid.CIDv0, cid.CIDv1)):
-
self.encode(cbor2.CBORTag(42, value.encode()))
-
elif isinstance(value, Data):
-
self.encode(value.data())
-
-
-
def _cborObjectHook(decoder: cbor2.CBORDecoder, value):
-
return dataHook(value)
-
-
-
class CBORDecoder(cbor2.CBORDecoder):
-
"""
-
Wrapper of cbor2.CBORDecoder.
-
"""
-
-
def __init__(
-
self,
-
fp: typing.IO[bytes],
-
tag_hook: (
-
typing.Callable[[cbor2.CBORDecoder, cbor2.CBORTag], typing.Any] | None
-
) = tagHook,
-
object_hook: (
-
typing.Callable[
-
[cbor2.CBORDecoder, dict[typing.Any, typing.Any]], typing.Any
-
]
-
| None
-
) = _cborObjectHook,
-
str_errors: typing.Literal["strict", "error", "replace"] = "strict",
-
):
-
super().__init__(fp, tag_hook, object_hook, str_errors)
-
-
-
# Make things for CBOR again.
-
-
from io import BytesIO
-
-
-
def dumps(
-
obj: object,
-
datetime_as_timestamp: bool = False,
-
timezone: tzinfo | None = None,
-
value_sharing: bool = False,
-
default: typing.Callable[[cbor2.CBOREncoder, typing.Any], typing.Any] | None = None,
-
canonical: bool = False,
-
date_as_datetime: bool = False,
-
string_referencing: bool = False,
-
indefinite_containers: bool = False,
-
) -> bytes:
-
with BytesIO() as fp:
-
CBOREncoder(
-
fp,
-
datetime_as_timestamp=datetime_as_timestamp,
-
timezone=timezone,
-
value_sharing=value_sharing,
-
default=default,
-
canonical=canonical,
-
date_as_datetime=date_as_datetime,
-
string_referencing=string_referencing,
-
indefinite_containers=indefinite_containers,
-
).encode(obj)
-
return fp.getvalue()
-
-
-
def dump(
-
obj: object,
-
fp: typing.IO[bytes],
-
datetime_as_timestamp: bool = False,
-
timezone: tzinfo | None = None,
-
value_sharing: bool = False,
-
default: typing.Callable[[cbor2.CBOREncoder, typing.Any], typing.Any] | None = None,
-
canonical: bool = False,
-
date_as_datetime: bool = False,
-
string_referencing: bool = False,
-
indefinite_containers: bool = False,
-
) -> None:
-
CBOREncoder(
-
fp,
-
datetime_as_timestamp=datetime_as_timestamp,
-
timezone=timezone,
-
value_sharing=value_sharing,
-
default=default,
-
canonical=canonical,
-
date_as_datetime=date_as_datetime,
-
string_referencing=string_referencing,
-
indefinite_containers=indefinite_containers,
-
).encode(obj)
-179
tests/_strings.py
···
-
from atpasser import did, handle, nsid, rKey, uri
-
-
-
testStrings, testMethods = {}, {}
-
-
-
testStrings[
-
"did"
-
-
] = """did:plc:z72i7hdynmk6r22z27h6tvur
-
-
did:web:blueskyweb.xyz
-
-
did:method:val:two
-
-
did:m:v
-
-
did:method::::val
-
-
did:method:-:_:.
-
-
did:key:zQ3shZc2QzApp2oymGvQbzP8eKheVshBHbU4ZYjeXqwSKEn6N
-
-
did:METHOD:val
-
-
did:m123:val
-
-
DID:method:val
-
did:method:
-
-
did:method:val/two
-
-
did:method:val?two
-
-
did:method:val#two"""
-
-
testMethods["did"] = did.DID
-
-
-
testStrings[
-
"handle"
-
-
] = """jay.bsky.social
-
-
8.cn
-
-
name.t--t
-
-
XX.LCS.MIT.EDU
-
a.co
-
-
xn--notarealidn.com
-
-
xn--fiqa61au8b7zsevnm8ak20mc4a87e.xn--fiqs8s
-
-
xn--ls8h.test
-
example.t
-
-
jo@hn.test
-
-
๐Ÿ’ฉ.tes
-
t
-
john..test
-
-
xn--bcher-.tld
-
-
john.0
-
-
cn.8
-
-
www.maseล‚kowski.pl.com
-
-
org
-
-
name.org.
-
-
2gzyxa5ihm7nsggfxnu52rck2vv4rvmdlkiu3zzui5du4xyclen53wid.onion
-
laptop.local
-
-
blah.arpa"""
-
-
testMethods["handle"] = handle.Handle
-
-
-
testStrings[
-
"nsid"
-
-
] = """com.example.fooBar
-
-
net.users.bob.ping
-
-
a-0.b-1.c
-
-
a.b.c
-
-
com.example.fooBarV2
-
-
cn.8.lex.stuff
-
-
com.exa๐Ÿ’ฉple.thin
-
com.example
-
-
com.example.3"""
-
-
testMethods["nsid"] = nsid.NSID
-
-
-
testStrings[
-
-
"rkey"
-
-
] = """3jui7kd54zh2y
-
self
-
example.com
-
-
~1.2-3_
-
-
dHJ1ZQ
-
pre:fix
-
-
_
-
-
alpha/beta
-
.
-
..
-
-
#extra
-
-
@handle
-
-
any space
-
-
any+space
-
-
number[3]
-
-
number(3)
-
-
"quote"
-
-
dHJ1ZQ=="""
-
-
testMethods["rkey"] = rKey.RKey
-
-
-
testStrings[
-
"uri"
-
-
] = """at://foo.com/com.example.foo/123
-
-
at://foo.com/example/123
-
-
at://computer
-
-
at://example.com:3000
-
-
at://foo.com/
-
-
at://user:pass@foo.com"""
-
-
testMethods["uri"] = uri.URI
-
-
-
for item in testMethods:
-
-
print(f"START TEST {item}")
-
-
for value in testStrings[item].splitlines():
-
-
print(f"Value: {value}")
-
-
try:
-
-
print(f"str(): {str(testMethods[item](value))}")
-
-
except Exception as e:
-
-
print(f"ร— {e}")
-
+41
src/atpasser/model/typed.py
···
+
from typing import Any
+
from pydantic import field_serializer
+
from .base import DataModel
+
+
class TypedDataModel(DataModel):
+
"""
+
Model for AT Protocol data with type information.
+
+
Includes support for $type field that specifies Lexicon schema.
+
"""
+
+
type: str | None = None
+
"""Lexicon schema type identifier"""
+
+
def __init__(self, **data: Any) -> None:
+
"""
+
Initialize typed data model with automatic $type handling.
+
+
Args:
+
**data: Data including optional $type field
+
"""
+
# Extract $type if present
+
dataType = data.pop("$type", None)
+
if dataType:
+
data["type"] = dataType
+
super().__init__(**data)
+
+
@field_serializer("type")
+
def serializeType(self, v: str | None) -> dict[str, str] | None:
+
"""
+
Serialize type field to $type object.
+
+
Args:
+
v: Type value to serialize
+
+
Returns:
+
$type object if type is not None
+
"""
+
if v is not None:
+
return {"$type": v}
+
return None
+36
src/atpasser/model/exceptions.py
···
+
class AtprotoModelError(Exception):
+
"""Base exception for all AT Protocol model errors"""
+
pass
+
+
class ValidationError(AtprotoModelError):
+
"""Raised when data validation fails"""
+
def __init__(self, field: str, message: str):
+
self.field = field
+
self.message = message
+
super().__init__(f"Validation error for field '{field}': {message}")
+
+
class SerializationError(AtprotoModelError):
+
"""Raised when data serialization fails"""
+
def __init__(self, field: str, message: str):
+
self.field = field
+
self.message = message
+
super().__init__(f"Serialization error for field '{field}': {message}")
+
+
class DeserializationError(AtprotoModelError):
+
"""Raised when data deserialization fails"""
+
def __init__(self, field: str, message: str):
+
self.field = field
+
self.message = message
+
super().__init__(f"Deserialization error for field '{field}': {message}")
+
+
class InvalidCIDError(AtprotoModelError):
+
"""Raised when CID validation fails"""
+
pass
+
+
class InvalidBlobError(AtprotoModelError):
+
"""Raised when blob validation fails"""
+
pass
+
+
class TypeMismatchError(AtprotoModelError):
+
"""Raised when type validation fails"""
+
pass
+3
src/atpasser/model/__init__.py
···
ProcedureModel,
SubscriptionModel
)
+
from .converter import LexiconConverter
__all__ = [
"DataModel",
···
"QueryModel",
"ProcedureModel",
"SubscriptionModel",
+
# Converter
+
"LexiconConverter",
# Exceptions
"AtprotoModelError",
"ValidationError",
-5
src/atpasser/model/base.py
···
import base64
-
import re
-
from datetime import datetime
from typing import Any
-
from collections.abc import Mapping
from cid.cid import CIDv1, make_cid
from pydantic import BaseModel, field_serializer, field_validator, ConfigDict
-
from pydantic.fields import FieldInfo
from .exceptions import (
-
ValidationError,
SerializationError,
DeserializationError,
InvalidCIDError
+5 -5
src/atpasser/model/blob.py
···
from typing import Any
from pydantic import field_validator, ConfigDict
from .base import DataModel
-
from .exceptions import ValidationError, InvalidBlobError
+
from .exceptions import ValidationError
class BlobModel(DataModel):
"""
···
Validated size
Raises:
-
ValueError: If size is not positive
+
ValidationError: If size is not positive
"""
if v <= 0:
-
raise ValueError("Blob size must be positive and non-zero")
+
raise ValidationError(field="size", message="must be positive and non-zero")
return v
@field_validator("mimeType")
···
Validated MIME type
Raises:
-
ValueError: If MIME type is empty
+
ValidationError: If MIME type is empty
"""
if not v:
-
raise ValueError("MIME type cannot be empty")
+
raise ValidationError(field="mimeType", message="cannot be empty")
return v
+323 -10
poetry.lock
···
description = "Python package for providing Mozilla's CA Bundle."
optional = false
python-versions = ">=3.7"
-
groups = ["main"]
+
groups = ["main", "dev"]
files = [
{file = "certifi-2025.8.3-py3-none-any.whl", hash = "sha256:f6c12493cfb1b06ba2ff328595af9350c65d6644968e5d3a2ffd78699af217a5"},
{file = "certifi-2025.8.3.tar.gz", hash = "sha256:e564105f78ded564e3ae7c923924435e1daa7463faeab5bb932bc53ffae63407"},
···
description = "Foreign Function Interface for Python calling C code."
optional = false
python-versions = ">=3.9"
-
groups = ["main"]
+
groups = ["main", "dev"]
markers = "platform_python_implementation != \"PyPy\""
files = [
{file = "cffi-2.0.0-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:0cf2d91ecc3fcc0625c2c530fe004f82c110405f101548512cce44322fa8ac44"},
···
description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
optional = false
python-versions = ">=3.7"
-
groups = ["main"]
+
groups = ["main", "dev"]
files = [
{file = "charset_normalizer-3.4.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:fb7f67a1bfa6e40b438170ebdc8158b78dc465a5a67b6dde178a46987b244a72"},
{file = "charset_normalizer-3.4.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cc9370a2da1ac13f0153780040f465839e6cccb4a1e44810124b4e22483c93fe"},
···
{file = "charset_normalizer-3.4.3.tar.gz", hash = "sha256:6fce4b8500244f6fcb71465d4a4930d132ba9ab8e71a7859e6a5d59851068d14"},
]
+
[[package]]
+
name = "colorama"
+
version = "0.4.6"
+
description = "Cross-platform colored terminal text."
+
optional = false
+
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
+
groups = ["dev"]
+
markers = "sys_platform == \"win32\""
+
files = [
+
{file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"},
+
{file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
+
]
+
+
[[package]]
+
name = "coverage"
+
version = "7.10.7"
+
description = "Code coverage measurement for Python"
+
optional = false
+
python-versions = ">=3.9"
+
groups = ["dev"]
+
files = [
+
{file = "coverage-7.10.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:fc04cc7a3db33664e0c2d10eb8990ff6b3536f6842c9590ae8da4c614b9ed05a"},
+
{file = "coverage-7.10.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e201e015644e207139f7e2351980feb7040e6f4b2c2978892f3e3789d1c125e5"},
+
{file = "coverage-7.10.7-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:240af60539987ced2c399809bd34f7c78e8abe0736af91c3d7d0e795df633d17"},
+
{file = "coverage-7.10.7-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:8421e088bc051361b01c4b3a50fd39a4b9133079a2229978d9d30511fd05231b"},
+
{file = "coverage-7.10.7-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6be8ed3039ae7f7ac5ce058c308484787c86e8437e72b30bf5e88b8ea10f3c87"},
+
{file = "coverage-7.10.7-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e28299d9f2e889e6d51b1f043f58d5f997c373cc12e6403b90df95b8b047c13e"},
+
{file = "coverage-7.10.7-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:c4e16bd7761c5e454f4efd36f345286d6f7c5fa111623c355691e2755cae3b9e"},
+
{file = "coverage-7.10.7-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:b1c81d0e5e160651879755c9c675b974276f135558cf4ba79fee7b8413a515df"},
+
{file = "coverage-7.10.7-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:606cc265adc9aaedcc84f1f064f0e8736bc45814f15a357e30fca7ecc01504e0"},
+
{file = "coverage-7.10.7-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:10b24412692df990dbc34f8fb1b6b13d236ace9dfdd68df5b28c2e39cafbba13"},
+
{file = "coverage-7.10.7-cp310-cp310-win32.whl", hash = "sha256:b51dcd060f18c19290d9b8a9dd1e0181538df2ce0717f562fff6cf74d9fc0b5b"},
+
{file = "coverage-7.10.7-cp310-cp310-win_amd64.whl", hash = "sha256:3a622ac801b17198020f09af3eaf45666b344a0d69fc2a6ffe2ea83aeef1d807"},
+
{file = "coverage-7.10.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a609f9c93113be646f44c2a0256d6ea375ad047005d7f57a5c15f614dc1b2f59"},
+
{file = "coverage-7.10.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:65646bb0359386e07639c367a22cf9b5bf6304e8630b565d0626e2bdf329227a"},
+
{file = "coverage-7.10.7-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:5f33166f0dfcce728191f520bd2692914ec70fac2713f6bf3ce59c3deacb4699"},
+
{file = "coverage-7.10.7-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:35f5e3f9e455bb17831876048355dca0f758b6df22f49258cb5a91da23ef437d"},
+
{file = "coverage-7.10.7-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4da86b6d62a496e908ac2898243920c7992499c1712ff7c2b6d837cc69d9467e"},
+
{file = "coverage-7.10.7-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:6b8b09c1fad947c84bbbc95eca841350fad9cbfa5a2d7ca88ac9f8d836c92e23"},
+
{file = "coverage-7.10.7-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:4376538f36b533b46f8971d3a3e63464f2c7905c9800db97361c43a2b14792ab"},
+
{file = "coverage-7.10.7-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:121da30abb574f6ce6ae09840dae322bef734480ceafe410117627aa54f76d82"},
+
{file = "coverage-7.10.7-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:88127d40df529336a9836870436fc2751c339fbaed3a836d42c93f3e4bd1d0a2"},
+
{file = "coverage-7.10.7-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ba58bbcd1b72f136080c0bccc2400d66cc6115f3f906c499013d065ac33a4b61"},
+
{file = "coverage-7.10.7-cp311-cp311-win32.whl", hash = "sha256:972b9e3a4094b053a4e46832b4bc829fc8a8d347160eb39d03f1690316a99c14"},
+
{file = "coverage-7.10.7-cp311-cp311-win_amd64.whl", hash = "sha256:a7b55a944a7f43892e28ad4bc0561dfd5f0d73e605d1aa5c3c976b52aea121d2"},
+
{file = "coverage-7.10.7-cp311-cp311-win_arm64.whl", hash = "sha256:736f227fb490f03c6488f9b6d45855f8e0fd749c007f9303ad30efab0e73c05a"},
+
{file = "coverage-7.10.7-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7bb3b9ddb87ef7725056572368040c32775036472d5a033679d1fa6c8dc08417"},
+
{file = "coverage-7.10.7-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:18afb24843cbc175687225cab1138c95d262337f5473512010e46831aa0c2973"},
+
{file = "coverage-7.10.7-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:399a0b6347bcd3822be369392932884b8216d0944049ae22925631a9b3d4ba4c"},
+
{file = "coverage-7.10.7-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:314f2c326ded3f4b09be11bc282eb2fc861184bc95748ae67b360ac962770be7"},
+
{file = "coverage-7.10.7-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c41e71c9cfb854789dee6fc51e46743a6d138b1803fab6cb860af43265b42ea6"},
+
{file = "coverage-7.10.7-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc01f57ca26269c2c706e838f6422e2a8788e41b3e3c65e2f41148212e57cd59"},
+
{file = "coverage-7.10.7-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a6442c59a8ac8b85812ce33bc4d05bde3fb22321fa8294e2a5b487c3505f611b"},
+
{file = "coverage-7.10.7-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:78a384e49f46b80fb4c901d52d92abe098e78768ed829c673fbb53c498bef73a"},
+
{file = "coverage-7.10.7-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:5e1e9802121405ede4b0133aa4340ad8186a1d2526de5b7c3eca519db7bb89fb"},
+
{file = "coverage-7.10.7-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:d41213ea25a86f69efd1575073d34ea11aabe075604ddf3d148ecfec9e1e96a1"},
+
{file = "coverage-7.10.7-cp312-cp312-win32.whl", hash = "sha256:77eb4c747061a6af8d0f7bdb31f1e108d172762ef579166ec84542f711d90256"},
+
{file = "coverage-7.10.7-cp312-cp312-win_amd64.whl", hash = "sha256:f51328ffe987aecf6d09f3cd9d979face89a617eacdaea43e7b3080777f647ba"},
+
{file = "coverage-7.10.7-cp312-cp312-win_arm64.whl", hash = "sha256:bda5e34f8a75721c96085903c6f2197dc398c20ffd98df33f866a9c8fd95f4bf"},
+
{file = "coverage-7.10.7-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:981a651f543f2854abd3b5fcb3263aac581b18209be49863ba575de6edf4c14d"},
+
{file = "coverage-7.10.7-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:73ab1601f84dc804f7812dc297e93cd99381162da39c47040a827d4e8dafe63b"},
+
{file = "coverage-7.10.7-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:a8b6f03672aa6734e700bbcd65ff050fd19cddfec4b031cc8cf1c6967de5a68e"},
+
{file = "coverage-7.10.7-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:10b6ba00ab1132a0ce4428ff68cf50a25efd6840a42cdf4239c9b99aad83be8b"},
+
{file = "coverage-7.10.7-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c79124f70465a150e89340de5963f936ee97097d2ef76c869708c4248c63ca49"},
+
{file = "coverage-7.10.7-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:69212fbccdbd5b0e39eac4067e20a4a5256609e209547d86f740d68ad4f04911"},
+
{file = "coverage-7.10.7-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:7ea7c6c9d0d286d04ed3541747e6597cbe4971f22648b68248f7ddcd329207f0"},
+
{file = "coverage-7.10.7-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b9be91986841a75042b3e3243d0b3cb0b2434252b977baaf0cd56e960fe1e46f"},
+
{file = "coverage-7.10.7-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:b281d5eca50189325cfe1f365fafade89b14b4a78d9b40b05ddd1fc7d2a10a9c"},
+
{file = "coverage-7.10.7-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:99e4aa63097ab1118e75a848a28e40d68b08a5e19ce587891ab7fd04475e780f"},
+
{file = "coverage-7.10.7-cp313-cp313-win32.whl", hash = "sha256:dc7c389dce432500273eaf48f410b37886be9208b2dd5710aaf7c57fd442c698"},
+
{file = "coverage-7.10.7-cp313-cp313-win_amd64.whl", hash = "sha256:cac0fdca17b036af3881a9d2729a850b76553f3f716ccb0360ad4dbc06b3b843"},
+
{file = "coverage-7.10.7-cp313-cp313-win_arm64.whl", hash = "sha256:4b6f236edf6e2f9ae8fcd1332da4e791c1b6ba0dc16a2dc94590ceccb482e546"},
+
{file = "coverage-7.10.7-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:a0ec07fd264d0745ee396b666d47cef20875f4ff2375d7c4f58235886cc1ef0c"},
+
{file = "coverage-7.10.7-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:dd5e856ebb7bfb7672b0086846db5afb4567a7b9714b8a0ebafd211ec7ce6a15"},
+
{file = "coverage-7.10.7-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:f57b2a3c8353d3e04acf75b3fed57ba41f5c0646bbf1d10c7c282291c97936b4"},
+
{file = "coverage-7.10.7-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:1ef2319dd15a0b009667301a3f84452a4dc6fddfd06b0c5c53ea472d3989fbf0"},
+
{file = "coverage-7.10.7-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:83082a57783239717ceb0ad584de3c69cf581b2a95ed6bf81ea66034f00401c0"},
+
{file = "coverage-7.10.7-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:50aa94fb1fb9a397eaa19c0d5ec15a5edd03a47bf1a3a6111a16b36e190cff65"},
+
{file = "coverage-7.10.7-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:2120043f147bebb41c85b97ac45dd173595ff14f2a584f2963891cbcc3091541"},
+
{file = "coverage-7.10.7-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:2fafd773231dd0378fdba66d339f84904a8e57a262f583530f4f156ab83863e6"},
+
{file = "coverage-7.10.7-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:0b944ee8459f515f28b851728ad224fa2d068f1513ef6b7ff1efafeb2185f999"},
+
{file = "coverage-7.10.7-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:4b583b97ab2e3efe1b3e75248a9b333bd3f8b0b1b8e5b45578e05e5850dfb2c2"},
+
{file = "coverage-7.10.7-cp313-cp313t-win32.whl", hash = "sha256:2a78cd46550081a7909b3329e2266204d584866e8d97b898cd7fb5ac8d888b1a"},
+
{file = "coverage-7.10.7-cp313-cp313t-win_amd64.whl", hash = "sha256:33a5e6396ab684cb43dc7befa386258acb2d7fae7f67330ebb85ba4ea27938eb"},
+
{file = "coverage-7.10.7-cp313-cp313t-win_arm64.whl", hash = "sha256:86b0e7308289ddde73d863b7683f596d8d21c7d8664ce1dee061d0bcf3fbb4bb"},
+
{file = "coverage-7.10.7-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:b06f260b16ead11643a5a9f955bd4b5fd76c1a4c6796aeade8520095b75de520"},
+
{file = "coverage-7.10.7-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:212f8f2e0612778f09c55dd4872cb1f64a1f2b074393d139278ce902064d5b32"},
+
{file = "coverage-7.10.7-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:3445258bcded7d4aa630ab8296dea4d3f15a255588dd535f980c193ab6b95f3f"},
+
{file = "coverage-7.10.7-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:bb45474711ba385c46a0bfe696c695a929ae69ac636cda8f532be9e8c93d720a"},
+
{file = "coverage-7.10.7-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:813922f35bd800dca9994c5971883cbc0d291128a5de6b167c7aa697fcf59360"},
+
{file = "coverage-7.10.7-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:93c1b03552081b2a4423091d6fb3787265b8f86af404cff98d1b5342713bdd69"},
+
{file = "coverage-7.10.7-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:cc87dd1b6eaf0b848eebb1c86469b9f72a1891cb42ac7adcfbce75eadb13dd14"},
+
{file = "coverage-7.10.7-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:39508ffda4f343c35f3236fe8d1a6634a51f4581226a1262769d7f970e73bffe"},
+
{file = "coverage-7.10.7-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:925a1edf3d810537c5a3abe78ec5530160c5f9a26b1f4270b40e62cc79304a1e"},
+
{file = "coverage-7.10.7-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:2c8b9a0636f94c43cd3576811e05b89aa9bc2d0a85137affc544ae5cb0e4bfbd"},
+
{file = "coverage-7.10.7-cp314-cp314-win32.whl", hash = "sha256:b7b8288eb7cdd268b0304632da8cb0bb93fadcfec2fe5712f7b9cc8f4d487be2"},
+
{file = "coverage-7.10.7-cp314-cp314-win_amd64.whl", hash = "sha256:1ca6db7c8807fb9e755d0379ccc39017ce0a84dcd26d14b5a03b78563776f681"},
+
{file = "coverage-7.10.7-cp314-cp314-win_arm64.whl", hash = "sha256:097c1591f5af4496226d5783d036bf6fd6cd0cbc132e071b33861de756efb880"},
+
{file = "coverage-7.10.7-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:a62c6ef0d50e6de320c270ff91d9dd0a05e7250cac2a800b7784bae474506e63"},
+
{file = "coverage-7.10.7-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:9fa6e4dd51fe15d8738708a973470f67a855ca50002294852e9571cdbd9433f2"},
+
{file = "coverage-7.10.7-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:8fb190658865565c549b6b4706856d6a7b09302c797eb2cf8e7fe9dabb043f0d"},
+
{file = "coverage-7.10.7-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:affef7c76a9ef259187ef31599a9260330e0335a3011732c4b9effa01e1cd6e0"},
+
{file = "coverage-7.10.7-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6e16e07d85ca0cf8bafe5f5d23a0b850064e8e945d5677492b06bbe6f09cc699"},
+
{file = "coverage-7.10.7-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:03ffc58aacdf65d2a82bbeb1ffe4d01ead4017a21bfd0454983b88ca73af94b9"},
+
{file = "coverage-7.10.7-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:1b4fd784344d4e52647fd7857b2af5b3fbe6c239b0b5fa63e94eb67320770e0f"},
+
{file = "coverage-7.10.7-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:0ebbaddb2c19b71912c6f2518e791aa8b9f054985a0769bdb3a53ebbc765c6a1"},
+
{file = "coverage-7.10.7-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:a2d9a3b260cc1d1dbdb1c582e63ddcf5363426a1a68faa0f5da28d8ee3c722a0"},
+
{file = "coverage-7.10.7-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:a3cc8638b2480865eaa3926d192e64ce6c51e3d29c849e09d5b4ad95efae5399"},
+
{file = "coverage-7.10.7-cp314-cp314t-win32.whl", hash = "sha256:67f8c5cbcd3deb7a60b3345dffc89a961a484ed0af1f6f73de91705cc6e31235"},
+
{file = "coverage-7.10.7-cp314-cp314t-win_amd64.whl", hash = "sha256:e1ed71194ef6dea7ed2d5cb5f7243d4bcd334bfb63e59878519be558078f848d"},
+
{file = "coverage-7.10.7-cp314-cp314t-win_arm64.whl", hash = "sha256:7fe650342addd8524ca63d77b2362b02345e5f1a093266787d210c70a50b471a"},
+
{file = "coverage-7.10.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fff7b9c3f19957020cac546c70025331113d2e61537f6e2441bc7657913de7d3"},
+
{file = "coverage-7.10.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:bc91b314cef27742da486d6839b677b3f2793dfe52b51bbbb7cf736d5c29281c"},
+
{file = "coverage-7.10.7-cp39-cp39-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:567f5c155eda8df1d3d439d40a45a6a5f029b429b06648235f1e7e51b522b396"},
+
{file = "coverage-7.10.7-cp39-cp39-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:2af88deffcc8a4d5974cf2d502251bc3b2db8461f0b66d80a449c33757aa9f40"},
+
{file = "coverage-7.10.7-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c7315339eae3b24c2d2fa1ed7d7a38654cba34a13ef19fbcb9425da46d3dc594"},
+
{file = "coverage-7.10.7-cp39-cp39-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:912e6ebc7a6e4adfdbb1aec371ad04c68854cd3bf3608b3514e7ff9062931d8a"},
+
{file = "coverage-7.10.7-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:f49a05acd3dfe1ce9715b657e28d138578bc40126760efb962322c56e9ca344b"},
+
{file = "coverage-7.10.7-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:cce2109b6219f22ece99db7644b9622f54a4e915dad65660ec435e89a3ea7cc3"},
+
{file = "coverage-7.10.7-cp39-cp39-musllinux_1_2_riscv64.whl", hash = "sha256:f3c887f96407cea3916294046fc7dab611c2552beadbed4ea901cbc6a40cc7a0"},
+
{file = "coverage-7.10.7-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:635adb9a4507c9fd2ed65f39693fa31c9a3ee3a8e6dc64df033e8fdf52a7003f"},
+
{file = "coverage-7.10.7-cp39-cp39-win32.whl", hash = "sha256:5a02d5a850e2979b0a014c412573953995174743a3f7fa4ea5a6e9a3c5617431"},
+
{file = "coverage-7.10.7-cp39-cp39-win_amd64.whl", hash = "sha256:c134869d5ffe34547d14e174c866fd8fe2254918cc0a95e99052903bc1543e07"},
+
{file = "coverage-7.10.7-py3-none-any.whl", hash = "sha256:f7941f6f2fe6dd6807a1208737b8a0cbcf1cc6d7b07d24998ad2d63590868260"},
+
{file = "coverage-7.10.7.tar.gz", hash = "sha256:f4ab143ab113be368a3e9b795f9cd7906c5ef407d6173fe9675a902e1fffc239"},
+
]
+
+
[package.extras]
+
toml = ["tomli ; python_full_version <= \"3.11.0a6\""]
+
[[package]]
name = "cryptography"
version = "45.0.7"
description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers."
optional = false
python-versions = "!=3.9.0,!=3.9.1,>=3.7"
-
groups = ["main"]
+
groups = ["main", "dev"]
files = [
{file = "cryptography-45.0.7-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:3be4f21c6245930688bd9e162829480de027f8bf962ede33d4f8ba7d67a00cee"},
{file = "cryptography-45.0.7-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:67285f8a611b0ebc0857ced2081e30302909f571a46bfa7a3cc0ad303fe015c6"},
···
description = "Internationalized Domain Names in Applications (IDNA)"
optional = false
python-versions = ">=3.6"
-
groups = ["main"]
+
groups = ["main", "dev"]
files = [
{file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"},
{file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"},
···
[package.extras]
all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"]
+
[[package]]
+
name = "iniconfig"
+
version = "2.1.0"
+
description = "brain-dead simple config-ini parsing"
+
optional = false
+
python-versions = ">=3.8"
+
groups = ["dev"]
+
files = [
+
{file = "iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760"},
+
{file = "iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7"},
+
]
+
[[package]]
name = "jsonpath-ng"
version = "1.7.0"
···
{file = "morphys-1.0-py2.py3-none-any.whl", hash = "sha256:76d6dbaa4d65f597e59d332c81da786d83e4669387b9b2a750cfec74e7beec20"},
]
+
[[package]]
+
name = "packaging"
+
version = "25.0"
+
description = "Core utilities for Python packages"
+
optional = false
+
python-versions = ">=3.8"
+
groups = ["dev"]
+
files = [
+
{file = "packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484"},
+
{file = "packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f"},
+
]
+
+
[[package]]
+
name = "pluggy"
+
version = "1.6.0"
+
description = "plugin and hook calling mechanisms for python"
+
optional = false
+
python-versions = ">=3.9"
+
groups = ["dev"]
+
files = [
+
{file = "pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746"},
+
{file = "pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3"},
+
]
+
+
[package.extras]
+
dev = ["pre-commit", "tox"]
+
testing = ["coverage", "pytest", "pytest-benchmark"]
+
[[package]]
name = "ply"
version = "3.11"
···
description = "C parser in Python"
optional = false
python-versions = ">=3.8"
-
groups = ["main"]
+
groups = ["main", "dev"]
markers = "platform_python_implementation != \"PyPy\" and implementation_name != \"PyPy\""
files = [
{file = "pycparser-2.23-py3-none-any.whl", hash = "sha256:e5c6e8d3fbad53479cab09ac03729e0a9faf2bee3db8208a550daf5af81a5934"},
···
[package.dependencies]
typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0"
+
[[package]]
+
name = "pygithub"
+
version = "2.8.1"
+
description = "Use the full Github API v3"
+
optional = false
+
python-versions = ">=3.8"
+
groups = ["dev"]
+
files = [
+
{file = "pygithub-2.8.1-py3-none-any.whl", hash = "sha256:23a0a5bca93baef082e03411bf0ce27204c32be8bfa7abc92fe4a3e132936df0"},
+
{file = "pygithub-2.8.1.tar.gz", hash = "sha256:341b7c78521cb07324ff670afd1baa2bf5c286f8d9fd302c1798ba594a5400c9"},
+
]
+
+
[package.dependencies]
+
pyjwt = {version = ">=2.4.0", extras = ["crypto"]}
+
pynacl = ">=1.4.0"
+
requests = ">=2.14.0"
+
typing-extensions = ">=4.5.0"
+
urllib3 = ">=1.26.0"
+
+
[[package]]
+
name = "pygments"
+
version = "2.19.2"
+
description = "Pygments is a syntax highlighting package written in Python."
+
optional = false
+
python-versions = ">=3.8"
+
groups = ["dev"]
+
files = [
+
{file = "pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b"},
+
{file = "pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887"},
+
]
+
+
[package.extras]
+
windows-terminal = ["colorama (>=0.4.6)"]
+
+
[[package]]
+
name = "pyjwt"
+
version = "2.10.1"
+
description = "JSON Web Token implementation in Python"
+
optional = false
+
python-versions = ">=3.9"
+
groups = ["dev"]
+
files = [
+
{file = "PyJWT-2.10.1-py3-none-any.whl", hash = "sha256:dcdd193e30abefd5debf142f9adfcdd2b58004e644f25406ffaebd50bd98dacb"},
+
{file = "pyjwt-2.10.1.tar.gz", hash = "sha256:3cc5772eb20009233caf06e9d8a0577824723b44e6648ee0a2aedb6cf9381953"},
+
]
+
+
[package.dependencies]
+
cryptography = {version = ">=3.4.0", optional = true, markers = "extra == \"crypto\""}
+
+
[package.extras]
+
crypto = ["cryptography (>=3.4.0)"]
+
dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx", "sphinx-rtd-theme", "zope.interface"]
+
docs = ["sphinx", "sphinx-rtd-theme", "zope.interface"]
+
tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"]
+
[[package]]
name = "pyld"
version = "2.0.4"
···
blake2 = ["pyblake2"]
sha3 = ["pysha3"]
+
[[package]]
+
name = "pynacl"
+
version = "1.6.0"
+
description = "Python binding to the Networking and Cryptography (NaCl) library"
+
optional = false
+
python-versions = ">=3.8"
+
groups = ["dev"]
+
files = [
+
{file = "pynacl-1.6.0-cp314-cp314t-macosx_10_10_universal2.whl", hash = "sha256:f46386c24a65383a9081d68e9c2de909b1834ec74ff3013271f1bca9c2d233eb"},
+
{file = "pynacl-1.6.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:dea103a1afcbc333bc0e992e64233d360d393d1e63d0bc88554f572365664348"},
+
{file = "pynacl-1.6.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:04f20784083014e265ad58c1b2dd562c3e35864b5394a14ab54f5d150ee9e53e"},
+
{file = "pynacl-1.6.0-cp314-cp314t-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:bbcc4452a1eb10cd5217318c822fde4be279c9de8567f78bad24c773c21254f8"},
+
{file = "pynacl-1.6.0-cp314-cp314t-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:51fed9fe1bec9e7ff9af31cd0abba179d0e984a2960c77e8e5292c7e9b7f7b5d"},
+
{file = "pynacl-1.6.0-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:10d755cf2a455d8c0f8c767a43d68f24d163b8fe93ccfaabfa7bafd26be58d73"},
+
{file = "pynacl-1.6.0-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:536703b8f90e911294831a7fbcd0c062b837f3ccaa923d92a6254e11178aaf42"},
+
{file = "pynacl-1.6.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:6b08eab48c9669d515a344fb0ef27e2cbde847721e34bba94a343baa0f33f1f4"},
+
{file = "pynacl-1.6.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:5789f016e08e5606803161ba24de01b5a345d24590a80323379fc4408832d290"},
+
{file = "pynacl-1.6.0-cp314-cp314t-win32.whl", hash = "sha256:4853c154dc16ea12f8f3ee4b7e763331876316cc3a9f06aeedf39bcdca8f9995"},
+
{file = "pynacl-1.6.0-cp314-cp314t-win_amd64.whl", hash = "sha256:347dcddce0b4d83ed3f32fd00379c83c425abee5a9d2cd0a2c84871334eaff64"},
+
{file = "pynacl-1.6.0-cp314-cp314t-win_arm64.whl", hash = "sha256:2d6cd56ce4998cb66a6c112fda7b1fdce5266c9f05044fa72972613bef376d15"},
+
{file = "pynacl-1.6.0-cp38-abi3-macosx_10_10_universal2.whl", hash = "sha256:f4b3824920e206b4f52abd7de621ea7a44fd3cb5c8daceb7c3612345dfc54f2e"},
+
{file = "pynacl-1.6.0-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:16dd347cdc8ae0b0f6187a2608c0af1c8b7ecbbe6b4a06bff8253c192f696990"},
+
{file = "pynacl-1.6.0-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:16c60daceee88d04f8d41d0a4004a7ed8d9a5126b997efd2933e08e93a3bd850"},
+
{file = "pynacl-1.6.0-cp38-abi3-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:25720bad35dfac34a2bcdd61d9e08d6bfc6041bebc7751d9c9f2446cf1e77d64"},
+
{file = "pynacl-1.6.0-cp38-abi3-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8bfaa0a28a1ab718bad6239979a5a57a8d1506d0caf2fba17e524dbb409441cf"},
+
{file = "pynacl-1.6.0-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:ef214b90556bb46a485b7da8258e59204c244b1b5b576fb71848819b468c44a7"},
+
{file = "pynacl-1.6.0-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:49c336dd80ea54780bcff6a03ee1a476be1612423010472e60af83452aa0f442"},
+
{file = "pynacl-1.6.0-cp38-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:f3482abf0f9815e7246d461fab597aa179b7524628a4bc36f86a7dc418d2608d"},
+
{file = "pynacl-1.6.0-cp38-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:140373378e34a1f6977e573033d1dd1de88d2a5d90ec6958c9485b2fd9f3eb90"},
+
{file = "pynacl-1.6.0-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:6b393bc5e5a0eb86bb85b533deb2d2c815666665f840a09e0aa3362bb6088736"},
+
{file = "pynacl-1.6.0-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:4a25cfede801f01e54179b8ff9514bd7b5944da560b7040939732d1804d25419"},
+
{file = "pynacl-1.6.0-cp38-abi3-win32.whl", hash = "sha256:dcdeb41c22ff3c66eef5e63049abf7639e0db4edee57ba70531fc1b6b133185d"},
+
{file = "pynacl-1.6.0-cp38-abi3-win_amd64.whl", hash = "sha256:cf831615cc16ba324240de79d925eacae8265b7691412ac6b24221db157f6bd1"},
+
{file = "pynacl-1.6.0-cp38-abi3-win_arm64.whl", hash = "sha256:84709cea8f888e618c21ed9a0efdb1a59cc63141c403db8bf56c469b71ad56f2"},
+
{file = "pynacl-1.6.0.tar.gz", hash = "sha256:cb36deafe6e2bce3b286e5d1f3e1c246e0ccdb8808ddb4550bb2792f2df298f2"},
+
]
+
+
[package.dependencies]
+
cffi = [
+
{version = ">=1.4.1", markers = "platform_python_implementation != \"PyPy\" and python_version < \"3.14\""},
+
{version = ">=2.0.0", markers = "platform_python_implementation != \"PyPy\" and python_version >= \"3.14\""},
+
]
+
+
[package.extras]
+
docs = ["sphinx (<7)", "sphinx_rtd_theme"]
+
tests = ["hypothesis (>=3.27.0)", "pytest (>=7.4.0)", "pytest-cov (>=2.10.1)", "pytest-xdist (>=3.5.0)"]
+
+
[[package]]
+
name = "pytest"
+
version = "8.4.2"
+
description = "pytest: simple powerful testing with Python"
+
optional = false
+
python-versions = ">=3.9"
+
groups = ["dev"]
+
files = [
+
{file = "pytest-8.4.2-py3-none-any.whl", hash = "sha256:872f880de3fc3a5bdc88a11b39c9710c3497a547cfa9320bc3c5e62fbf272e79"},
+
{file = "pytest-8.4.2.tar.gz", hash = "sha256:86c0d0b93306b961d58d62a4db4879f27fe25513d4b969df351abdddb3c30e01"},
+
]
+
+
[package.dependencies]
+
colorama = {version = ">=0.4", markers = "sys_platform == \"win32\""}
+
iniconfig = ">=1"
+
packaging = ">=20"
+
pluggy = ">=1.5,<2"
+
pygments = ">=2.7.2"
+
+
[package.extras]
+
dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "requests", "setuptools", "xmlschema"]
+
+
[[package]]
+
name = "pytest-cov"
+
version = "5.0.0"
+
description = "Pytest plugin for measuring coverage."
+
optional = false
+
python-versions = ">=3.8"
+
groups = ["dev"]
+
files = [
+
{file = "pytest-cov-5.0.0.tar.gz", hash = "sha256:5837b58e9f6ebd335b0f8060eecce69b662415b16dc503883a02f45dfeb14857"},
+
{file = "pytest_cov-5.0.0-py3-none-any.whl", hash = "sha256:4f0764a1219df53214206bf1feea4633c3b558a2925c8b59f144f682861ce652"},
+
]
+
+
[package.dependencies]
+
coverage = {version = ">=5.2.1", extras = ["toml"]}
+
pytest = ">=4.6"
+
+
[package.extras]
+
testing = ["fields", "hunter", "process-tests", "pytest-xdist", "virtualenv"]
+
[[package]]
name = "python-baseconv"
version = "1.2.2"
···
description = "Python HTTP for Humans."
optional = false
python-versions = ">=3.9"
-
groups = ["main"]
+
groups = ["main", "dev"]
files = [
{file = "requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6"},
{file = "requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf"},
···
description = "Backported and Experimental Type Hints for Python 3.9+"
optional = false
python-versions = ">=3.9"
-
groups = ["main"]
+
groups = ["main", "dev"]
files = [
{file = "typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548"},
{file = "typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466"},
···
description = "HTTP library with thread-safe connection pooling, file post, and more."
optional = false
python-versions = ">=3.9"
-
groups = ["main"]
+
groups = ["main", "dev"]
files = [
{file = "urllib3-2.5.0-py3-none-any.whl", hash = "sha256:e6b01673c0fa6a13e374b50871808eb3bf7046c4b125b216f6bf1cc604cff0dc"},
{file = "urllib3-2.5.0.tar.gz", hash = "sha256:3fc47733c7e419d4bc3f6b3dc2b4f890bb743906a30d56ba4a5bfa4bbff92760"},
···
[metadata]
lock-version = "2.1"
python-versions = ">=3.13"
-
content-hash = "4919ab150fee9e4e358e57bada62225cb2d92c52509b26169db269691b86cefe"
+
content-hash = "4124a2c3969985b1847d0e2ebf72c3bbe32c3a6fa870a0424c7c0e51ebb6e7f5"
+7
src/atpasser/model/converter.py
···
UnknownModel, RecordModel, QueryModel,
ProcedureModel, SubscriptionModel
)
+
from .types.binary import BytesModel, CidLinkModel
+
from .blob import BlobModel
class LexiconConverter:
"""
···
"integer": IntegerModel,
"string": StringModel,
+
# Binary types
+
"bytes": BytesModel,
+
"cid-link": CidLinkModel,
+
"blob": BlobModel,
+
# Complex types
"array": ArrayModel,
"object": ObjectModel,
+214
src/atpasser/model/types/complex.py
···
+
from typing import Any
+
from pydantic import field_validator
+
from ..base import DataModel
+
+
class ArrayModel(DataModel):
    """
    Model for AT Protocol array type.

    Represents an array of elements with support for item schema definition,
    minimum/maximum length constraints as specified in Lexicon.
    """

    items: Any
    """Schema definition for array elements"""

    minLength: int | None = None
    """Minimum number of elements"""

    maxLength: int | None = None
    """Maximum number of elements"""

    value: list[Any]
    """Array values"""

    def __init__(self, **data: Any) -> None:
        """
        Initialize array model and enforce length constraints.

        The length checks run here rather than in the field validator:
        pydantic v2 removes fields from the class namespace, so per-instance
        values such as ``minLength`` are not available as ``cls`` attributes
        and can only be read from ``self`` after ``super().__init__``.

        Args:
            **data: Input data containing array values

        Raises:
            ValueError: If the value is not a list or violates the
                minLength/maxLength constraints
        """
        super().__init__(**data)
        if self.minLength is not None and len(self.value) < self.minLength:
            raise ValueError(f"Array must have at least {self.minLength} items")
        if self.maxLength is not None and len(self.value) > self.maxLength:
            raise ValueError(f"Array must have at most {self.maxLength} items")

    @field_validator("value", mode="before")
    def validate_array(cls, v: Any) -> list[Any]:
        """
        Validate that the raw input is a list.

        Args:
            v: Value to validate

        Returns:
            Validated array

        Raises:
            ValueError: If value is not a list
        """
        if not isinstance(v, list):
            raise ValueError("Value must be an array")
        return v
+
+
class ObjectModel(DataModel):
    """
    Model for AT Protocol object type.

    Represents a generic object schema with properties definitions,
    required fields and nullable fields as specified in Lexicon.
    """

    properties: dict[str, Any]
    """Map of property names to their schema definitions"""

    required: list[str] | None = None
    """List of required property names"""

    nullable: list[str] | None = None
    """List of properties that can be null"""

    value: dict[str, Any]
    """Object property values"""

    def __init__(self, **data: Any) -> None:
        """
        Initialize object model and enforce required/nullable constraints.

        The checks run here rather than in the field validator: pydantic v2
        does not expose per-instance field values (``required``, ``nullable``)
        as class attributes, so they are only available on ``self`` after
        ``super().__init__`` has populated the model.

        Args:
            **data: Input data containing object properties

        Raises:
            ValueError: If a required field is missing or a non-nullable
                field is None
        """
        super().__init__(**data)
        if self.required:
            for field in self.required:
                if field not in self.value:
                    raise ValueError(f"Missing required field: {field}")
        if self.nullable:
            for field, item in self.value.items():
                if field not in self.nullable and item is None:
                    raise ValueError(f"Field {field} cannot be null")

    @field_validator("value", mode="before")
    def validate_object(cls, v: Any) -> dict[str, Any]:
        """
        Validate that the raw input is an object (dict).

        Args:
            v: Value to validate

        Returns:
            Validated object

        Raises:
            ValueError: If value is not a dict
        """
        if not isinstance(v, dict):
            raise ValueError("Value must be an object")
        return v
+
+
class ParamsModel(DataModel):
    """
    Model for AT Protocol params type.

    Specialized for HTTP query parameters with support for boolean,
    integer, string and unknown types as specified in Lexicon.
    """

    required: list[str] | None = None
    """List of required parameter names"""

    properties: dict[str, Any]
    """Map of parameter names to their schema definitions"""

    value: dict[str, Any]
    """Parameter values

    Supported types:
    - boolean
    - integer
    - string
    - array (of boolean/integer/string/unknown)
    - unknown (object)
    """

    def __init__(self, **data: Any) -> None:
        """
        Initialize params model and enforce schema constraints.

        Required/type checks run here rather than in the field validator:
        pydantic v2 does not expose per-instance field values (``required``,
        ``properties``) as class attributes, so they are only available on
        ``self`` once ``super().__init__`` has populated the model.

        Args:
            **data: Input data containing parameter values

        Raises:
            ValueError: If parameters violate constraints
        """
        super().__init__(**data)
        if self.required:
            for param in self.required:
                if param not in self.value:
                    raise ValueError(f"Missing required parameter: {param}")
        for param, item in self.value.items():
            if param in self.properties:
                self._check_param(param, self.properties[param], item)

    @classmethod
    def _check_param(cls, param: str, schema: dict[str, Any], value: Any) -> None:
        """Validate a single parameter value against its schema definition."""
        prop_type = schema.get("type")
        if prop_type == "boolean" and not isinstance(value, bool):
            raise ValueError(f"Parameter {param} must be boolean")
        elif prop_type == "integer" and not isinstance(value, int):
            raise ValueError(f"Parameter {param} must be integer")
        elif prop_type == "string" and not isinstance(value, str):
            raise ValueError(f"Parameter {param} must be string")
        elif prop_type == "array":
            if not isinstance(value, list):
                raise ValueError(f"Parameter {param} must be array")
            # Validate array items if an item schema is specified
            if "items" in schema:
                item_type = schema["items"].get("type")
                for item in value:
                    cls._check_array_item(param, item_type, item)
        elif prop_type == "unknown" and not isinstance(value, dict):
            raise ValueError(f"Parameter {param} must be object")

    @classmethod
    def _check_array_item(cls, param: str, item_type: str | None, item: Any) -> None:
        """Validate one array element against the declared item type."""
        if item_type == "boolean" and not isinstance(item, bool):
            raise ValueError(f"Array item in {param} must be boolean")
        elif item_type == "integer" and not isinstance(item, int):
            raise ValueError(f"Array item in {param} must be integer")
        elif item_type == "string" and not isinstance(item, str):
            raise ValueError(f"Array item in {param} must be string")
        elif item_type == "unknown" and not isinstance(item, dict):
            raise ValueError(f"Array item in {param} must be object")

    @field_validator("value", mode="before")
    def validate_params(cls, v: Any) -> dict[str, Any]:
        """
        Validate that the raw input is a dictionary of parameters.

        Args:
            v: Value to validate

        Returns:
            Validated parameters

        Raises:
            ValueError: If value is not a dict
        """
        if not isinstance(v, dict):
            raise ValueError("Value must be a dictionary of parameters")
        return v
+172
src/atpasser/model/types/primitive.py
···
+
from typing import Any
+
from pydantic import field_validator
+
from ..base import DataModel
+
+
class NullModel(DataModel):
    """
    Model for AT Protocol null type.

    Represents a null value in AT Protocol data model. This model ensures proper
    serialization and validation of null values according to Lexicon specification.
    """

    value: None = None
    """Always None for null type"""

    def __init__(self, **data: Any) -> None:
        """
        Initialize null model, rejecting any non-null input.

        Args:
            **data: Input data (must be empty or contain only None values)

        Raises:
            ValueError: If non-null value is provided
        """
        has_non_null = any(item is not None for item in data.values())
        if has_non_null:
            raise ValueError("NullModel only accepts None values")
        super().__init__(**data)

    @field_validator("*", mode="before")
    def validate_null(cls, v: Any) -> None:
        """
        Ensure every provided field value is null.

        Args:
            v: Value to validate

        Returns:
            None if validation succeeds

        Raises:
            ValueError: If value is not null
        """
        if v is None:
            return None
        raise ValueError("NullModel only accepts None values")
+
+
class BooleanModel(DataModel):
    """
    Model for AT Protocol boolean type.

    Represents a boolean value in AT Protocol data model with support for
    default values and constants as specified in Lexicon.
    """

    value: bool
    """Boolean value"""

    default: bool | None = None
    """Default value if not provided"""

    const: bool | None = None
    """Fixed constant value if specified"""

    def __init__(self, **data: Any) -> None:
        """
        Initialize boolean model and enforce the const constraint.

        Args:
            **data: Input data containing boolean value

        Raises:
            ValueError: If value doesn't match const or is not boolean
        """
        super().__init__(**data)
        if self.const is not None and self.value != self.const:
            raise ValueError(f"Boolean value must be {self.const}")

    @field_validator("value", mode="before")
    def validate_boolean(cls, v: Any) -> bool:
        """
        Coerce input to a boolean.

        Accepts real booleans and the strings "true"/"false"/"1"/"0"
        (case-insensitive); everything else is rejected.

        Args:
            v: Value to validate

        Returns:
            Validated boolean value

        Raises:
            ValueError: If value cannot be converted to boolean
        """
        if isinstance(v, bool):
            return v
        if isinstance(v, str):
            lowered = v.lower()
            if lowered in ("true", "1"):
                return True
            if lowered in ("false", "0"):
                return False
        raise ValueError("Value must be a boolean")
+
+
class IntegerModel(DataModel):
    """
    Model for AT Protocol integer type.

    Represents a signed integer number with support for minimum/maximum values,
    enumeration sets, default values and constraints as specified in Lexicon.
    """

    value: int
    """Integer value"""

    minimum: int | None = None
    """Minimum acceptable value"""

    maximum: int | None = None
    """Maximum acceptable value"""

    enum: list[int] | None = None
    """Closed set of allowed values"""

    default: int | None = None
    """Default value if not provided"""

    const: int | None = None
    """Fixed constant value if specified"""

    def __init__(self, **data: Any) -> None:
        """
        Initialize integer model and enforce constraints.

        The const/enum/minimum/maximum checks run here rather than in the
        field validator: pydantic v2 does not expose per-instance field
        values as class attributes inside validators, so they can only be
        read from ``self`` after ``super().__init__``.

        Args:
            **data: Input data containing integer value

        Raises:
            ValueError: If value violates constraints
        """
        super().__init__(**data)
        if self.const is not None and self.value != self.const:
            raise ValueError(f"Integer value must be {self.const}")
        if self.enum and self.value not in self.enum:
            raise ValueError(f"Value must be one of {self.enum}")
        if self.minimum is not None and self.value < self.minimum:
            raise ValueError(f"Value must be >= {self.minimum}")
        if self.maximum is not None and self.value > self.maximum:
            raise ValueError(f"Value must be <= {self.maximum}")

    @field_validator("value", mode="before")
    def validate_integer(cls, v: Any) -> int:
        """
        Coerce input to an integer.

        Args:
            v: Value to validate

        Returns:
            Validated integer value

        Raises:
            ValueError: If value cannot be converted to an integer
        """
        if not isinstance(v, int):
            try:
                v = int(v)
            except (TypeError, ValueError):
                raise ValueError("Value must be an integer")
        return v
+131
src/atpasser/model/types/reference.py
···
+
from typing import Any
+
from pydantic import field_validator
+
from ..base import DataModel
+
+
class TokenModel(DataModel):
    """
    Model for AT Protocol token type.

    Represents empty data values which exist only to be referenced by name.
    Tokens encode as string data with the string being the fully-qualified
    reference to the token itself (NSID followed by optional fragment).
    """

    name: str
    """Token name/identifier"""

    description: str | None = None
    """Description clarifying the meaning of the token"""

    def __init__(self, **data: Any) -> None:
        """
        Initialize token model.

        Args:
            **data: Input data containing token name
        """
        super().__init__(**data)

    @field_validator("name")
    def validate_name(cls, v: str) -> str:
        """
        Reject token names containing any whitespace character.

        Args:
            v: Name to validate

        Returns:
            Validated name

        Raises:
            ValueError: If name contains whitespace
        """
        if any(map(str.isspace, v)):
            raise ValueError("Token name must not contain whitespace")
        return v
+
+
class RefModel(DataModel):
    """
    Model for AT Protocol ref type.

    Represents a reference to another schema definition, either globally
    (using NSID) or locally (using #-delimited name).
    """

    ref: str
    """Reference to schema definition (NSID or #name)"""

    description: str | None = None
    """Description of the reference"""

    def __init__(self, **data: Any) -> None:
        """
        Initialize reference model.

        Args:
            **data: Input data containing reference
        """
        super().__init__(**data)

    @field_validator("ref")
    def validate_ref(cls, v: str) -> str:
        """
        Reject empty references.

        Args:
            v: Reference to validate

        Returns:
            Validated reference

        Raises:
            ValueError: If reference is empty
        """
        if v:
            return v
        raise ValueError("Reference cannot be empty")
+
+
class UnionModel(DataModel):
    """
    Model for AT Protocol union type.

    Represents that multiple possible types could be present at a location.
    The references follow the same syntax as `ref`, allowing references to
    both global or local schema definitions.
    """

    refs: list[str]
    """References to schema definitions"""

    closed: bool = False
    """Indicates if union is open (can be extended) or closed"""

    description: str | None = None
    """Description of the union"""

    def __init__(self, **data: Any) -> None:
        """
        Initialize union model and enforce the closed-union constraint.

        The check runs here rather than in a ``refs`` field validator for
        two reasons: pydantic v2 does not expose per-instance field values
        such as ``closed`` as class attributes, and ``closed`` is declared
        after ``refs`` so it would not be validated yet when a ``refs``
        validator runs.

        Args:
            **data: Input data containing union references

        Raises:
            ValueError: If the union is closed and has no references
        """
        super().__init__(**data)
        if self.closed and not self.refs:
            raise ValueError("Closed union must have at least one reference")
+323
src/atpasser/model/types/special.py
···
+
from typing import Any
+
from pydantic import field_validator
+
from ..base import DataModel
+
+
class UnknownModel(DataModel):
    """
    Model for AT Protocol unknown type.

    Indicates that any data object could appear at this location,
    with no specific validation. The top-level data must be an object.
    """

    description: str | None = None
    """Description of the unknown type usage"""

    def __init__(self, **data: Any) -> None:
        """
        Initialize unknown model.

        Args:
            **data: Input data containing unknown object
        """
        super().__init__(**data)

    @field_validator("*", mode="before")
    def validate_unknown(cls, v: Any, info: Any) -> Any:
        """
        Validate unknown data is an object.

        The ``description`` field is exempt: it is plain string metadata,
        and the wildcard validator would otherwise reject any provided
        description because a str is not a dict.

        Args:
            v: Value to validate
            info: Pydantic validation info (supplies the field name)

        Returns:
            Validated value

        Raises:
            ValueError: If a non-description value is not an object
        """
        if info.field_name == "description":
            return v
        if not isinstance(v, dict):
            raise ValueError("Unknown type must be an object")
        return v
+
+
class RecordModel(DataModel):
    """
    Model for AT Protocol record type.

    Describes an object that can be stored in a repository record.
    Records must include a $type field indicating their schema.
    """

    key: str
    """Specifies the Record Key type"""

    record: dict[str, Any]
    """Schema definition with type 'object'"""

    type: str
    """Lexicon schema type identifier"""

    def __init__(self, **data: Any) -> None:
        """
        Initialize record model, mapping ``$type`` onto ``type``.

        Args:
            **data: Input data containing record values

        Raises:
            ValueError: If record is missing required fields
        """
        # Lexicon records carry their schema under "$type"; move it to the
        # plain "type" field so pydantic can validate it.
        schema_type = data.pop("$type", None)
        if schema_type:
            data["type"] = schema_type
        super().__init__(**data)

    @field_validator("type")
    def validate_type(cls, v: str) -> str:
        """
        Reject empty record types.

        Args:
            v: Type value to validate

        Returns:
            Validated type

        Raises:
            ValueError: If type is empty
        """
        if v:
            return v
        raise ValueError("Record must have a type")

    @field_validator("record", mode="before")
    def validate_record(cls, v: Any) -> dict[str, Any]:
        """
        Ensure the record schema is an object.

        Args:
            v: Record value to validate

        Returns:
            Validated record

        Raises:
            ValueError: If record is not an object
        """
        if isinstance(v, dict):
            return v
        raise ValueError("Record must be an object")
+
+
class QueryModel(DataModel):
    """
    Model for AT Protocol query type.

    Describes an XRPC Query endpoint (HTTP GET) with support for
    parameters, output schema and error responses.
    """

    parameters: dict[str, Any] | None = None
    """HTTP query parameters schema"""

    output: dict[str, Any] | None = None
    """HTTP response body schema"""

    errors: list[dict[str, str]] | None = None
    """Possible error responses"""

    def __init__(self, **data: Any) -> None:
        """
        Initialize query model with validation.

        Args:
            **data: Input data containing query definition
        """
        super().__init__(**data)

    @field_validator("output")
    def validate_output(cls, v: dict[str, Any] | None) -> dict[str, Any] | None:
        """
        Require a non-empty output schema to declare its encoding.

        Args:
            v: Output schema to validate

        Returns:
            Validated output schema

        Raises:
            ValueError: If output schema lacks an encoding
        """
        if not v:
            return v
        if "encoding" not in v:
            raise ValueError("Output must specify encoding")
        return v

    @field_validator("errors")
    def validate_errors(cls, v: list[dict[str, str]] | None) -> list[dict[str, str]] | None:
        """
        Require every declared error to carry a name.

        Args:
            v: Error definitions to validate

        Returns:
            Validated error definitions

        Raises:
            ValueError: If any error definition lacks a name
        """
        for error in v or []:
            if "name" not in error:
                raise ValueError("Error must have a name")
        return v
+
+
class ProcedureModel(DataModel):
    """
    Model for AT Protocol procedure type.

    Describes an XRPC Procedure endpoint (HTTP POST) with support for
    parameters, input/output schemas and error responses.
    """

    parameters: dict[str, Any] | None = None
    """HTTP query parameters schema"""

    input: dict[str, Any] | None = None
    """HTTP request body schema"""

    output: dict[str, Any] | None = None
    """HTTP response body schema"""

    errors: list[dict[str, str]] | None = None
    """Possible error responses"""

    def __init__(self, **data: Any) -> None:
        """
        Initialize procedure model with validation.

        Args:
            **data: Input data containing procedure definition
        """
        super().__init__(**data)

    @field_validator("input")
    def validate_input(cls, v: dict[str, Any] | None) -> dict[str, Any] | None:
        """
        Require a non-empty input schema to declare its encoding.

        Args:
            v: Input schema to validate

        Returns:
            Validated input schema

        Raises:
            ValueError: If input schema lacks an encoding
        """
        if not v:
            return v
        if "encoding" not in v:
            raise ValueError("Input must specify encoding")
        return v

    @field_validator("output")
    def validate_output(cls, v: dict[str, Any] | None) -> dict[str, Any] | None:
        """
        Require a non-empty output schema to declare its encoding.

        Args:
            v: Output schema to validate

        Returns:
            Validated output schema

        Raises:
            ValueError: If output schema lacks an encoding
        """
        if not v:
            return v
        if "encoding" not in v:
            raise ValueError("Output must specify encoding")
        return v

    @field_validator("errors")
    def validate_errors(cls, v: list[dict[str, str]] | None) -> list[dict[str, str]] | None:
        """
        Require every declared error to carry a name.

        Args:
            v: Error definitions to validate

        Returns:
            Validated error definitions

        Raises:
            ValueError: If any error definition lacks a name
        """
        for error in v or []:
            if "name" not in error:
                raise ValueError("Error must have a name")
        return v
+
+
class SubscriptionModel(DataModel):
    """
    Model for AT Protocol subscription type.

    Describes an Event Stream (WebSocket) with support for parameters,
    message schemas and error responses.
    """

    parameters: dict[str, Any] | None = None
    """HTTP query parameters schema"""

    message: dict[str, Any] | None = None
    """Specifies what messages can be"""

    errors: list[dict[str, str]] | None = None
    """Possible error responses"""

    def __init__(self, **data: Any) -> None:
        """
        Initialize subscription model with validation.

        Args:
            **data: Input data containing subscription definition
        """
        super().__init__(**data)

    @field_validator("message")
    def validate_message(cls, v: dict[str, Any] | None) -> dict[str, Any] | None:
        """
        Require a non-empty message definition to declare its schema.

        Args:
            v: Message schema to validate

        Returns:
            Validated message schema

        Raises:
            ValueError: If message definition lacks a schema
        """
        if not v:
            return v
        if "schema" not in v:
            raise ValueError("Message must specify schema")
        return v

    @field_validator("errors")
    def validate_errors(cls, v: list[dict[str, str]] | None) -> list[dict[str, str]] | None:
        """
        Require every declared error to carry a name.

        Args:
            v: Error definitions to validate

        Returns:
            Validated error definitions

        Raises:
            ValueError: If any error definition lacks a name
        """
        for error in v or []:
            if "name" not in error:
                raise ValueError("Error must have a name")
        return v
+249
src/atpasser/model/types/string.py
···
+
from typing import Any
+
import re
+
from datetime import datetime
+
from pydantic import field_validator
+
from ..base import DataModel
+
+
class StringModel(DataModel):
    """
    Model for AT Protocol string type.

    Represents a Unicode string with support for format restrictions, length limits,
    known values, enumeration sets, default values and constants as specified in Lexicon.
    """

    value: str
    """String value"""

    format: str | None = None
    """String format restriction (e.g. 'datetime', 'uri')"""

    maxLength: int | None = None
    """Maximum length in UTF-8 bytes"""

    minLength: int | None = None
    """Minimum length in UTF-8 bytes"""

    knownValues: list[str] | None = None
    """Suggested/common values (not enforced)"""

    enum: list[str] | None = None
    """Closed set of allowed values"""

    default: str | None = None
    """Default value if not provided"""

    const: str | None = None
    """Fixed constant value if specified"""

    def __init__(self, **data: Any) -> None:
        """
        Initialize string model and enforce constraints.

        Length, enum, const and format checks run here rather than in the
        field validator: pydantic v2 does not expose per-instance field
        values (``minLength``, ``format``, ...) as class attributes inside
        validators, so they can only be read from ``self`` after
        ``super().__init__`` has populated the model.

        Args:
            **data: Input data containing string value

        Raises:
            ValueError: If value violates constraints
        """
        super().__init__(**data)
        if self.const is not None and self.value != self.const:
            raise ValueError(f"String value must be {self.const}")

        # Lexicon measures string limits in UTF-8 bytes, not code points.
        byte_length = len(self.value.encode())
        if self.minLength is not None and byte_length < self.minLength:
            raise ValueError(f"String must be at least {self.minLength} bytes")
        if self.maxLength is not None and byte_length > self.maxLength:
            raise ValueError(f"String must be at most {self.maxLength} bytes")

        if self.enum and self.value not in self.enum:
            raise ValueError(f"Value must be one of {self.enum}")

        if self.format:
            self._validate_format(self.format, self.value)

    @field_validator("value", mode="before")
    def validate_string(cls, v: Any) -> str:
        """
        Coerce input to str; constraint checks happen in ``__init__``.

        Args:
            v: Value to validate

        Returns:
            Validated string value
        """
        if not isinstance(v, str):
            v = str(v)
        return v

    @classmethod
    def _validate_format(cls, fmt: str, v: str) -> None:
        """Dispatch to the format-specific validator, if one exists."""
        if fmt == "datetime":
            cls._validate_datetime(v)
        elif fmt == "uri":
            cls._validate_uri(v)
        elif fmt == "did":
            cls._validate_did(v)
        elif fmt == "handle":
            cls._validate_handle(v)
        elif fmt == "at-identifier":
            cls._validate_at_identifier(v)
        elif fmt == "at-uri":
            cls._validate_at_uri(v)
        elif fmt == "cid":
            cls._validate_cid(v)
        elif fmt == "nsid":
            cls._validate_nsid(v)
        elif fmt == "tid":
            cls._validate_tid(v)
        elif fmt == "record-key":
            cls._validate_record_key(v)
        elif fmt == "language":
            cls._validate_language(v)

    @classmethod
    def _validate_datetime(cls, v: str) -> None:
        """Validate RFC 3339 datetime format"""
        try:
            # fromisoformat on older Pythons rejects a trailing "Z";
            # normalize it to an explicit UTC offset first.
            datetime.fromisoformat(v.replace("Z", "+00:00"))
        except ValueError:
            raise ValueError("Invalid datetime format, must be RFC 3339")

    @classmethod
    def _validate_uri(cls, v: str) -> None:
        """Validate URI format"""
        if len(v) > 8192:  # 8KB max
            raise ValueError("URI too long, max 8KB")
        if not re.match(r"^[a-zA-Z][a-zA-Z0-9+.-]*:.+", v):
            raise ValueError("Invalid URI format")

    @classmethod
    def _validate_did(cls, v: str) -> None:
        """Validate DID format"""
        if len(v) > 2048:
            raise ValueError("DID too long, max 2048 chars")
        if not re.match(r"^did:[a-z]+:[a-zA-Z0-9._:%-]*[a-zA-Z0-9._-]$", v):
            # Message previously said "Invalid URI format" (copy-paste slip).
            raise ValueError("Invalid DID format")

    @classmethod
    def _validate_handle(cls, v: str) -> None:
        """Validate handle format"""
        if not re.match(r"^([a-zA-Z0-9]([a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?\.)+[a-zA-Z]([a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?$", v):
            raise ValueError("Handle contains invalid characters")
        if len(v) > 253:
            raise ValueError("Handle too long, max 253 chars")

    @classmethod
    def _validate_at_identifier(cls, v: str) -> None:
        """Validate at-identifier format (DID or handle)"""
        try:
            if v.startswith("did:"):
                cls._validate_did(v)
            else:
                cls._validate_handle(v)
        except ValueError as e:
            raise ValueError(f"Invalid at-identifier: {e}")

    @classmethod
    def _validate_at_uri(cls, v: str) -> None:
        """
        Validate AT-URI format according to AT Protocol specification.

        Args:
            v: AT-URI string to validate

        Raises:
            ValueError: If URI violates any of these rules:
                - Must start with 'at://'
                - Max length 8KB
                - No trailing slash
                - Authority must be valid DID or handle
                - Path segments must follow NSID/RKEY rules if present
        """
        if not v.startswith("at://"):
            raise ValueError("AT-URI must start with 'at://'")
        if len(v) > 8192:  # 8KB
            raise ValueError("AT-URI too long, max 8KB")
        if v.endswith('/'):
            raise ValueError("AT-URI cannot have trailing slash")

        # Split into parts
        parts = v[5:].split('/')  # Skip 'at://'
        authority = parts[0]

        # Validate authority (DID or handle)
        if not authority:
            raise ValueError("AT-URI must have authority")

        if authority.startswith('did:'):
            # Basic DID format check - actual DID validation is done elsewhere
            if len(authority) > 2048:
                raise ValueError("DID too long")
            if ':' not in authority[4:]:
                raise ValueError("Invalid DID format")
        else:
            # Handle validation
            if not re.match(r'^[a-z0-9.-]+$', authority):
                raise ValueError("Invalid handle characters")
            if len(authority) > 253:
                raise ValueError("Handle too long")

        # Validate path segments if present
        if len(parts) > 1:
            if len(parts) > 3:
                raise ValueError("AT-URI path too deep")

            collection = parts[1]
            if not re.match(r'^[a-zA-Z0-9.-]+$', collection):
                raise ValueError("Invalid collection NSID")

            if len(parts) > 2:
                rkey = parts[2]
                if not rkey:
                    raise ValueError("Record key cannot be empty")
                # "-" must be last in the class: the old "%-~" formed a range
                # covering nearly all printable ASCII, accepting invalid keys.
                if not re.match(r'^[a-zA-Z0-9._:%~-]+$', rkey):
                    raise ValueError("Invalid record key characters")

    @classmethod
    def _validate_cid(cls, v: str) -> None:
        """Validate CID string format"""
        if len(v) > 100:
            raise ValueError("CID too long, max 100 chars")
        if not re.match(r"^[a-zA-Z0-9]+$", v):
            raise ValueError("CID contains invalid characters")

    @classmethod
    def _validate_nsid(cls, v: str) -> None:
        """Validate NSID format"""
        if len(v) > 317:
            raise ValueError("NSID too long, max 317 chars")
        if not re.match(r"^[a-zA-Z]([a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?(\.[a-zA-Z0-9]([a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?)+(\.[a-zA-Z]([a-zA-Z0-9]{0,62})?)$", v):
            raise ValueError("NSID contains invalid characters")

    @classmethod
    def _validate_tid(cls, v: str) -> None:
        """Validate TID format"""
        if len(v) > 13:
            raise ValueError("TID too long, max 13 chars")
        if not re.match(r"^[234567abcdefghij][234567abcdefghijklmnopqrstuvwxyz]{12}$", v):
            raise ValueError("TID contains invalid characters")

    @classmethod
    def _validate_record_key(cls, v: str) -> None:
        """Validate record-key format"""
        if len(v) > 512:
            raise ValueError("Record key too long, max 512 chars")
        if v == "." or v == "..":
            raise ValueError(f"Record key is {v}, which is not allowed")
        # "-" must be last in the class: the old "%-~" formed a character
        # range (0x25-0x7E) that accepted almost any printable ASCII.
        if not re.match(r"^[a-zA-Z0-9._:%~-]+$", v):
            raise ValueError("Record key contains invalid characters")

    @classmethod
    def _validate_language(cls, v: str) -> None:
        """Validate BCP 47 language tag"""
        if not re.match(r"^[a-zA-Z]{1,8}(-[a-zA-Z0-9]{1,8})*$", v):
            raise ValueError("Invalid language tag format")