use super::LexiconSource;
use crate::lexicon::LexiconDoc;
use jacquard_common::IntoStatic;
use miette::{Result, miette};
use serde::{Deserialize, Serialize};
use serde_json::Value;
use std::collections::{HashMap, HashSet};

/// Fetches lexicon schema records from a named slice on the
/// slices.network XRPC API.
#[derive(Debug, Clone)]
pub struct SlicesSource {
    pub slice: String,
}

#[derive(Serialize)]
struct GetRecordsRequest {
    slice: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    limit: Option<u32>,
    #[serde(skip_serializing_if = "Option::is_none")]
    cursor: Option<String>,
}

#[derive(Deserialize)]
struct GetRecordsResponse {
    records: Vec<Value>,
    #[serde(default)]
    cursor: Option<String>,
}

impl LexiconSource for SlicesSource {
    async fn fetch(&self) -> Result<HashMap<String, LexiconDoc<'_>>> {
        let client = reqwest::Client::new();
        let base_url = "https://api.slices.network/xrpc";
        let endpoint = format!("{}/com.atproto.lexicon.schema.getRecords", base_url);

        let mut lexicons = HashMap::new();
        let mut cursor: Option<String> = None;
        let mut total_fetched = 0;
        let mut failed_nsids = HashSet::new();
        let mut page_count = 0;
        const MAX_PAGES: usize = 200; // Safety limit against runaway pagination

        loop {
            page_count += 1;
            if page_count > MAX_PAGES {
                eprintln!(
                    "Warning: Hit max page limit ({}) for slices source",
                    MAX_PAGES
                );
                break;
            }

            let req_body = GetRecordsRequest {
                slice: self.slice.clone(),
                limit: Some(100),
                cursor: cursor.clone(),
            };

            let resp = client
                .post(&endpoint)
                .json(&req_body)
                .send()
                .await
                .map_err(|e| miette!("Failed to fetch from slices API: {}", e))?;

            if !resp.status().is_success() {
                let status = resp.status();
                let body = resp.text().await.unwrap_or_default();
                return Err(miette!("Slices API returned error {}: {}", status, body));
            }

            let response: GetRecordsResponse = resp
                .json()
                .await
                .map_err(|e| miette!("Failed to parse response: {}", e))?;

            total_fetched += response.records.len();

            for record_data in &response.records {
                if let Some(doc) = Self::parse_lexicon_record(record_data, &mut failed_nsids) {
                    lexicons.insert(doc.id.to_string(), doc);
                }
            }

            let new_cursor = response.cursor;

            // An empty page means there is nothing left to fetch.
            if response.records.is_empty() {
                break;
            }

            // The same cursor twice means the API is looping; stop paginating.
            if new_cursor == cursor {
                eprintln!("Warning: Slices API returned same cursor, stopping pagination");
                break;
            }

            cursor = new_cursor;
            if cursor.is_none() {
                break;
            }
        }

        if !failed_nsids.is_empty() {
            eprintln!(
                "Warning: Failed to parse {} out of {} lexicons from slices",
                failed_nsids.len(),
                total_fetched
            );
        }

        Ok(lexicons)
    }
}

impl SlicesSource {
    fn parse_lexicon_record(
        record_data: &Value,
        failed_nsids: &mut HashSet<String>,
    ) -> Option<LexiconDoc<'static>> {
        // Extract the 'value' field from the record.
        let value = record_data.get("value")?;

        // Round-trip through a JSON string so the borrowed `LexiconDoc` can
        // be detached into an owned `LexiconDoc<'static>` via `into_static`.
        let doc = serde_json::to_string(value).ok().and_then(|json| {
            serde_json::from_str::<LexiconDoc>(&json)
                .ok()
                .map(|doc| doc.into_static())
        });

        // Track the failed NSID so the caller can report a summary.
        if doc.is_none() {
            if let Some(Value::String(id)) = value.get("id") {
                failed_nsids.insert(id.clone());
            }
        }

        doc
    }
}
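
// --- Added sketch (not part of the original file) ---
// A minimal unit test for `parse_lexicon_record`. The record shape below is
// inferred from the fields this module actually reads (`value`, `value.id`);
// it assumes `LexiconDoc` rejects a value that has an `id` but is missing
// the other required lexicon fields.
#[cfg(test)]
mod parse_sketch {
    use super::*;
    use serde_json::json;

    #[test]
    fn failed_parse_tracks_nsid() {
        let mut failed = HashSet::new();
        // `value` is present but is not a complete lexicon document, so
        // parsing fails and the NSID is recorded for the warning summary.
        let record = json!({
            "value": { "id": "com.example.broken" }
        });
        assert!(SlicesSource::parse_lexicon_record(&record, &mut failed).is_none());
        assert!(failed.contains("com.example.broken"));
    }
}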
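
// --- Added sketch (not part of the original file) ---
// A rough end-to-end usage sketch. It assumes tokio is available as the
// async test runtime, and the slice name is hypothetical; it is marked
// #[ignore] because it would hit the live slices.network API.
#[cfg(test)]
mod fetch_sketch {
    use super::*;

    #[tokio::test]
    #[ignore = "hits the live slices.network API"]
    async fn fetch_all_lexicons_in_slice() -> Result<()> {
        let source = SlicesSource {
            slice: "com.example.my-slice".to_string(), // hypothetical slice name
        };
        // `fetch` pages through getRecords until the cursor runs out,
        // returning one `LexiconDoc` per NSID.
        let lexicons = source.fetch().await?;
        for nsid in lexicons.keys() {
            println!("fetched lexicon: {nsid}");
        }
        Ok(())
    }
}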