A better Rust ATProto crate
use crate::corpus::LexiconCorpus;
use crate::error::{CodegenError, Result};
use crate::lexicon::{
    LexArrayItem, LexInteger, LexObject, LexObjectProperty, LexRecord, LexString, LexStringFormat,
    LexUserType, LexXrpcBody, LexXrpcBodySchema, LexXrpcError, LexXrpcProcedure, LexXrpcQuery,
    LexXrpcSubscription, LexXrpcSubscriptionMessageSchema,
};
use heck::{ToPascalCase, ToSnakeCase};
use proc_macro2::TokenStream;
use quote::quote;

/// Convert a value string to a valid Rust variant name
fn value_to_variant_name(value: &str) -> String {
    // Remove leading special chars and convert to pascal case
    let clean = value.trim_start_matches(|c: char| !c.is_alphanumeric());
    let variant = clean.replace('-', "_").to_pascal_case();

    // Prefix with underscore if starts with digit
    if variant.chars().next().map_or(false, |c| c.is_ascii_digit()) {
        format!("_{}", variant)
    } else if variant.is_empty() {
        "Unknown".to_string()
    } else {
        variant
    }
}
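
// A minimal sketch of the mapping above (hypothetical assertions, shown as comments
// only so they do not affect the crate):
//
//     assert_eq!(value_to_variant_name("!no-unauthenticated"), "NoUnauthenticated");
//     assert_eq!(value_to_variant_name(""), "Unknown");
//
// Values that still begin with a digit after cleaning get a leading underscore.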

/// Create an identifier, using raw identifier if necessary for keywords
fn make_ident(s: &str) -> syn::Ident {
    syn::parse_str::<syn::Ident>(s)
        .unwrap_or_else(|_| syn::Ident::new_raw(s, proc_macro2::Span::call_site()))
}

/// Code generator for lexicon types
pub struct CodeGenerator<'c> {
    corpus: &'c LexiconCorpus,
    root_module: String,
}

impl<'c> CodeGenerator<'c> {
    /// Create a new code generator
    pub fn new(corpus: &'c LexiconCorpus, root_module: impl Into<String>) -> Self {
        Self {
            corpus,
            root_module: root_module.into(),
        }
    }

    /// Generate code for a lexicon def
    pub fn generate_def(
        &self,
        nsid: &str,
        def_name: &str,
        def: &LexUserType<'static>,
    ) -> Result<TokenStream> {
        match def {
            LexUserType::Record(record) => self.generate_record(nsid, def_name, record),
            LexUserType::Object(obj) => self.generate_object(nsid, def_name, obj),
            LexUserType::XrpcQuery(query) => self.generate_query(nsid, def_name, query),
            LexUserType::XrpcProcedure(proc) => self.generate_procedure(nsid, def_name, proc),
            LexUserType::Token(_) => {
                // Token types are marker types used in knownValues enums.
                // We don't generate anything for them - the knownValues enum
                // is the actual type that gets used.
                Ok(quote! {})
            }
            LexUserType::String(s) if s.known_values.is_some() => {
                self.generate_known_values_enum(nsid, def_name, s)
            }
            LexUserType::String(s) => {
                // Plain string type alias
                let type_name = self.def_to_type_name(nsid, def_name);
                let ident = syn::Ident::new(&type_name, proc_macro2::Span::call_site());
                let rust_type = self.string_to_rust_type(s);
                let doc = self.generate_doc_comment(s.description.as_ref());
                Ok(quote! {
                    #doc
                    pub type #ident<'a> = #rust_type;
                })
            }
            LexUserType::Integer(i) if i.r#enum.is_some() => {
                self.generate_integer_enum(nsid, def_name, i)
            }
            LexUserType::Array(array) => {
                // Top-level array becomes type alias to Vec<ItemType>
                let type_name = self.def_to_type_name(nsid, def_name);
                let ident = syn::Ident::new(&type_name, proc_macro2::Span::call_site());
                let item_type = self.array_item_to_rust_type(nsid, &array.items)?;
                let doc = self.generate_doc_comment(array.description.as_ref());
                let needs_lifetime = self.array_item_needs_lifetime(&array.items);
                if needs_lifetime {
                    Ok(quote! {
                        #doc
                        pub type #ident<'a> = Vec<#item_type>;
                    })
                } else {
                    Ok(quote! {
                        #doc
                        pub type #ident = Vec<#item_type>;
                    })
                }
            }
            LexUserType::Boolean(_)
            | LexUserType::Integer(_)
            | LexUserType::Bytes(_)
            | LexUserType::CidLink(_)
            | LexUserType::Unknown(_) => {
                // These are rarely top-level defs, but if they are, make type aliases
                let type_name = self.def_to_type_name(nsid, def_name);
                let ident = syn::Ident::new(&type_name, proc_macro2::Span::call_site());
                let (rust_type, needs_lifetime) = match def {
                    LexUserType::Boolean(_) => (quote! { bool }, false),
                    LexUserType::Integer(_) => (quote! { i64 }, false),
                    LexUserType::Bytes(_) => (quote! { bytes::Bytes }, false),
                    LexUserType::CidLink(_) => {
                        (quote! { jacquard_common::types::cid::CidLink<'a> }, true)
                    }
                    LexUserType::Unknown(_) => {
                        (quote! { jacquard_common::types::value::Data<'a> }, true)
                    }
                    _ => unreachable!(),
                };
                if needs_lifetime {
                    Ok(quote! {
                        pub type #ident<'a> = #rust_type;
                    })
                } else {
                    Ok(quote! {
                        pub type #ident = #rust_type;
                    })
                }
            }
            LexUserType::Blob(_) => Err(CodegenError::unsupported(
                format!("top-level def type {:?}", def),
                nsid,
                None::<String>,
            )),
            LexUserType::XrpcSubscription(sub) => self.generate_subscription(nsid, def_name, sub),
        }
    }
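
    // For orientation, a record def roughly expands to the following shape (sketch
    // only; the fields depend on the lexicon, and `app.bsky.feed.post` is used
    // purely as an example NSID):
    //
    //     #[jacquard_derive::lexicon]
    //     #[derive(serde::Serialize, serde::Deserialize, Debug, Clone, PartialEq, Eq)]
    //     #[serde(rename_all = "camelCase")]
    //     pub struct Post<'a> { /* fields + extra_data */ }
    //
    //     impl jacquard_common::types::collection::Collection for Post<'_> {
    //         const NSID: &'static str = "app.bsky.feed.post";
    //     }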

    /// Generate a record type
    fn generate_record(
        &self,
        nsid: &str,
        def_name: &str,
        record: &LexRecord<'static>,
    ) -> Result<TokenStream> {
        match &record.record {
            crate::lexicon::LexRecordRecord::Object(obj) => {
                let type_name = self.def_to_type_name(nsid, def_name);
                let ident = syn::Ident::new(&type_name, proc_macro2::Span::call_site());

                // Generate main struct fields
                let fields = self.generate_object_fields(nsid, &type_name, obj, false)?;
                let doc = self.generate_doc_comment(record.description.as_ref());

                // Records always get a lifetime since they have the #[lexicon] attribute
                // which adds extra_data: BTreeMap<..., Data<'a>>
                let struct_def = quote! {
                    #doc
                    #[jacquard_derive::lexicon]
                    #[derive(serde::Serialize, serde::Deserialize, Debug, Clone, PartialEq, Eq)]
                    #[serde(rename_all = "camelCase")]
                    pub struct #ident<'a> {
                        #fields
                    }
                };

                // Generate union types for this record
                let mut unions = Vec::new();
                for (field_name, field_type) in &obj.properties {
                    if let LexObjectProperty::Union(union) = field_type {
                        let union_name =
                            format!("{}Record{}", type_name, field_name.to_pascal_case());
                        // Clone refs to avoid lifetime issues
                        let refs: Vec<_> = union.refs.iter().cloned().collect();
                        let union_def =
                            self.generate_union(&union_name, &refs, None, union.closed)?;
                        unions.push(union_def);
                    }
                }

                // Generate Collection trait impl
                let collection_impl = quote! {
                    impl jacquard_common::types::collection::Collection for #ident<'_> {
                        const NSID: &'static str = #nsid;
                    }
                };

                // Generate IntoStatic impl
                let field_names: Vec<&str> = obj.properties.keys().map(|k| k.as_str()).collect();
                let into_static_impl =
                    self.generate_into_static_for_struct(&type_name, &field_names, true, true);

                Ok(quote! {
                    #struct_def
                    #(#unions)*
                    #collection_impl
                    #into_static_impl
                })
            }
        }
    }

    /// Generate an object type
    fn generate_object(
        &self,
        nsid: &str,
        def_name: &str,
        obj: &LexObject<'static>,
    ) -> Result<TokenStream> {
        let type_name = self.def_to_type_name(nsid, def_name);
        let ident = syn::Ident::new(&type_name, proc_macro2::Span::call_site());

        let fields = self.generate_object_fields(nsid, &type_name, obj, false)?;
        let doc = self.generate_doc_comment(obj.description.as_ref());

        // Objects always get a lifetime since they have the #[lexicon] attribute
        // which adds extra_data: BTreeMap<..., Data<'a>>
        let struct_def = quote! {
            #doc
            #[jacquard_derive::lexicon]
            #[derive(serde::Serialize, serde::Deserialize, Debug, Clone, PartialEq, Eq)]
            #[serde(rename_all = "camelCase")]
            pub struct #ident<'a> {
                #fields
            }
        };

        // Generate union types for this object
        let mut unions = Vec::new();
        for (field_name, field_type) in &obj.properties {
            if let LexObjectProperty::Union(union) = field_type {
                let union_name = format!("{}Record{}", type_name, field_name.to_pascal_case());
                let refs: Vec<_> = union.refs.iter().cloned().collect();
                let union_def = self.generate_union(&union_name, &refs, None, union.closed)?;
                unions.push(union_def);
            }
        }

        // Generate IntoStatic impl
        let field_names: Vec<&str> = obj.properties.keys().map(|k| k.as_str()).collect();
        let into_static_impl =
            self.generate_into_static_for_struct(&type_name, &field_names, true, true);

        Ok(quote! {
            #struct_def
            #(#unions)*
            #into_static_impl
        })
    }

    /// Generate fields for an object
    fn generate_object_fields(
        &self,
        nsid: &str,
        parent_type_name: &str,
        obj: &LexObject<'static>,
        is_builder: bool,
    ) -> Result<TokenStream> {
        let required = obj.required.as_ref().map(|r| r.as_slice()).unwrap_or(&[]);

        let mut fields = Vec::new();
        for (field_name, field_type) in &obj.properties {
            let is_required = required.contains(field_name);
            let field_tokens = self.generate_field(
                nsid,
                parent_type_name,
                field_name,
                field_type,
                is_required,
                is_builder,
            )?;
            fields.push(field_tokens);
        }

        Ok(quote! { #(#fields)* })
    }

    /// Generate a single field
    fn generate_field(
        &self,
        nsid: &str,
        parent_type_name: &str,
        field_name: &str,
        field_type: &LexObjectProperty<'static>,
        is_required: bool,
        is_builder: bool,
    ) -> Result<TokenStream> {
        let field_ident = make_ident(&field_name.to_snake_case());

        let rust_type =
            self.property_to_rust_type(nsid, parent_type_name, field_name, field_type)?;
        let needs_lifetime = self.property_needs_lifetime(field_type);

        // Check if this is a CowStr field for builder(into) attribute
        let is_cowstr = matches!(field_type, LexObjectProperty::String(s) if s.format.is_none());

        let rust_type = if is_required {
            rust_type
        } else {
            quote! { std::option::Option<#rust_type> }
        };

        // Extract description from field type
        let description = match field_type {
            LexObjectProperty::Ref(r) => r.description.as_ref(),
            LexObjectProperty::Union(u) => u.description.as_ref(),
            LexObjectProperty::Bytes(b) => b.description.as_ref(),
            LexObjectProperty::CidLink(c) => c.description.as_ref(),
            LexObjectProperty::Array(a) => a.description.as_ref(),
            LexObjectProperty::Blob(b) => b.description.as_ref(),
            LexObjectProperty::Boolean(b) => b.description.as_ref(),
            LexObjectProperty::Integer(i) => i.description.as_ref(),
            LexObjectProperty::String(s) => s.description.as_ref(),
            LexObjectProperty::Unknown(u) => u.description.as_ref(),
        };
        let doc = self.generate_doc_comment(description);

        let mut attrs = Vec::new();

        if !is_required {
            attrs.push(quote! { #[serde(skip_serializing_if = "std::option::Option::is_none")] });
        }

        // Add serde(borrow) to all fields with lifetimes
        if needs_lifetime {
            attrs.push(quote! { #[serde(borrow)] });
        }

        // Add builder(into) for CowStr fields to allow String, &str, etc., but only for builder structs
        if is_builder && is_cowstr {
            attrs.push(quote! { #[builder(into)] });
        }

        Ok(quote! {
            #doc
            #(#attrs)*
            pub #field_ident: #rust_type,
        })
    }

    /// Check if a property type needs a lifetime parameter
    fn property_needs_lifetime(&self, prop: &LexObjectProperty<'static>) -> bool {
        match prop {
            LexObjectProperty::Boolean(_) | LexObjectProperty::Integer(_) => false,
            LexObjectProperty::String(s) => self.string_needs_lifetime(s),
            LexObjectProperty::Bytes(_) => false, // Bytes is owned
            LexObjectProperty::CidLink(_)
            | LexObjectProperty::Blob(_)
            | LexObjectProperty::Unknown(_) => true,
            LexObjectProperty::Array(array) => self.array_item_needs_lifetime(&array.items),
            LexObjectProperty::Ref(ref_type) => {
                // Check if the ref target actually needs a lifetime
                self.ref_needs_lifetime(&ref_type.r#ref)
            }
            LexObjectProperty::Union(_) => true, // Unions generally have lifetimes
        }
    }

    /// Check if an array item type needs a lifetime parameter
    fn array_item_needs_lifetime(&self, item: &LexArrayItem) -> bool {
        match item {
            LexArrayItem::Boolean(_) | LexArrayItem::Integer(_) => false,
            LexArrayItem::String(s) => self.string_needs_lifetime(s),
            LexArrayItem::Bytes(_) => false,
            LexArrayItem::CidLink(_) | LexArrayItem::Blob(_) | LexArrayItem::Unknown(_) => true,
            LexArrayItem::Ref(ref_type) => self.ref_needs_lifetime(&ref_type.r#ref),
            LexArrayItem::Union(_) => true,
        }
    }

    /// Check if a string type needs a lifetime parameter
    fn string_needs_lifetime(&self, s: &LexString) -> bool {
        match s.format {
            Some(LexStringFormat::Datetime)
            | Some(LexStringFormat::Language)
            | Some(LexStringFormat::Tid) => false,
            _ => true, // Most string types borrow
        }
    }

    /// Check if a ref needs a lifetime parameter
    fn ref_needs_lifetime(&self, ref_str: &str) -> bool {
        // Try to resolve the ref
        if let Some((_doc, def)) = self.corpus.resolve_ref(ref_str) {
            self.def_needs_lifetime(def)
        } else {
            // If we can't resolve it, assume it needs a lifetime (safe default)
            true
        }
    }

    /// Check if a lexicon def needs a lifetime parameter
    fn def_needs_lifetime(&self, def: &LexUserType<'static>) -> bool {
        match def {
            // Records and Objects always have lifetimes now since they get #[lexicon] attribute
            LexUserType::Record(_) => true,
            LexUserType::Object(_) => true,
            LexUserType::Token(_) => false,
            LexUserType::String(s) => {
                // Check if it's a known values enum or a regular string
                if s.known_values.is_some() {
                    // Known values enums have Other(CowStr<'a>) variant
                    true
                } else {
                    self.string_needs_lifetime(s)
                }
            }
            LexUserType::Integer(_) => false,
            LexUserType::Boolean(_) => false,
            LexUserType::Bytes(_) => false,
            LexUserType::CidLink(_) | LexUserType::Blob(_) | LexUserType::Unknown(_) => true,
            LexUserType::Array(array) => self.array_item_needs_lifetime(&array.items),
            LexUserType::XrpcQuery(_)
            | LexUserType::XrpcProcedure(_)
            | LexUserType::XrpcSubscription(_) => {
                // XRPC types generate multiple structs, not a single type we can reference
                // Shouldn't be referenced directly
                true
            }
        }
    }

    /// Check if xrpc params need a lifetime parameter
    fn params_need_lifetime(&self, params: &crate::lexicon::LexXrpcParameters<'static>) -> bool {
        params.properties.values().any(|prop| {
            use crate::lexicon::LexXrpcParametersProperty;
            match prop {
                LexXrpcParametersProperty::Boolean(_) | LexXrpcParametersProperty::Integer(_) => {
                    false
                }
                LexXrpcParametersProperty::String(s) => self.string_needs_lifetime(s),
                LexXrpcParametersProperty::Unknown(_) => true,
                LexXrpcParametersProperty::Array(arr) => {
                    use crate::lexicon::LexPrimitiveArrayItem;
                    match &arr.items {
                        LexPrimitiveArrayItem::Boolean(_) | LexPrimitiveArrayItem::Integer(_) => {
                            false
                        }
                        LexPrimitiveArrayItem::String(s) => self.string_needs_lifetime(s),
                        LexPrimitiveArrayItem::Unknown(_) => true,
                    }
                }
            }
        })
    }

    /// Convert a property type to Rust type
    fn property_to_rust_type(
        &self,
        nsid: &str,
        parent_type_name: &str,
        field_name: &str,
        prop: &LexObjectProperty<'static>,
    ) -> Result<TokenStream> {
        match prop {
            LexObjectProperty::Boolean(_) => Ok(quote! { bool }),
            LexObjectProperty::Integer(_) => Ok(quote! { i64 }),
            LexObjectProperty::String(s) => Ok(self.string_to_rust_type(s)),
            LexObjectProperty::Bytes(_) => Ok(quote! { bytes::Bytes }),
            LexObjectProperty::CidLink(_) => {
                Ok(quote! { jacquard_common::types::cid::CidLink<'a> })
            }
            LexObjectProperty::Blob(_) => Ok(quote! { jacquard_common::types::blob::Blob<'a> }),
            LexObjectProperty::Unknown(_) => Ok(quote! { jacquard_common::types::value::Data<'a> }),
            LexObjectProperty::Array(array) => {
                let item_type = self.array_item_to_rust_type(nsid, &array.items)?;
                Ok(quote! { Vec<#item_type> })
            }
            LexObjectProperty::Ref(ref_type) => {
                // Handle local refs (starting with #) by prepending the current NSID
                let ref_str = if ref_type.r#ref.starts_with('#') {
                    format!("{}{}", nsid, ref_type.r#ref)
                } else {
                    ref_type.r#ref.to_string()
                };
                self.ref_to_rust_type(&ref_str)
            }
            LexObjectProperty::Union(_union) => {
                // Generate unique union type name: StatusView + embed -> StatusViewRecordEmbed
                let union_name =
                    format!("{}Record{}", parent_type_name, field_name.to_pascal_case());
                let union_ident = syn::Ident::new(&union_name, proc_macro2::Span::call_site());
                Ok(quote! { #union_ident<'a> })
            }
        }
    }

    /// Convert array item to Rust type
    fn array_item_to_rust_type(&self, nsid: &str, item: &LexArrayItem) -> Result<TokenStream> {
        match item {
            LexArrayItem::Boolean(_) => Ok(quote! { bool }),
            LexArrayItem::Integer(_) => Ok(quote! { i64 }),
            LexArrayItem::String(s) => Ok(self.string_to_rust_type(s)),
            LexArrayItem::Bytes(_) => Ok(quote! { bytes::Bytes }),
            LexArrayItem::CidLink(_) => Ok(quote! { jacquard_common::types::cid::CidLink<'a> }),
            LexArrayItem::Blob(_) => Ok(quote! { jacquard_common::types::blob::Blob<'a> }),
            LexArrayItem::Unknown(_) => Ok(quote! { jacquard_common::types::value::Data<'a> }),
            LexArrayItem::Ref(ref_type) => {
                // Handle local refs (starting with #) by prepending the current NSID
                let ref_str = if ref_type.r#ref.starts_with('#') {
                    format!("{}{}", nsid, ref_type.r#ref)
                } else {
                    ref_type.r#ref.to_string()
                };
                self.ref_to_rust_type(&ref_str)
            }
            LexArrayItem::Union(_) => {
                // For now, use Data
                Ok(quote! { jacquard_common::types::value::Data<'a> })
            }
        }
    }

    /// Convert string type to Rust type
    fn string_to_rust_type(&self, s: &LexString) -> TokenStream {
        match s.format {
            Some(LexStringFormat::Datetime) => {
                quote! { jacquard_common::types::string::Datetime }
            }
            Some(LexStringFormat::Did) => quote! { jacquard_common::types::string::Did<'a> },
            Some(LexStringFormat::Handle) => quote! { jacquard_common::types::string::Handle<'a> },
            Some(LexStringFormat::AtIdentifier) => {
                quote! { jacquard_common::types::ident::AtIdentifier<'a> }
            }
            Some(LexStringFormat::Nsid) => quote! { jacquard_common::types::string::Nsid<'a> },
            Some(LexStringFormat::AtUri) => quote! { jacquard_common::types::string::AtUri<'a> },
            Some(LexStringFormat::Uri) => quote! { jacquard_common::types::string::Uri<'a> },
            Some(LexStringFormat::Cid) => quote! { jacquard_common::types::string::Cid<'a> },
            Some(LexStringFormat::Language) => {
                quote! { jacquard_common::types::string::Language }
            }
            Some(LexStringFormat::Tid) => quote! { jacquard_common::types::string::Tid },
            Some(LexStringFormat::RecordKey) => {
                quote! { jacquard_common::types::string::RecordKey<jacquard_common::types::string::Rkey<'a>> }
            }
            None => quote! { jacquard_common::CowStr<'a> },
        }
    }

    /// Convert ref to Rust type path
    fn ref_to_rust_type(&self, ref_str: &str) -> Result<TokenStream> {
        // Parse NSID and fragment
        let (ref_nsid, ref_def) = if let Some((nsid, fragment)) = ref_str.split_once('#') {
            (nsid, fragment)
        } else {
            (ref_str, "main")
        };

        // Check if ref exists
        if !self.corpus.ref_exists(ref_str) {
            // Fallback to Data
            return Ok(quote! { jacquard_common::types::value::Data<'a> });
        }

        // Convert NSID to module path
        // com.atproto.repo.strongRef -> com_atproto::repo::strong_ref::StrongRef
        // app.bsky.richtext.facet -> app_bsky::richtext::facet::Facet
        // app.bsky.actor.defs#nux -> app_bsky::actor::Nux (defs go in parent module)
        let parts: Vec<&str> = ref_nsid.split('.').collect();
        let last_segment = parts.last().unwrap();

        let type_name = self.def_to_type_name(ref_nsid, ref_def);

        let path_str = if *last_segment == "defs" && parts.len() >= 3 {
            // defs types go in parent module
            let first_two = format!("{}_{}", parts[0], parts[1]);
            if parts.len() == 3 {
                // com.atproto.defs -> com_atproto::TypeName
                format!("{}::{}::{}", self.root_module, first_two, type_name)
            } else {
                // app.bsky.actor.defs -> app_bsky::actor::TypeName
                let middle: Vec<&str> = parts[2..parts.len() - 1].iter().copied().collect();
                format!(
                    "{}::{}::{}::{}",
                    self.root_module,
                    first_two,
                    middle.join("::"),
                    type_name
                )
            }
        } else {
            // Regular types go in their own module file
            let (module_path, file_module) = if parts.len() >= 3 {
                // Join first two segments with underscore
                let first_two = format!("{}_{}", parts[0], parts[1]);
                let file_name = last_segment.to_snake_case();

                if parts.len() > 3 {
                    // Middle segments form the module path
                    let middle: Vec<&str> = parts[2..parts.len() - 1].iter().copied().collect();
                    let base_path = format!("{}::{}", first_two, middle.join("::"));
                    (base_path, file_name)
                } else {
                    // Only 3 parts: com.atproto.label -> com_atproto, file: label
                    (first_two, file_name)
                }
            } else if parts.len() == 2 {
                // e.g., "com.example" -> "com_example", file: example
                let first = parts[0].to_string();
                let file_name = parts[1].to_snake_case();
                (first, file_name)
            } else {
                (parts[0].to_string(), "main".to_string())
            };

            format!(
                "{}::{}::{}::{}",
                self.root_module, module_path, file_module, type_name
            )
        };

        let path: syn::Path = syn::parse_str(&path_str).map_err(|e| CodegenError::Other {
            message: format!("Failed to parse path: {}", e),
            source: None,
        })?;

        // Only add lifetime if the target type needs it
        if self.ref_needs_lifetime(ref_str) {
            Ok(quote! { #path<'a> })
        } else {
            Ok(quote! { #path })
        }
    }
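
    // Naming convention for the XRPC generators below, illustrated with the NSID
    // app.bsky.feed.getTimeline (sketch only; exact fields depend on the lexicon):
    // the parameters struct is named after the def itself, the response body gets an
    // `Output` suffix, and the error enum gets an `Error` suffix.
    //
    //     pub struct GetTimeline<'a> { /* query parameters */ }
    //     pub struct GetTimelineOutput<'a> { /* response body */ }
    //     pub enum GetTimelineError<'a> { /* known error codes + fallback */ }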

    /// Generate query type
    fn generate_query(
        &self,
        nsid: &str,
        def_name: &str,
        query: &LexXrpcQuery<'static>,
    ) -> Result<TokenStream> {
        let type_base = self.def_to_type_name(nsid, def_name);
        let mut output = Vec::new();

        let params_has_lifetime = query
            .parameters
            .as_ref()
            .map(|p| match p {
                crate::lexicon::LexXrpcQueryParameter::Params(params) => {
                    self.params_need_lifetime(params)
                }
            })
            .unwrap_or(false);
        let has_params = query.parameters.is_some();
        let has_output = query.output.is_some();
        let has_errors = query.errors.is_some();

        if let Some(params) = &query.parameters {
            let params_struct = self.generate_params_struct(&type_base, params)?;
            output.push(params_struct);
        }

        if let Some(body) = &query.output {
            let output_struct = self.generate_output_struct(&type_base, body)?;
            output.push(output_struct);
        }

        if let Some(errors) = &query.errors {
            let error_enum = self.generate_error_enum(&type_base, errors)?;
            output.push(error_enum);
        }

        // Generate XrpcRequest impl
        let output_encoding = query
            .output
            .as_ref()
            .map(|o| o.encoding.as_ref())
            .unwrap_or("application/json");
        let xrpc_impl = self.generate_xrpc_request_impl(
            nsid,
            &type_base,
            quote! { jacquard_common::types::xrpc::XrpcMethod::Query },
            output_encoding,
            has_params,
            params_has_lifetime,
            has_output,
            has_errors,
            false, // queries never have binary inputs
        )?;
        output.push(xrpc_impl);

        Ok(quote! {
            #(#output)*
        })
    }

    /// Generate procedure type
    fn generate_procedure(
        &self,
        nsid: &str,
        def_name: &str,
        proc: &LexXrpcProcedure<'static>,
    ) -> Result<TokenStream> {
        let type_base = self.def_to_type_name(nsid, def_name);
        let mut output = Vec::new();

        // Check if input is a binary body (no schema)
        let is_binary_input = proc
            .input
            .as_ref()
            .map(|i| i.schema.is_none())
            .unwrap_or(false);

        // Input bodies with schemas have lifetimes (they get #[lexicon] attribute)
        // Binary inputs don't have lifetimes
        let params_has_lifetime = proc.input.is_some() && !is_binary_input;
        let has_input = proc.input.is_some();
        let has_output = proc.output.is_some();
        let has_errors = proc.errors.is_some();

        if let Some(params) = &proc.parameters {
            let params_struct = self.generate_params_struct_proc(&type_base, params)?;
            output.push(params_struct);
        }

        if let Some(body) = &proc.input {
            let input_struct = self.generate_input_struct(&type_base, body)?;
            output.push(input_struct);
        }

        if let Some(body) = &proc.output {
            let output_struct = self.generate_output_struct(&type_base, body)?;
            output.push(output_struct);
        }

        if let Some(errors) = &proc.errors {
            let error_enum = self.generate_error_enum(&type_base, errors)?;
            output.push(error_enum);
        }

        // Generate XrpcRequest impl
        let input_encoding = proc
            .input
            .as_ref()
            .map(|i| i.encoding.as_ref())
            .unwrap_or("application/json");
        let output_encoding = proc
            .output
            .as_ref()
            .map(|o| o.encoding.as_ref())
            .unwrap_or("application/json");
        let xrpc_impl = self.generate_xrpc_request_impl(
            nsid,
            &type_base,
            quote! { jacquard_common::types::xrpc::XrpcMethod::Procedure(#input_encoding) },
            output_encoding,
            has_input,
            params_has_lifetime,
            has_output,
            has_errors,
            is_binary_input,
        )?;
        output.push(xrpc_impl);

        Ok(quote! {
            #(#output)*
        })
    }

    fn generate_subscription(
        &self,
        nsid: &str,
        def_name: &str,
        sub: &LexXrpcSubscription<'static>,
    ) -> Result<TokenStream> {
        let type_base = self.def_to_type_name(nsid, def_name);
        let mut output = Vec::new();

        if let Some(params) = &sub.parameters {
            // Extract LexXrpcParameters from the enum
            match params {
                crate::lexicon::LexXrpcSubscriptionParameter::Params(params_inner) => {
                    let params_struct =
                        self.generate_params_struct_inner(&type_base, params_inner)?;
                    output.push(params_struct);
                }
            }
        }

        if let Some(message) = &sub.message {
            if let Some(schema) = &message.schema {
                let message_type = self.generate_subscription_message(&type_base, schema)?;
                output.push(message_type);
            }
        }

        if let Some(errors) = &sub.errors {
            let error_enum = self.generate_error_enum(&type_base, errors)?;
            output.push(error_enum);
        }

        Ok(quote! {
            #(#output)*
        })
    }

    fn generate_subscription_message(
        &self,
        type_base: &str,
        schema: &LexXrpcSubscriptionMessageSchema<'static>,
    ) -> Result<TokenStream> {
        use crate::lexicon::LexXrpcSubscriptionMessageSchema;

        match schema {
            LexXrpcSubscriptionMessageSchema::Union(union) => {
                // Generate a union enum for the message
                let enum_name = format!("{}Message", type_base);
                let enum_ident = syn::Ident::new(&enum_name, proc_macro2::Span::call_site());

                let mut variants = Vec::new();
                for ref_str in &union.refs {
                    let ref_str_s = ref_str.as_ref();
                    // Parse ref to get NSID and def name
                    let (ref_nsid, ref_def) =
                        if let Some((nsid, fragment)) = ref_str.split_once('#') {
                            (nsid, fragment)
                        } else {
                            (ref_str.as_ref(), "main")
                        };

                    let variant_name = if ref_def == "main" {
                        ref_nsid.split('.').last().unwrap().to_pascal_case()
                    } else {
                        ref_def.to_pascal_case()
                    };
                    let variant_ident =
                        syn::Ident::new(&variant_name, proc_macro2::Span::call_site());
                    let type_path = self.ref_to_rust_type(ref_str)?;

                    variants.push(quote! {
                        #[serde(rename = #ref_str_s)]
                        #variant_ident(Box<#type_path>)
                    });
                }

                let doc = self.generate_doc_comment(union.description.as_ref());

                // Generate IntoStatic impl for the enum
                let variant_info: Vec<(String, EnumVariantKind)> = union
                    .refs
                    .iter()
                    .map(|ref_str| {
                        let ref_def = if let Some((_, fragment)) = ref_str.split_once('#') {
                            fragment
                        } else {
                            "main"
                        };
                        let variant_name = if ref_def == "main" {
                            ref_str.split('.').last().unwrap().to_pascal_case()
                        } else {
                            ref_def.to_pascal_case()
                        };
                        (variant_name, EnumVariantKind::Tuple)
                    })
                    .collect();
                let into_static_impl = self.generate_into_static_for_enum(
                    &enum_name,
                    &variant_info,
                    true,
                    true, // open union
                );

                Ok(quote! {
                    #doc
                    #[jacquard_derive::open_union]
                    #[derive(serde::Serialize, serde::Deserialize, Debug, Clone, PartialEq, Eq)]
                    #[serde(tag = "$type")]
                    #[serde(bound(deserialize = "'de: 'a"))]
                    pub enum #enum_ident<'a> {
                        #(#variants,)*
                    }

                    #into_static_impl
                })
            }
            LexXrpcSubscriptionMessageSchema::Object(obj) => {
                // Generate a struct for the message
                let struct_name = format!("{}Message", type_base);
                let struct_ident = syn::Ident::new(&struct_name, proc_macro2::Span::call_site());

                let fields = self.generate_object_fields("", &struct_name, obj, false)?;
                let doc = self.generate_doc_comment(obj.description.as_ref());

                // Subscription message structs always get a lifetime since they have the #[lexicon] attribute
                // which adds extra_data: BTreeMap<..., Data<'a>>
                let struct_def = quote! {
                    #doc
                    #[jacquard_derive::lexicon]
                    #[derive(serde::Serialize, serde::Deserialize, Debug, Clone, PartialEq, Eq)]
                    #[serde(rename_all = "camelCase")]
                    pub struct #struct_ident<'a> {
                        #fields
                    }
                };

                // Generate union types for this message
                let mut unions = Vec::new();
                for (field_name, field_type) in &obj.properties {
                    if let LexObjectProperty::Union(union) = field_type {
                        let union_name =
                            format!("{}Record{}", struct_name, field_name.to_pascal_case());
                        let refs: Vec<_> = union.refs.iter().cloned().collect();
                        let union_def =
                            self.generate_union(&union_name, &refs, None, union.closed)?;
                        unions.push(union_def);
                    }
                }

                // Generate IntoStatic impl
                let field_names: Vec<&str> = obj.properties.keys().map(|k| k.as_str()).collect();
                let into_static_impl =
                    self.generate_into_static_for_struct(&struct_name, &field_names, true, true);

                Ok(quote! {
                    #struct_def
                    #(#unions)*
                    #into_static_impl
                })
            }
            LexXrpcSubscriptionMessageSchema::Ref(ref_type) => {
                // Just a type alias to the referenced type
                // Refs generally have lifetimes, so always add <'a>
                let type_name = format!("{}Message", type_base);
                let ident = syn::Ident::new(&type_name, proc_macro2::Span::call_site());
                let rust_type = self.ref_to_rust_type(&ref_type.r#ref)?;
                let doc = self.generate_doc_comment(ref_type.description.as_ref());

                Ok(quote! {
                    #doc
                    pub type #ident<'a> = #rust_type;
                })
            }
        }
    }

    /// Convert def name to Rust type name
    fn def_to_type_name(&self, nsid: &str, def_name: &str) -> String {
        if def_name == "main" {
            // Use last segment of NSID
            let base_name = nsid.split('.').last().unwrap().to_pascal_case();

            // Check if any other def would collide with this name
            if let Some(doc) = self.corpus.get(nsid) {
                let has_collision = doc.defs.keys().any(|other_def| {
                    let other_def_str: &str = other_def.as_ref();
                    other_def_str != "main" && other_def_str.to_pascal_case() == base_name
                });

                if has_collision {
                    return format!("{}Record", base_name);
                }
            }

            base_name
        } else {
            def_name.to_pascal_case()
        }
    }
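
    // Illustration of the naming rule above (hypothetical calls, comments only):
    //
    //     self.def_to_type_name("app.bsky.feed.post", "main")               // -> "Post"
    //     self.def_to_type_name("app.bsky.actor.defs", "profileViewBasic")  // -> "ProfileViewBasic"
    //
    // If a lexicon also declared a non-main def whose PascalCase name collided with
    // the main def's, the main def would get a `Record` suffix instead (e.g. `PostRecord`).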

    /// Convert NSID to file path relative to output directory
    ///
    /// - `app.bsky.feed.post` → `app_bsky/feed/post.rs`
    /// - `com.atproto.label.defs` → `com_atproto/label.rs` (defs go in parent)
    fn nsid_to_file_path(&self, nsid: &str) -> std::path::PathBuf {
        let parts: Vec<&str> = nsid.split('.').collect();

        if parts.len() < 2 {
            // Shouldn't happen with valid NSIDs, but handle gracefully
            return format!("{}.rs", parts[0]).into();
        }

        let last = parts.last().unwrap();

        if *last == "defs" && parts.len() >= 3 {
            // defs go in parent module: com.atproto.label.defs → com_atproto/label.rs
            let first_two = format!("{}_{}", parts[0], parts[1]);
            if parts.len() == 3 {
                // com.atproto.defs → com_atproto.rs
                format!("{}.rs", first_two).into()
            } else {
                // com.atproto.label.defs → com_atproto/label.rs
                let middle: Vec<&str> = parts[2..parts.len() - 1].iter().copied().collect();
                let mut path = std::path::PathBuf::from(first_two);
                for segment in &middle[..middle.len() - 1] {
                    path.push(segment);
                }
                path.push(format!("{}.rs", middle.last().unwrap()));
                path
            }
        } else {
            // Regular path: app.bsky.feed.post → app_bsky/feed/post.rs
            let first_two = format!("{}_{}", parts[0], parts[1]);
            let mut path = std::path::PathBuf::from(first_two);

            for segment in &parts[2..parts.len() - 1] {
                path.push(segment);
            }

            path.push(format!("{}.rs", last.to_snake_case()));
            path
        }
    }

    /// Generate all code for the corpus, organized by file
    /// Returns a map of file paths to (tokens, optional NSID)
    pub fn generate_all(
        &self,
    ) -> Result<std::collections::BTreeMap<std::path::PathBuf, (TokenStream, Option<String>)>> {
        use std::collections::BTreeMap;

        let mut file_contents: BTreeMap<std::path::PathBuf, Vec<TokenStream>> = BTreeMap::new();
        let mut file_nsids: BTreeMap<std::path::PathBuf, String> = BTreeMap::new();

        // Generate code for all lexicons
        for (nsid, doc) in self.corpus.iter() {
            let file_path = self.nsid_to_file_path(nsid.as_ref());

            // Track which NSID this file is for
            file_nsids.insert(file_path.clone(), nsid.to_string());

            for (def_name, def) in &doc.defs {
                let tokens = self.generate_def(nsid.as_ref(), def_name.as_ref(), def)?;
                file_contents
                    .entry(file_path.clone())
                    .or_default()
                    .push(tokens);
            }
        }

        // Combine all tokens for each file
        let mut result = BTreeMap::new();
        for (path, tokens_vec) in file_contents {
            let nsid = file_nsids.get(&path).cloned();
            result.insert(path, (quote! { #(#tokens_vec)* }, nsid));
        }

        Ok(result)
    }
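
    // The module tree produced by the next two methods ends up roughly like this
    // (sketch, assuming the corpus contains app.bsky.* lexicons):
    //
    //     lib.rs                  #[cfg(feature = "app_bsky")] pub mod app_bsky; ...
    //     app_bsky.rs             pub mod actor; pub mod feed; ... (plus any defs types)
    //     app_bsky/feed.rs        pub mod post; ...
    //     app_bsky/feed/post.rs
    //
    // Top-level modules are feature-gated; nested modules use Rust 2018 style
    // (`dir.rs` next to `dir/`) rather than `mod.rs`.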

    /// Generate parent module files with pub mod declarations
    pub fn generate_module_tree(
        &self,
        file_map: &std::collections::BTreeMap<std::path::PathBuf, (TokenStream, Option<String>)>,
        defs_only: &std::collections::BTreeMap<std::path::PathBuf, (TokenStream, Option<String>)>,
    ) -> std::collections::BTreeMap<std::path::PathBuf, (TokenStream, Option<String>)> {
        use std::collections::{BTreeMap, BTreeSet};

        // Track what modules each directory needs to declare
        // Key: directory path, Value: set of module names (file stems)
        let mut dir_modules: BTreeMap<std::path::PathBuf, BTreeSet<String>> = BTreeMap::new();

        // Collect all parent directories that have files
        let mut all_dirs: BTreeSet<std::path::PathBuf> = BTreeSet::new();
        for path in file_map.keys() {
            if let Some(parent_dir) = path.parent() {
                all_dirs.insert(parent_dir.to_path_buf());
            }
        }

        for path in file_map.keys() {
            if let Some(parent_dir) = path.parent() {
                if let Some(file_stem) = path.file_stem().and_then(|s| s.to_str()) {
                    // Skip mod.rs and lib.rs - they're module files, not modules to declare
                    if file_stem == "mod" || file_stem == "lib" {
                        continue;
                    }

                    // Always add the module declaration to parent
                    dir_modules
                        .entry(parent_dir.to_path_buf())
                        .or_default()
                        .insert(file_stem.to_string());
                }
            }
        }

        // Generate module files
        let mut result = BTreeMap::new();

        for (dir, module_names) in dir_modules {
            let mod_file_path = if dir.components().count() == 0 {
                // Root directory -> lib.rs for library crates
                std::path::PathBuf::from("lib.rs")
            } else {
                // Subdirectory: app_bsky/feed -> app_bsky/feed.rs (Rust 2018 style)
                let dir_name = dir.file_name().and_then(|s| s.to_str()).unwrap_or("mod");
                let mut path = dir
                    .parent()
                    .unwrap_or_else(|| std::path::Path::new(""))
                    .to_path_buf();
                path.push(format!("{}.rs", dir_name));
                path
            };

            let is_root = dir.components().count() == 0;
            let mods: Vec<_> = module_names
                .iter()
                .map(|name| {
                    let ident = syn::Ident::new(name, proc_macro2::Span::call_site());
                    if is_root {
                        // Top-level modules get feature gates
                        quote! {
                            #[cfg(feature = #name)]
                            pub mod #ident;
                        }
                    } else {
                        quote! { pub mod #ident; }
                    }
                })
                .collect();

            // If this file already exists in defs_only (e.g., from defs), merge the content
            let module_tokens = quote! { #(#mods)* };
            if let Some((existing_tokens, nsid)) = defs_only.get(&mod_file_path) {
                // Put module declarations FIRST, then existing defs content
                result.insert(
                    mod_file_path,
                    (quote! { #module_tokens #existing_tokens }, nsid.clone()),
                );
            } else {
                result.insert(mod_file_path, (module_tokens, None));
            }
        }

        result
    }

    /// Write all generated code to disk
    pub fn write_to_disk(&self, output_dir: &std::path::Path) -> Result<()> {
        // Generate all code (defs only)
        let defs_files = self.generate_all()?;
        let mut all_files = defs_files.clone();

        // Generate module tree iteratively until no new files appear
        loop {
            let module_map = self.generate_module_tree(&all_files, &defs_files);
            let old_count = all_files.len();

            // Merge new module files
            for (path, tokens) in module_map {
                all_files.insert(path, tokens);
            }

            if all_files.len() == old_count {
                // No new files added
                break;
            }
        }

        // Write to disk
        for (path, (tokens, nsid)) in all_files {
            let full_path = output_dir.join(&path);

            // Create parent directories
            if let Some(parent) = full_path.parent() {
                std::fs::create_dir_all(parent).map_err(|e| CodegenError::Other {
                    message: format!("Failed to create directory {:?}: {}", parent, e),
                    source: None,
                })?;
            }

            // Format code
            let file: syn::File = syn::parse2(tokens.clone()).map_err(|e| CodegenError::Other {
                message: format!(
                    "Failed to parse tokens for {:?}: {}\nTokens: {}",
                    path, e, tokens
                ),
                source: None,
            })?;
            let mut formatted = prettyplease::unparse(&file);

            // Add blank lines between top-level items for better readability
            let lines: Vec<&str> = formatted.lines().collect();
            let mut result_lines = Vec::new();

            for (i, line) in lines.iter().enumerate() {
                result_lines.push(*line);

                // Add blank line after closing braces that are at column 0 (top-level items)
                if *line == "}" && i + 1 < lines.len() && !lines[i + 1].is_empty() {
                    result_lines.push("");
                }

                // Add blank line after last pub mod declaration before structs/enums
                if line.starts_with("pub mod ") && i + 1 < lines.len() {
                    let next_line = lines[i + 1];
                    if !next_line.starts_with("pub mod ") && !next_line.is_empty() {
                        result_lines.push("");
                    }
                }
            }

            formatted = result_lines.join("\n");

            // Add header comment
            let header = if let Some(nsid) = nsid {
                format!(
                    "// @generated by jacquard-lexicon. DO NOT EDIT.\n//\n// Lexicon: {}\n//\n// This file was automatically generated from Lexicon schemas.\n// Any manual changes will be overwritten on the next regeneration.\n\n",
                    nsid
                )
            } else {
                "// @generated by jacquard-lexicon. DO NOT EDIT.\n//\n// This file was automatically generated from Lexicon schemas.\n// Any manual changes will be overwritten on the next regeneration.\n\n".to_string()
            };
            formatted = format!("{}{}", header, formatted);

            // Write file
            std::fs::write(&full_path, formatted).map_err(|e| CodegenError::Other {
                message: format!("Failed to write file {:?}: {}", full_path, e),
                source: None,
            })?;
        }

        Ok(())
    }
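
    // End-to-end usage sketch (comments only): `CodeGenerator::new` and
    // `write_to_disk` are the real entry points above, but how the corpus is loaded
    // and the root module string are shown here purely as assumptions.
    //
    //     let corpus = /* load a LexiconCorpus from lexicon JSON files */;
    //     let generator = CodeGenerator::new(&corpus, "crate");
    //     generator.write_to_disk(std::path::Path::new("src/generated"))?;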

    /// Generate doc comment from description
    fn generate_doc_comment(&self, desc: Option<&jacquard_common::CowStr>) -> TokenStream {
        if let Some(desc) = desc {
            let doc = desc.as_ref();
            quote! { #[doc = #doc] }
        } else {
            quote! {}
        }
    }

    /// Generate params struct from XRPC query parameters
    fn generate_params_struct(
        &self,
        type_base: &str,
        params: &crate::lexicon::LexXrpcQueryParameter<'static>,
    ) -> Result<TokenStream> {
        use crate::lexicon::LexXrpcQueryParameter;
        match params {
            LexXrpcQueryParameter::Params(p) => self.generate_params_struct_inner(type_base, p),
        }
    }

    /// Generate params struct from XRPC procedure parameters (query string params)
    fn generate_params_struct_proc(
        &self,
        type_base: &str,
        params: &crate::lexicon::LexXrpcProcedureParameter<'static>,
    ) -> Result<TokenStream> {
        use crate::lexicon::LexXrpcProcedureParameter;
        match params {
            // For procedures, query string params still get "Params" suffix since the main struct is the input
            LexXrpcProcedureParameter::Params(p) => {
                let struct_name = format!("{}Params", type_base);
                let ident = syn::Ident::new(&struct_name, proc_macro2::Span::call_site());
                self.generate_params_struct_inner_with_name(&ident, p)
            }
        }
    }

    /// Generate params struct inner (shared implementation)
    fn generate_params_struct_inner(
        &self,
        type_base: &str,
        p: &crate::lexicon::LexXrpcParameters<'static>,
    ) -> Result<TokenStream> {
        let ident = syn::Ident::new(type_base, proc_macro2::Span::call_site());
        self.generate_params_struct_inner_with_name(&ident, p)
    }

    /// Generate params struct with custom name
    fn generate_params_struct_inner_with_name(
        &self,
        ident: &syn::Ident,
        p: &crate::lexicon::LexXrpcParameters<'static>,
    ) -> Result<TokenStream> {
        let required = p.required.as_ref().map(|r| r.as_slice()).unwrap_or(&[]);
        let mut fields = Vec::new();
        let mut default_fns = Vec::new();

        for (field_name, field_type) in &p.properties {
            let is_required = required.contains(field_name);
            let (field_tokens, default_fn) =
                self.generate_param_field_with_default("", field_name, field_type, is_required)?;
            fields.push(field_tokens);
            if let Some(fn_def) = default_fn {
                default_fns.push(fn_def);
            }
        }

        let doc = self.generate_doc_comment(p.description.as_ref());
        let needs_lifetime = self.params_need_lifetime(p);

        let derives = quote! {
            #[derive(serde::Serialize, serde::Deserialize, Debug, Clone, PartialEq, Eq, bon::Builder)]
            #[builder(start_fn = new)]
        };

        // Generate IntoStatic impl
        let field_names: Vec<&str> = p.properties.keys().map(|k| k.as_str()).collect();
        let type_name = ident.to_string();
        let into_static_impl =
            self.generate_into_static_for_struct(&type_name, &field_names, needs_lifetime, false);

        if needs_lifetime {
            Ok(quote! {
                #(#default_fns)*

                #doc
                #derives
                #[serde(rename_all = "camelCase")]
                pub struct #ident<'a> {
                    #(#fields)*
                }

                #into_static_impl
            })
        } else {
            Ok(quote! {
                #(#default_fns)*

                #doc
                #derives
                #[serde(rename_all = "camelCase")]
                pub struct #ident {
                    #(#fields)*
                }

                #into_static_impl
            })
        }
    }
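
    // For a required integer parameter with a schema default (say a hypothetical
    // `limit` field defaulting to 50), the helper below emits roughly:
    //
    //     fn _default_limit() -> i64 { 50 }
    //
    //     #[doc = "Defaults to `50`"]
    //     #[serde(default = "_default_limit")]
    //     pub limit: i64,
    //
    // Optional parameters only get a doc comment summarizing default/min/max.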

    /// Generate param field with serde default if present
    /// Returns (field_tokens, optional_default_function)
    fn generate_param_field_with_default(
        &self,
        nsid: &str,
        field_name: &str,
        field_type: &crate::lexicon::LexXrpcParametersProperty<'static>,
        is_required: bool,
    ) -> Result<(TokenStream, Option<TokenStream>)> {
        use crate::lexicon::LexXrpcParametersProperty;
        use heck::ToSnakeCase;

        // Get base field
        let base_field = self.generate_param_field(nsid, field_name, field_type, is_required)?;

        // Generate default function and attribute for required fields with defaults
        // For optional fields, just add doc comments
        let (doc_comment, serde_attr, default_fn) = if is_required {
            match field_type {
                LexXrpcParametersProperty::Boolean(b) if b.default.is_some() => {
                    let v = b.default.unwrap();
                    let fn_name = format!("_default_{}", field_name.to_snake_case());
                    let fn_ident = syn::Ident::new(&fn_name, proc_macro2::Span::call_site());
                    (
                        Some(format!("Defaults to `{}`", v)),
                        Some(quote! { #[serde(default = #fn_name)] }),
                        Some(quote! {
                            fn #fn_ident() -> bool { #v }
                        }),
                    )
                }
                LexXrpcParametersProperty::Integer(i) if i.default.is_some() => {
                    let v = i.default.unwrap();
                    let fn_name = format!("_default_{}", field_name.to_snake_case());
                    let fn_ident = syn::Ident::new(&fn_name, proc_macro2::Span::call_site());
                    (
                        Some(format!("Defaults to `{}`", v)),
                        Some(quote! { #[serde(default = #fn_name)] }),
                        Some(quote! {
                            fn #fn_ident() -> i64 { #v }
                        }),
                    )
                }
                LexXrpcParametersProperty::String(s) if s.default.is_some() => {
                    let v = s.default.as_ref().unwrap().as_ref();
                    let fn_name = format!("_default_{}", field_name.to_snake_case());
                    let fn_ident = syn::Ident::new(&fn_name, proc_macro2::Span::call_site());
                    (
                        Some(format!("Defaults to `\"{}\"`", v)),
                        Some(quote! { #[serde(default = #fn_name)] }),
                        Some(quote! {
                            fn #fn_ident() -> jacquard_common::CowStr<'static> {
                                jacquard_common::CowStr::from(#v)
                            }
                        }),
                    )
                }
                _ => (None, None, None),
            }
        } else {
            // Optional fields - just doc comments, no serde defaults
            let doc = match field_type {
                LexXrpcParametersProperty::Integer(i) => {
                    let mut parts = Vec::new();
                    if let Some(def) = i.default {
                        parts.push(format!("default: {}", def));
                    }
                    if let Some(min) = i.minimum {
                        parts.push(format!("min: {}", min));
                    }
                    if let Some(max) = i.maximum {
                        parts.push(format!("max: {}", max));
                    }
                    if !parts.is_empty() {
                        Some(format!("({})", parts.join(", ")))
                    } else {
                        None
                    }
                }
                LexXrpcParametersProperty::String(s) => {
                    let mut parts = Vec::new();
                    if let Some(def) = s.default.as_ref() {
                        parts.push(format!("default: \"{}\"", def.as_ref()));
                    }
                    if let Some(min) = s.min_length {
                        parts.push(format!("min length: {}", min));
                    }
                    if let Some(max) = s.max_length {
                        parts.push(format!("max length: {}", max));
                    }
                    if !parts.is_empty() {
                        Some(format!("({})", parts.join(", ")))
                    } else {
                        None
                    }
                }
                LexXrpcParametersProperty::Boolean(b) => {
                    b.default.map(|v| format!("(default: {})", v))
                }
                _ => None,
            };
            (doc, None, None)
        };

        let doc = doc_comment.as_ref().map(|d| quote! { #[doc = #d] });
        let field_with_attrs = match (doc, serde_attr) {
            (Some(doc), Some(attr)) => quote! {
                #doc
                #attr
                #base_field
            },
            (Some(doc), None) => quote! {
                #doc
                #base_field
            },
            (None, Some(attr)) => quote! {
                #attr
                #base_field
            },
            (None, None) => base_field,
        };

        Ok((field_with_attrs, default_fn))
    }

    /// Generate input struct from XRPC body
    fn generate_input_struct(
        &self,
        type_base: &str,
        body: &LexXrpcBody<'static>,
    ) -> Result<TokenStream> {
        let ident = syn::Ident::new(type_base, proc_macro2::Span::call_site());

        // Check if this is a binary body (no schema, just raw bytes)
        let is_binary_body = body.schema.is_none();

        let fields = if let Some(schema) = &body.schema {
            self.generate_body_fields("", type_base, schema, true)?
        } else {
            // Binary body: just a bytes field
            quote! {
                pub body: bytes::Bytes,
            }
        };

        let doc = self.generate_doc_comment(body.description.as_ref());

        // Binary bodies don't need #[lexicon] attribute or lifetime
        let struct_def = if is_binary_body {
            quote! {
                #doc
                #[derive(serde::Serialize, serde::Deserialize, Debug, Clone, PartialEq, Eq, bon::Builder)]
                #[builder(start_fn = new)]
                #[serde(rename_all = "camelCase")]
                pub struct #ident {
                    #fields
                }
            }
        } else {
            // Input structs with schemas: manually add extra_data field with #[builder(default)]
            // for bon compatibility. The #[lexicon] macro will see it exists and skip adding it.
            quote! {
                #doc
                #[jacquard_derive::lexicon]
                #[derive(serde::Serialize, serde::Deserialize, Debug, Clone, PartialEq, Eq, bon::Builder)]
                #[serde(rename_all = "camelCase")]
                #[builder(start_fn = new)]
                pub struct #ident<'a> {
                    #fields
                    #[serde(flatten)]
                    #[serde(borrow)]
                    #[builder(default)]
                    pub extra_data: ::std::collections::BTreeMap<
                        ::jacquard_common::smol_str::SmolStr,
                        ::jacquard_common::types::value::Data<'a>
                    >,
                }
            }
        };

        // Generate union types if schema is an Object
        let mut unions = Vec::new();
        if let Some(crate::lexicon::LexXrpcBodySchema::Object(obj)) = &body.schema {
            for (field_name, field_type) in &obj.properties {
                if let LexObjectProperty::Union(union) = field_type {
                    let union_name = format!("{}Record{}", type_base, field_name.to_pascal_case());
                    let refs: Vec<_> = union.refs.iter().cloned().collect();
                    let union_def = self.generate_union(&union_name, &refs, None, union.closed)?;
                    unions.push(union_def);
                }
            }
        }

        // Generate IntoStatic impl
        let into_static_impl = if is_binary_body {
            // Binary bodies own their Bytes, so into_static just returns self
            quote! {
                impl jacquard_common::IntoStatic for #ident {
                    type Output = #ident;
                    fn into_static(self) -> Self::Output {
                        self
                    }
                }
            }
        } else {
            let field_names: Vec<&str> = match &body.schema {
                Some(crate::lexicon::LexXrpcBodySchema::Object(obj)) => {
                    obj.properties.keys().map(|k| k.as_str()).collect()
                }
                Some(_) => {
                    // For Ref or Union schemas, there's just a single flattened field
                    vec!["value"]
                }
                None => {
                    // No schema means no fields, just extra_data
                    vec![]
                }
            };
            self.generate_into_static_for_struct(type_base, &field_names, true, true)
        };

        Ok(quote! {
            #struct_def
            #(#unions)*
            #into_static_impl
        })
    }

    /// Generate output struct from XRPC body
    fn generate_output_struct(
        &self,
        type_base: &str,
        body: &LexXrpcBody<'static>,
    ) -> Result<TokenStream> {
        let struct_name = format!("{}Output", type_base);
        let ident = syn::Ident::new(&struct_name, proc_macro2::Span::call_site());

        let fields = if let Some(schema) = &body.schema {
            self.generate_body_fields("", &struct_name, schema, false)?
        } else {
            quote! {}
        };

        let doc = self.generate_doc_comment(body.description.as_ref());

        // Output structs always get a lifetime since they have the #[lexicon] attribute
        // which adds extra_data: BTreeMap<..., Data<'a>>
        let struct_def = quote! {
            #doc
            #[jacquard_derive::lexicon]
            #[derive(serde::Serialize, serde::Deserialize, Debug, Clone, PartialEq, Eq)]
            #[serde(rename_all = "camelCase")]
            pub struct #ident<'a> {
                #fields
            }
        };

        // Generate union types if schema is an Object
        let mut unions = Vec::new();
        if let Some(crate::lexicon::LexXrpcBodySchema::Object(obj)) = &body.schema {
            for (field_name, field_type) in &obj.properties {
                if let LexObjectProperty::Union(union) = field_type {
                    let union_name =
                        format!("{}Record{}", struct_name, field_name.to_pascal_case());
                    let refs: Vec<_> = union.refs.iter().cloned().collect();
                    let union_def = self.generate_union(&union_name, &refs, None, union.closed)?;
                    unions.push(union_def);
                }
            }
        }

        // Generate IntoStatic impl
        let field_names: Vec<&str> = match &body.schema {
            Some(crate::lexicon::LexXrpcBodySchema::Object(obj)) => {
                obj.properties.keys().map(|k| k.as_str()).collect()
            }
            Some(_) => {
                // For Ref or Union schemas, there's just a single flattened field
                vec!["value"]
            }
            None => {
                // No schema means no fields, just extra_data
                vec![]
            }
        };
        let into_static_impl =
            self.generate_into_static_for_struct(&struct_name, &field_names, true, true);

        Ok(quote! {
            #struct_def
            #(#unions)*
            #into_static_impl
        })
    }

    /// Generate fields from XRPC body schema
    fn generate_body_fields(
        &self,
        nsid: &str,
        parent_type_name: &str,
        schema: &LexXrpcBodySchema<'static>,
        is_builder: bool,
    ) -> Result<TokenStream> {
        use crate::lexicon::LexXrpcBodySchema;

        match schema {
            LexXrpcBodySchema::Object(obj) => {
                self.generate_object_fields(nsid, parent_type_name, obj, is_builder)
            }
            LexXrpcBodySchema::Ref(ref_type) => {
                let rust_type = self.ref_to_rust_type(&ref_type.r#ref)?;
                Ok(quote! {
                    #[serde(flatten)]
                    #[serde(borrow)]
                    pub value: #rust_type,
                })
            }
            LexXrpcBodySchema::Union(_union) => {
                let rust_type = quote! { jacquard_common::types::value::Data<'a> };
                Ok(quote! {
                    #[serde(flatten)]
                    #[serde(borrow)]
                    pub value: #rust_type,
                })
            }
        }
    }

    /// Generate a field for XRPC parameters
    fn generate_param_field(
        &self,
        _nsid: &str,
        field_name: &str,
        field_type: &crate::lexicon::LexXrpcParametersProperty<'static>,
        is_required: bool,
    ) -> Result<TokenStream> {
        use crate::lexicon::LexXrpcParametersProperty;

        let field_ident = make_ident(&field_name.to_snake_case());

        let (rust_type, needs_lifetime, is_cowstr) = match field_type {
            LexXrpcParametersProperty::Boolean(_) => (quote! { bool }, false, false),
            LexXrpcParametersProperty::Integer(_) => (quote! { i64 }, false, false),
            LexXrpcParametersProperty::String(s) => {
                let is_cowstr = s.format.is_none(); // CowStr for plain strings
                (
                    self.string_to_rust_type(s),
                    self.string_needs_lifetime(s),
                    is_cowstr,
                )
            }
            LexXrpcParametersProperty::Unknown(_) => (
                quote! { jacquard_common::types::value::Data<'a> },
                true,
                false,
            ),
            LexXrpcParametersProperty::Array(arr) => {
                let needs_lifetime = match &arr.items {
                    crate::lexicon::LexPrimitiveArrayItem::Boolean(_)
                    | crate::lexicon::LexPrimitiveArrayItem::Integer(_) => false,
                    crate::lexicon::LexPrimitiveArrayItem::String(s) => {
                        self.string_needs_lifetime(s)
                    }
                    crate::lexicon::LexPrimitiveArrayItem::Unknown(_) => true,
                };
                let item_type = match &arr.items {
                    crate::lexicon::LexPrimitiveArrayItem::Boolean(_) => quote! { bool },
                    crate::lexicon::LexPrimitiveArrayItem::Integer(_) => quote! { i64 },
                    crate::lexicon::LexPrimitiveArrayItem::String(s) => self.string_to_rust_type(s),
                    crate::lexicon::LexPrimitiveArrayItem::Unknown(_) => {
                        quote! { jacquard_common::types::value::Data<'a> }
                    }
                };
                (quote! { Vec<#item_type> }, needs_lifetime, false)
            }
        };

        let rust_type = if is_required {
            rust_type
        } else {
            quote! { std::option::Option<#rust_type> }
        };

        let mut attrs = Vec::new();

        if !is_required {
            attrs.push(quote! { #[serde(skip_serializing_if = "std::option::Option::is_none")] });
        }

        // Add serde(borrow) to all fields with lifetimes
        if needs_lifetime {
            attrs.push(quote! { #[serde(borrow)] });
        }

        // Add builder(into) for CowStr fields to allow String, &str, etc.
        if is_cowstr {
            attrs.push(quote! { #[builder(into)] });
        }

        Ok(quote! {
            #(#attrs)*
            pub #field_ident: #rust_type,
        })
    }

    /// Generate error enum from XRPC errors
    fn generate_error_enum(
        &self,
        type_base: &str,
        errors: &[LexXrpcError<'static>],
    ) -> Result<TokenStream> {
        let enum_name = format!("{}Error", type_base);
        let ident = syn::Ident::new(&enum_name, proc_macro2::Span::call_site());

        let mut variants = Vec::new();
        let mut display_arms = Vec::new();

        for error in errors {
            let variant_name = error.name.to_pascal_case();
            let variant_ident = syn::Ident::new(&variant_name, proc_macro2::Span::call_site());

            let error_name = error.name.as_ref();
            let doc = self.generate_doc_comment(error.description.as_ref());

            variants.push(quote! {
                #doc
                #[serde(rename = #error_name)]
                #variant_ident(std::option::Option<String>)
            });

            display_arms.push(quote! {
                Self::#variant_ident(msg) => {
                    write!(f, #error_name)?;
                    if let Some(msg) = msg {
                        write!(f, ": {}", msg)?;
                    }
                    Ok(())
                }
            });
        }

        // Generate IntoStatic impl
        let variant_info: Vec<(String, EnumVariantKind)> = errors
            .iter()
            .map(|e| (e.name.to_pascal_case(), EnumVariantKind::Tuple))
            .collect();
        let into_static_impl =
            self.generate_into_static_for_enum(&enum_name, &variant_info, true, true);

        Ok(quote! {
            #[jacquard_derive::open_union]
            #[derive(serde::Serialize, serde::Deserialize, Debug, Clone, PartialEq, Eq, thiserror::Error, miette::Diagnostic)]
            #[serde(tag = "error", content = "message")]
            #[serde(bound(deserialize = "'de: 'a"))]
            pub enum #ident<'a> {
                #(#variants,)*
            }

            impl std::fmt::Display for #ident<'_> {
                fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                    match self {
                        #(#display_arms)*
                        Self::Unknown(err) => write!(f, "Unknown error: {:?}", err),
                    }
                }
            }

            #into_static_impl
        })
    }
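
    // Shape of the generated error enum (sketch, for a hypothetical query whose
    // lexicon defines an `ExpiredToken` error): the wire error name is preserved via
    // serde rename and the optional message rides along as the tuple payload.
    //
    //     pub enum GetThingError<'a> {
    //         #[serde(rename = "ExpiredToken")]
    //         ExpiredToken(std::option::Option<String>),
    //         // ... plus an `Unknown` fallback variant, which the Display impl
    //         // above assumes the open_union macro adds
    //     }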
{ 1781 #[jacquard_derive::open_union] 1782 #[derive(serde::Serialize, serde::Deserialize, Debug, Clone, PartialEq, Eq, thiserror::Error, miette::Diagnostic)] 1783 #[serde(tag = "error", content = "message")] 1784 #[serde(bound(deserialize = "'de: 'a"))] 1785 pub enum #ident<'a> { 1786 #(#variants,)* 1787 } 1788 1789 impl std::fmt::Display for #ident<'_> { 1790 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { 1791 match self { 1792 #(#display_arms)* 1793 Self::Unknown(err) => write!(f, "Unknown error: {:?}", err), 1794 } 1795 } 1796 } 1797 1798 #into_static_impl 1799 }) 1800 } 1801 1802 /// Generate enum for string with known values 1803 fn generate_known_values_enum( 1804 &self, 1805 nsid: &str, 1806 def_name: &str, 1807 string: &LexString<'static>, 1808 ) -> Result<TokenStream> { 1809 let type_name = self.def_to_type_name(nsid, def_name); 1810 let ident = syn::Ident::new(&type_name, proc_macro2::Span::call_site()); 1811 1812 let known_values = string.known_values.as_ref().unwrap(); 1813 let mut variants = Vec::new(); 1814 let mut from_str_arms = Vec::new(); 1815 let mut as_str_arms = Vec::new(); 1816 1817 for value in known_values { 1818 // Convert value to valid Rust identifier 1819 let value_str = value.as_ref(); 1820 let variant_name = value_to_variant_name(value_str); 1821 let variant_ident = syn::Ident::new(&variant_name, proc_macro2::Span::call_site()); 1822 1823 variants.push(quote! { 1824 #variant_ident 1825 }); 1826 1827 from_str_arms.push(quote! { 1828 #value_str => Self::#variant_ident 1829 }); 1830 1831 as_str_arms.push(quote! { 1832 Self::#variant_ident => #value_str 1833 }); 1834 } 1835 1836 let doc = self.generate_doc_comment(string.description.as_ref()); 1837 1838 // Generate IntoStatic impl 1839 let variant_info: Vec<(String, EnumVariantKind)> = known_values 1840 .iter() 1841 .map(|value| { 1842 let variant_name = value_to_variant_name(value.as_ref()); 1843 (variant_name, EnumVariantKind::Unit) 1844 }) 1845 .chain(std::iter::once(( 1846 "Other".to_string(), 1847 EnumVariantKind::Tuple, 1848 ))) 1849 .collect(); 1850 let into_static_impl = 1851 self.generate_into_static_for_enum(&type_name, &variant_info, true, false); 1852 1853 Ok(quote! 
            #doc
            #[derive(Debug, Clone, PartialEq, Eq, Hash)]
            pub enum #ident<'a> {
                #(#variants,)*
                Other(jacquard_common::CowStr<'a>),
            }

            impl<'a> #ident<'a> {
                pub fn as_str(&self) -> &str {
                    match self {
                        #(#as_str_arms,)*
                        Self::Other(s) => s.as_ref(),
                    }
                }
            }

            impl<'a> From<&'a str> for #ident<'a> {
                fn from(s: &'a str) -> Self {
                    match s {
                        #(#from_str_arms,)*
                        _ => Self::Other(jacquard_common::CowStr::from(s)),
                    }
                }
            }

            impl<'a> From<String> for #ident<'a> {
                fn from(s: String) -> Self {
                    match s.as_str() {
                        #(#from_str_arms,)*
                        _ => Self::Other(jacquard_common::CowStr::from(s)),
                    }
                }
            }

            impl<'a> AsRef<str> for #ident<'a> {
                fn as_ref(&self) -> &str {
                    self.as_str()
                }
            }

            impl<'a> serde::Serialize for #ident<'a> {
                fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
                where
                    S: serde::Serializer,
                {
                    serializer.serialize_str(self.as_str())
                }
            }

            impl<'de, 'a> serde::Deserialize<'de> for #ident<'a>
            where
                'de: 'a,
            {
                fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
                where
                    D: serde::Deserializer<'de>,
                {
                    let s = <&'de str>::deserialize(deserializer)?;
                    Ok(Self::from(s))
                }
            }

            #into_static_impl
        })
    }

    /// Generate enum for integer with enum values
    fn generate_integer_enum(
        &self,
        nsid: &str,
        def_name: &str,
        integer: &LexInteger<'static>,
    ) -> Result<TokenStream> {
        let type_name = self.def_to_type_name(nsid, def_name);
        let ident = syn::Ident::new(&type_name, proc_macro2::Span::call_site());

        let enum_values = integer.r#enum.as_ref().unwrap();
        let mut variants = Vec::new();
        let mut from_i64_arms = Vec::new();
        let mut to_i64_arms = Vec::new();

        for value in enum_values {
            let variant_name = format!("Value{}", value.abs());
            let variant_ident = syn::Ident::new(&variant_name, proc_macro2::Span::call_site());

            // Serialization goes through the manual Serialize/Deserialize impls below,
            // so the variants carry no serde attributes (the generated enum has no
            // serde derive to consume them).
            variants.push(quote! {
                #variant_ident
            });

            from_i64_arms.push(quote! {
                #value => Self::#variant_ident
            });

            to_i64_arms.push(quote! {
                Self::#variant_ident => #value
            });
        }

        let doc = self.generate_doc_comment(integer.description.as_ref());

        Ok(quote! {
            #doc
            #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
            pub enum #ident {
                #(#variants,)*
                Other(i64),
            }

            impl #ident {
                pub fn as_i64(&self) -> i64 {
                    match self {
                        #(#to_i64_arms,)*
                        Self::Other(n) => *n,
                    }
                }
            }

            impl From<i64> for #ident {
                fn from(n: i64) -> Self {
                    match n {
                        #(#from_i64_arms,)*
                        _ => Self::Other(n),
                    }
                }
            }

            impl serde::Serialize for #ident {
                fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
                where
                    S: serde::Serializer,
                {
                    serializer.serialize_i64(self.as_i64())
                }
            }

            impl<'de> serde::Deserialize<'de> for #ident {
                fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
                where
                    D: serde::Deserializer<'de>,
                {
                    let n = i64::deserialize(deserializer)?;
                    Ok(Self::from(n))
                }
            }
        })
    }

    /// Generate XrpcRequest trait impl for a query or procedure
    fn generate_xrpc_request_impl(
        &self,
        nsid: &str,
        type_base: &str,
        method: TokenStream,
        output_encoding: &str,
        has_params: bool,
        params_has_lifetime: bool,
        has_output: bool,
        has_errors: bool,
        is_binary_input: bool,
    ) -> Result<TokenStream> {
        let output_type = if has_output {
            let output_ident = syn::Ident::new(
                &format!("{}Output", type_base),
                proc_macro2::Span::call_site(),
            );
            quote! { #output_ident<'de> }
        } else {
            quote! { () }
        };

        let error_type = if has_errors {
            let error_ident = syn::Ident::new(
                &format!("{}Error", type_base),
                proc_macro2::Span::call_site(),
            );
            quote! { #error_ident<'de> }
        } else {
            quote! { jacquard_common::types::xrpc::GenericError<'de> }
        };

        // Generate encode_body() method for binary inputs
        let encode_body_method = if is_binary_input {
            quote! {
                fn encode_body(&self) -> Result<Vec<u8>, jacquard_common::types::xrpc::EncodeError> {
                    Ok(self.body.to_vec())
                }
            }
        } else {
            quote! {}
        };

        if has_params {
            // Implement on the params/input struct itself
            let request_ident = syn::Ident::new(type_base, proc_macro2::Span::call_site());
            let impl_target = if params_has_lifetime {
                quote! { #request_ident<'_> }
            } else {
                quote! { #request_ident }
            };

            Ok(quote! {
                impl jacquard_common::types::xrpc::XrpcRequest for #impl_target {
                    const NSID: &'static str = #nsid;
                    const METHOD: jacquard_common::types::xrpc::XrpcMethod = #method;
                    const OUTPUT_ENCODING: &'static str = #output_encoding;

                    type Output<'de> = #output_type;
                    type Err<'de> = #error_type;

                    #encode_body_method
                }
            })
        } else {
            // No params - generate a marker struct
            let request_ident = syn::Ident::new(type_base, proc_macro2::Span::call_site());

            Ok(quote! {
                /// XRPC request marker type
                #[derive(Debug, Clone, Copy, PartialEq, Eq, serde::Serialize)]
                pub struct #request_ident;

                impl jacquard_common::types::xrpc::XrpcRequest for #request_ident {
                    const NSID: &'static str = #nsid;
                    const METHOD: jacquard_common::types::xrpc::XrpcMethod = #method;
                    const OUTPUT_ENCODING: &'static str = #output_encoding;

                    type Output<'de> = #output_type;
                    type Err<'de> = #error_type;
                }
            })
        }
    }

    /// Generate a union enum
    pub fn generate_union(
        &self,
        union_name: &str,
        refs: &[jacquard_common::CowStr<'static>],
        description: Option<&str>,
        closed: Option<bool>,
    ) -> Result<TokenStream> {
        let enum_ident = syn::Ident::new(union_name, proc_macro2::Span::call_site());

        let mut variants = Vec::new();
        for ref_str in refs {
            // Parse ref to get NSID and def name
            let (ref_nsid, ref_def) = if let Some((nsid, fragment)) = ref_str.split_once('#') {
                (nsid, fragment)
            } else {
                (ref_str.as_ref(), "main")
            };

            // Skip unknown refs - they'll be handled by Unknown variant
            if !self.corpus.ref_exists(ref_str.as_ref()) {
                continue;
            }

            // Generate variant name from def name (or last NSID segment if main)
            // For non-main refs, include the last NSID segment to avoid collisions
            // e.g. app.bsky.embed.images#view -> ImagesView
            //      app.bsky.embed.video#view -> VideoView
            let variant_name = if ref_def == "main" {
                ref_nsid.split('.').last().unwrap().to_pascal_case()
            } else {
                let last_segment = ref_nsid.split('.').last().unwrap().to_pascal_case();
                format!("{}{}", last_segment, ref_def.to_pascal_case())
            };
            let variant_ident = syn::Ident::new(&variant_name, proc_macro2::Span::call_site());

            // Get the Rust type for this ref
            let rust_type = self.ref_to_rust_type(ref_str.as_ref())?;

            // Add serde rename for the full NSID
            let ref_str_literal = ref_str.as_ref();
            variants.push(quote! {
                #[serde(rename = #ref_str_literal)]
                #variant_ident(Box<#rust_type>)
            });
        }

        let doc = description
            .map(|d| quote! { #[doc = #d] })
            .unwrap_or_else(|| quote! {});

        // Only add open_union if not closed
        let is_open = closed != Some(true);

        // Generate IntoStatic impl
        let variant_info: Vec<(String, EnumVariantKind)> = refs
            .iter()
            .filter_map(|ref_str| {
                // Skip unknown refs
                if !self.corpus.ref_exists(ref_str.as_ref()) {
                    return None;
                }

                let (ref_nsid, ref_def) = if let Some((nsid, fragment)) = ref_str.split_once('#') {
                    (nsid, fragment)
                } else {
                    (ref_str.as_ref(), "main")
                };

                let variant_name = if ref_def == "main" {
                    ref_nsid.split('.').last().unwrap().to_pascal_case()
                } else {
                    let last_segment = ref_nsid.split('.').last().unwrap().to_pascal_case();
                    format!("{}{}", last_segment, ref_def.to_pascal_case())
                };
                Some((variant_name, EnumVariantKind::Tuple))
            })
            .collect();
        let into_static_impl =
            self.generate_into_static_for_enum(union_name, &variant_info, true, is_open);

        if is_open {
            Ok(quote! {
                #doc
                #[jacquard_derive::open_union]
                #[derive(serde::Serialize, serde::Deserialize, Debug, Clone, PartialEq, Eq)]
                #[serde(tag = "$type")]
                #[serde(bound(deserialize = "'de: 'a"))]
                pub enum #enum_ident<'a> {
                    #(#variants,)*
                }

                #into_static_impl
            })
        } else {
            Ok(quote! {
                #doc
                #[derive(serde::Serialize, serde::Deserialize, Debug, Clone, PartialEq, Eq)]
                #[serde(tag = "$type")]
                #[serde(bound(deserialize = "'de: 'a"))]
                pub enum #enum_ident<'a> {
                    #(#variants,)*
                }

                #into_static_impl
            })
        }
    }

    /// Generate IntoStatic impl for a struct
    fn generate_into_static_for_struct(
        &self,
        type_name: &str,
        field_names: &[&str],
        has_lifetime: bool,
        has_extra_data: bool,
    ) -> TokenStream {
        let ident = syn::Ident::new(type_name, proc_macro2::Span::call_site());

        let field_idents: Vec<_> = field_names
            .iter()
            .map(|name| make_ident(&name.to_snake_case()))
            .collect();

        if has_lifetime {
            let field_conversions: Vec<_> = field_idents
                .iter()
                .map(|field| quote! { #field: self.#field.into_static() })
                .collect();

            let extra_data_conversion = if has_extra_data {
                quote! { extra_data: self.extra_data.into_static(), }
            } else {
                quote! {}
            };

            quote! {
                impl jacquard_common::IntoStatic for #ident<'_> {
                    type Output = #ident<'static>;

                    fn into_static(self) -> Self::Output {
                        #ident {
                            #(#field_conversions,)*
                            #extra_data_conversion
                        }
                    }
                }
            }
        } else {
            quote! {
                impl jacquard_common::IntoStatic for #ident {
                    type Output = #ident;

                    fn into_static(self) -> Self::Output {
                        self
                    }
                }
            }
        }
    }

    /// Generate IntoStatic impl for an enum
    fn generate_into_static_for_enum(
        &self,
        type_name: &str,
        variant_info: &[(String, EnumVariantKind)],
        has_lifetime: bool,
        is_open: bool,
    ) -> TokenStream {
        let ident = syn::Ident::new(type_name, proc_macro2::Span::call_site());

        if has_lifetime {
            let variant_conversions: Vec<_> = variant_info
                .iter()
                .map(|(variant_name, kind)| {
                    let variant_ident = syn::Ident::new(variant_name, proc_macro2::Span::call_site());
                    match kind {
                        EnumVariantKind::Unit => {
                            quote! {
                                #ident::#variant_ident => #ident::#variant_ident
                            }
                        }
                        EnumVariantKind::Tuple => {
                            quote! {
                                #ident::#variant_ident(v) => #ident::#variant_ident(v.into_static())
                            }
                        }
                        EnumVariantKind::Struct(fields) => {
                            let field_idents: Vec<_> = fields
                                .iter()
                                .map(|f| make_ident(&f.to_snake_case()))
                                .collect();
                            let field_conversions: Vec<_> = field_idents
                                .iter()
                                .map(|f| quote! { #f: #f.into_static() })
                                .collect();
                            quote! {
                                #ident::#variant_ident { #(#field_idents,)* } => #ident::#variant_ident {
                                    #(#field_conversions,)*
                                }
                            }
                        }
                    }
                })
                .collect();

            let unknown_conversion = if is_open {
                quote! {
                    #ident::Unknown(v) => #ident::Unknown(v.into_static()),
                }
            } else {
                quote! {}
            };

            quote! {
                impl jacquard_common::IntoStatic for #ident<'_> {
                    type Output = #ident<'static>;

                    fn into_static(self) -> Self::Output {
                        match self {
                            #(#variant_conversions,)*
                            #unknown_conversion
                        }
                    }
                }
            }
        } else {
            quote! {
                impl jacquard_common::IntoStatic for #ident {
                    type Output = #ident;

                    fn into_static(self) -> Self::Output {
                        self
                    }
                }
            }
        }
    }
}

/// Enum variant kind for IntoStatic generation
#[derive(Debug, Clone)]
#[allow(dead_code)]
enum EnumVariantKind {
    Unit,
    Tuple,
    Struct(Vec<String>),
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_generate_record() {
        let corpus =
            LexiconCorpus::load_from_dir("tests/fixtures/test_lexicons").expect("load corpus");
        let codegen = CodeGenerator::new(&corpus, "jacquard_api");

        let doc = corpus.get("app.bsky.feed.post").expect("get post");
        let def = doc.defs.get("main").expect("get main def");

        let tokens = codegen
            .generate_def("app.bsky.feed.post", "main", def)
            .expect("generate");

        // Format and print for inspection
        let file: syn::File = syn::parse2(tokens).expect("parse tokens");
        let formatted = prettyplease::unparse(&file);
        println!("\n{}\n", formatted);

        // Check basic structure
        assert!(formatted.contains("struct Post"));
        assert!(formatted.contains("pub text"));
        assert!(formatted.contains("CowStr<'a>"));
    }

    #[test]
    fn test_generate_union() {
        let corpus =
            LexiconCorpus::load_from_dir("tests/fixtures/test_lexicons").expect("load corpus");
        let codegen = CodeGenerator::new(&corpus, "jacquard_api");

        // Create a union with embed types
        let refs = vec![
            "app.bsky.embed.images".into(),
            "app.bsky.embed.video".into(),
            "app.bsky.embed.external".into(),
        ];

        let tokens = codegen
            .generate_union("RecordEmbed", &refs, Some("Post embed union"), None)
            .expect("generate union");

        let file: syn::File = syn::parse2(tokens).expect("parse tokens");
        let formatted = prettyplease::unparse(&file);
        println!("\n{}\n", formatted);

        // Check structure
        assert!(formatted.contains("enum RecordEmbed"));
        assert!(formatted.contains("Images"));
        assert!(formatted.contains("Video"));
        assert!(formatted.contains("External"));
        assert!(formatted.contains("#[serde(tag = \"$type\")]"));
        assert!(formatted.contains("#[jacquard_derive::open_union]"));
    }

    #[test]
    fn test_generate_query() {
        let corpus =
            LexiconCorpus::load_from_dir("tests/fixtures/test_lexicons").expect("load corpus");
        let codegen = CodeGenerator::new(&corpus, "jacquard_api");

        let doc = corpus
            .get("app.bsky.feed.getAuthorFeed")
            .expect("get getAuthorFeed");
        let def = doc.defs.get("main").expect("get main def");

        let tokens = codegen
            .generate_def("app.bsky.feed.getAuthorFeed", "main", def)
            .expect("generate");

        let file: syn::File = syn::parse2(tokens).expect("parse tokens");
        let formatted = prettyplease::unparse(&file);
        println!("\n{}\n", formatted);

        // Check structure
        assert!(formatted.contains("struct GetAuthorFeed"));
        assert!(formatted.contains("struct GetAuthorFeedOutput"));
        assert!(formatted.contains("enum GetAuthorFeedError"));
        assert!(formatted.contains("pub actor"));
        assert!(formatted.contains("pub limit"));
        assert!(formatted.contains("pub cursor"));
        assert!(formatted.contains("pub feed"));
        assert!(formatted.contains("BlockedActor"));
        assert!(formatted.contains("BlockedByActor"));
    }

    #[test]
    fn test_generate_known_values_enum() {
        let corpus =
            LexiconCorpus::load_from_dir("tests/fixtures/test_lexicons").expect("load corpus");
        let codegen = CodeGenerator::new(&corpus, "jacquard_api");

        let doc = corpus
            .get("com.atproto.label.defs")
            .expect("get label defs");
        let def = doc.defs.get("labelValue").expect("get labelValue def");

        let tokens = codegen
            .generate_def("com.atproto.label.defs", "labelValue", def)
            .expect("generate");

        let file: syn::File = syn::parse2(tokens).expect("parse tokens");
        let formatted = prettyplease::unparse(&file);
        println!("\n{}\n", formatted);

        // Check structure
        assert!(formatted.contains("enum LabelValue"));
        assert!(formatted.contains("Hide"));
        assert!(formatted.contains("NoPromote"));
        assert!(formatted.contains("Warn"));
        assert!(formatted.contains("DmcaViolation"));
        assert!(formatted.contains("Other(jacquard_common::CowStr"));
        assert!(formatted.contains("impl<'a> From<&'a str>"));
        assert!(formatted.contains("fn as_str(&self)"));
    }

    #[test]
    fn test_nsid_to_file_path() {
        let corpus =
            LexiconCorpus::load_from_dir("tests/fixtures/test_lexicons").expect("load corpus");
        let codegen = CodeGenerator::new(&corpus, "jacquard_api");

        // Regular paths
        assert_eq!(
            codegen.nsid_to_file_path("app.bsky.feed.post"),
            std::path::PathBuf::from("app_bsky/feed/post.rs")
        );

        assert_eq!(
            codegen.nsid_to_file_path("app.bsky.feed.getAuthorFeed"),
            std::path::PathBuf::from("app_bsky/feed/get_author_feed.rs")
        );

        // Defs paths - should go in parent
        assert_eq!(
            codegen.nsid_to_file_path("com.atproto.label.defs"),
            std::path::PathBuf::from("com_atproto/label.rs")
        );
    }

    #[test]
    fn test_write_to_disk() {
        let corpus =
            LexiconCorpus::load_from_dir("tests/fixtures/test_lexicons").expect("load corpus");
        let codegen = CodeGenerator::new(&corpus, "test_generated");

        let tmp_dir =
            tempfile::tempdir().expect("should be able to create temp directory for output");
        let output_dir = std::path::PathBuf::from(tmp_dir.path());

        // Clean up any previous test output
        let _ = std::fs::remove_dir_all(&output_dir);

        // Generate and write
        codegen.write_to_disk(&output_dir).expect("write to disk");

        // Verify some files were created
        assert!(output_dir.join("app_bsky/feed/post.rs").exists());
        assert!(output_dir.join("app_bsky/feed/get_author_feed.rs").exists());
        assert!(output_dir.join("com_atproto/label.rs").exists());

        // Verify module files were created
        assert!(output_dir.join("lib.rs").exists());
        assert!(output_dir.join("app_bsky.rs").exists());

        // Read and verify post.rs contains expected content
        let post_content = std::fs::read_to_string(output_dir.join("app_bsky/feed/post.rs"))
            .expect("read post.rs");
        assert!(post_content.contains("pub struct Post"));
        assert!(post_content.contains("jacquard_common"));
    }
}
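
// A minimal sketch of one more test covering the closed-union branch of
// generate_union, assuming the same test fixtures as the tests above; the
// module name and exact assertions here are illustrative, not part of the
// crate's existing test suite.
#[cfg(test)]
mod closed_union_sketch {
    use super::*;

    #[test]
    fn closed_union_omits_open_union_attribute() {
        let corpus =
            LexiconCorpus::load_from_dir("tests/fixtures/test_lexicons").expect("load corpus");
        let codegen = CodeGenerator::new(&corpus, "jacquard_api");

        let refs = vec![
            "app.bsky.embed.images".into(),
            "app.bsky.embed.video".into(),
        ];

        // `closed: Some(true)` takes the non-open branch, so the generated enum
        // should not carry the open_union attribute (and no Unknown variant).
        let tokens = codegen
            .generate_union("ClosedEmbed", &refs, None, Some(true))
            .expect("generate closed union");

        let file: syn::File = syn::parse2(tokens).expect("parse tokens");
        let formatted = prettyplease::unparse(&file);

        assert!(formatted.contains("enum ClosedEmbed"));
        assert!(!formatted.contains("open_union"));
    }
}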