···
lexicon "github.com/bluesky-social/indigo/atproto/lexicon"

	schemaPath   = flag.String("path", "internal/atproto/lexicon", "Path to lexicon schemas directory")
	testDataPath = flag.String("test-data", "tests/lexicon-test-data", "Path to test data directory for ValidateRecord testing")
	verbose      = flag.Bool("v", false, "Verbose output")
	strict       = flag.Bool("strict", false, "Use strict validation mode")
	schemasOnly  = flag.Bool("schemas-only", false, "Only validate schemas, skip test data validation")
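
// Example invocation (the package path below is hypothetical; adjust it to wherever
// this tool lives in the repository). All flags fall back to the defaults above:
//
//	go run ./cmd/lexicon-validate -path internal/atproto/lexicon -test-data tests/lexicon-test-data -v -strict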
···
		log.Fatalf("Schema validation failed: %v", err)
	}

	// Validate cross-references between schemas
	if err := validateCrossReferences(&catalog, *verbose); err != nil {
		log.Fatalf("Cross-reference validation failed: %v", err)
	}

	// Validate test data unless the schemas-only flag is set
	if !*schemasOnly {
		fmt.Printf("\n📋 Validating test data from: %s\n", *testDataPath)
		allSchemas := extractAllSchemaIDs(*schemaPath)
		if err := validateTestData(&catalog, *testDataPath, *verbose, *strict, allSchemas); err != nil {
			log.Fatalf("Test data validation failed: %v", err)
		}
	} else {
		fmt.Println("\n⏩ Skipping test data validation (--schemas-only flag set)")
	}

	fmt.Println("\n✅ All validations passed successfully!")
// validateSchemaStructure performs additional validation checks
func validateSchemaStructure(catalog *lexicon.BaseCatalog, schemaPath string, verbose bool) error {
	var validationErrors []string
	var schemaFiles []string
	var schemaIDs []string

	// Collect all JSON schema files and derive their IDs
	err := filepath.Walk(schemaPath, func(path string, info os.FileInfo, err error) error {
		// Skip test-data directory
		if info.IsDir() && info.Name() == "test-data" {
			return filepath.SkipDir
		}

		// Only process .json files
		if !info.IsDir() && filepath.Ext(path) == ".json" {
			schemaFiles = append(schemaFiles, path)

			// Convert file path to schema ID,
			// e.g. internal/atproto/lexicon/social/coves/actor/profile.json -> social.coves.actor.profile
			relPath, _ := filepath.Rel(schemaPath, path)
			schemaID := filepath.ToSlash(relPath)
			schemaID = schemaID[:len(schemaID)-5] // Remove the ".json" extension
			schemaID = strings.ReplaceAll(schemaID, "/", ".")
			schemaIDs = append(schemaIDs, schemaID)
···
	// Validate all discovered schemas
	fmt.Println("\nValidating all schemas:")
	for i, schemaID := range schemaIDs {
		if _, err := catalog.Resolve(schemaID); err != nil {
			validationErrors = append(validationErrors, fmt.Sprintf("Failed to resolve schema %s (from %s): %v", schemaID, schemaFiles[i], err))
		} else {
			fmt.Printf("  ✅ %s\n", schemaID)
		}
···
return fmt.Errorf("found %d validation errors", len(validationErrors))
+
fmt.Printf("\n✅ Successfully validated all %d schemas\n", len(schemaIDs))
···
		// Skip test-data directory
		if info.IsDir() && info.Name() == "test-data" {
			return filepath.SkipDir
		}

		// Only process .json files
		if !info.IsDir() && filepath.Ext(path) == ".json" {
			schemaFiles = append(schemaFiles, path)
···
// If all individual files loaded OK, try loading the whole directory
return catalog.LoadDirectory(schemaPath)
}

// extractAllSchemaIDs walks the schema directory and returns all record schema IDs
func extractAllSchemaIDs(schemaPath string) []string {
	var schemaIDs []string

	filepath.Walk(schemaPath, func(path string, info os.FileInfo, err error) error {
		// Skip test-data directory
		if info.IsDir() && info.Name() == "test-data" {
			return filepath.SkipDir
		}

		// Only process .json files
		if !info.IsDir() && filepath.Ext(path) == ".json" {
			// Convert file path to schema ID
			relPath, _ := filepath.Rel(schemaPath, path)
			schemaID := filepath.ToSlash(relPath)
			schemaID = schemaID[:len(schemaID)-5] // Remove the ".json" extension
			schemaID = strings.ReplaceAll(schemaID, "/", ".")

			// Only include record schemas (not procedures)
			if strings.Contains(schemaID, ".record") ||
				strings.Contains(schemaID, ".profile") ||
				strings.Contains(schemaID, ".rules") ||
				strings.Contains(schemaID, ".wiki") ||
				strings.Contains(schemaID, ".subscription") ||
				strings.Contains(schemaID, ".membership") ||
				strings.Contains(schemaID, ".vote") ||
				strings.Contains(schemaID, ".tag") ||
				strings.Contains(schemaID, ".comment") ||
				strings.Contains(schemaID, ".share") ||
				strings.Contains(schemaID, ".tribunalVote") ||
				strings.Contains(schemaID, ".ruleProposal") ||
				strings.Contains(schemaID, ".ban") {
				schemaIDs = append(schemaIDs, schemaID)
			}
		}
		return nil
	})

	return schemaIDs
}

// validateTestData validates test JSON data files against their corresponding schemas
func validateTestData(catalog *lexicon.BaseCatalog, testDataPath string, verbose bool, strict bool, allSchemas []string) error {
	// Check that the test data directory exists
	if _, err := os.Stat(testDataPath); os.IsNotExist(err) {
		return fmt.Errorf("test data path does not exist: %s", testDataPath)
	}

	var validationErrors []string
	testedTypes := make(map[string]bool)

	// Counters for the summary printed after the walk
	var validFiles, invalidFiles, validSuccessCount, invalidFailCount int
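
	// For illustration, a fixture under the test data directory might look roughly like the
	// sketch below (hypothetical file name and fields; only "$type" is required by the checks
	// in this function, and file names containing "-invalid-" are expected to fail validation):
	//
	//	// actor-profile-valid.json
	//	{
	//	  "$type": "social.coves.actor.profile",
	//	  "displayName": "Example User"
	//	}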
+
	// Walk through test data directory
	err := filepath.Walk(testDataPath, func(path string, info os.FileInfo, err error) error {
		// Only process .json files
		if info.IsDir() || filepath.Ext(path) != ".json" {
			return nil
		}

		filename := filepath.Base(path)
		isInvalidTest := strings.Contains(filename, "-invalid-")
		if isInvalidTest {
			invalidFiles++
			fmt.Printf("\n  Testing (expect failure): %s\n", filename)
		} else {
			validFiles++
			fmt.Printf("\n  Testing: %s\n", filename)
		}

		file, err := os.Open(path)
		if err != nil {
			validationErrors = append(validationErrors, fmt.Sprintf("Failed to open %s: %v", path, err))
			return nil
		}
		defer file.Close()

		data, err := io.ReadAll(file)
		if err != nil {
			validationErrors = append(validationErrors, fmt.Sprintf("Failed to read %s: %v", path, err))
			return nil
		}

		var recordData map[string]interface{}
		if err := json.Unmarshal(data, &recordData); err != nil {
			validationErrors = append(validationErrors, fmt.Sprintf("Failed to parse JSON in %s: %v", path, err))
			return nil
		}

		recordType, ok := recordData["$type"].(string)
		if !ok {
			validationErrors = append(validationErrors, fmt.Sprintf("Missing or invalid $type field in %s", path))
			return nil
		}

		// Set validation flags
		flags := lexicon.ValidateFlags(0)
		if strict {
			flags |= lexicon.StrictRecursiveValidation
		}
		flags |= lexicon.AllowLenientDatetime

		err = lexicon.ValidateRecord(catalog, recordData, recordType, flags)
		if isInvalidTest {
			// This file should fail validation
			if err != nil {
				invalidFailCount++
				fmt.Printf("    ✅ Correctly rejected invalid %s record: %v\n", recordType, err)
			} else {
				validationErrors = append(validationErrors, fmt.Sprintf("Invalid test file %s passed validation when it should have failed", path))
				fmt.Printf("    ❌ ERROR: Invalid record passed validation!\n")
			}
			return nil
		}

		// This file should pass validation
		if err != nil {
			validationErrors = append(validationErrors, fmt.Sprintf("Validation failed for %s (type: %s): %v", path, recordType, err))
			fmt.Printf("    ❌ Failed: %v\n", err)
			return nil
		}

		validSuccessCount++
		testedTypes[recordType] = true
		fmt.Printf("    ✅ Valid %s record\n", recordType)
		return nil
	})
+
return fmt.Errorf("error walking test data directory: %w", err)
+
if len(validationErrors) > 0 {
+
fmt.Println("\n❌ Test data validation errors found:")
+
for _, errMsg := range validationErrors {
+
fmt.Printf(" %s\n", errMsg)
+
return fmt.Errorf("found %d validation errors", len(validationErrors))
+
	totalFiles := validFiles + invalidFiles
	if totalFiles == 0 {
		fmt.Println("  ⚠️  No test data files found")
		return nil
	}

	// Show validation summary
	fmt.Printf("\n📋 Validation Summary:\n")
	fmt.Printf("  Valid test files: %d/%d passed\n", validSuccessCount, validFiles)
	fmt.Printf("  Invalid test files: %d/%d correctly rejected\n", invalidFailCount, invalidFiles)
	if validSuccessCount == validFiles && invalidFailCount == invalidFiles {
		fmt.Printf("\n  ✅ All test files behaved as expected!\n")
	}

	// Show test coverage summary (only for valid files)
	fmt.Printf("\n📊 Test Data Coverage Summary:\n")
	fmt.Printf("  - Records with test data: %d types\n", len(testedTypes))
	fmt.Printf("  - Valid test files: %d\n", validFiles)
	fmt.Printf("  - Invalid test files: %d (for error validation)\n", invalidFiles)

	fmt.Printf("\n  Tested record types:\n")
	for recordType := range testedTypes {
		fmt.Printf("    ✓ %s\n", recordType)
	}

	// Show untested schemas
	fmt.Printf("\n  ⚠️  Record types without test data:\n")
	untestedCount := 0
	for _, schema := range allSchemas {
		if !testedTypes[schema] {
			untestedCount++
			fmt.Printf("    - %s\n", schema)
		}
	}
	if untestedCount == 0 {
		fmt.Println("    (None - full test coverage!)")
	}

	fmt.Printf("\n  Coverage: %d/%d record types have test data (%.1f%%)\n",
		len(testedTypes), len(allSchemas),
		float64(len(testedTypes))/float64(len(allSchemas))*100)

	return nil
}

// validateCrossReferences validates that all schema references resolve correctly
func validateCrossReferences(catalog *lexicon.BaseCatalog, verbose bool) error {
	knownRefs := []string{
		"social.coves.richtext.facet",
		"social.coves.richtext.facet#byteSlice",
		"social.coves.richtext.facet#mention",
		"social.coves.richtext.facet#link",
		"social.coves.richtext.facet#bold",
		"social.coves.richtext.facet#italic",
		"social.coves.richtext.facet#strikethrough",
		"social.coves.richtext.facet#spoiler",

		// Post types and views
		"social.coves.post.get#postView",
		"social.coves.post.get#authorView",
		"social.coves.post.get#communityRef",
		"social.coves.post.get#imageView",
		"social.coves.post.get#videoView",
		"social.coves.post.get#externalView",
		"social.coves.post.get#postStats",
		"social.coves.post.get#viewerState",
		"social.coves.post.record#originalAuthor",
		"social.coves.actor.profile#geoLocation",

		// Community definitions
		"social.coves.community.rules#rule",
	}

	var errors []string

	fmt.Println("\n🔍 Validating cross-references between schemas:")
	for _, ref := range knownRefs {
		if _, err := catalog.Resolve(ref); err != nil {
			errors = append(errors, fmt.Sprintf("Failed to resolve reference %s: %v", ref, err))
		} else {
			fmt.Printf("  ✅ %s\n", ref)
		}
	}

	if len(errors) > 0 {
		return fmt.Errorf("cross-reference validation failed:\n%s", strings.Join(errors, "\n"))
	}