···
// Only process .json files
if !info.IsDir() && filepath.Ext(path) == ".json" {
schemaFiles = append(schemaFiles, path)
// Convert file path to schema ID
// e.g., internal/atproto/lexicon/social/coves/actor/profile.json -> social.coves.actor.profile
- relPath, _ := filepath.Rel(schemaPath, path)
+ relPath, err := filepath.Rel(schemaPath, path)
+ if err != nil {
+ 	return fmt.Errorf("failed to compute relative path: %w", err)
+ }
schemaID := filepath.ToSlash(relPath)
schemaID = schemaID[:len(schemaID)-5] // Remove .json extension
schemaID = strings.ReplaceAll(schemaID, "/", ".")
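
For reference, the path-to-schema-ID conversion in this hunk can be read as a small standalone helper. The sketch below is illustrative only (hypothetical name, and strings.TrimSuffix in place of the slice expression); it relies on the same fmt, path/filepath, and strings imports already used by this file:

// pathToSchemaID converts a schema file path into a dotted schema ID,
// e.g. internal/atproto/lexicon/social/coves/actor/profile.json -> social.coves.actor.profile.
// Illustrative only; the real code inlines this inside the Walk callback.
func pathToSchemaID(schemaRoot, path string) (string, error) {
	relPath, err := filepath.Rel(schemaRoot, path)
	if err != nil {
		return "", fmt.Errorf("failed to compute relative path: %w", err)
	}
	id := filepath.ToSlash(relPath)      // normalize path separators
	id = strings.TrimSuffix(id, ".json") // drop the .json extension
	return strings.ReplaceAll(id, "/", "."), nil
}

Called with schemaRoot "internal/atproto/lexicon" and the profile.json path from the comment above, this returns "social.coves.actor.profile".
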
···
return fmt.Errorf("error walking schema directory: %w", err)
···
return fmt.Errorf("error walking schema directory: %w", err)
···
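
The extractAllSchemaIDs change below wraps the previously bare filepath.Walk call so its return value is checked rather than discarded. For a function that can return an error (as in the hunks above), the full shape of that idiom looks roughly like this sketch (illustrative only; it uses the same fmt, os, and path/filepath imports as the file):

// walkSchemas sketches the error-checked filepath.Walk idiom:
// the callback propagates per-entry errors, and the Walk result is wrapped.
func walkSchemas(schemaPath string) error {
	if err := filepath.Walk(schemaPath, func(path string, info os.FileInfo, err error) error {
		if err != nil {
			return err // propagate errors hit while walking
		}
		// per-file handling (see the hunks below)
		return nil
	}); err != nil {
		return fmt.Errorf("error walking schema directory: %w", err)
	}
	return nil
}

Since extractAllSchemaIDs returns no error, the diff logs a warning there instead of returning one.
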
// extractAllSchemaIDs walks the schema directory and returns all schema IDs
func extractAllSchemaIDs(schemaPath string) []string {
- filepath.Walk(schemaPath, func(path string, info os.FileInfo, err error) error {
+ if err := filepath.Walk(schemaPath, func(path string, info os.FileInfo, err error) error {
// Skip test-data directory
if info.IsDir() && info.Name() == "test-data" {
// Only process .json files
if !info.IsDir() && filepath.Ext(path) == ".json" {
// Convert file path to schema ID
- relPath, _ := filepath.Rel(schemaPath, path)
+ relPath, err := filepath.Rel(schemaPath, path)
schemaID := filepath.ToSlash(relPath)
schemaID = schemaID[:len(schemaID)-5] // Remove .json extension
schemaID = strings.ReplaceAll(schemaID, "/", ".")
// Only include record schemas (not procedures)
- if strings.Contains(schemaID, ".record") ||
- 	strings.Contains(schemaID, ".profile") ||
- 	strings.Contains(schemaID, ".rules") ||
- 	strings.Contains(schemaID, ".wiki") ||
- 	strings.Contains(schemaID, ".subscription") ||
- 	strings.Contains(schemaID, ".membership") ||
- 	strings.Contains(schemaID, ".vote") ||
- 	strings.Contains(schemaID, ".tag") ||
- 	strings.Contains(schemaID, ".comment") ||
- 	strings.Contains(schemaID, ".share") ||
- 	strings.Contains(schemaID, ".tribunalVote") ||
- 	strings.Contains(schemaID, ".ruleProposal") ||
- 	strings.Contains(schemaID, ".ban") {
+ if strings.Contains(schemaID, ".record") ||
+ 	strings.Contains(schemaID, ".profile") ||
+ 	strings.Contains(schemaID, ".rules") ||
+ 	strings.Contains(schemaID, ".wiki") ||
+ 	strings.Contains(schemaID, ".subscription") ||
+ 	strings.Contains(schemaID, ".membership") ||
+ 	strings.Contains(schemaID, ".vote") ||
+ 	strings.Contains(schemaID, ".tag") ||
+ 	strings.Contains(schemaID, ".comment") ||
+ 	strings.Contains(schemaID, ".share") ||
+ 	strings.Contains(schemaID, ".tribunalVote") ||
+ 	strings.Contains(schemaID, ".ruleProposal") ||
+ 	strings.Contains(schemaID, ".ban") {
schemaIDs = append(schemaIDs, schemaID)
+ log.Printf("Warning: failed to walk schema directory: %v", err)
// validateTestData validates test JSON data files against their corresponding schemas
- func validateTestData(catalog *lexicon.BaseCatalog, testDataPath string, verbose bool, strict bool, allSchemas []string) error {
+ func validateTestData(catalog *lexicon.BaseCatalog, testDataPath string, verbose, strict bool, allSchemas []string) error {
// Check if test data directory exists
if _, err := os.Stat(testDataPath); os.IsNotExist(err) {
return fmt.Errorf("test data path does not exist: %s", testDataPath)
···
if !info.IsDir() && filepath.Ext(path) == ".json" {
filename := filepath.Base(path)
isInvalidTest := strings.Contains(filename, "-invalid-")
fmt.Printf("\n Testing (expect failure): %s\n", filename)
···
validationErrors = append(validationErrors, fmt.Sprintf("Failed to open %s: %v", path, err))
+ if closeErr := file.Close(); closeErr != nil {
+ 	validationErrors = append(validationErrors, fmt.Sprintf("Failed to close %s: %v", path, closeErr))
- data, err := io.ReadAll(file)
- if err != nil {
- 	validationErrors = append(validationErrors, fmt.Sprintf("Failed to read %s: %v", path, err))
+ data, readErr := io.ReadAll(file)
+ if readErr != nil {
+ 	validationErrors = append(validationErrors, fmt.Sprintf("Failed to read %s: %v", path, readErr))
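
Taken together, the open, read, and close handling above behaves roughly like the helper sketched below. Whether the real change closes the file inline or via defer is not visible in this excerpt; the sketch assumes a deferred close and uses the existing fmt, io, and os imports, with illustrative names:

// readTestFile reads one test-data file and reports open, read, and close
// failures as separate messages, mirroring the handling in this hunk.
func readTestFile(path string) (data []byte, problems []string) {
	file, err := os.Open(path)
	if err != nil {
		return nil, append(problems, fmt.Sprintf("Failed to open %s: %v", path, err))
	}
	defer func() {
		if closeErr := file.Close(); closeErr != nil {
			problems = append(problems, fmt.Sprintf("Failed to close %s: %v", path, closeErr))
		}
	}()
	data, readErr := io.ReadAll(file)
	if readErr != nil {
		problems = append(problems, fmt.Sprintf("Failed to read %s: %v", path, readErr))
	}
	return data, problems
}
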
···
var recordData map[string]interface{}
decoder := json.NewDecoder(bytes.NewReader(data))
decoder.UseNumber() // This preserves numbers as json.Number instead of float64
- if err := decoder.Decode(&recordData); err != nil {
- 	validationErrors = append(validationErrors, fmt.Sprintf("Failed to parse JSON in %s: %v", path, err))
+ if decodeErr := decoder.Decode(&recordData); decodeErr != nil {
+ 	validationErrors = append(validationErrors, fmt.Sprintf("Failed to parse JSON in %s: %v", path, decodeErr))
// Convert json.Number values to appropriate types
recordData = convertNumbers(recordData).(map[string]interface{})
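
decoder.UseNumber() keeps numeric JSON values as json.Number instead of float64, so large integers survive decoding without precision loss. The convertNumbers helper is not shown in this diff; a plausible shape for it, given purely as an assumption for illustration (requires the encoding/json import), would walk the decoded value and turn each json.Number into an int64 where possible, falling back to float64:

// convertNumbers is a guess at the shape of the helper referenced above; the
// actual implementation is not shown in this diff. It walks the decoded value
// and converts each json.Number to int64 when possible, else float64.
func convertNumbers(v interface{}) interface{} {
	switch val := v.(type) {
	case map[string]interface{}:
		for k, item := range val {
			val[k] = convertNumbers(item)
		}
		return val
	case []interface{}:
		for i, item := range val {
			val[i] = convertNumbers(item)
		}
		return val
	case json.Number:
		if n, err := val.Int64(); err == nil {
			return n
		}
		f, _ := val.Float64()
		return f
	default:
		return v
	}
}
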
···
- err = lexicon.ValidateRecord(catalog, recordData, recordType, flags)
+ validateErr := lexicon.ValidateRecord(catalog, recordData, recordType, flags)
// This file should fail validation
- fmt.Printf(" ✅ Correctly rejected invalid %s record: %v\n", recordType, err)
+ if validateErr != nil {
+ 	fmt.Printf(" ✅ Correctly rejected invalid %s record: %v\n", recordType, validateErr)
validationErrors = append(validationErrors, fmt.Sprintf("Invalid test file %s passed validation when it should have failed", path))
···
// This file should pass validation
- validationErrors = append(validationErrors, fmt.Sprintf("Validation failed for %s (type: %s): %v", path, recordType, err))
- fmt.Printf(" ❌ Failed: %v\n", err)
+ if validateErr != nil {
+ 	validationErrors = append(validationErrors, fmt.Sprintf("Validation failed for %s (type: %s): %v", path, recordType, validateErr))
+ 	fmt.Printf(" ❌ Failed: %v\n", validateErr)
···
return fmt.Errorf("error walking test data directory: %w", err)
···
fmt.Printf("\n📋 Validation Summary:\n")
fmt.Printf(" Valid test files: %d/%d passed\n", validSuccessCount, validFiles)
fmt.Printf(" Invalid test files: %d/%d correctly rejected\n", invalidFailCount, invalidFiles)
if validSuccessCount == validFiles && invalidFailCount == invalidFiles {
fmt.Printf("\n ✅ All test files behaved as expected!\n")
// Show test coverage summary (only for valid files)
fmt.Printf("\n📊 Test Data Coverage Summary:\n")
fmt.Printf(" - Records with test data: %d types\n", len(testedTypes))
fmt.Printf(" - Valid test files: %d\n", validFiles)
fmt.Printf(" - Invalid test files: %d (for error validation)\n", invalidFiles)
fmt.Printf("\n Tested record types:\n")
for recordType := range testedTypes {
fmt.Printf(" ✓ %s\n", recordType)
fmt.Printf("\n ⚠️ Record types without test data:\n")
···
fmt.Println(" (None - full test coverage!)")
- fmt.Printf("\n Coverage: %d/%d record types have test data (%.1f%%)\n",
- 	len(testedTypes), len(allSchemas),
+ fmt.Printf("\n Coverage: %d/%d record types have test data (%.1f%%)\n",
+ 	len(testedTypes), len(allSchemas),
float64(len(testedTypes))/float64(len(allSchemas))*100)
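
For example, if 12 of the 15 record types had test data, this would print "Coverage: 12/15 record types have test data (80.0%)".
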
···
"social.coves.richtext.facet#italic",
"social.coves.richtext.facet#strikethrough",
"social.coves.richtext.facet#spoiler",
"social.coves.post.get#postView",
"social.coves.post.get#authorView",
···
"social.coves.post.get#externalView",
"social.coves.post.get#postStats",
"social.coves.post.get#viewerState",
"social.coves.post.record#originalAuthor",
"social.coves.actor.profile#geoLocation",
"social.coves.community.rules#rule",