···4242 @echo "\nRunning workspace resolution tests..."
4343 cargo test -p mlf-integration-tests --test workspace_integration -- --nocapture
44444545-# Run real-world lexicon tests (when implemented)
4545+# Run real-world round-trip tests (network-dependent, ignored by default)
4646test-real-world:
4747- @echo "\nRunning real-world lexicon tests..."
4848- cargo test -p mlf-integration-tests --test real_world_integration -- --nocapture
4747+ @echo "\n🌐 Running real-world round-trip tests (fetches from network)..."
4848+ @echo "This will download lexicons from: app.bsky.*, net.anisota.*, place.stream.*, pub.leaflet.*"
4949+ @echo ""
5050+ cargo test -p mlf-integration-tests --test real_world_roundtrip -- --ignored --nocapture
49515052# Run all workspace tests (excluding problematic packages)
5153test-all:
+2
tests/.gitignore
···11+# Real-world test artifacts (generated during test runs)
22+real_world/roundtrip/diffs/
+5
tests/Cargo.toml
···1717serde = { version = "1.0", features = ["derive"] }
1818toml = "0.8"
1919tokio = { version = "1", features = ["full"] }
2020+tempfile = "3.8"
20212122[dev-dependencies]
2223# Any additional test dependencies
···3233[[test]]
3334name = "lexicon_fetcher_integration"
3435path = "lexicon_fetcher_integration.rs"
3636+3737+[[test]]
3838+name = "real_world_roundtrip"
3939+path = "real_world/roundtrip.rs"
+13-12
tests/README.md
···3434## Current Status
35353636### ✅ Implemented
3737-- **mlf-lang/tests/lang/** - 17 tests for parsing and validation
3838-- **tests/codegen/lexicon/** - 4 tests for lexicon generation
3737+- **mlf-lang/tests/lang/** - 21 tests for parsing and validation
3838+- **tests/codegen/lexicon/** - 10 tests for lexicon generation
3939+- **tests/real_world_roundtrip** - Round-trip test (JSON → MLF → JSON) with real lexicons
39404041### 🚧 Planned
4142···8081- **workspace/precedence** - Resolution order (local > home > std)
8182- **workspace/sibling_files** - Multi-file modules
82838383-#### Real-World Tests
8484-- **real_world/bsky** - Full app.bsky.* lexicons
8585-- **real_world/place_stream** - Full place.stream.* lexicons
8686-- **real_world/atproto** - Full com.atproto.* lexicons
8787-- **real_world/bidirectional** - Roundtrip MLF ↔ JSON
8484+#### Real-World Tests ✅
8585+- **real_world/** - Tests using real lexicons from production networks
8686+ - **roundtrip** - Round-trip test: JSON → MLF → JSON
8787+ - Fetches real lexicons (app.bsky.*, net.anisota.*, place.stream.*, pub.leaflet.*)
8888+ - Validates accurate conversion both ways
8989+ - Writes diff files to `real_world/roundtrip/diffs/` (gitignored)
9090+ - Run with: `just test-real-world`
9191+ - See `real_world/README.md` for details
88928993## Running Tests
9094···100104just test-codegen # Codegen tests (10 tests)
101105just test-validation # Validation tests (12 tests)
102106103103-# Future test categories
104104-just test-cli # CLI integration tests
105105-just test-diagnostics # Error message tests
106106-just test-workspace # Multi-file resolution tests
107107-just test-real-world # Full lexicon suites
107107+# Network-dependent tests (run explicitly)
108108+just test-real-world # Round-trip test: fetch real lexicons, convert MLF→JSON, verify
108109109110# Other useful commands
110111just test-all # All workspace tests (includes unit tests)
+58
tests/real_world/README.md
···11+# Real-World Tests
22+33+Tests that fetch and validate real lexicons from production networks.
44+55+## Tests
66+77+### `roundtrip.rs` - Round-Trip Test
88+99+Validates that MLF can accurately convert lexicons: JSON → MLF → JSON
1010+1111+**What it does:**
1212+1. Fetches real lexicons from production networks:
1313+ - `app.bsky.actor.*`, `app.bsky.feed.*`, `app.bsky.graph.*`
1414+ - `net.anisota.*`
1515+ - `place.stream.*`
1616+ - `pub.leaflet.*`
1717+2. Converts downloaded JSON to MLF (automatic during fetch)
1818+3. Generates JSON back from MLF files
1919+4. Compares original vs regenerated JSON
2020+2121+**Running:**
2222+```bash
2323+# Using just
2424+just test-real-world
2525+2626+# Using cargo
2727+cargo test -p mlf-integration-tests --test real_world_roundtrip -- --ignored --nocapture
2828+```
2929+3030+**Network-dependent:** This test fetches from real networks, so it:
3131+- Is marked `#[ignore]` by default
3232+- Requires internet connectivity
3333+- Takes 30-60 seconds to run
3434+3535+**Diff files:** When differences are found, the test writes three files per lexicon to `roundtrip/diffs/`:
3636+- `{nsid}.original.json` - The original fetched JSON
3737+- `{nsid}.generated.json` - The regenerated JSON from MLF
3838+- `{nsid}.diff` - Unified diff output (`diff -u`)
3939+4040+Files are organized into subdirectories:
4141+- `diffs/acceptable/` - Acceptable differences (field ordering, `$type` fields)
4242+- `diffs/failure/` - Structural differences that indicate bugs
4343+4444+These files are gitignored but persisted locally for review.
4545+4646+## Adding New Tests
4747+4848+To add more real-world test sources:
4949+5050+1. Edit `TEST_SOURCES` in `roundtrip.rs`
5151+2. Ensure the NSID has published DNS TXT records
5252+3. Test with `mlf fetch <nsid>` first to verify
5353+5454+## Notes
5555+5656+- These tests validate the core MLF workflow end-to-end
5757+- Failures indicate bugs in either JSON→MLF or MLF→JSON conversion
5858+- Review diff files to diagnose what changed
+505
tests/real_world/roundtrip.rs
···11+// Real-world round-trip tests: JSON → MLF → JSON
22+//
33+// These tests fetch real lexicons from the network, convert them to MLF,
44+// then generate JSON back and verify the round-trip is accurate.
55+//
66+// Run with: cargo test --test real_world_roundtrip -- --ignored --nocapture
77+88+use std::collections::HashSet;
99+use std::fs;
1010+use std::path::{Path, PathBuf};
1111+use std::process::Command;
1212+use tempfile::TempDir;
/// Real-world lexicon sources to test.
/// These use specific namespaces that have DNS TXT records published.
/// Each pattern is passed verbatim to `mlf fetch` (see `fetch_lexicons`).
const TEST_SOURCES: &[&str] = &[
    // Bluesky - use specific namespaces since top-level doesn't have TXT record
    "app.bsky.actor.*",
    "app.bsky.feed.*",
    "app.bsky.graph.*",
    // Other networks
    "net.anisota.*",
    "place.stream.*",
    "pub.leaflet.*",
];
/// End-to-end round-trip test: fetch real lexicons as JSON, convert to MLF,
/// regenerate JSON from the MLF files, and verify the output matches the
/// original.
///
/// Network-dependent (downloads lexicons via `mlf fetch`), so it is marked
/// `#[ignore]` and must be run explicitly with `--ignored`.
#[test]
#[ignore] // Network-dependent test, run explicitly with --ignored
fn test_real_world_roundtrip() {
    println!("\n🌐 Real-World Round-Trip Test");
    println!("=============================\n");

    // Create temp directory for test workspace (removed when `temp_dir` drops)
    let temp_dir = TempDir::new().expect("Failed to create temp directory");
    let workspace_path = temp_dir.path();

    println!("📁 Test workspace: {}\n", workspace_path.display());

    // Step 1: Initialize MLF project
    println!("1️⃣ Initializing MLF project...");
    init_mlf_project(workspace_path).expect("Failed to initialize project");

    // Step 2: Fetch real lexicons
    println!("\n2️⃣ Fetching real lexicons from network...");
    for source in TEST_SOURCES {
        println!(" Fetching: {}", source);
        // `unwrap_or_else` avoids building the panic message on the success
        // path (clippy::expect_fun_call — `expect(&format!(..))` allocates
        // on every iteration even when the fetch succeeds).
        fetch_lexicons(workspace_path, source)
            .unwrap_or_else(|e| panic!("Failed to fetch {}: {}", source, e));
    }

    // Step 3: Copy MLF files to standard lexicons directory
    println!("\n3️⃣ Copying MLF files to standard lexicons directory...");
    let source_mlf_dir = workspace_path.join(".mlf/lexicons/mlf");
    let lexicons_dir = workspace_path.join("lexicons");
    copy_mlf_files(&source_mlf_dir, &lexicons_dir).expect("Failed to copy MLF files");

    // Step 4: Generate JSON from MLF
    println!("\n4️⃣ Generating JSON from MLF files...");
    let output_dir = workspace_path.join("generated-lexicons");
    generate_json_from_mlf(workspace_path, &output_dir).expect("Failed to generate JSON");

    // Step 5: Compare original vs regenerated JSON
    println!("\n5️⃣ Comparing original vs regenerated JSON...");
    let original_dir = workspace_path.join(".mlf/lexicons/json");

    // Write diffs to tests/real_world/roundtrip/diffs/ (persisted, gitignored)
    let diffs_dir = std::path::PathBuf::from(env!("CARGO_MANIFEST_DIR"))
        .join("real_world/roundtrip/diffs");

    let stats = compare_json_files(&original_dir, &output_dir, &diffs_dir)
        .expect("Failed to compare JSON files");

    // Step 6: Report results
    println!("\n📊 Round-Trip Test Results");
    println!("===========================");
    println!("Total lexicons tested: {}", stats.total);
    println!("Perfect matches: {}", stats.perfect_matches);
    println!("Acceptable differences: {}", stats.acceptable_diffs);
    println!("Failures: {}", stats.failures);

    if !stats.failed_lexicons.is_empty() {
        println!("\n❌ Failed lexicons:");
        for (nsid, reason) in &stats.failed_lexicons {
            println!(" - {}: {}", nsid, reason);
        }
    }

    if stats.acceptable_diffs > 0 || stats.failures > 0 {
        println!("\n📁 Diff files written to: {}", diffs_dir.display());
        println!(" Review these files to see what changed between original and regenerated JSON");
    }

    // Assert that we have no failures
    assert_eq!(
        stats.failures, 0,
        "Round-trip test failed for {} lexicon(s). Check diff files in {}",
        stats.failures,
        diffs_dir.display()
    );

    println!("\n✅ All round-trip tests passed!");
}
/// Initialize an MLF project by writing an `mlf.toml` manifest into
/// `workspace_path`.
///
/// Returns a human-readable error string if the file cannot be written.
fn init_mlf_project(workspace_path: &Path) -> Result<(), String> {
    // Minimal manifest: no dependencies, transitive fetches allowed.
    let manifest = r#"
[package]
name = "roundtrip-test"
version = "0.1.0"

[dependencies]
dependencies = []
allow_transitive_deps = true
optimize_transitive_fetches = false
"#;

    fs::write(workspace_path.join("mlf.toml"), manifest)
        .map_err(|e| format!("Failed to write mlf.toml: {}", e))
}
/// Fetch lexicons matching `nsid_pattern` by shelling out to `mlf fetch`
/// inside `workspace_path`.
///
/// Returns an error string if the binary cannot be spawned or exits with
/// a non-success status (stderr is included in the message).
fn fetch_lexicons(workspace_path: &Path, nsid_pattern: &str) -> Result<(), String> {
    let output = Command::new("mlf")
        .args(["fetch", nsid_pattern])
        .current_dir(workspace_path)
        .output()
        .map_err(|e| format!("Failed to execute mlf fetch: {}", e))?;

    if output.status.success() {
        Ok(())
    } else {
        Err(format!(
            "mlf fetch failed:\n{}",
            String::from_utf8_lossy(&output.stderr)
        ))
    }
}
141141+142142+/// Copy MLF files from .mlf/lexicons/mlf to lexicons/
143143+fn copy_mlf_files(source_dir: &Path, dest_dir: &Path) -> Result<(), String> {
144144+ if !source_dir.exists() {
145145+ return Err(format!("Source directory not found: {}", source_dir.display()));
146146+ }
147147+148148+ fn copy_recursive(src: &Path, dst: &Path) -> std::io::Result<()> {
149149+ fs::create_dir_all(dst)?;
150150+ for entry in fs::read_dir(src)? {
151151+ let entry = entry?;
152152+ let src_path = entry.path();
153153+ let dst_path = dst.join(entry.file_name());
154154+155155+ if src_path.is_dir() {
156156+ copy_recursive(&src_path, &dst_path)?;
157157+ } else {
158158+ fs::copy(&src_path, &dst_path)?;
159159+ }
160160+ }
161161+ Ok(())
162162+ }
163163+164164+ copy_recursive(source_dir, dest_dir)
165165+ .map_err(|e| format!("Failed to copy MLF files: {}", e))?;
166166+167167+ let mlf_count = find_mlf_files(dest_dir)?.len();
168168+ println!(" Copied {} MLF files", mlf_count);
169169+170170+ Ok(())
171171+}
/// Generate JSON lexicons from the MLF files under `workspace_dir/lexicons`
/// by invoking `mlf generate lexicon`, writing results into `output_dir`.
///
/// The whole directory is passed in one invocation so the tool can resolve
/// cross-file dependencies between MLF files.
fn generate_json_from_mlf(workspace_dir: &Path, output_dir: &Path) -> Result<(), String> {
    let lexicons_dir = workspace_dir.join("lexicons");
    if !lexicons_dir.exists() {
        return Err(format!("Lexicons directory not found: {}", lexicons_dir.display()));
    }

    // Make sure the destination exists before the tool writes into it.
    fs::create_dir_all(output_dir)
        .map_err(|e| format!("Failed to create output directory: {}", e))?;

    println!(" Generating JSON files...");
    let output = Command::new("mlf")
        .args(["generate", "lexicon", "-i", "lexicons", "-o"])
        .arg(output_dir)
        .current_dir(workspace_dir)
        .output()
        .map_err(|e| format!("Failed to execute mlf generate: {}", e))?;

    if output.status.success() {
        println!(" Generated JSON successfully");
        Ok(())
    } else {
        Err(format!(
            "mlf generate lexicon failed:\n{}",
            String::from_utf8_lossy(&output.stderr)
        ))
    }
}
/// Recursively collect every `.mlf` file under `dir`.
///
/// A non-existent or non-directory `dir` yields an empty list rather than
/// an error; I/O failures during the walk are reported as an error string.
fn find_mlf_files(dir: &Path) -> Result<Vec<PathBuf>, String> {
    // Depth-first traversal accumulating matches into `acc`.
    fn collect(dir: &Path, acc: &mut Vec<PathBuf>) -> std::io::Result<()> {
        if !dir.is_dir() {
            return Ok(());
        }
        for entry in fs::read_dir(dir)? {
            let path = entry?.path();
            if path.is_dir() {
                collect(&path, acc)?;
            } else if path.extension().and_then(|ext| ext.to_str()) == Some("mlf") {
                acc.push(path);
            }
        }
        Ok(())
    }

    let mut found = Vec::new();
    collect(dir, &mut found).map_err(|e| format!("Failed to walk directory: {}", e))?;
    Ok(found)
}
/// Aggregate results of comparing original vs regenerated lexicon JSON.
#[derive(Debug)]
struct ComparisonStats {
    // Number of original JSON files examined.
    total: usize,
    // Lexicons whose round-trip output matched the original.
    perfect_matches: usize,
    // Lexicons that differed only in tolerated ways.
    acceptable_diffs: usize,
    // Lexicons with structural mismatches or missing generated files.
    failures: usize,
    // One (nsid, reason) pair per failed lexicon, for the summary report.
    failed_lexicons: Vec<(String, String)>,
}
242242+243243+/// Compare original JSON with regenerated JSON
244244+fn compare_json_files(
245245+ original_dir: &Path,
246246+ generated_dir: &Path,
247247+ diffs_dir: &Path,
248248+) -> Result<ComparisonStats, String> {
249249+ // Create diffs directory
250250+ fs::create_dir_all(diffs_dir)
251251+ .map_err(|e| format!("Failed to create diffs directory: {}", e))?;
252252+ let mut stats = ComparisonStats {
253253+ total: 0,
254254+ perfect_matches: 0,
255255+ acceptable_diffs: 0,
256256+ failures: 0,
257257+ failed_lexicons: Vec::new(),
258258+ };
259259+260260+ // Find all JSON files in original directory
261261+ let original_files = find_json_files(original_dir)?;
262262+ stats.total = original_files.len();
263263+264264+ println!(" Comparing {} lexicon files...", stats.total);
265265+266266+ for original_file in original_files {
267267+ let relative_path = original_file
268268+ .strip_prefix(original_dir)
269269+ .map_err(|e| format!("Failed to strip prefix: {}", e))?;
270270+271271+ let generated_file = generated_dir.join(relative_path);
272272+273273+ // Extract NSID from path for reporting
274274+ let nsid = relative_path
275275+ .with_extension("")
276276+ .to_str()
277277+ .unwrap()
278278+ .replace(std::path::MAIN_SEPARATOR, ".");
279279+280280+ if !generated_file.exists() {
281281+ stats.failures += 1;
282282+ stats.failed_lexicons
283283+ .push((nsid.clone(), "Generated file not found".to_string()));
284284+ eprintln!(" ✗ {}: Generated file not found", nsid);
285285+ continue;
286286+ }
287287+288288+ // Read and parse JSON files
289289+ let original_json = fs::read_to_string(&original_file)
290290+ .map_err(|e| format!("Failed to read original: {}", e))?;
291291+ let generated_json = fs::read_to_string(&generated_file)
292292+ .map_err(|e| format!("Failed to read generated: {}", e))?;
293293+294294+ let original: serde_json::Value = serde_json::from_str(&original_json)
295295+ .map_err(|e| format!("Failed to parse original JSON: {}", e))?;
296296+ let generated: serde_json::Value = serde_json::from_str(&generated_json)
297297+ .map_err(|e| format!("Failed to parse generated JSON: {}", e))?;
298298+299299+ // Compare with allowed differences
300300+ match compare_lexicon_json(&original, &generated) {
301301+ ComparisonResult::Perfect => {
302302+ stats.perfect_matches += 1;
303303+ println!(" ✓ {} (perfect match)", nsid);
304304+ }
305305+ ComparisonResult::AcceptableDifferences(diffs) => {
306306+ stats.acceptable_diffs += 1;
307307+ println!(" ✓ {} (acceptable diffs: {})", nsid, diffs.join(", "));
308308+309309+ // Write diff file for acceptable differences
310310+ write_diff_file(diffs_dir, &nsid, &original_json, &generated_json, "acceptable")
311311+ .unwrap_or_else(|e| eprintln!("Warning: Failed to write diff: {}", e));
312312+ }
313313+ ComparisonResult::Failure(reason) => {
314314+ stats.failures += 1;
315315+ stats.failed_lexicons.push((nsid.clone(), reason.clone()));
316316+ eprintln!(" ✗ {}: {}", nsid, reason);
317317+318318+ // Write diff file for failures
319319+ write_diff_file(diffs_dir, &nsid, &original_json, &generated_json, "failure")
320320+ .unwrap_or_else(|e| eprintln!("Warning: Failed to write diff: {}", e));
321321+ }
322322+ }
323323+ }
324324+325325+ Ok(stats)
326326+}
/// Recursively collect every `.json` file under `dir`.
///
/// A non-existent or non-directory `dir` yields an empty list; I/O failures
/// during the walk are reported as an error string.
fn find_json_files(dir: &Path) -> Result<Vec<PathBuf>, String> {
    // Depth-first traversal accumulating matches into `acc`.
    fn collect(dir: &Path, acc: &mut Vec<PathBuf>) -> std::io::Result<()> {
        if !dir.is_dir() {
            return Ok(());
        }
        for entry in fs::read_dir(dir)? {
            let path = entry?.path();
            if path.is_dir() {
                collect(&path, acc)?;
            } else if path.extension().and_then(|ext| ext.to_str()) == Some("json") {
                acc.push(path);
            }
        }
        Ok(())
    }

    let mut found = Vec::new();
    collect(dir, &mut found).map_err(|e| format!("Failed to walk directory: {}", e))?;
    Ok(found)
}
/// Outcome of comparing one original lexicon against its regenerated form.
#[derive(Debug)]
enum ComparisonResult {
    /// The two values matched; no meaningful difference found.
    Perfect,
    /// Differences found, but all of a tolerated kind; each string names
    /// one tolerated difference category (e.g. "$type fields").
    AcceptableDifferences(Vec<String>),
    /// Structural mismatch; the string describes the failure.
    Failure(String),
}
357357+358358+/// Compare two lexicon JSON objects, allowing certain acceptable differences
359359+fn compare_lexicon_json(
360360+ original: &serde_json::Value,
361361+ generated: &serde_json::Value,
362362+) -> ComparisonResult {
363363+ let mut acceptable_diffs = Vec::new();
364364+365365+ // Strip $type fields (these are often added/removed)
366366+ let original_stripped = strip_dollar_type(original);
367367+ let generated_stripped = strip_dollar_type(generated);
368368+369369+ // Check if they're identical after stripping $type
370370+ if original_stripped == generated_stripped {
371371+ return ComparisonResult::Perfect;
372372+ }
373373+374374+ // Allow $type differences
375375+ if has_only_dollar_type_diff(&original_stripped, &generated_stripped) {
376376+ acceptable_diffs.push("$type fields".to_string());
377377+ }
378378+379379+ // Check for field ordering differences (same fields, different order)
380380+ if has_only_ordering_diff(&original_stripped, &generated_stripped) {
381381+ acceptable_diffs.push("field ordering".to_string());
382382+ return ComparisonResult::AcceptableDifferences(acceptable_diffs);
383383+ }
384384+385385+ // If we have acceptable diffs, return them
386386+ if !acceptable_diffs.is_empty() {
387387+ return ComparisonResult::AcceptableDifferences(acceptable_diffs);
388388+ }
389389+390390+ // Otherwise, it's a failure
391391+ ComparisonResult::Failure(format!(
392392+ "Structural differences detected"
393393+ ))
394394+}
395395+396396+/// Recursively strip $type fields from JSON
397397+fn strip_dollar_type(value: &serde_json::Value) -> serde_json::Value {
398398+ match value {
399399+ serde_json::Value::Object(map) => {
400400+ let mut new_map = serde_json::Map::new();
401401+ for (k, v) in map {
402402+ if k != "$type" {
403403+ new_map.insert(k.clone(), strip_dollar_type(v));
404404+ }
405405+ }
406406+ serde_json::Value::Object(new_map)
407407+ }
408408+ serde_json::Value::Array(arr) => {
409409+ serde_json::Value::Array(arr.iter().map(strip_dollar_type).collect())
410410+ }
411411+ _ => value.clone(),
412412+ }
413413+}
414414+415415+/// Check if the only difference is $type fields
416416+fn has_only_dollar_type_diff(v1: &serde_json::Value, v2: &serde_json::Value) -> bool {
417417+ // After stripping $type, they should be equal
418418+ v1 == v2
419419+}
/// Write diff artifacts showing differences between original and generated
/// JSON for one lexicon.
///
/// Creates `diffs_dir/<diff_type>/` (e.g. "acceptable" or "failure") and
/// writes three files there, named from the NSID with dots replaced by
/// underscores:
/// - `{nsid}.original.json`  — the original fetched JSON
/// - `{nsid}.generated.json` — the regenerated JSON
/// - `{nsid}.diff`           — unified diff output (`diff -u`)
fn write_diff_file(
    diffs_dir: &Path,
    nsid: &str,
    original_json: &str,
    generated_json: &str,
    diff_type: &str,
) -> Result<(), String> {
    // Create subdirectory based on diff type
    let type_dir = diffs_dir.join(diff_type);
    fs::create_dir_all(&type_dir)
        .map_err(|e| format!("Failed to create diff type directory: {}", e))?;

    // Create base filename from NSID (dots → underscores keeps one flat file
    // per lexicon instead of implying a directory hierarchy).
    let base_filename = nsid.replace('.', "_");

    // Write original JSON
    let original_path = type_dir.join(format!("{}.original.json", base_filename));
    fs::write(&original_path, original_json)
        .map_err(|e| format!("Failed to write original JSON: {}", e))?;

    // Write generated JSON
    let generated_path = type_dir.join(format!("{}.generated.json", base_filename));
    fs::write(&generated_path, generated_json)
        .map_err(|e| format!("Failed to write generated JSON: {}", e))?;

    // Run diff command and save output
    let diff_path = type_dir.join(format!("{}.diff", base_filename));
    let diff_output = Command::new("diff")
        .arg("-u")
        .arg(&original_path)
        .arg(&generated_path)
        .output()
        .map_err(|e| format!("Failed to run diff command: {}", e))?;

    // diff exits 0 when files match and 1 when they differ (expected here).
    // Anything else — exit code >= 2, or None when diff was killed by a
    // signal — means diff itself failed. The old check only caught
    // exactly Some(2), silently accepting other failure codes.
    if !matches!(diff_output.status.code(), Some(0) | Some(1)) {
        return Err(format!(
            "diff command error: {}",
            String::from_utf8_lossy(&diff_output.stderr)
        ));
    }

    // Write diff output
    fs::write(&diff_path, &diff_output.stdout)
        .map_err(|e| format!("Failed to write diff output: {}", e))?;

    Ok(())
}
/// Check if the only difference is field ordering in objects.
///
/// NOTE(review): serde_json's `Value` equality already compares objects by
/// key/value content regardless of insertion order, so this recursive walk
/// agrees with plain `v1 == v2` for every input; a *pure* re-ordering never
/// appears as inequality at the `Value` level. Detecting ordering changes
/// would require comparing the raw JSON text — confirm intended semantics.
fn has_only_ordering_diff(v1: &serde_json::Value, v2: &serde_json::Value) -> bool {
    match (v1, v2) {
        (serde_json::Value::Object(map1), serde_json::Value::Object(map2)) => {
            // Check if they have the same keys (as an unordered set)
            let keys1: HashSet<_> = map1.keys().collect();
            let keys2: HashSet<_> = map2.keys().collect();

            if keys1 != keys2 {
                return false;
            }

            // Check if all values match (recursively)
            for key in keys1 {
                let val1 = &map1[key];
                let val2 = &map2[key];

                if !has_only_ordering_diff(val1, val2) && val1 != val2 {
                    return false;
                }
            }

            true
        }
        (serde_json::Value::Array(arr1), serde_json::Value::Array(arr2)) => {
            // Arrays must match exactly (order matters)
            if arr1.len() != arr2.len() {
                return false;
            }

            arr1.iter()
                .zip(arr2.iter())
                .all(|(v1, v2)| has_only_ordering_diff(v1, v2) || v1 == v2)
        }
        // Scalars (and mismatched kinds) fall back to direct equality.
        _ => v1 == v2,
    }
}
+2
tests/real_world/roundtrip/.gitignore
···11+# Diff files generated by round-trip tests
22+diffs/