using System.Diagnostics;
using System.Text.Json;
using Shouldly;
using Xunit;
using ZB.MOM.WW.CBDDC.Core.Sync;

namespace ZB.MOM.WW.CBDDC.Core.Tests;

public class PerformanceRegressionTests
{
    private readonly RecursiveNodeMergeConflictResolver _resolver;
    private readonly Dictionary<string, int> _limits;

    /// <summary>
    /// Initializes a new instance of the <see cref="PerformanceRegressionTests"/> class.
    /// </summary>
    public PerformanceRegressionTests()
    {
        _resolver = new RecursiveNodeMergeConflictResolver();

        // Load per-benchmark time limits from the JSON file deployed alongside the tests.
        var json = File.ReadAllText("benchmark_limits.json");
        _limits = JsonSerializer.Deserialize<Dictionary<string, int>>(json)
                  ?? new Dictionary<string, int>();
    }

    private Document CreateDoc(string key, object data, HlcTimestamp ts)
    {
        var json = JsonSerializer.Serialize(data);
        var element = JsonDocument.Parse(json).RootElement;
        return new Document("test", key, element, ts, false);
    }

    private OplogEntry CreateOp(string key, object data, HlcTimestamp ts)
    {
        var json = JsonSerializer.Serialize(data);
        var element = JsonDocument.Parse(json).RootElement;
        return new OplogEntry("test", key, OperationType.Put, element, ts, string.Empty);
    }

    /// <summary>
    /// Verifies simple recursive merge operations stay within configured performance limits.
    /// </summary>
    [Fact]
    public void RecursiveMerge_Simple_ShouldBeWithinLimits()
    {
        int iterations = 10000;
        string limitKey = "RecursiveMerge_Simple_10k_Ops_MaxMs";

        var ts1 = new HlcTimestamp(100, 0, "n1");
        var ts2 = new HlcTimestamp(200, 0, "n2");

        var doc = CreateDoc("k1", new { name = "Alice", age = 30 }, ts1);
        var op = CreateOp("k1", new { name = "Bob", age = 31 }, ts2);

        // Warmup
        for (int i = 0; i < 100; i++)
            _resolver.Resolve(doc, op);

        // Run
        var sw = Stopwatch.StartNew();
        for (int i = 0; i < iterations; i++)
        {
            _resolver.Resolve(doc, op);
        }
        sw.Stop();

        long elapsed = sw.ElapsedMilliseconds;
        Console.WriteLine($"Executed {iterations} merges in {elapsed}ms");

        if (_limits.TryGetValue(limitKey, out int maxMs))
        {
            elapsed.ShouldBeLessThan(maxMs,
                $"Performance regression! Expected < {maxMs}ms but took {elapsed}ms");
        }
        else
        {
            Console.WriteLine($"Warning: No limit found for key '{limitKey}'");
        }
    }

    /// <summary>
    /// Verifies deep array recursive merge operations stay within configured performance limits.
    /// </summary>
    [Fact]
    public void RecursiveMerge_DeepArray_ShouldBeWithinLimits()
    {
        int iterations = 1000; // Lower iteration count for this heavier operation
        string limitKey = "RecursiveMerge_Array_1k_Ops_MaxMs";

        var ts1 = new HlcTimestamp(100, 0, "n1");
        var ts2 = new HlcTimestamp(200, 0, "n2");

        var items = new List<object>();
        for (int i = 0; i < 100; i++)
            items.Add(new { id = i.ToString(), val = i });

        var doc = CreateDoc("k1", new { items }, ts1);
        // Same content on both sides to force the id-matching traversal of every array element.
        var op = CreateDoc("k1", new { items }, ts2).ToOplogEntry(OperationType.Put);

        // Warmup
        _resolver.Resolve(doc, op);

        // Run
        var sw = Stopwatch.StartNew();
        for (int i = 0; i < iterations; i++)
        {
            _resolver.Resolve(doc, op);
        }
        sw.Stop();

        long elapsed = sw.ElapsedMilliseconds;
        Console.WriteLine($"Executed {iterations} array merges in {elapsed}ms");

        if (_limits.TryGetValue(limitKey, out int maxMs))
        {
            elapsed.ShouldBeLessThan(maxMs,
                $"Performance regression! Expected < {maxMs}ms but took {elapsed}ms");
        }
    }
}

public static class DocExt
{
    /// <summary>
    /// Creates an operation log entry from a document instance.
    /// </summary>
    /// <param name="d">The source document.</param>
    /// <param name="t">The operation type to apply to the created entry.</param>
    /// <returns>A new operation log entry.</returns>
    public static OplogEntry ToOplogEntry(this Document d, OperationType t)
    {
        return new OplogEntry(d.Collection, d.Key, t, d.Content, d.UpdatedAt, string.Empty);
    }
}
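
// Sketch of the benchmark_limits.json file the constructor above deserializes into
// Dictionary<string, int>. The keys come from the limitKey strings used in the tests;
// the millisecond values shown here are illustrative placeholders, not calibrated limits.
//
// {
//   "RecursiveMerge_Simple_10k_Ops_MaxMs": 500,
//   "RecursiveMerge_Array_1k_Ops_MaxMs": 2000
// }
//
// Keeping limits in a data file rather than in code lets CI tighten or relax thresholds
// per environment without recompiling the test assembly.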