Initial import of the CBDDC codebase with docs and tests. Add a .NET-focused gitignore to keep generated artifacts out of source control.
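A minimal sketch of the kind of .NET-focused .gitignore the message describes; the committed file may include more entries:

bin/
obj/
*.user
.vs/
TestResults/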

Joseph Doherty
2026-02-20 13:03:21 -05:00
commit 08bfc17218
218 changed files with 33910 additions and 0 deletions

@@ -0,0 +1,127 @@
using System.Diagnostics;
using System.Text.Json;
using Shouldly;
using Xunit;
using ZB.MOM.WW.CBDDC.Core.Sync;

namespace ZB.MOM.WW.CBDDC.Core.Tests;

public class PerformanceRegressionTests
{
    private readonly RecursiveNodeMergeConflictResolver _resolver;
    private readonly Dictionary<string, int> _limits;

    /// <summary>
    /// Initializes a new instance of the <see cref="PerformanceRegressionTests"/> class.
    /// </summary>
    public PerformanceRegressionTests()
    {
        _resolver = new RecursiveNodeMergeConflictResolver();

        // Load per-test time limits; benchmark_limits.json must be copied to the test output directory.
        var json = File.ReadAllText("benchmark_limits.json");
        _limits = JsonSerializer.Deserialize<Dictionary<string, int>>(json) ?? new Dictionary<string, int>();
    }

    private Document CreateDoc(string key, object data, HlcTimestamp ts)
    {
        var json = JsonSerializer.Serialize(data);
        var element = JsonDocument.Parse(json).RootElement;
        return new Document("test", key, element, ts, false);
    }

    private OplogEntry CreateOp(string key, object data, HlcTimestamp ts)
    {
        var json = JsonSerializer.Serialize(data);
        var element = JsonDocument.Parse(json).RootElement;
        return new OplogEntry("test", key, OperationType.Put, element, ts, string.Empty);
    }

    /// <summary>
    /// Verifies simple recursive merge operations stay within configured performance limits.
    /// </summary>
    [Fact]
    public void RecursiveMerge_Simple_ShouldBeWithinLimits()
    {
        const int iterations = 10000;
        const string limitKey = "RecursiveMerge_Simple_10k_Ops_MaxMs";
        var ts1 = new HlcTimestamp(100, 0, "n1");
        var ts2 = new HlcTimestamp(200, 0, "n2");
        var doc = CreateDoc("k1", new { name = "Alice", age = 30 }, ts1);
        var op = CreateOp("k1", new { name = "Bob", age = 31 }, ts2);

        // Warmup so JIT compilation does not skew the timed run
        for (int i = 0; i < 100; i++) _resolver.Resolve(doc, op);

        // Timed run
        var sw = Stopwatch.StartNew();
        for (int i = 0; i < iterations; i++)
        {
            _resolver.Resolve(doc, op);
        }
        sw.Stop();

        long elapsed = sw.ElapsedMilliseconds;
        Console.WriteLine($"Executed {iterations} merges in {elapsed}ms");

        if (_limits.TryGetValue(limitKey, out int maxMs))
        {
            elapsed.ShouldBeLessThan(maxMs, $"Performance regression! Expected < {maxMs}ms but took {elapsed}ms");
        }
        else
        {
            Console.WriteLine($"Warning: No limit found for key '{limitKey}'");
        }
    }

    /// <summary>
    /// Verifies deep array recursive merge operations stay within configured performance limits.
    /// </summary>
    [Fact]
    public void RecursiveMerge_DeepArray_ShouldBeWithinLimits()
    {
        const int iterations = 1000; // Fewer iterations for the heavier operation
        const string limitKey = "RecursiveMerge_Array_1k_Ops_MaxMs";
        var ts1 = new HlcTimestamp(100, 0, "n1");
        var ts2 = new HlcTimestamp(200, 0, "n2");
        var items = new List<object>();
        for (int i = 0; i < 100; i++) items.Add(new { id = i.ToString(), val = i });
        var doc = CreateDoc("k1", new { items }, ts1);
        var op = CreateDoc("k1", new { items }, ts2).ToOplogEntry(OperationType.Put); // Same content to force id-based array traversal

        // Warmup
        _resolver.Resolve(doc, op);

        // Timed run
        var sw = Stopwatch.StartNew();
        for (int i = 0; i < iterations; i++)
        {
            _resolver.Resolve(doc, op);
        }
        sw.Stop();

        long elapsed = sw.ElapsedMilliseconds;
        Console.WriteLine($"Executed {iterations} array merges in {elapsed}ms");

        if (_limits.TryGetValue(limitKey, out int maxMs))
        {
            elapsed.ShouldBeLessThan(maxMs, $"Performance regression! Expected < {maxMs}ms but took {elapsed}ms");
        }
        else
        {
            Console.WriteLine($"Warning: No limit found for key '{limitKey}'");
        }
    }
}

/// <summary>
/// Test-only extensions for converting documents into oplog entries.
/// </summary>
public static class DocExt
{
    /// <summary>
    /// Creates an operation log entry from a document instance.
    /// </summary>
    /// <param name="d">The source document.</param>
    /// <param name="t">The operation type to apply to the created entry.</param>
    /// <returns>A new operation log entry.</returns>
    public static OplogEntry ToOplogEntry(this Document d, OperationType t)
    {
        return new OplogEntry(d.Collection, d.Key, t, d.Content, d.UpdatedAt, string.Empty);
    }
}
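
For reference, a minimal sketch of the benchmark_limits.json the test constructor expects. The keys come from the tests above; the millisecond values here are illustrative assumptions, not measured baselines:

{
  "RecursiveMerge_Simple_10k_Ops_MaxMs": 500,
  "RecursiveMerge_Array_1k_Ops_MaxMs": 1000
}

The file must deserialize as Dictionary<string, int> and be copied to the test output directory (e.g. via CopyToOutputDirectory in the test project), otherwise File.ReadAllText throws. The suite can then be run in isolation with dotnet test --filter "FullyQualifiedName~PerformanceRegressionTests".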