Initial import of the CBDDC codebase with docs and tests. Add a .NET-focused gitignore to keep generated artifacts out of source control.
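Note: the .gitignore itself is not shown in this diff. As a rough sketch only — the actual file may differ — a .NET-focused ignore list typically covers at least:

bin/
obj/
.vs/
*.user
TestResults/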
Some checks failed
CI / verify (push) Has been cancelled
267
tests/ZB.MOM.WW.CBDDC.Core.Tests/ArchitectureFitnessTests.cs
Normal file
@@ -0,0 +1,267 @@
using System.Reflection;
using System.Text.RegularExpressions;
using System.Xml.Linq;
using ZB.MOM.WW.CBDDC.Core;

namespace ZB.MOM.WW.CBDDC.Core.Tests;

public class ArchitectureFitnessTests
{
    /// <summary>
    /// Verifies that the core assembly does not reference outer-layer assemblies.
    /// </summary>
    [Fact]
    public void CoreAssembly_ShouldNotReferenceOuterAssemblies()
    {
        var references = typeof(OplogEntry).Assembly
            .GetReferencedAssemblies()
            .Select(a => a.Name)
            .Where(a => !string.IsNullOrWhiteSpace(a))
            .ToHashSet(StringComparer.Ordinal);

        references.ShouldNotContain("ZB.MOM.WW.CBDDC.Network");
        references.ShouldNotContain("ZB.MOM.WW.CBDDC.Persistence");
        references.ShouldNotContain("ZB.MOM.WW.CBDDC.Hosting");
    }

    /// <summary>
    /// Verifies that project references under src form an acyclic graph.
    /// </summary>
    [Fact]
    public void SourceProjectGraph_ShouldBeAcyclic()
    {
        var repoRoot = FindRepoRoot();
        var srcRoot = Path.Combine(repoRoot, "src");

        var projectFiles = Directory
            .EnumerateFiles(srcRoot, "*.csproj", SearchOption.AllDirectories)
            .Where(p => !p.Contains($"{Path.DirectorySeparatorChar}obj{Path.DirectorySeparatorChar}", StringComparison.Ordinal)
                && !p.Contains($"{Path.DirectorySeparatorChar}bin{Path.DirectorySeparatorChar}", StringComparison.Ordinal))
            .ToList();

        var nodes = projectFiles.ToDictionary(
            p => Path.GetFileNameWithoutExtension(p),
            p => new HashSet<string>(StringComparer.Ordinal));

        foreach (var projectFile in projectFiles)
        {
            var projectName = Path.GetFileNameWithoutExtension(projectFile);
            var doc = XDocument.Load(projectFile);
            var refs = doc.Descendants("ProjectReference")
                .Select(x => x.Attribute("Include")?.Value)
                .Where(v => !string.IsNullOrWhiteSpace(v))
                .Select(v => Path.GetFileNameWithoutExtension(v!.Replace('\\', '/')));

            foreach (var reference in refs)
            {
                if (nodes.ContainsKey(reference))
                {
                    nodes[projectName].Add(reference);
                }
            }
        }

        HasCycle(nodes).ShouldBeFalse();
    }

    /// <summary>
    /// Verifies the allowed dependency graph between source projects.
    /// </summary>
    [Fact]
    public void SourceProjectReferences_ShouldMatchAllowedDependencyGraph()
    {
        var repoRoot = FindRepoRoot();
        var srcRoot = Path.Combine(repoRoot, "src");

        var projectFiles = Directory
            .EnumerateFiles(srcRoot, "*.csproj", SearchOption.AllDirectories)
            .Where(p => !p.Contains($"{Path.DirectorySeparatorChar}obj{Path.DirectorySeparatorChar}", StringComparison.Ordinal)
                && !p.Contains($"{Path.DirectorySeparatorChar}bin{Path.DirectorySeparatorChar}", StringComparison.Ordinal))
            .ToList();

        var allowedDependencies = new Dictionary<string, HashSet<string>>(StringComparer.Ordinal)
        {
            ["ZB.MOM.WW.CBDDC.Core"] = new HashSet<string>(StringComparer.Ordinal),
            ["ZB.MOM.WW.CBDDC.Network"] = new HashSet<string>(StringComparer.Ordinal) { "ZB.MOM.WW.CBDDC.Core" },
            ["ZB.MOM.WW.CBDDC.Persistence"] = new HashSet<string>(StringComparer.Ordinal) { "ZB.MOM.WW.CBDDC.Core" },
            ["ZB.MOM.WW.CBDDC.Hosting"] = new HashSet<string>(StringComparer.Ordinal) { "ZB.MOM.WW.CBDDC.Network" }
        };

        foreach (var projectFile in projectFiles)
        {
            var projectName = Path.GetFileNameWithoutExtension(projectFile);
            allowedDependencies.ContainsKey(projectName)
                .ShouldBeTrue($"Unexpected source project found: {projectName}");

            var doc = XDocument.Load(projectFile);
            var references = doc.Descendants("ProjectReference")
                .Select(x => x.Attribute("Include")?.Value)
                .Where(v => !string.IsNullOrWhiteSpace(v))
                .Select(v => Path.GetFileNameWithoutExtension(v!.Replace('\\', '/')))
                .ToHashSet(StringComparer.Ordinal);

            var expected = allowedDependencies[projectName];
            var extra = references.Where(r => !expected.Contains(r)).ToList();
            var missing = expected.Where(e => !references.Contains(e)).ToList();

            extra.ShouldBeEmpty($"Project {projectName} has disallowed references: {string.Join(", ", extra)}");
            missing.ShouldBeEmpty($"Project {projectName} is missing required references: {string.Join(", ", missing)}");
        }
    }

    /// <summary>
    /// Verifies non-generic ILogger usage is restricted to explicit compatibility shims.
    /// </summary>
    [Fact]
    public void SourceCode_ShouldRestrictNonGenericILoggerUsage()
    {
        var repoRoot = FindRepoRoot();
        var srcRoot = Path.Combine(repoRoot, "src");
        var loggerPattern = new Regex(@"\bILogger\b(?!\s*<|\s*Factory\b)", RegexOptions.Compiled);

        var allowedSnippets = new[]
        {
            "private readonly ILogger _inner;",
            "internal ProtocolHandler(ILogger logger",
            "ILogger? logger = null)",
            "CreateTypedLogger(ILogger? logger)",
            "public ForwardingLogger(ILogger inner)"
        };

        var violations = new List<string>();
        var sourceFiles = Directory.EnumerateFiles(srcRoot, "*.cs", SearchOption.AllDirectories)
            .Where(p => !p.Contains($"{Path.DirectorySeparatorChar}obj{Path.DirectorySeparatorChar}", StringComparison.Ordinal)
                && !p.Contains($"{Path.DirectorySeparatorChar}bin{Path.DirectorySeparatorChar}", StringComparison.Ordinal));

        foreach (var file in sourceFiles)
        {
            var lines = File.ReadAllLines(file);
            for (var i = 0; i < lines.Length; i++)
            {
                var line = lines[i].Trim();
                if (string.IsNullOrWhiteSpace(line) || line.StartsWith("//", StringComparison.Ordinal))
                {
                    continue;
                }

                if (!loggerPattern.IsMatch(line))
                {
                    continue;
                }

                if (allowedSnippets.Any(line.Contains))
                {
                    continue;
                }

                var relativePath = Path.GetRelativePath(repoRoot, file).Replace('\\', '/');
                violations.Add($"{relativePath}:{i + 1} -> {line}");
            }
        }

        violations.ShouldBeEmpty($"Unexpected non-generic ILogger usage:{Environment.NewLine}{string.Join(Environment.NewLine, violations)}");
    }

    /// <summary>
    /// Verifies log boundaries push operation context for hosted/background entry points.
    /// </summary>
    [Fact]
    public void BoundaryServices_ShouldPushOperationLogContext()
    {
        var repoRoot = FindRepoRoot();
        var boundaryFiles = new[]
        {
            "src/ZB.MOM.WW.CBDDC.Network/CBDDCNodeService.cs",
            "src/ZB.MOM.WW.CBDDC.Network/SyncOrchestrator.cs",
            "src/ZB.MOM.WW.CBDDC.Network/TcpSyncServer.cs",
            "src/ZB.MOM.WW.CBDDC.Hosting/HostedServices/DiscoveryServiceHostedService.cs",
            "src/ZB.MOM.WW.CBDDC.Hosting/HostedServices/TcpSyncServerHostedService.cs",
            "src/ZB.MOM.WW.CBDDC.Hosting/Services/NoOpDiscoveryService.cs",
            "src/ZB.MOM.WW.CBDDC.Hosting/Services/NoOpSyncOrchestrator.cs"
        };

        foreach (var relativePath in boundaryFiles)
        {
            var filePath = Path.Combine(repoRoot, relativePath.Replace('/', Path.DirectorySeparatorChar));
            File.Exists(filePath).ShouldBeTrue($"Missing expected boundary file: {relativePath}");

            var contents = File.ReadAllText(filePath);
            contents.Contains("LogContext.PushProperty(\"OperationId\"", StringComparison.Ordinal)
                .ShouldBeTrue($"Boundary file is missing OperationId log enrichment: {relativePath}");
        }
    }

    /// <summary>
    /// Verifies boundary projects include Serilog for LogContext support.
    /// </summary>
    [Fact]
    public void BoundaryProjects_ShouldReferenceSerilog()
    {
        var repoRoot = FindRepoRoot();
        var projects = new[]
        {
            "src/ZB.MOM.WW.CBDDC.Network/ZB.MOM.WW.CBDDC.Network.csproj",
            "src/ZB.MOM.WW.CBDDC.Hosting/ZB.MOM.WW.CBDDC.Hosting.csproj",
            "samples/ZB.MOM.WW.CBDDC.Sample.Console/ZB.MOM.WW.CBDDC.Sample.Console.csproj"
        };

        foreach (var relativePath in projects)
        {
            var filePath = Path.Combine(repoRoot, relativePath.Replace('/', Path.DirectorySeparatorChar));
            File.Exists(filePath).ShouldBeTrue($"Missing project file: {relativePath}");

            var contents = File.ReadAllText(filePath);
            contents.Contains("<PackageReference Include=\"Serilog\"", StringComparison.Ordinal)
                .ShouldBeTrue($"Serilog package reference is required for logging boundary enrichment: {relativePath}");
        }
    }

    private static string FindRepoRoot()
    {
        var dir = AppContext.BaseDirectory;
        for (var i = 0; i < 10 && !string.IsNullOrWhiteSpace(dir); i++)
        {
            if (File.Exists(Path.Combine(dir, "CBDDC.slnx")))
            {
                return dir;
            }

            dir = Directory.GetParent(dir)?.FullName ?? string.Empty;
        }

        throw new InvalidOperationException("Could not locate repository root containing CBDDC.slnx.");
    }

    private static bool HasCycle(Dictionary<string, HashSet<string>> graph)
    {
        var visiting = new HashSet<string>(StringComparer.Ordinal); // nodes on the current DFS path
        var visited = new HashSet<string>(StringComparer.Ordinal);  // nodes already fully explored

        bool Dfs(string node)
        {
            if (visiting.Contains(node))
            {
                // The DFS path re-entered itself: a cycle exists.
                return true;
            }

            if (!visited.Add(node))
            {
                // Already fully explored via another path; no cycle through here.
                return false;
            }

            visiting.Add(node);
            foreach (var next in graph[node])
            {
                if (Dfs(next))
                {
                    return true;
                }
            }

            visiting.Remove(node);
            return false;
        }

        return graph.Keys.Any(Dfs);
    }
}
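Note: the visiting/visited pair in HasCycle is the classic two-set ("gray"/"black") DFS cycle check. A self-contained sketch of the same idea on a hypothetical three-node graph, assuming .NET top-level statements with implicit usings (names here are illustrative, not from the repository):

var graph = new Dictionary<string, HashSet<string>>
{
    ["A"] = new() { "B" },
    ["B"] = new() { "C" },
    ["C"] = new() { "A" } // back-edge: A -> B -> C -> A closes a cycle
};

var visiting = new HashSet<string>(); // nodes on the current DFS path ("gray")
var visited = new HashSet<string>();  // nodes fully explored ("black")

bool Dfs(string node)
{
    if (visiting.Contains(node)) return true; // path re-entered itself: cycle
    if (!visited.Add(node)) return false;     // explored before via another path
    visiting.Add(node);
    foreach (var next in graph[node])
    {
        if (Dfs(next)) return true;
    }
    visiting.Remove(node);
    return false;
}

Console.WriteLine(graph.Keys.Any(Dfs)); // True; remove the C -> A edge and it prints False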
2
tests/ZB.MOM.WW.CBDDC.Core.Tests/GlobalUsings.cs
Normal file
@@ -0,0 +1,2 @@
global using NSubstitute;
global using Shouldly;
77
tests/ZB.MOM.WW.CBDDC.Core.Tests/OplogEntryTests.cs
Executable file
@@ -0,0 +1,77 @@
using System;
using System.Globalization;
using System.Text.Json;
using Xunit;
using ZB.MOM.WW.CBDDC.Core;

namespace ZB.MOM.WW.CBDDC.Core.Tests
{
    public class OplogEntryTests
    {
        /// <summary>
        /// Verifies that hash computation is deterministic even when payload content differs.
        /// </summary>
        [Fact]
        public void ComputeHash_ShouldBeDeterministic_RegardlessOfPayload()
        {
            // Arrange
            var collection = "test-collection";
            var key = "test-key";
            var op = OperationType.Put;
            var timestamp = new HlcTimestamp(100, 0, "node-1");
            var prevHash = "prev-hash";

            var payload1 = JsonDocument.Parse("{\"prop\": 1}").RootElement;
            var payload2 = JsonDocument.Parse("{\"prop\": 2, \"extra\": \"whitespace\"}").RootElement;

            // Act
            var entry1 = new OplogEntry(collection, key, op, payload1, timestamp, prevHash);
            var entry2 = new OplogEntry(collection, key, op, payload2, timestamp, prevHash);

            // Assert
            entry2.Hash.ShouldBe(entry1.Hash);
        }

        /// <summary>
        /// Verifies that hash computation uses invariant culture formatting for timestamp values.
        /// </summary>
        [Fact]
        public void ComputeHash_ShouldUseInvariantCulture_ForTimestamp()
        {
            // Arrange
            var originalCulture = CultureInfo.CurrentCulture;
            try
            {
                var culture = CultureInfo.GetCultureInfo("de-DE");
                CultureInfo.CurrentCulture = culture;

                var timestamp = new HlcTimestamp(123456789, 1, "node");
                var entry = new OplogEntry("col", "key", OperationType.Put, null, timestamp, "prev");

                // Act
                var hash = entry.ComputeHash();

                // Assert
                CultureInfo.CurrentCulture = CultureInfo.InvariantCulture;
                var expectedEntry = new OplogEntry("col", "key", OperationType.Put, null, timestamp, "prev");
                hash.ShouldBe(expectedEntry.Hash);
            }
            finally
            {
                CultureInfo.CurrentCulture = originalCulture;
            }
        }

        /// <summary>
        /// Verifies that an entry is valid when its stored hash matches computed content.
        /// </summary>
        [Fact]
        public void IsValid_ShouldReturnTrue_WhenHashMatches()
        {
            var timestamp = new HlcTimestamp(100, 0, "node-1");
            var entry = new OplogEntry("col", "key", OperationType.Put, null, timestamp, "prev");

            entry.IsValid().ShouldBeTrue();
        }
    }
}
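Note on the invariant-culture test above: the failure mode it guards against only appears when a numeric or date component of the hash input is formatted as text under the ambient culture. A minimal illustration (the values are hypothetical, not from the codebase):

using System.Globalization;

var value = 1234.5;
Console.WriteLine(value.ToString(CultureInfo.GetCultureInfo("de-DE"))); // "1234,5"
Console.WriteLine(value.ToString(CultureInfo.InvariantCulture));        // "1234.5"
// A culture-sensitive ToString feeds different bytes into the hash on different
// machines; formatting with CultureInfo.InvariantCulture keeps the hash stable.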
73
tests/ZB.MOM.WW.CBDDC.Core.Tests/PeerManagementServiceTests.cs
@@ -0,0 +1,73 @@
using ZB.MOM.WW.CBDDC.Core.Management;
using ZB.MOM.WW.CBDDC.Core.Storage;

namespace ZB.MOM.WW.CBDDC.Core.Tests;

public class PeerManagementServiceTests
{
    /// <summary>
    /// Verifies that removing peer tracking with remote removal enabled removes both tracking and remote peer configuration.
    /// </summary>
    [Fact]
    public async Task RemovePeerTrackingAsync_WhenRemoveRemoteConfigTrue_RemovesTrackingAndRemoteConfig()
    {
        var configStore = Substitute.For<IPeerConfigurationStore>();
        var confirmationStore = Substitute.For<IPeerOplogConfirmationStore>();
        var service = new PeerManagementService(configStore, confirmationStore);
        var token = new CancellationTokenSource().Token;

        await service.RemovePeerTrackingAsync("peer-1", removeRemoteConfig: true, token);

        await confirmationStore.Received(1).RemovePeerTrackingAsync("peer-1", token);
        await configStore.Received(1).RemoveRemotePeerAsync("peer-1", token);
    }

    /// <summary>
    /// Verifies that removing peer tracking with remote removal disabled removes only tracking data.
    /// </summary>
    [Fact]
    public async Task RemovePeerTrackingAsync_WhenRemoveRemoteConfigFalse_RemovesTrackingOnly()
    {
        var configStore = Substitute.For<IPeerConfigurationStore>();
        var confirmationStore = Substitute.For<IPeerOplogConfirmationStore>();
        var service = new PeerManagementService(configStore, confirmationStore);

        await service.RemovePeerTrackingAsync("peer-1", removeRemoteConfig: false);

        await confirmationStore.Received(1).RemovePeerTrackingAsync("peer-1", Arg.Any<CancellationToken>());
        await configStore.DidNotReceive().RemoveRemotePeerAsync(Arg.Any<string>(), Arg.Any<CancellationToken>());
    }

    /// <summary>
    /// Verifies that removing a remote peer delegates to tracking removal with remote configuration cleanup enabled.
    /// </summary>
    [Fact]
    public async Task RemoveRemotePeerAsync_DelegatesToTrackingRemovalWithRemoteConfig()
    {
        var configStore = Substitute.For<IPeerConfigurationStore>();
        var confirmationStore = Substitute.For<IPeerOplogConfirmationStore>();
        var service = new PeerManagementService(configStore, confirmationStore);
        var token = new CancellationTokenSource().Token;

        await service.RemoveRemotePeerAsync("peer-1", token);

        await confirmationStore.Received(1).RemovePeerTrackingAsync("peer-1", token);
        await configStore.Received(1).RemoveRemotePeerAsync("peer-1", token);
    }

    /// <summary>
    /// Verifies that removing peer tracking with an invalid node identifier throws an <see cref="ArgumentException"/>.
    /// </summary>
    [Fact]
    public async Task RemovePeerTrackingAsync_WhenNodeIdInvalid_ThrowsArgumentException()
    {
        var configStore = Substitute.For<IPeerConfigurationStore>();
        var confirmationStore = Substitute.For<IPeerOplogConfirmationStore>();
        var service = new PeerManagementService(configStore, confirmationStore);

        await Should.ThrowAsync<ArgumentException>(() => service.RemovePeerTrackingAsync(" ", removeRemoteConfig: true));

        await confirmationStore.DidNotReceive().RemovePeerTrackingAsync(Arg.Any<string>(), Arg.Any<CancellationToken>());
        await configStore.DidNotReceive().RemoveRemotePeerAsync(Arg.Any<string>(), Arg.Any<CancellationToken>());
    }
}
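For orientation, the two store interfaces substituted above must expose at least the members the tests call. This is an inferred shape only — parameter names are assumptions, and any additional members or default values are unknown:

public interface IPeerOplogConfirmationStore
{
    // Invoked whenever tracking data for a peer is removed.
    Task RemovePeerTrackingAsync(string nodeId, CancellationToken cancellationToken);
}

public interface IPeerConfigurationStore
{
    // Invoked only when removeRemoteConfig is true, or via RemoveRemotePeerAsync.
    Task RemoveRemotePeerAsync(string nodeId, CancellationToken cancellationToken);
}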
127
tests/ZB.MOM.WW.CBDDC.Core.Tests/PerformanceRegressionTests.cs
Executable file
@@ -0,0 +1,127 @@
using System.Diagnostics;
using System.Text.Json;
using ZB.MOM.WW.CBDDC.Core.Sync;

namespace ZB.MOM.WW.CBDDC.Core.Tests;

public class PerformanceRegressionTests
{
    private readonly RecursiveNodeMergeConflictResolver _resolver;
    private readonly Dictionary<string, int> _limits;

    /// <summary>
    /// Initializes a new instance of the <see cref="PerformanceRegressionTests"/> class.
    /// </summary>
    public PerformanceRegressionTests()
    {
        _resolver = new RecursiveNodeMergeConflictResolver();

        // Load the per-test millisecond limits copied to the output directory.
        var json = File.ReadAllText("benchmark_limits.json");
        _limits = JsonSerializer.Deserialize<Dictionary<string, int>>(json) ?? new Dictionary<string, int>();
    }

    private Document CreateDoc(string key, object data, HlcTimestamp ts)
    {
        var json = JsonSerializer.Serialize(data);
        var element = JsonDocument.Parse(json).RootElement;
        return new Document("test", key, element, ts, false);
    }

    private OplogEntry CreateOp(string key, object data, HlcTimestamp ts)
    {
        var json = JsonSerializer.Serialize(data);
        var element = JsonDocument.Parse(json).RootElement;
        return new OplogEntry("test", key, OperationType.Put, element, ts, string.Empty);
    }

    /// <summary>
    /// Verifies simple recursive merge operations stay within configured performance limits.
    /// </summary>
    [Fact]
    public void RecursiveMerge_Simple_ShouldBeWithinLimits()
    {
        int iterations = 10000;
        string limitKey = "RecursiveMerge_Simple_10k_Ops_MaxMs";

        var ts1 = new HlcTimestamp(100, 0, "n1");
        var ts2 = new HlcTimestamp(200, 0, "n2");
        var doc = CreateDoc("k1", new { name = "Alice", age = 30 }, ts1);
        var op = CreateOp("k1", new { name = "Bob", age = 31 }, ts2);

        // Warmup
        for (int i = 0; i < 100; i++) _resolver.Resolve(doc, op);

        // Run
        var sw = Stopwatch.StartNew();
        for (int i = 0; i < iterations; i++)
        {
            _resolver.Resolve(doc, op);
        }
        sw.Stop();

        long elapsed = sw.ElapsedMilliseconds;
        Console.WriteLine($"Executed {iterations} merges in {elapsed}ms");

        if (_limits.TryGetValue(limitKey, out int maxMs))
        {
            elapsed.ShouldBeLessThan(maxMs, $"Performance regression! Expected < {maxMs}ms but took {elapsed}ms");
        }
        else
        {
            Console.WriteLine($"Warning: No limit found for key '{limitKey}'");
        }
    }

    /// <summary>
    /// Verifies deep array recursive merge operations stay within configured performance limits.
    /// </summary>
    [Fact]
    public void RecursiveMerge_DeepArray_ShouldBeWithinLimits()
    {
        int iterations = 1000; // Lower iteration count for the heavier operation
        string limitKey = "RecursiveMerge_Array_1k_Ops_MaxMs";

        var ts1 = new HlcTimestamp(100, 0, "n1");
        var ts2 = new HlcTimestamp(200, 0, "n2");

        var items = new List<object>();
        for (int i = 0; i < 100; i++) items.Add(new { id = i.ToString(), val = i });

        var doc = CreateDoc("k1", new { items = items }, ts1);
        var op = CreateDoc("k1", new { items = items }, ts2).ToOplogEntry(OperationType.Put); // Same content to force id-check traversal

        // Warmup
        _resolver.Resolve(doc, op);

        // Run
        var sw = Stopwatch.StartNew();
        for (int i = 0; i < iterations; i++)
        {
            _resolver.Resolve(doc, op);
        }
        sw.Stop();

        long elapsed = sw.ElapsedMilliseconds;
        Console.WriteLine($"Executed {iterations} array merges in {elapsed}ms");

        if (_limits.TryGetValue(limitKey, out int maxMs))
        {
            elapsed.ShouldBeLessThan(maxMs, $"Performance regression! Expected < {maxMs}ms but took {elapsed}ms");
        }
    }
}

public static class DocExt
{
    /// <summary>
    /// Creates an operation log entry from a document instance.
    /// </summary>
    /// <param name="d">The source document.</param>
    /// <param name="t">The operation type to apply to the created entry.</param>
    /// <returns>A new operation log entry.</returns>
    public static OplogEntry ToOplogEntry(this Document d, OperationType t)
    {
        return new OplogEntry(d.Collection, d.Key, t, d.Content, d.UpdatedAt, string.Empty);
    }
}
173
tests/ZB.MOM.WW.CBDDC.Core.Tests/RecursiveNodeMergeConflictResolverTests.cs
Executable file
@@ -0,0 +1,173 @@
using System.Text.Json;
using ZB.MOM.WW.CBDDC.Core.Sync;

namespace ZB.MOM.WW.CBDDC.Core.Tests;

public class RecursiveNodeMergeConflictResolverTests
{
    private readonly RecursiveNodeMergeConflictResolver _resolver;

    /// <summary>
    /// Initializes a new instance of the <see cref="RecursiveNodeMergeConflictResolverTests"/> class.
    /// </summary>
    public RecursiveNodeMergeConflictResolverTests()
    {
        _resolver = new RecursiveNodeMergeConflictResolver();
    }

    private Document CreateDoc(string key, object data, HlcTimestamp ts)
    {
        var json = JsonSerializer.Serialize(data);
        var element = JsonDocument.Parse(json).RootElement;
        return new Document("test", key, element, ts, false);
    }

    private OplogEntry CreateOp(string key, object data, HlcTimestamp ts)
    {
        var json = JsonSerializer.Serialize(data);
        var element = JsonDocument.Parse(json).RootElement;
        return new OplogEntry("test", key, OperationType.Put, element, ts, string.Empty);
    }

    /// <summary>
    /// Verifies that disjoint fields are merged into a single document.
    /// </summary>
    [Fact]
    public void Resolve_ShouldMergeDisjointFields()
    {
        // Arrange
        var ts1 = new HlcTimestamp(100, 0, "n1");
        var ts2 = new HlcTimestamp(200, 0, "n2");

        var doc = CreateDoc("k1", new { name = "Alice" }, ts1);
        var op = CreateOp("k1", new { age = 30 }, ts2);

        // Act
        var result = _resolver.Resolve(doc, op);

        // Assert
        result.ShouldApply.ShouldBeTrue();
        result.MergedDocument.ShouldNotBeNull();

        var merged = result.MergedDocument.Content;
        merged.GetProperty("name").GetString().ShouldBe("Alice");
        merged.GetProperty("age").GetInt32().ShouldBe(30);
        result.MergedDocument.UpdatedAt.ShouldBe(ts2); // Max timestamp
    }

    /// <summary>
    /// Verifies that primitive collisions are resolved using the higher timestamp value.
    /// </summary>
    [Fact]
    public void Resolve_ShouldPrioritizeHigherTimestamp_PrimitiveCollision()
    {
        // Arrange
        var oldTs = new HlcTimestamp(100, 0, "n1");
        var newTs = new HlcTimestamp(200, 0, "n2");

        var doc = CreateDoc("k1", new { val = "Old" }, oldTs);
        var op = CreateOp("k1", new { val = "New" }, newTs);

        // Act - Remote is newer
        var result1 = _resolver.Resolve(doc, op);
        result1.MergedDocument!.Content.GetProperty("val").GetString().ShouldBe("New");

        // Act - Local is newer (simulating an outdated remote op)
        var docNew = CreateDoc("k1", new { val = "Correct" }, newTs);
        var opOld = CreateOp("k1", new { val = "Stale" }, oldTs);

        var result2 = _resolver.Resolve(docNew, opOld);
        result2.MergedDocument!.Content.GetProperty("val").GetString().ShouldBe("Correct");
    }

    /// <summary>
    /// Verifies that nested object content is merged recursively.
    /// </summary>
    [Fact]
    public void Resolve_ShouldRecursivelyMergeObjects()
    {
        // Arrange
        var ts1 = new HlcTimestamp(100, 0, "n1");
        var ts2 = new HlcTimestamp(200, 0, "n2");

        var doc = CreateDoc("k1", new { info = new { x = 1, y = 1 } }, ts1);
        var op = CreateOp("k1", new { info = new { y = 2, z = 3 } }, ts2);

        // Act
        var result = _resolver.Resolve(doc, op);

        // Assert
        var info = result.MergedDocument!.Content.GetProperty("info");
        info.GetProperty("x").GetInt32().ShouldBe(1);
        info.GetProperty("y").GetInt32().ShouldBe(2); // Overwritten by newer
        info.GetProperty("z").GetInt32().ShouldBe(3); // Added
    }

    /// <summary>
    /// Verifies that arrays containing object identifiers are merged by item identity.
    /// </summary>
    [Fact]
    public void Resolve_ShouldMergeArraysById()
    {
        // Arrange
        var ts1 = new HlcTimestamp(100, 0, "n1");
        var ts2 = new HlcTimestamp(200, 0, "n2");

        var doc = CreateDoc("k1", new
        {
            items = new[] {
                new { id = "1", val = "A" },
                new { id = "2", val = "B" }
            }
        }, ts1);

        var op = CreateOp("k1", new
        {
            items = new[] {
                new { id = "1", val = "A-Updated" }, // Update
                new { id = "3", val = "C" } // Insert
            }
        }, ts2);

        // Act
        var result = _resolver.Resolve(doc, op);

        // Assert
        Action<JsonElement> validate = (root) =>
        {
            var items = root.GetProperty("items");
            items.GetArrayLength().ShouldBe(3);

            // Element order is not guaranteed, so this simplified check only
            // asserts that the expected values are present somewhere in the array.
            var text = items.GetRawText();
            text.ShouldContain("A-Updated");
            text.ShouldContain("B");
            text.ShouldContain("C");
        };

        validate(result.MergedDocument!.Content);
    }

    /// <summary>
    /// Verifies that primitive arrays fall back to last-write-wins behavior.
    /// </summary>
    [Fact]
    public void Resolve_ShouldFallbackToLWW_ForPrimitiveArrays()
    {
        // Arrange
        var ts1 = new HlcTimestamp(100, 0, "n1");
        var ts2 = new HlcTimestamp(200, 0, "n2");

        var doc = CreateDoc("k1", new { tags = new[] { "a", "b" } }, ts1);
        var op = CreateOp("k1", new { tags = new[] { "c" } }, ts2);

        // Act
        var result = _resolver.Resolve(doc, op);

        // Assert
        var tags = result.MergedDocument!.Content.GetProperty("tags");
        tags.GetArrayLength().ShouldBe(1);
        tags[0].GetString().ShouldBe("c");
    }
}
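Spelled out as JSON, the array-merge-by-id case above combines local and remote state like this (element order in the merged array is not asserted by the test):

Local:  { "items": [ { "id": "1", "val": "A" }, { "id": "2", "val": "B" } ] }
Remote: { "items": [ { "id": "1", "val": "A-Updated" }, { "id": "3", "val": "C" } ] }
Merged: { "items": [ { "id": "1", "val": "A-Updated" }, { "id": "2", "val": "B" }, { "id": "3", "val": "C" } ] }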
313
tests/ZB.MOM.WW.CBDDC.Core.Tests/VectorClockTests.cs
Executable file
@@ -0,0 +1,313 @@
using System.Linq;
using Xunit;
using ZB.MOM.WW.CBDDC.Core;

namespace ZB.MOM.WW.CBDDC.Core.Tests;

public class VectorClockTests
{
    /// <summary>
    /// Verifies an empty vector clock returns the default timestamp for unknown nodes.
    /// </summary>
    [Fact]
    public void EmptyVectorClock_ShouldReturnDefaultTimestamp()
    {
        // Arrange
        var vc = new VectorClock();

        // Act
        var ts = vc.GetTimestamp("node1");

        // Assert
        ts.ShouldBe(default(HlcTimestamp));
    }

    /// <summary>
    /// Verifies setting a timestamp stores it for the specified node.
    /// </summary>
    [Fact]
    public void SetTimestamp_ShouldStoreTimestamp()
    {
        // Arrange
        var vc = new VectorClock();
        var ts = new HlcTimestamp(100, 1, "node1");

        // Act
        vc.SetTimestamp("node1", ts);

        // Assert
        vc.GetTimestamp("node1").ShouldBe(ts);
    }

    /// <summary>
    /// Verifies node identifiers are returned for all known nodes.
    /// </summary>
    [Fact]
    public void NodeIds_ShouldReturnAllNodes()
    {
        // Arrange
        var vc = new VectorClock();
        vc.SetTimestamp("node1", new HlcTimestamp(100, 1, "node1"));
        vc.SetTimestamp("node2", new HlcTimestamp(200, 2, "node2"));

        // Act
        var nodeIds = vc.NodeIds.ToList();

        // Assert
        nodeIds.Count.ShouldBe(2);
        nodeIds.ShouldContain("node1");
        nodeIds.ShouldContain("node2");
    }

    /// <summary>
    /// Verifies equal vector clocks are compared as equal.
    /// </summary>
    [Fact]
    public void CompareTo_EqualClocks_ShouldReturnEqual()
    {
        // Arrange
        var vc1 = new VectorClock();
        vc1.SetTimestamp("node1", new HlcTimestamp(100, 1, "node1"));
        vc1.SetTimestamp("node2", new HlcTimestamp(200, 2, "node2"));

        var vc2 = new VectorClock();
        vc2.SetTimestamp("node1", new HlcTimestamp(100, 1, "node1"));
        vc2.SetTimestamp("node2", new HlcTimestamp(200, 2, "node2"));

        // Act
        var result = vc1.CompareTo(vc2);

        // Assert
        result.ShouldBe(CausalityRelation.Equal);
    }

    /// <summary>
    /// Verifies a clock strictly ahead of another is reported as strictly ahead.
    /// </summary>
    [Fact]
    public void CompareTo_StrictlyAhead_ShouldReturnStrictlyAhead()
    {
        // Arrange
        var vc1 = new VectorClock();
        vc1.SetTimestamp("node1", new HlcTimestamp(200, 1, "node1")); // Ahead
        vc1.SetTimestamp("node2", new HlcTimestamp(200, 2, "node2")); // Same

        var vc2 = new VectorClock();
        vc2.SetTimestamp("node1", new HlcTimestamp(100, 1, "node1"));
        vc2.SetTimestamp("node2", new HlcTimestamp(200, 2, "node2"));

        // Act
        var result = vc1.CompareTo(vc2);

        // Assert
        result.ShouldBe(CausalityRelation.StrictlyAhead);
    }

    /// <summary>
    /// Verifies a clock strictly behind another is reported as strictly behind.
    /// </summary>
    [Fact]
    public void CompareTo_StrictlyBehind_ShouldReturnStrictlyBehind()
    {
        // Arrange
        var vc1 = new VectorClock();
        vc1.SetTimestamp("node1", new HlcTimestamp(100, 1, "node1")); // Behind
        vc1.SetTimestamp("node2", new HlcTimestamp(200, 2, "node2")); // Same

        var vc2 = new VectorClock();
        vc2.SetTimestamp("node1", new HlcTimestamp(200, 1, "node1"));
        vc2.SetTimestamp("node2", new HlcTimestamp(200, 2, "node2"));

        // Act
        var result = vc1.CompareTo(vc2);

        // Assert
        result.ShouldBe(CausalityRelation.StrictlyBehind);
    }

    /// <summary>
    /// Verifies divergent per-node progress is reported as concurrent.
    /// </summary>
    [Fact]
    public void CompareTo_Concurrent_ShouldReturnConcurrent()
    {
        // Arrange - Split brain scenario
        var vc1 = new VectorClock();
        vc1.SetTimestamp("node1", new HlcTimestamp(200, 1, "node1")); // Node1 ahead
        vc1.SetTimestamp("node2", new HlcTimestamp(100, 2, "node2")); // Node2 behind

        var vc2 = new VectorClock();
        vc2.SetTimestamp("node1", new HlcTimestamp(100, 1, "node1")); // Node1 behind
        vc2.SetTimestamp("node2", new HlcTimestamp(200, 2, "node2")); // Node2 ahead

        // Act
        var result = vc1.CompareTo(vc2);

        // Assert
        result.ShouldBe(CausalityRelation.Concurrent);
    }

    /// <summary>
    /// Verifies pull candidates include nodes where the other clock is ahead.
    /// </summary>
    [Fact]
    public void GetNodesWithUpdates_ShouldReturnNodesWhereOtherIsAhead()
    {
        // Arrange
        var vc1 = new VectorClock();
        vc1.SetTimestamp("node1", new HlcTimestamp(100, 1, "node1"));
        vc1.SetTimestamp("node2", new HlcTimestamp(100, 2, "node2"));

        var vc2 = new VectorClock();
        vc2.SetTimestamp("node1", new HlcTimestamp(200, 1, "node1")); // Ahead
        vc2.SetTimestamp("node2", new HlcTimestamp(100, 2, "node2")); // Same

        // Act
        var nodesToPull = vc1.GetNodesWithUpdates(vc2).ToList();

        // Assert
        nodesToPull.Count.ShouldBe(1);
        nodesToPull.ShouldContain("node1");
    }

    /// <summary>
    /// Verifies push candidates include nodes where this clock is ahead.
    /// </summary>
    [Fact]
    public void GetNodesToPush_ShouldReturnNodesWhereThisIsAhead()
    {
        // Arrange
        var vc1 = new VectorClock();
        vc1.SetTimestamp("node1", new HlcTimestamp(200, 1, "node1")); // Ahead
        vc1.SetTimestamp("node2", new HlcTimestamp(100, 2, "node2")); // Same

        var vc2 = new VectorClock();
        vc2.SetTimestamp("node1", new HlcTimestamp(100, 1, "node1"));
        vc2.SetTimestamp("node2", new HlcTimestamp(100, 2, "node2"));

        // Act
        var nodesToPush = vc1.GetNodesToPush(vc2).ToList();

        // Assert
        nodesToPush.Count.ShouldBe(1);
        nodesToPush.ShouldContain("node1");
    }

    /// <summary>
    /// Verifies a newly introduced remote node is included in pull candidates.
    /// </summary>
    [Fact]
    public void GetNodesWithUpdates_WhenNewNodeAppearsInOther_ShouldReturnIt()
    {
        // Arrange - Simulates a new node joining the cluster
        var vc1 = new VectorClock();
        vc1.SetTimestamp("node1", new HlcTimestamp(100, 1, "node1"));

        var vc2 = new VectorClock();
        vc2.SetTimestamp("node1", new HlcTimestamp(100, 1, "node1"));
        vc2.SetTimestamp("node3", new HlcTimestamp(50, 1, "node3")); // New node

        // Act
        var nodesToPull = vc1.GetNodesWithUpdates(vc2).ToList();

        // Assert
        nodesToPull.Count.ShouldBe(1);
        nodesToPull.ShouldContain("node3");
    }

    /// <summary>
    /// Verifies merge keeps the maximum timestamp per node.
    /// </summary>
    [Fact]
    public void Merge_ShouldTakeMaximumForEachNode()
    {
        // Arrange
        var vc1 = new VectorClock();
        vc1.SetTimestamp("node1", new HlcTimestamp(200, 1, "node1"));
        vc1.SetTimestamp("node2", new HlcTimestamp(100, 2, "node2"));

        var vc2 = new VectorClock();
        vc2.SetTimestamp("node1", new HlcTimestamp(100, 1, "node1"));
        vc2.SetTimestamp("node2", new HlcTimestamp(200, 2, "node2"));
        vc2.SetTimestamp("node3", new HlcTimestamp(150, 1, "node3"));

        // Act
        vc1.Merge(vc2);

        // Assert
        vc1.GetTimestamp("node1").ShouldBe(new HlcTimestamp(200, 1, "node1")); // Kept max
        vc1.GetTimestamp("node2").ShouldBe(new HlcTimestamp(200, 2, "node2")); // Merged max
        vc1.GetTimestamp("node3").ShouldBe(new HlcTimestamp(150, 1, "node3")); // Added new
    }

    /// <summary>
    /// Verifies cloning creates an independent copy of the vector clock.
    /// </summary>
    [Fact]
    public void Clone_ShouldCreateIndependentCopy()
    {
        // Arrange
        var vc1 = new VectorClock();
        vc1.SetTimestamp("node1", new HlcTimestamp(100, 1, "node1"));

        // Act
        var vc2 = vc1.Clone();
        vc2.SetTimestamp("node2", new HlcTimestamp(200, 2, "node2"));

        // Assert
        vc1.NodeIds.Count().ShouldBe(1);
        vc2.NodeIds.Count().ShouldBe(2);
    }

    /// <summary>
    /// Verifies the string representation includes serialized node timestamps.
    /// </summary>
    [Fact]
    public void ToString_ShouldReturnReadableFormat()
    {
        // Arrange
        var vc = new VectorClock();
        vc.SetTimestamp("node1", new HlcTimestamp(100, 1, "node1"));
        vc.SetTimestamp("node2", new HlcTimestamp(200, 2, "node2"));

        // Act
        var str = vc.ToString();

        // Assert
        str.ShouldContain("node1:100:1:node1");
        str.ShouldContain("node2:200:2:node2");
    }

    /// <summary>
    /// Verifies split-brain updates are detected as concurrent.
    /// </summary>
    [Fact]
    public void SplitBrainScenario_ShouldDetectConcurrency()
    {
        // Arrange - Simulating a network partition scenario
        // Partition 1: node1 and node2 are alive
        var vcPartition1 = new VectorClock();
        vcPartition1.SetTimestamp("node1", new HlcTimestamp(300, 5, "node1"));
        vcPartition1.SetTimestamp("node2", new HlcTimestamp(250, 3, "node2"));
        vcPartition1.SetTimestamp("node3", new HlcTimestamp(100, 1, "node3")); // Old data

        // Partition 2: node3 is isolated
        var vcPartition2 = new VectorClock();
        vcPartition2.SetTimestamp("node1", new HlcTimestamp(150, 2, "node1")); // Old data
        vcPartition2.SetTimestamp("node2", new HlcTimestamp(150, 1, "node2")); // Old data
        vcPartition2.SetTimestamp("node3", new HlcTimestamp(400, 8, "node3")); // New data

        // Act
        var relation = vcPartition1.CompareTo(vcPartition2);
        var partition1NeedsToPull = vcPartition1.GetNodesWithUpdates(vcPartition2).ToList();
        var partition1NeedsToPush = vcPartition1.GetNodesToPush(vcPartition2).ToList();

        // Assert
        relation.ShouldBe(CausalityRelation.Concurrent);
        partition1NeedsToPull.Count.ShouldBe(1);
        partition1NeedsToPull.ShouldContain("node3");
        partition1NeedsToPush.Count.ShouldBe(2);
        partition1NeedsToPush.ShouldContain("node1");
        partition1NeedsToPush.ShouldContain("node2");
    }
}
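The four CausalityRelation outcomes pinned down above follow the standard vector clock comparison: walk the union of node ids, track whether this clock is ever ahead and ever behind, and classify from those two flags. A hypothetical sketch of that rule — not the library's actual implementation — assuming per-node timestamps are comparable and that a missing node counts as the default timestamp (matching EmptyVectorClock_ShouldReturnDefaultTimestamp):

static CausalityRelation Classify<T>(
    IReadOnlyDictionary<string, T> a,
    IReadOnlyDictionary<string, T> b)
    where T : struct, IComparable<T>
{
    bool ahead = false, behind = false;
    foreach (var node in a.Keys.Union(b.Keys))
    {
        var x = a.TryGetValue(node, out var va) ? va : default; // unknown node => default timestamp
        var y = b.TryGetValue(node, out var vb) ? vb : default;
        var cmp = x.CompareTo(y);
        if (cmp > 0) ahead = true;
        else if (cmp < 0) behind = true;
    }
    return (ahead, behind) switch
    {
        (false, false) => CausalityRelation.Equal,
        (true, false)  => CausalityRelation.StrictlyAhead,
        (false, true)  => CausalityRelation.StrictlyBehind,
        _              => CausalityRelation.Concurrent
    };
}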
37
tests/ZB.MOM.WW.CBDDC.Core.Tests/ZB.MOM.WW.CBDDC.Core.Tests.csproj
Executable file
@@ -0,0 +1,37 @@
<Project Sdk="Microsoft.NET.Sdk">

  <PropertyGroup>
    <AssemblyName>ZB.MOM.WW.CBDDC.Core.Tests</AssemblyName>
    <RootNamespace>ZB.MOM.WW.CBDDC.Core.Tests</RootNamespace>
    <PackageId>ZB.MOM.WW.CBDDC.Core.Tests</PackageId>
    <TargetFramework>net10.0</TargetFramework>
    <ImplicitUsings>enable</ImplicitUsings>
    <Nullable>enable</Nullable>
    <NoWarn>$(NoWarn);xUnit1031;xUnit1051</NoWarn>
    <IsPackable>false</IsPackable>
  </PropertyGroup>

  <ItemGroup>
    <PackageReference Include="coverlet.collector" Version="6.0.4" />
    <PackageReference Include="NSubstitute" Version="5.3.0" />
    <PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.14.1" />
    <PackageReference Include="Shouldly" Version="4.3.0" />
    <PackageReference Include="xunit.runner.visualstudio" Version="3.1.4" />
    <PackageReference Include="xunit.v3" Version="3.2.0" />
  </ItemGroup>

  <ItemGroup>
    <Using Include="Xunit" />
  </ItemGroup>

  <ItemGroup>
    <None Update="benchmark_limits.json">
      <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
    </None>
  </ItemGroup>

  <ItemGroup>
    <ProjectReference Include="..\..\src\ZB.MOM.WW.CBDDC.Core\ZB.MOM.WW.CBDDC.Core.csproj" />
  </ItemGroup>

</Project>
4
tests/ZB.MOM.WW.CBDDC.Core.Tests/benchmark_limits.json
Executable file
@@ -0,0 +1,4 @@
{
  "RecursiveMerge_Simple_10k_Ops_MaxMs": 500,
  "RecursiveMerge_Array_1k_Ops_MaxMs": 1500
}