Files
CBDDC/tests/ZB.MOM.WW.CBDDC.Sample.Console.Tests/OplogStoreContractTestBase.cs
Joseph Doherty 6c4714f666
All checks were successful
NuGet Package Publish / nuget (push) Successful in 1m13s
Add XML docs required by CommentChecker fixes
2026-02-23 04:39:25 -05:00

160 lines
6.2 KiB
C#

using System.Text.Json;
using ZB.MOM.WW.CBDDC.Core;
using ZB.MOM.WW.CBDDC.Core.Storage;
namespace ZB.MOM.WW.CBDDC.Sample.Console.Tests;
/// <summary>
/// Base class for <see cref="IOplogStore"/> contract tests. Concrete storage
/// implementations supply a harness via <see cref="CreateHarnessAsync"/> and
/// inherit the shared append/query/merge/drop and dataset-isolation assertions.
/// </summary>
public abstract class OplogStoreContractTestBase
{
    /// <summary>
    /// Verifies append, merge, and drop behavior across query, chain, and restart scenarios.
    /// </summary>
    [Fact]
    public async Task OplogStore_AppendQueryMergeDrop_AndLastHash_Works()
    {
        await using var harness = await CreateHarnessAsync();
        IOplogStore store = harness.Store;

        // Three chained node-a entries plus an unchained entry from node-b.
        var entry1 = CreateOplogEntry("Users", "u1", "node-a", 100, 0, "");
        var entry2 = CreateOplogEntry("Users", "u2", "node-a", 110, 0, entry1.Hash);
        var entry3 = CreateOplogEntry("Users", "u3", "node-a", 120, 1, entry2.Hash);
        var otherNode = CreateOplogEntry("Users", "u4", "node-b", 115, 0, "");

        await store.AppendOplogEntryAsync(entry1);
        await store.AppendOplogEntryAsync(entry2);
        await store.AppendOplogEntryAsync(entry3);
        await store.AppendOplogEntryAsync(otherNode);

        // Chain range is exclusive of the start hash and inclusive of the end hash.
        var chainRange = (await store.GetChainRangeAsync(entry1.Hash, entry3.Hash)).ToList();
        chainRange.Select(x => x.Hash).ToList().ShouldBe(new[] { entry2.Hash, entry3.Hash });

        // Entries strictly after (100, 0, node-a), ordered by timestamp across nodes.
        var after = (await store.GetOplogAfterAsync(new HlcTimestamp(100, 0, "node-a"))).ToList();
        after.Select(x => x.Hash).ToList().ShouldBe(new[] { entry2.Hash, otherNode.Hash, entry3.Hash });

        // Merge must deduplicate entry2 (already present) while adding the new entry.
        var mergedEntry = CreateOplogEntry("Users", "u5", "node-a", 130, 0, entry3.Hash);
        await store.MergeAsync(new[] { entry2, mergedEntry });

        var exported = (await store.ExportAsync()).ToList();
        exported.Count.ShouldBe(5);
        exported.Count(x => x.Hash == entry2.Hash).ShouldBe(1);

        // NOTE(review): the in-memory last-hash still reflects the last Append (entry3),
        // while a reopened store reads mergedEntry from persistence — i.e. MergeAsync
        // appears to bypass the append-time cache. Confirm this asymmetry is the
        // intended contract rather than a cache-invalidation gap in implementations.
        string? cachedLastNodeAHash = await store.GetLastEntryHashAsync("node-a");
        cachedLastNodeAHash.ShouldBe(entry3.Hash);

        IOplogStore rehydratedStore = harness.ReopenStore();
        string? persistedLastNodeAHash = await rehydratedStore.GetLastEntryHashAsync("node-a");
        persistedLastNodeAHash.ShouldBe(mergedEntry.Hash);

        // Drop removes all persisted entries.
        await rehydratedStore.DropAsync();
        (await rehydratedStore.ExportAsync()).ShouldBeEmpty();
    }

    /// <summary>
    /// Verifies dataset isolation between primary and secondary stores.
    /// </summary>
    [Fact]
    public async Task OplogStore_DatasetIsolation_Works()
    {
        await using var harness = await CreateHarnessAsync();

        var primaryEntry = CreateOplogEntry("Users", "p1", "node-a", 100, 0, "");
        var logsEntry = CreateOplogEntry("Users", "l1", "node-a", 100, 1, "");

        await harness.AppendOplogEntryAsync(primaryEntry, DatasetId.Primary);
        await harness.AppendOplogEntryAsync(logsEntry, DatasetId.Logs);

        // Each dataset export must contain only its own entry.
        var primary = (await harness.ExportAsync(DatasetId.Primary)).ToList();
        var logs = (await harness.ExportAsync(DatasetId.Logs)).ToList();
        primary.Count.ShouldBe(1);
        primary[0].DatasetId.ShouldBe(DatasetId.Primary);
        logs.Count.ShouldBe(1);
        logs[0].DatasetId.ShouldBe(DatasetId.Logs);
    }

    /// <summary>
    /// Verifies chain range queries return ordered linked entries.
    /// </summary>
    [Fact]
    public async Task OplogStore_GetChainRangeAsync_ReturnsOrderedLinkedRange()
    {
        await using var harness = await CreateHarnessAsync();
        IOplogStore store = harness.Store;

        var entry1 = CreateOplogEntry("Users", "k1", "node1", 1000, 0, "");
        var entry2 = CreateOplogEntry("Users", "k2", "node1", 2000, 0, entry1.Hash);
        var entry3 = CreateOplogEntry("Users", "k3", "node1", 3000, 0, entry2.Hash);

        await store.AppendOplogEntryAsync(entry1);
        await store.AppendOplogEntryAsync(entry2);
        await store.AppendOplogEntryAsync(entry3);

        // Range from entry1 to entry3 yields the successors of entry1, in chain order.
        var range = (await store.GetChainRangeAsync(entry1.Hash, entry3.Hash)).ToList();
        range.Count.ShouldBe(2);
        range[0].Hash.ShouldBe(entry2.Hash);
        range[1].Hash.ShouldBe(entry3.Hash);
    }

    /// <summary>
    /// Creates the contract harness for this test class.
    /// </summary>
    /// <returns>A task producing the harness that owns the store under test.</returns>
    protected abstract Task<IOplogStoreContractHarness> CreateHarnessAsync();

    /// <summary>
    /// Creates a reusable oplog entry with deterministic timestamps.
    /// </summary>
    /// <param name="collection">The collection name.</param>
    /// <param name="key">The entry key.</param>
    /// <param name="nodeId">The node identifier generating the entry.</param>
    /// <param name="wall">The wall-clock component of the HLC timestamp.</param>
    /// <param name="logic">The logical clock component of the HLC timestamp.</param>
    /// <param name="previousHash">The previous entry hash.</param>
    /// <returns>A <see cref="OperationType.Put"/> entry whose payload echoes <paramref name="key"/>.</returns>
    protected static OplogEntry CreateOplogEntry(
        string collection,
        string key,
        string nodeId,
        long wall,
        int logic,
        string previousHash)
    {
        return new OplogEntry(
            collection,
            key,
            OperationType.Put,
            JsonSerializer.SerializeToElement(new { key }),
            new HlcTimestamp(wall, logic, nodeId),
            previousHash);
    }
}
/// <summary>
/// Harness contract that supplies the store under test, supports reopening the
/// underlying storage to verify persistence, and exposes dataset-scoped append
/// and export operations for isolation tests.
/// </summary>
public interface IOplogStoreContractHarness : IAsyncDisposable
{
/// <summary>
/// Gets the active contract store.
/// </summary>
IOplogStore Store { get; }
/// <summary>
/// Reopens the harness storage.
/// </summary>
/// <returns>A store backed by the reopened (rehydrated) storage.</returns>
IOplogStore ReopenStore();
/// <summary>
/// Appends an entry for the specified dataset.
/// </summary>
/// <param name="entry">The oplog entry to append.</param>
/// <param name="datasetId">The dataset identifier.</param>
/// <param name="cancellationToken">The cancellation token.</param>
/// <returns>A task that completes when the entry has been appended.</returns>
Task AppendOplogEntryAsync(OplogEntry entry, string datasetId, CancellationToken cancellationToken = default);
/// <summary>
/// Exports entries for the specified dataset.
/// </summary>
/// <param name="datasetId">The dataset identifier.</param>
/// <param name="cancellationToken">The cancellation token.</param>
/// <returns>A task producing the entries stored for the dataset.</returns>
Task<IEnumerable<OplogEntry>> ExportAsync(string datasetId, CancellationToken cancellationToken = default);
}