Implement in-process multi-dataset sync isolation across core, network, persistence, and tests
All checks were successful
NuGet Package Publish / nuget (push) Successful in 1m14s

This commit is contained in:
Joseph Doherty
2026-02-22 11:58:34 -05:00
parent c06b56172a
commit 8e97061ab8
60 changed files with 4519 additions and 559 deletions

View File

@@ -113,12 +113,7 @@ internal sealed class BenchmarkPeerNode : IAsyncDisposable
/// <summary>
/// Inserts the user when no row with the same id exists, otherwise updates it,
/// then persists the change set.
/// </summary>
public async Task UpsertUserAsync(User user)
{
    User? existing = Context.Users.Find(u => u.Id == user.Id).FirstOrDefault();
    if (existing == null)
        await Context.Users.InsertAsync(user);
    else
        await Context.Users.UpdateAsync(user);
    // Removed the redundant unconditional UpdateAsync that followed the if/else:
    // it re-ran the update after an insert and double-updated existing rows.
    await Context.SaveChangesAsync();
}
@@ -127,6 +122,11 @@ internal sealed class BenchmarkPeerNode : IAsyncDisposable
return Context.Users.Find(u => u.Id == userId).Any();
}
/// <summary>Counts stored users whose id starts with <paramref name="prefix"/> (ordinal comparison).</summary>
public int CountUsersWithPrefix(string prefix)
{
    int matches = 0;
    foreach (User user in Context.Users.FindAll())
    {
        if (user.Id.StartsWith(prefix, StringComparison.Ordinal))
            matches++;
    }
    return matches;
}
public async ValueTask DisposeAsync()
{
try

View File

@@ -0,0 +1,142 @@
using BenchmarkDotNet.Attributes;
using System.Net;
using System.Net.Sockets;
using ZB.MOM.WW.CBDDC.Core.Network;
using ZB.MOM.WW.CBDDC.Sample.Console;
namespace ZB.MOM.WW.CDBBC.E2E.Benchmark.Tests;
[MemoryDiagnoser]
[SimpleJob(launchCount: 1, warmupCount: 0, iterationCount: 1)]
public class OfflineResyncThroughputBenchmarks
{
private const int BacklogOperationCount = 10_000;
private BenchmarkPeerNode _onlineNode = null!;
private BenchmarkPeerNode _offlineNode = null!;
private int _runSequence;
private string _currentPrefix = string.Empty;
/// <summary>
/// No-op: each iteration builds its own node pair in the [IterationSetup] hooks,
/// so there is no shared state to prepare globally.
/// </summary>
[GlobalSetup]
public Task GlobalSetupAsync() => Task.CompletedTask;
/// <summary>
/// Intentionally a no-op: explicitly disposing the nodes inside BenchmarkDotNet
/// child processes can hit a callback race in the embedded Surreal engine, so
/// resource release is deferred to process teardown.
/// </summary>
[GlobalCleanup]
public Task GlobalCleanupAsync() => Task.CompletedTask;
[IterationSetup(Target = nameof(OfflineBacklogWriteThroughput100k))]
public void SetupOfflineWriteThroughput()
{
    // A unique prefix per iteration keeps each run's rows distinguishable.
    int sequence = Interlocked.Increment(ref _runSequence);
    _currentPrefix = $"offline-write-{sequence:D6}";
    // IterationSetup must be synchronous, hence the blocking wait.
    InitializeIterationNodesAsync().GetAwaiter().GetResult();
}
// NOTE(review): the method name says "100k" but BacklogOperationCount is 10_000
// (the [Benchmark] description correctly says 10K) — consider renaming together
// with its [IterationSetup] Target reference.
[Benchmark(Description = "Offline backlog write throughput (10K ops)", OperationsPerInvoke = BacklogOperationCount)]
public async Task OfflineBacklogWriteThroughput100k()
{
// Writes the whole backlog against the online node while the offline node is down.
await WriteBatchAsync(_currentPrefix, BacklogOperationCount);
}
[IterationSetup(Target = nameof(OfflineNodeResyncDurationAfter100kBacklog))]
public void SetupOfflineResyncBenchmark()
{
    // Pre-populate the full backlog on the online node so the benchmark itself
    // measures only the re-sync. IterationSetup must be synchronous.
    int sequence = Interlocked.Increment(ref _runSequence);
    _currentPrefix = $"offline-resync-{sequence:D6}";
    InitializeIterationNodesAsync().GetAwaiter().GetResult();
    WriteBatchAsync(_currentPrefix, BacklogOperationCount).GetAwaiter().GetResult();
}
// NOTE(review): "100k" in the name vs. the actual 10K backlog — see BacklogOperationCount.
[Benchmark(Description = "Offline node re-sync duration after 10K backlog")]
public async Task OfflineNodeResyncDurationAfter100kBacklog()
{
// Starting the node triggers catch-up replication; the wait bounds the measurement.
await _offlineNode.StartAsync();
await WaitForReplicationAsync(_currentPrefix, BacklogOperationCount, TimeSpan.FromMinutes(3));
}
/// <summary>
/// Sequentially upserts <paramref name="count"/> users whose ids are derived from
/// <paramref name="prefix"/> plus a zero-padded ordinal.
/// </summary>
private async Task WriteBatchAsync(string prefix, int count)
{
    for (var index = 0; index < count; index++)
        await _onlineNode.UpsertUserAsync(CreateUser($"{prefix}-{index:D6}"));
}
/// <summary>
/// Polls the offline node until at least <paramref name="expectedCount"/> users with
/// <paramref name="prefix"/> have replicated, throwing <see cref="TimeoutException"/>
/// if the deadline passes first.
/// </summary>
private async Task WaitForReplicationAsync(string prefix, int expectedCount, TimeSpan timeout)
{
    DateTime deadline = DateTime.UtcNow.Add(timeout);
    while (DateTime.UtcNow < deadline)
    {
        if (_offlineNode.CountUsersWithPrefix(prefix) >= expectedCount)
            return;
        await Task.Delay(250);
    }
    // Fix: re-check once after the deadline — replication may have completed during
    // the final 250 ms delay slice, which previously still produced a timeout.
    int replicatedCount = _offlineNode.CountUsersWithPrefix(prefix);
    if (replicatedCount >= expectedCount)
        return;
    throw new TimeoutException(
        $"Timed out waiting for re-sync. Expected {expectedCount}, replicated {replicatedCount}.");
}
/// <summary>Builds a deterministic benchmark user for the given id.</summary>
private static User CreateUser(string userId) => new()
{
    Id = userId,
    Name = $"user-{userId}",
    Age = 30,
    Address = new Address { City = "OfflineBenchmarkCity" }
};
/// <summary>
/// Asks the OS for a currently free loopback TCP port by binding to port 0 and
/// reading back the assigned endpoint. The listener is released before returning.
/// </summary>
private static int GetAvailableTcpPort()
{
    var probe = new TcpListener(IPAddress.Loopback, 0);
    probe.Start();
    try
    {
        return ((IPEndPoint)probe.LocalEndpoint).Port;
    }
    finally
    {
        probe.Stop();
    }
}
/// <summary>
/// Creates a fresh online/offline node pair on two distinct loopback ports sharing a
/// one-off cluster token, wires each node to know about the other, and starts only
/// the online node. The offline node is started later by the benchmark itself.
/// </summary>
private async Task InitializeIterationNodesAsync()
{
    int onlinePort = GetAvailableTcpPort();
    int offlinePort;
    do
    {
        offlinePort = GetAvailableTcpPort();
    }
    while (offlinePort == onlinePort);

    // Fresh token per iteration isolates the pair from any earlier cluster state.
    string clusterToken = Guid.NewGuid().ToString("N");

    _onlineNode = BenchmarkPeerNode.Create(
        "offline-benchmark-online",
        onlinePort,
        clusterToken,
        [
            new KnownPeerConfiguration
            {
                NodeId = "offline-benchmark-offline",
                Host = "127.0.0.1",
                Port = offlinePort
            }
        ]);
    _offlineNode = BenchmarkPeerNode.Create(
        "offline-benchmark-offline",
        offlinePort,
        clusterToken,
        [
            new KnownPeerConfiguration
            {
                NodeId = "offline-benchmark-online",
                Host = "127.0.0.1",
                Port = onlinePort
            }
        ]);

    await _onlineNode.StartAsync();
    // Short grace period for the listener to come up before callers write.
    await Task.Delay(250);
}
}

View File

@@ -0,0 +1,50 @@
using System;
using System.Collections.Generic;
namespace ZB.MOM.WW.CDBBC.E2E.Benchmark.Tests;
/// <summary>
/// Plain DTO describing a single Serilog event as persisted for the storage benchmarks.
/// </summary>
public sealed class SerilogLogEntry
{
    /// <summary>Moment the log event was emitted.</summary>
    public DateTimeOffset Timestamp { get; set; }

    /// <summary>Severity of the event (e.g. Information, Warning, Error).</summary>
    public string Level { get; set; } = "Information";

    /// <summary>Category/logger that produced the event, usually a class name.</summary>
    public string? LoggerName { get; set; }

    /// <summary>Correlation identifier linking the entries of one request.</summary>
    public string? ContextId { get; set; }

    /// <summary>Serilog message template, before rendering.</summary>
    public string MessageTemplate { get; set; } = string.Empty;

    /// <summary>Message text with the template holes filled in.</summary>
    public string? RenderedMessage { get; set; }

    /// <summary>Formatted exception information, when one was logged.</summary>
    public string? Exception { get; set; }

    /// <summary>Structured properties captured from the Serilog log context.</summary>
    public Dictionary<string, object?> ContextProperties { get; set; } = new(StringComparer.Ordinal);
}

View File

@@ -0,0 +1,440 @@
using System.Diagnostics;
using System.Text;
using BenchmarkDotNet.Attributes;
using SurrealDb.Net.Models;
using SurrealDb.Net.Models.Response;
using ZB.MOM.WW.CBDDC.Persistence.Surreal;
namespace ZB.MOM.WW.CDBBC.E2E.Benchmark.Tests;
[MemoryDiagnoser]
[SimpleJob(launchCount: 1, warmupCount: 1, iterationCount: 3)]
public class SurrealLogStorageBenchmarks
{
private const int LogRecordCount = 100_000;
private const int InsertBatchSize = 500;
private const string LogTable = "benchmark_log_entry";
private const string LogKvTable = "benchmark_log_kv";
private static readonly string[] LoggerNames =
[
"Api.RequestHandler",
"Api.AuthController",
"Api.OrderController",
"Api.InventoryController",
"Api.CustomerController",
"Workers.OutboxPublisher",
"Workers.NotificationDispatcher",
"Infrastructure.SqlRepository",
"Infrastructure.CacheService",
"Infrastructure.HttpClient",
"Domain.OrderService",
"Domain.PricingService"
];
private static readonly string[] TenantIds =
[
"tenant-01",
"tenant-02",
"tenant-03",
"tenant-04",
"tenant-05",
"tenant-06",
"tenant-07",
"tenant-08"
];
private CBDDCSurrealEmbeddedClient _surrealClient = null!;
private string _databasePath = string.Empty;
private string _workDir = string.Empty;
private DateTime _seedBaseUtc;
private DateTime _queryRangeStartUtc;
private DateTime _queryRangeEndUtc;
private string _contextIdQueryValue = string.Empty;
private string _loggerQueryValue = string.Empty;
private string _contextStringKeyQueryValue = string.Empty;
private string _contextStringValueQueryValue = string.Empty;
private string _contextNumericKeyQueryValue = string.Empty;
private int _contextNumericValueQueryValue;
/// <summary>
/// One-time setup: creates an isolated embedded Surreal database under the temp
/// directory, defines the log schema and indexes, fixes the query parameter values,
/// then seeds the full data set while reporting seed time and on-disk size.
/// </summary>
[GlobalSetup]
public async Task GlobalSetupAsync()
{
// Unique work dir per run so repeated runs never share RocksDB state.
_workDir = Path.Combine(Path.GetTempPath(), $"cbddc-serilog-benchmark-{Guid.NewGuid():N}");
Directory.CreateDirectory(_workDir);
_databasePath = Path.Combine(_workDir, "serilog.rocksdb");
_surrealClient = new CBDDCSurrealEmbeddedClient(
new CBDDCSurrealEmbeddedOptions
{
Endpoint = "rocksdb://local",
DatabasePath = _databasePath,
Namespace = "cbddc_benchmark",
// Unique database name isolates this run's tables from any previous run.
Database = $"serilog_{Guid.NewGuid():N}",
// CDC disabled so change capture does not distort storage/query numbers.
Cdc = new CBDDCSurrealCdcOptions { Enabled = false }
});
await _surrealClient.InitializeAsync();
await DefineSchemaAndIndexesAsync();
_seedBaseUtc = DateTime.UtcNow.AddDays(-1);
// Query parameters aimed at the middle of the seeded data distribution.
_contextIdQueryValue = BuildContextId(LogRecordCount / 2);
_loggerQueryValue = LoggerNames[3];
_contextStringKeyQueryValue = "tenantId";
_contextStringValueQueryValue = TenantIds[5];
_contextNumericKeyQueryValue = "statusCode";
_contextNumericValueQueryValue = 500;
// 30-second window starting 6 minutes into the timeline (rows are 10 ms apart).
_queryRangeStartUtc = _seedBaseUtc.AddMinutes(6);
_queryRangeEndUtc = _queryRangeStartUtc.AddSeconds(30);
var seedTimer = Stopwatch.StartNew();
await InsertLogRecordsAsync();
seedTimer.Stop();
long sizeBytes = CalculatePathSizeBytes(_databasePath);
Console.WriteLine(
$"Seeded {LogRecordCount:N0} records in {seedTimer.Elapsed.TotalSeconds:F2}s. " +
$"RocksDB size: {sizeBytes / (1024d * 1024d):F2} MiB ({sizeBytes:N0} bytes). Path: {_databasePath}");
}
/// <summary>
/// No-op: explicit disposal of the embedded Surreal client in BenchmarkDotNet child
/// processes is avoided because of a known native callback race; the OS reclaims
/// resources at process exit.
/// </summary>
[GlobalCleanup]
public Task GlobalCleanupAsync() => Task.CompletedTask;
/// <summary>
/// Latest 200 log rows for one correlation id; intended to be served by
/// idx_log_contextid_ts (contextId, timestamp).
/// </summary>
[Benchmark(Description = "Query by contextId (latest 200 rows)")]
public async Task QueryByContextIdAsync()
{
await ExecuteQueryAsync(
$"""
SELECT * FROM {LogTable}
WHERE contextId = $contextId
ORDER BY timestamp DESC
LIMIT 200;
""",
new Dictionary<string, object?> { ["contextId"] = _contextIdQueryValue });
}
/// <summary>
/// Latest 200 rows for one logger within the fixed 30-second window; intended to be
/// served by idx_log_logger_ts (loggerName, timestamp).
/// </summary>
[Benchmark(Description = "Query by loggerName + timestamp range (latest 200 rows)")]
public async Task QueryByLoggerAndTimestampAsync()
{
await ExecuteQueryAsync(
$"""
SELECT * FROM {LogTable}
WHERE loggerName = $loggerName
AND timestamp >= $fromTs
AND timestamp <= $toTs
ORDER BY timestamp DESC
LIMIT 200;
""",
new Dictionary<string, object?>
{
["loggerName"] = _loggerQueryValue,
["fromTs"] = _queryRangeStartUtc,
["toTs"] = _queryRangeEndUtc
});
}
/// <summary>
/// Two-phase lookup: first resolve matching log ids from the KV side table (backed by
/// the loggerName/key/valueStr/timestamp index), then fetch the full rows by id.
/// </summary>
[Benchmark(Description = "Query by loggerName + timestamp + arbitrary context string key/value")]
public async Task QueryByLoggerTimestampAndContextKeyAsync()
{
await ExecuteQueryAsync(
$"""
LET $logIds = (
SELECT VALUE logId FROM {LogKvTable}
WHERE loggerName = $loggerName
AND key = $contextKey
AND valueStr = $contextValueStr
AND timestamp >= $fromTs
AND timestamp <= $toTs
ORDER BY timestamp DESC
LIMIT 200
);
SELECT * FROM {LogTable}
WHERE id INSIDE $logIds
ORDER BY timestamp DESC
LIMIT 200;
""",
new Dictionary<string, object?>
{
["loggerName"] = _loggerQueryValue,
["fromTs"] = _queryRangeStartUtc,
["toTs"] = _queryRangeEndUtc,
["contextKey"] = _contextStringKeyQueryValue,
["contextValueStr"] = _contextStringValueQueryValue
});
}
/// <summary>
/// Numeric variant of the two-phase KV lookup: ids are resolved via the
/// loggerName/key/valueNum/timestamp index, then full rows fetched by id.
/// </summary>
[Benchmark(Description = "Query by loggerName + timestamp + arbitrary context number key/value")]
public async Task QueryByLoggerTimestampAndNumericContextKeyAsync()
{
await ExecuteQueryAsync(
$"""
LET $logIds = (
SELECT VALUE logId FROM {LogKvTable}
WHERE loggerName = $loggerName
AND key = $contextKey
AND valueNum = $contextValueNum
AND timestamp >= $fromTs
AND timestamp <= $toTs
ORDER BY timestamp DESC
LIMIT 200
);
SELECT * FROM {LogTable}
WHERE id INSIDE $logIds
ORDER BY timestamp DESC
LIMIT 200;
""",
new Dictionary<string, object?>
{
["loggerName"] = _loggerQueryValue,
["fromTs"] = _queryRangeStartUtc,
["toTs"] = _queryRangeEndUtc,
["contextKey"] = _contextNumericKeyQueryValue,
["contextValueNum"] = _contextNumericValueQueryValue
});
}
/// <summary>Reports the current on-disk RocksDB footprint in bytes.</summary>
[Benchmark(Description = "RocksDB size (bytes)")]
public long GetDatabaseFileSizeBytes() => CalculatePathSizeBytes(_databasePath);
/// <summary>
/// Defines the SCHEMAFULL main log table and the key/value side table used to index
/// arbitrary context properties, plus the composite indexes backing each benchmark
/// query. OVERWRITE keeps the definitions idempotent across runs.
/// </summary>
private async Task DefineSchemaAndIndexesAsync()
{
string schemaSql =
$"""
DEFINE TABLE OVERWRITE {LogTable} SCHEMAFULL;
DEFINE FIELD OVERWRITE timestamp ON TABLE {LogTable} TYPE datetime;
DEFINE FIELD OVERWRITE level ON TABLE {LogTable} TYPE string;
DEFINE FIELD OVERWRITE loggerName ON TABLE {LogTable} TYPE option<string>;
DEFINE FIELD OVERWRITE contextId ON TABLE {LogTable} TYPE option<string>;
DEFINE FIELD OVERWRITE messageTemplate ON TABLE {LogTable} TYPE string;
DEFINE FIELD OVERWRITE renderedMessage ON TABLE {LogTable} TYPE option<string>;
DEFINE FIELD OVERWRITE exception ON TABLE {LogTable} TYPE option<string>;
DEFINE FIELD OVERWRITE contextValues ON TABLE {LogTable} TYPE object FLEXIBLE;
DEFINE INDEX OVERWRITE idx_log_contextid_ts
ON TABLE {LogTable} COLUMNS contextId, timestamp;
DEFINE INDEX OVERWRITE idx_log_logger_ts
ON TABLE {LogTable} COLUMNS loggerName, timestamp;
DEFINE TABLE OVERWRITE {LogKvTable} SCHEMAFULL;
DEFINE FIELD OVERWRITE logId ON TABLE {LogKvTable} TYPE record<{LogTable}>;
DEFINE FIELD OVERWRITE loggerName ON TABLE {LogKvTable} TYPE string;
DEFINE FIELD OVERWRITE timestamp ON TABLE {LogKvTable} TYPE datetime;
DEFINE FIELD OVERWRITE key ON TABLE {LogKvTable} TYPE string;
DEFINE FIELD OVERWRITE valueStr ON TABLE {LogKvTable} TYPE option<string>;
DEFINE FIELD OVERWRITE valueNum ON TABLE {LogKvTable} TYPE option<number>;
DEFINE FIELD OVERWRITE valueBool ON TABLE {LogKvTable} TYPE option<bool>;
DEFINE INDEX OVERWRITE idx_logkv_logger_key_str_ts
ON TABLE {LogKvTable} COLUMNS loggerName, key, valueStr, timestamp;
DEFINE INDEX OVERWRITE idx_logkv_logger_key_num_ts
ON TABLE {LogKvTable} COLUMNS loggerName, key, valueNum, timestamp;
DEFINE INDEX OVERWRITE idx_logkv_logger_key_bool_ts
ON TABLE {LogKvTable} COLUMNS loggerName, key, valueBool, timestamp;
DEFINE INDEX OVERWRITE idx_logkv_logid
ON TABLE {LogKvTable} COLUMNS logId;
""";
var response = await _surrealClient.RawQueryAsync(schemaSql);
EnsureSuccessfulResponse(response, "Schema definition");
}
/// <summary>
/// Seeds LogRecordCount rows in transactional batches of InsertBatchSize. Each log
/// row is UPSERTed together with one KV side-table row per non-null context value;
/// all ids and contents are bound as parameters (no string-built values).
/// </summary>
private async Task InsertLogRecordsAsync()
{
for (var batchStart = 0; batchStart < LogRecordCount; batchStart += InsertBatchSize)
{
int batchCount = Math.Min(InsertBatchSize, LogRecordCount - batchStart);
var sqlBuilder = new StringBuilder();
sqlBuilder.AppendLine("BEGIN TRANSACTION;");
// Capacity hint covers the two parameters per log row; the KV parameters
// added below push the real count higher.
var parameters = new Dictionary<string, object?>(batchCount * 2);
for (var offset = 0; offset < batchCount; offset++)
{
int sequence = batchStart + offset;
string idParameterName = $"id{offset}";
string recordParameterName = $"record{offset}";
string logId = $"log-{sequence:D8}";
RecordId logRecordId = RecordId.From(LogTable, logId);
IReadOnlyDictionary<string, object?> logRecord = CreateLogRecord(sequence);
parameters[idParameterName] = logRecordId;
parameters[recordParameterName] = logRecord;
sqlBuilder.Append("UPSERT $")
.Append(idParameterName)
.Append(" CONTENT $")
.Append(recordParameterName)
.AppendLine(";");
int kvOrdinal = 0;
// One side-table row per context key/value of the parent log record.
foreach (IReadOnlyDictionary<string, object?> kvRow in CreateKvRows(logId, logRecordId, logRecord))
{
string kvIdParameterName = $"kvid{offset}_{kvOrdinal}";
string kvRecordParameterName = $"kvrecord{offset}_{kvOrdinal}";
parameters[kvIdParameterName] = RecordId.From(LogKvTable, $"{logId}-{kvOrdinal:D2}");
parameters[kvRecordParameterName] = kvRow;
sqlBuilder.Append("UPSERT $")
.Append(kvIdParameterName)
.Append(" CONTENT $")
.Append(kvRecordParameterName)
.AppendLine(";");
kvOrdinal++;
}
}
sqlBuilder.AppendLine("COMMIT TRANSACTION;");
var response = await _surrealClient.RawQueryAsync(sqlBuilder.ToString(), parameters);
EnsureSuccessfulResponse(response, $"Insert batch starting at row {batchStart}");
}
}
/// <summary>
/// Builds one deterministic log record for the given sequence number: timestamps
/// advance 10 ms per row from _seedBaseUtc, and logger/tenant/level/exception vary
/// on fixed modulo cycles so query selectivity is reproducible.
/// </summary>
private IReadOnlyDictionary<string, object?> CreateLogRecord(int sequence)
{
DateTime timestamp = _seedBaseUtc.AddMilliseconds(sequence * 10L);
string loggerName = LoggerNames[sequence % LoggerNames.Length];
string tenantId = TenantIds[sequence % TenantIds.Length];
bool isBackground = sequence % 7 == 0;
// One simulated exception every 2,500 rows (same cycle as the "Error" level).
string? exception = sequence % 2_500 == 0
? "System.InvalidOperationException: simulated benchmark exception."
: null;
var record = new Dictionary<string, object?>
{
["timestamp"] = timestamp,
["level"] = ResolveLogLevel(sequence),
["loggerName"] = loggerName,
["contextId"] = BuildContextId(sequence),
["messageTemplate"] = "Processed request {RequestId} for {Route}",
["renderedMessage"] = $"Processed request req-{sequence:D8} for /api/items/{sequence % 250}",
["contextValues"] = new Dictionary<string, object?>
{
["tenantId"] = tenantId,
["requestId"] = $"req-{sequence:D8}",
["route"] = $"/api/items/{sequence % 250}",
// statusCode 500 on every 20th row gives the numeric-context query ~5% selectivity.
["statusCode"] = sequence % 20 == 0 ? 500 : 200,
["elapsedMs"] = 5 + (sequence % 200),
["nodeId"] = $"node-{sequence % 8:D2}",
["isBackground"] = isBackground
}
};
// The schema declares exception as option<string>, so the key is omitted when null.
if (!string.IsNullOrEmpty(exception))
record["exception"] = exception;
return record;
}
/// <summary>
/// Projects each non-null context value of a log record into one row for the KV side
/// table, typed into valueStr/valueNum/valueBool so each value type has its own index.
/// Yields nothing when loggerName, timestamp, or contextValues are missing or mistyped.
/// </summary>
/// <remarks>
/// NOTE(review): the <paramref name="logId"/> parameter is never read — the record
/// link comes from <paramref name="logRecordId"/>; consider removing it (the caller
/// in InsertLogRecordsAsync would need updating). The default branch uses
/// culture-sensitive ToString(); presumably only string/number/bool values occur in
/// the seeded data — confirm before relying on it for other value types.
/// </remarks>
private static IEnumerable<IReadOnlyDictionary<string, object?>> CreateKvRows(
string logId,
RecordId logRecordId,
IReadOnlyDictionary<string, object?> logRecord)
{
if (!logRecord.TryGetValue("loggerName", out object? loggerNameValue) || loggerNameValue is not string loggerName)
yield break;
if (!logRecord.TryGetValue("timestamp", out object? timestampValue) || timestampValue is not DateTime timestamp)
yield break;
if (!logRecord.TryGetValue("contextValues", out object? contextValuesObject) ||
contextValuesObject is not IReadOnlyDictionary<string, object?> contextValues)
yield break;
foreach ((string key, object? value) in contextValues)
{
if (value == null) continue;
var row = new Dictionary<string, object?>
{
["logId"] = logRecordId,
["loggerName"] = loggerName,
["timestamp"] = timestamp,
["key"] = key
};
switch (value)
{
case string stringValue:
row["valueStr"] = stringValue;
break;
case bool boolValue:
row["valueBool"] = boolValue;
break;
case sbyte or byte or short or ushort or int or uint or long or ulong or float or double or decimal:
// All numeric types funnel into valueNum as double.
row["valueNum"] = Convert.ToDouble(value);
break;
default:
row["valueStr"] = value.ToString();
break;
}
yield return row;
}
}
/// <summary>Runs a parameterized SurrealQL query and throws if any statement failed.</summary>
private async Task ExecuteQueryAsync(string query, IReadOnlyDictionary<string, object?> parameters)
{
    SurrealDbResponse response = await _surrealClient.RawQueryAsync(query, parameters);
    EnsureSuccessfulResponse(response, "Query execution");
}
/// <summary>
/// Maps a row sequence to its correlation id; every 10 consecutive rows share
/// one context.
/// </summary>
private static string BuildContextId(int sequence) => $"ctx-{sequence / 10:D6}";
/// <summary>
/// Deterministic level mix: Error every 2,500 rows, Warning every 500, otherwise
/// Information.
/// </summary>
private static string ResolveLogLevel(int sequence) => sequence switch
{
    _ when sequence % 2_500 == 0 => "Error",
    _ when sequence % 500 == 0 => "Warning",
    _ => "Information"
};
/// <summary>
/// Returns the size in bytes of a file, or the recursive total of all files under a
/// directory; 0 when the path does not exist.
/// </summary>
private static long CalculatePathSizeBytes(string path)
{
    if (File.Exists(path))
        return new FileInfo(path).Length;
    if (!Directory.Exists(path))
        return 0;
    return Directory
        .EnumerateFiles(path, "*", SearchOption.AllDirectories)
        .Sum(file => new FileInfo(file).Length);
}
/// <summary>
/// Throws when the Surreal response reports errors, summarizing at most the first
/// three of them in the exception message.
/// </summary>
private static void EnsureSuccessfulResponse(SurrealDbResponse response, string operation)
{
    if (!response.HasErrors)
        return;

    var described = response.Errors
        .Take(3)
        .Select((error, index) => DescribeError(error, index));
    throw new InvalidOperationException(
        $"{operation} failed with SurrealDB errors. Details: {string.Join(" | ", described)}");
}
/// <summary>
/// Builds a compact description of an arbitrary error object by reflecting over a
/// fixed set of well-known property names (Status, Details, Description,
/// Information, Code), falling back to the type name alone when none are present.
/// </summary>
private static string DescribeError(object error, int index)
{
    Type errorType = error.GetType();
    string[] candidateProperties = ["Status", "Details", "Description", "Information", "Code"];

    List<string> fragments = [];
    foreach (string propertyName in candidateProperties)
    {
        object? propertyValue = errorType.GetProperty(propertyName)?.GetValue(error);
        if (propertyValue != null)
            fragments.Add($"{propertyName}={propertyValue}");
    }

    return fragments.Count == 0
        ? $"error[{index}] type={errorType.Name}"
        : $"error[{index}] type={errorType.Name} {string.Join(", ", fragments)}";
}
}