Implement checkpoint modes with docs/tests and reorganize project file layout
All checks were successful
NuGet Publish / build-and-pack (push) Successful in 46s
NuGet Publish / publish-to-gitea (push) Successful in 53s

This commit is contained in:
Joseph Doherty
2026-02-21 07:56:36 -05:00
parent 3ffd468c79
commit 4c6aaa5a3f
96 changed files with 744 additions and 249 deletions

View File

@@ -0,0 +1,114 @@
using System.Diagnostics;
using System.IO;
using System.Text;
using Microsoft.Extensions.Logging;
using Serilog.Context;
namespace ZB.MOM.WW.CBDD.Tests.Benchmark;
public class ManualBenchmark
{
    // Accumulates every report line so the full run can be flushed to disk at the end.
    // readonly: the instance is cleared via _log.Clear(), never reassigned.
    // NOTE(review): not thread-safe — assumes Run is invoked from a single thread.
    private static readonly StringBuilder _log = new();

    /// <summary>
    /// Writes a message to both the structured logger and the in-memory report buffer.
    /// </summary>
    /// <param name="logger">Destination logger.</param>
    /// <param name="message">Line to record; defaults to an empty line.</param>
    private static void Log(ILogger logger, string message = "")
    {
        logger.LogInformation("{Message}", message);
        _log.AppendLine(message);
    }

    /// <summary>
    /// Tests run.
    /// </summary>
    /// <param name="logger">Logger for benchmark progress and results.</param>
    public static void Run(ILogger logger)
    {
        using var _ = LogContext.PushProperty("Benchmark", nameof(ManualBenchmark));
        _log.Clear(); // reset buffer so repeated runs do not concatenate reports
        Log(logger, "=== MANUAL BENCHMARK: CBDD ===");
        Log(logger, $"Date: {DateTime.Now}");
        Log(logger, "Testing: Complex Objects (Nested Documents + Collections)\n");
        long batchInsertMs;
        long singleInsertMs;
        long readByIdMs;

        // Phase 1: bulk insert of 1000 documents, timed as a single operation.
        using (LogContext.PushProperty("Phase", "BatchInsert"))
        {
            Log(logger, "1. Batch Insert (1000 items)");
            var insertBench = new InsertBenchmarks();
            insertBench.Setup();
            insertBench.IterationSetup();
            try
            {
                var sw = Stopwatch.StartNew();
                insertBench.DocumentDb_Insert_Batch();
                sw.Stop();
                batchInsertMs = sw.ElapsedMilliseconds;
                Log(logger, $"   CBDD InsertBulk (1000): {batchInsertMs} ms");
            }
            finally
            {
                // Always release benchmark resources, even if the timed call throws.
                insertBench.Cleanup();
            }
        }

        // Phase 2: 1000 point lookups; per-op cost reported as total / 1000.
        using (LogContext.PushProperty("Phase", "FindById"))
        {
            Log(logger, "\n2. FindById Performance (1000 operations)");
            var readBench = new ReadBenchmarks();
            readBench.Setup();
            try
            {
                var sw = Stopwatch.StartNew();
                for (int i = 0; i < 1000; i++)
                {
                    readBench.DocumentDb_FindById();
                }
                sw.Stop();
                readByIdMs = sw.ElapsedMilliseconds;
                Log(logger, $"   CBDD FindById x1000: {readByIdMs} ms ({(double)readByIdMs / 1000:F3} ms/op)");
            }
            finally
            {
                readBench.Cleanup();
            }
        }

        // Phase 3: a single document insert, timed in isolation.
        using (LogContext.PushProperty("Phase", "SingleInsert"))
        {
            Log(logger, "\n3. Single Insert");
            var insertBench = new InsertBenchmarks();
            insertBench.Setup();
            insertBench.IterationSetup();
            try
            {
                var sw = Stopwatch.StartNew();
                insertBench.DocumentDb_Insert_Single();
                sw.Stop();
                singleInsertMs = sw.ElapsedMilliseconds;
                Log(logger, $"   CBDD Single Insert: {singleInsertMs} ms");
            }
            finally
            {
                insertBench.Cleanup();
            }
        }

        // Summary banner plus persisted report for CI artifact collection.
        Log(logger, "\n============================================================================");
        Log(logger, "BENCHMARK RESULTS (CBDD ONLY):");
        Log(logger, "============================================================================");
        Log(logger, $"Batch Insert (1000): {batchInsertMs} ms");
        Log(logger, $"FindById x1000: {readByIdMs} ms");
        Log(logger, $"Single Insert: {singleInsertMs} ms");
        var artifactsDir = Path.Combine(AppContext.BaseDirectory, "BenchmarkDotNet.Artifacts", "results");
        if (!Directory.Exists(artifactsDir))
        {
            Directory.CreateDirectory(artifactsDir);
        }
        var filePath = Path.Combine(artifactsDir, "manual_report.txt");
        File.WriteAllText(filePath, _log.ToString());
        logger.LogInformation("Report saved to: {FilePath}", filePath);
    }
}

View File

@@ -0,0 +1,185 @@
using BenchmarkDotNet.Attributes;
using BenchmarkDotNet.Configs;
using BenchmarkDotNet.Jobs;
using ZB.MOM.WW.CBDD.Bson;
using ZB.MOM.WW.CBDD.Core.Collections;
using ZB.MOM.WW.CBDD.Core.Compression;
using ZB.MOM.WW.CBDD.Core.Storage;
namespace ZB.MOM.WW.CBDD.Tests.Benchmark;
[SimpleJob]
[InProcess]
[MemoryDiagnoser]
[GroupBenchmarksBy(BenchmarkLogicalGroupRule.ByCategory)]
[HtmlExporter]
[JsonExporterAttribute.Full]
public class MixedWorkloadBenchmarks
{
    /// <summary>
    /// Gets or sets whether periodic online compaction is enabled.
    /// </summary>
    [Params(false, true)]
    public bool PeriodicCompaction { get; set; }

    /// <summary>
    /// Gets or sets the number of operations per benchmark iteration.
    /// </summary>
    [Params(800)]
    public int Operations { get; set; }

    private string _dbPath = string.Empty;
    private string _walPath = string.Empty;
    private StorageEngine _storage = null!;
    private BenchmarkTransactionHolder _transactionHolder = null!;
    private DocumentCollection<Person> _collection = null!;
    private readonly List<ObjectId> _activeIds = [];
    private int _nextValueSeed;

    /// <summary>
    /// Prepares benchmark storage and seed data for each iteration.
    /// </summary>
    [IterationSetup]
    public void Setup()
    {
        // Unique file names per iteration keep runs fully isolated.
        var token = Guid.NewGuid().ToString("N");
        _dbPath = Path.Combine(AppContext.BaseDirectory, $"bench_mixed_{token}.db");
        _walPath = Path.ChangeExtension(_dbPath, ".wal");
        var compression = new CompressionOptions
        {
            EnableCompression = true,
            MinSizeBytes = 256,
            MinSavingsPercent = 0,
            Codec = CompressionCodec.Brotli
        };
        _storage = new StorageEngine(_dbPath, PageFileConfig.Default, compression);
        _transactionHolder = new BenchmarkTransactionHolder(_storage);
        _collection = new DocumentCollection<Person>(_storage, _transactionHolder, new PersonMapper());
        _activeIds.Clear();
        _nextValueSeed = 0;

        // Seed 300 documents so the mixed workload starts against a populated store.
        for (var seeded = 0; seeded < 300; seeded++)
        {
            _activeIds.Add(_collection.Insert(CreatePerson(_nextValueSeed++)));
        }
        _transactionHolder.CommitAndReset();
    }

    /// <summary>
    /// Cleans up benchmark resources for each iteration.
    /// </summary>
    [IterationCleanup]
    public void Cleanup()
    {
        _transactionHolder?.Dispose();
        _storage?.Dispose();
        if (File.Exists(_dbPath)) File.Delete(_dbPath);
        if (File.Exists(_walPath)) File.Delete(_walPath);
    }

    /// <summary>
    /// Benchmarks a mixed insert/update/delete workload.
    /// </summary>
    [Benchmark(Baseline = true)]
    [BenchmarkCategory("MixedWorkload")]
    public int InsertUpdateDeleteMix()
    {
        // Fixed seed keeps the operation sequence identical across runs.
        var rng = new Random(12345);
        for (var op = 1; op <= Operations; op++)
        {
            // 5-way cycle: 40% inserts, 40% updates, 20% deletes.
            switch (op % 5)
            {
                case 0:
                case 1:
                {
                    var newId = _collection.Insert(CreatePerson(_nextValueSeed++));
                    _activeIds.Add(newId);
                    break;
                }
                case 2:
                case 3:
                {
                    if (_activeIds.Count > 0)
                    {
                        var pick = rng.Next(_activeIds.Count);
                        var target = _collection.FindById(_activeIds[pick]);
                        if (target != null)
                        {
                            target.Age += 1;
                            target.Bio = BuildPayload(_nextValueSeed++);
                            _collection.Update(target);
                        }
                    }
                    break;
                }
                default:
                {
                    // Only delete while a healthy population remains.
                    if (_activeIds.Count > 100)
                    {
                        var pick = rng.Next(_activeIds.Count);
                        _collection.Delete(_activeIds[pick]);
                        _activeIds.RemoveAt(pick);
                    }
                    break;
                }
            }

            // Commit in batches of 50 operations.
            if (op % 50 == 0)
            {
                _transactionHolder.CommitAndReset();
            }

            // Optionally interleave a bounded online compaction pass every 200 ops.
            if (PeriodicCompaction && op % 200 == 0)
            {
                _storage.RunOnlineCompactionPass(new CompactionOptions
                {
                    OnlineMode = true,
                    OnlineBatchPageLimit = 8,
                    OnlineBatchDelay = TimeSpan.FromMilliseconds(1),
                    MaxOnlineDuration = TimeSpan.FromMilliseconds(120),
                    EnableTailTruncation = true
                });
            }
        }
        _transactionHolder.CommitAndReset();
        return _collection.Count();
    }

    // Builds a deterministic test document for the given seed value.
    private static Person CreatePerson(int seed)
    {
        return new Person
        {
            Id = ObjectId.NewObjectId(),
            FirstName = $"First_{seed}",
            LastName = $"Last_{seed}",
            Age = 18 + (seed % 60),
            Bio = BuildPayload(seed),
            CreatedAt = DateTime.UnixEpoch.AddSeconds(seed),
            Balance = seed,
            HomeAddress = new Address
            {
                Street = $"{seed} Mixed Ave",
                City = "Workload City",
                ZipCode = "10101"
            }
        };
    }

    // Produces a ~1.8 KB deterministic payload so updates exercise compression.
    private static string BuildPayload(int seed)
    {
        var sb = new System.Text.StringBuilder(1800);
        for (var chunk = 0; chunk < 64; chunk++)
        {
            sb.Append("mixed-")
              .Append(seed.ToString("D6"))
              .Append('-')
              .Append(chunk.ToString("D3"))
              .Append('|');
        }
        return sb.ToString();
    }
}

View File

@@ -0,0 +1,287 @@
using System.IO.Compression;
using System.Text.Json;
using Microsoft.Extensions.Logging;
using ZB.MOM.WW.CBDD.Bson;
using ZB.MOM.WW.CBDD.Core.Collections;
using ZB.MOM.WW.CBDD.Core.Compression;
using ZB.MOM.WW.CBDD.Core.Storage;
namespace ZB.MOM.WW.CBDD.Tests.Benchmark;
internal static class PerformanceGateSmoke
{
    private const int CompactionDocumentCount = 2_000;
    private const int CompressionDocumentCount = 1_500;

    // Cached serializer options (CA1869): avoid allocating JsonSerializerOptions per call.
    private static readonly JsonSerializerOptions ReportSerializerOptions = new() { WriteIndented = true };

    /// <summary>
    /// Runs the performance gate smoke probes and writes a report.
    /// </summary>
    /// <param name="logger">The logger.</param>
    public static void Run(ILogger logger)
    {
        var compaction = RunCompactionProbe();
        var compressionOff = RunCompressionGcProbe(enableCompression: false);
        var compressionOn = RunCompressionGcProbe(enableCompression: true);
        var report = new PerformanceGateReport(
            DateTimeOffset.UtcNow,
            compaction,
            compressionOff,
            compressionOn);
        var reportPath = WriteReport(report);
        logger.LogInformation("Performance gate smoke report written to {ReportPath}", reportPath);
        // Key=value lines on stdout so a CI gate can scrape results without parsing JSON.
        Console.WriteLine("[performance_gate]");
        Console.WriteLine($"report_path={reportPath}");
        Console.WriteLine($"compaction.pre_pages={compaction.PrePages}");
        Console.WriteLine($"compaction.post_pages={compaction.PostPages}");
        Console.WriteLine($"compaction.reclaimed_file_bytes={compaction.ReclaimedFileBytes}");
        Console.WriteLine($"compaction.throughput_bytes_per_sec={compaction.ThroughputBytesPerSecond:F2}");
        Console.WriteLine($"compaction.throughput_pages_per_sec={compaction.ThroughputPagesPerSecond:F2}");
        Console.WriteLine($"compaction.throughput_docs_per_sec={compaction.ThroughputDocumentsPerSecond:F2}");
        Console.WriteLine($"compression_off.gen0_delta={compressionOff.Gen0Delta}");
        Console.WriteLine($"compression_off.gen1_delta={compressionOff.Gen1Delta}");
        Console.WriteLine($"compression_off.gen2_delta={compressionOff.Gen2Delta}");
        Console.WriteLine($"compression_off.alloc_bytes_delta={compressionOff.AllocatedBytesDelta}");
        Console.WriteLine($"compression_on.gen0_delta={compressionOn.Gen0Delta}");
        Console.WriteLine($"compression_on.gen1_delta={compressionOn.Gen1Delta}");
        Console.WriteLine($"compression_on.gen2_delta={compressionOn.Gen2Delta}");
        Console.WriteLine($"compression_on.alloc_bytes_delta={compressionOn.AllocatedBytesDelta}");
    }

    /// <summary>
    /// Measures offline compaction effectiveness: inserts, fragments (deletes + updates),
    /// then compacts and reports page counts and throughput.
    /// </summary>
    private static CompactionProbeResult RunCompactionProbe()
    {
        var dbPath = NewDbPath("gate_compaction");
        var walPath = Path.ChangeExtension(dbPath, ".wal");
        try
        {
            using var storage = new StorageEngine(dbPath, PageFileConfig.Small);
            using var transactionHolder = new BenchmarkTransactionHolder(storage);
            var collection = new DocumentCollection<Person>(storage, transactionHolder, new PersonMapper());
            var ids = new List<ObjectId>(CompactionDocumentCount);
            for (var i = 0; i < CompactionDocumentCount; i++)
            {
                ids.Add(collection.Insert(CreatePerson(i, includeLargeBio: true)));
            }
            transactionHolder.CommitAndReset();
            storage.Checkpoint();

            // Fragment the file: delete every 3rd document, rewrite every 5th.
            // Overlapping indices (multiples of 15) hit the null FindById branch below.
            for (var i = 0; i < ids.Count; i += 3)
            {
                collection.Delete(ids[i]);
            }
            for (var i = 0; i < ids.Count; i += 5)
            {
                var current = collection.FindById(ids[i]);
                if (current == null)
                    continue;
                current.Bio = BuildBio(i + 10_000);
                current.Age += 1;
                collection.Update(current);
            }
            transactionHolder.CommitAndReset();
            storage.Checkpoint();
            var stats = storage.Compact(new CompactionOptions
            {
                OnlineMode = false,
                DefragmentSlottedPages = true,
                NormalizeFreeList = true,
                EnableTailTruncation = true
            });
            return new CompactionProbeResult(
                stats.PrePageCount,
                stats.PostPageCount,
                stats.ReclaimedFileBytes,
                stats.ThroughputBytesPerSecond,
                stats.ThroughputPagesPerSecond,
                stats.ThroughputDocumentsPerSecond);
        }
        finally
        {
            TryDelete(dbPath);
            TryDelete(walPath);
            TryDelete($"{dbPath}.compact.state");
            TryDelete($"{dbPath}.compact.tmp");
            TryDelete($"{dbPath}.compact.bak");
        }
    }

    /// <summary>
    /// Measures GC pressure (collection counts and allocated bytes) of an
    /// insert/update/read cycle with compression on or off.
    /// </summary>
    /// <param name="enableCompression">Whether to run with Brotli compression enabled.</param>
    private static CompressionGcProbeResult RunCompressionGcProbe(bool enableCompression)
    {
        var dbPath = NewDbPath(enableCompression ? "gate_gc_on" : "gate_gc_off");
        var walPath = Path.ChangeExtension(dbPath, ".wal");
        var compressionOptions = enableCompression
            ? new CompressionOptions
            {
                EnableCompression = true,
                MinSizeBytes = 256,
                MinSavingsPercent = 0,
                Codec = CompressionCodec.Brotli,
                Level = CompressionLevel.Fastest
            }
            : CompressionOptions.Default;
        try
        {
            using var storage = new StorageEngine(dbPath, PageFileConfig.Default, compressionOptions);
            using var transactionHolder = new BenchmarkTransactionHolder(storage);
            var collection = new DocumentCollection<Person>(storage, transactionHolder, new PersonMapper());

            // Settle the heap, then snapshot GC counters before the workload.
            GC.Collect();
            GC.WaitForPendingFinalizers();
            GC.Collect();
            var g0Before = GC.CollectionCount(0);
            var g1Before = GC.CollectionCount(1);
            var g2Before = GC.CollectionCount(2);
            var allocBefore = GC.GetTotalAllocatedBytes(true);

            var ids = new ObjectId[CompressionDocumentCount];
            for (var i = 0; i < CompressionDocumentCount; i++)
            {
                ids[i] = collection.Insert(CreatePerson(i, includeLargeBio: true));
            }
            transactionHolder.CommitAndReset();
            // Rewrite every 4th document to exercise update/compression paths.
            for (var i = 0; i < ids.Length; i += 4)
            {
                var current = collection.FindById(ids[i]);
                if (current == null)
                    continue;
                current.Bio = BuildBio(i + 20_000);
                current.Age += 1;
                collection.Update(current);
            }
            transactionHolder.CommitAndReset();
            var readCount = collection.FindAll().Count();
            transactionHolder.CommitAndReset();

            // Settle again so the deltas exclude collectible garbage still in flight.
            GC.Collect();
            GC.WaitForPendingFinalizers();
            GC.Collect();
            var g0After = GC.CollectionCount(0);
            var g1After = GC.CollectionCount(1);
            var g2After = GC.CollectionCount(2);
            var allocAfter = GC.GetTotalAllocatedBytes(true);
            return new CompressionGcProbeResult(
                enableCompression,
                readCount,
                g0After - g0Before,
                g1After - g1Before,
                g2After - g2Before,
                allocAfter - allocBefore);
        }
        finally
        {
            TryDelete(dbPath);
            TryDelete(walPath);
            TryDelete($"{dbPath}.compact.state");
            TryDelete($"{dbPath}.compact.tmp");
            TryDelete($"{dbPath}.compact.bak");
        }
    }

    /// <summary>
    /// Serializes the report to the BenchmarkDotNet artifacts directory and returns the path.
    /// </summary>
    private static string WriteReport(PerformanceGateReport report)
    {
        // NOTE(review): uses the current working directory, unlike ManualBenchmark which
        // uses AppContext.BaseDirectory — confirm this matches where CI collects artifacts.
        var outputDirectory = Path.Combine(Directory.GetCurrentDirectory(), "BenchmarkDotNet.Artifacts", "results");
        Directory.CreateDirectory(outputDirectory);
        var reportPath = Path.Combine(outputDirectory, "PerformanceGateSmoke-report.json");
        var json = JsonSerializer.Serialize(report, ReportSerializerOptions);
        File.WriteAllText(reportPath, json);
        return reportPath;
    }

    // Builds a deterministic test document; a large bio forces multi-page/compressible payloads.
    private static Person CreatePerson(int i, bool includeLargeBio)
    {
        return new Person
        {
            Id = ObjectId.NewObjectId(),
            FirstName = $"First_{i}",
            LastName = $"Last_{i}",
            Age = 20 + (i % 50),
            Bio = includeLargeBio ? BuildBio(i) : $"bio-{i}",
            CreatedAt = DateTime.UnixEpoch.AddMinutes(i),
            Balance = 100 + i,
            HomeAddress = new Address
            {
                Street = $"{i} Main St",
                City = "Gate City",
                ZipCode = "12345"
            },
            EmploymentHistory =
            [
                new WorkHistory
                {
                    CompanyName = $"Company_{i}",
                    Title = "Engineer",
                    DurationYears = i % 10,
                    Tags = ["csharp", "db", "compression"]
                }
            ]
        };
    }

    // Produces a ~4.5 KB deterministic, highly compressible payload string.
    private static string BuildBio(int seed)
    {
        var builder = new System.Text.StringBuilder(4500);
        for (var i = 0; i < 150; i++)
        {
            builder.Append("bio-");
            builder.Append(seed.ToString("D6"));
            builder.Append('-');
            builder.Append(i.ToString("D3"));
            builder.Append('|');
        }
        return builder.ToString();
    }

    private static string NewDbPath(string prefix)
        => Path.Combine(Path.GetTempPath(), $"{prefix}_{Guid.NewGuid():N}.db");

    // Best-effort delete: swallow I/O failures so cleanup in a finally block
    // cannot mask the primary exception (the file may still be locked by the OS).
    private static void TryDelete(string path)
    {
        try
        {
            if (File.Exists(path))
            {
                File.Delete(path);
            }
        }
        catch (IOException)
        {
            // ignore: cleanup is best effort
        }
        catch (UnauthorizedAccessException)
        {
            // ignore: cleanup is best effort
        }
    }

    private sealed record PerformanceGateReport(
        DateTimeOffset CapturedAtUtc,
        CompactionProbeResult Compaction,
        CompressionGcProbeResult CompressionOff,
        CompressionGcProbeResult CompressionOn);

    private sealed record CompactionProbeResult(
        uint PrePages,
        uint PostPages,
        long ReclaimedFileBytes,
        double ThroughputBytesPerSecond,
        double ThroughputPagesPerSecond,
        double ThroughputDocumentsPerSecond);

    private sealed record CompressionGcProbeResult(
        bool CompressionEnabled,
        int ReadCount,
        int Gen0Delta,
        int Gen1Delta,
        int Gen2Delta,
        long AllocatedBytesDelta);
}