Fix audit findings for coverage, architecture checks, and XML docs
All checks were successful
NuGet Publish / build-and-pack (push) Successful in 45s
NuGet Publish / publish-to-gitea (push) Successful in 52s

This commit is contained in:
Joseph Doherty
2026-02-20 15:43:25 -05:00
parent 5528806518
commit 3ffd468c79
99 changed files with 23746 additions and 9548 deletions

View File

@@ -10,11 +10,19 @@ internal sealed class BenchmarkTransactionHolder : ITransactionHolder, IDisposab
private readonly object _sync = new();
private ITransaction? _currentTransaction;
/// <summary>
/// Initializes a new instance of the <see cref="BenchmarkTransactionHolder"/> class.
/// </summary>
/// <param name="storage">The storage engine used to create transactions.</param>
public BenchmarkTransactionHolder(StorageEngine storage)
{
_storage = storage ?? throw new ArgumentNullException(nameof(storage));
}
/// <summary>
/// Gets the current active transaction or starts a new one.
/// </summary>
/// <returns>The current active transaction.</returns>
public ITransaction GetCurrentTransactionOrStart()
{
lock (_sync)
@@ -28,11 +36,18 @@ internal sealed class BenchmarkTransactionHolder : ITransactionHolder, IDisposab
}
}
/// <summary>
/// Gets the current active transaction or starts a new one asynchronously.
/// </summary>
/// <returns>A task that returns the current active transaction.</returns>
public Task<ITransaction> GetCurrentTransactionOrStartAsync()
{
return Task.FromResult(GetCurrentTransactionOrStart());
}
/// <summary>
/// Commits the current transaction when active and clears the holder.
/// </summary>
public void CommitAndReset()
{
lock (_sync)
@@ -53,6 +68,9 @@ internal sealed class BenchmarkTransactionHolder : ITransactionHolder, IDisposab
}
}
/// <summary>
/// Rolls back the current transaction when active and clears the holder.
/// </summary>
public void RollbackAndReset()
{
lock (_sync)
@@ -73,6 +91,9 @@ internal sealed class BenchmarkTransactionHolder : ITransactionHolder, IDisposab
}
}
/// <summary>
/// Disposes this holder and rolls back any outstanding transaction.
/// </summary>
public void Dispose()
{
RollbackAndReset();

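For orientation, the benchmark fixtures in this commit consume the holder in a consistent pattern; a minimal sketch, assembled from the PerformanceGateSmoke probe later in this diff (dbPath and person are placeholders):

using var storage = new StorageEngine(dbPath, PageFileConfig.Default);
using var holder = new BenchmarkTransactionHolder(storage);
var collection = new DocumentCollection<Person>(storage, holder, new PersonMapper());
collection.Insert(person);   // first operation starts a transaction via the holder
holder.CommitAndReset();     // commit and clear so the next operation starts fresh
// Disposing the holder rolls back any transaction still outstanding.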
View File

@@ -15,6 +15,9 @@ namespace ZB.MOM.WW.CBDD.Tests.Benchmark;
[JsonExporterAttribute.Full]
public class CompactionBenchmarks
{
/// <summary>
/// Gets or sets the number of documents used per benchmark iteration.
/// </summary>
[Params(2_000)]
public int DocumentCount { get; set; }
@@ -25,6 +28,9 @@ public class CompactionBenchmarks
private DocumentCollection<Person> _collection = null!;
private List<ObjectId> _insertedIds = [];
/// <summary>
/// Prepares benchmark state and seed data for each iteration.
/// </summary>
[IterationSetup]
public void Setup()
{
@@ -56,6 +62,9 @@ public class CompactionBenchmarks
_storage.Checkpoint();
}
/// <summary>
/// Cleans up benchmark resources and temporary files after each iteration.
/// </summary>
[IterationCleanup]
public void Cleanup()
{
@@ -66,6 +75,10 @@ public class CompactionBenchmarks
if (File.Exists(_walPath)) File.Delete(_walPath);
}
/// <summary>
/// Benchmarks reclaimed file bytes reported by offline compaction.
/// </summary>
/// <returns>The reclaimed file byte count.</returns>
[Benchmark(Baseline = true)]
[BenchmarkCategory("Compaction_Offline")]
public long OfflineCompact_ReclaimedBytes()
@@ -81,6 +94,10 @@ public class CompactionBenchmarks
return stats.ReclaimedFileBytes;
}
/// <summary>
/// Benchmarks tail bytes truncated by offline compaction.
/// </summary>
/// <returns>The truncated tail byte count.</returns>
[Benchmark]
[BenchmarkCategory("Compaction_Offline")]
public long OfflineCompact_TailBytesTruncated()

View File

@@ -20,12 +20,21 @@ public class CompressionBenchmarks
private const int SeedCount = 300;
private const int WorkloadCount = 100;
/// <summary>
/// Gets or sets whether compression is enabled for the benchmark run.
/// </summary>
[Params(false, true)]
public bool EnableCompression { get; set; }
/// <summary>
/// Gets or sets the compression codec for the benchmark run.
/// </summary>
[Params(CompressionCodec.Brotli, CompressionCodec.Deflate)]
public CompressionCodec Codec { get; set; }
/// <summary>
/// Gets or sets the compression level for the benchmark run.
/// </summary>
[Params(CompressionLevel.Fastest, CompressionLevel.Optimal)]
public CompressionLevel Level { get; set; }
@@ -38,6 +47,9 @@ public class CompressionBenchmarks
private Person[] _insertBatch = Array.Empty<Person>();
private ObjectId[] _seedIds = Array.Empty<ObjectId>();
/// <summary>
/// Prepares benchmark storage and seed data for each iteration.
/// </summary>
[IterationSetup]
public void Setup()
{
@@ -72,6 +84,9 @@ public class CompressionBenchmarks
.ToArray();
}
/// <summary>
/// Cleans up benchmark resources for each iteration.
/// </summary>
[IterationCleanup]
public void Cleanup()
{
@@ -82,6 +97,9 @@ public class CompressionBenchmarks
if (File.Exists(_walPath)) File.Delete(_walPath);
}
/// <summary>
/// Benchmarks insert workload performance.
/// </summary>
[Benchmark(Baseline = true)]
[BenchmarkCategory("Compression_InsertUpdateRead")]
public void Insert_Workload()
@@ -90,6 +108,9 @@ public class CompressionBenchmarks
_transactionHolder.CommitAndReset();
}
/// <summary>
/// Benchmarks update workload performance.
/// </summary>
[Benchmark]
[BenchmarkCategory("Compression_InsertUpdateRead")]
public void Update_Workload()
@@ -109,6 +130,9 @@ public class CompressionBenchmarks
_transactionHolder.CommitAndReset();
}
/// <summary>
/// Benchmarks read workload performance.
/// </summary>
[Benchmark]
[BenchmarkCategory("Compression_InsertUpdateRead")]
public int Read_Workload()

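For reference, the three [Params] above combine into a storage-level compression configuration; a sketch mirroring the options literal used elsewhere in this commit (dbPath is a placeholder):

var options = new CompressionOptions
{
    EnableCompression = true,                                  // [Params(false, true)]
    Codec = CompressionCodec.Brotli,                           // [Params(Brotli, Deflate)]
    Level = System.IO.Compression.CompressionLevel.Fastest,    // [Params(Fastest, Optimal)]
    MinSizeBytes = 256,
    MinSavingsPercent = 0
};
using var storage = new StorageEngine(dbPath, PageFileConfig.Default, options);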
View File

@@ -11,33 +11,55 @@ namespace ZB.MOM.WW.CBDD.Tests.Benchmark;
internal static class DatabaseSizeBenchmark
{
private static readonly int[] TargetCounts = [10_000, 1_000_000, 10_000_000];
private static readonly CompressionOptions CompressedBrotliFast = new()
{
EnableCompression = true,
MinSizeBytes = 256,
MinSavingsPercent = 0,
Codec = CompressionCodec.Brotli,
Level = System.IO.Compression.CompressionLevel.Fastest
};
private static readonly Scenario[] Scenarios =
[
// Separate compression set (no compaction)
new(
"Uncompressed",
CompressionOptions.Default),
Set: "compression",
Name: "CompressionOnly-Uncompressed",
CompressionOptions: CompressionOptions.Default,
RunCompaction: false),
new(
"Compressed-BrotliFast",
new CompressionOptions
{
EnableCompression = true,
MinSizeBytes = 256,
MinSavingsPercent = 0,
Codec = CompressionCodec.Brotli,
Level = System.IO.Compression.CompressionLevel.Fastest
})
Set: "compression",
Name: "CompressionOnly-Compressed-BrotliFast",
CompressionOptions: CompressedBrotliFast,
RunCompaction: false),
// Separate compaction set (compaction enabled)
new(
Set: "compaction",
Name: "Compaction-Uncompressed",
CompressionOptions: CompressionOptions.Default,
RunCompaction: true),
new(
Set: "compaction",
Name: "Compaction-Compressed-BrotliFast",
CompressionOptions: CompressedBrotliFast,
RunCompaction: true)
];
private const int BatchSize = 50_000;
private const int ProgressInterval = 1_000_000;
/// <summary>
/// Runs the database size benchmark for every scenario and target count, logging results and writing a CSV summary.
/// </summary>
/// <param name="logger">Logger for benchmark progress and results.</param>
public static void Run(ILogger logger)
{
var results = new List<SizeResult>(TargetCounts.Length * Scenarios.Length);
logger.LogInformation("=== CBDD Database Size Benchmark ===");
logger.LogInformation("=== CBDD Database Size Benchmark (Separate Compression/Compaction Sets) ===");
logger.LogInformation("Targets: {Targets}", string.Join(", ", TargetCounts.Select(x => x.ToString("N0"))));
logger.LogInformation("Scenarios: {Scenarios}", string.Join(", ", Scenarios.Select(x => x.Name)));
logger.LogInformation("Scenarios: {Scenarios}", string.Join(", ", Scenarios.Select(x => $"{x.Set}:{x.Name}")));
logger.LogInformation("Batch size: {BatchSize:N0}", BatchSize);
foreach (var targetCount in TargetCounts)
@@ -48,12 +70,17 @@ internal static class DatabaseSizeBenchmark
var walPath = Path.ChangeExtension(dbPath, ".wal");
using var _ = LogContext.PushProperty("TargetCount", targetCount);
using var __ = LogContext.PushProperty("Scenario", scenario.Name);
using var ___ = LogContext.PushProperty("ScenarioSet", scenario.Set);
logger.LogInformation("Starting scenario {Scenario} for target {TargetCount:N0} docs", scenario.Name, targetCount);
logger.LogInformation(
"Starting {Set} scenario {Scenario} for target {TargetCount:N0} docs",
scenario.Set,
scenario.Name,
targetCount);
var insertStopwatch = Stopwatch.StartNew();
CompressionStats compressionStats = default;
CompactionStats compactionStats = new();
long preCompactDbBytes;
long preCompactWalBytes;
long postCompactDbBytes;
@@ -93,12 +120,15 @@ internal static class DatabaseSizeBenchmark
preCompactDbBytes = File.Exists(dbPath) ? new FileInfo(dbPath).Length : 0;
preCompactWalBytes = File.Exists(walPath) ? new FileInfo(walPath).Length : 0;
if (scenario.RunCompaction)
{
compactionStats = storage.Compact(new CompactionOptions
{
EnableTailTruncation = true,
DefragmentSlottedPages = true,
NormalizeFreeList = true
});
}
postCompactDbBytes = File.Exists(dbPath) ? new FileInfo(dbPath).Length : 0;
postCompactWalBytes = File.Exists(walPath) ? new FileInfo(walPath).Length : 0;
@@ -106,7 +136,9 @@ internal static class DatabaseSizeBenchmark
}
var result = new SizeResult(
scenario.Set,
scenario.Name,
scenario.RunCompaction,
targetCount,
insertStopwatch.Elapsed,
preCompactDbBytes,
@@ -118,13 +150,16 @@ internal static class DatabaseSizeBenchmark
results.Add(result);
logger.LogInformation(
"Completed {Scenario} {TargetCount:N0} docs in {Elapsed}. pre={PreTotal}, post={PostTotal}, shrink={Shrink}, compRatio={CompRatio}",
"Completed {Set}:{Scenario} {TargetCount:N0} docs in {Elapsed}. pre={PreTotal}, post={PostTotal}, shrink={Shrink}, compactApplied={CompactionApplied}, compactReclaim={CompactReclaim}, compRatio={CompRatio}",
scenario.Set,
scenario.Name,
targetCount,
insertStopwatch.Elapsed,
FormatBytes(result.PreCompactTotalBytes),
FormatBytes(result.PostCompactTotalBytes),
FormatBytes(result.ShrinkBytes),
scenario.RunCompaction,
FormatBytes(result.CompactionStats.ReclaimedFileBytes),
result.CompressionRatioText);
TryDelete(dbPath);
@@ -133,10 +168,14 @@ internal static class DatabaseSizeBenchmark
}
logger.LogInformation("=== Size Benchmark Summary ===");
foreach (var result in results
.OrderBy(x => x.Set)
.ThenBy(x => x.TargetCount)
.ThenBy(x => x.Scenario))
{
logger.LogInformation(
"{Scenario,-22} | {Count,12:N0} docs | insert={Elapsed,12} | pre={Pre,12} | post={Post,12} | shrink={Shrink,12} | compact={CompactBytes,12} | ratio={Ratio}",
"{Set,-11} | {Scenario,-38} | {Count,12:N0} docs | insert={Elapsed,12} | pre={Pre,12} | post={Post,12} | shrink={Shrink,12} | compact={CompactBytes,12} | ratio={Ratio}",
result.Set,
result.Scenario,
result.TargetCount,
result.InsertElapsed,
@@ -146,6 +185,8 @@ internal static class DatabaseSizeBenchmark
FormatBytes(result.CompactionStats.ReclaimedFileBytes),
result.CompressionRatioText);
}
WriteSummaryCsv(results, logger);
}
private static SizeBenchmarkDocument CreateDocument(int value)
@@ -181,10 +222,42 @@ internal static class DatabaseSizeBenchmark
return $"{size:N2} {units[unitIndex]}";
}
private static void WriteSummaryCsv(IEnumerable<SizeResult> results, ILogger logger)
{
var outputDirectory = Path.Combine(Directory.GetCurrentDirectory(), "BenchmarkDotNet.Artifacts", "results");
Directory.CreateDirectory(outputDirectory);
var outputPath = Path.Combine(outputDirectory, "DatabaseSizeBenchmark-results.csv");
var lines = new List<string>
{
"set,scenario,target_count,run_compaction,insert_seconds,pre_total_bytes,post_total_bytes,shrink_bytes,compaction_reclaimed_bytes,compression_ratio_text"
};
foreach (var result in results.OrderBy(x => x.Set).ThenBy(x => x.TargetCount).ThenBy(x => x.Scenario))
{
lines.Add(string.Join(",",
result.Set,
result.Scenario,
result.TargetCount.ToString(),
result.RunCompaction ? "true" : "false",
result.InsertElapsed.TotalSeconds.ToString("F3"),
result.PreCompactTotalBytes.ToString(),
result.PostCompactTotalBytes.ToString(),
result.ShrinkBytes.ToString(),
result.CompactionStats.ReclaimedFileBytes.ToString(),
result.CompressionRatioText));
}
File.WriteAllLines(outputPath, lines);
logger.LogInformation("Database size summary CSV written to {OutputPath}", outputPath);
}
private sealed record Scenario(string Set, string Name, CompressionOptions CompressionOptions, bool RunCompaction);
private sealed record SizeResult(
string Set,
string Scenario,
bool RunCompaction,
int TargetCount,
TimeSpan InsertElapsed,
long PreCompactDbBytes,
@@ -194,10 +267,22 @@ internal static class DatabaseSizeBenchmark
CompactionStats CompactionStats,
CompressionStats CompressionStats)
{
/// <summary>
/// Gets the combined pre-compaction size of the database and WAL files, in bytes.
/// </summary>
public long PreCompactTotalBytes => PreCompactDbBytes + PreCompactWalBytes;
/// <summary>
/// Gets the combined post-compaction size of the database and WAL files, in bytes.
/// </summary>
public long PostCompactTotalBytes => PostCompactDbBytes + PostCompactWalBytes;
/// <summary>
/// Gets the number of bytes reclaimed between the pre- and post-compaction totals.
/// </summary>
public long ShrinkBytes => PreCompactTotalBytes - PostCompactTotalBytes;
/// <summary>
/// Gets the compression ratio formatted for display.
/// </summary>
public string CompressionRatioText =>
CompressionStats.BytesAfterCompression > 0
? $"{(double)CompressionStats.BytesBeforeCompression / CompressionStats.BytesAfterCompression:N2}x"
@@ -206,19 +291,32 @@ internal static class DatabaseSizeBenchmark
private sealed class SizeBenchmarkDocument
{
/// <summary>
/// Gets or sets the id.
/// </summary>
public ObjectId Id { get; set; }
/// <summary>
/// Gets or sets the value.
/// </summary>
public int Value { get; set; }
/// <summary>
/// Gets or sets the name.
/// </summary>
public string Name { get; set; } = string.Empty;
}
private sealed class SizeBenchmarkDocumentMapper : ObjectIdMapperBase<SizeBenchmarkDocument>
{
/// <inheritdoc />
public override string CollectionName => "size_documents";
/// <inheritdoc />
public override ObjectId GetId(SizeBenchmarkDocument entity) => entity.Id;
/// <inheritdoc />
public override void SetId(SizeBenchmarkDocument entity, ObjectId id) => entity.Id = id;
/// <inheritdoc />
public override int Serialize(SizeBenchmarkDocument entity, BsonSpanWriter writer)
{
var sizePos = writer.BeginDocument();
@@ -229,6 +327,7 @@ internal static class DatabaseSizeBenchmark
return writer.Position;
}
/// <inheritdoc />
public override SizeBenchmarkDocument Deserialize(BsonSpanReader reader)
{
var document = new SizeBenchmarkDocument();

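The summary CSV written by WriteSummaryCsv uses the header shown above; an illustrative row (all values hypothetical) would look like:

set,scenario,target_count,run_compaction,insert_seconds,pre_total_bytes,post_total_bytes,shrink_bytes,compaction_reclaimed_bytes,compression_ratio_text
compaction,Compaction-Compressed-BrotliFast,10000,true,12.345,52428800,31457280,20971520,20971520,2.10x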
View File

@@ -1,16 +1,16 @@
using BenchmarkDotNet.Attributes;
using BenchmarkDotNet.Configs;
using BenchmarkDotNet.Jobs;
using ZB.MOM.WW.CBDD.Bson;
using ZB.MOM.WW.CBDD.Core;
using ZB.MOM.WW.CBDD.Core.Collections;
using ZB.MOM.WW.CBDD.Core.Storage;
using ZB.MOM.WW.CBDD.Core.Transactions;
using Microsoft.Extensions.Logging;
using Serilog.Context;
using System.IO;
namespace ZB.MOM.WW.CBDD.Tests.Benchmark;
[InProcess]
@@ -18,32 +18,35 @@ namespace ZB.MOM.WW.CBDD.Tests.Benchmark;
[GroupBenchmarksBy(BenchmarkLogicalGroupRule.ByCategory)]
[HtmlExporter]
[JsonExporterAttribute.Full]
public class InsertBenchmarks
{
private const int BatchSize = 1000;
private static readonly ILogger Logger = Logging.CreateLogger<InsertBenchmarks>();
private string _docDbPath = "";
private string _docDbWalPath = "";
private StorageEngine? _storage = null;
private BenchmarkTransactionHolder? _transactionHolder = null;
private DocumentCollection<Person>? _collection = null;
private Person[] _batchData = Array.Empty<Person>();
private Person? _singlePerson = null;
/// <summary>
/// Performs one-time setup of database paths and the single and batch insert payloads.
/// </summary>
[GlobalSetup]
public void Setup()
{
var temp = AppContext.BaseDirectory;
var id = Guid.NewGuid().ToString("N");
_docDbPath = Path.Combine(temp, $"bench_docdb_{id}.db");
_docDbWalPath = Path.ChangeExtension(_docDbPath, ".wal");
_singlePerson = CreatePerson(0);
_batchData = new Person[BatchSize];
for (int i = 0; i < BatchSize; i++)
{
_batchData[i] = CreatePerson(i);
}
@@ -60,7 +63,7 @@ public class InsertBenchmarks
Bio = null, // Removed large payload to focus on structure
CreatedAt = DateTime.UtcNow,
Balance = 1000.50m * (i + 1),
HomeAddress = new Address
{
Street = $"{i} Main St",
City = "Tech City",
@@ -83,51 +86,63 @@ public class InsertBenchmarks
return p;
}
/// <summary>
/// Creates a fresh storage engine, transaction holder, and collection before each iteration.
/// </summary>
[IterationSetup]
public void IterationSetup()
{
_storage = new StorageEngine(_docDbPath, PageFileConfig.Default);
_transactionHolder = new BenchmarkTransactionHolder(_storage);
_collection = new DocumentCollection<Person>(_storage, _transactionHolder, new PersonMapper());
}
/// <summary>
/// Disposes per-iteration resources and deletes the temporary database files.
/// </summary>
[IterationCleanup]
public void Cleanup()
{
try
{
using var _ = LogContext.PushProperty("Benchmark", nameof(InsertBenchmarks));
_transactionHolder?.Dispose();
_transactionHolder = null;
_storage?.Dispose();
_storage = null;
System.Threading.Thread.Sleep(100);
if (File.Exists(_docDbPath)) File.Delete(_docDbPath);
if (File.Exists(_docDbWalPath)) File.Delete(_docDbWalPath);
}
catch (Exception ex)
{
Logger.LogWarning(ex, "Cleanup warning");
}
}
// --- Benchmarks ---
/// <summary>
/// Benchmarks inserting a single document and committing the transaction.
/// </summary>
[Benchmark(Baseline = true, Description = "CBDD Single Insert")]
[BenchmarkCategory("Insert_Single")]
public void DocumentDb_Insert_Single()
{
_collection?.Insert(_singlePerson!);
_transactionHolder?.CommitAndReset();
}
/// <summary>
/// Benchmarks inserting 1000 documents within a single transaction.
/// </summary>
[Benchmark(Description = "CBDD Batch Insert (1000 items, 1 Txn)")]
[BenchmarkCategory("Insert_Batch")]
public void DocumentDb_Insert_Batch()
{
_collection?.InsertBulk(_batchData);
_transactionHolder?.CommitAndReset();
}
}

View File

@@ -7,8 +7,16 @@ internal static class Logging
{
private static readonly Lazy<ILoggerFactory> LoggerFactoryInstance = new(CreateFactory);
/// <summary>
/// Gets the shared logger factory for benchmarks.
/// </summary>
public static ILoggerFactory LoggerFactory => LoggerFactoryInstance.Value;
/// <summary>
/// Creates a logger for the specified category type.
/// </summary>
/// <typeparam name="T">The logger category type.</typeparam>
/// <returns>A logger for <typeparamref name="T"/>.</returns>
public static Microsoft.Extensions.Logging.ILogger CreateLogger<T>()
{
return LoggerFactory.CreateLogger<T>();

View File

@@ -1,110 +1,114 @@
using System.Diagnostics;
using System.IO;
using System.Text;
using Microsoft.Extensions.Logging;
using Serilog.Context;
namespace ZB.MOM.WW.CBDD.Tests.Benchmark;
public class ManualBenchmark
{
private static StringBuilder _log = new();
private static void Log(ILogger logger, string message = "")
{
logger.LogInformation("{Message}", message);
_log.AppendLine(message);
}
/// <summary>
/// Runs the manual benchmark scenarios and writes a plain-text report.
/// </summary>
/// <param name="logger">Logger for benchmark progress and results.</param>
public static void Run(ILogger logger)
{
using var _ = LogContext.PushProperty("Benchmark", nameof(ManualBenchmark));
_log.Clear();
Log(logger, "=== MANUAL BENCHMARK: CBDD ===");
Log(logger, $"Date: {DateTime.Now}");
Log(logger, "Testing: Complex Objects (Nested Documents + Collections)\n");
long batchInsertMs;
long singleInsertMs;
long readByIdMs;
using (LogContext.PushProperty("Phase", "BatchInsert"))
{
Log(logger, "1. Batch Insert (1000 items)");
var insertBench = new InsertBenchmarks();
insertBench.Setup();
insertBench.IterationSetup();
try
{
var sw = Stopwatch.StartNew();
insertBench.DocumentDb_Insert_Batch();
sw.Stop();
batchInsertMs = sw.ElapsedMilliseconds;
Log(logger, $" CBDD InsertBulk (1000): {batchInsertMs} ms");
}
finally
{
insertBench.Cleanup();
}
}
using (LogContext.PushProperty("Phase", "FindById"))
{
Log(logger, "\n2. FindById Performance (1000 operations)");
var readBench = new ReadBenchmarks();
readBench.Setup();
try
{
var sw = Stopwatch.StartNew();
for (int i = 0; i < 1000; i++)
{
readBench.DocumentDb_FindById();
}
sw.Stop();
readByIdMs = sw.ElapsedMilliseconds;
Log(logger, $" CBDD FindById x1000: {readByIdMs} ms ({(double)readByIdMs / 1000:F3} ms/op)");
}
finally
{
readBench.Cleanup();
}
}
using (LogContext.PushProperty("Phase", "SingleInsert"))
{
Log(logger, "\n3. Single Insert");
var insertBench = new InsertBenchmarks();
insertBench.Setup();
insertBench.IterationSetup();
try
{
var sw = Stopwatch.StartNew();
insertBench.DocumentDb_Insert_Single();
sw.Stop();
singleInsertMs = sw.ElapsedMilliseconds;
Log(logger, $" CBDD Single Insert: {singleInsertMs} ms");
}
finally
{
insertBench.Cleanup();
}
}
Log(logger, "\n============================================================================");
Log(logger, "BENCHMARK RESULTS (CBDD ONLY):");
Log(logger, "============================================================================");
Log(logger, $"Batch Insert (1000): {batchInsertMs} ms");
Log(logger, $"FindById x1000: {readByIdMs} ms");
Log(logger, $"Single Insert: {singleInsertMs} ms");
var artifactsDir = Path.Combine(AppContext.BaseDirectory, "BenchmarkDotNet.Artifacts", "results");
if (!Directory.Exists(artifactsDir))
{
Directory.CreateDirectory(artifactsDir);
}
var filePath = Path.Combine(artifactsDir, "manual_report.txt");
File.WriteAllText(filePath, _log.ToString());
logger.LogInformation("Report saved to: {FilePath}", filePath);
}
}

View File

@@ -16,9 +16,15 @@ namespace ZB.MOM.WW.CBDD.Tests.Benchmark;
[JsonExporterAttribute.Full]
public class MixedWorkloadBenchmarks
{
/// <summary>
/// Gets or sets whether periodic online compaction is enabled.
/// </summary>
[Params(false, true)]
public bool PeriodicCompaction { get; set; }
/// <summary>
/// Gets or sets the number of operations per benchmark iteration.
/// </summary>
[Params(800)]
public int Operations { get; set; }
@@ -30,6 +36,9 @@ public class MixedWorkloadBenchmarks
private readonly List<ObjectId> _activeIds = [];
private int _nextValueSeed;
/// <summary>
/// Prepares benchmark storage and seed data for each iteration.
/// </summary>
[IterationSetup]
public void Setup()
{
@@ -61,6 +70,9 @@ public class MixedWorkloadBenchmarks
_transactionHolder.CommitAndReset();
}
/// <summary>
/// Cleans up benchmark resources for each iteration.
/// </summary>
[IterationCleanup]
public void Cleanup()
{
@@ -71,6 +83,9 @@ public class MixedWorkloadBenchmarks
if (File.Exists(_walPath)) File.Delete(_walPath);
}
/// <summary>
/// Benchmarks a mixed insert/update/delete workload.
/// </summary>
[Benchmark(Baseline = true)]
[BenchmarkCategory("MixedWorkload")]
public int InsertUpdateDeleteMix()

View File

@@ -0,0 +1,287 @@
using System.IO.Compression;
using System.Text.Json;
using Microsoft.Extensions.Logging;
using ZB.MOM.WW.CBDD.Bson;
using ZB.MOM.WW.CBDD.Core.Collections;
using ZB.MOM.WW.CBDD.Core.Compression;
using ZB.MOM.WW.CBDD.Core.Storage;
namespace ZB.MOM.WW.CBDD.Tests.Benchmark;
internal static class PerformanceGateSmoke
{
private const int CompactionDocumentCount = 2_000;
private const int CompressionDocumentCount = 1_500;
/// <summary>
/// Runs the performance gate smoke probes and writes a report.
/// </summary>
/// <param name="logger">The logger.</param>
public static void Run(ILogger logger)
{
var compaction = RunCompactionProbe();
var compressionOff = RunCompressionGcProbe(enableCompression: false);
var compressionOn = RunCompressionGcProbe(enableCompression: true);
var report = new PerformanceGateReport(
DateTimeOffset.UtcNow,
compaction,
compressionOff,
compressionOn);
var reportPath = WriteReport(report);
logger.LogInformation("Performance gate smoke report written to {ReportPath}", reportPath);
Console.WriteLine("[performance_gate]");
Console.WriteLine($"report_path={reportPath}");
Console.WriteLine($"compaction.pre_pages={compaction.PrePages}");
Console.WriteLine($"compaction.post_pages={compaction.PostPages}");
Console.WriteLine($"compaction.reclaimed_file_bytes={compaction.ReclaimedFileBytes}");
Console.WriteLine($"compaction.throughput_bytes_per_sec={compaction.ThroughputBytesPerSecond:F2}");
Console.WriteLine($"compaction.throughput_pages_per_sec={compaction.ThroughputPagesPerSecond:F2}");
Console.WriteLine($"compaction.throughput_docs_per_sec={compaction.ThroughputDocumentsPerSecond:F2}");
Console.WriteLine($"compression_off.gen0_delta={compressionOff.Gen0Delta}");
Console.WriteLine($"compression_off.gen1_delta={compressionOff.Gen1Delta}");
Console.WriteLine($"compression_off.gen2_delta={compressionOff.Gen2Delta}");
Console.WriteLine($"compression_off.alloc_bytes_delta={compressionOff.AllocatedBytesDelta}");
Console.WriteLine($"compression_on.gen0_delta={compressionOn.Gen0Delta}");
Console.WriteLine($"compression_on.gen1_delta={compressionOn.Gen1Delta}");
Console.WriteLine($"compression_on.gen2_delta={compressionOn.Gen2Delta}");
Console.WriteLine($"compression_on.alloc_bytes_delta={compressionOn.AllocatedBytesDelta}");
}
private static CompactionProbeResult RunCompactionProbe()
{
var dbPath = NewDbPath("gate_compaction");
var walPath = Path.ChangeExtension(dbPath, ".wal");
try
{
using var storage = new StorageEngine(dbPath, PageFileConfig.Small);
using var transactionHolder = new BenchmarkTransactionHolder(storage);
var collection = new DocumentCollection<Person>(storage, transactionHolder, new PersonMapper());
var ids = new List<ObjectId>(CompactionDocumentCount);
for (var i = 0; i < CompactionDocumentCount; i++)
{
ids.Add(collection.Insert(CreatePerson(i, includeLargeBio: true)));
}
transactionHolder.CommitAndReset();
storage.Checkpoint();
for (var i = 0; i < ids.Count; i += 3)
{
collection.Delete(ids[i]);
}
for (var i = 0; i < ids.Count; i += 5)
{
var current = collection.FindById(ids[i]);
if (current == null)
continue;
current.Bio = BuildBio(i + 10_000);
current.Age += 1;
collection.Update(current);
}
transactionHolder.CommitAndReset();
storage.Checkpoint();
var stats = storage.Compact(new CompactionOptions
{
OnlineMode = false,
DefragmentSlottedPages = true,
NormalizeFreeList = true,
EnableTailTruncation = true
});
return new CompactionProbeResult(
stats.PrePageCount,
stats.PostPageCount,
stats.ReclaimedFileBytes,
stats.ThroughputBytesPerSecond,
stats.ThroughputPagesPerSecond,
stats.ThroughputDocumentsPerSecond);
}
finally
{
TryDelete(dbPath);
TryDelete(walPath);
TryDelete($"{dbPath}.compact.state");
TryDelete($"{dbPath}.compact.tmp");
TryDelete($"{dbPath}.compact.bak");
}
}
private static CompressionGcProbeResult RunCompressionGcProbe(bool enableCompression)
{
var dbPath = NewDbPath(enableCompression ? "gate_gc_on" : "gate_gc_off");
var walPath = Path.ChangeExtension(dbPath, ".wal");
var compressionOptions = enableCompression
? new CompressionOptions
{
EnableCompression = true,
MinSizeBytes = 256,
MinSavingsPercent = 0,
Codec = CompressionCodec.Brotli,
Level = CompressionLevel.Fastest
}
: CompressionOptions.Default;
try
{
using var storage = new StorageEngine(dbPath, PageFileConfig.Default, compressionOptions);
using var transactionHolder = new BenchmarkTransactionHolder(storage);
var collection = new DocumentCollection<Person>(storage, transactionHolder, new PersonMapper());
GC.Collect();
GC.WaitForPendingFinalizers();
GC.Collect();
var g0Before = GC.CollectionCount(0);
var g1Before = GC.CollectionCount(1);
var g2Before = GC.CollectionCount(2);
var allocBefore = GC.GetTotalAllocatedBytes(true);
var ids = new ObjectId[CompressionDocumentCount];
for (var i = 0; i < CompressionDocumentCount; i++)
{
ids[i] = collection.Insert(CreatePerson(i, includeLargeBio: true));
}
transactionHolder.CommitAndReset();
for (var i = 0; i < ids.Length; i += 4)
{
var current = collection.FindById(ids[i]);
if (current == null)
continue;
current.Bio = BuildBio(i + 20_000);
current.Age += 1;
collection.Update(current);
}
transactionHolder.CommitAndReset();
var readCount = collection.FindAll().Count();
transactionHolder.CommitAndReset();
GC.Collect();
GC.WaitForPendingFinalizers();
GC.Collect();
var g0After = GC.CollectionCount(0);
var g1After = GC.CollectionCount(1);
var g2After = GC.CollectionCount(2);
var allocAfter = GC.GetTotalAllocatedBytes(true);
return new CompressionGcProbeResult(
enableCompression,
readCount,
g0After - g0Before,
g1After - g1Before,
g2After - g2Before,
allocAfter - allocBefore);
}
finally
{
TryDelete(dbPath);
TryDelete(walPath);
TryDelete($"{dbPath}.compact.state");
TryDelete($"{dbPath}.compact.tmp");
TryDelete($"{dbPath}.compact.bak");
}
}
private static string WriteReport(PerformanceGateReport report)
{
var outputDirectory = Path.Combine(Directory.GetCurrentDirectory(), "BenchmarkDotNet.Artifacts", "results");
Directory.CreateDirectory(outputDirectory);
var reportPath = Path.Combine(outputDirectory, "PerformanceGateSmoke-report.json");
var json = JsonSerializer.Serialize(report, new JsonSerializerOptions { WriteIndented = true });
File.WriteAllText(reportPath, json);
return reportPath;
}
private static Person CreatePerson(int i, bool includeLargeBio)
{
return new Person
{
Id = ObjectId.NewObjectId(),
FirstName = $"First_{i}",
LastName = $"Last_{i}",
Age = 20 + (i % 50),
Bio = includeLargeBio ? BuildBio(i) : $"bio-{i}",
CreatedAt = DateTime.UnixEpoch.AddMinutes(i),
Balance = 100 + i,
HomeAddress = new Address
{
Street = $"{i} Main St",
City = "Gate City",
ZipCode = "12345"
},
EmploymentHistory =
[
new WorkHistory
{
CompanyName = $"Company_{i}",
Title = "Engineer",
DurationYears = i % 10,
Tags = ["csharp", "db", "compression"]
}
]
};
}
private static string BuildBio(int seed)
{
var builder = new System.Text.StringBuilder(4500);
for (var i = 0; i < 150; i++)
{
builder.Append("bio-");
builder.Append(seed.ToString("D6"));
builder.Append('-');
builder.Append(i.ToString("D3"));
builder.Append('|');
}
return builder.ToString();
}
private static string NewDbPath(string prefix)
=> Path.Combine(Path.GetTempPath(), $"{prefix}_{Guid.NewGuid():N}.db");
private static void TryDelete(string path)
{
if (File.Exists(path))
{
File.Delete(path);
}
}
private sealed record PerformanceGateReport(
DateTimeOffset CapturedAtUtc,
CompactionProbeResult Compaction,
CompressionGcProbeResult CompressionOff,
CompressionGcProbeResult CompressionOn);
private sealed record CompactionProbeResult(
uint PrePages,
uint PostPages,
long ReclaimedFileBytes,
double ThroughputBytesPerSecond,
double ThroughputPagesPerSecond,
double ThroughputDocumentsPerSecond);
private sealed record CompressionGcProbeResult(
bool CompressionEnabled,
int ReadCount,
int Gen0Delta,
int Gen1Delta,
int Gen2Delta,
long AllocatedBytesDelta);
}
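The GC probe above relies on a force-collect-then-delta idiom; in isolation it amounts to the following sketch (not part of the commit):

static (int gen0Delta, long allocatedDelta) MeasureGc(Action workload)
{
    // Settle the heap so the "before" counters are stable.
    GC.Collect(); GC.WaitForPendingFinalizers(); GC.Collect();
    var gen0Before = GC.CollectionCount(0);
    var allocBefore = GC.GetTotalAllocatedBytes(precise: true);
    workload();
    // Settle again, then report the deltas attributable to the workload.
    GC.Collect(); GC.WaitForPendingFinalizers(); GC.Collect();
    return (GC.CollectionCount(0) - gen0Before,
            GC.GetTotalAllocatedBytes(precise: true) - allocBefore);
}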

View File

@@ -6,30 +6,78 @@ namespace ZB.MOM.WW.CBDD.Tests.Benchmark;
public class Address
{
/// <summary>
/// Gets or sets the Street.
/// </summary>
public string Street { get; set; } = string.Empty;
/// <summary>
/// Gets or sets the City.
/// </summary>
public string City { get; set; } = string.Empty;
/// <summary>
/// Gets or sets the ZipCode.
/// </summary>
public string ZipCode { get; set; } = string.Empty;
}
public class WorkHistory
{
/// <summary>
/// Gets or sets the CompanyName.
/// </summary>
public string CompanyName { get; set; } = string.Empty;
/// <summary>
/// Gets or sets the Title.
/// </summary>
public string Title { get; set; } = string.Empty;
/// <summary>
/// Gets or sets the DurationYears.
/// </summary>
public int DurationYears { get; set; }
/// <summary>
/// Gets or sets the Tags.
/// </summary>
public List<string> Tags { get; set; } = new();
}
public class Person
{
/// <summary>
/// Gets or sets the Id.
/// </summary>
public ObjectId Id { get; set; }
/// <summary>
/// Gets or sets the FirstName.
/// </summary>
public string FirstName { get; set; } = string.Empty;
/// <summary>
/// Gets or sets the LastName.
/// </summary>
public string LastName { get; set; } = string.Empty;
/// <summary>
/// Gets or sets the Age.
/// </summary>
public int Age { get; set; }
/// <summary>
/// Gets or sets the Bio.
/// </summary>
public string? Bio { get; set; } = string.Empty;
/// <summary>
/// Gets or sets the CreatedAt.
/// </summary>
public DateTime CreatedAt { get; set; }
// Complex fields
/// <summary>
/// Gets or sets the Balance.
/// </summary>
public decimal Balance { get; set; }
/// <summary>
/// Gets or sets the HomeAddress.
/// </summary>
public Address HomeAddress { get; set; } = new();
/// <summary>
/// Gets or sets the EmploymentHistory.
/// </summary>
public List<WorkHistory> EmploymentHistory { get; set; } = new();
}

View File

@@ -5,16 +5,20 @@ using System.Runtime.InteropServices;
namespace ZB.MOM.WW.CBDD.Tests.Benchmark;
public class PersonMapper : ObjectIdMapperBase<Person>
{
/// <inheritdoc />
public override string CollectionName => "people";
/// <inheritdoc />
public override ObjectId GetId(Person entity) => entity.Id;
/// <inheritdoc />
public override void SetId(Person entity, ObjectId id) => entity.Id = id;
/// <inheritdoc />
public override int Serialize(Person entity, BsonSpanWriter writer)
{
var sizePos = writer.BeginDocument();
writer.WriteObjectId("_id", entity.Id);
@@ -67,8 +71,9 @@ public class PersonMapper : ObjectIdMapperBase<Person>
return writer.Position;
}
/// <inheritdoc />
public override Person Deserialize(BsonSpanReader reader)
{
var person = new Person();
reader.ReadDocumentSize();

View File

@@ -50,6 +50,13 @@ class Program
return;
}
if (mode == "gate")
{
using var _ = LogContext.PushProperty("Mode", "PerformanceGateSmoke");
PerformanceGateSmoke.Run(logger);
return;
}
if (mode == "all")
{
using var _ = LogContext.PushProperty("Mode", "AllBenchmarks");

View File

@@ -1,14 +1,14 @@
using BenchmarkDotNet.Attributes;
using BenchmarkDotNet.Configs;
using BenchmarkDotNet.Jobs;
using ZB.MOM.WW.CBDD.Bson;
using ZB.MOM.WW.CBDD.Core;
using ZB.MOM.WW.CBDD.Core.Collections;
using ZB.MOM.WW.CBDD.Core.Storage;
using ZB.MOM.WW.CBDD.Core.Transactions;
using System.IO;
namespace ZB.MOM.WW.CBDD.Tests.Benchmark;
[SimpleJob]
[InProcess]
@@ -18,53 +18,59 @@ namespace ZB.MOM.WW.CBDD.Tests.Benchmark;
[JsonExporterAttribute.Full]
public class ReadBenchmarks
{
private const int DocCount = 1000;
private string _docDbPath = null!;
private string _docDbWalPath = null!;
private StorageEngine _storage = null!;
private BenchmarkTransactionHolder _transactionHolder = null!;
private DocumentCollection<Person> _collection = null!;
private ObjectId[] _ids = null!;
private ObjectId _targetId;
/// <summary>
/// Seeds the database with documents and selects a lookup target.
/// </summary>
[GlobalSetup]
public void Setup()
{
var temp = AppContext.BaseDirectory;
var id = Guid.NewGuid().ToString("N");
_docDbPath = Path.Combine(temp, $"bench_read_docdb_{id}.db");
_docDbWalPath = Path.ChangeExtension(_docDbPath, ".wal");
if (File.Exists(_docDbPath)) File.Delete(_docDbPath);
if (File.Exists(_docDbWalPath)) File.Delete(_docDbWalPath);
_storage = new StorageEngine(_docDbPath, PageFileConfig.Default);
_transactionHolder = new BenchmarkTransactionHolder(_storage);
_collection = new DocumentCollection<Person>(_storage, _transactionHolder, new PersonMapper());
var id = Guid.NewGuid().ToString("N");
_docDbPath = Path.Combine(temp, $"bench_read_docdb_{id}.db");
_docDbWalPath = Path.ChangeExtension(_docDbPath, ".wal");
if (File.Exists(_docDbPath)) File.Delete(_docDbPath);
if (File.Exists(_docDbWalPath)) File.Delete(_docDbWalPath);
_storage = new StorageEngine(_docDbPath, PageFileConfig.Default);
_transactionHolder = new BenchmarkTransactionHolder(_storage);
_collection = new DocumentCollection<Person>(_storage, _transactionHolder, new PersonMapper());
_ids = new ObjectId[DocCount];
for (int i = 0; i < DocCount; i++)
{
var p = CreatePerson(i);
_ids[i] = _collection.Insert(p);
}
_transactionHolder.CommitAndReset();
_targetId = _ids[DocCount / 2];
}
/// <summary>
/// Disposes resources and deletes the temporary database files.
/// </summary>
[GlobalCleanup]
public void Cleanup()
{
_transactionHolder?.Dispose();
_storage?.Dispose();
if (File.Exists(_docDbPath)) File.Delete(_docDbPath);
if (File.Exists(_docDbWalPath)) File.Delete(_docDbWalPath);
}
private Person CreatePerson(int i)
{
@@ -77,7 +83,7 @@ public class ReadBenchmarks
Bio = null,
CreatedAt = DateTime.UtcNow,
Balance = 1000.50m * (i + 1),
HomeAddress = new Address
{
Street = $"{i} Main St",
City = "Tech City",
@@ -100,10 +106,13 @@ public class ReadBenchmarks
return p;
}
[Benchmark(Baseline = true, Description = "CBDD FindById")]
[BenchmarkCategory("Read_Single")]
public Person? DocumentDb_FindById()
{
/// <summary>
/// Benchmarks FindById for a single known document.
/// </summary>
[Benchmark(Baseline = true, Description = "CBDD FindById")]
[BenchmarkCategory("Read_Single")]
public Person? DocumentDb_FindById()
{
return _collection.FindById(_targetId);
}
}

View File

@@ -27,7 +27,7 @@ public class SerializationBenchmarks
private static readonly System.Collections.Concurrent.ConcurrentDictionary<string, ushort> _keyMap = new(StringComparer.OrdinalIgnoreCase);
private static readonly System.Collections.Concurrent.ConcurrentDictionary<ushort, string> _keys = new();
static SerializationBenchmarks()
{
ushort id = 1;
string[] initialKeys = { "_id", "firstname", "lastname", "age", "bio", "createdat", "balance", "homeaddress", "street", "city", "zipcode", "employmenthistory", "companyname", "title", "durationyears", "tags" };
@@ -47,8 +47,11 @@ public class SerializationBenchmarks
}
}
/// <summary>
/// Prepares benchmark data for serialization and deserialization scenarios.
/// </summary>
[GlobalSetup]
public void Setup()
{
_person = CreatePerson(0);
_people = new List<Person>(BatchSize);
@@ -108,39 +111,54 @@ public class SerializationBenchmarks
return p;
}
[Benchmark(Description = "Serialize Single (BSON)")]
[BenchmarkCategory("Single")]
public void Serialize_Bson()
/// <summary>
/// Benchmarks BSON serialization for a single document.
/// </summary>
[Benchmark(Description = "Serialize Single (BSON)")]
[BenchmarkCategory("Single")]
public void Serialize_Bson()
{
var writer = new BsonSpanWriter(_serializeBuffer, _keyMap);
_mapper.Serialize(_person, writer);
}
[Benchmark(Description = "Serialize Single (JSON)")]
[BenchmarkCategory("Single")]
public void Serialize_Json()
/// <summary>
/// Benchmarks JSON serialization for a single document.
/// </summary>
[Benchmark(Description = "Serialize Single (JSON)")]
[BenchmarkCategory("Single")]
public void Serialize_Json()
{
JsonSerializer.SerializeToUtf8Bytes(_person);
}
[Benchmark(Description = "Deserialize Single (BSON)")]
[BenchmarkCategory("Single")]
public Person Deserialize_Bson()
/// <summary>
/// Benchmarks BSON deserialization for a single document.
/// </summary>
[Benchmark(Description = "Deserialize Single (BSON)")]
[BenchmarkCategory("Single")]
public Person Deserialize_Bson()
{
var reader = new BsonSpanReader(_bsonData, _keys);
return _mapper.Deserialize(reader);
}
[Benchmark(Description = "Deserialize Single (JSON)")]
[BenchmarkCategory("Single")]
public Person? Deserialize_Json()
/// <summary>
/// Benchmarks JSON deserialization for a single document.
/// </summary>
[Benchmark(Description = "Deserialize Single (JSON)")]
[BenchmarkCategory("Single")]
public Person? Deserialize_Json()
{
return JsonSerializer.Deserialize<Person>(_jsonData);
}
[Benchmark(Description = "Serialize List 10k (BSON loop)")]
[BenchmarkCategory("Batch")]
public void Serialize_List_Bson()
/// <summary>
/// Benchmarks BSON serialization for a list of documents.
/// </summary>
[Benchmark(Description = "Serialize List 10k (BSON loop)")]
[BenchmarkCategory("Batch")]
public void Serialize_List_Bson()
{
foreach (var p in _people)
{
@@ -149,9 +167,12 @@ public class SerializationBenchmarks
}
}
[Benchmark(Description = "Serialize List 10k (JSON loop)")]
[BenchmarkCategory("Batch")]
public void Serialize_List_Json()
/// <summary>
/// Benchmarks JSON serialization for a list of documents.
/// </summary>
[Benchmark(Description = "Serialize List 10k (JSON loop)")]
[BenchmarkCategory("Batch")]
public void Serialize_List_Json()
{
foreach (var p in _people)
{
@@ -159,9 +180,12 @@ public class SerializationBenchmarks
}
}
[Benchmark(Description = "Deserialize List 10k (BSON loop)")]
[BenchmarkCategory("Batch")]
public void Deserialize_List_Bson()
/// <summary>
/// Benchmarks BSON deserialization for a list of documents.
/// </summary>
[Benchmark(Description = "Deserialize List 10k (BSON loop)")]
[BenchmarkCategory("Batch")]
public void Deserialize_List_Bson()
{
foreach (var data in _bsonDataList)
{
@@ -170,9 +194,12 @@ public class SerializationBenchmarks
}
}
[Benchmark(Description = "Deserialize List 10k (JSON loop)")]
[BenchmarkCategory("Batch")]
public void Deserialize_List_Json()
/// <summary>
/// Benchmarks JSON deserialization for a list of documents.
/// </summary>
[Benchmark(Description = "Deserialize List 10k (JSON loop)")]
[BenchmarkCategory("Batch")]
public void Deserialize_List_Json()
{
foreach (var data in _jsonDataList)
{