Initialize CBDD solution and add a .NET-focused gitignore for generated artifacts.

This commit is contained in:
Joseph Doherty
2026-02-20 12:54:07 -05:00
commit b8ed5ec500
214 changed files with 101452 additions and 0 deletions

View File

@@ -0,0 +1,80 @@
using ZB.MOM.WW.CBDD.Core;
using ZB.MOM.WW.CBDD.Core.Storage;
using ZB.MOM.WW.CBDD.Core.Transactions;
namespace ZB.MOM.WW.CBDD.Tests.Benchmark;
internal sealed class BenchmarkTransactionHolder : ITransactionHolder, IDisposable
{
private readonly StorageEngine _storage;
private readonly object _sync = new();
private ITransaction? _currentTransaction;
public BenchmarkTransactionHolder(StorageEngine storage)
{
_storage = storage ?? throw new ArgumentNullException(nameof(storage));
}
public ITransaction GetCurrentTransactionOrStart()
{
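// Reuse the current transaction while it is still active; otherwise lazily start a new one.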
lock (_sync)
{
if (_currentTransaction == null || _currentTransaction.State != TransactionState.Active)
{
_currentTransaction = _storage.BeginTransaction();
}
return _currentTransaction;
}
}
public Task<ITransaction> GetCurrentTransactionOrStartAsync()
{
return Task.FromResult(GetCurrentTransactionOrStart());
}
public void CommitAndReset()
{
lock (_sync)
{
if (_currentTransaction == null)
{
return;
}
if (_currentTransaction.State == TransactionState.Active ||
_currentTransaction.State == TransactionState.Preparing)
{
_currentTransaction.Commit();
}
_currentTransaction.Dispose();
_currentTransaction = null;
}
}
public void RollbackAndReset()
{
lock (_sync)
{
if (_currentTransaction == null)
{
return;
}
if (_currentTransaction.State == TransactionState.Active ||
_currentTransaction.State == TransactionState.Preparing)
{
_currentTransaction.Rollback();
}
_currentTransaction.Dispose();
_currentTransaction = null;
}
}
public void Dispose()
{
RollbackAndReset();
}
}
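A minimal sketch of the intended call pattern, composed only from APIs in this commit (the database path and the PageFileConfig choice are illustrative, not prescribed by this file):

using var storage = new StorageEngine("demo.db", PageFileConfig.Default);
using var holder = new BenchmarkTransactionHolder(storage);
var people = new DocumentCollection<Person>(storage, holder, new PersonMapper());
people.Insert(new Person { FirstName = "Ada" });  // implicitly starts a transaction
people.Insert(new Person { FirstName = "Alan" }); // reuses the still-active transaction
holder.CommitAndReset();                          // one commit covers both inserts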

View File

@@ -0,0 +1,133 @@
using BenchmarkDotNet.Attributes;
using BenchmarkDotNet.Configs;
using BenchmarkDotNet.Jobs;
using ZB.MOM.WW.CBDD.Bson;
using ZB.MOM.WW.CBDD.Core.Collections;
using ZB.MOM.WW.CBDD.Core.Storage;
namespace ZB.MOM.WW.CBDD.Tests.Benchmark;
[SimpleJob]
[InProcess]
[MemoryDiagnoser]
[GroupBenchmarksBy(BenchmarkLogicalGroupRule.ByCategory)]
[HtmlExporter]
[JsonExporterAttribute.Full]
public class CompactionBenchmarks
{
[Params(2_000)]
public int DocumentCount { get; set; }
private string _dbPath = string.Empty;
private string _walPath = string.Empty;
private StorageEngine _storage = null!;
private BenchmarkTransactionHolder _transactionHolder = null!;
private DocumentCollection<Person> _collection = null!;
private readonly List<ObjectId> _insertedIds = [];
[IterationSetup]
public void Setup()
{
var id = Guid.NewGuid().ToString("N");
_dbPath = Path.Combine(AppContext.BaseDirectory, $"bench_compaction_{id}.db");
_walPath = Path.ChangeExtension(_dbPath, ".wal");
_storage = new StorageEngine(_dbPath, PageFileConfig.Small);
_transactionHolder = new BenchmarkTransactionHolder(_storage);
_collection = new DocumentCollection<Person>(_storage, _transactionHolder, new PersonMapper());
_insertedIds.Clear();
for (var i = 0; i < DocumentCount; i++)
{
var person = CreatePerson(i);
var insertedId = _collection.Insert(person);
_insertedIds.Add(insertedId);
}
_transactionHolder.CommitAndReset();
_storage.Checkpoint();
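// Delete the newest two thirds of the documents so compaction has real space to reclaim.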
for (var i = _insertedIds.Count - 1; i >= _insertedIds.Count / 3; i--)
{
_collection.Delete(_insertedIds[i]);
}
_transactionHolder.CommitAndReset();
_storage.Checkpoint();
}
[IterationCleanup]
public void Cleanup()
{
_transactionHolder?.Dispose();
_storage?.Dispose();
if (File.Exists(_dbPath)) File.Delete(_dbPath);
if (File.Exists(_walPath)) File.Delete(_walPath);
}
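// Both benchmarks perform the same offline compaction; each returns a different
// counter so the result is observable and the work cannot be optimized away.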
[Benchmark(Baseline = true)]
[BenchmarkCategory("Compaction_Offline")]
public long OfflineCompact_ReclaimedBytes()
{
var stats = _storage.Compact(new CompactionOptions
{
OnlineMode = false,
EnableTailTruncation = true,
DefragmentSlottedPages = true,
NormalizeFreeList = true
});
return stats.ReclaimedFileBytes;
}
[Benchmark]
[BenchmarkCategory("Compaction_Offline")]
public long OfflineCompact_TailBytesTruncated()
{
var stats = _storage.Compact(new CompactionOptions
{
OnlineMode = false,
EnableTailTruncation = true,
DefragmentSlottedPages = true,
NormalizeFreeList = true
});
return stats.TailBytesTruncated;
}
private static Person CreatePerson(int i)
{
return new Person
{
Id = ObjectId.NewObjectId(),
FirstName = $"First_{i}",
LastName = $"Last_{i}",
Age = i % 90,
Bio = BuildPayload(i),
CreatedAt = DateTime.UnixEpoch.AddSeconds(i),
Balance = i * 1.5m,
HomeAddress = new Address
{
Street = $"{i} Long Street",
City = "Compaction City",
ZipCode = "90000"
}
};
}
private static string BuildPayload(int seed)
{
var builder = new System.Text.StringBuilder(2500);
for (var i = 0; i < 80; i++)
{
builder.Append("compact-");
builder.Append(seed.ToString("D6"));
builder.Append('-');
builder.Append(i.ToString("D3"));
builder.Append('|');
}
return builder.ToString();
}
}

View File

@@ -0,0 +1,174 @@
using BenchmarkDotNet.Attributes;
using BenchmarkDotNet.Configs;
using BenchmarkDotNet.Jobs;
using System.IO.Compression;
using ZB.MOM.WW.CBDD.Bson;
using ZB.MOM.WW.CBDD.Core.Collections;
using ZB.MOM.WW.CBDD.Core.Compression;
using ZB.MOM.WW.CBDD.Core.Storage;
namespace ZB.MOM.WW.CBDD.Tests.Benchmark;
[SimpleJob]
[InProcess]
[MemoryDiagnoser]
[GroupBenchmarksBy(BenchmarkLogicalGroupRule.ByCategory)]
[HtmlExporter]
[JsonExporterAttribute.Full]
public class CompressionBenchmarks
{
private const int SeedCount = 300;
private const int WorkloadCount = 100;
[Params(false, true)]
public bool EnableCompression { get; set; }
[Params(CompressionCodec.Brotli, CompressionCodec.Deflate)]
public CompressionCodec Codec { get; set; }
[Params(CompressionLevel.Fastest, CompressionLevel.Optimal)]
public CompressionLevel Level { get; set; }
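// Note: when EnableCompression is false, the Codec and Level params above still multiply the run matrix; those rows act as repeated uncompressed baselines.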
private string _dbPath = string.Empty;
private string _walPath = string.Empty;
private StorageEngine _storage = null!;
private BenchmarkTransactionHolder _transactionHolder = null!;
private DocumentCollection<Person> _collection = null!;
private Person[] _insertBatch = Array.Empty<Person>();
private ObjectId[] _seedIds = Array.Empty<ObjectId>();
[IterationSetup]
public void Setup()
{
var id = Guid.NewGuid().ToString("N");
_dbPath = Path.Combine(AppContext.BaseDirectory, $"bench_compression_{id}.db");
_walPath = Path.ChangeExtension(_dbPath, ".wal");
var compressionOptions = new CompressionOptions
{
EnableCompression = EnableCompression,
MinSizeBytes = 256,
MinSavingsPercent = 0,
Codec = Codec,
Level = Level
};
_storage = new StorageEngine(_dbPath, PageFileConfig.Default, compressionOptions);
_transactionHolder = new BenchmarkTransactionHolder(_storage);
_collection = new DocumentCollection<Person>(_storage, _transactionHolder, new PersonMapper());
_seedIds = new ObjectId[SeedCount];
for (var i = 0; i < SeedCount; i++)
{
var doc = CreatePerson(i, includeLargeBio: true);
_seedIds[i] = _collection.Insert(doc);
}
_transactionHolder.CommitAndReset();
_insertBatch = Enumerable.Range(SeedCount, WorkloadCount)
.Select(i => CreatePerson(i, includeLargeBio: true))
.ToArray();
}
[IterationCleanup]
public void Cleanup()
{
_transactionHolder?.Dispose();
_storage?.Dispose();
if (File.Exists(_dbPath)) File.Delete(_dbPath);
if (File.Exists(_walPath)) File.Delete(_walPath);
}
[Benchmark(Baseline = true)]
[BenchmarkCategory("Compression_InsertUpdateRead")]
public void Insert_Workload()
{
_collection.InsertBulk(_insertBatch);
_transactionHolder.CommitAndReset();
}
[Benchmark]
[BenchmarkCategory("Compression_InsertUpdateRead")]
public void Update_Workload()
{
for (var i = 0; i < WorkloadCount; i++)
{
var id = _seedIds[i];
var current = _collection.FindById(id);
if (current == null)
continue;
current.Bio = BuildBio(i + 10_000);
current.Age += 1;
_collection.Update(current);
}
_transactionHolder.CommitAndReset();
}
[Benchmark]
[BenchmarkCategory("Compression_InsertUpdateRead")]
public int Read_Workload()
{
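// Accumulate ages into a checksum so the reads produce an observable result.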
var checksum = 0;
for (var i = 0; i < WorkloadCount; i++)
{
var person = _collection.FindById(_seedIds[i]);
if (person != null)
{
checksum += person.Age;
}
}
_transactionHolder.CommitAndReset();
return checksum;
}
private static Person CreatePerson(int i, bool includeLargeBio)
{
return new Person
{
Id = ObjectId.NewObjectId(),
FirstName = $"First_{i}",
LastName = $"Last_{i}",
Age = 20 + (i % 50),
Bio = includeLargeBio ? BuildBio(i) : $"bio-{i}",
CreatedAt = DateTime.UnixEpoch.AddMinutes(i),
Balance = 100 + i,
HomeAddress = new Address
{
Street = $"{i} Main St",
City = "Bench City",
ZipCode = "12345"
},
EmploymentHistory =
[
new WorkHistory
{
CompanyName = $"Company_{i}",
Title = "Engineer",
DurationYears = i % 10,
Tags = ["csharp", "db", "compression"]
}
]
};
}
private static string BuildBio(int seed)
{
var builder = new System.Text.StringBuilder(4500);
for (var i = 0; i < 150; i++)
{
builder.Append("bio-");
builder.Append(seed.ToString("D6"));
builder.Append('-');
builder.Append(i.ToString("D3"));
builder.Append('|');
}
return builder.ToString();
}
}

View File

@@ -0,0 +1,266 @@
using System.Diagnostics;
using Microsoft.Extensions.Logging;
using Serilog.Context;
using ZB.MOM.WW.CBDD.Bson;
using ZB.MOM.WW.CBDD.Core.Collections;
using ZB.MOM.WW.CBDD.Core.Compression;
using ZB.MOM.WW.CBDD.Core.Storage;
namespace ZB.MOM.WW.CBDD.Tests.Benchmark;
internal static class DatabaseSizeBenchmark
{
private static readonly int[] TargetCounts = [10_000, 1_000_000, 10_000_000];
private static readonly Scenario[] Scenarios =
[
new(
"Uncompressed",
CompressionOptions.Default),
new(
"Compressed-BrotliFast",
new CompressionOptions
{
EnableCompression = true,
MinSizeBytes = 256,
MinSavingsPercent = 0,
Codec = CompressionCodec.Brotli,
Level = System.IO.Compression.CompressionLevel.Fastest
})
];
private const int BatchSize = 50_000;
private const int ProgressInterval = 1_000_000;
public static void Run(ILogger logger)
{
var results = new List<SizeResult>(TargetCounts.Length * Scenarios.Length);
logger.LogInformation("=== CBDD Database Size Benchmark ===");
logger.LogInformation("Targets: {Targets}", string.Join(", ", TargetCounts.Select(x => x.ToString("N0"))));
logger.LogInformation("Scenarios: {Scenarios}", string.Join(", ", Scenarios.Select(x => x.Name)));
logger.LogInformation("Batch size: {BatchSize:N0}", BatchSize);
foreach (var targetCount in TargetCounts)
{
foreach (var scenario in Scenarios)
{
var dbPath = Path.Combine(Path.GetTempPath(), $"cbdd_size_{scenario.Name}_{targetCount}_{Guid.NewGuid():N}.db");
var walPath = Path.ChangeExtension(dbPath, ".wal");
using var _ = LogContext.PushProperty("TargetCount", targetCount);
using var __ = LogContext.PushProperty("Scenario", scenario.Name);
logger.LogInformation("Starting scenario {Scenario} for target {TargetCount:N0} docs", scenario.Name, targetCount);
var insertStopwatch = Stopwatch.StartNew();
CompressionStats compressionStats = default;
CompactionStats compactionStats;
long preCompactDbBytes;
long preCompactWalBytes;
long postCompactDbBytes;
long postCompactWalBytes;
using (var storage = new StorageEngine(dbPath, PageFileConfig.Default, scenario.CompressionOptions))
using (var transactionHolder = new BenchmarkTransactionHolder(storage))
{
var collection = new DocumentCollection<SizeBenchmarkDocument>(
storage,
transactionHolder,
new SizeBenchmarkDocumentMapper());
var inserted = 0;
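// Insert in fixed-size batches, committing after each so no single transaction grows unbounded.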
while (inserted < targetCount)
{
var currentBatchSize = Math.Min(BatchSize, targetCount - inserted);
var documents = new SizeBenchmarkDocument[currentBatchSize];
var baseValue = inserted;
for (var i = 0; i < currentBatchSize; i++)
{
documents[i] = CreateDocument(baseValue + i);
}
collection.InsertBulk(documents);
transactionHolder.CommitAndReset();
inserted += currentBatchSize;
if (inserted == targetCount || inserted % ProgressInterval == 0)
{
logger.LogInformation("Inserted {Inserted:N0}/{TargetCount:N0}", inserted, targetCount);
}
}
insertStopwatch.Stop();
preCompactDbBytes = File.Exists(dbPath) ? new FileInfo(dbPath).Length : 0;
preCompactWalBytes = File.Exists(walPath) ? new FileInfo(walPath).Length : 0;
compactionStats = storage.Compact(new CompactionOptions
{
EnableTailTruncation = true,
DefragmentSlottedPages = true,
NormalizeFreeList = true
});
postCompactDbBytes = File.Exists(dbPath) ? new FileInfo(dbPath).Length : 0;
postCompactWalBytes = File.Exists(walPath) ? new FileInfo(walPath).Length : 0;
compressionStats = storage.GetCompressionStats();
}
var result = new SizeResult(
scenario.Name,
targetCount,
insertStopwatch.Elapsed,
preCompactDbBytes,
preCompactWalBytes,
postCompactDbBytes,
postCompactWalBytes,
compactionStats,
compressionStats);
results.Add(result);
logger.LogInformation(
"Completed {Scenario} {TargetCount:N0} docs in {Elapsed}. pre={PreTotal}, post={PostTotal}, shrink={Shrink}, compRatio={CompRatio}",
scenario.Name,
targetCount,
insertStopwatch.Elapsed,
FormatBytes(result.PreCompactTotalBytes),
FormatBytes(result.PostCompactTotalBytes),
FormatBytes(result.ShrinkBytes),
result.CompressionRatioText);
TryDelete(dbPath);
TryDelete(walPath);
}
}
logger.LogInformation("=== Size Benchmark Summary ===");
foreach (var result in results.OrderBy(x => x.TargetCount).ThenBy(x => x.Scenario))
{
logger.LogInformation(
"{Scenario,-22} | {Count,12:N0} docs | insert={Elapsed,12} | pre={Pre,12} | post={Post,12} | shrink={Shrink,12} | compact={CompactBytes,12} | ratio={Ratio}",
result.Scenario,
result.TargetCount,
result.InsertElapsed,
FormatBytes(result.PreCompactTotalBytes),
FormatBytes(result.PostCompactTotalBytes),
FormatBytes(result.ShrinkBytes),
FormatBytes(result.CompactionStats.ReclaimedFileBytes),
result.CompressionRatioText);
}
}
private static SizeBenchmarkDocument CreateDocument(int value)
{
return new SizeBenchmarkDocument
{
Id = ObjectId.NewObjectId(),
Value = value,
Name = $"doc-{value:D8}"
};
}
private static void TryDelete(string path)
{
if (File.Exists(path))
{
File.Delete(path);
}
}
private static string FormatBytes(long bytes)
{
string[] units = ["B", "KB", "MB", "GB", "TB"];
double size = bytes;
var unitIndex = 0;
while (size >= 1024 && unitIndex < units.Length - 1)
{
size /= 1024;
unitIndex++;
}
return $"{size:N2} {units[unitIndex]}";
}
private sealed record Scenario(string Name, CompressionOptions CompressionOptions);
private sealed record SizeResult(
string Scenario,
int TargetCount,
TimeSpan InsertElapsed,
long PreCompactDbBytes,
long PreCompactWalBytes,
long PostCompactDbBytes,
long PostCompactWalBytes,
CompactionStats CompactionStats,
CompressionStats CompressionStats)
{
public long PreCompactTotalBytes => PreCompactDbBytes + PreCompactWalBytes;
public long PostCompactTotalBytes => PostCompactDbBytes + PostCompactWalBytes;
public long ShrinkBytes => PreCompactTotalBytes - PostCompactTotalBytes;
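// Before/after ratio: e.g. 2.00x means compressed payloads occupy half their original bytes.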
public string CompressionRatioText =>
CompressionStats.BytesAfterCompression > 0
? $"{(double)CompressionStats.BytesBeforeCompression / CompressionStats.BytesAfterCompression:N2}x"
: "n/a";
}
private sealed class SizeBenchmarkDocument
{
public ObjectId Id { get; set; }
public int Value { get; set; }
public string Name { get; set; } = string.Empty;
}
private sealed class SizeBenchmarkDocumentMapper : ObjectIdMapperBase<SizeBenchmarkDocument>
{
public override string CollectionName => "size_documents";
public override ObjectId GetId(SizeBenchmarkDocument entity) => entity.Id;
public override void SetId(SizeBenchmarkDocument entity, ObjectId id) => entity.Id = id;
public override int Serialize(SizeBenchmarkDocument entity, BsonSpanWriter writer)
{
var sizePos = writer.BeginDocument();
writer.WriteObjectId("_id", entity.Id);
writer.WriteInt32("value", entity.Value);
writer.WriteString("name", entity.Name);
writer.EndDocument(sizePos);
return writer.Position;
}
public override SizeBenchmarkDocument Deserialize(BsonSpanReader reader)
{
var document = new SizeBenchmarkDocument();
reader.ReadDocumentSize();
while (reader.Remaining > 0)
{
var bsonType = reader.ReadBsonType();
if (bsonType == BsonType.EndOfDocument)
{
break;
}
var name = reader.ReadElementHeader();
switch (name)
{
case "_id":
document.Id = reader.ReadObjectId();
break;
case "value":
document.Value = reader.ReadInt32();
break;
case "name":
document.Name = reader.ReadString();
break;
default:
reader.SkipValue(bsonType);
break;
}
}
return document;
}
}
}

View File

@@ -0,0 +1,133 @@
using BenchmarkDotNet.Attributes;
using BenchmarkDotNet.Configs;
using BenchmarkDotNet.Jobs;
using ZB.MOM.WW.CBDD.Bson;
using ZB.MOM.WW.CBDD.Core.Collections;
using ZB.MOM.WW.CBDD.Core.Storage;
using Microsoft.Extensions.Logging;
using Serilog.Context;
namespace ZB.MOM.WW.CBDD.Tests.Benchmark;
[InProcess]
[MemoryDiagnoser]
[GroupBenchmarksBy(BenchmarkLogicalGroupRule.ByCategory)]
[HtmlExporter]
[JsonExporterAttribute.Full]
public class InsertBenchmarks
{
private const int BatchSize = 1000;
private static readonly ILogger Logger = Logging.CreateLogger<InsertBenchmarks>();
private string _docDbPath = "";
private string _docDbWalPath = "";
private StorageEngine? _storage;
private BenchmarkTransactionHolder? _transactionHolder;
private DocumentCollection<Person>? _collection;
private Person[] _batchData = Array.Empty<Person>();
private Person? _singlePerson;
[GlobalSetup]
public void Setup()
{
var temp = AppContext.BaseDirectory;
var id = Guid.NewGuid().ToString("N");
_docDbPath = Path.Combine(temp, $"bench_docdb_{id}.db");
_docDbWalPath = Path.ChangeExtension(_docDbPath, ".wal");
_singlePerson = CreatePerson(0);
_batchData = new Person[BatchSize];
for (int i = 0; i < BatchSize; i++)
{
_batchData[i] = CreatePerson(i);
}
}
private Person CreatePerson(int i)
{
var p = new Person
{
Id = ObjectId.NewObjectId(),
FirstName = $"First_{i}",
LastName = $"Last_{i}",
Age = 20 + (i % 50),
Bio = null, // Removed large payload to focus on structure
CreatedAt = DateTime.UtcNow,
Balance = 1000.50m * (i + 1),
HomeAddress = new Address
{
Street = $"{i} Main St",
City = "Tech City",
ZipCode = "12345"
}
};
// Add 10 work history items to stress structure traversal
for (int j = 0; j < 10; j++)
{
p.EmploymentHistory.Add(new WorkHistory
{
CompanyName = $"TechCorp_{i}_{j}",
Title = "Developer",
DurationYears = j,
Tags = new List<string> { "C#", "BSON", "Performance", "Database", "Complex" }
});
}
return p;
}
[IterationSetup]
public void IterationSetup()
{
_storage = new StorageEngine(_docDbPath, PageFileConfig.Default);
_transactionHolder = new BenchmarkTransactionHolder(_storage);
_collection = new DocumentCollection<Person>(_storage, _transactionHolder, new PersonMapper());
}
[IterationCleanup]
public void Cleanup()
{
try
{
using var _ = LogContext.PushProperty("Benchmark", nameof(InsertBenchmarks));
_transactionHolder?.Dispose();
_transactionHolder = null;
_storage?.Dispose();
_storage = null;
Thread.Sleep(100); // give the OS a moment to release file handles before the deletes below
if (File.Exists(_docDbPath)) File.Delete(_docDbPath);
if (File.Exists(_docDbWalPath)) File.Delete(_docDbWalPath);
}
catch (Exception ex)
{
Logger.LogWarning(ex, "Cleanup warning");
}
}
// --- Benchmarks ---
[Benchmark(Baseline = true, Description = "CBDD Single Insert")]
[BenchmarkCategory("Insert_Single")]
public void DocumentDb_Insert_Single()
{
_collection?.Insert(_singlePerson!);
_transactionHolder?.CommitAndReset();
}
[Benchmark(Description = "CBDD Batch Insert (1000 items, 1 Txn)")]
[BenchmarkCategory("Insert_Batch")]
public void DocumentDb_Insert_Batch()
{
_collection?.InsertBulk(_batchData);
_transactionHolder?.CommitAndReset();
}
}

View File

@@ -0,0 +1,30 @@
using Microsoft.Extensions.Logging;
using Serilog;
namespace ZB.MOM.WW.CBDD.Tests.Benchmark;
internal static class Logging
{
private static readonly Lazy<ILoggerFactory> LoggerFactoryInstance = new(CreateFactory);
public static ILoggerFactory LoggerFactory => LoggerFactoryInstance.Value;
public static Microsoft.Extensions.Logging.ILogger CreateLogger<T>()
{
return LoggerFactory.CreateLogger<T>();
}
private static ILoggerFactory CreateFactory()
{
var serilogLogger = new LoggerConfiguration()
.Enrich.FromLogContext()
.WriteTo.Console()
.CreateLogger();
return Microsoft.Extensions.Logging.LoggerFactory.Create(builder =>
{
builder.ClearProviders();
builder.AddSerilog(serilogLogger, dispose: true);
});
}
}

View File

@@ -0,0 +1,110 @@
using System.Diagnostics;
using System.Text;
using Microsoft.Extensions.Logging;
using Serilog.Context;
namespace ZB.MOM.WW.CBDD.Tests.Benchmark;
public class ManualBenchmark
{
private static readonly StringBuilder _log = new();
private static void Log(ILogger logger, string message = "")
{
logger.LogInformation("{Message}", message);
_log.AppendLine(message);
}
public static void Run(ILogger logger)
{
using var _ = LogContext.PushProperty("Benchmark", nameof(ManualBenchmark));
_log.Clear();
Log(logger, "=== MANUAL BENCHMARK: CBDD ===");
Log(logger, $"Date: {DateTime.Now}");
Log(logger, "Testing: Complex Objects (Nested Documents + Collections)\n");
long batchInsertMs;
long singleInsertMs;
long readByIdMs;
using (LogContext.PushProperty("Phase", "BatchInsert"))
{
Log(logger, "1. Batch Insert (1000 items)");
var insertBench = new InsertBenchmarks();
insertBench.Setup();
insertBench.IterationSetup();
try
{
var sw = Stopwatch.StartNew();
insertBench.DocumentDb_Insert_Batch();
sw.Stop();
batchInsertMs = sw.ElapsedMilliseconds;
Log(logger, $" CBDD InsertBulk (1000): {batchInsertMs} ms");
}
finally
{
insertBench.Cleanup();
}
}
using (LogContext.PushProperty("Phase", "FindById"))
{
Log(logger, "\n2. FindById Performance (1000 operations)");
var readBench = new ReadBenchmarks();
readBench.Setup();
try
{
var sw = Stopwatch.StartNew();
for (int i = 0; i < 1000; i++)
{
readBench.DocumentDb_FindById();
}
sw.Stop();
readByIdMs = sw.ElapsedMilliseconds;
Log(logger, $" CBDD FindById x1000: {readByIdMs} ms ({(double)readByIdMs / 1000:F3} ms/op)");
}
finally
{
readBench.Cleanup();
}
}
using (LogContext.PushProperty("Phase", "SingleInsert"))
{
Log(logger, "\n3. Single Insert");
var insertBench = new InsertBenchmarks();
insertBench.Setup();
insertBench.IterationSetup();
try
{
var sw = Stopwatch.StartNew();
insertBench.DocumentDb_Insert_Single();
sw.Stop();
singleInsertMs = sw.ElapsedMilliseconds;
Log(logger, $" CBDD Single Insert: {singleInsertMs} ms");
}
finally
{
insertBench.Cleanup();
}
}
Log(logger, "\n============================================================================");
Log(logger, "BENCHMARK RESULTS (CBDD ONLY):");
Log(logger, "============================================================================");
Log(logger, $"Batch Insert (1000): {batchInsertMs} ms");
Log(logger, $"FindById x1000: {readByIdMs} ms");
Log(logger, $"Single Insert: {singleInsertMs} ms");
var artifactsDir = Path.Combine(AppContext.BaseDirectory, "BenchmarkDotNet.Artifacts", "results");
if (!Directory.Exists(artifactsDir))
{
Directory.CreateDirectory(artifactsDir);
}
var filePath = Path.Combine(artifactsDir, "manual_report.txt");
File.WriteAllText(filePath, _log.ToString());
logger.LogInformation("Report saved to: {FilePath}", filePath);
}
}

View File

@@ -0,0 +1,170 @@
using BenchmarkDotNet.Attributes;
using BenchmarkDotNet.Configs;
using BenchmarkDotNet.Jobs;
using ZB.MOM.WW.CBDD.Bson;
using ZB.MOM.WW.CBDD.Core.Collections;
using ZB.MOM.WW.CBDD.Core.Compression;
using ZB.MOM.WW.CBDD.Core.Storage;
namespace ZB.MOM.WW.CBDD.Tests.Benchmark;
[SimpleJob]
[InProcess]
[MemoryDiagnoser]
[GroupBenchmarksBy(BenchmarkLogicalGroupRule.ByCategory)]
[HtmlExporter]
[JsonExporterAttribute.Full]
public class MixedWorkloadBenchmarks
{
[Params(false, true)]
public bool PeriodicCompaction { get; set; }
[Params(800)]
public int Operations { get; set; }
private string _dbPath = string.Empty;
private string _walPath = string.Empty;
private StorageEngine _storage = null!;
private BenchmarkTransactionHolder _transactionHolder = null!;
private DocumentCollection<Person> _collection = null!;
private readonly List<ObjectId> _activeIds = [];
private int _nextValueSeed;
[IterationSetup]
public void Setup()
{
var id = Guid.NewGuid().ToString("N");
_dbPath = Path.Combine(AppContext.BaseDirectory, $"bench_mixed_{id}.db");
_walPath = Path.ChangeExtension(_dbPath, ".wal");
var options = new CompressionOptions
{
EnableCompression = true,
MinSizeBytes = 256,
MinSavingsPercent = 0,
Codec = CompressionCodec.Brotli
};
_storage = new StorageEngine(_dbPath, PageFileConfig.Default, options);
_transactionHolder = new BenchmarkTransactionHolder(_storage);
_collection = new DocumentCollection<Person>(_storage, _transactionHolder, new PersonMapper());
_activeIds.Clear();
_nextValueSeed = 0;
for (var i = 0; i < 300; i++)
{
var idValue = _collection.Insert(CreatePerson(_nextValueSeed++));
_activeIds.Add(idValue);
}
_transactionHolder.CommitAndReset();
}
[IterationCleanup]
public void Cleanup()
{
_transactionHolder?.Dispose();
_storage?.Dispose();
if (File.Exists(_dbPath)) File.Delete(_dbPath);
if (File.Exists(_walPath)) File.Delete(_walPath);
}
[Benchmark(Baseline = true)]
[BenchmarkCategory("MixedWorkload")]
public int InsertUpdateDeleteMix()
{
var random = new Random(12345);
for (var i = 1; i <= Operations; i++)
{
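// Operation mix: modes 0-1 insert (40%), 2-3 update (40%), 4 delete (20%).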
var mode = i % 5;
if (mode is 0 or 1)
{
var id = _collection.Insert(CreatePerson(_nextValueSeed++));
_activeIds.Add(id);
}
else if (mode is 2 or 3)
{
if (_activeIds.Count > 0)
{
var idx = random.Next(_activeIds.Count);
var id = _activeIds[idx];
var current = _collection.FindById(id);
if (current != null)
{
current.Age += 1;
current.Bio = BuildPayload(_nextValueSeed++);
_collection.Update(current);
}
}
}
else
{
if (_activeIds.Count > 100)
{
var idx = random.Next(_activeIds.Count);
var id = _activeIds[idx];
_collection.Delete(id);
_activeIds.RemoveAt(idx);
}
}
if (i % 50 == 0)
{
_transactionHolder.CommitAndReset();
}
if (PeriodicCompaction && i % 200 == 0)
{
_storage.RunOnlineCompactionPass(new CompactionOptions
{
OnlineMode = true,
OnlineBatchPageLimit = 8,
OnlineBatchDelay = TimeSpan.FromMilliseconds(1),
MaxOnlineDuration = TimeSpan.FromMilliseconds(120),
EnableTailTruncation = true
});
}
}
_transactionHolder.CommitAndReset();
return _collection.Count();
}
private static Person CreatePerson(int seed)
{
return new Person
{
Id = ObjectId.NewObjectId(),
FirstName = $"First_{seed}",
LastName = $"Last_{seed}",
Age = 18 + (seed % 60),
Bio = BuildPayload(seed),
CreatedAt = DateTime.UnixEpoch.AddSeconds(seed),
Balance = seed,
HomeAddress = new Address
{
Street = $"{seed} Mixed Ave",
City = "Workload City",
ZipCode = "10101"
}
};
}
private static string BuildPayload(int seed)
{
var builder = new System.Text.StringBuilder(1800);
for (var i = 0; i < 64; i++)
{
builder.Append("mixed-");
builder.Append(seed.ToString("D6"));
builder.Append('-');
builder.Append(i.ToString("D3"));
builder.Append('|');
}
return builder.ToString();
}
}

View File

@@ -0,0 +1,35 @@
using ZB.MOM.WW.CBDD.Bson;
namespace ZB.MOM.WW.CBDD.Tests.Benchmark;
public class Address
{
public string Street { get; set; } = string.Empty;
public string City { get; set; } = string.Empty;
public string ZipCode { get; set; } = string.Empty;
}
public class WorkHistory
{
public string CompanyName { get; set; } = string.Empty;
public string Title { get; set; } = string.Empty;
public int DurationYears { get; set; }
public List<string> Tags { get; set; } = new();
}
public class Person
{
public ObjectId Id { get; set; }
public string FirstName { get; set; } = string.Empty;
public string LastName { get; set; } = string.Empty;
public int Age { get; set; }
public string? Bio { get; set; }
public DateTime CreatedAt { get; set; }
// Complex fields
public decimal Balance { get; set; }
public Address HomeAddress { get; set; } = new();
public List<WorkHistory> EmploymentHistory { get; set; } = new();
}

View File

@@ -0,0 +1,161 @@
using ZB.MOM.WW.CBDD.Bson;
using ZB.MOM.WW.CBDD.Core.Collections;
namespace ZB.MOM.WW.CBDD.Tests.Benchmark;
public class PersonMapper : ObjectIdMapperBase<Person>
{
public override string CollectionName => "people";
public override ObjectId GetId(Person entity) => entity.Id;
public override void SetId(Person entity, ObjectId id) => entity.Id = id;
public override int Serialize(Person entity, BsonSpanWriter writer)
{
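// BeginDocument reserves room for the BSON length prefix; EndDocument back-patches it once the body is written.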
var sizePos = writer.BeginDocument();
writer.WriteObjectId("_id", entity.Id);
writer.WriteString("firstname", entity.FirstName);
writer.WriteString("lastname", entity.LastName);
writer.WriteInt32("age", entity.Age);
if (entity.Bio != null)
writer.WriteString("bio", entity.Bio);
else
writer.WriteNull("bio");
writer.WriteInt64("createdat", entity.CreatedAt.Ticks);
// Complex fields
writer.WriteDouble("balance", (double)entity.Balance);
// Nested Object: Address
var addrPos = writer.BeginDocument("homeaddress");
writer.WriteString("street", entity.HomeAddress.Street);
writer.WriteString("city", entity.HomeAddress.City);
writer.WriteString("zipcode", entity.HomeAddress.ZipCode);
writer.EndDocument(addrPos);
// Collection: EmploymentHistory
var histPos = writer.BeginArray("employmenthistory");
for (int i = 0; i < entity.EmploymentHistory.Count; i++)
{
var item = entity.EmploymentHistory[i];
// Array elements are keys "0", "1", "2"...
var itemPos = writer.BeginDocument(i.ToString());
writer.WriteString("companyname", item.CompanyName);
writer.WriteString("title", item.Title);
writer.WriteInt32("durationyears", item.DurationYears);
// Nested Collection: Tags
var tagsPos = writer.BeginArray("tags");
for (int j = 0; j < item.Tags.Count; j++)
{
writer.WriteString(j.ToString(), item.Tags[j]);
}
writer.EndArray(tagsPos);
writer.EndDocument(itemPos);
}
writer.EndArray(histPos);
writer.EndDocument(sizePos);
return writer.Position;
}
public override Person Deserialize(BsonSpanReader reader)
{
var person = new Person();
reader.ReadDocumentSize();
while (reader.Remaining > 0)
{
var type = reader.ReadBsonType();
if (type == BsonType.EndOfDocument)
break;
var name = reader.ReadElementHeader();
switch (name)
{
case "_id": person.Id = reader.ReadObjectId(); break;
case "firstname": person.FirstName = reader.ReadString(); break;
case "lastname": person.LastName = reader.ReadString(); break;
case "age": person.Age = reader.ReadInt32(); break;
case "bio":
if (type == BsonType.Null) person.Bio = null;
else person.Bio = reader.ReadString();
break;
case "createdat": person.CreatedAt = new DateTime(reader.ReadInt64()); break;
case "balance": person.Balance = (decimal)reader.ReadDouble(); break;
case "homeaddress":
reader.ReadDocumentSize(); // Enter document
while (reader.Remaining > 0)
{
var addrType = reader.ReadBsonType();
if (addrType == BsonType.EndOfDocument) break;
var addrName = reader.ReadElementHeader();
// Known fields are read directly for speed; anything unrecognized falls through to SkipValue.
if (addrName == "street") person.HomeAddress.Street = reader.ReadString();
else if (addrName == "city") person.HomeAddress.City = reader.ReadString();
else if (addrName == "zipcode") person.HomeAddress.ZipCode = reader.ReadString();
else reader.SkipValue(addrType);
}
break;
case "employmenthistory":
reader.ReadDocumentSize(); // Enter Array
while (reader.Remaining > 0)
{
var arrType = reader.ReadBsonType();
if (arrType == BsonType.EndOfDocument) break;
reader.ReadElementHeader(); // Array index "0", "1"... ignore
// Read WorkHistory item
var workItem = new WorkHistory();
reader.ReadDocumentSize(); // Enter Item Document
while (reader.Remaining > 0)
{
var itemType = reader.ReadBsonType();
if (itemType == BsonType.EndOfDocument) break;
var itemName = reader.ReadElementHeader();
if (itemName == "companyname") workItem.CompanyName = reader.ReadString();
else if (itemName == "title") workItem.Title = reader.ReadString();
else if (itemName == "durationyears") workItem.DurationYears = reader.ReadInt32();
else if (itemName == "tags")
{
reader.ReadDocumentSize(); // Enter Tags Array
while (reader.Remaining > 0)
{
var tagType = reader.ReadBsonType();
if (tagType == BsonType.EndOfDocument) break;
reader.ReadElementHeader(); // Index
if (tagType == BsonType.String)
workItem.Tags.Add(reader.ReadString());
else
reader.SkipValue(tagType);
}
}
else reader.SkipValue(itemType);
}
person.EmploymentHistory.Add(workItem);
}
break;
default:
reader.SkipValue(type);
break;
}
}
return person;
}
}

View File

@@ -0,0 +1,82 @@
using BenchmarkDotNet.Columns;
using BenchmarkDotNet.Configs;
using BenchmarkDotNet.Exporters;
using BenchmarkDotNet.Reports;
using BenchmarkDotNet.Running;
using Microsoft.Extensions.Logging;
using Serilog.Context;
namespace ZB.MOM.WW.CBDD.Tests.Benchmark;
class Program
{
static void Main(string[] args)
{
var logger = Logging.CreateLogger<Program>();
var mode = args.Length > 0 ? args[0].Trim().ToLowerInvariant() : string.Empty;
if (mode == "manual")
{
using var _ = LogContext.PushProperty("Mode", "Manual");
ManualBenchmark.Run(logger);
return;
}
if (mode == "size")
{
using var _ = LogContext.PushProperty("Mode", "SizeBenchmark");
DatabaseSizeBenchmark.Run(logger);
return;
}
if (mode == "compression")
{
using var _ = LogContext.PushProperty("Mode", "CompressionBenchmarks");
BenchmarkRunner.Run<CompressionBenchmarks>(CreateConfig());
return;
}
if (mode == "compaction")
{
using var _ = LogContext.PushProperty("Mode", "CompactionBenchmarks");
BenchmarkRunner.Run<CompactionBenchmarks>(CreateConfig());
return;
}
if (mode == "mixed")
{
using var _ = LogContext.PushProperty("Mode", "MixedWorkloadBenchmarks");
BenchmarkRunner.Run<MixedWorkloadBenchmarks>(CreateConfig());
return;
}
if (mode == "all")
{
using var _ = LogContext.PushProperty("Mode", "AllBenchmarks");
var config = CreateConfig();
BenchmarkRunner.Run<InsertBenchmarks>(config);
BenchmarkRunner.Run<ReadBenchmarks>(config);
BenchmarkRunner.Run<SerializationBenchmarks>(config);
BenchmarkRunner.Run<CompressionBenchmarks>(config);
BenchmarkRunner.Run<CompactionBenchmarks>(config);
BenchmarkRunner.Run<MixedWorkloadBenchmarks>(config);
return;
}
using var __ = LogContext.PushProperty("Mode", "BenchmarkDotNet");
var defaultConfig = CreateConfig();
BenchmarkRunner.Run<InsertBenchmarks>(defaultConfig);
BenchmarkRunner.Run<ReadBenchmarks>(defaultConfig);
BenchmarkRunner.Run<SerializationBenchmarks>(defaultConfig);
}
private static IConfig CreateConfig()
{
return DefaultConfig.Instance
.AddExporter(HtmlExporter.Default)
.WithSummaryStyle(SummaryStyle.Default
.WithRatioStyle(RatioStyle.Trend)
.WithTimeUnit(Perfolizer.Horology.TimeUnit.Microsecond));
}
}
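The first command-line argument selects the run mode: "manual" runs the stopwatch-based ManualBenchmark, "size" runs DatabaseSizeBenchmark, "compression", "compaction", and "mixed" run their BenchmarkDotNet suites, "all" runs every suite, and no argument falls back to the insert, read, and serialization benchmarks (for example, dotnet run -c Release -- size, assuming the project's standard entry point).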

View File

@@ -0,0 +1,109 @@
using BenchmarkDotNet.Attributes;
using BenchmarkDotNet.Configs;
using BenchmarkDotNet.Jobs;
using ZB.MOM.WW.CBDD.Bson;
using ZB.MOM.WW.CBDD.Core.Collections;
using ZB.MOM.WW.CBDD.Core.Storage;
namespace ZB.MOM.WW.CBDD.Tests.Benchmark;
[SimpleJob]
[InProcess]
[MemoryDiagnoser]
[GroupBenchmarksBy(BenchmarkLogicalGroupRule.ByCategory)]
[HtmlExporter]
[JsonExporterAttribute.Full]
public class ReadBenchmarks
{
private const int DocCount = 1000;
private string _docDbPath = null!;
private string _docDbWalPath = null!;
private StorageEngine _storage = null!;
private BenchmarkTransactionHolder _transactionHolder = null!;
private DocumentCollection<Person> _collection = null!;
private ObjectId[] _ids = null!;
private ObjectId _targetId;
[GlobalSetup]
public void Setup()
{
var temp = AppContext.BaseDirectory;
var id = Guid.NewGuid().ToString("N");
_docDbPath = Path.Combine(temp, $"bench_read_docdb_{id}.db");
_docDbWalPath = Path.ChangeExtension(_docDbPath, ".wal");
if (File.Exists(_docDbPath)) File.Delete(_docDbPath);
if (File.Exists(_docDbWalPath)) File.Delete(_docDbWalPath);
_storage = new StorageEngine(_docDbPath, PageFileConfig.Default);
_transactionHolder = new BenchmarkTransactionHolder(_storage);
_collection = new DocumentCollection<Person>(_storage, _transactionHolder, new PersonMapper());
_ids = new ObjectId[DocCount];
for (int i = 0; i < DocCount; i++)
{
var p = CreatePerson(i);
_ids[i] = _collection.Insert(p);
}
_transactionHolder.CommitAndReset();
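// Use a document from the middle of the insertion order as the lookup target.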
_targetId = _ids[DocCount / 2];
}
[GlobalCleanup]
public void Cleanup()
{
_transactionHolder?.Dispose();
_storage?.Dispose();
if (File.Exists(_docDbPath)) File.Delete(_docDbPath);
if (File.Exists(_docDbWalPath)) File.Delete(_docDbWalPath);
}
private Person CreatePerson(int i)
{
var p = new Person
{
Id = ObjectId.NewObjectId(),
FirstName = $"First_{i}",
LastName = $"Last_{i}",
Age = 20 + (i % 50),
Bio = null,
CreatedAt = DateTime.UtcNow,
Balance = 1000.50m * (i + 1),
HomeAddress = new Address
{
Street = $"{i} Main St",
City = "Tech City",
ZipCode = "12345"
}
};
// Add 10 work history items
for (int j = 0; j < 10; j++)
{
p.EmploymentHistory.Add(new WorkHistory
{
CompanyName = $"TechCorp_{i}_{j}",
Title = "Developer",
DurationYears = j,
Tags = new List<string> { "C#", "BSON", "Performance", "Database", "Complex" }
});
}
return p;
}
[Benchmark(Baseline = true, Description = "CBDD FindById")]
[BenchmarkCategory("Read_Single")]
public Person? DocumentDb_FindById()
{
return _collection.FindById(_targetId);
}
}

View File

@@ -0,0 +1,182 @@
using BenchmarkDotNet.Attributes;
using BenchmarkDotNet.Configs;
using ZB.MOM.WW.CBDD.Bson;
using System.Text.Json;
namespace ZB.MOM.WW.CBDD.Tests.Benchmark;
[InProcess]
[MemoryDiagnoser]
[GroupBenchmarksBy(BenchmarkLogicalGroupRule.ByCategory)]
[HtmlExporter]
[JsonExporterAttribute.Full]
public class SerializationBenchmarks
{
private const int BatchSize = 10000;
private Person _person = null!;
private List<Person> _people = null!;
private readonly PersonMapper _mapper = new();
private byte[] _bsonData = Array.Empty<byte>();
private byte[] _jsonData = Array.Empty<byte>();
private List<byte[]> _bsonDataList = new();
private List<byte[]> _jsonDataList = new();
private byte[] _serializeBuffer = Array.Empty<byte>();
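// Bidirectional key table: field-name strings map to compact ushort ids for writing, and back to strings for reading.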
private static readonly System.Collections.Concurrent.ConcurrentDictionary<string, ushort> _keyMap = new(StringComparer.OrdinalIgnoreCase);
private static readonly System.Collections.Concurrent.ConcurrentDictionary<ushort, string> _keys = new();
static SerializationBenchmarks()
{
ushort id = 1;
string[] initialKeys = { "_id", "firstname", "lastname", "age", "bio", "createdat", "balance", "homeaddress", "street", "city", "zipcode", "employmenthistory", "companyname", "title", "durationyears", "tags" };
foreach (var key in initialKeys)
{
_keyMap[key] = id;
_keys[id] = key;
id++;
}
// Pre-register numeric array-index keys ("0".."99") so indexed elements hit the map too
for (int i = 0; i < 100; i++)
{
var s = i.ToString();
_keyMap[s] = id;
_keys[id] = s;
id++;
}
}
[GlobalSetup]
public void Setup()
{
_person = CreatePerson(0);
_people = new List<Person>(BatchSize);
for (int i = 0; i < BatchSize; i++)
{
_people.Add(CreatePerson(i));
}
// Pre-allocate buffer for BSON serialization
_serializeBuffer = new byte[8192];
var writer = new BsonSpanWriter(_serializeBuffer, _keyMap);
// Single item data
var len = _mapper.Serialize(_person, writer);
_bsonData = _serializeBuffer.AsSpan(0, len).ToArray();
_jsonData = JsonSerializer.SerializeToUtf8Bytes(_person);
// List data
foreach (var p in _people)
{
len = _mapper.Serialize(p, writer);
_bsonDataList.Add(_serializeBuffer.AsSpan(0, len).ToArray());
_jsonDataList.Add(JsonSerializer.SerializeToUtf8Bytes(p));
}
}
private Person CreatePerson(int i)
{
var p = new Person
{
Id = ObjectId.NewObjectId(),
FirstName = $"First_{i}",
LastName = $"Last_{i}",
Age = 25,
Bio = null,
CreatedAt = DateTime.UtcNow,
Balance = 1000.50m,
HomeAddress = new Address
{
Street = $"{i} Main St",
City = "Tech City",
ZipCode = "12345"
}
};
for (int j = 0; j < 10; j++)
{
p.EmploymentHistory.Add(new WorkHistory
{
CompanyName = $"TechCorp_{i}_{j}",
Title = "Developer",
DurationYears = j,
Tags = new List<string> { "C#", "BSON", "Performance", "Database", "Complex" }
});
}
return p;
}
[Benchmark(Description = "Serialize Single (BSON)")]
[BenchmarkCategory("Single")]
public void Serialize_Bson()
{
var writer = new BsonSpanWriter(_serializeBuffer, _keyMap);
_mapper.Serialize(_person, writer);
}
[Benchmark(Description = "Serialize Single (JSON)")]
[BenchmarkCategory("Single")]
public void Serialize_Json()
{
JsonSerializer.SerializeToUtf8Bytes(_person);
}
[Benchmark(Description = "Deserialize Single (BSON)")]
[BenchmarkCategory("Single")]
public Person Deserialize_Bson()
{
var reader = new BsonSpanReader(_bsonData, _keys);
return _mapper.Deserialize(reader);
}
[Benchmark(Description = "Deserialize Single (JSON)")]
[BenchmarkCategory("Single")]
public Person? Deserialize_Json()
{
return JsonSerializer.Deserialize<Person>(_jsonData);
}
[Benchmark(Description = "Serialize List 10k (BSON loop)")]
[BenchmarkCategory("Batch")]
public void Serialize_List_Bson()
{
foreach (var p in _people)
{
var writer = new BsonSpanWriter(_serializeBuffer, _keyMap);
_mapper.Serialize(p, writer);
}
}
[Benchmark(Description = "Serialize List 10k (JSON loop)")]
[BenchmarkCategory("Batch")]
public void Serialize_List_Json()
{
foreach (var p in _people)
{
JsonSerializer.SerializeToUtf8Bytes(p);
}
}
[Benchmark(Description = "Deserialize List 10k (BSON loop)")]
[BenchmarkCategory("Batch")]
public void Deserialize_List_Bson()
{
foreach (var data in _bsonDataList)
{
var reader = new BsonSpanReader(data, _keys);
_mapper.Deserialize(reader);
}
}
[Benchmark(Description = "Deserialize List 10k (JSON loop)")]
[BenchmarkCategory("Batch")]
public void Deserialize_List_Json()
{
foreach (var data in _jsonDataList)
{
JsonSerializer.Deserialize<Person>(data);
}
}
}

View File

@@ -0,0 +1,25 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<OutputType>Exe</OutputType>
<TargetFramework>net10.0</TargetFramework>
<AssemblyName>ZB.MOM.WW.CBDD.Tests.Benchmark</AssemblyName>
<RootNamespace>ZB.MOM.WW.CBDD.Tests.Benchmark</RootNamespace>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<IsPackable>false</IsPackable>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="BenchmarkDotNet" Version="0.15.8" />
<PackageReference Include="Microsoft.Extensions.Logging" Version="9.0.10" />
<PackageReference Include="Serilog" Version="4.2.0" />
<PackageReference Include="Serilog.Extensions.Logging" Version="8.0.0" />
<PackageReference Include="Serilog.Sinks.Console" Version="6.0.0" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\..\src\CBDD.Core\ZB.MOM.WW.CBDD.Core.csproj" />
</ItemGroup>
</Project>