Fix audit findings for coverage, architecture checks, and XML docs
All checks were successful
NuGet Publish / build-and-pack (push) Successful in 45s
NuGet Publish / publish-to-gitea (push) Successful in 52s

This commit is contained in:
Joseph Doherty
2026-02-20 15:43:25 -05:00
parent 5528806518
commit 3ffd468c79
99 changed files with 23746 additions and 9548 deletions

View File

@@ -11,33 +11,55 @@ namespace ZB.MOM.WW.CBDD.Tests.Benchmark;
internal static class DatabaseSizeBenchmark
{
private static readonly int[] TargetCounts = [10_000, 1_000_000, 10_000_000];
private static readonly CompressionOptions CompressedBrotliFast = new()
{
EnableCompression = true,
MinSizeBytes = 256,
MinSavingsPercent = 0,
Codec = CompressionCodec.Brotli,
Level = System.IO.Compression.CompressionLevel.Fastest
};
private static readonly Scenario[] Scenarios =
[
// Separate compression set (no compaction)
new(
"Uncompressed",
CompressionOptions.Default),
Set: "compression",
Name: "CompressionOnly-Uncompressed",
CompressionOptions: CompressionOptions.Default,
RunCompaction: false),
new(
"Compressed-BrotliFast",
new CompressionOptions
{
EnableCompression = true,
MinSizeBytes = 256,
MinSavingsPercent = 0,
Codec = CompressionCodec.Brotli,
Level = System.IO.Compression.CompressionLevel.Fastest
})
Set: "compression",
Name: "CompressionOnly-Compressed-BrotliFast",
CompressionOptions: CompressedBrotliFast,
RunCompaction: false),
// Separate compaction set (compaction enabled)
new(
Set: "compaction",
Name: "Compaction-Uncompressed",
CompressionOptions: CompressionOptions.Default,
RunCompaction: true),
new(
Set: "compaction",
Name: "Compaction-Compressed-BrotliFast",
CompressionOptions: CompressedBrotliFast,
RunCompaction: true)
];
private const int BatchSize = 50_000;
private const int ProgressInterval = 1_000_000;
/// <summary>
/// Runs the database size benchmark: for each target count and scenario, inserts documents,
/// measures database/WAL sizes before and after optional compaction, logs a summary table,
/// and writes the results to a CSV file.
/// </summary>
/// <param name="logger">Logger for benchmark progress and results.</param>
public static void Run(ILogger logger)
{
var results = new List<SizeResult>(TargetCounts.Length * Scenarios.Length);
logger.LogInformation("=== CBDD Database Size Benchmark ===");
logger.LogInformation("=== CBDD Database Size Benchmark (Separate Compression/Compaction Sets) ===");
logger.LogInformation("Targets: {Targets}", string.Join(", ", TargetCounts.Select(x => x.ToString("N0"))));
logger.LogInformation("Scenarios: {Scenarios}", string.Join(", ", Scenarios.Select(x => x.Name)));
logger.LogInformation("Scenarios: {Scenarios}", string.Join(", ", Scenarios.Select(x => $"{x.Set}:{x.Name}")));
logger.LogInformation("Batch size: {BatchSize:N0}", BatchSize);
foreach (var targetCount in TargetCounts)
@@ -48,12 +70,17 @@ internal static class DatabaseSizeBenchmark
var walPath = Path.ChangeExtension(dbPath, ".wal");
using var _ = LogContext.PushProperty("TargetCount", targetCount);
using var __ = LogContext.PushProperty("Scenario", scenario.Name);
using var ___ = LogContext.PushProperty("ScenarioSet", scenario.Set);
logger.LogInformation("Starting scenario {Scenario} for target {TargetCount:N0} docs", scenario.Name, targetCount);
logger.LogInformation(
"Starting {Set} scenario {Scenario} for target {TargetCount:N0} docs",
scenario.Set,
scenario.Name,
targetCount);
var insertStopwatch = Stopwatch.StartNew();
CompressionStats compressionStats = default;
CompactionStats compactionStats;
CompactionStats compactionStats = new();
long preCompactDbBytes;
long preCompactWalBytes;
long postCompactDbBytes;
@@ -93,12 +120,15 @@ internal static class DatabaseSizeBenchmark
preCompactDbBytes = File.Exists(dbPath) ? new FileInfo(dbPath).Length : 0;
preCompactWalBytes = File.Exists(walPath) ? new FileInfo(walPath).Length : 0;
compactionStats = storage.Compact(new CompactionOptions
if (scenario.RunCompaction)
{
EnableTailTruncation = true,
DefragmentSlottedPages = true,
NormalizeFreeList = true
});
compactionStats = storage.Compact(new CompactionOptions
{
EnableTailTruncation = true,
DefragmentSlottedPages = true,
NormalizeFreeList = true
});
}
postCompactDbBytes = File.Exists(dbPath) ? new FileInfo(dbPath).Length : 0;
postCompactWalBytes = File.Exists(walPath) ? new FileInfo(walPath).Length : 0;
@@ -106,7 +136,9 @@ internal static class DatabaseSizeBenchmark
}
var result = new SizeResult(
scenario.Set,
scenario.Name,
scenario.RunCompaction,
targetCount,
insertStopwatch.Elapsed,
preCompactDbBytes,
@@ -118,13 +150,16 @@ internal static class DatabaseSizeBenchmark
results.Add(result);
logger.LogInformation(
"Completed {Scenario} {TargetCount:N0} docs in {Elapsed}. pre={PreTotal}, post={PostTotal}, shrink={Shrink}, compRatio={CompRatio}",
"Completed {Set}:{Scenario} {TargetCount:N0} docs in {Elapsed}. pre={PreTotal}, post={PostTotal}, shrink={Shrink}, compactApplied={CompactionApplied}, compactReclaim={CompactReclaim}, compRatio={CompRatio}",
scenario.Set,
scenario.Name,
targetCount,
insertStopwatch.Elapsed,
FormatBytes(result.PreCompactTotalBytes),
FormatBytes(result.PostCompactTotalBytes),
FormatBytes(result.ShrinkBytes),
scenario.RunCompaction,
FormatBytes(result.CompactionStats.ReclaimedFileBytes),
result.CompressionRatioText);
TryDelete(dbPath);
@@ -133,10 +168,14 @@ internal static class DatabaseSizeBenchmark
}
logger.LogInformation("=== Size Benchmark Summary ===");
foreach (var result in results.OrderBy(x => x.TargetCount).ThenBy(x => x.Scenario))
foreach (var result in results
.OrderBy(x => x.Set)
.ThenBy(x => x.TargetCount)
.ThenBy(x => x.Scenario))
{
logger.LogInformation(
"{Scenario,-22} | {Count,12:N0} docs | insert={Elapsed,12} | pre={Pre,12} | post={Post,12} | shrink={Shrink,12} | compact={CompactBytes,12} | ratio={Ratio}",
"{Set,-11} | {Scenario,-38} | {Count,12:N0} docs | insert={Elapsed,12} | pre={Pre,12} | post={Post,12} | shrink={Shrink,12} | compact={CompactBytes,12} | ratio={Ratio}",
result.Set,
result.Scenario,
result.TargetCount,
result.InsertElapsed,
@@ -146,6 +185,8 @@ internal static class DatabaseSizeBenchmark
FormatBytes(result.CompactionStats.ReclaimedFileBytes),
result.CompressionRatioText);
}
WriteSummaryCsv(results, logger);
}
private static SizeBenchmarkDocument CreateDocument(int value)
@@ -181,10 +222,42 @@ internal static class DatabaseSizeBenchmark
return $"{size:N2} {units[unitIndex]}";
}
private sealed record Scenario(string Name, CompressionOptions CompressionOptions);
/// <summary>
/// Writes the benchmark results as a CSV file under <c>BenchmarkDotNet.Artifacts/results</c>
/// and logs the output path.
/// </summary>
/// <param name="results">Completed benchmark results; ordered by set, target count, then scenario in the output.</param>
/// <param name="logger">Logger that receives the output-path confirmation message.</param>
private static void WriteSummaryCsv(IEnumerable<SizeResult> results, ILogger logger)
{
    var outputDirectory = Path.Combine(Directory.GetCurrentDirectory(), "BenchmarkDotNet.Artifacts", "results");
    Directory.CreateDirectory(outputDirectory);
    var outputPath = Path.Combine(outputDirectory, "DatabaseSizeBenchmark-results.csv");

    var lines = new List<string>
    {
        "set,scenario,target_count,run_compaction,insert_seconds,pre_total_bytes,post_total_bytes,shrink_bytes,compaction_reclaimed_bytes,compression_ratio_text"
    };

    // RFC 4180-style quoting: only applied when a field actually contains a
    // delimiter, quote, or newline, so typical output is unchanged.
    static string Escape(string field) =>
        field.IndexOfAny([',', '"', '\n', '\r']) >= 0
            ? $"\"{field.Replace("\"", "\"\"")}\""
            : field;

    // InvariantCulture on all numeric fields: the previous ToString("F3") used the
    // current culture, which emits a comma decimal separator in many locales and
    // corrupts the comma-delimited file.
    var invariant = System.Globalization.CultureInfo.InvariantCulture;

    foreach (var result in results.OrderBy(x => x.Set).ThenBy(x => x.TargetCount).ThenBy(x => x.Scenario))
    {
        lines.Add(string.Join(",",
            Escape(result.Set),
            Escape(result.Scenario),
            result.TargetCount.ToString(invariant),
            result.RunCompaction ? "true" : "false",
            result.InsertElapsed.TotalSeconds.ToString("F3", invariant),
            result.PreCompactTotalBytes.ToString(invariant),
            result.PostCompactTotalBytes.ToString(invariant),
            result.ShrinkBytes.ToString(invariant),
            result.CompactionStats.ReclaimedFileBytes.ToString(invariant),
            Escape(result.CompressionRatioText)));
    }

    File.WriteAllLines(outputPath, lines);
    logger.LogInformation("Database size summary CSV written to {OutputPath}", outputPath);
}
/// <summary>
/// Describes one benchmark scenario: a named configuration within a scenario set
/// ("compression" or "compaction") with its compression options and whether
/// compaction is run after the inserts complete.
/// </summary>
private sealed record Scenario(string Set, string Name, CompressionOptions CompressionOptions, bool RunCompaction);
private sealed record SizeResult(
string Set,
string Scenario,
bool RunCompaction,
int TargetCount,
TimeSpan InsertElapsed,
long PreCompactDbBytes,
@@ -194,10 +267,22 @@ internal static class DatabaseSizeBenchmark
CompactionStats CompactionStats,
CompressionStats CompressionStats)
{
/// <summary>
/// Gets the combined pre-compaction size (database file plus WAL) in bytes.
/// </summary>
public long PreCompactTotalBytes => PreCompactDbBytes + PreCompactWalBytes;
/// <summary>
/// Gets the combined post-compaction size (database file plus WAL) in bytes.
/// </summary>
public long PostCompactTotalBytes => PostCompactDbBytes + PostCompactWalBytes;
/// <summary>
/// Gets the number of bytes shrunk between the pre- and post-compaction totals.
/// </summary>
public long ShrinkBytes => PreCompactTotalBytes - PostCompactTotalBytes;
/// <summary>
/// Gets the compression ratio (bytes before divided by bytes after) formatted as
/// text when any bytes were compressed.
/// </summary>
public string CompressionRatioText =>
CompressionStats.BytesAfterCompression > 0
? $"{(double)CompressionStats.BytesBeforeCompression / CompressionStats.BytesAfterCompression:N2}x"
@@ -206,19 +291,32 @@ internal static class DatabaseSizeBenchmark
/// <summary>
/// Payload document inserted by the size benchmark; serialized via
/// <c>SizeBenchmarkDocumentMapper</c>.
/// </summary>
private sealed class SizeBenchmarkDocument
{
    /// <summary>
    /// Gets or sets the document identifier (assigned through the mapper's SetId).
    /// </summary>
    public ObjectId Id { get; set; }

    /// <summary>
    /// Gets or sets the integer payload value for this document.
    /// </summary>
    public int Value { get; set; }

    /// <summary>
    /// Gets or sets the string payload; defaults to an empty string.
    /// </summary>
    public string Name { get; set; } = string.Empty;
}
private sealed class SizeBenchmarkDocumentMapper : ObjectIdMapperBase<SizeBenchmarkDocument>
{
/// <inheritdoc />
public override string CollectionName => "size_documents";
/// <inheritdoc />
public override ObjectId GetId(SizeBenchmarkDocument entity) => entity.Id;
/// <inheritdoc />
public override void SetId(SizeBenchmarkDocument entity, ObjectId id) => entity.Id = id;
/// <inheritdoc />
public override int Serialize(SizeBenchmarkDocument entity, BsonSpanWriter writer)
{
var sizePos = writer.BeginDocument();
@@ -229,6 +327,7 @@ internal static class DatabaseSizeBenchmark
return writer.Position;
}
/// <inheritdoc />
public override SizeBenchmarkDocument Deserialize(BsonSpanReader reader)
{
var document = new SizeBenchmarkDocument();