Fix audit findings for coverage, architecture checks, and XML docs
This commit is contained in:
287
tests/CBDD.Tests.Benchmark/PerformanceGateSmoke.cs
Normal file
287
tests/CBDD.Tests.Benchmark/PerformanceGateSmoke.cs
Normal file
@@ -0,0 +1,287 @@
|
||||
using System.Globalization;
using System.IO.Compression;
using System.Text;
using System.Text.Json;
using Microsoft.Extensions.Logging;
using ZB.MOM.WW.CBDD.Bson;
using ZB.MOM.WW.CBDD.Core.Collections;
using ZB.MOM.WW.CBDD.Core.Compression;
using ZB.MOM.WW.CBDD.Core.Storage;
|
||||
|
||||
namespace ZB.MOM.WW.CBDD.Tests.Benchmark;
|
||||
|
||||
/// <summary>
/// Smoke probes for the performance gate: an offline compaction throughput
/// probe plus GC-delta probes with document compression off and on.
/// </summary>
internal static class PerformanceGateSmoke
{
    // Workload sizes for the two probes.
    private const int CompactionDocumentCount = 2_000;
    private const int CompressionDocumentCount = 1_500;
|
||||
|
||||
/// <summary>
/// Runs the performance gate smoke probes and writes a JSON report.
/// </summary>
/// <param name="logger">The logger used to announce the report location.</param>
public static void Run(ILogger logger)
{
    // Compaction first, then the two GC probes back-to-back so the
    // compression-off/on numbers are directly comparable.
    var compaction = RunCompactionProbe();
    var compressionOff = RunCompressionGcProbe(enableCompression: false);
    var compressionOn = RunCompressionGcProbe(enableCompression: true);

    var report = new PerformanceGateReport(
        DateTimeOffset.UtcNow,
        compaction,
        compressionOff,
        compressionOn);
    var reportPath = WriteReport(report);

    logger.LogInformation("Performance gate smoke report written to {ReportPath}", reportPath);

    // Machine-readable key/value output. Floating-point values are formatted
    // with the invariant culture so a comma-decimal host locale can never
    // break downstream parsing of these lines.
    Console.WriteLine("[performance_gate]");
    Console.WriteLine($"report_path={reportPath}");
    Console.WriteLine($"compaction.pre_pages={compaction.PrePages}");
    Console.WriteLine($"compaction.post_pages={compaction.PostPages}");
    Console.WriteLine($"compaction.reclaimed_file_bytes={compaction.ReclaimedFileBytes}");
    Console.WriteLine($"compaction.throughput_bytes_per_sec={compaction.ThroughputBytesPerSecond.ToString("F2", CultureInfo.InvariantCulture)}");
    Console.WriteLine($"compaction.throughput_pages_per_sec={compaction.ThroughputPagesPerSecond.ToString("F2", CultureInfo.InvariantCulture)}");
    Console.WriteLine($"compaction.throughput_docs_per_sec={compaction.ThroughputDocumentsPerSecond.ToString("F2", CultureInfo.InvariantCulture)}");
    Console.WriteLine($"compression_off.gen0_delta={compressionOff.Gen0Delta}");
    Console.WriteLine($"compression_off.gen1_delta={compressionOff.Gen1Delta}");
    Console.WriteLine($"compression_off.gen2_delta={compressionOff.Gen2Delta}");
    Console.WriteLine($"compression_off.alloc_bytes_delta={compressionOff.AllocatedBytesDelta}");
    Console.WriteLine($"compression_on.gen0_delta={compressionOn.Gen0Delta}");
    Console.WriteLine($"compression_on.gen1_delta={compressionOn.Gen1Delta}");
    Console.WriteLine($"compression_on.gen2_delta={compressionOn.Gen2Delta}");
    Console.WriteLine($"compression_on.alloc_bytes_delta={compressionOn.AllocatedBytesDelta}");
}
|
||||
|
||||
/// <summary>
/// Measures offline compaction over a deliberately fragmented database
/// (bulk insert, delete every third document, rewrite every fifth).
/// </summary>
/// <returns>Page counts, reclaimed bytes, and compaction throughput figures.</returns>
private static CompactionProbeResult RunCompactionProbe()
{
    // Isolated temp-directory database so repeated runs never collide.
    var databasePath = NewDbPath("gate_compaction");
    var writeAheadLogPath = Path.ChangeExtension(databasePath, ".wal");

    try
    {
        using var storage = new StorageEngine(databasePath, PageFileConfig.Small);
        using var transactionHolder = new BenchmarkTransactionHolder(storage);
        var people = new DocumentCollection<Person>(storage, transactionHolder, new PersonMapper());

        // Seed with large documents so compaction has real work to do.
        var insertedIds = new List<ObjectId>(CompactionDocumentCount);
        for (var index = 0; index < CompactionDocumentCount; index++)
        {
            insertedIds.Add(people.Insert(CreatePerson(index, includeLargeBio: true)));
        }

        transactionHolder.CommitAndReset();
        storage.Checkpoint();

        // Delete every third document to free space inside existing pages.
        for (var index = 0; index < insertedIds.Count; index += 3)
        {
            people.Delete(insertedIds[index]);
        }

        // Rewrite every fifth surviving document with a fresh large bio.
        for (var index = 0; index < insertedIds.Count; index += 5)
        {
            var person = people.FindById(insertedIds[index]);
            if (person == null)
                continue;

            person.Bio = BuildBio(index + 10_000);
            person.Age += 1;
            people.Update(person);
        }

        transactionHolder.CommitAndReset();
        storage.Checkpoint();

        // Full offline compaction with every reclamation feature enabled.
        var stats = storage.Compact(new CompactionOptions
        {
            OnlineMode = false,
            DefragmentSlottedPages = true,
            NormalizeFreeList = true,
            EnableTailTruncation = true
        });

        return new CompactionProbeResult(
            stats.PrePageCount,
            stats.PostPageCount,
            stats.ReclaimedFileBytes,
            stats.ThroughputBytesPerSecond,
            stats.ThroughputPagesPerSecond,
            stats.ThroughputDocumentsPerSecond);
    }
    finally
    {
        // Remove the database plus every auxiliary file compaction may leave behind.
        TryDelete(databasePath);
        TryDelete(writeAheadLogPath);
        TryDelete($"{databasePath}.compact.state");
        TryDelete($"{databasePath}.compact.tmp");
        TryDelete($"{databasePath}.compact.bak");
    }
}
|
||||
|
||||
/// <summary>
/// Captures GC collection-count and allocation deltas around an
/// insert/update/read workload, with document compression on or off.
/// </summary>
/// <param name="enableCompression">Whether to run with Brotli compression enabled.</param>
/// <returns>GC generation deltas and the allocated-bytes delta for the workload.</returns>
private static CompressionGcProbeResult RunCompressionGcProbe(bool enableCompression)
{
    var databasePath = NewDbPath(enableCompression ? "gate_gc_on" : "gate_gc_off");
    var writeAheadLogPath = Path.ChangeExtension(databasePath, ".wal");

    // Compression on: Brotli/Fastest with thresholds relaxed so every
    // sizable document is compressed. Compression off: library defaults.
    CompressionOptions compressionOptions;
    if (enableCompression)
    {
        compressionOptions = new CompressionOptions
        {
            EnableCompression = true,
            MinSizeBytes = 256,
            MinSavingsPercent = 0,
            Codec = CompressionCodec.Brotli,
            Level = CompressionLevel.Fastest
        };
    }
    else
    {
        compressionOptions = CompressionOptions.Default;
    }

    try
    {
        using var storage = new StorageEngine(databasePath, PageFileConfig.Default, compressionOptions);
        using var transactionHolder = new BenchmarkTransactionHolder(storage);
        var people = new DocumentCollection<Person>(storage, transactionHolder, new PersonMapper());

        // Settle the heap before sampling so the deltas reflect the workload only.
        GC.Collect();
        GC.WaitForPendingFinalizers();
        GC.Collect();

        var gen0Before = GC.CollectionCount(0);
        var gen1Before = GC.CollectionCount(1);
        var gen2Before = GC.CollectionCount(2);
        var allocatedBefore = GC.GetTotalAllocatedBytes(true);

        // Workload phase 1: bulk insert of large documents.
        var insertedIds = new ObjectId[CompressionDocumentCount];
        for (var index = 0; index < CompressionDocumentCount; index++)
        {
            insertedIds[index] = people.Insert(CreatePerson(index, includeLargeBio: true));
        }

        transactionHolder.CommitAndReset();

        // Workload phase 2: rewrite every fourth document.
        for (var index = 0; index < insertedIds.Length; index += 4)
        {
            var person = people.FindById(insertedIds[index]);
            if (person == null)
                continue;

            person.Bio = BuildBio(index + 20_000);
            person.Age += 1;
            people.Update(person);
        }

        transactionHolder.CommitAndReset();

        // Workload phase 3: full scan to exercise the read/decompression path.
        var readCount = people.FindAll().Count();
        transactionHolder.CommitAndReset();

        // Settle again so the "after" counters are stable before sampling.
        GC.Collect();
        GC.WaitForPendingFinalizers();
        GC.Collect();

        var gen0After = GC.CollectionCount(0);
        var gen1After = GC.CollectionCount(1);
        var gen2After = GC.CollectionCount(2);
        var allocatedAfter = GC.GetTotalAllocatedBytes(true);

        return new CompressionGcProbeResult(
            enableCompression,
            readCount,
            gen0After - gen0Before,
            gen1After - gen1Before,
            gen2After - gen2Before,
            allocatedAfter - allocatedBefore);
    }
    finally
    {
        TryDelete(databasePath);
        TryDelete(writeAheadLogPath);
        TryDelete($"{databasePath}.compact.state");
        TryDelete($"{databasePath}.compact.tmp");
        TryDelete($"{databasePath}.compact.bak");
    }
}
|
||||
|
||||
/// <summary>
/// Serializes the report as indented JSON into the BenchmarkDotNet
/// results directory under the current working directory.
/// </summary>
/// <param name="report">The report to persist.</param>
/// <returns>The full path of the written report file.</returns>
private static string WriteReport(PerformanceGateReport report)
{
    // Reports land next to the BenchmarkDotNet artifacts so CI collects
    // them from a single location.
    var resultsDirectory = Path.Combine(Directory.GetCurrentDirectory(), "BenchmarkDotNet.Artifacts", "results");
    Directory.CreateDirectory(resultsDirectory);

    var reportPath = Path.Combine(resultsDirectory, "PerformanceGateSmoke-report.json");
    var serializerOptions = new JsonSerializerOptions { WriteIndented = true };
    File.WriteAllText(reportPath, JsonSerializer.Serialize(report, serializerOptions));
    return reportPath;
}
|
||||
|
||||
/// <summary>
/// Builds a deterministic <c>Person</c> document whose every field is
/// derived from the index <paramref name="i"/>.
/// </summary>
/// <param name="i">Index used to derive all field values.</param>
/// <param name="includeLargeBio">When true, uses the large generated bio; otherwise a short marker string.</param>
/// <returns>The populated document.</returns>
private static Person CreatePerson(int i, bool includeLargeBio)
{
    var bio = includeLargeBio ? BuildBio(i) : $"bio-{i}";

    var homeAddress = new Address
    {
        Street = $"{i} Main St",
        City = "Gate City",
        ZipCode = "12345"
    };

    var workHistory = new WorkHistory
    {
        CompanyName = $"Company_{i}",
        Title = "Engineer",
        DurationYears = i % 10,
        Tags = ["csharp", "db", "compression"]
    };

    return new Person
    {
        Id = ObjectId.NewObjectId(),
        FirstName = $"First_{i}",
        LastName = $"Last_{i}",
        Age = 20 + (i % 50),
        Bio = bio,
        CreatedAt = DateTime.UnixEpoch.AddMinutes(i),
        Balance = 100 + i,
        HomeAddress = homeAddress,
        EmploymentHistory = [workHistory]
    };
}
|
||||
|
||||
/// <summary>
/// Builds a deterministic 2,250-character biography string of 150
/// "bio-SSSSSS-III|" segments for the given seed.
/// </summary>
/// <param name="seed">Seed embedded in every segment so documents differ from one another.</param>
/// <returns>The generated biography text.</returns>
private static string BuildBio(int seed)
{
    // Presized above the 2,250-character result so the builder never grows.
    var builder = new StringBuilder(4500);
    for (var i = 0; i < 150; i++)
    {
        // Invariant culture pins the numeric text (including the negative
        // sign for negative seeds) regardless of the host locale.
        builder.Append(CultureInfo.InvariantCulture, $"bio-{seed:D6}-{i:D3}|");
    }

    return builder.ToString();
}
|
||||
|
||||
/// <summary>
/// Produces a unique database file path in the system temp directory.
/// </summary>
/// <param name="prefix">Prefix identifying the probe that owns the file.</param>
/// <returns>A temp-directory path of the form "{prefix}_{guid}.db".</returns>
private static string NewDbPath(string prefix)
{
    var fileName = $"{prefix}_{Guid.NewGuid():N}.db";
    return Path.Combine(Path.GetTempPath(), fileName);
}
|
||||
|
||||
/// <summary>
/// Deletes the file at <paramref name="path"/> if it exists, swallowing
/// I/O and access failures. This method runs inside <c>finally</c> blocks,
/// where a throwing delete (e.g. a still-locked WAL file) would otherwise
/// mask the probe's original exception.
/// </summary>
/// <param name="path">The file path to delete.</param>
private static void TryDelete(string path)
{
    try
    {
        if (File.Exists(path))
        {
            File.Delete(path);
        }
    }
    catch (IOException)
    {
        // Best effort: a file still in use must not fail the cleanup.
    }
    catch (UnauthorizedAccessException)
    {
        // Best effort: read-only/ACL issues are ignored during cleanup.
    }
}
|
||||
|
||||
/// <summary>Aggregated output of all smoke probes, serialized to the JSON report.</summary>
/// <param name="CapturedAtUtc">UTC timestamp of the run.</param>
/// <param name="Compaction">Result of the compaction probe.</param>
/// <param name="CompressionOff">GC probe result with compression disabled.</param>
/// <param name="CompressionOn">GC probe result with compression enabled.</param>
private sealed record PerformanceGateReport(
    DateTimeOffset CapturedAtUtc,
    CompactionProbeResult Compaction,
    CompressionGcProbeResult CompressionOff,
    CompressionGcProbeResult CompressionOn);

/// <summary>Page counts, reclaimed bytes, and throughput from the compaction probe.</summary>
private sealed record CompactionProbeResult(
    uint PrePages,
    uint PostPages,
    long ReclaimedFileBytes,
    double ThroughputBytesPerSecond,
    double ThroughputPagesPerSecond,
    double ThroughputDocumentsPerSecond);

/// <summary>GC counter deltas captured around one compression workload run.</summary>
private sealed record CompressionGcProbeResult(
    bool CompressionEnabled,
    int ReadCount,
    int Gen0Delta,
    int Gen1Delta,
    int Gen2Delta,
    long AllocatedBytesDelta);
|
||||
}
|
||||
Reference in New Issue
Block a user