Initialize CBDD solution and add a .NET-focused gitignore for generated artifacts.
tests/CBDD.Tests.Benchmark/CompressionBenchmarks.cs (new file, 174 lines added)
@@ -0,0 +1,174 @@
using BenchmarkDotNet.Attributes;
using BenchmarkDotNet.Configs;
using BenchmarkDotNet.Jobs;
using System.IO.Compression;
using ZB.MOM.WW.CBDD.Bson;
using ZB.MOM.WW.CBDD.Core.Collections;
using ZB.MOM.WW.CBDD.Core.Compression;
using ZB.MOM.WW.CBDD.Core.Storage;

namespace ZB.MOM.WW.CBDD.Tests.Benchmark;

// Compares insert/update/read throughput and allocations with compression
// disabled vs. enabled, across the parameterized codec and level combinations.
[SimpleJob]
[InProcess]
[MemoryDiagnoser]
[GroupBenchmarksBy(BenchmarkLogicalGroupRule.ByCategory)]
[HtmlExporter]
[JsonExporterAttribute.Full]
public class CompressionBenchmarks
{
    private const int SeedCount = 300;
    private const int WorkloadCount = 100;

    [Params(false, true)]
    public bool EnableCompression { get; set; }

    [Params(CompressionCodec.Brotli, CompressionCodec.Deflate)]
    public CompressionCodec Codec { get; set; }

    [Params(CompressionLevel.Fastest, CompressionLevel.Optimal)]
    public CompressionLevel Level { get; set; }

    private string _dbPath = string.Empty;
    private string _walPath = string.Empty;
    private StorageEngine _storage = null!;
    private BenchmarkTransactionHolder _transactionHolder = null!;
    private DocumentCollection<Person> _collection = null!;

    private Person[] _insertBatch = Array.Empty<Person>();
    private ObjectId[] _seedIds = Array.Empty<ObjectId>();

    [IterationSetup]
    public void Setup()
    {
        // Fresh, uniquely named database file for every iteration.
        var id = Guid.NewGuid().ToString("N");
        _dbPath = Path.Combine(AppContext.BaseDirectory, $"bench_compression_{id}.db");
        _walPath = Path.ChangeExtension(_dbPath, ".wal");

        var compressionOptions = new CompressionOptions
        {
            EnableCompression = EnableCompression,
            MinSizeBytes = 256,
            MinSavingsPercent = 0,
            Codec = Codec,
            Level = Level
        };

        _storage = new StorageEngine(_dbPath, PageFileConfig.Default, compressionOptions);
        _transactionHolder = new BenchmarkTransactionHolder(_storage);
        _collection = new DocumentCollection<Person>(_storage, _transactionHolder, new PersonMapper());

        // Seed documents that the update and read workloads operate on.
        _seedIds = new ObjectId[SeedCount];
        for (var i = 0; i < SeedCount; i++)
        {
            var doc = CreatePerson(i, includeLargeBio: true);
            _seedIds[i] = _collection.Insert(doc);
        }

        _transactionHolder.CommitAndReset();

        // Pre-built batch for the insert workload, so document creation stays out of the measurement.
        _insertBatch = Enumerable.Range(SeedCount, WorkloadCount)
            .Select(i => CreatePerson(i, includeLargeBio: true))
            .ToArray();
    }

    [IterationCleanup]
    public void Cleanup()
    {
        _transactionHolder?.Dispose();
        _storage?.Dispose();

        if (File.Exists(_dbPath)) File.Delete(_dbPath);
        if (File.Exists(_walPath)) File.Delete(_walPath);
    }

    [Benchmark(Baseline = true)]
    [BenchmarkCategory("Compression_InsertUpdateRead")]
    public void Insert_Workload()
    {
        _collection.InsertBulk(_insertBatch);
        _transactionHolder.CommitAndReset();
    }

    [Benchmark]
    [BenchmarkCategory("Compression_InsertUpdateRead")]
    public void Update_Workload()
    {
        for (var i = 0; i < WorkloadCount; i++)
        {
            var id = _seedIds[i];
            var current = _collection.FindById(id);
            if (current == null)
                continue;

            current.Bio = BuildBio(i + 10_000);
            current.Age += 1;
            _collection.Update(current);
        }

        _transactionHolder.CommitAndReset();
    }

    [Benchmark]
    [BenchmarkCategory("Compression_InsertUpdateRead")]
    public int Read_Workload()
    {
        // Accumulate a value from the results so the reads are observed and returned to BenchmarkDotNet.
        var checksum = 0;
        for (var i = 0; i < WorkloadCount; i++)
        {
            var person = _collection.FindById(_seedIds[i]);
            if (person != null)
            {
                checksum += person.Age;
            }
        }

        _transactionHolder.CommitAndReset();
        return checksum;
    }

    private static Person CreatePerson(int i, bool includeLargeBio)
    {
        return new Person
        {
            Id = ObjectId.NewObjectId(),
            FirstName = $"First_{i}",
            LastName = $"Last_{i}",
            Age = 20 + (i % 50),
            Bio = includeLargeBio ? BuildBio(i) : $"bio-{i}",
            CreatedAt = DateTime.UnixEpoch.AddMinutes(i),
            Balance = 100 + i,
            HomeAddress = new Address
            {
                Street = $"{i} Main St",
                City = "Bench City",
                ZipCode = "12345"
            },
            EmploymentHistory =
            [
                new WorkHistory
                {
                    CompanyName = $"Company_{i}",
                    Title = "Engineer",
                    DurationYears = i % 10,
                    Tags = ["csharp", "db", "compression"]
                }
            ]
        };
    }

    // Builds a long, repetitive bio string (150 x 15 characters) so documents are
    // well above MinSizeBytes and compress readily.
    private static string BuildBio(int seed)
    {
        var builder = new System.Text.StringBuilder(4500);
        for (var i = 0; i < 150; i++)
        {
            builder.Append("bio-");
            builder.Append(seed.ToString("D6"));
            builder.Append('-');
            builder.Append(i.ToString("D3"));
            builder.Append('|');
        }

        return builder.ToString();
    }
}
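The class above needs a console entry point to execute. A minimal sketch of such a runner follows; the Program.cs file name and the use of BenchmarkSwitcher are illustrative assumptions rather than part of this commit, though BenchmarkRunner/BenchmarkSwitcher are standard BenchmarkDotNet APIs. Because the class is marked [InProcess], the benchmarks run inside the host process instead of separately generated executables.

// Hypothetical Program.cs for the tests/CBDD.Tests.Benchmark project (illustration only).
using BenchmarkDotNet.Running;
using ZB.MOM.WW.CBDD.Tests.Benchmark;

// BenchmarkSwitcher lets the command line choose what to run, e.g.
//   dotnet run -c Release -- --filter *CompressionBenchmarks*
BenchmarkSwitcher
    .FromAssembly(typeof(CompressionBenchmarks).Assembly)
    .Run(args);

With [HtmlExporter] and [JsonExporterAttribute.Full] applied, the reports land in BenchmarkDotNet's default BenchmarkDotNet.Artifacts output folder, which is the kind of generated artifact the commit's .NET-focused .gitignore is presumably meant to keep out of the repository.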