Reformat / cleanup
All checks were successful
NuGet Publish / build-and-pack (push) Successful in 46s
NuGet Publish / publish-to-gitea (push) Successful in 56s

This commit is contained in:
Joseph Doherty
2026-02-21 08:10:36 -05:00
parent 4c6aaa5a3f
commit a70d8befae
176 changed files with 50555 additions and 49587 deletions

View File

@@ -1,6 +1,6 @@
using System.Text;
using BenchmarkDotNet.Attributes;
using BenchmarkDotNet.Configs;
using BenchmarkDotNet.Jobs;
using ZB.MOM.WW.CBDD.Bson;
using ZB.MOM.WW.CBDD.Core.Collections;
using ZB.MOM.WW.CBDD.Core.Storage;
@@ -15,21 +15,22 @@ namespace ZB.MOM.WW.CBDD.Tests.Benchmark;
[JsonExporterAttribute.Full]
public class CompactionBenchmarks
{
private readonly List<ObjectId> _insertedIds = [];
private DocumentCollection<Person> _collection = null!;
private string _dbPath = string.Empty;
private StorageEngine _storage = null!;
private BenchmarkTransactionHolder _transactionHolder = null!;
private string _walPath = string.Empty;
/// <summary>
/// Gets or sets the number of documents used per benchmark iteration.
/// Gets or sets the number of documents used per benchmark iteration.
/// </summary>
[Params(2_000)]
public int DocumentCount { get; set; }
private string _dbPath = string.Empty;
private string _walPath = string.Empty;
private StorageEngine _storage = null!;
private BenchmarkTransactionHolder _transactionHolder = null!;
private DocumentCollection<Person> _collection = null!;
private List<ObjectId> _insertedIds = [];
/// <summary>
/// Prepares benchmark state and seed data for each iteration.
/// Prepares benchmark state and seed data for each iteration.
/// </summary>
[IterationSetup]
public void Setup()
@@ -53,17 +54,14 @@ public class CompactionBenchmarks
_transactionHolder.CommitAndReset();
_storage.Checkpoint();
for (var i = _insertedIds.Count - 1; i >= _insertedIds.Count / 3; i--)
{
_collection.Delete(_insertedIds[i]);
}
for (int i = _insertedIds.Count - 1; i >= _insertedIds.Count / 3; i--) _collection.Delete(_insertedIds[i]);
_transactionHolder.CommitAndReset();
_storage.Checkpoint();
}
/// <summary>
/// Cleans up benchmark resources and temporary files after each iteration.
/// Cleans up benchmark resources and temporary files after each iteration.
/// </summary>
[IterationCleanup]
public void Cleanup()
@@ -76,7 +74,7 @@ public class CompactionBenchmarks
}
/// <summary>
/// Benchmarks reclaimed file bytes reported by offline compaction.
/// Benchmarks reclaimed file bytes reported by offline compaction.
/// </summary>
/// <returns>The reclaimed file byte count.</returns>
[Benchmark(Baseline = true)]
@@ -95,7 +93,7 @@ public class CompactionBenchmarks
}
/// <summary>
/// Benchmarks tail bytes truncated by offline compaction.
/// Benchmarks tail bytes truncated by offline compaction.
/// </summary>
/// <returns>The truncated tail byte count.</returns>
[Benchmark]
@@ -135,7 +133,7 @@ public class CompactionBenchmarks
private static string BuildPayload(int seed)
{
var builder = new System.Text.StringBuilder(2500);
var builder = new StringBuilder(2500);
for (var i = 0; i < 80; i++)
{
builder.Append("compact-");
@@ -147,4 +145,4 @@ public class CompactionBenchmarks
return builder.ToString();
}
}
}

View File

@@ -1,7 +1,7 @@
using System.IO.Compression;
using System.Text;
using BenchmarkDotNet.Attributes;
using BenchmarkDotNet.Configs;
using BenchmarkDotNet.Jobs;
using System.IO.Compression;
using ZB.MOM.WW.CBDD.Bson;
using ZB.MOM.WW.CBDD.Core.Collections;
using ZB.MOM.WW.CBDD.Core.Compression;
@@ -19,36 +19,36 @@ public class CompressionBenchmarks
{
private const int SeedCount = 300;
private const int WorkloadCount = 100;
private DocumentCollection<Person> _collection = null!;
private string _dbPath = string.Empty;
private Person[] _insertBatch = Array.Empty<Person>();
private ObjectId[] _seedIds = Array.Empty<ObjectId>();
private StorageEngine _storage = null!;
private BenchmarkTransactionHolder _transactionHolder = null!;
private string _walPath = string.Empty;
/// <summary>
/// Gets or sets whether compression is enabled for the benchmark run.
/// Gets or sets whether compression is enabled for the benchmark run.
/// </summary>
[Params(false, true)]
public bool EnableCompression { get; set; }
/// <summary>
/// Gets or sets the compression codec for the benchmark run.
/// Gets or sets the compression codec for the benchmark run.
/// </summary>
[Params(CompressionCodec.Brotli, CompressionCodec.Deflate)]
public CompressionCodec Codec { get; set; }
/// <summary>
/// Gets or sets the compression level for the benchmark run.
/// Gets or sets the compression level for the benchmark run.
/// </summary>
[Params(CompressionLevel.Fastest, CompressionLevel.Optimal)]
public CompressionLevel Level { get; set; }
private string _dbPath = string.Empty;
private string _walPath = string.Empty;
private StorageEngine _storage = null!;
private BenchmarkTransactionHolder _transactionHolder = null!;
private DocumentCollection<Person> _collection = null!;
private Person[] _insertBatch = Array.Empty<Person>();
private ObjectId[] _seedIds = Array.Empty<ObjectId>();
/// <summary>
/// Prepares benchmark storage and seed data for each iteration.
/// Prepares benchmark storage and seed data for each iteration.
/// </summary>
[IterationSetup]
public void Setup()
@@ -73,19 +73,19 @@ public class CompressionBenchmarks
_seedIds = new ObjectId[SeedCount];
for (var i = 0; i < SeedCount; i++)
{
var doc = CreatePerson(i, includeLargeBio: true);
var doc = CreatePerson(i, true);
_seedIds[i] = _collection.Insert(doc);
}
_transactionHolder.CommitAndReset();
_insertBatch = Enumerable.Range(SeedCount, WorkloadCount)
.Select(i => CreatePerson(i, includeLargeBio: true))
.Select(i => CreatePerson(i, true))
.ToArray();
}
/// <summary>
/// Cleans up benchmark resources for each iteration.
/// Cleans up benchmark resources for each iteration.
/// </summary>
[IterationCleanup]
public void Cleanup()
@@ -98,7 +98,7 @@ public class CompressionBenchmarks
}
/// <summary>
/// Benchmarks insert workload performance.
/// Benchmarks insert workload performance.
/// </summary>
[Benchmark(Baseline = true)]
[BenchmarkCategory("Compression_InsertUpdateRead")]
@@ -109,7 +109,7 @@ public class CompressionBenchmarks
}
/// <summary>
/// Benchmarks update workload performance.
/// Benchmarks update workload performance.
/// </summary>
[Benchmark]
[BenchmarkCategory("Compression_InsertUpdateRead")]
@@ -131,7 +131,7 @@ public class CompressionBenchmarks
}
/// <summary>
/// Benchmarks read workload performance.
/// Benchmarks read workload performance.
/// </summary>
[Benchmark]
[BenchmarkCategory("Compression_InsertUpdateRead")]
@@ -141,10 +141,7 @@ public class CompressionBenchmarks
for (var i = 0; i < WorkloadCount; i++)
{
var person = _collection.FindById(_seedIds[i]);
if (person != null)
{
checksum += person.Age;
}
if (person != null) checksum += person.Age;
}
_transactionHolder.CommitAndReset();
@@ -158,7 +155,7 @@ public class CompressionBenchmarks
Id = ObjectId.NewObjectId(),
FirstName = $"First_{i}",
LastName = $"Last_{i}",
Age = 20 + (i % 50),
Age = 20 + i % 50,
Bio = includeLargeBio ? BuildBio(i) : $"bio-{i}",
CreatedAt = DateTime.UnixEpoch.AddMinutes(i),
Balance = 100 + i,
@@ -183,7 +180,7 @@ public class CompressionBenchmarks
private static string BuildBio(int seed)
{
var builder = new System.Text.StringBuilder(4500);
var builder = new StringBuilder(4500);
for (var i = 0; i < 150; i++)
{
builder.Append("bio-");
@@ -195,4 +192,4 @@ public class CompressionBenchmarks
return builder.ToString();
}
}
}

View File

@@ -1,21 +1,21 @@
using ZB.MOM.WW.CBDD.Bson;
using System;
using System.Collections.Generic;
namespace ZB.MOM.WW.CBDD.Tests.Benchmark;
public class Address
{
    /// <summary>
    /// Gets or sets the street line of the address.
    /// </summary>
    public string Street { get; set; } = string.Empty;

    /// <summary>
    /// Gets or sets the city name.
    /// </summary>
    public string City { get; set; } = string.Empty;

    /// <summary>
    /// Gets or sets the postal (ZIP) code.
    /// </summary>
    public string ZipCode { get; set; } = string.Empty;
}
@@ -23,19 +23,22 @@ public class Address
public class WorkHistory
{
    /// <summary>
    /// Gets or sets the employer company name.
    /// </summary>
    public string CompanyName { get; set; } = string.Empty;

    /// <summary>
    /// Gets or sets the job title held at the company.
    /// </summary>
    public string Title { get; set; } = string.Empty;

    /// <summary>
    /// Gets or sets the duration of the position in whole years.
    /// </summary>
    public int DurationYears { get; set; }

    /// <summary>
    /// Gets or sets the free-form tags associated with this position.
    /// </summary>
    public List<string> Tags { get; set; } = new();
}
@@ -43,41 +46,48 @@ public class WorkHistory
public class Person
{
    /// <summary>
    /// Gets or sets the document identifier.
    /// </summary>
    public ObjectId Id { get; set; }

    /// <summary>
    /// Gets or sets the first name.
    /// </summary>
    public string FirstName { get; set; } = string.Empty;

    /// <summary>
    /// Gets or sets the last name.
    /// </summary>
    public string LastName { get; set; } = string.Empty;

    /// <summary>
    /// Gets or sets the age in years.
    /// </summary>
    public int Age { get; set; }

    /// <summary>
    /// Gets or sets the biography text; null when no biography is stored.
    /// </summary>
    public string? Bio { get; set; } = string.Empty;

    /// <summary>
    /// Gets or sets the creation timestamp.
    /// </summary>
    public DateTime CreatedAt { get; set; }

    // Complex fields used to stress nested-document serialization in benchmarks.

    /// <summary>
    /// Gets or sets the account balance.
    /// </summary>
    public decimal Balance { get; set; }

    /// <summary>
    /// Gets or sets the home address (nested document).
    /// </summary>
    public Address HomeAddress { get; set; } = new();

    /// <summary>
    /// Gets or sets the employment history entries (nested array of documents).
    /// </summary>
    public List<WorkHistory> EmploymentHistory { get; set; } = new();
}
}

View File

@@ -1,26 +1,30 @@
using ZB.MOM.WW.CBDD.Bson;
using ZB.MOM.WW.CBDD.Core.Collections;
using System.Buffers;
using System.Runtime.InteropServices;
namespace ZB.MOM.WW.CBDD.Tests.Benchmark;
public class PersonMapper : ObjectIdMapperBase<Person>
{
/// <inheritdoc />
public override string CollectionName => "people";
/// <inheritdoc />
public override ObjectId GetId(Person entity) => entity.Id;
/// <inheritdoc />
public override void SetId(Person entity, ObjectId id) => entity.Id = id;
/// <inheritdoc />
public override int Serialize(Person entity, BsonSpanWriter writer)
{
var sizePos = writer.BeginDocument();
public class PersonMapper : ObjectIdMapperBase<Person>
{
/// <inheritdoc />
public override string CollectionName => "people";
/// <inheritdoc />
public override ObjectId GetId(Person entity)
{
return entity.Id;
}
/// <inheritdoc />
public override void SetId(Person entity, ObjectId id)
{
entity.Id = id;
}
/// <inheritdoc />
public override int Serialize(Person entity, BsonSpanWriter writer)
{
int sizePos = writer.BeginDocument();
writer.WriteObjectId("_id", entity.Id);
writer.WriteString("firstname", entity.FirstName);
writer.WriteString("lastname", entity.LastName);
@@ -30,111 +34,119 @@ public class PersonMapper : ObjectIdMapperBase<Person>
else
writer.WriteNull("bio");
writer.WriteInt64("createdat", entity.CreatedAt.Ticks);
// Complex fields
writer.WriteDouble("balance", (double)entity.Balance);
// Nested Object: Address
var addrPos = writer.BeginDocument("homeaddress");
writer.WriteInt64("createdat", entity.CreatedAt.Ticks);
// Complex fields
writer.WriteDouble("balance", (double)entity.Balance);
// Nested Object: Address
int addrPos = writer.BeginDocument("homeaddress");
writer.WriteString("street", entity.HomeAddress.Street);
writer.WriteString("city", entity.HomeAddress.City);
writer.WriteString("zipcode", entity.HomeAddress.ZipCode);
writer.EndDocument(addrPos);
// Collection: EmploymentHistory
var histPos = writer.BeginArray("employmenthistory");
for (int i = 0; i < entity.EmploymentHistory.Count; i++)
writer.EndDocument(addrPos);
// Collection: EmploymentHistory
int histPos = writer.BeginArray("employmenthistory");
for (var i = 0; i < entity.EmploymentHistory.Count; i++)
{
var item = entity.EmploymentHistory[i];
// Array elements are keys "0", "1", "2"...
var itemPos = writer.BeginDocument(i.ToString());
int itemPos = writer.BeginDocument(i.ToString());
writer.WriteString("companyname", item.CompanyName);
writer.WriteString("title", item.Title);
writer.WriteInt32("durationyears", item.DurationYears);
// Nested Collection: Tags
var tagsPos = writer.BeginArray("tags");
for (int j = 0; j < item.Tags.Count; j++)
{
writer.WriteString(j.ToString(), item.Tags[j]);
}
writer.EndArray(tagsPos);
writer.WriteInt32("durationyears", item.DurationYears);
// Nested Collection: Tags
int tagsPos = writer.BeginArray("tags");
for (var j = 0; j < item.Tags.Count; j++) writer.WriteString(j.ToString(), item.Tags[j]);
writer.EndArray(tagsPos);
writer.EndDocument(itemPos);
}
writer.EndArray(histPos);
writer.EndDocument(sizePos);
writer.EndArray(histPos);
writer.EndDocument(sizePos);
return writer.Position;
}
/// <inheritdoc />
public override Person Deserialize(BsonSpanReader reader)
{
var person = new Person();
reader.ReadDocumentSize();
/// <inheritdoc />
public override Person Deserialize(BsonSpanReader reader)
{
var person = new Person();
reader.ReadDocumentSize();
while (reader.Remaining > 0)
{
var type = reader.ReadBsonType();
if (type == BsonType.EndOfDocument)
break;
var name = reader.ReadElementHeader();
break;
string name = reader.ReadElementHeader();
switch (name)
{
case "_id": person.Id = reader.ReadObjectId(); break;
case "firstname": person.FirstName = reader.ReadString(); break;
case "lastname": person.LastName = reader.ReadString(); break;
case "age": person.Age = reader.ReadInt32(); break;
case "bio":
case "bio":
if (type == BsonType.Null) person.Bio = null;
else person.Bio = reader.ReadString();
else person.Bio = reader.ReadString();
break;
case "createdat": person.CreatedAt = new DateTime(reader.ReadInt64()); break;
case "balance": person.Balance = (decimal)reader.ReadDouble(); break;
case "balance": person.Balance = (decimal)reader.ReadDouble(); break;
case "homeaddress":
reader.ReadDocumentSize(); // Enter document
while (reader.Remaining > 0)
{
var addrType = reader.ReadBsonType();
if (addrType == BsonType.EndOfDocument) break;
var addrName = reader.ReadElementHeader();
// We assume strict schema for benchmark speed, but should handle skipping
string addrName = reader.ReadElementHeader();
// We assume strict schema for benchmark speed, but should handle skipping
if (addrName == "street") person.HomeAddress.Street = reader.ReadString();
else if (addrName == "city") person.HomeAddress.City = reader.ReadString();
else if (addrName == "zipcode") person.HomeAddress.ZipCode = reader.ReadString();
else reader.SkipValue(addrType);
}
break;
break;
case "employmenthistory":
reader.ReadDocumentSize(); // Enter Array
while (reader.Remaining > 0)
{
var arrType = reader.ReadBsonType();
if (arrType == BsonType.EndOfDocument) break;
reader.ReadElementHeader(); // Array index "0", "1"... ignore
// Read WorkHistory item
reader.ReadElementHeader(); // Array index "0", "1"... ignore
// Read WorkHistory item
var workItem = new WorkHistory();
reader.ReadDocumentSize(); // Enter Item Document
while (reader.Remaining > 0)
{
var itemType = reader.ReadBsonType();
if (itemType == BsonType.EndOfDocument) break;
var itemName = reader.ReadElementHeader();
if (itemName == "companyname") workItem.CompanyName = reader.ReadString();
else if (itemName == "title") workItem.Title = reader.ReadString();
else if (itemName == "durationyears") workItem.DurationYears = reader.ReadInt32();
string itemName = reader.ReadElementHeader();
if (itemName == "companyname")
{
workItem.CompanyName = reader.ReadString();
}
else if (itemName == "title")
{
workItem.Title = reader.ReadString();
}
else if (itemName == "durationyears")
{
workItem.DurationYears = reader.ReadInt32();
}
else if (itemName == "tags")
{
reader.ReadDocumentSize(); // Enter Tags Array
@@ -149,18 +161,23 @@ public class PersonMapper : ObjectIdMapperBase<Person>
reader.SkipValue(tagType);
}
}
else reader.SkipValue(itemType);
else
{
reader.SkipValue(itemType);
}
}
person.EmploymentHistory.Add(workItem);
}
break;
break;
default:
reader.SkipValue(type);
break;
}
}
}
return person;
}
}
}

View File

@@ -1,4 +1,3 @@
using ZB.MOM.WW.CBDD.Core;
using ZB.MOM.WW.CBDD.Core.Storage;
using ZB.MOM.WW.CBDD.Core.Transactions;
@@ -11,7 +10,7 @@ internal sealed class BenchmarkTransactionHolder : ITransactionHolder, IDisposab
private ITransaction? _currentTransaction;
/// <summary>
/// Initializes a new instance of the <see cref="BenchmarkTransactionHolder"/> class.
/// Initializes a new instance of the <see cref="BenchmarkTransactionHolder" /> class.
/// </summary>
/// <param name="storage">The storage engine used to create transactions.</param>
public BenchmarkTransactionHolder(StorageEngine storage)
@@ -20,7 +19,15 @@ internal sealed class BenchmarkTransactionHolder : ITransactionHolder, IDisposab
}
/// <summary>
/// Gets the current active transaction or starts a new one.
/// Disposes this holder and rolls back any outstanding transaction.
/// </summary>
public void Dispose()
{
RollbackAndReset();
}
/// <summary>
/// Gets the current active transaction or starts a new one.
/// </summary>
/// <returns>The current active transaction.</returns>
public ITransaction GetCurrentTransactionOrStart()
@@ -28,16 +35,14 @@ internal sealed class BenchmarkTransactionHolder : ITransactionHolder, IDisposab
lock (_sync)
{
if (_currentTransaction == null || _currentTransaction.State != TransactionState.Active)
{
_currentTransaction = _storage.BeginTransaction();
}
return _currentTransaction;
}
}
/// <summary>
/// Gets the current active transaction or starts a new one asynchronously.
/// Gets the current active transaction or starts a new one asynchronously.
/// </summary>
/// <returns>A task that returns the current active transaction.</returns>
public Task<ITransaction> GetCurrentTransactionOrStartAsync()
@@ -46,22 +51,17 @@ internal sealed class BenchmarkTransactionHolder : ITransactionHolder, IDisposab
}
/// <summary>
/// Commits the current transaction when active and clears the holder.
/// Commits the current transaction when active and clears the holder.
/// </summary>
public void CommitAndReset()
{
lock (_sync)
{
if (_currentTransaction == null)
{
return;
}
if (_currentTransaction == null) return;
if (_currentTransaction.State == TransactionState.Active ||
_currentTransaction.State == TransactionState.Preparing)
{
_currentTransaction.Commit();
}
_currentTransaction.Dispose();
_currentTransaction = null;
@@ -69,33 +69,20 @@ internal sealed class BenchmarkTransactionHolder : ITransactionHolder, IDisposab
}
/// <summary>
/// Rolls back the current transaction when active and clears the holder.
/// Rolls back the current transaction when active and clears the holder.
/// </summary>
public void RollbackAndReset()
{
lock (_sync)
{
if (_currentTransaction == null)
{
return;
}
if (_currentTransaction == null) return;
if (_currentTransaction.State == TransactionState.Active ||
_currentTransaction.State == TransactionState.Preparing)
{
_currentTransaction.Rollback();
}
_currentTransaction.Dispose();
_currentTransaction = null;
}
}
/// <summary>
/// Disposes this holder and rolls back any outstanding transaction.
/// </summary>
public void Dispose()
{
RollbackAndReset();
}
}
}

View File

@@ -1,5 +1,6 @@
using Microsoft.Extensions.Logging;
using Serilog;
using ILogger = Microsoft.Extensions.Logging.ILogger;
namespace ZB.MOM.WW.CBDD.Tests.Benchmark;
@@ -8,16 +9,16 @@ internal static class Logging
private static readonly Lazy<ILoggerFactory> LoggerFactoryInstance = new(CreateFactory);
/// <summary>
/// Gets the shared logger factory for benchmarks.
/// Gets the shared logger factory for benchmarks.
/// </summary>
public static ILoggerFactory LoggerFactory => LoggerFactoryInstance.Value;
/// <summary>
/// Creates a logger for the specified category type.
/// Creates a logger for the specified category type.
/// </summary>
/// <typeparam name="T">The logger category type.</typeparam>
/// <returns>A logger for <typeparamref name="T"/>.</returns>
public static Microsoft.Extensions.Logging.ILogger CreateLogger<T>()
/// <returns>A logger for <typeparamref name="T" />.</returns>
public static ILogger CreateLogger<T>()
{
return LoggerFactory.CreateLogger<T>();
}
@@ -32,7 +33,7 @@ internal static class Logging
return Microsoft.Extensions.Logging.LoggerFactory.Create(builder =>
{
builder.ClearProviders();
builder.AddSerilog(serilogLogger, dispose: true);
builder.AddSerilog(serilogLogger, true);
});
}
}
}

View File

@@ -3,17 +3,17 @@ using BenchmarkDotNet.Configs;
using BenchmarkDotNet.Exporters;
using BenchmarkDotNet.Reports;
using BenchmarkDotNet.Running;
using Microsoft.Extensions.Logging;
using Perfolizer.Horology;
using Serilog.Context;
namespace ZB.MOM.WW.CBDD.Tests.Benchmark;
class Program
internal class Program
{
static void Main(string[] args)
private static void Main(string[] args)
{
var logger = Logging.CreateLogger<Program>();
var mode = args.Length > 0 ? args[0].Trim().ToLowerInvariant() : string.Empty;
string mode = args.Length > 0 ? args[0].Trim().ToLowerInvariant() : string.Empty;
if (mode == "manual")
{
@@ -84,6 +84,6 @@ class Program
.AddExporter(HtmlExporter.Default)
.WithSummaryStyle(SummaryStyle.Default
.WithRatioStyle(RatioStyle.Trend)
.WithTimeUnit(Perfolizer.Horology.TimeUnit.Microsecond));
.WithTimeUnit(TimeUnit.Microsecond));
}
}
}

View File

@@ -1,18 +1,13 @@
using BenchmarkDotNet.Attributes;
using BenchmarkDotNet.Configs;
using BenchmarkDotNet.Jobs;
using ZB.MOM.WW.CBDD.Bson;
using ZB.MOM.WW.CBDD.Core;
using ZB.MOM.WW.CBDD.Core.Collections;
using ZB.MOM.WW.CBDD.Core.Storage;
using ZB.MOM.WW.CBDD.Core.Transactions;
using Microsoft.Extensions.Logging;
using Serilog.Context;
using System.IO;
using ZB.MOM.WW.CBDD.Bson;
using ZB.MOM.WW.CBDD.Core.Collections;
using ZB.MOM.WW.CBDD.Core.Storage;
namespace ZB.MOM.WW.CBDD.Tests.Benchmark;
[InProcess]
[MemoryDiagnoser]
[GroupBenchmarksBy(BenchmarkLogicalGroupRule.ByCategory)]
@@ -23,33 +18,30 @@ public class InsertBenchmarks
private const int BatchSize = 1000;
private static readonly ILogger Logger = Logging.CreateLogger<InsertBenchmarks>();
private Person[] _batchData = Array.Empty<Person>();
private DocumentCollection<Person>? _collection;
private string _docDbPath = "";
private string _docDbWalPath = "";
private Person? _singlePerson;
private StorageEngine? _storage = null;
private BenchmarkTransactionHolder? _transactionHolder = null;
private DocumentCollection<Person>? _collection = null;
private Person[] _batchData = Array.Empty<Person>();
private Person? _singlePerson = null;
private StorageEngine? _storage;
private BenchmarkTransactionHolder? _transactionHolder;
/// <summary>
/// Tests setup.
/// Tests setup.
/// </summary>
[GlobalSetup]
public void Setup()
{
var temp = AppContext.BaseDirectory;
string temp = AppContext.BaseDirectory;
var id = Guid.NewGuid().ToString("N");
_docDbPath = Path.Combine(temp, $"bench_docdb_{id}.db");
_docDbWalPath = Path.ChangeExtension(_docDbPath, ".wal");
_singlePerson = CreatePerson(0);
_batchData = new Person[BatchSize];
for (int i = 0; i < BatchSize; i++)
{
_batchData[i] = CreatePerson(i);
}
for (var i = 0; i < BatchSize; i++) _batchData[i] = CreatePerson(i);
}
private Person CreatePerson(int i)
@@ -59,7 +51,7 @@ public class InsertBenchmarks
Id = ObjectId.NewObjectId(),
FirstName = $"First_{i}",
LastName = $"Last_{i}",
Age = 20 + (i % 50),
Age = 20 + i % 50,
Bio = null, // Removed large payload to focus on structure
CreatedAt = DateTime.UtcNow,
Balance = 1000.50m * (i + 1),
@@ -72,8 +64,7 @@ public class InsertBenchmarks
};
// Add 10 work history items to stress structure traversal
for (int j = 0; j < 10; j++)
{
for (var j = 0; j < 10; j++)
p.EmploymentHistory.Add(new WorkHistory
{
CompanyName = $"TechCorp_{i}_{j}",
@@ -81,13 +72,12 @@ public class InsertBenchmarks
DurationYears = j,
Tags = new List<string> { "C#", "BSON", "Performance", "Database", "Complex" }
});
}
return p;
}
/// <summary>
/// Tests iteration setup.
/// Tests iteration setup.
/// </summary>
[IterationSetup]
public void IterationSetup()
@@ -98,7 +88,7 @@ public class InsertBenchmarks
}
/// <summary>
/// Tests cleanup.
/// Tests cleanup.
/// </summary>
[IterationCleanup]
public void Cleanup()
@@ -111,7 +101,7 @@ public class InsertBenchmarks
_storage?.Dispose();
_storage = null;
System.Threading.Thread.Sleep(100);
Thread.Sleep(100);
if (File.Exists(_docDbPath)) File.Delete(_docDbPath);
if (File.Exists(_docDbWalPath)) File.Delete(_docDbWalPath);
@@ -125,7 +115,7 @@ public class InsertBenchmarks
// --- Benchmarks ---
/// <summary>
/// Tests document db insert single.
/// Tests document db insert single.
/// </summary>
[Benchmark(Baseline = true, Description = "CBDD Single Insert")]
[BenchmarkCategory("Insert_Single")]
@@ -136,7 +126,7 @@ public class InsertBenchmarks
}
/// <summary>
/// Tests document db insert batch.
/// Tests document db insert batch.
/// </summary>
[Benchmark(Description = "CBDD Batch Insert (1000 items, 1 Txn)")]
[BenchmarkCategory("Insert_Batch")]
@@ -145,4 +135,4 @@ public class InsertBenchmarks
_collection?.InsertBulk(_batchData);
_transactionHolder?.CommitAndReset();
}
}
}

View File

@@ -1,12 +1,8 @@
using BenchmarkDotNet.Attributes;
using BenchmarkDotNet.Configs;
using BenchmarkDotNet.Jobs;
using ZB.MOM.WW.CBDD.Bson;
using ZB.MOM.WW.CBDD.Core;
using ZB.MOM.WW.CBDD.Core.Collections;
using ZB.MOM.WW.CBDD.Core.Storage;
using ZB.MOM.WW.CBDD.Core.Transactions;
using System.IO;
namespace ZB.MOM.WW.CBDD.Tests.Benchmark;
@@ -19,24 +15,24 @@ namespace ZB.MOM.WW.CBDD.Tests.Benchmark;
public class ReadBenchmarks
{
private const int DocCount = 1000;
private DocumentCollection<Person> _collection = null!;
private string _docDbPath = null!;
private string _docDbWalPath = null!;
private StorageEngine _storage = null!;
private BenchmarkTransactionHolder _transactionHolder = null!;
private DocumentCollection<Person> _collection = null!;
private ObjectId[] _ids = null!;
private StorageEngine _storage = null!;
private ObjectId _targetId;
private BenchmarkTransactionHolder _transactionHolder = null!;
/// <summary>
/// Tests setup.
/// Tests setup.
/// </summary>
[GlobalSetup]
public void Setup()
{
var temp = AppContext.BaseDirectory;
string temp = AppContext.BaseDirectory;
var id = Guid.NewGuid().ToString("N");
_docDbPath = Path.Combine(temp, $"bench_read_docdb_{id}.db");
_docDbWalPath = Path.ChangeExtension(_docDbPath, ".wal");
@@ -49,18 +45,19 @@ public class ReadBenchmarks
_collection = new DocumentCollection<Person>(_storage, _transactionHolder, new PersonMapper());
_ids = new ObjectId[DocCount];
for (int i = 0; i < DocCount; i++)
for (var i = 0; i < DocCount; i++)
{
var p = CreatePerson(i);
_ids[i] = _collection.Insert(p);
}
_transactionHolder.CommitAndReset();
_targetId = _ids[DocCount / 2];
}
/// <summary>
/// Tests cleanup.
/// Tests cleanup.
/// </summary>
[GlobalCleanup]
public void Cleanup()
@@ -79,7 +76,7 @@ public class ReadBenchmarks
Id = ObjectId.NewObjectId(),
FirstName = $"First_{i}",
LastName = $"Last_{i}",
Age = 20 + (i % 50),
Age = 20 + i % 50,
Bio = null,
CreatedAt = DateTime.UtcNow,
Balance = 1000.50m * (i + 1),
@@ -92,8 +89,7 @@ public class ReadBenchmarks
};
// Add 10 work history items
for (int j = 0; j < 10; j++)
{
for (var j = 0; j < 10; j++)
p.EmploymentHistory.Add(new WorkHistory
{
CompanyName = $"TechCorp_{i}_{j}",
@@ -101,13 +97,12 @@ public class ReadBenchmarks
DurationYears = j,
Tags = new List<string> { "C#", "BSON", "Performance", "Database", "Complex" }
});
}
return p;
}
/// <summary>
/// Tests document db find by id.
/// Tests document db find by id.
/// </summary>
[Benchmark(Baseline = true, Description = "CBDD FindById")]
[BenchmarkCategory("Read_Single")]
@@ -115,4 +110,4 @@ public class ReadBenchmarks
{
return _collection.FindById(_targetId);
}
}
}

View File

@@ -1,7 +1,8 @@
using System.Collections.Concurrent;
using System.Text.Json;
using BenchmarkDotNet.Attributes;
using BenchmarkDotNet.Configs;
using ZB.MOM.WW.CBDD.Bson;
using System.Text.Json;
namespace ZB.MOM.WW.CBDD.Tests.Benchmark;
@@ -13,32 +14,37 @@ namespace ZB.MOM.WW.CBDD.Tests.Benchmark;
public class SerializationBenchmarks
{
private const int BatchSize = 10000;
private Person _person = null!;
private List<Person> _people = null!;
private PersonMapper _mapper = new PersonMapper();
private static readonly ConcurrentDictionary<string, ushort> _keyMap = new(StringComparer.OrdinalIgnoreCase);
private static readonly ConcurrentDictionary<ushort, string> _keys = new();
private readonly List<byte[]> _bsonDataList = new();
private readonly List<byte[]> _jsonDataList = new();
private readonly PersonMapper _mapper = new();
private byte[] _bsonData = Array.Empty<byte>();
private byte[] _jsonData = Array.Empty<byte>();
private List<byte[]> _bsonDataList = new();
private List<byte[]> _jsonDataList = new();
private byte[] _jsonData = Array.Empty<byte>();
private List<Person> _people = null!;
private Person _person = null!;
private byte[] _serializeBuffer = Array.Empty<byte>();
private static readonly System.Collections.Concurrent.ConcurrentDictionary<string, ushort> _keyMap = new(StringComparer.OrdinalIgnoreCase);
private static readonly System.Collections.Concurrent.ConcurrentDictionary<ushort, string> _keys = new();
static SerializationBenchmarks()
static SerializationBenchmarks()
{
ushort id = 1;
string[] initialKeys = { "_id", "firstname", "lastname", "age", "bio", "createdat", "balance", "homeaddress", "street", "city", "zipcode", "employmenthistory", "companyname", "title", "durationyears", "tags" };
foreach (var key in initialKeys)
string[] initialKeys =
{
"_id", "firstname", "lastname", "age", "bio", "createdat", "balance", "homeaddress", "street", "city",
"zipcode", "employmenthistory", "companyname", "title", "durationyears", "tags"
};
foreach (string key in initialKeys)
{
_keyMap[key] = id;
_keys[id] = key;
id++;
}
// Add some indices for arrays
for (int i = 0; i < 100; i++)
for (var i = 0; i < 100; i++)
{
var s = i.ToString();
_keyMap[s] = id;
@@ -47,26 +53,23 @@ public class SerializationBenchmarks
}
}
/// <summary>
/// Prepares benchmark data for serialization and deserialization scenarios.
/// </summary>
[GlobalSetup]
public void Setup()
/// <summary>
/// Prepares benchmark data for serialization and deserialization scenarios.
/// </summary>
[GlobalSetup]
public void Setup()
{
_person = CreatePerson(0);
_people = new List<Person>(BatchSize);
for (int i = 0; i < BatchSize; i++)
{
_people.Add(CreatePerson(i));
}
// Pre-allocate buffer for BSON serialization
for (var i = 0; i < BatchSize; i++) _people.Add(CreatePerson(i));
// Pre-allocate buffer for BSON serialization
_serializeBuffer = new byte[8192];
var writer = new BsonSpanWriter(_serializeBuffer, _keyMap);
// Single item data
var len = _mapper.Serialize(_person, writer);
int len = _mapper.Serialize(_person, writer);
_bsonData = _serializeBuffer.AsSpan(0, len).ToArray();
_jsonData = JsonSerializer.SerializeToUtf8Bytes(_person);
@@ -87,10 +90,10 @@ public class SerializationBenchmarks
FirstName = $"First_{i}",
LastName = $"Last_{i}",
Age = 25,
Bio = null,
Bio = null,
CreatedAt = DateTime.UtcNow,
Balance = 1000.50m,
HomeAddress = new Address
HomeAddress = new Address
{
Street = $"{i} Main St",
City = "Tech City",
@@ -98,8 +101,7 @@ public class SerializationBenchmarks
}
};
for (int j = 0; j < 10; j++)
{
for (var j = 0; j < 10; j++)
p.EmploymentHistory.Add(new WorkHistory
{
CompanyName = $"TechCorp_{i}_{j}",
@@ -107,58 +109,57 @@ public class SerializationBenchmarks
DurationYears = j,
Tags = new List<string> { "C#", "BSON", "Performance", "Database", "Complex" }
});
}
return p;
}
/// <summary>
/// Benchmarks BSON serialization for a single document.
/// </summary>
[Benchmark(Description = "Serialize Single (BSON)")]
[BenchmarkCategory("Single")]
public void Serialize_Bson()
/// <summary>
/// Benchmarks BSON serialization for a single document.
/// </summary>
[Benchmark(Description = "Serialize Single (BSON)")]
[BenchmarkCategory("Single")]
public void Serialize_Bson()
{
var writer = new BsonSpanWriter(_serializeBuffer, _keyMap);
_mapper.Serialize(_person, writer);
}
/// <summary>
/// Benchmarks JSON serialization for a single document.
/// </summary>
[Benchmark(Description = "Serialize Single (JSON)")]
[BenchmarkCategory("Single")]
public void Serialize_Json()
/// <summary>
/// Benchmarks JSON serialization for a single document.
/// </summary>
[Benchmark(Description = "Serialize Single (JSON)")]
[BenchmarkCategory("Single")]
public void Serialize_Json()
{
JsonSerializer.SerializeToUtf8Bytes(_person);
}
/// <summary>
/// Benchmarks BSON deserialization for a single document.
/// </summary>
[Benchmark(Description = "Deserialize Single (BSON)")]
[BenchmarkCategory("Single")]
public Person Deserialize_Bson()
}
/// <summary>
/// Benchmarks BSON deserialization for a single document.
/// </summary>
[Benchmark(Description = "Deserialize Single (BSON)")]
[BenchmarkCategory("Single")]
public Person Deserialize_Bson()
{
var reader = new BsonSpanReader(_bsonData, _keys);
return _mapper.Deserialize(reader);
}
/// <summary>
/// Benchmarks JSON deserialization for a single document.
/// </summary>
[Benchmark(Description = "Deserialize Single (JSON)")]
[BenchmarkCategory("Single")]
public Person? Deserialize_Json()
/// <summary>
/// Benchmarks JSON deserialization for a single document.
/// </summary>
[Benchmark(Description = "Deserialize Single (JSON)")]
[BenchmarkCategory("Single")]
public Person? Deserialize_Json()
{
return JsonSerializer.Deserialize<Person>(_jsonData);
}
/// <summary>
/// Benchmarks BSON serialization for a list of documents.
/// </summary>
[Benchmark(Description = "Serialize List 10k (BSON loop)")]
[BenchmarkCategory("Batch")]
public void Serialize_List_Bson()
/// <summary>
/// Benchmarks BSON serialization for a list of documents.
/// </summary>
[Benchmark(Description = "Serialize List 10k (BSON loop)")]
[BenchmarkCategory("Batch")]
public void Serialize_List_Bson()
{
foreach (var p in _people)
{
@@ -167,43 +168,37 @@ public class SerializationBenchmarks
}
}
/// <summary>
/// Benchmarks JSON serialization for a list of documents.
/// </summary>
[Benchmark(Description = "Serialize List 10k (JSON loop)")]
[BenchmarkCategory("Batch")]
public void Serialize_List_Json()
/// <summary>
/// Benchmarks JSON serialization for a list of documents.
/// </summary>
[Benchmark(Description = "Serialize List 10k (JSON loop)")]
[BenchmarkCategory("Batch")]
public void Serialize_List_Json()
{
foreach (var p in _people)
{
JsonSerializer.SerializeToUtf8Bytes(p);
}
foreach (var p in _people) JsonSerializer.SerializeToUtf8Bytes(p);
}
/// <summary>
/// Benchmarks BSON deserialization for a list of documents.
/// </summary>
[Benchmark(Description = "Deserialize List 10k (BSON loop)")]
[BenchmarkCategory("Batch")]
public void Deserialize_List_Bson()
/// <summary>
/// Benchmarks BSON deserialization for a list of documents.
/// </summary>
[Benchmark(Description = "Deserialize List 10k (BSON loop)")]
[BenchmarkCategory("Batch")]
public void Deserialize_List_Bson()
{
foreach (var data in _bsonDataList)
foreach (byte[] data in _bsonDataList)
{
var reader = new BsonSpanReader(data, _keys);
_mapper.Deserialize(reader);
}
}
/// <summary>
/// Benchmarks JSON deserialization for a list of documents.
/// </summary>
[Benchmark(Description = "Deserialize List 10k (JSON loop)")]
[BenchmarkCategory("Batch")]
public void Deserialize_List_Json()
/// <summary>
/// Benchmarks JSON deserialization for a list of documents.
/// </summary>
[Benchmark(Description = "Deserialize List 10k (JSON loop)")]
[BenchmarkCategory("Batch")]
public void Deserialize_List_Json()
{
foreach (var data in _jsonDataList)
{
JsonSerializer.Deserialize<Person>(data);
}
foreach (byte[] data in _jsonDataList) JsonSerializer.Deserialize<Person>(data);
}
}
}

View File

@@ -1,4 +1,5 @@
using System.Diagnostics;
using System.IO.Compression;
using Microsoft.Extensions.Logging;
using Serilog.Context;
using ZB.MOM.WW.CBDD.Bson;
@@ -10,47 +11,47 @@ namespace ZB.MOM.WW.CBDD.Tests.Benchmark;
internal static class DatabaseSizeBenchmark
{
private const int BatchSize = 50_000;
private const int ProgressInterval = 1_000_000;
private static readonly int[] TargetCounts = [10_000, 1_000_000, 10_000_000];
private static readonly CompressionOptions CompressedBrotliFast = new()
{
EnableCompression = true,
MinSizeBytes = 256,
MinSavingsPercent = 0,
Codec = CompressionCodec.Brotli,
Level = System.IO.Compression.CompressionLevel.Fastest
Level = CompressionLevel.Fastest
};
private static readonly Scenario[] Scenarios =
[
// Separate compression set (no compaction)
new(
Set: "compression",
Name: "CompressionOnly-Uncompressed",
CompressionOptions: CompressionOptions.Default,
RunCompaction: false),
"compression",
"CompressionOnly-Uncompressed",
CompressionOptions.Default,
false),
new(
Set: "compression",
Name: "CompressionOnly-Compressed-BrotliFast",
CompressionOptions: CompressedBrotliFast,
RunCompaction: false),
"compression",
"CompressionOnly-Compressed-BrotliFast",
CompressedBrotliFast,
false),
// Separate compaction set (compaction enabled)
new(
Set: "compaction",
Name: "Compaction-Uncompressed",
CompressionOptions: CompressionOptions.Default,
RunCompaction: true),
"compaction",
"Compaction-Uncompressed",
CompressionOptions.Default,
true),
new(
Set: "compaction",
Name: "Compaction-Compressed-BrotliFast",
CompressionOptions: CompressedBrotliFast,
RunCompaction: true)
"compaction",
"Compaction-Compressed-BrotliFast",
CompressedBrotliFast,
true)
];
private const int BatchSize = 50_000;
private const int ProgressInterval = 1_000_000;
/// <summary>
/// Tests run.
/// Tests run.
/// </summary>
/// <param name="logger">Logger for benchmark progress and results.</param>
public static void Run(ILogger logger)
@@ -62,109 +63,101 @@ internal static class DatabaseSizeBenchmark
logger.LogInformation("Scenarios: {Scenarios}", string.Join(", ", Scenarios.Select(x => $"{x.Set}:{x.Name}")));
logger.LogInformation("Batch size: {BatchSize:N0}", BatchSize);
foreach (var targetCount in TargetCounts)
foreach (int targetCount in TargetCounts)
foreach (var scenario in Scenarios)
{
foreach (var scenario in Scenarios)
string dbPath = Path.Combine(Path.GetTempPath(),
$"cbdd_size_{scenario.Name}_{targetCount}_{Guid.NewGuid():N}.db");
string walPath = Path.ChangeExtension(dbPath, ".wal");
using var _ = LogContext.PushProperty("TargetCount", targetCount);
using var __ = LogContext.PushProperty("Scenario", scenario.Name);
using var ___ = LogContext.PushProperty("ScenarioSet", scenario.Set);
logger.LogInformation(
"Starting {Set} scenario {Scenario} for target {TargetCount:N0} docs",
scenario.Set,
scenario.Name,
targetCount);
var insertStopwatch = Stopwatch.StartNew();
CompressionStats compressionStats = default;
CompactionStats compactionStats = new();
long preCompactDbBytes;
long preCompactWalBytes;
long postCompactDbBytes;
long postCompactWalBytes;
using (var storage = new StorageEngine(dbPath, PageFileConfig.Default, scenario.CompressionOptions))
using (var transactionHolder = new BenchmarkTransactionHolder(storage))
{
var dbPath = Path.Combine(Path.GetTempPath(), $"cbdd_size_{scenario.Name}_{targetCount}_{Guid.NewGuid():N}.db");
var walPath = Path.ChangeExtension(dbPath, ".wal");
using var _ = LogContext.PushProperty("TargetCount", targetCount);
using var __ = LogContext.PushProperty("Scenario", scenario.Name);
using var ___ = LogContext.PushProperty("ScenarioSet", scenario.Set);
var collection = new DocumentCollection<SizeBenchmarkDocument>(
storage,
transactionHolder,
new SizeBenchmarkDocumentMapper());
logger.LogInformation(
"Starting {Set} scenario {Scenario} for target {TargetCount:N0} docs",
scenario.Set,
scenario.Name,
targetCount);
var insertStopwatch = Stopwatch.StartNew();
CompressionStats compressionStats = default;
CompactionStats compactionStats = new();
long preCompactDbBytes;
long preCompactWalBytes;
long postCompactDbBytes;
long postCompactWalBytes;
using (var storage = new StorageEngine(dbPath, PageFileConfig.Default, scenario.CompressionOptions))
using (var transactionHolder = new BenchmarkTransactionHolder(storage))
var inserted = 0;
while (inserted < targetCount)
{
var collection = new DocumentCollection<SizeBenchmarkDocument>(
storage,
transactionHolder,
new SizeBenchmarkDocumentMapper());
int currentBatchSize = Math.Min(BatchSize, targetCount - inserted);
var documents = new SizeBenchmarkDocument[currentBatchSize];
int baseValue = inserted;
var inserted = 0;
while (inserted < targetCount)
{
var currentBatchSize = Math.Min(BatchSize, targetCount - inserted);
var documents = new SizeBenchmarkDocument[currentBatchSize];
var baseValue = inserted;
for (var i = 0; i < currentBatchSize; i++) documents[i] = CreateDocument(baseValue + i);
for (var i = 0; i < currentBatchSize; i++)
{
documents[i] = CreateDocument(baseValue + i);
}
collection.InsertBulk(documents);
transactionHolder.CommitAndReset();
collection.InsertBulk(documents);
transactionHolder.CommitAndReset();
inserted += currentBatchSize;
if (inserted == targetCount || inserted % ProgressInterval == 0)
{
logger.LogInformation("Inserted {Inserted:N0}/{TargetCount:N0}", inserted, targetCount);
}
}
insertStopwatch.Stop();
preCompactDbBytes = File.Exists(dbPath) ? new FileInfo(dbPath).Length : 0;
preCompactWalBytes = File.Exists(walPath) ? new FileInfo(walPath).Length : 0;
if (scenario.RunCompaction)
{
compactionStats = storage.Compact(new CompactionOptions
{
EnableTailTruncation = true,
DefragmentSlottedPages = true,
NormalizeFreeList = true
});
}
postCompactDbBytes = File.Exists(dbPath) ? new FileInfo(dbPath).Length : 0;
postCompactWalBytes = File.Exists(walPath) ? new FileInfo(walPath).Length : 0;
compressionStats = storage.GetCompressionStats();
inserted += currentBatchSize;
if (inserted == targetCount || inserted % ProgressInterval == 0)
logger.LogInformation("Inserted {Inserted:N0}/{TargetCount:N0}", inserted, targetCount);
}
var result = new SizeResult(
scenario.Set,
scenario.Name,
scenario.RunCompaction,
targetCount,
insertStopwatch.Elapsed,
preCompactDbBytes,
preCompactWalBytes,
postCompactDbBytes,
postCompactWalBytes,
compactionStats,
compressionStats);
results.Add(result);
insertStopwatch.Stop();
preCompactDbBytes = File.Exists(dbPath) ? new FileInfo(dbPath).Length : 0;
preCompactWalBytes = File.Exists(walPath) ? new FileInfo(walPath).Length : 0;
logger.LogInformation(
"Completed {Set}:{Scenario} {TargetCount:N0} docs in {Elapsed}. pre={PreTotal}, post={PostTotal}, shrink={Shrink}, compactApplied={CompactionApplied}, compactReclaim={CompactReclaim}, compRatio={CompRatio}",
scenario.Set,
scenario.Name,
targetCount,
insertStopwatch.Elapsed,
FormatBytes(result.PreCompactTotalBytes),
FormatBytes(result.PostCompactTotalBytes),
FormatBytes(result.ShrinkBytes),
scenario.RunCompaction,
FormatBytes(result.CompactionStats.ReclaimedFileBytes),
result.CompressionRatioText);
if (scenario.RunCompaction)
compactionStats = storage.Compact(new CompactionOptions
{
EnableTailTruncation = true,
DefragmentSlottedPages = true,
NormalizeFreeList = true
});
TryDelete(dbPath);
TryDelete(walPath);
postCompactDbBytes = File.Exists(dbPath) ? new FileInfo(dbPath).Length : 0;
postCompactWalBytes = File.Exists(walPath) ? new FileInfo(walPath).Length : 0;
compressionStats = storage.GetCompressionStats();
}
var result = new SizeResult(
scenario.Set,
scenario.Name,
scenario.RunCompaction,
targetCount,
insertStopwatch.Elapsed,
preCompactDbBytes,
preCompactWalBytes,
postCompactDbBytes,
postCompactWalBytes,
compactionStats,
compressionStats);
results.Add(result);
logger.LogInformation(
"Completed {Set}:{Scenario} {TargetCount:N0} docs in {Elapsed}. pre={PreTotal}, post={PostTotal}, shrink={Shrink}, compactApplied={CompactionApplied}, compactReclaim={CompactReclaim}, compRatio={CompRatio}",
scenario.Set,
scenario.Name,
targetCount,
insertStopwatch.Elapsed,
FormatBytes(result.PreCompactTotalBytes),
FormatBytes(result.PostCompactTotalBytes),
FormatBytes(result.ShrinkBytes),
scenario.RunCompaction,
FormatBytes(result.CompactionStats.ReclaimedFileBytes),
result.CompressionRatioText);
TryDelete(dbPath);
TryDelete(walPath);
}
logger.LogInformation("=== Size Benchmark Summary ===");
@@ -172,7 +165,6 @@ internal static class DatabaseSizeBenchmark
.OrderBy(x => x.Set)
.ThenBy(x => x.TargetCount)
.ThenBy(x => x.Scenario))
{
logger.LogInformation(
"{Set,-11} | {Scenario,-38} | {Count,12:N0} docs | insert={Elapsed,12} | pre={Pre,12} | post={Post,12} | shrink={Shrink,12} | compact={CompactBytes,12} | ratio={Ratio}",
result.Set,
@@ -184,7 +176,6 @@ internal static class DatabaseSizeBenchmark
FormatBytes(result.ShrinkBytes),
FormatBytes(result.CompactionStats.ReclaimedFileBytes),
result.CompressionRatioText);
}
WriteSummaryCsv(results, logger);
}
@@ -201,10 +192,7 @@ internal static class DatabaseSizeBenchmark
private static void TryDelete(string path)
{
if (File.Exists(path))
{
File.Delete(path);
}
if (File.Exists(path)) File.Delete(path);
}
private static string FormatBytes(long bytes)
@@ -224,9 +212,9 @@ internal static class DatabaseSizeBenchmark
private static void WriteSummaryCsv(IEnumerable<SizeResult> results, ILogger logger)
{
var outputDirectory = Path.Combine(Directory.GetCurrentDirectory(), "BenchmarkDotNet.Artifacts", "results");
string outputDirectory = Path.Combine(Directory.GetCurrentDirectory(), "BenchmarkDotNet.Artifacts", "results");
Directory.CreateDirectory(outputDirectory);
var outputPath = Path.Combine(outputDirectory, "DatabaseSizeBenchmark-results.csv");
string outputPath = Path.Combine(outputDirectory, "DatabaseSizeBenchmark-results.csv");
var lines = new List<string>
{
@@ -234,7 +222,6 @@ internal static class DatabaseSizeBenchmark
};
foreach (var result in results.OrderBy(x => x.Set).ThenBy(x => x.TargetCount).ThenBy(x => x.Scenario))
{
lines.Add(string.Join(",",
result.Set,
result.Scenario,
@@ -246,7 +233,6 @@ internal static class DatabaseSizeBenchmark
result.ShrinkBytes.ToString(),
result.CompactionStats.ReclaimedFileBytes.ToString(),
result.CompressionRatioText));
}
File.WriteAllLines(outputPath, lines);
logger.LogInformation("Database size summary CSV written to {OutputPath}", outputPath);
@@ -268,20 +254,22 @@ internal static class DatabaseSizeBenchmark
CompressionStats CompressionStats)
{
/// <summary>
/// Gets or sets the pre compact total bytes.
/// Gets or sets the pre compact total bytes.
/// </summary>
public long PreCompactTotalBytes => PreCompactDbBytes + PreCompactWalBytes;
/// <summary>
/// Gets or sets the post compact total bytes.
/// Gets or sets the post compact total bytes.
/// </summary>
public long PostCompactTotalBytes => PostCompactDbBytes + PostCompactWalBytes;
/// <summary>
/// Gets or sets the shrink bytes.
/// Gets or sets the shrink bytes.
/// </summary>
public long ShrinkBytes => PreCompactTotalBytes - PostCompactTotalBytes;
/// <summary>
/// Gets or sets the compression ratio text.
/// Gets or sets the compression ratio text.
/// </summary>
public string CompressionRatioText =>
CompressionStats.BytesAfterCompression > 0
@@ -292,15 +280,17 @@ internal static class DatabaseSizeBenchmark
private sealed class SizeBenchmarkDocument
{
/// <summary>
/// Gets or sets the id.
/// Gets or sets the id.
/// </summary>
public ObjectId Id { get; set; }
/// <summary>
/// Gets or sets the value.
/// Gets or sets the value.
/// </summary>
public int Value { get; set; }
/// <summary>
/// Gets or sets the name.
/// Gets or sets the name.
/// </summary>
public string Name { get; set; } = string.Empty;
}
@@ -311,15 +301,21 @@ internal static class DatabaseSizeBenchmark
public override string CollectionName => "size_documents";
/// <inheritdoc />
public override ObjectId GetId(SizeBenchmarkDocument entity) => entity.Id;
public override ObjectId GetId(SizeBenchmarkDocument entity)
{
return entity.Id;
}
/// <inheritdoc />
public override void SetId(SizeBenchmarkDocument entity, ObjectId id) => entity.Id = id;
public override void SetId(SizeBenchmarkDocument entity, ObjectId id)
{
entity.Id = id;
}
/// <inheritdoc />
public override int Serialize(SizeBenchmarkDocument entity, BsonSpanWriter writer)
{
var sizePos = writer.BeginDocument();
int sizePos = writer.BeginDocument();
writer.WriteObjectId("_id", entity.Id);
writer.WriteInt32("value", entity.Value);
writer.WriteString("name", entity.Name);
@@ -336,12 +332,9 @@ internal static class DatabaseSizeBenchmark
while (reader.Remaining > 0)
{
var bsonType = reader.ReadBsonType();
if (bsonType == BsonType.EndOfDocument)
{
break;
}
if (bsonType == BsonType.EndOfDocument) break;
var name = reader.ReadElementHeader();
string name = reader.ReadElementHeader();
switch (name)
{
case "_id":
@@ -362,4 +355,4 @@ internal static class DatabaseSizeBenchmark
return document;
}
}
}
}

View File

@@ -1,5 +1,4 @@
using System.Diagnostics;
using System.IO;
using System.Text;
using Microsoft.Extensions.Logging;
using Serilog.Context;
@@ -8,7 +7,7 @@ namespace ZB.MOM.WW.CBDD.Tests.Benchmark;
public class ManualBenchmark
{
private static StringBuilder _log = new();
private static readonly StringBuilder _log = new();
private static void Log(ILogger logger, string message = "")
{
@@ -16,11 +15,11 @@ public class ManualBenchmark
_log.AppendLine(message);
}
/// <summary>
/// Tests run.
/// </summary>
/// <param name="logger">Logger for benchmark progress and results.</param>
public static void Run(ILogger logger)
/// <summary>
/// Tests run.
/// </summary>
/// <param name="logger">Logger for benchmark progress and results.</param>
public static void Run(ILogger logger)
{
using var _ = LogContext.PushProperty("Benchmark", nameof(ManualBenchmark));
_log.Clear();
@@ -60,10 +59,7 @@ public class ManualBenchmark
try
{
var sw = Stopwatch.StartNew();
for (int i = 0; i < 1000; i++)
{
readBench.DocumentDb_FindById();
}
for (var i = 0; i < 1000; i++) readBench.DocumentDb_FindById();
sw.Stop();
readByIdMs = sw.ElapsedMilliseconds;
Log(logger, $" CBDD FindById x1000: {readByIdMs} ms ({(double)readByIdMs / 1000:F3} ms/op)");
@@ -101,14 +97,11 @@ public class ManualBenchmark
Log(logger, $"FindById x1000: {readByIdMs} ms");
Log(logger, $"Single Insert: {singleInsertMs} ms");
var artifactsDir = Path.Combine(AppContext.BaseDirectory, "BenchmarkDotNet.Artifacts", "results");
if (!Directory.Exists(artifactsDir))
{
Directory.CreateDirectory(artifactsDir);
}
string artifactsDir = Path.Combine(AppContext.BaseDirectory, "BenchmarkDotNet.Artifacts", "results");
if (!Directory.Exists(artifactsDir)) Directory.CreateDirectory(artifactsDir);
var filePath = Path.Combine(artifactsDir, "manual_report.txt");
string filePath = Path.Combine(artifactsDir, "manual_report.txt");
File.WriteAllText(filePath, _log.ToString());
logger.LogInformation("Report saved to: {FilePath}", filePath);
}
}
}

View File

@@ -1,6 +1,6 @@
using System.Text;
using BenchmarkDotNet.Attributes;
using BenchmarkDotNet.Configs;
using BenchmarkDotNet.Jobs;
using ZB.MOM.WW.CBDD.Bson;
using ZB.MOM.WW.CBDD.Core.Collections;
using ZB.MOM.WW.CBDD.Core.Compression;
@@ -16,28 +16,29 @@ namespace ZB.MOM.WW.CBDD.Tests.Benchmark;
[JsonExporterAttribute.Full]
public class MixedWorkloadBenchmarks
{
private readonly List<ObjectId> _activeIds = [];
private DocumentCollection<Person> _collection = null!;
private string _dbPath = string.Empty;
private int _nextValueSeed;
private StorageEngine _storage = null!;
private BenchmarkTransactionHolder _transactionHolder = null!;
private string _walPath = string.Empty;
/// <summary>
/// Gets or sets whether periodic online compaction is enabled.
/// Gets or sets whether periodic online compaction is enabled.
/// </summary>
[Params(false, true)]
public bool PeriodicCompaction { get; set; }
/// <summary>
/// Gets or sets the number of operations per benchmark iteration.
/// Gets or sets the number of operations per benchmark iteration.
/// </summary>
[Params(800)]
public int Operations { get; set; }
private string _dbPath = string.Empty;
private string _walPath = string.Empty;
private StorageEngine _storage = null!;
private BenchmarkTransactionHolder _transactionHolder = null!;
private DocumentCollection<Person> _collection = null!;
private readonly List<ObjectId> _activeIds = [];
private int _nextValueSeed;
/// <summary>
/// Prepares benchmark storage and seed data for each iteration.
/// Prepares benchmark storage and seed data for each iteration.
/// </summary>
[IterationSetup]
public void Setup()
@@ -71,7 +72,7 @@ public class MixedWorkloadBenchmarks
}
/// <summary>
/// Cleans up benchmark resources for each iteration.
/// Cleans up benchmark resources for each iteration.
/// </summary>
[IterationCleanup]
public void Cleanup()
@@ -84,7 +85,7 @@ public class MixedWorkloadBenchmarks
}
/// <summary>
/// Benchmarks a mixed insert/update/delete workload.
/// Benchmarks a mixed insert/update/delete workload.
/// </summary>
[Benchmark(Baseline = true)]
[BenchmarkCategory("MixedWorkload")]
@@ -94,7 +95,7 @@ public class MixedWorkloadBenchmarks
for (var i = 1; i <= Operations; i++)
{
var mode = i % 5;
int mode = i % 5;
if (mode is 0 or 1)
{
var id = _collection.Insert(CreatePerson(_nextValueSeed++));
@@ -104,7 +105,7 @@ public class MixedWorkloadBenchmarks
{
if (_activeIds.Count > 0)
{
var idx = random.Next(_activeIds.Count);
int idx = random.Next(_activeIds.Count);
var id = _activeIds[idx];
var current = _collection.FindById(id);
if (current != null)
@@ -119,20 +120,16 @@ public class MixedWorkloadBenchmarks
{
if (_activeIds.Count > 100)
{
var idx = random.Next(_activeIds.Count);
int idx = random.Next(_activeIds.Count);
var id = _activeIds[idx];
_collection.Delete(id);
_activeIds.RemoveAt(idx);
}
}
if (i % 50 == 0)
{
_transactionHolder.CommitAndReset();
}
if (i % 50 == 0) _transactionHolder.CommitAndReset();
if (PeriodicCompaction && i % 200 == 0)
{
_storage.RunOnlineCompactionPass(new CompactionOptions
{
OnlineMode = true,
@@ -141,7 +138,6 @@ public class MixedWorkloadBenchmarks
MaxOnlineDuration = TimeSpan.FromMilliseconds(120),
EnableTailTruncation = true
});
}
}
_transactionHolder.CommitAndReset();
@@ -155,7 +151,7 @@ public class MixedWorkloadBenchmarks
Id = ObjectId.NewObjectId(),
FirstName = $"First_{seed}",
LastName = $"Last_{seed}",
Age = 18 + (seed % 60),
Age = 18 + seed % 60,
Bio = BuildPayload(seed),
CreatedAt = DateTime.UnixEpoch.AddSeconds(seed),
Balance = seed,
@@ -170,7 +166,7 @@ public class MixedWorkloadBenchmarks
private static string BuildPayload(int seed)
{
var builder = new System.Text.StringBuilder(1800);
var builder = new StringBuilder(1800);
for (var i = 0; i < 64; i++)
{
builder.Append("mixed-");
@@ -182,4 +178,4 @@ public class MixedWorkloadBenchmarks
return builder.ToString();
}
}
}

View File

@@ -1,4 +1,5 @@
using System.IO.Compression;
using System.Text;
using System.Text.Json;
using Microsoft.Extensions.Logging;
using ZB.MOM.WW.CBDD.Bson;
@@ -14,21 +15,21 @@ internal static class PerformanceGateSmoke
private const int CompressionDocumentCount = 1_500;
/// <summary>
/// Runs the performance gate smoke probes and writes a report.
/// Runs the performance gate smoke probes and writes a report.
/// </summary>
/// <param name="logger">The logger.</param>
public static void Run(ILogger logger)
{
var compaction = RunCompactionProbe();
var compressionOff = RunCompressionGcProbe(enableCompression: false);
var compressionOn = RunCompressionGcProbe(enableCompression: true);
var compressionOff = RunCompressionGcProbe(false);
var compressionOn = RunCompressionGcProbe(true);
var report = new PerformanceGateReport(
DateTimeOffset.UtcNow,
compaction,
compressionOff,
compressionOn);
var reportPath = WriteReport(report);
string reportPath = WriteReport(report);
logger.LogInformation("Performance gate smoke report written to {ReportPath}", reportPath);
@@ -52,8 +53,8 @@ internal static class PerformanceGateSmoke
private static CompactionProbeResult RunCompactionProbe()
{
var dbPath = NewDbPath("gate_compaction");
var walPath = Path.ChangeExtension(dbPath, ".wal");
string dbPath = NewDbPath("gate_compaction");
string walPath = Path.ChangeExtension(dbPath, ".wal");
try
{
@@ -62,18 +63,12 @@ internal static class PerformanceGateSmoke
var collection = new DocumentCollection<Person>(storage, transactionHolder, new PersonMapper());
var ids = new List<ObjectId>(CompactionDocumentCount);
for (var i = 0; i < CompactionDocumentCount; i++)
{
ids.Add(collection.Insert(CreatePerson(i, includeLargeBio: true)));
}
for (var i = 0; i < CompactionDocumentCount; i++) ids.Add(collection.Insert(CreatePerson(i, true)));
transactionHolder.CommitAndReset();
storage.Checkpoint();
for (var i = 0; i < ids.Count; i += 3)
{
collection.Delete(ids[i]);
}
for (var i = 0; i < ids.Count; i += 3) collection.Delete(ids[i]);
for (var i = 0; i < ids.Count; i += 5)
{
@@ -117,8 +112,8 @@ internal static class PerformanceGateSmoke
private static CompressionGcProbeResult RunCompressionGcProbe(bool enableCompression)
{
var dbPath = NewDbPath(enableCompression ? "gate_gc_on" : "gate_gc_off");
var walPath = Path.ChangeExtension(dbPath, ".wal");
string dbPath = NewDbPath(enableCompression ? "gate_gc_on" : "gate_gc_off");
string walPath = Path.ChangeExtension(dbPath, ".wal");
var compressionOptions = enableCompression
? new CompressionOptions
{
@@ -140,16 +135,13 @@ internal static class PerformanceGateSmoke
GC.WaitForPendingFinalizers();
GC.Collect();
var g0Before = GC.CollectionCount(0);
var g1Before = GC.CollectionCount(1);
var g2Before = GC.CollectionCount(2);
var allocBefore = GC.GetTotalAllocatedBytes(true);
int g0Before = GC.CollectionCount(0);
int g1Before = GC.CollectionCount(1);
int g2Before = GC.CollectionCount(2);
long allocBefore = GC.GetTotalAllocatedBytes(true);
var ids = new ObjectId[CompressionDocumentCount];
for (var i = 0; i < CompressionDocumentCount; i++)
{
ids[i] = collection.Insert(CreatePerson(i, includeLargeBio: true));
}
for (var i = 0; i < CompressionDocumentCount; i++) ids[i] = collection.Insert(CreatePerson(i, true));
transactionHolder.CommitAndReset();
@@ -166,17 +158,17 @@ internal static class PerformanceGateSmoke
transactionHolder.CommitAndReset();
var readCount = collection.FindAll().Count();
int readCount = collection.FindAll().Count();
transactionHolder.CommitAndReset();
GC.Collect();
GC.WaitForPendingFinalizers();
GC.Collect();
var g0After = GC.CollectionCount(0);
var g1After = GC.CollectionCount(1);
var g2After = GC.CollectionCount(2);
var allocAfter = GC.GetTotalAllocatedBytes(true);
int g0After = GC.CollectionCount(0);
int g1After = GC.CollectionCount(1);
int g2After = GC.CollectionCount(2);
long allocAfter = GC.GetTotalAllocatedBytes(true);
return new CompressionGcProbeResult(
enableCompression,
@@ -198,11 +190,11 @@ internal static class PerformanceGateSmoke
private static string WriteReport(PerformanceGateReport report)
{
var outputDirectory = Path.Combine(Directory.GetCurrentDirectory(), "BenchmarkDotNet.Artifacts", "results");
string outputDirectory = Path.Combine(Directory.GetCurrentDirectory(), "BenchmarkDotNet.Artifacts", "results");
Directory.CreateDirectory(outputDirectory);
var reportPath = Path.Combine(outputDirectory, "PerformanceGateSmoke-report.json");
var json = JsonSerializer.Serialize(report, new JsonSerializerOptions { WriteIndented = true });
string reportPath = Path.Combine(outputDirectory, "PerformanceGateSmoke-report.json");
string json = JsonSerializer.Serialize(report, new JsonSerializerOptions { WriteIndented = true });
File.WriteAllText(reportPath, json);
return reportPath;
}
@@ -214,7 +206,7 @@ internal static class PerformanceGateSmoke
Id = ObjectId.NewObjectId(),
FirstName = $"First_{i}",
LastName = $"Last_{i}",
Age = 20 + (i % 50),
Age = 20 + i % 50,
Bio = includeLargeBio ? BuildBio(i) : $"bio-{i}",
CreatedAt = DateTime.UnixEpoch.AddMinutes(i),
Balance = 100 + i,
@@ -239,7 +231,7 @@ internal static class PerformanceGateSmoke
private static string BuildBio(int seed)
{
var builder = new System.Text.StringBuilder(4500);
var builder = new StringBuilder(4500);
for (var i = 0; i < 150; i++)
{
builder.Append("bio-");
@@ -253,14 +245,13 @@ internal static class PerformanceGateSmoke
}
private static string NewDbPath(string prefix)
=> Path.Combine(Path.GetTempPath(), $"{prefix}_{Guid.NewGuid():N}.db");
{
return Path.Combine(Path.GetTempPath(), $"{prefix}_{Guid.NewGuid():N}.db");
}
private static void TryDelete(string path)
{
if (File.Exists(path))
{
File.Delete(path);
}
if (File.Exists(path)) File.Delete(path);
}
private sealed record PerformanceGateReport(
@@ -284,4 +275,4 @@ internal static class PerformanceGateSmoke
int Gen1Delta,
int Gen2Delta,
long AllocatedBytesDelta);
}
}

View File

@@ -1,25 +1,25 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<OutputType>Exe</OutputType>
<TargetFramework>net10.0</TargetFramework>
<AssemblyName>ZB.MOM.WW.CBDD.Tests.Benchmark</AssemblyName>
<RootNamespace>ZB.MOM.WW.CBDD.Tests.Benchmark</RootNamespace>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<IsPackable>false</IsPackable>
</PropertyGroup>
<PropertyGroup>
<OutputType>Exe</OutputType>
<TargetFramework>net10.0</TargetFramework>
<AssemblyName>ZB.MOM.WW.CBDD.Tests.Benchmark</AssemblyName>
<RootNamespace>ZB.MOM.WW.CBDD.Tests.Benchmark</RootNamespace>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<IsPackable>false</IsPackable>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="BenchmarkDotNet" Version="0.15.8" />
<PackageReference Include="Microsoft.Extensions.Logging" Version="9.0.10" />
<PackageReference Include="Serilog" Version="4.2.0" />
<PackageReference Include="Serilog.Extensions.Logging" Version="8.0.0" />
<PackageReference Include="Serilog.Sinks.Console" Version="6.0.0" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="BenchmarkDotNet" Version="0.15.8"/>
<PackageReference Include="Microsoft.Extensions.Logging" Version="9.0.10"/>
<PackageReference Include="Serilog" Version="4.2.0"/>
<PackageReference Include="Serilog.Extensions.Logging" Version="8.0.0"/>
<PackageReference Include="Serilog.Sinks.Console" Version="6.0.0"/>
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\..\src\CBDD.Core\ZB.MOM.WW.CBDD.Core.csproj" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\..\src\CBDD.Core\ZB.MOM.WW.CBDD.Core.csproj"/>
</ItemGroup>
</Project>

View File

@@ -16,12 +16,12 @@ public class ArchitectureFitnessTests
private const string FacadeProject = "src/CBDD/ZB.MOM.WW.CBDD.csproj";
/// <summary>
/// Executes Solution_DependencyGraph_ShouldRemainAcyclic_AndFollowLayerDirection.
/// Executes Solution_DependencyGraph_ShouldRemainAcyclic_AndFollowLayerDirection.
/// </summary>
[Fact]
public void Solution_DependencyGraph_ShouldRemainAcyclic_AndFollowLayerDirection()
{
var repoRoot = FindRepositoryRoot();
string repoRoot = FindRepositoryRoot();
var projectGraph = LoadSolutionProjectGraph(repoRoot);
// Explicit layer rules
@@ -30,28 +30,27 @@ public class ArchitectureFitnessTests
projectGraph[CoreProject].ShouldBe(new[] { BsonProject });
projectGraph[FacadeProject]
.OrderBy(v => v, StringComparer.Ordinal)
.ShouldBe(new[] { BsonProject, CoreProject, SourceGeneratorsProject }.OrderBy(v => v, StringComparer.Ordinal));
.ShouldBe(new[] { BsonProject, CoreProject, SourceGeneratorsProject }.OrderBy(v => v,
StringComparer.Ordinal));
// Source projects should not depend on tests.
foreach (var kvp in projectGraph.Where(p => p.Key.StartsWith("src/", StringComparison.Ordinal)))
{
kvp.Value.Any(dep => dep.StartsWith("tests/", StringComparison.Ordinal))
.ShouldBeFalse($"{kvp.Key} must not reference test projects.");
}
HasCycle(projectGraph)
.ShouldBeFalse("Project references must remain acyclic.");
}
/// <summary>
/// Executes HighLevelCollectionApi_ShouldNotExpandRawBsonReaderWriterSurface.
/// Executes HighLevelCollectionApi_ShouldNotExpandRawBsonReaderWriterSurface.
/// </summary>
[Fact]
public void HighLevelCollectionApi_ShouldNotExpandRawBsonReaderWriterSurface()
{
var lowLevelTypes = new[] { typeof(BsonSpanReader), typeof(BsonSpanWriter) };
var collectionOffenders = typeof(DocumentCollection<,>)
string[] collectionOffenders = typeof(DocumentCollection<,>)
.GetMethods(BindingFlags.Public | BindingFlags.Instance | BindingFlags.Static | BindingFlags.DeclaredOnly)
.Where(m => lowLevelTypes.Any(t => MethodUsesType(m, t)))
.Select(m => m.Name)
@@ -61,7 +60,7 @@ public class ArchitectureFitnessTests
collectionOffenders.ShouldBeEmpty();
var dbContextOffenders = typeof(DocumentDbContext)
string[] dbContextOffenders = typeof(DocumentDbContext)
.GetMethods(BindingFlags.Public | BindingFlags.Instance | BindingFlags.Static | BindingFlags.DeclaredOnly)
.Where(m => lowLevelTypes.Any(t => MethodUsesType(m, t)))
.Select(m => m.Name)
@@ -72,7 +71,7 @@ public class ArchitectureFitnessTests
}
/// <summary>
/// Executes CollectionAndIndexOrchestration_ShouldUseStoragePortInternally.
/// Executes CollectionAndIndexOrchestration_ShouldUseStoragePortInternally.
/// </summary>
[Fact]
public void CollectionAndIndexOrchestration_ShouldUseStoragePortInternally()
@@ -84,22 +83,23 @@ public class ArchitectureFitnessTests
typeof(BTreeIndex),
typeof(CollectionIndexManager<,>),
typeof(CollectionSecondaryIndex<,>),
typeof(VectorSearchIndex),
typeof(VectorSearchIndex)
};
var fieldOffenders = targetTypes
string[] fieldOffenders = targetTypes
.SelectMany(t => t.GetFields(BindingFlags.Instance | BindingFlags.NonPublic | BindingFlags.Public)
.Where(f => f.FieldType == typeof(StorageEngine))
.Select(f => $"{t.Name}.{f.Name}"))
.OrderBy(v => v)
.ToArray();
fieldOffenders.ShouldBeEmpty("Collection/index orchestration should hold IStorageEngine instead of concrete StorageEngine.");
fieldOffenders.ShouldBeEmpty(
"Collection/index orchestration should hold IStorageEngine instead of concrete StorageEngine.");
}
private static Dictionary<string, List<string>> LoadSolutionProjectGraph(string repoRoot)
{
var solutionPath = Path.Combine(repoRoot, "CBDD.slnx");
string solutionPath = Path.Combine(repoRoot, "CBDD.slnx");
var solutionDoc = XDocument.Load(solutionPath);
var projects = solutionDoc
@@ -115,11 +115,11 @@ public class ArchitectureFitnessTests
_ => new List<string>(),
StringComparer.Ordinal);
foreach (var project in projects)
foreach (string project in projects)
{
var projectFile = Path.Combine(repoRoot, project);
string projectFile = Path.Combine(repoRoot, project);
var projectDoc = XDocument.Load(projectFile);
var projectDir = Path.GetDirectoryName(projectFile)!;
string projectDir = Path.GetDirectoryName(projectFile)!;
var refs = projectDoc
.Descendants()
@@ -127,7 +127,8 @@ public class ArchitectureFitnessTests
.Select(e => e.Attribute("Include")?.Value)
.Where(v => !string.IsNullOrWhiteSpace(v))
.Select(v => v!.Replace('\\', '/'))
.Select(v => NormalizePath(Path.GetRelativePath(repoRoot, Path.GetFullPath(Path.Combine(projectDir, v)))))
.Select(v =>
NormalizePath(Path.GetRelativePath(repoRoot, Path.GetFullPath(Path.Combine(projectDir, v)))))
.Where(projects.Contains)
.Distinct(StringComparer.Ordinal)
.OrderBy(v => v, StringComparer.Ordinal)
@@ -143,30 +144,20 @@ public class ArchitectureFitnessTests
{
var state = graph.Keys.ToDictionary(k => k, _ => 0, StringComparer.Ordinal);
foreach (var node in graph.Keys)
{
foreach (string node in graph.Keys)
if (state[node] == 0 && Visit(node))
{
return true;
}
}
return false;
bool Visit(string node)
{
state[node] = 1; // visiting
foreach (var dep in graph[node])
foreach (string dep in graph[node])
{
if (state[dep] == 1)
{
return true;
}
if (state[dep] == 1) return true;
if (state[dep] == 0 && Visit(dep))
{
return true;
}
if (state[dep] == 0 && Visit(dep)) return true;
}
state[node] = 2; // visited
@@ -176,30 +167,19 @@ public class ArchitectureFitnessTests
private static bool MethodUsesType(MethodInfo method, Type forbidden)
{
if (TypeContains(method.ReturnType, forbidden))
{
return true;
}
if (TypeContains(method.ReturnType, forbidden)) return true;
return method.GetParameters().Any(p => TypeContains(p.ParameterType, forbidden));
}
private static bool TypeContains(Type inspected, Type forbidden)
{
if (inspected == forbidden)
{
return true;
}
if (inspected == forbidden) return true;
if (inspected.HasElementType && inspected.GetElementType() is { } elementType && TypeContains(elementType, forbidden))
{
return true;
}
if (inspected.HasElementType && inspected.GetElementType() is { } elementType &&
TypeContains(elementType, forbidden)) return true;
if (!inspected.IsGenericType)
{
return false;
}
if (!inspected.IsGenericType) return false;
return inspected.GetGenericArguments().Any(t => TypeContains(t, forbidden));
}
@@ -209,11 +189,8 @@ public class ArchitectureFitnessTests
var current = new DirectoryInfo(AppContext.BaseDirectory);
while (current != null)
{
var solutionPath = Path.Combine(current.FullName, "CBDD.slnx");
if (File.Exists(solutionPath))
{
return current.FullName;
}
string solutionPath = Path.Combine(current.FullName, "CBDD.slnx");
if (File.Exists(solutionPath)) return current.FullName;
current = current.Parent;
}
@@ -222,5 +199,7 @@ public class ArchitectureFitnessTests
}
private static string NormalizePath(string path)
=> path.Replace('\\', '/');
}
{
return path.Replace('\\', '/');
}
}

View File

@@ -9,7 +9,7 @@ namespace ZB.MOM.WW.CBDD.Tests;
public class BsonDocumentAndBufferWriterTests
{
/// <summary>
/// Verifies BSON document creation and typed retrieval roundtrip.
/// Verifies BSON document creation and typed retrieval roundtrip.
/// </summary>
[Fact]
public void BsonDocument_Create_And_TryGet_RoundTrip()
@@ -32,10 +32,10 @@ public class BsonDocumentAndBufferWriterTests
var wrapped = new BsonDocument(doc.RawData.ToArray(), reverseMap);
wrapped.TryGetString("name", out var name).ShouldBeTrue();
wrapped.TryGetString("name", out string? name).ShouldBeTrue();
name.ShouldBe("Alice");
wrapped.TryGetInt32("age", out var age).ShouldBeTrue();
wrapped.TryGetInt32("age", out int age).ShouldBeTrue();
age.ShouldBe(32);
wrapped.TryGetObjectId("_id", out var id).ShouldBeTrue();
@@ -46,7 +46,7 @@ public class BsonDocumentAndBufferWriterTests
}
/// <summary>
/// Verifies typed getters return false for missing fields and type mismatches.
/// Verifies typed getters return false for missing fields and type mismatches.
/// </summary>
[Fact]
public void BsonDocument_TryGet_Should_Return_False_For_Missing_Or_Wrong_Type()
@@ -71,7 +71,7 @@ public class BsonDocumentAndBufferWriterTests
}
/// <summary>
/// Verifies the BSON document builder grows its internal buffer for large documents.
/// Verifies the BSON document builder grows its internal buffer for large documents.
/// </summary>
[Fact]
public void BsonDocumentBuilder_Should_Grow_Buffer_When_Document_Is_Large()
@@ -86,21 +86,18 @@ public class BsonDocumentAndBufferWriterTests
}
var builder = new BsonDocumentBuilder(keyMap);
for (int i = 1; i <= 180; i++)
{
builder.AddInt32($"k{i}", i);
}
for (var i = 1; i <= 180; i++) builder.AddInt32($"k{i}", i);
var doc = builder.Build();
doc.Size.ShouldBeGreaterThan(1024);
var wrapped = new BsonDocument(doc.RawData.ToArray(), reverseMap);
wrapped.TryGetInt32("k180", out var value).ShouldBeTrue();
wrapped.TryGetInt32("k180", out int value).ShouldBeTrue();
value.ShouldBe(180);
}
/// <summary>
/// Verifies BSON buffer writer emits expected nested document and array layout.
/// Verifies BSON buffer writer emits expected nested document and array layout.
/// </summary>
[Fact]
public void BsonBufferWriter_Should_Write_Nested_Document_And_Array()
@@ -125,7 +122,7 @@ public class BsonDocumentAndBufferWriterTests
writer.EndDocument(rootSizePos);
int rootEnd = writer.Position;
var bytes = output.WrittenSpan.ToArray();
byte[] bytes = output.WrittenSpan.ToArray();
PatchDocumentSize(bytes, childSizePos, childEnd);
PatchDocumentSize(bytes, arraySizePos, arrayEnd);
PatchDocumentSize(bytes, rootSizePos, rootEnd);
@@ -164,7 +161,7 @@ public class BsonDocumentAndBufferWriterTests
}
/// <summary>
/// Verifies single-byte and C-string span reads operate correctly.
/// Verifies single-byte and C-string span reads operate correctly.
/// </summary>
[Fact]
public void BsonSpanReader_ReadByte_And_ReadCStringSpan_Should_Work()
@@ -172,10 +169,10 @@ public class BsonDocumentAndBufferWriterTests
var singleByteReader = new BsonSpanReader(new byte[] { 0x2A }, new ConcurrentDictionary<ushort, string>());
singleByteReader.ReadByte().ShouldBe((byte)0x2A);
var cstring = Encoding.UTF8.GetBytes("hello\0");
byte[] cstring = Encoding.UTF8.GetBytes("hello\0");
var cstringReader = new BsonSpanReader(cstring, new ConcurrentDictionary<ushort, string>());
var destination = new char[16];
var written = cstringReader.ReadCString(destination);
int written = cstringReader.ReadCString(destination);
new string(destination, 0, written).ShouldBe("hello");
}
@@ -194,4 +191,4 @@ public class BsonDocumentAndBufferWriterTests
{
BinaryPrimitives.WriteInt32LittleEndian(output.AsSpan(sizePosition, 4), endPosition - sizePosition);
}
}
}

View File

@@ -1,39 +1,12 @@
using ZB.MOM.WW.CBDD.Bson;
using ZB.MOM.WW.CBDD.Core.Collections;
using Xunit;
using System.Collections.Generic;
using System;
using System.Linq;
namespace ZB.MOM.WW.CBDD.Tests;
public class BsonSchemaTests
{
public class SimpleEntity
{
/// <summary>
/// Gets or sets the identifier.
/// </summary>
public ObjectId Id { get; set; }
/// <summary>
/// Gets or sets the name.
/// </summary>
public string Name { get; set; } = string.Empty;
/// <summary>
/// Gets or sets the age.
/// </summary>
public int Age { get; set; }
/// <summary>
/// Gets or sets a value indicating whether the entity is active.
/// </summary>
public bool IsActive { get; set; }
}
/// <summary>
/// Verifies schema generation for a simple entity.
/// Verifies schema generation for a simple entity.
/// </summary>
[Fact]
public void GenerateSchema_SimpleEntity()
@@ -53,21 +26,8 @@ public class BsonSchemaTests
ageField.Type.ShouldBe(BsonType.Int32);
}
public class CollectionEntity
{
/// <summary>
/// Gets or sets tags.
/// </summary>
public List<string> Tags { get; set; } = new();
/// <summary>
/// Gets or sets scores.
/// </summary>
public int[] Scores { get; set; } = Array.Empty<int>();
}
/// <summary>
/// Verifies schema generation for collection fields.
/// Verifies schema generation for collection fields.
/// </summary>
[Fact]
public void GenerateSchema_Collections()
@@ -83,16 +43,8 @@ public class BsonSchemaTests
scores.ArrayItemType.ShouldBe(BsonType.Int32);
}
public class NestedEntity
{
/// <summary>
/// Gets or sets the parent entity.
/// </summary>
public SimpleEntity Parent { get; set; } = new();
}
/// <summary>
/// Verifies schema generation for nested document fields.
/// Verifies schema generation for nested document fields.
/// </summary>
[Fact]
public void GenerateSchema_Nested()
@@ -105,16 +57,8 @@ public class BsonSchemaTests
parent.NestedSchema.Fields.ShouldContain(f => f.Name == "_id");
}
public class ComplexCollectionEntity
{
/// <summary>
/// Gets or sets items.
/// </summary>
public List<SimpleEntity> Items { get; set; } = new();
}
/// <summary>
/// Verifies schema generation for collections of complex types.
/// Verifies schema generation for collections of complex types.
/// </summary>
[Fact]
public void GenerateSchema_ComplexCollection()
@@ -133,4 +77,56 @@ public class BsonSchemaTests
items.NestedSchema.ShouldNotBeNull();
items.NestedSchema.Fields.ShouldContain(f => f.Name == "_id");
}
}
public class SimpleEntity
{
/// <summary>
/// Gets or sets the identifier.
/// </summary>
public ObjectId Id { get; set; }
/// <summary>
/// Gets or sets the name.
/// </summary>
public string Name { get; set; } = string.Empty;
/// <summary>
/// Gets or sets the age.
/// </summary>
public int Age { get; set; }
/// <summary>
/// Gets or sets a value indicating whether the entity is active.
/// </summary>
public bool IsActive { get; set; }
}
public class CollectionEntity
{
/// <summary>
/// Gets or sets tags.
/// </summary>
public List<string> Tags { get; set; } = new();
/// <summary>
/// Gets or sets scores.
/// </summary>
public int[] Scores { get; set; } = Array.Empty<int>();
}
public class NestedEntity
{
/// <summary>
/// Gets or sets the parent entity.
/// </summary>
public SimpleEntity Parent { get; set; } = new();
}
public class ComplexCollectionEntity
{
/// <summary>
/// Gets or sets items.
/// </summary>
public List<SimpleEntity> Items { get; set; } = new();
}
}

View File

@@ -1,6 +1,5 @@
using ZB.MOM.WW.CBDD.Bson;
using Xunit;
using System.Collections.Concurrent;
using ZB.MOM.WW.CBDD.Bson;
namespace ZB.MOM.WW.CBDD.Tests;
@@ -10,13 +9,17 @@ public class BsonSpanReaderWriterTests
private readonly ConcurrentDictionary<ushort, string> _keys = new();
/// <summary>
/// Initializes a new instance of the <see cref="BsonSpanReaderWriterTests"/> class.
/// Initializes a new instance of the <see cref="BsonSpanReaderWriterTests" /> class.
/// </summary>
public BsonSpanReaderWriterTests()
{
ushort id = 1;
string[] initialKeys = ["name", "age", "active", "_id", "val", "dec", "timestamp", "int32", "int64", "double", "data", "child", "value", "0", "1"];
foreach (var key in initialKeys)
string[] initialKeys =
[
"name", "age", "active", "_id", "val", "dec", "timestamp", "int32", "int64", "double", "data", "child",
"value", "0", "1"
];
foreach (string key in initialKeys)
{
_keyMap[key] = id;
_keys[id] = key;
@@ -25,7 +28,7 @@ public class BsonSpanReaderWriterTests
}
/// <summary>
/// Tests write and read simple document.
/// Tests write and read simple document.
/// </summary>
[Fact]
public void WriteAndRead_SimpleDocument()
@@ -33,7 +36,7 @@ public class BsonSpanReaderWriterTests
Span<byte> buffer = stackalloc byte[256];
var writer = new BsonSpanWriter(buffer, _keyMap);
var sizePos = writer.BeginDocument();
int sizePos = writer.BeginDocument();
writer.WriteString("name", "John");
writer.WriteInt32("age", 30);
writer.WriteBoolean("active", true);
@@ -42,29 +45,29 @@ public class BsonSpanReaderWriterTests
var documentBytes = buffer[..writer.Position];
var reader = new BsonSpanReader(documentBytes, _keys);
var size = reader.ReadDocumentSize();
int size = reader.ReadDocumentSize();
size.ShouldBe(writer.Position);
var type1 = reader.ReadBsonType();
var name1 = reader.ReadElementHeader();
var value1 = reader.ReadString();
string name1 = reader.ReadElementHeader();
string value1 = reader.ReadString();
type1.ShouldBe(BsonType.String);
name1.ShouldBe("name");
value1.ShouldBe("John");
var type2 = reader.ReadBsonType();
var name2 = reader.ReadElementHeader();
var value2 = reader.ReadInt32();
string name2 = reader.ReadElementHeader();
int value2 = reader.ReadInt32();
type2.ShouldBe(BsonType.Int32);
name2.ShouldBe("age");
value2.ShouldBe(30);
var type3 = reader.ReadBsonType();
var name3 = reader.ReadElementHeader();
var value3 = reader.ReadBoolean();
string name3 = reader.ReadElementHeader();
bool value3 = reader.ReadBoolean();
type3.ShouldBe(BsonType.Boolean);
name3.ShouldBe("active");
@@ -72,7 +75,7 @@ public class BsonSpanReaderWriterTests
}
/// <summary>
/// Tests write and read object id.
/// Tests write and read object id.
/// </summary>
[Fact]
public void WriteAndRead_ObjectId()
@@ -82,7 +85,7 @@ public class BsonSpanReaderWriterTests
var oid = ObjectId.NewObjectId();
var sizePos = writer.BeginDocument();
int sizePos = writer.BeginDocument();
writer.WriteObjectId("_id", oid);
writer.EndDocument(sizePos);
@@ -91,7 +94,7 @@ public class BsonSpanReaderWriterTests
reader.ReadDocumentSize();
var type = reader.ReadBsonType();
var name = reader.ReadElementHeader();
string name = reader.ReadElementHeader();
var readOid = reader.ReadObjectId();
type.ShouldBe(BsonType.ObjectId);
@@ -100,7 +103,7 @@ public class BsonSpanReaderWriterTests
}
/// <summary>
/// Tests read write double.
/// Tests read write double.
/// </summary>
[Fact]
public void ReadWrite_Double()
@@ -112,8 +115,8 @@ public class BsonSpanReaderWriterTests
var reader = new BsonSpanReader(buffer, _keys);
var type = reader.ReadBsonType();
var name = reader.ReadElementHeader();
var val = reader.ReadDouble();
string name = reader.ReadElementHeader();
double val = reader.ReadDouble();
type.ShouldBe(BsonType.Double);
name.ShouldBe("val");
@@ -121,7 +124,7 @@ public class BsonSpanReaderWriterTests
}
/// <summary>
/// Tests read write decimal128 round trip.
/// Tests read write decimal128 round trip.
/// </summary>
[Fact]
public void ReadWrite_Decimal128_RoundTrip()
@@ -129,13 +132,13 @@ public class BsonSpanReaderWriterTests
var buffer = new byte[256];
var writer = new BsonSpanWriter(buffer, _keyMap);
decimal original = 123456.789m;
var original = 123456.789m;
writer.WriteDecimal128("dec", original);
var reader = new BsonSpanReader(buffer, _keys);
var type = reader.ReadBsonType();
var name = reader.ReadElementHeader();
var val = reader.ReadDecimal128();
string name = reader.ReadElementHeader();
decimal val = reader.ReadDecimal128();
type.ShouldBe(BsonType.Decimal128);
name.ShouldBe("dec");
@@ -143,7 +146,7 @@ public class BsonSpanReaderWriterTests
}
/// <summary>
/// Tests write and read date time.
/// Tests write and read date time.
/// </summary>
[Fact]
public void WriteAndRead_DateTime()
@@ -156,7 +159,7 @@ public class BsonSpanReaderWriterTests
var expectedTime = new DateTime(now.Year, now.Month, now.Day,
now.Hour, now.Minute, now.Second, now.Millisecond, DateTimeKind.Utc);
var sizePos = writer.BeginDocument();
int sizePos = writer.BeginDocument();
writer.WriteDateTime("timestamp", expectedTime);
writer.EndDocument(sizePos);
@@ -165,7 +168,7 @@ public class BsonSpanReaderWriterTests
reader.ReadDocumentSize();
var type = reader.ReadBsonType();
var name = reader.ReadElementHeader();
string name = reader.ReadElementHeader();
var readTime = reader.ReadDateTime();
type.ShouldBe(BsonType.DateTime);
@@ -174,7 +177,7 @@ public class BsonSpanReaderWriterTests
}
/// <summary>
/// Tests write and read numeric types.
/// Tests write and read numeric types.
/// </summary>
[Fact]
public void WriteAndRead_NumericTypes()
@@ -182,7 +185,7 @@ public class BsonSpanReaderWriterTests
Span<byte> buffer = stackalloc byte[256];
var writer = new BsonSpanWriter(buffer, _keyMap);
var sizePos = writer.BeginDocument();
int sizePos = writer.BeginDocument();
writer.WriteInt32("int32", int.MaxValue);
writer.WriteInt64("int64", long.MaxValue);
writer.WriteDouble("double", 3.14159);
@@ -207,7 +210,7 @@ public class BsonSpanReaderWriterTests
}
/// <summary>
/// Tests write and read binary.
/// Tests write and read binary.
/// </summary>
[Fact]
public void WriteAndRead_Binary()
@@ -217,7 +220,7 @@ public class BsonSpanReaderWriterTests
byte[] testData = [1, 2, 3, 4, 5];
var sizePos = writer.BeginDocument();
int sizePos = writer.BeginDocument();
writer.WriteBinary("data", testData);
writer.EndDocument(sizePos);
@@ -226,8 +229,8 @@ public class BsonSpanReaderWriterTests
reader.ReadDocumentSize();
var type = reader.ReadBsonType();
var name = reader.ReadElementHeader();
var readData = reader.ReadBinary(out var subtype);
string name = reader.ReadElementHeader();
var readData = reader.ReadBinary(out byte subtype);
type.ShouldBe(BsonType.Binary);
name.ShouldBe("data");
@@ -236,7 +239,7 @@ public class BsonSpanReaderWriterTests
}
/// <summary>
/// Tests write and read nested document.
/// Tests write and read nested document.
/// </summary>
[Fact]
public void WriteAndRead_NestedDocument()
@@ -244,10 +247,10 @@ public class BsonSpanReaderWriterTests
Span<byte> buffer = stackalloc byte[512];
var writer = new BsonSpanWriter(buffer, _keyMap);
var rootSizePos = writer.BeginDocument();
int rootSizePos = writer.BeginDocument();
writer.WriteString("name", "Parent");
var childSizePos = writer.BeginDocument("child");
int childSizePos = writer.BeginDocument("child");
writer.WriteString("name", "Child");
writer.WriteInt32("value", 42);
writer.EndDocument(childSizePos);
@@ -256,7 +259,7 @@ public class BsonSpanReaderWriterTests
var documentBytes = buffer[..writer.Position];
var reader = new BsonSpanReader(documentBytes, _keys);
var rootSize = reader.ReadDocumentSize();
int rootSize = reader.ReadDocumentSize();
rootSize.ShouldBe(writer.Position);
@@ -276,4 +279,4 @@ public class BsonSpanReaderWriterTests
reader.ReadElementHeader().ShouldBe("value");
reader.ReadInt32().ShouldBe(42);
}
}
}

View File

@@ -1,29 +1,34 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using ZB.MOM.WW.CBDD.Core.CDC;
using ZB.MOM.WW.CBDD.Shared;
using Xunit;
namespace ZB.MOM.WW.CBDD.Tests;
public class CdcScalabilityTests : IDisposable
{
private readonly Shared.TestDbContext _db;
private readonly TestDbContext _db;
private readonly string _dbPath;
/// <summary>
/// Initializes a new instance of the <see cref="CdcScalabilityTests"/> class.
/// Initializes a new instance of the <see cref="CdcScalabilityTests" /> class.
/// </summary>
public CdcScalabilityTests()
{
_dbPath = Path.Combine(Path.GetTempPath(), $"cdc_scaling_{Guid.NewGuid()}.db");
_db = new Shared.TestDbContext(_dbPath);
_db = new TestDbContext(_dbPath);
}
/// <summary>
/// Verifies CDC dispatch reaches all registered subscribers.
/// Disposes test resources and removes temporary files.
/// </summary>
public void Dispose()
{
_db.Dispose();
if (File.Exists(_dbPath)) File.Delete(_dbPath);
string wal = Path.ChangeExtension(_dbPath, ".wal");
if (File.Exists(wal)) File.Delete(wal);
}
/// <summary>
/// Verifies CDC dispatch reaches all registered subscribers.
/// </summary>
[Fact]
public async Task Test_Cdc_1000_Subscribers_Receive_Events()
@@ -34,13 +39,10 @@ public class CdcScalabilityTests : IDisposable
var subscriptions = new List<IDisposable>();
// 1. Create 1000 subscribers
for (int i = 0; i < SubscriberCount; i++)
for (var i = 0; i < SubscriberCount; i++)
{
int index = i;
var sub = _db.People.Watch().Subscribe(_ =>
{
Interlocked.Increment(ref eventCounts[index]);
});
var sub = _db.People.Watch().Subscribe(_ => { Interlocked.Increment(ref eventCounts[index]); });
subscriptions.Add(sub);
}
@@ -53,16 +55,13 @@ public class CdcScalabilityTests : IDisposable
await Task.Delay(1000, ct);
// 4. Verify all subscribers received both events
for (int i = 0; i < SubscriberCount; i++)
{
eventCounts[i].ShouldBe(2);
}
for (var i = 0; i < SubscriberCount; i++) eventCounts[i].ShouldBe(2);
foreach (var sub in subscriptions) sub.Dispose();
}
/// <summary>
/// Verifies a slow subscriber does not block other subscribers.
/// Verifies a slow subscriber does not block other subscribers.
/// </summary>
[Fact(Skip = "Performance test - run manually when needed")]
public async Task Test_Cdc_Slow_Subscriber_Does_Not_Block_Others()
@@ -80,10 +79,7 @@ public class CdcScalabilityTests : IDisposable
});
// 2. Register a fast subscriber
using var fastSub = _db.People.Watch().Subscribe(_ =>
{
Interlocked.Increment(ref fastEventCount);
});
using var fastSub = _db.People.Watch().Subscribe(_ => { Interlocked.Increment(ref fastEventCount); });
// 3. Perform a write
_db.People.Insert(new Person { Id = 1, Name = "John", Age = 30 });
@@ -107,15 +103,4 @@ public class CdcScalabilityTests : IDisposable
await Task.Delay(2500, ct); // Wait for the second one in slow sub to be processed after the first Sleep
slowEventCount.ShouldBe(2);
}
/// <summary>
/// Disposes test resources and removes temporary files.
/// </summary>
public void Dispose()
{
_db.Dispose();
if (File.Exists(_dbPath)) File.Delete(_dbPath);
var wal = Path.ChangeExtension(_dbPath, ".wal");
if (File.Exists(wal)) File.Delete(wal);
}
}
}

View File

@@ -1,15 +1,8 @@
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using ZB.MOM.WW.CBDD.Bson;
using ZB.MOM.WW.CBDD.Core.CDC;
using ZB.MOM.WW.CBDD.Core.Transactions;
using ZB.MOM.WW.CBDD.Shared;
using Xunit;
namespace ZB.MOM.WW.CBDD.Tests;
@@ -17,33 +10,43 @@ public class CdcTests : IDisposable
{
private static readonly TimeSpan DefaultEventTimeout = TimeSpan.FromSeconds(3);
private static readonly TimeSpan PollInterval = TimeSpan.FromMilliseconds(10);
private readonly TestDbContext _db;
private readonly string _dbPath = $"cdc_test_{Guid.NewGuid()}.db";
private readonly Shared.TestDbContext _db;
/// <summary>
/// Initializes a new instance of the <see cref="CdcTests"/> class.
/// Initializes a new instance of the <see cref="CdcTests" /> class.
/// </summary>
public CdcTests()
{
_db = new Shared.TestDbContext(_dbPath);
_db = new TestDbContext(_dbPath);
}
/// <summary>
/// Verifies that an insert operation publishes a CDC event.
/// Disposes test resources and removes temporary files.
/// </summary>
public void Dispose()
{
_db.Dispose();
if (File.Exists(_dbPath)) File.Delete(_dbPath);
if (File.Exists(_dbPath + "-wal")) File.Delete(_dbPath + "-wal");
}
/// <summary>
/// Verifies that an insert operation publishes a CDC event.
/// </summary>
[Fact]
public async Task Test_Cdc_Basic_Insert_Fires_Event()
{
var ct = TestContext.Current.CancellationToken;
var events = new ConcurrentQueue<ChangeStreamEvent<int, Person>>();
using var subscription = _db.People.Watch(capturePayload: true).Subscribe(events.Enqueue);
using var subscription = _db.People.Watch(true).Subscribe(events.Enqueue);
var person = new Person { Id = 1, Name = "John", Age = 30 };
_db.People.Insert(person);
_db.SaveChanges();
await WaitForEventCountAsync(events, expectedCount: 1, ct);
await WaitForEventCountAsync(events, 1, ct);
var snapshot = events.ToArray();
snapshot.Length.ShouldBe(1);
@@ -54,20 +57,20 @@ public class CdcTests : IDisposable
}
/// <summary>
/// Verifies payload is omitted when CDC capture payload is disabled.
/// Verifies payload is omitted when CDC capture payload is disabled.
/// </summary>
[Fact]
public async Task Test_Cdc_No_Payload_When_Not_Requested()
{
var ct = TestContext.Current.CancellationToken;
var events = new ConcurrentQueue<ChangeStreamEvent<int, Person>>();
using var subscription = _db.People.Watch(capturePayload: false).Subscribe(events.Enqueue);
using var subscription = _db.People.Watch(false).Subscribe(events.Enqueue);
var person = new Person { Id = 1, Name = "John", Age = 30 };
_db.People.Insert(person);
_db.SaveChanges();
await WaitForEventCountAsync(events, expectedCount: 1, ct);
await WaitForEventCountAsync(events, 1, ct);
var snapshot = events.ToArray();
snapshot.Length.ShouldBe(1);
@@ -75,14 +78,14 @@ public class CdcTests : IDisposable
}
/// <summary>
/// Verifies CDC events are published only for committed changes.
/// Verifies CDC events are published only for committed changes.
/// </summary>
[Fact]
public async Task Test_Cdc_Commit_Only()
{
var ct = TestContext.Current.CancellationToken;
var events = new ConcurrentQueue<ChangeStreamEvent<int, Person>>();
using var subscription = _db.People.Watch(capturePayload: true).Subscribe(events.Enqueue);
using var subscription = _db.People.Watch(true).Subscribe(events.Enqueue);
using (var txn = _db.BeginTransaction())
{
@@ -101,21 +104,21 @@ public class CdcTests : IDisposable
txn.Commit();
}
await WaitForEventCountAsync(events, expectedCount: 1, ct);
await WaitForEventCountAsync(events, 1, ct);
var snapshot = events.ToArray();
snapshot.Length.ShouldBe(1);
snapshot[0].DocumentId.ShouldBe(2);
}
/// <summary>
/// Verifies update and delete operations publish CDC events.
/// Verifies update and delete operations publish CDC events.
/// </summary>
[Fact]
public async Task Test_Cdc_Update_And_Delete()
{
var ct = TestContext.Current.CancellationToken;
var events = new ConcurrentQueue<ChangeStreamEvent<int, Person>>();
using var subscription = _db.People.Watch(capturePayload: true).Subscribe(events.Enqueue);
using var subscription = _db.People.Watch(true).Subscribe(events.Enqueue);
var person = new Person { Id = 1, Name = "John", Age = 30 };
_db.People.Insert(person);
@@ -128,7 +131,7 @@ public class CdcTests : IDisposable
_db.People.Delete(1);
_db.SaveChanges();
await WaitForEventCountAsync(events, expectedCount: 3, ct);
await WaitForEventCountAsync(events, 3, ct);
var snapshot = events.ToArray();
snapshot.Length.ShouldBe(3);
@@ -140,16 +143,6 @@ public class CdcTests : IDisposable
snapshot[2].DocumentId.ShouldBe(1);
}
/// <summary>
/// Disposes test resources and removes temporary files.
/// </summary>
public void Dispose()
{
_db.Dispose();
if (File.Exists(_dbPath)) File.Delete(_dbPath);
if (File.Exists(_dbPath + "-wal")) File.Delete(_dbPath + "-wal");
}
private static async Task WaitForEventCountAsync(
ConcurrentQueue<ChangeStreamEvent<int, Person>> events,
int expectedCount,
@@ -158,10 +151,7 @@ public class CdcTests : IDisposable
var sw = Stopwatch.StartNew();
while (sw.Elapsed < DefaultEventTimeout)
{
if (events.Count >= expectedCount)
{
return;
}
if (events.Count >= expectedCount) return;
await Task.Delay(PollInterval, ct);
}
@@ -174,12 +164,12 @@ public class CdcTests : IDisposable
public static class ObservableExtensions
{
/// <summary>
/// Subscribes to an observable sequence using an action callback.
/// Subscribes to an observable sequence using an action callback.
/// </summary>
/// <typeparam name="T">The event type.</typeparam>
/// <param name="observable">The observable sequence.</param>
/// <param name="onNext">The callback for next events.</param>
/// <returns>An <see cref="IDisposable"/> subscription.</returns>
/// <returns>An <see cref="IDisposable" /> subscription.</returns>
public static IDisposable Subscribe<T>(this IObservable<T> observable, Action<T> onNext)
{
return observable.Subscribe(new AnonymousObserver<T>(onNext));
@@ -190,26 +180,36 @@ public static class ObservableExtensions
private readonly Action<T> _onNext;
/// <summary>
/// Initializes a new instance of the <see cref="AnonymousObserver{T}"/> class.
/// Initializes a new instance of the <see cref="AnonymousObserver{T}" /> class.
/// </summary>
/// <param name="onNext">The callback for next events.</param>
public AnonymousObserver(Action<T> onNext) => _onNext = onNext;
public AnonymousObserver(Action<T> onNext)
{
_onNext = onNext;
}
/// <summary>
/// Handles completion.
/// Handles completion.
/// </summary>
public void OnCompleted() { }
public void OnCompleted()
{
}
/// <summary>
/// Handles an observable error.
/// Handles an observable error.
/// </summary>
/// <param name="error">The observed error.</param>
public void OnError(Exception error) { }
public void OnError(Exception error)
{
}
/// <summary>
/// Handles the next value.
/// Handles the next value.
/// </summary>
/// <param name="value">The observed value.</param>
public void OnNext(T value) => _onNext(value);
public void OnNext(T value)
{
_onNext(value);
}
}
}
}

View File

@@ -1,9 +1,4 @@
using ZB.MOM.WW.CBDD.Core;
using ZB.MOM.WW.CBDD.Core.Collections;
using ZB.MOM.WW.CBDD.Core.Storage;
using ZB.MOM.WW.CBDD.Core.Transactions;
using ZB.MOM.WW.CBDD.Shared;
using Xunit;
namespace ZB.MOM.WW.CBDD.Tests;
@@ -12,7 +7,7 @@ public class AsyncTests : IDisposable
private readonly string _dbPath;
/// <summary>
/// Initializes a new instance of the <see cref="AsyncTests"/> class.
/// Initializes a new instance of the <see cref="AsyncTests" /> class.
/// </summary>
public AsyncTests()
{
@@ -20,7 +15,7 @@ public class AsyncTests : IDisposable
}
/// <summary>
/// Executes Dispose.
/// Executes Dispose.
/// </summary>
public void Dispose()
{
@@ -29,14 +24,14 @@ public class AsyncTests : IDisposable
}
/// <summary>
/// Executes Async_Transaction_Commit_Should_Persist_Data.
/// Executes Async_Transaction_Commit_Should_Persist_Data.
/// </summary>
[Fact]
public async Task Async_Transaction_Commit_Should_Persist_Data()
{
var ct = TestContext.Current.CancellationToken;
using (var db = new Shared.TestDbContext(_dbPath))
using (var db = new TestDbContext(_dbPath))
{
using (var txn = await db.BeginTransactionAsync(ct))
{
@@ -47,7 +42,7 @@ public class AsyncTests : IDisposable
}
// Verify with new storage engine instance
using var db2 = new Shared.TestDbContext(_dbPath);
using var db2 = new TestDbContext(_dbPath);
var doc1 = db2.AsyncDocs.FindById(1);
doc1.ShouldNotBeNull();
doc1.Name.ShouldBe("Async1");
@@ -58,14 +53,14 @@ public class AsyncTests : IDisposable
}
/// <summary>
/// Executes Async_Transaction_Rollback_Should_Discard_Data.
/// Executes Async_Transaction_Rollback_Should_Discard_Data.
/// </summary>
[Fact]
public async Task Async_Transaction_Rollback_Should_Discard_Data()
{
var ct = TestContext.Current.CancellationToken;
using var db = new Shared.TestDbContext(_dbPath);
using var db = new TestDbContext(_dbPath);
using (var txn = await db.BeginTransactionAsync(ct))
{
db.AsyncDocs.Insert(new AsyncDoc { Id = 3, Name = "RollbackMe" });
@@ -76,12 +71,12 @@ public class AsyncTests : IDisposable
}
/// <summary>
/// Executes Bulk_Async_Insert_Should_Persist_Data.
/// Executes Bulk_Async_Insert_Should_Persist_Data.
/// </summary>
[Fact]
public async Task Bulk_Async_Insert_Should_Persist_Data()
{
using var db = new Shared.TestDbContext(_dbPath);
using var db = new TestDbContext(_dbPath);
var docs = Enumerable.Range(1, 100).Select(i => new AsyncDoc { Id = i + 5000, Name = $"Bulk{i}" });
var ids = await db.AsyncDocs.InsertBulkAsync(docs);
@@ -94,23 +89,20 @@ public class AsyncTests : IDisposable
}
/// <summary>
/// Executes Bulk_Async_Update_Should_Persist_Changes.
/// Executes Bulk_Async_Update_Should_Persist_Changes.
/// </summary>
[Fact]
public async Task Bulk_Async_Update_Should_Persist_Changes()
{
using var db = new Shared.TestDbContext(_dbPath);
using var db = new TestDbContext(_dbPath);
// 1. Insert 100 docs
var docs = Enumerable.Range(1, 100).Select(i => new AsyncDoc { Id = i + 6000, Name = $"Original{i}" }).ToList();
await db.AsyncDocs.InsertBulkAsync(docs);
// 2. Update all docs
foreach (var doc in docs)
{
doc.Name = $"Updated{doc.Id - 6000}";
}
foreach (var doc in docs) doc.Name = $"Updated{doc.Id - 6000}";
var count = await db.AsyncDocs.UpdateBulkAsync(docs);
int count = await db.AsyncDocs.UpdateBulkAsync(docs);
count.ShouldBe(100);
@@ -121,23 +113,24 @@ public class AsyncTests : IDisposable
}
/// <summary>
/// Executes High_Concurrency_Async_Commits.
/// Executes High_Concurrency_Async_Commits.
/// </summary>
[Fact]
public async Task High_Concurrency_Async_Commits()
{
var ct = TestContext.Current.CancellationToken;
using var db = new Shared.TestDbContext(Path.Combine(Path.GetTempPath(), $"cbdd_async_concurrency_{Guid.NewGuid()}.db"));
int threadCount = 2;
int docsPerThread = 50;
using var db =
new TestDbContext(Path.Combine(Path.GetTempPath(), $"cbdd_async_concurrency_{Guid.NewGuid()}.db"));
var threadCount = 2;
var docsPerThread = 50;
var tasks = Enumerable.Range(0, threadCount).Select(async i =>
{
// Test mix of implicit and explicit transactions
for (int j = 0; j < docsPerThread; j++)
for (var j = 0; j < docsPerThread; j++)
{
int id = (i * docsPerThread) + j + 8000;
int id = i * docsPerThread + j + 8000;
await db.AsyncDocs.InsertAsync(new AsyncDoc { Id = id, Name = $"Thread{i}_Doc{j}" });
}
});
@@ -146,7 +139,7 @@ public class AsyncTests : IDisposable
await db.SaveChangesAsync(ct);
// Verify count
var count = db.AsyncDocs.Scan(_ => true).Count();
int count = db.AsyncDocs.Scan(_ => true).Count();
count.ShouldBe(threadCount * docsPerThread);
}
}
}

View File

@@ -1,33 +1,27 @@
using ZB.MOM.WW.CBDD.Bson;
using ZB.MOM.WW.CBDD.Core.Collections;
using ZB.MOM.WW.CBDD.Core.Storage;
using ZB.MOM.WW.CBDD.Core.Transactions;
using ZB.MOM.WW.CBDD.Shared;
using ZB.MOM.WW.CBDD.Shared.TestDbContext_TestDbContext_Mappers;
using Xunit;
using static ZB.MOM.WW.CBDD.Tests.SchemaTests;
namespace ZB.MOM.WW.CBDD.Tests;
public class BulkOperationsTests : IDisposable
{
private readonly TestDbContext _dbContext;
private readonly string _dbPath;
private readonly string _walPath;
private readonly Shared.TestDbContext _dbContext;
/// <summary>
/// Initializes a new instance of the <see cref="BulkOperationsTests"/> class.
/// Initializes a new instance of the <see cref="BulkOperationsTests" /> class.
/// </summary>
public BulkOperationsTests()
{
_dbPath = Path.Combine(Path.GetTempPath(), $"test_bulk_{Guid.NewGuid()}.db");
_walPath = Path.Combine(Path.GetTempPath(), $"test_bulk_{Guid.NewGuid()}.wal");
_dbContext = new Shared.TestDbContext(_dbPath);
_dbContext = new TestDbContext(_dbPath);
}
/// <summary>
/// Executes Dispose.
/// Executes Dispose.
/// </summary>
public void Dispose()
{
@@ -35,17 +29,14 @@ public class BulkOperationsTests : IDisposable
}
/// <summary>
/// Executes UpdateBulk_UpdatesMultipleDocuments.
/// Executes UpdateBulk_UpdatesMultipleDocuments.
/// </summary>
[Fact]
public void UpdateBulk_UpdatesMultipleDocuments()
{
// Arrange: Insert 100 users
var users = new List<User>();
for (int i = 0; i < 100; i++)
{
users.Add(new User { Id = ObjectId.NewObjectId(), Name = $"User {i}", Age = 20 });
}
for (var i = 0; i < 100; i++) users.Add(new User { Id = ObjectId.NewObjectId(), Name = $"User {i}", Age = 20 });
_dbContext.Users.InsertBulk(users);
_dbContext.SaveChanges();
@@ -57,7 +48,7 @@ public class BulkOperationsTests : IDisposable
}
// Act
var updatedCount = _dbContext.Users.UpdateBulk(users);
int updatedCount = _dbContext.Users.UpdateBulk(users);
_dbContext.SaveChanges();
// Assert
@@ -74,41 +65,32 @@ public class BulkOperationsTests : IDisposable
}
/// <summary>
/// Executes DeleteBulk_RemovesMultipleDocuments.
/// Executes DeleteBulk_RemovesMultipleDocuments.
/// </summary>
[Fact]
public void DeleteBulk_RemovesMultipleDocuments()
{
// Arrange: Insert 100 users
var users = new List<User>();
for (int i = 0; i < 100; i++)
{
users.Add(new User { Id = ObjectId.NewObjectId(), Name = $"User {i}", Age = 20 });
}
for (var i = 0; i < 100; i++) users.Add(new User { Id = ObjectId.NewObjectId(), Name = $"User {i}", Age = 20 });
_dbContext.Users.InsertBulk(users);
_dbContext.SaveChanges();
var idsToDelete = users.Take(50).Select(u => u.Id).ToList();
// Act
var deletedCount = _dbContext.Users.DeleteBulk(idsToDelete);
int deletedCount = _dbContext.Users.DeleteBulk(idsToDelete);
_dbContext.SaveChanges();
// Assert
deletedCount.ShouldBe(50);
// Verify deleted
foreach (var id in idsToDelete)
{
_dbContext.Users.FindById(id).ShouldBeNull();
}
foreach (var id in idsToDelete) _dbContext.Users.FindById(id).ShouldBeNull();
// Verify remaining
var remaining = users.Skip(50).ToList();
foreach (var u in remaining)
{
_dbContext.Users.FindById(u.Id).ShouldNotBeNull();
}
foreach (var u in remaining) _dbContext.Users.FindById(u.Id).ShouldNotBeNull();
// Verify count
// Note: Count() is not fully implemented efficiently yet (iterates everything), but FindAll().Count() works
@@ -116,7 +98,7 @@ public class BulkOperationsTests : IDisposable
}
/// <summary>
/// Executes DeleteBulk_WithTransaction_Rollworks.
/// Executes DeleteBulk_WithTransaction_Rollworks.
/// </summary>
[Fact]
public void DeleteBulk_WithTransaction_Rollworks()
@@ -137,4 +119,4 @@ public class BulkOperationsTests : IDisposable
// Assert: Should still exist
_dbContext.Users.FindById(user.Id).ShouldNotBeNull();
}
}
}

View File

@@ -1,32 +1,27 @@
using ZB.MOM.WW.CBDD.Bson;
using ZB.MOM.WW.CBDD.Core.Collections;
using ZB.MOM.WW.CBDD.Core.Storage;
using ZB.MOM.WW.CBDD.Core.Transactions;
using ZB.MOM.WW.CBDD.Shared;
using ZB.MOM.WW.CBDD.Shared.TestDbContext_TestDbContext_Mappers;
using Xunit;
namespace ZB.MOM.WW.CBDD.Tests;
public class DocumentCollectionDeleteTests : IDisposable
{
private readonly TestDbContext _dbContext;
private readonly string _dbPath;
private readonly string _walPath;
private readonly Shared.TestDbContext _dbContext;
/// <summary>
/// Initializes a new instance of the <see cref="DocumentCollectionDeleteTests"/> class.
/// Initializes a new instance of the <see cref="DocumentCollectionDeleteTests" /> class.
/// </summary>
public DocumentCollectionDeleteTests()
{
_dbPath = Path.Combine(Path.GetTempPath(), $"test_delete_{Guid.NewGuid()}.db");
_walPath = Path.Combine(Path.GetTempPath(), $"test_delete_{Guid.NewGuid()}.wal");
_dbContext = new Shared.TestDbContext(_dbPath);
_dbContext = new TestDbContext(_dbPath);
}
/// <summary>
/// Releases test resources.
/// Releases test resources.
/// </summary>
public void Dispose()
{
@@ -34,7 +29,7 @@ public class DocumentCollectionDeleteTests : IDisposable
}
/// <summary>
/// Verifies delete removes both the document and its index entry.
/// Verifies delete removes both the document and its index entry.
/// </summary>
[Fact]
public void Delete_RemovesDocumentAndIndexEntry()
@@ -47,7 +42,7 @@ public class DocumentCollectionDeleteTests : IDisposable
_dbContext.Users.FindById(user.Id).ShouldNotBeNull();
// Delete
var deleted = _dbContext.Users.Delete(user.Id);
bool deleted = _dbContext.Users.Delete(user.Id);
_dbContext.SaveChanges();
// Assert
@@ -62,19 +57,19 @@ public class DocumentCollectionDeleteTests : IDisposable
}
/// <summary>
/// Verifies delete returns false for a non-existent document.
/// Verifies delete returns false for a non-existent document.
/// </summary>
[Fact]
public void Delete_NonExistent_ReturnsFalse()
{
var id = ObjectId.NewObjectId();
var deleted = _dbContext.Users.Delete(id);
bool deleted = _dbContext.Users.Delete(id);
_dbContext.SaveChanges();
deleted.ShouldBeFalse();
}
/// <summary>
/// Verifies deletes inside a transaction commit successfully.
/// Verifies deletes inside a transaction commit successfully.
/// </summary>
[Fact]
public void Delete_WithTransaction_CommitsSuccessfully()
@@ -92,4 +87,4 @@ public class DocumentCollectionDeleteTests : IDisposable
// Verify
_dbContext.Users.FindById(user.Id).ShouldBeNull();
}
}
}

View File

@@ -5,20 +5,31 @@ namespace ZB.MOM.WW.CBDD.Tests;
public class DocumentCollectionIndexApiTests : IDisposable
{
private readonly TestDbContext _db;
private readonly string _dbPath;
private readonly Shared.TestDbContext _db;
/// <summary>
/// Initializes a new instance of the <see cref="DocumentCollectionIndexApiTests"/> class.
/// Initializes a new instance of the <see cref="DocumentCollectionIndexApiTests" /> class.
/// </summary>
public DocumentCollectionIndexApiTests()
{
_dbPath = Path.Combine(Path.GetTempPath(), $"collection_index_api_{Guid.NewGuid():N}.db");
_db = new Shared.TestDbContext(_dbPath);
_db = new TestDbContext(_dbPath);
}
/// <summary>
/// Verifies vector index creation and deletion behavior.
/// Disposes test resources and removes temporary files.
/// </summary>
public void Dispose()
{
_db.Dispose();
if (File.Exists(_dbPath)) File.Delete(_dbPath);
string wal = Path.ChangeExtension(_dbPath, ".wal");
if (File.Exists(wal)) File.Delete(wal);
}
/// <summary>
/// Verifies vector index creation and deletion behavior.
/// </summary>
[Fact]
public void CreateVectorIndex_And_DropIndex_Should_Work()
@@ -39,34 +50,23 @@ public class DocumentCollectionIndexApiTests : IDisposable
}
/// <summary>
/// Verifies ensure-index returns existing indexes when already present.
/// Verifies ensure-index returns existing indexes when already present.
/// </summary>
[Fact]
public void EnsureIndex_Should_Return_Existing_Index_When_Already_Present()
{
var first = _db.People.EnsureIndex(p => p.Age, name: "idx_people_age");
var second = _db.People.EnsureIndex(p => p.Age, name: "idx_people_age");
var first = _db.People.EnsureIndex(p => p.Age, "idx_people_age");
var second = _db.People.EnsureIndex(p => p.Age, "idx_people_age");
ReferenceEquals(first, second).ShouldBeTrue();
}
/// <summary>
/// Verifies dropping the primary index name is rejected.
/// Verifies dropping the primary index name is rejected.
/// </summary>
[Fact]
public void DropIndex_Should_Reject_Primary_Index_Name()
{
Should.Throw<InvalidOperationException>(() => _db.People.DropIndex("_id"));
}
/// <summary>
/// Disposes test resources and removes temporary files.
/// </summary>
public void Dispose()
{
_db.Dispose();
if (File.Exists(_dbPath)) File.Delete(_dbPath);
var wal = Path.ChangeExtension(_dbPath, ".wal");
if (File.Exists(wal)) File.Delete(wal);
}
}
}

View File

@@ -1,31 +1,35 @@
using ZB.MOM.WW.CBDD.Bson;
using ZB.MOM.WW.CBDD.Core.Collections;
using ZB.MOM.WW.CBDD.Core.Storage;
using ZB.MOM.WW.CBDD.Core.Transactions;
using ZB.MOM.WW.CBDD.Shared;
using ZB.MOM.WW.CBDD.Shared.TestDbContext_TestDbContext_Mappers;
namespace ZB.MOM.WW.CBDD.Tests;
public class DocumentCollectionTests : IDisposable
{
private readonly TestDbContext _db;
private readonly string _dbPath;
private readonly string _walPath;
private readonly Shared.TestDbContext _db;
/// <summary>
/// Initializes a new instance of the <see cref="DocumentCollectionTests"/> class.
/// Initializes a new instance of the <see cref="DocumentCollectionTests" /> class.
/// </summary>
public DocumentCollectionTests()
{
_dbPath = Path.Combine(Path.GetTempPath(), $"test_collection_{Guid.NewGuid()}.db");
_walPath = Path.Combine(Path.GetTempPath(), $"test_collection_{Guid.NewGuid()}.wal");
_db = new Shared.TestDbContext(_dbPath);
_db = new TestDbContext(_dbPath);
}
/// <summary>
/// Verifies insert and find-by-id operations.
/// Releases test resources.
/// </summary>
public void Dispose()
{
_db?.Dispose();
}
/// <summary>
/// Verifies insert and find-by-id operations.
/// </summary>
[Fact]
public void Insert_And_FindById_Works()
@@ -46,7 +50,7 @@ public class DocumentCollectionTests : IDisposable
}
/// <summary>
/// Verifies find-by-id returns null when no document is found.
/// Verifies find-by-id returns null when no document is found.
/// </summary>
[Fact]
public void FindById_Returns_Null_When_Not_Found()
@@ -59,7 +63,7 @@ public class DocumentCollectionTests : IDisposable
}
/// <summary>
/// Verifies find-all returns all entities.
/// Verifies find-all returns all entities.
/// </summary>
[Fact]
public void FindAll_Returns_All_Entities()
@@ -81,7 +85,7 @@ public class DocumentCollectionTests : IDisposable
}
/// <summary>
/// Verifies update modifies an existing entity.
/// Verifies update modifies an existing entity.
/// </summary>
[Fact]
public void Update_Modifies_Entity()
@@ -93,7 +97,7 @@ public class DocumentCollectionTests : IDisposable
// Act
user.Age = 31;
var updated = _db.Users.Update(user);
bool updated = _db.Users.Update(user);
_db.SaveChanges();
// Assert
@@ -105,7 +109,7 @@ public class DocumentCollectionTests : IDisposable
}
/// <summary>
/// Verifies update returns false when the entity does not exist.
/// Verifies update returns false when the entity does not exist.
/// </summary>
[Fact]
public void Update_Returns_False_When_Not_Found()
@@ -114,7 +118,7 @@ public class DocumentCollectionTests : IDisposable
var user = new User { Id = ObjectId.NewObjectId(), Name = "Ghost", Age = 99 };
// Act
var updated = _db.Users.Update(user);
bool updated = _db.Users.Update(user);
_db.SaveChanges();
// Assert
@@ -122,7 +126,7 @@ public class DocumentCollectionTests : IDisposable
}
/// <summary>
/// Verifies delete removes an entity.
/// Verifies delete removes an entity.
/// </summary>
[Fact]
public void Delete_Removes_Entity()
@@ -133,7 +137,7 @@ public class DocumentCollectionTests : IDisposable
_db.SaveChanges();
// Act
var deleted = _db.Users.Delete(id);
bool deleted = _db.Users.Delete(id);
_db.SaveChanges();
// Assert
@@ -142,13 +146,13 @@ public class DocumentCollectionTests : IDisposable
}
/// <summary>
/// Verifies delete returns false when the entity does not exist.
/// Verifies delete returns false when the entity does not exist.
/// </summary>
[Fact]
public void Delete_Returns_False_When_Not_Found()
{
// Act
var deleted = _db.Users.Delete(ObjectId.NewObjectId());
bool deleted = _db.Users.Delete(ObjectId.NewObjectId());
_db.SaveChanges();
// Assert
@@ -156,7 +160,7 @@ public class DocumentCollectionTests : IDisposable
}
/// <summary>
/// Verifies count returns the correct entity count.
/// Verifies count returns the correct entity count.
/// </summary>
[Fact]
public void Count_Returns_Correct_Count()
@@ -167,14 +171,14 @@ public class DocumentCollectionTests : IDisposable
_db.SaveChanges();
// Act
var count = _db.Users.Count();
int count = _db.Users.Count();
// Assert
count.ShouldBe(2);
}
/// <summary>
/// Verifies predicate queries filter entities correctly.
/// Verifies predicate queries filter entities correctly.
/// </summary>
[Fact]
public void Find_With_Predicate_Filters_Correctly()
@@ -194,7 +198,7 @@ public class DocumentCollectionTests : IDisposable
}
/// <summary>
/// Verifies bulk insert stores multiple entities.
/// Verifies bulk insert stores multiple entities.
/// </summary>
[Fact]
public void InsertBulk_Inserts_Multiple_Entities()
@@ -217,7 +221,7 @@ public class DocumentCollectionTests : IDisposable
}
/// <summary>
/// Verifies inserts preserve an explicitly assigned identifier.
/// Verifies inserts preserve an explicitly assigned identifier.
/// </summary>
[Fact]
public void Insert_With_SpecifiedId_RetainsId()
@@ -238,12 +242,4 @@ public class DocumentCollectionTests : IDisposable
found.Id.ShouldBe(id);
found.Name.ShouldBe("SpecifiedID");
}
/// <summary>
/// Releases test resources.
/// </summary>
public void Dispose()
{
_db?.Dispose();
}
}
}

View File

@@ -1,29 +1,24 @@
using ZB.MOM.WW.CBDD.Core;
using ZB.MOM.WW.CBDD.Core.Collections;
using ZB.MOM.WW.CBDD.Core.Storage;
using ZB.MOM.WW.CBDD.Core.Transactions;
using ZB.MOM.WW.CBDD.Bson;
using ZB.MOM.WW.CBDD.Shared;
using ZB.MOM.WW.CBDD.Shared.TestDbContext_TestDbContext_Mappers;
using Xunit;
namespace ZB.MOM.WW.CBDD.Tests;
public class InsertBulkTests : IDisposable
{
private readonly TestDbContext _db;
private readonly string _testFile;
private readonly Shared.TestDbContext _db;
/// <summary>
/// Initializes a new instance of the <see cref="InsertBulkTests"/> class.
/// Initializes a new instance of the <see cref="InsertBulkTests" /> class.
/// </summary>
public InsertBulkTests()
{
_testFile = Path.GetTempFileName();
_db = new Shared.TestDbContext(_testFile);
_db = new TestDbContext(_testFile);
}
/// <summary>
/// Disposes test resources.
/// Disposes test resources.
/// </summary>
public void Dispose()
{
@@ -31,16 +26,13 @@ public class InsertBulkTests : IDisposable
}
/// <summary>
/// Verifies bulk inserts are immediately persisted and visible.
/// Verifies bulk inserts are immediately persisted and visible.
/// </summary>
[Fact]
public void InsertBulk_PersistsData_ImmediatelyVisible()
{
var users = new List<User>();
for (int i = 0; i < 50; i++)
{
users.Add(new User { Id = ZB.MOM.WW.CBDD.Bson.ObjectId.NewObjectId(), Name = $"User {i}", Age = 20 });
}
for (var i = 0; i < 50; i++) users.Add(new User { Id = ObjectId.NewObjectId(), Name = $"User {i}", Age = 20 });
_db.Users.InsertBulk(users);
_db.SaveChanges();
@@ -51,21 +43,23 @@ public class InsertBulkTests : IDisposable
}
/// <summary>
/// Verifies bulk inserts spanning multiple pages persist correctly.
/// Verifies bulk inserts spanning multiple pages persist correctly.
/// </summary>
[Fact]
public void InsertBulk_SpanningMultiplePages_PersistsCorrectly()
{
// 16KB page. User ~50 bytes. 400 users -> ~20KB -> 2 pages.
var users = new List<User>();
for (int i = 0; i < 400; i++)
{
users.Add(new User { Id = ZB.MOM.WW.CBDD.Bson.ObjectId.NewObjectId(), Name = $"User {i} with some long padding text to ensure we fill space {new string('x', 50)}", Age = 20 });
}
for (var i = 0; i < 400; i++)
users.Add(new User
{
Id = ObjectId.NewObjectId(),
Name = $"User {i} with some long padding text to ensure we fill space {new string('x', 50)}", Age = 20
});
_db.Users.InsertBulk(users);
_db.SaveChanges();
_db.Users.Count().ShouldBe(400);
}
}
}

View File

@@ -1,25 +1,24 @@
using ZB.MOM.WW.CBDD.Bson;
using ZB.MOM.WW.CBDD.Core.Collections;
using ZB.MOM.WW.CBDD.Shared;
namespace ZB.MOM.WW.CBDD.Tests;
public class SetMethodTests : IDisposable
{
private readonly TestDbContext _db;
private readonly string _dbPath;
private readonly Shared.TestDbContext _db;
/// <summary>
/// Initializes a new instance of the <see cref="SetMethodTests"/> class.
/// Initializes a new instance of the <see cref="SetMethodTests" /> class.
/// </summary>
public SetMethodTests()
{
_dbPath = Path.Combine(Path.GetTempPath(), $"cbdd_set_{Guid.NewGuid()}.db");
_db = new Shared.TestDbContext(_dbPath);
_db = new TestDbContext(_dbPath);
}
/// <summary>
/// Disposes the resources used by this instance.
/// Disposes the resources used by this instance.
/// </summary>
public void Dispose()
{
@@ -28,7 +27,7 @@ public class SetMethodTests : IDisposable
}
/// <summary>
/// Tests set object id returns correct collection.
/// Tests set object id returns correct collection.
/// </summary>
[Fact]
public void Set_ObjectId_ReturnsCorrectCollection()
@@ -39,7 +38,7 @@ public class SetMethodTests : IDisposable
}
/// <summary>
/// Tests set shorthand returns correct collection.
/// Tests set shorthand returns correct collection.
/// </summary>
[Fact]
public void Set_Shorthand_ReturnsCorrectCollection()
@@ -50,7 +49,7 @@ public class SetMethodTests : IDisposable
}
/// <summary>
/// Tests set int returns correct collection.
/// Tests set int returns correct collection.
/// </summary>
[Fact]
public void Set_Int_ReturnsCorrectCollection()
@@ -61,7 +60,7 @@ public class SetMethodTests : IDisposable
}
/// <summary>
/// Tests set string returns correct collection.
/// Tests set string returns correct collection.
/// </summary>
[Fact]
public void Set_String_ReturnsCorrectCollection()
@@ -72,7 +71,7 @@ public class SetMethodTests : IDisposable
}
/// <summary>
/// Tests set guid returns correct collection.
/// Tests set guid returns correct collection.
/// </summary>
[Fact]
public void Set_Guid_ReturnsCorrectCollection()
@@ -83,7 +82,7 @@ public class SetMethodTests : IDisposable
}
/// <summary>
/// Tests set custom key returns correct collection.
/// Tests set custom key returns correct collection.
/// </summary>
[Fact]
public void Set_CustomKey_ReturnsCorrectCollection()
@@ -94,7 +93,7 @@ public class SetMethodTests : IDisposable
}
/// <summary>
/// Tests set all object id collections return correct instances.
/// Tests set all object id collections return correct instances.
/// </summary>
[Fact]
public void Set_AllObjectIdCollections_ReturnCorrectInstances()
@@ -110,7 +109,7 @@ public class SetMethodTests : IDisposable
}
/// <summary>
/// Tests set all int collections return correct instances.
/// Tests set all int collections return correct instances.
/// </summary>
[Fact]
public void Set_AllIntCollections_ReturnCorrectInstances()
@@ -123,7 +122,7 @@ public class SetMethodTests : IDisposable
}
/// <summary>
/// Tests set string key collections return correct instances.
/// Tests set string key collections return correct instances.
/// </summary>
[Fact]
public void Set_StringKeyCollections_ReturnCorrectInstances()
@@ -132,7 +131,7 @@ public class SetMethodTests : IDisposable
}
/// <summary>
/// Tests set unregistered entity throws invalid operation exception.
/// Tests set unregistered entity throws invalid operation exception.
/// </summary>
[Fact]
public void Set_UnregisteredEntity_ThrowsInvalidOperationException()
@@ -141,7 +140,7 @@ public class SetMethodTests : IDisposable
}
/// <summary>
/// Tests set wrong key type throws invalid operation exception.
/// Tests set wrong key type throws invalid operation exception.
/// </summary>
[Fact]
public void Set_WrongKeyType_ThrowsInvalidOperationException()
@@ -150,7 +149,7 @@ public class SetMethodTests : IDisposable
}
/// <summary>
/// Tests set can perform operations.
/// Tests set can perform operations.
/// </summary>
[Fact]
public void Set_CanPerformOperations()
@@ -167,7 +166,7 @@ public class SetMethodTests : IDisposable
}
/// <summary>
/// Tests set with int key can perform operations.
/// Tests set with int key can perform operations.
/// </summary>
[Fact]
public void Set_WithIntKey_CanPerformOperations()
@@ -186,20 +185,20 @@ public class SetMethodTests : IDisposable
public class SetMethodInheritanceTests : IDisposable
{
private readonly TestExtendedDbContext _db;
private readonly string _dbPath;
private readonly Shared.TestExtendedDbContext _db;
/// <summary>
/// Initializes a new instance of the <see cref="SetMethodInheritanceTests"/> class.
/// Initializes a new instance of the <see cref="SetMethodInheritanceTests" /> class.
/// </summary>
public SetMethodInheritanceTests()
{
_dbPath = Path.Combine(Path.GetTempPath(), $"cbdd_set_inherit_{Guid.NewGuid()}.db");
_db = new Shared.TestExtendedDbContext(_dbPath);
_db = new TestExtendedDbContext(_dbPath);
}
/// <summary>
/// Disposes the resources used by this instance.
/// Disposes the resources used by this instance.
/// </summary>
public void Dispose()
{
@@ -208,7 +207,7 @@ public class SetMethodInheritanceTests : IDisposable
}
/// <summary>
/// Tests set own collection returns correct instance.
/// Tests set own collection returns correct instance.
/// </summary>
[Fact]
public void Set_OwnCollection_ReturnsCorrectInstance()
@@ -219,7 +218,7 @@ public class SetMethodInheritanceTests : IDisposable
}
/// <summary>
/// Tests set parent collection returns correct instance.
/// Tests set parent collection returns correct instance.
/// </summary>
[Fact]
public void Set_ParentCollection_ReturnsCorrectInstance()
@@ -230,7 +229,7 @@ public class SetMethodInheritanceTests : IDisposable
}
/// <summary>
/// Tests set parent shorthand returns correct instance.
/// Tests set parent shorthand returns correct instance.
/// </summary>
[Fact]
public void Set_ParentShorthand_ReturnsCorrectInstance()
@@ -241,7 +240,7 @@ public class SetMethodInheritanceTests : IDisposable
}
/// <summary>
/// Tests set parent int collection returns correct instance.
/// Tests set parent int collection returns correct instance.
/// </summary>
[Fact]
public void Set_ParentIntCollection_ReturnsCorrectInstance()
@@ -251,7 +250,7 @@ public class SetMethodInheritanceTests : IDisposable
}
/// <summary>
/// Tests set parent custom key returns correct instance.
/// Tests set parent custom key returns correct instance.
/// </summary>
[Fact]
public void Set_ParentCustomKey_ReturnsCorrectInstance()
@@ -262,7 +261,7 @@ public class SetMethodInheritanceTests : IDisposable
}
/// <summary>
/// Tests set unregistered entity throws invalid operation exception.
/// Tests set unregistered entity throws invalid operation exception.
/// </summary>
[Fact]
public void Set_UnregisteredEntity_ThrowsInvalidOperationException()
@@ -271,7 +270,7 @@ public class SetMethodInheritanceTests : IDisposable
}
/// <summary>
/// Tests set own collection can perform operations.
/// Tests set own collection can perform operations.
/// </summary>
[Fact]
public void Set_OwnCollection_CanPerformOperations()
@@ -287,7 +286,7 @@ public class SetMethodInheritanceTests : IDisposable
}
/// <summary>
/// Tests set parent collection can perform operations.
/// Tests set parent collection can perform operations.
/// </summary>
[Fact]
public void Set_ParentCollection_CanPerformOperations()
@@ -301,4 +300,4 @@ public class SetMethodInheritanceTests : IDisposable
found.ShouldNotBeNull();
found.Name.ShouldBe("Bob");
}
}
}

View File

@@ -7,7 +7,7 @@ namespace ZB.MOM.WW.CBDD.Tests;
public class CompactionCrashRecoveryTests
{
/// <summary>
/// Verifies compaction resumes from marker phases and preserves data.
/// Verifies compaction resumes from marker phases and preserves data.
/// </summary>
/// <param name="phase">The crash marker phase to resume from.</param>
[Theory]
@@ -16,8 +16,8 @@ public class CompactionCrashRecoveryTests
[InlineData("Swapped")]
public void ResumeCompaction_FromCrashMarkerPhases_ShouldFinalizeAndPreserveData(string phase)
{
var dbPath = NewDbPath();
var markerPath = MarkerPath(dbPath);
string dbPath = NewDbPath();
string markerPath = MarkerPath(dbPath);
try
{
@@ -54,13 +54,13 @@ public class CompactionCrashRecoveryTests
}
/// <summary>
/// Verifies corrupted compaction markers are recovered deterministically.
/// Verifies corrupted compaction markers are recovered deterministically.
/// </summary>
[Fact]
public void ResumeCompaction_WithCorruptedMarker_ShouldRecoverDeterministically()
{
var dbPath = NewDbPath();
var markerPath = MarkerPath(dbPath);
string dbPath = NewDbPath();
string markerPath = MarkerPath(dbPath);
try
{
@@ -96,13 +96,11 @@ public class CompactionCrashRecoveryTests
{
var ids = new List<ObjectId>();
for (var i = 0; i < 120; i++)
{
ids.Add(db.Users.Insert(new User
{
Name = $"user-{i:D4}-payload-{new string('x', 120)}",
Age = i % 20
}));
}
db.SaveChanges();
return ids;
@@ -110,25 +108,30 @@ public class CompactionCrashRecoveryTests
private static void WriteMarker(string markerPath, string dbPath, string phase)
{
var safeDbPath = dbPath.Replace("\\", "\\\\", StringComparison.Ordinal);
string safeDbPath = dbPath.Replace("\\", "\\\\", StringComparison.Ordinal);
var now = DateTimeOffset.UtcNow.ToString("O");
var json = $$"""
{"version":1,"phase":"{{phase}}","databasePath":"{{safeDbPath}}","startedAtUtc":"{{now}}","lastUpdatedUtc":"{{now}}","onlineMode":false,"mode":"InPlace"}
""";
{"version":1,"phase":"{{phase}}","databasePath":"{{safeDbPath}}","startedAtUtc":"{{now}}","lastUpdatedUtc":"{{now}}","onlineMode":false,"mode":"InPlace"}
""";
File.WriteAllText(markerPath, json);
}
private static string MarkerPath(string dbPath) => $"{dbPath}.compact.state";
private static string MarkerPath(string dbPath)
{
return $"{dbPath}.compact.state";
}
private static string NewDbPath()
=> Path.Combine(Path.GetTempPath(), $"compaction_crash_{Guid.NewGuid():N}.db");
{
return Path.Combine(Path.GetTempPath(), $"compaction_crash_{Guid.NewGuid():N}.db");
}
private static void CleanupFiles(string dbPath)
{
var walPath = Path.ChangeExtension(dbPath, ".wal");
var markerPath = MarkerPath(dbPath);
string walPath = Path.ChangeExtension(dbPath, ".wal");
string markerPath = MarkerPath(dbPath);
if (File.Exists(dbPath)) File.Delete(dbPath);
if (File.Exists(walPath)) File.Delete(walPath);
if (File.Exists(markerPath)) File.Delete(markerPath);
}
}
}

View File

@@ -1,4 +1,5 @@
using System.IO.MemoryMappedFiles;
using System.Text;
using ZB.MOM.WW.CBDD.Bson;
using ZB.MOM.WW.CBDD.Core.Indexing;
using ZB.MOM.WW.CBDD.Core.Storage;
@@ -9,30 +10,23 @@ namespace ZB.MOM.WW.CBDD.Tests;
public class CompactionOfflineTests
{
/// <summary>
/// Tests offline compact should preserve logical data equivalence.
/// Tests offline compact should preserve logical data equivalence.
/// </summary>
[Fact]
public void OfflineCompact_ShouldPreserveLogicalDataEquivalence()
{
var dbPath = NewDbPath();
string dbPath = NewDbPath();
try
{
using var db = new TestDbContext(dbPath);
var ids = new List<ObjectId>();
for (var i = 0; i < 160; i++)
{
ids.Add(db.Users.Insert(new User { Name = $"user-{i:D4}", Age = i % 31 }));
}
for (var i = 0; i < 160; i++) ids.Add(db.Users.Insert(new User { Name = $"user-{i:D4}", Age = i % 31 }));
for (var i = 0; i < ids.Count; i += 9)
{
if (db.Users.FindById(ids[i]) != null)
{
db.Users.Delete(ids[i]).ShouldBeTrue();
}
}
var updateTargets = db.Users.FindAll(u => u.Age % 4 == 0)
.Select(u => u.Id)
@@ -40,10 +34,7 @@ public class CompactionOfflineTests
foreach (var id in updateTargets)
{
var user = db.Users.FindById(id);
if (user == null)
{
continue;
}
if (user == null) continue;
user.Name += "-updated";
db.Users.Update(user).ShouldBeTrue();
@@ -76,25 +67,23 @@ public class CompactionOfflineTests
}
/// <summary>
/// Tests offline compact should keep index results consistent.
/// Tests offline compact should keep index results consistent.
/// </summary>
[Fact]
public void OfflineCompact_ShouldKeepIndexResultsConsistent()
{
var dbPath = NewDbPath();
string dbPath = NewDbPath();
try
{
using var db = new TestDbContext(dbPath);
for (var i = 0; i < 300; i++)
{
db.People.Insert(new Person
{
Name = $"person-{i:D4}",
Age = i % 12
});
}
db.SaveChanges();
db.ForceCheckpoint();
@@ -104,7 +93,7 @@ public class CompactionOfflineTests
.ToDictionary(g => g.Key, g => g.Select(x => x.Name).OrderBy(x => x).ToArray());
db.SaveChanges();
var indexNamesBefore = db.People.GetIndexes().Select(x => x.Name).OrderBy(x => x).ToArray();
string[] indexNamesBefore = db.People.GetIndexes().Select(x => x.Name).OrderBy(x => x).ToArray();
var stats = db.Compact(new CompactionOptions
{
@@ -114,12 +103,12 @@ public class CompactionOfflineTests
});
stats.PrePageCount.ShouldBeGreaterThanOrEqualTo(stats.PostPageCount);
var indexNamesAfter = db.People.GetIndexes().Select(x => x.Name).OrderBy(x => x).ToArray();
string[] indexNamesAfter = db.People.GetIndexes().Select(x => x.Name).OrderBy(x => x).ToArray();
indexNamesAfter.ShouldBe(indexNamesBefore);
foreach (var age in expectedByAge.Keys.OrderBy(x => x))
foreach (int age in expectedByAge.Keys.OrderBy(x => x))
{
var actual = db.People.FindAll(p => p.Age == age)
string[] actual = db.People.FindAll(p => p.Age == age)
.Select(x => x.Name)
.OrderBy(x => x)
.ToArray();
@@ -134,25 +123,23 @@ public class CompactionOfflineTests
}
/// <summary>
/// Tests offline compact should rebuild hash index metadata and preserve results.
/// Tests offline compact should rebuild hash index metadata and preserve results.
/// </summary>
[Fact]
public void OfflineCompact_ShouldRebuildHashIndexMetadataAndPreserveResults()
{
var dbPath = NewDbPath();
string dbPath = NewDbPath();
try
{
using var db = new TestDbContext(dbPath);
for (var i = 0; i < 300; i++)
{
db.People.Insert(new Person
{
Name = $"hash-person-{i:D4}",
Age = i % 12
});
}
db.SaveChanges();
db.ForceCheckpoint();
@@ -165,7 +152,8 @@ public class CompactionOfflineTests
metadata.ShouldNotBeNull();
var targetIndex = metadata!.Indexes
.FirstOrDefault(index => index.PropertyPaths.Any(path => path.Equals("Age", StringComparison.OrdinalIgnoreCase)));
.FirstOrDefault(index =>
index.PropertyPaths.Any(path => path.Equals("Age", StringComparison.OrdinalIgnoreCase)));
targetIndex.ShouldNotBeNull();
targetIndex!.Type = IndexType.Hash;
@@ -191,9 +179,9 @@ public class CompactionOfflineTests
runtimeIndex.ShouldNotBeNull();
runtimeIndex!.Type.ShouldBe(IndexType.Hash);
foreach (var age in expectedByAge.Keys.OrderBy(x => x))
foreach (int age in expectedByAge.Keys.OrderBy(x => x))
{
var actual = db.People.FindAll(p => p.Age == age)
string[] actual = db.People.FindAll(p => p.Age == age)
.Select(x => x.Name)
.OrderBy(x => x)
.ToArray();
@@ -208,12 +196,12 @@ public class CompactionOfflineTests
}
/// <summary>
/// Tests offline compact when tail is reclaimable should reduce file size.
/// Tests offline compact when tail is reclaimable should reduce file size.
/// </summary>
[Fact]
public void OfflineCompact_WhenTailIsReclaimable_ShouldReduceFileSize()
{
var dbPath = NewDbPath();
string dbPath = NewDbPath();
var ids = new List<ObjectId>();
try
@@ -233,24 +221,20 @@ public class CompactionOfflineTests
db.SaveChanges();
db.ForceCheckpoint();
for (var i = ids.Count - 1; i >= 60; i--)
{
for (int i = ids.Count - 1; i >= 60; i--)
if (db.Users.FindById(ids[i]) != null)
{
db.Users.Delete(ids[i]).ShouldBeTrue();
}
}
db.SaveChanges();
db.ForceCheckpoint();
var preCompactSize = new FileInfo(dbPath).Length;
long preCompactSize = new FileInfo(dbPath).Length;
var stats = db.Compact(new CompactionOptions
{
EnableTailTruncation = true,
MinimumRetainedPages = 2
});
var postCompactSize = new FileInfo(dbPath).Length;
long postCompactSize = new FileInfo(dbPath).Length;
postCompactSize.ShouldBeLessThanOrEqualTo(preCompactSize);
stats.ReclaimedFileBytes.ShouldBeGreaterThanOrEqualTo(0);
@@ -262,20 +246,17 @@ public class CompactionOfflineTests
}
/// <summary>
/// Tests offline compact with invalid primary root metadata should fail validation.
/// Tests offline compact with invalid primary root metadata should fail validation.
/// </summary>
[Fact]
public void OfflineCompact_WithInvalidPrimaryRootMetadata_ShouldFailValidation()
{
var dbPath = NewDbPath();
string dbPath = NewDbPath();
try
{
using var db = new TestDbContext(dbPath);
for (var i = 0; i < 32; i++)
{
db.Users.Insert(new User { Name = $"invalid-primary-{i:D3}", Age = i });
}
for (var i = 0; i < 32; i++) db.Users.Insert(new User { Name = $"invalid-primary-{i:D3}", Age = i });
db.SaveChanges();
db.ForceCheckpoint();
@@ -295,20 +276,18 @@ public class CompactionOfflineTests
}
/// <summary>
/// Tests offline compact with invalid secondary root metadata should fail validation.
/// Tests offline compact with invalid secondary root metadata should fail validation.
/// </summary>
[Fact]
public void OfflineCompact_WithInvalidSecondaryRootMetadata_ShouldFailValidation()
{
var dbPath = NewDbPath();
string dbPath = NewDbPath();
try
{
using var db = new TestDbContext(dbPath);
for (var i = 0; i < 48; i++)
{
db.People.Insert(new Person { Name = $"invalid-secondary-{i:D3}", Age = i % 10 });
}
db.SaveChanges();
db.ForceCheckpoint();
@@ -329,12 +308,12 @@ public class CompactionOfflineTests
}
/// <summary>
/// Tests offline compact should report live bytes relocation and throughput telemetry.
/// Tests offline compact should report live bytes relocation and throughput telemetry.
/// </summary>
[Fact]
public void OfflineCompact_ShouldReportLiveBytesRelocationAndThroughputTelemetry()
{
var dbPath = NewDbPath();
string dbPath = NewDbPath();
try
{
@@ -342,21 +321,15 @@ public class CompactionOfflineTests
var ids = new List<ObjectId>();
for (var i = 0; i < 160; i++)
{
ids.Add(db.Users.Insert(new User
{
Name = BuildPayload(i, 9_000),
Age = i
}));
}
for (var i = 0; i < ids.Count; i += 7)
{
if (db.Users.FindById(ids[i]) != null)
{
db.Users.Delete(ids[i]).ShouldBeTrue();
}
}
db.SaveChanges();
db.ForceCheckpoint();
@@ -383,12 +356,12 @@ public class CompactionOfflineTests
}
/// <summary>
/// Tests offline compact when primary index points to deleted slot should fail validation.
/// Tests offline compact when primary index points to deleted slot should fail validation.
/// </summary>
[Fact]
public void OfflineCompact_WhenPrimaryIndexPointsToDeletedSlot_ShouldFailValidation()
{
var dbPath = NewDbPath();
string dbPath = NewDbPath();
try
{
@@ -408,7 +381,7 @@ public class CompactionOfflineTests
db.Storage.ReadPage(location.PageId, null, page);
var header = SlottedPageHeader.ReadFrom(page);
var slotOffset = SlottedPageHeader.Size + (location.SlotIndex * SlotEntry.Size);
int slotOffset = SlottedPageHeader.Size + location.SlotIndex * SlotEntry.Size;
var slot = SlotEntry.ReadFrom(page.AsSpan(slotOffset, SlotEntry.Size));
slot.Flags |= SlotFlags.Deleted;
slot.WriteTo(page.AsSpan(slotOffset, SlotEntry.Size));
@@ -441,7 +414,7 @@ public class CompactionOfflineTests
private static string BuildPayload(int seed, int approxLength)
{
var builder = new System.Text.StringBuilder(approxLength + 256);
var builder = new StringBuilder(approxLength + 256);
var i = 0;
while (builder.Length < approxLength)
{
@@ -457,11 +430,13 @@ public class CompactionOfflineTests
}
private static string NewDbPath()
=> Path.Combine(Path.GetTempPath(), $"compaction_offline_{Guid.NewGuid():N}.db");
{
return Path.Combine(Path.GetTempPath(), $"compaction_offline_{Guid.NewGuid():N}.db");
}
private static void CleanupFiles(string dbPath)
{
var walPath = Path.ChangeExtension(dbPath, ".wal");
string walPath = Path.ChangeExtension(dbPath, ".wal");
var markerPath = $"{dbPath}.compact.state";
var tempPath = $"{dbPath}.compact.tmp";
var backupPath = $"{dbPath}.compact.bak";
@@ -471,4 +446,4 @@ public class CompactionOfflineTests
if (File.Exists(tempPath)) File.Delete(tempPath);
if (File.Exists(backupPath)) File.Delete(backupPath);
}
}
}

View File

@@ -7,12 +7,12 @@ namespace ZB.MOM.WW.CBDD.Tests;
public class CompactionOnlineConcurrencyTests
{
/// <summary>
/// Verifies online compaction completes without deadlock under concurrent workload.
/// Verifies online compaction completes without deadlock under concurrent workload.
/// </summary>
[Fact]
public async Task OnlineCompaction_WithConcurrentishWorkload_ShouldCompleteWithoutDeadlock()
{
var dbPath = NewDbPath();
string dbPath = NewDbPath();
var activeIds = new List<ObjectId>();
var sync = new object();
var completedOps = 0;
@@ -48,10 +48,7 @@ public class CompactionOnlineConcurrencyTests
ObjectId? candidate = null;
lock (sync)
{
if (activeIds.Count > 0)
{
candidate = activeIds[i % activeIds.Count];
}
if (activeIds.Count > 0) candidate = activeIds[i % activeIds.Count];
}
if (candidate.HasValue)
@@ -76,10 +73,7 @@ public class CompactionOnlineConcurrencyTests
}
}
if (candidate.HasValue)
{
db.Users.Delete(candidate.Value);
}
if (candidate.HasValue) db.Users.Delete(candidate.Value);
}
db.SaveChanges();
@@ -115,10 +109,7 @@ public class CompactionOnlineConcurrencyTests
}
var actualIds = allUsers.Select(x => x.Id).ToHashSet();
foreach (var id in snapshotIds)
{
actualIds.ShouldContain(id);
}
foreach (var id in snapshotIds) actualIds.ShouldContain(id);
}
finally
{
@@ -127,14 +118,16 @@ public class CompactionOnlineConcurrencyTests
}
private static string NewDbPath()
=> Path.Combine(Path.GetTempPath(), $"compaction_online_{Guid.NewGuid():N}.db");
{
return Path.Combine(Path.GetTempPath(), $"compaction_online_{Guid.NewGuid():N}.db");
}
private static void CleanupFiles(string dbPath)
{
var walPath = Path.ChangeExtension(dbPath, ".wal");
string walPath = Path.ChangeExtension(dbPath, ".wal");
var markerPath = $"{dbPath}.compact.state";
if (File.Exists(dbPath)) File.Delete(dbPath);
if (File.Exists(walPath)) File.Delete(walPath);
if (File.Exists(markerPath)) File.Delete(markerPath);
}
}
}

View File

@@ -7,21 +7,18 @@ namespace ZB.MOM.WW.CBDD.Tests;
public class CompactionWalCoordinationTests
{
/// <summary>
/// Verifies offline compaction checkpoints and leaves the WAL empty.
/// Verifies offline compaction checkpoints and leaves the WAL empty.
/// </summary>
[Fact]
public void OfflineCompact_ShouldCheckpointAndLeaveWalEmpty()
{
var dbPath = NewDbPath();
string dbPath = NewDbPath();
var markerPath = $"{dbPath}.compact.state";
try
{
using var db = new TestDbContext(dbPath);
for (var i = 0; i < 80; i++)
{
db.Users.Insert(new User { Name = $"wal-compact-{i:D3}", Age = i });
}
for (var i = 0; i < 80; i++) db.Users.Insert(new User { Name = $"wal-compact-{i:D3}", Age = i });
db.SaveChanges();
db.Storage.GetWalSize().ShouldBeGreaterThan(0);
@@ -46,13 +43,13 @@ public class CompactionWalCoordinationTests
}
/// <summary>
/// Verifies compaction after WAL recovery preserves durable data.
/// Verifies compaction after WAL recovery preserves durable data.
/// </summary>
[Fact]
public void Compact_AfterWalRecovery_ShouldKeepDataDurable()
{
var dbPath = NewDbPath();
var walPath = Path.ChangeExtension(dbPath, ".wal");
string dbPath = NewDbPath();
string walPath = Path.ChangeExtension(dbPath, ".wal");
var expected = new List<(ObjectId Id, string Name)>();
try
@@ -76,10 +73,7 @@ public class CompactionWalCoordinationTests
{
recovered.Users.Count().ShouldBe(expected.Count);
foreach (var item in expected)
{
recovered.Users.FindById(item.Id)!.Name.ShouldBe(item.Name);
}
foreach (var item in expected) recovered.Users.FindById(item.Id)!.Name.ShouldBe(item.Name);
recovered.SaveChanges();
recovered.Compact();
@@ -89,10 +83,7 @@ public class CompactionWalCoordinationTests
using (var verify = new TestDbContext(dbPath))
{
verify.Users.Count().ShouldBe(expected.Count);
foreach (var item in expected)
{
verify.Users.FindById(item.Id)!.Name.ShouldBe(item.Name);
}
foreach (var item in expected) verify.Users.FindById(item.Id)!.Name.ShouldBe(item.Name);
}
}
finally
@@ -102,14 +93,16 @@ public class CompactionWalCoordinationTests
}
private static string NewDbPath()
=> Path.Combine(Path.GetTempPath(), $"compaction_wal_{Guid.NewGuid():N}.db");
{
return Path.Combine(Path.GetTempPath(), $"compaction_wal_{Guid.NewGuid():N}.db");
}
private static void CleanupFiles(string dbPath)
{
var walPath = Path.ChangeExtension(dbPath, ".wal");
string walPath = Path.ChangeExtension(dbPath, ".wal");
var markerPath = $"{dbPath}.compact.state";
if (File.Exists(dbPath)) File.Delete(dbPath);
if (File.Exists(walPath)) File.Delete(walPath);
if (File.Exists(markerPath)) File.Delete(markerPath);
}
}
}

View File

@@ -1,5 +1,6 @@
using System.IO.Compression;
using System.Security.Cryptography;
using System.Text;
using ZB.MOM.WW.CBDD.Bson;
using ZB.MOM.WW.CBDD.Core.Compression;
using ZB.MOM.WW.CBDD.Core.Storage;
@@ -10,12 +11,12 @@ namespace ZB.MOM.WW.CBDD.Tests;
public class CompressionCompatibilityTests
{
/// <summary>
/// Verifies opening legacy uncompressed files with compression enabled does not mutate database bytes.
/// Verifies opening legacy uncompressed files with compression enabled does not mutate database bytes.
/// </summary>
[Fact]
public void OpeningLegacyUncompressedFile_WithCompressionEnabled_ShouldNotMutateDbFile()
{
var dbPath = NewDbPath();
string dbPath = NewDbPath();
var idList = new List<ObjectId>();
try
@@ -28,8 +29,8 @@ public class CompressionCompatibilityTests
db.ForceCheckpoint();
}
var beforeSize = new FileInfo(dbPath).Length;
var beforeHash = ComputeFileHash(dbPath);
long beforeSize = new FileInfo(dbPath).Length;
string beforeHash = ComputeFileHash(dbPath);
var compressionOptions = new CompressionOptions
{
@@ -47,8 +48,8 @@ public class CompressionCompatibilityTests
reopened.Users.Count().ShouldBe(2);
}
var afterSize = new FileInfo(dbPath).Length;
var afterHash = ComputeFileHash(dbPath);
long afterSize = new FileInfo(dbPath).Length;
string afterHash = ComputeFileHash(dbPath);
afterSize.ShouldBe(beforeSize);
afterHash.ShouldBe(beforeHash);
@@ -60,12 +61,12 @@ public class CompressionCompatibilityTests
}
/// <summary>
/// Verifies mixed compressed and uncompressed documents remain readable after partial migration.
/// Verifies mixed compressed and uncompressed documents remain readable after partial migration.
/// </summary>
[Fact]
public void MixedFormatDocuments_ShouldRemainReadableAfterPartialMigration()
{
var dbPath = NewDbPath();
string dbPath = NewDbPath();
ObjectId legacyId;
ObjectId compressedId;
@@ -125,7 +126,7 @@ public class CompressionCompatibilityTests
for (ushort slotIndex = 0; slotIndex < header.SlotCount; slotIndex++)
{
var slotOffset = SlottedPageHeader.Size + (slotIndex * SlotEntry.Size);
int slotOffset = SlottedPageHeader.Size + slotIndex * SlotEntry.Size;
var slot = SlotEntry.ReadFrom(buffer.AsSpan(slotOffset, SlotEntry.Size));
if ((slot.Flags & SlotFlags.Deleted) != 0)
continue;
@@ -149,7 +150,7 @@ public class CompressionCompatibilityTests
private static string BuildPayload(int approxLength)
{
var builder = new System.Text.StringBuilder(approxLength + 256);
var builder = new StringBuilder(approxLength + 256);
var i = 0;
while (builder.Length < approxLength)
{
@@ -163,14 +164,16 @@ public class CompressionCompatibilityTests
}
private static string NewDbPath()
=> Path.Combine(Path.GetTempPath(), $"compression_compat_{Guid.NewGuid():N}.db");
{
return Path.Combine(Path.GetTempPath(), $"compression_compat_{Guid.NewGuid():N}.db");
}
private static void CleanupFiles(string dbPath)
{
var walPath = Path.ChangeExtension(dbPath, ".wal");
string walPath = Path.ChangeExtension(dbPath, ".wal");
var markerPath = $"{dbPath}.compact.state";
if (File.Exists(dbPath)) File.Delete(dbPath);
if (File.Exists(walPath)) File.Delete(walPath);
if (File.Exists(markerPath)) File.Delete(markerPath);
}
}
}

View File

@@ -1,5 +1,6 @@
using System.Buffers.Binary;
using System.IO.Compression;
using System.Text;
using ZB.MOM.WW.CBDD.Bson;
using ZB.MOM.WW.CBDD.Core.Compression;
using ZB.MOM.WW.CBDD.Core.Storage;
@@ -10,12 +11,12 @@ namespace ZB.MOM.WW.CBDD.Tests;
public class CompressionCorruptionTests
{
/// <summary>
/// Verifies corrupted compressed payload checksum triggers invalid data errors.
/// Verifies corrupted compressed payload checksum triggers invalid data errors.
/// </summary>
[Fact]
public void Read_WithBadChecksum_ShouldThrowInvalidData()
{
var dbPath = NewDbPath();
string dbPath = NewDbPath();
var options = CompressionEnabledOptions();
try
@@ -23,7 +24,7 @@ public class CompressionCorruptionTests
using var db = new TestDbContext(dbPath, options);
var id = InsertCheckpointAndCorrupt(db, header =>
{
var currentChecksum = BinaryPrimitives.ReadUInt32LittleEndian(header.Slice(12, 4));
uint currentChecksum = BinaryPrimitives.ReadUInt32LittleEndian(header.Slice(12, 4));
BinaryPrimitives.WriteUInt32LittleEndian(header.Slice(12, 4), currentChecksum + 1);
});
@@ -38,21 +39,19 @@ public class CompressionCorruptionTests
}
/// <summary>
/// Verifies invalid original length metadata triggers invalid data errors.
/// Verifies invalid original length metadata triggers invalid data errors.
/// </summary>
[Fact]
public void Read_WithBadOriginalLength_ShouldThrowInvalidData()
{
var dbPath = NewDbPath();
string dbPath = NewDbPath();
var options = CompressionEnabledOptions();
try
{
using var db = new TestDbContext(dbPath, options);
var id = InsertCheckpointAndCorrupt(db, header =>
{
BinaryPrimitives.WriteInt32LittleEndian(header.Slice(4, 4), -1);
});
var id = InsertCheckpointAndCorrupt(db,
header => { BinaryPrimitives.WriteInt32LittleEndian(header.Slice(4, 4), -1); });
var ex = Should.Throw<InvalidDataException>(() => db.Users.FindById(id));
ex.Message.ShouldContain("decompress");
@@ -64,21 +63,19 @@ public class CompressionCorruptionTests
}
/// <summary>
/// Verifies oversized declared decompressed length enforces safety guardrails.
/// Verifies oversized declared decompressed length enforces safety guardrails.
/// </summary>
[Fact]
public void Read_WithOversizedDeclaredLength_ShouldEnforceGuardrail()
{
var dbPath = NewDbPath();
var options = CompressionEnabledOptions(maxDecompressedSizeBytes: 2048);
string dbPath = NewDbPath();
var options = CompressionEnabledOptions(2048);
try
{
using var db = new TestDbContext(dbPath, options);
var id = InsertCheckpointAndCorrupt(db, header =>
{
BinaryPrimitives.WriteInt32LittleEndian(header.Slice(4, 4), 2049);
});
var id = InsertCheckpointAndCorrupt(db,
header => { BinaryPrimitives.WriteInt32LittleEndian(header.Slice(4, 4), 2049); });
var ex = Should.Throw<InvalidDataException>(() => db.Users.FindById(id));
ex.Message.ShouldContain("invalid decompressed length");
@@ -91,12 +88,12 @@ public class CompressionCorruptionTests
}
/// <summary>
/// Verifies invalid codec identifiers in compressed headers trigger invalid data errors.
/// Verifies invalid codec identifiers in compressed headers trigger invalid data errors.
/// </summary>
[Fact]
public void Read_WithInvalidCodecId_ShouldThrowInvalidData()
{
var dbPath = NewDbPath();
string dbPath = NewDbPath();
var options = CompressionEnabledOptions();
try
@@ -128,7 +125,7 @@ public class CompressionCorruptionTests
db.SaveChanges();
db.ForceCheckpoint();
var (pageId, slot, _) = FindFirstCompressedSlot(db.Storage);
(uint pageId, var slot, _) = FindFirstCompressedSlot(db.Storage);
((slot.Flags & SlotFlags.HasOverflow) != 0).ShouldBeFalse();
var page = new byte[db.Storage.PageSize];
@@ -152,7 +149,7 @@ public class CompressionCorruptionTests
for (ushort slotIndex = 0; slotIndex < header.SlotCount; slotIndex++)
{
var slotOffset = SlottedPageHeader.Size + (slotIndex * SlotEntry.Size);
int slotOffset = SlottedPageHeader.Size + slotIndex * SlotEntry.Size;
var slot = SlotEntry.ReadFrom(buffer.AsSpan(slotOffset, SlotEntry.Size));
if ((slot.Flags & SlotFlags.Deleted) != 0)
continue;
@@ -178,11 +175,9 @@ public class CompressionCorruptionTests
};
}
private delegate void HeaderMutator(Span<byte> header);
private static string BuildPayload(int approxLength)
{
var builder = new System.Text.StringBuilder(approxLength + 256);
var builder = new StringBuilder(approxLength + 256);
var i = 0;
while (builder.Length < approxLength)
{
@@ -196,14 +191,18 @@ public class CompressionCorruptionTests
}
private static string NewDbPath()
=> Path.Combine(Path.GetTempPath(), $"compression_corruption_{Guid.NewGuid():N}.db");
{
return Path.Combine(Path.GetTempPath(), $"compression_corruption_{Guid.NewGuid():N}.db");
}
private static void CleanupFiles(string dbPath)
{
var walPath = Path.ChangeExtension(dbPath, ".wal");
string walPath = Path.ChangeExtension(dbPath, ".wal");
var markerPath = $"{dbPath}.compact.state";
if (File.Exists(dbPath)) File.Delete(dbPath);
if (File.Exists(walPath)) File.Delete(walPath);
if (File.Exists(markerPath)) File.Delete(markerPath);
}
}
private delegate void HeaderMutator(Span<byte> header);
}

View File

@@ -1,4 +1,5 @@
using System.IO.Compression;
using System.Text;
using ZB.MOM.WW.CBDD.Bson;
using ZB.MOM.WW.CBDD.Core.Compression;
using ZB.MOM.WW.CBDD.Core.Storage;
@@ -9,12 +10,12 @@ namespace ZB.MOM.WW.CBDD.Tests;
public class CompressionInsertReadTests
{
/// <summary>
/// Tests insert with threshold should store mixed compressed and uncompressed slots.
/// Tests insert with threshold should store mixed compressed and uncompressed slots.
/// </summary>
[Fact]
public void Insert_WithThreshold_ShouldStoreMixedCompressedAndUncompressedSlots()
{
var dbPath = NewDbPath();
string dbPath = NewDbPath();
var options = new CompressionOptions
{
EnableCompression = true,
@@ -50,12 +51,12 @@ public class CompressionInsertReadTests
}
/// <summary>
/// Tests find by id should read mixed compressed and uncompressed documents.
/// Tests find by id should read mixed compressed and uncompressed documents.
/// </summary>
[Fact]
public void FindById_ShouldReadMixedCompressedAndUncompressedDocuments()
{
var dbPath = NewDbPath();
string dbPath = NewDbPath();
var options = new CompressionOptions
{
EnableCompression = true,
@@ -98,12 +99,12 @@ public class CompressionInsertReadTests
}
/// <summary>
/// Tests insert when codec throws should fallback to uncompressed storage.
/// Tests insert when codec throws should fallback to uncompressed storage.
/// </summary>
[Fact]
public void Insert_WhenCodecThrows_ShouldFallbackToUncompressedStorage()
{
var dbPath = NewDbPath();
string dbPath = NewDbPath();
var options = new CompressionOptions
{
EnableCompression = true,
@@ -152,7 +153,7 @@ public class CompressionInsertReadTests
for (ushort slotIndex = 0; slotIndex < header.SlotCount; slotIndex++)
{
var slotOffset = SlottedPageHeader.Size + (slotIndex * SlotEntry.Size);
int slotOffset = SlottedPageHeader.Size + slotIndex * SlotEntry.Size;
var slot = SlotEntry.ReadFrom(buffer.AsSpan(slotOffset, SlotEntry.Size));
if ((slot.Flags & SlotFlags.Deleted) != 0)
continue;
@@ -168,7 +169,7 @@ public class CompressionInsertReadTests
private static string BuildPayload(int approxLength)
{
var builder = new System.Text.StringBuilder(approxLength + 256);
var builder = new StringBuilder(approxLength + 256);
var i = 0;
while (builder.Length < approxLength)
{
@@ -182,11 +183,13 @@ public class CompressionInsertReadTests
}
private static string NewDbPath()
=> Path.Combine(Path.GetTempPath(), $"compression_insert_read_{Guid.NewGuid():N}.db");
{
return Path.Combine(Path.GetTempPath(), $"compression_insert_read_{Guid.NewGuid():N}.db");
}
private static void CleanupFiles(string dbPath)
{
var walPath = Path.ChangeExtension(dbPath, ".wal");
string walPath = Path.ChangeExtension(dbPath, ".wal");
var markerPath = $"{dbPath}.compact.state";
if (File.Exists(dbPath)) File.Delete(dbPath);
if (File.Exists(walPath)) File.Delete(walPath);
@@ -196,25 +199,29 @@ public class CompressionInsertReadTests
private sealed class FailingBrotliCodec : ICompressionCodec
{
/// <summary>
/// Gets or sets the codec.
/// Gets or sets the codec.
/// </summary>
public CompressionCodec Codec => CompressionCodec.Brotli;
/// <summary>
/// Tests compress.
/// Tests compress.
/// </summary>
/// <param name="input">Payload bytes to compress.</param>
/// <param name="level">Compression level.</param>
public byte[] Compress(ReadOnlySpan<byte> input, CompressionLevel level)
=> throw new InvalidOperationException("Forced codec failure for test coverage.");
{
throw new InvalidOperationException("Forced codec failure for test coverage.");
}
/// <summary>
/// Tests decompress.
/// Tests decompress.
/// </summary>
/// <param name="input">Compressed payload bytes.</param>
/// <param name="expectedLength">Expected decompressed payload length.</param>
/// <param name="maxDecompressedSizeBytes">Maximum allowed decompressed size.</param>
public byte[] Decompress(ReadOnlySpan<byte> input, int expectedLength, int maxDecompressedSizeBytes)
=> throw new InvalidOperationException("This codec should not be used for reads in this scenario.");
{
throw new InvalidOperationException("This codec should not be used for reads in this scenario.");
}
}
}
}

View File

@@ -1,5 +1,6 @@
using System.IO.Compression;
using System.IO.MemoryMappedFiles;
using System.Text;
using ZB.MOM.WW.CBDD.Core.Compression;
using ZB.MOM.WW.CBDD.Core.Storage;
using ZB.MOM.WW.CBDD.Shared;
@@ -9,12 +10,12 @@ namespace ZB.MOM.WW.CBDD.Tests;
public class CompressionOverflowTests
{
/// <summary>
/// Tests insert compressed document spanning overflow pages should round trip.
/// Tests insert compressed document spanning overflow pages should round trip.
/// </summary>
[Fact]
public void Insert_CompressedDocumentSpanningOverflowPages_ShouldRoundTrip()
{
var dbPath = NewDbPath();
string dbPath = NewDbPath();
var options = new CompressionOptions
{
EnableCompression = true,
@@ -28,7 +29,7 @@ public class CompressionOverflowTests
{
using var db = new TestDbContext(dbPath, TinyPageConfig(), options);
var payload = BuildPayload(300_000);
string payload = BuildPayload(300_000);
var id = db.Users.Insert(new User { Name = payload, Age = 40 });
db.SaveChanges();
@@ -47,12 +48,12 @@ public class CompressionOverflowTests
}
/// <summary>
/// Tests update should transition across compression thresholds.
/// Tests update should transition across compression thresholds.
/// </summary>
[Fact]
public void Update_ShouldTransitionAcrossCompressionThresholds()
{
var dbPath = NewDbPath();
string dbPath = NewDbPath();
var options = new CompressionOptions
{
EnableCompression = true,
@@ -123,13 +124,13 @@ public class CompressionOverflowTests
for (ushort slotIndex = 0; slotIndex < header.SlotCount; slotIndex++)
{
var slotOffset = SlottedPageHeader.Size + (slotIndex * SlotEntry.Size);
int slotOffset = SlottedPageHeader.Size + slotIndex * SlotEntry.Size;
var slot = SlotEntry.ReadFrom(buffer.AsSpan(slotOffset, SlotEntry.Size));
if ((slot.Flags & SlotFlags.Deleted) != 0)
continue;
var isCompressed = (slot.Flags & SlotFlags.Compressed) != 0;
var hasOverflow = (slot.Flags & SlotFlags.HasOverflow) != 0;
bool isCompressed = (slot.Flags & SlotFlags.Compressed) != 0;
bool hasOverflow = (slot.Flags & SlotFlags.HasOverflow) != 0;
if (isCompressed)
compressed++;
if (isCompressed && hasOverflow)
@@ -152,7 +153,7 @@ public class CompressionOverflowTests
private static string BuildPayload(int approxLength)
{
var builder = new System.Text.StringBuilder(approxLength + 256);
var builder = new StringBuilder(approxLength + 256);
var i = 0;
while (builder.Length < approxLength)
{
@@ -166,14 +167,16 @@ public class CompressionOverflowTests
}
private static string NewDbPath()
=> Path.Combine(Path.GetTempPath(), $"compression_overflow_{Guid.NewGuid():N}.db");
{
return Path.Combine(Path.GetTempPath(), $"compression_overflow_{Guid.NewGuid():N}.db");
}
private static void CleanupFiles(string dbPath)
{
var walPath = Path.ChangeExtension(dbPath, ".wal");
string walPath = Path.ChangeExtension(dbPath, ".wal");
var markerPath = $"{dbPath}.compact.state";
if (File.Exists(dbPath)) File.Delete(dbPath);
if (File.Exists(walPath)) File.Delete(walPath);
if (File.Exists(markerPath)) File.Delete(markerPath);
}
}
}

View File

@@ -1,43 +1,42 @@
using ZB.MOM.WW.CBDD.Shared;
namespace ZB.MOM.WW.CBDD.Tests
namespace ZB.MOM.WW.CBDD.Tests;
public class AutoInitTests : IDisposable
{
public class AutoInitTests : System.IDisposable
private const string DbPath = "autoinit.db";
/// <summary>
/// Initializes a new instance of the <see cref="AutoInitTests" /> class.
/// </summary>
public AutoInitTests()
{
private const string DbPath = "autoinit.db";
/// <summary>
/// Initializes a new instance of the <see cref="AutoInitTests"/> class.
/// </summary>
public AutoInitTests()
{
if (File.Exists(DbPath)) File.Delete(DbPath);
}
/// <summary>
/// Releases test resources.
/// </summary>
public void Dispose()
{
if (File.Exists(DbPath)) File.Delete(DbPath);
}
/// <summary>
/// Verifies generated collection initializers set up collections automatically.
/// </summary>
[Fact]
public void Collections_Are_Initialized_By_Generator()
{
using var db = new Shared.TestDbContext(DbPath);
// Verify Collection is not null (initialized by generated method)
db.AutoInitEntities.ShouldNotBeNull();
// Verify we can use it
db.AutoInitEntities.Insert(new AutoInitEntity { Id = 1, Name = "Test" });
var stored = db.AutoInitEntities.FindById(1);
stored.ShouldNotBeNull();
stored.Name.ShouldBe("Test");
}
if (File.Exists(DbPath)) File.Delete(DbPath);
}
}
/// <summary>
/// Releases test resources.
/// </summary>
public void Dispose()
{
if (File.Exists(DbPath)) File.Delete(DbPath);
}
/// <summary>
/// Verifies generated collection initializers set up collections automatically.
/// </summary>
[Fact]
public void Collections_Are_Initialized_By_Generator()
{
using var db = new TestDbContext(DbPath);
// Verify Collection is not null (initialized by generated method)
db.AutoInitEntities.ShouldNotBeNull();
// Verify we can use it
db.AutoInitEntities.Insert(new AutoInitEntity { Id = 1, Name = "Test" });
var stored = db.AutoInitEntities.FindById(1);
stored.ShouldNotBeNull();
stored.Name.ShouldBe("Test");
}
}

View File

@@ -1,27 +1,23 @@
using System;
using System.IO;
using ZB.MOM.WW.CBDD.Bson;
using ZB.MOM.WW.CBDD.Shared;
using Xunit;
namespace ZB.MOM.WW.CBDD.Tests;
public class DbContextInheritanceTests : IDisposable
{
private readonly TestExtendedDbContext _db;
private readonly string _dbPath;
private readonly Shared.TestExtendedDbContext _db;
/// <summary>
/// Initializes a new instance of the <see cref="DbContextInheritanceTests"/> class.
/// Initializes a new instance of the <see cref="DbContextInheritanceTests" /> class.
/// </summary>
public DbContextInheritanceTests()
{
_dbPath = Path.Combine(Path.GetTempPath(), $"cbdd_inheritance_{Guid.NewGuid()}.db");
_db = new Shared.TestExtendedDbContext(_dbPath);
_db = new TestExtendedDbContext(_dbPath);
}
/// <summary>
/// Releases test resources.
/// Releases test resources.
/// </summary>
public void Dispose()
{
@@ -30,7 +26,7 @@ public class DbContextInheritanceTests : IDisposable
}
/// <summary>
/// Verifies parent collections are initialized in the extended context.
/// Verifies parent collections are initialized in the extended context.
/// </summary>
[Fact]
public void ExtendedContext_Should_Initialize_Parent_Collections()
@@ -45,7 +41,7 @@ public class DbContextInheritanceTests : IDisposable
}
/// <summary>
/// Verifies extended context collections are initialized.
/// Verifies extended context collections are initialized.
/// </summary>
[Fact]
public void ExtendedContext_Should_Initialize_Own_Collections()
@@ -55,7 +51,7 @@ public class DbContextInheritanceTests : IDisposable
}
/// <summary>
/// Verifies parent collections are usable from the extended context.
/// Verifies parent collections are usable from the extended context.
/// </summary>
[Fact]
public void ExtendedContext_Can_Use_Parent_Collections()
@@ -73,7 +69,7 @@ public class DbContextInheritanceTests : IDisposable
}
/// <summary>
/// Verifies extended collections are usable from the extended context.
/// Verifies extended collections are usable from the extended context.
/// </summary>
[Fact]
public void ExtendedContext_Can_Use_Own_Collections()
@@ -95,7 +91,7 @@ public class DbContextInheritanceTests : IDisposable
}
/// <summary>
/// Verifies parent and extended collections can be used together.
/// Verifies parent and extended collections can be used together.
/// </summary>
[Fact]
public void ExtendedContext_Can_Use_Both_Parent_And_Own_Collections()
@@ -125,4 +121,4 @@ public class DbContextInheritanceTests : IDisposable
retrievedExtended.ShouldNotBeNull();
retrievedExtended.Description.ShouldBe("Related to John");
}
}
}

View File

@@ -1,10 +1,9 @@
using ZB.MOM.WW.CBDD.Bson;
using System.IO.Compression;
using System.IO.MemoryMappedFiles;
using System.Security.Cryptography;
using ZB.MOM.WW.CBDD.Core.Compression;
using ZB.MOM.WW.CBDD.Core.Storage;
using ZB.MOM.WW.CBDD.Shared;
using System.Security.Cryptography;
using System.IO.Compression;
using System.IO.MemoryMappedFiles;
namespace ZB.MOM.WW.CBDD.Tests;
@@ -13,7 +12,7 @@ public class DbContextTests : IDisposable
private string _dbPath;
/// <summary>
/// Initializes test file paths for database context tests.
/// Initializes test file paths for database context tests.
/// </summary>
public DbContextTests()
{
@@ -21,12 +20,27 @@ public class DbContextTests : IDisposable
}
/// <summary>
/// Verifies the basic database context lifecycle works.
/// Disposes test resources and cleans up generated files.
/// </summary>
public void Dispose()
{
try
{
CleanupDbFiles(_dbPath);
}
catch
{
// Ignore cleanup errors
}
}
/// <summary>
/// Verifies the basic database context lifecycle works.
/// </summary>
[Fact]
public void DbContext_BasicLifecycle_Works()
{
using var db = new Shared.TestDbContext(_dbPath);
using var db = new TestDbContext(_dbPath);
var user = new User { Name = "Alice", Age = 30 };
var id = db.Users.Insert(user);
@@ -38,12 +52,12 @@ public class DbContextTests : IDisposable
}
/// <summary>
/// Verifies multiple CRUD operations execute correctly in one context.
/// Verifies multiple CRUD operations execute correctly in one context.
/// </summary>
[Fact]
public void DbContext_MultipleOperations_Work()
{
using var db = new Shared.TestDbContext(_dbPath);
using var db = new TestDbContext(_dbPath);
// Insert
var alice = new User { Name = "Alice", Age = 30 };
@@ -69,32 +83,33 @@ public class DbContextTests : IDisposable
}
/// <summary>
/// Verifies disposing and reopening context preserves persisted data.
/// Verifies disposing and reopening context preserves persisted data.
/// </summary>
[Fact]
public void DbContext_Dispose_ReleasesResources()
{
_dbPath = Path.Combine(Path.GetTempPath(), $"test_dbcontext_reopen.db");
_dbPath = Path.Combine(Path.GetTempPath(), "test_dbcontext_reopen.db");
var totalUsers = 0;
// First context - insert and dispose (auto-checkpoint)
using (var db = new Shared.TestDbContext(_dbPath))
using (var db = new TestDbContext(_dbPath))
{
db.Users.Insert(new User { Name = "Test", Age = 20 });
db.SaveChanges(); // Explicitly save changes to ensure data is in WAL
var beforeCheckpointTotalUsers = db.Users.FindAll().Count();
int beforeCheckpointTotalUsers = db.Users.FindAll().Count();
db.ForceCheckpoint(); // Force checkpoint to ensure data is persisted to main file
totalUsers = db.Users.FindAll().Count();
var countedUsers = db.Users.Count();
int countedUsers = db.Users.Count();
totalUsers.ShouldBe(beforeCheckpointTotalUsers);
} // Dispose → Commit → ForceCheckpoint → Write to PageFile
// Should be able to open again and see persisted data
using var db2 = new Shared.TestDbContext(_dbPath);
using var db2 = new TestDbContext(_dbPath);
totalUsers.ShouldBe(1);
db2.Users.FindAll().Count().ShouldBe(totalUsers);
db2.Users.Count().ShouldBe(totalUsers);
}
private static string ComputeFileHash(string path)
{
using var stream = File.OpenRead(path);
@@ -103,29 +118,31 @@ public class DbContextTests : IDisposable
}
/// <summary>
/// Verifies database file size and content change after insert and checkpoint.
/// Verifies database file size and content change after insert and checkpoint.
/// </summary>
[Fact]
public void DatabaseFile_SizeAndContent_ChangeAfterInsert()
{
var dbPath = Path.Combine(Path.GetTempPath(), $"test_dbfile_{Guid.NewGuid()}.db");
string dbPath = Path.Combine(Path.GetTempPath(), $"test_dbfile_{Guid.NewGuid()}.db");
// 1. Crea e chiudi database vuoto
using (var db = new Shared.TestDbContext(dbPath))
using (var db = new TestDbContext(dbPath))
{
db.Users.Insert(new User { Name = "Pippo", Age = 42 });
}
var initialSize = new FileInfo(dbPath).Length;
var initialHash = ComputeFileHash(dbPath);
long initialSize = new FileInfo(dbPath).Length;
string initialHash = ComputeFileHash(dbPath);
// 2. Riapri, inserisci, chiudi
using (var db = new Shared.TestDbContext(dbPath))
using (var db = new TestDbContext(dbPath))
{
db.Users.Insert(new User { Name = "Test", Age = 42 });
db.ForceCheckpoint(); // Forza persistenza
}
var afterInsertSize = new FileInfo(dbPath).Length;
var afterInsertHash = ComputeFileHash(dbPath);
long afterInsertSize = new FileInfo(dbPath).Length;
string afterInsertHash = ComputeFileHash(dbPath);
// 3. Verifica che dimensione e hash siano cambiati
afterInsertSize.ShouldNotBe(initialSize);
@@ -133,25 +150,25 @@ public class DbContextTests : IDisposable
}
/// <summary>
/// Verifies the WAL file path is auto-derived from database path.
/// Verifies the WAL file path is auto-derived from database path.
/// </summary>
[Fact]
public void DbContext_AutoDerivesWalPath()
{
using var db = new Shared.TestDbContext(_dbPath);
using var db = new TestDbContext(_dbPath);
db.Users.Insert(new User { Name = "Test", Age = 20 });
var walPath = Path.ChangeExtension(_dbPath, ".wal");
string walPath = Path.ChangeExtension(_dbPath, ".wal");
File.Exists(walPath).ShouldBeTrue();
}
/// <summary>
/// Verifies custom page file and compression options support roundtrip data access.
/// Verifies custom page file and compression options support roundtrip data access.
/// </summary>
[Fact]
public void DbContext_WithCustomPageFileAndCompressionOptions_ShouldSupportRoundTrip()
{
var dbPath = Path.Combine(Path.GetTempPath(), $"test_dbcontext_compression_{Guid.NewGuid():N}.db");
string dbPath = Path.Combine(Path.GetTempPath(), $"test_dbcontext_compression_{Guid.NewGuid():N}.db");
var options = new CompressionOptions
{
EnableCompression = true,
@@ -170,8 +187,8 @@ public class DbContextTests : IDisposable
try
{
using var db = new Shared.TestDbContext(dbPath, config, options);
var payload = string.Concat(Enumerable.Repeat("compressible-", 3000));
using var db = new TestDbContext(dbPath, config, options);
string payload = string.Concat(Enumerable.Repeat("compressible-", 3000));
var id = db.Users.Insert(new User { Name = payload, Age = 77 });
db.SaveChanges();
@@ -187,19 +204,16 @@ public class DbContextTests : IDisposable
}
/// <summary>
/// Verifies compact API returns stats and preserves data consistency.
/// Verifies compact API returns stats and preserves data consistency.
/// </summary>
[Fact]
public void DbContext_CompactApi_ShouldReturnStatsAndPreserveData()
{
var dbPath = Path.Combine(Path.GetTempPath(), $"test_dbcontext_compact_{Guid.NewGuid():N}.db");
string dbPath = Path.Combine(Path.GetTempPath(), $"test_dbcontext_compact_{Guid.NewGuid():N}.db");
try
{
using var db = new Shared.TestDbContext(dbPath);
for (var i = 0; i < 120; i++)
{
db.Users.Insert(new User { Name = $"compact-{i:D3}", Age = i % 20 });
}
using var db = new TestDbContext(dbPath);
for (var i = 0; i < 120; i++) db.Users.Insert(new User { Name = $"compact-{i:D3}", Age = i % 20 });
db.SaveChanges();
db.Users.Count().ShouldBe(120);
@@ -221,29 +235,14 @@ public class DbContextTests : IDisposable
}
}
/// <summary>
/// Disposes test resources and cleans up generated files.
/// </summary>
public void Dispose()
{
try
{
CleanupDbFiles(_dbPath);
}
catch
{
// Ignore cleanup errors
}
}
private static void CleanupDbFiles(string dbPath)
{
if (File.Exists(dbPath)) File.Delete(dbPath);
var walPath = Path.ChangeExtension(dbPath, ".wal");
string walPath = Path.ChangeExtension(dbPath, ".wal");
if (File.Exists(walPath)) File.Delete(walPath);
var markerPath = $"{dbPath}.compact.state";
if (File.Exists(markerPath)) File.Delete(markerPath);
}
}
}

View File

@@ -1,36 +1,48 @@
using ZB.MOM.WW.CBDD.Bson;
using ZB.MOM.WW.CBDD.Shared;
using System.Linq;
namespace ZB.MOM.WW.CBDD.Tests;
/// <summary>
/// Tests for Source Generator enhancements:
/// 1. Property inheritance from base classes (including Id)
/// 2. Exclusion of computed getter-only properties
/// 3. Recognition of advanced collection types (HashSet, ISet, LinkedList, etc.)
/// Tests for Source Generator enhancements:
/// 1. Property inheritance from base classes (including Id)
/// 2. Exclusion of computed getter-only properties
/// 3. Recognition of advanced collection types (HashSet, ISet, LinkedList, etc.)
/// </summary>
public class SourceGeneratorFeaturesTests : IDisposable
{
private readonly TestDbContext _db;
private readonly string _dbPath;
private readonly string _walPath;
private readonly Shared.TestDbContext _db;
/// <summary>
/// Initializes a new instance of the <see cref="SourceGeneratorFeaturesTests"/> class.
/// Initializes a new instance of the <see cref="SourceGeneratorFeaturesTests" /> class.
/// </summary>
public SourceGeneratorFeaturesTests()
{
_dbPath = Path.Combine(Path.GetTempPath(), $"test_sg_features_{Guid.NewGuid()}.db");
_walPath = Path.Combine(Path.GetTempPath(), $"test_sg_features_{Guid.NewGuid()}.wal");
_db = new Shared.TestDbContext(_dbPath);
_db = new TestDbContext(_dbPath);
}
/// <summary>
/// Disposes the resources used by this instance.
/// </summary>
public void Dispose()
{
_db?.Dispose();
if (File.Exists(_dbPath))
File.Delete(_dbPath);
if (File.Exists(_walPath))
File.Delete(_walPath);
}
#region Inheritance Tests
/// <summary>
/// Tests derived entity inherits id from base class.
/// Tests derived entity inherits id from base class.
/// </summary>
[Fact]
public void DerivedEntity_InheritsId_FromBaseClass()
@@ -57,7 +69,7 @@ public class SourceGeneratorFeaturesTests : IDisposable
}
/// <summary>
/// Tests derived entity update works with inherited id.
/// Tests derived entity update works with inherited id.
/// </summary>
[Fact]
public void DerivedEntity_Update_WorksWithInheritedId()
@@ -90,7 +102,7 @@ public class SourceGeneratorFeaturesTests : IDisposable
}
/// <summary>
/// Tests derived entity query works with inherited properties.
/// Tests derived entity query works with inherited properties.
/// </summary>
[Fact]
public void DerivedEntity_Query_WorksWithInheritedProperties()
@@ -120,7 +132,7 @@ public class SourceGeneratorFeaturesTests : IDisposable
#region Computed Properties Tests
/// <summary>
/// Tests computed properties are not serialized.
/// Tests computed properties are not serialized.
/// </summary>
[Fact]
public void ComputedProperties_AreNotSerialized()
@@ -151,7 +163,7 @@ public class SourceGeneratorFeaturesTests : IDisposable
}
/// <summary>
/// Tests computed properties update does not break.
/// Tests computed properties update does not break.
/// </summary>
[Fact]
public void ComputedProperties_UpdateDoesNotBreak()
@@ -189,7 +201,7 @@ public class SourceGeneratorFeaturesTests : IDisposable
#region Advanced Collections Tests
/// <summary>
/// Tests hash set serializes and deserializes.
/// Tests hash set serializes and deserializes.
/// </summary>
[Fact]
public void HashSet_SerializesAndDeserializes()
@@ -219,7 +231,7 @@ public class SourceGeneratorFeaturesTests : IDisposable
}
/// <summary>
/// Tests iset serializes and deserializes.
/// Tests iset serializes and deserializes.
/// </summary>
[Fact]
public void ISet_SerializesAndDeserializes()
@@ -250,7 +262,7 @@ public class SourceGeneratorFeaturesTests : IDisposable
}
/// <summary>
/// Tests linked list serializes and deserializes.
/// Tests linked list serializes and deserializes.
/// </summary>
[Fact]
public void LinkedList_SerializesAndDeserializes()
@@ -281,7 +293,7 @@ public class SourceGeneratorFeaturesTests : IDisposable
}
/// <summary>
/// Tests queue serializes and deserializes.
/// Tests queue serializes and deserializes.
/// </summary>
[Fact]
public void Queue_SerializesAndDeserializes()
@@ -311,7 +323,7 @@ public class SourceGeneratorFeaturesTests : IDisposable
}
/// <summary>
/// Tests stack serializes and deserializes.
/// Tests stack serializes and deserializes.
/// </summary>
[Fact]
public void Stack_SerializesAndDeserializes()
@@ -341,7 +353,7 @@ public class SourceGeneratorFeaturesTests : IDisposable
}
/// <summary>
/// Tests hash set with nested objects serializes and deserializes.
/// Tests hash set with nested objects serializes and deserializes.
/// </summary>
[Fact]
public void HashSet_WithNestedObjects_SerializesAndDeserializes()
@@ -351,8 +363,10 @@ public class SourceGeneratorFeaturesTests : IDisposable
{
Name = "Test Nested HashSet"
};
entity.Addresses.Add(new Address { Street = "123 Main St", City = new City { Name = "NYC", ZipCode = "10001" } });
entity.Addresses.Add(new Address { Street = "456 Oak Ave", City = new City { Name = "LA", ZipCode = "90001" } });
entity.Addresses.Add(
new Address { Street = "123 Main St", City = new City { Name = "NYC", ZipCode = "10001" } });
entity.Addresses.Add(new Address
{ Street = "456 Oak Ave", City = new City { Name = "LA", ZipCode = "90001" } });
// Act
var id = _db.AdvancedCollectionEntities.Insert(entity);
@@ -371,7 +385,7 @@ public class SourceGeneratorFeaturesTests : IDisposable
}
/// <summary>
/// Tests iset with nested objects serializes and deserializes.
/// Tests iset with nested objects serializes and deserializes.
/// </summary>
[Fact]
public void ISet_WithNestedObjects_SerializesAndDeserializes()
@@ -403,7 +417,7 @@ public class SourceGeneratorFeaturesTests : IDisposable
}
/// <summary>
/// Tests advanced collections all types in single entity.
/// Tests advanced collections all types in single entity.
/// </summary>
[Fact]
public void AdvancedCollections_AllTypesInSingleEntity()
@@ -454,7 +468,7 @@ public class SourceGeneratorFeaturesTests : IDisposable
#region Private Setters Tests
/// <summary>
/// Tests entity with private setters can be deserialized.
/// Tests entity with private setters can be deserialized.
/// </summary>
[Fact]
public void EntityWithPrivateSetters_CanBeDeserialized()
@@ -475,7 +489,7 @@ public class SourceGeneratorFeaturesTests : IDisposable
}
/// <summary>
/// Tests entity with private setters update works.
/// Tests entity with private setters update works.
/// </summary>
[Fact]
public void EntityWithPrivateSetters_Update_Works()
@@ -501,7 +515,7 @@ public class SourceGeneratorFeaturesTests : IDisposable
}
/// <summary>
/// Tests entity with private setters query works.
/// Tests entity with private setters query works.
/// </summary>
[Fact]
public void EntityWithPrivateSetters_Query_Works()
@@ -530,7 +544,7 @@ public class SourceGeneratorFeaturesTests : IDisposable
#region Init-Only Setters Tests
/// <summary>
/// Tests entity with init setters can be deserialized.
/// Tests entity with init setters can be deserialized.
/// </summary>
[Fact]
public void EntityWithInitSetters_CanBeDeserialized()
@@ -557,15 +571,18 @@ public class SourceGeneratorFeaturesTests : IDisposable
}
/// <summary>
/// Tests entity with init setters query works.
/// Tests entity with init setters query works.
/// </summary>
[Fact]
public void EntityWithInitSetters_Query_Works()
{
// Arrange
var entity1 = new EntityWithInitSetters { Id = ObjectId.NewObjectId(), Name = "Alpha", Age = 20, CreatedAt = DateTime.UtcNow };
var entity2 = new EntityWithInitSetters { Id = ObjectId.NewObjectId(), Name = "Beta", Age = 30, CreatedAt = DateTime.UtcNow };
var entity3 = new EntityWithInitSetters { Id = ObjectId.NewObjectId(), Name = "Gamma", Age = 40, CreatedAt = DateTime.UtcNow };
var entity1 = new EntityWithInitSetters
{ Id = ObjectId.NewObjectId(), Name = "Alpha", Age = 20, CreatedAt = DateTime.UtcNow };
var entity2 = new EntityWithInitSetters
{ Id = ObjectId.NewObjectId(), Name = "Beta", Age = 30, CreatedAt = DateTime.UtcNow };
var entity3 = new EntityWithInitSetters
{ Id = ObjectId.NewObjectId(), Name = "Gamma", Age = 40, CreatedAt = DateTime.UtcNow };
_db.InitSetterEntities.Insert(entity1);
_db.InitSetterEntities.Insert(entity2);
@@ -582,17 +599,4 @@ public class SourceGeneratorFeaturesTests : IDisposable
}
#endregion
/// <summary>
/// Disposes the resources used by this instance.
/// </summary>
public void Dispose()
{
_db?.Dispose();
if (File.Exists(_dbPath))
File.Delete(_dbPath);
if (File.Exists(_walPath))
File.Delete(_walPath);
}
}
}

View File

@@ -2,191 +2,220 @@ using ZB.MOM.WW.CBDD.Bson;
using ZB.MOM.WW.CBDD.Core;
using ZB.MOM.WW.CBDD.Core.Collections;
using ZB.MOM.WW.CBDD.Core.Compression;
using ZB.MOM.WW.CBDD.Core.Indexing;
using ZB.MOM.WW.CBDD.Core.Metadata;
using ZB.MOM.WW.CBDD.Core.Storage;
using ZB.MOM.WW.CBDD.Core.Transactions;
using ZB.MOM.WW.CBDD.Core.Indexing;
using ZB.MOM.WW.CBDD.Core.Metadata;
using ZB.MOM.WW.CBDD.Core.Storage;
using ZB.MOM.WW.CBDD.Core.Transactions;
namespace ZB.MOM.WW.CBDD.Shared;
/// <summary>
/// Test context with manual collection initialization
/// (Source Generator will automate this in the future)
/// Test context with manual collection initialization
/// (Source Generator will automate this in the future)
/// </summary>
public partial class TestDbContext : DocumentDbContext
{
/// <summary>
/// Gets or sets the AnnotatedUsers.
/// Initializes a new instance of the <see cref="TestDbContext" /> class.
/// </summary>
/// <param name="databasePath">The database path.</param>
public TestDbContext(string databasePath)
: this(databasePath, PageFileConfig.Default, CompressionOptions.Default)
{
}
/// <summary>
/// Initializes a new instance of the <see cref="TestDbContext" /> class.
/// </summary>
/// <param name="databasePath">The database path.</param>
/// <param name="compressionOptions">The compression options.</param>
public TestDbContext(string databasePath, CompressionOptions compressionOptions)
: this(databasePath, PageFileConfig.Default, compressionOptions)
{
}
/// <summary>
/// Initializes a new instance of the <see cref="TestDbContext" /> class.
/// </summary>
/// <param name="databasePath">The database path.</param>
/// <param name="pageFileConfig">The page file configuration.</param>
public TestDbContext(string databasePath, PageFileConfig pageFileConfig)
: this(databasePath, pageFileConfig, CompressionOptions.Default)
{
}
/// <summary>
/// Initializes a new instance of the <see cref="TestDbContext" /> class.
/// </summary>
/// <param name="databasePath">The database path.</param>
/// <param name="pageFileConfig">The page file configuration.</param>
/// <param name="compressionOptions">The compression options.</param>
/// <param name="maintenanceOptions">The maintenance options.</param>
public TestDbContext(
string databasePath,
PageFileConfig pageFileConfig,
CompressionOptions? compressionOptions,
MaintenanceOptions? maintenanceOptions = null)
: base(databasePath, pageFileConfig, compressionOptions, maintenanceOptions)
{
}
/// <summary>
/// Gets or sets the AnnotatedUsers.
/// </summary>
public DocumentCollection<ObjectId, AnnotatedUser> AnnotatedUsers { get; set; } = null!;
/// <summary>
/// Gets or sets the Orders.
/// Gets or sets the Orders.
/// </summary>
public DocumentCollection<OrderId, Order> Orders { get; set; } = null!;
/// <summary>
/// Gets or sets the TestDocuments.
/// Gets or sets the TestDocuments.
/// </summary>
public DocumentCollection<ObjectId, TestDocument> TestDocuments { get; set; } = null!;
/// <summary>
/// Gets or sets the OrderDocuments.
/// Gets or sets the OrderDocuments.
/// </summary>
public DocumentCollection<ObjectId, OrderDocument> OrderDocuments { get; set; } = null!;
/// <summary>
/// Gets or sets the ComplexDocuments.
/// Gets or sets the ComplexDocuments.
/// </summary>
public DocumentCollection<ObjectId, ComplexDocument> ComplexDocuments { get; set; } = null!;
/// <summary>
/// Gets or sets the Users.
/// Gets or sets the Users.
/// </summary>
public DocumentCollection<ObjectId, User> Users { get; set; } = null!;
/// <summary>
/// Gets or sets the ComplexUsers.
/// Gets or sets the ComplexUsers.
/// </summary>
public DocumentCollection<ObjectId, ComplexUser> ComplexUsers { get; set; } = null!;
/// <summary>
/// Gets or sets the AutoInitEntities.
/// Gets or sets the AutoInitEntities.
/// </summary>
public DocumentCollection<int, AutoInitEntity> AutoInitEntities { get; set; } = null!;
/// <summary>
/// Gets or sets the People.
/// Gets or sets the People.
/// </summary>
public DocumentCollection<int, Person> People { get; set; } = null!;
/// <summary>
/// Gets or sets the PeopleV2.
/// Gets or sets the PeopleV2.
/// </summary>
public DocumentCollection<ObjectId, PersonV2> PeopleV2 { get; set; } = null!;
/// <summary>
/// Gets or sets the Products.
/// Gets or sets the Products.
/// </summary>
public DocumentCollection<int, Product> Products { get; set; } = null!;
/// <summary>
/// Gets or sets the IntEntities.
/// Gets or sets the IntEntities.
/// </summary>
public DocumentCollection<int, IntEntity> IntEntities { get; set; } = null!;
/// <summary>
/// Gets or sets the StringEntities.
/// Gets or sets the StringEntities.
/// </summary>
public DocumentCollection<string, StringEntity> StringEntities { get; set; } = null!;
/// <summary>
/// Gets or sets the GuidEntities.
/// Gets or sets the GuidEntities.
/// </summary>
public DocumentCollection<Guid, GuidEntity> GuidEntities { get; set; } = null!;
/// <summary>
/// Gets or sets the CustomKeyEntities.
/// Gets or sets the CustomKeyEntities.
/// </summary>
public DocumentCollection<string, CustomKeyEntity> CustomKeyEntities { get; set; } = null!;
/// <summary>
/// Gets or sets the AsyncDocs.
/// Gets or sets the AsyncDocs.
/// </summary>
public DocumentCollection<int, AsyncDoc> AsyncDocs { get; set; } = null!;
/// <summary>
/// Gets or sets the SchemaUsers.
/// Gets or sets the SchemaUsers.
/// </summary>
public DocumentCollection<int, SchemaUser> SchemaUsers { get; set; } = null!;
/// <summary>
/// Gets or sets the VectorItems.
/// Gets or sets the VectorItems.
/// </summary>
public DocumentCollection<ObjectId, VectorEntity> VectorItems { get; set; } = null!;
/// <summary>
/// Gets or sets the GeoItems.
/// Gets or sets the GeoItems.
/// </summary>
public DocumentCollection<ObjectId, GeoEntity> GeoItems { get; set; } = null!;
// Source Generator Feature Tests
/// <summary>
/// Gets or sets the DerivedEntities.
/// Gets or sets the DerivedEntities.
/// </summary>
public DocumentCollection<ObjectId, DerivedEntity> DerivedEntities { get; set; } = null!;
/// <summary>
/// Gets or sets the ComputedPropertyEntities.
/// Gets or sets the ComputedPropertyEntities.
/// </summary>
public DocumentCollection<ObjectId, EntityWithComputedProperties> ComputedPropertyEntities { get; set; } = null!;
/// <summary>
/// Gets or sets the AdvancedCollectionEntities.
/// Gets or sets the AdvancedCollectionEntities.
/// </summary>
public DocumentCollection<ObjectId, EntityWithAdvancedCollections> AdvancedCollectionEntities { get; set; } = null!;
/// <summary>
/// Gets or sets the PrivateSetterEntities.
/// Gets or sets the PrivateSetterEntities.
/// </summary>
public DocumentCollection<ObjectId, EntityWithPrivateSetters> PrivateSetterEntities { get; set; } = null!;
/// <summary>
/// Gets or sets the InitSetterEntities.
/// Gets or sets the InitSetterEntities.
/// </summary>
public DocumentCollection<ObjectId, EntityWithInitSetters> InitSetterEntities { get; set; } = null!;
// Circular Reference Tests
/// <summary>
/// Gets or sets the Employees.
/// Gets or sets the Employees.
/// </summary>
public DocumentCollection<ObjectId, Employee> Employees { get; set; } = null!;
/// <summary>
/// Gets or sets the CategoryRefs.
/// Gets or sets the CategoryRefs.
/// </summary>
public DocumentCollection<ObjectId, CategoryRef> CategoryRefs { get; set; } = null!;
/// <summary>
/// Gets or sets the ProductRefs.
/// Gets or sets the ProductRefs.
/// </summary>
public DocumentCollection<ObjectId, ProductRef> ProductRefs { get; set; } = null!;
// Nullable String Id Test (UuidEntity scenario with inheritance)
/// <summary>
/// Gets or sets the MockCounters.
/// Gets or sets the MockCounters.
/// </summary>
public DocumentCollection<string, MockCounter> MockCounters { get; set; } = null!;
// Temporal Types Test (DateTimeOffset, TimeSpan, DateOnly, TimeOnly)
/// <summary>
/// Gets or sets the TemporalEntities.
/// Gets or sets the TemporalEntities.
/// </summary>
public DocumentCollection<ObjectId, TemporalEntity> TemporalEntities { get; set; } = null!;
/// <summary>
/// Initializes a new instance of the <see cref="TestDbContext"/> class.
/// </summary>
/// <param name="databasePath">The database path.</param>
public TestDbContext(string databasePath)
: this(databasePath, PageFileConfig.Default, CompressionOptions.Default)
{
}
/// <summary>
/// Gets or sets the Storage.
/// </summary>
public StorageEngine Storage => Engine;
/// <summary>
/// Initializes a new instance of the <see cref="TestDbContext"/> class.
/// </summary>
/// <param name="databasePath">The database path.</param>
/// <param name="compressionOptions">The compression options.</param>
public TestDbContext(string databasePath, CompressionOptions compressionOptions)
: this(databasePath, PageFileConfig.Default, compressionOptions)
{
}
/// <summary>
/// Initializes a new instance of the <see cref="TestDbContext"/> class.
/// </summary>
/// <param name="databasePath">The database path.</param>
/// <param name="pageFileConfig">The page file configuration.</param>
public TestDbContext(string databasePath, PageFileConfig pageFileConfig)
: this(databasePath, pageFileConfig, CompressionOptions.Default)
{
}
/// <summary>
/// Initializes a new instance of the <see cref="TestDbContext"/> class.
/// </summary>
/// <param name="databasePath">The database path.</param>
/// <param name="pageFileConfig">The page file configuration.</param>
/// <param name="compressionOptions">The compression options.</param>
/// <param name="maintenanceOptions">The maintenance options.</param>
public TestDbContext(
string databasePath,
PageFileConfig pageFileConfig,
CompressionOptions? compressionOptions,
MaintenanceOptions? maintenanceOptions = null)
: base(databasePath, pageFileConfig, compressionOptions, maintenanceOptions)
{
}
/// <inheritdoc />
protected override void OnModelCreating(ModelBuilder modelBuilder)
/// <inheritdoc />
protected override void OnModelCreating(ModelBuilder modelBuilder)
{
modelBuilder.Entity<AnnotatedUser>();
modelBuilder.Entity<User>().ToCollection("users");
@@ -207,11 +236,11 @@ public partial class TestDbContext : DocumentDbContext
modelBuilder.Entity<VectorEntity>()
.ToCollection("vector_items")
.HasVectorIndex(x => x.Embedding, dimensions: 3, metric: VectorMetric.L2, name: "idx_vector");
.HasVectorIndex(x => x.Embedding, 3, VectorMetric.L2, "idx_vector");
modelBuilder.Entity<GeoEntity>()
.ToCollection("geo_items")
.HasSpatialIndex(x => x.Location, name: "idx_spatial");
.HasSpatialIndex(x => x.Location, "idx_spatial");
modelBuilder.Entity<Order>()
.HasKey(x => x.Id)
@@ -236,25 +265,20 @@ public partial class TestDbContext : DocumentDbContext
modelBuilder.Entity<TemporalEntity>().ToCollection("temporal_entities").HasKey(e => e.Id);
}
/// <summary>
/// Executes ForceCheckpoint.
/// </summary>
public void ForceCheckpoint()
{
Engine.Checkpoint();
}
/// <summary>
/// Executes ForceCheckpoint with the requested checkpoint mode.
/// </summary>
/// <param name="mode">Checkpoint mode to execute.</param>
public CheckpointResult ForceCheckpoint(CheckpointMode mode)
{
return Engine.Checkpoint(mode);
}
/// <summary>
/// Gets or sets the Storage.
/// </summary>
public StorageEngine Storage => Engine;
}
/// <summary>
/// Executes ForceCheckpoint.
/// </summary>
public void ForceCheckpoint()
{
Engine.Checkpoint();
}
/// <summary>
/// Executes ForceCheckpoint with the requested checkpoint mode.
/// </summary>
/// <param name="mode">Checkpoint mode to execute.</param>
public CheckpointResult ForceCheckpoint(CheckpointMode mode)
{
return Engine.Checkpoint(mode);
}
}

View File

@@ -4,27 +4,27 @@ using ZB.MOM.WW.CBDD.Core.Metadata;
namespace ZB.MOM.WW.CBDD.Shared;
/// <summary>
/// Extended test context that inherits from TestDbContext.
/// Used to verify that collection initialization works correctly with inheritance.
/// Extended test context that inherits from TestDbContext.
/// Used to verify that collection initialization works correctly with inheritance.
/// </summary>
public partial class TestExtendedDbContext : TestDbContext
{
/// <summary>
/// Gets or sets the extended entities.
/// Initializes a new instance of the <see cref="TestExtendedDbContext" /> class.
/// </summary>
public DocumentCollection<int, ExtendedEntity> ExtendedEntities { get; set; } = null!;
/// <summary>
/// Initializes a new instance of the <see cref="TestExtendedDbContext"/> class.
/// </summary>
/// <param name="databasePath">Database file path.</param>
public TestExtendedDbContext(string databasePath) : base(databasePath)
/// <param name="databasePath">Database file path.</param>
public TestExtendedDbContext(string databasePath) : base(databasePath)
{
InitializeCollections();
}
/// <inheritdoc />
protected override void OnModelCreating(ModelBuilder modelBuilder)
/// <summary>
/// Gets or sets the extended entities.
/// </summary>
public DocumentCollection<int, ExtendedEntity> ExtendedEntities { get; set; } = null!;
/// <inheritdoc />
protected override void OnModelCreating(ModelBuilder modelBuilder)
{
base.OnModelCreating(modelBuilder);
@@ -32,4 +32,4 @@ public partial class TestExtendedDbContext : TestDbContext
.ToCollection("extended_entities")
.HasKey(e => e.Id);
}
}
}

File diff suppressed because it is too large Load Diff

View File

@@ -1 +1 @@
global using Shouldly;
global using Shouldly;

View File

@@ -6,44 +6,37 @@ namespace ZB.MOM.WW.CBDD.Tests;
public class BTreeDeleteUnderflowTests
{
/// <summary>
/// Executes Delete_HeavyWorkload_Should_Remain_Queryable_After_Merges.
/// Executes Delete_HeavyWorkload_Should_Remain_Queryable_After_Merges.
/// </summary>
[Fact]
public void Delete_HeavyWorkload_Should_Remain_Queryable_After_Merges()
{
var dbPath = Path.Combine(Path.GetTempPath(), $"btree_underflow_{Guid.NewGuid():N}.db");
string dbPath = Path.Combine(Path.GetTempPath(), $"btree_underflow_{Guid.NewGuid():N}.db");
try
{
using var storage = new StorageEngine(dbPath, PageFileConfig.Default);
var index = new BTreeIndex(storage, IndexOptions.CreateBTree("k"));
var insertTxn = storage.BeginTransaction().TransactionId;
for (int i = 1; i <= 240; i++)
{
ulong insertTxn = storage.BeginTransaction().TransactionId;
for (var i = 1; i <= 240; i++)
index.Insert(IndexKey.Create(i), new DocumentLocation((uint)(1000 + i), 0), insertTxn);
}
storage.CommitTransaction(insertTxn);
var deleteTxn = storage.BeginTransaction().TransactionId;
for (int i = 1; i <= 190; i++)
{
ulong deleteTxn = storage.BeginTransaction().TransactionId;
for (var i = 1; i <= 190; i++)
index.Delete(IndexKey.Create(i), new DocumentLocation((uint)(1000 + i), 0), deleteTxn).ShouldBeTrue();
}
storage.CommitTransaction(deleteTxn);
for (int i = 1; i <= 190; i++)
{
index.TryFind(IndexKey.Create(i), out _, 0).ShouldBeFalse();
}
for (var i = 1; i <= 190; i++) index.TryFind(IndexKey.Create(i), out _, 0).ShouldBeFalse();
for (int i = 191; i <= 240; i++)
for (var i = 191; i <= 240; i++)
{
index.TryFind(IndexKey.Create(i), out var location, 0).ShouldBeTrue();
location.PageId.ShouldBe((uint)(1000 + i));
}
var remaining = index.GreaterThan(IndexKey.Create(190), orEqual: false, 0).ToList();
var remaining = index.GreaterThan(IndexKey.Create(190), false, 0).ToList();
remaining.Count.ShouldBe(50);
remaining.First().Key.ShouldBe(IndexKey.Create(191));
remaining.Last().Key.ShouldBe(IndexKey.Create(240));
@@ -51,8 +44,8 @@ public class BTreeDeleteUnderflowTests
finally
{
if (File.Exists(dbPath)) File.Delete(dbPath);
var walPath = Path.ChangeExtension(dbPath, ".wal");
string walPath = Path.ChangeExtension(dbPath, ".wal");
if (File.Exists(walPath)) File.Delete(walPath);
}
}
}
}

View File

@@ -8,20 +8,20 @@ namespace ZB.MOM.WW.CBDD.Tests;
public class CollectionIndexManagerAndDefinitionTests
{
/// <summary>
/// Tests find best index should prefer unique index.
/// Tests find best index should prefer unique index.
/// </summary>
[Fact]
public void FindBestIndex_Should_Prefer_Unique_Index()
{
var dbPath = NewDbPath();
string dbPath = NewDbPath();
try
{
using var storage = new StorageEngine(dbPath, PageFileConfig.Default);
var mapper = new ZB_MOM_WW_CBDD_Shared_PersonMapper();
using var manager = new CollectionIndexManager<int, Person>(storage, mapper, "people_idx_pref_unique");
manager.CreateIndex(p => p.Age, name: "idx_age", unique: false);
manager.CreateIndex(p => p.Age, name: "idx_age_unique", unique: true);
manager.CreateIndex(p => p.Age, "idx_age");
manager.CreateIndex(p => p.Age, "idx_age_unique", true);
var best = manager.FindBestIndex("Age");
@@ -36,12 +36,12 @@ public class CollectionIndexManagerAndDefinitionTests
}
/// <summary>
/// Tests find best compound index should choose longest prefix.
/// Tests find best compound index should choose longest prefix.
/// </summary>
[Fact]
public void FindBestCompoundIndex_Should_Choose_Longest_Prefix()
{
var dbPath = NewDbPath();
string dbPath = NewDbPath();
try
{
using var storage = new StorageEngine(dbPath, PageFileConfig.Default);
@@ -76,12 +76,12 @@ public class CollectionIndexManagerAndDefinitionTests
}
/// <summary>
/// Tests drop index should remove metadata and be idempotent.
/// Tests drop index should remove metadata and be idempotent.
/// </summary>
[Fact]
public void DropIndex_Should_Remove_Metadata_And_Be_Idempotent()
{
var dbPath = NewDbPath();
string dbPath = NewDbPath();
const string collectionName = "people_idx_drop";
try
@@ -91,7 +91,7 @@ public class CollectionIndexManagerAndDefinitionTests
using (var manager = new CollectionIndexManager<int, Person>(storage, mapper, collectionName))
{
manager.CreateIndex(p => p.Age, name: "idx_age", unique: false);
manager.CreateIndex(p => p.Age, "idx_age");
manager.DropIndex("idx_age").ShouldBeTrue();
manager.DropIndex("idx_age").ShouldBeFalse();
manager.GetIndexInfo().ShouldBeEmpty();
@@ -107,7 +107,7 @@ public class CollectionIndexManagerAndDefinitionTests
}
/// <summary>
/// Tests collection index definition should respect query support rules.
/// Tests collection index definition should respect query support rules.
/// </summary>
[Fact]
public void CollectionIndexDefinition_Should_Respect_Query_Support_Rules()
@@ -129,7 +129,7 @@ public class CollectionIndexManagerAndDefinitionTests
}
/// <summary>
/// Tests collection index info to string should include diagnostics.
/// Tests collection index info to string should include diagnostics.
/// </summary>
[Fact]
public void CollectionIndexInfo_ToString_Should_Include_Diagnostics()
@@ -150,16 +150,18 @@ public class CollectionIndexManagerAndDefinitionTests
}
private static string NewDbPath()
=> Path.Combine(Path.GetTempPath(), $"idx_mgr_{Guid.NewGuid():N}.db");
{
return Path.Combine(Path.GetTempPath(), $"idx_mgr_{Guid.NewGuid():N}.db");
}
private static void CleanupFiles(string dbPath)
{
if (File.Exists(dbPath)) File.Delete(dbPath);
var walPath = Path.ChangeExtension(dbPath, ".wal");
string walPath = Path.ChangeExtension(dbPath, ".wal");
if (File.Exists(walPath)) File.Delete(walPath);
var altWalPath = dbPath + "-wal";
string altWalPath = dbPath + "-wal";
if (File.Exists(altWalPath)) File.Delete(altWalPath);
}
}
}

View File

@@ -1,18 +1,16 @@
using ZB.MOM.WW.CBDD.Core.Storage;
using ZB.MOM.WW.CBDD.Core.Indexing;
using ZB.MOM.WW.CBDD.Bson;
using Xunit;
using ZB.MOM.WW.CBDD.Core.Storage;
namespace ZB.MOM.WW.CBDD.Tests;
public class CursorTests : IDisposable
{
private readonly string _testFile;
private readonly StorageEngine _storage;
private readonly BTreeIndex _index;
private readonly StorageEngine _storage;
private readonly string _testFile;
/// <summary>
/// Initializes a new instance of the <see cref="CursorTests"/> class.
/// Initializes a new instance of the <see cref="CursorTests" /> class.
/// </summary>
public CursorTests()
{
@@ -25,9 +23,18 @@ public class CursorTests : IDisposable
SeedData();
}
/// <summary>
/// Disposes the resources used by this instance.
/// </summary>
public void Dispose()
{
_storage.Dispose();
if (File.Exists(_testFile)) File.Delete(_testFile);
}
private void SeedData()
{
var txnId = _storage.BeginTransaction().TransactionId;
ulong txnId = _storage.BeginTransaction().TransactionId;
// Insert 10, 20, 30
_index.Insert(IndexKey.Create(10), new DocumentLocation(1, 0), txnId);
@@ -38,7 +45,7 @@ public class CursorTests : IDisposable
}
/// <summary>
/// Tests move to first should position at first.
/// Tests move to first should position at first.
/// </summary>
[Fact]
public void MoveToFirst_ShouldPositionAtFirst()
@@ -49,7 +56,7 @@ public class CursorTests : IDisposable
}
/// <summary>
/// Tests move to last should position at last.
/// Tests move to last should position at last.
/// </summary>
[Fact]
public void MoveToLast_ShouldPositionAtLast()
@@ -60,7 +67,7 @@ public class CursorTests : IDisposable
}
/// <summary>
/// Tests move next should traverse forward.
/// Tests move next should traverse forward.
/// </summary>
[Fact]
public void MoveNext_ShouldTraverseForward()
@@ -78,7 +85,7 @@ public class CursorTests : IDisposable
}
/// <summary>
/// Tests move prev should traverse backward.
/// Tests move prev should traverse backward.
/// </summary>
[Fact]
public void MovePrev_ShouldTraverseBackward()
@@ -96,7 +103,7 @@ public class CursorTests : IDisposable
}
/// <summary>
/// Tests seek should position exact or next.
/// Tests seek should position exact or next.
/// </summary>
[Fact]
public void Seek_ShouldPositionExact_OrNext()
@@ -116,13 +123,4 @@ public class CursorTests : IDisposable
// Current should throw invalid
Should.Throw<InvalidOperationException>(() => cursor.Current);
}
/// <summary>
/// Disposes the resources used by this instance.
/// </summary>
public void Dispose()
{
_storage.Dispose();
if (File.Exists(_testFile)) File.Delete(_testFile);
}
}
}

View File

@@ -4,34 +4,43 @@ namespace ZB.MOM.WW.CBDD.Tests;
public class GeospatialStressTests : IDisposable
{
private readonly TestDbContext _db;
private readonly string _dbPath;
private readonly Shared.TestDbContext _db;
/// <summary>
/// Initializes database state for geospatial stress tests.
/// Initializes database state for geospatial stress tests.
/// </summary>
public GeospatialStressTests()
{
_dbPath = Path.Combine(Path.GetTempPath(), $"geo_stress_{Guid.NewGuid():N}.db");
_db = new Shared.TestDbContext(_dbPath);
_db = new TestDbContext(_dbPath);
}
/// <summary>
/// Verifies spatial index handles node splits and query operations under load.
/// Disposes test resources and removes generated files.
/// </summary>
public void Dispose()
{
_db.Dispose();
if (File.Exists(_dbPath)) File.Delete(_dbPath);
string wal = Path.ChangeExtension(_dbPath, ".wal");
if (File.Exists(wal)) File.Delete(wal);
}
/// <summary>
/// Verifies spatial index handles node splits and query operations under load.
/// </summary>
[Fact]
public void SpatialIndex_Should_Handle_Node_Splits_And_Queries()
{
const int count = 350;
for (int i = 0; i < count; i++)
{
for (var i = 0; i < count; i++)
_db.GeoItems.Insert(new GeoEntity
{
Name = $"pt-{i}",
Location = (40.0 + (i * 0.001), -73.0 - (i * 0.001))
Location = (40.0 + i * 0.001, -73.0 - i * 0.001)
});
}
_db.SaveChanges();
@@ -45,15 +54,4 @@ public class GeospatialStressTests : IDisposable
var near = _db.GeoItems.Near("idx_spatial", (40.10, -73.10), 30.0).ToList();
near.Count.ShouldBeGreaterThan(0);
}
/// <summary>
/// Disposes test resources and removes generated files.
/// </summary>
public void Dispose()
{
_db.Dispose();
if (File.Exists(_dbPath)) File.Delete(_dbPath);
var wal = Path.ChangeExtension(_dbPath, ".wal");
if (File.Exists(wal)) File.Delete(wal);
}
}
}

View File

@@ -1,28 +1,33 @@
using Xunit;
using ZB.MOM.WW.CBDD.Core.Indexing;
using System.IO;
using System.Linq;
using ZB.MOM.WW.CBDD.Core;
using ZB.MOM.WW.CBDD.Shared;
namespace ZB.MOM.WW.CBDD.Tests;
public class GeospatialTests : IDisposable
{
private readonly TestDbContext _db;
private readonly string _dbPath;
private readonly Shared.TestDbContext _db;
/// <summary>
/// Initializes a new instance of the <see cref="GeospatialTests"/> class.
/// Initializes a new instance of the <see cref="GeospatialTests" /> class.
/// </summary>
public GeospatialTests()
{
_dbPath = Path.Combine(Path.GetTempPath(), $"cbdd_geo_{Guid.NewGuid()}.db");
_db = new Shared.TestDbContext(_dbPath);
_db = new TestDbContext(_dbPath);
}
/// <summary>
/// Verifies spatial within queries return expected results.
/// Disposes test resources and removes temporary files.
/// </summary>
public void Dispose()
{
_db.Dispose();
if (File.Exists(_dbPath)) File.Delete(_dbPath);
}
/// <summary>
/// Verifies spatial within queries return expected results.
/// </summary>
[Fact]
public void Can_Insert_And_Search_Within()
@@ -45,7 +50,7 @@ public class GeospatialTests : IDisposable
}
/// <summary>
/// Verifies near queries return expected proximity results.
/// Verifies near queries return expected proximity results.
/// </summary>
[Fact]
public void Can_Search_Near_Proximity()
@@ -69,7 +74,7 @@ public class GeospatialTests : IDisposable
}
/// <summary>
/// Verifies LINQ near integration returns expected results.
/// Verifies LINQ near integration returns expected results.
/// </summary>
[Fact]
public void LINQ_Integration_Near_Works()
@@ -79,8 +84,8 @@ public class GeospatialTests : IDisposable
// LINQ query using .Near() extension
var query = from p in _db.GeoItems.AsQueryable()
where p.Location.Near(milan, 10.0)
select p;
where p.Location.Near(milan, 10.0)
select p;
var results = query.ToList();
@@ -89,7 +94,7 @@ public class GeospatialTests : IDisposable
}
/// <summary>
/// Verifies LINQ within integration returns expected results.
/// Verifies LINQ within integration returns expected results.
/// </summary>
[Fact]
public void LINQ_Integration_Within_Works()
@@ -102,19 +107,10 @@ public class GeospatialTests : IDisposable
// LINQ query using .Within() extension
var results = _db.GeoItems.AsQueryable()
.Where(p => p.Location.Within(min, max))
.ToList();
.Where(p => p.Location.Within(min, max))
.ToList();
results.Count().ShouldBe(1);
results[0].Name.ShouldBe("Milan Office");
}
/// <summary>
/// Disposes test resources and removes temporary files.
/// </summary>
public void Dispose()
{
_db.Dispose();
if (File.Exists(_dbPath)) File.Delete(_dbPath);
}
}
}

View File

@@ -6,7 +6,7 @@ namespace ZB.MOM.WW.CBDD.Tests;
public class HashIndexTests
{
/// <summary>
/// Executes Insert_And_TryFind_Should_Return_Location.
/// Executes Insert_And_TryFind_Should_Return_Location.
/// </summary>
[Fact]
public void Insert_And_TryFind_Should_Return_Location()
@@ -23,7 +23,7 @@ public class HashIndexTests
}
/// <summary>
/// Executes Unique_HashIndex_Should_Throw_On_Duplicate_Key.
/// Executes Unique_HashIndex_Should_Throw_On_Duplicate_Key.
/// </summary>
[Fact]
public void Unique_HashIndex_Should_Throw_On_Duplicate_Key()
@@ -45,7 +45,7 @@ public class HashIndexTests
}
/// <summary>
/// Executes Remove_Should_Remove_Only_Matching_Entry.
/// Executes Remove_Should_Remove_Only_Matching_Entry.
/// </summary>
[Fact]
public void Remove_Should_Remove_Only_Matching_Entry()
@@ -71,7 +71,7 @@ public class HashIndexTests
}
/// <summary>
/// Executes FindAll_Should_Return_All_Matching_Entries.
/// Executes FindAll_Should_Return_All_Matching_Entries.
/// </summary>
[Fact]
public void FindAll_Should_Return_All_Matching_Entries()
@@ -88,4 +88,4 @@ public class HashIndexTests
matches.Count.ShouldBe(2);
matches.All(e => e.Key == key).ShouldBeTrue();
}
}
}

View File

@@ -1,31 +1,25 @@
using ZB.MOM.WW.CBDD.Bson;
using ZB.MOM.WW.CBDD.Core;
using ZB.MOM.WW.CBDD.Core.Collections;
using ZB.MOM.WW.CBDD.Core.Indexing;
using ZB.MOM.WW.CBDD.Shared;
using System;
using Xunit;
namespace ZB.MOM.WW.CBDD.Tests;
public class IndexDirectionTests : IDisposable
{
private readonly TestDbContext _db;
private readonly string _dbPath = "index_direction_tests.db";
private readonly Shared.TestDbContext _db;
/// <summary>
/// Initializes database state for index direction tests.
/// Initializes database state for index direction tests.
/// </summary>
public IndexDirectionTests()
{
if (File.Exists(_dbPath)) File.Delete(_dbPath);
_db = new Shared.TestDbContext(_dbPath);
_db = new TestDbContext(_dbPath);
// _db.Database.EnsureCreated(); // Not needed/doesn't exist? StorageEngine handles creation.
}
/// <summary>
/// Disposes test resources and deletes temporary files.
/// Disposes test resources and deletes temporary files.
/// </summary>
public void Dispose()
{
@@ -34,7 +28,7 @@ public class IndexDirectionTests : IDisposable
}
/// <summary>
/// Verifies forward range scans return values in ascending order.
/// Verifies forward range scans return values in ascending order.
/// </summary>
[Fact]
public void Range_Forward_ReturnsOrderedResults()
@@ -42,20 +36,21 @@ public class IndexDirectionTests : IDisposable
var collection = _db.People;
var index = collection.EnsureIndex(p => p.Age, "idx_age");
var people = Enumerable.Range(1, 100).Select(i => new Person { Id = i, Name = $"Person {i}", Age = i }).ToList();
var people = Enumerable.Range(1, 100).Select(i => new Person { Id = i, Name = $"Person {i}", Age = i })
.ToList();
collection.InsertBulk(people);
_db.SaveChanges();
// Scan Forward
var results = index.Range(10, 20, IndexDirection.Forward).ToList();
var results = index.Range(10, 20).ToList();
results.Count.ShouldBe(11); // 10 to 20 inclusive
collection.FindByLocation(results.First())!.Age.ShouldBe(10); // First is 10
collection.FindByLocation(results.Last())!.Age.ShouldBe(20); // Last is 20
collection.FindByLocation(results.Last())!.Age.ShouldBe(20); // Last is 20
}
/// <summary>
/// Verifies backward range scans return values in descending order.
/// Verifies backward range scans return values in descending order.
/// </summary>
[Fact]
public void Range_Backward_ReturnsReverseOrderedResults()
@@ -63,7 +58,8 @@ public class IndexDirectionTests : IDisposable
var collection = _db.People;
var index = collection.EnsureIndex(p => p.Age, "idx_age");
var people = Enumerable.Range(1, 100).Select(i => new Person { Id = i, Name = $"Person {i}", Age = i }).ToList();
var people = Enumerable.Range(1, 100).Select(i => new Person { Id = i, Name = $"Person {i}", Age = i })
.ToList();
collection.InsertBulk(people);
_db.SaveChanges();
@@ -72,11 +68,11 @@ public class IndexDirectionTests : IDisposable
results.Count.ShouldBe(11); // 10 to 20 inclusive
collection.FindByLocation(results.First())!.Age.ShouldBe(20); // First is 20 (Reverse)
collection.FindByLocation(results.Last())!.Age.ShouldBe(10); // Last is 10
collection.FindByLocation(results.Last())!.Age.ShouldBe(10); // Last is 10
}
/// <summary>
/// Verifies backward scans across split index pages return complete result sets.
/// Verifies backward scans across split index pages return complete result sets.
/// </summary>
[Fact]
public void Range_Backward_WithMultiplePages_ReturnsReverseOrderedResults()
@@ -88,7 +84,8 @@ public class IndexDirectionTests : IDisposable
// Entry size approx 10 bytes key + 6 bytes loc + overhead
// 1000 items * 20 bytes = 20KB > 4KB.
var count = 1000;
var people = Enumerable.Range(1, count).Select(i => new Person { Id = i, Name = $"Person {i}", Age = i }).ToList();
var people = Enumerable.Range(1, count).Select(i => new Person { Id = i, Name = $"Person {i}", Age = i })
.ToList();
collection.InsertBulk(people);
_db.SaveChanges();
@@ -105,4 +102,4 @@ public class IndexDirectionTests : IDisposable
// collection.FindByLocation(results.First(), null)!.Age.ShouldBe(count); // Max Age (Fails: Max is likely 255)
// collection.FindByLocation(results.Last(), null)!.Age.ShouldBe(1); // Min Age (Fails: Min is likely 256)
}
}
}

View File

@@ -1,163 +1,161 @@
using Xunit;
using ZB.MOM.WW.CBDD.Core.Query;
using ZB.MOM.WW.CBDD.Core.Indexing;
using System.Linq.Expressions;
using System.Collections.Generic;
using System;
using ZB.MOM.WW.CBDD.Core.Indexing;
using ZB.MOM.WW.CBDD.Core.Query;
namespace ZB.MOM.WW.CBDD.Tests
namespace ZB.MOM.WW.CBDD.Tests;
public class IndexOptimizationTests
{
public class IndexOptimizationTests
/// <summary>
/// Tests optimizer identifies equality.
/// </summary>
[Fact]
public void Optimizer_Identifies_Equality()
{
public class TestEntity
var indexes = new List<CollectionIndexInfo>
{
/// <summary>
/// Gets or sets the id.
/// </summary>
public int Id { get; set; }
/// <summary>
/// Gets or sets the name.
/// </summary>
public string Name { get; set; } = "";
/// <summary>
/// Gets or sets the age.
/// </summary>
public int Age { get; set; }
}
new() { Name = "idx_age", PropertyPaths = ["Age"] }
};
/// <summary>
/// Tests optimizer identifies equality.
/// </summary>
[Fact]
public void Optimizer_Identifies_Equality()
{
var indexes = new List<CollectionIndexInfo>
{
new CollectionIndexInfo { Name = "idx_age", PropertyPaths = ["Age"] }
};
Expression<Func<TestEntity, bool>> predicate = x => x.Age == 30;
var model = new QueryModel { WhereClause = predicate };
Expression<Func<TestEntity, bool>> predicate = x => x.Age == 30;
var model = new QueryModel { WhereClause = predicate };
var result = IndexOptimizer.TryOptimize<TestEntity>(model, indexes);
var result = IndexOptimizer.TryOptimize<TestEntity>(model, indexes);
result.ShouldNotBeNull();
result.IndexName.ShouldBe("idx_age");
result.MinValue.ShouldBe(30);
result.MaxValue.ShouldBe(30);
result.IsRange.ShouldBeFalse();
}
/// <summary>
/// Tests optimizer identifies range greater than.
/// </summary>
[Fact]
public void Optimizer_Identifies_Range_GreaterThan()
{
var indexes = new List<CollectionIndexInfo>
{
new CollectionIndexInfo { Name = "idx_age", PropertyPaths = ["Age"] }
};
Expression<Func<TestEntity, bool>> predicate = x => x.Age > 25;
var model = new QueryModel { WhereClause = predicate };
var result = IndexOptimizer.TryOptimize<TestEntity>(model, indexes);
result.ShouldNotBeNull();
result.IndexName.ShouldBe("idx_age");
result.MinValue.ShouldBe(25);
result.MaxValue.ShouldBeNull();
result.IsRange.ShouldBeTrue();
}
/// <summary>
/// Tests optimizer identifies range less than.
/// </summary>
[Fact]
public void Optimizer_Identifies_Range_LessThan()
{
var indexes = new List<CollectionIndexInfo>
{
new CollectionIndexInfo { Name = "idx_age", PropertyPaths = ["Age"] }
};
Expression<Func<TestEntity, bool>> predicate = x => x.Age < 50;
var model = new QueryModel { WhereClause = predicate };
var result = IndexOptimizer.TryOptimize<TestEntity>(model, indexes);
result.ShouldNotBeNull();
result.IndexName.ShouldBe("idx_age");
result.MinValue.ShouldBeNull();
result.MaxValue.ShouldBe(50);
result.IsRange.ShouldBeTrue();
}
/// <summary>
/// Tests optimizer identifies range between simulated.
/// </summary>
[Fact]
public void Optimizer_Identifies_Range_Between_Simulated()
{
var indexes = new List<CollectionIndexInfo>
{
new CollectionIndexInfo { Name = "idx_age", PropertyPaths = ["Age"] }
};
Expression<Func<TestEntity, bool>> predicate = x => x.Age > 20 && x.Age < 40;
var model = new QueryModel { WhereClause = predicate };
var result = IndexOptimizer.TryOptimize<TestEntity>(model, indexes);
result.ShouldNotBeNull();
result.IndexName.ShouldBe("idx_age");
result.MinValue.ShouldBe(20);
result.MaxValue.ShouldBe(40);
result.IsRange.ShouldBeTrue();
}
/// <summary>
/// Tests optimizer identifies starts with.
/// </summary>
[Fact]
public void Optimizer_Identifies_StartsWith()
{
var indexes = new List<CollectionIndexInfo>
{
new CollectionIndexInfo { Name = "idx_name", PropertyPaths = ["Name"], Type = IndexType.BTree }
};
Expression<Func<TestEntity, bool>> predicate = x => x.Name.StartsWith("Ali");
var model = new QueryModel { WhereClause = predicate };
var result = IndexOptimizer.TryOptimize<TestEntity>(model, indexes);
result.ShouldNotBeNull();
result.IndexName.ShouldBe("idx_name");
result.MinValue.ShouldBe("Ali");
// "Ali" + next char -> "Alj"
result.MaxValue.ShouldBe("Alj");
result.IsRange.ShouldBeTrue();
}
/// <summary>
/// Tests optimizer ignores non indexed fields.
/// </summary>
[Fact]
public void Optimizer_Ignores_NonIndexed_Fields()
{
var indexes = new List<CollectionIndexInfo>
{
new CollectionIndexInfo { Name = "idx_age", PropertyPaths = ["Age"] }
};
Expression<Func<TestEntity, bool>> predicate = x => x.Name == "Alice"; // Name is not indexed
var model = new QueryModel { WhereClause = predicate };
var result = IndexOptimizer.TryOptimize<TestEntity>(model, indexes);
result.ShouldBeNull();
}
result.ShouldNotBeNull();
result.IndexName.ShouldBe("idx_age");
result.MinValue.ShouldBe(30);
result.MaxValue.ShouldBe(30);
result.IsRange.ShouldBeFalse();
}
}
/// <summary>
/// Tests optimizer identifies range greater than.
/// </summary>
[Fact]
public void Optimizer_Identifies_Range_GreaterThan()
{
var indexes = new List<CollectionIndexInfo>
{
new() { Name = "idx_age", PropertyPaths = ["Age"] }
};
Expression<Func<TestEntity, bool>> predicate = x => x.Age > 25;
var model = new QueryModel { WhereClause = predicate };
var result = IndexOptimizer.TryOptimize<TestEntity>(model, indexes);
result.ShouldNotBeNull();
result.IndexName.ShouldBe("idx_age");
result.MinValue.ShouldBe(25);
result.MaxValue.ShouldBeNull();
result.IsRange.ShouldBeTrue();
}
/// <summary>
/// Tests optimizer identifies range less than.
/// </summary>
[Fact]
public void Optimizer_Identifies_Range_LessThan()
{
var indexes = new List<CollectionIndexInfo>
{
new() { Name = "idx_age", PropertyPaths = ["Age"] }
};
Expression<Func<TestEntity, bool>> predicate = x => x.Age < 50;
var model = new QueryModel { WhereClause = predicate };
var result = IndexOptimizer.TryOptimize<TestEntity>(model, indexes);
result.ShouldNotBeNull();
result.IndexName.ShouldBe("idx_age");
result.MinValue.ShouldBeNull();
result.MaxValue.ShouldBe(50);
result.IsRange.ShouldBeTrue();
}
/// <summary>
/// Tests optimizer identifies range between simulated.
/// </summary>
[Fact]
public void Optimizer_Identifies_Range_Between_Simulated()
{
var indexes = new List<CollectionIndexInfo>
{
new() { Name = "idx_age", PropertyPaths = ["Age"] }
};
Expression<Func<TestEntity, bool>> predicate = x => x.Age > 20 && x.Age < 40;
var model = new QueryModel { WhereClause = predicate };
var result = IndexOptimizer.TryOptimize<TestEntity>(model, indexes);
result.ShouldNotBeNull();
result.IndexName.ShouldBe("idx_age");
result.MinValue.ShouldBe(20);
result.MaxValue.ShouldBe(40);
result.IsRange.ShouldBeTrue();
}
/// <summary>
/// Tests optimizer identifies starts with.
/// </summary>
[Fact]
public void Optimizer_Identifies_StartsWith()
{
var indexes = new List<CollectionIndexInfo>
{
new() { Name = "idx_name", PropertyPaths = ["Name"], Type = IndexType.BTree }
};
Expression<Func<TestEntity, bool>> predicate = x => x.Name.StartsWith("Ali");
var model = new QueryModel { WhereClause = predicate };
var result = IndexOptimizer.TryOptimize<TestEntity>(model, indexes);
result.ShouldNotBeNull();
result.IndexName.ShouldBe("idx_name");
result.MinValue.ShouldBe("Ali");
// "Ali" + next char -> "Alj"
result.MaxValue.ShouldBe("Alj");
result.IsRange.ShouldBeTrue();
}
/// <summary>
/// Tests optimizer ignores non indexed fields.
/// </summary>
[Fact]
public void Optimizer_Ignores_NonIndexed_Fields()
{
var indexes = new List<CollectionIndexInfo>
{
new() { Name = "idx_age", PropertyPaths = ["Age"] }
};
Expression<Func<TestEntity, bool>> predicate = x => x.Name == "Alice"; // Name is not indexed
var model = new QueryModel { WhereClause = predicate };
var result = IndexOptimizer.TryOptimize<TestEntity>(model, indexes);
result.ShouldBeNull();
}
public class TestEntity
{
/// <summary>
/// Gets or sets the id.
/// </summary>
public int Id { get; set; }
/// <summary>
/// Gets or sets the name.
/// </summary>
public string Name { get; set; } = "";
/// <summary>
/// Gets or sets the age.
/// </summary>
public int Age { get; set; }
}
}

View File

@@ -1,10 +1,4 @@
using ZB.MOM.WW.CBDD.Bson;
using ZB.MOM.WW.CBDD.Core;
using ZB.MOM.WW.CBDD.Core.Collections;
using ZB.MOM.WW.CBDD.Core.Metadata;
using ZB.MOM.WW.CBDD.Shared;
using System;
using System.Buffers;
namespace ZB.MOM.WW.CBDD.Tests;
@@ -13,7 +7,7 @@ public class PrimaryKeyTests : IDisposable
private readonly string _dbPath = "primary_key_tests.db";
/// <summary>
/// Initializes a new instance of the <see cref="PrimaryKeyTests"/> class.
/// Initializes a new instance of the <see cref="PrimaryKeyTests" /> class.
/// </summary>
public PrimaryKeyTests()
{
@@ -21,7 +15,7 @@ public class PrimaryKeyTests : IDisposable
}
/// <summary>
/// Executes Dispose.
/// Executes Dispose.
/// </summary>
public void Dispose()
{
@@ -29,12 +23,12 @@ public class PrimaryKeyTests : IDisposable
}
/// <summary>
/// Executes Test_Int_PrimaryKey.
/// Executes Test_Int_PrimaryKey.
/// </summary>
[Fact]
public void Test_Int_PrimaryKey()
{
using var db = new Shared.TestDbContext(_dbPath);
using var db = new TestDbContext(_dbPath);
var entity = new IntEntity { Id = 1, Name = "Test 1" };
db.IntEntities.Insert(entity);
@@ -56,12 +50,12 @@ public class PrimaryKeyTests : IDisposable
}
/// <summary>
/// Executes Test_String_PrimaryKey.
/// Executes Test_String_PrimaryKey.
/// </summary>
[Fact]
public void Test_String_PrimaryKey()
{
using var db = new Shared.TestDbContext(_dbPath);
using var db = new TestDbContext(_dbPath);
var entity = new StringEntity { Id = "key1", Value = "Value 1" };
db.StringEntities.Insert(entity);
@@ -78,12 +72,12 @@ public class PrimaryKeyTests : IDisposable
}
/// <summary>
/// Executes Test_Guid_PrimaryKey.
/// Executes Test_Guid_PrimaryKey.
/// </summary>
[Fact]
public void Test_Guid_PrimaryKey()
{
using var db = new Shared.TestDbContext(_dbPath);
using var db = new TestDbContext(_dbPath);
var id = Guid.NewGuid();
var entity = new GuidEntity { Id = id, Name = "Guid Test" };
@@ -100,13 +94,13 @@ public class PrimaryKeyTests : IDisposable
}
/// <summary>
/// Executes Test_String_PrimaryKey_With_Custom_Name.
/// Executes Test_String_PrimaryKey_With_Custom_Name.
/// </summary>
[Fact]
public void Test_String_PrimaryKey_With_Custom_Name()
{
// Test entity with string key NOT named "Id" (named "Code" instead)
using var db = new Shared.TestDbContext(_dbPath);
using var db = new TestDbContext(_dbPath);
var entity = new CustomKeyEntity { Code = "ABC123", Description = "Test Description" };
db.CustomKeyEntities.Insert(entity);
@@ -131,4 +125,4 @@ public class PrimaryKeyTests : IDisposable
db.SaveChanges();
db.CustomKeyEntities.FindById("ABC123").ShouldBeNull();
}
}
}

View File

@@ -5,7 +5,7 @@ namespace ZB.MOM.WW.CBDD.Tests;
public class VectorMathTests
{
/// <summary>
/// Verifies distance calculations across all supported vector metrics.
/// Verifies distance calculations across all supported vector metrics.
/// </summary>
[Fact]
public void Distance_Should_Cover_All_Metrics()
@@ -13,19 +13,19 @@ public class VectorMathTests
float[] v1 = [1f, 2f];
float[] v2 = [3f, 4f];
var cosineDistance = VectorMath.Distance(v1, v2, VectorMetric.Cosine);
var l2Distance = VectorMath.Distance(v1, v2, VectorMetric.L2);
var dotDistance = VectorMath.Distance(v1, v2, VectorMetric.DotProduct);
float cosineDistance = VectorMath.Distance(v1, v2, VectorMetric.Cosine);
float l2Distance = VectorMath.Distance(v1, v2, VectorMetric.L2);
float dotDistance = VectorMath.Distance(v1, v2, VectorMetric.DotProduct);
l2Distance.ShouldBe(8f);
dotDistance.ShouldBe(-11f);
var expectedCosine = 1f - (11f / (MathF.Sqrt(5f) * 5f));
float expectedCosine = 1f - 11f / (MathF.Sqrt(5f) * 5f);
MathF.Abs(cosineDistance - expectedCosine).ShouldBeLessThan(0.0001f);
}
/// <summary>
/// Verifies cosine similarity returns zero when one vector has zero magnitude.
/// Verifies cosine similarity returns zero when one vector has zero magnitude.
/// </summary>
[Fact]
public void CosineSimilarity_Should_Return_Zero_For_ZeroMagnitude_Vector()
@@ -37,7 +37,7 @@ public class VectorMathTests
}
/// <summary>
/// Verifies dot product throws for mismatched vector lengths.
/// Verifies dot product throws for mismatched vector lengths.
/// </summary>
[Fact]
public void DotProduct_Should_Throw_For_Length_Mismatch()
@@ -49,7 +49,7 @@ public class VectorMathTests
}
/// <summary>
/// Verifies squared Euclidean distance throws for mismatched vector lengths.
/// Verifies squared Euclidean distance throws for mismatched vector lengths.
/// </summary>
[Fact]
public void EuclideanDistanceSquared_Should_Throw_For_Length_Mismatch()
@@ -59,4 +59,4 @@ public class VectorMathTests
Should.Throw<ArgumentException>(() => VectorMath.EuclideanDistanceSquared(v1, v2));
}
}
}

View File

@@ -1,23 +1,20 @@
using ZB.MOM.WW.CBDD.Bson;
using ZB.MOM.WW.CBDD.Core;
using ZB.MOM.WW.CBDD.Core.Indexing;
using ZB.MOM.WW.CBDD.Shared;
using Xunit;
namespace ZB.MOM.WW.CBDD.Tests;
public class VectorSearchTests
{
/// <summary>
/// Verifies basic vector-search query behavior.
/// Verifies basic vector-search query behavior.
/// </summary>
[Fact]
public void Test_VectorSearch_Basic()
{
string dbPath = "vector_test.db";
var dbPath = "vector_test.db";
if (File.Exists(dbPath)) File.Delete(dbPath);
using (var db = new Shared.TestDbContext(dbPath))
using (var db = new TestDbContext(dbPath))
{
db.VectorItems.Insert(new VectorEntity { Title = "Near", Embedding = [1.0f, 1.0f, 1.0f] });
db.VectorItems.Insert(new VectorEntity { Title = "Far", Embedding = [10.0f, 10.0f, 10.0f] });
@@ -31,4 +28,4 @@ public class VectorSearchTests
File.Delete(dbPath);
}
}
}

View File

@@ -1,25 +1,19 @@
using ZB.MOM.WW.CBDD.Bson;
using ZB.MOM.WW.CBDD.Core;
using ZB.MOM.WW.CBDD.Core.Collections;
using ZB.MOM.WW.CBDD.Core.Indexing;
using ZB.MOM.WW.CBDD.Core.Storage;
using ZB.MOM.WW.CBDD.Core.Transactions;
using ZB.MOM.WW.CBDD.Shared;
using ZB.MOM.WW.CBDD.Shared.TestDbContext_TestDbContext_Mappers;
using System.Buffers;
using Xunit;
namespace ZB.MOM.WW.CBDD.Tests;
public class WalIndexTests : IDisposable
{
private readonly TestDbContext _db;
private readonly string _dbPath;
private readonly string _walPath;
private readonly Shared.TestDbContext _db;
private readonly ITestOutputHelper _output;
private readonly string _walPath;
/// <summary>
/// Initializes a new instance of the <see cref="WalIndexTests"/> class.
/// Initializes a new instance of the <see cref="WalIndexTests" /> class.
/// </summary>
/// <param name="output">Test output sink.</param>
public WalIndexTests(ITestOutputHelper output)
@@ -29,11 +23,41 @@ public class WalIndexTests : IDisposable
// WAL defaults to .wal next to db
_walPath = Path.ChangeExtension(_dbPath, ".wal");
_db = new Shared.TestDbContext(_dbPath);
_db = new TestDbContext(_dbPath);
}
/// <summary>
/// Verifies index writes are recorded in the WAL.
/// Releases test resources.
/// </summary>
public void Dispose()
{
try
{
_db?.Dispose(); // Safe to call multiple times
}
catch
{
}
try
{
if (File.Exists(_dbPath)) File.Delete(_dbPath);
}
catch
{
}
try
{
if (File.Exists(_walPath)) File.Delete(_walPath);
}
catch
{
}
}
/// <summary>
/// Verifies index writes are recorded in the WAL.
/// </summary>
[Fact]
public void IndexWritesAreLoggedToWal()
@@ -71,8 +95,8 @@ public class WalIndexTests : IDisposable
_output.WriteLine($"Found {writeRecords.Count} Write records for Txn {txn.TransactionId}");
// Analyze pages
int indexPageCount = 0;
int dataPageCount = 0;
var indexPageCount = 0;
var dataPageCount = 0;
foreach (var record in writeRecords)
{
@@ -89,21 +113,18 @@ public class WalIndexTests : IDisposable
private PageType ParsePageType(byte[]? pageData)
{
if (pageData == null || pageData.Length < 32) return (PageType)0;
if (pageData == null || pageData.Length < 32) return 0;
// PageType is at offset 4 (1 byte)
return (PageType)pageData[4]; // Casting byte to PageType
}
/// <summary>
/// Verifies offline compaction leaves the WAL empty.
/// Verifies offline compaction leaves the WAL empty.
/// </summary>
[Fact]
public void Compact_ShouldLeaveWalEmpty_AfterOfflineRun()
{
for (var i = 0; i < 100; i++)
{
_db.Users.Insert(new User { Name = $"wal-compact-{i:D3}", Age = i % 30 });
}
for (var i = 0; i < 100; i++) _db.Users.Insert(new User { Name = $"wal-compact-{i:D3}", Age = i % 30 });
_db.SaveChanges();
_db.Storage.GetWalSize().ShouldBeGreaterThan(0);
@@ -121,24 +142,22 @@ public class WalIndexTests : IDisposable
}
/// <summary>
/// Verifies WAL recovery followed by compaction preserves data.
/// Verifies WAL recovery followed by compaction preserves data.
/// </summary>
[Fact]
public void Recover_WithCommittedWal_ThenCompact_ShouldPreserveData()
{
var dbPath = Path.Combine(Path.GetTempPath(), $"test_wal_recover_compact_{Guid.NewGuid():N}.db");
var walPath = Path.ChangeExtension(dbPath, ".wal");
string dbPath = Path.Combine(Path.GetTempPath(), $"test_wal_recover_compact_{Guid.NewGuid():N}.db");
string walPath = Path.ChangeExtension(dbPath, ".wal");
var markerPath = $"{dbPath}.compact.state";
var expectedIds = new List<ObjectId>();
try
{
using (var writer = new Shared.TestDbContext(dbPath))
using (var writer = new TestDbContext(dbPath))
{
for (var i = 0; i < 48; i++)
{
expectedIds.Add(writer.Users.Insert(new User { Name = $"recover-{i:D3}", Age = i % 10 }));
}
writer.SaveChanges();
writer.Storage.GetWalSize().ShouldBeGreaterThan(0);
@@ -146,16 +165,13 @@ public class WalIndexTests : IDisposable
new FileInfo(walPath).Length.ShouldBeGreaterThan(0);
using (var recovered = new Shared.TestDbContext(dbPath))
using (var recovered = new TestDbContext(dbPath))
{
recovered.Users.Count().ShouldBe(expectedIds.Count);
recovered.Compact();
recovered.Storage.GetWalSize().ShouldBe(0);
foreach (var id in expectedIds)
{
recovered.Users.FindById(id).ShouldNotBeNull();
}
foreach (var id in expectedIds) recovered.Users.FindById(id).ShouldNotBeNull();
}
}
finally
@@ -165,19 +181,4 @@ public class WalIndexTests : IDisposable
if (File.Exists(markerPath)) File.Delete(markerPath);
}
}
/// <summary>
/// Releases test resources.
/// </summary>
public void Dispose()
{
try
{
_db?.Dispose(); // Safe to call multiple times
}
catch { }
try { if (File.Exists(_dbPath)) File.Delete(_dbPath); } catch { }
try { if (File.Exists(_walPath)) File.Delete(_walPath); } catch { }
}
}
}

View File

@@ -1,267 +1,259 @@
using Xunit;
using ZB.MOM.WW.CBDD.Core.Collections;
using ZB.MOM.WW.CBDD.Bson;
using System.Linq;
using System.Collections.Generic;
using ZB.MOM.WW.CBDD.Core.Indexing;
using ZB.MOM.WW.CBDD.Core.Storage;
using System;
using System.IO;
using ZB.MOM.WW.CBDD.Shared;
using ZB.MOM.WW.CBDD.Shared.TestDbContext_TestDbContext_Mappers;
namespace ZB.MOM.WW.CBDD.Tests
namespace ZB.MOM.WW.CBDD.Tests;
public class AdvancedQueryTests : IDisposable
{
public class AdvancedQueryTests : IDisposable
private readonly TestDbContext _db;
private readonly string _dbPath;
/// <summary>
/// Initializes test database state used by advanced query tests.
/// </summary>
public AdvancedQueryTests()
{
private readonly string _dbPath;
private readonly Shared.TestDbContext _db;
_dbPath = Path.Combine(Path.GetTempPath(), $"cbdd_advanced_{Guid.NewGuid()}.db");
_db = new TestDbContext(_dbPath);
/// <summary>
/// Initializes test database state used by advanced query tests.
/// </summary>
public AdvancedQueryTests()
{
_dbPath = Path.Combine(Path.GetTempPath(), $"cbdd_advanced_{Guid.NewGuid()}.db");
_db = new Shared.TestDbContext(_dbPath);
// Seed Data
_db.TestDocuments.Insert(new TestDocument { Category = "A", Amount = 10, Name = "Item1" });
_db.TestDocuments.Insert(new TestDocument { Category = "A", Amount = 20, Name = "Item2" });
_db.TestDocuments.Insert(new TestDocument { Category = "B", Amount = 30, Name = "Item3" });
_db.TestDocuments.Insert(new TestDocument { Category = "B", Amount = 40, Name = "Item4" });
_db.TestDocuments.Insert(new TestDocument { Category = "C", Amount = 50, Name = "Item5" });
_db.SaveChanges();
}
/// <summary>
/// Disposes test resources and removes temporary files.
/// </summary>
public void Dispose()
{
_db.Dispose();
if (File.Exists(_dbPath)) File.Delete(_dbPath);
}
/// <summary>
/// Verifies grouping by a simple key returns expected groups and counts.
/// </summary>
[Fact]
public void GroupBy_Simple_Key_Works()
{
var groups = _db.TestDocuments.AsQueryable()
.GroupBy(x => x.Category)
.ToList();
groups.Count.ShouldBe(3);
var groupA = groups.First(g => g.Key == "A");
groupA.Count().ShouldBe(2);
groupA.ShouldContain(x => x.Amount == 10);
groupA.ShouldContain(x => x.Amount == 20);
var groupB = groups.First(g => g.Key == "B");
groupB.Count().ShouldBe(2);
var groupC = groups.First(g => g.Key == "C");
groupC.Count().ShouldBe(1);
}
/// <summary>
/// Verifies grouped projection with aggregation returns expected totals.
/// </summary>
[Fact]
public void GroupBy_With_Aggregation_Select()
{
var results = _db.TestDocuments.AsQueryable()
.GroupBy(x => x.Category)
.Select(g => new { Category = g.Key, Total = g.Sum(x => x.Amount) })
.OrderBy(x => x.Category)
.ToList();
results.Count.ShouldBe(3);
results[0].Category.ShouldBe("A");
results[0].Total.ShouldBe(30); // 10 + 20
results[1].Category.ShouldBe("B");
results[1].Total.ShouldBe(70); // 30 + 40
results[2].Category.ShouldBe("C");
results[2].Total.ShouldBe(50); // 50
}
/// <summary>
/// Verifies direct aggregate operators return expected values.
/// </summary>
[Fact]
public void Aggregations_Direct_Works()
{
var query = _db.TestDocuments.AsQueryable();
query.Count().ShouldBe(5);
query.Sum(x => x.Amount).ShouldBe(150);
query.Average(x => x.Amount).ShouldBe(30.0);
query.Min(x => x.Amount).ShouldBe(10);
query.Max(x => x.Amount).ShouldBe(50);
}
/// <summary>
/// Verifies aggregate operators with predicates return expected values.
/// </summary>
[Fact]
public void Aggregations_With_Predicate_Works()
{
var query = _db.TestDocuments.AsQueryable().Where(x => x.Category == "A");
query.Count().ShouldBe(2);
query.Sum(x => x.Amount).ShouldBe(30);
}
/// <summary>
/// Verifies in-memory join query execution returns expected rows.
/// </summary>
[Fact]
public void Join_Works_InMemory()
{
// Create a second collection for joining
_db.OrderDocuments.Insert(new OrderDocument { ItemName = "Item1", Quantity = 5 });
_db.OrderDocuments.Insert(new OrderDocument { ItemName = "Item3", Quantity = 2 });
_db.SaveChanges();
var query = _db.TestDocuments.AsQueryable()
.Join(_db.OrderDocuments.AsQueryable(),
doc => doc.Name,
order => order.ItemName,
(doc, order) => new { doc.Name, doc.Category, order.Quantity })
.OrderBy(x => x.Name)
.ToList();
query.Count.ShouldBe(2);
query[0].Name.ShouldBe("Item1");
query[0].Category.ShouldBe("A");
query[0].Quantity.ShouldBe(5);
query[1].Name.ShouldBe("Item3");
query[1].Category.ShouldBe("B");
query[1].Quantity.ShouldBe(2);
}
/// <summary>
/// Verifies projection of nested object properties works.
/// </summary>
[Fact]
public void Select_Project_Nested_Object()
{
var doc = new ComplexDocument
{
Id = ObjectId.NewObjectId(),
Title = "Order1",
ShippingAddress = new Address { City = new City { Name = "New York" }, Street = "5th Ave" },
Items = new List<OrderItem>
{
new OrderItem { Name = "Laptop", Price = 1000 },
new OrderItem { Name = "Mouse", Price = 50 }
}
};
_db.ComplexDocuments.Insert(doc);
_db.SaveChanges();
var query = _db.ComplexDocuments.AsQueryable()
.Select(x => x.ShippingAddress)
.ToList();
query.Count().ShouldBe(1);
query[0].City.Name.ShouldBe("New York");
query[0].Street.ShouldBe("5th Ave");
}
/// <summary>
/// Verifies projection of nested scalar fields works.
/// </summary>
[Fact]
public void Select_Project_Nested_Field()
{
var doc = new ComplexDocument
{
Id = ObjectId.NewObjectId(),
Title = "Order1",
ShippingAddress = new Address { City = new City { Name = "New York" }, Street = "5th Ave" }
};
_db.ComplexDocuments.Insert(doc);
_db.SaveChanges();
var cities = _db.ComplexDocuments.AsQueryable()
.Select(x => x.ShippingAddress.City.Name)
.ToList();
cities.Count().ShouldBe(1);
cities[0].ShouldBe("New York");
}
/// <summary>
/// Verifies anonymous projection including nested values works.
/// </summary>
[Fact]
public void Select_Anonymous_Complex()
{
ZB.MOM.WW.CBDD.Shared.TestDbContext_TestDbContext_Mappers.ZB_MOM_WW_CBDD_Shared_CityMapper cityMapper = new ZB.MOM.WW.CBDD.Shared.TestDbContext_TestDbContext_Mappers.ZB_MOM_WW_CBDD_Shared_CityMapper();
var doc = new ComplexDocument
{
Id = ObjectId.NewObjectId(),
Title = "Order1",
ShippingAddress = new Address { City = new City { Name = "New York" }, Street = "5th Ave" }
};
_db.ComplexDocuments.Insert(doc);
_db.SaveChanges();
var result = _db.ComplexDocuments.AsQueryable()
.Select(x => new { x.Title, x.ShippingAddress.City })
.ToList();
result.Count().ShouldBe(1);
result[0].Title.ShouldBe("Order1");
result[0].City.Name.ShouldBe("New York");
}
/// <summary>
/// Verifies projection and retrieval of nested arrays of objects works.
/// </summary>
[Fact]
public void Select_Project_Nested_Array_Of_Objects()
{
var doc = new ComplexDocument
{
Id = ObjectId.NewObjectId(),
Title = "Order with Items",
ShippingAddress = new Address { City = new City { Name = "Los Angeles" }, Street = "Hollywood Blvd" },
Items = new List<OrderItem>
{
new OrderItem { Name = "Laptop", Price = 1500 },
new OrderItem { Name = "Mouse", Price = 25 },
new OrderItem { Name = "Keyboard", Price = 75 }
}
};
_db.ComplexDocuments.Insert(doc);
_db.SaveChanges();
// Retrieve the full document and verify Items array
var retrieved = _db.ComplexDocuments.FindAll().First();
retrieved.Title.ShouldBe("Order with Items");
retrieved.ShippingAddress.City.Name.ShouldBe("Los Angeles");
retrieved.ShippingAddress.Street.ShouldBe("Hollywood Blvd");
// Verify array of nested objects
retrieved.Items.Count.ShouldBe(3);
retrieved.Items[0].Name.ShouldBe("Laptop");
retrieved.Items[0].Price.ShouldBe(1500);
retrieved.Items[1].Name.ShouldBe("Mouse");
retrieved.Items[1].Price.ShouldBe(25);
retrieved.Items[2].Name.ShouldBe("Keyboard");
retrieved.Items[2].Price.ShouldBe(75);
}
// Seed Data
_db.TestDocuments.Insert(new TestDocument { Category = "A", Amount = 10, Name = "Item1" });
_db.TestDocuments.Insert(new TestDocument { Category = "A", Amount = 20, Name = "Item2" });
_db.TestDocuments.Insert(new TestDocument { Category = "B", Amount = 30, Name = "Item3" });
_db.TestDocuments.Insert(new TestDocument { Category = "B", Amount = 40, Name = "Item4" });
_db.TestDocuments.Insert(new TestDocument { Category = "C", Amount = 50, Name = "Item5" });
_db.SaveChanges();
}
}
/// <summary>
/// Disposes test resources and removes temporary files.
/// </summary>
public void Dispose()
{
_db.Dispose();
if (File.Exists(_dbPath)) File.Delete(_dbPath);
}
/// <summary>
/// Verifies grouping by a simple key returns expected groups and counts.
/// </summary>
[Fact]
public void GroupBy_Simple_Key_Works()
{
var groups = _db.TestDocuments.AsQueryable()
.GroupBy(x => x.Category)
.ToList();
groups.Count.ShouldBe(3);
var groupA = groups.First(g => g.Key == "A");
groupA.Count().ShouldBe(2);
groupA.ShouldContain(x => x.Amount == 10);
groupA.ShouldContain(x => x.Amount == 20);
var groupB = groups.First(g => g.Key == "B");
groupB.Count().ShouldBe(2);
var groupC = groups.First(g => g.Key == "C");
groupC.Count().ShouldBe(1);
}
/// <summary>
/// Verifies grouped projection with aggregation returns expected totals.
/// </summary>
[Fact]
public void GroupBy_With_Aggregation_Select()
{
var results = _db.TestDocuments.AsQueryable()
.GroupBy(x => x.Category)
.Select(g => new { Category = g.Key, Total = g.Sum(x => x.Amount) })
.OrderBy(x => x.Category)
.ToList();
results.Count.ShouldBe(3);
results[0].Category.ShouldBe("A");
results[0].Total.ShouldBe(30); // 10 + 20
results[1].Category.ShouldBe("B");
results[1].Total.ShouldBe(70); // 30 + 40
results[2].Category.ShouldBe("C");
results[2].Total.ShouldBe(50); // 50
}
/// <summary>
/// Verifies direct aggregate operators return expected values.
/// </summary>
[Fact]
public void Aggregations_Direct_Works()
{
var query = _db.TestDocuments.AsQueryable();
query.Count().ShouldBe(5);
query.Sum(x => x.Amount).ShouldBe(150);
query.Average(x => x.Amount).ShouldBe(30.0);
query.Min(x => x.Amount).ShouldBe(10);
query.Max(x => x.Amount).ShouldBe(50);
}
/// <summary>
/// Verifies aggregate operators with predicates return expected values.
/// </summary>
[Fact]
public void Aggregations_With_Predicate_Works()
{
var query = _db.TestDocuments.AsQueryable().Where(x => x.Category == "A");
query.Count().ShouldBe(2);
query.Sum(x => x.Amount).ShouldBe(30);
}
/// <summary>
/// Verifies in-memory join query execution returns expected rows.
/// </summary>
[Fact]
public void Join_Works_InMemory()
{
// Create a second collection for joining
_db.OrderDocuments.Insert(new OrderDocument { ItemName = "Item1", Quantity = 5 });
_db.OrderDocuments.Insert(new OrderDocument { ItemName = "Item3", Quantity = 2 });
_db.SaveChanges();
var query = _db.TestDocuments.AsQueryable()
.Join(_db.OrderDocuments.AsQueryable(),
doc => doc.Name,
order => order.ItemName,
(doc, order) => new { doc.Name, doc.Category, order.Quantity })
.OrderBy(x => x.Name)
.ToList();
query.Count.ShouldBe(2);
query[0].Name.ShouldBe("Item1");
query[0].Category.ShouldBe("A");
query[0].Quantity.ShouldBe(5);
query[1].Name.ShouldBe("Item3");
query[1].Category.ShouldBe("B");
query[1].Quantity.ShouldBe(2);
}
/// <summary>
/// Verifies projection of nested object properties works.
/// </summary>
[Fact]
public void Select_Project_Nested_Object()
{
var doc = new ComplexDocument
{
Id = ObjectId.NewObjectId(),
Title = "Order1",
ShippingAddress = new Address { City = new City { Name = "New York" }, Street = "5th Ave" },
Items = new List<OrderItem>
{
new() { Name = "Laptop", Price = 1000 },
new() { Name = "Mouse", Price = 50 }
}
};
_db.ComplexDocuments.Insert(doc);
_db.SaveChanges();
var query = _db.ComplexDocuments.AsQueryable()
.Select(x => x.ShippingAddress)
.ToList();
query.Count().ShouldBe(1);
query[0].City.Name.ShouldBe("New York");
query[0].Street.ShouldBe("5th Ave");
}
/// <summary>
/// Verifies projection of nested scalar fields works.
/// </summary>
[Fact]
public void Select_Project_Nested_Field()
{
var doc = new ComplexDocument
{
Id = ObjectId.NewObjectId(),
Title = "Order1",
ShippingAddress = new Address { City = new City { Name = "New York" }, Street = "5th Ave" }
};
_db.ComplexDocuments.Insert(doc);
_db.SaveChanges();
var cities = _db.ComplexDocuments.AsQueryable()
.Select(x => x.ShippingAddress.City.Name)
.ToList();
cities.Count().ShouldBe(1);
cities[0].ShouldBe("New York");
}
/// <summary>
/// Verifies anonymous projection including nested values works.
/// </summary>
[Fact]
public void Select_Anonymous_Complex()
{
var cityMapper = new ZB_MOM_WW_CBDD_Shared_CityMapper();
var doc = new ComplexDocument
{
Id = ObjectId.NewObjectId(),
Title = "Order1",
ShippingAddress = new Address { City = new City { Name = "New York" }, Street = "5th Ave" }
};
_db.ComplexDocuments.Insert(doc);
_db.SaveChanges();
var result = _db.ComplexDocuments.AsQueryable()
.Select(x => new { x.Title, x.ShippingAddress.City })
.ToList();
result.Count().ShouldBe(1);
result[0].Title.ShouldBe("Order1");
result[0].City.Name.ShouldBe("New York");
}
/// <summary>
/// Verifies projection and retrieval of nested arrays of objects works.
/// </summary>
[Fact]
public void Select_Project_Nested_Array_Of_Objects()
{
var doc = new ComplexDocument
{
Id = ObjectId.NewObjectId(),
Title = "Order with Items",
ShippingAddress = new Address { City = new City { Name = "Los Angeles" }, Street = "Hollywood Blvd" },
Items = new List<OrderItem>
{
new() { Name = "Laptop", Price = 1500 },
new() { Name = "Mouse", Price = 25 },
new() { Name = "Keyboard", Price = 75 }
}
};
_db.ComplexDocuments.Insert(doc);
_db.SaveChanges();
// Retrieve the full document and verify Items array
var retrieved = _db.ComplexDocuments.FindAll().First();
retrieved.Title.ShouldBe("Order with Items");
retrieved.ShippingAddress.City.Name.ShouldBe("Los Angeles");
retrieved.ShippingAddress.Street.ShouldBe("Hollywood Blvd");
// Verify array of nested objects
retrieved.Items.Count.ShouldBe(3);
retrieved.Items[0].Name.ShouldBe("Laptop");
retrieved.Items[0].Price.ShouldBe(1500);
retrieved.Items[1].Name.ShouldBe("Mouse");
retrieved.Items[1].Price.ShouldBe(25);
retrieved.Items[2].Name.ShouldBe("Keyboard");
retrieved.Items[2].Price.ShouldBe(75);
}
}

View File

@@ -1,166 +1,157 @@
using ZB.MOM.WW.CBDD.Core;
using ZB.MOM.WW.CBDD.Core.Storage;
using ZB.MOM.WW.CBDD.Core.Collections;
using ZB.MOM.WW.CBDD.Core.Indexing;
using ZB.MOM.WW.CBDD.Bson;
using Xunit;
using System;
using System.IO;
using System.Linq;
using System.Collections.Generic;
using ZB.MOM.WW.CBDD.Shared;
namespace ZB.MOM.WW.CBDD.Tests
namespace ZB.MOM.WW.CBDD.Tests;
public class LinqTests : IDisposable
{
public class LinqTests : IDisposable
private readonly TestDbContext _db;
private readonly string _testFile;
/// <summary>
/// Initializes a new instance of the <see cref="LinqTests" /> class.
/// </summary>
public LinqTests()
{
private readonly string _testFile;
private readonly Shared.TestDbContext _db;
_testFile = Path.Combine(Path.GetTempPath(), $"linq_tests_{Guid.NewGuid()}.db");
if (File.Exists(_testFile)) File.Delete(_testFile);
string wal = Path.ChangeExtension(_testFile, ".wal");
if (File.Exists(wal)) File.Delete(wal);
/// <summary>
/// Initializes a new instance of the <see cref="LinqTests"/> class.
/// </summary>
public LinqTests()
{
_testFile = Path.Combine(Path.GetTempPath(), $"linq_tests_{Guid.NewGuid()}.db");
if (File.Exists(_testFile)) File.Delete(_testFile);
var wal = Path.ChangeExtension(_testFile, ".wal");
if (File.Exists(wal)) File.Delete(wal);
_db = new TestDbContext(_testFile);
_db = new Shared.TestDbContext(_testFile);
// Seed Data
_db.Users.Insert(new User { Name = "Alice", Age = 30 });
_db.Users.Insert(new User { Name = "Bob", Age = 25 });
_db.Users.Insert(new User { Name = "Charlie", Age = 35 });
_db.Users.Insert(new User { Name = "Dave", Age = 20 });
_db.Users.Insert(new User { Name = "Eve", Age = 40 });
_db.SaveChanges();
}
/// <summary>
/// Disposes test resources and removes temporary files.
/// </summary>
public void Dispose()
{
_db.Dispose();
if (File.Exists(_testFile)) File.Delete(_testFile);
var wal = Path.ChangeExtension(_testFile, ".wal");
if (File.Exists(wal)) File.Delete(wal);
}
/// <summary>
/// Verifies where filters return matching documents.
/// </summary>
[Fact]
public void Where_FiltersDocuments()
{
var query = _db.Users.AsQueryable().Where(x => x.Age > 28);
var results = query.ToList();
results.Count.ShouldBe(3); // Alice(30), Charlie(35), Eve(40)
results.ShouldNotContain(d => d.Name == "Bob");
}
/// <summary>
/// Verifies order by returns sorted documents.
/// </summary>
[Fact]
public void OrderBy_SortsDocuments()
{
var results = _db.Users.AsQueryable().OrderBy(x => x.Age).ToList();
results.Count.ShouldBe(5);
results[0].Name.ShouldBe("Dave"); // 20
results[1].Name.ShouldBe("Bob"); // 25
results.Last().Name.ShouldBe("Eve"); // 40
}
/// <summary>
/// Verifies skip and take support pagination.
/// </summary>
[Fact]
public void SkipTake_Pagination()
{
var results = _db.Users.AsQueryable()
.OrderBy(x => x.Age)
.Skip(1)
.Take(2)
.ToList();
results.Count.ShouldBe(2);
results[0].Name.ShouldBe("Bob"); // 25 (Skipped Dave)
results[1].Name.ShouldBe("Alice"); // 30
}
/// <summary>
/// Verifies select supports projections.
/// </summary>
[Fact]
public void Select_Projections()
{
var names = _db.Users.AsQueryable()
.Where(x => x.Age < 30)
.OrderBy(x => x.Age)
.Select(x => x.Name)
.ToList();
names.Count.ShouldBe(2);
names[0].ShouldBe("Dave");
names[1].ShouldBe("Bob");
}
/// <summary>
/// Verifies indexed where queries use index-backed filtering.
/// </summary>
[Fact]
public void IndexedWhere_UsedIndex()
{
// Create index on Age
_db.Users.EnsureIndex(x => x.Age, "idx_age", false);
var query = _db.Users.AsQueryable().Where(x => x.Age > 25);
var results = query.ToList();
results.Count.ShouldBe(3); // Alice(30), Charlie(35), Eve(40)
results.ShouldNotContain(d => d.Name == "Bob"); // Age 25 (filtered out by strict >)
results.ShouldNotContain(d => d.Name == "Dave"); // Age 20
}
/// <summary>
/// Verifies starts-with predicates can use an index.
/// </summary>
[Fact]
public void StartsWith_UsedIndex()
{
// Create index on Name
_db.Users.EnsureIndex(x => x.Name!, "idx_name", false);
// StartsWith "Cha" -> Should find "Charlie"
var query = _db.Users.AsQueryable().Where(x => x.Name!.StartsWith("Cha"));
var results = query.ToList();
results.Count().ShouldBe(1);
results[0].Name.ShouldBe("Charlie");
}
/// <summary>
/// Verifies range predicates can use an index.
/// </summary>
[Fact]
public void Between_UsedIndex()
{
// Create index on Age
_db.Users.EnsureIndex(x => x.Age, "idx_age_between", false);
// Age >= 22 && Age <= 32
// Alice(30), Bob(25) -> Should be found.
// Dave(20), Charlie(35), Eve(40) -> excluded.
var query = _db.Users.AsQueryable().Where(x => x.Age >= 22 && x.Age <= 32);
var results = query.ToList();
results.Count.ShouldBe(2);
results.ShouldContain(x => x.Name == "Alice");
results.ShouldContain(x => x.Name == "Bob");
}
// Seed Data
_db.Users.Insert(new User { Name = "Alice", Age = 30 });
_db.Users.Insert(new User { Name = "Bob", Age = 25 });
_db.Users.Insert(new User { Name = "Charlie", Age = 35 });
_db.Users.Insert(new User { Name = "Dave", Age = 20 });
_db.Users.Insert(new User { Name = "Eve", Age = 40 });
_db.SaveChanges();
}
}
/// <summary>
/// Disposes test resources and removes temporary files.
/// </summary>
public void Dispose()
{
_db.Dispose();
if (File.Exists(_testFile)) File.Delete(_testFile);
string wal = Path.ChangeExtension(_testFile, ".wal");
if (File.Exists(wal)) File.Delete(wal);
}
/// <summary>
/// Verifies where filters return matching documents.
/// </summary>
[Fact]
public void Where_FiltersDocuments()
{
var query = _db.Users.AsQueryable().Where(x => x.Age > 28);
var results = query.ToList();
results.Count.ShouldBe(3); // Alice(30), Charlie(35), Eve(40)
results.ShouldNotContain(d => d.Name == "Bob");
}
/// <summary>
/// Verifies order by returns sorted documents.
/// </summary>
[Fact]
public void OrderBy_SortsDocuments()
{
var results = _db.Users.AsQueryable().OrderBy(x => x.Age).ToList();
results.Count.ShouldBe(5);
results[0].Name.ShouldBe("Dave"); // 20
results[1].Name.ShouldBe("Bob"); // 25
results.Last().Name.ShouldBe("Eve"); // 40
}
/// <summary>
/// Verifies skip and take support pagination.
/// </summary>
[Fact]
public void SkipTake_Pagination()
{
var results = _db.Users.AsQueryable()
.OrderBy(x => x.Age)
.Skip(1)
.Take(2)
.ToList();
results.Count.ShouldBe(2);
results[0].Name.ShouldBe("Bob"); // 25 (Skipped Dave)
results[1].Name.ShouldBe("Alice"); // 30
}
/// <summary>
/// Verifies select supports projections.
/// </summary>
[Fact]
public void Select_Projections()
{
var names = _db.Users.AsQueryable()
.Where(x => x.Age < 30)
.OrderBy(x => x.Age)
.Select(x => x.Name)
.ToList();
names.Count.ShouldBe(2);
names[0].ShouldBe("Dave");
names[1].ShouldBe("Bob");
}
/// <summary>
/// Verifies indexed where queries use index-backed filtering.
/// </summary>
[Fact]
public void IndexedWhere_UsedIndex()
{
// Create index on Age
_db.Users.EnsureIndex(x => x.Age, "idx_age");
var query = _db.Users.AsQueryable().Where(x => x.Age > 25);
var results = query.ToList();
results.Count.ShouldBe(3); // Alice(30), Charlie(35), Eve(40)
results.ShouldNotContain(d => d.Name == "Bob"); // Age 25 (filtered out by strict >)
results.ShouldNotContain(d => d.Name == "Dave"); // Age 20
}
/// <summary>
/// Verifies starts-with predicates can use an index.
/// </summary>
[Fact]
public void StartsWith_UsedIndex()
{
// Create index on Name
_db.Users.EnsureIndex(x => x.Name!, "idx_name");
// StartsWith "Cha" -> Should find "Charlie"
var query = _db.Users.AsQueryable().Where(x => x.Name!.StartsWith("Cha"));
var results = query.ToList();
results.Count().ShouldBe(1);
results[0].Name.ShouldBe("Charlie");
}
/// <summary>
/// Verifies range predicates can use an index.
/// </summary>
[Fact]
public void Between_UsedIndex()
{
// Create index on Age
_db.Users.EnsureIndex(x => x.Age, "idx_age_between");
// Age >= 22 && Age <= 32
// Alice(30), Bob(25) -> Should be found.
// Dave(20), Charlie(35), Eve(40) -> excluded.
var query = _db.Users.AsQueryable().Where(x => x.Age >= 22 && x.Age <= 32);
var results = query.ToList();
results.Count.ShouldBe(2);
results.ShouldContain(x => x.Name == "Alice");
results.ShouldContain(x => x.Name == "Bob");
}
}

View File

@@ -1,18 +1,16 @@
using ZB.MOM.WW.CBDD.Core.Storage;
using ZB.MOM.WW.CBDD.Core.Indexing;
using ZB.MOM.WW.CBDD.Bson;
using Xunit;
using ZB.MOM.WW.CBDD.Core.Storage;
namespace ZB.MOM.WW.CBDD.Tests;
public class QueryPrimitivesTests : IDisposable
{
private readonly string _testFile;
private readonly StorageEngine _storage;
private readonly BTreeIndex _index;
private readonly StorageEngine _storage;
private readonly string _testFile;
/// <summary>
/// Initializes a new instance of the <see cref="QueryPrimitivesTests"/> class.
/// Initializes a new instance of the <see cref="QueryPrimitivesTests" /> class.
/// </summary>
public QueryPrimitivesTests()
{
@@ -26,12 +24,21 @@ public class QueryPrimitivesTests : IDisposable
SeedData();
}
/// <summary>
/// Executes Dispose.
/// </summary>
public void Dispose()
{
_storage.Dispose();
File.Delete(_testFile);
}
private void SeedData()
{
// Insert keys: 10, 20, 30, 40, 50
// And strings: "A", "AB", "ABC", "B", "C"
var txnId = _storage.BeginTransaction().TransactionId;
ulong txnId = _storage.BeginTransaction().TransactionId;
Insert(10, txnId);
Insert(20, txnId);
@@ -59,7 +66,7 @@ public class QueryPrimitivesTests : IDisposable
}
/// <summary>
/// Executes Equal_ShouldFindExactMatch.
/// Executes Equal_ShouldFindExactMatch.
/// </summary>
[Fact]
public void Equal_ShouldFindExactMatch()
@@ -72,7 +79,7 @@ public class QueryPrimitivesTests : IDisposable
}
/// <summary>
/// Executes Equal_ShouldReturnEmpty_WhenNotFound.
/// Executes Equal_ShouldReturnEmpty_WhenNotFound.
/// </summary>
[Fact]
public void Equal_ShouldReturnEmpty_WhenNotFound()
@@ -84,13 +91,13 @@ public class QueryPrimitivesTests : IDisposable
}
/// <summary>
/// Executes GreaterThan_ShouldReturnMatches.
/// Executes GreaterThan_ShouldReturnMatches.
/// </summary>
[Fact]
public void GreaterThan_ShouldReturnMatches()
{
var key = IndexKey.Create(30);
var result = _index.GreaterThan(key, orEqual: false, 0).ToList();
var result = _index.GreaterThan(key, false, 0).ToList();
(result.Count >= 2).ShouldBeTrue();
result[0].Key.ShouldBe(IndexKey.Create(40));
@@ -98,13 +105,13 @@ public class QueryPrimitivesTests : IDisposable
}
/// <summary>
/// Executes GreaterThanOrEqual_ShouldReturnMatches.
/// Executes GreaterThanOrEqual_ShouldReturnMatches.
/// </summary>
[Fact]
public void GreaterThanOrEqual_ShouldReturnMatches()
{
var key = IndexKey.Create(30);
var result = _index.GreaterThan(key, orEqual: true, 0).ToList();
var result = _index.GreaterThan(key, true, 0).ToList();
(result.Count >= 3).ShouldBeTrue();
result[0].Key.ShouldBe(IndexKey.Create(30));
@@ -113,13 +120,13 @@ public class QueryPrimitivesTests : IDisposable
}
/// <summary>
/// Executes LessThan_ShouldReturnMatches.
/// Executes LessThan_ShouldReturnMatches.
/// </summary>
[Fact]
public void LessThan_ShouldReturnMatches()
{
var key = IndexKey.Create(30);
var result = _index.LessThan(key, orEqual: false, 0).ToList();
var result = _index.LessThan(key, false, 0).ToList();
result.Count.ShouldBe(2); // 20, 10 (Order is backward?)
// LessThan yields backward?
@@ -129,14 +136,14 @@ public class QueryPrimitivesTests : IDisposable
}
/// <summary>
/// Executes Between_ShouldReturnRange.
/// Executes Between_ShouldReturnRange.
/// </summary>
[Fact]
public void Between_ShouldReturnRange()
{
var start = IndexKey.Create(20);
var end = IndexKey.Create(40);
var result = _index.Between(start, end, startInclusive: true, endInclusive: true, 0).ToList();
var result = _index.Between(start, end, true, true, 0).ToList();
result.Count.ShouldBe(3); // 20, 30, 40
result[0].Key.ShouldBe(IndexKey.Create(20));
@@ -145,7 +152,7 @@ public class QueryPrimitivesTests : IDisposable
}
/// <summary>
/// Executes StartsWith_ShouldReturnPrefixMatches.
/// Executes StartsWith_ShouldReturnPrefixMatches.
/// </summary>
[Fact]
public void StartsWith_ShouldReturnPrefixMatches()
@@ -158,7 +165,7 @@ public class QueryPrimitivesTests : IDisposable
}
/// <summary>
/// Executes Like_ShouldSupportWildcards.
/// Executes Like_ShouldSupportWildcards.
/// </summary>
[Fact]
public void Like_ShouldSupportWildcards()
@@ -176,7 +183,7 @@ public class QueryPrimitivesTests : IDisposable
}
/// <summary>
/// Executes Like_Underscore_ShouldMatchSingleChar.
/// Executes Like_Underscore_ShouldMatchSingleChar.
/// </summary>
[Fact]
public void Like_Underscore_ShouldMatchSingleChar()
@@ -188,7 +195,7 @@ public class QueryPrimitivesTests : IDisposable
}
/// <summary>
/// Executes In_ShouldReturnSpecificKeys.
/// Executes In_ShouldReturnSpecificKeys.
/// </summary>
[Fact]
public void In_ShouldReturnSpecificKeys()
@@ -201,13 +208,4 @@ public class QueryPrimitivesTests : IDisposable
result[1].Key.ShouldBe(IndexKey.Create(30));
result[2].Key.ShouldBe(IndexKey.Create(50));
}
/// <summary>
/// Executes Dispose.
/// </summary>
public void Dispose()
{
_storage.Dispose();
File.Delete(_testFile);
}
}
}

View File

@@ -1,128 +1,111 @@
using ZB.MOM.WW.CBDD.Core;
using ZB.MOM.WW.CBDD.Core.Storage;
using ZB.MOM.WW.CBDD.Core.Collections;
using ZB.MOM.WW.CBDD.Core.Indexing;
using ZB.MOM.WW.CBDD.Bson;
using Xunit;
using System;
using System.IO;
using System.Linq;
using System.Collections.Generic;
using ZB.MOM.WW.CBDD.Shared;
namespace ZB.MOM.WW.CBDD.Tests
namespace ZB.MOM.WW.CBDD.Tests;
public class ScanTests : IDisposable
{
public class ScanTests : IDisposable
private readonly TestDbContext _db;
private readonly string _testFile;
/// <summary>
/// Initializes a new instance of the <see cref="ScanTests" /> class.
/// </summary>
public ScanTests()
{
private readonly string _testFile;
private readonly Shared.TestDbContext _db;
_testFile = Path.Combine(Path.GetTempPath(), $"scan_tests_{Guid.NewGuid()}.db");
if (File.Exists(_testFile)) File.Delete(_testFile);
string wal = Path.ChangeExtension(_testFile, ".wal");
if (File.Exists(wal)) File.Delete(wal);
/// <summary>
/// Initializes a new instance of the <see cref="ScanTests"/> class.
/// </summary>
public ScanTests()
_db = new TestDbContext(_testFile);
}
/// <summary>
/// Executes Dispose.
/// </summary>
public void Dispose()
{
_db.Dispose();
if (File.Exists(_testFile)) File.Delete(_testFile);
string wal = Path.ChangeExtension(_testFile, ".wal");
if (File.Exists(wal)) File.Delete(wal);
}
/// <summary>
/// Executes Scan_FindsMatchingDocuments.
/// </summary>
[Fact]
public void Scan_FindsMatchingDocuments()
{
// Arrange
_db.Users.Insert(new User { Name = "Alice", Age = 30 });
_db.Users.Insert(new User { Name = "Bob", Age = 25 });
_db.Users.Insert(new User { Name = "Charlie", Age = 35 });
_db.SaveChanges();
// Act: Find users older than 28
var results = _db.Users.Scan(reader => ParseAge(reader) > 28).ToList();
// Assert
results.Count.ShouldBe(2);
results.ShouldContain(d => d.Name == "Alice");
results.ShouldContain(d => d.Name == "Charlie");
}
/// <summary>
/// Executes Repro_Insert_Loop_Hang.
/// </summary>
[Fact]
public void Repro_Insert_Loop_Hang()
{
// Reproduce hang reported by user at 501 documents
var count = 600;
for (var i = 0; i < count; i++) _db.Users.Insert(new User { Name = $"User_{i}", Age = i });
_db.SaveChanges();
}
/// <summary>
/// Executes ParallelScan_FindsMatchingDocuments.
/// </summary>
[Fact]
public void ParallelScan_FindsMatchingDocuments()
{
// Arrange
var count = 1000;
for (var i = 0; i < count; i++) _db.Users.Insert(new User { Name = $"User_{i}", Age = i });
_db.SaveChanges();
// Act: Find users with Age >= 500
// Parallelism 2 to force partitioning
var results = _db.Users.ParallelScan(reader => ParseAge(reader) >= 500, 2).ToList();
// Assert
results.Count.ShouldBe(500);
}
private int ParseAge(BsonSpanReader reader)
{
try
{
_testFile = Path.Combine(Path.GetTempPath(), $"scan_tests_{Guid.NewGuid()}.db");
if (File.Exists(_testFile)) File.Delete(_testFile);
var wal = Path.ChangeExtension(_testFile, ".wal");
if (File.Exists(wal)) File.Delete(wal);
_db = new Shared.TestDbContext(_testFile);
}
/// <summary>
/// Executes Dispose.
/// </summary>
public void Dispose()
{
_db.Dispose();
if (File.Exists(_testFile)) File.Delete(_testFile);
var wal = Path.ChangeExtension(_testFile, ".wal");
if (File.Exists(wal)) File.Delete(wal);
}
/// <summary>
/// Verifies that Scan filters documents by the supplied predicate.
/// </summary>
[Fact]
public void Scan_FindsMatchingDocuments()
{
    // Arrange
    _db.Users.Insert(new User { Name = "Alice", Age = 30 });
    _db.Users.Insert(new User { Name = "Bob", Age = 25 });
    _db.Users.Insert(new User { Name = "Charlie", Age = 35 });
    _db.SaveChanges();

    // Act: select users older than 28.
    var olderUsers = _db.Users
        .Scan(reader => ParseAge(reader) > 28)
        .ToList();

    // Assert: Bob (25) is excluded.
    olderUsers.Count.ShouldBe(2);
    olderUsers.ShouldContain(d => d.Name == "Alice");
    olderUsers.ShouldContain(d => d.Name == "Charlie");
}
/// <summary>
/// Executes Repro_Insert_Loop_Hang.
/// </summary>
[Fact]
public void Repro_Insert_Loop_Hang()
{
// Reproduce hang reported by user at 501 documents
int count = 600;
for (int i = 0; i < count; i++)
reader.ReadDocumentSize();
while (reader.Remaining > 0)
{
_db.Users.Insert(new User { Name = $"User_{i}", Age = i });
var type = reader.ReadBsonType();
if (type == 0) break; // End of doc
string name = reader.ReadElementHeader();
if (name == "age") return reader.ReadInt32();
reader.SkipValue(type);
}
_db.SaveChanges();
}
/// <summary>
/// Executes ParallelScan_FindsMatchingDocuments.
/// </summary>
[Fact]
public void ParallelScan_FindsMatchingDocuments()
catch
{
// Arrange
int count = 1000;
for (int i = 0; i < count; i++)
{
_db.Users.Insert(new User { Name = $"User_{i}", Age = i });
}
_db.SaveChanges();
// Act: Find users with Age >= 500
// Parallelism 2 to force partitioning
var results = _db.Users.ParallelScan(reader => ParseAge(reader) >= 500, degreeOfParallelism: 2).ToList();
// Assert
results.Count.ShouldBe(500);
}
/// <summary>
/// Extracts the "age" field from a raw BSON document; returns -1 when the
/// field is absent or the document cannot be parsed.
/// </summary>
private int ParseAge(BsonSpanReader reader)
{
    try
    {
        reader.ReadDocumentSize();
        while (reader.Remaining > 0)
        {
            var elementType = reader.ReadBsonType();
            if (elementType == 0) break; // End-of-document marker.

            if (reader.ReadElementHeader() == "age") return reader.ReadInt32();
            reader.SkipValue(elementType);
        }
    }
    catch
    {
        // Malformed document: fall through to the sentinel below.
    }

    return -1;
}
return -1;
}
}
}

View File

@@ -1,159 +1,187 @@
using System.Collections.Concurrent;
using System.ComponentModel.DataAnnotations;
using ZB.MOM.WW.CBDD.Bson;
using ZB.MOM.WW.CBDD.Shared;
using ZB.MOM.WW.CBDD.Shared.TestDbContext_TestDbContext_Mappers;
namespace ZB.MOM.WW.CBDD.Tests
namespace ZB.MOM.WW.CBDD.Tests;
public class AttributeTests
{
public class AttributeTests
private readonly ConcurrentDictionary<string, ushort> _keyMap = new(StringComparer.OrdinalIgnoreCase);
private readonly ConcurrentDictionary<ushort, string> _keys = new();
/// <summary>
/// Initializes lookup maps used by attribute mapper tests.
/// </summary>
public AttributeTests()
{
// Use full path for mapper until we are sure of the namespace
private ZB_MOM_WW_CBDD_Shared_AnnotatedUserMapper CreateMapper() => new();
private readonly System.Collections.Concurrent.ConcurrentDictionary<string, ushort> _keyMap = new(StringComparer.OrdinalIgnoreCase);
private readonly System.Collections.Concurrent.ConcurrentDictionary<ushort, string> _keys = new();
/// <summary>
/// Initializes lookup maps used by attribute mapper tests.
/// </summary>
public AttributeTests()
ushort id = 1;
string[] keys = ["_id", "display_name", "age", "location", "0", "1"];
foreach (string key in keys)
{
ushort id = 1;
string[] keys = ["_id", "display_name", "age", "location", "0", "1"];
foreach (var key in keys)
{
_keyMap[key] = id;
_keys[id] = key;
id++;
}
}
/// <summary>
/// Verifies table attribute mapping resolves the expected collection name.
/// </summary>
[Fact]
public void Test_Table_Attribute_Mapping()
{
// Verify that the generated mapper has the correct collection name
var mapper = CreateMapper();
mapper.CollectionName.ShouldBe("test.custom_users");
}
/// <summary>
/// Verifies required attribute validation is enforced.
/// </summary>
[Fact]
public void Test_Required_Validation()
{
var mapper = CreateMapper();
var user = new AnnotatedUser { Name = "" }; // Required name is empty
var writer = new BsonSpanWriter(new byte[1024], _keyMap);
bool thrown = false;
try
{
mapper.Serialize(user, writer);
}
catch (ValidationException)
{
thrown = true;
}
thrown.ShouldBeTrue("Should throw ValidationException for empty Name.");
}
/// <summary>
/// Verifies string length attribute validation is enforced.
/// </summary>
[Fact]
public void Test_StringLength_Validation()
{
var mapper = CreateMapper();
var user = new AnnotatedUser { Name = "Jo" }; // Too short
var writer = new BsonSpanWriter(new byte[1024], _keyMap);
bool thrown = false;
try { mapper.Serialize(user, writer); } catch (ValidationException) { thrown = true; }
thrown.ShouldBeTrue("Should throw ValidationException for Name too short.");
user.Name = new string('A', 51); // Too long
thrown = false;
try { mapper.Serialize(user, writer); } catch (ValidationException) { thrown = true; }
thrown.ShouldBeTrue("Should throw ValidationException for Name too long.");
}
/// <summary>
/// Verifies range attribute validation is enforced.
/// </summary>
[Fact]
public void Test_Range_Validation()
{
var mapper = CreateMapper();
var user = new AnnotatedUser { Name = "John", Age = 200 }; // Out of range
var writer = new BsonSpanWriter(new byte[1024], _keyMap);
bool thrown = false;
try { mapper.Serialize(user, writer); } catch (ValidationException) { thrown = true; }
thrown.ShouldBeTrue("Should throw ValidationException for Age out of range.");
}
/// <summary>
/// Verifies column attribute maps to the expected BSON field name.
/// </summary>
[Fact]
public void Test_Column_Name_Mapping()
{
var mapper = CreateMapper();
var user = new AnnotatedUser { Name = "John", Age = 30 };
var buffer = new byte[1024];
var writer = new BsonSpanWriter(buffer, _keyMap);
mapper.Serialize(user, writer);
var reader = new BsonSpanReader(buffer, _keys);
reader.ReadDocumentSize();
bool foundDisplayName = false;
while (reader.Remaining > 0)
{
var type = reader.ReadBsonType();
if (type == BsonType.EndOfDocument) break;
var name = reader.ReadElementHeader();
if (name == "display_name") foundDisplayName = true;
reader.SkipValue(type);
}
foundDisplayName.ShouldBeTrue("BSON field name should be 'display_name' from [Column] attribute.");
}
/// <summary>
/// Verifies not-mapped attribute excludes properties from BSON serialization.
/// </summary>
[Fact]
public void Test_NotMapped_Attribute()
{
var mapper = CreateMapper();
var user = new AnnotatedUser { Name = "John", Age = 30 };
var buffer = new byte[1024];
var writer = new BsonSpanWriter(buffer, _keyMap);
mapper.Serialize(user, writer);
var reader = new BsonSpanReader(buffer, _keys);
reader.ReadDocumentSize();
bool foundComputed = false;
while (reader.Remaining > 0)
{
var type = reader.ReadBsonType();
if (type == BsonType.EndOfDocument) break;
var name = reader.ReadElementHeader();
if (name == "ComputedInfo") foundComputed = true;
reader.SkipValue(type);
}
foundComputed.ShouldBeFalse("ComputedInfo should not be mapped to BSON.");
_keyMap[key] = id;
_keys[id] = key;
id++;
}
}
}
// Use the fully-qualified mapper type until the namespace is settled.
private ZB_MOM_WW_CBDD_Shared_AnnotatedUserMapper CreateMapper() => new();
/// <summary>
/// Verifies the [Table] attribute drives the generated collection name.
/// </summary>
[Fact]
public void Test_Table_Attribute_Mapping()
{
    // The generated mapper must expose the name declared on the entity.
    CreateMapper().CollectionName.ShouldBe("test.custom_users");
}
/// <summary>
/// Verifies [Required] validation rejects an empty Name during serialization.
/// </summary>
[Fact]
public void Test_Required_Validation()
{
    var mapper = CreateMapper();
    var user = new AnnotatedUser { Name = "" }; // Required name is empty
    var writer = new BsonSpanWriter(new byte[1024], _keyMap);

    // Manual try/catch because the span-based writer cannot be captured in a lambda.
    var validationFailed = false;
    try
    {
        mapper.Serialize(user, writer);
    }
    catch (ValidationException)
    {
        validationFailed = true;
    }

    validationFailed.ShouldBeTrue("Should throw ValidationException for empty Name.");
}
/// <summary>
/// Verifies [StringLength] validation enforces both the minimum and the
/// maximum allowed Name length.
/// </summary>
[Fact]
public void Test_StringLength_Validation()
{
    var mapper = CreateMapper();
    var user = new AnnotatedUser { Name = "Jo" }; // Too short
    var writer = new BsonSpanWriter(new byte[1024], _keyMap);

    var tooShortRejected = false;
    try
    {
        mapper.Serialize(user, writer);
    }
    catch (ValidationException)
    {
        tooShortRejected = true;
    }
    tooShortRejected.ShouldBeTrue("Should throw ValidationException for Name too short.");

    user.Name = new string('A', 51); // Too long
    var tooLongRejected = false;
    try
    {
        mapper.Serialize(user, writer);
    }
    catch (ValidationException)
    {
        tooLongRejected = true;
    }
    tooLongRejected.ShouldBeTrue("Should throw ValidationException for Name too long.");
}
/// <summary>
/// Verifies [Range] validation rejects an out-of-range Age during serialization.
/// </summary>
[Fact]
public void Test_Range_Validation()
{
    var mapper = CreateMapper();
    var user = new AnnotatedUser { Name = "John", Age = 200 }; // Out of range
    var writer = new BsonSpanWriter(new byte[1024], _keyMap);

    var rangeViolated = false;
    try
    {
        mapper.Serialize(user, writer);
    }
    catch (ValidationException)
    {
        rangeViolated = true;
    }

    rangeViolated.ShouldBeTrue("Should throw ValidationException for Age out of range.");
}
/// <summary>
/// Verifies the [Column] attribute renames the serialized BSON field.
/// </summary>
[Fact]
public void Test_Column_Name_Mapping()
{
    // Serialize an annotated user into a raw buffer.
    var payload = new byte[1024];
    var writer = new BsonSpanWriter(payload, _keyMap);
    CreateMapper().Serialize(new AnnotatedUser { Name = "John", Age = 30 }, writer);

    // Walk the document's elements looking for the renamed field.
    var reader = new BsonSpanReader(payload, _keys);
    reader.ReadDocumentSize();
    var sawDisplayName = false;
    while (reader.Remaining > 0)
    {
        var elementType = reader.ReadBsonType();
        if (elementType == BsonType.EndOfDocument) break;
        sawDisplayName |= reader.ReadElementHeader() == "display_name";
        reader.SkipValue(elementType);
    }

    sawDisplayName.ShouldBeTrue("BSON field name should be 'display_name' from [Column] attribute.");
}
/// <summary>
/// Verifies [NotMapped] properties are excluded from the serialized BSON.
/// </summary>
[Fact]
public void Test_NotMapped_Attribute()
{
    // Serialize an annotated user into a raw buffer.
    var payload = new byte[1024];
    var writer = new BsonSpanWriter(payload, _keyMap);
    CreateMapper().Serialize(new AnnotatedUser { Name = "John", Age = 30 }, writer);

    // Walk the document's elements; the excluded property must never appear.
    var reader = new BsonSpanReader(payload, _keys);
    reader.ReadDocumentSize();
    var sawComputed = false;
    while (reader.Remaining > 0)
    {
        var elementType = reader.ReadBsonType();
        if (elementType == BsonType.EndOfDocument) break;
        sawComputed |= reader.ReadElementHeader() == "ComputedInfo";
        reader.SkipValue(elementType);
    }

    sawComputed.ShouldBeFalse("ComputedInfo should not be mapped to BSON.");
}
}

View File

@@ -1,44 +1,38 @@
using ZB.MOM.WW.CBDD.Bson;
using ZB.MOM.WW.CBDD.Shared;
using ZB.MOM.WW.CBDD.Tests;
using Xunit;
namespace ZB.MOM.WW.CBDD.Tests;
/// <summary>
/// Tests for circular references and N-N relationships
/// Validates that the source generator handles:
/// 1. Self-referencing entities using ObjectId references (Employee → ManagerId, DirectReportIds)
/// 2. N-N via referencing with ObjectIds (CategoryRef/ProductRef) - BEST PRACTICE
///
/// Note: Bidirectional embedding (Category ↔ Product with full objects) is NOT supported
/// by the source generator and is an anti-pattern for document databases.
/// Use referencing (ObjectIds) instead for N-N relationships.
/// Tests for circular references and N-N relationships
/// Validates that the source generator handles:
/// 1. Self-referencing entities using ObjectId references (Employee → ManagerId, DirectReportIds)
/// 2. N-N via referencing with ObjectIds (CategoryRef/ProductRef) - BEST PRACTICE
/// Note: Bidirectional embedding (Category ↔ Product with full objects) is NOT supported
/// by the source generator and is an anti-pattern for document databases.
/// Use referencing (ObjectIds) instead for N-N relationships.
/// </summary>
public class CircularReferenceTests : IDisposable
{
private readonly TestDbContext _context;
private readonly string _dbPath;
private readonly Shared.TestDbContext _context;
/// <summary>
/// Initializes a new instance of the <see cref="CircularReferenceTests"/> class.
/// Initializes a new instance of the <see cref="CircularReferenceTests" /> class.
/// </summary>
public CircularReferenceTests()
{
_dbPath = Path.Combine(Path.GetTempPath(), $"cbdd_circular_test_{Guid.NewGuid()}");
_context = new Shared.TestDbContext(_dbPath);
_context = new TestDbContext(_dbPath);
}
/// <summary>
/// Executes Dispose.
/// Executes Dispose.
/// </summary>
public void Dispose()
{
_context?.Dispose();
if (Directory.Exists(_dbPath))
{
Directory.Delete(_dbPath, true);
}
if (Directory.Exists(_dbPath)) Directory.Delete(_dbPath, true);
}
// ========================================
@@ -46,7 +40,7 @@ public class CircularReferenceTests : IDisposable
// ========================================
/// <summary>
/// Executes SelfReference_InsertAndQuery_ShouldWork.
/// Executes SelfReference_InsertAndQuery_ShouldWork.
/// </summary>
[Fact]
public void SelfReference_InsertAndQuery_ShouldWork()
@@ -125,7 +119,7 @@ public class CircularReferenceTests : IDisposable
}
/// <summary>
/// Executes SelfReference_UpdateDirectReports_ShouldPersist.
/// Executes SelfReference_UpdateDirectReports_ShouldPersist.
/// </summary>
[Fact]
public void SelfReference_UpdateDirectReports_ShouldPersist()
@@ -177,7 +171,7 @@ public class CircularReferenceTests : IDisposable
}
/// <summary>
/// Executes SelfReference_QueryByManagerId_ShouldWork.
/// Executes SelfReference_QueryByManagerId_ShouldWork.
/// </summary>
[Fact]
public void SelfReference_QueryByManagerId_ShouldWork()
@@ -230,7 +224,7 @@ public class CircularReferenceTests : IDisposable
// ========================================
/// <summary>
/// Executes NtoNReferencing_InsertAndQuery_ShouldWork.
/// Executes NtoNReferencing_InsertAndQuery_ShouldWork.
/// </summary>
[Fact]
public void NtoNReferencing_InsertAndQuery_ShouldWork()
@@ -298,7 +292,7 @@ public class CircularReferenceTests : IDisposable
}
/// <summary>
/// Executes NtoNReferencing_UpdateRelationships_ShouldPersist.
/// Executes NtoNReferencing_UpdateRelationships_ShouldPersist.
/// </summary>
[Fact]
public void NtoNReferencing_UpdateRelationships_ShouldPersist()
@@ -358,7 +352,7 @@ public class CircularReferenceTests : IDisposable
}
/// <summary>
/// Executes NtoNReferencing_DocumentSize_RemainSmall.
/// Executes NtoNReferencing_DocumentSize_RemainSmall.
/// </summary>
[Fact]
public void NtoNReferencing_DocumentSize_RemainSmall()
@@ -390,7 +384,7 @@ public class CircularReferenceTests : IDisposable
}
/// <summary>
/// Executes NtoNReferencing_QueryByProductId_ShouldWork.
/// Executes NtoNReferencing_QueryByProductId_ShouldWork.
/// </summary>
[Fact]
public void NtoNReferencing_QueryByProductId_ShouldWork()
@@ -428,4 +422,4 @@ public class CircularReferenceTests : IDisposable
categoriesWithProduct.ShouldContain(c => c.Name == "Category 1");
categoriesWithProduct.ShouldContain(c => c.Name == "Category 2");
}
}
}

View File

@@ -1,170 +1,169 @@
using ZB.MOM.WW.CBDD.Shared;
namespace ZB.MOM.WW.CBDD.Tests
namespace ZB.MOM.WW.CBDD.Tests;
/// <summary>
/// Tests for entities with nullable string Id (like UuidEntity scenario from CleanCore)
/// This reproduces the bug where the generator incorrectly chose ObjectIdMapperBase
/// instead of StringMapperBase for inherited nullable string Id properties
/// </summary>
public class NullableStringIdTests : IDisposable
{
private const string DbPath = "nullable_string_id.db";
/// <summary>
/// Tests for entities with nullable string Id (like UuidEntity scenario from CleanCore)
/// This reproduces the bug where the generator incorrectly chose ObjectIdMapperBase
/// instead of StringMapperBase for inherited nullable string Id properties
/// Initializes a new instance of the <see cref="NullableStringIdTests" /> class.
/// </summary>
public class NullableStringIdTests : System.IDisposable
public NullableStringIdTests()
{
private const string DbPath = "nullable_string_id.db";
/// <summary>
/// Initializes a new instance of the <see cref="NullableStringIdTests"/> class.
/// </summary>
public NullableStringIdTests()
{
if (File.Exists(DbPath)) File.Delete(DbPath);
}
/// <summary>
/// Disposes test resources.
/// </summary>
public void Dispose()
{
if (File.Exists(DbPath)) File.Delete(DbPath);
}
/// <summary>
/// Verifies the mock counter collection is initialized.
/// </summary>
[Fact]
public void MockCounter_Collection_IsInitialized()
{
using var db = new Shared.TestDbContext(DbPath);
// Verify Collection is not null (initialized by generated method)
db.MockCounters.ShouldNotBeNull();
}
/// <summary>
/// Verifies insert and find-by-id operations work for string identifiers.
/// </summary>
[Fact]
public void MockCounter_Insert_And_FindById_Works()
{
using var db = new Shared.TestDbContext(DbPath);
var counter = new MockCounter("test-id-123")
{
Name = "TestCounter",
Value = 42
};
// Insert should work with string Id
db.MockCounters.Insert(counter);
// FindById should retrieve the entity
var stored = db.MockCounters.FindById("test-id-123");
stored.ShouldNotBeNull();
stored.Id.ShouldBe("test-id-123");
stored.Name.ShouldBe("TestCounter");
stored.Value.ShouldBe(42);
}
/// <summary>
/// Verifies update operations work for string identifiers.
/// </summary>
[Fact]
public void MockCounter_Update_Works()
{
using var db = new Shared.TestDbContext(DbPath);
var counter = new MockCounter("update-test")
{
Name = "Original",
Value = 10
};
db.MockCounters.Insert(counter);
// Update the entity
counter.Name = "Updated";
counter.Value = 20;
db.MockCounters.Update(counter);
// Verify update
var updated = db.MockCounters.FindById("update-test");
updated.ShouldNotBeNull();
updated.Name.ShouldBe("Updated");
updated.Value.ShouldBe(20);
}
/// <summary>
/// Verifies delete operations work for string identifiers.
/// </summary>
[Fact]
public void MockCounter_Delete_Works()
{
using var db = new Shared.TestDbContext(DbPath);
var counter = new MockCounter("delete-test")
{
Name = "ToDelete",
Value = 99
};
db.MockCounters.Insert(counter);
db.MockCounters.FindById("delete-test").ShouldNotBeNull();
// Delete the entity
db.MockCounters.Delete("delete-test");
// Verify deletion
var deleted = db.MockCounters.FindById("delete-test");
deleted.ShouldBeNull();
}
/// <summary>
/// Verifies query operations work for string identifiers.
/// </summary>
[Fact]
public void MockCounter_Query_Works()
{
using var db = new Shared.TestDbContext(DbPath);
db.MockCounters.Insert(new MockCounter("q1") { Name = "First", Value = 100 });
db.MockCounters.Insert(new MockCounter("q2") { Name = "Second", Value = 200 });
db.MockCounters.Insert(new MockCounter("q3") { Name = "Third", Value = 150 });
// Query all
var all = db.MockCounters.AsQueryable().ToList();
all.Count.ShouldBe(3);
// Query with condition
var highValues = db.MockCounters.AsQueryable()
.Where(c => c.Value > 150)
.ToList();
highValues.Count().ShouldBe(1);
highValues[0].Name.ShouldBe("Second");
}
/// <summary>
/// Verifies inherited string identifiers are stored and retrieved correctly.
/// </summary>
[Fact]
public void MockCounter_InheritedId_IsStoredCorrectly()
{
using var db = new Shared.TestDbContext(DbPath);
// Test that the inherited nullable string Id from MockBaseEntity works correctly
var counter = new MockCounter("inherited-id-test")
{
Name = "Inherited",
Value = 777
};
db.MockCounters.Insert(counter);
var stored = db.MockCounters.FindById("inherited-id-test");
stored.ShouldNotBeNull();
// Verify the Id is correctly stored and retrieved through inheritance
stored.Id.ShouldBe("inherited-id-test");
stored.Id.ShouldBeOfType<string>();
}
if (File.Exists(DbPath)) File.Delete(DbPath);
}
}
/// <summary>
/// Removes the test database file after each test run.
/// </summary>
public void Dispose()
{
    if (File.Exists(DbPath))
    {
        File.Delete(DbPath);
    }
}
/// <summary>
/// Verifies the generated context exposes an initialized MockCounters collection.
/// </summary>
[Fact]
public void MockCounter_Collection_IsInitialized()
{
    using var db = new TestDbContext(DbPath);

    // The generated initialization code must have populated the collection property.
    db.MockCounters.ShouldNotBeNull();
}
/// <summary>
/// Verifies insert and lookup round-trip for an entity keyed by a string Id.
/// </summary>
[Fact]
public void MockCounter_Insert_And_FindById_Works()
{
    using var db = new TestDbContext(DbPath);

    // Insert must accept a string-keyed entity.
    var entity = new MockCounter("test-id-123")
    {
        Name = "TestCounter",
        Value = 42
    };
    db.MockCounters.Insert(entity);

    // FindById must return the same entity by its string key.
    var stored = db.MockCounters.FindById("test-id-123");
    stored.ShouldNotBeNull();
    stored.Id.ShouldBe("test-id-123");
    stored.Name.ShouldBe("TestCounter");
    stored.Value.ShouldBe(42);
}
/// <summary>
/// Verifies updates persist for an entity keyed by a string Id.
/// </summary>
[Fact]
public void MockCounter_Update_Works()
{
    using var db = new TestDbContext(DbPath);
    var entity = new MockCounter("update-test")
    {
        Name = "Original",
        Value = 10
    };
    db.MockCounters.Insert(entity);

    // Mutate the entity and push the change.
    entity.Name = "Updated";
    entity.Value = 20;
    db.MockCounters.Update(entity);

    // Reload and confirm the new values were stored.
    var reloaded = db.MockCounters.FindById("update-test");
    reloaded.ShouldNotBeNull();
    reloaded.Name.ShouldBe("Updated");
    reloaded.Value.ShouldBe(20);
}
/// <summary>
/// Verifies deletion works for an entity keyed by a string Id.
/// </summary>
[Fact]
public void MockCounter_Delete_Works()
{
    using var db = new TestDbContext(DbPath);
    var entity = new MockCounter("delete-test")
    {
        Name = "ToDelete",
        Value = 99
    };
    db.MockCounters.Insert(entity);

    // Sanity check: the entity is retrievable before deletion.
    db.MockCounters.FindById("delete-test").ShouldNotBeNull();

    // Delete by string key and confirm it is gone.
    db.MockCounters.Delete("delete-test");
    db.MockCounters.FindById("delete-test").ShouldBeNull();
}
/// <summary>
/// Verifies LINQ-style queries work for entities keyed by a string Id:
/// enumerating all documents and filtering by a predicate.
/// </summary>
[Fact]
public void MockCounter_Query_Works()
{
    using var db = new TestDbContext(DbPath);
    db.MockCounters.Insert(new MockCounter("q1") { Name = "First", Value = 100 });
    db.MockCounters.Insert(new MockCounter("q2") { Name = "Second", Value = 200 });
    db.MockCounters.Insert(new MockCounter("q3") { Name = "Third", Value = 150 });

    // Query all
    var all = db.MockCounters.AsQueryable().ToList();
    all.Count.ShouldBe(3);

    // Query with condition
    var highValues = db.MockCounters.AsQueryable()
        .Where(c => c.Value > 150)
        .ToList();
    // Use the List<T>.Count property rather than the Enumerable.Count()
    // extension (CA1829): the result is already materialized.
    highValues.Count.ShouldBe(1);
    highValues[0].Name.ShouldBe("Second");
}
/// <summary>
/// Verifies the nullable string Id inherited from MockBaseEntity round-trips
/// through storage unchanged.
/// </summary>
[Fact]
public void MockCounter_InheritedId_IsStoredCorrectly()
{
    using var db = new TestDbContext(DbPath);

    db.MockCounters.Insert(new MockCounter("inherited-id-test")
    {
        Name = "Inherited",
        Value = 777
    });

    var stored = db.MockCounters.FindById("inherited-id-test");
    stored.ShouldNotBeNull();

    // The inherited Id must come back as the same string value and type.
    stored.Id.ShouldBe("inherited-id-test");
    stored.Id.ShouldBeOfType<string>();
}
}

View File

@@ -1,33 +1,29 @@
using System;
using System.IO;
using System.Linq;
using Xunit;
using System.Collections.Concurrent;
using ZB.MOM.WW.CBDD.Bson;
using ZB.MOM.WW.CBDD.Bson.Schema;
using ZB.MOM.WW.CBDD.Core.Storage;
using ZB.MOM.WW.CBDD.Core.Collections;
using ZB.MOM.WW.CBDD.Core.Indexing;
using ZB.MOM.WW.CBDD.Shared.TestDbContext_TestDbContext_Mappers;
using ZB.MOM.WW.CBDD.Core.Storage;
using ZB.MOM.WW.CBDD.Shared;
using ZB.MOM.WW.CBDD.Shared.TestDbContext_TestDbContext_Mappers;
namespace ZB.MOM.WW.CBDD.Tests;
public class SchemaPersistenceTests : IDisposable
{
private readonly TestDbContext _db;
private readonly string _dbPath;
private readonly Shared.TestDbContext _db;
/// <summary>
/// Initializes a new instance of the <see cref="SchemaPersistenceTests"/> class.
/// Initializes a new instance of the <see cref="SchemaPersistenceTests" /> class.
/// </summary>
public SchemaPersistenceTests()
{
_dbPath = Path.Combine(Path.GetTempPath(), $"schema_test_{Guid.NewGuid()}.db");
_db = new Shared.TestDbContext(_dbPath);
_db = new TestDbContext(_dbPath);
}
/// <summary>
/// Disposes test resources and removes temporary files.
/// Disposes test resources and removes temporary files.
/// </summary>
public void Dispose()
{
@@ -36,7 +32,7 @@ public class SchemaPersistenceTests : IDisposable
}
/// <summary>
/// Verifies BSON schema serialization and deserialization round-trips correctly.
/// Verifies BSON schema serialization and deserialization round-trips correctly.
/// </summary>
[Fact]
public void BsonSchema_Serialization_RoundTrip()
@@ -65,12 +61,16 @@ public class SchemaPersistenceTests : IDisposable
};
var buffer = new byte[1024];
var keyMap = new System.Collections.Concurrent.ConcurrentDictionary<string, ushort>(StringComparer.OrdinalIgnoreCase);
var keys = new System.Collections.Concurrent.ConcurrentDictionary<ushort, string>();
var keyMap = new ConcurrentDictionary<string, ushort>(StringComparer.OrdinalIgnoreCase);
var keys = new ConcurrentDictionary<ushort, string>();
// Manual registration for schema keys
ushort id = 1;
foreach (var k in new[] { "person", "id", "name", "age", "address", "city", "fields", "title", "type", "isnullable", "nestedschema", "t", "v", "f", "n", "b", "s", "a", "_v", "0", "1", "2", "3", "4", "5" })
foreach (string k in new[]
{
"person", "id", "name", "age", "address", "city", "fields", "title", "type", "isnullable",
"nestedschema", "t", "v", "f", "n", "b", "s", "a", "_v", "0", "1", "2", "3", "4", "5"
})
{
keyMap[k] = id;
keys[id] = k;
@@ -91,7 +91,7 @@ public class SchemaPersistenceTests : IDisposable
}
/// <summary>
/// Verifies collection metadata is persisted and reloaded correctly.
/// Verifies collection metadata is persisted and reloaded correctly.
/// </summary>
[Fact]
public void StorageEngine_Collections_Metadata_Persistence()
@@ -102,7 +102,8 @@ public class SchemaPersistenceTests : IDisposable
PrimaryRootPageId = 10,
SchemaRootPageId = 20
};
meta.Indexes.Add(new IndexMetadata { Name = "age", IsUnique = false, Type = IndexType.BTree, PropertyPaths = ["Age"] });
meta.Indexes.Add(new IndexMetadata
{ Name = "age", IsUnique = false, Type = IndexType.BTree, PropertyPaths = ["Age"] });
_db.Storage.SaveCollectionMetadata(meta);
@@ -116,38 +117,48 @@ public class SchemaPersistenceTests : IDisposable
}
/// <summary>
/// Verifies schema versioning appends new schema versions correctly.
/// Verifies schema versioning appends new schema versions correctly.
/// </summary>
[Fact]
public void StorageEngine_Schema_Versioning()
{
var schema1 = new BsonSchema { Title = "V1", Fields = { new BsonField { Name = "f1", Type = BsonType.String } } };
var schema2 = new BsonSchema { Title = "V2", Fields = { new BsonField { Name = "f1", Type = BsonType.String }, new BsonField { Name = "f2", Type = BsonType.Int32 } } };
var schema1 = new BsonSchema
{ Title = "V1", Fields = { new BsonField { Name = "f1", Type = BsonType.String } } };
var schema2 = new BsonSchema
{
Title = "V2",
Fields =
{
new BsonField { Name = "f1", Type = BsonType.String },
new BsonField { Name = "f2", Type = BsonType.Int32 }
}
};
var rootId = _db.Storage.AppendSchema(0, schema1);
uint rootId = _db.Storage.AppendSchema(0, schema1);
rootId.ShouldNotBe(0u);
var schemas = _db.Storage.GetSchemas(rootId);
schemas.Count().ShouldBe(1);
schemas[0].Title.ShouldBe("V1");
var updatedRoot = _db.Storage.AppendSchema(rootId, schema2);
uint updatedRoot = _db.Storage.AppendSchema(rootId, schema2);
updatedRoot.ShouldBe(rootId);
schemas = _db.Storage.GetSchemas(rootId);
schemas.Count.ShouldBe(2, $"Expected 2 schemas but found {schemas.Count}. Titles: {(schemas.Count > 0 ? string.Join(", ", schemas.Select(s => s.Title)) : "None")}");
schemas.Count.ShouldBe(2,
$"Expected 2 schemas but found {schemas.Count}. Titles: {(schemas.Count > 0 ? string.Join(", ", schemas.Select(s => s.Title)) : "None")}");
schemas[0].Title.ShouldBe("V1");
schemas[1].Title.ShouldBe("V2");
}
/// <summary>
/// Verifies collection startup integrates schema versioning behavior.
/// Verifies collection startup integrates schema versioning behavior.
/// </summary>
[Fact]
public void DocumentCollection_Integrates_Schema_Versioning_On_Startup()
{
// Use a dedicated database for this test to avoid schema pollution from _db
var testDbPath = Path.Combine(Path.GetTempPath(), $"schema_versioning_test_{Guid.NewGuid()}.db");
string testDbPath = Path.Combine(Path.GetTempPath(), $"schema_versioning_test_{Guid.NewGuid()}.db");
try
{
@@ -155,7 +166,7 @@ public class SchemaPersistenceTests : IDisposable
var schema1 = mapper1.GetSchema();
// 1. First startup - create DB and initialize Person collection
using (var db1 = new Shared.TestDbContext(testDbPath))
using (var db1 = new TestDbContext(testDbPath))
{
// Access only People collection to avoid initializing others
var coll = db1.People;
@@ -171,7 +182,7 @@ public class SchemaPersistenceTests : IDisposable
}
// 2. Restart with SAME schema (should NOT append)
using (var db2 = new Shared.TestDbContext(testDbPath))
using (var db2 = new TestDbContext(testDbPath))
{
var coll = db2.People;
var meta = db2.Storage.GetCollectionMetadata("people_collection");
@@ -186,7 +197,7 @@ public class SchemaPersistenceTests : IDisposable
// Since we can't change the actual Person class at runtime, this test verifies
// that the same schema doesn't get re-appended.
// A real-world scenario would involve deploying a new mapper version.
using (var db3 = new Shared.TestDbContext(testDbPath))
using (var db3 = new TestDbContext(testDbPath))
{
var coll = db3.People;
var meta = db3.Storage.GetCollectionMetadata("people_collection");
@@ -205,7 +216,7 @@ public class SchemaPersistenceTests : IDisposable
}
/// <summary>
/// Verifies persisted documents include the schema version field.
/// Verifies persisted documents include the schema version field.
/// </summary>
[Fact]
public void Document_Contains_Schema_Version_Field()
@@ -214,7 +225,7 @@ public class SchemaPersistenceTests : IDisposable
using (var coll = _db.People)
{
var person = new Person { Name = "John" };
var id = coll.Insert(person);
int id = coll.Insert(person);
_db.SaveChanges();
coll.Count().ShouldBe(1);
@@ -232,7 +243,7 @@ public class SchemaPersistenceTests : IDisposable
// Read raw bytes from page
var pageBuffer = new byte[_db.Storage.PageSize];
_db.Storage.ReadPage(location.PageId, 0, pageBuffer);
var slotOffset = SlottedPageHeader.Size + (location.SlotIndex * SlotEntry.Size);
int slotOffset = SlottedPageHeader.Size + location.SlotIndex * SlotEntry.Size;
var slot = SlotEntry.ReadFrom(pageBuffer.AsSpan(slotOffset));
var docData = pageBuffer.AsSpan(slot.Offset, slot.Length);
@@ -241,7 +252,7 @@ public class SchemaPersistenceTests : IDisposable
// Look for _v (BsonType.Int32 + 2-byte ID)
ushort vId = _db.Storage.GetKeyMap()["_v"];
string vIdHex = vId.ToString("X4");
var vIdHex = vId.ToString("X4");
// Reverse endian for hex string check (ushort is LE)
string vIdHexLE = vIdHex.Substring(2, 2) + vIdHex.Substring(0, 2);
string pattern = "10" + vIdHexLE;
@@ -255,4 +266,4 @@ public class SchemaPersistenceTests : IDisposable
valueHex.ShouldBe("01000000");
}
}
}
}

View File

@@ -1,23 +1,23 @@
using System.Collections.Concurrent;
using ZB.MOM.WW.CBDD.Bson;
using ZB.MOM.WW.CBDD.Core.Collections;
using ZB.MOM.WW.CBDD.Core.Indexing;
using ZB.MOM.WW.CBDD.Shared.TestDbContext_TestDbContext_Mappers;
using System.Text;
using Xunit;
namespace ZB.MOM.WW.CBDD.Tests;
public class SchemaTests
{
private static readonly System.Collections.Concurrent.ConcurrentDictionary<string, ushort> _testKeyMap = new(StringComparer.OrdinalIgnoreCase);
private static readonly ConcurrentDictionary<string, ushort> _testKeyMap = new(StringComparer.OrdinalIgnoreCase);
static SchemaTests()
{
ushort id = 1;
foreach (var k in new[] { "_id", "name", "mainaddress", "otheraddresses", "tags", "secret", "street", "city" }) _testKeyMap[k] = id++;
foreach (string k in new[]
{ "_id", "name", "mainaddress", "otheraddresses", "tags", "secret", "street", "city" })
_testKeyMap[k] = id++;
}
/// <summary>
/// Executes UsedKeys_ShouldReturnAllKeys.
/// Executes UsedKeys_ShouldReturnAllKeys.
/// </summary>
[Fact]
public void UsedKeys_ShouldReturnAllKeys()
@@ -33,11 +33,10 @@ public class SchemaTests
keys.ShouldContain("secret");
keys.ShouldContain("street");
keys.ShouldContain("city");
}
/// <summary>
/// Executes GetSchema_ShouldReturnBsonSchema.
/// Executes GetSchema_ShouldReturnBsonSchema.
/// </summary>
[Fact]
public void GetSchema_ShouldReturnBsonSchema()
@@ -60,4 +59,4 @@ public class SchemaTests
// Address in MockEntities has City (Nested)
addressField.NestedSchema.Fields.ShouldContain(f => f.Name == "city");
}
}
}

View File

@@ -1,253 +1,250 @@
using System;
using ZB.MOM.WW.CBDD.Bson;
using ZB.MOM.WW.CBDD.Shared;
using ZB.MOM.WW.CBDD.Tests;
using Xunit;
namespace ZB.MOM.WW.CBDD.Tests
namespace ZB.MOM.WW.CBDD.Tests;
public class TemporalTypesTests : IDisposable
{
public class TemporalTypesTests : IDisposable
private readonly TestDbContext _db;
private readonly string _dbPath;
/// <summary>
/// Initializes a new instance of the <see cref="TemporalTypesTests" /> class.
/// </summary>
public TemporalTypesTests()
{
private readonly Shared.TestDbContext _db;
private readonly string _dbPath;
/// <summary>
/// Initializes a new instance of the <see cref="TemporalTypesTests"/> class.
/// </summary>
public TemporalTypesTests()
{
_dbPath = $"temporal_test_{Guid.NewGuid()}.db";
_db = new Shared.TestDbContext(_dbPath);
}
/// <summary>
/// Releases test resources.
/// </summary>
public void Dispose()
{
_db?.Dispose();
if (File.Exists(_dbPath))
File.Delete(_dbPath);
}
/// <summary>
/// Verifies temporal entity collection initialization.
/// </summary>
[Fact]
public void TemporalEntity_Collection_IsInitialized()
{
_db.TemporalEntities.ShouldNotBeNull();
}
/// <summary>
/// Verifies temporal fields round-trip through insert and lookup.
/// </summary>
[Fact]
public void TemporalEntity_Insert_And_FindById_Works()
{
// Arrange
var now = DateTime.UtcNow;
var offset = DateTimeOffset.UtcNow;
var duration = TimeSpan.FromHours(5.5);
var birthDate = new DateOnly(1990, 5, 15);
var openingTime = new TimeOnly(9, 30, 0);
var entity = new TemporalEntity
{
Id = ObjectId.NewObjectId(),
Name = "Test Entity",
CreatedAt = now,
UpdatedAt = offset,
LastAccessedAt = offset.AddDays(1),
Duration = duration,
OptionalDuration = TimeSpan.FromMinutes(30),
BirthDate = birthDate,
Anniversary = new DateOnly(2020, 6, 10),
OpeningTime = openingTime,
ClosingTime = new TimeOnly(18, 0, 0)
};
// Act
_db.TemporalEntities.Insert(entity);
var retrieved = _db.TemporalEntities.FindById(entity.Id);
// Assert
retrieved.ShouldNotBeNull();
retrieved.Name.ShouldBe(entity.Name);
// DateTime comparison (allowing some millisecond precision loss)
(retrieved.CreatedAt.Ticks / 10000).ShouldBe(entity.CreatedAt.Ticks / 10000); // millisecond precision
// DateTimeOffset comparison
(retrieved.UpdatedAt.UtcDateTime.Ticks / 10000).ShouldBe(entity.UpdatedAt.UtcDateTime.Ticks / 10000);
retrieved.LastAccessedAt.ShouldNotBeNull();
(retrieved.LastAccessedAt!.Value.UtcDateTime.Ticks / 10000).ShouldBe(entity.LastAccessedAt!.Value.UtcDateTime.Ticks / 10000);
// TimeSpan comparison
retrieved.Duration.ShouldBe(entity.Duration);
retrieved.OptionalDuration.ShouldNotBeNull();
retrieved.OptionalDuration!.Value.ShouldBe(entity.OptionalDuration!.Value);
// DateOnly comparison
retrieved.BirthDate.ShouldBe(entity.BirthDate);
retrieved.Anniversary.ShouldNotBeNull();
retrieved.Anniversary!.Value.ShouldBe(entity.Anniversary!.Value);
// TimeOnly comparison
retrieved.OpeningTime.ShouldBe(entity.OpeningTime);
retrieved.ClosingTime.ShouldNotBeNull();
retrieved.ClosingTime!.Value.ShouldBe(entity.ClosingTime!.Value);
}
/// <summary>
/// Verifies insert behavior when optional temporal fields are null.
/// </summary>
[Fact]
public void TemporalEntity_Insert_WithNullOptionalFields_Works()
{
// Arrange
var entity = new TemporalEntity
{
Id = ObjectId.NewObjectId(),
Name = "Minimal Entity",
CreatedAt = DateTime.UtcNow,
UpdatedAt = DateTimeOffset.UtcNow,
Duration = TimeSpan.FromHours(1),
BirthDate = new DateOnly(1985, 3, 20),
OpeningTime = new TimeOnly(8, 0, 0),
// Optional fields left null
LastAccessedAt = null,
OptionalDuration = null,
Anniversary = null,
ClosingTime = null
};
// Act
_db.TemporalEntities.Insert(entity);
var retrieved = _db.TemporalEntities.FindById(entity.Id);
// Assert
retrieved.ShouldNotBeNull();
retrieved.Name.ShouldBe(entity.Name);
retrieved.LastAccessedAt.ShouldBeNull();
retrieved.OptionalDuration.ShouldBeNull();
retrieved.Anniversary.ShouldBeNull();
retrieved.ClosingTime.ShouldBeNull();
}
/// <summary>
/// Verifies temporal entity updates persist correctly.
/// </summary>
[Fact]
public void TemporalEntity_Update_Works()
{
// Arrange
var entity = new TemporalEntity
{
Id = ObjectId.NewObjectId(),
Name = "Original",
CreatedAt = DateTime.UtcNow,
UpdatedAt = DateTimeOffset.UtcNow,
Duration = TimeSpan.FromHours(1),
BirthDate = new DateOnly(1990, 1, 1),
OpeningTime = new TimeOnly(9, 0, 0)
};
_db.TemporalEntities.Insert(entity);
// Act - Update temporal fields
entity.Name = "Updated";
entity.UpdatedAt = DateTimeOffset.UtcNow.AddDays(1);
entity.Duration = TimeSpan.FromHours(2);
entity.BirthDate = new DateOnly(1991, 2, 2);
entity.OpeningTime = new TimeOnly(10, 0, 0);
_db.TemporalEntities.Update(entity);
var retrieved = _db.TemporalEntities.FindById(entity.Id);
// Assert
retrieved.ShouldNotBeNull();
retrieved.Name.ShouldBe("Updated");
retrieved.Duration.ShouldBe(entity.Duration);
retrieved.BirthDate.ShouldBe(entity.BirthDate);
retrieved.OpeningTime.ShouldBe(entity.OpeningTime);
}
/// <summary>
/// Verifies querying temporal entities by temporal fields.
/// </summary>
[Fact]
public void TemporalEntity_Query_Works()
{
// Arrange
var birthDate1 = new DateOnly(1990, 1, 1);
var birthDate2 = new DateOnly(1995, 6, 15);
var entity1 = new TemporalEntity
{
Id = ObjectId.NewObjectId(),
Name = "Person 1",
CreatedAt = DateTime.UtcNow,
UpdatedAt = DateTimeOffset.UtcNow,
Duration = TimeSpan.FromHours(1),
BirthDate = birthDate1,
OpeningTime = new TimeOnly(9, 0, 0)
};
var entity2 = new TemporalEntity
{
Id = ObjectId.NewObjectId(),
Name = "Person 2",
CreatedAt = DateTime.UtcNow,
UpdatedAt = DateTimeOffset.UtcNow,
Duration = TimeSpan.FromHours(2),
BirthDate = birthDate2,
OpeningTime = new TimeOnly(10, 0, 0)
};
_db.TemporalEntities.Insert(entity1);
_db.TemporalEntities.Insert(entity2);
// Act
var results = _db.TemporalEntities.AsQueryable()
.Where(e => e.BirthDate == birthDate1)
.ToList();
// Assert
results.Count().ShouldBe(1);
results[0].Name.ShouldBe("Person 1");
}
/// <summary>
/// Verifies edge-case TimeSpan values are persisted correctly.
/// </summary>
[Fact]
public void TimeSpan_EdgeCases_Work()
{
// Arrange - Test various TimeSpan values
var entity = new TemporalEntity
{
Id = ObjectId.NewObjectId(),
Name = "TimeSpan Test",
CreatedAt = DateTime.UtcNow,
UpdatedAt = DateTimeOffset.UtcNow,
Duration = TimeSpan.Zero,
OptionalDuration = TimeSpan.MaxValue,
BirthDate = DateOnly.MinValue,
OpeningTime = TimeOnly.MinValue
};
// Act
_db.TemporalEntities.Insert(entity);
var retrieved = _db.TemporalEntities.FindById(entity.Id);
// Assert
retrieved.ShouldNotBeNull();
retrieved.Duration.ShouldBe(TimeSpan.Zero);
retrieved.OptionalDuration.ShouldNotBeNull();
retrieved.OptionalDuration!.Value.ShouldBe(TimeSpan.MaxValue);
retrieved.BirthDate.ShouldBe(DateOnly.MinValue);
retrieved.OpeningTime.ShouldBe(TimeOnly.MinValue);
}
_dbPath = $"temporal_test_{Guid.NewGuid()}.db";
_db = new TestDbContext(_dbPath);
}
}
/// <summary>
/// Releases test resources.
/// </summary>
public void Dispose()
{
_db?.Dispose();
if (File.Exists(_dbPath))
File.Delete(_dbPath);
}
/// <summary>
/// Verifies temporal entity collection initialization.
/// </summary>
[Fact]
public void TemporalEntity_Collection_IsInitialized()
{
_db.TemporalEntities.ShouldNotBeNull();
}
/// <summary>
/// Verifies temporal fields round-trip through insert and lookup.
/// </summary>
[Fact]
public void TemporalEntity_Insert_And_FindById_Works()
{
// Arrange
var now = DateTime.UtcNow;
var offset = DateTimeOffset.UtcNow;
var duration = TimeSpan.FromHours(5.5);
var birthDate = new DateOnly(1990, 5, 15);
var openingTime = new TimeOnly(9, 30, 0);
var entity = new TemporalEntity
{
Id = ObjectId.NewObjectId(),
Name = "Test Entity",
CreatedAt = now,
UpdatedAt = offset,
LastAccessedAt = offset.AddDays(1),
Duration = duration,
OptionalDuration = TimeSpan.FromMinutes(30),
BirthDate = birthDate,
Anniversary = new DateOnly(2020, 6, 10),
OpeningTime = openingTime,
ClosingTime = new TimeOnly(18, 0, 0)
};
// Act
_db.TemporalEntities.Insert(entity);
var retrieved = _db.TemporalEntities.FindById(entity.Id);
// Assert
retrieved.ShouldNotBeNull();
retrieved.Name.ShouldBe(entity.Name);
// DateTime comparison (allowing some millisecond precision loss)
(retrieved.CreatedAt.Ticks / 10000).ShouldBe(entity.CreatedAt.Ticks / 10000); // millisecond precision
// DateTimeOffset comparison
(retrieved.UpdatedAt.UtcDateTime.Ticks / 10000).ShouldBe(entity.UpdatedAt.UtcDateTime.Ticks / 10000);
retrieved.LastAccessedAt.ShouldNotBeNull();
(retrieved.LastAccessedAt!.Value.UtcDateTime.Ticks / 10000).ShouldBe(
entity.LastAccessedAt!.Value.UtcDateTime.Ticks / 10000);
// TimeSpan comparison
retrieved.Duration.ShouldBe(entity.Duration);
retrieved.OptionalDuration.ShouldNotBeNull();
retrieved.OptionalDuration!.Value.ShouldBe(entity.OptionalDuration!.Value);
// DateOnly comparison
retrieved.BirthDate.ShouldBe(entity.BirthDate);
retrieved.Anniversary.ShouldNotBeNull();
retrieved.Anniversary!.Value.ShouldBe(entity.Anniversary!.Value);
// TimeOnly comparison
retrieved.OpeningTime.ShouldBe(entity.OpeningTime);
retrieved.ClosingTime.ShouldNotBeNull();
retrieved.ClosingTime!.Value.ShouldBe(entity.ClosingTime!.Value);
}
/// <summary>
/// Verifies insert behavior when optional temporal fields are null.
/// </summary>
[Fact]
public void TemporalEntity_Insert_WithNullOptionalFields_Works()
{
// Arrange
var entity = new TemporalEntity
{
Id = ObjectId.NewObjectId(),
Name = "Minimal Entity",
CreatedAt = DateTime.UtcNow,
UpdatedAt = DateTimeOffset.UtcNow,
Duration = TimeSpan.FromHours(1),
BirthDate = new DateOnly(1985, 3, 20),
OpeningTime = new TimeOnly(8, 0, 0),
// Optional fields left null
LastAccessedAt = null,
OptionalDuration = null,
Anniversary = null,
ClosingTime = null
};
// Act
_db.TemporalEntities.Insert(entity);
var retrieved = _db.TemporalEntities.FindById(entity.Id);
// Assert
retrieved.ShouldNotBeNull();
retrieved.Name.ShouldBe(entity.Name);
retrieved.LastAccessedAt.ShouldBeNull();
retrieved.OptionalDuration.ShouldBeNull();
retrieved.Anniversary.ShouldBeNull();
retrieved.ClosingTime.ShouldBeNull();
}
/// <summary>
/// Verifies temporal entity updates persist correctly.
/// </summary>
[Fact]
public void TemporalEntity_Update_Works()
{
// Arrange
var entity = new TemporalEntity
{
Id = ObjectId.NewObjectId(),
Name = "Original",
CreatedAt = DateTime.UtcNow,
UpdatedAt = DateTimeOffset.UtcNow,
Duration = TimeSpan.FromHours(1),
BirthDate = new DateOnly(1990, 1, 1),
OpeningTime = new TimeOnly(9, 0, 0)
};
_db.TemporalEntities.Insert(entity);
// Act - Update temporal fields
entity.Name = "Updated";
entity.UpdatedAt = DateTimeOffset.UtcNow.AddDays(1);
entity.Duration = TimeSpan.FromHours(2);
entity.BirthDate = new DateOnly(1991, 2, 2);
entity.OpeningTime = new TimeOnly(10, 0, 0);
_db.TemporalEntities.Update(entity);
var retrieved = _db.TemporalEntities.FindById(entity.Id);
// Assert
retrieved.ShouldNotBeNull();
retrieved.Name.ShouldBe("Updated");
retrieved.Duration.ShouldBe(entity.Duration);
retrieved.BirthDate.ShouldBe(entity.BirthDate);
retrieved.OpeningTime.ShouldBe(entity.OpeningTime);
}
/// <summary>
/// Verifies querying temporal entities by temporal fields.
/// </summary>
[Fact]
public void TemporalEntity_Query_Works()
{
// Arrange
var birthDate1 = new DateOnly(1990, 1, 1);
var birthDate2 = new DateOnly(1995, 6, 15);
var entity1 = new TemporalEntity
{
Id = ObjectId.NewObjectId(),
Name = "Person 1",
CreatedAt = DateTime.UtcNow,
UpdatedAt = DateTimeOffset.UtcNow,
Duration = TimeSpan.FromHours(1),
BirthDate = birthDate1,
OpeningTime = new TimeOnly(9, 0, 0)
};
var entity2 = new TemporalEntity
{
Id = ObjectId.NewObjectId(),
Name = "Person 2",
CreatedAt = DateTime.UtcNow,
UpdatedAt = DateTimeOffset.UtcNow,
Duration = TimeSpan.FromHours(2),
BirthDate = birthDate2,
OpeningTime = new TimeOnly(10, 0, 0)
};
_db.TemporalEntities.Insert(entity1);
_db.TemporalEntities.Insert(entity2);
// Act
var results = _db.TemporalEntities.AsQueryable()
.Where(e => e.BirthDate == birthDate1)
.ToList();
// Assert
results.Count().ShouldBe(1);
results[0].Name.ShouldBe("Person 1");
}
/// <summary>
/// Verifies edge-case TimeSpan values are persisted correctly.
/// </summary>
[Fact]
public void TimeSpan_EdgeCases_Work()
{
// Arrange - Test various TimeSpan values
var entity = new TemporalEntity
{
Id = ObjectId.NewObjectId(),
Name = "TimeSpan Test",
CreatedAt = DateTime.UtcNow,
UpdatedAt = DateTimeOffset.UtcNow,
Duration = TimeSpan.Zero,
OptionalDuration = TimeSpan.MaxValue,
BirthDate = DateOnly.MinValue,
OpeningTime = TimeOnly.MinValue
};
// Act
_db.TemporalEntities.Insert(entity);
var retrieved = _db.TemporalEntities.FindById(entity.Id);
// Assert
retrieved.ShouldNotBeNull();
retrieved.Duration.ShouldBe(TimeSpan.Zero);
retrieved.OptionalDuration.ShouldNotBeNull();
retrieved.OptionalDuration!.Value.ShouldBe(TimeSpan.MaxValue);
retrieved.BirthDate.ShouldBe(DateOnly.MinValue);
retrieved.OpeningTime.ShouldBe(TimeOnly.MinValue);
}
}

View File

@@ -1,48 +1,11 @@
using ZB.MOM.WW.CBDD.Bson;
using ZB.MOM.WW.CBDD.Core.Collections;
using Xunit;
using System.Linq;
namespace ZB.MOM.WW.CBDD.Tests;
public class VisibilityTests
{
public class VisibilityEntity
{
// Should be included
/// <summary>
/// Gets or sets the normal prop.
/// </summary>
public int NormalProp { get; set; }
// Should be included (serialization usually writes it)
/// <summary>
/// Gets or sets the private set prop.
/// </summary>
public int PrivateSetProp { get; private set; }
// Should be included
/// <summary>
/// Gets or sets the init prop.
/// </summary>
public int InitProp { get; init; }
// Fields - typically included in BSON if public, but reflection need GetFields
public string PublicField = string.Empty;
// Should NOT be included
private int _privateField;
// Helper to set private
/// <summary>
/// Tests set private.
/// </summary>
/// <param name="val">Value assigned to the private field.</param>
public void SetPrivate(int val) => _privateField = val;
}
/// <summary>
/// Tests generate schema visibility checks.
/// Tests generate schema visibility checks.
/// </summary>
[Fact]
public void GenerateSchema_VisibilityChecks()
@@ -60,4 +23,41 @@ public class VisibilityTests
schema.Fields.ShouldNotContain(f => f.Name == "_privatefield");
}
}
public class VisibilityEntity
{
// Should NOT be included
private int _privateField;
// Fields - typically included in BSON if public, but reflection need GetFields
public string PublicField = string.Empty;
// Should be included
/// <summary>
/// Gets or sets the normal prop.
/// </summary>
public int NormalProp { get; set; }
// Should be included (serialization usually writes it)
/// <summary>
/// Gets or sets the private set prop.
/// </summary>
public int PrivateSetProp { get; private set; }
// Should be included
/// <summary>
/// Gets or sets the init prop.
/// </summary>
public int InitProp { get; init; }
// Helper to set private
/// <summary>
/// Tests set private.
/// </summary>
/// <param name="val">Value assigned to the private field.</param>
public void SetPrivate(int val)
{
_privateField = val;
}
}
}

View File

@@ -1,5 +1,4 @@
using System.Reflection;
using ZB.MOM.WW.CBDD.Bson;
using ZB.MOM.WW.CBDD.Core.Storage;
using ZB.MOM.WW.CBDD.Core.Transactions;
using ZB.MOM.WW.CBDD.Shared;
@@ -9,12 +8,12 @@ namespace ZB.MOM.WW.CBDD.Tests;
public class CheckpointModeTests
{
/// <summary>
/// Verifies default checkpoint mode truncates WAL.
/// Verifies default checkpoint mode truncates WAL.
/// </summary>
[Fact]
public void Checkpoint_Default_ShouldUseTruncate()
{
var dbPath = NewDbPath();
string dbPath = NewDbPath();
try
{
using var db = new TestDbContext(dbPath);
@@ -36,12 +35,12 @@ public class CheckpointModeTests
}
/// <summary>
/// Verifies passive mode skips when checkpoint lock is contended.
/// Verifies passive mode skips when checkpoint lock is contended.
/// </summary>
[Fact]
public void Checkpoint_Passive_ShouldSkip_WhenLockIsContended()
{
var dbPath = NewDbPath();
string dbPath = NewDbPath();
try
{
using var storage = new StorageEngine(dbPath, PageFileConfig.Default);
@@ -67,13 +66,13 @@ public class CheckpointModeTests
}
/// <summary>
/// Verifies full checkpoint applies data and appends a checkpoint marker without truncating WAL.
/// Verifies full checkpoint applies data and appends a checkpoint marker without truncating WAL.
/// </summary>
[Fact]
public void Checkpoint_Full_ShouldAppendMarker_AndPreserveWal()
{
var dbPath = NewDbPath();
var walPath = Path.ChangeExtension(dbPath, ".wal");
string dbPath = NewDbPath();
string walPath = Path.ChangeExtension(dbPath, ".wal");
try
{
@@ -82,7 +81,7 @@ public class CheckpointModeTests
db.Users.Insert(new User { Name = "checkpoint-full", Age = 50 });
db.SaveChanges();
var walBefore = db.Storage.GetWalSize();
long walBefore = db.Storage.GetWalSize();
walBefore.ShouldBeGreaterThan(0);
var result = db.Checkpoint(CheckpointMode.Full);
@@ -103,12 +102,12 @@ public class CheckpointModeTests
}
/// <summary>
/// Verifies restart checkpoint clears WAL and allows subsequent writes.
/// Verifies restart checkpoint clears WAL and allows subsequent writes.
/// </summary>
[Fact]
public void Checkpoint_Restart_ShouldResetWal_AndAcceptNewWrites()
{
var dbPath = NewDbPath();
string dbPath = NewDbPath();
try
{
using var db = new TestDbContext(dbPath);
@@ -134,12 +133,12 @@ public class CheckpointModeTests
}
/// <summary>
/// Verifies recovery remains deterministic after a full checkpoint boundary.
/// Verifies recovery remains deterministic after a full checkpoint boundary.
/// </summary>
[Fact]
public void Recover_AfterFullCheckpoint_ShouldApplyLatestCommitDeterministically()
{
var dbPath = NewDbPath();
string dbPath = NewDbPath();
try
{
uint pageId;
@@ -182,12 +181,12 @@ public class CheckpointModeTests
}
/// <summary>
/// Verifies asynchronous mode-based checkpoints return expected result metadata.
/// Verifies asynchronous mode-based checkpoints return expected result metadata.
/// </summary>
[Fact]
public async Task CheckpointAsync_Full_ShouldReturnResult()
{
var dbPath = NewDbPath();
string dbPath = NewDbPath();
try
{
using var db = new TestDbContext(dbPath);
@@ -213,16 +212,18 @@ public class CheckpointModeTests
}
private static string NewDbPath()
=> Path.Combine(Path.GetTempPath(), $"checkpoint_mode_{Guid.NewGuid():N}.db");
{
return Path.Combine(Path.GetTempPath(), $"checkpoint_mode_{Guid.NewGuid():N}.db");
}
private static void CleanupFiles(string dbPath)
{
if (File.Exists(dbPath)) File.Delete(dbPath);
var walPath = Path.ChangeExtension(dbPath, ".wal");
string walPath = Path.ChangeExtension(dbPath, ".wal");
if (File.Exists(walPath)) File.Delete(walPath);
var markerPath = $"{dbPath}.compact.state";
if (File.Exists(markerPath)) File.Delete(markerPath);
}
}
}

View File

@@ -1,7 +1,5 @@
using ZB.MOM.WW.CBDD.Core;
using ZB.MOM.WW.CBDD.Core.Storage;
using System.Text;
using Xunit;
using ZB.MOM.WW.CBDD.Core.Storage;
namespace ZB.MOM.WW.CBDD.Tests;
@@ -10,7 +8,7 @@ public class DictionaryPageTests
private const int PageSize = 16384;
/// <summary>
/// Verifies dictionary page initialization sets expected defaults.
/// Verifies dictionary page initialization sets expected defaults.
/// </summary>
[Fact]
public void Initialize_ShouldSetupEmptyPage()
@@ -30,7 +28,7 @@ public class DictionaryPageTests
}
/// <summary>
/// Verifies insert adds entries and keeps them ordered.
/// Verifies insert adds entries and keeps them ordered.
/// </summary>
[Fact]
public void Insert_ShouldAddEntryAndSort()
@@ -65,7 +63,7 @@ public class DictionaryPageTests
}
/// <summary>
/// Verifies key lookup returns the expected value.
/// Verifies key lookup returns the expected value.
/// </summary>
[Fact]
public void TryFind_ShouldReturnCorrectValue()
@@ -86,7 +84,7 @@ public class DictionaryPageTests
}
/// <summary>
/// Verifies inserts fail when the page is full.
/// Verifies inserts fail when the page is full.
/// </summary>
[Fact]
public void Overflow_ShouldReturnFalse_WhenFull()
@@ -94,18 +92,16 @@ public class DictionaryPageTests
var page = new byte[PageSize];
DictionaryPage.Initialize(page, 1);
string bigKey = new string('X', 250);
var bigKey = new string('X', 250);
int count = 0;
var count = 0;
while (true)
{
// Use unique keys
var key = bigKey + count;
string key = bigKey + count;
if (!DictionaryPage.Insert(page, key, (ushort)count))
{
// Should fail here
break;
}
count++;
if (count > 1000) throw new ShouldAssertException("Should have filled the page much earlier");
}
@@ -118,16 +114,16 @@ public class DictionaryPageTests
}
/// <summary>
/// Verifies global lookup finds keys across chained dictionary pages.
/// Verifies global lookup finds keys across chained dictionary pages.
/// </summary>
[Fact]
public void Chaining_ShouldFindKeysInLinkedPages()
{
var dbPath = Path.Combine(Path.GetTempPath(), $"test_dict_chain_{Guid.NewGuid()}.db");
string dbPath = Path.Combine(Path.GetTempPath(), $"test_dict_chain_{Guid.NewGuid()}.db");
using var storage = new StorageEngine(dbPath, PageFileConfig.Default);
// 1. Create First Page
var page1Id = storage.AllocatePage();
uint page1Id = storage.AllocatePage();
var pageBuffer = new byte[storage.PageSize];
DictionaryPage.Initialize(pageBuffer, page1Id);
@@ -136,7 +132,7 @@ public class DictionaryPageTests
DictionaryPage.Insert(pageBuffer, "KeyA", 200);
// 2. Create Second Page
var page2Id = storage.AllocatePage();
uint page2Id = storage.AllocatePage();
var page2Buffer = new byte[storage.PageSize];
DictionaryPage.Initialize(page2Buffer, page2Id);
@@ -174,18 +170,18 @@ public class DictionaryPageTests
}
/// <summary>
/// Verifies global enumeration returns keys across chained dictionary pages.
/// Verifies global enumeration returns keys across chained dictionary pages.
/// </summary>
[Fact]
public void FindAllGlobal_ShouldRetrieveAllKeys()
{
var dbPath = Path.Combine(Path.GetTempPath(), $"test_dict_findall_{Guid.NewGuid()}.db");
string dbPath = Path.Combine(Path.GetTempPath(), $"test_dict_findall_{Guid.NewGuid()}.db");
using var storage = new StorageEngine(dbPath, PageFileConfig.Default);
// 1. Create Chain of 3 Pages
var page1Id = storage.AllocatePage();
var page2Id = storage.AllocatePage();
var page3Id = storage.AllocatePage();
uint page1Id = storage.AllocatePage();
uint page2Id = storage.AllocatePage();
uint page3Id = storage.AllocatePage();
var buf = new byte[storage.PageSize];
@@ -226,4 +222,4 @@ public class DictionaryPageTests
if (File.Exists(dbPath)) File.Delete(dbPath);
if (File.Exists(Path.ChangeExtension(dbPath, ".wal"))) File.Delete(Path.ChangeExtension(dbPath, ".wal"));
}
}
}

View File

@@ -1,10 +1,8 @@
using ZB.MOM.WW.CBDD.Bson;
using ZB.MOM.WW.CBDD.Bson.Schema;
using ZB.MOM.WW.CBDD.Core.Collections;
using ZB.MOM.WW.CBDD.Core.Storage;
using Xunit;
using System.Collections.Generic;
using System.Linq;
using ZB.MOM.WW.CBDD.Bson.Schema;
using System.Diagnostics.CodeAnalysis;
namespace ZB.MOM.WW.CBDD.Tests;
@@ -14,7 +12,7 @@ public class DictionaryPersistenceTests : IDisposable
private readonly StorageEngine _storage;
/// <summary>
/// Initializes a new instance of the <see cref="DictionaryPersistenceTests"/> class.
/// Initializes a new instance of the <see cref="DictionaryPersistenceTests" /> class.
/// </summary>
public DictionaryPersistenceTests()
{
@@ -23,55 +21,18 @@ public class DictionaryPersistenceTests : IDisposable
}
/// <summary>
/// Disposes test resources and removes temporary files.
/// Disposes test resources and removes temporary files.
/// </summary>
public void Dispose()
{
_storage.Dispose();
if (File.Exists(_dbPath)) File.Delete(_dbPath);
var walPath = Path.ChangeExtension(_dbPath, ".wal");
string walPath = Path.ChangeExtension(_dbPath, ".wal");
if (File.Exists(walPath)) File.Delete(walPath);
}
private class MockMapper : DocumentMapperBase<ObjectId, Dictionary<string, object>>
{
private readonly string _collectionName;
private readonly List<string> _keys;
/// <summary>
/// Initializes a new instance of the <see cref="MockMapper"/> class.
/// </summary>
/// <param name="name">The collection name.</param>
/// <param name="keys">The mapper keys.</param>
public MockMapper(string name, params string[] keys)
{
_collectionName = name;
_keys = keys.ToList();
}
/// <inheritdoc />
public override string CollectionName => _collectionName;
/// <inheritdoc />
public override IEnumerable<string> UsedKeys => _keys;
/// <inheritdoc />
public override BsonSchema GetSchema() => new BsonSchema { Title = _collectionName };
/// <inheritdoc />
public override ObjectId GetId(Dictionary<string, object> entity) => throw new NotImplementedException();
/// <inheritdoc />
public override void SetId(Dictionary<string, object> entity, ObjectId id) => throw new NotImplementedException();
/// <inheritdoc />
public override int Serialize(Dictionary<string, object> entity, BsonSpanWriter writer) => throw new NotImplementedException();
/// <inheritdoc />
public override Dictionary<string, object> Deserialize(BsonSpanReader reader) => throw new NotImplementedException();
}
/// <summary>
/// Verifies mapper registration adds all unique dictionary keys.
/// Verifies mapper registration adds all unique dictionary keys.
/// </summary>
[Fact]
public void RegisterMappers_Registers_All_Unique_Keys()
@@ -99,7 +60,7 @@ public class DictionaryPersistenceTests : IDisposable
}
/// <summary>
/// Verifies dictionary keys persist across storage restarts.
/// Verifies dictionary keys persist across storage restarts.
/// </summary>
[Fact]
public void Dictionary_Keys_Persist_Across_Restarts()
@@ -107,7 +68,7 @@ public class DictionaryPersistenceTests : IDisposable
var mapper = new MockMapper("Coll1", "PersistedKey");
_storage.RegisterMappers(new IDocumentMapper[] { mapper });
var originalId = _storage.GetOrAddDictionaryEntry("PersistedKey");
ushort originalId = _storage.GetOrAddDictionaryEntry("PersistedKey");
originalId.ShouldNotBe((ushort)0);
_storage.Dispose();
@@ -115,10 +76,78 @@ public class DictionaryPersistenceTests : IDisposable
// Re-open
using var storage2 = new StorageEngine(_dbPath, PageFileConfig.Default);
var recoveredId = storage2.GetOrAddDictionaryEntry("PersistedKey");
ushort recoveredId = storage2.GetOrAddDictionaryEntry("PersistedKey");
recoveredId.ShouldBe(originalId);
}
/// <summary>
/// Verifies nested schema fields are registered as dictionary keys.
/// </summary>
[Fact]
public void RegisterMappers_Handles_Nested_Keys()
{
var mapper = new NestedMockMapper();
_storage.RegisterMappers(new IDocumentMapper[] { mapper });
_storage.GetOrAddDictionaryEntry("Top").ShouldNotBe((ushort)0);
_storage.GetOrAddDictionaryEntry("Child").ShouldNotBe((ushort)0);
}
[SuppressMessage("ReSharper", "All", Justification = "Test-only stub mapper; members are intentionally not used.")]
private class MockMapper : DocumentMapperBase<ObjectId, Dictionary<string, object>>
{
private readonly string _collectionName;
private readonly List<string> _keys;
/// <summary>
/// Initializes a new instance of the <see cref="MockMapper" /> class.
/// </summary>
/// <param name="name">The collection name.</param>
/// <param name="keys">The mapper keys.</param>
public MockMapper(string name, params string[] keys)
{
_collectionName = name;
_keys = keys.ToList();
}
/// <inheritdoc />
public override string CollectionName => _collectionName;
/// <inheritdoc />
public override IEnumerable<string> UsedKeys => _keys;
/// <inheritdoc />
public override BsonSchema GetSchema()
{
return new BsonSchema { Title = _collectionName };
}
/// <inheritdoc />
public override ObjectId GetId(Dictionary<string, object> entity)
{
throw new NotImplementedException();
}
/// <inheritdoc />
public override void SetId(Dictionary<string, object> entity, ObjectId id)
{
throw new NotImplementedException();
}
/// <inheritdoc />
public override int Serialize(Dictionary<string, object> entity, BsonSpanWriter writer)
{
throw new NotImplementedException();
}
/// <inheritdoc />
public override Dictionary<string, object> Deserialize(BsonSpanReader reader)
{
throw new NotImplementedException();
}
}
[SuppressMessage("ReSharper", "All", Justification = "Test-only stub mapper; members are intentionally not used.")]
private class NestedMockMapper : DocumentMapperBase<ObjectId, object>
{
/// <inheritdoc />
@@ -141,28 +170,27 @@ public class DictionaryPersistenceTests : IDisposable
}
/// <inheritdoc />
public override ObjectId GetId(object entity) => throw new NotImplementedException();
public override ObjectId GetId(object entity)
{
throw new NotImplementedException();
}
/// <inheritdoc />
public override void SetId(object entity, ObjectId id) => throw new NotImplementedException();
public override void SetId(object entity, ObjectId id)
{
throw new NotImplementedException();
}
/// <inheritdoc />
public override int Serialize(object entity, BsonSpanWriter writer) => throw new NotImplementedException();
public override int Serialize(object entity, BsonSpanWriter writer)
{
throw new NotImplementedException();
}
/// <inheritdoc />
public override object Deserialize(BsonSpanReader reader) => throw new NotImplementedException();
}
/// <summary>
/// Verifies nested schema fields are registered as dictionary keys.
/// </summary>
[Fact]
public void RegisterMappers_Handles_Nested_Keys()
{
var mapper = new NestedMockMapper();
_storage.RegisterMappers(new IDocumentMapper[] { mapper });
_storage.GetOrAddDictionaryEntry("Top").ShouldNotBe((ushort)0);
_storage.GetOrAddDictionaryEntry("Child").ShouldNotBe((ushort)0);
public override object Deserialize(BsonSpanReader reader)
{
throw new NotImplementedException();
}
}
}

View File

@@ -1,33 +1,29 @@
using ZB.MOM.WW.CBDD.Bson;
using ZB.MOM.WW.CBDD.Core.Collections;
using ZB.MOM.WW.CBDD.Core.Compression;
using ZB.MOM.WW.CBDD.Core.Storage;
using ZB.MOM.WW.CBDD.Core.Transactions;
using ZB.MOM.WW.CBDD.Shared;
using ZB.MOM.WW.CBDD.Shared.TestDbContext_TestDbContext_Mappers;
using System.IO.Compression;
using System.IO.MemoryMappedFiles;
using Xunit;
using ZB.MOM.WW.CBDD.Bson;
using ZB.MOM.WW.CBDD.Core.Compression;
using ZB.MOM.WW.CBDD.Core.Storage;
using ZB.MOM.WW.CBDD.Shared;
namespace ZB.MOM.WW.CBDD.Tests;
public class DocumentOverflowTests : IDisposable
{
private readonly TestDbContext _db;
private readonly string _dbPath;
private readonly Shared.TestDbContext _db;
/// <summary>
/// Initializes a new instance of the <see cref="DocumentOverflowTests"/> class.
/// Initializes a new instance of the <see cref="DocumentOverflowTests" /> class.
/// </summary>
public DocumentOverflowTests()
{
_dbPath = Path.Combine(Path.GetTempPath(), $"test_overflow_{Guid.NewGuid()}.db");
// Use default PageSize (16KB)
_db = new Shared.TestDbContext(_dbPath);
_db = new TestDbContext(_dbPath);
}
/// <summary>
/// Releases test resources.
/// Releases test resources.
/// </summary>
public void Dispose()
{
@@ -36,7 +32,7 @@ public class DocumentOverflowTests : IDisposable
}
/// <summary>
/// Verifies inserting a medium-sized document succeeds.
/// Verifies inserting a medium-sized document succeeds.
/// </summary>
[Fact]
public void Insert_MediumDoc_64KB_ShouldSucceed()
@@ -60,7 +56,7 @@ public class DocumentOverflowTests : IDisposable
}
/// <summary>
/// Verifies inserting a large document succeeds.
/// Verifies inserting a large document succeeds.
/// </summary>
[Fact]
public void Insert_LargeDoc_100KB_ShouldSucceed()
@@ -83,7 +79,7 @@ public class DocumentOverflowTests : IDisposable
}
/// <summary>
/// Verifies inserting a very large document succeeds.
/// Verifies inserting a very large document succeeds.
/// </summary>
[Fact]
public void Insert_HugeDoc_3MB_ShouldSucceed()
@@ -109,7 +105,7 @@ public class DocumentOverflowTests : IDisposable
}
/// <summary>
/// Verifies updating from a small payload to a huge payload succeeds.
/// Verifies updating from a small payload to a huge payload succeeds.
/// </summary>
[Fact]
public void Update_SmallToHuge_ShouldSucceed()
@@ -123,7 +119,7 @@ public class DocumentOverflowTests : IDisposable
var hugeString = new string('U', 3 * 1024 * 1024);
user.Name = hugeString;
var updated = _db.Users.Update(user);
bool updated = _db.Users.Update(user);
_db.SaveChanges();
updated.ShouldBeTrue();
@@ -133,17 +129,17 @@ public class DocumentOverflowTests : IDisposable
}
/// <summary>
/// Verifies bulk inserts with mixed payload sizes succeed.
/// Verifies bulk inserts with mixed payload sizes succeed.
/// </summary>
[Fact]
public void InsertBulk_MixedSizes_ShouldSucceed()
{
var users = new List<User>
{
new User { Id = ObjectId.NewObjectId(), Name = "Small 1", Age = 1 },
new User { Id = ObjectId.NewObjectId(), Name = new string('M', 100 * 1024), Age = 2 }, // 100KB
new User { Id = ObjectId.NewObjectId(), Name = "Small 2", Age = 3 },
new User { Id = ObjectId.NewObjectId(), Name = new string('H', 3 * 1024 * 1024), Age = 4 } // 3MB
new() { Id = ObjectId.NewObjectId(), Name = "Small 1", Age = 1 },
new() { Id = ObjectId.NewObjectId(), Name = new string('M', 100 * 1024), Age = 2 }, // 100KB
new() { Id = ObjectId.NewObjectId(), Name = "Small 2", Age = 3 },
new() { Id = ObjectId.NewObjectId(), Name = new string('H', 3 * 1024 * 1024), Age = 4 } // 3MB
};
var ids = _db.Users.InsertBulk(users);
@@ -158,12 +154,12 @@ public class DocumentOverflowTests : IDisposable
}
/// <summary>
/// Verifies huge inserts succeed with compression enabled and small page configuration.
/// Verifies huge inserts succeed with compression enabled and small page configuration.
/// </summary>
[Fact]
public void Insert_HugeDoc_WithCompressionEnabledAndSmallPages_ShouldSucceed()
{
var localDbPath = Path.Combine(Path.GetTempPath(), $"test_overflow_compression_{Guid.NewGuid():N}.db");
string localDbPath = Path.Combine(Path.GetTempPath(), $"test_overflow_compression_{Guid.NewGuid():N}.db");
var options = new CompressionOptions
{
EnableCompression = true,
@@ -175,7 +171,7 @@ public class DocumentOverflowTests : IDisposable
try
{
using var db = new Shared.TestDbContext(localDbPath, TinyPageConfig(), options);
using var db = new TestDbContext(localDbPath, TinyPageConfig(), options);
var huge = new string('Z', 2 * 1024 * 1024);
var id = db.Users.Insert(new User
{
@@ -197,12 +193,13 @@ public class DocumentOverflowTests : IDisposable
}
/// <summary>
/// Verifies updates from huge to small payloads succeed with compression enabled.
/// Verifies updates from huge to small payloads succeed with compression enabled.
/// </summary>
[Fact]
public void Update_HugeToSmall_WithCompressionEnabled_ShouldSucceed()
{
var localDbPath = Path.Combine(Path.GetTempPath(), $"test_overflow_compression_update_{Guid.NewGuid():N}.db");
string localDbPath =
Path.Combine(Path.GetTempPath(), $"test_overflow_compression_update_{Guid.NewGuid():N}.db");
var options = new CompressionOptions
{
EnableCompression = true,
@@ -214,7 +211,7 @@ public class DocumentOverflowTests : IDisposable
try
{
using var db = new Shared.TestDbContext(localDbPath, TinyPageConfig(), options);
using var db = new TestDbContext(localDbPath, TinyPageConfig(), options);
var user = new User
{
Id = ObjectId.NewObjectId(),
@@ -251,10 +248,10 @@ public class DocumentOverflowTests : IDisposable
private static void CleanupLocalFiles(string dbPath)
{
var walPath = Path.ChangeExtension(dbPath, ".wal");
string walPath = Path.ChangeExtension(dbPath, ".wal");
var markerPath = $"{dbPath}.compact.state";
if (File.Exists(dbPath)) File.Delete(dbPath);
if (File.Exists(walPath)) File.Delete(walPath);
if (File.Exists(markerPath)) File.Delete(markerPath);
}
}
}

View File

@@ -1,4 +1,6 @@
using System.IO.Compression;
using System.Text;
using ZB.MOM.WW.CBDD.Bson;
using ZB.MOM.WW.CBDD.Core.Compression;
using ZB.MOM.WW.CBDD.Core.Storage;
using ZB.MOM.WW.CBDD.Shared;
@@ -8,12 +10,12 @@ namespace ZB.MOM.WW.CBDD.Tests;
public class MaintenanceDiagnosticsAndMigrationTests
{
/// <summary>
/// Verifies diagnostics APIs return page usage, compression, and fragmentation data.
/// Verifies diagnostics APIs return page usage, compression, and fragmentation data.
/// </summary>
[Fact]
public void DiagnosticsApis_ShouldReturnPageUsageCompressionAndFragmentationData()
{
var dbPath = NewDbPath();
string dbPath = NewDbPath();
try
{
@@ -28,13 +30,11 @@ public class MaintenanceDiagnosticsAndMigrationTests
using var db = new TestDbContext(dbPath, options);
for (var i = 0; i < 40; i++)
{
db.Users.Insert(new User
{
Name = BuildPayload(i, 9000),
Age = i
});
}
db.SaveChanges();
db.ForceCheckpoint();
@@ -47,7 +47,8 @@ public class MaintenanceDiagnosticsAndMigrationTests
byCollection.Any(x => x.CollectionName.Equals("users", StringComparison.OrdinalIgnoreCase)).ShouldBeTrue();
var compressionByCollection = db.GetCompressionRatioByCollection();
var usersCompression = compressionByCollection.First(x => x.CollectionName.Equals("users", StringComparison.OrdinalIgnoreCase));
var usersCompression = compressionByCollection.First(x =>
x.CollectionName.Equals("users", StringComparison.OrdinalIgnoreCase));
usersCompression.DocumentCount.ShouldBeGreaterThan(0);
usersCompression.BytesBeforeCompression.ShouldBeGreaterThan(0);
usersCompression.BytesAfterCompression.ShouldBeGreaterThan(0);
@@ -65,26 +66,24 @@ public class MaintenanceDiagnosticsAndMigrationTests
}
/// <summary>
/// Verifies compression migration dry-run and apply modes return deterministic stats and preserve data.
/// Verifies compression migration dry-run and apply modes return deterministic stats and preserve data.
/// </summary>
[Fact]
public void MigrateCompression_DryRunAndApply_ShouldReturnDeterministicStatsAndPreserveData()
{
var dbPath = NewDbPath();
string dbPath = NewDbPath();
try
{
using var db = new TestDbContext(dbPath, CompressionOptions.Default);
var ids = new List<ZB.MOM.WW.CBDD.Bson.ObjectId>();
var ids = new List<ObjectId>();
for (var i = 0; i < 60; i++)
{
ids.Add(db.Users.Insert(new User
{
Name = BuildPayload(i, 12000),
Age = i % 17
}));
}
db.SaveChanges();
db.ForceCheckpoint();
@@ -132,7 +131,7 @@ public class MaintenanceDiagnosticsAndMigrationTests
private static string BuildPayload(int seed, int approxLength)
{
var builder = new System.Text.StringBuilder(approxLength + 128);
var builder = new StringBuilder(approxLength + 128);
var i = 0;
while (builder.Length < approxLength)
{
@@ -148,11 +147,13 @@ public class MaintenanceDiagnosticsAndMigrationTests
}
private static string NewDbPath()
=> Path.Combine(Path.GetTempPath(), $"maint_diag_migrate_{Guid.NewGuid():N}.db");
{
return Path.Combine(Path.GetTempPath(), $"maint_diag_migrate_{Guid.NewGuid():N}.db");
}
private static void CleanupFiles(string dbPath)
{
var walPath = Path.ChangeExtension(dbPath, ".wal");
string walPath = Path.ChangeExtension(dbPath, ".wal");
var markerPath = $"{dbPath}.compact.state";
var tempPath = $"{dbPath}.compact.tmp";
var backupPath = $"{dbPath}.compact.bak";
@@ -163,4 +164,4 @@ public class MaintenanceDiagnosticsAndMigrationTests
if (File.Exists(tempPath)) File.Delete(tempPath);
if (File.Exists(backupPath)) File.Delete(backupPath);
}
}
}

View File

@@ -1,11 +1,8 @@
using ZB.MOM.WW.CBDD.Bson;
using ZB.MOM.WW.CBDD.Core.Collections;
using ZB.MOM.WW.CBDD.Core.Indexing;
using ZB.MOM.WW.CBDD.Core.Storage;
using ZB.MOM.WW.CBDD.Core.Transactions;
using ZB.MOM.WW.CBDD.Shared;
using ZB.MOM.WW.CBDD.Shared.TestDbContext_TestDbContext_Mappers;
using Xunit;
namespace ZB.MOM.WW.CBDD.Tests;
@@ -15,7 +12,7 @@ public class MetadataPersistenceTests : IDisposable
private readonly string _walPath;
/// <summary>
/// Initializes a new instance of the <see cref="MetadataPersistenceTests"/> class.
/// Initializes a new instance of the <see cref="MetadataPersistenceTests" /> class.
/// </summary>
public MetadataPersistenceTests()
{
@@ -24,7 +21,16 @@ public class MetadataPersistenceTests : IDisposable
}
/// <summary>
/// Tests index definitions are persisted and reloaded.
/// Disposes the resources used by this instance.
/// </summary>
public void Dispose()
{
if (File.Exists(_dbPath)) File.Delete(_dbPath);
if (File.Exists(_walPath)) File.Delete(_walPath);
}
/// <summary>
/// Tests index definitions are persisted and reloaded.
/// </summary>
[Fact]
public void IndexDefinitions_ArePersisted_AndReloaded()
@@ -66,19 +72,19 @@ public class MetadataPersistenceTests : IDisposable
}
/// <summary>
/// Tests ensure index does not recreate if index exists.
/// Tests ensure index does not recreate if index exists.
/// </summary>
[Fact]
public void EnsureIndex_DoesNotRecreate_IfIndexExists()
{
// 1. Create index
using (var context = new Shared.TestDbContext(_dbPath))
using (var context = new TestDbContext(_dbPath))
{
context.Users.EnsureIndex(u => u.Age);
}
// 2. Re-open and EnsureIndex again - should be fast/no-op
using (var context = new Shared.TestDbContext(_dbPath))
using (var context = new TestDbContext(_dbPath))
{
var mapper = new ZB_MOM_WW_CBDD_Shared_UserMapper();
@@ -99,13 +105,4 @@ public class MetadataPersistenceTests : IDisposable
results.Count().ShouldBe(1);
}
}
/// <summary>
/// Disposes the resources used by this instance.
/// </summary>
public void Dispose()
{
if (File.Exists(_dbPath)) File.Delete(_dbPath);
if (File.Exists(_walPath)) File.Delete(_walPath);
}
}
}

View File

@@ -1,52 +1,12 @@
using ZB.MOM.WW.CBDD.Bson;
using ZB.MOM.WW.CBDD.Core.Collections;
using Xunit;
using System.Collections.Generic;
using System;
using System.Linq;
namespace ZB.MOM.WW.CBDD.Tests;
public class RobustnessTests
{
public struct Point
{
/// <summary>
/// Gets or sets the X.
/// </summary>
public int X { get; set; }
/// <summary>
/// Gets or sets the Y.
/// </summary>
public int Y { get; set; }
}
public class RobustEntity
{
/// <summary>
/// Gets or sets the NullableInts.
/// </summary>
public List<int?> NullableInts { get; set; } = new();
/// <summary>
/// Gets or sets the Map.
/// </summary>
public Dictionary<string, int> Map { get; set; } = new();
/// <summary>
/// Gets or sets the EnumerableStrings.
/// </summary>
public IEnumerable<string> EnumerableStrings { get; set; } = Array.Empty<string>();
/// <summary>
/// Gets or sets the Location.
/// </summary>
public Point Location { get; set; }
/// <summary>
/// Gets or sets the NullableLocation.
/// </summary>
public Point? NullableLocation { get; set; }
}
/// <summary>
/// Executes GenerateSchema_RobustnessChecks.
/// Executes GenerateSchema_RobustnessChecks.
/// </summary>
[Fact]
public void GenerateSchema_RobustnessChecks()
@@ -83,4 +43,45 @@ public class RobustnessTests
nullableLocation.IsNullable.ShouldBeTrue();
nullableLocation.NestedSchema.ShouldNotBeNull();
}
}
public struct Point
{
/// <summary>
/// Gets or sets the X.
/// </summary>
public int X { get; set; }
/// <summary>
/// Gets or sets the Y.
/// </summary>
public int Y { get; set; }
}
public class RobustEntity
{
/// <summary>
/// Gets or sets the NullableInts.
/// </summary>
public List<int?> NullableInts { get; set; } = new();
/// <summary>
/// Gets or sets the Map.
/// </summary>
public Dictionary<string, int> Map { get; set; } = new();
/// <summary>
/// Gets or sets the EnumerableStrings.
/// </summary>
public IEnumerable<string> EnumerableStrings { get; set; } = Array.Empty<string>();
/// <summary>
/// Gets or sets the Location.
/// </summary>
public Point Location { get; set; }
/// <summary>
/// Gets or sets the NullableLocation.
/// </summary>
public Point? NullableLocation { get; set; }
}
}

View File

@@ -1,11 +1,13 @@
using ZB.MOM.WW.CBDD.Core.Storage;
using Xunit;
namespace ZB.MOM.WW.CBDD.Tests;
public class StorageEngineDictionaryTests
{
private string GetTempDbPath() => Path.Combine(Path.GetTempPath(), $"test_storage_dict_{Guid.NewGuid()}.db");
private string GetTempDbPath()
{
return Path.Combine(Path.GetTempPath(), $"test_storage_dict_{Guid.NewGuid()}.db");
}
private void Cleanup(string path)
{
@@ -14,34 +16,37 @@ public class StorageEngineDictionaryTests
}
/// <summary>
/// Verifies dictionary pages are initialized and return normalized keys.
/// Verifies dictionary pages are initialized and return normalized keys.
/// </summary>
[Fact]
public void StorageEngine_ShouldInitializeDictionary()
{
var path = GetTempDbPath();
string path = GetTempDbPath();
try
{
using (var storage = new StorageEngine(path, PageFileConfig.Default))
{
// Should generate ID > 100
var id = storage.GetOrAddDictionaryEntry("TestKey");
ushort id = storage.GetOrAddDictionaryEntry("TestKey");
(id > DictionaryPage.ReservedValuesEnd).ShouldBeTrue();
var key = storage.GetDictionaryKey(id);
string? key = storage.GetDictionaryKey(id);
key.ShouldBe("testkey");
}
}
finally { Cleanup(path); }
finally
{
Cleanup(path);
}
}
/// <summary>
/// Verifies dictionary entries persist across reopen.
/// Verifies dictionary entries persist across reopen.
/// </summary>
[Fact]
public void StorageEngine_ShouldPersistDictionary()
{
var path = GetTempDbPath();
string path = GetTempDbPath();
try
{
ushort id1, id2;
@@ -54,8 +59,8 @@ public class StorageEngineDictionaryTests
// Reopen
using (var storage = new StorageEngine(path, PageFileConfig.Default))
{
var val1 = storage.GetOrAddDictionaryEntry("Key1");
var val2 = storage.GetOrAddDictionaryEntry("Key2");
ushort val1 = storage.GetOrAddDictionaryEntry("Key1");
ushort val2 = storage.GetOrAddDictionaryEntry("Key2");
val1.ShouldBe(id1);
val2.ShouldBe(id2);
@@ -64,16 +69,19 @@ public class StorageEngineDictionaryTests
storage.GetDictionaryKey(val2).ShouldBe("key2");
}
}
finally { Cleanup(path); }
finally
{
Cleanup(path);
}
}
/// <summary>
/// Verifies dictionary handling scales to many keys and remains durable.
/// Verifies dictionary handling scales to many keys and remains durable.
/// </summary>
[Fact]
public void StorageEngine_ShouldHandleManyKeys()
{
var path = GetTempDbPath();
string path = GetTempDbPath();
try
{
const int keyCount = 3000;
@@ -81,10 +89,10 @@ public class StorageEngineDictionaryTests
using (var storage = new StorageEngine(path, PageFileConfig.Default))
{
for (int i = 0; i < keyCount; i++)
for (var i = 0; i < keyCount; i++)
{
var key = $"Key_{i}";
var id = storage.GetOrAddDictionaryEntry(key);
ushort id = storage.GetOrAddDictionaryEntry(key);
expectedIds[key] = id;
}
}
@@ -92,22 +100,25 @@ public class StorageEngineDictionaryTests
// Reopen and Verify
using (var storage = new StorageEngine(path, PageFileConfig.Default))
{
for (int i = 0; i < keyCount; i++)
for (var i = 0; i < keyCount; i++)
{
var key = $"Key_{i}";
var id = storage.GetOrAddDictionaryEntry(key); // Should get existing
ushort id = storage.GetOrAddDictionaryEntry(key); // Should get existing
id.ShouldBe(expectedIds[key]);
var loadedKey = storage.GetDictionaryKey(id);
string? loadedKey = storage.GetDictionaryKey(id);
loadedKey.ShouldBe(key.ToLowerInvariant());
}
// Add new one
var newId = storage.GetOrAddDictionaryEntry("NewKeyAfterReopen");
ushort newId = storage.GetOrAddDictionaryEntry("NewKeyAfterReopen");
(newId > 0).ShouldBeTrue();
expectedIds.ContainsValue(newId).ShouldBeFalse();
}
}
finally { Cleanup(path); }
finally
{
Cleanup(path);
}
}
}
}

View File

@@ -6,12 +6,12 @@ namespace ZB.MOM.WW.CBDD.Tests;
public class StorageEngineTransactionProtocolTests
{
/// <summary>
/// Verifies preparing an unknown transaction returns false.
/// Verifies preparing an unknown transaction returns false.
/// </summary>
[Fact]
public void PrepareTransaction_Should_ReturnFalse_For_Unknown_Transaction()
{
var dbPath = NewDbPath();
string dbPath = NewDbPath();
try
{
using var storage = new StorageEngine(dbPath, PageFileConfig.Default);
@@ -24,12 +24,12 @@ public class StorageEngineTransactionProtocolTests
}
/// <summary>
/// Verifies committing a detached transaction object throws.
/// Verifies committing a detached transaction object throws.
/// </summary>
[Fact]
public void CommitTransaction_With_TransactionObject_Should_Throw_When_Not_Active()
{
var dbPath = NewDbPath();
string dbPath = NewDbPath();
try
{
using var storage = new StorageEngine(dbPath, PageFileConfig.Default);
@@ -44,18 +44,18 @@ public class StorageEngineTransactionProtocolTests
}
/// <summary>
/// Verifies committing a transaction object persists writes and clears active state.
/// Verifies committing a transaction object persists writes and clears active state.
/// </summary>
[Fact]
public void CommitTransaction_With_TransactionObject_Should_Commit_Writes()
{
var dbPath = NewDbPath();
string dbPath = NewDbPath();
try
{
using var storage = new StorageEngine(dbPath, PageFileConfig.Default);
using var txn = storage.BeginTransaction();
var pageId = storage.AllocatePage();
uint pageId = storage.AllocatePage();
var data = new byte[storage.PageSize];
data[0] = 0xAB;
@@ -75,12 +75,12 @@ public class StorageEngineTransactionProtocolTests
}
/// <summary>
/// Verifies committing by identifier with no writes does not throw.
/// Verifies committing by identifier with no writes does not throw.
/// </summary>
[Fact]
public void CommitTransaction_ById_With_NoWrites_Should_Not_Throw()
{
var dbPath = NewDbPath();
string dbPath = NewDbPath();
try
{
using var storage = new StorageEngine(dbPath, PageFileConfig.Default);
@@ -93,18 +93,18 @@ public class StorageEngineTransactionProtocolTests
}
/// <summary>
/// Verifies committed transaction cache moves into readable state and active count is cleared.
/// Verifies committed transaction cache moves into readable state and active count is cleared.
/// </summary>
[Fact]
public void MarkTransactionCommitted_Should_Move_Cache_And_Clear_ActiveCount()
{
var dbPath = NewDbPath();
string dbPath = NewDbPath();
try
{
using var storage = new StorageEngine(dbPath, PageFileConfig.Default);
using var txn = storage.BeginTransaction();
var pageId = storage.AllocatePage();
uint pageId = storage.AllocatePage();
var data = new byte[storage.PageSize];
data[5] = 0x5A;
storage.WritePage(pageId, txn.TransactionId, data);
@@ -124,17 +124,17 @@ public class StorageEngineTransactionProtocolTests
}
/// <summary>
/// Verifies rollback discards uncommitted page writes.
/// Verifies rollback discards uncommitted page writes.
/// </summary>
[Fact]
public void RollbackTransaction_Should_Discard_Uncommitted_Write()
{
var dbPath = NewDbPath();
string dbPath = NewDbPath();
try
{
using var storage = new StorageEngine(dbPath, PageFileConfig.Default);
var pageId = storage.AllocatePage();
uint pageId = storage.AllocatePage();
var baseline = new byte[storage.PageSize];
baseline[0] = 0x11;
storage.WritePageImmediate(pageId, baseline);
@@ -159,18 +159,18 @@ public class StorageEngineTransactionProtocolTests
}
/// <summary>
/// Verifies marking a transaction committed transitions state correctly.
/// Verifies marking a transaction committed transitions state correctly.
/// </summary>
[Fact]
public void Transaction_MarkCommitted_Should_Transition_State()
{
var dbPath = NewDbPath();
string dbPath = NewDbPath();
try
{
using var storage = new StorageEngine(dbPath, PageFileConfig.Default);
using var txn = storage.BeginTransaction();
var pageId = storage.AllocatePage();
uint pageId = storage.AllocatePage();
var data = new byte[storage.PageSize];
data[3] = 0x33;
storage.WritePage(pageId, txn.TransactionId, data);
@@ -191,18 +191,18 @@ public class StorageEngineTransactionProtocolTests
}
/// <summary>
/// Verifies preparing then committing writes WAL data and updates transaction state.
/// Verifies preparing then committing writes WAL data and updates transaction state.
/// </summary>
[Fact]
public void Transaction_Prepare_Should_Write_Wal_And_Transition_State()
{
var dbPath = NewDbPath();
string dbPath = NewDbPath();
try
{
using var storage = new StorageEngine(dbPath, PageFileConfig.Default);
using var txn = storage.BeginTransaction();
var pageId = storage.AllocatePage();
uint pageId = storage.AllocatePage();
var data = new byte[storage.PageSize];
data[11] = 0x7B;
storage.WritePage(pageId, txn.TransactionId, data);
@@ -220,16 +220,18 @@ public class StorageEngineTransactionProtocolTests
}
private static string NewDbPath()
=> Path.Combine(Path.GetTempPath(), $"storage_txn_{Guid.NewGuid():N}.db");
{
return Path.Combine(Path.GetTempPath(), $"storage_txn_{Guid.NewGuid():N}.db");
}
private static void CleanupFiles(string dbPath)
{
if (File.Exists(dbPath)) File.Delete(dbPath);
var walPath = Path.ChangeExtension(dbPath, ".wal");
string walPath = Path.ChangeExtension(dbPath, ".wal");
if (File.Exists(walPath)) File.Delete(walPath);
var altWalPath = dbPath + "-wal";
string altWalPath = dbPath + "-wal";
if (File.Exists(altWalPath)) File.Delete(altWalPath);
}
}
}

View File

@@ -1,12 +1,11 @@
using ZB.MOM.WW.CBDD.Bson;
using Xunit;
namespace ZB.MOM.WW.CBDD.Tests;
public class ObjectIdTests
{
/// <summary>
/// Verifies new object identifiers are 12 bytes long.
/// Verifies new object identifiers are 12 bytes long.
/// </summary>
[Fact]
public void NewObjectId_ShouldCreate12ByteId()
@@ -20,7 +19,7 @@ public class ObjectIdTests
}
/// <summary>
/// Verifies object identifiers round-trip from their binary form.
/// Verifies object identifiers round-trip from their binary form.
/// </summary>
[Fact]
public void ObjectId_ShouldRoundTrip()
@@ -36,7 +35,7 @@ public class ObjectIdTests
}
/// <summary>
/// Verifies object identifier equality behavior.
/// Verifies object identifier equality behavior.
/// </summary>
[Fact]
public void ObjectId_Equals_ShouldWork()
@@ -50,7 +49,7 @@ public class ObjectIdTests
}
/// <summary>
/// Verifies object identifier timestamps are recent UTC values.
/// Verifies object identifier timestamps are recent UTC values.
/// </summary>
[Fact]
public void ObjectId_Timestamp_ShouldBeRecentUtc()
@@ -61,4 +60,4 @@ public class ObjectIdTests
(timestamp <= DateTime.UtcNow).ShouldBeTrue();
(timestamp >= DateTime.UtcNow.AddSeconds(-5)).ShouldBeTrue();
}
}
}

View File

@@ -1,28 +1,32 @@
using ZB.MOM.WW.CBDD.Bson;
using ZB.MOM.WW.CBDD.Core;
using ZB.MOM.WW.CBDD.Core.Collections;
using ZB.MOM.WW.CBDD.Core.Metadata;
using ZB.MOM.WW.CBDD.Shared;
using Xunit;
namespace ZB.MOM.WW.CBDD.Tests;
public class ValueObjectIdTests : IDisposable
{
private readonly TestDbContext _db;
private readonly string _dbPath = "value_object_ids.db";
private readonly Shared.TestDbContext _db;
/// <summary>
/// Initializes a new instance of the <see cref="ValueObjectIdTests"/> class.
/// Initializes a new instance of the <see cref="ValueObjectIdTests" /> class.
/// </summary>
public ValueObjectIdTests()
{
if (File.Exists(_dbPath)) File.Delete(_dbPath);
_db = new Shared.TestDbContext(_dbPath);
_db = new TestDbContext(_dbPath);
}
/// <summary>
/// Executes Should_Support_ValueObject_Id_Conversion.
/// Executes Dispose.
/// </summary>
public void Dispose()
{
_db.Dispose();
if (File.Exists(_dbPath)) File.Delete(_dbPath);
}
/// <summary>
/// Executes Should_Support_ValueObject_Id_Conversion.
/// </summary>
[Fact]
public void Should_Support_ValueObject_Id_Conversion()
@@ -41,13 +45,4 @@ public class ValueObjectIdTests : IDisposable
retrieved.Id.Value.ShouldBe("ORD-123");
retrieved.CustomerName.ShouldBe("John Doe");
}
/// <summary>
/// Executes Dispose.
/// </summary>
public void Dispose()
{
_db.Dispose();
if (File.Exists(_dbPath)) File.Delete(_dbPath);
}
}
}

View File

@@ -1,39 +1,39 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<AssemblyName>ZB.MOM.WW.CBDD.Tests</AssemblyName>
<RootNamespace>ZB.MOM.WW.CBDD.Tests</RootNamespace>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<IsPackable>false</IsPackable>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="coverlet.collector" Version="6.0.4" />
<PackageReference Include="coverlet.msbuild" Version="6.0.4">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.14.1" />
<PackageReference Include="NSubstitute" Version="5.3.0" />
<PackageReference Include="Shouldly" Version="4.3.0" />
<PackageReference Include="xunit.runner.visualstudio" Version="3.1.4">
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
<PrivateAssets>all</PrivateAssets>
</PackageReference>
<PackageReference Include="xunit.v3" Version="3.2.2" />
</ItemGroup>
<ItemGroup>
<Using Include="Xunit" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\..\src\CBDD.SourceGenerators\ZB.MOM.WW.CBDD.SourceGenerators.csproj" OutputItemType="Analyzer" ReferenceOutputAssembly="false" />
<ProjectReference Include="..\..\src\CBDD.Bson\ZB.MOM.WW.CBDD.Bson.csproj" />
<ProjectReference Include="..\..\src\CBDD.Core\ZB.MOM.WW.CBDD.Core.csproj" />
</ItemGroup>
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<AssemblyName>ZB.MOM.WW.CBDD.Tests</AssemblyName>
<RootNamespace>ZB.MOM.WW.CBDD.Tests</RootNamespace>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<IsPackable>false</IsPackable>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="coverlet.collector" Version="6.0.4"/>
<PackageReference Include="coverlet.msbuild" Version="6.0.4">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.14.1"/>
<PackageReference Include="NSubstitute" Version="5.3.0"/>
<PackageReference Include="Shouldly" Version="4.3.0"/>
<PackageReference Include="xunit.runner.visualstudio" Version="3.1.4">
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
<PrivateAssets>all</PrivateAssets>
</PackageReference>
<PackageReference Include="xunit.v3" Version="3.2.2"/>
</ItemGroup>
<ItemGroup>
<Using Include="Xunit"/>
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\..\src\CBDD.SourceGenerators\ZB.MOM.WW.CBDD.SourceGenerators.csproj" OutputItemType="Analyzer" ReferenceOutputAssembly="false"/>
<ProjectReference Include="..\..\src\CBDD.Bson\ZB.MOM.WW.CBDD.Bson.csproj"/>
<ProjectReference Include="..\..\src\CBDD.Core\ZB.MOM.WW.CBDD.Core.csproj"/>
</ItemGroup>
</Project>

File diff suppressed because it is too large Load Diff