using System.IO.Compression;
using ZB.MOM.WW.CBDD.Bson;
using ZB.MOM.WW.CBDD.Core.Compression;
using ZB.MOM.WW.CBDD.Core.Storage;
using ZB.MOM.WW.CBDD.Shared;
namespace ZB.MOM.WW.CBDD.Tests;
/// <summary>
/// Exercises insert and read paths with document compression enabled: mixed
/// compressed/uncompressed slot storage, reads across a database reopen, and
/// graceful fallback to uncompressed storage when the codec fails.
/// </summary>
public class CompressionInsertReadTests
{
    /// <summary>
    /// Inserting one tiny and one large document with a 4 KiB size threshold
    /// should compress only the large document, leaving a mix of compressed
    /// and uncompressed slots on the data pages.
    /// </summary>
    [Fact]
    public void Insert_WithThreshold_ShouldStoreMixedCompressedAndUncompressedSlots()
    {
        var dbPath = NewDbPath();
        var options = new CompressionOptions
        {
            EnableCompression = true,
            MinSizeBytes = 4096,        // tiny doc falls below this, large doc above
            MinSavingsPercent = 0,
            Codec = CompressionCodec.Brotli,
            Level = CompressionLevel.Fastest
        };
        try
        {
            using var db = new TestDbContext(dbPath, options);
            var small = new User { Name = "tiny", Age = 10 };
            var large = new User { Name = BuildPayload(24_000), Age = 11 };
            var smallId = db.Users.Insert(small);
            var largeId = db.Users.Insert(large);
            db.SaveChanges();

            // Both documents must round-trip regardless of how they were stored.
            db.Users.FindById(smallId)!.Name.ShouldBe(small.Name);
            db.Users.FindById(largeId)!.Name.ShouldBe(large.Name);

            // Page-level slot flags must show a genuine mix: at least one
            // compressed slot, but not all slots compressed.
            var counts = CountActiveDataSlots(db.Storage);
            counts.Total.ShouldBeGreaterThanOrEqualTo(2);
            counts.Compressed.ShouldBeGreaterThanOrEqualTo(1);
            counts.Compressed.ShouldBeLessThan(counts.Total);
        }
        finally
        {
            CleanupFiles(dbPath);
        }
    }

    /// <summary>
    /// After a checkpoint and a full reopen, <c>FindById</c> should correctly
    /// read back both compressed (large) and uncompressed (small) documents.
    /// </summary>
    [Fact]
    public void FindById_ShouldReadMixedCompressedAndUncompressedDocuments()
    {
        var dbPath = NewDbPath();
        var options = new CompressionOptions
        {
            EnableCompression = true,
            MinSizeBytes = 512,          // low threshold so only the tiny docs stay raw
            MinSavingsPercent = 0,
            Codec = CompressionCodec.Brotli,
            Level = CompressionLevel.Fastest
        };
        // Insert returns the document id; BsonValue is the id type surfaced by
        // the CBDD.Bson layer (the only reason that using is present here).
        var ids = new List<BsonValue>();
        try
        {
            using (var db = new TestDbContext(dbPath, options))
            {
                ids.Add(db.Users.Insert(new User { Name = "small-a", Age = 1 }));
                ids.Add(db.Users.Insert(new User { Name = BuildPayload(18_000), Age = 2 }));
                ids.Add(db.Users.Insert(new User { Name = "small-b", Age = 3 }));
                ids.Add(db.Users.Insert(new User { Name = BuildPayload(26_000), Age = 4 }));
                db.SaveChanges();
                db.ForceCheckpoint();   // flush WAL so the reopen reads from the data file
            }
            using (var reopened = new TestDbContext(dbPath, options))
            {
                // Small documents round-trip exactly.
                reopened.Users.FindById(ids[0])!.Name.ShouldBe("small-a");
                reopened.Users.FindById(ids[2])!.Name.ShouldBe("small-b");

                // Large documents decompress to their full payload length.
                reopened.Users.FindById(ids[1])!.Name.Length.ShouldBeGreaterThan(10_000);
                reopened.Users.FindById(ids[3])!.Name.Length.ShouldBeGreaterThan(10_000);

                // Storage still holds a mix of compressed and raw slots.
                var counts = CountActiveDataSlots(reopened.Storage);
                counts.Compressed.ShouldBeGreaterThanOrEqualTo(1);
                counts.Compressed.ShouldBeLessThan(counts.Total);
            }
        }
        finally
        {
            CleanupFiles(dbPath);
        }
    }

    /// <summary>
    /// When the registered codec throws during compression, the engine should
    /// record the failure, fall back to storing the document uncompressed, and
    /// still serve correct reads.
    /// </summary>
    [Fact]
    public void Insert_WhenCodecThrows_ShouldFallbackToUncompressedStorage()
    {
        var dbPath = NewDbPath();
        var options = new CompressionOptions
        {
            EnableCompression = true,
            MinSizeBytes = 0,            // force every document through the codec
            MinSavingsPercent = 0,
            Codec = CompressionCodec.Brotli,
            Level = CompressionLevel.Fastest
        };
        try
        {
            using var db = new TestDbContext(dbPath, options);

            // Replace the Brotli codec with one that always throws on Compress.
            db.Storage.CompressionService.RegisterCodec(new FailingBrotliCodec());

            var user = new User { Name = BuildPayload(20_000), Age = 7 };
            var id = db.Users.Insert(user);
            db.SaveChanges();

            // The document must still round-trip via the uncompressed fallback.
            db.Users.FindById(id)!.Name.ShouldBe(user.Name);

            // The failure is counted, and nothing ended up compressed.
            var stats = db.GetCompressionStats();
            stats.CompressionFailureCount.ShouldBeGreaterThanOrEqualTo(1);
            stats.CompressedDocumentCount.ShouldBe(0);
            var counts = CountActiveDataSlots(db.Storage);
            counts.Compressed.ShouldBe(0);
        }
        finally
        {
            CleanupFiles(dbPath);
        }
    }

    /// <summary>
    /// Scans every data page and tallies live (non-deleted) slots, reporting
    /// how many of them carry the <see cref="SlotFlags.Compressed"/> flag.
    /// </summary>
    /// <param name="storage">Storage engine whose pages are scanned directly.</param>
    /// <returns>Total live data slots and the compressed subset.</returns>
    private static (int Total, int Compressed) CountActiveDataSlots(StorageEngine storage)
    {
        var buffer = new byte[storage.PageSize];
        var total = 0;
        var compressed = 0;
        // Page 0 is skipped — presumably the file header page; TODO confirm.
        for (uint pageId = 1; pageId < storage.PageCount; pageId++)
        {
            storage.ReadPage(pageId, null, buffer);
            var header = SlottedPageHeader.ReadFrom(buffer);
            if (header.PageType != PageType.Data)
                continue;
            for (ushort slotIndex = 0; slotIndex < header.SlotCount; slotIndex++)
            {
                // Slot directory sits immediately after the page header.
                var slotOffset = SlottedPageHeader.Size + (slotIndex * SlotEntry.Size);
                var slot = SlotEntry.ReadFrom(buffer.AsSpan(slotOffset, SlotEntry.Size));
                if ((slot.Flags & SlotFlags.Deleted) != 0)
                    continue;
                total++;
                if ((slot.Flags & SlotFlags.Compressed) != 0)
                    compressed++;
            }
        }
        return (total, compressed);
    }

    /// <summary>
    /// Builds a highly compressible repeating payload ("payload-00000000|"…)
    /// of at least <paramref name="approxLength"/> characters (may overshoot
    /// by up to one 17-character chunk).
    /// </summary>
    /// <param name="approxLength">Minimum length of the generated string.</param>
    /// <returns>The generated payload string.</returns>
    private static string BuildPayload(int approxLength)
    {
        var builder = new System.Text.StringBuilder(approxLength + 256);
        var i = 0;
        while (builder.Length < approxLength)
        {
            builder.Append("payload-");
            builder.Append(i.ToString("D8"));
            builder.Append('|');
            i++;
        }
        return builder.ToString();
    }

    /// <summary>Creates a unique temp-directory database path per test run.</summary>
    private static string NewDbPath()
        => Path.Combine(Path.GetTempPath(), $"compression_insert_read_{Guid.NewGuid():N}.db");

    /// <summary>
    /// Deletes the database file plus its WAL and compaction-marker siblings,
    /// if present.
    /// </summary>
    /// <param name="dbPath">Path returned by <see cref="NewDbPath"/>.</param>
    private static void CleanupFiles(string dbPath)
    {
        var walPath = Path.ChangeExtension(dbPath, ".wal");
        var markerPath = $"{dbPath}.compact.state";
        if (File.Exists(dbPath)) File.Delete(dbPath);
        if (File.Exists(walPath)) File.Delete(walPath);
        if (File.Exists(markerPath)) File.Delete(markerPath);
    }

    /// <summary>
    /// Test double that registers as the Brotli codec but throws on every
    /// compress call, to exercise the engine's uncompressed fallback path.
    /// </summary>
    private sealed class FailingBrotliCodec : ICompressionCodec
    {
        /// <summary>
        /// Gets the codec identity; claims Brotli so it replaces the real codec.
        /// </summary>
        public CompressionCodec Codec => CompressionCodec.Brotli;

        /// <summary>
        /// Always throws to simulate a codec failure during compression.
        /// </summary>
        /// <param name="input">Payload bytes to compress.</param>
        /// <param name="level">Compression level.</param>
        /// <returns>Never returns.</returns>
        /// <exception cref="InvalidOperationException">Always thrown.</exception>
        public byte[] Compress(ReadOnlySpan<byte> input, CompressionLevel level)
            => throw new InvalidOperationException("Forced codec failure for test coverage.");

        /// <summary>
        /// Always throws; reads must never reach this codec because nothing
        /// was successfully compressed.
        /// </summary>
        /// <param name="input">Compressed payload bytes.</param>
        /// <param name="expectedLength">Expected decompressed payload length.</param>
        /// <param name="maxDecompressedSizeBytes">Maximum allowed decompressed size.</param>
        /// <returns>Never returns.</returns>
        /// <exception cref="InvalidOperationException">Always thrown.</exception>
        public byte[] Decompress(ReadOnlySpan<byte> input, int expectedLength, int maxDecompressedSizeBytes)
            => throw new InvalidOperationException("This codec should not be used for reads in this scenario.");
    }
}