feat: add SequenceSet for sparse deletion tracking with secure erase (Gap 1.7)

Replace HashSet<ulong> _deleted in MsgBlock with SequenceSet — a sorted-range
list that compresses contiguous deletions into (Start, End) intervals. Adds
O(log n) Contains/Add via binary search on range count, matching Go's avl.SequenceSet
semantics with a simpler implementation.

- Add SequenceSet.cs: sorted-range compressed set with Add/Remove/Contains/Count/Clear
  and IEnumerable<ulong> in ascending order. Binary search for all O(log n) ops.
- Replace HashSet<ulong> _deleted and _skipSequences in MsgBlock with SequenceSet.
- Add secureErase parameter (default false) to MsgBlock.Delete(): when true, payload
  bytes are overwritten with RandomNumberGenerator.Fill() before the delete record is
  written, making original content unrecoverable on disk.
- Update FileStore.DeleteInBlock() to propagate secureErase flag.
- Update FileStore.EraseMsg() to use secureErase: true via block layer instead of
  delegating to RemoveMsg().
- Add SequenceSetTests.cs: 25 tests covering Add, Remove, Contains, Count, range
  compression, gap filling, bridge merges, enumeration, boundary values, round-trip.
- Add FileStoreTombstoneTrackingTests.cs: 12 tests covering SequenceSet tracking in
  MsgBlock, tombstone persistence through RebuildIndex recovery, secure erase
  payload overwrite verification, and FileStore.EraseMsg integration.

Go reference: filestore.go:5267 (removeMsg), filestore.go:5890 (eraseMsg),
              avl/seqset.go (SequenceSet).
This commit is contained in:
Joseph Doherty
2026-02-25 08:02:44 -05:00
parent 646a5eb2ae
commit cbe41d0efb
5 changed files with 1045 additions and 14 deletions

View File

@@ -0,0 +1,358 @@
// Reference: golang/nats-server/server/filestore.go:5267 (removeMsg)
// golang/nats-server/server/filestore.go:5890 (eraseMsg)
//
// Tests verifying:
// 1. SequenceSet correctly tracks deleted sequences in MsgBlock
// 2. Tombstones survive MsgBlock recovery (RebuildIndex populates SequenceSet)
// 3. Secure erase (Delete with secureErase=true) overwrites payload bytes
// 4. EraseMsg at FileStore level marks the sequence as deleted
//
// Go test analogs:
// TestFileStoreEraseMsgDoesNotLoseTombstones (filestore_test.go:10781)
// TestFileStoreTombstonesNoFirstSeqRollback (filestore_test.go:10911)
// TestFileStoreRemoveMsg (filestore_test.go:5267)
using System.Security.Cryptography;
using System.Text;
using NATS.Server.JetStream.Storage;
namespace NATS.Server.Tests.JetStream.Storage;
/// <summary>
/// Tests for SequenceSet-backed deletion tracking and secure erase in MsgBlock.
/// Reference: golang/nats-server/server/filestore.go eraseMsg / removeMsg.
/// </summary>
public sealed class FileStoreTombstoneTrackingTests : IDisposable
{
    // Root temp directory for this test-class instance; unique per run so
    // parallel test executions never collide on disk.
    private readonly string _testDir;

    public FileStoreTombstoneTrackingTests()
    {
        _testDir = Path.Combine(Path.GetTempPath(), $"nats-tombstone-tracking-{Guid.NewGuid():N}");
        Directory.CreateDirectory(_testDir);
    }

    public void Dispose()
    {
        // All blocks/stores are created inside `using` scopes, so their file
        // handles are closed by the time we delete the tree.
        if (Directory.Exists(_testDir))
            Directory.Delete(_testDir, recursive: true);
    }

    /// <summary>Creates a fresh, isolated subdirectory for a single test case.</summary>
    private string UniqueDir()
    {
        var dir = Path.Combine(_testDir, Guid.NewGuid().ToString("N"));
        Directory.CreateDirectory(dir);
        return dir;
    }

    // -------------------------------------------------------------------------
    // SequenceSet tracking in MsgBlock
    // -------------------------------------------------------------------------

    // Go: removeMsg — after Delete, IsDeleted returns true and DeletedCount == 1.
    [Fact]
    public void MsgBlock_Delete_TracksDeletionInSequenceSet()
    {
        var dir = UniqueDir();
        using var block = MsgBlock.Create(0, dir, maxBytes: 1024 * 1024);
        block.Write("a", ReadOnlyMemory<byte>.Empty, "payload"u8.ToArray());
        block.Write("b", ReadOnlyMemory<byte>.Empty, "payload"u8.ToArray());
        block.Write("c", ReadOnlyMemory<byte>.Empty, "payload"u8.ToArray());

        block.Delete(2).ShouldBeTrue();

        block.IsDeleted(2).ShouldBeTrue();
        block.IsDeleted(1).ShouldBeFalse();
        block.IsDeleted(3).ShouldBeFalse();
        block.DeletedCount.ShouldBe(1UL);
        block.MessageCount.ShouldBe(2UL);
    }

    // Multiple deletes tracked correctly — SequenceSet merges contiguous ranges.
    [Fact]
    public void MsgBlock_MultipleDeletes_AllTrackedInSequenceSet()
    {
        var dir = UniqueDir();
        using var block = MsgBlock.Create(0, dir, maxBytes: 1024 * 1024);
        for (var i = 0; i < 10; i++)
            block.Write($"subj.{i}", ReadOnlyMemory<byte>.Empty, "payload"u8.ToArray());

        // Delete seqs 3, 4, 5 (contiguous — SequenceSet will merge into one range).
        block.Delete(3).ShouldBeTrue();
        block.Delete(4).ShouldBeTrue();
        block.Delete(5).ShouldBeTrue();

        block.DeletedCount.ShouldBe(3UL);
        block.MessageCount.ShouldBe(7UL);
        block.IsDeleted(3).ShouldBeTrue();
        block.IsDeleted(4).ShouldBeTrue();
        block.IsDeleted(5).ShouldBeTrue();
        block.IsDeleted(2).ShouldBeFalse();
        block.IsDeleted(6).ShouldBeFalse();
    }

    // -------------------------------------------------------------------------
    // Tombstones survive recovery (RebuildIndex populates SequenceSet)
    // -------------------------------------------------------------------------

    // Go: TestFileStoreTombstonesNoFirstSeqRollback — after restart, deleted seqs still deleted.
    // Reference: filestore.go RebuildIndex reads ebit from block file.
    [Fact]
    public void MsgBlock_Recovery_TombstonesInSequenceSet()
    {
        var dir = UniqueDir();

        // Phase 1: write messages and delete one, then close.
        using (var block = MsgBlock.Create(0, dir, maxBytes: 1024 * 1024))
        {
            block.Write("a", ReadOnlyMemory<byte>.Empty, "one"u8.ToArray());
            block.Write("b", ReadOnlyMemory<byte>.Empty, "two"u8.ToArray());
            block.Write("c", ReadOnlyMemory<byte>.Empty, "three"u8.ToArray());
            block.Delete(2); // marks seq 2 with ebit on disk
            block.Flush();
        }

        // Phase 2: recover from file — SequenceSet must be populated by RebuildIndex.
        using var recovered = MsgBlock.Recover(0, dir);
        recovered.DeletedCount.ShouldBe(1UL);
        recovered.MessageCount.ShouldBe(2UL);
        recovered.IsDeleted(1).ShouldBeFalse();
        recovered.IsDeleted(2).ShouldBeTrue();
        recovered.IsDeleted(3).ShouldBeFalse();

        // Read should return null for deleted seq.
        recovered.Read(2).ShouldBeNull();
        recovered.Read(1).ShouldNotBeNull();
        recovered.Read(3).ShouldNotBeNull();
    }

    // Multiple tombstones survive recovery.
    [Fact]
    public void MsgBlock_Recovery_MultipleDeletedSeqs_AllInSequenceSet()
    {
        var dir = UniqueDir();
        using (var block = MsgBlock.Create(0, dir, maxBytes: 1024 * 1024))
        {
            for (var i = 0; i < 10; i++)
                block.Write("subj", ReadOnlyMemory<byte>.Empty, "payload"u8.ToArray());
            // Delete all odd sequences, leaving gaps between every survivor.
            block.Delete(1);
            block.Delete(3);
            block.Delete(5);
            block.Delete(7);
            block.Delete(9);
            block.Flush();
        }

        using var recovered = MsgBlock.Recover(0, dir);
        recovered.DeletedCount.ShouldBe(5UL);
        recovered.MessageCount.ShouldBe(5UL);
        for (ulong seq = 1; seq <= 9; seq += 2)
            recovered.IsDeleted(seq).ShouldBeTrue($"seq {seq} should be deleted");
        for (ulong seq = 2; seq <= 10; seq += 2)
            recovered.IsDeleted(seq).ShouldBeFalse($"seq {seq} should NOT be deleted");
    }

    // Skip records (WriteSkip) survive recovery and appear in SequenceSet.
    [Fact]
    public void MsgBlock_Recovery_SkipRecordsInSequenceSet()
    {
        var dir = UniqueDir();
        using (var block = MsgBlock.Create(0, dir, maxBytes: 1024 * 1024, firstSequence: 1))
        {
            block.Write("a", ReadOnlyMemory<byte>.Empty, "payload"u8.ToArray()); // seq=1
            block.WriteSkip(2); // tombstone
            block.WriteSkip(3); // tombstone
            block.Write("b", ReadOnlyMemory<byte>.Empty, "payload"u8.ToArray()); // seq=4
            block.Flush();
        }

        using var recovered = MsgBlock.Recover(0, dir);
        // Seqs 2 and 3 are skip records → deleted.
        recovered.IsDeleted(2).ShouldBeTrue();
        recovered.IsDeleted(3).ShouldBeTrue();
        recovered.IsDeleted(1).ShouldBeFalse();
        recovered.IsDeleted(4).ShouldBeFalse();
        recovered.DeletedCount.ShouldBe(2UL);
        recovered.MessageCount.ShouldBe(2UL);
    }

    // -------------------------------------------------------------------------
    // Secure erase — payload bytes are overwritten with random data
    // -------------------------------------------------------------------------

    // Go: eraseMsg (filestore.go:5890) — payload bytes replaced with random bytes.
    [Fact]
    public void MsgBlock_SecureErase_OverwritesPayloadBytes()
    {
        var dir = UniqueDir();
        var original = Encoding.UTF8.GetBytes("this is a secret payload");
        using (var block = MsgBlock.Create(0, dir, maxBytes: 1024 * 1024))
        {
            block.Write("secret", ReadOnlyMemory<byte>.Empty, original);
            // Perform secure erase — overwrites payload bytes in-place on disk.
            block.Delete(1, secureErase: true).ShouldBeTrue();
            block.Flush();
        }

        // Read the raw block file and verify the original payload bytes are gone.
        // NOTE(review): assumes block 0 is named "000000.blk" — confirm against
        // MsgBlock's file-naming convention if this ever starts failing.
        var blockFile = Path.Combine(dir, "000000.blk");
        var rawBytes = File.ReadAllBytes(blockFile);

        // The payload "this is a secret payload" should no longer appear as a substring.
        // Span-based IndexOf does the substring scan; -1 means not found.
        var payloadBytes = Encoding.UTF8.GetBytes("this is a secret");
        rawBytes.AsSpan().IndexOf(payloadBytes)
            .ShouldBe(-1, "Secret payload bytes should have been overwritten by secure erase");
    }

    // After secure erase, the message appears deleted (returns null on Read).
    [Fact]
    public void MsgBlock_SecureErase_MessageAppearsDeleted()
    {
        var dir = UniqueDir();
        using var block = MsgBlock.Create(0, dir, maxBytes: 1024 * 1024);
        block.Write("sensitive", ReadOnlyMemory<byte>.Empty, "secret data"u8.ToArray());
        block.Write("other", ReadOnlyMemory<byte>.Empty, "normal"u8.ToArray());

        block.Delete(1, secureErase: true).ShouldBeTrue();

        block.IsDeleted(1).ShouldBeTrue();
        block.Read(1).ShouldBeNull();
        block.Read(2).ShouldNotBeNull(); // other message unaffected
        block.DeletedCount.ShouldBe(1UL);
        block.MessageCount.ShouldBe(1UL);
    }

    // Secure erase with secureErase=false is identical to regular delete (no overwrite).
    [Fact]
    public void MsgBlock_Delete_WithSecureEraseFalse_NormalDelete()
    {
        var dir = UniqueDir();
        using var block = MsgBlock.Create(0, dir, maxBytes: 1024 * 1024);
        block.Write("x", ReadOnlyMemory<byte>.Empty, "content"u8.ToArray());

        block.Delete(1, secureErase: false).ShouldBeTrue();

        block.IsDeleted(1).ShouldBeTrue();
        block.Read(1).ShouldBeNull();
    }

    // Double secure erase returns false on second call.
    [Fact]
    public void MsgBlock_SecureErase_DoubleErase_ReturnsFalse()
    {
        var dir = UniqueDir();
        using var block = MsgBlock.Create(0, dir, maxBytes: 1024 * 1024);
        block.Write("x", ReadOnlyMemory<byte>.Empty, "content"u8.ToArray());

        block.Delete(1, secureErase: true).ShouldBeTrue();
        block.Delete(1, secureErase: true).ShouldBeFalse(); // already deleted
    }

    // -------------------------------------------------------------------------
    // DeletedSequences property returns snapshot of SequenceSet
    // -------------------------------------------------------------------------

    // DeletedSequences snapshot contains all deleted seqs (still IReadOnlySet from HashSet copy).
    [Fact]
    public void DeletedSequences_ReturnsCorrectSnapshot()
    {
        var dir = UniqueDir();
        using var block = MsgBlock.Create(0, dir, maxBytes: 1024 * 1024);
        block.Write("a", ReadOnlyMemory<byte>.Empty, "one"u8.ToArray());
        block.Write("b", ReadOnlyMemory<byte>.Empty, "two"u8.ToArray());
        block.Write("c", ReadOnlyMemory<byte>.Empty, "three"u8.ToArray());
        block.Write("d", ReadOnlyMemory<byte>.Empty, "four"u8.ToArray());
        block.Delete(2);
        block.Delete(4);

        var snapshot = block.DeletedSequences;

        snapshot.Count.ShouldBe(2);
        snapshot.ShouldContain(2UL);
        snapshot.ShouldContain(4UL);
        snapshot.ShouldNotContain(1UL);
        snapshot.ShouldNotContain(3UL);
    }

    // -------------------------------------------------------------------------
    // FileStore EraseMsg integration
    // -------------------------------------------------------------------------

    // Go: eraseMsg — after EraseMsg, message is gone and state reflects deletion.
    [Fact]
    public void FileStore_EraseMsg_MessageGoneAfterErase()
    {
        var dir = UniqueDir();
        var opts = new FileStoreOptions { Directory = dir };
        using var store = new FileStore(opts);
        store.StoreMsg("foo", null, "secret"u8.ToArray(), 0);
        store.StoreMsg("foo", null, "normal"u8.ToArray(), 0);
        var state1 = store.State();
        state1.Msgs.ShouldBe(2UL);

        store.EraseMsg(1).ShouldBeTrue();

        var state2 = store.State();
        state2.Msgs.ShouldBe(1UL);
        // Erasing same seq twice returns false.
        store.EraseMsg(1).ShouldBeFalse();
    }

    // Go: TestFileStoreEraseMsgDoesNotLoseTombstones — erase does not disturb other tombstones.
    // Reference: filestore_test.go:10781
    [Fact]
    public void FileStore_EraseMsg_DoesNotLoseTombstones()
    {
        var dir = UniqueDir();
        var opts = new FileStoreOptions { Directory = dir };
        using var store = new FileStore(opts);
        store.StoreMsg("foo", null, [], 0); // seq=1
        store.StoreMsg("foo", null, [], 0); // seq=2 (tombstone)
        store.StoreMsg("foo", null, "secret"u8.ToArray(), 0); // seq=3 (erased)
        store.RemoveMsg(2); // tombstone seq=2
        store.StoreMsg("foo", null, [], 0); // seq=4
        store.EraseMsg(3); // erase seq=3

        var state = store.State();
        state.Msgs.ShouldBe(2UL); // msgs 1 and 4 remain
        state.NumDeleted.ShouldBe(2); // seqs 2 and 3 deleted
        state.Deleted.ShouldNotBeNull();
        state.Deleted!.ShouldContain(2UL);
        state.Deleted.ShouldContain(3UL);

        // Restart — state should be identical.
        store.Dispose();
        using var store2 = new FileStore(opts);
        var after = store2.State();
        after.Msgs.ShouldBe(2UL);
        after.NumDeleted.ShouldBe(2);
        after.Deleted.ShouldNotBeNull();
        after.Deleted!.ShouldContain(2UL);
        after.Deleted.ShouldContain(3UL);
    }
}

View File

@@ -0,0 +1,395 @@
// Reference: golang/nats-server/server/avl/seqset_test.go
// Tests ported / inspired by:
// TestSequenceSetBasic → Add_Contains_Count_BasicOperations
// TestSequenceSetRange → GetEnumerator_ReturnsAscendingOrder
// TestSequenceSetDelete → Remove_SplitsAndTrimsRanges
// (range compression) → Add_ContiguousSequences_CompressesToOneRange
// (binary search) → Add_Contains_SparseInsertions_AllFound
// (boundary) → Add_SequenceZero_Works / Add_AdjacentToZero_Merges / Add_Remove_RoundTrip
using NATS.Server.JetStream.Storage;
namespace NATS.Server.Tests.JetStream.Storage;
/// <summary>
/// Unit tests for <see cref="SequenceSet"/> — the range-compressed sorted set
/// used to track soft-deleted sequences in JetStream FileStore blocks.
///
/// Reference: golang/nats-server/server/avl/seqset_test.go
/// </summary>
public sealed class SequenceSetTests
{
    // -------------------------------------------------------------------------
    // Basic Add / Contains / Count
    // -------------------------------------------------------------------------

    // Go: TestSequenceSetBasic — a freshly constructed set is empty.
    [Fact]
    public void Count_EmptySet_ReturnsZero()
    {
        var set = new SequenceSet();

        set.Count.ShouldBe(0);
        set.IsEmpty.ShouldBeTrue();
    }

    // Go: TestSequenceSetBasic — a single inserted sequence is retrievable.
    [Fact]
    public void Add_SingleSequence_ContainsIt()
    {
        var set = new SequenceSet();

        set.Add(42).ShouldBeTrue();

        set.Contains(42).ShouldBeTrue();
        set.Count.ShouldBe(1);
        set.IsEmpty.ShouldBeFalse();
    }

    // Go: inserting an existing member reports false and leaves count alone.
    [Fact]
    public void Add_DuplicateSequence_ReturnsFalse()
    {
        var set = new SequenceSet();

        set.Add(10).ShouldBeTrue();
        set.Add(10).ShouldBeFalse();

        set.Count.ShouldBe(1);
    }

    // Go: Contains is false for anything not inserted, including extremes.
    [Fact]
    public void Contains_NonMember_ReturnsFalse()
    {
        var set = new SequenceSet();
        set.Add(5);

        set.Contains(4).ShouldBeFalse();
        set.Contains(6).ShouldBeFalse();
        set.Contains(0).ShouldBeFalse();
        set.Contains(ulong.MaxValue).ShouldBeFalse();
    }

    // -------------------------------------------------------------------------
    // Range compression
    // -------------------------------------------------------------------------

    // Three contiguous inserts collapse into one stored range — the core
    // space advantage of SequenceSet over a HashSet.
    [Fact]
    public void Add_ContiguousSequences_CompressesToOneRange()
    {
        var set = new SequenceSet();
        foreach (var seq in new ulong[] { 1, 2, 3 })
            set.Add(seq);

        set.Count.ShouldBe(3);
        set.RangeCount.ShouldBe(1); // single range [1, 3]
        set.Contains(1).ShouldBeTrue();
        set.Contains(2).ShouldBeTrue();
        set.Contains(3).ShouldBeTrue();
    }

    // Insertion order must not matter: descending inserts compress too.
    [Fact]
    public void Add_ContiguousReverse_CompressesToOneRange()
    {
        var set = new SequenceSet();
        foreach (var seq in new ulong[] { 3, 2, 1 })
            set.Add(seq);

        set.Count.ShouldBe(3);
        set.RangeCount.ShouldBe(1); // single range [1, 3]
    }

    // A hole between runs keeps them as two distinct ranges.
    [Fact]
    public void Add_WithGap_TwoRanges()
    {
        var set = new SequenceSet();
        foreach (var seq in new ulong[] { 1, 2, 4, 5 }) // gap at 3
            set.Add(seq);

        set.Count.ShouldBe(4);
        set.RangeCount.ShouldBe(2); // [1,2] and [4,5]
    }

    // Inserting the missing middle value fuses the neighbors.
    [Fact]
    public void Add_FillsGap_MergesToOneRange()
    {
        var set = new SequenceSet();
        foreach (var seq in new ulong[] { 1, 2, 4, 5 })
            set.Add(seq);
        set.RangeCount.ShouldBe(2);

        set.Add(3); // bridge the hole

        set.RangeCount.ShouldBe(1); // [1, 5]
        set.Count.ShouldBe(5);
    }

    // A long unbroken run never grows past a single range entry.
    [Fact]
    public void Add_LargeContiguousRun_OnlyOneRange()
    {
        var set = new SequenceSet();
        for (var seq = 1UL; seq <= 10_000UL; seq++)
            set.Add(seq);

        set.Count.ShouldBe(10_000);
        set.RangeCount.ShouldBe(1);
    }

    // -------------------------------------------------------------------------
    // Remove / split / trim
    // -------------------------------------------------------------------------

    // Removing anything from an empty set is a no-op reporting false.
    [Fact]
    public void Remove_EmptySet_ReturnsFalse()
    {
        new SequenceSet().Remove(1).ShouldBeFalse();
    }

    // Removing a value that was never added reports false, count untouched.
    [Fact]
    public void Remove_NonMember_ReturnsFalse()
    {
        var set = new SequenceSet();
        set.Add(5);

        set.Remove(4).ShouldBeFalse();

        set.Count.ShouldBe(1);
    }

    // Dropping the sole member leaves the set empty again.
    [Fact]
    public void Remove_SingleElement_EmptiesSet()
    {
        var set = new SequenceSet();
        set.Add(7);

        set.Remove(7).ShouldBeTrue();

        set.Count.ShouldBe(0);
        set.IsEmpty.ShouldBeTrue();
        set.Contains(7).ShouldBeFalse();
    }

    // Removing a range's first element shortens it from the left.
    [Fact]
    public void Remove_LeftEdge_TrimsRange()
    {
        var set = new SequenceSet();
        foreach (var seq in new ulong[] { 1, 2, 3 })
            set.Add(seq);
        set.RangeCount.ShouldBe(1);

        set.Remove(1).ShouldBeTrue();

        set.Count.ShouldBe(2);
        set.Contains(1).ShouldBeFalse();
        set.Contains(2).ShouldBeTrue();
        set.Contains(3).ShouldBeTrue();
        set.RangeCount.ShouldBe(1); // still one range [2, 3]
    }

    // Removing a range's last element shortens it from the right.
    [Fact]
    public void Remove_RightEdge_TrimsRange()
    {
        var set = new SequenceSet();
        foreach (var seq in new ulong[] { 1, 2, 3 })
            set.Add(seq);

        set.Remove(3).ShouldBeTrue();

        set.Count.ShouldBe(2);
        set.Contains(3).ShouldBeFalse();
        set.RangeCount.ShouldBe(1); // still [1, 2]
    }

    // Removing an interior element cuts one range into two.
    [Fact]
    public void Remove_MiddleElement_SplitsRange()
    {
        var set = new SequenceSet();
        foreach (var seq in new ulong[] { 1, 2, 3, 4, 5 })
            set.Add(seq);
        set.RangeCount.ShouldBe(1);

        set.Remove(3).ShouldBeTrue();

        set.Count.ShouldBe(4);
        set.Contains(3).ShouldBeFalse();
        set.Contains(1).ShouldBeTrue();
        set.Contains(2).ShouldBeTrue();
        set.Contains(4).ShouldBeTrue();
        set.Contains(5).ShouldBeTrue();
        set.RangeCount.ShouldBe(2); // [1,2] and [4,5]
    }

    // -------------------------------------------------------------------------
    // Enumeration
    // -------------------------------------------------------------------------

    // Enumeration yields every member in ascending order regardless of
    // the order in which members were added.
    [Fact]
    public void GetEnumerator_ReturnsAscendingOrder()
    {
        var set = new SequenceSet();
        foreach (var seq in new ulong[] { 5, 3, 1, 2, 4 })
            set.Add(seq);

        var items = set.ToList();

        items.ShouldBe(new ulong[] { 1, 2, 3, 4, 5 });
    }

    // A single compressed range enumerates to each individual sequence.
    [Fact]
    public void GetEnumerator_CompressedRange_ExpandsAll()
    {
        var set = new SequenceSet();
        for (var seq = 100UL; seq <= 200UL; seq++)
            set.Add(seq);

        var items = set.ToList();

        items.Count.ShouldBe(101);
        items[0].ShouldBe(100UL);
        items[^1].ShouldBe(200UL);
    }

    // Disjoint ranges enumerate back-to-back, still sorted.
    [Fact]
    public void GetEnumerator_MultipleRanges_AllInOrder()
    {
        var set = new SequenceSet();
        foreach (var seq in new ulong[] { 10, 11, 20, 21, 22, 30 })
            set.Add(seq);

        var items = set.ToList();

        items.ShouldBe([10UL, 11UL, 20UL, 21UL, 22UL, 30UL]);
    }

    // -------------------------------------------------------------------------
    // Clear
    // -------------------------------------------------------------------------

    [Fact]
    public void Clear_RemovesAll()
    {
        var set = new SequenceSet();
        foreach (var seq in new ulong[] { 1, 2, 3 })
            set.Add(seq);

        set.Clear();

        set.Count.ShouldBe(0);
        set.IsEmpty.ShouldBeTrue();
        set.Contains(1).ShouldBeFalse();
    }

    // -------------------------------------------------------------------------
    // ToHashSet snapshot
    // -------------------------------------------------------------------------

    [Fact]
    public void ToHashSet_ReturnsAllElements()
    {
        var set = new SequenceSet();
        foreach (var seq in new ulong[] { 1, 2, 5, 6, 7 })
            set.Add(seq);

        var snapshot = set.ToHashSet();

        snapshot.Count.ShouldBe(5);
        snapshot.ShouldContain(1UL);
        snapshot.ShouldContain(2UL);
        snapshot.ShouldContain(5UL);
        snapshot.ShouldContain(6UL);
        snapshot.ShouldContain(7UL);
    }

    // -------------------------------------------------------------------------
    // Binary search correctness — sparse insertions
    // -------------------------------------------------------------------------

    // Reference: Go seqset_test.go — many widely spaced members, so every
    // insert lands in its own range and lookup must search among them all.
    [Fact]
    public void Add_Contains_SparseInsertions_AllFound()
    {
        var set = new SequenceSet();
        var inserted = new List<ulong>();
        for (var seq = 1UL; seq <= 1000UL; seq += 3) // every 3rd: 1, 4, 7, ...
        {
            set.Add(seq);
            inserted.Add(seq);
        }

        set.Count.ShouldBe(inserted.Count);
        foreach (var seq in inserted)
            set.Contains(seq).ShouldBeTrue($"Expected seq {seq} to be present");

        // Values between members must stay absent.
        set.Contains(2).ShouldBeFalse();
        set.Contains(3).ShouldBeFalse();
        set.Contains(999).ShouldBeFalse();
    }

    // -------------------------------------------------------------------------
    // Boundary conditions
    // -------------------------------------------------------------------------

    [Fact]
    public void Add_SequenceZero_Works()
    {
        var set = new SequenceSet();

        set.Add(0).ShouldBeTrue();

        set.Contains(0).ShouldBeTrue();
        set.Count.ShouldBe(1);
    }

    [Fact]
    public void Add_AdjacentToZero_Merges()
    {
        var set = new SequenceSet();
        set.Add(0);
        set.Add(1);

        set.RangeCount.ShouldBe(1); // [0, 1]
        set.Count.ShouldBe(2);
    }

    [Fact]
    public void Add_Remove_RoundTrip()
    {
        var set = new SequenceSet();
        for (var seq = 1UL; seq <= 100UL; seq++)
            set.Add(seq);

        // Strip out every odd sequence again.
        for (var seq = 1UL; seq <= 100UL; seq += 2)
            set.Remove(seq);

        set.Count.ShouldBe(50);
        for (var seq = 2UL; seq <= 100UL; seq += 2)
            set.Contains(seq).ShouldBeTrue();
        for (var seq = 1UL; seq <= 99UL; seq += 2)
            set.Contains(seq).ShouldBeFalse();
    }

    // -------------------------------------------------------------------------
    // Merging at boundaries of existing ranges (not just single adjacency)
    // -------------------------------------------------------------------------

    [Fact]
    public void Add_BridgesMultipleGaps_CorrectState()
    {
        var set = new SequenceSet();
        // Create three separate ranges: [1,2], [4,5], [7,8]
        foreach (var seq in new ulong[] { 1, 2, 4, 5, 7, 8 })
            set.Add(seq);
        set.RangeCount.ShouldBe(3);
        set.Count.ShouldBe(6);

        // Fill gap between [1,2] and [4,5]: add 3
        set.Add(3);
        set.RangeCount.ShouldBe(2); // [1,5] and [7,8]
        set.Count.ShouldBe(7);

        // Fill gap between [1,5] and [7,8]: add 6
        set.Add(6);
        set.RangeCount.ShouldBe(1); // [1,8]
        set.Count.ShouldBe(8);
    }
}