refactor: rename remaining tests to NATS.Server.Core.Tests
- Rename tests/NATS.Server.Tests -> tests/NATS.Server.Core.Tests - Update solution file, InternalsVisibleTo, and csproj references - Remove JETSTREAM_INTEGRATION_MATRIX and NATS.NKeys from csproj (moved to JetStream.Tests and Auth.Tests) - Update all namespaces from NATS.Server.Tests.* to NATS.Server.Core.Tests.* - Replace private GetFreePort/ReadUntilAsync helpers with TestUtilities calls - Fix stale namespace in Transport.Tests/NetworkingGoParityTests.cs
This commit is contained in:
0
tests/NATS.Server.Core.Tests/Internal/Avl/.gitkeep
Normal file
0
tests/NATS.Server.Core.Tests/Internal/Avl/.gitkeep
Normal file
540
tests/NATS.Server.Core.Tests/Internal/Avl/SequenceSetTests.cs
Normal file
540
tests/NATS.Server.Core.Tests/Internal/Avl/SequenceSetTests.cs
Normal file
@@ -0,0 +1,540 @@
|
||||
// Copyright 2024 The NATS Authors
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
using System.Diagnostics;
|
||||
using NATS.Server.Internal.Avl;
|
||||
|
||||
namespace NATS.Server.Core.Tests.Internal.Avl;
|
||||
|
||||
/// <summary>
|
||||
/// Tests for the AVL-backed SequenceSet, ported from Go server/avl/seqset_test.go
|
||||
/// and server/avl/norace_test.go.
|
||||
/// </summary>
|
||||
public class SequenceSetTests
|
||||
{
|
||||
private const int NumEntries = SequenceSet.NumEntries; // 2048
|
||||
private const int BitsPerBucket = SequenceSet.BitsPerBucket;
|
||||
private const int NumBuckets = SequenceSet.NumBuckets;
|
||||
|
||||
// Go: TestSeqSetBasics server/avl/seqset_test.go:22
|
||||
[Fact]
|
||||
public void Basics_InsertExistsDelete()
|
||||
{
|
||||
var ss = new SequenceSet();
|
||||
|
||||
ulong[] seqs = [22, 222, 2000, 2, 2, 4];
|
||||
foreach (var seq in seqs)
|
||||
{
|
||||
ss.Insert(seq);
|
||||
ss.Exists(seq).ShouldBeTrue();
|
||||
}
|
||||
|
||||
ss.Nodes.ShouldBe(1);
|
||||
ss.Size.ShouldBe(seqs.Length - 1); // One dup (2 appears twice)
|
||||
var (lh, rh) = ss.Heights();
|
||||
lh.ShouldBe(0);
|
||||
rh.ShouldBe(0);
|
||||
}
|
||||
|
||||
// Go: TestSeqSetLeftLean server/avl/seqset_test.go:38
|
||||
[Fact]
|
||||
public void LeftLean_TreeBalancesCorrectly()
|
||||
{
|
||||
var ss = new SequenceSet();
|
||||
|
||||
// Insert from high to low to create a left-leaning tree.
|
||||
for (var i = (ulong)(4 * NumEntries); i > 0; i--)
|
||||
{
|
||||
ss.Insert(i);
|
||||
}
|
||||
|
||||
ss.Nodes.ShouldBe(5);
|
||||
ss.Size.ShouldBe(4 * NumEntries);
|
||||
var (lh, rh) = ss.Heights();
|
||||
lh.ShouldBe(2);
|
||||
rh.ShouldBe(1);
|
||||
}
|
||||
|
||||
// Go: TestSeqSetRightLean server/avl/seqset_test.go:52
|
||||
[Fact]
|
||||
public void RightLean_TreeBalancesCorrectly()
|
||||
{
|
||||
var ss = new SequenceSet();
|
||||
|
||||
// Insert from low to high to create a right-leaning tree.
|
||||
for (var i = 0UL; i < (ulong)(4 * NumEntries); i++)
|
||||
{
|
||||
ss.Insert(i);
|
||||
}
|
||||
|
||||
ss.Nodes.ShouldBe(4);
|
||||
ss.Size.ShouldBe(4 * NumEntries);
|
||||
var (lh, rh) = ss.Heights();
|
||||
lh.ShouldBe(1);
|
||||
rh.ShouldBe(2);
|
||||
}
|
||||
|
||||
// Go: TestSeqSetCorrectness server/avl/seqset_test.go:66
|
||||
[Fact]
|
||||
public void Correctness_RandomInsertDelete()
|
||||
{
|
||||
// Generate 100k sequences across 500k range.
|
||||
const int num = 100_000;
|
||||
const int max = 500_000;
|
||||
|
||||
var rng = new Random(42);
|
||||
var set = new HashSet<ulong>();
|
||||
var ss = new SequenceSet();
|
||||
|
||||
for (var i = 0; i < num; i++)
|
||||
{
|
||||
var n = (ulong)rng.NextInt64(max + 1);
|
||||
ss.Insert(n);
|
||||
set.Add(n);
|
||||
}
|
||||
|
||||
for (var i = 0UL; i <= max; i++)
|
||||
{
|
||||
ss.Exists(i).ShouldBe(set.Contains(i));
|
||||
}
|
||||
}
|
||||
|
||||
// Go: TestSeqSetRange server/avl/seqset_test.go:85
|
||||
[Fact]
|
||||
public void Range_IteratesInOrder()
|
||||
{
|
||||
var num = 2 * NumEntries + 22;
|
||||
var nums = new List<ulong>(num);
|
||||
for (var i = 0; i < num; i++)
|
||||
{
|
||||
nums.Add((ulong)i);
|
||||
}
|
||||
|
||||
// Shuffle and insert.
|
||||
var rng = new Random(42);
|
||||
Shuffle(nums, rng);
|
||||
|
||||
var ss = new SequenceSet();
|
||||
foreach (var n in nums)
|
||||
{
|
||||
ss.Insert(n);
|
||||
}
|
||||
|
||||
// Range should produce ascending order.
|
||||
var result = new List<ulong>();
|
||||
ss.Range(n =>
|
||||
{
|
||||
result.Add(n);
|
||||
return true;
|
||||
});
|
||||
|
||||
result.Count.ShouldBe(num);
|
||||
for (var i = 0UL; i < (ulong)num; i++)
|
||||
{
|
||||
result[(int)i].ShouldBe(i);
|
||||
}
|
||||
|
||||
// Test truncating the range call.
|
||||
result.Clear();
|
||||
ss.Range(n =>
|
||||
{
|
||||
if (n >= 10)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
result.Add(n);
|
||||
return true;
|
||||
});
|
||||
|
||||
result.Count.ShouldBe(10);
|
||||
for (var i = 0UL; i < 10; i++)
|
||||
{
|
||||
result[(int)i].ShouldBe(i);
|
||||
}
|
||||
}
|
||||
|
||||
// Go: TestSeqSetDelete server/avl/seqset_test.go:123
|
||||
[Fact]
|
||||
public void Delete_VariousPatterns()
|
||||
{
|
||||
var ss = new SequenceSet();
|
||||
|
||||
ulong[] seqs = [22, 222, 2222, 2, 2, 4];
|
||||
foreach (var seq in seqs)
|
||||
{
|
||||
ss.Insert(seq);
|
||||
}
|
||||
|
||||
foreach (var seq in seqs)
|
||||
{
|
||||
ss.Delete(seq);
|
||||
ss.Exists(seq).ShouldBeFalse();
|
||||
}
|
||||
|
||||
ss.Root.ShouldBeNull();
|
||||
}
|
||||
|
||||
// Go: TestSeqSetInsertAndDeletePedantic server/avl/seqset_test.go:139
|
||||
[Fact]
|
||||
public void InsertAndDelete_PedanticVerification()
|
||||
{
|
||||
var ss = new SequenceSet();
|
||||
|
||||
var num = 50 * NumEntries + 22;
|
||||
var nums = new List<ulong>(num);
|
||||
for (var i = 0; i < num; i++)
|
||||
{
|
||||
nums.Add((ulong)i);
|
||||
}
|
||||
|
||||
var rng = new Random(42);
|
||||
Shuffle(nums, rng);
|
||||
|
||||
// Insert all, verify balanced after each insert.
|
||||
foreach (var n in nums)
|
||||
{
|
||||
ss.Insert(n);
|
||||
VerifyBalanced(ss);
|
||||
}
|
||||
|
||||
ss.Root.ShouldNotBeNull();
|
||||
|
||||
// Delete all, verify balanced after each delete.
|
||||
foreach (var n in nums)
|
||||
{
|
||||
ss.Delete(n);
|
||||
VerifyBalanced(ss);
|
||||
ss.Exists(n).ShouldBeFalse();
|
||||
if (ss.Size > 0)
|
||||
{
|
||||
ss.Root.ShouldNotBeNull();
|
||||
}
|
||||
}
|
||||
|
||||
ss.Root.ShouldBeNull();
|
||||
}
|
||||
|
||||
// Go: TestSeqSetMinMax server/avl/seqset_test.go:181
|
||||
[Fact]
|
||||
public void MinMax_TracksCorrectly()
|
||||
{
|
||||
var ss = new SequenceSet();
|
||||
|
||||
// Simple single node.
|
||||
ulong[] seqs = [22, 222, 2222, 2, 2, 4];
|
||||
foreach (var seq in seqs)
|
||||
{
|
||||
ss.Insert(seq);
|
||||
}
|
||||
|
||||
var (min, max) = ss.MinMax();
|
||||
min.ShouldBe(2UL);
|
||||
max.ShouldBe(2222UL);
|
||||
|
||||
// Multi-node
|
||||
ss.Empty();
|
||||
|
||||
var num = 22 * NumEntries + 22;
|
||||
var nums = new List<ulong>(num);
|
||||
for (var i = 0; i < num; i++)
|
||||
{
|
||||
nums.Add((ulong)i);
|
||||
}
|
||||
|
||||
var rng = new Random(42);
|
||||
Shuffle(nums, rng);
|
||||
foreach (var n in nums)
|
||||
{
|
||||
ss.Insert(n);
|
||||
}
|
||||
|
||||
(min, max) = ss.MinMax();
|
||||
min.ShouldBe(0UL);
|
||||
max.ShouldBe((ulong)(num - 1));
|
||||
}
|
||||
|
||||
// Go: TestSeqSetClone server/avl/seqset_test.go:210
|
||||
[Fact]
|
||||
public void Clone_IndependentCopy()
|
||||
{
|
||||
// Generate 100k sequences across 500k range.
|
||||
const int num = 100_000;
|
||||
const int max = 500_000;
|
||||
|
||||
var rng = new Random(42);
|
||||
var ss = new SequenceSet();
|
||||
for (var i = 0; i < num; i++)
|
||||
{
|
||||
ss.Insert((ulong)rng.NextInt64(max + 1));
|
||||
}
|
||||
|
||||
var ssc = ss.Clone();
|
||||
ssc.Size.ShouldBe(ss.Size);
|
||||
ssc.Nodes.ShouldBe(ss.Nodes);
|
||||
}
|
||||
|
||||
// Go: TestSeqSetUnion server/avl/seqset_test.go:225
|
||||
[Fact]
|
||||
public void Union_MergesSets()
|
||||
{
|
||||
var ss1 = new SequenceSet();
|
||||
var ss2 = new SequenceSet();
|
||||
|
||||
ulong[] seqs1 = [22, 222, 2222, 2, 2, 4];
|
||||
foreach (var seq in seqs1)
|
||||
{
|
||||
ss1.Insert(seq);
|
||||
}
|
||||
|
||||
ulong[] seqs2 = [33, 333, 3333, 3, 33_333, 333_333];
|
||||
foreach (var seq in seqs2)
|
||||
{
|
||||
ss2.Insert(seq);
|
||||
}
|
||||
|
||||
var ss = SequenceSet.CreateUnion(ss1, ss2);
|
||||
ss.Size.ShouldBe(11);
|
||||
|
||||
ulong[] allSeqs = [.. seqs1, .. seqs2];
|
||||
foreach (var n in allSeqs)
|
||||
{
|
||||
ss.Exists(n).ShouldBeTrue();
|
||||
}
|
||||
}
|
||||
|
||||
// Go: TestSeqSetFirst server/avl/seqset_test.go:247
|
||||
[Fact]
|
||||
public void First_ReturnsMinimum()
|
||||
{
|
||||
var ss = new SequenceSet();
|
||||
|
||||
ulong[] seqs = [22, 222, 2222, 222_222];
|
||||
foreach (var seq in seqs)
|
||||
{
|
||||
// Normal case where we pick first/base.
|
||||
ss.Insert(seq);
|
||||
ss.Root!.Base.ShouldBe((seq / (ulong)NumEntries) * (ulong)NumEntries);
|
||||
ss.Empty();
|
||||
|
||||
// Where we set the minimum start value.
|
||||
ss.SetInitialMin(seq);
|
||||
ss.Insert(seq);
|
||||
ss.Root!.Base.ShouldBe(seq);
|
||||
ss.Empty();
|
||||
}
|
||||
}
|
||||
|
||||
// Go: TestSeqSetDistinctUnion server/avl/seqset_test.go:265
|
||||
[Fact]
|
||||
public void DistinctUnion_NoOverlap()
|
||||
{
|
||||
var ss1 = new SequenceSet();
|
||||
ulong[] seqs1 = [1, 10, 100, 200];
|
||||
foreach (var seq in seqs1)
|
||||
{
|
||||
ss1.Insert(seq);
|
||||
}
|
||||
|
||||
var ss2 = new SequenceSet();
|
||||
ulong[] seqs2 = [5000, 6100, 6200, 6222];
|
||||
foreach (var seq in seqs2)
|
||||
{
|
||||
ss2.Insert(seq);
|
||||
}
|
||||
|
||||
var ss = ss1.Clone();
|
||||
ulong[] allSeqs = [.. seqs1, .. seqs2];
|
||||
|
||||
ss.Union(ss2);
|
||||
ss.Size.ShouldBe(allSeqs.Length);
|
||||
foreach (var seq in allSeqs)
|
||||
{
|
||||
ss.Exists(seq).ShouldBeTrue();
|
||||
}
|
||||
}
|
||||
|
||||
// Go: TestSeqSetDecodeV1 server/avl/seqset_test.go:289
|
||||
[Fact]
|
||||
public void DecodeV1_BackwardsCompatible()
|
||||
{
|
||||
// Encoding from v1 which was 64 buckets.
|
||||
ulong[] seqs = [22, 222, 2222, 222_222, 2_222_222];
|
||||
var encStr =
|
||||
"FgEDAAAABQAAAABgAwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACAAAAAAAAAAAAAABAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABAADgIQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABAA==";
|
||||
|
||||
var enc = Convert.FromBase64String(encStr);
|
||||
var (ss, _) = SequenceSet.Decode(enc);
|
||||
|
||||
ss.Size.ShouldBe(seqs.Length);
|
||||
foreach (var seq in seqs)
|
||||
{
|
||||
ss.Exists(seq).ShouldBeTrue();
|
||||
}
|
||||
}
|
||||
|
||||
// Go: TestNoRaceSeqSetSizeComparison server/avl/norace_test.go:33
|
||||
[Fact]
|
||||
public void SizeComparison_LargeSet()
|
||||
{
|
||||
// Create 5M random entries out of 7M range.
|
||||
const int num = 5_000_000;
|
||||
const int max = 7_000_000;
|
||||
|
||||
var rng = new Random(42);
|
||||
var seqs = new ulong[num];
|
||||
for (var i = 0; i < num; i++)
|
||||
{
|
||||
seqs[i] = (ulong)rng.NextInt64(max + 1);
|
||||
}
|
||||
|
||||
// Insert into a dictionary to compare.
|
||||
var dmap = new HashSet<ulong>(num);
|
||||
foreach (var n in seqs)
|
||||
{
|
||||
dmap.Add(n);
|
||||
}
|
||||
|
||||
// Insert into SequenceSet.
|
||||
var ss = new SequenceSet();
|
||||
foreach (var n in seqs)
|
||||
{
|
||||
ss.Insert(n);
|
||||
}
|
||||
|
||||
// Verify sizes match.
|
||||
ss.Size.ShouldBe(dmap.Count);
|
||||
|
||||
// Verify SequenceSet uses very few nodes relative to its element count.
|
||||
// With 2048 entries per node and 7M range, we expect ~ceil(7M/2048) = ~3419 nodes at most.
|
||||
ss.Nodes.ShouldBeLessThan(5000);
|
||||
}
|
||||
|
||||
// Go: TestNoRaceSeqSetEncodeLarge server/avl/norace_test.go:81
|
||||
[Fact]
|
||||
public void EncodeLarge_RoundTrips()
|
||||
{
|
||||
const int num = 2_500_000;
|
||||
const int max = 5_000_000;
|
||||
|
||||
var rng = new Random(42);
|
||||
var ss = new SequenceSet();
|
||||
for (var i = 0; i < num; i++)
|
||||
{
|
||||
ss.Insert((ulong)rng.NextInt64(max + 1));
|
||||
}
|
||||
|
||||
var sw = Stopwatch.StartNew();
|
||||
var buf = ss.Encode();
|
||||
sw.Stop();
|
||||
|
||||
// Encode should be fast (the Go test uses 1ms, we allow more for .NET JIT).
|
||||
sw.Elapsed.ShouldBeLessThan(TimeSpan.FromSeconds(1));
|
||||
|
||||
sw.Restart();
|
||||
var (ss2, bytesRead) = SequenceSet.Decode(buf);
|
||||
sw.Stop();
|
||||
|
||||
sw.Elapsed.ShouldBeLessThan(TimeSpan.FromSeconds(1));
|
||||
bytesRead.ShouldBe(buf.Length);
|
||||
ss2.Nodes.ShouldBe(ss.Nodes);
|
||||
ss2.Size.ShouldBe(ss.Size);
|
||||
}
|
||||
|
||||
// Go: TestNoRaceSeqSetRelativeSpeed server/avl/norace_test.go:123
|
||||
[Fact]
|
||||
public void RelativeSpeed_Performance()
|
||||
{
|
||||
const int num = 1_000_000;
|
||||
const int max = 3_000_000;
|
||||
|
||||
var rng = new Random(42);
|
||||
var seqs = new ulong[num];
|
||||
for (var i = 0; i < num; i++)
|
||||
{
|
||||
seqs[i] = (ulong)rng.NextInt64(max + 1);
|
||||
}
|
||||
|
||||
// SequenceSet insert.
|
||||
var sw = Stopwatch.StartNew();
|
||||
var ss = new SequenceSet();
|
||||
foreach (var n in seqs)
|
||||
{
|
||||
ss.Insert(n);
|
||||
}
|
||||
|
||||
var ssInsert = sw.Elapsed;
|
||||
|
||||
// SequenceSet lookup.
|
||||
sw.Restart();
|
||||
foreach (var n in seqs)
|
||||
{
|
||||
ss.Exists(n).ShouldBeTrue();
|
||||
}
|
||||
|
||||
var ssLookup = sw.Elapsed;
|
||||
|
||||
// Dictionary insert.
|
||||
sw.Restart();
|
||||
var dmap = new HashSet<ulong>();
|
||||
foreach (var n in seqs)
|
||||
{
|
||||
dmap.Add(n);
|
||||
}
|
||||
|
||||
var mapInsert = sw.Elapsed;
|
||||
|
||||
// Dictionary lookup.
|
||||
sw.Restart();
|
||||
foreach (var n in seqs)
|
||||
{
|
||||
dmap.Contains(n).ShouldBeTrue();
|
||||
}
|
||||
|
||||
var mapLookup = sw.Elapsed;
|
||||
|
||||
// Relaxed bounds: SequenceSet insert should be no more than 10x slower.
|
||||
// (.NET JIT and test host overhead can be significant vs Go's simpler runtime.)
|
||||
ssInsert.ShouldBeLessThan(mapInsert * 10);
|
||||
ssLookup.ShouldBeLessThan(mapLookup * 10);
|
||||
}
|
||||
|
||||
/// <summary>Verifies the AVL tree is balanced at every node.</summary>
|
||||
private static void VerifyBalanced(SequenceSet ss)
|
||||
{
|
||||
if (ss.Root == null)
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
// Check all node heights and balance factors.
|
||||
SequenceSet.Node.NodeIter(ss.Root, n =>
|
||||
{
|
||||
var expectedHeight = SequenceSet.Node.MaxHeight(n) + 1;
|
||||
n.Height.ShouldBe(expectedHeight, $"Node height is wrong for node with base {n.Base}");
|
||||
});
|
||||
|
||||
var bf = SequenceSet.Node.BalanceFactor(ss.Root);
|
||||
bf.ShouldBeInRange(-1, 1, "Tree is unbalanced at root");
|
||||
}
|
||||
|
||||
/// <summary>Fisher-Yates shuffle.</summary>
|
||||
private static void Shuffle(List<ulong> list, Random rng)
|
||||
{
|
||||
for (var i = list.Count - 1; i > 0; i--)
|
||||
{
|
||||
var j = rng.Next(i + 1);
|
||||
(list[i], list[j]) = (list[j], list[i]);
|
||||
}
|
||||
}
|
||||
}
|
||||
0
tests/NATS.Server.Core.Tests/Internal/Gsl/.gitkeep
Normal file
0
tests/NATS.Server.Core.Tests/Internal/Gsl/.gitkeep
Normal file
@@ -0,0 +1,429 @@
|
||||
// Go reference: server/gsl/gsl_test.go
|
||||
// Tests for GenericSubjectList<T> trie-based subject matching.
|
||||
|
||||
using NATS.Server.Internal.Gsl;
|
||||
|
||||
namespace NATS.Server.Core.Tests.Internal.Gsl;
|
||||
|
||||
public class GenericSubjectListTests
|
||||
{
|
||||
/// <summary>
|
||||
/// Helper: count matches for a subject.
|
||||
/// </summary>
|
||||
private static int CountMatches<T>(GenericSubjectList<T> s, string subject) where T : IEquatable<T>
|
||||
{
|
||||
var count = 0;
|
||||
s.Match(subject, _ => count++);
|
||||
return count;
|
||||
}
|
||||
|
||||
// Go: TestGenericSublistInit server/gsl/gsl_test.go:23
|
||||
[Fact]
|
||||
public void Init_EmptyList()
|
||||
{
|
||||
var s = new GenericSubjectList<int>();
|
||||
s.Count.ShouldBe(0u);
|
||||
}
|
||||
|
||||
// Go: TestGenericSublistInsertCount server/gsl/gsl_test.go:29
|
||||
[Fact]
|
||||
public void InsertCount_TracksCorrectly()
|
||||
{
|
||||
var s = new GenericSubjectList<int>();
|
||||
s.Insert("foo", 1);
|
||||
s.Insert("bar", 2);
|
||||
s.Insert("foo.bar", 3);
|
||||
s.Count.ShouldBe(3u);
|
||||
}
|
||||
|
||||
// Go: TestGenericSublistSimple server/gsl/gsl_test.go:37
|
||||
[Fact]
|
||||
public void Simple_ExactMatch()
|
||||
{
|
||||
var s = new GenericSubjectList<int>();
|
||||
s.Insert("foo", 1);
|
||||
CountMatches(s, "foo").ShouldBe(1);
|
||||
}
|
||||
|
||||
// Go: TestGenericSublistSimpleMultiTokens server/gsl/gsl_test.go:43
|
||||
[Fact]
|
||||
public void SimpleMultiTokens_Match()
|
||||
{
|
||||
var s = new GenericSubjectList<int>();
|
||||
s.Insert("foo.bar.baz", 1);
|
||||
CountMatches(s, "foo.bar.baz").ShouldBe(1);
|
||||
}
|
||||
|
||||
// Go: TestGenericSublistPartialWildcard server/gsl/gsl_test.go:49
|
||||
[Fact]
|
||||
public void PartialWildcard_StarMatches()
|
||||
{
|
||||
var s = new GenericSubjectList<int>();
|
||||
s.Insert("a.b.c", 1);
|
||||
s.Insert("a.*.c", 2);
|
||||
CountMatches(s, "a.b.c").ShouldBe(2);
|
||||
}
|
||||
|
||||
// Go: TestGenericSublistPartialWildcardAtEnd server/gsl/gsl_test.go:56
|
||||
[Fact]
|
||||
public void PartialWildcardAtEnd_StarMatches()
|
||||
{
|
||||
var s = new GenericSubjectList<int>();
|
||||
s.Insert("a.b.c", 1);
|
||||
s.Insert("a.b.*", 2);
|
||||
CountMatches(s, "a.b.c").ShouldBe(2);
|
||||
}
|
||||
|
||||
// Go: TestGenericSublistFullWildcard server/gsl/gsl_test.go:63
|
||||
[Fact]
|
||||
public void FullWildcard_GreaterThanMatches()
|
||||
{
|
||||
var s = new GenericSubjectList<int>();
|
||||
s.Insert("a.b.c", 1);
|
||||
s.Insert("a.>", 2);
|
||||
CountMatches(s, "a.b.c").ShouldBe(2);
|
||||
CountMatches(s, "a.>").ShouldBe(1);
|
||||
}
|
||||
|
||||
// Go: TestGenericSublistRemove server/gsl/gsl_test.go:71
|
||||
[Fact]
|
||||
public void Remove_DecreasesCount()
|
||||
{
|
||||
var s = new GenericSubjectList<int>();
|
||||
|
||||
s.Insert("a.b.c.d", 1);
|
||||
s.Count.ShouldBe(1u);
|
||||
CountMatches(s, "a.b.c.d").ShouldBe(1);
|
||||
|
||||
s.Remove("a.b.c.d", 1);
|
||||
s.Count.ShouldBe(0u);
|
||||
CountMatches(s, "a.b.c.d").ShouldBe(0);
|
||||
}
|
||||
|
||||
// Go: TestGenericSublistRemoveWildcard server/gsl/gsl_test.go:83
|
||||
[Fact]
|
||||
public void RemoveWildcard_CleansUp()
|
||||
{
|
||||
var s = new GenericSubjectList<int>();
|
||||
|
||||
s.Insert("a.b.c.d", 11);
|
||||
s.Insert("a.b.*.d", 22);
|
||||
s.Insert("a.b.>", 33);
|
||||
s.Count.ShouldBe(3u);
|
||||
CountMatches(s, "a.b.c.d").ShouldBe(3);
|
||||
|
||||
s.Remove("a.b.*.d", 22);
|
||||
s.Count.ShouldBe(2u);
|
||||
CountMatches(s, "a.b.c.d").ShouldBe(2);
|
||||
|
||||
s.Remove("a.b.>", 33);
|
||||
s.Count.ShouldBe(1u);
|
||||
CountMatches(s, "a.b.c.d").ShouldBe(1);
|
||||
|
||||
s.Remove("a.b.c.d", 11);
|
||||
s.Count.ShouldBe(0u);
|
||||
CountMatches(s, "a.b.c.d").ShouldBe(0);
|
||||
}
|
||||
|
||||
// Go: TestGenericSublistRemoveCleanup server/gsl/gsl_test.go:105
|
||||
[Fact]
|
||||
public void RemoveCleanup_PrunesEmptyNodes()
|
||||
{
|
||||
var s = new GenericSubjectList<int>();
|
||||
s.NumLevels().ShouldBe(0);
|
||||
s.Insert("a.b.c.d.e.f", 1);
|
||||
s.NumLevels().ShouldBe(6);
|
||||
s.Remove("a.b.c.d.e.f", 1);
|
||||
s.NumLevels().ShouldBe(0);
|
||||
}
|
||||
|
||||
// Go: TestGenericSublistRemoveCleanupWildcards server/gsl/gsl_test.go:114
|
||||
[Fact]
|
||||
public void RemoveCleanupWildcards_PrunesEmptyNodes()
|
||||
{
|
||||
var s = new GenericSubjectList<int>();
|
||||
s.NumLevels().ShouldBe(0);
|
||||
s.Insert("a.b.*.d.e.>", 1);
|
||||
s.NumLevels().ShouldBe(6);
|
||||
s.Remove("a.b.*.d.e.>", 1);
|
||||
s.NumLevels().ShouldBe(0);
|
||||
}
|
||||
|
||||
// Go: TestGenericSublistInvalidSubjectsInsert server/gsl/gsl_test.go:123
|
||||
[Fact]
|
||||
public void InvalidSubjectsInsert_RejectsInvalid()
|
||||
{
|
||||
var s = new GenericSubjectList<int>();
|
||||
|
||||
// Empty tokens and FWC not terminal
|
||||
Should.Throw<InvalidOperationException>(() => s.Insert(".foo", 1));
|
||||
Should.Throw<InvalidOperationException>(() => s.Insert("foo.", 1));
|
||||
Should.Throw<InvalidOperationException>(() => s.Insert("foo..bar", 1));
|
||||
Should.Throw<InvalidOperationException>(() => s.Insert("foo.bar..baz", 1));
|
||||
Should.Throw<InvalidOperationException>(() => s.Insert("foo.>.baz", 1));
|
||||
}
|
||||
|
||||
// Go: TestGenericSublistBadSubjectOnRemove server/gsl/gsl_test.go:134
|
||||
[Fact]
|
||||
public void BadSubjectOnRemove_RejectsInvalid()
|
||||
{
|
||||
var s = new GenericSubjectList<int>();
|
||||
Should.Throw<InvalidOperationException>(() => s.Insert("a.b..d", 1));
|
||||
Should.Throw<InvalidOperationException>(() => s.Remove("a.b..d", 1));
|
||||
Should.Throw<InvalidOperationException>(() => s.Remove("a.>.b", 1));
|
||||
}
|
||||
|
||||
// Go: TestGenericSublistTwoTokenPubMatchSingleTokenSub server/gsl/gsl_test.go:141
|
||||
[Fact]
|
||||
public void TwoTokenPub_DoesNotMatchSingleTokenSub()
|
||||
{
|
||||
var s = new GenericSubjectList<int>();
|
||||
s.Insert("foo", 1);
|
||||
CountMatches(s, "foo").ShouldBe(1);
|
||||
CountMatches(s, "foo.bar").ShouldBe(0);
|
||||
}
|
||||
|
||||
// Go: TestGenericSublistInsertWithWildcardsAsLiterals server/gsl/gsl_test.go:148
|
||||
[Fact]
|
||||
public void InsertWithWildcardsAsLiterals_TreatsAsLiteral()
|
||||
{
|
||||
var s = new GenericSubjectList<int>();
|
||||
var subjects = new[] { "foo.*-", "foo.>-" };
|
||||
for (var i = 0; i < subjects.Length; i++)
|
||||
{
|
||||
s.Insert(subjects[i], i);
|
||||
CountMatches(s, "foo.bar").ShouldBe(0);
|
||||
CountMatches(s, subjects[i]).ShouldBe(1);
|
||||
}
|
||||
}
|
||||
|
||||
// Go: TestGenericSublistRemoveWithWildcardsAsLiterals server/gsl/gsl_test.go:157
|
||||
[Fact]
|
||||
public void RemoveWithWildcardsAsLiterals_RemovesCorrectly()
|
||||
{
|
||||
var s = new GenericSubjectList<int>();
|
||||
var subjects = new[] { "foo.*-", "foo.>-" };
|
||||
for (var i = 0; i < subjects.Length; i++)
|
||||
{
|
||||
s.Insert(subjects[i], i);
|
||||
CountMatches(s, "foo.bar").ShouldBe(0);
|
||||
CountMatches(s, subjects[i]).ShouldBe(1);
|
||||
Should.Throw<KeyNotFoundException>(() => s.Remove("foo.bar", i));
|
||||
s.Count.ShouldBe(1u);
|
||||
s.Remove(subjects[i], i);
|
||||
s.Count.ShouldBe(0u);
|
||||
}
|
||||
}
|
||||
|
||||
// Go: TestGenericSublistMatchWithEmptyTokens server/gsl/gsl_test.go:170
|
||||
[Theory]
|
||||
[InlineData(".foo")]
|
||||
[InlineData("..foo")]
|
||||
[InlineData("foo..")]
|
||||
[InlineData("foo.")]
|
||||
[InlineData("foo..bar")]
|
||||
[InlineData("foo...bar")]
|
||||
public void MatchWithEmptyTokens_HandlesEdgeCase(string subject)
|
||||
{
|
||||
var s = new GenericSubjectList<int>();
|
||||
s.Insert(">", 1);
|
||||
CountMatches(s, subject).ShouldBe(0);
|
||||
}
|
||||
|
||||
// Go: TestGenericSublistHasInterest server/gsl/gsl_test.go:180
|
||||
[Fact]
|
||||
public void HasInterest_ReturnsTrueForMatchingSubjects()
|
||||
{
|
||||
var s = new GenericSubjectList<int>();
|
||||
s.Insert("foo", 11);
|
||||
|
||||
// Expect to find that "foo" matches but "bar" doesn't.
|
||||
s.HasInterest("foo").ShouldBeTrue();
|
||||
s.HasInterest("bar").ShouldBeFalse();
|
||||
|
||||
// Call Match on a subject we know there is no match.
|
||||
CountMatches(s, "bar").ShouldBe(0);
|
||||
s.HasInterest("bar").ShouldBeFalse();
|
||||
|
||||
// Remove fooSub and check interest again
|
||||
s.Remove("foo", 11);
|
||||
s.HasInterest("foo").ShouldBeFalse();
|
||||
|
||||
// Try with partial wildcard *
|
||||
s.Insert("foo.*", 22);
|
||||
s.HasInterest("foo").ShouldBeFalse();
|
||||
s.HasInterest("foo.bar").ShouldBeTrue();
|
||||
s.HasInterest("foo.bar.baz").ShouldBeFalse();
|
||||
|
||||
// Remove sub, there should be no interest
|
||||
s.Remove("foo.*", 22);
|
||||
s.HasInterest("foo").ShouldBeFalse();
|
||||
s.HasInterest("foo.bar").ShouldBeFalse();
|
||||
s.HasInterest("foo.bar.baz").ShouldBeFalse();
|
||||
|
||||
// Try with full wildcard >
|
||||
s.Insert("foo.>", 33);
|
||||
s.HasInterest("foo").ShouldBeFalse();
|
||||
s.HasInterest("foo.bar").ShouldBeTrue();
|
||||
s.HasInterest("foo.bar.baz").ShouldBeTrue();
|
||||
|
||||
s.Remove("foo.>", 33);
|
||||
s.HasInterest("foo").ShouldBeFalse();
|
||||
s.HasInterest("foo.bar").ShouldBeFalse();
|
||||
s.HasInterest("foo.bar.baz").ShouldBeFalse();
|
||||
|
||||
// Try with *.>
|
||||
s.Insert("*.>", 44);
|
||||
s.HasInterest("foo").ShouldBeFalse();
|
||||
s.HasInterest("foo.bar").ShouldBeTrue();
|
||||
s.HasInterest("foo.baz").ShouldBeTrue();
|
||||
s.Remove("*.>", 44);
|
||||
|
||||
// Try with *.bar
|
||||
s.Insert("*.bar", 55);
|
||||
s.HasInterest("foo").ShouldBeFalse();
|
||||
s.HasInterest("foo.bar").ShouldBeTrue();
|
||||
s.HasInterest("foo.baz").ShouldBeFalse();
|
||||
s.Remove("*.bar", 55);
|
||||
|
||||
// Try with *
|
||||
s.Insert("*", 66);
|
||||
s.HasInterest("foo").ShouldBeTrue();
|
||||
s.HasInterest("foo.bar").ShouldBeFalse();
|
||||
s.Remove("*", 66);
|
||||
}
|
||||
|
||||
// Go: TestGenericSublistHasInterestOverlapping server/gsl/gsl_test.go:237
|
||||
[Fact]
|
||||
public void HasInterestOverlapping_HandlesOverlap()
|
||||
{
|
||||
var s = new GenericSubjectList<int>();
|
||||
s.Insert("stream.A.child", 11);
|
||||
s.Insert("stream.*", 11);
|
||||
s.HasInterest("stream.A.child").ShouldBeTrue();
|
||||
s.HasInterest("stream.A").ShouldBeTrue();
|
||||
}
|
||||
|
||||
// Go: TestGenericSublistHasInterestStartingInRace server/gsl/gsl_test.go:247
|
||||
[Fact]
|
||||
public async Task HasInterestStartingIn_ThreadSafe()
|
||||
{
|
||||
var s = new GenericSubjectList<int>();
|
||||
|
||||
// Pre-populate with some patterns
|
||||
for (var i = 0; i < 10; i++)
|
||||
{
|
||||
s.Insert("foo.bar.baz", i);
|
||||
s.Insert("foo.*.baz", i + 10);
|
||||
s.Insert("foo.>", i + 20);
|
||||
}
|
||||
|
||||
const int iterations = 1000;
|
||||
var tasks = new List<Task>();
|
||||
|
||||
// Task 1: repeatedly call HasInterestStartingIn
|
||||
tasks.Add(Task.Run(() =>
|
||||
{
|
||||
for (var i = 0; i < iterations; i++)
|
||||
{
|
||||
s.HasInterestStartingIn("foo");
|
||||
s.HasInterestStartingIn("foo.bar");
|
||||
s.HasInterestStartingIn("foo.bar.baz");
|
||||
s.HasInterestStartingIn("other.subject");
|
||||
}
|
||||
}));
|
||||
|
||||
// Task 2: repeatedly modify the sublist
|
||||
tasks.Add(Task.Run(() =>
|
||||
{
|
||||
for (var i = 0; i < iterations; i++)
|
||||
{
|
||||
var val = 1000 + i;
|
||||
var ch = (char)('a' + (i % 26));
|
||||
s.Insert($"test.subject.{ch}", val);
|
||||
s.Insert("foo.*.test", val);
|
||||
s.Remove($"test.subject.{ch}", val);
|
||||
s.Remove("foo.*.test", val);
|
||||
}
|
||||
}));
|
||||
|
||||
// Task 3: also call HasInterest (which does lock)
|
||||
tasks.Add(Task.Run(() =>
|
||||
{
|
||||
for (var i = 0; i < iterations; i++)
|
||||
{
|
||||
s.HasInterest("foo.bar.baz");
|
||||
s.HasInterest("foo.something.baz");
|
||||
}
|
||||
}));
|
||||
|
||||
// Wait for all tasks - should not throw (no deadlocks or data races)
|
||||
await Task.WhenAll(tasks);
|
||||
}
|
||||
|
||||
// Go: TestGenericSublistNumInterest server/gsl/gsl_test.go:298
|
||||
[Fact]
|
||||
public void NumInterest_CountsMatchingSubscriptions()
|
||||
{
|
||||
var s = new GenericSubjectList<int>();
|
||||
s.Insert("foo", 11);
|
||||
|
||||
// Helper to check both Match count and NumInterest agree
|
||||
void RequireNumInterest(string subj, int expected)
|
||||
{
|
||||
CountMatches(s, subj).ShouldBe(expected);
|
||||
s.NumInterest(subj).ShouldBe(expected);
|
||||
}
|
||||
|
||||
// Expect to find that "foo" matches but "bar" doesn't.
|
||||
RequireNumInterest("foo", 1);
|
||||
RequireNumInterest("bar", 0);
|
||||
|
||||
// Remove fooSub and check interest again
|
||||
s.Remove("foo", 11);
|
||||
RequireNumInterest("foo", 0);
|
||||
|
||||
// Try with partial wildcard *
|
||||
s.Insert("foo.*", 22);
|
||||
RequireNumInterest("foo", 0);
|
||||
RequireNumInterest("foo.bar", 1);
|
||||
RequireNumInterest("foo.bar.baz", 0);
|
||||
|
||||
// Remove sub, there should be no interest
|
||||
s.Remove("foo.*", 22);
|
||||
RequireNumInterest("foo", 0);
|
||||
RequireNumInterest("foo.bar", 0);
|
||||
RequireNumInterest("foo.bar.baz", 0);
|
||||
|
||||
// Full wildcard >
|
||||
s.Insert("foo.>", 33);
|
||||
RequireNumInterest("foo", 0);
|
||||
RequireNumInterest("foo.bar", 1);
|
||||
RequireNumInterest("foo.bar.baz", 1);
|
||||
|
||||
s.Remove("foo.>", 33);
|
||||
RequireNumInterest("foo", 0);
|
||||
RequireNumInterest("foo.bar", 0);
|
||||
RequireNumInterest("foo.bar.baz", 0);
|
||||
|
||||
// *.>
|
||||
s.Insert("*.>", 44);
|
||||
RequireNumInterest("foo", 0);
|
||||
RequireNumInterest("foo.bar", 1);
|
||||
RequireNumInterest("foo.bar.baz", 1);
|
||||
s.Remove("*.>", 44);
|
||||
|
||||
// *.bar
|
||||
s.Insert("*.bar", 55);
|
||||
RequireNumInterest("foo", 0);
|
||||
RequireNumInterest("foo.bar", 1);
|
||||
RequireNumInterest("foo.bar.baz", 0);
|
||||
s.Remove("*.bar", 55);
|
||||
|
||||
// *
|
||||
s.Insert("*", 66);
|
||||
RequireNumInterest("foo", 1);
|
||||
RequireNumInterest("foo.bar", 0);
|
||||
s.Remove("*", 66);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,91 @@
|
||||
using System.Text;
|
||||
using NATS.Server.Internal.Avl;
|
||||
using NATS.Server.Internal.Gsl;
|
||||
using NATS.Server.Internal.SubjectTree;
|
||||
using NATS.Server.Internal.SysMem;
|
||||
using NATS.Server.Internal.TimeHashWheel;
|
||||
|
||||
namespace NATS.Server.Core.Tests.Internal;
|
||||
|
||||
public class InternalDsParityBatch2Tests
|
||||
{
|
||||
[Fact]
|
||||
public void SubjectTreeHelper_IntersectGSL_matches_interested_subjects_once()
|
||||
{
|
||||
var tree = new SubjectTree<int>();
|
||||
tree.Insert("foo.bar"u8.ToArray(), 1);
|
||||
tree.Insert("foo.baz"u8.ToArray(), 2);
|
||||
tree.Insert("other.subject"u8.ToArray(), 3);
|
||||
|
||||
var sublist = new GenericSubjectList<int>();
|
||||
sublist.Insert("foo.*", 1);
|
||||
sublist.Insert("foo.bar", 2); // overlap should not duplicate callback for same subject
|
||||
|
||||
var seen = new HashSet<string>(StringComparer.Ordinal);
|
||||
SubjectTreeHelper.IntersectGSL(tree, sublist, (subject, _) =>
|
||||
{
|
||||
seen.Add(Encoding.UTF8.GetString(subject));
|
||||
});
|
||||
|
||||
seen.Count.ShouldBe(2);
|
||||
seen.ShouldContain("foo.bar");
|
||||
seen.ShouldContain("foo.baz");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void SubjectTree_Dump_outputs_node_and_leaf_structure()
|
||||
{
|
||||
var tree = new SubjectTree<int>();
|
||||
tree.Insert("foo.bar"u8.ToArray(), 1);
|
||||
tree.Insert("foo.baz"u8.ToArray(), 2);
|
||||
|
||||
using var sw = new StringWriter();
|
||||
tree.Dump(sw);
|
||||
var dump = sw.ToString();
|
||||
|
||||
dump.ShouldContain("NODE");
|
||||
dump.ShouldContain("LEAF");
|
||||
dump.ShouldContain("Prefix:");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void SequenceSet_Encode_supports_destination_buffer_reuse()
|
||||
{
|
||||
var set = new SequenceSet();
|
||||
set.Insert(1);
|
||||
set.Insert(65);
|
||||
set.Insert(1024);
|
||||
|
||||
var buffer = new byte[set.EncodeLength() + 32];
|
||||
var written = set.Encode(buffer);
|
||||
written.ShouldBe(set.EncodeLength());
|
||||
|
||||
var (decoded, bytesRead) = SequenceSet.Decode(buffer.AsSpan(0, written));
|
||||
bytesRead.ShouldBe(written);
|
||||
decoded.Exists(1).ShouldBeTrue();
|
||||
decoded.Exists(65).ShouldBeTrue();
|
||||
decoded.Exists(1024).ShouldBeTrue();
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void HashWheelEntry_struct_exposes_sequence_and_expiration()
|
||||
{
|
||||
var entry = new HashWheel.HashWheelEntry(42, 99);
|
||||
entry.Sequence.ShouldBe((ulong)42);
|
||||
entry.Expires.ShouldBe(99);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void SystemMemory_returns_positive_memory_value()
|
||||
{
|
||||
SystemMemory.Memory().ShouldBeGreaterThan(0);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void SimpleSubjectList_works_with_empty_marker_values()
|
||||
{
|
||||
var list = new SimpleSubjectList();
|
||||
list.Insert("foo.bar", new SimpleSublistValue());
|
||||
list.HasInterest("foo.bar").ShouldBeTrue();
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,37 @@
|
||||
using System.Reflection;
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
using NATS.Server.Monitoring;
|
||||
|
||||
namespace NATS.Server.Core.Tests.Internal;
|
||||
|
||||
public class InternalDsPeriodicSamplerParityTests
|
||||
{
|
||||
[Fact]
|
||||
[SlopwatchSuppress("SW004", "Test must observe a real 1-second CPU sampling timer tick; wall-clock elapsed time is the observable under test")]
|
||||
public async Task VarzHandler_uses_periodic_background_cpu_sampler()
|
||||
{
|
||||
var options = new NatsOptions { Host = "127.0.0.1", Port = 0 };
|
||||
var server = new NatsServer(options, NullLoggerFactory.Instance);
|
||||
using var cts = new CancellationTokenSource();
|
||||
_ = server.StartAsync(cts.Token);
|
||||
await server.WaitForReadyAsync();
|
||||
|
||||
try
|
||||
{
|
||||
using var handler = new VarzHandler(server, options, NullLoggerFactory.Instance);
|
||||
var field = typeof(VarzHandler).GetField("_lastCpuSampleTime", BindingFlags.NonPublic | BindingFlags.Instance);
|
||||
field.ShouldNotBeNull();
|
||||
|
||||
var before = (DateTime)field!.GetValue(handler)!;
|
||||
await Task.Delay(TimeSpan.FromMilliseconds(1200));
|
||||
var after = (DateTime)field.GetValue(handler)!;
|
||||
|
||||
after.ShouldBeGreaterThan(before);
|
||||
}
|
||||
finally
|
||||
{
|
||||
await cts.CancelAsync();
|
||||
server.Dispose();
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,628 @@
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
using NATS.Server.Events;
|
||||
using NATS.Server.Internal;
|
||||
|
||||
namespace NATS.Server.Core.Tests.Internal;
|
||||
|
||||
/// <summary>
|
||||
/// Tests for MsgTraceContext: header parsing, event collection, trace propagation,
|
||||
/// JetStream two-phase send, hop tracking, and JSON serialization.
|
||||
/// Go reference: msgtrace.go — initMsgTrace, sendEvent, addEgressEvent,
|
||||
/// addJetStreamEvent, genHeaderMapIfTraceHeadersPresent.
|
||||
/// </summary>
|
||||
public class MessageTraceContextTests
|
||||
{
|
||||
private static ReadOnlyMemory<byte> BuildHeaders(params (string key, string value)[] headers)
|
||||
{
|
||||
var sb = new StringBuilder("NATS/1.0\r\n");
|
||||
foreach (var (key, value) in headers)
|
||||
{
|
||||
sb.Append($"{key}: {value}\r\n");
|
||||
}
|
||||
sb.Append("\r\n");
|
||||
return Encoding.ASCII.GetBytes(sb.ToString());
|
||||
}
|
||||
|
||||
// --- Header parsing ---
|
||||
|
||||
[Fact]
|
||||
public void ParseTraceHeaders_returns_null_for_no_trace_headers()
|
||||
{
|
||||
var headers = BuildHeaders(("Content-Type", "text/plain"));
|
||||
var result = MsgTraceContext.ParseTraceHeaders(headers.Span);
|
||||
result.ShouldBeNull();
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ParseTraceHeaders_returns_map_when_trace_dest_present()
|
||||
{
|
||||
var headers = BuildHeaders(
|
||||
(MsgTraceHeaders.TraceDest, "trace.subject"),
|
||||
("Content-Type", "text/plain"));
|
||||
var result = MsgTraceContext.ParseTraceHeaders(headers.Span);
|
||||
result.ShouldNotBeNull();
|
||||
result.ShouldContainKey(MsgTraceHeaders.TraceDest);
|
||||
result[MsgTraceHeaders.TraceDest][0].ShouldBe("trace.subject");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ParseTraceHeaders_returns_null_when_trace_disabled()
|
||||
{
|
||||
var headers = BuildHeaders(
|
||||
(MsgTraceHeaders.TraceDest, MsgTraceHeaders.TraceDestDisabled));
|
||||
var result = MsgTraceContext.ParseTraceHeaders(headers.Span);
|
||||
result.ShouldBeNull();
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ParseTraceHeaders_detects_traceparent_with_sampled_flag()
|
||||
{
|
||||
// W3C trace context: version-traceid-parentid-flags (01 = sampled)
|
||||
var headers = BuildHeaders(
|
||||
("traceparent", "00-0af7651916cd43dd8448eb211c80319c-b7ad6b7169203331-01"));
|
||||
var result = MsgTraceContext.ParseTraceHeaders(headers.Span);
|
||||
result.ShouldNotBeNull();
|
||||
result.ShouldContainKey("traceparent");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ParseTraceHeaders_ignores_traceparent_without_sampled_flag()
|
||||
{
|
||||
// flags=00 means not sampled
|
||||
var headers = BuildHeaders(
|
||||
("traceparent", "00-0af7651916cd43dd8448eb211c80319c-b7ad6b7169203331-00"));
|
||||
var result = MsgTraceContext.ParseTraceHeaders(headers.Span);
|
||||
result.ShouldBeNull();
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ParseTraceHeaders_returns_null_for_empty_input()
|
||||
{
|
||||
var result = MsgTraceContext.ParseTraceHeaders(ReadOnlySpan<byte>.Empty);
|
||||
result.ShouldBeNull();
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ParseTraceHeaders_returns_null_for_non_nats_header()
|
||||
{
|
||||
var headers = Encoding.ASCII.GetBytes("HTTP/1.1 200 OK\r\nFoo: bar\r\n\r\n");
|
||||
var result = MsgTraceContext.ParseTraceHeaders(headers);
|
||||
result.ShouldBeNull();
|
||||
}
|
||||
|
||||
// --- Context creation ---
|
||||
|
||||
[Fact]
|
||||
public void Create_returns_null_for_empty_headers()
|
||||
{
|
||||
var ctx = MsgTraceContext.Create(
|
||||
ReadOnlyMemory<byte>.Empty,
|
||||
clientId: 1,
|
||||
clientName: "test",
|
||||
accountName: "$G",
|
||||
subject: "test.sub",
|
||||
msgSize: 10);
|
||||
ctx.ShouldBeNull();
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Create_returns_null_for_headers_without_trace()
|
||||
{
|
||||
var headers = BuildHeaders(("Content-Type", "text/plain"));
|
||||
var ctx = MsgTraceContext.Create(
|
||||
headers,
|
||||
clientId: 1,
|
||||
clientName: "test",
|
||||
accountName: "$G",
|
||||
subject: "test.sub",
|
||||
msgSize: 10);
|
||||
ctx.ShouldBeNull();
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Create_builds_context_with_ingress_event()
|
||||
{
|
||||
var headers = BuildHeaders(
|
||||
(MsgTraceHeaders.TraceDest, "trace.dest"));
|
||||
|
||||
var ctx = MsgTraceContext.Create(
|
||||
headers,
|
||||
clientId: 42,
|
||||
clientName: "my-publisher",
|
||||
accountName: "$G",
|
||||
subject: "orders.new",
|
||||
msgSize: 128);
|
||||
|
||||
ctx.ShouldNotBeNull();
|
||||
ctx.IsActive.ShouldBeTrue();
|
||||
ctx.Destination.ShouldBe("trace.dest");
|
||||
ctx.TraceOnly.ShouldBeFalse();
|
||||
ctx.AccountName.ShouldBe("$G");
|
||||
|
||||
// Check ingress event
|
||||
ctx.Event.Events.Count.ShouldBe(1);
|
||||
var ingress = ctx.Event.Events[0].ShouldBeOfType<MsgTraceIngress>();
|
||||
ingress.Type.ShouldBe(MsgTraceTypes.Ingress);
|
||||
ingress.Cid.ShouldBe(42UL);
|
||||
ingress.Name.ShouldBe("my-publisher");
|
||||
ingress.Account.ShouldBe("$G");
|
||||
ingress.Subject.ShouldBe("orders.new");
|
||||
ingress.Error.ShouldBeNull();
|
||||
|
||||
// Check request info
|
||||
ctx.Event.Request.MsgSize.ShouldBe(128);
|
||||
ctx.Event.Request.Header.ShouldNotBeNull();
|
||||
ctx.Event.Request.Header.ShouldContainKey(MsgTraceHeaders.TraceDest);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Create_with_trace_only_flag()
|
||||
{
|
||||
var headers = BuildHeaders(
|
||||
(MsgTraceHeaders.TraceDest, "trace.dest"),
|
||||
(MsgTraceHeaders.TraceOnly, "true"));
|
||||
|
||||
var ctx = MsgTraceContext.Create(
|
||||
headers,
|
||||
clientId: 1,
|
||||
clientName: "test",
|
||||
accountName: "$G",
|
||||
subject: "test",
|
||||
msgSize: 0);
|
||||
|
||||
ctx.ShouldNotBeNull();
|
||||
ctx.TraceOnly.ShouldBeTrue();
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Create_with_trace_only_flag_numeric()
|
||||
{
|
||||
var headers = BuildHeaders(
|
||||
(MsgTraceHeaders.TraceDest, "trace.dest"),
|
||||
(MsgTraceHeaders.TraceOnly, "1"));
|
||||
|
||||
var ctx = MsgTraceContext.Create(
|
||||
headers,
|
||||
clientId: 1,
|
||||
clientName: "test",
|
||||
accountName: "$G",
|
||||
subject: "test",
|
||||
msgSize: 0);
|
||||
|
||||
ctx.ShouldNotBeNull();
|
||||
ctx.TraceOnly.ShouldBeTrue();
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Create_without_trace_only_flag()
|
||||
{
|
||||
var headers = BuildHeaders(
|
||||
(MsgTraceHeaders.TraceDest, "trace.dest"),
|
||||
(MsgTraceHeaders.TraceOnly, "false"));
|
||||
|
||||
var ctx = MsgTraceContext.Create(
|
||||
headers,
|
||||
clientId: 1,
|
||||
clientName: "test",
|
||||
accountName: "$G",
|
||||
subject: "test",
|
||||
msgSize: 0);
|
||||
|
||||
ctx.ShouldNotBeNull();
|
||||
ctx.TraceOnly.ShouldBeFalse();
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Create_captures_hop_from_non_client_kind()
|
||||
{
|
||||
var headers = BuildHeaders(
|
||||
(MsgTraceHeaders.TraceDest, "trace.dest"),
|
||||
(MsgTraceHeaders.TraceHop, "1.2"));
|
||||
|
||||
var ctx = MsgTraceContext.Create(
|
||||
headers,
|
||||
clientId: 1,
|
||||
clientName: "route-1",
|
||||
accountName: "$G",
|
||||
subject: "test",
|
||||
msgSize: 0,
|
||||
clientKind: MsgTraceContext.KindRouter);
|
||||
|
||||
ctx.ShouldNotBeNull();
|
||||
ctx.Hop.ShouldBe("1.2");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Create_ignores_hop_from_client_kind()
|
||||
{
|
||||
var headers = BuildHeaders(
|
||||
(MsgTraceHeaders.TraceDest, "trace.dest"),
|
||||
(MsgTraceHeaders.TraceHop, "1.2"));
|
||||
|
||||
var ctx = MsgTraceContext.Create(
|
||||
headers,
|
||||
clientId: 1,
|
||||
clientName: "test",
|
||||
accountName: "$G",
|
||||
subject: "test",
|
||||
msgSize: 0,
|
||||
clientKind: MsgTraceContext.KindClient);
|
||||
|
||||
ctx.ShouldNotBeNull();
|
||||
ctx.Hop.ShouldBe(""); // Client hop is ignored
|
||||
}
|
||||
|
||||
// --- Event recording ---
|
||||
|
||||
[Fact]
|
||||
public void SetIngressError_sets_error_on_first_event()
|
||||
{
|
||||
var ctx = CreateSimpleContext();
|
||||
ctx.SetIngressError("publish denied");
|
||||
|
||||
var ingress = ctx.Event.Events[0].ShouldBeOfType<MsgTraceIngress>();
|
||||
ingress.Error.ShouldBe("publish denied");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void AddSubjectMappingEvent_appends_mapping()
|
||||
{
|
||||
var ctx = CreateSimpleContext();
|
||||
ctx.AddSubjectMappingEvent("orders.mapped");
|
||||
|
||||
ctx.Event.Events.Count.ShouldBe(2);
|
||||
var mapping = ctx.Event.Events[1].ShouldBeOfType<MsgTraceSubjectMapping>();
|
||||
mapping.Type.ShouldBe(MsgTraceTypes.SubjectMapping);
|
||||
mapping.MappedTo.ShouldBe("orders.mapped");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void AddEgressEvent_appends_egress_with_subscription_and_queue()
|
||||
{
|
||||
var ctx = CreateSimpleContext();
|
||||
ctx.AddEgressEvent(
|
||||
clientId: 99,
|
||||
clientName: "subscriber",
|
||||
clientKind: MsgTraceContext.KindClient,
|
||||
subscriptionSubject: "orders.>",
|
||||
queue: "workers");
|
||||
|
||||
ctx.Event.Events.Count.ShouldBe(2);
|
||||
var egress = ctx.Event.Events[1].ShouldBeOfType<MsgTraceEgress>();
|
||||
egress.Type.ShouldBe(MsgTraceTypes.Egress);
|
||||
egress.Kind.ShouldBe(MsgTraceContext.KindClient);
|
||||
egress.Cid.ShouldBe(99UL);
|
||||
egress.Name.ShouldBe("subscriber");
|
||||
egress.Subscription.ShouldBe("orders.>");
|
||||
egress.Queue.ShouldBe("workers");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void AddEgressEvent_records_account_when_different_from_ingress()
|
||||
{
|
||||
var ctx = CreateSimpleContext(accountName: "acctA");
|
||||
ctx.AddEgressEvent(
|
||||
clientId: 99,
|
||||
clientName: "subscriber",
|
||||
clientKind: MsgTraceContext.KindClient,
|
||||
subscriptionSubject: "api.>",
|
||||
account: "acctB");
|
||||
|
||||
var egress = ctx.Event.Events[1].ShouldBeOfType<MsgTraceEgress>();
|
||||
egress.Account.ShouldBe("acctB");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void AddEgressEvent_omits_account_when_same_as_ingress()
|
||||
{
|
||||
var ctx = CreateSimpleContext(accountName: "$G");
|
||||
ctx.AddEgressEvent(
|
||||
clientId: 99,
|
||||
clientName: "subscriber",
|
||||
clientKind: MsgTraceContext.KindClient,
|
||||
subscriptionSubject: "test",
|
||||
account: "$G");
|
||||
|
||||
var egress = ctx.Event.Events[1].ShouldBeOfType<MsgTraceEgress>();
|
||||
egress.Account.ShouldBeNull();
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void AddEgressEvent_for_router_omits_subscription_and_queue()
|
||||
{
|
||||
var ctx = CreateSimpleContext();
|
||||
ctx.AddEgressEvent(
|
||||
clientId: 1,
|
||||
clientName: "route-1",
|
||||
clientKind: MsgTraceContext.KindRouter,
|
||||
subscriptionSubject: "should.not.appear",
|
||||
queue: "should.not.appear");
|
||||
|
||||
var egress = ctx.Event.Events[1].ShouldBeOfType<MsgTraceEgress>();
|
||||
egress.Subscription.ShouldBeNull();
|
||||
egress.Queue.ShouldBeNull();
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void AddEgressEvent_with_error()
|
||||
{
|
||||
var ctx = CreateSimpleContext();
|
||||
ctx.AddEgressEvent(
|
||||
clientId: 50,
|
||||
clientName: "slow-client",
|
||||
clientKind: MsgTraceContext.KindClient,
|
||||
error: MsgTraceErrors.ClientClosed);
|
||||
|
||||
var egress = ctx.Event.Events[1].ShouldBeOfType<MsgTraceEgress>();
|
||||
egress.Error.ShouldBe(MsgTraceErrors.ClientClosed);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void AddStreamExportEvent_records_account_and_target()
|
||||
{
|
||||
var ctx = CreateSimpleContext();
|
||||
ctx.AddStreamExportEvent("exportAccount", "export.subject");
|
||||
|
||||
ctx.Event.Events.Count.ShouldBe(2);
|
||||
var se = ctx.Event.Events[1].ShouldBeOfType<MsgTraceStreamExport>();
|
||||
se.Type.ShouldBe(MsgTraceTypes.StreamExport);
|
||||
se.Account.ShouldBe("exportAccount");
|
||||
se.To.ShouldBe("export.subject");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void AddServiceImportEvent_records_from_and_to()
|
||||
{
|
||||
var ctx = CreateSimpleContext();
|
||||
ctx.AddServiceImportEvent("importAccount", "from.subject", "to.subject");
|
||||
|
||||
ctx.Event.Events.Count.ShouldBe(2);
|
||||
var si = ctx.Event.Events[1].ShouldBeOfType<MsgTraceServiceImport>();
|
||||
si.Type.ShouldBe(MsgTraceTypes.ServiceImport);
|
||||
si.Account.ShouldBe("importAccount");
|
||||
si.From.ShouldBe("from.subject");
|
||||
si.To.ShouldBe("to.subject");
|
||||
}
|
||||
|
||||
// --- JetStream events ---
|
||||
|
||||
[Fact]
|
||||
public void AddJetStreamEvent_records_stream_name()
|
||||
{
|
||||
var ctx = CreateSimpleContext();
|
||||
ctx.AddJetStreamEvent("ORDERS");
|
||||
|
||||
ctx.Event.Events.Count.ShouldBe(2);
|
||||
var js = ctx.Event.Events[1].ShouldBeOfType<MsgTraceJetStreamEntry>();
|
||||
js.Type.ShouldBe(MsgTraceTypes.JetStream);
|
||||
js.Stream.ShouldBe("ORDERS");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void UpdateJetStreamEvent_sets_subject_and_nointerest()
|
||||
{
|
||||
var ctx = CreateSimpleContext();
|
||||
ctx.AddJetStreamEvent("ORDERS");
|
||||
ctx.UpdateJetStreamEvent("orders.new", noInterest: true);
|
||||
|
||||
var js = ctx.Event.Events[1].ShouldBeOfType<MsgTraceJetStreamEntry>();
|
||||
js.Subject.ShouldBe("orders.new");
|
||||
js.NoInterest.ShouldBeTrue();
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void SendEventFromJetStream_requires_both_phases()
|
||||
{
|
||||
var ctx = CreateSimpleContext();
|
||||
ctx.AddJetStreamEvent("ORDERS");
|
||||
|
||||
bool published = false;
|
||||
ctx.PublishCallback = (dest, reply, body) => { published = true; };
|
||||
|
||||
// Phase 1: message path calls SendEvent — should not publish yet
|
||||
ctx.SendEvent();
|
||||
published.ShouldBeFalse();
|
||||
|
||||
// Phase 2: JetStream path calls SendEventFromJetStream — now publishes
|
||||
ctx.SendEventFromJetStream();
|
||||
published.ShouldBeTrue();
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void SendEventFromJetStream_with_error()
|
||||
{
|
||||
var ctx = CreateSimpleContext();
|
||||
ctx.AddJetStreamEvent("ORDERS");
|
||||
|
||||
object? publishedBody = null;
|
||||
ctx.PublishCallback = (dest, reply, body) => { publishedBody = body; };
|
||||
|
||||
ctx.SendEvent(); // Phase 1
|
||||
ctx.SendEventFromJetStream("stream full"); // Phase 2
|
||||
|
||||
publishedBody.ShouldNotBeNull();
|
||||
var js = ctx.Event.Events[1].ShouldBeOfType<MsgTraceJetStreamEntry>();
|
||||
js.Error.ShouldBe("stream full");
|
||||
}
|
||||
|
||||
// --- Hop tracking ---
|
||||
|
||||
[Fact]
|
||||
public void SetHopHeader_increments_and_builds_hop_id()
|
||||
{
|
||||
var ctx = CreateSimpleContext();
|
||||
|
||||
ctx.SetHopHeader();
|
||||
ctx.Event.Hops.ShouldBe(1);
|
||||
ctx.NextHop.ShouldBe("1");
|
||||
|
||||
ctx.SetHopHeader();
|
||||
ctx.Event.Hops.ShouldBe(2);
|
||||
ctx.NextHop.ShouldBe("2");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void SetHopHeader_chains_from_existing_hop()
|
||||
{
|
||||
var headers = BuildHeaders(
|
||||
(MsgTraceHeaders.TraceDest, "trace.dest"),
|
||||
(MsgTraceHeaders.TraceHop, "1"));
|
||||
|
||||
var ctx = MsgTraceContext.Create(
|
||||
headers,
|
||||
clientId: 1,
|
||||
clientName: "router",
|
||||
accountName: "$G",
|
||||
subject: "test",
|
||||
msgSize: 0,
|
||||
clientKind: MsgTraceContext.KindRouter);
|
||||
|
||||
ctx.ShouldNotBeNull();
|
||||
ctx.Hop.ShouldBe("1");
|
||||
|
||||
ctx.SetHopHeader();
|
||||
ctx.NextHop.ShouldBe("1.1");
|
||||
|
||||
ctx.SetHopHeader();
|
||||
ctx.NextHop.ShouldBe("1.2");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void AddEgressEvent_captures_and_clears_next_hop()
|
||||
{
|
||||
var ctx = CreateSimpleContext();
|
||||
ctx.SetHopHeader();
|
||||
ctx.NextHop.ShouldBe("1");
|
||||
|
||||
ctx.AddEgressEvent(1, "route-1", MsgTraceContext.KindRouter);
|
||||
|
||||
var egress = ctx.Event.Events[1].ShouldBeOfType<MsgTraceEgress>();
|
||||
egress.Hop.ShouldBe("1");
|
||||
|
||||
// NextHop should be cleared after adding egress
|
||||
ctx.NextHop.ShouldBe("");
|
||||
}
|
||||
|
||||
// --- SendEvent (non-JetStream) ---
|
||||
|
||||
[Fact]
|
||||
public void SendEvent_publishes_immediately_without_jetstream()
|
||||
{
|
||||
var ctx = CreateSimpleContext();
|
||||
string? publishedDest = null;
|
||||
ctx.PublishCallback = (dest, reply, body) => { publishedDest = dest; };
|
||||
|
||||
ctx.SendEvent();
|
||||
publishedDest.ShouldBe("trace.dest");
|
||||
}
|
||||
|
||||
// --- JSON serialization ---
|
||||
|
||||
[Fact]
|
||||
public void MsgTraceEvent_serializes_to_valid_json()
|
||||
{
|
||||
var ctx = CreateSimpleContext();
|
||||
ctx.Event.Server = new EventServerInfo { Name = "srv", Id = "SRV1" };
|
||||
ctx.AddSubjectMappingEvent("mapped.subject");
|
||||
ctx.AddEgressEvent(99, "subscriber", MsgTraceContext.KindClient, "test.>", "q1");
|
||||
ctx.AddStreamExportEvent("exportAcc", "export.subject");
|
||||
|
||||
var json = JsonSerializer.Serialize(ctx.Event);
|
||||
var doc = JsonDocument.Parse(json);
|
||||
var root = doc.RootElement;
|
||||
|
||||
root.GetProperty("server").GetProperty("name").GetString().ShouldBe("srv");
|
||||
root.GetProperty("request").GetProperty("msgsize").GetInt32().ShouldBe(64);
|
||||
root.GetProperty("events").GetArrayLength().ShouldBe(4);
|
||||
|
||||
var events = root.GetProperty("events");
|
||||
events[0].GetProperty("type").GetString().ShouldBe(MsgTraceTypes.Ingress);
|
||||
events[1].GetProperty("type").GetString().ShouldBe(MsgTraceTypes.SubjectMapping);
|
||||
events[2].GetProperty("type").GetString().ShouldBe(MsgTraceTypes.Egress);
|
||||
events[3].GetProperty("type").GetString().ShouldBe(MsgTraceTypes.StreamExport);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void MsgTraceIngress_json_omits_null_error()
|
||||
{
|
||||
var ingress = new MsgTraceIngress
|
||||
{
|
||||
Type = MsgTraceTypes.Ingress,
|
||||
Cid = 1,
|
||||
Account = "$G",
|
||||
Subject = "test",
|
||||
};
|
||||
|
||||
var json = JsonSerializer.Serialize<MsgTraceEntry>(ingress);
|
||||
var doc = JsonDocument.Parse(json);
|
||||
doc.RootElement.TryGetProperty("error", out _).ShouldBeFalse();
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void MsgTraceEgress_json_omits_null_optional_fields()
|
||||
{
|
||||
var egress = new MsgTraceEgress
|
||||
{
|
||||
Type = MsgTraceTypes.Egress,
|
||||
Kind = MsgTraceContext.KindRouter,
|
||||
Cid = 5,
|
||||
};
|
||||
|
||||
var json = JsonSerializer.Serialize<MsgTraceEntry>(egress);
|
||||
var doc = JsonDocument.Parse(json);
|
||||
var root = doc.RootElement;
|
||||
|
||||
root.TryGetProperty("hop", out _).ShouldBeFalse();
|
||||
root.TryGetProperty("acc", out _).ShouldBeFalse();
|
||||
root.TryGetProperty("sub", out _).ShouldBeFalse();
|
||||
root.TryGetProperty("queue", out _).ShouldBeFalse();
|
||||
root.TryGetProperty("error", out _).ShouldBeFalse();
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Full_trace_event_with_all_event_types_serializes_correctly()
|
||||
{
|
||||
var ctx = CreateSimpleContext();
|
||||
ctx.Event.Server = new EventServerInfo { Name = "test-srv", Id = "ABC123" };
|
||||
ctx.AddSubjectMappingEvent("mapped");
|
||||
ctx.AddServiceImportEvent("importAcc", "from.sub", "to.sub");
|
||||
ctx.AddStreamExportEvent("exportAcc", "export.sub");
|
||||
ctx.AddJetStreamEvent("ORDERS");
|
||||
ctx.UpdateJetStreamEvent("orders.new", false);
|
||||
ctx.AddEgressEvent(100, "sub-1", MsgTraceContext.KindClient, "orders.>", "workers");
|
||||
ctx.AddEgressEvent(200, "route-east", MsgTraceContext.KindRouter, error: MsgTraceErrors.NoSupport);
|
||||
|
||||
var json = JsonSerializer.Serialize(ctx.Event);
|
||||
var doc = JsonDocument.Parse(json);
|
||||
var events = doc.RootElement.GetProperty("events");
|
||||
|
||||
events.GetArrayLength().ShouldBe(7);
|
||||
events[0].GetProperty("type").GetString().ShouldBe("in");
|
||||
events[1].GetProperty("type").GetString().ShouldBe("sm");
|
||||
events[2].GetProperty("type").GetString().ShouldBe("si");
|
||||
events[3].GetProperty("type").GetString().ShouldBe("se");
|
||||
events[4].GetProperty("type").GetString().ShouldBe("js");
|
||||
events[5].GetProperty("type").GetString().ShouldBe("eg");
|
||||
events[6].GetProperty("type").GetString().ShouldBe("eg");
|
||||
}
|
||||
|
||||
// --- Helper ---
|
||||
|
||||
private static MsgTraceContext CreateSimpleContext(string destination = "trace.dest", string accountName = "$G")
|
||||
{
|
||||
var headers = BuildHeaders(
|
||||
(MsgTraceHeaders.TraceDest, destination));
|
||||
|
||||
var ctx = MsgTraceContext.Create(
|
||||
headers,
|
||||
clientId: 1,
|
||||
clientName: "publisher",
|
||||
accountName: accountName,
|
||||
subject: "test.subject",
|
||||
msgSize: 64);
|
||||
|
||||
ctx.ShouldNotBeNull();
|
||||
return ctx;
|
||||
}
|
||||
}
|
||||
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,321 @@
|
||||
// Go reference: server/thw/thw_test.go
|
||||
|
||||
using NATS.Server.Internal.TimeHashWheel;
|
||||
|
||||
namespace NATS.Server.Core.Tests.Internal.TimeHashWheel;
|
||||
|
||||
public class HashWheelTests
|
||||
{
|
||||
/// <summary>
|
||||
/// Helper to produce nanosecond timestamps relative to a base, matching
|
||||
/// the Go test pattern of now.Add(N * time.Second).UnixNano().
|
||||
/// </summary>
|
||||
private static long NowNanos() => DateTimeOffset.UtcNow.ToUnixTimeMilliseconds() * 1_000_000;
|
||||
|
||||
private static long SecondsToNanos(long seconds) => seconds * 1_000_000_000;
|
||||
|
||||
// Go: TestHashWheelBasics server/thw/thw_test.go:22
|
||||
[Fact]
|
||||
public void Basics_AddRemoveCount()
|
||||
{
|
||||
var hw = new HashWheel();
|
||||
var now = NowNanos();
|
||||
|
||||
// Add a sequence.
|
||||
ulong seq = 1;
|
||||
var expires = now + SecondsToNanos(5);
|
||||
hw.Add(seq, expires);
|
||||
hw.Count.ShouldBe(1UL);
|
||||
|
||||
// Try to remove non-existent sequence.
|
||||
hw.Remove(999, expires).ShouldBeFalse();
|
||||
hw.Count.ShouldBe(1UL);
|
||||
|
||||
// Remove the sequence properly.
|
||||
hw.Remove(seq, expires).ShouldBeTrue();
|
||||
hw.Count.ShouldBe(0UL);
|
||||
|
||||
// Verify it's gone.
|
||||
hw.Remove(seq, expires).ShouldBeFalse();
|
||||
hw.Count.ShouldBe(0UL);
|
||||
}
|
||||
|
||||
// Go: TestHashWheelUpdate server/thw/thw_test.go:44
|
||||
[Fact]
|
||||
public void Update_ChangesExpiration()
|
||||
{
|
||||
var hw = new HashWheel();
|
||||
var now = NowNanos();
|
||||
var oldExpires = now + SecondsToNanos(5);
|
||||
var newExpires = now + SecondsToNanos(10);
|
||||
|
||||
// Add initial sequence.
|
||||
hw.Add(1, oldExpires);
|
||||
hw.Count.ShouldBe(1UL);
|
||||
|
||||
// Update expiration.
|
||||
hw.Update(1, oldExpires, newExpires);
|
||||
hw.Count.ShouldBe(1UL);
|
||||
|
||||
// Verify old expiration is gone.
|
||||
hw.Remove(1, oldExpires).ShouldBeFalse();
|
||||
hw.Count.ShouldBe(1UL);
|
||||
|
||||
// Verify new expiration exists.
|
||||
hw.Remove(1, newExpires).ShouldBeTrue();
|
||||
hw.Count.ShouldBe(0UL);
|
||||
}
|
||||
|
||||
// Go: TestHashWheelExpiration server/thw/thw_test.go:67
|
||||
[Fact]
|
||||
public void Expiration_FiresCallbackForExpired()
|
||||
{
|
||||
var hw = new HashWheel();
|
||||
var now = NowNanos();
|
||||
|
||||
// Add sequences with different expiration times.
|
||||
var seqs = new Dictionary<ulong, long>
|
||||
{
|
||||
[1] = now - SecondsToNanos(1), // Already expired
|
||||
[2] = now + SecondsToNanos(1), // Expires soon
|
||||
[3] = now + SecondsToNanos(10), // Expires later
|
||||
[4] = now + SecondsToNanos(60), // Expires much later
|
||||
};
|
||||
|
||||
foreach (var (seq, expires) in seqs)
|
||||
{
|
||||
hw.Add(seq, expires);
|
||||
}
|
||||
|
||||
hw.Count.ShouldBe((ulong)seqs.Count);
|
||||
|
||||
// Process expired tasks using internal method with explicit "now" timestamp.
|
||||
var expired = new Dictionary<ulong, bool>();
|
||||
hw.ExpireTasksInternal(now, (seq, _) =>
|
||||
{
|
||||
expired[seq] = true;
|
||||
return true;
|
||||
});
|
||||
|
||||
// Verify only sequence 1 expired.
|
||||
expired.Count.ShouldBe(1);
|
||||
expired.ShouldContainKey(1UL);
|
||||
hw.Count.ShouldBe(3UL);
|
||||
}
|
||||
|
||||
// Go: TestHashWheelManualExpiration server/thw/thw_test.go:97
|
||||
[Fact]
|
||||
public void ManualExpiration_SpecificTime()
|
||||
{
|
||||
var hw = new HashWheel();
|
||||
var now = NowNanos();
|
||||
|
||||
for (ulong seq = 1; seq <= 4; seq++)
|
||||
{
|
||||
hw.Add(seq, now);
|
||||
}
|
||||
|
||||
hw.Count.ShouldBe(4UL);
|
||||
|
||||
// Loop over expired multiple times, but without removing them.
|
||||
var expired = new Dictionary<ulong, ulong>();
|
||||
for (ulong i = 0; i <= 1; i++)
|
||||
{
|
||||
hw.ExpireTasksInternal(now, (seq, _) =>
|
||||
{
|
||||
if (!expired.TryGetValue(seq, out var count))
|
||||
{
|
||||
count = 0;
|
||||
}
|
||||
|
||||
expired[seq] = count + 1;
|
||||
return false;
|
||||
});
|
||||
|
||||
expired.Count.ShouldBe(4);
|
||||
expired[1].ShouldBe(1 + i);
|
||||
expired[2].ShouldBe(1 + i);
|
||||
expired[3].ShouldBe(1 + i);
|
||||
expired[4].ShouldBe(1 + i);
|
||||
hw.Count.ShouldBe(4UL);
|
||||
}
|
||||
|
||||
// Only remove even sequences.
|
||||
for (ulong i = 0; i <= 1; i++)
|
||||
{
|
||||
hw.ExpireTasksInternal(now, (seq, _) =>
|
||||
{
|
||||
if (!expired.TryGetValue(seq, out var count))
|
||||
{
|
||||
count = 0;
|
||||
}
|
||||
|
||||
expired[seq] = count + 1;
|
||||
return seq % 2 == 0;
|
||||
});
|
||||
|
||||
// Verify even sequences are removed.
|
||||
expired[1].ShouldBe(3 + i);
|
||||
expired[2].ShouldBe(3UL);
|
||||
expired[3].ShouldBe(3 + i);
|
||||
expired[4].ShouldBe(3UL);
|
||||
hw.Count.ShouldBe(2UL);
|
||||
}
|
||||
|
||||
// Manually remove last items.
|
||||
hw.Remove(1, now).ShouldBeTrue();
|
||||
hw.Remove(3, now).ShouldBeTrue();
|
||||
hw.Count.ShouldBe(0UL);
|
||||
}
|
||||
|
||||
// Go: TestHashWheelExpirationLargerThanWheel server/thw/thw_test.go:143
|
||||
[Fact]
|
||||
public void LargerThanWheel_HandlesWrapAround()
|
||||
{
|
||||
var hw = new HashWheel();
|
||||
|
||||
// Add sequences such that they can be expired immediately.
|
||||
var seqs = new Dictionary<ulong, long>
|
||||
{
|
||||
[1] = 0,
|
||||
[2] = SecondsToNanos(1),
|
||||
};
|
||||
|
||||
foreach (var (seq, expires) in seqs)
|
||||
{
|
||||
hw.Add(seq, expires);
|
||||
}
|
||||
|
||||
hw.Count.ShouldBe(2UL);
|
||||
|
||||
// Pick a timestamp such that the expiration needs to wrap around the whole wheel.
|
||||
// Go: now := int64(time.Second) * wheelMask
|
||||
var now = SecondsToNanos(1) * HashWheel.WheelSize - SecondsToNanos(1);
|
||||
|
||||
// Process expired tasks.
|
||||
var expired = new Dictionary<ulong, bool>();
|
||||
hw.ExpireTasksInternal(now, (seq, _) =>
|
||||
{
|
||||
expired[seq] = true;
|
||||
return true;
|
||||
});
|
||||
|
||||
// Verify both sequences are expired.
|
||||
expired.Count.ShouldBe(2);
|
||||
hw.Count.ShouldBe(0UL);
|
||||
}
|
||||
|
||||
// Go: TestHashWheelNextExpiration server/thw/thw_test.go:171
|
||||
[Fact]
|
||||
public void NextExpiration_FindsEarliest()
|
||||
{
|
||||
var hw = new HashWheel();
|
||||
var now = NowNanos();
|
||||
|
||||
// Add sequences with different expiration times.
|
||||
var seqs = new Dictionary<ulong, long>
|
||||
{
|
||||
[1] = now + SecondsToNanos(5),
|
||||
[2] = now + SecondsToNanos(3), // Earliest
|
||||
[3] = now + SecondsToNanos(10),
|
||||
};
|
||||
|
||||
foreach (var (seq, expires) in seqs)
|
||||
{
|
||||
hw.Add(seq, expires);
|
||||
}
|
||||
|
||||
hw.Count.ShouldBe((ulong)seqs.Count);
|
||||
|
||||
// Test GetNextExpiration.
|
||||
var nextExternalTick = now + SecondsToNanos(6);
|
||||
// Should return sequence 2's expiration.
|
||||
hw.GetNextExpiration(nextExternalTick).ShouldBe(seqs[2]);
|
||||
|
||||
// Test with empty wheel.
|
||||
var empty = new HashWheel();
|
||||
empty.GetNextExpiration(now + SecondsToNanos(1)).ShouldBe(long.MaxValue);
|
||||
}
|
||||
|
||||
// Go: TestHashWheelStress server/thw/thw_test.go:197
// NOTE: like the Go original, this exercises a large sequential add/update/remove
// workload; despite the name there is no multi-threaded access here.
[Fact]
public void Stress_ConcurrentAddRemove()
{
    var wheel = new HashWheel();
    var baseline = NowNanos();
    const int total = 100_000;

    // Populate: one entry per sequence, each one second apart.
    for (var i = 0; i < total; i++)
    {
        wheel.Add((ulong)i, baseline + SecondsToNanos(i));
    }

    // Reschedule every even sequence to a later deadline.
    for (var i = 0; i < total; i += 2)
    {
        var originalDeadline = baseline + SecondsToNanos(i);
        var laterDeadline = baseline + SecondsToNanos(i + total);
        wheel.Update((ulong)i, originalDeadline, laterDeadline);
    }

    // Drop every odd sequence at its original deadline.
    for (var i = 1; i < total; i += 2)
    {
        wheel.Remove((ulong)i, baseline + SecondsToNanos(i)).ShouldBeTrue();
    }

    // Only the rescheduled even half should survive.
    wheel.Count.ShouldBe((ulong)(total / 2));
}
|
||||
|
||||
// Go: TestHashWheelEncodeDecode server/thw/thw_test.go:222
[Fact]
public void EncodeDecode_RoundTrips()
{
    var original = new HashWheel();
    var baseline = NowNanos();
    const int total = 100_000;

    // Populate the wheel with a large number of staggered deadlines.
    for (var i = 0; i < total; i++)
    {
        original.Add((ulong)i, baseline + SecondsToNanos(i));
    }

    var encoded = original.Encode(12345);
    encoded.Length.ShouldBeGreaterThan(17); // Bigger than just the header.

    var decoded = new HashWheel();
    var (highSeq, consumed) = decoded.Decode(encoded);
    highSeq.ShouldBe(12345UL);
    consumed.ShouldBe(encoded.Length);
    original.GetNextExpiration(long.MaxValue).ShouldBe(decoded.GetNextExpiration(long.MaxValue));

    // Every slot must survive the round trip: same lowest marker, same entries.
    for (var s = 0; s < HashWheel.WheelSize; s++)
    {
        var expectedSlot = original.Wheel[s];
        var actualSlot = decoded.Wheel[s];

        if (expectedSlot is null)
        {
            actualSlot.ShouldBeNull();
            continue;
        }

        actualSlot.ShouldNotBeNull();
        expectedSlot.Lowest.ShouldBe(actualSlot!.Lowest);
        expectedSlot.Entries.Count.ShouldBe(actualSlot.Entries.Count);

        foreach (var (seq, ts) in expectedSlot.Entries)
        {
            actualSlot.Entries.ShouldContainKey(seq);
            actualSlot.Entries[seq].ShouldBe(ts);
        }
    }
}
|
||||
}
|
||||
@@ -0,0 +1,136 @@
|
||||
using System.Text;
|
||||
using NATS.Server.Internal;
|
||||
|
||||
namespace NATS.Server.Core.Tests.Internal;
|
||||
|
||||
/// <summary>
/// Tests for TraceContextPropagator: trace creation, header injection/extraction,
/// child span creation, round-trip fidelity, and ShouldTrace detection.
/// Go reference: server/msgtrace.go — trace context embedding and extraction.
/// </summary>
public class TraceContextPropagationTests
{
    // Helper: render a minimal NATS/1.0 header block containing the given pairs.
    private static byte[] BuildNatsHeaders(params (string key, string value)[] headers)
    {
        var block = new StringBuilder();
        block.Append("NATS/1.0\r\n");
        foreach (var (key, value) in headers)
        {
            block.Append(key).Append(": ").Append(value).Append("\r\n");
        }

        block.Append("\r\n");
        return Encoding.ASCII.GetBytes(block.ToString());
    }

    [Fact]
    public void CreateTrace_GeneratesValidContext()
    {
        var context = TraceContextPropagator.CreateTrace("abc123", "span456", destination: "trace.dest");

        context.TraceId.ShouldBe("abc123");
        context.SpanId.ShouldBe("span456");
        context.Destination.ShouldBe("trace.dest");
        context.TraceOnly.ShouldBeFalse();
        context.CreatedAt.ShouldBeInRange(DateTime.UtcNow.AddSeconds(-5), DateTime.UtcNow.AddSeconds(1));
    }

    [Fact]
    public void ExtractTrace_ValidHeaders_ReturnsContext()
    {
        var raw = BuildNatsHeaders((TraceContextPropagator.TraceParentHeader, "trace1-span1"));

        var context = TraceContextPropagator.ExtractTrace(raw);

        context.ShouldNotBeNull();
        context.TraceId.ShouldBe("trace1");
        context.SpanId.ShouldBe("span1");
    }

    [Fact]
    public void ExtractTrace_NoTraceHeader_ReturnsNull()
    {
        var raw = BuildNatsHeaders(("Content-Type", "text/plain"));

        var context = TraceContextPropagator.ExtractTrace(raw);

        context.ShouldBeNull();
    }

    [Fact]
    public void InjectTrace_AppendsToHeaders()
    {
        var context = TraceContextPropagator.CreateTrace("tid", "sid");
        var existingHeaders = BuildNatsHeaders(("Content-Type", "text/plain"));

        var injected = TraceContextPropagator.InjectTrace(context, existingHeaders);

        // Both the new trace header and the pre-existing header must be present.
        var rendered = Encoding.ASCII.GetString(injected);
        rendered.ShouldContain($"{TraceContextPropagator.TraceParentHeader}: tid-sid");
        rendered.ShouldContain("Content-Type: text/plain");
    }

    [Fact]
    public void InjectTrace_EmptyHeaders_CreatesNew()
    {
        var context = TraceContextPropagator.CreateTrace("newtrace", "newspan");

        var injected = TraceContextPropagator.InjectTrace(context, ReadOnlySpan<byte>.Empty);

        // Injecting into nothing synthesizes a fresh NATS/1.0 header block.
        var rendered = Encoding.ASCII.GetString(injected);
        rendered.ShouldStartWith("NATS/1.0\r\n");
        rendered.ShouldContain($"{TraceContextPropagator.TraceParentHeader}: newtrace-newspan");
    }

    [Fact]
    public void CreateChildSpan_PreservesTraceId()
    {
        var parent = TraceContextPropagator.CreateTrace("parentTrace", "parentSpan");

        var child = TraceContextPropagator.CreateChildSpan(parent, "childSpan");

        child.TraceId.ShouldBe("parentTrace");
    }

    [Fact]
    public void CreateChildSpan_NewSpanId()
    {
        var parent = TraceContextPropagator.CreateTrace("parentTrace", "parentSpan");

        var child = TraceContextPropagator.CreateChildSpan(parent, "childSpan");

        child.SpanId.ShouldBe("childSpan");
        child.SpanId.ShouldNotBe(parent.SpanId);
    }

    [Fact]
    public void ShouldTrace_WithHeader_ReturnsTrue()
    {
        var raw = BuildNatsHeaders((TraceContextPropagator.TraceParentHeader, "trace1-span1"));

        TraceContextPropagator.ShouldTrace(raw).ShouldBeTrue();
    }

    [Fact]
    public void ShouldTrace_WithoutHeader_ReturnsFalse()
    {
        var raw = BuildNatsHeaders(("Content-Type", "text/plain"));

        TraceContextPropagator.ShouldTrace(raw).ShouldBeFalse();
    }

    [Fact]
    public void RoundTrip_CreateInjectExtract_Matches()
    {
        // Hex-style IDs (no embedded dashes) keep the "{traceId}-{spanId}" wire
        // format unambiguously splittable on its single separator dash.
        var original = TraceContextPropagator.CreateTrace("0af7651916cd43dd8448eb211c80319c", "b7ad6b7169203331", destination: "trace.dest");

        // Inject into empty headers, then extract back out.
        var injected = TraceContextPropagator.InjectTrace(original, ReadOnlySpan<byte>.Empty);
        var extracted = TraceContextPropagator.ExtractTrace(injected);

        extracted.ShouldNotBeNull();
        extracted.TraceId.ShouldBe(original.TraceId);
        extracted.SpanId.ShouldBe(original.SpanId);
    }
}
|
||||
Reference in New Issue
Block a user