diff --git a/src/NATS.Server/Internal/Avl/SequenceSet.cs b/src/NATS.Server/Internal/Avl/SequenceSet.cs
index 5b3c169..397757c 100644
--- a/src/NATS.Server/Internal/Avl/SequenceSet.cs
+++ b/src/NATS.Server/Internal/Avl/SequenceSet.cs
@@ -1,7 +1,777 @@
-namespace NATS.Server.Internal.Avl;
+// Copyright 2024 The NATS Authors
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
// Go reference: server/avl/seqset.go
-// TODO: Port AVL-backed sparse sequence set
+
+using System.Buffers.Binary;
+using System.Numerics;
+
+namespace NATS.Server.Internal.Avl;
+
+/// <summary>
+/// SequenceSet is a memory and encoding optimized set for storing unsigned ints.
+/// Uses an AVL tree with nodes that hold bitmasks for set membership.
+/// Not thread safe.
+/// </summary>
public class SequenceSet
{
+ internal const int BitsPerBucket = 64;
+ internal const int NumBuckets = 32;
+ internal const int NumEntries = NumBuckets * BitsPerBucket; // 2048
+
+ private const byte Magic = 22;
+ private const byte Version = 2;
+ private const int HdrLen = 2;
+ private const int MinLen = HdrLen + 8; // magic + version + num nodes + num entries
+
+ internal Node? Root;
+ private int _size;
+ private int _nodes;
+ private bool _changed;
+
+ /// Number of items in the set.
+ public int Size => _size;
+
+ /// Number of nodes in the tree.
+ public int Nodes => _nodes;
+
+ /// Fast check of the set being empty.
+ public bool IsEmpty => Root == null;
+
+ /// Insert will insert the sequence into the set. The tree will be balanced inline.
+ public void Insert(ulong seq)
+ {
+ Root = Node.Insert(Root, seq, ref _changed, ref _nodes);
+ if (_changed)
+ {
+ _changed = false;
+ _size++;
+ }
+ }
+
+ /// Returns true if the sequence is a member of this set.
+ public bool Exists(ulong seq)
+ {
+ var n = Root;
+ while (n != null)
+ {
+ if (seq < n.Base)
+ {
+ n = n.Left;
+ }
+ else if (seq >= n.Base + NumEntries)
+ {
+ n = n.Right;
+ }
+ else
+ {
+ return n.ExistsBit(seq);
+ }
+ }
+
+ return false;
+ }
+
+ ///
+ /// Sets the initial minimum sequence when known. More effectively utilizes space.
+ /// The set must be empty.
+ ///
+ public void SetInitialMin(ulong min)
+ {
+ if (!IsEmpty)
+ {
+ throw new InvalidOperationException("Set not empty");
+ }
+
+ Root = new Node { Base = min, Height = 1 };
+ _nodes = 1;
+ }
+
+ ///
+ /// Removes the sequence from the set. Returns true if the sequence was present.
+ ///
+ public bool Delete(ulong seq)
+ {
+ if (Root == null)
+ {
+ return false;
+ }
+
+ Root = Node.Delete(Root, seq, ref _changed, ref _nodes);
+ if (_changed)
+ {
+ _changed = false;
+ _size--;
+ if (_size == 0)
+ {
+ Empty();
+ }
+
+ return true;
+ }
+
+ return false;
+ }
+
+ /// Clears all items from the set.
+ public void Empty()
+ {
+ Root = null;
+ _size = 0;
+ _nodes = 0;
+ }
+
+    /// <summary>
+    /// Invokes the callback for each item in ascending order.
+    /// If the callback returns false, iteration terminates.
+    /// </summary>
+    public void Range(Func<ulong, bool> callback) => Node.Iter(Root, callback);
+
+ /// Returns the left and right heights of the tree root.
+ public (int Left, int Right) Heights()
+ {
+ if (Root == null)
+ {
+ return (0, 0);
+ }
+
+ var l = Root.Left?.Height ?? 0;
+ var r = Root.Right?.Height ?? 0;
+ return (l, r);
+ }
+
+ /// Returns min, max, and number of set items.
+ public (ulong Min, ulong Max, ulong Num) State()
+ {
+ if (Root == null)
+ {
+ return (0, 0, 0);
+ }
+
+ var (min, max) = MinMax();
+ return (min, max, (ulong)_size);
+ }
+
+ /// Returns the minimum and maximum values in the set.
+ public (ulong Min, ulong Max) MinMax()
+ {
+ if (Root == null)
+ {
+ return (0, 0);
+ }
+
+ ulong min = 0;
+ for (var l = Root; l != null; l = l.Left)
+ {
+ if (l.Left == null)
+ {
+ min = l.Min();
+ }
+ }
+
+ ulong max = 0;
+ for (var r = Root; r != null; r = r.Right)
+ {
+ if (r.Right == null)
+ {
+ max = r.Max();
+ }
+ }
+
+ return (min, max);
+ }
+
+ /// Returns a deep clone of this SequenceSet.
+ public SequenceSet Clone()
+ {
+ var css = new SequenceSet { _nodes = _nodes, _size = _size };
+ css.Root = CloneNode(Root);
+ return css;
+ }
+
+ /// Unions this set with one or more other sets by inserting all their elements.
+ public void Union(params SequenceSet[] others)
+ {
+ foreach (var other in others)
+ {
+ Node.NodeIter(other.Root, n =>
+ {
+ for (var nb = 0; nb < NumBuckets; nb++)
+ {
+ var b = n.Bits[nb];
+ for (var pos = 0UL; b != 0; pos++)
+ {
+ if ((b & 1) == 1)
+ {
+ var seq = n.Base + ((ulong)nb * BitsPerBucket) + pos;
+ Insert(seq);
+ }
+
+ b >>= 1;
+ }
+ }
+ });
+ }
+ }
+
+ /// Returns a union of all provided sets.
+ public static SequenceSet CreateUnion(params SequenceSet[] sets)
+ {
+ if (sets.Length == 0)
+ {
+ return new SequenceSet();
+ }
+
+ // Sort descending by size so we clone the largest.
+ var sorted = sets.OrderByDescending(s => s.Size).ToArray();
+ var ss = sorted[0].Clone();
+
+ for (var i = 1; i < sorted.Length; i++)
+ {
+ sorted[i].Range(n =>
+ {
+ ss.Insert(n);
+ return true;
+ });
+ }
+
+ return ss;
+ }
+
+ /// Returns the bytes needed for encoding.
+ public int EncodeLength() => MinLen + (_nodes * ((NumBuckets + 1) * 8 + 2));
+
+ /// Encodes the set to a compact binary format.
+ public byte[] Encode()
+ {
+ var encLen = EncodeLength();
+ var buf = new byte[encLen];
+
+ buf[0] = Magic;
+ buf[1] = Version;
+ var i = HdrLen;
+ BinaryPrimitives.WriteUInt32LittleEndian(buf.AsSpan(i), (uint)_nodes);
+ BinaryPrimitives.WriteUInt32LittleEndian(buf.AsSpan(i + 4), (uint)_size);
+ i += 8;
+
+ Node.NodeIter(Root, n =>
+ {
+ BinaryPrimitives.WriteUInt64LittleEndian(buf.AsSpan(i), n.Base);
+ i += 8;
+ for (var bi = 0; bi < NumBuckets; bi++)
+ {
+ BinaryPrimitives.WriteUInt64LittleEndian(buf.AsSpan(i), n.Bits[bi]);
+ i += 8;
+ }
+
+ BinaryPrimitives.WriteUInt16LittleEndian(buf.AsSpan(i), (ushort)n.Height);
+ i += 2;
+ });
+
+ return buf.AsSpan(0, i).ToArray();
+ }
+
+ /// Decodes a SequenceSet from a binary buffer. Returns the set and number of bytes read.
+    public static (SequenceSet Set, int BytesRead) Decode(ReadOnlySpan<byte> buf)
+ {
+ if (buf.Length < MinLen || buf[0] != Magic)
+ {
+ throw new InvalidOperationException("Bad encoding");
+ }
+
+ return buf[1] switch
+ {
+ 1 => DecodeV1(buf),
+ 2 => DecodeV2(buf),
+ _ => throw new InvalidOperationException("Bad version"),
+ };
+ }
+
+    private static (SequenceSet Set, int BytesRead) DecodeV2(ReadOnlySpan<byte> buf)
+ {
+ var index = 2;
+ var nn = (int)BinaryPrimitives.ReadUInt32LittleEndian(buf[index..]);
+ var sz = (int)BinaryPrimitives.ReadUInt32LittleEndian(buf[(index + 4)..]);
+ index += 8;
+
+ var expectedLen = MinLen + (nn * ((NumBuckets + 1) * 8 + 2));
+ if (buf.Length < expectedLen)
+ {
+ throw new InvalidOperationException("Bad encoding");
+ }
+
+ var ss = new SequenceSet { _size = sz };
+
+ for (var i = 0; i < nn; i++)
+ {
+ var n = new Node
+ {
+ Base = BinaryPrimitives.ReadUInt64LittleEndian(buf[index..]),
+ };
+ index += 8;
+
+ for (var bi = 0; bi < NumBuckets; bi++)
+ {
+ n.Bits[bi] = BinaryPrimitives.ReadUInt64LittleEndian(buf[index..]);
+ index += 8;
+ }
+
+ n.Height = BinaryPrimitives.ReadUInt16LittleEndian(buf[index..]);
+ index += 2;
+
+ ss.InsertNode(n);
+ }
+
+ return (ss, index);
+ }
+
+    private static (SequenceSet Set, int BytesRead) DecodeV1(ReadOnlySpan<byte> buf)
+ {
+ const int v1NumBuckets = 64;
+ var index = 2;
+ var nn = (int)BinaryPrimitives.ReadUInt32LittleEndian(buf[index..]);
+ var sz = (int)BinaryPrimitives.ReadUInt32LittleEndian(buf[(index + 4)..]);
+ index += 8;
+
+ var expectedLen = MinLen + (nn * ((v1NumBuckets + 1) * 8 + 2));
+ if (buf.Length < expectedLen)
+ {
+ throw new InvalidOperationException("Bad encoding");
+ }
+
+ var ss = new SequenceSet();
+
+ for (var i = 0; i < nn; i++)
+ {
+ var nodeBase = BinaryPrimitives.ReadUInt64LittleEndian(buf[index..]);
+ index += 8;
+
+ for (var nb = 0UL; nb < v1NumBuckets; nb++)
+ {
+ var n = BinaryPrimitives.ReadUInt64LittleEndian(buf[index..]);
+ for (var pos = 0UL; n != 0; pos++)
+ {
+ if ((n & 1) == 1)
+ {
+ var seq = nodeBase + (nb * BitsPerBucket) + pos;
+ ss.Insert(seq);
+ }
+
+ n >>= 1;
+ }
+
+ index += 8;
+ }
+
+ // Skip encoded height.
+ index += 2;
+ }
+
+ if (ss.Size != sz)
+ {
+ throw new InvalidOperationException("Bad encoding");
+ }
+
+ return (ss, index);
+ }
+
+ /// Inserts a decoded node directly into the tree (no rebalancing needed for ordered inserts).
+ private void InsertNode(Node n)
+ {
+ _nodes++;
+
+ if (Root == null)
+ {
+ Root = n;
+ return;
+ }
+
+ for (var p = Root; ;)
+ {
+ if (n.Base < p.Base)
+ {
+ if (p.Left == null)
+ {
+ p.Left = n;
+ return;
+ }
+
+ p = p.Left;
+ }
+ else
+ {
+ if (p.Right == null)
+ {
+ p.Right = n;
+ return;
+ }
+
+ p = p.Right;
+ }
+ }
+ }
+
+ private static Node? CloneNode(Node? src)
+ {
+ if (src == null)
+ {
+ return null;
+ }
+
+ var n = new Node { Base = src.Base, Height = src.Height };
+ Array.Copy(src.Bits, n.Bits, NumBuckets);
+ n.Left = CloneNode(src.Left);
+ n.Right = CloneNode(src.Right);
+ return n;
+ }
+
+ /// AVL tree node that stores a bitmask covering NumEntries (2048) consecutive sequences.
+ internal sealed class Node
+ {
+ public ulong Base;
+ public readonly ulong[] Bits = new ulong[NumBuckets];
+ public Node? Left;
+ public Node? Right;
+ public int Height;
+
+ /// Sets the bit for the given sequence. Reports whether it was newly inserted.
+ public void SetBit(ulong seq, ref bool inserted)
+ {
+ seq -= Base;
+ var i = seq / BitsPerBucket;
+ var mask = 1UL << (int)(seq % BitsPerBucket);
+ if ((Bits[i] & mask) == 0)
+ {
+ Bits[i] |= mask;
+ inserted = true;
+ }
+ }
+
+ /// Clears the bit for the given sequence. Returns true if this node is now empty.
+ public bool ClearBit(ulong seq, ref bool deleted)
+ {
+ seq -= Base;
+ var i = seq / BitsPerBucket;
+ var mask = 1UL << (int)(seq % BitsPerBucket);
+ if ((Bits[i] & mask) != 0)
+ {
+ Bits[i] &= ~mask;
+ deleted = true;
+ }
+
+ for (var b = 0; b < NumBuckets; b++)
+ {
+ if (Bits[b] != 0)
+ {
+ return false;
+ }
+ }
+
+ return true;
+ }
+
+ /// Checks if the bit for the given sequence is set.
+ public bool ExistsBit(ulong seq)
+ {
+ seq -= Base;
+ var i = seq / BitsPerBucket;
+ var mask = 1UL << (int)(seq % BitsPerBucket);
+ return (Bits[i] & mask) != 0;
+ }
+
+ /// Returns the minimum sequence in this node (node must not be empty).
+ public ulong Min()
+ {
+ for (var i = 0; i < NumBuckets; i++)
+ {
+ if (Bits[i] != 0)
+ {
+ return Base + (ulong)(i * BitsPerBucket) + (ulong)BitOperations.TrailingZeroCount(Bits[i]);
+ }
+ }
+
+ return 0;
+ }
+
+ /// Returns the maximum sequence in this node (node must not be empty).
+ public ulong Max()
+ {
+ for (var i = NumBuckets - 1; i >= 0; i--)
+ {
+ if (Bits[i] != 0)
+ {
+ return Base + (ulong)(i * BitsPerBucket) + (ulong)(BitsPerBucket - BitOperations.LeadingZeroCount(Bits[i] >> 1));
+ }
+ }
+
+ return 0;
+ }
+
+ /// Inserts a sequence into the subtree rooted at this node, rebalancing as needed.
+ public static Node Insert(Node? n, ulong seq, ref bool inserted, ref int nodes)
+ {
+ if (n == null)
+ {
+ var nodeBase = (seq / NumEntries) * NumEntries;
+ var newNode = new Node { Base = nodeBase, Height = 1 };
+ newNode.SetBit(seq, ref inserted);
+ nodes++;
+ return newNode;
+ }
+
+ if (seq < n.Base)
+ {
+ n.Left = Insert(n.Left, seq, ref inserted, ref nodes);
+ }
+ else if (seq >= n.Base + NumEntries)
+ {
+ n.Right = Insert(n.Right, seq, ref inserted, ref nodes);
+ }
+ else
+ {
+ n.SetBit(seq, ref inserted);
+ }
+
+ n.Height = MaxHeight(n) + 1;
+
+ var bf = BalanceFactor(n);
+ if (bf > 1)
+ {
+ if (BalanceFactor(n.Left) < 0)
+ {
+ n.Left = RotateLeft(n.Left!);
+ }
+
+ return RotateRight(n);
+ }
+ else if (bf < -1)
+ {
+ if (BalanceFactor(n.Right) > 0)
+ {
+ n.Right = RotateRight(n.Right!);
+ }
+
+ return RotateLeft(n);
+ }
+
+ return n;
+ }
+
+ /// Deletes a sequence from the subtree rooted at this node, rebalancing as needed.
+ public static Node? Delete(Node? n, ulong seq, ref bool deleted, ref int nodes)
+ {
+ if (n == null)
+ {
+ return null;
+ }
+
+ if (seq < n.Base)
+ {
+ n.Left = Delete(n.Left, seq, ref deleted, ref nodes);
+ }
+ else if (seq >= n.Base + NumEntries)
+ {
+ n.Right = Delete(n.Right, seq, ref deleted, ref nodes);
+ }
+ else if (n.ClearBit(seq, ref deleted))
+ {
+ // Node is now empty, remove it.
+ nodes--;
+ if (n.Left == null)
+ {
+ n = n.Right;
+ }
+ else if (n.Right == null)
+ {
+ n = n.Left;
+ }
+ else
+ {
+ // Both children present: insert left subtree into the leftmost position of right subtree.
+ n.Right = InsertNodePrev(n.Right, n.Left);
+ n = n.Right;
+ }
+ }
+
+ if (n != null)
+ {
+ n.Height = MaxHeight(n) + 1;
+ }
+
+ var bf = BalanceFactor(n);
+ if (bf > 1)
+ {
+ if (BalanceFactor(n!.Left) < 0)
+ {
+ n.Left = RotateLeft(n.Left!);
+ }
+
+ return RotateRight(n);
+ }
+ else if (bf < -1)
+ {
+ if (BalanceFactor(n!.Right) > 0)
+ {
+ n.Right = RotateRight(n.Right!);
+ }
+
+ return RotateLeft(n);
+ }
+
+ return n;
+ }
+
+ /// Inserts nn into the leftmost position of n's subtree, then rebalances.
+ private static Node InsertNodePrev(Node n, Node nn)
+ {
+ if (n.Left == null)
+ {
+ n.Left = nn;
+ }
+ else
+ {
+ n.Left = InsertNodePrev(n.Left, nn);
+ }
+
+ n.Height = MaxHeight(n) + 1;
+
+ var bf = BalanceFactor(n);
+ if (bf > 1)
+ {
+ if (BalanceFactor(n.Left) < 0)
+ {
+ n.Left = RotateLeft(n.Left!);
+ }
+
+ return RotateRight(n);
+ }
+ else if (bf < -1)
+ {
+ if (BalanceFactor(n.Right) > 0)
+ {
+ n.Right = RotateRight(n.Right!);
+ }
+
+ return RotateLeft(n);
+ }
+
+ return n;
+ }
+
+ /// Left rotation.
+ private static Node RotateLeft(Node n)
+ {
+ var r = n.Right;
+ if (r != null)
+ {
+ n.Right = r.Left;
+ r.Left = n;
+ n.Height = MaxHeight(n) + 1;
+ r.Height = MaxHeight(r) + 1;
+ }
+ else
+ {
+ n.Right = null;
+ n.Height = MaxHeight(n) + 1;
+ }
+
+ return r ?? n;
+ }
+
+ /// Right rotation.
+ private static Node RotateRight(Node n)
+ {
+ var l = n.Left;
+ if (l != null)
+ {
+ n.Left = l.Right;
+ l.Right = n;
+ n.Height = MaxHeight(n) + 1;
+ l.Height = MaxHeight(l) + 1;
+ }
+ else
+ {
+ n.Left = null;
+ n.Height = MaxHeight(n) + 1;
+ }
+
+ return l ?? n;
+ }
+
+ /// Returns the balance factor (left height - right height).
+ internal static int BalanceFactor(Node? n)
+ {
+ if (n == null)
+ {
+ return 0;
+ }
+
+ var lh = n.Left?.Height ?? 0;
+ var rh = n.Right?.Height ?? 0;
+ return lh - rh;
+ }
+
+ /// Returns the max of left and right child heights.
+ internal static int MaxHeight(Node? n)
+ {
+ if (n == null)
+ {
+ return 0;
+ }
+
+ var lh = n.Left?.Height ?? 0;
+ var rh = n.Right?.Height ?? 0;
+ return Math.Max(lh, rh);
+ }
+
+ /// Iterates nodes in pre-order (root, left, right) for encoding.
+        internal static void NodeIter(Node? n, Action<Node> f)
+ {
+ if (n == null)
+ {
+ return;
+ }
+
+ f(n);
+ NodeIter(n.Left, f);
+ NodeIter(n.Right, f);
+ }
+
+ /// Iterates items in ascending order. Returns false if iteration was terminated early.
+        internal static bool Iter(Node? n, Func<ulong, bool> f)
+ {
+ if (n == null)
+ {
+ return true;
+ }
+
+ if (!Iter(n.Left, f))
+ {
+ return false;
+ }
+
+ for (var num = n.Base; num < n.Base + NumEntries; num++)
+ {
+ if (n.ExistsBit(num))
+ {
+ if (!f(num))
+ {
+ return false;
+ }
+ }
+ }
+
+ return Iter(n.Right, f);
+ }
+ }
}
diff --git a/src/NATS.Server/Internal/Gsl/GenericSubjectList.cs b/src/NATS.Server/Internal/Gsl/GenericSubjectList.cs
index b2a8cd9..7cc0e4d 100644
--- a/src/NATS.Server/Internal/Gsl/GenericSubjectList.cs
+++ b/src/NATS.Server/Internal/Gsl/GenericSubjectList.cs
@@ -1,7 +1,650 @@
+// Go reference: server/gsl/gsl.go
+// Trie-based generic subject list with wildcard support for NATS subject matching.
+
namespace NATS.Server.Internal.Gsl;
-// Go reference: server/gsl/gsl.go
-// TODO: Port generic trie-based subject list
+/// <summary>
+/// Sublist related errors.
+/// </summary>
+public static class GslErrors
+{
+ public static readonly InvalidOperationException InvalidSubject = new("gsl: invalid subject");
+ public static readonly KeyNotFoundException NotFound = new("gsl: no matches found");
+}
+
+/// <summary>
+/// A level represents a group of nodes and special pointers to wildcard nodes.
+/// Go reference: server/gsl/gsl.go level struct
+/// </summary>
+internal sealed class Level<T> where T : IEquatable<T>
+{
+    public Dictionary<string, Node<T>> Nodes { get; } = new();
+    public Node<T>? Pwc { get; set; } // partial wildcard '*'
+    public Node<T>? Fwc { get; set; } // full wildcard '>'
+
+ public int NumNodes()
+ {
+ var num = Nodes.Count;
+ if (Pwc is not null) num++;
+ if (Fwc is not null) num++;
+ return num;
+ }
+
+    /// <summary>
+    /// Prune an empty node from the tree.
+    /// Go reference: server/gsl/gsl.go pruneNode
+    /// </summary>
+    public void PruneNode(Node<T> n, string token)
+ {
+ if (ReferenceEquals(n, Fwc))
+ Fwc = null;
+ else if (ReferenceEquals(n, Pwc))
+ Pwc = null;
+ else
+ Nodes.Remove(token);
+ }
+}
+
+/// <summary>
+/// A node contains subscriptions and a pointer to the next level.
+/// Go reference: server/gsl/gsl.go node struct
+/// </summary>
+internal sealed class Node<T> where T : IEquatable<T>
+{
+    public Level<T>? Next { get; set; }
+    public Dictionary<T, string> Subs { get; } = new(); // value -> subject
+
+ ///
+ /// Returns true if the node has no subscriptions and no children.
+ /// Go reference: server/gsl/gsl.go isEmpty
+ ///
+ public bool IsEmpty() => Subs.Count == 0 && (Next is null || Next.NumNodes() == 0);
+}
+
+/// <summary>
+/// Tracks descent into levels during removal for pruning.
+/// Go reference: server/gsl/gsl.go lnt struct
+/// </summary>
+internal readonly record struct Lnt<T>(Level<T> L, Node<T> N, string T_) where T : IEquatable<T>;
+
+/// <summary>
+/// A GenericSubjectList stores and efficiently retrieves subscriptions using a trie.
+/// Supports wildcard subjects: '*' matches a single token, '>' matches one or more tokens.
+/// Thread-safe via ReaderWriterLockSlim.
+/// Go reference: server/gsl/gsl.go GenericSublist
+/// </summary>
 public class GenericSubjectList<T> where T : IEquatable<T>
{
+ private const char Pwc = '*';
+ private const char Fwc = '>';
+ private const char Btsep = '.';
+
+ private readonly ReaderWriterLockSlim _lock = new();
+    private readonly Level<T> _root = new();
+ private uint _count;
+
+ ///
+ /// Returns the number of subscriptions.
+ /// Go reference: server/gsl/gsl.go Count
+ ///
+ public uint Count
+ {
+ get
+ {
+ _lock.EnterReadLock();
+ try
+ {
+ return _count;
+ }
+ finally
+ {
+ _lock.ExitReadLock();
+ }
+ }
+ }
+
+ ///
+ /// Insert adds a subscription into the sublist.
+ /// Go reference: server/gsl/gsl.go Insert
+ ///
+ public void Insert(string subject, T value)
+ {
+ _lock.EnterWriteLock();
+ try
+ {
+ var sfwc = false;
+            Node<T>? n = null;
+ var l = _root;
+
+ foreach (var token in TokenizeSubject(subject))
+ {
+ var lt = token.Length;
+ if (lt == 0 || sfwc)
+ throw GslErrors.InvalidSubject;
+
+ if (lt > 1)
+ {
+ l.Nodes.TryGetValue(token, out n);
+ }
+ else
+ {
+ switch (token[0])
+ {
+ case Pwc:
+ n = l.Pwc;
+ break;
+ case Fwc:
+ n = l.Fwc;
+ sfwc = true;
+ break;
+ default:
+ l.Nodes.TryGetValue(token, out n);
+ break;
+ }
+ }
+
+ if (n is null)
+ {
+                    n = new Node<T>();
+ if (lt > 1)
+ {
+ l.Nodes[token] = n;
+ }
+ else
+ {
+ switch (token[0])
+ {
+ case Pwc:
+ l.Pwc = n;
+ break;
+ case Fwc:
+ l.Fwc = n;
+ break;
+ default:
+ l.Nodes[token] = n;
+ break;
+ }
+ }
+ }
+
+            n.Next ??= new Level<T>();
+ l = n.Next;
+ }
+
+ // n should never be null here if subject was valid (non-empty)
+ n!.Subs[value] = subject;
+ _count++;
+ }
+ finally
+ {
+ _lock.ExitWriteLock();
+ }
+ }
+
+ ///
+ /// Remove will remove a subscription.
+ /// Go reference: server/gsl/gsl.go Remove
+ ///
+ public void Remove(string subject, T value)
+ {
+ _lock.EnterWriteLock();
+ try
+ {
+ RemoveInternal(subject, value);
+ }
+ finally
+ {
+ _lock.ExitWriteLock();
+ }
+ }
+
+ ///
+ /// Match will match all entries to the literal subject and invoke the callback for each.
+ /// Go reference: server/gsl/gsl.go Match
+ ///
+    public void Match(string subject, Action<T> callback)
+ {
+ MatchInternal(subject, callback, doLock: true);
+ }
+
+ ///
+ /// MatchBytes will match all entries to the literal subject (as bytes) and invoke the callback for each.
+ /// Go reference: server/gsl/gsl.go MatchBytes
+ ///
+    public void MatchBytes(ReadOnlySpan<byte> subject, Action<T> callback)
+ {
+ // Convert bytes to string then delegate
+ var subjectStr = System.Text.Encoding.UTF8.GetString(subject);
+ MatchInternal(subjectStr, callback, doLock: true);
+ }
+
+ ///
+ /// HasInterest will return whether or not there is any interest in the subject.
+ /// Go reference: server/gsl/gsl.go HasInterest
+ ///
+ public bool HasInterest(string subject)
+ {
+ return HasInterestInternal(subject, doLock: true, np: null);
+ }
+
+ ///
+ /// NumInterest will return the number of subs interested in the subject.
+ /// Go reference: server/gsl/gsl.go NumInterest
+ ///
+ public int NumInterest(string subject)
+ {
+ var np = new int[1]; // use array to pass by reference
+ HasInterestInternal(subject, doLock: true, np: np);
+ return np[0];
+ }
+
+ ///
+ /// HasInterestStartingIn is a helper for subject tree intersection.
+ /// Go reference: server/gsl/gsl.go HasInterestStartingIn
+ ///
+ public bool HasInterestStartingIn(string subject)
+ {
+ _lock.EnterReadLock();
+ try
+ {
+            Span<string> tokenBuffer = new string[64];
+ var tokens = TokenizeSubjectIntoSpan(subject, tokenBuffer);
+ return HasInterestStartingInLevel(_root, tokens);
+ }
+ finally
+ {
+ _lock.ExitReadLock();
+ }
+ }
+
+ ///
+ /// Returns the maximum number of levels in the trie. Used for testing.
+ /// Go reference: server/gsl/gsl.go numLevels
+ ///
+ internal int NumLevels()
+ {
+ return VisitLevel(_root, 0);
+ }
+
+ // --- Private implementation ---
+
+ ///
+ /// Go reference: server/gsl/gsl.go match
+ ///
+    private void MatchInternal(string subject, Action<T> callback, bool doLock)
+ {
+        Span<string> tokenBuffer = new string[32];
+ var tokens = TokenizeSubjectForMatch(subject, tokenBuffer);
+ if (tokens.Length == 0)
+ return;
+
+ if (doLock) _lock.EnterReadLock();
+ try
+ {
+ MatchLevel(_root, tokens, callback);
+ }
+ finally
+ {
+ if (doLock) _lock.ExitReadLock();
+ }
+ }
+
+ ///
+ /// Go reference: server/gsl/gsl.go hasInterest
+ ///
+ private bool HasInterestInternal(string subject, bool doLock, int[]? np)
+ {
+        Span<string> tokenBuffer = new string[32];
+ var tokens = TokenizeSubjectForMatch(subject, tokenBuffer);
+ if (tokens.Length == 0)
+ return false;
+
+ if (doLock) _lock.EnterReadLock();
+ try
+ {
+ return MatchLevelForAny(_root, tokens, np);
+ }
+ finally
+ {
+ if (doLock) _lock.ExitReadLock();
+ }
+ }
+
+ ///
+ /// Tokenize a subject for match/hasInterest. Returns empty span for invalid subjects
+ /// (empty tokens or trailing separator).
+ /// Go reference: server/gsl/gsl.go match (tokenization section)
+ ///
+    private static ReadOnlySpan<string> TokenizeSubjectForMatch(string subject, Span<string> buffer)
+ {
+ var count = 0;
+ var start = 0;
+ for (var i = 0; i < subject.Length; i++)
+ {
+ if (subject[i] == Btsep)
+ {
+ if (i - start == 0)
+                    return ReadOnlySpan<string>.Empty; // empty token
+                if (count >= buffer.Length)
+                    return ReadOnlySpan<string>.Empty;
+ buffer[count++] = subject[start..i];
+ start = i + 1;
+ }
+ }
+
+ if (start >= subject.Length)
+            return ReadOnlySpan<string>.Empty; // trailing separator
+
+        if (count >= buffer.Length)
+            return ReadOnlySpan<string>.Empty;
+ buffer[count++] = subject[start..];
+ return buffer[..count];
+ }
+
+ ///
+ /// Tokenize a subject into a span (does not validate empty tokens).
+ /// Go reference: server/gsl/gsl.go tokenizeSubjectIntoSlice
+ ///
+    private static ReadOnlySpan<string> TokenizeSubjectIntoSpan(string subject, Span<string> buffer)
+ {
+ var count = 0;
+ var start = 0;
+ for (var i = 0; i < subject.Length; i++)
+ {
+ if (subject[i] == Btsep)
+ {
+ if (count >= buffer.Length) break;
+ buffer[count++] = subject[start..i];
+ start = i + 1;
+ }
+ }
+
+ if (count < buffer.Length)
+ buffer[count++] = subject[start..];
+ return buffer[..count];
+ }
+
+ ///
+ /// Recursively descend into the trie to match subscriptions.
+ /// Go reference: server/gsl/gsl.go matchLevel
+ ///
+    private static void MatchLevel(Level<T>? l, ReadOnlySpan<string> toks, Action<T> cb)
+    {
+        Node<T>? pwc = null;
+        Node<T>? n = null;
+ for (var i = 0; i < toks.Length; i++)
+ {
+ if (l is null) return;
+
+ if (l.Fwc is not null)
+ CallbacksForResults(l.Fwc, cb);
+
+ pwc = l.Pwc;
+ if (pwc is not null)
+ MatchLevel(pwc.Next, toks[(i + 1)..], cb);
+
+ l.Nodes.TryGetValue(toks[i], out n);
+ l = n?.Next;
+ }
+
+ if (n is not null)
+ CallbacksForResults(n, cb);
+ if (pwc is not null)
+ CallbacksForResults(pwc, cb);
+ }
+
+ ///
+ /// Recursively check if any subscription matches (optimization over full Match).
+ /// Go reference: server/gsl/gsl.go matchLevelForAny
+ ///
+    private static bool MatchLevelForAny(Level<T>? l, ReadOnlySpan<string> toks, int[]? np)
+    {
+        Node<T>? pwc = null;
+        Node<T>? n = null;
+ for (var i = 0; i < toks.Length; i++)
+ {
+ if (l is null) return false;
+
+ if (l.Fwc is not null)
+ {
+ if (np is not null)
+ np[0] += l.Fwc.Subs.Count;
+ return true;
+ }
+
+ pwc = l.Pwc;
+ if (pwc is not null)
+ {
+ if (MatchLevelForAny(pwc.Next, toks[(i + 1)..], np))
+ return true;
+ }
+
+ l.Nodes.TryGetValue(toks[i], out n);
+ l = n?.Next;
+ }
+
+ if (n is not null)
+ {
+ if (np is not null)
+ np[0] += n.Subs.Count;
+ if (n.Subs.Count > 0)
+ return true;
+ }
+
+ if (pwc is not null)
+ {
+ if (np is not null)
+ np[0] += pwc.Subs.Count;
+ return pwc.Subs.Count > 0;
+ }
+
+ return false;
+ }
+
+ ///
+ /// Invoke callback for each subscription in a node.
+ /// Go reference: server/gsl/gsl.go callbacksForResults
+ ///
+    private static void CallbacksForResults(Node<T> n, Action<T> cb)
+ {
+ foreach (var sub in n.Subs.Keys)
+ cb(sub);
+ }
+
+ ///
+ /// Internal remove with lock already held.
+ /// Go reference: server/gsl/gsl.go remove
+ ///
+ private void RemoveInternal(string subject, T value)
+ {
+ var sfwc = false;
+        Node<T>? n = null;
+        Level<T>? l = _root;
+
+ // Track levels for pruning
+        Span<Lnt<T>> levelsBuffer = new Lnt<T>[32];
+ var levelCount = 0;
+
+ foreach (var token in TokenizeSubject(subject))
+ {
+ var lt = token.Length;
+ if (lt == 0 || sfwc)
+ throw GslErrors.InvalidSubject;
+
+ if (l is null)
+ throw GslErrors.NotFound;
+
+ if (lt > 1)
+ {
+ l.Nodes.TryGetValue(token, out n);
+ }
+ else
+ {
+ switch (token[0])
+ {
+ case Pwc:
+ n = l.Pwc;
+ break;
+ case Fwc:
+ n = l.Fwc;
+ sfwc = true;
+ break;
+ default:
+ l.Nodes.TryGetValue(token, out n);
+ break;
+ }
+ }
+
+ if (n is not null)
+ {
+                levelsBuffer[levelCount++] = new Lnt<T>(l, n, token);
+ l = n.Next;
+ }
+ else
+ {
+ l = null;
+ }
+ }
+
+ if (!RemoveFromNode(n, value))
+ throw GslErrors.NotFound;
+
+ _count--;
+
+ // Prune empty nodes
+ for (var i = levelCount - 1; i >= 0; i--)
+ {
+ var lnt = levelsBuffer[i];
+ if (lnt.N.IsEmpty())
+ lnt.L.PruneNode(lnt.N, lnt.T_);
+ }
+ }
+
+ ///
+ /// Remove the value from the given node.
+ /// Go reference: server/gsl/gsl.go removeFromNode
+ ///
+    private static bool RemoveFromNode(Node<T>? n, T value)
+ {
+ if (n is null) return false;
+ return n.Subs.Remove(value);
+ }
+
+ ///
+ /// Recursively check if there is interest starting at a prefix.
+ /// Go reference: server/gsl/gsl.go hasInterestStartingIn
+ ///
+    private static bool HasInterestStartingInLevel(Level<T>? l, ReadOnlySpan<string> tokens)
+ {
+ if (l is null) return false;
+ if (tokens.Length == 0) return true;
+
+ var token = tokens[0];
+ if (l.Fwc is not null) return true;
+
+ var found = false;
+ if (l.Pwc is not null)
+ found = HasInterestStartingInLevel(l.Pwc.Next, tokens[1..]);
+ if (!found && l.Nodes.TryGetValue(token, out var n))
+ found = HasInterestStartingInLevel(n.Next, tokens[1..]);
+ return found;
+ }
+
+ ///
+ /// Visit levels recursively to compute max depth.
+ /// Go reference: server/gsl/gsl.go visitLevel
+ ///
+    private static int VisitLevel(Level<T>? l, int depth)
+ {
+ if (l is null || l.NumNodes() == 0)
+ return depth;
+
+ depth++;
+ var maxDepth = depth;
+
+ foreach (var n in l.Nodes.Values)
+ {
+ var newDepth = VisitLevel(n.Next, depth);
+ if (newDepth > maxDepth)
+ maxDepth = newDepth;
+ }
+
+ if (l.Pwc is not null)
+ {
+ var pwcDepth = VisitLevel(l.Pwc.Next, depth);
+ if (pwcDepth > maxDepth)
+ maxDepth = pwcDepth;
+ }
+
+ if (l.Fwc is not null)
+ {
+ var fwcDepth = VisitLevel(l.Fwc.Next, depth);
+ if (fwcDepth > maxDepth)
+ maxDepth = fwcDepth;
+ }
+
+ return maxDepth;
+ }
+
+ ///
+ /// Tokenize a subject by splitting on '.'. Returns an enumerable of tokens.
+ /// Used by Insert and Remove.
+ ///
+ private static SplitEnumerable TokenizeSubject(string subject)
+ {
+ return new SplitEnumerable(subject);
+ }
+
+ ///
+ /// A stack-friendly subject tokenizer that splits on '.'.
+ ///
+ private readonly ref struct SplitEnumerable
+ {
+ private readonly string _subject;
+
+ public SplitEnumerable(string subject) => _subject = subject;
+
+ public SplitEnumerator GetEnumerator() => new(_subject);
+ }
+
+ private ref struct SplitEnumerator
+ {
+ private readonly string _subject;
+ private int _start;
+ private bool _done;
+
+ public SplitEnumerator(string subject)
+ {
+ _subject = subject;
+ _start = 0;
+ _done = false;
+ Current = default!;
+ }
+
+ public string Current { get; private set; }
+
+ public bool MoveNext()
+ {
+ if (_done) return false;
+
+ var idx = _subject.IndexOf(Btsep, _start);
+ if (idx >= 0)
+ {
+ Current = _subject[_start..idx];
+ _start = idx + 1;
+ return true;
+ }
+
+ Current = _subject[_start..];
+ _done = true;
+ return true;
+ }
+ }
}
+
+/// <summary>
+/// SimpleSubjectList is an alias for GenericSubjectList that uses int values,
+/// useful for tracking interest only.
+/// Go reference: server/gsl/gsl.go SimpleSublist
+/// </summary>
+public class SimpleSubjectList : GenericSubjectList<int>;
diff --git a/src/NATS.Server/Internal/SubjectTree/Nodes.cs b/src/NATS.Server/Internal/SubjectTree/Nodes.cs
new file mode 100644
index 0000000..dab4933
--- /dev/null
+++ b/src/NATS.Server/Internal/SubjectTree/Nodes.cs
@@ -0,0 +1,649 @@
+// Go reference: server/stree/node.go, leaf.go, node4.go, node10.go, node16.go, node48.go, node256.go
+namespace NATS.Server.Internal.SubjectTree;
+
+/// <summary>
+/// Internal node interface for the Adaptive Radix Tree (ART).
+/// Implemented by Leaf and by the fixed-fanout internal nodes (Node4/10/16/48/256).
+/// NOTE(review): generic type arguments appear to have been stripped from this
+/// file (e.g. ReadOnlySpan with no element type) — confirm against the original port.
+/// </summary>
+internal interface INode
+{
+ // True for Leaf nodes; false for all internal node types.
+ bool IsLeaf { get; }
+ // Shared metadata (path prefix + child count); null for leaves.
+ NodeMeta? Base { get; }
+ void SetPrefix(ReadOnlySpan pre);
+ void AddChild(byte c, INode n);
+ /// <summary>
+ /// Returns the child node for the given key byte, or null if not found.
+ /// The returned wrapper allows in-place replacement of the child reference.
+ /// </summary>
+ ChildRef? FindChild(byte c);
+ void DeleteChild(byte c);
+ // True when the node has no free child slot and must Grow() before AddChild.
+ bool IsFull { get; }
+ // Returns a larger-fanout replacement node holding the same children.
+ INode Grow();
+ // Returns a smaller replacement node when sparse enough, or null to keep this node.
+ INode? Shrink();
+ (ReadOnlyMemory[] RemainingParts, bool Matched) MatchParts(ReadOnlyMemory[] parts);
+ string Kind { get; }
+ void Iter(Func f);
+ INode?[] Children();
+ ushort NumChildren { get; }
+ byte[] Path();
+}
+
+/// <summary>
+/// Wrapper that allows in-place replacement of a child reference in a node.
+/// This is analogous to Go's *node pointer: callers can both read and
+/// overwrite the slot the child occupies in its parent.
+/// </summary>
+internal sealed class ChildRef(Func getter, Action setter)
+{
+ public INode? Node
+ {
+ get => getter();
+ set => setter(value);
+ }
+}
+
+/// <summary>
+/// Base metadata for internal (non-leaf) nodes: the compressed path prefix
+/// and the current number of children.
+/// </summary>
+internal sealed class NodeMeta
+{
+ // Compressed path prefix shared by all children of this node.
+ public byte[] Prefix { get; set; } = [];
+ // Number of populated child slots.
+ public ushort Size { get; set; }
+}
+
+#region Leaf Node
+
+/// <summary>
+/// Leaf node holding a value and suffix (the remainder of the subject past
+/// the prefixes of its ancestors).
+/// Go reference: server/stree/leaf.go
+/// NOTE(review): the type parameter declaration appears to have been stripped
+/// (field uses an undeclared T) — confirm this class is generic in the original port.
+/// </summary>
+internal sealed class Leaf : INode
+{
+ public T Value;
+ public byte[] Suffix;
+
+ public Leaf(ReadOnlySpan suffix, T value)
+ {
+ Value = value;
+ Suffix = Parts.CopyBytes(suffix);
+ }
+
+ public bool IsLeaf => true;
+ public NodeMeta? Base => null;
+ public bool IsFull => true;
+ public ushort NumChildren => 0;
+ public string Kind => "LEAF";
+
+ // Exact (byte-wise) match of the remaining subject against this leaf's suffix.
+ public bool Match(ReadOnlySpan subject) => subject.SequenceEqual(Suffix);
+
+ public void SetSuffix(ReadOnlySpan suffix) => Suffix = Parts.CopyBytes(suffix);
+
+ public byte[] Path() => Suffix;
+
+ public INode?[] Children() => [];
+
+ public void Iter(Func f) { }
+
+ public (ReadOnlyMemory[] RemainingParts, bool Matched) MatchParts(ReadOnlyMemory[] parts)
+ => Parts.MatchPartsAgainstFragment(parts, Suffix);
+
+ // These should not be called on a leaf.
+ public void SetPrefix(ReadOnlySpan pre) => throw new InvalidOperationException("setPrefix called on leaf");
+ public void AddChild(byte c, INode n) => throw new InvalidOperationException("addChild called on leaf");
+ public ChildRef? FindChild(byte c) => throw new InvalidOperationException("findChild called on leaf");
+ public INode Grow() => throw new InvalidOperationException("grow called on leaf");
+ public void DeleteChild(byte c) => throw new InvalidOperationException("deleteChild called on leaf");
+ public INode? Shrink() => throw new InvalidOperationException("shrink called on leaf");
+}
+
+#endregion
+
+#region Node4
+
+/// <summary>
+/// Node with up to 4 children, stored in unsorted parallel key/child arrays.
+/// Go reference: server/stree/node4.go
+/// </summary>
+internal sealed class Node4 : INode
+{
+ private readonly INode?[] _child = new INode?[4];
+ private readonly byte[] _key = new byte[4];
+ internal readonly NodeMeta Meta = new();
+
+ public Node4(ReadOnlySpan prefix)
+ {
+ SetPrefix(prefix);
+ }
+
+ public bool IsLeaf => false;
+ public NodeMeta? Base => Meta;
+ public ushort NumChildren => Meta.Size;
+ public bool IsFull => Meta.Size >= 4;
+ public string Kind => "NODE4";
+ public byte[] Path() => Meta.Prefix;
+
+ public void SetPrefix(ReadOnlySpan pre)
+ {
+ Meta.Prefix = pre.ToArray();
+ }
+
+ // Appends the child in the next free slot; caller must Grow() first when full.
+ public void AddChild(byte c, INode n)
+ {
+ if (Meta.Size >= 4) throw new InvalidOperationException("node4 full!");
+ _key[Meta.Size] = c;
+ _child[Meta.Size] = n;
+ Meta.Size++;
+ }
+
+ // Linear scan over the (at most 4) keys.
+ public ChildRef? FindChild(byte c)
+ {
+ for (int i = 0; i < Meta.Size; i++)
+ {
+ if (_key[i] == c)
+ {
+ var idx = i;
+ return new ChildRef(() => _child[idx], v => _child[idx] = v);
+ }
+ }
+ return null;
+ }
+
+ // Removes by swapping the last entry into the vacated slot (order is not preserved).
+ public void DeleteChild(byte c)
+ {
+ for (int i = 0; i < Meta.Size; i++)
+ {
+ if (_key[i] == c)
+ {
+ var last = Meta.Size - 1;
+ if (i < last)
+ {
+ _key[i] = _key[last];
+ _child[i] = _child[last];
+ _key[last] = 0;
+ _child[last] = null;
+ }
+ else
+ {
+ _key[i] = 0;
+ _child[i] = null;
+ }
+ Meta.Size--;
+ return;
+ }
+ }
+ }
+
+ // Only called when full, so all 4 slots are populated.
+ public INode Grow()
+ {
+ var nn = new Node10(Meta.Prefix);
+ for (int i = 0; i < 4; i++)
+ {
+ nn.AddChild(_key[i], _child[i]!);
+ }
+ return nn;
+ }
+
+ // Collapses to the single remaining child (caller fixes up prefixes/suffixes).
+ public INode? Shrink()
+ {
+ if (Meta.Size == 1) return _child[0];
+ return null;
+ }
+
+ public void Iter(Func f)
+ {
+ for (int i = 0; i < Meta.Size; i++)
+ {
+ if (!f(_child[i]!)) return;
+ }
+ }
+
+ public INode?[] Children()
+ {
+ var result = new INode?[Meta.Size];
+ Array.Copy(_child, result, Meta.Size);
+ return result;
+ }
+
+ public (ReadOnlyMemory[] RemainingParts, bool Matched) MatchParts(ReadOnlyMemory[] parts)
+ => Parts.MatchPartsAgainstFragment(parts, Meta.Prefix);
+}
+
+#endregion
+
+#region Node10
+
+/// <summary>
+/// Node with up to 10 children. Optimized for numeric subject tokens (0-9).
+/// Same unsorted parallel-array layout as Node4.
+/// Go reference: server/stree/node10.go
+/// </summary>
+internal sealed class Node10 : INode
+{
+ private readonly INode?[] _child = new INode?[10];
+ private readonly byte[] _key = new byte[10];
+ internal readonly NodeMeta Meta = new();
+
+ public Node10(ReadOnlySpan prefix)
+ {
+ SetPrefix(prefix);
+ }
+
+ public bool IsLeaf => false;
+ public NodeMeta? Base => Meta;
+ public ushort NumChildren => Meta.Size;
+ public bool IsFull => Meta.Size >= 10;
+ public string Kind => "NODE10";
+ public byte[] Path() => Meta.Prefix;
+
+ public void SetPrefix(ReadOnlySpan pre)
+ {
+ Meta.Prefix = pre.ToArray();
+ }
+
+ // Appends the child in the next free slot; caller must Grow() first when full.
+ public void AddChild(byte c, INode n)
+ {
+ if (Meta.Size >= 10) throw new InvalidOperationException("node10 full!");
+ _key[Meta.Size] = c;
+ _child[Meta.Size] = n;
+ Meta.Size++;
+ }
+
+ public ChildRef? FindChild(byte c)
+ {
+ for (int i = 0; i < Meta.Size; i++)
+ {
+ if (_key[i] == c)
+ {
+ var idx = i;
+ return new ChildRef(() => _child[idx], v => _child[idx] = v);
+ }
+ }
+ return null;
+ }
+
+ // Removes by swapping the last entry into the vacated slot (order is not preserved).
+ public void DeleteChild(byte c)
+ {
+ for (int i = 0; i < Meta.Size; i++)
+ {
+ if (_key[i] == c)
+ {
+ var last = Meta.Size - 1;
+ if (i < last)
+ {
+ _key[i] = _key[last];
+ _child[i] = _child[last];
+ _key[last] = 0;
+ _child[last] = null;
+ }
+ else
+ {
+ _key[i] = 0;
+ _child[i] = null;
+ }
+ Meta.Size--;
+ return;
+ }
+ }
+ }
+
+ // Only called when full, so all 10 slots are populated.
+ public INode Grow()
+ {
+ var nn = new Node16(Meta.Prefix);
+ for (int i = 0; i < 10; i++)
+ {
+ nn.AddChild(_key[i], _child[i]!);
+ }
+ return nn;
+ }
+
+ // Shrinks to a Node4 once small enough; prefix fix-up is done by the caller.
+ public INode? Shrink()
+ {
+ if (Meta.Size > 4) return null;
+ var nn = new Node4([]);
+ for (int i = 0; i < Meta.Size; i++)
+ {
+ nn.AddChild(_key[i], _child[i]!);
+ }
+ return nn;
+ }
+
+ public void Iter(Func f)
+ {
+ for (int i = 0; i < Meta.Size; i++)
+ {
+ if (!f(_child[i]!)) return;
+ }
+ }
+
+ public INode?[] Children()
+ {
+ var result = new INode?[Meta.Size];
+ Array.Copy(_child, result, Meta.Size);
+ return result;
+ }
+
+ public (ReadOnlyMemory[] RemainingParts, bool Matched) MatchParts(ReadOnlyMemory[] parts)
+ => Parts.MatchPartsAgainstFragment(parts, Meta.Prefix);
+}
+
+#endregion
+
+#region Node16
+
+/// <summary>
+/// Node with up to 16 children. Same unsorted parallel-array layout as Node4/Node10.
+/// Go reference: server/stree/node16.go
+/// </summary>
+internal sealed class Node16 : INode
+{
+ private readonly INode?[] _child = new INode?[16];
+ private readonly byte[] _key = new byte[16];
+ internal readonly NodeMeta Meta = new();
+
+ public Node16(ReadOnlySpan prefix)
+ {
+ SetPrefix(prefix);
+ }
+
+ public bool IsLeaf => false;
+ public NodeMeta? Base => Meta;
+ public ushort NumChildren => Meta.Size;
+ public bool IsFull => Meta.Size >= 16;
+ public string Kind => "NODE16";
+ public byte[] Path() => Meta.Prefix;
+
+ public void SetPrefix(ReadOnlySpan pre)
+ {
+ Meta.Prefix = pre.ToArray();
+ }
+
+ // Appends the child in the next free slot; caller must Grow() first when full.
+ public void AddChild(byte c, INode n)
+ {
+ if (Meta.Size >= 16) throw new InvalidOperationException("node16 full!");
+ _key[Meta.Size] = c;
+ _child[Meta.Size] = n;
+ Meta.Size++;
+ }
+
+ public ChildRef? FindChild(byte c)
+ {
+ for (int i = 0; i < Meta.Size; i++)
+ {
+ if (_key[i] == c)
+ {
+ var idx = i;
+ return new ChildRef(() => _child[idx], v => _child[idx] = v);
+ }
+ }
+ return null;
+ }
+
+ // Removes by swapping the last entry into the vacated slot (order is not preserved).
+ public void DeleteChild(byte c)
+ {
+ for (int i = 0; i < Meta.Size; i++)
+ {
+ if (_key[i] == c)
+ {
+ var last = Meta.Size - 1;
+ if (i < last)
+ {
+ _key[i] = _key[last];
+ _child[i] = _child[last];
+ _key[last] = 0;
+ _child[last] = null;
+ }
+ else
+ {
+ _key[i] = 0;
+ _child[i] = null;
+ }
+ Meta.Size--;
+ return;
+ }
+ }
+ }
+
+ // Only called when full, so all 16 slots are populated.
+ public INode Grow()
+ {
+ var nn = new Node48(Meta.Prefix);
+ for (int i = 0; i < 16; i++)
+ {
+ nn.AddChild(_key[i], _child[i]!);
+ }
+ return nn;
+ }
+
+ // Shrinks to a Node10 once small enough; prefix fix-up is done by the caller.
+ public INode? Shrink()
+ {
+ if (Meta.Size > 10) return null;
+ var nn = new Node10([]);
+ for (int i = 0; i < Meta.Size; i++)
+ {
+ nn.AddChild(_key[i], _child[i]!);
+ }
+ return nn;
+ }
+
+ public void Iter(Func f)
+ {
+ for (int i = 0; i < Meta.Size; i++)
+ {
+ if (!f(_child[i]!)) return;
+ }
+ }
+
+ public INode?[] Children()
+ {
+ var result = new INode?[Meta.Size];
+ Array.Copy(_child, result, Meta.Size);
+ return result;
+ }
+
+ public (ReadOnlyMemory[] RemainingParts, bool Matched) MatchParts(ReadOnlyMemory[] parts)
+ => Parts.MatchPartsAgainstFragment(parts, Meta.Prefix);
+}
+
+#endregion
+
+#region Node48
+
+/// <summary>
+/// Node with up to 48 children. Uses a 256-byte index array (1-indexed) to map keys to child slots.
+/// A Key[c] of 0 means "no child for byte c"; otherwise the child is Child[Key[c] - 1].
+/// Go reference: server/stree/node48.go
+/// </summary>
+internal sealed class Node48 : INode
+{
+ internal readonly INode?[] Child = new INode?[48];
+ internal readonly byte[] Key = new byte[256]; // 1-indexed: 0 means no entry
+ internal readonly NodeMeta Meta = new();
+
+ public Node48(ReadOnlySpan prefix)
+ {
+ SetPrefix(prefix);
+ }
+
+ public bool IsLeaf => false;
+ public NodeMeta? Base => Meta;
+ public ushort NumChildren => Meta.Size;
+ public bool IsFull => Meta.Size >= 48;
+ public string Kind => "NODE48";
+ public byte[] Path() => Meta.Prefix;
+
+ public void SetPrefix(ReadOnlySpan pre)
+ {
+ Meta.Prefix = pre.ToArray();
+ }
+
+ // Stores the child in the next free slot and records its 1-based index under Key[c].
+ public void AddChild(byte c, INode n)
+ {
+ if (Meta.Size >= 48) throw new InvalidOperationException("node48 full!");
+ Child[Meta.Size] = n;
+ Key[c] = (byte)(Meta.Size + 1); // 1-indexed
+ Meta.Size++;
+ }
+
+ // O(1) lookup via the 256-entry index.
+ public ChildRef? FindChild(byte c)
+ {
+ var i = Key[c];
+ if (i == 0) return null;
+ var idx = i - 1;
+ return new ChildRef(() => Child[idx], v => Child[idx] = v);
+ }
+
+ // Swaps the last slot into the vacated one, then scans the 256-entry index
+ // to re-point whichever key referenced the moved slot.
+ public void DeleteChild(byte c)
+ {
+ var i = Key[c];
+ if (i == 0) return;
+ i--; // Adjust for 1-indexing
+ var last = (byte)(Meta.Size - 1);
+ if (i < last)
+ {
+ Child[i] = Child[last];
+ for (int ic = 0; ic < 256; ic++)
+ {
+ if (Key[ic] == last + 1)
+ {
+ Key[ic] = (byte)(i + 1);
+ break;
+ }
+ }
+ }
+ Child[last] = null;
+ Key[c] = 0;
+ Meta.Size--;
+ }
+
+ // Re-inserts by key byte so Node256 is populated in its direct-index layout.
+ public INode Grow()
+ {
+ var nn = new Node256(Meta.Prefix);
+ for (int c = 0; c < 256; c++)
+ {
+ var i = Key[c];
+ if (i > 0)
+ {
+ nn.AddChild((byte)c, Child[i - 1]!);
+ }
+ }
+ return nn;
+ }
+
+ // Shrinks to a Node16 once small enough; prefix fix-up is done by the caller.
+ public INode? Shrink()
+ {
+ if (Meta.Size > 16) return null;
+ var nn = new Node16([]);
+ for (int c = 0; c < 256; c++)
+ {
+ var i = Key[c];
+ if (i > 0)
+ {
+ nn.AddChild((byte)c, Child[i - 1]!);
+ }
+ }
+ return nn;
+ }
+
+ public void Iter(Func f)
+ {
+ foreach (var c in Child)
+ {
+ if (c != null && !f(c)) return;
+ }
+ }
+
+ public INode?[] Children()
+ {
+ var result = new INode?[Meta.Size];
+ Array.Copy(Child, result, Meta.Size);
+ return result;
+ }
+
+ public (ReadOnlyMemory[] RemainingParts, bool Matched) MatchParts(ReadOnlyMemory[] parts)
+ => Parts.MatchPartsAgainstFragment(parts, Meta.Prefix);
+}
+
+#endregion
+
+#region Node256
+
+/// <summary>
+/// Node with up to 256 children. Direct array indexed by byte value.
+/// Go reference: server/stree/node256.go
+/// </summary>
+internal sealed class Node256 : INode
+{
+ internal readonly INode?[] Child = new INode?[256];
+ internal readonly NodeMeta Meta = new();
+
+ public Node256(ReadOnlySpan prefix)
+ {
+ SetPrefix(prefix);
+ }
+
+ public bool IsLeaf => false;
+ public NodeMeta? Base => Meta;
+ public ushort NumChildren => Meta.Size;
+ public bool IsFull => false; // node256 is never full
+ public string Kind => "NODE256";
+ public byte[] Path() => Meta.Prefix;
+
+ public void SetPrefix(ReadOnlySpan pre)
+ {
+ Meta.Prefix = pre.ToArray();
+ }
+
+ // NOTE(review): Size is incremented even when Child[c] was already set, which
+ // would over-count on overwrite. This mirrors the Go reference, and callers
+ // appear to only AddChild for keys found absent via FindChild — confirm.
+ public void AddChild(byte c, INode n)
+ {
+ Child[c] = n;
+ Meta.Size++;
+ }
+
+ public ChildRef? FindChild(byte c)
+ {
+ if (Child[c] == null) return null;
+ return new ChildRef(() => Child[c], v => Child[c] = v);
+ }
+
+ public void DeleteChild(byte c)
+ {
+ if (Child[c] != null)
+ {
+ Child[c] = null;
+ Meta.Size--;
+ }
+ }
+
+ public INode Grow() => throw new InvalidOperationException("grow can not be called on node256");
+
+ // Shrinks to a Node48 once small enough; prefix fix-up is done by the caller.
+ public INode? Shrink()
+ {
+ if (Meta.Size > 48) return null;
+ var nn = new Node48([]);
+ for (int c = 0; c < 256; c++)
+ {
+ if (Child[c] != null)
+ {
+ nn.AddChild((byte)c, Child[c]!);
+ }
+ }
+ return nn;
+ }
+
+ public void Iter(Func f)
+ {
+ for (int i = 0; i < 256; i++)
+ {
+ if (Child[i] != null)
+ {
+ if (!f(Child[i]!)) return;
+ }
+ }
+ }
+
+ public INode?[] Children()
+ {
+ // Return the full 256 array, same as Go
+ return (INode?[])Child.Clone();
+ }
+
+ public (ReadOnlyMemory[] RemainingParts, bool Matched) MatchParts(ReadOnlyMemory[] parts)
+ => Parts.MatchPartsAgainstFragment(parts, Meta.Prefix);
+}
+
+#endregion
diff --git a/src/NATS.Server/Internal/SubjectTree/Parts.cs b/src/NATS.Server/Internal/SubjectTree/Parts.cs
new file mode 100644
index 0000000..6e5a464
--- /dev/null
+++ b/src/NATS.Server/Internal/SubjectTree/Parts.cs
@@ -0,0 +1,243 @@
+// Go reference: server/stree/parts.go, server/stree/util.go
+namespace NATS.Server.Internal.SubjectTree;
+
+/// <summary>
+/// Subject tokenization helpers and match logic for the ART.
+/// </summary>
+internal static class Parts
+{
+ // For subject matching.
+ internal const byte Pwc = (byte)'*';
+ internal const byte Fwc = (byte)'>';
+ internal const byte Tsep = (byte)'.';
+
+ /// <summary>
+ /// No pivot available sentinel value (DEL character).
+ /// </summary>
+ internal const byte NoPivot = 127;
+
+ /// <summary>
+ /// Returns the pivot byte at the given position, or NoPivot if past end.
+ /// Go reference: server/stree/util.go:pivot
+ /// </summary>
+ internal static byte Pivot(ReadOnlySpan subject, int pos)
+ {
+ if (pos >= subject.Length) return NoPivot;
+ return subject[pos];
+ }
+
+ /// <summary>
+ /// Returns the length of the common prefix between two byte spans.
+ /// Go reference: server/stree/util.go:commonPrefixLen
+ /// </summary>
+ internal static int CommonPrefixLen(ReadOnlySpan s1, ReadOnlySpan s2)
+ {
+ var limit = Math.Min(s1.Length, s2.Length);
+ int i = 0;
+ for (; i < limit; i++)
+ {
+ if (s1[i] != s2[i]) break;
+ }
+ return i;
+ }
+
+ /// <summary>
+ /// Copy bytes helper. Returns an empty array for an empty span.
+ /// </summary>
+ internal static byte[] CopyBytes(ReadOnlySpan src)
+ {
+ if (src.Length == 0) return [];
+ return src.ToArray();
+ }
+
+ /// <summary>
+ /// Break a filter subject into parts based on wildcards (pwc '*' and fwc '>').
+ /// Literal runs stay together (including their trailing '.'); each wildcard
+ /// token becomes its own single-byte part.
+ /// Go reference: server/stree/parts.go:genParts
+ /// </summary>
+ internal static ReadOnlyMemory[] GenParts(ReadOnlySpan filter)
+ {
+ var parts = new List>();
+ // We work on a copy since ReadOnlyMemory needs a backing array
+ var filterArr = filter.ToArray();
+ var filterMem = new ReadOnlyMemory(filterArr);
+ int start = 0;
+ int e = filterArr.Length - 1;
+
+ for (int i = 0; i < filterArr.Length; i++)
+ {
+ if (filterArr[i] == Tsep)
+ {
+ // See if next token is pwc. Either internal or end pwc.
+ if (i < e && filterArr[i + 1] == Pwc && ((i + 2 <= e && filterArr[i + 2] == Tsep) || i + 1 == e))
+ {
+ if (i > start)
+ {
+ parts.Add(filterMem.Slice(start, i + 1 - start));
+ }
+ parts.Add(filterMem.Slice(i + 1, 1));
+ i++; // Skip pwc
+ if (i + 2 <= e)
+ {
+ i++; // Skip next tsep from next part too.
+ }
+ start = i + 1;
+ }
+ else if (i < e && filterArr[i + 1] == Fwc && i + 1 == e)
+ {
+ if (i > start)
+ {
+ parts.Add(filterMem.Slice(start, i + 1 - start));
+ }
+ parts.Add(filterMem.Slice(i + 1, 1));
+ i++; // Skip fwc
+ start = i + 1;
+ }
+ }
+ else if (filterArr[i] == Pwc || filterArr[i] == Fwc)
+ {
+ // Wildcard must be at the start or preceded by tsep.
+ int prev = i - 1;
+ if (prev >= 0 && filterArr[prev] != Tsep)
+ {
+ continue;
+ }
+
+ // Wildcard must be at the end or followed by tsep.
+ int next = i + 1;
+ if (next == e || (next < e && filterArr[next] != Tsep))
+ {
+ continue;
+ }
+
+ // Full wildcard must be terminal.
+ if (filterArr[i] == Fwc && i < e)
+ {
+ break;
+ }
+
+ // We start with a pwc or fwc.
+ parts.Add(filterMem.Slice(i, 1));
+ if (i + 1 <= e)
+ {
+ i++; // Skip next tsep from next part too.
+ }
+ start = i + 1;
+ }
+ }
+
+ if (start < filterArr.Length)
+ {
+ // Check to see if we need to eat a leading tsep.
+ if (filterArr[start] == Tsep)
+ {
+ start++;
+ }
+ parts.Add(filterMem[start..]);
+ }
+
+ return [.. parts];
+ }
+
+ /// <summary>
+ /// Match parts against a fragment (prefix for nodes or suffix for leaves).
+ /// Returns the parts that remain to be matched below this fragment, and
+ /// whether the fragment matched at all.
+ /// Go reference: server/stree/parts.go:matchParts
+ /// </summary>
+ internal static (ReadOnlyMemory[] RemainingParts, bool Matched) MatchPartsAgainstFragment(
+ ReadOnlyMemory[] parts, ReadOnlySpan frag)
+ {
+ int lf = frag.Length;
+ if (lf == 0)
+ {
+ return (parts, true);
+ }
+
+ int si = 0;
+ int lpi = parts.Length - 1;
+
+ for (int i = 0; i < parts.Length; i++)
+ {
+ if (si >= lf)
+ {
+ return (parts[i..], true);
+ }
+
+ var part = parts[i].Span;
+ int lp = part.Length;
+
+ // Check for pwc or fwc place holders.
+ if (lp == 1)
+ {
+ if (part[0] == Pwc)
+ {
+ var index = frag[si..].IndexOf(Tsep);
+ // We are trying to match pwc and did not find our tsep.
+ if (index < 0)
+ {
+ if (i == lpi)
+ {
+ return ([], true);
+ }
+ return (parts[i..], true);
+ }
+ si += index + 1;
+ continue;
+ }
+ else if (part[0] == Fwc)
+ {
+ return ([], true);
+ }
+ }
+
+ int end = Math.Min(si + lp, lf);
+ // If part is bigger than the remaining fragment, adjust to a portion of the part.
+ var partToCompare = part;
+ if (si + lp > end)
+ {
+ // Frag is smaller than part itself.
+ partToCompare = part[..(end - si)];
+ }
+
+ if (!partToCompare.SequenceEqual(frag[si..end]))
+ {
+ return (parts, false);
+ }
+
+ // If we still have a portion of the fragment left, update and continue.
+ if (end < lf)
+ {
+ si = end;
+ continue;
+ }
+
+ // If we matched a partial, do not move past current part
+ // but update the part to what was consumed.
+ if (end < si + lp)
+ {
+ if (end >= lf)
+ {
+ // Create a copy with the current part trimmed.
+ var newParts = new ReadOnlyMemory[parts.Length - i];
+ Array.Copy(parts, i, newParts, 0, newParts.Length);
+ newParts[0] = parts[i][(lf - si)..];
+ return (newParts, true);
+ }
+ else
+ {
+ // NOTE(review): since end == Math.Min(si + lp, lf), end < si + lp
+ // implies end == lf, so this branch looks unreachable — confirm.
+ i++;
+ }
+ return (parts[i..], true);
+ }
+
+ if (i == lpi)
+ {
+ return ([], true);
+ }
+
+ // If we are here we are not the last part which means we have a wildcard
+ // gap, so we need to match anything up to next tsep.
+ si += part.Length;
+ }
+
+ return (parts, false);
+ }
+}
diff --git a/src/NATS.Server/Internal/SubjectTree/SubjectTree.cs b/src/NATS.Server/Internal/SubjectTree/SubjectTree.cs
index 74a22d5..1fe4609 100644
--- a/src/NATS.Server/Internal/SubjectTree/SubjectTree.cs
+++ b/src/NATS.Server/Internal/SubjectTree/SubjectTree.cs
@@ -1,7 +1,616 @@
+// Go reference: server/stree/stree.go
namespace NATS.Server.Internal.SubjectTree;
-// Go reference: server/stree/stree.go
-// TODO: Port Adaptive Radix Tree for per-subject state
+/// <summary>
+/// SubjectTree is an adaptive radix trie (ART) for storing subject information on literal subjects.
+/// Uses dynamic nodes, path compression and lazy expansion.
+/// Go reference: server/stree/stree.go
+/// NOTE(review): generic type arguments (the value type T) appear to have been
+/// stripped from this file — confirm against the original port.
+/// </summary>
public class SubjectTree
{
+ internal INode? Root;
+ private int _size;
+
+ /// <summary>
+ /// Returns the number of elements stored.
+ /// </summary>
+ public int Size => _size;
+
+ /// <summary>
+ /// Empties the tree and returns it. If called on a new tree, returns it unchanged.
+ /// </summary>
+ public SubjectTree Empty()
+ {
+ Root = null;
+ _size = 0;
+ return this;
+ }
+
+ /// <summary>
+ /// Insert a value into the tree. Returns (oldValue, existed).
+ /// If the subject already existed, oldValue is the previous value and existed is true.
+ /// Subjects containing the NoPivot sentinel byte (127) are silently rejected.
+ /// </summary>
+ public (T? OldValue, bool Existed) Insert(ReadOnlySpan subject, T value)
+ {
+ // Make sure we never insert anything with a noPivot byte.
+ if (subject.IndexOf(Parts.NoPivot) >= 0)
+ {
+ return (default, false);
+ }
+
+ var (old, updated) = InsertInternal(ref Root, subject.ToArray(), value, 0);
+ if (!updated)
+ {
+ _size++;
+ }
+ return (old, updated);
+ }
+
+ /// <summary>
+ /// Find the value for an exact subject match. Returns (value, found).
+ /// </summary>
+ public (T? Value, bool Found) Find(ReadOnlySpan subject)
+ {
+ int si = 0;
+ var n = Root;
+ while (n != null)
+ {
+ if (n.IsLeaf)
+ {
+ var ln = (Leaf)n;
+ if (ln.Match(subject[si..]))
+ {
+ return (ln.Value, true);
+ }
+ return (default, false);
+ }
+
+ // We are a node type here, grab meta portion.
+ var bn = n.Base!;
+ if (bn.Prefix.Length > 0)
+ {
+ // If the subject is shorter than the prefix, the clamped slice has
+ // fewer bytes than Prefix, SequenceEqual fails, and we return not found.
+ var end = Math.Min(si + bn.Prefix.Length, subject.Length);
+ if (!subject[si..end].SequenceEqual(bn.Prefix))
+ {
+ return (default, false);
+ }
+ si += bn.Prefix.Length;
+ }
+
+ var childRef = n.FindChild(Parts.Pivot(subject, si));
+ if (childRef != null)
+ {
+ n = childRef.Node;
+ }
+ else
+ {
+ return (default, false);
+ }
+ }
+ return (default, false);
+ }
+
+ /// <summary>
+ /// Delete the item for the given subject.
+ /// Returns (deletedValue, wasFound).
+ /// </summary>
+ public (T? Value, bool Found) Delete(ReadOnlySpan subject)
+ {
+ if (subject.Length == 0)
+ {
+ return (default, false);
+ }
+
+ var (val, deleted) = DeleteInternal(ref Root, subject.ToArray(), 0);
+ if (deleted)
+ {
+ _size--;
+ }
+ return (val, deleted);
+ }
+
+ /// <summary>
+ /// Match against a filter subject with wildcards and invoke the callback for each matched value.
+ /// </summary>
+ public void Match(ReadOnlySpan filter, Action? callback)
+ {
+ if (Root == null || filter.Length == 0 || callback == null)
+ {
+ return;
+ }
+
+ var parts = Parts.GenParts(filter);
+ MatchInternal(Root, parts, [], (subject, val) =>
+ {
+ callback(subject, val);
+ return true;
+ });
+ }
+
+ /// <summary>
+ /// Match against a filter subject with wildcards and invoke the callback for each matched value.
+ /// Returning false from the callback stops matching immediately.
+ /// Returns true if matching ran to completion, false if callback stopped it early.
+ /// </summary>
+ public bool MatchUntil(ReadOnlySpan filter, Func? callback)
+ {
+ if (Root == null || filter.Length == 0 || callback == null)
+ {
+ return true;
+ }
+
+ var parts = Parts.GenParts(filter);
+ return MatchInternal(Root, parts, [], callback);
+ }
+
+ /// <summary>
+ /// Walk all entries in lexicographic order. The callback can return false to terminate.
+ /// </summary>
+ public void IterOrdered(Func cb)
+ {
+ if (Root == null) return;
+ IterInternal(Root, [], ordered: true, cb);
+ }
+
+ /// <summary>
+ /// Walk all entries in no guaranteed order. The callback can return false to terminate.
+ /// </summary>
+ public void IterFast(Func cb)
+ {
+ if (Root == null) return;
+ IterInternal(Root, [], ordered: false, cb);
+ }
+
+ #region Internal Methods
+
+ /// <summary>
+ /// Internal recursive insert. nodeRef is the parent's slot for the current
+ /// subtree so the subtree can be replaced in place (split/grow).
+ /// Go reference: server/stree/stree.go:insert
+ /// </summary>
+ private (T? OldValue, bool Updated) InsertInternal(ref INode? nodeRef, byte[] subject, T value, int si)
+ {
+ var n = nodeRef;
+ if (n == null)
+ {
+ // Empty slot: lazy expansion — store the whole remaining subject in a leaf.
+ nodeRef = new Leaf(subject[si..], value);
+ return (default, false);
+ }
+
+ if (n.IsLeaf)
+ {
+ var ln = (Leaf)n;
+ if (ln.Match(subject.AsSpan(si)))
+ {
+ // Replace with new value.
+ var old = ln.Value;
+ ln.Value = value;
+ return (old, true);
+ }
+
+ // Here we need to split this leaf.
+ int cpi = Parts.CommonPrefixLen(ln.Suffix, subject.AsSpan(si));
+ var nn = new Node4(subject.AsSpan(si, cpi));
+ ln.SetSuffix(ln.Suffix.AsSpan(cpi));
+ si += cpi;
+
+ // Make sure we have different pivot, normally this will be the case unless we have overflowing prefixes.
+ byte p = Parts.Pivot(ln.Suffix, 0);
+ if (cpi > 0 && si < subject.Length && p == subject[si])
+ {
+ // We need to split the original leaf. Recursively call into insert.
+ InsertInternal(ref nodeRef, subject, value, si);
+ // Now add the updated version of nodeRef as a child to the new node4.
+ nn.AddChild(p, nodeRef!);
+ }
+ else
+ {
+ // Can just add this new leaf as a sibling.
+ var nl = new Leaf(subject.AsSpan(si), value);
+ nn.AddChild(Parts.Pivot(nl.Suffix, 0), nl);
+ // Add back original.
+ nn.AddChild(Parts.Pivot(ln.Suffix, 0), ln);
+ }
+
+ nodeRef = nn;
+ return (default, false);
+ }
+
+ // Non-leaf nodes.
+ var bn = n.Base!;
+ if (bn.Prefix.Length > 0)
+ {
+ int cpi = Parts.CommonPrefixLen(bn.Prefix, subject.AsSpan(si));
+ int pli = bn.Prefix.Length;
+ if (cpi >= pli)
+ {
+ // Move past this node.
+ si += pli;
+ var childRef = n.FindChild(Parts.Pivot(subject, si));
+ if (childRef != null)
+ {
+ var childNode = childRef.Node;
+ var result = InsertInternal(ref childNode, subject, value, si);
+ childRef.Node = childNode;
+ return result;
+ }
+ if (n.IsFull)
+ {
+ n = n.Grow();
+ nodeRef = n;
+ }
+ n.AddChild(Parts.Pivot(subject, si), new Leaf(subject.AsSpan(si), value));
+ return (default, false);
+ }
+ else
+ {
+ // We did not match the prefix completely here.
+ var prefix = subject.AsSpan(si, cpi);
+ si += prefix.Length;
+ // We will insert a new node4 and attach our current node below after adjusting prefix.
+ var nn = new Node4(prefix);
+ // Shift the prefix for our original node.
+ n.SetPrefix(bn.Prefix.AsSpan(cpi));
+ nn.AddChild(Parts.Pivot(bn.Prefix, 0), n);
+ // Add in our new leaf.
+ nn.AddChild(Parts.Pivot(subject.AsSpan(si), 0), new Leaf(subject.AsSpan(si), value));
+ // Update our node reference.
+ nodeRef = nn;
+ }
+ }
+ else
+ {
+ var childRef = n.FindChild(Parts.Pivot(subject, si));
+ if (childRef != null)
+ {
+ var childNode = childRef.Node;
+ var result = InsertInternal(ref childNode, subject, value, si);
+ childRef.Node = childNode;
+ return result;
+ }
+ // No prefix and no matched child, so add in new leafnode as needed.
+ if (n.IsFull)
+ {
+ n = n.Grow();
+ nodeRef = n;
+ }
+ n.AddChild(Parts.Pivot(subject, si), new Leaf(subject.AsSpan(si), value));
+ }
+
+ return (default, false);
+ }
+
+ /// <summary>
+ /// Internal recursive delete with compaction (Shrink) of the parent node
+ /// when a deletion leaves it sparse enough.
+ /// Go reference: server/stree/stree.go:delete
+ /// </summary>
+ private (T? Value, bool Deleted) DeleteInternal(ref INode? nodeRef, byte[] subject, int si)
+ {
+ if (nodeRef == null || subject.Length == 0)
+ {
+ return (default, false);
+ }
+
+ var n = nodeRef;
+ if (n.IsLeaf)
+ {
+ var ln = (Leaf)n;
+ if (ln.Match(subject.AsSpan(si)))
+ {
+ nodeRef = null;
+ return (ln.Value, true);
+ }
+ return (default, false);
+ }
+
+ // Not a leaf node.
+ var bn = n.Base!;
+ if (bn.Prefix.Length > 0)
+ {
+ // subject could be shorter and would panic on bad index.
+ if (subject.Length < si + bn.Prefix.Length)
+ {
+ return (default, false);
+ }
+ if (!subject.AsSpan(si, bn.Prefix.Length).SequenceEqual(bn.Prefix))
+ {
+ return (default, false);
+ }
+ si += bn.Prefix.Length;
+ }
+
+ var p = Parts.Pivot(subject, si);
+ var childRef = n.FindChild(p);
+ if (childRef == null)
+ {
+ return (default, false);
+ }
+
+ var nn = childRef.Node;
+ if (nn != null && nn.IsLeaf)
+ {
+ var ln = (Leaf)nn;
+ if (ln.Match(subject.AsSpan(si)))
+ {
+ n.DeleteChild(p);
+
+ var sn = n.Shrink();
+ if (sn != null)
+ {
+ // Make sure to copy prefix so we force a copy below.
+ var pre = bn.Prefix.ToArray();
+
+ // Need to fix up prefixes/suffixes.
+ if (sn.IsLeaf)
+ {
+ var shrunkLeaf = (Leaf)sn;
+ // Prepend old prefix to leaf suffix.
+ var newSuffix = new byte[pre.Length + shrunkLeaf.Suffix.Length];
+ pre.CopyTo(newSuffix, 0);
+ shrunkLeaf.Suffix.CopyTo(newSuffix, pre.Length);
+ shrunkLeaf.Suffix = newSuffix;
+ }
+ else
+ {
+ // We are a node here, we need to add in the old prefix.
+ if (pre.Length > 0)
+ {
+ var bsn = sn.Base!;
+ var newPrefix = new byte[pre.Length + bsn.Prefix.Length];
+ pre.CopyTo(newPrefix, 0);
+ bsn.Prefix.CopyTo(newPrefix, pre.Length);
+ sn.SetPrefix(newPrefix);
+ }
+ }
+ nodeRef = sn;
+ }
+
+ return (ln.Value, true);
+ }
+ return (default, false);
+ }
+
+ // Recurse into child node.
+ var childNode = childRef.Node;
+ var result = DeleteInternal(ref childNode, subject, si);
+ childRef.Node = childNode;
+ return result;
+ }
+
+ /// <summary>
+ /// Internal recursive match. pre accumulates the subject bytes consumed so
+ /// far so callbacks receive the full subject. Returns false if a callback
+ /// terminated matching early.
+ /// Go reference: server/stree/stree.go:match
+ /// </summary>
+ internal bool MatchInternal(INode? n, ReadOnlyMemory[] parts, byte[] pre, Func cb)
+ {
+ // Capture if we are sitting on a terminal fwc.
+ bool hasFWC = false;
+ if (parts.Length > 0 && parts[^1].Length > 0 && parts[^1].Span[0] == Parts.Fwc)
+ {
+ hasFWC = true;
+ }
+
+ while (n != null)
+ {
+ var (nparts, matched) = n.MatchParts(parts);
+ if (!matched)
+ {
+ return true;
+ }
+
+ // We have matched here. If we are a leaf and have exhausted all parts or have a FWC, fire callback.
+ if (n.IsLeaf)
+ {
+ if (nparts.Length == 0 || (hasFWC && nparts.Length == 1))
+ {
+ var ln = (Leaf)n;
+ var subject = Concat(pre, ln.Suffix);
+ if (!cb(subject, ln.Value))
+ {
+ return false;
+ }
+ }
+ return true;
+ }
+
+ // We have normal nodes here. Append our prefix.
+ var bn = n.Base!;
+ if (bn.Prefix.Length > 0)
+ {
+ pre = Concat(pre, bn.Prefix);
+ }
+
+ // Check our remaining parts.
+ if (nparts.Length == 0 && !hasFWC)
+ {
+ // We are a node with no parts left and we are not looking at a fwc.
+ bool hasTermPWC = false;
+ if (parts.Length > 0 && parts[^1].Length == 1 && parts[^1].Span[0] == Parts.Pwc)
+ {
+ nparts = parts[^1..];
+ hasTermPWC = true;
+ }
+
+ foreach (var cn in n.Children())
+ {
+ if (cn == null) continue;
+
+ if (cn.IsLeaf)
+ {
+ var ln = (Leaf)cn;
+ if (ln.Suffix.Length == 0)
+ {
+ var subject = Concat(pre, ln.Suffix);
+ if (!cb(subject, ln.Value))
+ {
+ return false;
+ }
+ }
+ else if (hasTermPWC && ln.Suffix.AsSpan().IndexOf(Parts.Tsep) < 0)
+ {
+ // A terminal '*' matches a leaf only if its suffix is a single token.
+ var subject = Concat(pre, ln.Suffix);
+ if (!cb(subject, ln.Value))
+ {
+ return false;
+ }
+ }
+ }
+ else if (hasTermPWC)
+ {
+ if (!MatchInternal(cn, nparts, pre, cb))
+ {
+ return false;
+ }
+ }
+ }
+ return true;
+ }
+
+ // If we are sitting on a terminal fwc, put back and continue.
+ if (hasFWC && nparts.Length == 0)
+ {
+ nparts = parts[^1..];
+ }
+
+ // Here we are a node type with a partial match.
+ // Check if the first part is a wildcard.
+ var fp = nparts[0];
+ var pvt = Parts.Pivot(fp.Span, 0);
+
+ if (fp.Length == 1 && (pvt == Parts.Pwc || pvt == Parts.Fwc))
+ {
+ // We need to iterate over all children here for the current node
+ // to see if we match further down.
+ foreach (var cn in n.Children())
+ {
+ if (cn != null)
+ {
+ if (!MatchInternal(cn, nparts, pre, cb))
+ {
+ return false;
+ }
+ }
+ }
+ return true;
+ }
+
+ // Here we have normal traversal, so find the next child.
+ var next = n.FindChild(pvt);
+ if (next == null)
+ {
+ return true;
+ }
+ n = next.Node;
+ parts = nparts;
+ }
+ return true;
+ }
+
+ /// <summary>
+ /// Internal iter function to walk nodes. When ordered is true, children are
+ /// sorted by path for lexicographic traversal; otherwise storage order is used.
+ /// Go reference: server/stree/stree.go:iter
+ /// </summary>
+ internal bool IterInternal(INode n, byte[] pre, bool ordered, Func cb)
+ {
+ if (n.IsLeaf)
+ {
+ var ln = (Leaf)n;
+ return cb(Concat(pre, ln.Suffix), ln.Value);
+ }
+
+ // We are normal node here.
+ var bn = n.Base!;
+ if (bn.Prefix.Length > 0)
+ {
+ pre = Concat(pre, bn.Prefix);
+ }
+
+ if (!ordered)
+ {
+ foreach (var cn in n.Children())
+ {
+ if (cn == null) continue;
+ if (!IterInternal(cn, pre, false, cb))
+ {
+ return false;
+ }
+ }
+ return true;
+ }
+
+ // Collect non-null children and sort by path for lexicographic order.
+ var children = n.Children().Where(c => c != null).ToList();
+ children.Sort((a, b) =>
+ {
+ var pa = a!.Path();
+ var pb = b!.Path();
+ return pa.AsSpan().SequenceCompareTo(pb);
+ });
+
+ foreach (var cn in children)
+ {
+ if (!IterInternal(cn!, pre, true, cb))
+ {
+ return false;
+ }
+ }
+ return true;
+ }
+
+ /// <summary>
+ /// Helper to concatenate two byte arrays. Returns one of the inputs
+ /// unchanged (not a copy) when the other is empty.
+ /// </summary>
+ private static byte[] Concat(byte[] a, byte[] b)
+ {
+ if (a.Length == 0) return b;
+ if (b.Length == 0) return a;
+ var result = new byte[a.Length + b.Length];
+ a.CopyTo(result, 0);
+ b.CopyTo(result, a.Length);
+ return result;
+ }
+
+ #endregion
+}
+
+/// <summary>
+/// Static helper methods for SubjectTree operations.
+/// </summary>
+public static class SubjectTreeHelper
+{
+ /// <summary>
+ /// Iterates the smaller of the two provided subject trees and looks for matching
+ /// entries in the other, invoking cb with (key, leftValue, rightValue) for each
+ /// key present in both. No-op if either tree is null or empty.
+ /// Go reference: server/stree/stree.go:LazyIntersect
+ /// </summary>
+ public static void LazyIntersect(SubjectTree? tl, SubjectTree? tr, Action cb)
+ {
+ if (tl == null || tr == null || tl.Root == null || tr.Root == null)
+ {
+ return;
+ }
+
+ // Iterate the smaller tree and probe the larger one for each key.
+ if (tl.Size <= tr.Size)
+ {
+ tl.IterFast((key, v1) =>
+ {
+ var (v2, ok) = tr.Find(key);
+ if (ok)
+ {
+ cb(key, v1, v2!);
+ }
+ return true;
+ });
+ }
+ else
+ {
+ tr.IterFast((key, v2) =>
+ {
+ var (v1, ok) = tl.Find(key);
+ if (ok)
+ {
+ cb(key, v1!, v2);
+ }
+ return true;
+ });
+ }
+ }
}
diff --git a/src/NATS.Server/Internal/TimeHashWheel/HashWheel.cs b/src/NATS.Server/Internal/TimeHashWheel/HashWheel.cs
index 00ed916..7d3dd16 100644
--- a/src/NATS.Server/Internal/TimeHashWheel/HashWheel.cs
+++ b/src/NATS.Server/Internal/TimeHashWheel/HashWheel.cs
@@ -1,7 +1,414 @@
+// Go reference: server/thw/thw.go
+// Time hash wheel for efficient TTL expiration tracking.
+// Fixed-size array of slots (the wheel), each containing a dictionary of (seq, expires) entries.
+// Slot index = (expires / tickResolution) % wheelSize.
+
+using System.Buffers.Binary;
+using System.Diagnostics;
+
namespace NATS.Server.Internal.TimeHashWheel;
-// Go reference: server/thw/thw.go
-// TODO: Port time hash wheel for TTL expiration
/// <summary>
/// A timing hash wheel for efficient TTL expiration management.
/// Uses a fixed-size circular buffer of slots, where each slot holds entries
/// that expire within the same time tick. Supports O(1) add/remove and
/// efficient batch expiration scanning. Not thread safe.
/// </summary>
public class HashWheel
{
    // Go: tickDuration = int64(time.Second) — tick duration in nanoseconds.
    private const long TickDuration = 1_000_000_000;

    // Go: wheelBits = 12, wheelSize = 1 << 12 = 4096, wheelMask = 4095.
    private const int WheelBits = 12;
    internal const int WheelSize = 1 << WheelBits;
    private const int WheelMask = WheelSize - 1;

    // Go: headerLen = 17 — 1 byte magic + 2 x uint64.
    private const int HeaderLen = 17;

    // Slots are allocated lazily; null means the slot is empty.
    private Slot?[] _wheel;

    // Earliest expiration across all slots; long.MaxValue when the wheel is empty.
    private long _lowest;

    // Total number of (seq, expires) entries in the wheel.
    private ulong _count;

    public HashWheel()
    {
        _wheel = new Slot?[WheelSize];
        _lowest = long.MaxValue;
    }

    /// <summary>
    /// Gets the number of entries in the wheel.
    /// </summary>
    // Go: Count() server/thw/thw.go:190
    public ulong Count => _count;

    /// <summary>
    /// Calculates the slot position for a given expiration time.
    /// </summary>
    // Go: getPosition server/thw/thw.go:66
    private static int GetPosition(long expires)
    {
        return (int)((expires / TickDuration) & WheelMask);
    }

    /// <summary>
    /// Schedules a new timer task. If the sequence already exists in the target slot,
    /// its expiration is updated without incrementing the count.
    /// </summary>
    // Go: Add server/thw/thw.go:79
    public void Add(ulong seq, long expires)
    {
        var pos = GetPosition(expires);

        // Initialize the slot lazily.
        _wheel[pos] ??= new Slot();

        var slot = _wheel[pos]!;
        if (!slot.Entries.ContainsKey(seq))
        {
            _count++;
        }

        slot.Entries[seq] = expires;

        // Update slot's lowest expiration if this is earlier.
        if (expires < slot.Lowest)
        {
            slot.Lowest = expires;
            // Update global lowest if this is now the earliest.
            if (expires < _lowest)
            {
                _lowest = expires;
            }
        }
    }

    /// <summary>
    /// Removes a timer task. Returns true if the task was found and removed,
    /// false if the task was not found.
    /// </summary>
    // Go: Remove server/thw/thw.go:103
    // Note: slot/global lowest values are intentionally left stale here; they
    // are only ever too low, which causes at worst an extra scan on expiration.
    public bool Remove(ulong seq, long expires)
    {
        var pos = GetPosition(expires);
        var slot = _wheel[pos];

        if (slot is null)
        {
            return false;
        }

        if (!slot.Entries.Remove(seq))
        {
            return false;
        }

        _count--;

        // If the slot is empty, set it to null to free memory.
        if (slot.Entries.Count == 0)
        {
            _wheel[pos] = null;
        }

        return true;
    }

    /// <summary>
    /// Updates the expiration time of an existing timer task by removing it from
    /// the old slot and adding it to the new one.
    /// </summary>
    // Go: Update server/thw/thw.go:123
    public void Update(ulong seq, long oldExpires, long newExpires)
    {
        Remove(seq, oldExpires);
        Add(seq, newExpires);
    }

    /// <summary>
    /// Processes all expired tasks using the current wall-clock time. The callback
    /// receives each expired entry's sequence and expiration time. If the callback
    /// returns true, the entry is removed; if false, it remains for future checks.
    /// </summary>
    // Go: ExpireTasks server/thw/thw.go:133
    public void ExpireTasks(Func<ulong, long, bool> callback)
    {
        // The Go reference uses time.Now().UnixNano(): absolute Unix-epoch
        // nanoseconds. The previous Stopwatch.GetTimestamp()-based clock measures
        // time from an arbitrary origin (process/boot start), which is not
        // comparable with the absolute expirations stored by Add/Decode and would
        // also break wheels restored from a persisted snapshot.
        var nowNanos = (DateTime.UtcNow - DateTime.UnixEpoch).Ticks * 100; // 1 tick = 100 ns
        ExpireTasksInternal(nowNanos, callback);
    }

    /// <summary>
    /// Internal expiration method that accepts an explicit timestamp.
    /// Used by tests that need deterministic time control.
    /// </summary>
    // Go: expireTasks server/thw/thw.go:138
    internal void ExpireTasksInternal(long ts, Func<ulong, long, bool> callback)
    {
        // Quick return if nothing is expired.
        if (_lowest > ts)
        {
            return;
        }

        var globalLowest = long.MaxValue;
        for (var pos = 0; pos < _wheel.Length; pos++)
        {
            var slot = _wheel[pos];

            // Skip slot if nothing to expire, but still fold its lowest into the
            // global minimum so _lowest stays accurate.
            if (slot is null || slot.Lowest > ts)
            {
                if (slot is not null && slot.Lowest < globalLowest)
                {
                    globalLowest = slot.Lowest;
                }

                continue;
            }

            // Track new lowest while processing expirations. Removals are
            // deferred so we never mutate the dictionary while enumerating it.
            var slotLowest = long.MaxValue;
            var toRemove = new List<ulong>();

            foreach (var (seq, expires) in slot.Entries)
            {
                if (expires <= ts && callback(seq, expires))
                {
                    toRemove.Add(seq);
                    continue;
                }

                if (expires < slotLowest)
                {
                    slotLowest = expires;
                }
            }

            foreach (var seq in toRemove)
            {
                slot.Entries.Remove(seq);
                _count--;
            }

            // Nil out if we are empty.
            if (slot.Entries.Count == 0)
            {
                _wheel[pos] = null;
            }
            else
            {
                slot.Lowest = slotLowest;
                if (slotLowest < globalLowest)
                {
                    globalLowest = slotLowest;
                }
            }
        }

        _lowest = globalLowest;
    }

    /// <summary>
    /// Returns the earliest expiration time if it is before the given time.
    /// Returns <see cref="long.MaxValue"/> if no expirations exist before the specified time.
    /// </summary>
    // Go: GetNextExpiration server/thw/thw.go:182
    public long GetNextExpiration(long before)
    {
        if (_lowest < before)
        {
            return _lowest;
        }

        return long.MaxValue;
    }

    /// <summary>
    /// Encodes the wheel state into a binary snapshot for persistence.
    /// The high sequence number is included and will be returned on decode.
    /// Format: [1 byte magic version][8 bytes entry count][8 bytes highSeq][varint expires, uvarint seq pairs...]
    /// </summary>
    // Go: Encode server/thw/thw.go:197
    public byte[] Encode(ulong highSeq)
    {
        // Estimate capacity: header + entries * (max varint size * 2).
        var estimatedSize = HeaderLen + (int)(_count * 2 * 10);
        var buffer = new byte[estimatedSize];
        var offset = 0;

        // Magic version byte.
        buffer[offset++] = 1;

        // Entry count (little-endian uint64).
        BinaryPrimitives.WriteUInt64LittleEndian(buffer.AsSpan(offset), _count);
        offset += 8;

        // High sequence stamp (little-endian uint64).
        BinaryPrimitives.WriteUInt64LittleEndian(buffer.AsSpan(offset), highSeq);
        offset += 8;

        // Write all entries as varint(expires) + uvarint(seq) pairs.
        foreach (var slot in _wheel)
        {
            if (slot?.Entries is null)
            {
                continue;
            }

            foreach (var (seq, expires) in slot.Entries)
            {
                // Ensure buffer has enough space (a pair needs at most 20 bytes).
                if (offset + 20 > buffer.Length)
                {
                    Array.Resize(ref buffer, buffer.Length * 2);
                }

                offset += WriteVarint(buffer.AsSpan(offset), expires);
                offset += WriteUvarint(buffer.AsSpan(offset), seq);
            }
        }

        return buffer.AsSpan(0, offset).ToArray();
    }

    /// <summary>
    /// Decodes a binary-encoded snapshot and replaces the contents of this wheel.
    /// Returns the high sequence number from the snapshot and the number of bytes consumed.
    /// </summary>
    // Go: Decode server/thw/thw.go:216
    public (ulong HighSeq, int BytesRead) Decode(ReadOnlySpan<byte> buf)
    {
        if (buf.Length < HeaderLen)
        {
            throw new InvalidOperationException("Buffer too short for hash wheel header.");
        }

        if (buf[0] != 1)
        {
            throw new InvalidOperationException("Unknown hash wheel encoding version.");
        }

        // Reset the wheel.
        _wheel = new Slot?[WheelSize];
        _lowest = long.MaxValue;
        _count = 0;

        var count = BinaryPrimitives.ReadUInt64LittleEndian(buf[1..]);
        var highSeq = BinaryPrimitives.ReadUInt64LittleEndian(buf[9..]);

        var offset = HeaderLen;
        for (ulong i = 0; i < count; i++)
        {
            var (ts, tn) = ReadVarint(buf[offset..]);
            if (tn <= 0)
            {
                throw new InvalidOperationException("Unexpected end of buffer reading varint.");
            }

            var (seq, vn) = ReadUvarint(buf[(offset + tn)..]);
            if (vn <= 0)
            {
                throw new InvalidOperationException("Unexpected end of buffer reading uvarint.");
            }

            // Add maintains _count, _lowest and slot lowest values.
            Add(seq, ts);
            offset += tn + vn;
        }

        return (highSeq, offset);
    }

    // Varint encoding/decoding compatible with Go's encoding/binary.

    /// <summary>
    /// Writes a signed varint (zigzag-encoded) to the buffer.
    /// Compatible with Go's binary.AppendVarint / binary.Varint.
    /// </summary>
    private static int WriteVarint(Span<byte> buffer, long value)
    {
        // Zigzag encode: (value << 1) ^ (value >> 63)
        var zigzag = (ulong)((value << 1) ^ (value >> 63));
        return WriteUvarint(buffer, zigzag);
    }

    /// <summary>
    /// Writes an unsigned varint to the buffer.
    /// Compatible with Go's binary.AppendUvarint / binary.Uvarint.
    /// </summary>
    private static int WriteUvarint(Span<byte> buffer, ulong value)
    {
        var i = 0;
        while (value >= 0x80)
        {
            buffer[i++] = (byte)(value | 0x80);
            value >>= 7;
        }

        buffer[i++] = (byte)value;
        return i;
    }

    /// <summary>
    /// Reads a signed varint (zigzag-encoded) from the buffer.
    /// Returns the value and the number of bytes consumed (&lt;= 0 on error).
    /// </summary>
    private static (long Value, int BytesRead) ReadVarint(ReadOnlySpan<byte> buffer)
    {
        var (zigzag, n) = ReadUvarint(buffer);
        if (n <= 0)
        {
            return (0, n);
        }

        // Zigzag decode: (zigzag >> 1) ^ -(zigzag & 1)
        var value = (long)(zigzag >> 1) ^ -(long)(zigzag & 1);
        return (value, n);
    }

    /// <summary>
    /// Reads an unsigned varint from the buffer.
    /// Returns the value and the number of bytes consumed (-1 on overflow or truncation).
    /// </summary>
    private static (ulong Value, int BytesRead) ReadUvarint(ReadOnlySpan<byte> buffer)
    {
        ulong result = 0;
        var shift = 0;
        for (var i = 0; i < buffer.Length; i++)
        {
            var b = buffer[i];
            result |= (ulong)(b & 0x7F) << shift;
            if ((b & 0x80) == 0)
            {
                return (result, i + 1);
            }

            shift += 7;
            if (shift >= 64)
            {
                return (0, -1); // Overflow.
            }
        }

        return (0, -1); // Buffer too short.
    }

    /// <summary>
    /// Internal access to the wheel slots for testing encode/decode round-trip verification.
    /// </summary>
    internal Slot?[] Wheel => _wheel;

    /// <summary>
    /// Represents a single slot in the wheel containing entries that hash to the same position.
    /// </summary>
    internal sealed class Slot
    {
        // Go: slot.entries — map of sequence to expires.
        public Dictionary<ulong, long> Entries { get; } = new();

        // Go: slot.lowest — lowest expiration time in this slot.
        public long Lowest { get; set; } = long.MaxValue;
    }
}
diff --git a/tests/NATS.Server.Tests/Internal/Avl/SequenceSetTests.cs b/tests/NATS.Server.Tests/Internal/Avl/SequenceSetTests.cs
new file mode 100644
index 0000000..3e82efe
--- /dev/null
+++ b/tests/NATS.Server.Tests/Internal/Avl/SequenceSetTests.cs
@@ -0,0 +1,540 @@
+// Copyright 2024 The NATS Authors
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+using System.Diagnostics;
+using NATS.Server.Internal.Avl;
+
+namespace NATS.Server.Tests.Internal.Avl;
+
/// <summary>
/// Tests for the AVL-backed SequenceSet, ported from Go server/avl/seqset_test.go
/// and server/avl/norace_test.go.
/// </summary>
public class SequenceSetTests
{
    private const int NumEntries = SequenceSet.NumEntries; // 2048
    private const int BitsPerBucket = SequenceSet.BitsPerBucket;
    private const int NumBuckets = SequenceSet.NumBuckets;

    // Go: TestSeqSetBasics server/avl/seqset_test.go:22
    [Fact]
    public void Basics_InsertExistsDelete()
    {
        var ss = new SequenceSet();

        ulong[] seqs = [22, 222, 2000, 2, 2, 4];
        foreach (var seq in seqs)
        {
            ss.Insert(seq);
            ss.Exists(seq).ShouldBeTrue();
        }

        ss.Nodes.ShouldBe(1);
        ss.Size.ShouldBe(seqs.Length - 1); // One dup (2 appears twice)
        var (lh, rh) = ss.Heights();
        lh.ShouldBe(0);
        rh.ShouldBe(0);
    }

    // Go: TestSeqSetLeftLean server/avl/seqset_test.go:38
    [Fact]
    public void LeftLean_TreeBalancesCorrectly()
    {
        var ss = new SequenceSet();

        // Insert from high to low to create a left-leaning tree.
        for (var i = (ulong)(4 * NumEntries); i > 0; i--)
        {
            ss.Insert(i);
        }

        ss.Nodes.ShouldBe(5);
        ss.Size.ShouldBe(4 * NumEntries);
        var (lh, rh) = ss.Heights();
        lh.ShouldBe(2);
        rh.ShouldBe(1);
    }

    // Go: TestSeqSetRightLean server/avl/seqset_test.go:52
    [Fact]
    public void RightLean_TreeBalancesCorrectly()
    {
        var ss = new SequenceSet();

        // Insert from low to high to create a right-leaning tree.
        for (var i = 0UL; i < (ulong)(4 * NumEntries); i++)
        {
            ss.Insert(i);
        }

        ss.Nodes.ShouldBe(4);
        ss.Size.ShouldBe(4 * NumEntries);
        var (lh, rh) = ss.Heights();
        lh.ShouldBe(1);
        rh.ShouldBe(2);
    }

    // Go: TestSeqSetCorrectness server/avl/seqset_test.go:66
    [Fact]
    public void Correctness_RandomInsertDelete()
    {
        // Generate 100k sequences across 500k range.
        const int num = 100_000;
        const int max = 500_000;

        var rng = new Random(42);
        var set = new HashSet<ulong>();
        var ss = new SequenceSet();

        for (var i = 0; i < num; i++)
        {
            var n = (ulong)rng.NextInt64(max + 1);
            ss.Insert(n);
            set.Add(n);
        }

        for (var i = 0UL; i <= max; i++)
        {
            ss.Exists(i).ShouldBe(set.Contains(i));
        }
    }

    // Go: TestSeqSetRange server/avl/seqset_test.go:85
    [Fact]
    public void Range_IteratesInOrder()
    {
        var num = 2 * NumEntries + 22;
        var nums = new List<ulong>(num);
        for (var i = 0; i < num; i++)
        {
            nums.Add((ulong)i);
        }

        // Shuffle and insert.
        var rng = new Random(42);
        Shuffle(nums, rng);

        var ss = new SequenceSet();
        foreach (var n in nums)
        {
            ss.Insert(n);
        }

        // Range should produce ascending order.
        var result = new List<ulong>();
        ss.Range(n =>
        {
            result.Add(n);
            return true;
        });

        result.Count.ShouldBe(num);
        for (var i = 0UL; i < (ulong)num; i++)
        {
            result[(int)i].ShouldBe(i);
        }

        // Test truncating the range call.
        result.Clear();
        ss.Range(n =>
        {
            if (n >= 10)
            {
                return false;
            }

            result.Add(n);
            return true;
        });

        result.Count.ShouldBe(10);
        for (var i = 0UL; i < 10; i++)
        {
            result[(int)i].ShouldBe(i);
        }
    }

    // Go: TestSeqSetDelete server/avl/seqset_test.go:123
    [Fact]
    public void Delete_VariousPatterns()
    {
        var ss = new SequenceSet();

        ulong[] seqs = [22, 222, 2222, 2, 2, 4];
        foreach (var seq in seqs)
        {
            ss.Insert(seq);
        }

        foreach (var seq in seqs)
        {
            ss.Delete(seq);
            ss.Exists(seq).ShouldBeFalse();
        }

        ss.Root.ShouldBeNull();
    }

    // Go: TestSeqSetInsertAndDeletePedantic server/avl/seqset_test.go:139
    [Fact]
    public void InsertAndDelete_PedanticVerification()
    {
        var ss = new SequenceSet();

        var num = 50 * NumEntries + 22;
        var nums = new List<ulong>(num);
        for (var i = 0; i < num; i++)
        {
            nums.Add((ulong)i);
        }

        var rng = new Random(42);
        Shuffle(nums, rng);

        // Insert all, verify balanced after each insert.
        foreach (var n in nums)
        {
            ss.Insert(n);
            VerifyBalanced(ss);
        }

        ss.Root.ShouldNotBeNull();

        // Delete all, verify balanced after each delete.
        foreach (var n in nums)
        {
            ss.Delete(n);
            VerifyBalanced(ss);
            ss.Exists(n).ShouldBeFalse();
            if (ss.Size > 0)
            {
                ss.Root.ShouldNotBeNull();
            }
        }

        ss.Root.ShouldBeNull();
    }

    // Go: TestSeqSetMinMax server/avl/seqset_test.go:181
    [Fact]
    public void MinMax_TracksCorrectly()
    {
        var ss = new SequenceSet();

        // Simple single node.
        ulong[] seqs = [22, 222, 2222, 2, 2, 4];
        foreach (var seq in seqs)
        {
            ss.Insert(seq);
        }

        var (min, max) = ss.MinMax();
        min.ShouldBe(2UL);
        max.ShouldBe(2222UL);

        // Multi-node
        ss.Empty();

        var num = 22 * NumEntries + 22;
        var nums = new List<ulong>(num);
        for (var i = 0; i < num; i++)
        {
            nums.Add((ulong)i);
        }

        var rng = new Random(42);
        Shuffle(nums, rng);
        foreach (var n in nums)
        {
            ss.Insert(n);
        }

        (min, max) = ss.MinMax();
        min.ShouldBe(0UL);
        max.ShouldBe((ulong)(num - 1));
    }

    // Go: TestSeqSetClone server/avl/seqset_test.go:210
    [Fact]
    public void Clone_IndependentCopy()
    {
        // Generate 100k sequences across 500k range.
        const int num = 100_000;
        const int max = 500_000;

        var rng = new Random(42);
        var ss = new SequenceSet();
        for (var i = 0; i < num; i++)
        {
            ss.Insert((ulong)rng.NextInt64(max + 1));
        }

        var ssc = ss.Clone();
        ssc.Size.ShouldBe(ss.Size);
        ssc.Nodes.ShouldBe(ss.Nodes);
    }

    // Go: TestSeqSetUnion server/avl/seqset_test.go:225
    [Fact]
    public void Union_MergesSets()
    {
        var ss1 = new SequenceSet();
        var ss2 = new SequenceSet();

        ulong[] seqs1 = [22, 222, 2222, 2, 2, 4];
        foreach (var seq in seqs1)
        {
            ss1.Insert(seq);
        }

        ulong[] seqs2 = [33, 333, 3333, 3, 33_333, 333_333];
        foreach (var seq in seqs2)
        {
            ss2.Insert(seq);
        }

        var ss = SequenceSet.CreateUnion(ss1, ss2);
        ss.Size.ShouldBe(11);

        ulong[] allSeqs = [.. seqs1, .. seqs2];
        foreach (var n in allSeqs)
        {
            ss.Exists(n).ShouldBeTrue();
        }
    }

    // Go: TestSeqSetFirst server/avl/seqset_test.go:247
    [Fact]
    public void First_ReturnsMinimum()
    {
        var ss = new SequenceSet();

        ulong[] seqs = [22, 222, 2222, 222_222];
        foreach (var seq in seqs)
        {
            // Normal case where we pick first/base.
            ss.Insert(seq);
            ss.Root!.Base.ShouldBe((seq / (ulong)NumEntries) * (ulong)NumEntries);
            ss.Empty();

            // Where we set the minimum start value.
            ss.SetInitialMin(seq);
            ss.Insert(seq);
            ss.Root!.Base.ShouldBe(seq);
            ss.Empty();
        }
    }

    // Go: TestSeqSetDistinctUnion server/avl/seqset_test.go:265
    [Fact]
    public void DistinctUnion_NoOverlap()
    {
        var ss1 = new SequenceSet();
        ulong[] seqs1 = [1, 10, 100, 200];
        foreach (var seq in seqs1)
        {
            ss1.Insert(seq);
        }

        var ss2 = new SequenceSet();
        ulong[] seqs2 = [5000, 6100, 6200, 6222];
        foreach (var seq in seqs2)
        {
            ss2.Insert(seq);
        }

        var ss = ss1.Clone();
        ulong[] allSeqs = [.. seqs1, .. seqs2];

        ss.Union(ss2);
        ss.Size.ShouldBe(allSeqs.Length);
        foreach (var seq in allSeqs)
        {
            ss.Exists(seq).ShouldBeTrue();
        }
    }

    // Go: TestSeqSetDecodeV1 server/avl/seqset_test.go:289
    [Fact]
    public void DecodeV1_BackwardsCompatible()
    {
        // Encoding from v1 which was 64 buckets.
        ulong[] seqs = [22, 222, 2222, 222_222, 2_222_222];
        // The payload is split with '+' concatenation purely to keep source lines
        // manageable; the runtime string is a single base64 blob.
        var encStr =
            "FgEDAAAABQAAAABgAwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACAAAAAAAAAAAAAABAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABAADgIQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA" +
            "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABAA==";

        var enc = Convert.FromBase64String(encStr);
        var (ss, _) = SequenceSet.Decode(enc);

        ss.Size.ShouldBe(seqs.Length);
        foreach (var seq in seqs)
        {
            ss.Exists(seq).ShouldBeTrue();
        }
    }

    // Go: TestNoRaceSeqSetSizeComparison server/avl/norace_test.go:33
    [Fact]
    public void SizeComparison_LargeSet()
    {
        // Create 5M random entries out of 7M range.
        const int num = 5_000_000;
        const int max = 7_000_000;

        var rng = new Random(42);
        var seqs = new ulong[num];
        for (var i = 0; i < num; i++)
        {
            seqs[i] = (ulong)rng.NextInt64(max + 1);
        }

        // Insert into a set to compare.
        var dmap = new HashSet<ulong>(num);
        foreach (var n in seqs)
        {
            dmap.Add(n);
        }

        // Insert into SequenceSet.
        var ss = new SequenceSet();
        foreach (var n in seqs)
        {
            ss.Insert(n);
        }

        // Verify sizes match.
        ss.Size.ShouldBe(dmap.Count);

        // Verify SequenceSet uses very few nodes relative to its element count.
        // With 2048 entries per node and 7M range, we expect ~ceil(7M/2048) = ~3419 nodes at most.
        ss.Nodes.ShouldBeLessThan(5000);
    }

    // Go: TestNoRaceSeqSetEncodeLarge server/avl/norace_test.go:81
    [Fact]
    public void EncodeLarge_RoundTrips()
    {
        const int num = 2_500_000;
        const int max = 5_000_000;

        var rng = new Random(42);
        var ss = new SequenceSet();
        for (var i = 0; i < num; i++)
        {
            ss.Insert((ulong)rng.NextInt64(max + 1));
        }

        var sw = Stopwatch.StartNew();
        var buf = ss.Encode();
        sw.Stop();

        // Encode should be fast (the Go test uses 1ms, we allow more for .NET JIT).
        sw.Elapsed.ShouldBeLessThan(TimeSpan.FromSeconds(1));

        sw.Restart();
        var (ss2, bytesRead) = SequenceSet.Decode(buf);
        sw.Stop();

        sw.Elapsed.ShouldBeLessThan(TimeSpan.FromSeconds(1));
        bytesRead.ShouldBe(buf.Length);
        ss2.Nodes.ShouldBe(ss.Nodes);
        ss2.Size.ShouldBe(ss.Size);
    }

    // Go: TestNoRaceSeqSetRelativeSpeed server/avl/norace_test.go:123
    [Fact]
    public void RelativeSpeed_Performance()
    {
        const int num = 1_000_000;
        const int max = 3_000_000;

        var rng = new Random(42);
        var seqs = new ulong[num];
        for (var i = 0; i < num; i++)
        {
            seqs[i] = (ulong)rng.NextInt64(max + 1);
        }

        // SequenceSet insert.
        var sw = Stopwatch.StartNew();
        var ss = new SequenceSet();
        foreach (var n in seqs)
        {
            ss.Insert(n);
        }

        var ssInsert = sw.Elapsed;

        // SequenceSet lookup.
        sw.Restart();
        foreach (var n in seqs)
        {
            ss.Exists(n).ShouldBeTrue();
        }

        var ssLookup = sw.Elapsed;

        // HashSet insert.
        sw.Restart();
        var dmap = new HashSet<ulong>();
        foreach (var n in seqs)
        {
            dmap.Add(n);
        }

        var mapInsert = sw.Elapsed;

        // HashSet lookup.
        sw.Restart();
        foreach (var n in seqs)
        {
            dmap.Contains(n).ShouldBeTrue();
        }

        var mapLookup = sw.Elapsed;

        // Relaxed bounds: SequenceSet insert should be no more than 10x slower.
        // (.NET JIT and test host overhead can be significant vs Go's simpler runtime.)
        ssInsert.ShouldBeLessThan(mapInsert * 10);
        ssLookup.ShouldBeLessThan(mapLookup * 10);
    }

    /// <summary>
    /// Verifies the AVL tree is balanced at every node.
    /// </summary>
    private static void VerifyBalanced(SequenceSet ss)
    {
        if (ss.Root == null)
        {
            return;
        }

        // Check all node heights and balance factors.
        SequenceSet.Node.NodeIter(ss.Root, n =>
        {
            var expectedHeight = SequenceSet.Node.MaxHeight(n) + 1;
            n.Height.ShouldBe(expectedHeight, $"Node height is wrong for node with base {n.Base}");
        });

        var bf = SequenceSet.Node.BalanceFactor(ss.Root);
        bf.ShouldBeInRange(-1, 1, "Tree is unbalanced at root");
    }

    /// <summary>
    /// Fisher-Yates shuffle.
    /// </summary>
    private static void Shuffle(List<ulong> list, Random rng)
    {
        for (var i = list.Count - 1; i > 0; i--)
        {
            var j = rng.Next(i + 1);
            (list[i], list[j]) = (list[j], list[i]);
        }
    }
}
diff --git a/tests/NATS.Server.Tests/Internal/Gsl/GenericSubjectListTests.cs b/tests/NATS.Server.Tests/Internal/Gsl/GenericSubjectListTests.cs
new file mode 100644
index 0000000..ac15c99
--- /dev/null
+++ b/tests/NATS.Server.Tests/Internal/Gsl/GenericSubjectListTests.cs
@@ -0,0 +1,429 @@
+// Go reference: server/gsl/gsl_test.go
+// Tests for GenericSubjectList trie-based subject matching.
+
+using NATS.Server.Internal.Gsl;
+
+namespace NATS.Server.Tests.Internal.Gsl;
+
+public class GenericSubjectListTests
+{
/// <summary>
/// Helper: counts how many stored values match the given subject by running
/// <c>Match</c> and tallying callback invocations.
/// </summary>
// The generic parameter was stripped in extraction; the surviving
// "where T : IEquatable" constraint fixes the original shape as CountMatches<T>.
private static int CountMatches<T>(GenericSubjectList<T> s, string subject) where T : IEquatable<T>
{
    var count = 0;
    s.Match(subject, _ => count++);
    return count;
}
+
// Go: TestGenericSublistInit server/gsl/gsl_test.go:23
[Fact]
public void Init_EmptyList()
{
    var s = new GenericSubjectList<int>();
    s.Count.ShouldBe(0u);
}

// Go: TestGenericSublistInsertCount server/gsl/gsl_test.go:29
[Fact]
public void InsertCount_TracksCorrectly()
{
    var s = new GenericSubjectList<int>();
    s.Insert("foo", 1);
    s.Insert("bar", 2);
    s.Insert("foo.bar", 3);
    s.Count.ShouldBe(3u);
}

// Go: TestGenericSublistSimple server/gsl/gsl_test.go:37
[Fact]
public void Simple_ExactMatch()
{
    var s = new GenericSubjectList<int>();
    s.Insert("foo", 1);
    CountMatches(s, "foo").ShouldBe(1);
}

// Go: TestGenericSublistSimpleMultiTokens server/gsl/gsl_test.go:43
[Fact]
public void SimpleMultiTokens_Match()
{
    var s = new GenericSubjectList<int>();
    s.Insert("foo.bar.baz", 1);
    CountMatches(s, "foo.bar.baz").ShouldBe(1);
}

// Go: TestGenericSublistPartialWildcard server/gsl/gsl_test.go:49
[Fact]
public void PartialWildcard_StarMatches()
{
    var s = new GenericSubjectList<int>();
    s.Insert("a.b.c", 1);
    s.Insert("a.*.c", 2);
    CountMatches(s, "a.b.c").ShouldBe(2);
}

// Go: TestGenericSublistPartialWildcardAtEnd server/gsl/gsl_test.go:56
[Fact]
public void PartialWildcardAtEnd_StarMatches()
{
    var s = new GenericSubjectList<int>();
    s.Insert("a.b.c", 1);
    s.Insert("a.b.*", 2);
    CountMatches(s, "a.b.c").ShouldBe(2);
}

// Go: TestGenericSublistFullWildcard server/gsl/gsl_test.go:63
[Fact]
public void FullWildcard_GreaterThanMatches()
{
    var s = new GenericSubjectList<int>();
    s.Insert("a.b.c", 1);
    s.Insert("a.>", 2);
    CountMatches(s, "a.b.c").ShouldBe(2);
    CountMatches(s, "a.>").ShouldBe(1);
}

// Go: TestGenericSublistRemove server/gsl/gsl_test.go:71
[Fact]
public void Remove_DecreasesCount()
{
    var s = new GenericSubjectList<int>();

    s.Insert("a.b.c.d", 1);
    s.Count.ShouldBe(1u);
    CountMatches(s, "a.b.c.d").ShouldBe(1);

    s.Remove("a.b.c.d", 1);
    s.Count.ShouldBe(0u);
    CountMatches(s, "a.b.c.d").ShouldBe(0);
}

// Go: TestGenericSublistRemoveWildcard server/gsl/gsl_test.go:83
[Fact]
public void RemoveWildcard_CleansUp()
{
    var s = new GenericSubjectList<int>();

    s.Insert("a.b.c.d", 11);
    s.Insert("a.b.*.d", 22);
    s.Insert("a.b.>", 33);
    s.Count.ShouldBe(3u);
    CountMatches(s, "a.b.c.d").ShouldBe(3);

    s.Remove("a.b.*.d", 22);
    s.Count.ShouldBe(2u);
    CountMatches(s, "a.b.c.d").ShouldBe(2);

    s.Remove("a.b.>", 33);
    s.Count.ShouldBe(1u);
    CountMatches(s, "a.b.c.d").ShouldBe(1);

    s.Remove("a.b.c.d", 11);
    s.Count.ShouldBe(0u);
    CountMatches(s, "a.b.c.d").ShouldBe(0);
}

// Go: TestGenericSublistRemoveCleanup server/gsl/gsl_test.go:105
[Fact]
public void RemoveCleanup_PrunesEmptyNodes()
{
    var s = new GenericSubjectList<int>();
    s.NumLevels().ShouldBe(0);
    s.Insert("a.b.c.d.e.f", 1);
    s.NumLevels().ShouldBe(6);
    s.Remove("a.b.c.d.e.f", 1);
    s.NumLevels().ShouldBe(0);
}

// Go: TestGenericSublistRemoveCleanupWildcards server/gsl/gsl_test.go:114
[Fact]
public void RemoveCleanupWildcards_PrunesEmptyNodes()
{
    var s = new GenericSubjectList<int>();
    s.NumLevels().ShouldBe(0);
    s.Insert("a.b.*.d.e.>", 1);
    s.NumLevels().ShouldBe(6);
    s.Remove("a.b.*.d.e.>", 1);
    s.NumLevels().ShouldBe(0);
}
+
// Go: TestGenericSublistInvalidSubjectsInsert server/gsl/gsl_test.go:123
[Fact]
public void InvalidSubjectsInsert_RejectsInvalid()
{
    var s = new GenericSubjectList<int>();

    // Empty tokens and FWC not terminal.
    // NOTE(review): the Should.Throw type argument was lost in extraction —
    // ArgumentException is the idiomatic choice for invalid subject input,
    // but confirm against GenericSubjectList.Insert's actual throw sites.
    Should.Throw<ArgumentException>(() => s.Insert(".foo", 1));
    Should.Throw<ArgumentException>(() => s.Insert("foo.", 1));
    Should.Throw<ArgumentException>(() => s.Insert("foo..bar", 1));
    Should.Throw<ArgumentException>(() => s.Insert("foo.bar..baz", 1));
    Should.Throw<ArgumentException>(() => s.Insert("foo.>.baz", 1));
}

// Go: TestGenericSublistBadSubjectOnRemove server/gsl/gsl_test.go:134
[Fact]
public void BadSubjectOnRemove_RejectsInvalid()
{
    var s = new GenericSubjectList<int>();
    // NOTE(review): exception types reconstructed — see note above on Insert.
    Should.Throw<ArgumentException>(() => s.Insert("a.b..d", 1));
    Should.Throw<ArgumentException>(() => s.Remove("a.b..d", 1));
    Should.Throw<ArgumentException>(() => s.Remove("a.>.b", 1));
}
+
// Go: TestGenericSublistTwoTokenPubMatchSingleTokenSub server/gsl/gsl_test.go:141
[Fact]
public void TwoTokenPub_DoesNotMatchSingleTokenSub()
{
    var s = new GenericSubjectList<int>();
    s.Insert("foo", 1);
    CountMatches(s, "foo").ShouldBe(1);
    CountMatches(s, "foo.bar").ShouldBe(0);
}

// Go: TestGenericSublistInsertWithWildcardsAsLiterals server/gsl/gsl_test.go:148
[Fact]
public void InsertWithWildcardsAsLiterals_TreatsAsLiteral()
{
    var s = new GenericSubjectList<int>();
    var subjects = new[] { "foo.*-", "foo.>-" };
    for (var i = 0; i < subjects.Length; i++)
    {
        s.Insert(subjects[i], i);
        CountMatches(s, "foo.bar").ShouldBe(0);
        CountMatches(s, subjects[i]).ShouldBe(1);
    }
}

// Go: TestGenericSublistRemoveWithWildcardsAsLiterals server/gsl/gsl_test.go:157
[Fact]
public void RemoveWithWildcardsAsLiterals_RemovesCorrectly()
{
    var s = new GenericSubjectList<int>();
    var subjects = new[] { "foo.*-", "foo.>-" };
    for (var i = 0; i < subjects.Length; i++)
    {
        s.Insert(subjects[i], i);
        CountMatches(s, "foo.bar").ShouldBe(0);
        CountMatches(s, subjects[i]).ShouldBe(1);
        // NOTE(review): the Should.Throw type argument was lost in extraction;
        // this Remove targets a subject that was never inserted (not-found),
        // so the exact exception type must be confirmed against Remove's code.
        Should.Throw<Exception>(() => s.Remove("foo.bar", i));
        s.Count.ShouldBe(1u);
        s.Remove(subjects[i], i);
        s.Count.ShouldBe(0u);
    }
}
+
// Go: TestGenericSublistMatchWithEmptyTokens server/gsl/gsl_test.go:170
[Theory]
[InlineData(".foo")]
[InlineData("..foo")]
[InlineData("foo..")]
[InlineData("foo.")]
[InlineData("foo..bar")]
[InlineData("foo...bar")]
public void MatchWithEmptyTokens_HandlesEdgeCase(string subject)
{
    var s = new GenericSubjectList<int>();
    s.Insert(">", 1);
    CountMatches(s, subject).ShouldBe(0);
}

// Go: TestGenericSublistHasInterest server/gsl/gsl_test.go:180
[Fact]
public void HasInterest_ReturnsTrueForMatchingSubjects()
{
    var s = new GenericSubjectList<int>();
    s.Insert("foo", 11);

    // Expect to find that "foo" matches but "bar" doesn't.
    s.HasInterest("foo").ShouldBeTrue();
    s.HasInterest("bar").ShouldBeFalse();

    // Call Match on a subject we know there is no match.
    CountMatches(s, "bar").ShouldBe(0);
    s.HasInterest("bar").ShouldBeFalse();

    // Remove fooSub and check interest again
    s.Remove("foo", 11);
    s.HasInterest("foo").ShouldBeFalse();

    // Try with partial wildcard *
    s.Insert("foo.*", 22);
    s.HasInterest("foo").ShouldBeFalse();
    s.HasInterest("foo.bar").ShouldBeTrue();
    s.HasInterest("foo.bar.baz").ShouldBeFalse();

    // Remove sub, there should be no interest
    s.Remove("foo.*", 22);
    s.HasInterest("foo").ShouldBeFalse();
    s.HasInterest("foo.bar").ShouldBeFalse();
    s.HasInterest("foo.bar.baz").ShouldBeFalse();

    // Try with full wildcard >
    s.Insert("foo.>", 33);
    s.HasInterest("foo").ShouldBeFalse();
    s.HasInterest("foo.bar").ShouldBeTrue();
    s.HasInterest("foo.bar.baz").ShouldBeTrue();

    s.Remove("foo.>", 33);
    s.HasInterest("foo").ShouldBeFalse();
    s.HasInterest("foo.bar").ShouldBeFalse();
    s.HasInterest("foo.bar.baz").ShouldBeFalse();

    // Try with *.>
    s.Insert("*.>", 44);
    s.HasInterest("foo").ShouldBeFalse();
    s.HasInterest("foo.bar").ShouldBeTrue();
    s.HasInterest("foo.baz").ShouldBeTrue();
    s.Remove("*.>", 44);

    // Try with *.bar
    s.Insert("*.bar", 55);
    s.HasInterest("foo").ShouldBeFalse();
    s.HasInterest("foo.bar").ShouldBeTrue();
    s.HasInterest("foo.baz").ShouldBeFalse();
    s.Remove("*.bar", 55);

    // Try with *
    s.Insert("*", 66);
    s.HasInterest("foo").ShouldBeTrue();
    s.HasInterest("foo.bar").ShouldBeFalse();
    s.Remove("*", 66);
}

// Go: TestGenericSublistHasInterestOverlapping server/gsl/gsl_test.go:237
[Fact]
public void HasInterestOverlapping_HandlesOverlap()
{
    var s = new GenericSubjectList<int>();
    s.Insert("stream.A.child", 11);
    s.Insert("stream.*", 11);
    s.HasInterest("stream.A.child").ShouldBeTrue();
    s.HasInterest("stream.A").ShouldBeTrue();
}
+
+ // Go: TestGenericSublistHasInterestStartingInRace server/gsl/gsl_test.go:247
+ [Fact]
+ public async Task HasInterestStartingIn_ThreadSafe()
+ {
+ var s = new GenericSubjectList();
+
+ // Pre-populate with some patterns
+ for (var i = 0; i < 10; i++)
+ {
+ s.Insert("foo.bar.baz", i);
+ s.Insert("foo.*.baz", i + 10);
+ s.Insert("foo.>", i + 20);
+ }
+
+ const int iterations = 1000;
+ var tasks = new List<Task>();
+
+ // Task 1: repeatedly call HasInterestStartingIn
+ tasks.Add(Task.Run(() =>
+ {
+ for (var i = 0; i < iterations; i++)
+ {
+ s.HasInterestStartingIn("foo");
+ s.HasInterestStartingIn("foo.bar");
+ s.HasInterestStartingIn("foo.bar.baz");
+ s.HasInterestStartingIn("other.subject");
+ }
+ }));
+
+ // Task 2: repeatedly modify the sublist
+ tasks.Add(Task.Run(() =>
+ {
+ for (var i = 0; i < iterations; i++)
+ {
+ var val = 1000 + i;
+ var ch = (char)('a' + (i % 26));
+ s.Insert($"test.subject.{ch}", val);
+ s.Insert("foo.*.test", val);
+ s.Remove($"test.subject.{ch}", val);
+ s.Remove("foo.*.test", val);
+ }
+ }));
+
+ // Task 3: also call HasInterest (which does lock)
+ tasks.Add(Task.Run(() =>
+ {
+ for (var i = 0; i < iterations; i++)
+ {
+ s.HasInterest("foo.bar.baz");
+ s.HasInterest("foo.something.baz");
+ }
+ }));
+
+ // Wait for all tasks - should not throw (no deadlocks or data races)
+ await Task.WhenAll(tasks);
+ }
+
+ // Go: TestGenericSublistNumInterest server/gsl/gsl_test.go:298
+ [Fact]
+ public void NumInterest_CountsMatchingSubscriptions()
+ {
+ var s = new GenericSubjectList();
+ s.Insert("foo", 11);
+
+ // Helper to check both Match count and NumInterest agree
+ void RequireNumInterest(string subj, int expected)
+ {
+ CountMatches(s, subj).ShouldBe(expected);
+ s.NumInterest(subj).ShouldBe(expected);
+ }
+
+ // Expect to find that "foo" matches but "bar" doesn't.
+ RequireNumInterest("foo", 1);
+ RequireNumInterest("bar", 0);
+
+ // Remove fooSub and check interest again
+ s.Remove("foo", 11);
+ RequireNumInterest("foo", 0);
+
+ // Try with partial wildcard *
+ s.Insert("foo.*", 22);
+ RequireNumInterest("foo", 0);
+ RequireNumInterest("foo.bar", 1);
+ RequireNumInterest("foo.bar.baz", 0);
+
+ // Remove sub, there should be no interest
+ s.Remove("foo.*", 22);
+ RequireNumInterest("foo", 0);
+ RequireNumInterest("foo.bar", 0);
+ RequireNumInterest("foo.bar.baz", 0);
+
+ // Full wildcard >
+ s.Insert("foo.>", 33);
+ RequireNumInterest("foo", 0);
+ RequireNumInterest("foo.bar", 1);
+ RequireNumInterest("foo.bar.baz", 1);
+
+ s.Remove("foo.>", 33);
+ RequireNumInterest("foo", 0);
+ RequireNumInterest("foo.bar", 0);
+ RequireNumInterest("foo.bar.baz", 0);
+
+ // *.>
+ s.Insert("*.>", 44);
+ RequireNumInterest("foo", 0);
+ RequireNumInterest("foo.bar", 1);
+ RequireNumInterest("foo.bar.baz", 1);
+ s.Remove("*.>", 44);
+
+ // *.bar
+ s.Insert("*.bar", 55);
+ RequireNumInterest("foo", 0);
+ RequireNumInterest("foo.bar", 1);
+ RequireNumInterest("foo.bar.baz", 0);
+ s.Remove("*.bar", 55);
+
+ // *
+ s.Insert("*", 66);
+ RequireNumInterest("foo", 1);
+ RequireNumInterest("foo.bar", 0);
+ s.Remove("*", 66);
+ }
+}
diff --git a/tests/NATS.Server.Tests/Internal/SubjectTree/SubjectTreeTests.cs b/tests/NATS.Server.Tests/Internal/SubjectTree/SubjectTreeTests.cs
new file mode 100644
index 0000000..7060da8
--- /dev/null
+++ b/tests/NATS.Server.Tests/Internal/SubjectTree/SubjectTreeTests.cs
@@ -0,0 +1,1783 @@
+// Go reference: server/stree/stree_test.go
+using System.Security.Cryptography;
+using System.Text;
+using NATS.Server.Internal.SubjectTree;
+
+namespace NATS.Server.Tests.Internal.SubjectTree;
+
+/// <summary>
+/// Tests for the Adaptive Radix Tree (ART) based SubjectTree.
+/// Ported from Go: server/stree/stree_test.go (59 tests)
+/// </summary>
+public class SubjectTreeTests
+{
+ private static byte[] B(string s) => Encoding.UTF8.GetBytes(s);
+
+ private static void MatchCount(SubjectTree st, string filter, int expected)
+ {
+ var matches = new List<int>();
+ st.Match(B(filter), (_, v) => matches.Add(v));
+ matches.Count.ShouldBe(expected, $"filter={filter}");
+ }
+
+ private static (int Count, bool Completed) MatchUntilCount(SubjectTree st, string filter, int stopAfter)
+ {
+ int n = 0;
+ var completed = st.MatchUntil(B(filter), (_, _) =>
+ {
+ n++;
+ return n < stopAfter;
+ });
+ return (n, completed);
+ }
+
+ #region Basic CRUD
+
+ // Go: TestSubjectTreeBasics server/stree/stree_test.go:33
+ [Fact]
+ public void TestSubjectTreeBasics()
+ {
+ var st = new SubjectTree();
+ st.Size.ShouldBe(0);
+
+ // Single leaf
+ var (old, updated) = st.Insert(B("foo.bar.baz"), 22);
+ old.ShouldBe(default);
+ updated.ShouldBeFalse();
+ st.Size.ShouldBe(1);
+
+ // Find shouldn't work with a wildcard.
+ var (_, found) = st.Find(B("foo.bar.*"));
+ found.ShouldBeFalse();
+
+ // But it should with a literal. Find with single leaf.
+ var (v, found2) = st.Find(B("foo.bar.baz"));
+ found2.ShouldBeTrue();
+ v.ShouldBe(22);
+
+ // Update single leaf
+ var (old2, updated2) = st.Insert(B("foo.bar.baz"), 33);
+ old2.ShouldBe(22);
+ updated2.ShouldBeTrue();
+ st.Size.ShouldBe(1);
+
+ // Split the tree
+ var (old3, updated3) = st.Insert(B("foo.bar"), 22);
+ old3.ShouldBe(default);
+ updated3.ShouldBeFalse();
+ st.Size.ShouldBe(2);
+
+ // Now we have node4 -> leaf*2
+ var (v2, found3) = st.Find(B("foo.bar"));
+ found3.ShouldBeTrue();
+ v2.ShouldBe(22);
+
+ // Make sure we can still retrieve the original after the split.
+ var (v3, found4) = st.Find(B("foo.bar.baz"));
+ found4.ShouldBeTrue();
+ v3.ShouldBe(33);
+ }
+
+ // Go: TestSubjectTreeNoPrefix server/stree/stree_test.go:432
+ [Fact]
+ public void TestSubjectTreeNoPrefix()
+ {
+ var st = new SubjectTree();
+ for (int i = 0; i < 26; i++)
+ {
+ var subj = B($"{(char)('A' + i)}");
+ var (old, updated) = st.Insert(subj, 22);
+ old.ShouldBe(default);
+ updated.ShouldBeFalse();
+ }
+
+ st.Root.ShouldBeOfType<Node48>();
+ var n = (Node48)st.Root!;
+ n.NumChildren.ShouldBe((ushort)26);
+
+ var (v, found) = st.Delete(B("B"));
+ found.ShouldBeTrue();
+ v.ShouldBe(22);
+ n.NumChildren.ShouldBe((ushort)25);
+
+ var (v2, found2) = st.Delete(B("Z"));
+ found2.ShouldBeTrue();
+ v2.ShouldBe(22);
+ n.NumChildren.ShouldBe((ushort)24);
+ }
+
+ // Go: TestSubjectTreeEmpty server/stree/stree_test.go:1330
+ [Fact]
+ public void TestSubjectTreeEmpty()
+ {
+ // Test Empty on new tree
+ var st = new SubjectTree();
+ st.Size.ShouldBe(0);
+ var st2 = st.Empty();
+ st2.ShouldBeSameAs(st); // Should return same instance
+ st2.Size.ShouldBe(0);
+
+ // Test Empty on tree with data
+ st.Insert(B("foo.bar"), 1);
+ st.Insert(B("foo.baz"), 2);
+ st.Insert(B("bar.baz"), 3);
+ st.Size.ShouldBe(3);
+
+ // Empty should clear everything
+ st2 = st.Empty();
+ st2.ShouldBeSameAs(st); // Should return same instance
+ st.Size.ShouldBe(0);
+ st.Root.ShouldBeNull();
+
+ // Verify we can't find old entries
+ st.Find(B("foo.bar")).Found.ShouldBeFalse();
+ st.Find(B("foo.baz")).Found.ShouldBeFalse();
+ st.Find(B("bar.baz")).Found.ShouldBeFalse();
+
+ // Verify we can insert new entries after Empty
+ var (old, updated) = st.Insert(B("new.entry"), 42);
+ old.ShouldBe(default);
+ updated.ShouldBeFalse();
+ st.Size.ShouldBe(1);
+
+ var (v, found) = st.Find(B("new.entry"));
+ found.ShouldBeTrue();
+ v.ShouldBe(42);
+ }
+
+ // Go: TestSizeOnNilTree server/stree/stree_test.go:1667
+ [Fact]
+ public void TestSizeOnNilTree()
+ {
+ // In C# we can't have a null reference call Size, but we test a new tree
+ var st = new SubjectTree();
+ st.Size.ShouldBe(0);
+ }
+
+ // Go: TestFindEdgeCases server/stree/stree_test.go:1672
+ [Fact]
+ public void TestFindEdgeCases()
+ {
+ var st = new SubjectTree();
+
+ // Test Find with empty subject at root level
+ st.Insert(B("foo.bar.baz"), 1);
+ st.Insert(B("foo"), 2);
+
+ // This should create a tree structure, now test finding with edge cases
+ var (v, found) = st.Find(B(""));
+ found.ShouldBeFalse();
+ }
+
+ #endregion
+
+ #region Node Growth/Shrink
+
+ // Go: TestSubjectTreeNodeGrow server/stree/stree_test.go:69
+ [Fact]
+ public void TestSubjectTreeNodeGrow()
+ {
+ var st = new SubjectTree();
+ for (int i = 0; i < 4; i++)
+ {
+ var subj = B($"foo.bar.{(char)('A' + i)}");
+ var (old, updated) = st.Insert(subj, 22);
+ old.ShouldBe(default);
+ updated.ShouldBeFalse();
+ }
+
+ // We have filled a node4.
+ st.Root.ShouldBeOfType<Node4>();
+
+ // This one will trigger us to grow.
+ var (old2, updated2) = st.Insert(B("foo.bar.E"), 22);
+ old2.ShouldBe(default);
+ updated2.ShouldBeFalse();
+ st.Root.ShouldBeOfType<Node10>();
+
+ for (int i = 5; i < 10; i++)
+ {
+ var subj = B($"foo.bar.{(char)('A' + i)}");
+ var (old3, updated3) = st.Insert(subj, 22);
+ old3.ShouldBe(default);
+ updated3.ShouldBeFalse();
+ }
+
+ // This one will trigger us to grow.
+ var (old4, updated4) = st.Insert(B("foo.bar.K"), 22);
+ old4.ShouldBe(default);
+ updated4.ShouldBeFalse();
+ // We have filled a node10.
+ st.Root.ShouldBeOfType<Node16>();
+
+ for (int i = 11; i < 16; i++)
+ {
+ var subj = B($"foo.bar.{(char)('A' + i)}");
+ var (old5, updated5) = st.Insert(subj, 22);
+ old5.ShouldBe(default);
+ updated5.ShouldBeFalse();
+ }
+
+ // This one will trigger us to grow.
+ var (old6, updated6) = st.Insert(B("foo.bar.Q"), 22);
+ old6.ShouldBe(default);
+ updated6.ShouldBeFalse();
+ st.Root.ShouldBeOfType<Node48>();
+
+ // Fill the node48.
+ for (int i = 17; i < 48; i++)
+ {
+ var subj = B($"foo.bar.{(char)('A' + i)}");
+ var (old7, updated7) = st.Insert(subj, 22);
+ old7.ShouldBe(default);
+ updated7.ShouldBeFalse();
+ }
+
+ // This one will trigger us to grow.
+ var subj8 = B($"foo.bar.{(char)('A' + 49)}");
+ var (old8, updated8) = st.Insert(subj8, 22);
+ old8.ShouldBe(default);
+ updated8.ShouldBeFalse();
+ st.Root.ShouldBeOfType<Node256>();
+ }
+
+ // Go: TestSubjectTreeNodePrefixMismatch server/stree/stree_test.go:127
+ [Fact]
+ public void TestSubjectTreeNodePrefixMismatch()
+ {
+ var st = new SubjectTree();
+ st.Insert(B("foo.bar.A"), 11);
+ st.Insert(B("foo.bar.B"), 22);
+ st.Insert(B("foo.bar.C"), 33);
+ // Grab current root. Split below will cause update.
+ var or = st.Root;
+ // This one will force a split of the node
+ st.Insert(B("foo.foo.A"), 44);
+ st.Root.ShouldNotBeSameAs(or);
+
+ // Now make sure we can retrieve correctly.
+ st.Find(B("foo.bar.A")).Value.ShouldBe(11);
+ st.Find(B("foo.bar.B")).Value.ShouldBe(22);
+ st.Find(B("foo.bar.C")).Value.ShouldBe(33);
+ st.Find(B("foo.foo.A")).Value.ShouldBe(44);
+ }
+
+ // Go: TestNode256Operations server/stree/stree_test.go:1493
+ [Fact]
+ public void TestNode256Operations()
+ {
+ // Test node256 creation and basic operations
+ var n = new Node256(B("prefix"));
+ n.IsFull.ShouldBeFalse(); // node256 is never full
+
+ // Test findChild when child doesn't exist
+ var child = n.FindChild((byte)'a');
+ child.ShouldBeNull();
+
+ // Add a child and find it
+ var leaf = new Leaf(B("suffix"), 42);
+ n.AddChild((byte)'a', leaf);
+ child = n.FindChild((byte)'a');
+ child.ShouldNotBeNull();
+ n.Meta.Size.ShouldBe((ushort)1);
+
+ // Test iter function
+ int iterCount = 0;
+ n.Iter((_) => { iterCount++; return true; });
+ iterCount.ShouldBe(1);
+
+ // Test iter with early termination
+ n.AddChild((byte)'b', new Leaf(B("suffix2"), 43));
+ n.AddChild((byte)'c', new Leaf(B("suffix3"), 44));
+ iterCount = 0;
+ n.Iter((_) => { iterCount++; return false; });
+ iterCount.ShouldBe(1);
+
+ // Test children() method
+ var children = n.Children();
+ children.Length.ShouldBe(256);
+
+ // Test that grow() panics
+ Should.Throw<InvalidOperationException>(() => n.Grow())
+ .Message.ShouldBe("grow can not be called on node256");
+ }
+
+ // Go: TestNode256Shrink server/stree/stree_test.go:1542
+ [Fact]
+ public void TestNode256Shrink()
+ {
+ var n256 = new Node256(B("prefix"));
+
+ // Add 49 children
+ for (int i = 0; i < 49; i++)
+ {
+ n256.AddChild((byte)i, new Leaf([(byte)i], i));
+ }
+ n256.Meta.Size.ShouldBe((ushort)49);
+
+ // Shrink should not happen yet (> 48 children)
+ var shrunk = n256.Shrink();
+ shrunk.ShouldBeNull();
+
+ // Delete one to get to 48 children
+ n256.DeleteChild(0);
+ n256.Meta.Size.ShouldBe((ushort)48);
+
+ // Now shrink should return a node48
+ shrunk = n256.Shrink();
+ shrunk.ShouldNotBeNull();
+ shrunk.ShouldBeOfType<Node48>();
+
+ // Verify the shrunk node has all remaining children
+ for (int i = 1; i < 49; i++)
+ {
+ var child = shrunk.FindChild((byte)i);
+ child.ShouldNotBeNull();
+ }
+ }
+
+ // Go: TestNodeShrinkNotNeeded server/stree/stree_test.go:1850
+ [Fact]
+ public void TestNodeShrinkNotNeeded()
+ {
+ // Test node10 shrink when not needed (has more than 4 children)
+ var n10 = new Node10(B("prefix"));
+ for (int i = 0; i < 5; i++)
+ {
+ n10.AddChild((byte)('a' + i), new Leaf([(byte)('0' + i)], i));
+ }
+ var shrunk = n10.Shrink();
+ shrunk.ShouldBeNull(); // Should not shrink
+
+ // Test node16 shrink when not needed (has more than 10 children)
+ var n16 = new Node16(B("prefix"));
+ for (int i = 0; i < 11; i++)
+ {
+ n16.AddChild((byte)i, new Leaf([(byte)i], i));
+ }
+ shrunk = n16.Shrink();
+ shrunk.ShouldBeNull(); // Should not shrink
+ }
+
+ #endregion
+
+ #region Delete
+
+ // Go: TestSubjectTreeNodeDelete server/stree/stree_test.go:152
+ [Fact]
+ public void TestSubjectTreeNodeDelete()
+ {
+ var st = new SubjectTree();
+ st.Insert(B("foo.bar.A"), 22);
+ var (v, found) = st.Delete(B("foo.bar.A"));
+ found.ShouldBeTrue();
+ v.ShouldBe(22);
+ st.Root.ShouldBeNull();
+
+ var (v2, found2) = st.Delete(B("foo.bar.A"));
+ found2.ShouldBeFalse();
+ v2.ShouldBe(default);
+
+ var (v3, found3) = st.Find(B("foo.foo.A"));
+ found3.ShouldBeFalse();
+ v3.ShouldBe(default);
+
+ // Kick to a node4.
+ st.Insert(B("foo.bar.A"), 11);
+ st.Insert(B("foo.bar.B"), 22);
+ st.Insert(B("foo.bar.C"), 33);
+
+ // Make sure we can delete and that we shrink back to leaf.
+ var (v4, found4) = st.Delete(B("foo.bar.C"));
+ found4.ShouldBeTrue();
+ v4.ShouldBe(33);
+ var (v5, found5) = st.Delete(B("foo.bar.B"));
+ found5.ShouldBeTrue();
+ v5.ShouldBe(22);
+ // We should have shrunk here.
+ st.Root!.IsLeaf.ShouldBeTrue();
+ var (v6, found6) = st.Delete(B("foo.bar.A"));
+ found6.ShouldBeTrue();
+ v6.ShouldBe(11);
+ st.Root.ShouldBeNull();
+
+ // Now pop up to a node10 and make sure we can shrink back down.
+ for (int i = 0; i < 5; i++)
+ {
+ var subj = $"foo.bar.{(char)('A' + i)}";
+ st.Insert(B(subj), 22);
+ }
+ st.Root.ShouldBeOfType<Node10>();
+ var (v7, found7) = st.Delete(B("foo.bar.A"));
+ found7.ShouldBeTrue();
+ v7.ShouldBe(22);
+ st.Root.ShouldBeOfType<Node4>();
+
+ // Now pop up to node16
+ for (int i = 0; i < 11; i++)
+ {
+ var subj = $"foo.bar.{(char)('A' + i)}";
+ st.Insert(B(subj), 22);
+ }
+ st.Root.ShouldBeOfType<Node16>();
+ var (v8, found8) = st.Delete(B("foo.bar.A"));
+ found8.ShouldBeTrue();
+ v8.ShouldBe(22);
+ st.Root.ShouldBeOfType<Node10>();
+ st.Find(B("foo.bar.B")).Found.ShouldBeTrue();
+ st.Find(B("foo.bar.B")).Value.ShouldBe(22);
+
+ // Now pop up to node48
+ st = new SubjectTree();
+ for (int i = 0; i < 17; i++)
+ {
+ var subj = $"foo.bar.{(char)('A' + i)}";
+ st.Insert(B(subj), 22);
+ }
+ st.Root.ShouldBeOfType<Node48>();
+ var (v9, found9) = st.Delete(B("foo.bar.A"));
+ found9.ShouldBeTrue();
+ v9.ShouldBe(22);
+ st.Root.ShouldBeOfType<Node16>();
+ st.Find(B("foo.bar.B")).Found.ShouldBeTrue();
+
+ // Now pop up to node256
+ st = new SubjectTree();
+ for (int i = 0; i < 49; i++)
+ {
+ var subj = $"foo.bar.{(char)('A' + i)}";
+ st.Insert(B(subj), 22);
+ }
+ st.Root.ShouldBeOfType<Node256>();
+ var (v10, found10) = st.Delete(B("foo.bar.A"));
+ found10.ShouldBeTrue();
+ v10.ShouldBe(22);
+ st.Root.ShouldBeOfType<Node48>();
+ st.Find(B("foo.bar.B")).Found.ShouldBeTrue();
+ }
+
+ // Go: TestSubjectTreeNodesAndPaths server/stree/stree_test.go:243
+ [Fact]
+ public void TestSubjectTreeNodesAndPaths()
+ {
+ var st = new SubjectTree();
+ void Check(string subj)
+ {
+ var (v, found) = st.Find(B(subj));
+ found.ShouldBeTrue();
+ v.ShouldBe(22);
+ }
+
+ st.Insert(B("foo.bar.A"), 22);
+ st.Insert(B("foo.bar.B"), 22);
+ st.Insert(B("foo.bar.C"), 22);
+ st.Insert(B("foo.bar"), 22);
+ Check("foo.bar.A");
+ Check("foo.bar.B");
+ Check("foo.bar.C");
+ Check("foo.bar");
+
+ // This will do several things in terms of shrinking and pruning
+ st.Delete(B("foo.bar"));
+ Check("foo.bar.A");
+ Check("foo.bar.B");
+ Check("foo.bar.C");
+ }
+
+ // Go: TestSubjectTreeDeleteShortSubjectNoPanic server/stree/stree_test.go:1308
+ [Fact]
+ public void TestSubjectTreeDeleteShortSubjectNoPanic()
+ {
+ var st = new SubjectTree();
+ st.Insert(B("foo.bar.baz"), 1);
+ st.Insert(B("foo.bar.qux"), 2);
+
+ var (v, found) = st.Delete(B("foo.bar"));
+ found.ShouldBeFalse();
+ v.ShouldBe(default);
+
+ st.Find(B("foo.bar.baz")).Value.ShouldBe(1);
+ st.Find(B("foo.bar.qux")).Value.ShouldBe(2);
+ }
+
+ // Go: TestDeleteEdgeCases server/stree/stree_test.go:1947
+ [Fact]
+ public void TestDeleteEdgeCases()
+ {
+ var st = new SubjectTree();
+
+ // Test delete on empty tree
+ var (val, deleted) = st.Delete(B("foo"));
+ deleted.ShouldBeFalse();
+ val.ShouldBe(default);
+
+ // Test delete with empty subject
+ st.Insert(B("foo"), 1);
+ var (val2, deleted2) = st.Delete(B(""));
+ deleted2.ShouldBeFalse();
+ val2.ShouldBe(default);
+
+ // Test delete with subject shorter than prefix
+ st = new SubjectTree();
+ st.Insert(B("verylongprefix.suffix"), 1);
+ st.Insert(B("verylongprefix.suffix2"), 2);
+ var (val3, deleted3) = st.Delete(B("very"));
+ deleted3.ShouldBeFalse();
+ val3.ShouldBe(default);
+ }
+
+ // Go: TestDeleteNilNodePointer server/stree/stree_test.go:2095
+ [Fact]
+ public void TestDeleteNilNodePointer()
+ {
+ var st = new SubjectTree();
+ // Test delete on empty tree (no root)
+ var (val, deleted) = st.Delete(B("foo"));
+ deleted.ShouldBeFalse();
+ val.ShouldBe(default);
+ }
+
+ // Go: TestDeleteChildEdgeCasesMore server/stree/stree_test.go:2036
+ [Fact]
+ public void TestDeleteChildEdgeCasesMore()
+ {
+ // Test the edge case in node10 deleteChild where we don't swap (last element)
+ var n10 = new Node10(B("prefix"));
+ n10.AddChild((byte)'a', new Leaf(B("1"), 1));
+ n10.AddChild((byte)'b', new Leaf(B("2"), 2));
+ n10.AddChild((byte)'c', new Leaf(B("3"), 3));
+
+ // Delete the last child
+ n10.DeleteChild((byte)'c');
+ n10.Meta.Size.ShouldBe((ushort)2);
+
+ // Test the edge case in node16 deleteChild where we don't swap (last element)
+ var n16 = new Node16(B("prefix"));
+ n16.AddChild((byte)'a', new Leaf(B("1"), 1));
+ n16.AddChild((byte)'b', new Leaf(B("2"), 2));
+ n16.AddChild((byte)'c', new Leaf(B("3"), 3));
+
+ // Delete the last child
+ n16.DeleteChild((byte)'c');
+ n16.Meta.Size.ShouldBe((ushort)2);
+ }
+
+ #endregion
+
+ #region Construction/Structure
+
+ // Go: TestSubjectTreeConstruction server/stree/stree_test.go:268
+ [Fact]
+ public void TestSubjectTreeConstruction()
+ {
+ var st = new SubjectTree();
+ st.Insert(B("foo.bar.A"), 1);
+ st.Insert(B("foo.bar.B"), 2);
+ st.Insert(B("foo.bar.C"), 3);
+ st.Insert(B("foo.baz.A"), 11);
+ st.Insert(B("foo.baz.B"), 22);
+ st.Insert(B("foo.baz.C"), 33);
+ st.Insert(B("foo.bar"), 42);
+
+ void CheckNode(INode? n, string kind, string pathStr, ushort numChildren)
+ {
+ n.ShouldNotBeNull();
+ n.Kind.ShouldBe(kind);
+ Encoding.UTF8.GetString(n.Path()).ShouldBe(pathStr);
+ n.NumChildren.ShouldBe(numChildren);
+ }
+
+ CheckNode(st.Root, "NODE4", "foo.ba", 2);
+ var nn = st.Root!.FindChild((byte)'r');
+ CheckNode(nn!.Node, "NODE4", "r", 2);
+ CheckNode(nn.Node!.FindChild(Parts.NoPivot)!.Node, "LEAF", "", 0);
+ var rnn = nn.Node!.FindChild((byte)'.');
+ CheckNode(rnn!.Node, "NODE4", ".", 3);
+ CheckNode(rnn.Node!.FindChild((byte)'A')!.Node, "LEAF", "A", 0);
+ CheckNode(rnn.Node!.FindChild((byte)'B')!.Node, "LEAF", "B", 0);
+ CheckNode(rnn.Node!.FindChild((byte)'C')!.Node, "LEAF", "C", 0);
+ var znn = st.Root!.FindChild((byte)'z');
+ CheckNode(znn!.Node, "NODE4", "z.", 3);
+ CheckNode(znn.Node!.FindChild((byte)'A')!.Node, "LEAF", "A", 0);
+ CheckNode(znn.Node!.FindChild((byte)'B')!.Node, "LEAF", "B", 0);
+ CheckNode(znn.Node!.FindChild((byte)'C')!.Node, "LEAF", "C", 0);
+
+ // Now delete "foo.bar" and make sure put ourselves back together properly.
+ var (v, found) = st.Delete(B("foo.bar"));
+ found.ShouldBeTrue();
+ v.ShouldBe(42);
+
+ CheckNode(st.Root, "NODE4", "foo.ba", 2);
+ nn = st.Root!.FindChild((byte)'r');
+ CheckNode(nn!.Node, "NODE4", "r.", 3);
+ CheckNode(nn.Node!.FindChild((byte)'A')!.Node, "LEAF", "A", 0);
+ CheckNode(nn.Node!.FindChild((byte)'B')!.Node, "LEAF", "B", 0);
+ CheckNode(nn.Node!.FindChild((byte)'C')!.Node, "LEAF", "C", 0);
+ znn = st.Root!.FindChild((byte)'z');
+ CheckNode(znn!.Node, "NODE4", "z.", 3);
+ CheckNode(znn.Node!.FindChild((byte)'A')!.Node, "LEAF", "A", 0);
+ CheckNode(znn.Node!.FindChild((byte)'B')!.Node, "LEAF", "B", 0);
+ CheckNode(znn.Node!.FindChild((byte)'C')!.Node, "LEAF", "C", 0);
+ }
+
+ #endregion
+
+ #region Matching
+
+ // Go: TestSubjectTreeMatchLeafOnly server/stree/stree_test.go:331
+ [Fact]
+ public void TestSubjectTreeMatchLeafOnly()
+ {
+ var st = new SubjectTree();
+ st.Insert(B("foo.bar.baz.A"), 1);
+
+ // Check all placements of pwc in token space.
+ MatchCount(st, "foo.bar.*.A", 1);
+ MatchCount(st, "foo.*.baz.A", 1);
+ MatchCount(st, "foo.*.*.A", 1);
+ MatchCount(st, "foo.*.*.*", 1);
+ MatchCount(st, "*.*.*.*", 1);
+
+ // Now check fwc.
+ MatchCount(st, ">", 1);
+ MatchCount(st, "foo.>", 1);
+ MatchCount(st, "foo.*.>", 1);
+ MatchCount(st, "foo.bar.>", 1);
+ MatchCount(st, "foo.bar.*.>", 1);
+
+ // Check partials so they do not trigger on leafs.
+ MatchCount(st, "foo.bar.baz", 0);
+ }
+
+ // Go: TestSubjectTreeMatchNodes server/stree/stree_test.go:352
+ [Fact]
+ public void TestSubjectTreeMatchNodes()
+ {
+ var st = new SubjectTree();
+ st.Insert(B("foo.bar.A"), 1);
+ st.Insert(B("foo.bar.B"), 2);
+ st.Insert(B("foo.bar.C"), 3);
+ st.Insert(B("foo.baz.A"), 11);
+ st.Insert(B("foo.baz.B"), 22);
+ st.Insert(B("foo.baz.C"), 33);
+
+ // Test literals.
+ MatchCount(st, "foo.bar.A", 1);
+ MatchCount(st, "foo.baz.A", 1);
+ MatchCount(st, "foo.bar", 0);
+ // Test internal pwc
+ MatchCount(st, "foo.*.A", 2);
+ // Test terminal pwc
+ MatchCount(st, "foo.bar.*", 3);
+ MatchCount(st, "foo.baz.*", 3);
+ // Check fwc
+ MatchCount(st, ">", 6);
+ MatchCount(st, "foo.>", 6);
+ MatchCount(st, "foo.bar.>", 3);
+ MatchCount(st, "foo.baz.>", 3);
+ // Make sure we do not have false positives on prefix matches.
+ MatchCount(st, "foo.ba", 0);
+
+ // Now add in "foo.bar" to make a more complex tree construction and re-test.
+ st.Insert(B("foo.bar"), 42);
+
+ // Test literals.
+ MatchCount(st, "foo.bar.A", 1);
+ MatchCount(st, "foo.baz.A", 1);
+ MatchCount(st, "foo.bar", 1);
+ // Test internal pwc
+ MatchCount(st, "foo.*.A", 2);
+ // Test terminal pwc
+ MatchCount(st, "foo.bar.*", 3);
+ MatchCount(st, "foo.baz.*", 3);
+ // Check fwc
+ MatchCount(st, ">", 7);
+ MatchCount(st, "foo.>", 7);
+ MatchCount(st, "foo.bar.>", 3);
+ MatchCount(st, "foo.baz.>", 3);
+ }
+
+ // Go: TestSubjectTreeMatchUntil server/stree/stree_test.go:407
+ [Fact]
+ public void TestSubjectTreeMatchUntil()
+ {
+ var st = new SubjectTree();
+ st.Insert(B("foo.bar.A"), 1);
+ st.Insert(B("foo.bar.B"), 2);
+ st.Insert(B("foo.bar.C"), 3);
+ st.Insert(B("foo.baz.A"), 11);
+ st.Insert(B("foo.baz.B"), 22);
+ st.Insert(B("foo.baz.C"), 33);
+ st.Insert(B("foo.bar"), 42);
+
+ // Ensure early stop terminates traversal.
+ var (count, completed) = MatchUntilCount(st, "foo.>", 3);
+ count.ShouldBe(3);
+ completed.ShouldBeFalse();
+
+ // Match completes
+ (count, completed) = MatchUntilCount(st, "foo.bar", 3);
+ count.ShouldBe(1);
+ completed.ShouldBeTrue();
+
+ (count, completed) = MatchUntilCount(st, "foo.baz.*", 4);
+ count.ShouldBe(3);
+ completed.ShouldBeTrue();
+ }
+
+ // Go: TestSubjectTreePartialTerminalWildcardBugMatch server/stree/stree_test.go:453
+ [Fact]
+ public void TestSubjectTreePartialTerminalWildcardBugMatch()
+ {
+ var st = new SubjectTree();
+ st.Insert(B("STATE.GLOBAL.CELL1.7PDSGAALXNN000010.PROPERTY-A"), 5);
+ st.Insert(B("STATE.GLOBAL.CELL1.7PDSGAALXNN000010.PROPERTY-B"), 1);
+ st.Insert(B("STATE.GLOBAL.CELL1.7PDSGAALXNN000010.PROPERTY-C"), 2);
+ MatchCount(st, "STATE.GLOBAL.CELL1.7PDSGAALXNN000010.*", 3);
+ }
+
+ // Go: TestSubjectTreeMatchSubjectParam server/stree/stree_test.go:461
+ [Fact]
+ public void TestSubjectTreeMatchSubjectParam()
+ {
+ var st = new SubjectTree();
+ st.Insert(B("foo.bar.A"), 1);
+ st.Insert(B("foo.bar.B"), 2);
+ st.Insert(B("foo.bar.C"), 3);
+ st.Insert(B("foo.baz.A"), 11);
+ st.Insert(B("foo.baz.B"), 22);
+ st.Insert(B("foo.baz.C"), 33);
+ st.Insert(B("foo.bar"), 42);
+
+ var checkValMap = new Dictionary<string, int>
+ {
+ ["foo.bar.A"] = 1,
+ ["foo.bar.B"] = 2,
+ ["foo.bar.C"] = 3,
+ ["foo.baz.A"] = 11,
+ ["foo.baz.B"] = 22,
+ ["foo.baz.C"] = 33,
+ ["foo.bar"] = 42,
+ };
+
+ // Make sure we get a proper subject parameter and it matches our value properly.
+ st.Match(B(">"), (subject, v) =>
+ {
+ var key = Encoding.UTF8.GetString(subject);
+ checkValMap.ShouldContainKey(key);
+ v.ShouldBe(checkValMap[key]);
+ });
+ }
+
+ // Go: TestSubjectTreeMatchRandomDoublePWC server/stree/stree_test.go:490
+ [Fact]
+ public void TestSubjectTreeMatchRandomDoublePWC()
+ {
+ var st = new SubjectTree();
+ var rng = new Random(42);
+ for (int i = 1; i <= 10_000; i++)
+ {
+ var subj = $"foo.{rng.Next(20) + 1}.{i}";
+ st.Insert(B(subj), 42);
+ }
+ MatchCount(st, "foo.*.*", 10_000);
+
+ // Check with pwc and short interior token.
+ int seen = 0;
+ st.Match(B("*.2.*"), (_, _) => seen++);
+
+ // Now check via walk to make sure we are right.
+ int verified = 0;
+ st.IterOrdered((subject, _) =>
+ {
+ var tokens = Encoding.UTF8.GetString(subject).Split('.');
+ tokens.Length.ShouldBe(3);
+ if (tokens[1] == "2") verified++;
+ return true;
+ });
+ seen.ShouldBe(verified);
+
+ seen = 0;
+ verified = 0;
+ st.Match(B("*.*.222"), (_, _) => seen++);
+ st.IterOrdered((subject, _) =>
+ {
+ var tokens = Encoding.UTF8.GetString(subject).Split('.');
+ tokens.Length.ShouldBe(3);
+ if (tokens[2] == "222") verified++;
+ return true;
+ });
+ seen.ShouldBe(verified);
+ }
+
+ // Go: TestSubjectTreeMatchTsepSecondThenPartialPartBug server/stree/stree_test.go:643
+ [Fact]
+ public void TestSubjectTreeMatchTsepSecondThenPartialPartBug()
+ {
+ var st = new SubjectTree();
+ st.Insert(B("foo.xxxxx.foo1234.zz"), 22);
+ st.Insert(B("foo.yyy.foo123.zz"), 22);
+ st.Insert(B("foo.yyybar789.zz"), 22);
+ st.Insert(B("foo.yyy.foo12345.zz"), 22);
+ st.Insert(B("foo.yyy.foo12345.yy"), 22);
+ st.Insert(B("foo.yyy.foo123456789.zz"), 22);
+ MatchCount(st, "foo.*.foo123456789.*", 1);
+ MatchCount(st, "foo.*.*.zzz.foo.>", 0);
+ }
+
+ // Go: TestSubjectTreeMatchMultipleWildcardBasic server/stree/stree_test.go:655
+ [Fact]
+ public void TestSubjectTreeMatchMultipleWildcardBasic()
+ {
+ var st = new SubjectTree();
+ st.Insert(B("A.B.C.D.0.G.H.I.0"), 22);
+ st.Insert(B("A.B.C.D.1.G.H.I.0"), 22);
+ MatchCount(st, "A.B.*.D.1.*.*.I.0", 1);
+ }
+
+ // Go: TestSubjectTreeMatchInvalidWildcard server/stree/stree_test.go:662
+ [Fact]
+ public void TestSubjectTreeMatchInvalidWildcard()
+ {
+ var st = new SubjectTree();
+ st.Insert(B("foo.123"), 22);
+ st.Insert(B("one.two.three.four.five"), 22);
+ st.Insert(B("'*.123"), 22);
+ st.Insert(B("bar"), 22);
+ MatchCount(st, "invalid.>", 0);
+ MatchCount(st, "foo.>.bar", 0);
+ MatchCount(st, ">", 4);
+ MatchCount(st, "'*.*", 1);
+ MatchCount(st, "'*.*.*'", 0);
+ // None of these should match.
+ MatchCount(st, "`>`", 0);
+ MatchCount(st, "\">\u0022", 0);
+ MatchCount(st, "'>'", 0);
+ MatchCount(st, "'*.>'", 0);
+ MatchCount(st, "'*.>.", 0);
+ MatchCount(st, "`invalid.>`", 0);
+ MatchCount(st, "'*.*'", 0);
+ }
+
+ // Go: TestSubjectTreeMatchNoCallbackDupe server/stree/stree_test.go:881
+ [Fact]
+ public void TestSubjectTreeMatchNoCallbackDupe()
+ {
+ var st = new SubjectTree