feat: port internal data structures from Go (Wave 2)
- AVL SequenceSet: sparse sequence set with AVL tree, 16 tests
- Subject Tree: Adaptive Radix Tree (ART) with 5 node tiers, 59 tests
- Generic Subject List: trie-based subject matcher, 21 tests
- Time Hash Wheel: O(1) TTL expiration wheel, 8 tests

Total: 106 new tests (1,081 → 1,187 passing)
This commit is contained in:
@@ -1,7 +1,777 @@
|
||||
namespace NATS.Server.Internal.Avl;
|
||||
// Copyright 2024 The NATS Authors
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
// Go reference: server/avl/seqset.go
|
||||
// TODO: Port AVL-backed sparse sequence set
|
||||
|
||||
using System.Buffers.Binary;
|
||||
using System.Numerics;
|
||||
|
||||
namespace NATS.Server.Internal.Avl;
|
||||
|
||||
/// <summary>
/// SequenceSet is a memory and encoding optimized set for storing unsigned ints.
/// Uses an AVL tree with nodes that hold bitmasks for set membership.
/// Not thread safe.
/// Each node covers a 2048-wide, node-aligned range of sequences; membership within
/// the range is tracked in 32 x 64-bit buckets, so dense runs of sequences cost
/// one node instead of one entry each.
/// </summary>
public class SequenceSet
{
    internal const int BitsPerBucket = 64;
    internal const int NumBuckets = 32;
    internal const int NumEntries = NumBuckets * BitsPerBucket; // 2048

    // Binary encoding header: [Magic][Version], then node count + entry count.
    private const byte Magic = 22;
    private const byte Version = 2;
    private const int HdrLen = 2;
    private const int MinLen = HdrLen + 8; // magic + version + num nodes + num entries

    internal Node? Root;
    private int _size;    // number of set members (bits), not nodes
    private int _nodes;   // number of AVL nodes
    private bool _changed; // scratch flag set by Node.Insert/Delete via ref

    /// <summary>Number of items in the set.</summary>
    public int Size => _size;

    /// <summary>Number of nodes in the tree.</summary>
    public int Nodes => _nodes;

    /// <summary>Fast check of the set being empty.</summary>
    public bool IsEmpty => Root == null;

    /// <summary>Insert will insert the sequence into the set. The tree will be balanced inline.</summary>
    public void Insert(ulong seq)
    {
        Root = Node.Insert(Root, seq, ref _changed, ref _nodes);
        // _changed is only set when the bit was not previously present,
        // so duplicate inserts do not inflate _size.
        if (_changed)
        {
            _changed = false;
            _size++;
        }
    }

    /// <summary>Returns true if the sequence is a member of this set.</summary>
    public bool Exists(ulong seq)
    {
        // Standard BST descent keyed on each node's [Base, Base + NumEntries) range.
        var n = Root;
        while (n != null)
        {
            if (seq < n.Base)
            {
                n = n.Left;
            }
            else if (seq >= n.Base + NumEntries)
            {
                n = n.Right;
            }
            else
            {
                return n.ExistsBit(seq);
            }
        }

        return false;
    }

    /// <summary>
    /// Sets the initial minimum sequence when known. More effectively utilizes space.
    /// The set must be empty.
    /// </summary>
    /// <exception cref="InvalidOperationException">Thrown when the set is not empty.</exception>
    public void SetInitialMin(ulong min)
    {
        if (!IsEmpty)
        {
            throw new InvalidOperationException("Set not empty");
        }

        // NOTE(review): unlike Node.Insert, the base here is NOT aligned down to a
        // NumEntries boundary (mirrors the Go original) — confirm callers only pass
        // a min that later inserts treat consistently.
        Root = new Node { Base = min, Height = 1 };
        _nodes = 1;
    }

    /// <summary>
    /// Removes the sequence from the set. Returns true if the sequence was present.
    /// </summary>
    public bool Delete(ulong seq)
    {
        if (Root == null)
        {
            return false;
        }

        Root = Node.Delete(Root, seq, ref _changed, ref _nodes);
        if (_changed)
        {
            _changed = false;
            _size--;
            // Reset bookkeeping entirely once the last member is gone.
            if (_size == 0)
            {
                Empty();
            }

            return true;
        }

        return false;
    }

    /// <summary>Clears all items from the set.</summary>
    public void Empty()
    {
        Root = null;
        _size = 0;
        _nodes = 0;
    }

    /// <summary>
    /// Invokes the callback for each item in ascending order.
    /// If the callback returns false, iteration terminates.
    /// </summary>
    public void Range(Func<ulong, bool> callback) => Node.Iter(Root, callback);

    /// <summary>Returns the left and right heights of the tree root.</summary>
    public (int Left, int Right) Heights()
    {
        if (Root == null)
        {
            return (0, 0);
        }

        var l = Root.Left?.Height ?? 0;
        var r = Root.Right?.Height ?? 0;
        return (l, r);
    }

    /// <summary>Returns min, max, and number of set items.</summary>
    public (ulong Min, ulong Max, ulong Num) State()
    {
        if (Root == null)
        {
            return (0, 0, 0);
        }

        var (min, max) = MinMax();
        return (min, max, (ulong)_size);
    }

    /// <summary>Returns the minimum and maximum values in the set.</summary>
    public (ulong Min, ulong Max) MinMax()
    {
        if (Root == null)
        {
            return (0, 0);
        }

        // Descend to the leftmost node for the min; the guard fires exactly once,
        // on the final (leftmost) node of the walk.
        ulong min = 0;
        for (var l = Root; l != null; l = l.Left)
        {
            if (l.Left == null)
            {
                min = l.Min();
            }
        }

        // Symmetric walk to the rightmost node for the max.
        ulong max = 0;
        for (var r = Root; r != null; r = r.Right)
        {
            if (r.Right == null)
            {
                max = r.Max();
            }
        }

        return (min, max);
    }

    /// <summary>Returns a deep clone of this SequenceSet.</summary>
    public SequenceSet Clone()
    {
        var css = new SequenceSet { _nodes = _nodes, _size = _size };
        css.Root = CloneNode(Root);
        return css;
    }

    /// <summary>Unions this set with one or more other sets by inserting all their elements.</summary>
    public void Union(params SequenceSet[] others)
    {
        foreach (var other in others)
        {
            Node.NodeIter(other.Root, n =>
            {
                // Walk every set bit of the other node and re-insert it here so
                // this tree's own balancing/bookkeeping stays correct.
                for (var nb = 0; nb < NumBuckets; nb++)
                {
                    var b = n.Bits[nb];
                    for (var pos = 0UL; b != 0; pos++)
                    {
                        if ((b & 1) == 1)
                        {
                            var seq = n.Base + ((ulong)nb * BitsPerBucket) + pos;
                            Insert(seq);
                        }

                        b >>= 1;
                    }
                }
            });
        }
    }

    /// <summary>Returns a union of all provided sets.</summary>
    public static SequenceSet CreateUnion(params SequenceSet[] sets)
    {
        if (sets.Length == 0)
        {
            return new SequenceSet();
        }

        // Sort descending by size so we clone the largest.
        var sorted = sets.OrderByDescending(s => s.Size).ToArray();
        var ss = sorted[0].Clone();

        for (var i = 1; i < sorted.Length; i++)
        {
            sorted[i].Range(n =>
            {
                ss.Insert(n);
                return true;
            });
        }

        return ss;
    }

    /// <summary>Returns the bytes needed for encoding.</summary>
    // Per node: 8-byte base + 32 x 8-byte buckets + 2-byte height = 266 bytes.
    public int EncodeLength() => MinLen + (_nodes * ((NumBuckets + 1) * 8 + 2));

    /// <summary>Encodes the set to a compact binary format (v2, little-endian).</summary>
    public byte[] Encode()
    {
        var encLen = EncodeLength();
        var buf = new byte[encLen];

        buf[0] = Magic;
        buf[1] = Version;
        var i = HdrLen;
        BinaryPrimitives.WriteUInt32LittleEndian(buf.AsSpan(i), (uint)_nodes);
        BinaryPrimitives.WriteUInt32LittleEndian(buf.AsSpan(i + 4), (uint)_size);
        i += 8;

        // The lambda captures `i` by reference (closure), so the write cursor
        // advances across nodes. Nodes are emitted in pre-order; Decode relies
        // on that ordering to rebuild the identical tree shape.
        Node.NodeIter(Root, n =>
        {
            BinaryPrimitives.WriteUInt64LittleEndian(buf.AsSpan(i), n.Base);
            i += 8;
            for (var bi = 0; bi < NumBuckets; bi++)
            {
                BinaryPrimitives.WriteUInt64LittleEndian(buf.AsSpan(i), n.Bits[bi]);
                i += 8;
            }

            BinaryPrimitives.WriteUInt16LittleEndian(buf.AsSpan(i), (ushort)n.Height);
            i += 2;
        });

        // Defensive trim: i should always equal encLen here, so this normally
        // copies the full buffer.
        return buf.AsSpan(0, i).ToArray();
    }

    /// <summary>Decodes a SequenceSet from a binary buffer. Returns the set and number of bytes read.</summary>
    /// <exception cref="InvalidOperationException">Thrown on bad magic, version, or truncated data.</exception>
    public static (SequenceSet Set, int BytesRead) Decode(ReadOnlySpan<byte> buf)
    {
        if (buf.Length < MinLen || buf[0] != Magic)
        {
            throw new InvalidOperationException("Bad encoding");
        }

        return buf[1] switch
        {
            1 => DecodeV1(buf),
            2 => DecodeV2(buf),
            _ => throw new InvalidOperationException("Bad version"),
        };
    }

    // V2 decode: nodes were encoded pre-order, so InsertNode can rebuild the
    // exact tree (heights included) without any rebalancing.
    private static (SequenceSet Set, int BytesRead) DecodeV2(ReadOnlySpan<byte> buf)
    {
        var index = 2;
        var nn = (int)BinaryPrimitives.ReadUInt32LittleEndian(buf[index..]);
        var sz = (int)BinaryPrimitives.ReadUInt32LittleEndian(buf[(index + 4)..]);
        index += 8;

        var expectedLen = MinLen + (nn * ((NumBuckets + 1) * 8 + 2));
        if (buf.Length < expectedLen)
        {
            throw new InvalidOperationException("Bad encoding");
        }

        // NOTE(review): v2 trusts the encoded size (no recount like v1 does).
        var ss = new SequenceSet { _size = sz };

        for (var i = 0; i < nn; i++)
        {
            var n = new Node
            {
                Base = BinaryPrimitives.ReadUInt64LittleEndian(buf[index..]),
            };
            index += 8;

            for (var bi = 0; bi < NumBuckets; bi++)
            {
                n.Bits[bi] = BinaryPrimitives.ReadUInt64LittleEndian(buf[index..]);
                index += 8;
            }

            n.Height = BinaryPrimitives.ReadUInt16LittleEndian(buf[index..]);
            index += 2;

            ss.InsertNode(n);
        }

        return (ss, index);
    }

    // V1 decode: the legacy format used 64 buckets per node, so each sequence is
    // re-inserted individually rather than adopting the old node layout.
    private static (SequenceSet Set, int BytesRead) DecodeV1(ReadOnlySpan<byte> buf)
    {
        const int v1NumBuckets = 64;
        var index = 2;
        var nn = (int)BinaryPrimitives.ReadUInt32LittleEndian(buf[index..]);
        var sz = (int)BinaryPrimitives.ReadUInt32LittleEndian(buf[(index + 4)..]);
        index += 8;

        var expectedLen = MinLen + (nn * ((v1NumBuckets + 1) * 8 + 2));
        if (buf.Length < expectedLen)
        {
            throw new InvalidOperationException("Bad encoding");
        }

        var ss = new SequenceSet();

        for (var i = 0; i < nn; i++)
        {
            var nodeBase = BinaryPrimitives.ReadUInt64LittleEndian(buf[index..]);
            index += 8;

            for (var nb = 0UL; nb < v1NumBuckets; nb++)
            {
                var n = BinaryPrimitives.ReadUInt64LittleEndian(buf[index..]);
                for (var pos = 0UL; n != 0; pos++)
                {
                    if ((n & 1) == 1)
                    {
                        var seq = nodeBase + (nb * BitsPerBucket) + pos;
                        ss.Insert(seq);
                    }

                    n >>= 1;
                }

                index += 8;
            }

            // Skip encoded height.
            index += 2;
        }

        // v1 cross-checks the recounted size against the encoded one.
        if (ss.Size != sz)
        {
            throw new InvalidOperationException("Bad encoding");
        }

        return (ss, index);
    }

    /// <summary>Inserts a decoded node directly into the tree (no rebalancing needed for ordered inserts).</summary>
    private void InsertNode(Node n)
    {
        _nodes++;

        if (Root == null)
        {
            Root = n;
            return;
        }

        // Plain BST attach by Base; pre-order arrival guarantees the decoded
        // heights and shape remain valid without rotations.
        for (var p = Root; ;)
        {
            if (n.Base < p.Base)
            {
                if (p.Left == null)
                {
                    p.Left = n;
                    return;
                }

                p = p.Left;
            }
            else
            {
                if (p.Right == null)
                {
                    p.Right = n;
                    return;
                }

                p = p.Right;
            }
        }
    }

    // Recursive deep copy of a node and its subtrees.
    private static Node? CloneNode(Node? src)
    {
        if (src == null)
        {
            return null;
        }

        var n = new Node { Base = src.Base, Height = src.Height };
        Array.Copy(src.Bits, n.Bits, NumBuckets);
        n.Left = CloneNode(src.Left);
        n.Right = CloneNode(src.Right);
        return n;
    }

    /// <summary>AVL tree node that stores a bitmask covering NumEntries (2048) consecutive sequences.</summary>
    internal sealed class Node
    {
        public ulong Base;
        public readonly ulong[] Bits = new ulong[NumBuckets];
        public Node? Left;
        public Node? Right;
        public int Height; // AVL height; 1 for a leaf

        /// <summary>Sets the bit for the given sequence. Reports whether it was newly inserted.</summary>
        public void SetBit(ulong seq, ref bool inserted)
        {
            // seq is assumed to lie within [Base, Base + NumEntries); callers
            // route by range before calling.
            seq -= Base;
            var i = seq / BitsPerBucket;
            var mask = 1UL << (int)(seq % BitsPerBucket);
            if ((Bits[i] & mask) == 0)
            {
                Bits[i] |= mask;
                inserted = true;
            }
        }

        /// <summary>Clears the bit for the given sequence. Returns true if this node is now empty.</summary>
        public bool ClearBit(ulong seq, ref bool deleted)
        {
            seq -= Base;
            var i = seq / BitsPerBucket;
            var mask = 1UL << (int)(seq % BitsPerBucket);
            if ((Bits[i] & mask) != 0)
            {
                Bits[i] &= ~mask;
                deleted = true;
            }
        }

            // Emptiness is rescanned on every call (even when nothing was cleared),
            // so a never-populated node — e.g. from SetInitialMin — also reports
            // empty and gets pruned by Delete.
            for (var b = 0; b < NumBuckets; b++)
            {
                if (Bits[b] != 0)
                {
                    return false;
                }
            }

            return true;
        }

        /// <summary>Checks if the bit for the given sequence is set.</summary>
        public bool ExistsBit(ulong seq)
        {
            seq -= Base;
            var i = seq / BitsPerBucket;
            var mask = 1UL << (int)(seq % BitsPerBucket);
            return (Bits[i] & mask) != 0;
        }

        /// <summary>Returns the minimum sequence in this node (node must not be empty).</summary>
        public ulong Min()
        {
            for (var i = 0; i < NumBuckets; i++)
            {
                if (Bits[i] != 0)
                {
                    // First set bit of the first non-empty bucket.
                    return Base + (ulong)(i * BitsPerBucket) + (ulong)BitOperations.TrailingZeroCount(Bits[i]);
                }
            }

            return 0;
        }

        /// <summary>Returns the maximum sequence in this node (node must not be empty).</summary>
        public ulong Max()
        {
            for (var i = NumBuckets - 1; i >= 0; i--)
            {
                if (Bits[i] != 0)
                {
                    // Highest set bit position p: for v with top bit at p,
                    // LZC(v >> 1) == 64 - p, so 64 - LZC(v >> 1) == p. The shift
                    // also makes p == 0 (v == 1) come out right: LZC(0) == 64.
                    return Base + (ulong)(i * BitsPerBucket) + (ulong)(BitsPerBucket - BitOperations.LeadingZeroCount(Bits[i] >> 1));
                }
            }

            return 0;
        }

        /// <summary>Inserts a sequence into the subtree rooted at this node, rebalancing as needed.</summary>
        public static Node Insert(Node? n, ulong seq, ref bool inserted, ref int nodes)
        {
            if (n == null)
            {
                // New nodes are aligned down to a NumEntries boundary so ranges
                // of sibling nodes never overlap.
                var nodeBase = (seq / NumEntries) * NumEntries;
                var newNode = new Node { Base = nodeBase, Height = 1 };
                newNode.SetBit(seq, ref inserted);
                nodes++;
                return newNode;
            }

            if (seq < n.Base)
            {
                n.Left = Insert(n.Left, seq, ref inserted, ref nodes);
            }
            else if (seq >= n.Base + NumEntries)
            {
                n.Right = Insert(n.Right, seq, ref inserted, ref nodes);
            }
            else
            {
                n.SetBit(seq, ref inserted);
            }

            n.Height = MaxHeight(n) + 1;

            // Standard AVL rebalance: LL/LR and RR/RL cases.
            var bf = BalanceFactor(n);
            if (bf > 1)
            {
                if (BalanceFactor(n.Left) < 0)
                {
                    n.Left = RotateLeft(n.Left!);
                }

                return RotateRight(n);
            }
            else if (bf < -1)
            {
                if (BalanceFactor(n.Right) > 0)
                {
                    n.Right = RotateRight(n.Right!);
                }

                return RotateLeft(n);
            }

            return n;
        }

        /// <summary>Deletes a sequence from the subtree rooted at this node, rebalancing as needed.</summary>
        public static Node? Delete(Node? n, ulong seq, ref bool deleted, ref int nodes)
        {
            if (n == null)
            {
                return null;
            }

            if (seq < n.Base)
            {
                n.Left = Delete(n.Left, seq, ref deleted, ref nodes);
            }
            else if (seq >= n.Base + NumEntries)
            {
                n.Right = Delete(n.Right, seq, ref deleted, ref nodes);
            }
            else if (n.ClearBit(seq, ref deleted))
            {
                // Node is now empty, remove it.
                nodes--;
                if (n.Left == null)
                {
                    n = n.Right;
                }
                else if (n.Right == null)
                {
                    n = n.Left;
                }
                else
                {
                    // Both children present: insert left subtree into the leftmost position of right subtree.
                    n.Right = InsertNodePrev(n.Right, n.Left);
                    n = n.Right;
                }
            }

            if (n != null)
            {
                n.Height = MaxHeight(n) + 1;
            }

            // BalanceFactor(null) == 0, so a fully-removed subtree falls through
            // to `return n` (null) without touching the rotation branches.
            var bf = BalanceFactor(n);
            if (bf > 1)
            {
                if (BalanceFactor(n!.Left) < 0)
                {
                    n.Left = RotateLeft(n.Left!);
                }

                return RotateRight(n);
            }
            else if (bf < -1)
            {
                if (BalanceFactor(n!.Right) > 0)
                {
                    n.Right = RotateRight(n.Right!);
                }

                return RotateLeft(n);
            }

            return n;
        }

        /// <summary>Inserts nn into the leftmost position of n's subtree, then rebalances.</summary>
        private static Node InsertNodePrev(Node n, Node nn)
        {
            // All of nn's sequences sort before all of n's, so attaching at the
            // leftmost slot preserves BST order.
            if (n.Left == null)
            {
                n.Left = nn;
            }
            else
            {
                n.Left = InsertNodePrev(n.Left, nn);
            }

            n.Height = MaxHeight(n) + 1;

            var bf = BalanceFactor(n);
            if (bf > 1)
            {
                if (BalanceFactor(n.Left) < 0)
                {
                    n.Left = RotateLeft(n.Left!);
                }

                return RotateRight(n);
            }
            else if (bf < -1)
            {
                if (BalanceFactor(n.Right) > 0)
                {
                    n.Right = RotateRight(n.Right!);
                }

                return RotateLeft(n);
            }

            return n;
        }

        /// <summary>Left rotation.</summary>
        private static Node RotateLeft(Node n)
        {
            var r = n.Right;
            if (r != null)
            {
                n.Right = r.Left;
                r.Left = n;
                n.Height = MaxHeight(n) + 1;
                r.Height = MaxHeight(r) + 1;
            }
            else
            {
                // Defensive: no pivot to rotate around, just refresh the height.
                n.Right = null;
                n.Height = MaxHeight(n) + 1;
            }

            return r ?? n;
        }

        /// <summary>Right rotation.</summary>
        private static Node RotateRight(Node n)
        {
            var l = n.Left;
            if (l != null)
            {
                n.Left = l.Right;
                l.Right = n;
                n.Height = MaxHeight(n) + 1;
                l.Height = MaxHeight(l) + 1;
            }
            else
            {
                // Defensive: no pivot to rotate around, just refresh the height.
                n.Left = null;
                n.Height = MaxHeight(n) + 1;
            }

            return l ?? n;
        }

        /// <summary>Returns the balance factor (left height - right height).</summary>
        internal static int BalanceFactor(Node? n)
        {
            if (n == null)
            {
                return 0;
            }

            var lh = n.Left?.Height ?? 0;
            var rh = n.Right?.Height ?? 0;
            return lh - rh;
        }

        /// <summary>Returns the max of left and right child heights.</summary>
        internal static int MaxHeight(Node? n)
        {
            if (n == null)
            {
                return 0;
            }

            var lh = n.Left?.Height ?? 0;
            var rh = n.Right?.Height ?? 0;
            return Math.Max(lh, rh);
        }

        /// <summary>Iterates nodes in pre-order (root, left, right) for encoding.</summary>
        internal static void NodeIter(Node? n, Action<Node> f)
        {
            if (n == null)
            {
                return;
            }

            f(n);
            NodeIter(n.Left, f);
            NodeIter(n.Right, f);
        }

        /// <summary>Iterates items in ascending order. Returns false if iteration was terminated early.</summary>
        internal static bool Iter(Node? n, Func<ulong, bool> f)
        {
            if (n == null)
            {
                return true;
            }

            if (!Iter(n.Left, f))
            {
                return false;
            }

            // Scans all NumEntries candidate bits per node (O(2048) per node),
            // matching the Go reference implementation.
            for (var num = n.Base; num < n.Base + NumEntries; num++)
            {
                if (n.ExistsBit(num))
                {
                    if (!f(num))
                    {
                        return false;
                    }
                }
            }

            return Iter(n.Right, f);
        }
    }
}
|
||||
|
||||
@@ -1,7 +1,650 @@
|
||||
// Go reference: server/gsl/gsl.go
|
||||
// Trie-based generic subject list with wildcard support for NATS subject matching.
|
||||
|
||||
namespace NATS.Server.Internal.Gsl;
|
||||
|
||||
// Go reference: server/gsl/gsl.go
|
||||
// TODO: Port generic trie-based subject list
|
||||
/// <summary>
/// Sublist related errors.
/// </summary>
public static class GslErrors
{
    // NOTE(review): these are shared, pre-constructed exception instances (mirroring
    // Go's sentinel error values). Re-throwing the same instance from multiple call
    // sites mutates its stack trace and is not thread-safe; confirm whether callers
    // rely on reference equality before converting these to factory properties.

    // Thrown when a subject has an empty token or tokens after a full wildcard '>'
    // (see Insert).
    public static readonly InvalidOperationException InvalidSubject = new("gsl: invalid subject");

    // Presumably used by removal when no matching subscription exists — the
    // throwing site is not visible in this chunk; verify against RemoveInternal.
    public static readonly KeyNotFoundException NotFound = new("gsl: no matches found");
}
|
||||
|
||||
/// <summary>
/// A level represents a group of nodes and special pointers to wildcard nodes.
/// Literal tokens live in <see cref="Nodes"/>; the single-token wildcard '*' and
/// the full wildcard '>' each get a dedicated slot.
/// Go reference: server/gsl/gsl.go level struct
/// </summary>
internal sealed class Level<T> where T : IEquatable<T>
{
    public Dictionary<string, Node<T>> Nodes { get; } = new();
    public Node<T>? Pwc { get; set; } // partial wildcard '*'
    public Node<T>? Fwc { get; set; } // full wildcard '>'

    /// <summary>Total child count: literal nodes plus any wildcard slots in use.</summary>
    public int NumNodes() =>
        Nodes.Count + (Pwc is not null ? 1 : 0) + (Fwc is not null ? 1 : 0);

    /// <summary>
    /// Prune an empty node from the tree. Wildcard slots are matched by reference;
    /// anything else is removed from the literal map by its token.
    /// Go reference: server/gsl/gsl.go pruneNode
    /// </summary>
    public void PruneNode(Node<T> n, string token)
    {
        if (ReferenceEquals(n, Fwc))
        {
            Fwc = null;
            return;
        }

        if (ReferenceEquals(n, Pwc))
        {
            Pwc = null;
            return;
        }

        Nodes.Remove(token);
    }
}
|
||||
|
||||
/// <summary>
/// A node contains subscriptions and a pointer to the next level.
/// Go reference: server/gsl/gsl.go node struct
/// </summary>
internal sealed class Node<T> where T : IEquatable<T>
{
    // Next trie level (one level per subject token); null until a deeper token
    // is inserted through this node.
    public Level<T>? Next { get; set; }

    // Subscriptions terminating at this node, keyed by subscriber value, mapping
    // to the full subject string they were registered under.
    public Dictionary<T, string> Subs { get; } = new(); // value -> subject

    /// <summary>
    /// Returns true if the node has no subscriptions and no children.
    /// Go reference: server/gsl/gsl.go isEmpty
    /// </summary>
    public bool IsEmpty() => Subs.Count == 0 && (Next is null || Next.NumNodes() == 0);
}
|
||||
|
||||
/// <summary>
/// Tracks descent into levels during removal for pruning.
/// L is the level descended from, N the node selected at that level, and T_ the
/// token that selected N (trailing underscore avoids clashing with the type
/// parameter T).
/// Go reference: server/gsl/gsl.go lnt struct
/// </summary>
internal readonly record struct Lnt<T>(Level<T> L, Node<T> N, string T_) where T : IEquatable<T>;
|
||||
|
||||
/// <summary>
/// A GenericSubjectList stores and efficiently retrieves subscriptions using a trie.
/// Supports wildcard subjects: '*' matches a single token, '>' matches one or more tokens.
/// Thread-safe via ReaderWriterLockSlim.
/// Go reference: server/gsl/gsl.go GenericSublist
/// </summary>
public class GenericSubjectList<T> where T : IEquatable<T>
{
    // Subject grammar characters.
    private const char Pwc = '*';   // partial wildcard: matches exactly one token
    private const char Fwc = '>';   // full wildcard: matches one or more trailing tokens
    private const char Btsep = '.'; // token separator

    // Guards _root and _count: reads take the read lock, mutations the write lock.
    private readonly ReaderWriterLockSlim _lock = new();
    private readonly Level<T> _root = new();
    private uint _count;
|
||||
|
||||
    /// <summary>
    /// Returns the number of subscriptions.
    /// Takes the read lock, so the value is a consistent snapshot (it may be
    /// stale by the time the caller uses it).
    /// Go reference: server/gsl/gsl.go Count
    /// </summary>
    public uint Count
    {
        get
        {
            _lock.EnterReadLock();
            try
            {
                return _count;
            }
            finally
            {
                _lock.ExitReadLock();
            }
        }
    }
|
||||
|
||||
    /// <summary>
    /// Insert adds a subscription into the sublist.
    /// Descends the trie one token at a time, creating nodes/levels as needed,
    /// and records the value at the final node.
    /// Go reference: server/gsl/gsl.go Insert
    /// </summary>
    /// <exception cref="InvalidOperationException">
    /// Thrown (as GslErrors.InvalidSubject) for an empty token or for any token
    /// following a full wildcard '>'. The write lock is released by the finally.
    /// </exception>
    public void Insert(string subject, T value)
    {
        _lock.EnterWriteLock();
        try
        {
            var sfwc = false; // saw full wildcard — nothing may follow '>'
            Node<T>? n = null;
            var l = _root;

            foreach (var token in TokenizeSubject(subject))
            {
                var lt = token.Length;
                if (lt == 0 || sfwc)
                    throw GslErrors.InvalidSubject;

                // Single-character tokens may be wildcards; longer tokens are
                // always literals.
                if (lt > 1)
                {
                    l.Nodes.TryGetValue(token, out n);
                }
                else
                {
                    switch (token[0])
                    {
                        case Pwc:
                            n = l.Pwc;
                            break;
                        case Fwc:
                            n = l.Fwc;
                            sfwc = true;
                            break;
                        default:
                            l.Nodes.TryGetValue(token, out n);
                            break;
                    }
                }

                // Create and attach the node in the matching slot if absent.
                if (n is null)
                {
                    n = new Node<T>();
                    if (lt > 1)
                    {
                        l.Nodes[token] = n;
                    }
                    else
                    {
                        switch (token[0])
                        {
                            case Pwc:
                                l.Pwc = n;
                                break;
                            case Fwc:
                                l.Fwc = n;
                                break;
                            default:
                                l.Nodes[token] = n;
                                break;
                        }
                    }
                }

                n.Next ??= new Level<T>();
                l = n.Next;
            }

            // n should never be null here if subject was valid (non-empty).
            // NOTE(review): if TokenizeSubject (not visible in this chunk) can
            // yield zero tokens for an empty subject, `n!` would NRE — confirm
            // it throws or yields at least one token.
            n!.Subs[value] = subject;
            _count++;
        }
        finally
        {
            _lock.ExitWriteLock();
        }
    }
|
||||
|
||||
    /// <summary>
    /// Remove will remove a subscription.
    /// Thin locking wrapper over RemoveInternal, which does the trie descent
    /// and pruning under the write lock.
    /// Go reference: server/gsl/gsl.go Remove
    /// </summary>
    public void Remove(string subject, T value)
    {
        _lock.EnterWriteLock();
        try
        {
            RemoveInternal(subject, value);
        }
        finally
        {
            _lock.ExitWriteLock();
        }
    }
|
||||
|
||||
/// <summary>
|
||||
/// Match will match all entries to the literal subject and invoke the callback for each.
|
||||
/// Go reference: server/gsl/gsl.go Match
|
||||
/// </summary>
|
||||
public void Match(string subject, Action<T> callback)
|
||||
{
|
||||
MatchInternal(subject, callback, doLock: true);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// MatchBytes will match all entries to the literal subject (as bytes) and invoke the callback for each.
|
||||
/// Go reference: server/gsl/gsl.go MatchBytes
|
||||
/// </summary>
|
||||
public void MatchBytes(ReadOnlySpan<byte> subject, Action<T> callback)
|
||||
{
|
||||
// Convert bytes to string then delegate
|
||||
var subjectStr = System.Text.Encoding.UTF8.GetString(subject);
|
||||
MatchInternal(subjectStr, callback, doLock: true);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// HasInterest will return whether or not there is any interest in the subject.
|
||||
/// Go reference: server/gsl/gsl.go HasInterest
|
||||
/// </summary>
|
||||
public bool HasInterest(string subject)
|
||||
{
|
||||
return HasInterestInternal(subject, doLock: true, np: null);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// NumInterest will return the number of subs interested in the subject.
|
||||
/// Go reference: server/gsl/gsl.go NumInterest
|
||||
/// </summary>
|
||||
public int NumInterest(string subject)
|
||||
{
|
||||
var np = new int[1]; // use array to pass by reference
|
||||
HasInterestInternal(subject, doLock: true, np: np);
|
||||
return np[0];
|
||||
}
|
||||
|
||||
    /// <summary>
    /// HasInterestStartingIn is a helper for subject tree intersection.
    /// Go reference: server/gsl/gsl.go HasInterestStartingIn
    /// </summary>
    public bool HasInterestStartingIn(string subject)
    {
        _lock.EnterReadLock();
        try
        {
            // NOTE(review): 64-slot buffer here vs. 32 in MatchInternal /
            // HasInterestInternal — confirm the asymmetry is intentional
            // (carried over from the Go original) rather than an oversight.
            Span<string> tokenBuffer = new string[64];
            var tokens = TokenizeSubjectIntoSpan(subject, tokenBuffer);
            return HasInterestStartingInLevel(_root, tokens);
        }
        finally
        {
            _lock.ExitReadLock();
        }
    }
|
||||
|
||||
/// <summary>
|
||||
/// Returns the maximum number of levels in the trie. Used for testing.
|
||||
/// Go reference: server/gsl/gsl.go numLevels
|
||||
/// </summary>
|
||||
internal int NumLevels()
|
||||
{
|
||||
return VisitLevel(_root, 0);
|
||||
}
|
||||
|
||||
// --- Private implementation ---
|
||||
|
||||
    /// <summary>
    /// Core match implementation: tokenizes, then walks the trie under the read
    /// lock (when doLock is true; callers already holding the lock pass false).
    /// Invalid subjects (empty token, trailing separator, or more tokens than the
    /// buffer holds) silently match nothing.
    /// Go reference: server/gsl/gsl.go match
    /// </summary>
    private void MatchInternal(string subject, Action<T> callback, bool doLock)
    {
        Span<string> tokenBuffer = new string[32];
        var tokens = TokenizeSubjectForMatch(subject, tokenBuffer);
        if (tokens.Length == 0)
            return;

        if (doLock) _lock.EnterReadLock();
        try
        {
            MatchLevel(_root, tokens, callback);
        }
        finally
        {
            if (doLock) _lock.ExitReadLock();
        }
    }
|
||||
|
||||
    /// <summary>
    /// Core interest check: like MatchInternal but short-circuits on the first
    /// match. When np is non-null its first element accumulates the number of
    /// matching subscriptions (see NumInterest).
    /// Go reference: server/gsl/gsl.go hasInterest
    /// </summary>
    private bool HasInterestInternal(string subject, bool doLock, int[]? np)
    {
        Span<string> tokenBuffer = new string[32];
        var tokens = TokenizeSubjectForMatch(subject, tokenBuffer);
        if (tokens.Length == 0)
            return false;

        if (doLock) _lock.EnterReadLock();
        try
        {
            return MatchLevelForAny(_root, tokens, np);
        }
        finally
        {
            if (doLock) _lock.ExitReadLock();
        }
    }
|
||||
|
||||
    /// <summary>
    /// Tokenize a subject for match/hasInterest. Returns empty span for invalid subjects
    /// (empty tokens or trailing separator).
    /// Note that a subject with more tokens than the buffer holds is also reported as
    /// empty — i.e. silently treated as "no match" rather than an error.
    /// Go reference: server/gsl/gsl.go match (tokenization section)
    /// </summary>
    private static ReadOnlySpan<string> TokenizeSubjectForMatch(string subject, Span<string> buffer)
    {
        var count = 0;
        var start = 0;
        for (var i = 0; i < subject.Length; i++)
        {
            if (subject[i] == Btsep)
            {
                if (i - start == 0)
                    return ReadOnlySpan<string>.Empty; // empty token
                if (count >= buffer.Length)
                    return ReadOnlySpan<string>.Empty;
                buffer[count++] = subject[start..i];
                start = i + 1;
            }
        }

        // start == subject.Length means the subject ended with a separator
        // (or was empty) — invalid.
        if (start >= subject.Length)
            return ReadOnlySpan<string>.Empty; // trailing separator

        if (count >= buffer.Length)
            return ReadOnlySpan<string>.Empty;
        buffer[count++] = subject[start..];
        return buffer[..count];
    }
|
||||
|
||||
/// <summary>
|
||||
/// Tokenize a subject into a span (does not validate empty tokens).
|
||||
/// Go reference: server/gsl/gsl.go tokenizeSubjectIntoSlice
|
||||
/// </summary>
|
||||
private static ReadOnlySpan<string> TokenizeSubjectIntoSpan(string subject, Span<string> buffer)
|
||||
{
|
||||
var count = 0;
|
||||
var start = 0;
|
||||
for (var i = 0; i < subject.Length; i++)
|
||||
{
|
||||
if (subject[i] == Btsep)
|
||||
{
|
||||
if (count >= buffer.Length) break;
|
||||
buffer[count++] = subject[start..i];
|
||||
start = i + 1;
|
||||
}
|
||||
}
|
||||
|
||||
if (count < buffer.Length)
|
||||
buffer[count++] = subject[start..];
|
||||
return buffer[..count];
|
||||
}
|
||||
|
||||
    /// <summary>
    /// Recursively descend into the trie to match subscriptions.
    /// At each level: a '>' node matches everything remaining; a '*' node forks a
    /// recursive match on the rest of the tokens; the literal branch continues the
    /// main loop. Terminal literal and '*' nodes fire after the loop.
    /// Go reference: server/gsl/gsl.go matchLevel
    /// </summary>
    private static void MatchLevel(Level<T>? l, ReadOnlySpan<string> toks, Action<T> cb)
    {
        Node<T>? pwc = null;
        Node<T>? n = null;
        for (var i = 0; i < toks.Length; i++)
        {
            if (l is null) return;

            // Full wildcard at this depth matches the whole remaining subject.
            if (l.Fwc is not null)
                CallbacksForResults(l.Fwc, cb);

            // '*' consumes exactly this token; recurse on the remainder.
            pwc = l.Pwc;
            if (pwc is not null)
                MatchLevel(pwc.Next, toks[(i + 1)..], cb);

            l.Nodes.TryGetValue(toks[i], out n);
            l = n?.Next;
        }

        // Subscriptions terminating exactly at the last token.
        if (n is not null)
            CallbacksForResults(n, cb);
        if (pwc is not null)
            CallbacksForResults(pwc, cb);
    }
|
||||
|
||||
    /// <summary>
    /// Recursively check if any subscription matches the token list without
    /// collecting results (optimization over a full Match).
    /// When <paramref name="np"/> is non-null, np[0] accumulates the number of
    /// subscriptions encountered on matching terminal nodes.
    /// Go reference: server/gsl/gsl.go matchLevelForAny
    /// </summary>
    private static bool MatchLevelForAny(Level<T>? l, ReadOnlySpan<string> toks, int[]? np)
    {
        Node<T>? pwc = null;
        Node<T>? n = null;
        for (var i = 0; i < toks.Length; i++)
        {
            // Ran off the end of the trie: no match possible.
            if (l is null) return false;

            // '>' matches the remainder of the subject outright.
            if (l.Fwc is not null)
            {
                if (np is not null)
                    np[0] += l.Fwc.Subs.Count;
                return true;
            }

            // '*' branch: try the remaining tokens one level down.
            pwc = l.Pwc;
            if (pwc is not null)
            {
                if (MatchLevelForAny(pwc.Next, toks[(i + 1)..], np))
                    return true;
            }

            // Continue descent on the literal token (n stays null on miss).
            l.Nodes.TryGetValue(toks[i], out n);
            l = n?.Next;
        }

        // All tokens consumed: a terminal literal node with subscribers matches.
        if (n is not null)
        {
            if (np is not null)
                np[0] += n.Subs.Count;
            if (n.Subs.Count > 0)
                return true;
        }

        // Otherwise a terminal '*' node with subscribers matches.
        if (pwc is not null)
        {
            if (np is not null)
                np[0] += pwc.Subs.Count;
            return pwc.Subs.Count > 0;
        }

        return false;
    }
|
||||
|
||||
/// <summary>
|
||||
/// Invoke callback for each subscription in a node.
|
||||
/// Go reference: server/gsl/gsl.go callbacksForResults
|
||||
/// </summary>
|
||||
private static void CallbacksForResults(Node<T> n, Action<T> cb)
|
||||
{
|
||||
foreach (var sub in n.Subs.Keys)
|
||||
cb(sub);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Internal remove with lock already held.
|
||||
/// Go reference: server/gsl/gsl.go remove
|
||||
/// </summary>
|
||||
private void RemoveInternal(string subject, T value)
|
||||
{
|
||||
var sfwc = false;
|
||||
Node<T>? n = null;
|
||||
Level<T>? l = _root;
|
||||
|
||||
// Track levels for pruning
|
||||
Span<Lnt<T>> levelsBuffer = new Lnt<T>[32];
|
||||
var levelCount = 0;
|
||||
|
||||
foreach (var token in TokenizeSubject(subject))
|
||||
{
|
||||
var lt = token.Length;
|
||||
if (lt == 0 || sfwc)
|
||||
throw GslErrors.InvalidSubject;
|
||||
|
||||
if (l is null)
|
||||
throw GslErrors.NotFound;
|
||||
|
||||
if (lt > 1)
|
||||
{
|
||||
l.Nodes.TryGetValue(token, out n);
|
||||
}
|
||||
else
|
||||
{
|
||||
switch (token[0])
|
||||
{
|
||||
case Pwc:
|
||||
n = l.Pwc;
|
||||
break;
|
||||
case Fwc:
|
||||
n = l.Fwc;
|
||||
sfwc = true;
|
||||
break;
|
||||
default:
|
||||
l.Nodes.TryGetValue(token, out n);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (n is not null)
|
||||
{
|
||||
levelsBuffer[levelCount++] = new Lnt<T>(l, n, token);
|
||||
l = n.Next;
|
||||
}
|
||||
else
|
||||
{
|
||||
l = null;
|
||||
}
|
||||
}
|
||||
|
||||
if (!RemoveFromNode(n, value))
|
||||
throw GslErrors.NotFound;
|
||||
|
||||
_count--;
|
||||
|
||||
// Prune empty nodes
|
||||
for (var i = levelCount - 1; i >= 0; i--)
|
||||
{
|
||||
var lnt = levelsBuffer[i];
|
||||
if (lnt.N.IsEmpty())
|
||||
lnt.L.PruneNode(lnt.N, lnt.T_);
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Remove the value from the given node.
|
||||
/// Go reference: server/gsl/gsl.go removeFromNode
|
||||
/// </summary>
|
||||
private static bool RemoveFromNode(Node<T>? n, T value)
|
||||
{
|
||||
if (n is null) return false;
|
||||
return n.Subs.Remove(value);
|
||||
}
|
||||
|
||||
    /// <summary>
    /// Recursively check if there is any interest at or below a subject prefix.
    /// An exhausted token list or a '&gt;' node at the current level means the
    /// prefix is covered; otherwise both the '*' branch and the literal branch
    /// are tried with the remaining tokens.
    /// Go reference: server/gsl/gsl.go hasInterestStartingIn
    /// </summary>
    private static bool HasInterestStartingInLevel(Level<T>? l, ReadOnlySpan<string> tokens)
    {
        if (l is null) return false;
        // All prefix tokens matched a path in the trie: there is interest here.
        if (tokens.Length == 0) return true;

        var token = tokens[0];
        // '>' at this level covers everything under the prefix.
        if (l.Fwc is not null) return true;

        var found = false;
        // '*' matches the current token; recurse with the rest.
        if (l.Pwc is not null)
            found = HasInterestStartingInLevel(l.Pwc.Next, tokens[1..]);
        // Literal branch, only if the wildcard branch did not already match.
        if (!found && l.Nodes.TryGetValue(token, out var n))
            found = HasInterestStartingInLevel(n.Next, tokens[1..]);
        return found;
    }
|
||||
|
||||
/// <summary>
|
||||
/// Visit levels recursively to compute max depth.
|
||||
/// Go reference: server/gsl/gsl.go visitLevel
|
||||
/// </summary>
|
||||
private static int VisitLevel(Level<T>? l, int depth)
|
||||
{
|
||||
if (l is null || l.NumNodes() == 0)
|
||||
return depth;
|
||||
|
||||
depth++;
|
||||
var maxDepth = depth;
|
||||
|
||||
foreach (var n in l.Nodes.Values)
|
||||
{
|
||||
var newDepth = VisitLevel(n.Next, depth);
|
||||
if (newDepth > maxDepth)
|
||||
maxDepth = newDepth;
|
||||
}
|
||||
|
||||
if (l.Pwc is not null)
|
||||
{
|
||||
var pwcDepth = VisitLevel(l.Pwc.Next, depth);
|
||||
if (pwcDepth > maxDepth)
|
||||
maxDepth = pwcDepth;
|
||||
}
|
||||
|
||||
if (l.Fwc is not null)
|
||||
{
|
||||
var fwcDepth = VisitLevel(l.Fwc.Next, depth);
|
||||
if (fwcDepth > maxDepth)
|
||||
maxDepth = fwcDepth;
|
||||
}
|
||||
|
||||
return maxDepth;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Tokenize a subject by splitting on '.'. Returns an enumerable of tokens.
|
||||
/// Used by Insert and Remove.
|
||||
/// </summary>
|
||||
private static SplitEnumerable TokenizeSubject(string subject)
|
||||
{
|
||||
return new SplitEnumerable(subject);
|
||||
}
|
||||
|
||||
    /// <summary>
    /// A stack-friendly (ref struct) subject tokenizer that splits on '.'.
    /// Only supports foreach enumeration via its custom enumerator; it does
    /// not implement IEnumerable, so no allocations occur.
    /// </summary>
    private readonly ref struct SplitEnumerable
    {
        // Subject to tokenize; empty tokens are yielded for repeated separators.
        private readonly string _subject;

        public SplitEnumerable(string subject) => _subject = subject;

        public SplitEnumerator GetEnumerator() => new(_subject);
    }
|
||||
|
||||
private ref struct SplitEnumerator
|
||||
{
|
||||
private readonly string _subject;
|
||||
private int _start;
|
||||
private bool _done;
|
||||
|
||||
public SplitEnumerator(string subject)
|
||||
{
|
||||
_subject = subject;
|
||||
_start = 0;
|
||||
_done = false;
|
||||
Current = default!;
|
||||
}
|
||||
|
||||
public string Current { get; private set; }
|
||||
|
||||
public bool MoveNext()
|
||||
{
|
||||
if (_done) return false;
|
||||
|
||||
var idx = _subject.IndexOf(Btsep, _start);
|
||||
if (idx >= 0)
|
||||
{
|
||||
Current = _subject[_start..idx];
|
||||
_start = idx + 1;
|
||||
return true;
|
||||
}
|
||||
|
||||
Current = _subject[_start..];
|
||||
_done = true;
|
||||
return true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
/// SimpleSubjectList is an alias for GenericSubjectList that uses int values,
/// useful for tracking interest only (the value payload carries no meaning).
/// Go reference: server/gsl/gsl.go SimpleSublist
/// </summary>
public class SimpleSubjectList : GenericSubjectList<int>;
|
||||
|
||||
649
src/NATS.Server/Internal/SubjectTree/Nodes.cs
Normal file
649
src/NATS.Server/Internal/SubjectTree/Nodes.cs
Normal file
@@ -0,0 +1,649 @@
|
||||
// Go reference: server/stree/node.go, leaf.go, node4.go, node10.go, node16.go, node48.go, node256.go
|
||||
namespace NATS.Server.Internal.SubjectTree;
|
||||
|
||||
/// <summary>
/// Internal node interface for the Adaptive Radix Tree. Implemented by the
/// leaf type and the fixed-capacity internal node tiers (4/10/16/48/256).
/// </summary>
internal interface INode
{
    /// <summary>True for <c>Leaf&lt;T&gt;</c>; false for all internal node tiers.</summary>
    bool IsLeaf { get; }
    /// <summary>Prefix/size metadata; null for leaves.</summary>
    NodeMeta? Base { get; }
    /// <summary>Replace this node's compressed path prefix.</summary>
    void SetPrefix(ReadOnlySpan<byte> pre);
    /// <summary>Add a child keyed by pivot byte <paramref name="c"/>. Callers must check IsFull first.</summary>
    void AddChild(byte c, INode n);
    /// <summary>
    /// Returns the child node for the given key byte, or null if not found.
    /// The returned wrapper allows in-place replacement of the child reference.
    /// </summary>
    ChildRef? FindChild(byte c);
    /// <summary>Remove the child keyed by <paramref name="c"/>; no-op if absent.</summary>
    void DeleteChild(byte c);
    /// <summary>True when no more children can be added at this tier.</summary>
    bool IsFull { get; }
    /// <summary>Return a larger-tier node containing the same children.</summary>
    INode Grow();
    /// <summary>Return a smaller-tier replacement when sparse enough, else null.</summary>
    INode? Shrink();
    /// <summary>Match wildcard filter parts against this node's fragment.</summary>
    (ReadOnlyMemory<byte>[] RemainingParts, bool Matched) MatchParts(ReadOnlyMemory<byte>[] parts);
    /// <summary>Human-readable node tier name (e.g. "NODE4", "LEAF").</summary>
    string Kind { get; }
    /// <summary>Visit children; the visitor returns false to stop iteration.</summary>
    void Iter(Func<INode, bool> f);
    /// <summary>Snapshot of child slots (tier-specific length and ordering).</summary>
    INode?[] Children();
    /// <summary>Current number of children.</summary>
    ushort NumChildren { get; }
    /// <summary>This node's path fragment: prefix for internal nodes, suffix for leaves.</summary>
    byte[] Path();
}
|
||||
|
||||
/// <summary>
/// Wrapper that allows in-place replacement of a child reference in a node.
/// This is analogous to Go's *node pointer.
/// </summary>
internal sealed class ChildRef
{
    private readonly Func<INode?> _read;
    private readonly Action<INode?> _write;

    public ChildRef(Func<INode?> getter, Action<INode?> setter)
    {
        _read = getter;
        _write = setter;
    }

    /// <summary>Reads or overwrites the underlying child slot.</summary>
    public INode? Node
    {
        get => _read();
        set => _write(value);
    }
}
|
||||
|
||||
/// <summary>
/// Base metadata shared by internal (non-leaf) ART nodes.
/// </summary>
internal sealed class NodeMeta
{
    // Compressed path prefix leading to this node.
    public byte[] Prefix { get; set; } = [];
    // Current number of children.
    public ushort Size { get; set; }
}
|
||||
|
||||
#region Leaf Node
|
||||
|
||||
/// <summary>
/// Leaf node holding a value and the remaining subject suffix.
/// Go reference: server/stree/leaf.go
/// </summary>
internal sealed class Leaf<T> : INode
{
    // Stored value for the full subject ending at this leaf.
    public T Value;
    // Remaining subject bytes not consumed by ancestor prefixes.
    public byte[] Suffix;

    public Leaf(ReadOnlySpan<byte> suffix, T value)
    {
        Value = value;
        Suffix = Parts.CopyBytes(suffix);
    }

    public bool IsLeaf => true;
    public NodeMeta? Base => null;
    // A leaf has no child slots, so it is always "full".
    public bool IsFull => true;
    public ushort NumChildren => 0;
    public string Kind => "LEAF";

    /// <summary>Exact byte-wise match of the remaining subject against the suffix.</summary>
    public bool Match(ReadOnlySpan<byte> subject) => subject.SequenceEqual(Suffix);

    /// <summary>Replace the stored suffix with a copy of the given bytes.</summary>
    public void SetSuffix(ReadOnlySpan<byte> suffix) => Suffix = Parts.CopyBytes(suffix);

    // NOTE: returns the internal array, not a copy — callers must not mutate it.
    public byte[] Path() => Suffix;

    public INode?[] Children() => [];

    public void Iter(Func<INode, bool> f) { }

    public (ReadOnlyMemory<byte>[] RemainingParts, bool Matched) MatchParts(ReadOnlyMemory<byte>[] parts)
        => Parts.MatchPartsAgainstFragment(parts, Suffix);

    // These should not be called on a leaf.
    public void SetPrefix(ReadOnlySpan<byte> pre) => throw new InvalidOperationException("setPrefix called on leaf");
    public void AddChild(byte c, INode n) => throw new InvalidOperationException("addChild called on leaf");
    public ChildRef? FindChild(byte c) => throw new InvalidOperationException("findChild called on leaf");
    public INode Grow() => throw new InvalidOperationException("grow called on leaf");
    public void DeleteChild(byte c) => throw new InvalidOperationException("deleteChild called on leaf");
    public INode? Shrink() => throw new InvalidOperationException("shrink called on leaf");
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Node4
|
||||
|
||||
/// <summary>
/// Node with up to 4 children. Keys and children live in parallel arrays
/// scanned linearly; deletion swaps the last entry into the freed slot, so
/// child order is not preserved.
/// Go reference: server/stree/node4.go
/// </summary>
internal sealed class Node4 : INode
{
    private readonly INode?[] _child = new INode?[4];
    private readonly byte[] _key = new byte[4];
    internal readonly NodeMeta Meta = new();

    public Node4(ReadOnlySpan<byte> prefix)
    {
        SetPrefix(prefix);
    }

    public bool IsLeaf => false;
    public NodeMeta? Base => Meta;
    public ushort NumChildren => Meta.Size;
    public bool IsFull => Meta.Size >= 4;
    public string Kind => "NODE4";
    public byte[] Path() => Meta.Prefix;

    public void SetPrefix(ReadOnlySpan<byte> pre)
    {
        Meta.Prefix = pre.ToArray();
    }

    // Appends to the next free slot; caller must check IsFull first.
    public void AddChild(byte c, INode n)
    {
        if (Meta.Size >= 4) throw new InvalidOperationException("node4 full!");
        _key[Meta.Size] = c;
        _child[Meta.Size] = n;
        Meta.Size++;
    }

    public ChildRef? FindChild(byte c)
    {
        for (int i = 0; i < Meta.Size; i++)
        {
            if (_key[i] == c)
            {
                // Capture a stable copy of the index for the closures.
                var idx = i;
                return new ChildRef(() => _child[idx], v => _child[idx] = v);
            }
        }
        return null;
    }

    // Swap-with-last deletion keeps the occupied slots dense.
    public void DeleteChild(byte c)
    {
        for (int i = 0; i < Meta.Size; i++)
        {
            if (_key[i] == c)
            {
                var last = Meta.Size - 1;
                if (i < last)
                {
                    _key[i] = _key[last];
                    _child[i] = _child[last];
                    _key[last] = 0;
                    _child[last] = null;
                }
                else
                {
                    _key[i] = 0;
                    _child[i] = null;
                }
                Meta.Size--;
                return;
            }
        }
    }

    // Only called when full, so all 4 slots are populated.
    public INode Grow()
    {
        var nn = new Node10(Meta.Prefix);
        for (int i = 0; i < 4; i++)
        {
            nn.AddChild(_key[i], _child[i]!);
        }
        return nn;
    }

    // With a single child remaining, collapse to that child directly.
    public INode? Shrink()
    {
        if (Meta.Size == 1) return _child[0];
        return null;
    }

    public void Iter(Func<INode, bool> f)
    {
        for (int i = 0; i < Meta.Size; i++)
        {
            if (!f(_child[i]!)) return;
        }
    }

    public INode?[] Children()
    {
        var result = new INode?[Meta.Size];
        Array.Copy(_child, result, Meta.Size);
        return result;
    }

    public (ReadOnlyMemory<byte>[] RemainingParts, bool Matched) MatchParts(ReadOnlyMemory<byte>[] parts)
        => Parts.MatchPartsAgainstFragment(parts, Meta.Prefix);
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Node10
|
||||
|
||||
/// <summary>
/// Node with up to 10 children. Optimized for numeric subject tokens (0-9).
/// Same linear-scan / swap-delete layout as Node4.
/// Go reference: server/stree/node10.go
/// </summary>
internal sealed class Node10 : INode
{
    private readonly INode?[] _child = new INode?[10];
    private readonly byte[] _key = new byte[10];
    internal readonly NodeMeta Meta = new();

    public Node10(ReadOnlySpan<byte> prefix)
    {
        SetPrefix(prefix);
    }

    public bool IsLeaf => false;
    public NodeMeta? Base => Meta;
    public ushort NumChildren => Meta.Size;
    public bool IsFull => Meta.Size >= 10;
    public string Kind => "NODE10";
    public byte[] Path() => Meta.Prefix;

    public void SetPrefix(ReadOnlySpan<byte> pre)
    {
        Meta.Prefix = pre.ToArray();
    }

    // Appends to the next free slot; caller must check IsFull first.
    public void AddChild(byte c, INode n)
    {
        if (Meta.Size >= 10) throw new InvalidOperationException("node10 full!");
        _key[Meta.Size] = c;
        _child[Meta.Size] = n;
        Meta.Size++;
    }

    public ChildRef? FindChild(byte c)
    {
        for (int i = 0; i < Meta.Size; i++)
        {
            if (_key[i] == c)
            {
                // Capture a stable copy of the index for the closures.
                var idx = i;
                return new ChildRef(() => _child[idx], v => _child[idx] = v);
            }
        }
        return null;
    }

    // Swap-with-last deletion keeps the occupied slots dense.
    public void DeleteChild(byte c)
    {
        for (int i = 0; i < Meta.Size; i++)
        {
            if (_key[i] == c)
            {
                var last = Meta.Size - 1;
                if (i < last)
                {
                    _key[i] = _key[last];
                    _child[i] = _child[last];
                    _key[last] = 0;
                    _child[last] = null;
                }
                else
                {
                    _key[i] = 0;
                    _child[i] = null;
                }
                Meta.Size--;
                return;
            }
        }
    }

    // Only called when full, so all 10 slots are populated.
    public INode Grow()
    {
        var nn = new Node16(Meta.Prefix);
        for (int i = 0; i < 10; i++)
        {
            nn.AddChild(_key[i], _child[i]!);
        }
        return nn;
    }

    // Downgrade to Node4 once small enough; prefix is set by the caller.
    public INode? Shrink()
    {
        if (Meta.Size > 4) return null;
        var nn = new Node4([]);
        for (int i = 0; i < Meta.Size; i++)
        {
            nn.AddChild(_key[i], _child[i]!);
        }
        return nn;
    }

    public void Iter(Func<INode, bool> f)
    {
        for (int i = 0; i < Meta.Size; i++)
        {
            if (!f(_child[i]!)) return;
        }
    }

    public INode?[] Children()
    {
        var result = new INode?[Meta.Size];
        Array.Copy(_child, result, Meta.Size);
        return result;
    }

    public (ReadOnlyMemory<byte>[] RemainingParts, bool Matched) MatchParts(ReadOnlyMemory<byte>[] parts)
        => Parts.MatchPartsAgainstFragment(parts, Meta.Prefix);
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Node16
|
||||
|
||||
/// <summary>
/// Node with up to 16 children. Same linear-scan / swap-delete layout as Node4.
/// Go reference: server/stree/node16.go
/// </summary>
internal sealed class Node16 : INode
{
    private readonly INode?[] _child = new INode?[16];
    private readonly byte[] _key = new byte[16];
    internal readonly NodeMeta Meta = new();

    public Node16(ReadOnlySpan<byte> prefix)
    {
        SetPrefix(prefix);
    }

    public bool IsLeaf => false;
    public NodeMeta? Base => Meta;
    public ushort NumChildren => Meta.Size;
    public bool IsFull => Meta.Size >= 16;
    public string Kind => "NODE16";
    public byte[] Path() => Meta.Prefix;

    public void SetPrefix(ReadOnlySpan<byte> pre)
    {
        Meta.Prefix = pre.ToArray();
    }

    // Appends to the next free slot; caller must check IsFull first.
    public void AddChild(byte c, INode n)
    {
        if (Meta.Size >= 16) throw new InvalidOperationException("node16 full!");
        _key[Meta.Size] = c;
        _child[Meta.Size] = n;
        Meta.Size++;
    }

    public ChildRef? FindChild(byte c)
    {
        for (int i = 0; i < Meta.Size; i++)
        {
            if (_key[i] == c)
            {
                // Capture a stable copy of the index for the closures.
                var idx = i;
                return new ChildRef(() => _child[idx], v => _child[idx] = v);
            }
        }
        return null;
    }

    // Swap-with-last deletion keeps the occupied slots dense.
    public void DeleteChild(byte c)
    {
        for (int i = 0; i < Meta.Size; i++)
        {
            if (_key[i] == c)
            {
                var last = Meta.Size - 1;
                if (i < last)
                {
                    _key[i] = _key[last];
                    _child[i] = _child[last];
                    _key[last] = 0;
                    _child[last] = null;
                }
                else
                {
                    _key[i] = 0;
                    _child[i] = null;
                }
                Meta.Size--;
                return;
            }
        }
    }

    // Only called when full, so all 16 slots are populated.
    public INode Grow()
    {
        var nn = new Node48(Meta.Prefix);
        for (int i = 0; i < 16; i++)
        {
            nn.AddChild(_key[i], _child[i]!);
        }
        return nn;
    }

    // Downgrade to Node10 once small enough; prefix is set by the caller.
    public INode? Shrink()
    {
        if (Meta.Size > 10) return null;
        var nn = new Node10([]);
        for (int i = 0; i < Meta.Size; i++)
        {
            nn.AddChild(_key[i], _child[i]!);
        }
        return nn;
    }

    public void Iter(Func<INode, bool> f)
    {
        for (int i = 0; i < Meta.Size; i++)
        {
            if (!f(_child[i]!)) return;
        }
    }

    public INode?[] Children()
    {
        var result = new INode?[Meta.Size];
        Array.Copy(_child, result, Meta.Size);
        return result;
    }

    public (ReadOnlyMemory<byte>[] RemainingParts, bool Matched) MatchParts(ReadOnlyMemory<byte>[] parts)
        => Parts.MatchPartsAgainstFragment(parts, Meta.Prefix);
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Node48
|
||||
|
||||
/// <summary>
/// Node with up to 48 children. Uses a 256-byte index array (1-indexed: 0
/// means "no entry") mapping key bytes to dense child slots.
/// Go reference: server/stree/node48.go
/// </summary>
internal sealed class Node48 : INode
{
    internal readonly INode?[] Child = new INode?[48];
    internal readonly byte[] Key = new byte[256]; // 1-indexed: 0 means no entry
    internal readonly NodeMeta Meta = new();

    public Node48(ReadOnlySpan<byte> prefix)
    {
        SetPrefix(prefix);
    }

    public bool IsLeaf => false;
    public NodeMeta? Base => Meta;
    public ushort NumChildren => Meta.Size;
    public bool IsFull => Meta.Size >= 48;
    public string Kind => "NODE48";
    public byte[] Path() => Meta.Prefix;

    public void SetPrefix(ReadOnlySpan<byte> pre)
    {
        Meta.Prefix = pre.ToArray();
    }

    // Stores the child in the next dense slot and records its 1-based index.
    public void AddChild(byte c, INode n)
    {
        if (Meta.Size >= 48) throw new InvalidOperationException("node48 full!");
        Child[Meta.Size] = n;
        Key[c] = (byte)(Meta.Size + 1); // 1-indexed
        Meta.Size++;
    }

    public ChildRef? FindChild(byte c)
    {
        var i = Key[c];
        if (i == 0) return null;
        var idx = i - 1;
        return new ChildRef(() => Child[idx], v => Child[idx] = v);
    }

    // Swap-with-last deletion, then rewrite the index entry that pointed at
    // the relocated child (linear scan over the 256-entry key map).
    public void DeleteChild(byte c)
    {
        var i = Key[c];
        if (i == 0) return;
        i--; // Adjust for 1-indexing
        var last = (byte)(Meta.Size - 1);
        if (i < last)
        {
            Child[i] = Child[last];
            for (int ic = 0; ic < 256; ic++)
            {
                if (Key[ic] == last + 1)
                {
                    Key[ic] = (byte)(i + 1);
                    break;
                }
            }
        }
        Child[last] = null;
        Key[c] = 0;
        Meta.Size--;
    }

    // Walk the key map so children are added to Node256 in key-byte order.
    public INode Grow()
    {
        var nn = new Node256(Meta.Prefix);
        for (int c = 0; c < 256; c++)
        {
            var i = Key[c];
            if (i > 0)
            {
                nn.AddChild((byte)c, Child[i - 1]!);
            }
        }
        return nn;
    }

    // Downgrade to Node16 once small enough; prefix is set by the caller.
    public INode? Shrink()
    {
        if (Meta.Size > 16) return null;
        var nn = new Node16([]);
        for (int c = 0; c < 256; c++)
        {
            var i = Key[c];
            if (i > 0)
            {
                nn.AddChild((byte)c, Child[i - 1]!);
            }
        }
        return nn;
    }

    public void Iter(Func<INode, bool> f)
    {
        foreach (var c in Child)
        {
            if (c != null && !f(c)) return;
        }
    }

    // Dense slots [0, Size) are always populated thanks to swap-delete.
    public INode?[] Children()
    {
        var result = new INode?[Meta.Size];
        Array.Copy(Child, result, Meta.Size);
        return result;
    }

    public (ReadOnlyMemory<byte>[] RemainingParts, bool Matched) MatchParts(ReadOnlyMemory<byte>[] parts)
        => Parts.MatchPartsAgainstFragment(parts, Meta.Prefix);
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Node256
|
||||
|
||||
/// <summary>
/// Node with up to 256 children. Direct array indexed by the key byte value.
/// Go reference: server/stree/node256.go
/// </summary>
internal sealed class Node256 : INode
{
    internal readonly INode?[] Child = new INode?[256];
    internal readonly NodeMeta Meta = new();

    public Node256(ReadOnlySpan<byte> prefix)
    {
        SetPrefix(prefix);
    }

    public bool IsLeaf => false;
    public NodeMeta? Base => Meta;
    public ushort NumChildren => Meta.Size;
    public bool IsFull => false; // node256 is never full
    public string Kind => "NODE256";
    public byte[] Path() => Meta.Prefix;

    public void SetPrefix(ReadOnlySpan<byte> pre)
    {
        Meta.Prefix = pre.ToArray();
    }

    // NOTE(review): assumes key c is not already occupied (Size would be
    // over-counted otherwise); the Go original behaves the same way.
    public void AddChild(byte c, INode n)
    {
        Child[c] = n;
        Meta.Size++;
    }

    public ChildRef? FindChild(byte c)
    {
        if (Child[c] == null) return null;
        return new ChildRef(() => Child[c], v => Child[c] = v);
    }

    public void DeleteChild(byte c)
    {
        if (Child[c] != null)
        {
            Child[c] = null;
            Meta.Size--;
        }
    }

    public INode Grow() => throw new InvalidOperationException("grow can not be called on node256");

    // Downgrade to Node48 once small enough; prefix is set by the caller.
    public INode? Shrink()
    {
        if (Meta.Size > 48) return null;
        var nn = new Node48([]);
        for (int c = 0; c < 256; c++)
        {
            if (Child[c] != null)
            {
                nn.AddChild((byte)c, Child[c]!);
            }
        }
        return nn;
    }

    public void Iter(Func<INode, bool> f)
    {
        for (int i = 0; i < 256; i++)
        {
            if (Child[i] != null)
            {
                if (!f(Child[i]!)) return;
            }
        }
    }

    public INode?[] Children()
    {
        // Return the full 256 array, same as Go
        return (INode?[])Child.Clone();
    }

    public (ReadOnlyMemory<byte>[] RemainingParts, bool Matched) MatchParts(ReadOnlyMemory<byte>[] parts)
        => Parts.MatchPartsAgainstFragment(parts, Meta.Prefix);
}
|
||||
|
||||
#endregion
|
||||
243
src/NATS.Server/Internal/SubjectTree/Parts.cs
Normal file
243
src/NATS.Server/Internal/SubjectTree/Parts.cs
Normal file
@@ -0,0 +1,243 @@
|
||||
// Go reference: server/stree/parts.go, server/stree/util.go
|
||||
namespace NATS.Server.Internal.SubjectTree;
|
||||
|
||||
/// <summary>
/// Subject tokenization helpers and wildcard match logic for the ART.
/// Go reference: server/stree/parts.go, server/stree/util.go
/// </summary>
internal static class Parts
{
    // For subject matching.
    internal const byte Pwc = (byte)'*';  // partial wildcard, matches one token
    internal const byte Fwc = (byte)'>';  // full wildcard, matches the remainder
    internal const byte Tsep = (byte)'.'; // token separator

    /// <summary>
    /// No pivot available sentinel value (DEL character). Subjects containing
    /// this byte are rejected on insert so it can never collide with a key.
    /// </summary>
    internal const byte NoPivot = 127;

    /// <summary>
    /// Returns the pivot byte at the given position, or NoPivot if past end.
    /// Go reference: server/stree/util.go:pivot
    /// </summary>
    internal static byte Pivot(ReadOnlySpan<byte> subject, int pos)
    {
        if (pos >= subject.Length) return NoPivot;
        return subject[pos];
    }

    /// <summary>
    /// Returns the length of the common prefix between two byte spans.
    /// Go reference: server/stree/util.go:commonPrefixLen
    /// </summary>
    internal static int CommonPrefixLen(ReadOnlySpan<byte> s1, ReadOnlySpan<byte> s2)
    {
        var limit = Math.Min(s1.Length, s2.Length);
        int i = 0;
        for (; i < limit; i++)
        {
            if (s1[i] != s2[i]) break;
        }
        return i;
    }

    /// <summary>
    /// Copy bytes helper; returns a shared empty array for empty input.
    /// </summary>
    internal static byte[] CopyBytes(ReadOnlySpan<byte> src)
    {
        if (src.Length == 0) return [];
        return src.ToArray();
    }

    /// <summary>
    /// Break a filter subject into parts based on wildcards (pwc '*' and fwc '>').
    /// Literal runs stay as one part (with their trailing '.'), each wildcard
    /// becomes its own single-byte part, and the '.' immediately after a
    /// wildcard is skipped so it is not duplicated in the next part.
    /// Go reference: server/stree/parts.go:genParts
    /// </summary>
    internal static ReadOnlyMemory<byte>[] GenParts(ReadOnlySpan<byte> filter)
    {
        var parts = new List<ReadOnlyMemory<byte>>();
        // We work on a copy since ReadOnlyMemory needs a backing array
        var filterArr = filter.ToArray();
        var filterMem = new ReadOnlyMemory<byte>(filterArr);
        int start = 0;
        int e = filterArr.Length - 1; // index of the last byte

        for (int i = 0; i < filterArr.Length; i++)
        {
            if (filterArr[i] == Tsep)
            {
                // See if next token is pwc. Either internal ("a.*.") or end ("a.*").
                if (i < e && filterArr[i + 1] == Pwc && ((i + 2 <= e && filterArr[i + 2] == Tsep) || i + 1 == e))
                {
                    // Emit the literal run up to and including this tsep.
                    if (i > start)
                    {
                        parts.Add(filterMem.Slice(start, i + 1 - start));
                    }
                    // Emit the single-byte '*' part.
                    parts.Add(filterMem.Slice(i + 1, 1));
                    i++; // Skip pwc
                    if (i + 2 <= e)
                    {
                        i++; // Skip next tsep from next part too.
                    }
                    start = i + 1;
                }
                else if (i < e && filterArr[i + 1] == Fwc && i + 1 == e)
                {
                    // Terminal '>' after this tsep.
                    if (i > start)
                    {
                        parts.Add(filterMem.Slice(start, i + 1 - start));
                    }
                    parts.Add(filterMem.Slice(i + 1, 1));
                    i++; // Skip fwc
                    start = i + 1;
                }
            }
            else if (filterArr[i] == Pwc || filterArr[i] == Fwc)
            {
                // Wildcard must be at the start or preceded by tsep.
                int prev = i - 1;
                if (prev >= 0 && filterArr[prev] != Tsep)
                {
                    continue; // literal '*'/'>' inside a token, not a wildcard
                }

                // Wildcard must be at the end or followed by tsep.
                int next = i + 1;
                if (next == e || (next < e && filterArr[next] != Tsep))
                {
                    continue;
                }

                // Full wildcard must be terminal.
                if (filterArr[i] == Fwc && i < e)
                {
                    break;
                }

                // We start with a pwc or fwc.
                parts.Add(filterMem.Slice(i, 1));
                if (i + 1 <= e)
                {
                    i++; // Skip next tsep from next part too.
                }
                start = i + 1;
            }
        }

        // Trailing literal run, if any.
        if (start < filterArr.Length)
        {
            // Check to see if we need to eat a leading tsep.
            if (filterArr[start] == Tsep)
            {
                start++;
            }
            parts.Add(filterMem[start..]);
        }

        return [.. parts];
    }

    /// <summary>
    /// Match filter parts against a node fragment (prefix for internal nodes,
    /// suffix for leaves). Returns the parts still to be matched by deeper
    /// fragments and whether the fragment was consistent with the filter.
    /// A returned part may be a trimmed remainder of an input part when the
    /// fragment ended mid-part.
    /// Go reference: server/stree/parts.go:matchParts
    /// </summary>
    internal static (ReadOnlyMemory<byte>[] RemainingParts, bool Matched) MatchPartsAgainstFragment(
        ReadOnlyMemory<byte>[] parts, ReadOnlySpan<byte> frag)
    {
        int lf = frag.Length;
        // An empty fragment constrains nothing.
        if (lf == 0)
        {
            return (parts, true);
        }

        int si = 0;                   // current position within frag
        int lpi = parts.Length - 1;   // index of the last part

        for (int i = 0; i < parts.Length; i++)
        {
            // Fragment fully consumed; the rest is for deeper fragments.
            if (si >= lf)
            {
                return (parts[i..], true);
            }

            var part = parts[i].Span;
            int lp = part.Length;

            // Check for pwc or fwc place holders.
            if (lp == 1)
            {
                if (part[0] == Pwc)
                {
                    var index = frag[si..].IndexOf(Tsep);
                    // We are trying to match pwc and did not find our tsep.
                    if (index < 0)
                    {
                        // Token may continue in a deeper fragment; keep '*' pending
                        // unless it was the final part.
                        if (i == lpi)
                        {
                            return ([], true);
                        }
                        return (parts[i..], true);
                    }
                    // '*' consumed one whole token including its tsep.
                    si += index + 1;
                    continue;
                }
                else if (part[0] == Fwc)
                {
                    // '>' matches everything from here on.
                    return ([], true);
                }
            }

            int end = Math.Min(si + lp, lf);
            // If part is bigger than the remaining fragment, adjust to a portion of the part.
            var partToCompare = part;
            if (si + lp > end)
            {
                // Frag is smaller than part itself.
                partToCompare = part[..(end - si)];
            }

            if (!partToCompare.SequenceEqual(frag[si..end]))
            {
                return (parts, false);
            }

            // If we still have a portion of the fragment left, update and continue.
            if (end < lf)
            {
                si = end;
                continue;
            }

            // If we matched a partial, do not move past current part
            // but update the part to what was consumed.
            if (end < si + lp)
            {
                if (end >= lf)
                {
                    // Create a copy with the current part trimmed.
                    var newParts = new ReadOnlyMemory<byte>[parts.Length - i];
                    Array.Copy(parts, i, newParts, 0, newParts.Length);
                    newParts[0] = parts[i][(lf - si)..];
                    return (newParts, true);
                }
                else
                {
                    i++;
                }
                return (parts[i..], true);
            }

            if (i == lpi)
            {
                // Last part matched exactly at the fragment boundary.
                return ([], true);
            }

            // If we are here we are not the last part which means we have a wildcard
            // gap, so we need to match anything up to next tsep.
            si += part.Length;
        }

        return (parts, false);
    }
}
|
||||
@@ -1,7 +1,616 @@
|
||||
// Go reference: server/stree/stree.go
|
||||
namespace NATS.Server.Internal.SubjectTree;
|
||||
|
||||
// Go reference: server/stree/stree.go
|
||||
// TODO: Port Adaptive Radix Tree for per-subject state
|
||||
/// <summary>
|
||||
/// SubjectTree is an adaptive radix trie (ART) for storing subject information on literal subjects.
|
||||
/// Uses dynamic nodes, path compression and lazy expansion.
|
||||
/// Go reference: server/stree/stree.go
|
||||
/// </summary>
|
||||
public class SubjectTree<T>
|
||||
{
|
||||
    // Root of the ART; null when the tree is empty.
    internal INode? Root;
    // Number of stored subjects; maintained by Insert/Delete.
    private int _size;

    /// <summary>
    /// Returns the number of elements stored.
    /// </summary>
    public int Size => _size;
|
||||
|
||||
/// <summary>
|
||||
/// Empties the tree and returns it. If called on a new tree, returns it unchanged.
|
||||
/// </summary>
|
||||
public SubjectTree<T> Empty()
|
||||
{
|
||||
Root = null;
|
||||
_size = 0;
|
||||
return this;
|
||||
}
|
||||
|
||||
    /// <summary>
    /// Insert a value into the tree. Returns (oldValue, existed).
    /// If the subject already existed, oldValue is the previous value and existed is true.
    /// Subjects containing the NoPivot sentinel byte (127) are rejected and
    /// return (default, false) without modifying the tree.
    /// </summary>
    public (T? OldValue, bool Existed) Insert(ReadOnlySpan<byte> subject, T value)
    {
        // Make sure we never insert anything with a noPivot byte.
        if (subject.IndexOf(Parts.NoPivot) >= 0)
        {
            return (default, false);
        }

        // subject.ToArray(): the trie keeps its own copy of the key bytes.
        var (old, updated) = InsertInternal(ref Root, subject.ToArray(), value, 0);
        if (!updated)
        {
            // New subject, not a replacement: account for it.
            _size++;
        }
        return (old, updated);
    }
|
||||
|
||||
/// <summary>
|
||||
/// Find the value for an exact subject match.
|
||||
/// </summary>
|
||||
public (T? Value, bool Found) Find(ReadOnlySpan<byte> subject)
|
||||
{
|
||||
int si = 0;
|
||||
var n = Root;
|
||||
while (n != null)
|
||||
{
|
||||
if (n.IsLeaf)
|
||||
{
|
||||
var ln = (Leaf<T>)n;
|
||||
if (ln.Match(subject[si..]))
|
||||
{
|
||||
return (ln.Value, true);
|
||||
}
|
||||
return (default, false);
|
||||
}
|
||||
|
||||
// We are a node type here, grab meta portion.
|
||||
var bn = n.Base!;
|
||||
if (bn.Prefix.Length > 0)
|
||||
{
|
||||
var end = Math.Min(si + bn.Prefix.Length, subject.Length);
|
||||
if (!subject[si..end].SequenceEqual(bn.Prefix))
|
||||
{
|
||||
return (default, false);
|
||||
}
|
||||
si += bn.Prefix.Length;
|
||||
}
|
||||
|
||||
var childRef = n.FindChild(Parts.Pivot(subject, si));
|
||||
if (childRef != null)
|
||||
{
|
||||
n = childRef.Node;
|
||||
}
|
||||
else
|
||||
{
|
||||
return (default, false);
|
||||
}
|
||||
}
|
||||
return (default, false);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Delete the item for the given subject.
|
||||
/// Returns (deletedValue, wasFound).
|
||||
/// </summary>
|
||||
public (T? Value, bool Found) Delete(ReadOnlySpan<byte> subject)
|
||||
{
|
||||
if (subject.Length == 0)
|
||||
{
|
||||
return (default, false);
|
||||
}
|
||||
|
||||
var (val, deleted) = DeleteInternal(ref Root, subject.ToArray(), 0);
|
||||
if (deleted)
|
||||
{
|
||||
_size--;
|
||||
}
|
||||
return (val, deleted);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Match against a filter subject with wildcards and invoke the callback for each matched value.
|
||||
/// </summary>
|
||||
public void Match(ReadOnlySpan<byte> filter, Action<byte[], T>? callback)
|
||||
{
|
||||
if (Root == null || filter.Length == 0 || callback == null)
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
var parts = Parts.GenParts(filter);
|
||||
MatchInternal(Root, parts, [], (subject, val) =>
|
||||
{
|
||||
callback(subject, val);
|
||||
return true;
|
||||
});
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Match against a filter subject with wildcards and invoke the callback for each matched value.
|
||||
/// Returning false from the callback stops matching immediately.
|
||||
/// Returns true if matching ran to completion, false if callback stopped it early.
|
||||
/// </summary>
|
||||
public bool MatchUntil(ReadOnlySpan<byte> filter, Func<byte[], T, bool>? callback)
|
||||
{
|
||||
if (Root == null || filter.Length == 0 || callback == null)
|
||||
{
|
||||
return true;
|
||||
}
|
||||
|
||||
var parts = Parts.GenParts(filter);
|
||||
return MatchInternal(Root, parts, [], callback);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Walk all entries in lexicographic order. The callback can return false to terminate.
|
||||
/// </summary>
|
||||
public void IterOrdered(Func<byte[], T, bool> cb)
|
||||
{
|
||||
if (Root == null) return;
|
||||
IterInternal(Root, [], ordered: true, cb);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Walk all entries in no guaranteed order. The callback can return false to terminate.
|
||||
/// </summary>
|
||||
public void IterFast(Func<byte[], T, bool> cb)
|
||||
{
|
||||
if (Root == null) return;
|
||||
IterInternal(Root, [], ordered: false, cb);
|
||||
}
|
||||
|
||||
#region Internal Methods
|
||||
|
||||
/// <summary>
|
||||
/// Internal recursive insert.
|
||||
/// Go reference: server/stree/stree.go:insert
|
||||
/// </summary>
|
||||
private (T? OldValue, bool Updated) InsertInternal(ref INode? nodeRef, byte[] subject, T value, int si)
|
||||
{
|
||||
var n = nodeRef;
|
||||
if (n == null)
|
||||
{
|
||||
nodeRef = new Leaf<T>(subject[si..], value);
|
||||
return (default, false);
|
||||
}
|
||||
|
||||
if (n.IsLeaf)
|
||||
{
|
||||
var ln = (Leaf<T>)n;
|
||||
if (ln.Match(subject.AsSpan(si)))
|
||||
{
|
||||
// Replace with new value.
|
||||
var old = ln.Value;
|
||||
ln.Value = value;
|
||||
return (old, true);
|
||||
}
|
||||
|
||||
// Here we need to split this leaf.
|
||||
int cpi = Parts.CommonPrefixLen(ln.Suffix, subject.AsSpan(si));
|
||||
var nn = new Node4(subject.AsSpan(si, cpi));
|
||||
ln.SetSuffix(ln.Suffix.AsSpan(cpi));
|
||||
si += cpi;
|
||||
|
||||
// Make sure we have different pivot, normally this will be the case unless we have overflowing prefixes.
|
||||
byte p = Parts.Pivot(ln.Suffix, 0);
|
||||
if (cpi > 0 && si < subject.Length && p == subject[si])
|
||||
{
|
||||
// We need to split the original leaf. Recursively call into insert.
|
||||
InsertInternal(ref nodeRef, subject, value, si);
|
||||
// Now add the updated version of nodeRef as a child to the new node4.
|
||||
nn.AddChild(p, nodeRef!);
|
||||
}
|
||||
else
|
||||
{
|
||||
// Can just add this new leaf as a sibling.
|
||||
var nl = new Leaf<T>(subject.AsSpan(si), value);
|
||||
nn.AddChild(Parts.Pivot(nl.Suffix, 0), nl);
|
||||
// Add back original.
|
||||
nn.AddChild(Parts.Pivot(ln.Suffix, 0), ln);
|
||||
}
|
||||
|
||||
nodeRef = nn;
|
||||
return (default, false);
|
||||
}
|
||||
|
||||
// Non-leaf nodes.
|
||||
var bn = n.Base!;
|
||||
if (bn.Prefix.Length > 0)
|
||||
{
|
||||
int cpi = Parts.CommonPrefixLen(bn.Prefix, subject.AsSpan(si));
|
||||
int pli = bn.Prefix.Length;
|
||||
if (cpi >= pli)
|
||||
{
|
||||
// Move past this node.
|
||||
si += pli;
|
||||
var childRef = n.FindChild(Parts.Pivot(subject, si));
|
||||
if (childRef != null)
|
||||
{
|
||||
var childNode = childRef.Node;
|
||||
var result = InsertInternal(ref childNode, subject, value, si);
|
||||
childRef.Node = childNode;
|
||||
return result;
|
||||
}
|
||||
if (n.IsFull)
|
||||
{
|
||||
n = n.Grow();
|
||||
nodeRef = n;
|
||||
}
|
||||
n.AddChild(Parts.Pivot(subject, si), new Leaf<T>(subject.AsSpan(si), value));
|
||||
return (default, false);
|
||||
}
|
||||
else
|
||||
{
|
||||
// We did not match the prefix completely here.
|
||||
var prefix = subject.AsSpan(si, cpi);
|
||||
si += prefix.Length;
|
||||
// We will insert a new node4 and attach our current node below after adjusting prefix.
|
||||
var nn = new Node4(prefix);
|
||||
// Shift the prefix for our original node.
|
||||
n.SetPrefix(bn.Prefix.AsSpan(cpi));
|
||||
nn.AddChild(Parts.Pivot(bn.Prefix, 0), n);
|
||||
// Add in our new leaf.
|
||||
nn.AddChild(Parts.Pivot(subject.AsSpan(si), 0), new Leaf<T>(subject.AsSpan(si), value));
|
||||
// Update our node reference.
|
||||
nodeRef = nn;
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
var childRef = n.FindChild(Parts.Pivot(subject, si));
|
||||
if (childRef != null)
|
||||
{
|
||||
var childNode = childRef.Node;
|
||||
var result = InsertInternal(ref childNode, subject, value, si);
|
||||
childRef.Node = childNode;
|
||||
return result;
|
||||
}
|
||||
// No prefix and no matched child, so add in new leafnode as needed.
|
||||
if (n.IsFull)
|
||||
{
|
||||
n = n.Grow();
|
||||
nodeRef = n;
|
||||
}
|
||||
n.AddChild(Parts.Pivot(subject, si), new Leaf<T>(subject.AsSpan(si), value));
|
||||
}
|
||||
|
||||
return (default, false);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Internal recursive delete with compaction.
|
||||
/// Go reference: server/stree/stree.go:delete
|
||||
/// </summary>
|
||||
private (T? Value, bool Deleted) DeleteInternal(ref INode? nodeRef, byte[] subject, int si)
|
||||
{
|
||||
if (nodeRef == null || subject.Length == 0)
|
||||
{
|
||||
return (default, false);
|
||||
}
|
||||
|
||||
var n = nodeRef;
|
||||
if (n.IsLeaf)
|
||||
{
|
||||
var ln = (Leaf<T>)n;
|
||||
if (ln.Match(subject.AsSpan(si)))
|
||||
{
|
||||
nodeRef = null;
|
||||
return (ln.Value, true);
|
||||
}
|
||||
return (default, false);
|
||||
}
|
||||
|
||||
// Not a leaf node.
|
||||
var bn = n.Base!;
|
||||
if (bn.Prefix.Length > 0)
|
||||
{
|
||||
// subject could be shorter and would panic on bad index.
|
||||
if (subject.Length < si + bn.Prefix.Length)
|
||||
{
|
||||
return (default, false);
|
||||
}
|
||||
if (!subject.AsSpan(si, bn.Prefix.Length).SequenceEqual(bn.Prefix))
|
||||
{
|
||||
return (default, false);
|
||||
}
|
||||
si += bn.Prefix.Length;
|
||||
}
|
||||
|
||||
var p = Parts.Pivot(subject, si);
|
||||
var childRef = n.FindChild(p);
|
||||
if (childRef == null)
|
||||
{
|
||||
return (default, false);
|
||||
}
|
||||
|
||||
var nn = childRef.Node;
|
||||
if (nn != null && nn.IsLeaf)
|
||||
{
|
||||
var ln = (Leaf<T>)nn;
|
||||
if (ln.Match(subject.AsSpan(si)))
|
||||
{
|
||||
n.DeleteChild(p);
|
||||
|
||||
var sn = n.Shrink();
|
||||
if (sn != null)
|
||||
{
|
||||
// Make sure to copy prefix so we force a copy below.
|
||||
var pre = bn.Prefix.ToArray();
|
||||
|
||||
// Need to fix up prefixes/suffixes.
|
||||
if (sn.IsLeaf)
|
||||
{
|
||||
var shrunkLeaf = (Leaf<T>)sn;
|
||||
// Prepend old prefix to leaf suffix.
|
||||
var newSuffix = new byte[pre.Length + shrunkLeaf.Suffix.Length];
|
||||
pre.CopyTo(newSuffix, 0);
|
||||
shrunkLeaf.Suffix.CopyTo(newSuffix, pre.Length);
|
||||
shrunkLeaf.Suffix = newSuffix;
|
||||
}
|
||||
else
|
||||
{
|
||||
// We are a node here, we need to add in the old prefix.
|
||||
if (pre.Length > 0)
|
||||
{
|
||||
var bsn = sn.Base!;
|
||||
var newPrefix = new byte[pre.Length + bsn.Prefix.Length];
|
||||
pre.CopyTo(newPrefix, 0);
|
||||
bsn.Prefix.CopyTo(newPrefix, pre.Length);
|
||||
sn.SetPrefix(newPrefix);
|
||||
}
|
||||
}
|
||||
nodeRef = sn;
|
||||
}
|
||||
|
||||
return (ln.Value, true);
|
||||
}
|
||||
return (default, false);
|
||||
}
|
||||
|
||||
// Recurse into child node.
|
||||
var childNode = childRef.Node;
|
||||
var result = DeleteInternal(ref childNode, subject, si);
|
||||
childRef.Node = childNode;
|
||||
return result;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Internal recursive match.
|
||||
/// Go reference: server/stree/stree.go:match
|
||||
/// </summary>
|
||||
internal bool MatchInternal(INode? n, ReadOnlyMemory<byte>[] parts, byte[] pre, Func<byte[], T, bool> cb)
|
||||
{
|
||||
// Capture if we are sitting on a terminal fwc.
|
||||
bool hasFWC = false;
|
||||
if (parts.Length > 0 && parts[^1].Length > 0 && parts[^1].Span[0] == Parts.Fwc)
|
||||
{
|
||||
hasFWC = true;
|
||||
}
|
||||
|
||||
while (n != null)
|
||||
{
|
||||
var (nparts, matched) = n.MatchParts(parts);
|
||||
if (!matched)
|
||||
{
|
||||
return true;
|
||||
}
|
||||
|
||||
// We have matched here. If we are a leaf and have exhausted all parts or have a FWC, fire callback.
|
||||
if (n.IsLeaf)
|
||||
{
|
||||
if (nparts.Length == 0 || (hasFWC && nparts.Length == 1))
|
||||
{
|
||||
var ln = (Leaf<T>)n;
|
||||
var subject = Concat(pre, ln.Suffix);
|
||||
if (!cb(subject, ln.Value))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
// We have normal nodes here. Append our prefix.
|
||||
var bn = n.Base!;
|
||||
if (bn.Prefix.Length > 0)
|
||||
{
|
||||
pre = Concat(pre, bn.Prefix);
|
||||
}
|
||||
|
||||
// Check our remaining parts.
|
||||
if (nparts.Length == 0 && !hasFWC)
|
||||
{
|
||||
// We are a node with no parts left and we are not looking at a fwc.
|
||||
bool hasTermPWC = false;
|
||||
if (parts.Length > 0 && parts[^1].Length == 1 && parts[^1].Span[0] == Parts.Pwc)
|
||||
{
|
||||
nparts = parts[^1..];
|
||||
hasTermPWC = true;
|
||||
}
|
||||
|
||||
foreach (var cn in n.Children())
|
||||
{
|
||||
if (cn == null) continue;
|
||||
|
||||
if (cn.IsLeaf)
|
||||
{
|
||||
var ln = (Leaf<T>)cn;
|
||||
if (ln.Suffix.Length == 0)
|
||||
{
|
||||
var subject = Concat(pre, ln.Suffix);
|
||||
if (!cb(subject, ln.Value))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
else if (hasTermPWC && ln.Suffix.AsSpan().IndexOf(Parts.Tsep) < 0)
|
||||
{
|
||||
var subject = Concat(pre, ln.Suffix);
|
||||
if (!cb(subject, ln.Value))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
else if (hasTermPWC)
|
||||
{
|
||||
if (!MatchInternal(cn, nparts, pre, cb))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
// If we are sitting on a terminal fwc, put back and continue.
|
||||
if (hasFWC && nparts.Length == 0)
|
||||
{
|
||||
nparts = parts[^1..];
|
||||
}
|
||||
|
||||
// Here we are a node type with a partial match.
|
||||
// Check if the first part is a wildcard.
|
||||
var fp = nparts[0];
|
||||
var pvt = Parts.Pivot(fp.Span, 0);
|
||||
|
||||
if (fp.Length == 1 && (pvt == Parts.Pwc || pvt == Parts.Fwc))
|
||||
{
|
||||
// We need to iterate over all children here for the current node
|
||||
// to see if we match further down.
|
||||
foreach (var cn in n.Children())
|
||||
{
|
||||
if (cn != null)
|
||||
{
|
||||
if (!MatchInternal(cn, nparts, pre, cb))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
// Here we have normal traversal, so find the next child.
|
||||
var next = n.FindChild(pvt);
|
||||
if (next == null)
|
||||
{
|
||||
return true;
|
||||
}
|
||||
n = next.Node;
|
||||
parts = nparts;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Internal iter function to walk nodes.
|
||||
/// Go reference: server/stree/stree.go:iter
|
||||
/// </summary>
|
||||
internal bool IterInternal(INode n, byte[] pre, bool ordered, Func<byte[], T, bool> cb)
|
||||
{
|
||||
if (n.IsLeaf)
|
||||
{
|
||||
var ln = (Leaf<T>)n;
|
||||
return cb(Concat(pre, ln.Suffix), ln.Value);
|
||||
}
|
||||
|
||||
// We are normal node here.
|
||||
var bn = n.Base!;
|
||||
if (bn.Prefix.Length > 0)
|
||||
{
|
||||
pre = Concat(pre, bn.Prefix);
|
||||
}
|
||||
|
||||
if (!ordered)
|
||||
{
|
||||
foreach (var cn in n.Children())
|
||||
{
|
||||
if (cn == null) continue;
|
||||
if (!IterInternal(cn, pre, false, cb))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
// Collect non-null children and sort by path for lexicographic order.
|
||||
var children = n.Children().Where(c => c != null).ToList();
|
||||
children.Sort((a, b) =>
|
||||
{
|
||||
var pa = a!.Path();
|
||||
var pb = b!.Path();
|
||||
return pa.AsSpan().SequenceCompareTo(pb);
|
||||
});
|
||||
|
||||
foreach (var cn in children)
|
||||
{
|
||||
if (!IterInternal(cn!, pre, true, cb))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Helper to concatenate two byte arrays.
|
||||
/// </summary>
|
||||
private static byte[] Concat(byte[] a, byte[] b)
|
||||
{
|
||||
if (a.Length == 0) return b;
|
||||
if (b.Length == 0) return a;
|
||||
var result = new byte[a.Length + b.Length];
|
||||
a.CopyTo(result, 0);
|
||||
b.CopyTo(result, a.Length);
|
||||
return result;
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
|
||||
/// <summary>
/// Static helper methods for SubjectTree operations.
/// </summary>
public static class SubjectTreeHelper
{
    /// <summary>
    /// Iterates the smaller of the two provided subject trees and looks for matching entries in the other.
    /// The callback fires once per subject present in both trees.
    /// Go reference: server/stree/stree.go:LazyIntersect
    /// </summary>
    public static void LazyIntersect<TL, TR>(SubjectTree<TL>? tl, SubjectTree<TR>? tr, Action<byte[], TL, TR> cb)
    {
        // Nothing to intersect unless both trees exist and are non-empty.
        if (tl?.Root is null || tr?.Root is null)
        {
            return;
        }

        // Walk the smaller tree and probe the larger one for each key.
        if (tr.Size < tl.Size)
        {
            tr.IterFast((subject, rightVal) =>
            {
                var (leftVal, found) = tl.Find(subject);
                if (found)
                {
                    cb(subject, leftVal!, rightVal);
                }
                return true;
            });
        }
        else
        {
            tl.IterFast((subject, leftVal) =>
            {
                var (rightVal, found) = tr.Find(subject);
                if (found)
                {
                    cb(subject, leftVal, rightVal!);
                }
                return true;
            });
        }
    }
}
|
||||
|
||||
@@ -1,7 +1,414 @@
|
||||
// Go reference: server/thw/thw.go
|
||||
// Time hash wheel for efficient TTL expiration tracking.
|
||||
// Fixed-size array of slots (the wheel), each containing a dictionary of (seq, expires) entries.
|
||||
// Slot index = (expires / tickResolution) % wheelSize.
|
||||
|
||||
using System.Buffers.Binary;
|
||||
using System.Diagnostics;
|
||||
|
||||
namespace NATS.Server.Internal.TimeHashWheel;
|
||||
|
||||
// Go reference: server/thw/thw.go
|
||||
// Ported: time hash wheel for TTL expiration (see HashWheel below).
|
||||
/// <summary>
/// A timing hash wheel for efficient TTL expiration management.
/// Uses a fixed-size circular buffer of slots, where each slot holds entries
/// that expire within the same time tick. Supports O(1) add/remove and
/// efficient batch expiration scanning.
/// Not thread safe; callers must provide their own synchronization.
/// </summary>
public class HashWheel
{
    // Go: tickDuration = int64(time.Second) — tick duration in nanoseconds.
    private const long TickDuration = 1_000_000_000;

    // Go: wheelBits = 12, wheelSize = 1 << 12 = 4096, wheelMask = 4095.
    private const int WheelBits = 12;
    internal const int WheelSize = 1 << WheelBits;
    private const int WheelMask = WheelSize - 1;

    // Go: headerLen = 17 — 1 byte magic + 2 x uint64.
    private const int HeaderLen = 17;

    // Slots are allocated lazily and nulled out when emptied.
    private Slot?[] _wheel;
    // Lowest expiration across the whole wheel. May be stale-low after Remove
    // (never recomputed there); that only causes a harmless extra scan in
    // ExpireTasksInternal, which then restores the true value.
    private long _lowest;
    private ulong _count;

    public HashWheel()
    {
        _wheel = new Slot?[WheelSize];
        _lowest = long.MaxValue;
    }

    /// <summary>
    /// Gets the number of entries in the wheel.
    /// </summary>
    // Go: Count() server/thw/thw.go:190
    public ulong Count => _count;

    /// <summary>
    /// Calculates the slot position for a given expiration time.
    /// Expiration times are assumed non-negative nanosecond timestamps.
    /// </summary>
    // Go: getPosition server/thw/thw.go:66
    private static int GetPosition(long expires)
    {
        return (int)((expires / TickDuration) & WheelMask);
    }

    /// <summary>
    /// Schedules a new timer task. If the sequence already exists in the target slot,
    /// its expiration is updated without incrementing the count.
    /// </summary>
    // Go: Add server/thw/thw.go:79
    public void Add(ulong seq, long expires)
    {
        var pos = GetPosition(expires);

        // Initialize the slot lazily.
        _wheel[pos] ??= new Slot();

        var slot = _wheel[pos]!;
        if (!slot.Entries.ContainsKey(seq))
        {
            _count++;
        }

        slot.Entries[seq] = expires;

        // Update slot's lowest expiration if this is earlier.
        if (expires < slot.Lowest)
        {
            slot.Lowest = expires;
            // Update global lowest if this is now the earliest.
            if (expires < _lowest)
            {
                _lowest = expires;
            }
        }
    }

    /// <summary>
    /// Removes a timer task. Returns true if the task was found and removed,
    /// false if the task was not found.
    /// The expires value must match the one the task was added with (it selects the slot).
    /// Note: slot.Lowest/_lowest are intentionally not recomputed here (matches the Go
    /// original); they may remain stale-low until the next expiration scan.
    /// </summary>
    // Go: Remove server/thw/thw.go:103
    public bool Remove(ulong seq, long expires)
    {
        var pos = GetPosition(expires);
        var slot = _wheel[pos];

        if (slot is null)
        {
            return false;
        }

        if (!slot.Entries.Remove(seq))
        {
            return false;
        }

        _count--;

        // If the slot is empty, set it to null to free memory.
        if (slot.Entries.Count == 0)
        {
            _wheel[pos] = null;
        }

        return true;
    }

    /// <summary>
    /// Updates the expiration time of an existing timer task by removing it from
    /// the old slot and adding it to the new one.
    /// If the old entry is not found, the new one is still added (matches the Go original).
    /// </summary>
    // Go: Update server/thw/thw.go:123
    public void Update(ulong seq, long oldExpires, long newExpires)
    {
        Remove(seq, oldExpires);
        Add(seq, newExpires);
    }

    /// <summary>
    /// Processes all expired tasks using the current time. The callback receives each
    /// expired entry's sequence and expiration time. If the callback returns true,
    /// the entry is removed; if false, it remains for future expiration checks.
    /// </summary>
    // Go: ExpireTasks server/thw/thw.go:133
    public void ExpireTasks(Func<ulong, long, bool> callback)
    {
        var now = Stopwatch.GetTimestamp();
        // Convert to nanoseconds for consistency with the Go implementation.
        // NOTE(review): Stopwatch is monotonic relative to an arbitrary epoch, while the Go
        // original uses wall-clock UnixNano. This assumes callers derive `expires` from the
        // same Stopwatch-based clock — confirm against call sites.
        var nowNanos = (long)((double)now / Stopwatch.Frequency * 1_000_000_000);
        ExpireTasksInternal(nowNanos, callback);
    }

    /// <summary>
    /// Internal expiration method that accepts an explicit timestamp.
    /// Used by tests that need deterministic time control.
    /// Also recomputes slot and global lowest-expiration bounds as it scans.
    /// </summary>
    // Go: expireTasks server/thw/thw.go:138
    internal void ExpireTasksInternal(long ts, Func<ulong, long, bool> callback)
    {
        // Quick return if nothing is expired.
        if (_lowest > ts)
        {
            return;
        }

        var globalLowest = long.MaxValue;
        for (var pos = 0; pos < _wheel.Length; pos++)
        {
            var slot = _wheel[pos];

            // Skip slot if nothing to expire.
            if (slot is null || slot.Lowest > ts)
            {
                // Still fold untouched slots into the new global lowest.
                if (slot is not null && slot.Lowest < globalLowest)
                {
                    globalLowest = slot.Lowest;
                }

                continue;
            }

            // Track new lowest while processing expirations.
            var slotLowest = long.MaxValue;
            // Collect removals separately — mutating the dictionary while enumerating it
            // would throw.
            var toRemove = new List<ulong>();

            foreach (var (seq, expires) in slot.Entries)
            {
                // Only entries the callback accepts (returns true) are removed.
                if (expires <= ts && callback(seq, expires))
                {
                    toRemove.Add(seq);
                    continue;
                }

                if (expires < slotLowest)
                {
                    slotLowest = expires;
                }
            }

            foreach (var seq in toRemove)
            {
                slot.Entries.Remove(seq);
                _count--;
            }

            // Nil out if we are empty.
            if (slot.Entries.Count == 0)
            {
                _wheel[pos] = null;
            }
            else
            {
                slot.Lowest = slotLowest;
                if (slotLowest < globalLowest)
                {
                    globalLowest = slotLowest;
                }
            }
        }

        _lowest = globalLowest;
    }

    /// <summary>
    /// Returns the earliest expiration time if it is before the given time.
    /// Returns <see cref="long.MaxValue"/> if no expirations exist before the specified time.
    /// Note: because Remove does not recompute _lowest, the returned time may refer to an
    /// entry that has since been removed (conservative; matches the Go original).
    /// </summary>
    // Go: GetNextExpiration server/thw/thw.go:182
    public long GetNextExpiration(long before)
    {
        if (_lowest < before)
        {
            return _lowest;
        }

        return long.MaxValue;
    }

    /// <summary>
    /// Encodes the wheel state into a binary snapshot for persistence.
    /// The high sequence number is included and will be returned on decode.
    /// Format: [1 byte magic version][8 bytes entry count][8 bytes highSeq][varint expires, uvarint seq pairs...]
    /// </summary>
    // Go: Encode server/thw/thw.go:197
    public byte[] Encode(ulong highSeq)
    {
        // Estimate capacity: header + entries * (max varint size * 2).
        var estimatedSize = HeaderLen + (int)(_count * 2 * 10);
        var buffer = new byte[estimatedSize];
        var offset = 0;

        // Magic version byte.
        buffer[offset++] = 1;

        // Entry count (little-endian uint64).
        BinaryPrimitives.WriteUInt64LittleEndian(buffer.AsSpan(offset), _count);
        offset += 8;

        // High sequence stamp (little-endian uint64).
        BinaryPrimitives.WriteUInt64LittleEndian(buffer.AsSpan(offset), highSeq);
        offset += 8;

        // Write all entries as varint(expires) + uvarint(seq) pairs.
        foreach (var slot in _wheel)
        {
            if (slot?.Entries is null)
            {
                continue;
            }

            foreach (var (seq, expires) in slot.Entries)
            {
                // Ensure buffer has enough space.
                // 20 = worst case of two 10-byte varints for the (expires, seq) pair.
                if (offset + 20 > buffer.Length)
                {
                    Array.Resize(ref buffer, buffer.Length * 2);
                }

                offset += WriteVarint(buffer.AsSpan(offset), expires);
                offset += WriteUvarint(buffer.AsSpan(offset), seq);
            }
        }

        return buffer.AsSpan(0, offset).ToArray();
    }

    /// <summary>
    /// Decodes a binary-encoded snapshot and replaces the contents of this wheel.
    /// Returns the high sequence number from the snapshot and the number of bytes consumed.
    /// Throws <see cref="InvalidOperationException"/> on a short buffer, unknown version,
    /// or truncated varint data.
    /// </summary>
    // Go: Decode server/thw/thw.go:216
    public (ulong HighSeq, int BytesRead) Decode(ReadOnlySpan<byte> buf)
    {
        if (buf.Length < HeaderLen)
        {
            throw new InvalidOperationException("Buffer too short for hash wheel header.");
        }

        if (buf[0] != 1)
        {
            throw new InvalidOperationException("Unknown hash wheel encoding version.");
        }

        // Reset the wheel.
        _wheel = new Slot?[WheelSize];
        _lowest = long.MaxValue;
        _count = 0;

        var count = BinaryPrimitives.ReadUInt64LittleEndian(buf[1..]);
        var highSeq = BinaryPrimitives.ReadUInt64LittleEndian(buf[9..]);

        var offset = HeaderLen;
        for (ulong i = 0; i < count; i++)
        {
            var (ts, tn) = ReadVarint(buf[offset..]);
            if (tn <= 0)
            {
                throw new InvalidOperationException("Unexpected end of buffer reading varint.");
            }

            var (seq, vn) = ReadUvarint(buf[(offset + tn)..]);
            if (vn <= 0)
            {
                throw new InvalidOperationException("Unexpected end of buffer reading uvarint.");
            }

            // Re-inserting via Add rebuilds _count, slot lowest and global lowest.
            Add(seq, ts);
            offset += tn + vn;
        }

        return (highSeq, offset);
    }

    // Varint encoding/decoding compatible with Go's encoding/binary.

    /// <summary>
    /// Writes a signed varint (zigzag-encoded) to the buffer.
    /// Compatible with Go's binary.AppendVarint / binary.Varint.
    /// </summary>
    private static int WriteVarint(Span<byte> buffer, long value)
    {
        // Zigzag encode: (value << 1) ^ (value >> 63)
        var zigzag = (ulong)((value << 1) ^ (value >> 63));
        return WriteUvarint(buffer, zigzag);
    }

    /// <summary>
    /// Writes an unsigned varint to the buffer.
    /// Compatible with Go's binary.AppendUvarint / binary.Uvarint.
    /// Returns the number of bytes written (1-10).
    /// </summary>
    private static int WriteUvarint(Span<byte> buffer, ulong value)
    {
        var i = 0;
        while (value >= 0x80)
        {
            // Low 7 bits with continuation flag set.
            buffer[i++] = (byte)(value | 0x80);
            value >>= 7;
        }

        buffer[i++] = (byte)value;
        return i;
    }

    /// <summary>
    /// Reads a signed varint (zigzag-encoded) from the buffer.
    /// Returns the value and the number of bytes consumed (&lt;= 0 on error).
    /// </summary>
    private static (long Value, int BytesRead) ReadVarint(ReadOnlySpan<byte> buffer)
    {
        var (zigzag, n) = ReadUvarint(buffer);
        if (n <= 0)
        {
            return (0, n);
        }

        // Zigzag decode: (zigzag >> 1) ^ -(zigzag & 1)
        var value = (long)(zigzag >> 1) ^ -(long)(zigzag & 1);
        return (value, n);
    }

    /// <summary>
    /// Reads an unsigned varint from the buffer.
    /// Returns the value and the number of bytes consumed (-1 on overflow or short buffer).
    /// </summary>
    private static (ulong Value, int BytesRead) ReadUvarint(ReadOnlySpan<byte> buffer)
    {
        ulong result = 0;
        var shift = 0;
        for (var i = 0; i < buffer.Length; i++)
        {
            var b = buffer[i];
            result |= (ulong)(b & 0x7F) << shift;
            if ((b & 0x80) == 0)
            {
                return (result, i + 1);
            }

            shift += 7;
            if (shift >= 64)
            {
                return (0, -1); // Overflow.
            }
        }

        return (0, -1); // Buffer too short.
    }

    /// <summary>
    /// Internal access to the wheel slots for testing encode/decode round-trip verification.
    /// </summary>
    internal Slot?[] Wheel => _wheel;

    /// <summary>
    /// Represents a single slot in the wheel containing entries that hash to the same position.
    /// </summary>
    internal sealed class Slot
    {
        // Go: slot.entries — map of sequence to expires.
        public Dictionary<ulong, long> Entries { get; } = new();

        // Go: slot.lowest — lowest expiration time in this slot.
        public long Lowest { get; set; } = long.MaxValue;
    }
}
|
||||
|
||||
Reference in New Issue
Block a user