// Copyright 2023-2025 The NATS Authors
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Adapted from server/subject_transform.go in the NATS server Go source.
using System.Text.RegularExpressions;
namespace ZB.MOM.NatsNet.Server.Internal;
// -------------------------------------------------------------------------
// Subject token constants (mirrors const block in server/sublist.go)
// -------------------------------------------------------------------------
// Special subject tokens shared across the transform engine.
// Mirrors the corresponding const block in server/sublist.go.
internal static class SubjectTokens
{
    internal const char Pwc = '*'; // partial wildcard character: matches exactly one token
    internal const string Pwcs = "*"; // partial wildcard as a string token
    internal const char Fwc = '>'; // full wildcard character: matches all remaining tokens
    internal const string Fwcs = ">"; // full wildcard as a string token
    internal const string Tsep = "."; // token separator (string form, for Split/Join)
    internal const char Btsep = '.'; // token separator (char form)
    internal const string Empty = ""; // empty-string sentinel (Go's _EMPTY_)
}
// -------------------------------------------------------------------------
// Transform type constants (mirrors enum in subject_transform.go)
// -------------------------------------------------------------------------
// Identifies the mapping function applied to a destination token.
// Mirrors the transform-type enum in server/subject_transform.go.
internal static class TransformType
{
    internal const short NoTransform = 0;    // literal destination token, copied verbatim
    internal const short BadTransform = 1;   // parse error sentinel
    internal const short Partition = 2;      // {{partition(n[,t...])}}: deterministic hash bucket
    internal const short Wildcard = 3;       // {{wildcard(n)}} or $n: copy source token n
    internal const short SplitFromLeft = 4;  // {{splitfromleft(t,n)}}: split token at offset from left
    internal const short SplitFromRight = 5; // {{splitfromright(t,n)}}: split token at offset from right
    internal const short SliceFromLeft = 6;  // {{slicefromleft(t,n)}}: n-sized chunks from the left
    internal const short SliceFromRight = 7; // {{slicefromright(t,n)}}: n-sized chunks from the right
    internal const short Split = 8;          // {{split(t,delim)}}: split token on a delimiter
    internal const short Left = 9;           // {{left(t,n)}}: keep leftmost n characters
    internal const short Right = 10;         // {{right(t,n)}}: keep rightmost n characters
    internal const short Random = 11;        // {{random(n)}}: random bucket in [0, n)
}
// -------------------------------------------------------------------------
// ISubjectTransformer interface (mirrors SubjectTransformer in Go)
// -------------------------------------------------------------------------
/// <summary>
/// Transforms NATS subjects according to a source-to-destination mapping.
/// Mirrors SubjectTransformer in server/subject_transform.go.
/// </summary>
public interface ISubjectTransformer
{
    /// <summary>Matches <paramref name="subject"/> against the source pattern and, on success, returns the transformed subject; errors are returned as values, Go style.</summary>
    (string result, Exception? err) Match(string subject);
    /// <summary>Transforms a dot-separated subject that is assumed to already match the source pattern.</summary>
    string TransformSubject(string subject);
    /// <summary>Transforms a subject already split into its dot-separated tokens.</summary>
    string TransformTokenizedSubject(string[] tokens);
}
// -------------------------------------------------------------------------
// SubjectTransform class
// -------------------------------------------------------------------------
/// <summary>
/// Subject mapping and transform engine.
/// Mirrors subjectTransform in server/subject_transform.go.
/// </summary>
public sealed class SubjectTransform : ISubjectTransformer
{
    private readonly string _src;  // source subject pattern (may contain * and >)
    private readonly string _dest; // destination subject pattern
    private readonly string[] _dtoks; // destination tokens
    private readonly string[] _stoks; // source tokens
    // Parallel arrays with one entry per destination token:
    private readonly short[] _dtokmftypes;          // mapping-function type (see TransformType)
    private readonly int[][] _dtokmftokindexesargs; // source-token index args for the function
    private readonly int[] _dtokmfintargs;          // integer arg (bucket count, split position, ...)
    private readonly string[] _dtokmfstringargs;    // string arg (split delimiter)
    // Subject mapping function regexes (mirrors var block in Go).
    // Each captures the raw argument list between the parentheses of a
    // "{{function(args)}}" token; CommaSep then splits that capture.
    private static readonly Regex CommaSep = new(@",\s*", RegexOptions.Compiled);
    private static readonly Regex PartitionRe = new(@"\{\{\s*[pP]artition\s*\((.*)\)\s*\}\}", RegexOptions.Compiled);
    private static readonly Regex WildcardRe = new(@"\{\{\s*[wW]ildcard\s*\((.*)\)\s*\}\}", RegexOptions.Compiled);
    private static readonly Regex SplitFromLeftRe = new(@"\{\{\s*[sS]plit[fF]rom[lL]eft\s*\((.*)\)\s*\}\}", RegexOptions.Compiled);
    private static readonly Regex SplitFromRightRe = new(@"\{\{\s*[sS]plit[fF]rom[rR]ight\s*\((.*)\)\s*\}\}", RegexOptions.Compiled);
    private static readonly Regex SliceFromLeftRe = new(@"\{\{\s*[sS]lice[fF]rom[lL]eft\s*\((.*)\)\s*\}\}", RegexOptions.Compiled);
    private static readonly Regex SliceFromRightRe = new(@"\{\{\s*[sS]lice[fF]rom[rR]ight\s*\((.*)\)\s*\}\}", RegexOptions.Compiled);
    private static readonly Regex SplitRe = new(@"\{\{\s*[sS]plit\s*\((.*)\)\s*\}\}", RegexOptions.Compiled);
    private static readonly Regex LeftRe = new(@"\{\{\s*[lL]eft\s*\((.*)\)\s*\}\}", RegexOptions.Compiled);
    private static readonly Regex RightRe = new(@"\{\{\s*[rR]ight\s*\((.*)\)\s*\}\}", RegexOptions.Compiled);
    private static readonly Regex RandomRe = new(@"\{\{\s*[rR]andom\s*\((.*)\)\s*\}\}", RegexOptions.Compiled);
    /// <summary>
    /// Private: instances are created via <see cref="NewWithStrict"/> so that
    /// validation failures are reported as error values, mirroring the Go API.
    /// </summary>
    private SubjectTransform(
        string src, string dest,
        string[] dtoks, string[] stoks,
        short[] dtokmftypes, int[][] dtokmftokindexesargs,
        int[] dtokmfintargs, string[] dtokmfstringargs)
    {
        _src = src;
        _dest = dest;
        _dtoks = dtoks;
        _stoks = stoks;
        _dtokmftypes = dtokmftypes;
        _dtokmftokindexesargs = dtokmftokindexesargs;
        _dtokmfintargs = dtokmfintargs;
        _dtokmfstringargs = dtokmfstringargs;
    }
///
/// Creates a new transform with optional strict mode.
/// Returns (null, null) when dest is empty (no transform needed).
/// Mirrors NewSubjectTransformWithStrict.
///
public static (SubjectTransform? transform, Exception? err) NewWithStrict(
string src, string dest, bool strict)
{
if (dest == SubjectTokens.Empty)
return (null, null);
if (src == SubjectTokens.Empty)
src = SubjectTokens.Fwcs;
var (sv, stokens, npwcs, hasFwc) = SubjectInfo(src);
var (dv, dtokens, dnpwcs, dHasFwc) = SubjectInfo(dest);
if (!sv || !dv || dnpwcs > 0 || hasFwc != dHasFwc)
return (null, ServerErrors.ErrBadSubject);
var dtokMfTypes = new List();
var dtokMfIndexes = new List();
var dtokMfIntArgs = new List();
var dtokMfStringArgs = new List();
if (npwcs > 0 || hasFwc)
{
// Build source-token index map for partial wildcards.
var sti = new Dictionary();
for (var i = 0; i < stokens.Length; i++)
{
if (stokens[i].Length == 1 && stokens[i][0] == SubjectTokens.Pwc)
sti[sti.Count + 1] = i;
}
var nphs = 0;
foreach (var token in dtokens)
{
var (tt, tidxs, tint, tstr, terr) = IndexPlaceHolders(token);
if (terr != null) return (null, terr);
if (strict && tt != TransformType.NoTransform && tt != TransformType.Wildcard)
return (null, new MappingDestinationException(token, ServerErrors.ErrMappingDestinationNotSupportedForImport));
if (tt == TransformType.NoTransform)
{
dtokMfTypes.Add(TransformType.NoTransform);
dtokMfIndexes.Add([-1]);
dtokMfIntArgs.Add(-1);
dtokMfStringArgs.Add(SubjectTokens.Empty);
}
else if (tt == TransformType.Random)
{
dtokMfTypes.Add(TransformType.Random);
dtokMfIndexes.Add([]);
dtokMfIntArgs.Add(tint);
dtokMfStringArgs.Add(SubjectTokens.Empty);
}
else
{
nphs += tidxs.Length;
var stis = new List();
foreach (var wildcardIndex in tidxs)
{
if (wildcardIndex > npwcs)
return (null, new MappingDestinationException(
$"{token}: [{wildcardIndex}]",
ServerErrors.ErrMappingDestinationIndexOutOfRange));
stis.Add(sti.GetValueOrDefault(wildcardIndex, 0));
}
dtokMfTypes.Add(tt);
dtokMfIndexes.Add([.. stis]);
dtokMfIntArgs.Add(tint);
dtokMfStringArgs.Add(tstr);
}
}
if (strict && nphs < npwcs)
return (null, new MappingDestinationException(dest, ServerErrors.ErrMappingDestinationNotUsingAllWildcards));
}
else
{
foreach (var token in dtokens)
{
var (tt, _, tint, _, terr) = IndexPlaceHolders(token);
if (terr != null) return (null, terr);
if (tt == TransformType.NoTransform)
{
dtokMfTypes.Add(TransformType.NoTransform);
dtokMfIndexes.Add([-1]);
dtokMfIntArgs.Add(-1);
dtokMfStringArgs.Add(SubjectTokens.Empty);
}
else if (tt == TransformType.Random || tt == TransformType.Partition)
{
dtokMfTypes.Add(tt);
dtokMfIndexes.Add([]);
dtokMfIntArgs.Add(tint);
dtokMfStringArgs.Add(SubjectTokens.Empty);
}
else
{
return (null, new MappingDestinationException(token, ServerErrors.ErrMappingDestinationIndexOutOfRange));
}
}
}
return (new SubjectTransform(
src, dest,
dtokens, stokens,
[.. dtokMfTypes], [.. dtokMfIndexes],
[.. dtokMfIntArgs], [.. dtokMfStringArgs]), null);
}
///
/// Creates a non-strict transform. Mirrors NewSubjectTransform.
///
public static (SubjectTransform? transform, Exception? err) New(string src, string dest) =>
NewWithStrict(src, dest, false);
///
/// Creates a strict transform (only Wildcard function allowed).
/// Mirrors NewSubjectTransformStrict.
///
public static (SubjectTransform? transform, Exception? err) NewStrict(string src, string dest) =>
NewWithStrict(src, dest, true);
///
/// Validates a subject mapping destination. Checks each token for valid syntax,
/// validates mustache-style mapping functions against known regexes, then verifies
/// the full transform can be created. Mirrors Go's ValidateMapping.
///
public static Exception? ValidateMapping(string src, string dest)
{
if (string.IsNullOrEmpty(dest))
return null;
bool sfwc = false;
foreach (var t in dest.Split(SubjectTokens.Btsep))
{
var length = t.Length;
if (length == 0 || sfwc)
return new MappingDestinationException(t, ServerErrors.ErrInvalidMappingDestinationSubject);
// If it looks like a mapping function, validate against known patterns.
if (length > 4 && t[0] == '{' && t[1] == '{' && t[length - 2] == '}' && t[length - 1] == '}')
{
if (!PartitionRe.IsMatch(t) &&
!WildcardRe.IsMatch(t) &&
!SplitFromLeftRe.IsMatch(t) &&
!SplitFromRightRe.IsMatch(t) &&
!SliceFromLeftRe.IsMatch(t) &&
!SliceFromRightRe.IsMatch(t) &&
!SplitRe.IsMatch(t) &&
!RandomRe.IsMatch(t))
{
return new MappingDestinationException(t, ServerErrors.ErrUnknownMappingDestinationFunction);
}
continue;
}
if (length == 1 && t[0] == SubjectTokens.Fwc)
sfwc = true;
else if (t.AsSpan().ContainsAny("\t\n\f\r "))
return ServerErrors.ErrInvalidMappingDestinationSubject;
}
// Verify that the transform can actually be created.
var (_, err) = New(src, dest);
return err;
}
///
/// Attempts to match a published subject against the source pattern.
/// Returns the transformed subject or an error.
/// Mirrors subjectTransform.Match.
///
public (string result, Exception? err) Match(string subject)
{
if ((_src == SubjectTokens.Fwcs || _src == SubjectTokens.Empty) &&
(_dest == SubjectTokens.Fwcs || _dest == SubjectTokens.Empty))
return (subject, null);
var tts = TokenizeSubject(subject);
if (!IsValidLiteralSubject(tts))
return (SubjectTokens.Empty, ServerErrors.ErrBadSubject);
if (_src == SubjectTokens.Empty || _src == SubjectTokens.Fwcs ||
IsSubsetMatch(tts, _src))
return (TransformTokenizedSubject(tts), null);
return (SubjectTokens.Empty, ServerErrors.ErrNoTransforms);
}
///
/// Transforms a dot-separated subject string.
/// Mirrors subjectTransform.TransformSubject.
///
public string TransformSubject(string subject) =>
TransformTokenizedSubject(TokenizeSubject(subject));
    /// <summary>
    /// Core token-by-token transform engine: builds the destination subject
    /// from the (already validated, literal) source tokens by applying each
    /// destination token's mapping function.
    /// Mirrors subjectTransform.TransformTokenizedSubject.
    /// </summary>
    public string TransformTokenizedSubject(string[] tokens)
    {
        // No per-token metadata means the destination is a pure literal.
        if (_dtokmftypes.Length == 0)
            return _dest;
        var b = new System.Text.StringBuilder();
        var li = _dtokmftypes.Length - 1; // index of the last destination token
        for (var i = 0; i < _dtokmftypes.Length; i++)
        {
            var mfType = _dtokmftypes[i];
            if (mfType == TransformType.NoTransform)
            {
                // A trailing ">" is handled after the loop by copying the
                // remaining source tokens, so stop emitting here.
                if (_dtoks[i].Length == 1 && _dtoks[i][0] == SubjectTokens.Fwc)
                    break;
                b.Append(_dtoks[i]);
            }
            else
            {
                switch (mfType)
                {
                    case TransformType.Partition:
                    {
                        // Hash key: either the referenced source tokens
                        // concatenated, or (no token args) the whole subject.
                        byte[] keyBytes;
                        if (_dtokmftokindexesargs[i].Length > 0)
                        {
                            var sb = new System.Text.StringBuilder();
                            foreach (var srcTok in _dtokmftokindexesargs[i])
                                sb.Append(tokens[srcTok]);
                            keyBytes = System.Text.Encoding.UTF8.GetBytes(sb.ToString());
                        }
                        else
                        {
                            keyBytes = System.Text.Encoding.UTF8.GetBytes(string.Join(".", tokens));
                        }
                        b.Append(GetHashPartition(keyBytes, _dtokmfintargs[i]));
                        break;
                    }
                    case TransformType.Wildcard:
                        // Copy the referenced source token verbatim; the guards
                        // keep a malformed index from throwing.
                        if (_dtokmftokindexesargs.Length > i &&
                            _dtokmftokindexesargs[i].Length > 0 &&
                            tokens.Length > _dtokmftokindexesargs[i][0])
                        {
                            b.Append(tokens[_dtokmftokindexesargs[i][0]]);
                        }
                        break;
                    case TransformType.SplitFromLeft:
                    {
                        // Split the token in two at `pos` chars from the left.
                        var src = tokens[_dtokmftokindexesargs[i][0]];
                        var pos = _dtokmfintargs[i];
                        if (pos > 0 && pos < src.Length)
                        {
                            b.Append(src[..pos]);
                            b.Append(SubjectTokens.Tsep);
                            b.Append(src[pos..]);
                        }
                        else
                        {
                            // Out-of-range position: pass through unchanged.
                            b.Append(src);
                        }
                        break;
                    }
                    case TransformType.SplitFromRight:
                    {
                        // Split the token in two at `pos` chars from the right.
                        var src = tokens[_dtokmftokindexesargs[i][0]];
                        var pos = _dtokmfintargs[i];
                        if (pos > 0 && pos < src.Length)
                        {
                            b.Append(src[..(src.Length - pos)]);
                            b.Append(SubjectTokens.Tsep);
                            b.Append(src[(src.Length - pos)..]);
                        }
                        else
                        {
                            // Out-of-range position: pass through unchanged.
                            b.Append(src);
                        }
                        break;
                    }
                    case TransformType.SliceFromLeft:
                    {
                        // Emit `sz`-sized chunks left to right; a short
                        // remainder is appended as a final (shorter) token.
                        var src = tokens[_dtokmftokindexesargs[i][0]];
                        var sz = _dtokmfintargs[i];
                        if (sz > 0 && sz < src.Length)
                        {
                            var j = 0;
                            while (j + sz <= src.Length)
                            {
                                if (j != 0) b.Append(SubjectTokens.Tsep);
                                b.Append(src[j..(j + sz)]);
                                // Remainder shorter than a full chunk: emit it
                                // now and stop.
                                if (j + sz != src.Length && j + sz + sz > src.Length)
                                {
                                    b.Append(SubjectTokens.Tsep);
                                    b.Append(src[(j + sz)..]);
                                    break;
                                }
                                j += sz;
                            }
                        }
                        else
                        {
                            // Chunk size out of range: pass through unchanged.
                            b.Append(src);
                        }
                        break;
                    }
                    case TransformType.SliceFromRight:
                    {
                        // Emit `sz`-sized chunks aligned to the right end; any
                        // short remainder comes first.
                        var src = tokens[_dtokmftokindexesargs[i][0]];
                        var sz = _dtokmfintargs[i];
                        if (sz > 0 && sz < src.Length)
                        {
                            var rem = src.Length % sz;
                            if (rem > 0)
                            {
                                b.Append(src[..rem]);
                                b.Append(SubjectTokens.Tsep);
                            }
                            var j = rem;
                            while (j + sz <= src.Length)
                            {
                                b.Append(src[j..(j + sz)]);
                                if (j + sz < src.Length) b.Append(SubjectTokens.Tsep);
                                j += sz;
                            }
                        }
                        else
                        {
                            // Chunk size out of range: pass through unchanged.
                            b.Append(src);
                        }
                        break;
                    }
                    case TransformType.Split:
                    {
                        // Split on the delimiter, dropping empty pieces while
                        // keeping separators only between non-empty neighbors.
                        var src = tokens[_dtokmftokindexesargs[i][0]];
                        var parts = src.Split(_dtokmfstringargs[i]);
                        for (var j = 0; j < parts.Length; j++)
                        {
                            if (parts[j] != SubjectTokens.Empty)
                                b.Append(parts[j]);
                            if (j < parts.Length - 1 &&
                                parts[j + 1] != SubjectTokens.Empty &&
                                !(j == 0 && parts[j] == SubjectTokens.Empty))
                                b.Append(SubjectTokens.Tsep);
                        }
                        break;
                    }
                    case TransformType.Left:
                    {
                        // Keep the leftmost `sz` characters (whole token when
                        // sz is out of range).
                        var src = tokens[_dtokmftokindexesargs[i][0]];
                        var sz = _dtokmfintargs[i];
                        b.Append(sz > 0 && sz < src.Length ? src[..sz] : src);
                        break;
                    }
                    case TransformType.Right:
                    {
                        // Keep the rightmost `sz` characters (whole token when
                        // sz is out of range).
                        var src = tokens[_dtokmftokindexesargs[i][0]];
                        var sz = _dtokmfintargs[i];
                        b.Append(sz > 0 && sz < src.Length ? src[(src.Length - sz)..] : src);
                        break;
                    }
                    case TransformType.Random:
                        b.Append(GetRandomPartition(_dtokmfintargs[i]));
                        break;
                }
            }
            // Separator between destination tokens (also before the ">" tail,
            // since the fwc branch above breaks after this was written).
            if (i < li)
                b.Append(SubjectTokens.Btsep);
        }
        // Append remaining source tokens when destination ends with ">".
        if (_dtoks.Length > 0 && _dtoks[^1] == SubjectTokens.Fwcs)
        {
            var stokLen = _stoks.Length;
            // The source's own ">" sits at index stokLen - 1, so the matched
            // tail of the subject starts there.
            for (var i = stokLen - 1; i < tokens.Length; i++)
            {
                b.Append(tokens[i]);
                if (i < tokens.Length - 1)
                    b.Append(SubjectTokens.Btsep);
            }
        }
        return b.ToString();
    }
///
/// Reverses this transform (src ↔ dest).
/// Mirrors subjectTransform.reverse.
///
internal SubjectTransform? Reverse()
{
if (_dtokmftokindexesargs.Length == 0)
{
var (rtr, _) = NewStrict(_dest, _src);
return rtr;
}
var (nsrc, phs) = TransformUntokenize(_dest);
var nda = new List();
foreach (var token in _stoks)
{
if (token == SubjectTokens.Pwcs)
{
if (phs.Length == 0) return null;
nda.Add(phs[0]);
phs = phs[1..];
}
else
{
nda.Add(token);
}
}
var ndest = string.Join(SubjectTokens.Tsep, nda);
var (rtrFinal, _) = NewStrict(nsrc, ndest);
return rtrFinal;
}
// -------------------------------------------------------------------------
// Static helpers exposed internally
// -------------------------------------------------------------------------
///
/// Returns the args extracted from a mapping-function token using the given regex.
/// Mirrors getMappingFunctionArgs.
///
internal static string[]? GetMappingFunctionArgs(Regex functionRegex, string token)
{
var m = functionRegex.Match(token);
if (m.Success && m.Groups.Count > 1)
return CommaSep.Split(m.Groups[1].Value);
return null;
}
///
/// Helper for transform functions that take (wildcardIndex, int) args.
/// Mirrors transformIndexIntArgsHelper.
///
internal static (short tt, int[] indexes, int intArg, string strArg, Exception? err)
TransformIndexIntArgsHelper(string token, string[] args, short transformType)
{
if (args.Length < 2)
return (TransformType.BadTransform, [], -1, SubjectTokens.Empty,
new MappingDestinationException(token, ServerErrors.ErrMappingDestinationNotEnoughArgs));
if (args.Length > 2)
return (TransformType.BadTransform, [], -1, SubjectTokens.Empty,
new MappingDestinationException(token, ServerErrors.ErrMappingDestinationTooManyArgs));
if (!int.TryParse(args[0].Trim(), out var idx))
return (TransformType.BadTransform, [], -1, SubjectTokens.Empty,
new MappingDestinationException(token, ServerErrors.ErrMappingDestinationInvalidArg));
if (!int.TryParse(args[1].Trim(), out var intVal))
return (TransformType.BadTransform, [], -1, SubjectTokens.Empty,
new MappingDestinationException(token, ServerErrors.ErrMappingDestinationInvalidArg));
return (transformType, [idx], intVal, SubjectTokens.Empty, null);
}
///
/// Parses a destination token and returns its transform type and arguments.
/// Mirrors indexPlaceHolders.
///
internal static (short tt, int[] indexes, int intArg, string strArg, Exception? err)
IndexPlaceHolders(string token)
{
var length = token.Length;
if (length > 1)
{
if (token[0] == '$')
{
if (!int.TryParse(token[1..], out var tp))
return (TransformType.NoTransform, [-1], -1, SubjectTokens.Empty, null);
return (TransformType.Wildcard, [tp], -1, SubjectTokens.Empty, null);
}
if (length > 4 && token[0] == '{' && token[1] == '{' &&
token[length - 2] == '}' && token[length - 1] == '}')
{
// {{wildcard(n)}}
var args = GetMappingFunctionArgs(WildcardRe, token);
if (args != null)
{
if (args.Length == 1 && args[0] == SubjectTokens.Empty)
return (TransformType.BadTransform, [], -1, SubjectTokens.Empty,
new MappingDestinationException(token, ServerErrors.ErrMappingDestinationNotEnoughArgs));
if (args.Length == 1)
{
if (!int.TryParse(args[0].Trim(), out var ti))
return (TransformType.BadTransform, [], -1, SubjectTokens.Empty,
new MappingDestinationException(token, ServerErrors.ErrMappingDestinationInvalidArg));
return (TransformType.Wildcard, [ti], -1, SubjectTokens.Empty, null);
}
return (TransformType.BadTransform, [], -1, SubjectTokens.Empty,
new MappingDestinationException(token, ServerErrors.ErrMappingDestinationTooManyArgs));
}
// {{partition(n[,t1,t2,...])}}
args = GetMappingFunctionArgs(PartitionRe, token);
if (args != null)
{
if (args.Length < 1)
return (TransformType.BadTransform, [], -1, SubjectTokens.Empty,
new MappingDestinationException(token, ServerErrors.ErrMappingDestinationNotEnoughArgs));
if (!int.TryParse(args[0].Trim(), out var partN) || (long)partN > int.MaxValue)
return (TransformType.BadTransform, [], -1, SubjectTokens.Empty,
new MappingDestinationException(token, ServerErrors.ErrMappingDestinationInvalidArg));
if (args.Length == 1)
return (TransformType.Partition, [], partN, SubjectTokens.Empty, null);
var tidxs = new List();
foreach (var t in args[1..])
{
if (!int.TryParse(t.Trim(), out var ti2))
return (TransformType.BadTransform, [], -1, SubjectTokens.Empty,
new MappingDestinationException(token, ServerErrors.ErrMappingDestinationInvalidArg));
tidxs.Add(ti2);
}
return (TransformType.Partition, [.. tidxs], partN, SubjectTokens.Empty, null);
}
// {{SplitFromLeft(t, n)}}
args = GetMappingFunctionArgs(SplitFromLeftRe, token);
if (args != null) return TransformIndexIntArgsHelper(token, args, TransformType.SplitFromLeft);
// {{SplitFromRight(t, n)}}
args = GetMappingFunctionArgs(SplitFromRightRe, token);
if (args != null) return TransformIndexIntArgsHelper(token, args, TransformType.SplitFromRight);
// {{SliceFromLeft(t, n)}}
args = GetMappingFunctionArgs(SliceFromLeftRe, token);
if (args != null) return TransformIndexIntArgsHelper(token, args, TransformType.SliceFromLeft);
// {{SliceFromRight(t, n)}}
args = GetMappingFunctionArgs(SliceFromRightRe, token);
if (args != null) return TransformIndexIntArgsHelper(token, args, TransformType.SliceFromRight);
// {{right(t, n)}}
args = GetMappingFunctionArgs(RightRe, token);
if (args != null) return TransformIndexIntArgsHelper(token, args, TransformType.Right);
// {{left(t, n)}}
args = GetMappingFunctionArgs(LeftRe, token);
if (args != null) return TransformIndexIntArgsHelper(token, args, TransformType.Left);
// {{split(t, delim)}}
args = GetMappingFunctionArgs(SplitRe, token);
if (args != null)
{
if (args.Length < 2)
return (TransformType.BadTransform, [], -1, SubjectTokens.Empty,
new MappingDestinationException(token, ServerErrors.ErrMappingDestinationNotEnoughArgs));
if (args.Length > 2)
return (TransformType.BadTransform, [], -1, SubjectTokens.Empty,
new MappingDestinationException(token, ServerErrors.ErrMappingDestinationTooManyArgs));
if (!int.TryParse(args[0].Trim(), out var splitIdx))
return (TransformType.BadTransform, [], -1, SubjectTokens.Empty,
new MappingDestinationException(token, ServerErrors.ErrMappingDestinationInvalidArg));
if (args[1].Contains(' ') || args[1].Contains(SubjectTokens.Tsep))
return (TransformType.BadTransform, [], -1, SubjectTokens.Empty,
new MappingDestinationException(token, ServerErrors.ErrMappingDestinationInvalidArg));
return (TransformType.Split, [splitIdx], -1, args[1], null);
}
// {{random(n)}}
args = GetMappingFunctionArgs(RandomRe, token);
if (args != null)
{
if (args.Length != 1)
return (TransformType.BadTransform, [], -1, SubjectTokens.Empty,
new MappingDestinationException(token, ServerErrors.ErrMappingDestinationNotEnoughArgs));
if (!int.TryParse(args[0].Trim(), out var randN) || (long)randN > int.MaxValue)
return (TransformType.BadTransform, [], -1, SubjectTokens.Empty,
new MappingDestinationException(token, ServerErrors.ErrMappingDestinationInvalidArg));
return (TransformType.Random, [], randN, SubjectTokens.Empty, null);
}
return (TransformType.BadTransform, [], -1, SubjectTokens.Empty,
new MappingDestinationException(token, ServerErrors.ErrUnknownMappingDestinationFunction));
}
}
return (TransformType.NoTransform, [-1], -1, SubjectTokens.Empty, null);
}
///
/// Tokenises a subject with wildcards into a formal transform destination.
/// e.g. "foo.*.*" → "foo.$1.$2".
/// Mirrors transformTokenize.
///
public static string TransformTokenize(string subject)
{
var i = 1;
var parts = new List();
foreach (var token in subject.Split(SubjectTokens.Btsep))
{
if (token == SubjectTokens.Pwcs)
{
parts.Add($"${i++}");
}
else
{
parts.Add(token);
}
}
return string.Join(SubjectTokens.Tsep, parts);
}
///
/// Converts a transform destination back to a wildcard subject + placeholder list.
/// Mirrors transformUntokenize.
///
public static (string subject, string[] placeholders) TransformUntokenize(string subject)
{
var phs = new List();
var nda = new List();
foreach (var token in subject.Split(SubjectTokens.Btsep))
{
var args = GetMappingFunctionArgs(WildcardRe, token);
var isWildcardPlaceholder =
(token.Length > 1 && token[0] == '$' && token[1] >= '1' && token[1] <= '9') ||
(args?.Length == 1 && args[0] != SubjectTokens.Empty);
if (isWildcardPlaceholder)
{
phs.Add(token);
nda.Add(SubjectTokens.Pwcs);
}
else
{
nda.Add(token);
}
}
return (string.Join(SubjectTokens.Tsep, nda), [.. phs]);
}
///
/// Tokenises a subject into an array of dot-separated tokens.
/// Mirrors tokenizeSubject.
///
public static string[] TokenizeSubject(string subject) =>
subject.Split(SubjectTokens.Btsep);
///
/// Returns (valid, tokens, numPwcs, hasFwc) for a subject string.
/// Mirrors subjectInfo.
///
public static (bool valid, string[] tokens, int npwcs, bool hasFwc) SubjectInfo(string subject)
{
if (subject == string.Empty)
return (false, [], 0, false);
var npwcs = 0;
var sfwc = false;
var tokens = subject.Split(SubjectTokens.Tsep);
foreach (var t in tokens)
{
if (t.Length == 0 || sfwc)
return (false, [], 0, false);
if (t.Length > 1) continue;
switch (t[0])
{
case SubjectTokens.Fwc:
sfwc = true;
break;
case SubjectTokens.Pwc:
npwcs++;
break;
}
}
return (true, tokens, npwcs, sfwc);
}
// -------------------------------------------------------------------------
// Internal helpers used by Match
// -------------------------------------------------------------------------
///
/// Returns true if all tokens are literal (no wildcards).
/// Mirrors isValidLiteralSubject in server/sublist.go.
///
internal static bool IsValidLiteralSubject(string[] tokens)
{
foreach (var t in tokens)
{
if (t.Length == 0) return false;
if (t.Length == 1 && (t[0] == SubjectTokens.Pwc || t[0] == SubjectTokens.Fwc))
return false;
}
return true;
}
///
/// Returns true if match the pattern .
/// Mirrors isSubsetMatch in server/sublist.go.
///
internal static bool IsSubsetMatch(string[] tokens, string test)
{
var testToks = TokenizeSubjectIntoSlice(test);
return IsSubsetMatchTokenized(tokens, testToks);
}
private static string[] TokenizeSubjectIntoSlice(string subject)
{
var result = new List();
var start = 0;
for (var i = 0; i < subject.Length; i++)
{
if (subject[i] == SubjectTokens.Btsep)
{
result.Add(subject[start..i]);
start = i + 1;
}
}
result.Add(subject[start..]);
return [.. result];
}
    // Walks the pattern tokens (test) against the candidate tokens and decides
    // whether every subject matched by `tokens` is also matched by `test`.
    // Mirrors isSubsetMatchTokenized in server/sublist.go.
    private static bool IsSubsetMatchTokenized(string[] tokens, string[] test)
    {
        for (var i = 0; i < test.Length; i++)
        {
            // Pattern is longer than the candidate: cannot be a subset.
            if (i >= tokens.Length) return false;
            var t2 = test[i];
            if (t2.Length == 0) return false;
            // ">" in the pattern matches everything that remains.
            if (t2.Length == 1 && t2[0] == SubjectTokens.Fwc) return true;
            var t1 = tokens[i];
            if (t1.Length == 0) return false;
            // ">" in the candidate can never be a subset of a narrower pattern.
            if (t1.Length == 1 && t1[0] == SubjectTokens.Fwc) return false;
            if (t1.Length == 1 && t1[0] == SubjectTokens.Pwc)
            {
                // "*" in the candidate is only a subset of "*" in the pattern.
                if (!(t2.Length == 1 && t2[0] == SubjectTokens.Pwc)) return false;
                // NOTE: mirrors the Go source; i is always < test.Length here,
                // so this branch never fires (kept for fidelity to Go).
                if (i >= test.Length) return true;
                continue;
            }
            // Literal candidate token: pattern must be "*" or an exact match.
            if (!(t2.Length == 1 && t2[0] == SubjectTokens.Pwc) &&
                string.Compare(t1, t2, StringComparison.Ordinal) != 0)
                return false;
        }
        // All pattern tokens consumed; lengths must agree exactly.
        return tokens.Length == test.Length;
    }
private string GetRandomPartition(int ceiling)
{
if (ceiling == 0) return "0";
return (Random.Shared.Next() % ceiling).ToString();
}
private static string GetHashPartition(byte[] key, int numBuckets)
{
if (numBuckets == 0) return "0";
// FNV-1a 32-bit hash — mirrors fnv.New32a() in Go.
const uint FnvPrime = 16777619;
const uint FnvOffset = 2166136261;
var hash = FnvOffset;
foreach (var b in key) { hash ^= b; hash *= FnvPrime; }
return ((int)(hash % (uint)numBuckets)).ToString();
}
}