feat: add benchmark test project for Go vs .NET server comparison

Side-by-side performance benchmarks using NATS.Client.Core against both
servers on ephemeral ports. Includes core pub/sub, request/reply latency,
and JetStream throughput tests with comparison output and
benchmarks_comparison.md results. Also fixes timestamp flakiness in
StoreInterfaceTests by using explicit timestamps.
This commit is contained in:
Joseph Doherty
2026-03-13 01:23:31 -04:00
parent e9c86c51c3
commit 37575dc41c
28 changed files with 2264 additions and 12 deletions

View File

@@ -0,0 +1,93 @@
using System.Diagnostics;
using NATS.Client.Core;
using NATS.Client.JetStream;
using NATS.Client.JetStream.Models;
using NATS.Server.Benchmark.Tests.Harness;
using NATS.Server.Benchmark.Tests.Infrastructure;
using Xunit.Abstractions;
namespace NATS.Server.Benchmark.Tests.JetStream;
[Collection("Benchmark-JetStream")]
public class AsyncPublishTests(JetStreamServerPairFixture fixture, ITestOutputHelper output)
{
    /// <summary>
    /// Benchmarks JetStream "async" (fire-and-gather, batched) publishing of 128-byte
    /// payloads to a file-backed stream. Runs against the .NET server, and additionally
    /// against the Go server for a side-by-side comparison when one is available.
    /// </summary>
    [Fact]
    [Trait("Category", "Benchmark")]
    public async Task JSAsyncPublish_128B_FileStore()
    {
        const int payloadSize = 128;
        const int messageCount = 5_000;
        const int batchSize = 100;

        var dotnetResult = await RunAsyncPublish("JS Async Publish (128B File)", "DotNet", payloadSize, messageCount, batchSize, fixture.CreateDotNetClient);

        if (fixture.GoAvailable)
        {
            var goResult = await RunAsyncPublish("JS Async Publish (128B File)", "Go", payloadSize, messageCount, batchSize, fixture.CreateGoClient);
            BenchmarkResultWriter.WriteComparison(output, goResult, dotnetResult);
        }
        else
        {
            BenchmarkResultWriter.WriteSingle(output, dotnetResult);
        }
    }

    /// <summary>
    /// Runs one async-publish benchmark against a single server: creates a throwaway
    /// file-backed stream, warms up, then measures <paramref name="messageCount"/>
    /// publishes issued in batches of <paramref name="batchSize"/> in-flight requests.
    /// Every server ack is validated so a failed publish cannot silently inflate the
    /// throughput numbers. The stream is deleted afterwards even if the run fails.
    /// </summary>
    private static async Task<BenchmarkResult> RunAsyncPublish(string name, string serverType, int payloadSize, int messageCount, int batchSize, Func<NatsConnection> createClient)
    {
        var payload = new byte[payloadSize];
        // Unique (GUID-suffixed, max 30 chars) stream name so repeated or parallel runs never collide.
        var streamName = $"BENCH_ASYNC_{serverType.ToUpperInvariant()}_{Guid.NewGuid():N}"[..30];
        var subject = $"bench.js.async.{serverType.ToLowerInvariant()}";
        await using var nats = createClient();
        await nats.ConnectAsync();
        var js = new NatsJSContext(nats);
        await js.CreateStreamAsync(new StreamConfig(streamName, [subject])
        {
            Storage = StreamConfigStorage.File,
            Retention = StreamConfigRetention.Limits,
            MaxMsgs = 10_000_000,
        });
        try
        {
            // Warmup (not measured) — lets the connection and stream reach steady state.
            for (var i = 0; i < 500; i++)
                (await js.PublishAsync(subject, payload)).EnsureSuccess();

            // Measurement — fire-and-gather: keep up to batchSize publishes in flight,
            // then drain (await + validate) the whole batch before issuing more.
            var sw = Stopwatch.StartNew();
            var inFlight = new List<ValueTask<PubAckResponse>>(batchSize);
            for (var i = 0; i < messageCount; i++)
            {
                inFlight.Add(js.PublishAsync(subject, payload));
                if (inFlight.Count >= batchSize)
                    await DrainAsync(inFlight);
            }
            await DrainAsync(inFlight);
            sw.Stop();

            return new BenchmarkResult
            {
                Name = name,
                ServerType = serverType,
                TotalMessages = messageCount,
                TotalBytes = (long)messageCount * payloadSize,
                Duration = sw.Elapsed,
            };
        }
        finally
        {
            await js.DeleteStreamAsync(streamName);
        }
    }

    // Awaits every in-flight publish exactly once, validates each server ack,
    // and resets the batch for reuse.
    private static async Task DrainAsync(List<ValueTask<PubAckResponse>> inFlight)
    {
        foreach (var t in inFlight)
            (await t).EnsureSuccess();
        inFlight.Clear();
    }
}

View File

@@ -0,0 +1,109 @@
using System.Diagnostics;
using NATS.Client.Core;
using NATS.Client.JetStream;
using NATS.Client.JetStream.Models;
using NATS.Server.Benchmark.Tests.Harness;
using NATS.Server.Benchmark.Tests.Infrastructure;
using Xunit.Abstractions;
namespace NATS.Server.Benchmark.Tests.JetStream;
[Collection("Benchmark-JetStream")]
public class DurableConsumerFetchTests(JetStreamServerPairFixture fixture, ITestOutputHelper output)
{
    /// <summary>
    /// Benchmarks consuming 128-byte messages from a memory-backed stream through a
    /// durable consumer using batched fetches. Runs against the .NET server, and
    /// additionally against the Go server for comparison when one is available.
    /// </summary>
    [Fact]
    [Trait("Category", "Benchmark")]
    public async Task JSDurableFetch_Throughput()
    {
        const int payloadSize = 128;
        const int messageCount = 5_000;
        const int fetchBatchSize = 500;

        var dotnetResult = await RunDurableFetch("JS Durable Fetch (128B)", "DotNet", payloadSize, messageCount, fetchBatchSize, fixture.CreateDotNetClient);

        if (fixture.GoAvailable)
        {
            var goResult = await RunDurableFetch("JS Durable Fetch (128B)", "Go", payloadSize, messageCount, fetchBatchSize, fixture.CreateGoClient);
            BenchmarkResultWriter.WriteComparison(output, goResult, dotnetResult);
        }
        else
        {
            BenchmarkResultWriter.WriteSingle(output, dotnetResult);
        }
    }

    /// <summary>
    /// Runs one durable-fetch benchmark against a single server: creates a throwaway
    /// memory-backed stream, pre-populates it with <paramref name="messageCount"/>
    /// messages (validating every publish ack), then measures how long a durable
    /// consumer takes to fetch them all back in batches of
    /// <paramref name="fetchBatchSize"/>. The stream is deleted afterwards even if
    /// the run fails.
    /// </summary>
    private static async Task<BenchmarkResult> RunDurableFetch(
        string name, string serverType, int payloadSize, int messageCount, int fetchBatchSize,
        Func<NatsConnection> createClient)
    {
        var payload = new byte[payloadSize];
        // Unique (GUID-suffixed, max 30 chars) stream name so repeated or parallel runs never collide.
        var streamName = $"BENCH_DUR_{serverType.ToUpperInvariant()}_{Guid.NewGuid():N}"[..30];
        var subject = $"bench.js.durable.{serverType.ToLowerInvariant()}";
        var consumerName = $"bench-dur-{serverType.ToLowerInvariant()}";
        await using var nats = createClient();
        await nats.ConnectAsync();
        var js = new NatsJSContext(nats);
        await js.CreateStreamAsync(new StreamConfig(streamName, [subject])
        {
            Storage = StreamConfigStorage.Memory,
            Retention = StreamConfigRetention.Limits,
            MaxMsgs = 10_000_000,
        });
        try
        {
            // Pre-populate the stream, keeping up to 1000 publishes in flight at a time.
            // Each ack is validated so a silently failed publish can't skew the results.
            var inFlight = new List<ValueTask<PubAckResponse>>(1000);
            for (var i = 0; i < messageCount; i++)
            {
                inFlight.Add(js.PublishAsync(subject, payload));
                if (inFlight.Count >= 1000)
                    await DrainAsync(inFlight);
            }
            await DrainAsync(inFlight);

            // Durable consumer with AckPolicy.None — this benchmark measures pure
            // delivery throughput, not ack round-trips.
            var consumer = await js.CreateOrUpdateConsumerAsync(streamName, new ConsumerConfig(consumerName)
            {
                AckPolicy = ConsumerConfigAckPolicy.None,
            });

            // Fetch in batches until every pre-published message has been received.
            var received = 0;
            var sw = Stopwatch.StartNew();
            while (received < messageCount)
            {
                await foreach (var msg in consumer.FetchAsync<byte[]>(new NatsJSFetchOpts { MaxMsgs = fetchBatchSize }))
                {
                    received++;
                    if (received >= messageCount)
                        break;
                }
            }
            sw.Stop();

            return new BenchmarkResult
            {
                Name = name,
                ServerType = serverType,
                TotalMessages = received,
                TotalBytes = (long)received * payloadSize,
                Duration = sw.Elapsed,
            };
        }
        finally
        {
            await js.DeleteStreamAsync(streamName);
        }
    }

    // Awaits every in-flight publish exactly once, validates each server ack,
    // and resets the batch for reuse.
    private static async Task DrainAsync(List<ValueTask<PubAckResponse>> inFlight)
    {
        foreach (var t in inFlight)
            (await t).EnsureSuccess();
        inFlight.Clear();
    }
}

View File

@@ -0,0 +1,108 @@
using System.Diagnostics;
using NATS.Client.Core;
using NATS.Client.JetStream;
using NATS.Client.JetStream.Models;
using NATS.Server.Benchmark.Tests.Harness;
using NATS.Server.Benchmark.Tests.Infrastructure;
using Xunit.Abstractions;
namespace NATS.Server.Benchmark.Tests.JetStream;
[Collection("Benchmark-JetStream")]
public class OrderedConsumerTests(JetStreamServerPairFixture fixture, ITestOutputHelper output)
{
    /// <summary>
    /// Benchmarks consuming 128-byte messages from a memory-backed stream through an
    /// ordered consumer. The .NET-server run is tolerated to fail (ordered consumers
    /// may not be fully supported there yet); the Go run is still reported alone in
    /// that case.
    /// </summary>
    [Fact]
    [Trait("Category", "Benchmark")]
    public async Task JSOrderedConsumer_Throughput()
    {
        const int payloadSize = 128;
        const int messageCount = 5_000;

        BenchmarkResult? dotnetResult = null;
        try
        {
            dotnetResult = await RunOrderedConsume("JS Ordered Consumer (128B)", "DotNet", payloadSize, messageCount, fixture.CreateDotNetClient);
        }
        catch (NatsJSException ex)
        {
            // Typed catch instead of matching on the exception type's name string:
            // NatsJSException is the base of the client's JetStream exceptions.
            output.WriteLine($"[DotNet] Ordered consumer not fully supported: {ex.Message}");
        }

        if (fixture.GoAvailable)
        {
            var goResult = await RunOrderedConsume("JS Ordered Consumer (128B)", "Go", payloadSize, messageCount, fixture.CreateGoClient);
            if (dotnetResult is not null)
                BenchmarkResultWriter.WriteComparison(output, goResult, dotnetResult);
            else
                BenchmarkResultWriter.WriteSingle(output, goResult);
        }
        else if (dotnetResult is not null)
        {
            BenchmarkResultWriter.WriteSingle(output, dotnetResult);
        }
    }

    /// <summary>
    /// Runs one ordered-consumer benchmark against a single server: creates a
    /// throwaway memory-backed stream, pre-populates it with
    /// <paramref name="messageCount"/> messages (validating every publish ack),
    /// then measures how long an ordered consumer takes to stream them all back.
    /// The stream is deleted afterwards even if the run fails.
    /// </summary>
    private static async Task<BenchmarkResult> RunOrderedConsume(string name, string serverType, int payloadSize, int messageCount, Func<NatsConnection> createClient)
    {
        var payload = new byte[payloadSize];
        // Unique (GUID-suffixed, max 30 chars) stream name so repeated or parallel runs never collide.
        var streamName = $"BENCH_ORD_{serverType.ToUpperInvariant()}_{Guid.NewGuid():N}"[..30];
        var subject = $"bench.js.ordered.{serverType.ToLowerInvariant()}";
        await using var nats = createClient();
        await nats.ConnectAsync();
        var js = new NatsJSContext(nats);
        await js.CreateStreamAsync(new StreamConfig(streamName, [subject])
        {
            Storage = StreamConfigStorage.Memory,
            Retention = StreamConfigRetention.Limits,
            MaxMsgs = 10_000_000,
        });
        try
        {
            // Pre-populate the stream, keeping up to 1000 publishes in flight at a time.
            // Each ack is validated so a silently failed publish can't skew the results.
            var inFlight = new List<ValueTask<PubAckResponse>>(1000);
            for (var i = 0; i < messageCount; i++)
            {
                inFlight.Add(js.PublishAsync(subject, payload));
                if (inFlight.Count >= 1000)
                    await DrainAsync(inFlight);
            }
            await DrainAsync(inFlight);

            // Consume via ordered consumer until every pre-published message arrives.
            var consumer = await js.CreateOrderedConsumerAsync(streamName);
            var received = 0;
            var sw = Stopwatch.StartNew();
            await foreach (var msg in consumer.ConsumeAsync<byte[]>())
            {
                received++;
                if (received >= messageCount)
                    break;
            }
            sw.Stop();

            return new BenchmarkResult
            {
                Name = name,
                ServerType = serverType,
                TotalMessages = received,
                TotalBytes = (long)received * payloadSize,
                Duration = sw.Elapsed,
            };
        }
        finally
        {
            await js.DeleteStreamAsync(streamName);
        }
    }

    // Awaits every in-flight publish exactly once, validates each server ack,
    // and resets the batch for reuse.
    private static async Task DrainAsync(List<ValueTask<PubAckResponse>> inFlight)
    {
        foreach (var t in inFlight)
            (await t).EnsureSuccess();
        inFlight.Clear();
    }
}

View File

@@ -0,0 +1,62 @@
using NATS.Client.Core;
using NATS.Client.JetStream;
using NATS.Client.JetStream.Models;
using NATS.Server.Benchmark.Tests.Harness;
using NATS.Server.Benchmark.Tests.Infrastructure;
using Xunit.Abstractions;
namespace NATS.Server.Benchmark.Tests.JetStream;
[Collection("Benchmark-JetStream")]
public class SyncPublishTests(JetStreamServerPairFixture fixture, ITestOutputHelper output)
{
    // Shared runner: 500 warmup operations, 10,000 measured operations per server.
    private readonly BenchmarkRunner _runner = new() { WarmupCount = 500, MeasurementCount = 10_000 };

    /// <summary>
    /// Benchmarks synchronous (awaited one-at-a-time) JetStream publishing of 16-byte
    /// payloads to a memory-backed stream, so each measured operation includes a full
    /// server round-trip. Compares Go and .NET servers when both are available.
    /// </summary>
    [Fact]
    [Trait("Category", "Benchmark")]
    public async Task JSSyncPublish_16B_MemoryStore()
    {
        const int payloadSize = 16;

        var dotnetResult = await RunSyncPublish("JS Sync Publish (16B Memory)", "DotNet", payloadSize, fixture.CreateDotNetClient);

        if (fixture.GoAvailable)
        {
            var goResult = await RunSyncPublish("JS Sync Publish (16B Memory)", "Go", payloadSize, fixture.CreateGoClient);
            BenchmarkResultWriter.WriteComparison(output, goResult, dotnetResult);
        }
        else
        {
            BenchmarkResultWriter.WriteSingle(output, dotnetResult);
        }
    }

    /// <summary>
    /// Runs one sync-publish benchmark against a single server: creates a throwaway
    /// memory-backed stream and measures single awaited publishes via the shared
    /// runner, validating every server ack so a failed publish cannot silently count
    /// as a success. The stream is deleted afterwards even if the run fails.
    /// </summary>
    private async Task<BenchmarkResult> RunSyncPublish(string name, string serverType, int payloadSize, Func<NatsConnection> createClient)
    {
        var payload = new byte[payloadSize];
        // Unique (GUID-suffixed, max 30 chars) stream name so repeated or parallel runs never collide.
        var streamName = $"BENCH_SYNC_{serverType.ToUpperInvariant()}_{Guid.NewGuid():N}"[..30];
        var subject = $"bench.js.sync.{serverType.ToLowerInvariant()}";
        await using var nats = createClient();
        await nats.ConnectAsync();
        var js = new NatsJSContext(nats);
        await js.CreateStreamAsync(new StreamConfig(streamName, [subject])
        {
            Storage = StreamConfigStorage.Memory,
            Retention = StreamConfigRetention.Limits,
            MaxMsgs = 1_000_000,
        });
        try
        {
            // Each operation awaits the publish and validates the ack, so the measured
            // time is a true per-message round-trip.
            return await _runner.MeasureThroughputAsync(name, serverType, payloadSize,
                async _ => (await js.PublishAsync(subject, payload)).EnsureSuccess());
        }
        finally
        {
            await js.DeleteStreamAsync(streamName);
        }
    }
}