feat: add benchmark test project for Go vs .NET server comparison
Side-by-side performance benchmarks using NATS.Client.Core against both servers on ephemeral ports. Includes core pub/sub, request/reply latency, and JetStream throughput tests with comparison output and benchmarks_comparison.md results. Also fixes timestamp flakiness in StoreInterfaceTests by using explicit timestamps.
This commit is contained in:
@@ -0,0 +1,106 @@
|
||||
using System.Diagnostics;
|
||||
using NATS.Client.Core;
|
||||
using NATS.Server.Benchmark.Tests.Harness;
|
||||
using NATS.Server.Benchmark.Tests.Infrastructure;
|
||||
using Xunit.Abstractions;
|
||||
|
||||
namespace NATS.Server.Benchmark.Tests.RequestReply;
|
||||
|
||||
[Collection("Benchmark-Core")]
public class MultiClientLatencyTests(CoreServerPairFixture fixture, ITestOutputHelper output)
{
    /// <summary>
    /// Request/reply latency with 10 concurrent requesting clients fanned out
    /// against 2 service responders sharing a queue group, 16-byte payloads.
    /// Runs against the .NET server, and against the Go server too when one is
    /// available, writing a side-by-side comparison to the test output.
    /// </summary>
    [Fact]
    [Trait("Category", "Benchmark")]
    public async Task RequestReply_10Clients2Services_16B()
    {
        const int payloadSize = 16;
        const int requestsPerClient = 1_000;
        const int clientCount = 10;
        const int serviceCount = 2;

        var dotnetResult = await RunMultiLatency("Request-Reply 10Cx2S (16B)", "DotNet", payloadSize, requestsPerClient, clientCount, serviceCount, fixture.CreateDotNetClient);

        if (fixture.GoAvailable)
        {
            var goResult = await RunMultiLatency("Request-Reply 10Cx2S (16B)", "Go", payloadSize, requestsPerClient, clientCount, serviceCount, fixture.CreateGoClient);
            BenchmarkResultWriter.WriteComparison(output, goResult, dotnetResult);
        }
        else
        {
            BenchmarkResultWriter.WriteSingle(output, dotnetResult);
        }
    }

    /// <summary>
    /// Runs <paramref name="serviceCount"/> queue-group responders and
    /// <paramref name="clientCount"/> concurrent requesters, each issuing
    /// <paramref name="requestsPerClient"/> request/reply round trips, and
    /// returns the aggregate throughput and latency percentiles.
    /// </summary>
    /// <param name="name">Benchmark display name.</param>
    /// <param name="serverType">Label for the server under test ("Go"/"DotNet"); also keys the queue group so runs don't collide.</param>
    /// <param name="payloadSize">Request/reply payload size in bytes.</param>
    /// <param name="requestsPerClient">Sequential requests each client issues.</param>
    /// <param name="clientCount">Number of concurrent requesting clients.</param>
    /// <param name="serviceCount">Number of responders in the queue group.</param>
    /// <param name="createClient">Factory for a connection to the server under test.</param>
    private static async Task<BenchmarkResult> RunMultiLatency(
        string name, string serverType, int payloadSize, int requestsPerClient,
        int clientCount, int serviceCount, Func<NatsConnection> createClient)
    {
        var payload = new byte[payloadSize];
        const string subject = "bench.reqrep.multi";
        var queueGroup = $"bench-svc-{serverType.ToLowerInvariant()}";

        // Start service responders on a queue group.
        var serviceClients = new NatsConnection[serviceCount];
        var serviceSubs = new INatsSub<byte[]>[serviceCount];
        var serviceTasks = new Task[serviceCount];

        for (var i = 0; i < serviceCount; i++)
        {
            serviceClients[i] = createClient();
            await serviceClients[i].ConnectAsync();
            serviceSubs[i] = await serviceClients[i].SubscribeCoreAsync<byte[]>(subject, queueGroup: queueGroup);
            var client = serviceClients[i];
            var sub = serviceSubs[i];
            serviceTasks[i] = Task.Run(async () =>
            {
                await foreach (var msg in sub.Msgs.ReadAllAsync())
                {
                    if (msg.ReplyTo is not null)
                        await client.PublishAsync(msg.ReplyTo, payload);
                }
            });
        }

        // Round-trip a PING on each service connection so the server has
        // processed every SUB before measurement begins. A fixed Task.Delay
        // here was racy on slow CI machines.
        foreach (var serviceClient in serviceClients)
            await serviceClient.PingAsync();

        // Run concurrent clients.
        var totalMessages = requestsPerClient * clientCount;
        // NOTE(review): Record is called from clientCount tasks concurrently —
        // assumes LatencyTracker is thread-safe; confirm against its implementation.
        var tracker = new LatencyTracker(totalMessages);
        var sw = Stopwatch.StartNew();

        var clientTasks = new Task[clientCount];
        for (var c = 0; c < clientCount; c++)
        {
            clientTasks[c] = Task.Run(async () =>
            {
                await using var client = createClient();
                await client.ConnectAsync();

                for (var i = 0; i < requestsPerClient; i++)
                {
                    var start = Stopwatch.GetTimestamp();
                    await client.RequestAsync<byte[], byte[]>(subject, payload);
                    tracker.Record(Stopwatch.GetTimestamp() - start);
                }
            });
        }

        await Task.WhenAll(clientTasks);
        sw.Stop();

        // Unsubscribing completes each subscription's Msgs channel, which lets
        // the responder loops exit; awaiting them surfaces any exception a
        // responder threw instead of silently dropping it.
        foreach (var sub in serviceSubs)
            await sub.UnsubscribeAsync();
        await Task.WhenAll(serviceTasks);
        foreach (var client in serviceClients)
            await client.DisposeAsync();

        return new BenchmarkResult
        {
            Name = name,
            ServerType = serverType,
            TotalMessages = totalMessages,
            TotalBytes = (long)totalMessages * payloadSize,
            Duration = sw.Elapsed,
            Latencies = tracker.ComputePercentiles(),
        };
    }
}
|
||||
@@ -0,0 +1,64 @@
|
||||
using NATS.Client.Core;
|
||||
using NATS.Server.Benchmark.Tests.Harness;
|
||||
using NATS.Server.Benchmark.Tests.Infrastructure;
|
||||
using Xunit.Abstractions;
|
||||
|
||||
namespace NATS.Server.Benchmark.Tests.RequestReply;
|
||||
|
||||
[Collection("Benchmark-Core")]
public class SingleClientLatencyTests(CoreServerPairFixture fixture, ITestOutputHelper output)
{
    // Shared runner: 500 warm-up round trips, 10k measured round trips.
    private readonly BenchmarkRunner _runner = new() { WarmupCount = 500, MeasurementCount = 10_000 };

    /// <summary>
    /// Single-client request/reply round-trip latency with a 128-byte payload.
    /// Runs against the .NET server, and against the Go server too when one is
    /// available, writing a side-by-side comparison to the test output.
    /// </summary>
    [Fact]
    [Trait("Category", "Benchmark")]
    public async Task RequestReply_SingleClient_128B()
    {
        const int payloadSize = 128;
        const string subject = "bench.reqrep.single";

        var dotnetResult = await RunLatency("Request-Reply Single (128B)", "DotNet", subject, payloadSize, fixture.CreateDotNetClient);

        if (fixture.GoAvailable)
        {
            var goResult = await RunLatency("Request-Reply Single (128B)", "Go", subject, payloadSize, fixture.CreateGoClient);
            BenchmarkResultWriter.WriteComparison(output, goResult, dotnetResult);
        }
        else
        {
            BenchmarkResultWriter.WriteSingle(output, dotnetResult);
        }
    }

    /// <summary>
    /// Measures request/reply latency for one requester against one responder.
    /// </summary>
    /// <param name="name">Benchmark display name.</param>
    /// <param name="serverType">Label for the server under test ("Go"/"DotNet").</param>
    /// <param name="subject">Subject the responder subscribes to.</param>
    /// <param name="payloadSize">Request/reply payload size in bytes.</param>
    /// <param name="createClient">Factory for a connection to the server under test.</param>
    private async Task<BenchmarkResult> RunLatency(string name, string serverType, string subject, int payloadSize, Func<NatsConnection> createClient)
    {
        var payload = new byte[payloadSize];

        await using var serviceClient = createClient();
        await using var requestClient = createClient();
        await serviceClient.ConnectAsync();
        await requestClient.ConnectAsync();

        // Start service responder.
        var sub = await serviceClient.SubscribeCoreAsync<byte[]>(subject);
        var responderTask = Task.Run(async () =>
        {
            await foreach (var msg in sub.Msgs.ReadAllAsync())
            {
                if (msg.ReplyTo is not null)
                    await serviceClient.PublishAsync(msg.ReplyTo, payload);
            }
        });

        // Round-trip a PING so the server has processed the SUB before the
        // first measured request. A fixed Task.Delay here was racy on slow
        // CI machines.
        await serviceClient.PingAsync();

        var result = await _runner.MeasureLatencyAsync(name, serverType, payloadSize,
            async _ =>
            {
                await requestClient.RequestAsync<byte[], byte[]>(subject, payload);
            });

        // Unsubscribing completes the Msgs channel so the responder loop
        // exits; awaiting it surfaces any exception it threw instead of
        // silently dropping it.
        await sub.UnsubscribeAsync();
        await responderTask;
        return result;
    }
}
|
||||
Reference in New Issue
Block a user