feat: add benchmark test project for Go vs .NET server comparison
Side-by-side performance benchmarks using NATS.Client.Core against both servers on ephemeral ports. Includes core pub/sub, request/reply latency, and JetStream throughput tests with comparison output and benchmarks_comparison.md results. Also fixes timestamp flakiness in StoreInterfaceTests by using explicit timestamps.
This commit is contained in:
80
tests/NATS.Server.Benchmark.Tests/Harness/BenchmarkRunner.cs
Normal file
80
tests/NATS.Server.Benchmark.Tests/Harness/BenchmarkRunner.cs
Normal file
@@ -0,0 +1,80 @@
|
||||
using System.Diagnostics;
|
||||
|
||||
namespace NATS.Server.Benchmark.Tests.Harness;
|
||||
|
||||
/// <summary>
/// Lightweight benchmark runner with warmup + timed measurement.
/// Warmup iterations are excluded from all reported numbers.
/// </summary>
public sealed class BenchmarkRunner
{
    /// <summary>Number of untimed iterations run before measurement (lets JIT, caches, and connections settle).</summary>
    public int WarmupCount { get; init; } = 1_000;

    /// <summary>Number of timed iterations per measurement.</summary>
    public int MeasurementCount { get; init; } = 100_000;

    /// <summary>
    /// Measures throughput for a fire-and-forget style workload (pub-only or pub+sub).
    /// The <paramref name="action"/> is called <see cref="MeasurementCount"/> times.
    /// </summary>
    /// <param name="name">Benchmark name recorded in the result.</param>
    /// <param name="serverType">Server label recorded in the result (e.g. which server implementation was benchmarked).</param>
    /// <param name="payloadSize">Per-message payload size in bytes; only used to compute <c>TotalBytes</c>.</param>
    /// <param name="action">Per-iteration workload; receives the zero-based iteration index.</param>
    /// <returns>Aggregate timing over the measured iterations (warmup excluded).</returns>
    /// <exception cref="ArgumentNullException"><paramref name="action"/> is null.</exception>
    public async Task<BenchmarkResult> MeasureThroughputAsync(
        string name,
        string serverType,
        int payloadSize,
        Func<int, Task> action)
    {
        // Fail fast with a clear exception instead of an NRE inside the warmup loop.
        ArgumentNullException.ThrowIfNull(action);

        // Warmup (untimed).
        for (var i = 0; i < WarmupCount; i++)
        {
            await action(i).ConfigureAwait(false);
        }

        // Measurement: one stopwatch around the whole timed loop.
        var sw = Stopwatch.StartNew();
        for (var i = 0; i < MeasurementCount; i++)
        {
            await action(i).ConfigureAwait(false);
        }
        sw.Stop();

        return new BenchmarkResult
        {
            Name = name,
            ServerType = serverType,
            TotalMessages = MeasurementCount,
            // long cast before multiplying avoids int overflow for large count * payload combinations.
            TotalBytes = (long)MeasurementCount * payloadSize,
            Duration = sw.Elapsed,
        };
    }

    /// <summary>
    /// Measures latency for a request-reply style workload.
    /// Records per-iteration round-trip time and computes percentiles.
    /// </summary>
    /// <param name="name">Benchmark name recorded in the result.</param>
    /// <param name="serverType">Server label recorded in the result.</param>
    /// <param name="payloadSize">Per-message payload size in bytes; only used to compute <c>TotalBytes</c>.</param>
    /// <param name="roundTripAction">One complete round trip; receives the zero-based iteration index.</param>
    /// <returns>Aggregate timing plus latency percentiles over the measured iterations (warmup excluded).</returns>
    /// <exception cref="ArgumentNullException"><paramref name="roundTripAction"/> is null.</exception>
    public async Task<BenchmarkResult> MeasureLatencyAsync(
        string name,
        string serverType,
        int payloadSize,
        Func<int, Task> roundTripAction)
    {
        // Fail fast with a clear exception instead of an NRE inside the warmup loop.
        ArgumentNullException.ThrowIfNull(roundTripAction);

        // Warmup (untimed).
        for (var i = 0; i < WarmupCount; i++)
        {
            await roundTripAction(i).ConfigureAwait(false);
        }

        // Measurement with per-iteration timing. Raw Stopwatch ticks are recorded;
        // NOTE(review): LatencyTracker is presumably responsible for converting ticks
        // (Stopwatch.Frequency-based, not TimeSpan ticks) to time units — confirm there.
        var tracker = new LatencyTracker(MeasurementCount);
        var overallSw = Stopwatch.StartNew();

        for (var i = 0; i < MeasurementCount; i++)
        {
            var start = Stopwatch.GetTimestamp();
            await roundTripAction(i).ConfigureAwait(false);
            tracker.Record(Stopwatch.GetTimestamp() - start);
        }

        overallSw.Stop();

        return new BenchmarkResult
        {
            Name = name,
            ServerType = serverType,
            TotalMessages = MeasurementCount,
            // long cast before multiplying avoids int overflow for large count * payload combinations.
            TotalBytes = (long)MeasurementCount * payloadSize,
            Duration = overallSw.Elapsed,
            Latencies = tracker.ComputePercentiles(),
        };
    }
}
|
||||
Reference in New Issue
Block a user