docs: add FileStore benchmarks and storage notes

This commit is contained in:
Joseph Doherty
2026-03-13 11:34:19 -04:00
parent f57edca5a8
commit ca2d8019a1
4 changed files with 174 additions and 32 deletions

View File

@@ -0,0 +1,151 @@
using System.Diagnostics;
using NATS.Server.JetStream.Storage;
using Xunit.Abstractions;
namespace NATS.Server.Benchmark.Tests.JetStream;
[Collection("Benchmark-JetStream")]
public class FileStoreAppendBenchmarks(ITestOutputHelper output)
{
    /// <summary>
    /// Measures sustained append throughput for small (128 B) payloads spread
    /// across 8 subjects, reporting ops/s, MB/s, and allocations per op.
    /// </summary>
    [Fact]
    [Trait("Category", "Benchmark")]
    public async Task FileStore_AppendAsync_128B_Throughput()
    {
        var payload = new byte[128];
        var dir = CreateDirectoryPath("append");
        var opts = CreateOptions(dir);
        try
        {
            await using var store = new FileStore(opts);
            await MeasureAsync("FileStore AppendAsync (128B)", operations: 20_000, payload.Length,
                i => store.AppendAsync($"bench.append.{i % 8}", payload, default).AsTask());
        }
        finally
        {
            DeleteDirectory(dir);
        }
    }

    /// <summary>
    /// Measures hot-path lookup throughput of the last message on a subject
    /// after preloading 25k messages across 16 subjects.
    /// </summary>
    [Fact]
    [Trait("Category", "Benchmark")]
    public async Task FileStore_LoadLastBySubject_Throughput()
    {
        var payload = new byte[64];
        var dir = CreateDirectoryPath("load-last");
        var opts = CreateOptions(dir);
        try
        {
            // await using so async cleanup (flush/close) is not blocked on.
            await using var store = new FileStore(opts);
            for (var i = 0; i < 25_000; i++)
                store.StoreMsg($"bench.subject.{i % 16}", null, payload, 0L);
            // Await the lookup directly instead of blocking with
            // .GetAwaiter().GetResult(), which risks thread-pool starvation
            // and distorts the measurement.
            await MeasureAsync("FileStore LoadLastBySubject (hot)", operations: 50_000, payload.Length,
                async _ =>
                {
                    var loaded = await store.LoadLastBySubjectAsync("bench.subject.7", default);
                    // Sanity check: a wrong/missing result would make the
                    // throughput numbers meaningless.
                    if (loaded is null || loaded.Payload.Length != payload.Length)
                        throw new InvalidOperationException("LoadLastBySubjectAsync returned an unexpected result.");
                });
        }
        finally
        {
            DeleteDirectory(dir);
        }
    }

    /// <summary>
    /// Measures the combined overhead of a subject purge, a max-messages trim,
    /// and a follow-up store on a preloaded 12k-message store.
    /// </summary>
    [Fact]
    [Trait("Category", "Benchmark")]
    public void FileStore_PurgeEx_Trim_Overhead()
    {
        var payload = new byte[96];
        var dir = CreateDirectoryPath("purge-trim");
        var opts = CreateOptions(dir);
        try
        {
            using var store = new FileStore(opts);
            for (var i = 0; i < 12_000; i++)
                store.StoreMsg($"bench.purge.{i % 6}", null, payload, 0L);
            Measure("FileStore PurgeEx+Trim", operations: 2_000, payload.Length,
                () =>
                {
                    store.PurgeEx("bench.purge.1", 0, 8);
                    store.TrimToMaxMessages(10_000);
                    // Re-store so each iteration has something to purge again.
                    store.StoreMsg("bench.purge.1", null, payload, 0L);
                });
        }
        finally
        {
            DeleteDirectory(dir);
        }
    }

    /// <summary>
    /// Runs <paramref name="action"/> <paramref name="operations"/> times
    /// (passing the iteration index), timing the loop and recording managed
    /// allocations, then writes a result summary.
    /// </summary>
    private async Task MeasureAsync(string name, int operations, int payloadSize, Func<int, Task> action)
    {
        // Settle the heap so prior test garbage doesn't skew the numbers.
        GC.Collect();
        GC.WaitForPendingFinalizers();
        GC.Collect();
        // Awaited continuations can resume on a different thread-pool thread,
        // so the per-thread counter would undercount; use the process-wide
        // total instead. Assumes this benchmark collection runs serially —
        // concurrent tests outside the collection would inflate the delta.
        var beforeAlloc = GC.GetTotalAllocatedBytes(precise: true);
        var sw = Stopwatch.StartNew();
        for (var i = 0; i < operations; i++)
            await action(i);
        sw.Stop();
        WriteResult(name, operations, (long)operations * payloadSize, sw.Elapsed, GC.GetTotalAllocatedBytes(precise: true) - beforeAlloc);
    }

    /// <summary>
    /// Synchronous counterpart of <see cref="MeasureAsync"/>: runs
    /// <paramref name="action"/> <paramref name="operations"/> times, timing
    /// the loop and recording current-thread allocations.
    /// </summary>
    private void Measure(string name, int operations, int payloadSize, Action action)
    {
        // Settle the heap so prior test garbage doesn't skew the numbers.
        GC.Collect();
        GC.WaitForPendingFinalizers();
        GC.Collect();
        // Sync loop never leaves this thread, so the cheaper per-thread
        // counter is accurate here.
        var beforeAlloc = GC.GetAllocatedBytesForCurrentThread();
        var sw = Stopwatch.StartNew();
        for (var i = 0; i < operations; i++)
            action();
        sw.Stop();
        WriteResult(name, operations, (long)operations * payloadSize, sw.Elapsed, GC.GetAllocatedBytesForCurrentThread() - beforeAlloc);
    }

    /// <summary>
    /// Writes ops/s, MB/s, allocation-per-op, and elapsed time for one run
    /// to the xUnit test output.
    /// </summary>
    private void WriteResult(string name, int operations, long totalBytes, TimeSpan elapsed, long allocatedBytes)
    {
        var opsPerSecond = operations / elapsed.TotalSeconds;
        var megabytesPerSecond = totalBytes / elapsed.TotalSeconds / (1024.0 * 1024.0);
        var bytesPerOperation = allocatedBytes / (double)operations;
        output.WriteLine($"=== {name} ===");
        output.WriteLine($"Ops: {opsPerSecond:N0} ops/s");
        output.WriteLine($"Data: {megabytesPerSecond:F1} MB/s");
        output.WriteLine($"Alloc: {bytesPerOperation:F1} B/op");
        output.WriteLine($"Elapsed: {elapsed.TotalMilliseconds:F0} ms");
        output.WriteLine("");
    }

    /// <summary>
    /// Builds (but does not create) a unique temp-directory path for one
    /// benchmark; the directory itself is created by <see cref="CreateOptions"/>.
    /// </summary>
    private static string CreateDirectoryPath(string suffix)
        => Path.Combine(Path.GetTempPath(), $"nats-js-filestore-bench-{suffix}-{Guid.NewGuid():N}");

    /// <summary>
    /// Creates <paramref name="dir"/> on disk and returns store options
    /// pointing at it with a 256 KiB block size.
    /// </summary>
    private static FileStoreOptions CreateOptions(string dir)
    {
        Directory.CreateDirectory(dir);
        return new FileStoreOptions
        {
            Directory = dir,
            BlockSizeBytes = 256 * 1024,
        };
    }

    /// <summary>Best-effort recursive cleanup of a benchmark directory.</summary>
    private static void DeleteDirectory(string dir)
    {
        if (Directory.Exists(dir))
            Directory.Delete(dir, recursive: true);
    }
}