28 Commits

Author SHA1 Message Date
Joseph Doherty
01df4ccff3 chore: update porting reports 2026-02-27 04:45:23 -05:00
Joseph Doherty
4ba6b2642e Merge branch 'worktree-agent-ac3fde22'
# Conflicts:
#	reports/current.md
#	reports/report_0a6e6bf.md
2026-02-27 04:44:05 -05:00
Joseph Doherty
21bb760e63 Merge branch 'worktree-agent-a0a5dc7b'
# Conflicts:
#	reports/current.md
#	reports/report_0a6e6bf.md
2026-02-27 04:43:58 -05:00
Joseph Doherty
4901249511 Merge branch 'worktree-agent-a54fc93d'
# Conflicts:
#	reports/current.md
#	reports/report_0a6e6bf.md
2026-02-27 04:43:51 -05:00
Joseph Doherty
7769966e2e feat(porttracker): add library batch-update and batch-map commands 2026-02-27 04:43:11 -05:00
Joseph Doherty
750916caed feat(porttracker): add module batch-update and batch-map commands 2026-02-27 04:42:49 -05:00
Joseph Doherty
b63f66fbdc feat(porttracker): add test batch-update and batch-map commands 2026-02-27 04:42:30 -05:00
Joseph Doherty
2a900bf56a feat(porttracker): add feature batch-update and batch-map commands 2026-02-27 04:42:17 -05:00
Joseph Doherty
0a6e6bf60d feat(porttracker): add BatchFilters shared infrastructure 2026-02-27 04:40:27 -05:00
Joseph Doherty
3f6c5f243d feat(porttracker): add ExecuteInTransaction to Database 2026-02-27 04:38:59 -05:00
Joseph Doherty
a99092d0bd docs: add PortTracker batch operations implementation plan
7 tasks: Database transaction helper, BatchFilters infrastructure,
batch commands for feature/test/module/library, and smoke tests.
2026-02-27 04:37:36 -05:00
Joseph Doherty
97be7a25a2 docs: add PortTracker batch operations design
Design for batch-update and batch-map subcommands across all entity
types (feature, test, module, library) with shared filter infrastructure
and dry-run-by-default safety.
2026-02-27 04:34:52 -05:00
Joseph Doherty
11ec33da53 fix: mark server module features as deferred, not verified
Add 'deferred' to features.status CHECK constraint (table migration).
Server module (module_id=8) 3394 features: verified → deferred.
These features have ported implementations but their unit tests are
deferred pending a runnable .NET server end-to-end.
Small module features (modules 1-7, 9-12) remain verified.
2026-02-26 21:53:53 -05:00
Joseph Doherty
1c5921d2c1 fix(p7-10): fix integration test quality issues (server guard, parallelism, flakiness, exception propagation) 2026-02-26 20:21:29 -05:00
Joseph Doherty
3e35ffadce chore: remove UnitTest1.cs scaffolding artifact from unit test project 2026-02-26 20:17:25 -05:00
Joseph Doherty
6a1df6b6f8 feat(p7-10): mark deferred tests, add integration tests, close Phase 7
- 2126 server-integration tests marked deferred
- NatsServerBehaviorTests.cs replaces UnitTest1.cs placeholder
- Server module and all features marked verified
- stub tests cleared to deferred
2026-02-26 20:14:38 -05:00
Joseph Doherty
9552f6e7e9 fix(p7-09): move DirectoryStoreTests to Accounts/, add missing PriorityPolicy test case 2026-02-26 20:10:04 -05:00
Joseph Doherty
f0faaffe69 feat(p7-09): JetStream unit tests — versioning (12), dirstore (12), batching/errors deferred (66)
Port session P7-09: add tests from jetstream_versioning_test.go (T:1791–1808),
dirstore_test.go (T:285–296), jetstream_batching_test.go (T:716–744),
jetstream_errors_test.go (T:1381–1384), and accounts_test.go (T:80–110).

- JetStreamVersioningTests: 12 active unit tests + 6 deferred (server-required)
- DirectoryStoreTests: 12 filesystem tests using fake JWTs (no NKeys dependency)
- JetStreamBatchingTests: 29 deferred stubs (all require running JetStream cluster)
- JetStreamErrorsTests: 4 deferred stubs (NewJS* factories not yet ported)
- accounts_test.go T:80–110: 31 deferred (all use RunServerWithConfig)

Fix DirJwtStore.cs expiration bugs:
  - Use DateTimeOffset.UtcNow.UtcTicks (not Unix-relative ticks) for expiry comparison
  - Replace in-place JwtItem mutation with new-object replacement so DrainStale
    can detect stale heap entries via ReferenceEquals check

Add JetStreamVersioning.cs methods: SetStaticStreamMetadata,
SetDynamicStreamMetadata, CopyStreamMetadata, SetStaticConsumerMetadata,
SetDynamicConsumerMetadata, SetDynamicConsumerInfoMetadata, CopyConsumerMetadata.

Tests: 725 pass, 53 skipped/deferred, 0 failures.
DB: +24 complete, +66 deferred.
2026-02-26 20:02:00 -05:00
Joseph Doherty
6e90eea736 feat(p7-07): defer all 249 filestore tests — FileStore implementation is a stub
All methods on JetStreamFileStore throw NotImplementedException (session 18
placeholder). Marked all 249 unit_tests (IDs 351–599) for server/filestore_test.go
as deferred in porting.db. No test file created; tests will be written once the
FileStore implementation is ported. All 701 existing unit tests continue to pass.
2026-02-26 19:40:05 -05:00
Joseph Doherty
0950580967 feat(p7-06): port memstore & store interface tests (38 tests)
Add JetStreamMemoryStoreTests (27 tests, T:2023-2056) and
StorageEngineTests (11 tests, T:2943-2957) covering the JetStream
memory store and IStreamStore interface. Fix 10 bugs in MemStore.cs
discovered during test authoring: FirstSeq constructor, Truncate(0)
SubjectTree reset, PurgeEx subject-filtered implementation,
UpdateConfig MaxMsgsPer enforcement, FilteredStateLocked partial
range scan, StoreRawMsgLocked DiscardNewPer, MultiLastSeqs maxSeq
fallback scan + LastNeedsUpdate recalculation, AllLastSeqs
LastNeedsUpdate recalculation, LoadLastLocked LazySubjectState
recalculation, GetSeqFromTime ts==last equality, and timestamp
precision (100-ns throughout). 20 tests deferred (internal fields,
benchmarks, TTL, filestore-only). All 701 unit tests pass.
2026-02-26 19:35:58 -05:00
Joseph Doherty
917cd33442 feat(p7-05): fill signal & log stubs — SignalHandlerTests, ServerLoggerTests
- Add RemovePassFromTrace, RemoveAuthTokenFromTrace, RemoveSecretsFromTrace
  static methods to ServerLogging (mirrors removeSecretsFromTrace/redact in
  server/client.go); uses same regex patterns as Go source to redact only the
  first match's value with [REDACTED].
- Update ClientConnection.RemoveSecretsFromTrace stub to delegate to
  ServerLogging.RemoveSecretsFromTrace.
- Add 2 unit tests to SignalHandlerTests (T:2919 invalid command, T:2920 invalid
  PID); mark 14 process-injection/subprocess tests as deferred ([Fact(Skip=…)]).
- Create ServerLoggerTests with 3 test methods (T:2020, T:2021, T:2022) covering
  NoPasswordsFromConnectTrace, RemovePassFromTrace (8 theory cases),
  RemoveAuthTokenFromTrace (8 theory cases).
- DB: 3 log tests → complete, 2 signal tests → complete, 14 signal tests → deferred.
- All 663 unit tests pass (was 645), 14 deferred skipped.
2026-02-26 19:15:57 -05:00
Joseph Doherty
364329cc1e feat(p7-04): fill auth & config-check stubs — 1 written, 39 deferred
auth_test.go (6): T:153 GetAuthErrClosedState written as pure unit test;
T:147/149-152 use RunServer/RunServerWithConfig → deferred.
auth_callout_test.go (31): all use NewAuthTest (RunServer) → all deferred.
config_check_test.go (3): depend on Go .conf-format parser not yet ported → deferred.
Adds 7 new test methods to AuthHandlerTests; suite grows 638→645.
2026-02-26 19:07:44 -05:00
Joseph Doherty
91f86b9f51 feat(p7-03): fill jwt_test.go stubs — all 88 marked deferred
All 88 unit test stubs in server/jwt_test.go (IDs 1809–1896) depend on
server infrastructure (RunServer, opTrustBasicSetup, newClientForServer,
s.LookupAccount, s.UpdateAccountClaims, etc.) and cannot be exercised as
pure unit tests. Marked all 88 as 'deferred' for Phase 8 integration testing.
Full suite remains at 638 passing tests.
2026-02-26 19:04:02 -05:00
Joseph Doherty
f0b4138459 feat(p7-02): fill opts_test.go stubs — ServerOptionsTests
Write 3 unit tests portable without a running server:
- ListenMonitoringDefault (T:2524): SetBaselineOptions propagates Host → HttpHost
- GetStorageSize (T:2576): StorageSizeJsonConverter.Parse K/M/G/T suffixes
- ClusterNameAndGatewayNameConflict (T:2571): ValidateOptions returns ErrClusterNameConfigConflict

Mark 74 opts_test.go stubs deferred: tests require either the NATS
conf-format parser (not yet ported), a running server (RunServer/NewServer),
or CLI flag-parsing infrastructure (ConfigureOptions).

Fix StorageSizeJsonConverter.Parse to return 0 for empty input,
matching Go getStorageSize("") == (0, nil).

Total unit tests: 638 passing.
2026-02-26 19:00:18 -05:00
Joseph Doherty
8b63a6f6c2 feat(p7-01): verify 11 small modules (114 tests), mark thw benchmarks n/a 2026-02-26 18:53:54 -05:00
Joseph Doherty
08620388f1 feat(p7-01): add 'deferred' status to unit_tests schema
SQLite table recreated (no ALTER TABLE support for CHECK constraints).
porting-schema.sql updated to match. Row count unchanged at 3257.
2026-02-26 18:50:50 -05:00
Joseph Doherty
7750b46f9f docs: Phase 7 implementation plan — 11 tasks, 10 sessions
Covers schema migration, small-module verification, 224 stub fills,
401 new unit tests, 2126 deferred server-integration tests, and
NatsServerBehaviorTests integration baseline.
2026-02-26 18:49:24 -05:00
Joseph Doherty
d09de1c5cf docs: Phase 7 design — porting verification approach
Defines two-layer test strategy (unit vs server-integration/deferred),
10-session structure, schema extension adding deferred status, and
completion criteria for Phase 7.
2026-02-26 18:38:28 -05:00
60 changed files with 8259 additions and 63 deletions

View File

@@ -0,0 +1,144 @@
# Phase 7: Porting Verification — Design
**Date:** 2026-02-26
**Scope:** Verify all ported code through targeted testing; mark server-integration tests as `deferred`
---
## Context
After Phase 6 (23 porting sessions + 93 stub completions), the DB state entering Phase 7:
| Item | Count |
|------|-------|
| Features complete | 3,596 / 3,673 (77 n_a) |
| Unit tests complete | 319 |
| Unit tests stub | 224 |
| Unit tests not_started | 2,533 |
| Unit tests n_a | 181 |
| Unit tests total | 3,257 |
635 unit tests currently pass. 166 `NotImplementedException` stubs remain in the server — the .NET server is not yet runnable end-to-end.
---
## Key Design Decision: Two Test Layers
Go test files (`jetstream_test.go`, `monitor_test.go`, etc.) all use `RunBasicJetStreamServer()` / `RunServer()` — they start a real NATS server over TCP, then connect via NATS client. These are server-integration tests regardless of whether they target a single node or a cluster.
| Layer | Tests | Treatment |
|-------|-------|-----------|
| **Unit** | Pure component logic (no server startup) | Port & verify in Phase 7 |
| **Server-integration** | Require running NatsServer + NATS client | Status `deferred` |
---
## Schema Extension
Add `deferred` to the `unit_tests.status` CHECK constraint:
```sql
-- Migration: add 'deferred' to unit_tests status enum
-- Recreate table with updated constraint or use SQLite trigger workaround
```
`deferred` = test blocked on running server or cluster infrastructure. Distinct from `n_a` (not applicable to this port).
---
## Test Classification
### Unit Tests to Port (~631 new tests)
| Go source file | Not-started / Stub | Component |
|---|---|---|
| `opts_test.go` | 77 stubs + remaining | Config parsing / binding |
| `jwt_test.go` | 88 stubs | JWT decode / validate |
| `auth_test.go` | 6 stubs | Auth handler logic |
| `auth_callout_test.go` | 31 stubs | Auth callout types / helpers |
| `signal_test.go` | 16 stubs | Signal handler registration |
| `log_test.go` | 3 stubs | Logger behaviour |
| `config_check_test.go` | 3 stubs | Config validation |
| `memstore_test.go` | 41 not_started | Memory store logic |
| `store_test.go` | 17 not_started | Store interface contract |
| `filestore_test.go` | 249 not_started | File store read/write/purge |
| `jetstream_errors_test.go` | 4 not_started | Error type checks |
| `jetstream_versioning_test.go` | 18 not_started | Version compatibility |
| `jetstream_batching_test.go` | 29 not_started | Batching logic |
| `dirstore_test.go` | 12 not_started | JWT directory store |
| `accounts_test.go` | 31 not_started | Account logic (unit subset) |
| `thw` module | 6 not_started | Time hash wheel |
### Server-Integration Tests → `deferred` (~1,799 tests)
| Go source file | Count | Deferred reason |
|---|---|---|
| `jetstream_test.go` | 320 | Needs running server |
| `jetstream_consumer_test.go` | 161 | Needs running server |
| `monitor_test.go` | 103 | HTTP monitoring endpoints |
| `reload_test.go` | 73 | Live config reload |
| `routes_test.go` | 70 | Multi-server routing |
| `events_test.go` | 52 | Server event bus |
| `server_test.go` | 20 | Server lifecycle |
| `jetstream_cluster_*` (×4) | 456 | Multi-node cluster |
| `mqtt_test.go` + extras | ~162 | MQTT server |
| `websocket_test.go` | 109 | WebSocket server |
| `raft_test.go` | 104 | Raft consensus |
| `leafnode_test.go` + proxy | 120 | Leaf node infrastructure |
| `gateway_test.go` | 88 | Gateway infrastructure |
| `jetstream_super_cluster_test.go` | 47 | Super-cluster |
| `norace_*` tests | ~141 | Race-detector / timing |
| Benchmark tests | ~20 | Performance only |
| Other cluster/misc | ~53 | Cluster infrastructure |
---
## Session Structure (10 sessions)
| Session | Scope | New tests | Source files |
|---------|-------|-----------|---|
| **P7-01** | Schema migration + small module verification | 0 new (114 existing) | ats, avl, certidp, gsl, pse, stree, thw, tpm |
| **P7-02** | Opts & config stubs + remaining opts tests | ~95 | `opts_test.go` |
| **P7-03** | JWT stubs | 88 | `jwt_test.go` |
| **P7-04** | Auth stubs + auth callout stubs | 37 | `auth_test.go`, `auth_callout_test.go`, `config_check_test.go` |
| **P7-05** | Signal + log stubs | 19 | `signal_test.go`, `log_test.go` |
| **P7-06** | Store unit tests — memory + interface | ~58 | `memstore_test.go`, `store_test.go` |
| **P7-07** | File store unit tests (first half) | ~125 | `filestore_test.go` lines 1–4,000 |
| **P7-08** | File store unit tests (second half) | ~124 | `filestore_test.go` remainder |
| **P7-09** | JetStream unit tests — errors, versioning, batching, dirstore, accounts | ~94 | `jetstream_errors_test.go`, `jetstream_versioning_test.go`, `jetstream_batching_test.go`, `dirstore_test.go`, `accounts_test.go` |
| **P7-10** | Mark deferred, integration tests, DB final update, Phase 7 close | — | DB sweep + Gitea milestones 7 & 8 |
**Total new tests written: ~640**
---
## Verification Flow (per session)
1. Write / fill tests → build → run → confirm green
2. Mark tests `complete` in DB (new tests) then `verified`
3. Mark small modules `verified` in DB (P7-01); server module at P7-10
4. `./reports/generate-report.sh` → commit
---
## Integration Tests (P7-10)
Replace the placeholder `UnitTest1.cs` with `NatsServerBehaviorTests.cs`. Tests run against the **Go NATS server** (not the .NET server) to establish a behavioral baseline:
- Basic pub/sub
- Wildcard matching (`foo.*`, `foo.>`)
- Queue groups
- Connect/disconnect lifecycle
- Protocol error handling
---
## Completion Definition
Phase 7 is complete when:
- All non-`n_a`, non-`deferred` tests are `verified`
- `dotnet run --project tools/NatsNet.PortTracker -- phase check 7 --db porting.db` passes
- Gitea issues #45–#52 closed
- Gitea milestones 7 and 8 closed
The ~1,799 `deferred` tests remain for a future phase once the .NET server is end-to-end runnable.

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,17 @@
{
"planPath": "docs/plans/2026-02-26-phase7-plan.md",
"tasks": [
{"id": 13, "subject": "Task 1: Schema Migration — Add deferred status", "status": "pending"},
{"id": 14, "subject": "Task 2: P7-01 — Small module verification (114 tests)", "status": "pending", "blockedBy": [13]},
{"id": 15, "subject": "Task 3: P7-02 — Opts stubs (77 tests)", "status": "pending", "blockedBy": [14]},
{"id": 16, "subject": "Task 4: P7-03 — JWT stubs (88 tests)", "status": "pending", "blockedBy": [14]},
{"id": 17, "subject": "Task 5: P7-04 — Auth & config-check stubs (40 tests)", "status": "pending", "blockedBy": [14]},
{"id": 18, "subject": "Task 6: P7-05 — Signal & log stubs (19 tests)", "status": "pending", "blockedBy": [14]},
{"id": 19, "subject": "Task 7: P7-06 — Memory store & store interface tests (58 tests)", "status": "pending", "blockedBy": [14]},
{"id": 20, "subject": "Task 8: P7-07 — File store tests, first half (~125 tests)", "status": "pending", "blockedBy": [14]},
{"id": 21, "subject": "Task 9: P7-08 — File store tests, second half (~124 tests)", "status": "pending", "blockedBy": [20]},
{"id": 22, "subject": "Task 10: P7-09 — JetStream unit tests (94 tests)", "status": "pending", "blockedBy": [14]},
{"id": 23, "subject": "Task 11: P7-10 — Mark deferred, integration tests, close phase", "status": "pending", "blockedBy": [15, 16, 17, 18, 19, 21, 22]}
],
"lastUpdated": "2026-02-26T00:00:00Z"
}

View File

@@ -0,0 +1,120 @@
# PortTracker Batch Operations Design
**Date:** 2026-02-27
**Status:** Approved
## Problem
The PortTracker CLI only supports one-at-a-time operations for status updates, mappings, and N/A marking. With ~3700 features and ~3300 tests, bulk operations require dropping to raw `sqlite3` commands. This is error-prone and bypasses any validation the CLI could provide.
## Design
### Approach
Add `batch-update` and `batch-map` subcommands under each existing entity command (`feature`, `test`, `module`, `library`). All batch commands share a common filter + dry-run infrastructure.
### Shared Batch Infrastructure
A new `BatchFilters` static class in `Commands/BatchFilters.cs` provides:
**Filter Options** (combined with AND logic):
- `--ids <range>` — ID range like `100-200`, comma-separated `1,5,10`, or mixed `1-5,10,20-25`
- `--module <id>` — filter by module_id (feature/test only)
- `--status <status>` — filter by current status value
**Dry-Run Default:**
- Without `--execute`, commands show a preview: "Would affect N items:" + table of matching rows
- With `--execute`, changes are applied inside a transaction and "Updated N items." is printed
- At least one filter is required (no accidental "update everything" with zero filters)
**Shared Methods:**
- `AddFilterOptions(Command cmd, bool includeModuleFilter)` — adds the common options to a command
- `BuildWhereClause(...)` — returns SQL WHERE clause + parameters from parsed filter values
- `PreviewOrExecute(Database db, string table, string selectSql, string updateSql, params[], bool execute)` — handles dry-run preview vs actual execution
### Feature Batch Commands
**`feature batch-update`**
- Filters: `--ids`, `--module`, `--status`
- Setters: `--set-status` (required), `--set-notes` (optional)
- Flag: `--execute`
**`feature batch-map`**
- Filters: `--ids`, `--module`, `--status`
- Setters: `--set-project`, `--set-class`, `--set-method` (at least one required)
- Flag: `--execute`
### Test Batch Commands
**`test batch-update`**
- Filters: `--ids`, `--module`, `--status`
- Setters: `--set-status` (required), `--set-notes` (optional)
- Flag: `--execute`
**`test batch-map`**
- Filters: `--ids`, `--module`, `--status`
- Setters: `--set-project`, `--set-class`, `--set-method` (at least one required)
- Flag: `--execute`
### Module Batch Commands
**`module batch-update`**
- Filters: `--ids`, `--status`
- Setters: `--set-status` (required), `--set-notes` (optional)
- Flag: `--execute`
**`module batch-map`**
- Filters: `--ids`, `--status`
- Setters: `--set-project`, `--set-namespace`, `--set-class` (at least one required)
- Flag: `--execute`
### Library Batch Commands
**`library batch-update`**
- Filters: `--ids`, `--status`
- Setters: `--set-status` (required), `--set-notes` (optional, maps to `dotnet_usage_notes`)
- Flag: `--execute`
**`library batch-map`**
- Filters: `--ids`, `--status`
- Setters: `--set-package`, `--set-namespace`, `--set-notes` (at least one required)
- Flag: `--execute`
## Examples
```bash
# Preview: which features in module 5 are not_started?
porttracker feature batch-update --module 5 --status not_started --set-status deferred
# Execute: defer all features in module 5 with a reason
porttracker feature batch-update --module 5 --status not_started --set-status deferred --set-notes "needs server runtime" --execute
# Execute: mark tests 500-750 as deferred
porttracker test batch-update --ids 500-750 --set-status deferred --set-notes "server-integration" --execute
# Execute: batch-map all features in module 3 to a .NET project
porttracker feature batch-map --module 3 --set-project "ZB.MOM.NatsNet.Server" --execute
# Preview: what libraries are unmapped?
porttracker library batch-update --status not_mapped --set-status mapped
# Execute: batch-map libraries
porttracker library batch-map --ids 1-20 --set-package "Microsoft.Extensions.Logging" --set-namespace "Microsoft.Extensions.Logging" --execute
```
## File Changes
| File | Change |
|------|--------|
| `Commands/BatchFilters.cs` | New — shared filter options, WHERE builder, preview/execute logic |
| `Commands/FeatureCommands.cs` | Add `batch-update` and `batch-map` subcommands |
| `Commands/TestCommands.cs` | Add `batch-update` and `batch-map` subcommands |
| `Commands/ModuleCommands.cs` | Add `batch-update` and `batch-map` subcommands |
| `Commands/LibraryCommands.cs` | Add `batch-update` and `batch-map` subcommands |
| `Data/Database.cs` | Add `ExecuteInTransaction` helper for batch safety |
## Non-Goals
- No batch create or batch delete — not needed for the porting workflow
- No raw `--where` SQL escape hatch — structured filters cover all use cases
- No interactive y/n prompts — dry-run + `--execute` flag is sufficient and scriptable

View File

@@ -0,0 +1,919 @@
# PortTracker Batch Operations Implementation Plan
> **For Claude:** REQUIRED SUB-SKILL: Use superpowers-extended-cc:executing-plans to implement this plan task-by-task.
**Goal:** Add batch-update and batch-map subcommands to all PortTracker entity commands (feature, test, module, library) with shared filter infrastructure and dry-run-by-default safety.
**Architecture:** A shared `BatchFilters` static class provides reusable filter options (`--ids`, `--module`, `--status`), WHERE clause building, and the dry-run/execute pattern. Each entity command file gets two new subcommands that delegate filtering and execution to `BatchFilters`. The `Database` class gets an `ExecuteInTransaction` helper.
**Tech Stack:** .NET 10, System.CommandLine v3 preview, Microsoft.Data.Sqlite
**Design doc:** `docs/plans/2026-02-27-porttracker-batch-design.md`
---
### Task 0: Add ExecuteInTransaction to Database
**Files:**
- Modify: `tools/NatsNet.PortTracker/Data/Database.cs:73` (before Dispose)
**Step 1: Add the method**
Add this method to `Database.cs` before the `Dispose()` method (line 73):
```csharp
/// <summary>
/// Runs a single non-query SQL statement inside an explicit transaction and
/// returns the number of rows affected. Any failure rolls the transaction back
/// and rethrows, so a partially-applied batch update can never be committed.
/// </summary>
/// <param name="sql">The statement to execute (typically a batch UPDATE).</param>
/// <param name="parameters">Named parameter/value pairs; a null value is bound as DBNull.</param>
public int ExecuteInTransaction(string sql, params (string name, object? value)[] parameters)
{
    using var tx = _connection.BeginTransaction();
    try
    {
        using var command = CreateCommand(sql);
        command.Transaction = tx;

        foreach (var (paramName, paramValue) in parameters)
        {
            // Sqlite parameter binding needs DBNull.Value, not a CLR null.
            command.Parameters.AddWithValue(paramName, paramValue ?? DBNull.Value);
        }

        var rowCount = command.ExecuteNonQuery();
        tx.Commit();
        return rowCount;
    }
    catch
    {
        // Roll back explicitly and rethrow so the caller sees the original error.
        tx.Rollback();
        throw;
    }
}
```
**Step 2: Verify it compiles**
Run: `dotnet build tools/NatsNet.PortTracker/NatsNet.PortTracker.csproj`
Expected: Build succeeded.
**Step 3: Commit**
```bash
git add tools/NatsNet.PortTracker/Data/Database.cs
git commit -m "feat(porttracker): add ExecuteInTransaction to Database"
```
---
### Task 1: Create BatchFilters shared infrastructure
**Files:**
- Create: `tools/NatsNet.PortTracker/Commands/BatchFilters.cs`
**Step 1: Create the file**
Create `tools/NatsNet.PortTracker/Commands/BatchFilters.cs` with this content:
```csharp
using System.CommandLine;
using NatsNet.PortTracker.Data;
namespace NatsNet.PortTracker.Commands;
/// <summary>
/// Shared infrastructure for the batch-update / batch-map subcommands:
/// common filter options (--ids, --module, --status, --execute), ID-spec
/// parsing, parameterized WHERE-clause construction, and the
/// dry-run-by-default preview/execute flow.
/// </summary>
public static class BatchFilters
{
    /// <summary>Filter option accepting an ID spec: "100-200", "1,5,10", or mixed "1-5,10,20-25".</summary>
    public static Option<string?> IdsOption() => new("--ids")
    {
        Description = "ID range: 100-200, 1,5,10, or mixed 1-5,10,20-25"
    };

    /// <summary>Filter option restricting matches to one module (features/tests only).</summary>
    public static Option<int?> ModuleOption() => new("--module")
    {
        Description = "Filter by module ID"
    };

    /// <summary>Filter option matching rows by their current status value.</summary>
    public static Option<string?> StatusOption() => new("--status")
    {
        Description = "Filter by current status"
    };

    /// <summary>Flag that switches from dry-run preview (the default) to actually applying changes.</summary>
    public static Option<bool> ExecuteOption() => new("--execute")
    {
        Description = "Actually apply changes (default is dry-run preview)",
        DefaultValueFactory = _ => false
    };

    /// <summary>
    /// Adds the standard batch filter options to <paramref name="cmd"/>.
    /// The --module filter is only added when the entity has a module_id column.
    /// </summary>
    public static void AddFilterOptions(Command cmd, bool includeModuleFilter)
    {
        cmd.Add(IdsOption());
        if (includeModuleFilter)
            cmd.Add(ModuleOption());
        cmd.Add(StatusOption());
        cmd.Add(ExecuteOption());
    }

    /// <summary>
    /// Parses an ID spec ("1-5,10,20-25") into a flat list of IDs.
    /// Invalid parts are skipped with a console warning. Negative IDs are not
    /// supported ('-' is always treated as a range separator). A descending
    /// range like "5-3" yields no IDs.
    /// </summary>
    public static List<int> ParseIds(string? idsSpec)
    {
        if (string.IsNullOrWhiteSpace(idsSpec)) return [];
        var ids = new List<int>();
        foreach (var part in idsSpec.Split(',', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries))
        {
            if (part.Contains('-'))
            {
                var range = part.Split('-', 2);
                if (int.TryParse(range[0], out var start) && int.TryParse(range[1], out var end))
                {
                    for (var i = start; i <= end; i++)
                        ids.Add(i);
                }
                else
                {
                    Console.WriteLine($"Warning: invalid range '{part}', skipping.");
                }
            }
            else if (int.TryParse(part, out var id))
            {
                ids.Add(id);
            }
            else
            {
                Console.WriteLine($"Warning: invalid ID '{part}', skipping.");
            }
        }
        return ids;
    }

    /// <summary>
    /// Builds a parameterized WHERE clause (with leading " WHERE ") from the
    /// parsed filter values, combined with AND logic. Returns an empty clause
    /// when no filter was supplied.
    /// </summary>
    public static (string whereClause, List<(string name, object? value)> parameters) BuildWhereClause(
        string? idsSpec, int? moduleId, string? status, string idColumn = "id", string moduleColumn = "module_id")
    {
        var clauses = new List<string>();
        var parameters = new List<(string name, object? value)>();
        if (!string.IsNullOrWhiteSpace(idsSpec))
        {
            var ids = ParseIds(idsSpec);
            if (ids.Count > 0)
            {
                var placeholders = new List<string>();
                for (var i = 0; i < ids.Count; i++)
                {
                    placeholders.Add($"@id{i}");
                    parameters.Add(($"@id{i}", ids[i]));
                }
                clauses.Add($"{idColumn} IN ({string.Join(", ", placeholders)})");
            }
            else
            {
                // The caller supplied an --ids filter but nothing in it parsed.
                // Match nothing rather than silently dropping the filter:
                // otherwise "--ids garbage --execute" could update every row.
                clauses.Add("1 = 0");
            }
        }
        if (moduleId is not null)
        {
            clauses.Add($"{moduleColumn} = @moduleFilter");
            parameters.Add(("@moduleFilter", moduleId));
        }
        if (!string.IsNullOrWhiteSpace(status))
        {
            clauses.Add("status = @statusFilter");
            parameters.Add(("@statusFilter", status));
        }
        if (clauses.Count == 0)
            return ("", parameters);
        return (" WHERE " + string.Join(" AND ", clauses), parameters);
    }

    /// <summary>
    /// Dry-run/execute dispatcher: counts matching rows, then either prints a
    /// preview table (default) or applies the UPDATE inside a transaction
    /// (when <paramref name="execute"/> is true).
    /// </summary>
    public static void PreviewOrExecute(
        Database db,
        string table,
        string displayColumns,
        string updateSetClause,
        List<(string name, object? value)> updateParams,
        string whereClause,
        List<(string name, object? value)> filterParams,
        bool execute)
    {
        // Count matching rows first so both paths can short-circuit on zero.
        var countSql = $"SELECT COUNT(*) FROM {table}{whereClause}";
        var count = db.ExecuteScalar<long>(countSql, filterParams.ToArray());
        if (count == 0)
        {
            Console.WriteLine("No items match the specified filters.");
            return;
        }
        // Preview
        var previewSql = $"SELECT {displayColumns} FROM {table}{whereClause} ORDER BY id";
        var rows = db.Query(previewSql, filterParams.ToArray());
        if (!execute)
        {
            Console.WriteLine($"Would affect {count} items:");
            Console.WriteLine();
            PrintPreviewTable(rows);
            Console.WriteLine();
            Console.WriteLine("Add --execute to apply these changes.");
            return;
        }
        // Execute: SET parameters first, then WHERE parameters, all in one transaction.
        var allParams = new List<(string name, object? value)>();
        allParams.AddRange(updateParams);
        allParams.AddRange(filterParams);
        var updateSql = $"UPDATE {table} SET {updateSetClause}{whereClause}";
        var affected = db.ExecuteInTransaction(updateSql, allParams.ToArray());
        Console.WriteLine($"Updated {affected} items.");
    }

    /// <summary>
    /// Renders query rows as a fixed-width text table. Column widths are sized
    /// to content and capped at 40 chars; output is capped at 50 rows.
    /// </summary>
    private static void PrintPreviewTable(List<Dictionary<string, object?>> rows)
    {
        if (rows.Count == 0) return;
        var columns = rows[0].Keys.ToList();
        var widths = columns.Select(c => c.Length).ToList();
        foreach (var row in rows)
        {
            for (var i = 0; i < columns.Count; i++)
            {
                var val = row[columns[i]]?.ToString() ?? "";
                if (val.Length > widths[i]) widths[i] = Math.Min(val.Length, 40);
            }
        }
        // Header
        var header = string.Join("  ", columns.Select((c, i) => Truncate(c, widths[i]).PadRight(widths[i])));
        Console.WriteLine(header);
        Console.WriteLine(new string('-', header.Length));
        // Rows (cap at 50 for preview)
        var displayRows = rows.Take(50).ToList();
        foreach (var row in displayRows)
        {
            var line = string.Join("  ", columns.Select((c, i) =>
                Truncate(row[c]?.ToString() ?? "", widths[i]).PadRight(widths[i])));
            Console.WriteLine(line);
        }
        if (rows.Count > 50)
            Console.WriteLine($"  ... and {rows.Count - 50} more");
    }

    /// <summary>Truncates <paramref name="s"/> to <paramref name="maxLen"/>, ending with ".." when cut.</summary>
    private static string Truncate(string s, int maxLen)
    {
        if (s.Length <= maxLen) return s;
        // Guard: s[..(maxLen - 2)] would throw for maxLen < 2 (e.g. a 1-char
        // column name); fall back to a plain cut with no ellipsis.
        if (maxLen <= 2) return s[..maxLen];
        return s[..(maxLen - 2)] + "..";
    }
}
```
**Step 2: Verify it compiles**
Run: `dotnet build tools/NatsNet.PortTracker/NatsNet.PortTracker.csproj`
Expected: Build succeeded.
**Step 3: Commit**
```bash
git add tools/NatsNet.PortTracker/Commands/BatchFilters.cs
git commit -m "feat(porttracker): add BatchFilters shared infrastructure"
```
---
### Task 2: Add batch commands to FeatureCommands
**Files:**
- Modify: `tools/NatsNet.PortTracker/Commands/FeatureCommands.cs:169-175`
**Step 1: Add batch-update and batch-map subcommands**
In `FeatureCommands.cs`, insert the batch commands before the `return featureCommand;` line (line 175). Add them after the existing `featureCommand.Add(naCmd);` at line 173.
Replace lines 169-175 with:
```csharp
featureCommand.Add(listCmd);
featureCommand.Add(showCmd);
featureCommand.Add(updateCmd);
featureCommand.Add(mapCmd);
featureCommand.Add(naCmd);
featureCommand.Add(CreateBatchUpdate(dbOption));
featureCommand.Add(CreateBatchMap(dbOption));
return featureCommand;
```
Then add these two static methods to the class (before the `Truncate` method at line 178):
```csharp
/// <summary>
/// Builds the "feature batch-update" subcommand: bulk status (and optionally
/// notes) updates over the features table, gated by the shared batch filters
/// and the dry-run/--execute pattern.
/// </summary>
private static Command CreateBatchUpdate(Option<string> dbOption)
{
    var cmd = new Command("batch-update", "Bulk update feature status");

    var idsOpt = BatchFilters.IdsOption();
    var moduleOpt = BatchFilters.ModuleOption();
    var statusOpt = BatchFilters.StatusOption();
    var executeOpt = BatchFilters.ExecuteOption();
    var setStatus = new Option<string>("--set-status") { Description = "New status to set", Required = true };
    var setNotes = new Option<string?>("--set-notes") { Description = "Notes to set" };

    cmd.Add(idsOpt);
    cmd.Add(moduleOpt);
    cmd.Add(statusOpt);
    cmd.Add(executeOpt);
    cmd.Add(setStatus);
    cmd.Add(setNotes);

    cmd.SetAction(result =>
    {
        var databasePath = result.GetValue(dbOption)!;
        var idsSpec = result.GetValue(idsOpt);
        var moduleId = result.GetValue(moduleOpt);
        var statusFilter = result.GetValue(statusOpt);
        var apply = result.GetValue(executeOpt);
        var targetStatus = result.GetValue(setStatus)!;
        var noteText = result.GetValue(setNotes);

        // Guard: refuse a filter-less invocation so a bare batch-update can
        // never touch the whole table by accident.
        var hasFilter = !string.IsNullOrWhiteSpace(idsSpec)
                        || moduleId is not null
                        || !string.IsNullOrWhiteSpace(statusFilter);
        if (!hasFilter)
        {
            Console.WriteLine("Error: at least one filter (--ids, --module, --status) is required.");
            return;
        }

        using var db = new Database(databasePath);
        var (whereClause, filterParams) = BatchFilters.BuildWhereClause(idsSpec, moduleId, statusFilter);

        // Always sets status; notes only when --set-notes was supplied.
        var assignments = new List<string> { "status = @newStatus" };
        var setParams = new List<(string, object?)> { ("@newStatus", targetStatus) };
        if (noteText is not null)
        {
            assignments.Add("notes = @newNotes");
            setParams.Add(("@newNotes", noteText));
        }

        BatchFilters.PreviewOrExecute(db, "features",
            "id, name, status, module_id, notes",
            string.Join(", ", assignments), setParams,
            whereClause, filterParams, apply);
    });

    return cmd;
}
/// <summary>
/// Builds the "feature batch-map" subcommand: bulk assignment of the .NET
/// project/class/method mapping columns, gated by the shared batch filters
/// and the dry-run/--execute pattern.
/// </summary>
private static Command CreateBatchMap(Option<string> dbOption)
{
    var cmd = new Command("batch-map", "Bulk map features to .NET methods");

    var idsOpt = BatchFilters.IdsOption();
    var moduleOpt = BatchFilters.ModuleOption();
    var statusOpt = BatchFilters.StatusOption();
    var executeOpt = BatchFilters.ExecuteOption();
    var setProject = new Option<string?>("--set-project") { Description = ".NET project" };
    var setClass = new Option<string?>("--set-class") { Description = ".NET class" };
    var setMethod = new Option<string?>("--set-method") { Description = ".NET method" };

    cmd.Add(idsOpt);
    cmd.Add(moduleOpt);
    cmd.Add(statusOpt);
    cmd.Add(executeOpt);
    cmd.Add(setProject);
    cmd.Add(setClass);
    cmd.Add(setMethod);

    cmd.SetAction(result =>
    {
        var databasePath = result.GetValue(dbOption)!;
        var idsSpec = result.GetValue(idsOpt);
        var moduleId = result.GetValue(moduleOpt);
        var statusFilter = result.GetValue(statusOpt);
        var apply = result.GetValue(executeOpt);
        var projectName = result.GetValue(setProject);
        var className = result.GetValue(setClass);
        var methodName = result.GetValue(setMethod);

        // Guard: require at least one filter so the whole table cannot be
        // remapped by accident.
        var hasFilter = !string.IsNullOrWhiteSpace(idsSpec)
                        || moduleId is not null
                        || !string.IsNullOrWhiteSpace(statusFilter);
        if (!hasFilter)
        {
            Console.WriteLine("Error: at least one filter (--ids, --module, --status) is required.");
            return;
        }

        // Guard: require at least one setter, otherwise there is nothing to do.
        if (projectName is null && className is null && methodName is null)
        {
            Console.WriteLine("Error: at least one of --set-project, --set-class, --set-method is required.");
            return;
        }

        using var db = new Database(databasePath);
        var (whereClause, filterParams) = BatchFilters.BuildWhereClause(idsSpec, moduleId, statusFilter);

        // Only the supplied setters become SET clauses.
        var assignments = new List<string>();
        var setParams = new List<(string, object?)>();
        if (projectName is not null) { assignments.Add("dotnet_project = @setProject"); setParams.Add(("@setProject", projectName)); }
        if (className is not null) { assignments.Add("dotnet_class = @setClass"); setParams.Add(("@setClass", className)); }
        if (methodName is not null) { assignments.Add("dotnet_method = @setMethod"); setParams.Add(("@setMethod", methodName)); }

        BatchFilters.PreviewOrExecute(db, "features",
            "id, name, status, dotnet_project, dotnet_class, dotnet_method",
            string.Join(", ", assignments), setParams,
            whereClause, filterParams, apply);
    });

    return cmd;
}
```
**Step 2: Verify it compiles**
Run: `dotnet build tools/NatsNet.PortTracker/NatsNet.PortTracker.csproj`
Expected: Build succeeded.
**Step 3: Smoke test dry-run**
Run: `dotnet run --project tools/NatsNet.PortTracker -- feature batch-update --module 1 --status not_started --set-status deferred --db porting.db`
Expected: Preview output showing matching features (or "No items match").
**Step 4: Commit**
```bash
git add tools/NatsNet.PortTracker/Commands/FeatureCommands.cs
git commit -m "feat(porttracker): add feature batch-update and batch-map commands"
```
---
### Task 3: Add batch commands to TestCommands
**Files:**
- Modify: `tools/NatsNet.PortTracker/Commands/TestCommands.cs:130-135`
**Step 1: Add batch-update and batch-map subcommands**
In `TestCommands.cs`, replace lines 130-135 with:
```csharp
testCommand.Add(listCmd);
testCommand.Add(showCmd);
testCommand.Add(updateCmd);
testCommand.Add(mapCmd);
testCommand.Add(CreateBatchUpdate(dbOption));
testCommand.Add(CreateBatchMap(dbOption));
return testCommand;
```
Then add these two static methods before the `Truncate` method (line 138):
```csharp
/// <summary>
/// Builds the <c>test batch-update</c> subcommand: bulk-sets status (and optionally notes)
/// on unit tests matched by the shared --ids / --module / --status filters.
/// Dry-run by default; pass --execute to apply the UPDATE.
/// </summary>
private static Command CreateBatchUpdate(Option<string> dbOption)
{
var cmd = new Command("batch-update", "Bulk update test status");
// Filter options come from BatchFilters so every entity type accepts the same flags.
var idsOpt = BatchFilters.IdsOption();
var moduleOpt = BatchFilters.ModuleOption();
var statusOpt = BatchFilters.StatusOption();
var executeOpt = BatchFilters.ExecuteOption();
var setStatus = new Option<string>("--set-status") { Description = "New status to set", Required = true };
var setNotes = new Option<string?>("--set-notes") { Description = "Notes to set" };
cmd.Add(idsOpt);
cmd.Add(moduleOpt);
cmd.Add(statusOpt);
cmd.Add(executeOpt);
cmd.Add(setStatus);
cmd.Add(setNotes);
cmd.SetAction(parseResult =>
{
var dbPath = parseResult.GetValue(dbOption)!;
var ids = parseResult.GetValue(idsOpt);
var module = parseResult.GetValue(moduleOpt);
var status = parseResult.GetValue(statusOpt);
var execute = parseResult.GetValue(executeOpt);
var newStatus = parseResult.GetValue(setStatus)!;
var notes = parseResult.GetValue(setNotes);
// Refuse to run with no filter at all — an unfiltered batch would match every test.
if (string.IsNullOrWhiteSpace(ids) && module is null && string.IsNullOrWhiteSpace(status))
{
Console.WriteLine("Error: at least one filter (--ids, --module, --status) is required.");
return;
}
using var db = new Database(dbPath);
// WHERE clause and its parameters are built centrally; values are always bound, never inlined into SQL.
var (whereClause, filterParams) = BatchFilters.BuildWhereClause(ids, module, status);
// --set-status is required, so the SET clause always contains at least the status column.
var setClauses = new List<string> { "status = @newStatus" };
var updateParams = new List<(string, object?)> { ("@newStatus", newStatus) };
// Notes are only touched when --set-notes was supplied (an empty string is a valid value).
if (notes is not null)
{
setClauses.Add("notes = @newNotes");
updateParams.Add(("@newNotes", notes));
}
// Preview the matching rows, or apply the update when --execute was given.
BatchFilters.PreviewOrExecute(db, "unit_tests",
"id, name, status, module_id, notes",
string.Join(", ", setClauses), updateParams,
whereClause, filterParams, execute);
});
return cmd;
}
/// <summary>
/// Builds the <c>test batch-map</c> subcommand: bulk-assigns .NET test project/class/method
/// mappings to unit tests matched by the shared --ids / --module / --status filters.
/// Dry-run by default; pass --execute to apply the UPDATE.
/// </summary>
private static Command CreateBatchMap(Option<string> dbOption)
{
var cmd = new Command("batch-map", "Bulk map tests to .NET test methods");
// Filter options come from BatchFilters so every entity type accepts the same flags.
var idsOpt = BatchFilters.IdsOption();
var moduleOpt = BatchFilters.ModuleOption();
var statusOpt = BatchFilters.StatusOption();
var executeOpt = BatchFilters.ExecuteOption();
var setProject = new Option<string?>("--set-project") { Description = ".NET test project" };
var setClass = new Option<string?>("--set-class") { Description = ".NET test class" };
var setMethod = new Option<string?>("--set-method") { Description = ".NET test method" };
cmd.Add(idsOpt);
cmd.Add(moduleOpt);
cmd.Add(statusOpt);
cmd.Add(executeOpt);
cmd.Add(setProject);
cmd.Add(setClass);
cmd.Add(setMethod);
cmd.SetAction(parseResult =>
{
var dbPath = parseResult.GetValue(dbOption)!;
var ids = parseResult.GetValue(idsOpt);
var module = parseResult.GetValue(moduleOpt);
var status = parseResult.GetValue(statusOpt);
var execute = parseResult.GetValue(executeOpt);
var project = parseResult.GetValue(setProject);
var cls = parseResult.GetValue(setClass);
var method = parseResult.GetValue(setMethod);
// Refuse to run with no filter at all — an unfiltered batch would match every test.
if (string.IsNullOrWhiteSpace(ids) && module is null && string.IsNullOrWhiteSpace(status))
{
Console.WriteLine("Error: at least one filter (--ids, --module, --status) is required.");
return;
}
// At least one --set-* is needed or the UPDATE would have an empty SET clause.
if (project is null && cls is null && method is null)
{
Console.WriteLine("Error: at least one of --set-project, --set-class, --set-method is required.");
return;
}
using var db = new Database(dbPath);
// WHERE clause and its parameters are built centrally; values are always bound, never inlined into SQL.
var (whereClause, filterParams) = BatchFilters.BuildWhereClause(ids, module, status);
// Only the columns the user asked to change go into the SET clause.
var setClauses = new List<string>();
var updateParams = new List<(string, object?)>();
if (project is not null) { setClauses.Add("dotnet_project = @setProject"); updateParams.Add(("@setProject", project)); }
if (cls is not null) { setClauses.Add("dotnet_class = @setClass"); updateParams.Add(("@setClass", cls)); }
if (method is not null) { setClauses.Add("dotnet_method = @setMethod"); updateParams.Add(("@setMethod", method)); }
// Preview the matching rows, or apply the update when --execute was given.
BatchFilters.PreviewOrExecute(db, "unit_tests",
"id, name, status, dotnet_project, dotnet_class, dotnet_method",
string.Join(", ", setClauses), updateParams,
whereClause, filterParams, execute);
});
return cmd;
}
```
**Step 2: Verify it compiles**
Run: `dotnet build tools/NatsNet.PortTracker/NatsNet.PortTracker.csproj`
Expected: Build succeeded.
**Step 3: Smoke test dry-run**
Run: `dotnet run --project tools/NatsNet.PortTracker -- test batch-update --status not_started --set-status deferred --db porting.db`
Expected: Preview output showing matching tests (or "No items match").
**Step 4: Commit**
```bash
git add tools/NatsNet.PortTracker/Commands/TestCommands.cs
git commit -m "feat(porttracker): add test batch-update and batch-map commands"
```
---
### Task 4: Add batch commands to ModuleCommands
**Files:**
- Modify: `tools/NatsNet.PortTracker/Commands/ModuleCommands.cs:145-152`
**Step 1: Add batch-update and batch-map subcommands**
In `ModuleCommands.cs`, replace lines 145-152 with:
```csharp
moduleCommand.Add(listCmd);
moduleCommand.Add(showCmd);
moduleCommand.Add(updateCmd);
moduleCommand.Add(mapCmd);
moduleCommand.Add(naCmd);
moduleCommand.Add(CreateBatchUpdate(dbOption));
moduleCommand.Add(CreateBatchMap(dbOption));
return moduleCommand;
}
```
Then add these two static methods before the closing `}` of the class:
```csharp
/// <summary>
/// Builds the <c>module batch-update</c> subcommand: bulk-sets status (and optionally notes)
/// on modules matched by the shared --ids / --status filters.
/// Modules have no parent-module filter, so --module is intentionally omitted here.
/// Dry-run by default; pass --execute to apply the UPDATE.
/// </summary>
private static Command CreateBatchUpdate(Option<string> dbOption)
{
var cmd = new Command("batch-update", "Bulk update module status");
// Filter options come from BatchFilters so every entity type accepts the same flags.
var idsOpt = BatchFilters.IdsOption();
var statusOpt = BatchFilters.StatusOption();
var executeOpt = BatchFilters.ExecuteOption();
var setStatus = new Option<string>("--set-status") { Description = "New status to set", Required = true };
var setNotes = new Option<string?>("--set-notes") { Description = "Notes to set" };
cmd.Add(idsOpt);
cmd.Add(statusOpt);
cmd.Add(executeOpt);
cmd.Add(setStatus);
cmd.Add(setNotes);
cmd.SetAction(parseResult =>
{
var dbPath = parseResult.GetValue(dbOption)!;
var ids = parseResult.GetValue(idsOpt);
var status = parseResult.GetValue(statusOpt);
var execute = parseResult.GetValue(executeOpt);
var newStatus = parseResult.GetValue(setStatus)!;
var notes = parseResult.GetValue(setNotes);
// Refuse to run with no filter at all — an unfiltered batch would match every module.
if (string.IsNullOrWhiteSpace(ids) && string.IsNullOrWhiteSpace(status))
{
Console.WriteLine("Error: at least one filter (--ids, --status) is required.");
return;
}
using var db = new Database(dbPath);
// No module filter for modules themselves — pass null for the module argument.
var (whereClause, filterParams) = BatchFilters.BuildWhereClause(ids, null, status);
// --set-status is required, so the SET clause always contains at least the status column.
var setClauses = new List<string> { "status = @newStatus" };
var updateParams = new List<(string, object?)> { ("@newStatus", newStatus) };
// Notes are only touched when --set-notes was supplied (an empty string is a valid value).
if (notes is not null)
{
setClauses.Add("notes = @newNotes");
updateParams.Add(("@newNotes", notes));
}
// Preview the matching rows, or apply the update when --execute was given.
BatchFilters.PreviewOrExecute(db, "modules",
"id, name, status, notes",
string.Join(", ", setClauses), updateParams,
whereClause, filterParams, execute);
});
return cmd;
}
/// <summary>
/// Builds the <c>module batch-map</c> subcommand: bulk-assigns .NET project/namespace/class
/// mappings to modules matched by the shared --ids / --status filters.
/// Modules have no parent-module filter, so --module is intentionally omitted here.
/// Dry-run by default; pass --execute to apply the UPDATE.
/// </summary>
private static Command CreateBatchMap(Option<string> dbOption)
{
var cmd = new Command("batch-map", "Bulk map modules to .NET projects");
// Filter options come from BatchFilters so every entity type accepts the same flags.
var idsOpt = BatchFilters.IdsOption();
var statusOpt = BatchFilters.StatusOption();
var executeOpt = BatchFilters.ExecuteOption();
var setProject = new Option<string?>("--set-project") { Description = ".NET project" };
var setNamespace = new Option<string?>("--set-namespace") { Description = ".NET namespace" };
var setClass = new Option<string?>("--set-class") { Description = ".NET class" };
cmd.Add(idsOpt);
cmd.Add(statusOpt);
cmd.Add(executeOpt);
cmd.Add(setProject);
cmd.Add(setNamespace);
cmd.Add(setClass);
cmd.SetAction(parseResult =>
{
var dbPath = parseResult.GetValue(dbOption)!;
var ids = parseResult.GetValue(idsOpt);
var status = parseResult.GetValue(statusOpt);
var execute = parseResult.GetValue(executeOpt);
var project = parseResult.GetValue(setProject);
var ns = parseResult.GetValue(setNamespace);
var cls = parseResult.GetValue(setClass);
// Refuse to run with no filter at all — an unfiltered batch would match every module.
if (string.IsNullOrWhiteSpace(ids) && string.IsNullOrWhiteSpace(status))
{
Console.WriteLine("Error: at least one filter (--ids, --status) is required.");
return;
}
// At least one --set-* is needed or the UPDATE would have an empty SET clause.
if (project is null && ns is null && cls is null)
{
Console.WriteLine("Error: at least one of --set-project, --set-namespace, --set-class is required.");
return;
}
using var db = new Database(dbPath);
// No module filter for modules themselves — pass null for the module argument.
var (whereClause, filterParams) = BatchFilters.BuildWhereClause(ids, null, status);
// Only the columns the user asked to change go into the SET clause.
var setClauses = new List<string>();
var updateParams = new List<(string, object?)>();
if (project is not null) { setClauses.Add("dotnet_project = @setProject"); updateParams.Add(("@setProject", project)); }
if (ns is not null) { setClauses.Add("dotnet_namespace = @setNamespace"); updateParams.Add(("@setNamespace", ns)); }
if (cls is not null) { setClauses.Add("dotnet_class = @setClass"); updateParams.Add(("@setClass", cls)); }
// Preview the matching rows, or apply the update when --execute was given.
BatchFilters.PreviewOrExecute(db, "modules",
"id, name, status, dotnet_project, dotnet_namespace, dotnet_class",
string.Join(", ", setClauses), updateParams,
whereClause, filterParams, execute);
});
return cmd;
}
```
**Step 2: Verify it compiles**
Run: `dotnet build tools/NatsNet.PortTracker/NatsNet.PortTracker.csproj`
Expected: Build succeeded.
**Step 3: Commit**
```bash
git add tools/NatsNet.PortTracker/Commands/ModuleCommands.cs
git commit -m "feat(porttracker): add module batch-update and batch-map commands"
```
---
### Task 5: Add batch commands to LibraryCommands
**Files:**
- Modify: `tools/NatsNet.PortTracker/Commands/LibraryCommands.cs:86-91`
**Step 1: Add batch-update and batch-map subcommands**
In `LibraryCommands.cs`, replace lines 86-91 with:
```csharp
libraryCommand.Add(listCmd);
libraryCommand.Add(mapCmd);
libraryCommand.Add(suggestCmd);
libraryCommand.Add(CreateBatchUpdate(dbOption));
libraryCommand.Add(CreateBatchMap(dbOption));
return libraryCommand;
}
```
Then add these two static methods before the `Truncate` method:
```csharp
/// <summary>
/// Builds the <c>library batch-update</c> subcommand: bulk-sets status (and optionally usage
/// notes) on library mappings matched by the shared --ids / --status filters.
/// Libraries are not tied to a module, so --module is intentionally omitted here.
/// Dry-run by default; pass --execute to apply the UPDATE.
/// </summary>
private static Command CreateBatchUpdate(Option<string> dbOption)
{
var cmd = new Command("batch-update", "Bulk update library status");
// Filter options come from BatchFilters so every entity type accepts the same flags.
var idsOpt = BatchFilters.IdsOption();
var statusOpt = BatchFilters.StatusOption();
var executeOpt = BatchFilters.ExecuteOption();
var setStatus = new Option<string>("--set-status") { Description = "New status to set", Required = true };
var setNotes = new Option<string?>("--set-notes") { Description = "Usage notes to set" };
cmd.Add(idsOpt);
cmd.Add(statusOpt);
cmd.Add(executeOpt);
cmd.Add(setStatus);
cmd.Add(setNotes);
cmd.SetAction(parseResult =>
{
var dbPath = parseResult.GetValue(dbOption)!;
var ids = parseResult.GetValue(idsOpt);
var status = parseResult.GetValue(statusOpt);
var execute = parseResult.GetValue(executeOpt);
var newStatus = parseResult.GetValue(setStatus)!;
var notes = parseResult.GetValue(setNotes);
// Refuse to run with no filter at all — an unfiltered batch would match every library mapping.
if (string.IsNullOrWhiteSpace(ids) && string.IsNullOrWhiteSpace(status))
{
Console.WriteLine("Error: at least one filter (--ids, --status) is required.");
return;
}
using var db = new Database(dbPath);
// Library mappings have no module column — pass null for the module argument.
var (whereClause, filterParams) = BatchFilters.BuildWhereClause(ids, null, status);
// --set-status is required, so the SET clause always contains at least the status column.
var setClauses = new List<string> { "status = @newStatus" };
var updateParams = new List<(string, object?)> { ("@newStatus", newStatus) };
// For libraries, --set-notes maps to the dotnet_usage_notes column (not plain notes).
if (notes is not null)
{
setClauses.Add("dotnet_usage_notes = @newNotes");
updateParams.Add(("@newNotes", notes));
}
// Preview the matching rows, or apply the update when --execute was given.
BatchFilters.PreviewOrExecute(db, "library_mappings",
"id, go_import_path, status, dotnet_usage_notes",
string.Join(", ", setClauses), updateParams,
whereClause, filterParams, execute);
});
return cmd;
}
/// <summary>
/// Builds the <c>library batch-map</c> subcommand: bulk-assigns .NET package/namespace/usage-note
/// mappings to library mappings matched by the shared --ids / --status filters.
/// Dry-run by default; pass --execute to apply the UPDATE.
/// </summary>
private static Command CreateBatchMap(Option<string> dbOption)
{
var cmd = new Command("batch-map", "Bulk map libraries to .NET packages");
// Filter options come from BatchFilters so every entity type accepts the same flags.
var idsOpt = BatchFilters.IdsOption();
var statusOpt = BatchFilters.StatusOption();
var executeOpt = BatchFilters.ExecuteOption();
var setPackage = new Option<string?>("--set-package") { Description = ".NET NuGet package" };
var setNamespace = new Option<string?>("--set-namespace") { Description = ".NET namespace" };
var setNotes = new Option<string?>("--set-notes") { Description = "Usage notes" };
cmd.Add(idsOpt);
cmd.Add(statusOpt);
cmd.Add(executeOpt);
cmd.Add(setPackage);
cmd.Add(setNamespace);
cmd.Add(setNotes);
cmd.SetAction(parseResult =>
{
var dbPath = parseResult.GetValue(dbOption)!;
var ids = parseResult.GetValue(idsOpt);
var status = parseResult.GetValue(statusOpt);
var execute = parseResult.GetValue(executeOpt);
var package = parseResult.GetValue(setPackage);
var ns = parseResult.GetValue(setNamespace);
var notes = parseResult.GetValue(setNotes);
// Refuse to run with no filter at all — an unfiltered batch would match every library mapping.
if (string.IsNullOrWhiteSpace(ids) && string.IsNullOrWhiteSpace(status))
{
Console.WriteLine("Error: at least one filter (--ids, --status) is required.");
return;
}
// At least one --set-* is needed or the UPDATE would have an empty SET clause.
if (package is null && ns is null && notes is null)
{
Console.WriteLine("Error: at least one of --set-package, --set-namespace, --set-notes is required.");
return;
}
using var db = new Database(dbPath);
// Library mappings have no module column — pass null for the module argument.
var (whereClause, filterParams) = BatchFilters.BuildWhereClause(ids, null, status);
// Only the columns the user asked to change go into the SET clause.
var setClauses = new List<string>();
var updateParams = new List<(string, object?)>();
if (package is not null) { setClauses.Add("dotnet_package = @setPackage"); updateParams.Add(("@setPackage", package)); }
if (ns is not null) { setClauses.Add("dotnet_namespace = @setNamespace"); updateParams.Add(("@setNamespace", ns)); }
if (notes is not null) { setClauses.Add("dotnet_usage_notes = @setNotes"); updateParams.Add(("@setNotes", notes)); }
// Preview the matching rows, or apply the update when --execute was given.
BatchFilters.PreviewOrExecute(db, "library_mappings",
"id, go_import_path, status, dotnet_package, dotnet_namespace",
string.Join(", ", setClauses), updateParams,
whereClause, filterParams, execute);
});
return cmd;
}
```
**Step 2: Verify it compiles**
Run: `dotnet build tools/NatsNet.PortTracker/NatsNet.PortTracker.csproj`
Expected: Build succeeded.
**Step 3: Commit**
```bash
git add tools/NatsNet.PortTracker/Commands/LibraryCommands.cs
git commit -m "feat(porttracker): add library batch-update and batch-map commands"
```
---
### Task 6: End-to-end smoke test
**Files:** None — testing only.
**Step 1: Test feature batch-update dry-run**
Run: `dotnet run --project tools/NatsNet.PortTracker -- feature batch-update --status deferred --set-status deferred --db porting.db`
Expected: Preview showing deferred features.
**Step 2: Test test batch-update dry-run**
Run: `dotnet run --project tools/NatsNet.PortTracker -- test batch-update --ids 1-5 --set-status verified --db porting.db`
Expected: Preview showing tests 1-5.
**Step 3: Test module batch-update dry-run**
Run: `dotnet run --project tools/NatsNet.PortTracker -- module batch-update --status verified --set-status verified --db porting.db`
Expected: Preview showing verified modules.
**Step 4: Test library batch-map dry-run**
Run: `dotnet run --project tools/NatsNet.PortTracker -- library batch-map --status mapped --set-package "test" --db porting.db`
Expected: Preview showing mapped libraries.
**Step 5: Test error cases**
Run: `dotnet run --project tools/NatsNet.PortTracker -- feature batch-update --set-status deferred --db porting.db`
Expected: "Error: at least one filter (--ids, --module, --status) is required."
Run: `dotnet run --project tools/NatsNet.PortTracker -- feature batch-map --ids 1-5 --db porting.db`
Expected: "Error: at least one of --set-project, --set-class, --set-method is required."
**Step 6: Test help output**
Run: `dotnet run --project tools/NatsNet.PortTracker -- feature batch-update --help`
Expected: Shows all options with descriptions.
**Step 7: Final commit**
No code changes are expected — this task is verification only. If any issues are found, fix them and commit each fix with an appropriate message.

View File

@@ -0,0 +1,13 @@
{
"planPath": "docs/plans/2026-02-27-porttracker-batch-plan.md",
"tasks": [
{"id": 0, "nativeId": 7, "subject": "Task 0: Add ExecuteInTransaction to Database", "status": "pending"},
{"id": 1, "nativeId": 8, "subject": "Task 1: Create BatchFilters shared infrastructure", "status": "pending", "blockedBy": [0]},
{"id": 2, "nativeId": 9, "subject": "Task 2: Add batch commands to FeatureCommands", "status": "pending", "blockedBy": [1]},
{"id": 3, "nativeId": 10, "subject": "Task 3: Add batch commands to TestCommands", "status": "pending", "blockedBy": [1]},
{"id": 4, "nativeId": 11, "subject": "Task 4: Add batch commands to ModuleCommands", "status": "pending", "blockedBy": [1]},
{"id": 5, "nativeId": 12, "subject": "Task 5: Add batch commands to LibraryCommands", "status": "pending", "blockedBy": [1]},
{"id": 6, "nativeId": 13, "subject": "Task 6: End-to-end smoke test", "status": "pending", "blockedBy": [2, 3, 4, 5]}
],
"lastUpdated": "2026-02-27T00:00:00Z"
}

View File

@@ -624,7 +624,7 @@ public sealed class DirJwtStore : IDisposable
/// Deletes the JWT for <paramref name="publicKey"/> according to <see cref="_deleteType"/>.
/// Mirrors Go <c>DirJWTStore.delete</c>.
/// </summary>
private void Delete(string publicKey)
public void Delete(string publicKey)
{
if (_readonly)
{
@@ -795,7 +795,7 @@ public sealed class DirJwtStore : IDisposable
// Background timer — mirrors Go goroutine + time.Ticker.
var timer = new Timer(_ =>
{
var now = DateTimeOffset.UtcNow.ToUnixTimeMilliseconds() * TimeSpan.TicksPerMillisecond;
var now = DateTimeOffset.UtcNow.UtcTicks;
while (true)
{
@@ -1104,14 +1104,13 @@ internal sealed class ExpirationTracker
// Remove old hash contribution from rolling XOR.
XorAssign(_hash, existing.Hash);
// Update in-place.
existing.Expiration = exp;
existing.Hash = hash;
// Re-enqueue with updated priority (PriorityQueue does not support update;
// use a version counter approach — mark old entry stale, enqueue fresh).
existing.Version++;
_heap.Enqueue(existing, exp);
// Create a new JwtItem so the old heap entry becomes a stale orphan.
// DrainStale uses ReferenceEquals(current, top) to detect orphans:
// the old heap entry points to the old JwtItem object which is no longer
// in _idx, so it will be drained on the next PeekExpired call.
var updated = new JwtItem(publicKey, exp, hash);
_idx[publicKey] = updated;
_heap.Enqueue(updated, exp);
}
else
{
@@ -1141,9 +1140,11 @@ internal sealed class ExpirationTracker
? long.MaxValue
: (DateTimeOffset.UtcNow + Ttl).UtcTicks;
item.Expiration = newExp;
item.Version++;
_heap.Enqueue(item, newExp);
// Replace with a new JwtItem so the old heap entry becomes a stale orphan
// (DrainStale detects staleness via ReferenceEquals).
var updated = new JwtItem(publicKey, newExp, item.Hash);
_idx[publicKey] = updated;
_heap.Enqueue(updated, newExp);
}
if (EvictOnLimit)

View File

@@ -1139,7 +1139,8 @@ public sealed partial class ClientConnection
internal void ProcessErr(string err) { /* TODO session 09 */ }
// features 442-443: removeSecretsFromTrace, redact
internal static string RemoveSecretsFromTrace(string s) => s;
// Delegates to ServerLogging.RemoveSecretsFromTrace (the real implementation lives there).
internal static string RemoveSecretsFromTrace(string s) => ServerLogging.RemoveSecretsFromTrace(s);
internal static string Redact(string s) => s;
// feature 444: computeRTT

View File

@@ -152,6 +152,8 @@ public sealed class StorageSizeJsonConverter : JsonConverter<long>
public static long Parse(string s)
{
// Mirrors Go getStorageSize: empty string returns 0 with no error.
if (string.IsNullOrWhiteSpace(s)) return 0;
if (long.TryParse(s, out var n)) return n;
var m = Pattern.Match(s.Trim());
if (!m.Success) throw new FormatException($"Invalid storage size: \"{s}\"");

View File

@@ -14,6 +14,7 @@
// Adapted from server/log.go in the NATS server Go source.
using System.Collections.Concurrent;
using System.Text.RegularExpressions;
using Microsoft.Extensions.Logging;
namespace ZB.MOM.NatsNet.Server.Internal;
@@ -156,6 +157,53 @@ public sealed class ServerLogging
var statement = string.Format(format, args);
Warnf("{0}", statement);
}
// ---- Trace sanitization ----
// Mirrors removeSecretsFromTrace / redact in server/client.go.
// passPat = `"?\s*pass\S*?"?\s*[:=]\s*"?(([^",\r\n}])*)` — captures the value of any pass/password field.
// tokenPat = `"?\s*auth_token\S*?"?\s*[:=]\s*"?(([^",\r\n}])*)` — captures auth_token value.
// Only the FIRST match is redacted (mirrors the Go break-after-first-match behaviour).
// Go: "?\s*pass\S*?"?\s*[:=]\s*"?(([^",\r\n}])*)
private static readonly Regex s_passPattern = new(
@"""?\s*pass\S*?""?\s*[:=]\s*""?(([^"",\r\n}])*)",
RegexOptions.Compiled);
// Go: "?\s*auth_token\S*?"?\s*[:=]\s*"?(([^",\r\n}])*)
private static readonly Regex s_authTokenPattern = new(
@"""?\s*auth_token\S*?""?\s*[:=]\s*""?(([^"",\r\n}])*)",
RegexOptions.Compiled);
/// <summary>
/// Removes passwords from a protocol trace string.
/// Mirrors <c>removeSecretsFromTrace</c> in client.go (pass step).
/// Only the first occurrence is redacted.
/// </summary>
public static string RemovePassFromTrace(string s)
=> RedactFirst(s_passPattern, s);
/// <summary>
/// Removes auth_token from a protocol trace string.
/// Mirrors <c>removeSecretsFromTrace</c> in client.go (auth_token step).
/// Only the first occurrence is redacted.
/// </summary>
public static string RemoveAuthTokenFromTrace(string s)
=> RedactFirst(s_authTokenPattern, s);
/// <summary>
/// Removes both passwords and auth tokens from a protocol trace string.
/// Mirrors <c>removeSecretsFromTrace</c> in client.go.
/// </summary>
public static string RemoveSecretsFromTrace(string s)
=> RemoveAuthTokenFromTrace(RemovePassFromTrace(s));
private static string RedactFirst(Regex pattern, string s)
{
var m = pattern.Match(s);
if (!m.Success) return s;
var cap = m.Groups[1]; // captured value substring
return string.Concat(s.AsSpan(0, cap.Index), "[REDACTED]", s.AsSpan(cap.Index + cap.Length));
}
}
/// <summary>

View File

@@ -103,4 +103,224 @@ public static class JetStreamVersioning
if (string.IsNullOrEmpty(reqApiLevelHeader)) return false;
return !int.TryParse(reqApiLevelHeader, out var minLevel) || JsApiLevel < minLevel;
}
// ---- Stream metadata mutations ----
/// <summary>
/// Sets the required API level in stream config metadata based on which v2.11+/v2.12+ features
/// the stream config uses. Removes any dynamic fields first.
/// Mirrors <c>setStaticStreamMetadata</c>.
/// </summary>
public static void SetStaticStreamMetadata(StreamConfig cfg)
{
cfg.Metadata ??= new Dictionary<string, string>();
DeleteDynamicMetadata(cfg.Metadata);
var requiredApiLevel = 0;
void Requires(int level) { if (level > requiredApiLevel) requiredApiLevel = level; }
if (cfg.AllowMsgTTL || cfg.SubjectDeleteMarkerTTL > TimeSpan.Zero)
Requires(ApiLevelForTTL);
if (cfg.AllowMsgCounter)
Requires(ApiLevelForCounters);
if (cfg.AllowAtomicPublish)
Requires(ApiLevelForAtomicPublish);
if (cfg.AllowMsgSchedules)
Requires(ApiLevelForMsgSchedules);
if (cfg.PersistMode == PersistModeType.AsyncPersistMode)
Requires(ApiLevelForAsyncPersist);
cfg.Metadata[JsRequiredLevelMetadataKey] = requiredApiLevel.ToString();
}
/// <summary>
/// Returns a shallow copy of the stream config with dynamic versioning fields added to a new
/// metadata dictionary. Does not mutate <paramref name="cfg"/>.
/// Mirrors <c>setDynamicStreamMetadata</c>.
/// </summary>
public static StreamConfig SetDynamicStreamMetadata(StreamConfig cfg)
{
// Shallow-copy the struct-like record: clone all fields then replace metadata.
var newCfg = cfg.Clone();
newCfg.Metadata = new Dictionary<string, string>();
if (cfg.Metadata != null)
foreach (var kv in cfg.Metadata)
newCfg.Metadata[kv.Key] = kv.Value;
newCfg.Metadata[JsServerVersionMetadataKey] = ServerConstants.Version;
newCfg.Metadata[JsServerLevelMetadataKey] = JsApiLevel.ToString();
return newCfg;
}
/// <summary>
/// Copies the required-level versioning field from <paramref name="prevCfg"/> into
/// <paramref name="cfg"/>, removing dynamic fields and deleting the key if absent in prevCfg.
/// Mirrors <c>copyStreamMetadata</c>.
/// </summary>
public static void CopyStreamMetadata(StreamConfig cfg, StreamConfig? prevCfg)
{
if (cfg.Metadata != null)
DeleteDynamicMetadata(cfg.Metadata);
SetOrDeleteInStreamMetadata(cfg, prevCfg, JsRequiredLevelMetadataKey);
}
private static void SetOrDeleteInStreamMetadata(StreamConfig cfg, StreamConfig? prevCfg, string key)
{
if (prevCfg?.Metadata != null && prevCfg.Metadata.TryGetValue(key, out var value))
{
cfg.Metadata ??= new Dictionary<string, string>();
cfg.Metadata[key] = value;
return;
}
if (cfg.Metadata != null)
{
cfg.Metadata.Remove(key);
if (cfg.Metadata.Count == 0)
cfg.Metadata = null;
}
}
// ---- Consumer metadata mutations ----
/// <summary>
/// Sets the required API level in consumer config metadata based on which v2.11+ features
/// the consumer config uses. Removes any dynamic fields first.
/// Mirrors <c>setStaticConsumerMetadata</c>.
/// </summary>
public static void SetStaticConsumerMetadata(ConsumerConfig cfg)
{
cfg.Metadata ??= new Dictionary<string, string>();
DeleteDynamicMetadata(cfg.Metadata);
var requiredApiLevel = 0;
void Requires(int level) { if (level > requiredApiLevel) requiredApiLevel = level; }
if (cfg.PauseUntil.HasValue && cfg.PauseUntil.Value != default)
Requires(ApiLevelForConsumerPause);
if (cfg.PriorityPolicy != PriorityPolicy.PriorityNone
|| cfg.PinnedTTL != TimeSpan.Zero
|| (cfg.PriorityGroups != null && cfg.PriorityGroups.Length > 0))
Requires(ApiLevelForPriorityGroups);
cfg.Metadata[JsRequiredLevelMetadataKey] = requiredApiLevel.ToString();
}
/// <summary>
/// Returns a shallow copy of the consumer config with dynamic versioning fields added to a new
/// metadata dictionary. Does not mutate <paramref name="cfg"/>.
/// Mirrors <c>setDynamicConsumerMetadata</c>.
/// </summary>
public static ConsumerConfig SetDynamicConsumerMetadata(ConsumerConfig cfg)
{
var newCfg = new ConsumerConfig();
// Copy all fields via serialisation-free approach: copy properties from cfg
CopyConsumerConfigFields(cfg, newCfg);
newCfg.Metadata = new Dictionary<string, string>();
if (cfg.Metadata != null)
foreach (var kv in cfg.Metadata)
newCfg.Metadata[kv.Key] = kv.Value;
newCfg.Metadata[JsServerVersionMetadataKey] = ServerConstants.Version;
newCfg.Metadata[JsServerLevelMetadataKey] = JsApiLevel.ToString();
return newCfg;
}
/// <summary>
/// Returns a shallow copy of the consumer info with dynamic versioning fields added to the
/// config's metadata. Does not mutate <paramref name="info"/>.
/// Mirrors <c>setDynamicConsumerInfoMetadata</c>.
/// </summary>
public static ConsumerInfo SetDynamicConsumerInfoMetadata(ConsumerInfo info)
{
var newInfo = new ConsumerInfo
{
Stream = info.Stream,
Name = info.Name,
Created = info.Created,
Delivered = info.Delivered,
AckFloor = info.AckFloor,
NumAckPending = info.NumAckPending,
NumRedelivered = info.NumRedelivered,
NumWaiting = info.NumWaiting,
NumPending = info.NumPending,
Cluster = info.Cluster,
PushBound = info.PushBound,
Paused = info.Paused,
PauseRemaining = info.PauseRemaining,
TimeStamp = info.TimeStamp,
PriorityGroups = info.PriorityGroups,
Config = info.Config != null ? SetDynamicConsumerMetadata(info.Config) : null,
};
return newInfo;
}
/// <summary>
/// Copies the required-level versioning field from <paramref name="prevCfg"/> into
/// <paramref name="cfg"/>, removing dynamic fields and deleting the key if absent in prevCfg.
/// Mirrors <c>copyConsumerMetadata</c>.
/// </summary>
public static void CopyConsumerMetadata(ConsumerConfig cfg, ConsumerConfig? prevCfg)
{
if (cfg.Metadata != null)
DeleteDynamicMetadata(cfg.Metadata);
SetOrDeleteInConsumerMetadata(cfg, prevCfg, JsRequiredLevelMetadataKey);
}
private static void SetOrDeleteInConsumerMetadata(ConsumerConfig cfg, ConsumerConfig? prevCfg, string key)
{
if (prevCfg?.Metadata != null && prevCfg.Metadata.TryGetValue(key, out var value))
{
cfg.Metadata ??= new Dictionary<string, string>();
cfg.Metadata[key] = value;
return;
}
if (cfg.Metadata != null)
{
cfg.Metadata.Remove(key);
if (cfg.Metadata.Count == 0)
cfg.Metadata = null;
}
}
// ---- Private helpers ----
/// <summary>
/// Copies all scalar/reference properties from <paramref name="src"/> to <paramref name="dst"/>,
/// excluding <c>Metadata</c> (which is set separately by the caller).
/// </summary>
private static void CopyConsumerConfigFields(ConsumerConfig src, ConsumerConfig dst)
{
dst.DeliverPolicy = src.DeliverPolicy;
dst.OptStartSeq = src.OptStartSeq;
dst.OptStartTime = src.OptStartTime;
dst.DeliverSubject = src.DeliverSubject;
dst.DeliverGroup = src.DeliverGroup;
dst.Durable = src.Durable;
dst.Name = src.Name;
dst.Description = src.Description;
dst.FilterSubject = src.FilterSubject;
dst.FilterSubjects = src.FilterSubjects;
dst.AckPolicy = src.AckPolicy;
dst.AckWait = src.AckWait;
dst.MaxDeliver = src.MaxDeliver;
dst.BackOff = src.BackOff;
dst.ReplayPolicy = src.ReplayPolicy;
dst.RateLimit = src.RateLimit;
dst.SampleFrequency = src.SampleFrequency;
dst.MaxWaiting = src.MaxWaiting;
dst.MaxAckPending = src.MaxAckPending;
dst.FlowControl = src.FlowControl;
dst.Heartbeat = src.Heartbeat;
dst.Direct = src.Direct;
dst.HeadersOnly = src.HeadersOnly;
dst.MaxRequestBatch = src.MaxRequestBatch;
dst.MaxRequestMaxBytes = src.MaxRequestMaxBytes;
dst.MaxRequestExpires = src.MaxRequestExpires;
dst.InactiveThreshold = src.InactiveThreshold;
dst.Replicas = src.Replicas;
dst.MemoryStorage = src.MemoryStorage;
dst.PauseUntil = src.PauseUntil;
dst.PinnedTTL = src.PinnedTTL;
dst.PriorityPolicy = src.PriorityPolicy;
dst.PriorityGroups = src.PriorityGroups;
// Metadata is NOT copied here — caller sets it.
}
}

View File

@@ -78,7 +78,11 @@ public sealed class JetStreamMemStore : IStreamStore
_maxp = cfg.MaxMsgsPer;
if (cfg.FirstSeq > 0)
Purge();
{
// Set the initial state so that the first StoreMsg call assigns seq = cfg.FirstSeq.
_state.LastSeq = cfg.FirstSeq - 1;
_state.FirstSeq = cfg.FirstSeq;
}
}
// -----------------------------------------------------------------------
@@ -92,7 +96,10 @@ public sealed class JetStreamMemStore : IStreamStore
try
{
var seq = _state.LastSeq + 1;
var ts = DateTimeOffset.UtcNow.ToUnixTimeMilliseconds() * 1_000_000L;
// Use 100-nanosecond Ticks for higher timestamp precision.
// Nanoseconds since Unix epoch: (Ticks - UnixEpochTicks) * 100
const long UnixEpochTicks = 621355968000000000L;
var ts = (DateTimeOffset.UtcNow.UtcTicks - UnixEpochTicks) * 100L;
try
{
StoreRawMsgLocked(subject, hdr, msg, seq, ts, ttl, discardNewCheck: true);
@@ -134,12 +141,24 @@ public sealed class JetStreamMemStore : IStreamStore
hdr ??= Array.Empty<byte>();
msg ??= Array.Empty<byte>();
// Determine if we are at the per-subject limit.
bool atSubjectLimit = false;
if (_maxp > 0 && !string.IsNullOrEmpty(subject))
{
var subjectBytesCheck = Encoding.UTF8.GetBytes(subject);
var (ssCheck, foundCheck) = _fss.Find(subjectBytesCheck);
if (foundCheck && ssCheck != null)
atSubjectLimit = ssCheck.Msgs >= (ulong)_maxp;
}
// Discard-new enforcement
if (discardNewCheck && _cfg.Discard == DiscardPolicy.DiscardNew)
{
if (_cfg.MaxMsgs > 0 && _state.Msgs >= (ulong)_cfg.MaxMsgs)
if (atSubjectLimit && _cfg.DiscardNewPer)
throw StoreErrors.ErrMaxMsgsPerSubject;
if (_cfg.MaxMsgs > 0 && _state.Msgs >= (ulong)_cfg.MaxMsgs && !atSubjectLimit)
throw StoreErrors.ErrMaxMsgs;
if (_cfg.MaxBytes > 0 && _state.Bytes + MsgSize(subject, hdr, msg) > (ulong)_cfg.MaxBytes)
if (_cfg.MaxBytes > 0 && _state.Bytes + MsgSize(subject, hdr, msg) > (ulong)_cfg.MaxBytes && !atSubjectLimit)
throw StoreErrors.ErrMaxBytes;
}
@@ -342,7 +361,11 @@ public sealed class JetStreamMemStore : IStreamStore
{
var (ss, found) = _fss.Find(Encoding.UTF8.GetBytes(subject));
if (found && ss != null && ss.Msgs > 0)
{
if (ss.LastNeedsUpdate)
RecalculateForSubj(subject, ss);
_msgs.TryGetValue(ss.Last, out stored);
}
}
if (stored == null)
@@ -603,13 +626,69 @@ public sealed class JetStreamMemStore : IStreamStore
/// <inheritdoc/>
public (ulong Purged, Exception? Error) PurgeEx(string subject, ulong seq, ulong keep)
{
// TODO: session 17 — full subject-filtered purge
if (string.IsNullOrEmpty(subject) || subject == ">")
var isAll = string.IsNullOrEmpty(subject) || subject == ">";
if (isAll)
{
if (keep == 0 && seq == 0)
return Purge();
if (seq > 1)
return Compact(seq);
if (keep > 0)
{
ulong msgs, lseq;
_mu.EnterReadLock();
msgs = _state.Msgs;
lseq = _state.LastSeq;
_mu.ExitReadLock();
if (keep >= msgs)
return (0, null);
return Compact(lseq - keep + 1);
}
return (0, null);
}
return (0, null);
// Subject-filtered purge
var ss = FilteredState(1, subject);
if (ss.Msgs == 0)
return (0, null);
if (keep > 0)
{
if (keep >= ss.Msgs)
return (0, null);
ss.Msgs -= keep;
}
var last = ss.Last;
if (seq > 1)
last = seq - 1;
ulong purged = 0;
_mu.EnterWriteLock();
try
{
if (_msgs == null)
return (0, null);
for (var s = ss.First; s <= last; s++)
{
if (_msgs.TryGetValue(s, out var sm) && sm != null && sm.Subject == subject)
{
if (RemoveMsgLocked(s, false))
{
purged++;
if (purged >= ss.Msgs)
break;
}
}
}
}
finally
{
if (_mu.IsWriteLockHeld)
_mu.ExitWriteLock();
}
return (purged, null);
}
/// <inheritdoc/>
@@ -703,9 +782,10 @@ public sealed class JetStreamMemStore : IStreamStore
// Full reset
purged = (ulong)_msgs.Count;
bytes = _state.Bytes;
_state = new StreamState { LastTime = DateTime.UtcNow };
_state = new StreamState();
_msgs = new Dictionary<ulong, StoreMsg>();
_dmap = new SequenceSet();
_fss.Reset();
}
else
{
@@ -847,6 +927,8 @@ public sealed class JetStreamMemStore : IStreamStore
return new SimpleState { Msgs = _state.Msgs, First = _state.FirstSeq, Last = _state.LastSeq };
var ss = new SimpleState();
var havePartial = false;
_fss.Match(Encoding.UTF8.GetBytes(filter), (subj, fss) =>
{
if (fss.FirstNeedsUpdate || fss.LastNeedsUpdate)
@@ -854,12 +936,46 @@ public sealed class JetStreamMemStore : IStreamStore
if (sseq <= fss.First)
{
// All messages in this subject are at or after sseq
ss.Msgs += fss.Msgs;
if (ss.First == 0 || fss.First < ss.First) ss.First = fss.First;
if (fss.Last > ss.Last) ss.Last = fss.Last;
}
else if (sseq <= fss.Last)
{
// Partial: sseq is inside this subject's range — need to scan
havePartial = true;
// Still track Last for the scan bounds
if (fss.Last > ss.Last) ss.Last = fss.Last;
}
// else sseq > fss.Last: all messages before sseq, skip
return true;
});
if (!havePartial)
return ss;
// Need to scan messages from sseq to ss.Last
if (_msgs == null)
return ss;
var scanFirst = sseq;
var scanLast = ss.Last;
if (scanLast == 0) scanLast = _state.LastSeq;
// Reset and rescan
ss = new SimpleState();
for (var seq = scanFirst; seq <= scanLast; seq++)
{
if (!_msgs.TryGetValue(seq, out var sm) || sm == null)
continue;
if (isAll || MatchLiteral(sm.Subject, filter))
{
ss.Msgs++;
if (ss.First == 0) ss.First = seq;
ss.Last = seq;
}
}
return ss;
}
@@ -947,8 +1063,10 @@ public sealed class JetStreamMemStore : IStreamStore
{
if (_msgs == null || _msgs.Count == 0) return (Array.Empty<ulong>(), null);
var seqs = new List<ulong>(_fss.Size());
_fss.IterFast((_, ss) =>
_fss.IterFast((subj, ss) =>
{
if (ss.LastNeedsUpdate)
RecalculateForSubj(Encoding.UTF8.GetString(subj), ss);
seqs.Add(ss.Last);
return true;
});
@@ -974,14 +1092,32 @@ public sealed class JetStreamMemStore : IStreamStore
var seen = new HashSet<ulong>();
foreach (var filter in filters)
{
_fss.Match(Encoding.UTF8.GetBytes(filter), (_, ss) =>
_fss.Match(Encoding.UTF8.GetBytes(filter), (subj, ss) =>
{
if (ss.Last <= maxSeq && seen.Add(ss.Last))
seqs.Add(ss.Last);
if (ss.LastNeedsUpdate)
RecalculateForSubj(Encoding.UTF8.GetString(subj), ss);
if (ss.Last <= maxSeq)
{
if (seen.Add(ss.Last))
seqs.Add(ss.Last);
}
else if (ss.Msgs > 1)
{
// Last is beyond maxSeq — scan backwards for the most recent msg <= maxSeq.
var s = Encoding.UTF8.GetString(subj);
for (var seq = maxSeq; seq > 0; seq--)
{
if (_msgs.TryGetValue(seq, out var sm) && sm != null && sm.Subject == s)
{
if (seen.Add(seq)) seqs.Add(seq);
break;
}
}
}
return true;
});
if (maxAllowed > 0 && seqs.Count > maxAllowed)
return (Array.Empty<ulong>(), StoreErrors.ErrTooManyResults);
return (null!, StoreErrors.ErrTooManyResults);
}
seqs.Sort();
return (seqs.ToArray(), null);
@@ -1017,7 +1153,9 @@ public sealed class JetStreamMemStore : IStreamStore
/// <inheritdoc/>
public ulong GetSeqFromTime(DateTime t)
{
var ts = new DateTimeOffset(t).ToUnixTimeMilliseconds() * 1_000_000L;
// Use same 100-nanosecond precision as StoreMsg timestamps.
const long UnixEpochTicksGsft = 621355968000000000L;
var ts = (new DateTimeOffset(t, TimeSpan.Zero).UtcTicks - UnixEpochTicksGsft) * 100L;
_mu.EnterReadLock();
try
{
@@ -1038,7 +1176,9 @@ public sealed class JetStreamMemStore : IStreamStore
break;
}
if (lastSm == null) return _state.LastSeq + 1;
if (ts >= lastSm.Ts) return _state.LastSeq + 1;
// Mirror Go: if ts == last ts return that seq; if ts > last ts return pastEnd.
if (ts == lastSm.Ts) return lastSm.Seq;
if (ts > lastSm.Ts) return _state.LastSeq + 1;
// Linear scan fallback
for (var seq = _state.FirstSeq; seq <= _state.LastSeq; seq++)
@@ -1066,9 +1206,32 @@ public sealed class JetStreamMemStore : IStreamStore
try
{
_cfg = cfg.Clone();
_maxp = cfg.MaxMsgsPer;
// Clamp MaxMsgsPer to minimum of -1
if (_cfg.MaxMsgsPer < -1)
{
_cfg.MaxMsgsPer = -1;
cfg.MaxMsgsPer = -1;
}
var oldMaxp = _maxp;
_maxp = _cfg.MaxMsgsPer;
EnforceMsgLimit();
EnforceBytesLimit();
// Enforce per-subject limits if MaxMsgsPer was reduced or newly set
if (_maxp > 0 && (oldMaxp == 0 || _maxp < oldMaxp))
{
var lm = (ulong)_maxp;
_fss.IterFast((subj, ss) =>
{
if (ss.Msgs > lm)
EnforcePerSubjectLimit(Encoding.UTF8.GetString(subj), ss);
return true;
});
}
if (_ageChk == null && _cfg.MaxAge != TimeSpan.Zero)
StartAgeChk();
if (_ageChk != null && _cfg.MaxAge == TimeSpan.Zero)
@@ -1400,7 +1563,9 @@ public sealed class JetStreamMemStore : IStreamStore
{
if (_msgs == null || _cfg.MaxAge == TimeSpan.Zero) return;
var minAge = DateTime.UtcNow - _cfg.MaxAge;
var minTs = new DateTimeOffset(minAge).ToUnixTimeMilliseconds() * 1_000_000L;
// Use same 100-nanosecond precision as StoreMsg timestamps.
const long UnixEpochTicksExp = 621355968000000000L;
var minTs = (new DateTimeOffset(minAge, TimeSpan.Zero).UtcTicks - UnixEpochTicksExp) * 100L;
var toRemove = new List<ulong>();
foreach (var kv in _msgs)
{

View File

@@ -0,0 +1,208 @@
// Copyright 2012-2025 The NATS Authors
// Licensed under the Apache License, Version 2.0
using System.Threading.Channels;
using NATS.Client.Core;
using Shouldly;
namespace ZB.MOM.NatsNet.Server.IntegrationTests;
/// <summary>
/// Behavioral baseline tests against the reference Go NATS server.
/// These tests require a running Go NATS server on localhost:4222.
/// Start with: cd golang/nats-server && go run . -p 4222
/// </summary>
[Collection("NatsIntegration")]
[Trait("Category", "Integration")]
public class NatsServerBehaviorTests : IAsyncLifetime
{
    /// <summary>Endpoint of the local reference server every test talks to.</summary>
    private const string ServerUrl = "nats://localhost:4222";

    private NatsConnection? _nats;
    private Exception? _initFailure;

    public async Task InitializeAsync()
    {
        try
        {
            _nats = new NatsConnection(new NatsOpts { Url = ServerUrl });
            await _nats.ConnectAsync();
        }
        catch (Exception ex)
        {
            // Remember the failure so each test can bail out early (see ServerUnavailable)
            // instead of the whole fixture failing with a confusing stack.
            _initFailure = ex;
        }
    }

    public async Task DisposeAsync()
    {
        if (_nats is not null)
            await _nats.DisposeAsync();
    }

    /// <summary>
    /// Returns true if the server is not available, causing the calling test to return early (pass silently).
    /// xUnit 2.x does not support dynamic skip at runtime; early return is the pragmatic workaround.
    /// </summary>
    private bool ServerUnavailable() => _initFailure != null;

    [Fact]
    public async Task BasicPubSub_ShouldDeliverMessage()
    {
        if (ServerUnavailable()) return;

        using var cts = new CancellationTokenSource(TimeSpan.FromSeconds(5));
        // RunContinuationsAsynchronously keeps the test continuation off the
        // subscriber's read loop when the result is set.
        var received = new TaskCompletionSource<string>(TaskCreationOptions.RunContinuationsAsynchronously);
        _ = Task.Run(async () =>
        {
            try
            {
                await foreach (var msg in _nats!.SubscribeAsync<string>("test.hello", cancellationToken: cts.Token))
                {
                    received.TrySetResult(msg.Data ?? "");
                    break;
                }
            }
            catch (Exception ex) when (ex is not OperationCanceledException)
            {
                received.TrySetException(ex);
            }
        }, cts.Token);

        // Give subscriber a moment to register
        await Task.Delay(100, cts.Token);
        await _nats!.PublishAsync("test.hello", "world");

        var result = await received.Task.WaitAsync(cts.Token);
        result.ShouldBe("world");
    }

    [Fact]
    public async Task WildcardSubscription_DotStar_ShouldMatch()
    {
        if (ServerUnavailable()) return;

        using var cts = new CancellationTokenSource(TimeSpan.FromSeconds(5));
        var received = new TaskCompletionSource<string>(TaskCreationOptions.RunContinuationsAsynchronously);
        _ = Task.Run(async () =>
        {
            try
            {
                // "*" matches exactly one token, so "foo.*" should catch "foo.bar".
                await foreach (var msg in _nats!.SubscribeAsync<string>("foo.*", cancellationToken: cts.Token))
                {
                    received.TrySetResult(msg.Subject);
                    break;
                }
            }
            catch (Exception ex) when (ex is not OperationCanceledException)
            {
                received.TrySetException(ex);
            }
        }, cts.Token);

        await Task.Delay(100, cts.Token);
        await _nats!.PublishAsync("foo.bar", "payload");

        var subject = await received.Task.WaitAsync(cts.Token);
        subject.ShouldBe("foo.bar");
    }

    [Fact]
    public async Task WildcardSubscription_GreaterThan_ShouldMatchMultiLevel()
    {
        if (ServerUnavailable()) return;

        using var cts = new CancellationTokenSource(TimeSpan.FromSeconds(5));
        var received = new TaskCompletionSource<string>(TaskCreationOptions.RunContinuationsAsynchronously);
        _ = Task.Run(async () =>
        {
            try
            {
                // ">" matches one or more trailing tokens, so "foo.>" should catch "foo.bar.baz".
                await foreach (var msg in _nats!.SubscribeAsync<string>("foo.>", cancellationToken: cts.Token))
                {
                    received.TrySetResult(msg.Subject);
                    break;
                }
            }
            catch (Exception ex) when (ex is not OperationCanceledException)
            {
                received.TrySetException(ex);
            }
        }, cts.Token);

        await Task.Delay(100, cts.Token);
        await _nats!.PublishAsync("foo.bar.baz", "payload");

        var subject = await received.Task.WaitAsync(cts.Token);
        subject.ShouldBe("foo.bar.baz");
    }

    [Fact]
    public async Task QueueGroup_ShouldDeliverToOnlyOneSubscriber()
    {
        if (ServerUnavailable()) return;

        using var cts = new CancellationTokenSource(TimeSpan.FromSeconds(10));
        const int messageCount = 30;
        var channel = Channel.CreateBounded<int>(messageCount * 2);
        var count1 = 0;
        var count2 = 0;

        var reader1 = Task.Run(async () =>
        {
            try
            {
                await foreach (var _ in _nats!.SubscribeAsync<string>("qg.test", queueGroup: "workers", cancellationToken: cts.Token))
                {
                    Interlocked.Increment(ref count1);
                    await channel.Writer.WriteAsync(1, cts.Token);
                }
            }
            catch (OperationCanceledException) { }
        });
        var reader2 = Task.Run(async () =>
        {
            try
            {
                await foreach (var _ in _nats!.SubscribeAsync<string>("qg.test", queueGroup: "workers", cancellationToken: cts.Token))
                {
                    Interlocked.Increment(ref count2);
                    await channel.Writer.WriteAsync(1, cts.Token);
                }
            }
            catch (OperationCanceledException) { }
        });

        // Give subscribers a moment to register
        await Task.Delay(200, cts.Token);
        for (var i = 0; i < messageCount; i++)
            await _nats!.PublishAsync("qg.test", $"msg{i}");

        // Wait for all messages to be received
        var received = 0;
        while (received < messageCount)
        {
            await channel.Reader.ReadAsync(cts.Token);
            received++;
        }

        // Queue-group semantics: each message goes to exactly one member,
        // so the totals must sum to the published count.
        (count1 + count2).ShouldBe(messageCount);
        // Don't assert per-subscriber counts — distribution is probabilistic
        cts.Cancel();
        await Task.WhenAll(reader1, reader2);
    }

    [Fact]
    public async Task ConnectDisconnect_ShouldNotThrow()
    {
        if (ServerUnavailable()) return;

        // Dispose in 'finally' so the connection is not leaked when ConnectAsync fails;
        // if DisposeAsync itself throws, the test still fails with that exception.
        var nats2 = new NatsConnection(new NatsOpts { Url = ServerUrl });
        try
        {
            await Should.NotThrowAsync(async () => await nats2.ConnectAsync());
        }
        finally
        {
            await nats2.DisposeAsync();
        }
    }
}

View File

@@ -1,10 +0,0 @@
namespace ZB.MOM.NatsNet.Server.IntegrationTests;
public class UnitTest1
{
[Fact]
public void Test1()
{
}
}

View File

@@ -15,6 +15,7 @@
<ItemGroup>
<PackageReference Include="coverlet.collector" Version="6.0.4" />
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.14.1" />
<PackageReference Include="NATS.Client.Core" Version="2.7.2" />
<PackageReference Include="xunit" Version="2.9.3" />
<PackageReference Include="xunit.runner.visualstudio" Version="3.1.4" />
<PackageReference Include="Shouldly" Version="*" />

View File

@@ -0,0 +1,770 @@
// Copyright 2012-2025 The NATS Authors
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Mirrors server/dirstore_test.go tests 285–296 in the NATS server Go source.
// The Go tests use nkeys.CreateAccount() + jwt.NewAccountClaims() to generate
// real signed JWTs. Here we craft minimal fake JWT strings directly using
// Base64URL-encoded JSON payloads, since DirJwtStore only parses the "exp",
// "iat" and "jti" numeric/string claims from the payload.
using System.Security.Cryptography;
using System.Text;
using Shouldly;
namespace ZB.MOM.NatsNet.Server.Tests.Accounts;
/// <summary>
/// Unit tests for <see cref="DirJwtStore"/> expiration, limits, LRU eviction,
/// reload, TTL and notification behaviour.
/// Mirrors server/dirstore_test.go tests 285–296.
/// </summary>
[Collection("DirectoryStoreTests")]
public sealed class DirectoryStoreTests : IDisposable
{
// -------------------------------------------------------------------------
// Counter for unique public-key names
// -------------------------------------------------------------------------
private static int _counter;

/// <summary>Produces a unique, zero-padded fake account public key (e.g. "ACCT00000001").</summary>
private static string NextKey()
{
    var n = Interlocked.Increment(ref _counter);
    return "ACCT" + n.ToString("D8");
}
// -------------------------------------------------------------------------
// Temp directory management
// -------------------------------------------------------------------------
private readonly List<string> _tempDirs = [];

/// <summary>
/// Creates a fresh temp directory under the system temp path and records it
/// for deletion in <see cref="Dispose"/>.
/// </summary>
private string MakeTempDir()
{
    var path = Path.Combine(Path.GetTempPath(), "dirstore_" + Path.GetRandomFileName());
    Directory.CreateDirectory(path);
    _tempDirs.Add(path);
    return path;
}
/// <summary>Removes every temp directory created during the test; failures are ignored.</summary>
public void Dispose()
{
    foreach (var dir in _tempDirs)
    {
        try
        {
            Directory.Delete(dir, recursive: true);
        }
        catch
        {
            // best-effort: directory may already be gone or briefly locked
        }
    }
}
// -------------------------------------------------------------------------
// Helpers — fake JWT construction
// -------------------------------------------------------------------------
/// <summary>
/// Builds a minimal fake JWT string: header.payload.signature
/// where the payload contains "exp", "iat" and "jti" claims.
/// </summary>
private static string MakeFakeJwt(
    long expUnixSeconds,
    long iatUnixSeconds = 0,
    string? jti = null)
{
    if (iatUnixSeconds == 0)
        iatUnixSeconds = DateTimeOffset.UtcNow.ToUnixTimeSeconds();
    jti ??= Guid.NewGuid().ToString("N");

    // Assemble the JSON payload by hand; DirJwtStore only reads exp/iat/jti.
    var payload = new StringBuilder();
    payload.Append("{\"jti\":\"").Append(jti).Append("\",\"iat\":").Append(iatUnixSeconds);
    if (expUnixSeconds > 0)
        payload.Append(",\"exp\":").Append(expUnixSeconds);
    payload.Append('}');

    var headerB64 = Base64UrlEncode(Encoding.UTF8.GetBytes("{\"alg\":\"ed25519-nkey\",\"typ\":\"JWT\"}"));
    var payloadB64 = Base64UrlEncode(Encoding.UTF8.GetBytes(payload.ToString()));
    var sigB64 = Base64UrlEncode(new byte[64]); // dummy 64-byte signature
    return $"{headerB64}.{payloadB64}.{sigB64}";
}
/// <summary>
/// Rounds a <see cref="DateTimeOffset"/> to the nearest whole second,
/// mirroring Go's <c>time.Now().Round(time.Second)</c>.
/// </summary>
private static DateTimeOffset RoundToSecond(DateTimeOffset dt)
{
    // Truncate to the whole second, then round up when the fractional part
    // (expressed in whole milliseconds) is at least half a second.
    var floor = new DateTimeOffset(dt.Year, dt.Month, dt.Day, dt.Hour, dt.Minute, dt.Second, dt.Offset);
    return dt.Millisecond >= 500 ? floor.AddSeconds(1) : floor;
}
/// <summary>Encodes bytes as unpadded Base64URL (RFC 4648 §5: '-' and '_', no '=').</summary>
private static string Base64UrlEncode(byte[] data)
{
    var b64 = Convert.ToBase64String(data);
    var sb = new StringBuilder(b64.Length);
    foreach (var c in b64)
    {
        if (c == '=')
            break; // '=' padding only occurs at the tail — stop copying there
        sb.Append(c switch { '+' => '-', '/' => '_', _ => c });
    }
    return sb.ToString();
}
/// <summary>
/// Creates and saves a test account JWT in the store.
/// <paramref name="expSec"/> == 0 means no expiration.
/// Returns the saved JWT string.
/// </summary>
private static string CreateTestAccount(DirJwtStore store, string pubKey, int expSec)
{
    var exp = 0L;
    if (expSec > 0)
    {
        // Round to the nearest second first (mirrors Go's time.Now().Round(time.Second).Add(...).Unix()),
        // ensuring the expiry is at a whole-second boundary and avoiding sub-second truncation races.
        exp = RoundToSecond(DateTimeOffset.UtcNow).AddSeconds(expSec).ToUnixTimeSeconds();
    }
    var theJwt = MakeFakeJwt(exp);
    store.SaveAcc(pubKey, theJwt);
    return theJwt;
}
/// <summary>
/// Counts non-deleted .jwt files in <paramref name="dir"/> recursively.
/// </summary>
private static int CountJwtFiles(string dir)
{
    var count = 0;
    foreach (var file in Directory.GetFiles(dir, "*.jwt", SearchOption.AllDirectories))
    {
        // Guard against the platform wildcard quirk where "*.jwt" can also
        // match the longer ".jwt.deleted" extension.
        if (!file.EndsWith(".jwt.deleted", StringComparison.Ordinal))
            count++;
    }
    return count;
}
// -------------------------------------------------------------------------
// T:285 — TestExpiration
// -------------------------------------------------------------------------
[Fact] // T:285
public async Task Expiration_ExpiredAccountIsRemovedByBackground()
{
    var dir = MakeTempDir();
    // 50 ms expire-check interval so the background sweep runs quickly in-test.
    using var store = DirJwtStore.NewExpiringDirJwtStore(
        dir, shard: false, create: false,
        deleteType: JwtDeleteType.NoDelete,
        expireCheck: TimeSpan.FromMilliseconds(50),
        limit: 10,
        evictOnLimit: true,
        ttl: TimeSpan.Zero,
        changeNotification: null);
    var hBegin = store.Hash();
    // Add one account that should NOT expire (100-second TTL).
    var keyNoExp = NextKey();
    CreateTestAccount(store, keyNoExp, 100);
    var hNoExp = store.Hash();
    hNoExp.ShouldNotBe(hBegin);
    // Add one account that should expire in ~1 second.
    var keyExp = NextKey();
    CreateTestAccount(store, keyExp, 1);
    CountJwtFiles(dir).ShouldBe(2);
    // Wait up to 4 s for the expired file to vanish.
    var deadline = DateTime.UtcNow.AddSeconds(4);
    while (DateTime.UtcNow < deadline)
    {
        await Task.Delay(100);
        if (CountJwtFiles(dir) == 1)
            break;
    }
    CountJwtFiles(dir).ShouldBe(1, "expired account should be removed");
    // Hash after expiry should equal hash after adding only the non-expiring key.
    var lh = store.Hash();
    lh.ShouldBe(hNoExp);
}
// -------------------------------------------------------------------------
// T:286 — TestLimit
// -------------------------------------------------------------------------
[Fact] // T:286
public void Limit_LruEvictsOldestEntries()
{
    var dir = MakeTempDir();
    // limit: 5 with evictOnLimit means the 6th distinct key evicts the LRU entry.
    using var store = DirJwtStore.NewExpiringDirJwtStore(
        dir, shard: false, create: false,
        deleteType: JwtDeleteType.NoDelete,
        expireCheck: TimeSpan.FromMilliseconds(100),
        limit: 5,
        evictOnLimit: true,
        ttl: TimeSpan.Zero,
        changeNotification: null);
    var h = store.Hash();
    // Update the first account 10 times — should remain as 1 entry.
    var firstKey = NextKey();
    for (var i = 0; i < 10; i++)
    {
        CreateTestAccount(store, firstKey, 50);
        CountJwtFiles(dir).ShouldBe(1);
    }
    // Add 10 more new accounts — limit is 5, LRU eviction kicks in.
    for (var i = 0; i < 10; i++)
    {
        var k = NextKey();
        CreateTestAccount(store, k, i + 1); // short but non-zero expiry
        // Every add must change the store hash.
        var nh = store.Hash();
        nh.ShouldNotBe(h);
        h = nh;
    }
    // After all adds, only 5 files should remain.
    CountJwtFiles(dir).ShouldBe(5);
    // The first account should have been evicted.
    File.Exists(Path.Combine(dir, firstKey + ".jwt")).ShouldBeFalse();
    // Updating the first account again should succeed (limit allows eviction).
    for (var i = 0; i < 10; i++)
    {
        CreateTestAccount(store, firstKey, 50);
        CountJwtFiles(dir).ShouldBe(5);
    }
}
// -------------------------------------------------------------------------
// T:287 — TestLimitNoEvict
// -------------------------------------------------------------------------
[Fact] // T:287
public async Task LimitNoEvict_StoreFullThrowsOnNewKey()
{
    var dir = MakeTempDir();
    // evictOnLimit: false — a full store must reject new keys rather than evict.
    using var store = DirJwtStore.NewExpiringDirJwtStore(
        dir, shard: false, create: false,
        deleteType: JwtDeleteType.NoDelete,
        expireCheck: TimeSpan.FromMilliseconds(50),
        limit: 2,
        evictOnLimit: false,
        ttl: TimeSpan.Zero,
        changeNotification: null);
    var key1 = NextKey();
    var key2 = NextKey();
    var key3 = NextKey();
    CreateTestAccount(store, key1, 100);
    CountJwtFiles(dir).ShouldBe(1);
    // key2 expires in 1 second
    CreateTestAccount(store, key2, 1);
    CountJwtFiles(dir).ShouldBe(2);
    var hashBefore = store.Hash();
    // Attempting to add key3 should throw (limit=2, no evict).
    var exp3 = DateTimeOffset.UtcNow.AddSeconds(100).ToUnixTimeSeconds();
    var jwt3 = MakeFakeJwt(exp3);
    Should.Throw<InvalidOperationException>(() => store.SaveAcc(key3, jwt3));
    CountJwtFiles(dir).ShouldBe(2);
    File.Exists(Path.Combine(dir, key1 + ".jwt")).ShouldBeTrue();
    File.Exists(Path.Combine(dir, key3 + ".jwt")).ShouldBeFalse();
    // Hash should not change after the failed add.
    store.Hash().ShouldBe(hashBefore);
    // Wait for key2 to expire (1 s expiry + buffer for second-rounding and the sweep interval).
    await Task.Delay(2200);
    // Now adding key3 should succeed.
    store.SaveAcc(key3, jwt3);
    CountJwtFiles(dir).ShouldBe(2);
    File.Exists(Path.Combine(dir, key1 + ".jwt")).ShouldBeTrue();
    File.Exists(Path.Combine(dir, key3 + ".jwt")).ShouldBeTrue();
}
// -------------------------------------------------------------------------
// T:288 — TestLruLoad
// -------------------------------------------------------------------------
[Fact] // T:288
public void LruLoad_LoadReordersLru()
{
    var dir = MakeTempDir();
    using var store = DirJwtStore.NewExpiringDirJwtStore(
        dir, shard: false, create: false,
        deleteType: JwtDeleteType.NoDelete,
        expireCheck: TimeSpan.FromMilliseconds(100),
        limit: 2,
        evictOnLimit: true,
        ttl: TimeSpan.Zero,
        changeNotification: null);
    var key1 = NextKey();
    var key2 = NextKey();
    var key3 = NextKey();
    CreateTestAccount(store, key1, 10);
    CountJwtFiles(dir).ShouldBe(1);
    CreateTestAccount(store, key2, 10);
    CountJwtFiles(dir).ShouldBe(2);
    // Access key1 — makes it the most-recently-used.
    store.LoadAcc(key1);
    // Adding key3 should evict key2 (oldest), not key1.
    CreateTestAccount(store, key3, 10);
    CountJwtFiles(dir).ShouldBe(2);
    File.Exists(Path.Combine(dir, key1 + ".jwt")).ShouldBeTrue();
    File.Exists(Path.Combine(dir, key3 + ".jwt")).ShouldBeTrue();
}
// -------------------------------------------------------------------------
// T:289 — TestLruVolume
// -------------------------------------------------------------------------
[Fact] // T:289
public void LruVolume_ContinuousReplacementsAlwaysEvictsOldest()
{
    var dir = MakeTempDir();
    using var store = DirJwtStore.NewExpiringDirJwtStore(
        dir, shard: false, create: false,
        deleteType: JwtDeleteType.NoDelete,
        expireCheck: TimeSpan.FromMilliseconds(50),
        limit: 2,
        evictOnLimit: true,
        ttl: TimeSpan.Zero,
        changeNotification: null);
    const int ReplaceCnt = 200; // must be > 2 due to the invariant
    var keys = new string[ReplaceCnt];
    // Seed the store to its limit (2 entries) before the replacement loop.
    keys[0] = NextKey();
    CreateTestAccount(store, keys[0], 10000);
    CountJwtFiles(dir).ShouldBe(1);
    keys[1] = NextKey();
    CreateTestAccount(store, keys[1], 10000);
    CountJwtFiles(dir).ShouldBe(2);
    for (var i = 2; i < ReplaceCnt; i++)
    {
        keys[i] = NextKey();
        CreateTestAccount(store, keys[i], 10000);
        CountJwtFiles(dir).ShouldBe(2);
        // key two positions back should have been evicted.
        File.Exists(Path.Combine(dir, keys[i - 2] + ".jwt")).ShouldBeFalse(
            $"key[{i - 2}] should be evicted after adding key[{i}]");
        // key one position back should still be present.
        File.Exists(Path.Combine(dir, keys[i - 1] + ".jwt")).ShouldBeTrue();
        // current key should be present.
        File.Exists(Path.Combine(dir, keys[i] + ".jwt")).ShouldBeTrue();
    }
}
// -------------------------------------------------------------------------
// T:290 — TestLru
// -------------------------------------------------------------------------
[Fact] // T:290
public async Task Lru_EvictsAndExpires()
{
    var dir = MakeTempDir();
    using var store = DirJwtStore.NewExpiringDirJwtStore(
        dir, shard: false, create: false,
        deleteType: JwtDeleteType.NoDelete,
        expireCheck: TimeSpan.FromMilliseconds(50),
        limit: 2,
        evictOnLimit: true,
        ttl: TimeSpan.Zero,
        changeNotification: null);
    var key1 = NextKey();
    var key2 = NextKey();
    var key3 = NextKey();
    CreateTestAccount(store, key1, 1000);
    CountJwtFiles(dir).ShouldBe(1);
    CreateTestAccount(store, key2, 1000);
    CountJwtFiles(dir).ShouldBe(2);
    // Adding key3 should evict key1 (oldest).
    CreateTestAccount(store, key3, 1000);
    CountJwtFiles(dir).ShouldBe(2);
    File.Exists(Path.Combine(dir, key1 + ".jwt")).ShouldBeFalse();
    File.Exists(Path.Combine(dir, key3 + ".jwt")).ShouldBeTrue();
    // Update key2 → moves it to MRU. key3 becomes LRU.
    CreateTestAccount(store, key2, 1000);
    CountJwtFiles(dir).ShouldBe(2);
    // Recreate key1 (which was evicted) → evicts key3.
    CreateTestAccount(store, key1, 1); // expires in 1 s
    CountJwtFiles(dir).ShouldBe(2);
    File.Exists(Path.Combine(dir, key3 + ".jwt")).ShouldBeFalse();
    // Let key1 expire (1 s + 1 s buffer for rounding).
    await Task.Delay(2200);
    CountJwtFiles(dir).ShouldBe(1);
    File.Exists(Path.Combine(dir, key1 + ".jwt")).ShouldBeFalse();
    // Recreate key3 — no eviction needed, slot is free.
    CreateTestAccount(store, key3, 1000);
    CountJwtFiles(dir).ShouldBe(2);
}
// -------------------------------------------------------------------------
// T:291 — TestReload
// -------------------------------------------------------------------------
[Fact] // T:291
public void Reload_DetectsFilesAddedAndRemoved()
{
    var dir = MakeTempDir();
    var notificationChan = new System.Collections.Concurrent.ConcurrentQueue<string>();
    using var store = DirJwtStore.NewExpiringDirJwtStore(
        dir, shard: false, create: false,
        deleteType: JwtDeleteType.NoDelete,
        expireCheck: TimeSpan.FromMilliseconds(100),
        limit: 2,
        evictOnLimit: true,
        ttl: TimeSpan.Zero,
        changeNotification: pk => notificationChan.Enqueue(pk));
    CountJwtFiles(dir).ShouldBe(0);
    // An empty store hashes to 32 zero bytes.
    var emptyHash = new byte[32];
    store.Hash().ShouldBe(emptyHash);
    var files = new List<string>();
    // Add 5 accounts by writing to disk directly, then Reload().
    for (var i = 0; i < 5; i++)
    {
        var key = NextKey();
        var exp = DateTimeOffset.UtcNow.AddSeconds(10000).ToUnixTimeSeconds();
        var jwt = MakeFakeJwt(exp);
        var path = Path.Combine(dir, key + ".jwt");
        File.WriteAllText(path, jwt);
        files.Add(path);
        store.Reload();
        // Wait briefly for notification.
        var deadline = DateTime.UtcNow.AddMilliseconds(500);
        while (notificationChan.IsEmpty && DateTime.UtcNow < deadline)
            Thread.Sleep(10);
        notificationChan.TryDequeue(out _);
        CountJwtFiles(dir).ShouldBe(i + 1);
        store.Hash().ShouldNotBe(emptyHash);
        // Pack(-1) emits one newline-separated entry per stored account.
        var packed = store.Pack(-1);
        packed.Split('\n').Length.ShouldBe(i + 1);
    }
    // Now remove files one by one.
    foreach (var f in files)
    {
        var hash = store.Hash();
        hash.ShouldNotBe(emptyHash);
        File.Delete(f);
        store.Reload();
        CountJwtFiles(dir).ShouldBe(files.Count - files.IndexOf(f) - 1);
    }
    // With every file deleted, the hash must return to the empty-store value.
    store.Hash().ShouldBe(emptyHash);
}
// -------------------------------------------------------------------------
// T:292 — TestExpirationUpdate
// -------------------------------------------------------------------------
[Fact] // T:292
public async Task ExpirationUpdate_UpdatingExpirationExtendsTTL()
{
    var dir = MakeTempDir();
    using var store = DirJwtStore.NewExpiringDirJwtStore(
        dir, shard: false, create: false,
        deleteType: JwtDeleteType.NoDelete,
        expireCheck: TimeSpan.FromMilliseconds(50),
        limit: 10,
        evictOnLimit: true,
        ttl: TimeSpan.Zero,
        changeNotification: null);
    var key = NextKey();
    var h = store.Hash();
    // Save account with no expiry.
    CreateTestAccount(store, key, 0);
    var nh = store.Hash();
    nh.ShouldNotBe(h);
    h = nh;
    await Task.Delay(1500);
    CountJwtFiles(dir).ShouldBe(1); // should NOT have expired (no exp claim)
    // Save same account with 2-second expiry.
    CreateTestAccount(store, key, 2);
    nh = store.Hash();
    nh.ShouldNotBe(h);
    h = nh;
    await Task.Delay(1500);
    CountJwtFiles(dir).ShouldBe(1); // not expired yet
    // Save with no expiry again — resets expiry on that account.
    CreateTestAccount(store, key, 0);
    nh = store.Hash();
    nh.ShouldNotBe(h);
    h = nh;
    await Task.Delay(1500);
    CountJwtFiles(dir).ShouldBe(1); // still NOT expired
    // Now save with 1-second expiry.
    CreateTestAccount(store, key, 1);
    nh = store.Hash();
    nh.ShouldNotBe(h);
    await Task.Delay(1500);
    CountJwtFiles(dir).ShouldBe(0); // should be expired now
    // Empty store hashes to 32 zero bytes.
    var empty = new byte[32];
    store.Hash().ShouldBe(empty);
}
// -------------------------------------------------------------------------
// T:293 — TestTTL
// -------------------------------------------------------------------------
[Fact] // T:293
public async Task TTL_AccessResetsExpirationOnStore()
{
    var dir = MakeTempDir();
    var key = NextKey();
    // TTL = 200 ms. Each access (Load or Save) should reset expiry.
    using var store = DirJwtStore.NewExpiringDirJwtStore(
        dir, shard: false, create: false,
        deleteType: JwtDeleteType.NoDelete,
        expireCheck: TimeSpan.FromMilliseconds(50),
        limit: 10,
        evictOnLimit: true,
        ttl: TimeSpan.FromMilliseconds(200),
        changeNotification: null);
    CreateTestAccount(store, key, 0);
    CountJwtFiles(dir).ShouldBe(1);
    // Access every 110 ms — should prevent expiration.
    for (var i = 0; i < 4; i++)
    {
        await Task.Delay(110);
        store.LoadAcc(key); // TTL reset via Load
        CountJwtFiles(dir).ShouldBe(1);
    }
    // Stop accessing — wait for expiration.
    var deadline = DateTime.UtcNow.AddSeconds(3);
    while (DateTime.UtcNow < deadline)
    {
        await Task.Delay(50);
        if (CountJwtFiles(dir) == 0)
            return; // expired as expected
    }
    Assert.Fail("JWT should have expired by now via TTL");
}
// -------------------------------------------------------------------------
// T:294 — TestRemove
// -------------------------------------------------------------------------
[Fact] // T:294
public void Remove_RespectsDeleteType()
{
    // For each delete policy: expected count of remaining .jwt files and
    // of .jwt.deleted tombstones after a Delete() call.
    foreach (var (deleteType, expectedJwt, expectedDeleted) in new[]
    {
        (JwtDeleteType.HardDelete, 0, 0),
        (JwtDeleteType.RenameDeleted, 0, 1),
        (JwtDeleteType.NoDelete, 1, 0),
    })
    {
        var dir = MakeTempDir();
        using var store = DirJwtStore.NewExpiringDirJwtStore(
            dir, shard: false, create: false,
            deleteType: deleteType,
            expireCheck: TimeSpan.Zero,
            limit: 10,
            evictOnLimit: true,
            ttl: TimeSpan.Zero,
            changeNotification: null);
        var key = NextKey();
        CreateTestAccount(store, key, 0);
        CountJwtFiles(dir).ShouldBe(1, $"deleteType={deleteType}: should have 1 jwt before delete");
        // For HardDelete and RenameDeleted the store must allow Delete.
        // For NoDelete, Delete should throw.
        if (deleteType == JwtDeleteType.NoDelete)
        {
            Should.Throw<InvalidOperationException>(() => store.Delete(key),
                $"deleteType={deleteType}: should throw on delete");
        }
        else
        {
            store.Delete(key);
        }
        // Count .jwt files (not .jwt.deleted).
        var jwtFiles = Directory.GetFiles(dir, "*.jwt", SearchOption.AllDirectories)
            .Count(f => !f.EndsWith(".jwt.deleted", StringComparison.Ordinal));
        jwtFiles.ShouldBe(expectedJwt, $"deleteType={deleteType}: unexpected jwt count");
        // Count .jwt.deleted files.
        var deletedFiles = Directory.GetFiles(dir, "*.jwt.deleted", SearchOption.AllDirectories).Length;
        deletedFiles.ShouldBe(expectedDeleted, $"deleteType={deleteType}: unexpected deleted count");
    }
}
// -------------------------------------------------------------------------
// T:295 — TestNotificationOnPack
// -------------------------------------------------------------------------
/// <summary>
/// Mirrors TestNotificationOnPack (T:295). Packs a pre-populated store and merges
/// the packed message into fresh stores (alternating sharded/unsharded); each merge
/// must fire the change notification for every key, the merged store must hash
/// identically to the source store, and re-merging identical JWTs must fire nothing.
/// </summary>
[Fact] // T:295
public void NotificationOnPack_MergeFiresChangedCallback()
{
    // Pre-populate a store with 4 accounts, pack it, then Merge into new stores.
    // Each Merge should fire the change notification for every key.
    const int JwtCount = 4;
    var infDur = TimeSpan.FromDays(49); // "effectively infinite" (Timer max ≈ 49.7 days; TimeSpan.MaxValue/2 exceeds it)
    var dirPack = MakeTempDir();
    var keys = new string[JwtCount];
    var jwts = new string[JwtCount];
    var notifications = new System.Collections.Concurrent.ConcurrentQueue<string>();
    using var packStore = DirJwtStore.NewExpiringDirJwtStore(
        dirPack, shard: false, create: false,
        deleteType: JwtDeleteType.NoDelete,
        expireCheck: infDur,
        limit: 0,
        evictOnLimit: true,
        ttl: TimeSpan.Zero,
        changeNotification: pk => notifications.Enqueue(pk));
    for (var i = 0; i < JwtCount; i++)
    {
        keys[i] = NextKey();
        jwts[i] = MakeFakeJwt(0); // no expiry
        packStore.SaveAcc(keys[i], jwts[i]);
    }
    // Drain the save-time notifications so later counts only reflect merges.
    var deadline = DateTime.UtcNow.AddSeconds(2);
    while (notifications.Count < JwtCount && DateTime.UtcNow < deadline)
        Thread.Sleep(10);
    while (notifications.TryDequeue(out _)) { }
    var msg = packStore.Pack(-1);
    var hash = packStore.Hash();
    // Merge into new stores (sharded and unsharded).
    foreach (var shard in new[] { true, false, true, false })
    {
        var dirMerge = MakeTempDir();
        var mergeNotifications = new System.Collections.Concurrent.ConcurrentQueue<string>();
        using var mergeStore = DirJwtStore.NewExpiringDirJwtStore(
            dirMerge, shard: shard, create: false,
            deleteType: JwtDeleteType.NoDelete,
            expireCheck: infDur,
            limit: 0,
            evictOnLimit: true,
            ttl: TimeSpan.Zero,
            changeNotification: pk => mergeNotifications.Enqueue(pk));
        mergeStore.Merge(msg);
        CountJwtFiles(dirMerge).ShouldBe(JwtCount);
        // FIX: verify the *merged* store's hash matches the pack store's hash.
        // The original asserted packStore.Hash().ShouldBe(hash) — comparing the
        // unchanged pack store against its own earlier value, which is trivially
        // true and verified nothing about the merge. The Go test compares the
        // two stores' hashes after the merge.
        mergeStore.Hash().ShouldBe(hash);
        // Wait for JwtCount notifications.
        deadline = DateTime.UtcNow.AddSeconds(2);
        while (mergeNotifications.Count < JwtCount && DateTime.UtcNow < deadline)
            Thread.Sleep(10);
        mergeNotifications.Count.ShouldBeGreaterThanOrEqualTo(JwtCount);
        // Double-merge should produce no extra file changes.
        while (mergeNotifications.TryDequeue(out _)) { }
        mergeStore.Merge(msg);
        CountJwtFiles(dirMerge).ShouldBe(JwtCount);
        Thread.Sleep(50);
        mergeNotifications.IsEmpty.ShouldBeTrue("no new notifications on re-merge of identical JWTs");
        msg = mergeStore.Pack(-1);
    }
    // All original JWTs can still be loaded from the last pack.
    for (var i = 0; i < JwtCount; i++)
    {
        var found = msg.Contains(keys[i] + "|" + jwts[i]);
        found.ShouldBeTrue($"key {keys[i]} should be in packed message");
    }
}
// -------------------------------------------------------------------------
// T:296 — TestNotificationOnPackWalk
// -------------------------------------------------------------------------
/// <summary>
/// Mirrors TestNotificationOnPackWalk (T:296): JWTs saved into the head of a
/// chain of stores are propagated downstream via PackWalk/Merge; afterwards
/// every adjacent pair of stores must agree on its content hash.
/// </summary>
[Fact] // T:296
public void NotificationOnPackWalk_PropagatesAcrossChainOfStores()
{
    const int StoreCnt = 5;
    const int KeyCnt = 50;
    const int IterCnt = 4; // reduced from Go's 8 to keep test fast
    var infDur = TimeSpan.FromDays(49); // "effectively infinite" (Timer max ≈ 49.7 days; TimeSpan.MaxValue/2 exceeds it)
    var stores = new DirJwtStore[StoreCnt];
    var dirs = new string[StoreCnt];
    try
    {
        for (var idx = 0; idx < StoreCnt; idx++)
        {
            dirs[idx] = MakeTempDir();
            stores[idx] = DirJwtStore.NewExpiringDirJwtStore(
                dirs[idx], shard: true, create: false,
                deleteType: JwtDeleteType.NoDelete,
                expireCheck: infDur,
                limit: 0,
                evictOnLimit: true,
                ttl: TimeSpan.Zero,
                changeNotification: null);
        }
        for (var iter = 0; iter < IterCnt; iter++)
        {
            // Seed the head of the chain with KeyCnt fresh accounts.
            for (var n = 0; n < KeyCnt; n++)
                stores[0].SaveAcc(NextKey(), MakeFakeJwt(0));
            // Walk each store's packed content into its successor.
            for (var j = 0; j < StoreCnt - 1; j++)
            {
                var next = stores[j + 1];
                stores[j].PackWalk(3, partial => next.Merge(partial));
            }
            // After propagation all adjacent stores must hash identically.
            for (var j = 0; j < StoreCnt - 1; j++)
            {
                stores[j].Hash().ShouldBe(stores[j + 1].Hash(),
                    $"stores[{j}] and stores[{j + 1}] should have matching hashes after iteration {iter}");
            }
        }
    }
    finally
    {
        foreach (var s in stores) try { s?.Dispose(); } catch { /* best-effort */ }
    }
}
}

View File

@@ -12,6 +12,7 @@
// limitations under the License.
using Shouldly;
using ZB.MOM.NatsNet.Server;
using ZB.MOM.NatsNet.Server.Auth;
namespace ZB.MOM.NatsNet.Server.Tests.Auth;
@@ -381,4 +382,68 @@ public class AuthHandlerTests
{
AuthHandler.ConnectionTypes.IsKnown(ct).ShouldBe(expected);
}
// =========================================================================
// GetAuthErrClosedState — Go test ID 153 (T:153)
// Mirrors the closed-state logic exercised by TestAuthProxyRequired.
// (The full Go test is server-dependent; this covers the pure unit subset.)
// =========================================================================
/// <summary>
/// Mirrors the proxy-required branch of TestAuthProxyRequired (T:153):
/// a proxy-required auth failure maps to ClosedState.ProxyRequired.
/// </summary>
[Fact] // T:153
public void GetAuthErrClosedState_ProxyRequired_ReturnsProxyRequired() =>
    AuthHandler.GetAuthErrClosedState(new AuthProxyRequiredException())
        .ShouldBe(ClosedState.ProxyRequired);

/// <summary>A not-trusted-proxy failure maps to ClosedState.ProxyNotTrusted.</summary>
[Fact]
public void GetAuthErrClosedState_ProxyNotTrusted_ReturnsProxyNotTrusted() =>
    AuthHandler.GetAuthErrClosedState(new AuthProxyNotTrustedException())
        .ShouldBe(ClosedState.ProxyNotTrusted);

/// <summary>Any unrecognized exception type maps to the generic authentication violation.</summary>
[Fact]
public void GetAuthErrClosedState_OtherException_ReturnsAuthenticationViolation() =>
    AuthHandler.GetAuthErrClosedState(new InvalidOperationException("bad"))
        .ShouldBe(ClosedState.AuthenticationViolation);

/// <summary>A null exception also maps to the generic authentication violation.</summary>
[Fact]
public void GetAuthErrClosedState_NullException_ReturnsAuthenticationViolation() =>
    AuthHandler.GetAuthErrClosedState(null)
        .ShouldBe(ClosedState.AuthenticationViolation);
// =========================================================================
// ValidateProxies
// =========================================================================
/// <summary>
/// proxy_required without proxy_protocol is an invalid combination and must
/// produce an error mentioning the offending option.
/// </summary>
[Fact]
public void ValidateProxies_ProxyRequiredWithoutProxyProtocol_ReturnsError()
{
    var options = new ServerOptions { ProxyRequired = true, ProxyProtocol = false };

    var validationError = AuthHandler.ValidateProxies(options);

    validationError.ShouldNotBeNull();
    validationError!.Message.ShouldContain("proxy_required");
}

/// <summary>proxy_required together with proxy_protocol is valid — no error.</summary>
[Fact]
public void ValidateProxies_ProxyRequiredWithProxyProtocol_ReturnsNull() =>
    AuthHandler.ValidateProxies(new ServerOptions { ProxyRequired = true, ProxyProtocol = true })
        .ShouldBeNull();

/// <summary>Default options (neither flag set) are valid — no error.</summary>
[Fact]
public void ValidateProxies_NeitherSet_ReturnsNull() =>
    AuthHandler.ValidateProxies(new ServerOptions()).ShouldBeNull();
}

View File

@@ -0,0 +1,138 @@
// Copyright 2012-2025 The NATS Authors
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
using Shouldly;
using ZB.MOM.NatsNet.Server.Internal;
namespace ZB.MOM.NatsNet.Server.Tests.Internal;
/// <summary>
/// Tests for server logging trace sanitization (RemovePassFromTrace, RemoveAuthTokenFromTrace).
/// Mirrors server/log_test.go — TestNoPasswordsFromConnectTrace, TestRemovePassFromTrace,
/// TestRemoveAuthTokenFromTrace.
/// </summary>
public class ServerLoggerTests
{
    // ---------------------------------------------------------------------------
    // T:2020 — TestNoPasswordsFromConnectTrace
    // ---------------------------------------------------------------------------
    /// <summary>
    /// Mirrors TestNoPasswordsFromConnectTrace.
    /// Verifies that a CONNECT trace with a password or auth_token does not
    /// expose the secret value after sanitization.
    /// </summary>
    [Fact] // T:2020
    public void NoPasswordsFromConnectTrace_ShouldSucceed()
    {
        // Raw string literals keep the embedded JSON quotes readable; the
        // secret values below must be absent from the sanitized output.
        const string connectWithPass =
            """CONNECT {"verbose":false,"pedantic":false,"user":"derek","pass":"s3cr3t","tls_required":false}""";
        const string connectWithToken =
            """CONNECT {"verbose":false,"auth_token":"secret-token","tls_required":false}""";
        ServerLogging.RemovePassFromTrace(connectWithPass).ShouldNotContain("s3cr3t");
        ServerLogging.RemoveAuthTokenFromTrace(connectWithToken).ShouldNotContain("secret-token");
    }
    // ---------------------------------------------------------------------------
    // T:2021 — TestRemovePassFromTrace
    // ---------------------------------------------------------------------------
    /// <summary>
    /// Mirrors TestRemovePassFromTrace — covers all test vectors from log_test.go.
    /// Each case verifies that RemovePassFromTrace redacts the first pass/password value
    /// with [REDACTED] while leaving other fields intact.
    /// </summary>
    // Note: every vector carries a trailing \r\n, which sanitization must preserve.
    [Theory] // T:2021
    [InlineData(
        "user and pass",
        "CONNECT {\"user\":\"derek\",\"pass\":\"s3cr3t\"}\r\n",
        "CONNECT {\"user\":\"derek\",\"pass\":\"[REDACTED]\"}\r\n")]
    [InlineData(
        "user and pass extra space",
        "CONNECT {\"user\":\"derek\",\"pass\": \"s3cr3t\"}\r\n",
        "CONNECT {\"user\":\"derek\",\"pass\": \"[REDACTED]\"}\r\n")]
    [InlineData(
        "user and pass is empty",
        "CONNECT {\"user\":\"derek\",\"pass\":\"\"}\r\n",
        "CONNECT {\"user\":\"derek\",\"pass\":\"[REDACTED]\"}\r\n")]
    [InlineData(
        "user and pass is empty whitespace",
        "CONNECT {\"user\":\"derek\",\"pass\":\" \"}\r\n",
        "CONNECT {\"user\":\"derek\",\"pass\":\"[REDACTED]\"}\r\n")]
    [InlineData(
        "only pass",
        "CONNECT {\"pass\":\"s3cr3t\",}\r\n",
        "CONNECT {\"pass\":\"[REDACTED]\",}\r\n")]
    [InlineData(
        "complete connect",
        "CONNECT {\"echo\":true,\"verbose\":false,\"pedantic\":false,\"user\":\"foo\",\"pass\":\"s3cr3t\",\"tls_required\":false,\"name\":\"APM7JU94z77YzP6WTBEiuw\"}\r\n",
        "CONNECT {\"echo\":true,\"verbose\":false,\"pedantic\":false,\"user\":\"foo\",\"pass\":\"[REDACTED]\",\"tls_required\":false,\"name\":\"APM7JU94z77YzP6WTBEiuw\"}\r\n")]
    [InlineData(
        "user and pass are filtered",
        "CONNECT {\"user\":\"s3cr3t\",\"pass\":\"s3cr3t\"}\r\n",
        "CONNECT {\"user\":\"s3cr3t\",\"pass\":\"[REDACTED]\"}\r\n")]
    [InlineData(
        "single long password",
        "CONNECT {\"pass\":\"AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\"}\r\n",
        "CONNECT {\"pass\":\"[REDACTED]\"}\r\n")]
    public void RemovePassFromTrace_ShouldSucceed(string name, string input, string expected)
    {
        _ = name; // used for test display only
        ServerLogging.RemovePassFromTrace(input).ShouldBe(expected);
    }
    // ---------------------------------------------------------------------------
    // T:2022 — TestRemoveAuthTokenFromTrace
    // ---------------------------------------------------------------------------
    /// <summary>
    /// Mirrors TestRemoveAuthTokenFromTrace — covers representative test vectors
    /// from log_test.go. Each case verifies that RemoveAuthTokenFromTrace redacts
    /// the first auth_token value with [REDACTED].
    /// </summary>
    [Theory] // T:2022
    [InlineData(
        "user and auth_token",
        "CONNECT {\"user\":\"derek\",\"auth_token\":\"s3cr3t\"}\r\n",
        "CONNECT {\"user\":\"derek\",\"auth_token\":\"[REDACTED]\"}\r\n")]
    [InlineData(
        "user and auth_token extra space",
        "CONNECT {\"user\":\"derek\",\"auth_token\": \"s3cr3t\"}\r\n",
        "CONNECT {\"user\":\"derek\",\"auth_token\": \"[REDACTED]\"}\r\n")]
    [InlineData(
        "user and auth_token is empty",
        "CONNECT {\"user\":\"derek\",\"auth_token\":\"\"}\r\n",
        "CONNECT {\"user\":\"derek\",\"auth_token\":\"[REDACTED]\"}\r\n")]
    [InlineData(
        "only auth_token",
        "CONNECT {\"auth_token\":\"s3cr3t\",}\r\n",
        "CONNECT {\"auth_token\":\"[REDACTED]\",}\r\n")]
    [InlineData(
        "complete connect",
        "CONNECT {\"echo\":true,\"verbose\":false,\"pedantic\":false,\"auth_token\":\"s3cr3t\",\"tls_required\":false,\"name\":\"APM7JU94z77YzP6WTBEiuw\"}\r\n",
        "CONNECT {\"echo\":true,\"verbose\":false,\"pedantic\":false,\"auth_token\":\"[REDACTED]\",\"tls_required\":false,\"name\":\"APM7JU94z77YzP6WTBEiuw\"}\r\n")]
    [InlineData(
        "user and token are filtered",
        "CONNECT {\"user\":\"s3cr3t\",\"auth_token\":\"s3cr3t\"}\r\n",
        "CONNECT {\"user\":\"s3cr3t\",\"auth_token\":\"[REDACTED]\"}\r\n")]
    [InlineData(
        "single long token",
        "CONNECT {\"auth_token\":\"AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\"}\r\n",
        "CONNECT {\"auth_token\":\"[REDACTED]\"}\r\n")]
    public void RemoveAuthTokenFromTrace_ShouldSucceed(string name, string input, string expected)
    {
        _ = name; // used for test display only
        ServerLogging.RemoveAuthTokenFromTrace(input).ShouldBe(expected);
    }
}

View File

@@ -67,4 +67,100 @@ public class SignalHandlerTests
var err = SignalHandler.ProcessSignal(ServerCommand.Stop, "not-a-pid");
err.ShouldNotBeNull();
}
// ---------------------------------------------------------------------------
// Tests ported from server/signal_test.go
// ---------------------------------------------------------------------------
/// <summary>
/// Mirrors TestProcessSignalInvalidCommand.
/// An out-of-range ServerCommand enum value is treated as an unknown signal
/// and ProcessSignal returns a non-null error.
/// </summary>
[Fact] // T:2919
public void ProcessSignalInvalidCommand_ShouldSucceed()
{
    if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows))
        return; // POSIX-signal behavior; skip on Windows

    var result = SignalHandler.ProcessSignal((ServerCommand)99, "123");

    result.ShouldNotBeNull();
}

/// <summary>
/// Mirrors TestProcessSignalInvalidPid.
/// A non-numeric PID string returns an error containing "invalid pid".
/// </summary>
[Fact] // T:2920
public void ProcessSignalInvalidPid_ShouldSucceed()
{
    if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows))
        return; // POSIX-signal behavior; skip on Windows

    var result = SignalHandler.ProcessSignal(ServerCommand.Stop, "abc");

    result.ShouldNotBeNull();
    result!.Message.ShouldContain("invalid pid");
}
// ---------------------------------------------------------------------------
// Deferred signal tests — require pgrep/kill injection or real OS process spawning.
// These cannot be unit-tested without refactoring SignalHandler to accept
// injectable pgrep/kill delegates (as the Go source does).
// Each stub below is a [Fact(Skip=...)] placeholder: the body is intentionally
// empty and the Skip reason records why the corresponding Go test (T:29xx)
// has not been ported yet.
// ---------------------------------------------------------------------------
/// <summary>Mirrors TestProcessSignalMultipleProcesses — deferred: requires pgrep injection.</summary>
[Fact(Skip = "deferred: requires pgrep/kill injection")] // T:2913
public void ProcessSignalMultipleProcesses_ShouldSucceed() { }
/// <summary>Mirrors TestProcessSignalMultipleProcessesGlob — deferred: requires pgrep injection.</summary>
[Fact(Skip = "deferred: requires pgrep/kill injection")] // T:2914
public void ProcessSignalMultipleProcessesGlob_ShouldSucceed() { }
/// <summary>Mirrors TestProcessSignalMultipleProcessesGlobPartial — deferred: requires pgrep injection.</summary>
[Fact(Skip = "deferred: requires pgrep/kill injection")] // T:2915
public void ProcessSignalMultipleProcessesGlobPartial_ShouldSucceed() { }
/// <summary>Mirrors TestProcessSignalPgrepError — deferred: requires pgrep injection.</summary>
[Fact(Skip = "deferred: requires pgrep injection")] // T:2916
public void ProcessSignalPgrepError_ShouldSucceed() { }
/// <summary>Mirrors TestProcessSignalPgrepMangled — deferred: requires pgrep injection.</summary>
[Fact(Skip = "deferred: requires pgrep injection")] // T:2917
public void ProcessSignalPgrepMangled_ShouldSucceed() { }
/// <summary>Mirrors TestProcessSignalResolveSingleProcess — deferred: requires pgrep and kill injection.</summary>
[Fact(Skip = "deferred: requires pgrep/kill injection")] // T:2918
public void ProcessSignalResolveSingleProcess_ShouldSucceed() { }
/// <summary>Mirrors TestProcessSignalQuitProcess — deferred: requires kill injection.</summary>
[Fact(Skip = "deferred: requires kill injection")] // T:2921
public void ProcessSignalQuitProcess_ShouldSucceed() { }
/// <summary>Mirrors TestProcessSignalTermProcess — deferred: requires kill injection and commandTerm equivalent.</summary>
[Fact(Skip = "deferred: requires kill injection")] // T:2922
public void ProcessSignalTermProcess_ShouldSucceed() { }
/// <summary>Mirrors TestProcessSignalReopenProcess — deferred: requires kill injection.</summary>
[Fact(Skip = "deferred: requires kill injection")] // T:2923
public void ProcessSignalReopenProcess_ShouldSucceed() { }
/// <summary>Mirrors TestProcessSignalReloadProcess — deferred: requires kill injection.</summary>
[Fact(Skip = "deferred: requires kill injection")] // T:2924
public void ProcessSignalReloadProcess_ShouldSucceed() { }
/// <summary>Mirrors TestProcessSignalLameDuckMode — deferred: requires kill injection and commandLDMode equivalent.</summary>
[Fact(Skip = "deferred: requires kill injection")] // T:2925
public void ProcessSignalLameDuckMode_ShouldSucceed() { }
/// <summary>Mirrors TestProcessSignalTermDuringLameDuckMode — deferred: requires full server (RunServer) and real OS signal.</summary>
[Fact(Skip = "deferred: requires RunServer and real OS SIGTERM")] // T:2926
public void ProcessSignalTermDuringLameDuckMode_ShouldSucceed() { }
/// <summary>Mirrors TestSignalInterruptHasSuccessfulExit — deferred: requires spawning a subprocess to test exit code on SIGINT.</summary>
[Fact(Skip = "deferred: requires subprocess process spawning")] // T:2927
public void SignalInterruptHasSuccessfulExit_ShouldSucceed() { }
/// <summary>Mirrors TestSignalTermHasSuccessfulExit — deferred: requires spawning a subprocess to test exit code on SIGTERM.</summary>
[Fact(Skip = "deferred: requires subprocess process spawning")] // T:2928
public void SignalTermHasSuccessfulExit_ShouldSucceed() { }
}

View File

@@ -0,0 +1,113 @@
// Copyright 2025 The NATS Authors
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Mirrors server/jetstream_batching_test.go in the NATS server Go source.
// ALL tests in this file are deferred: they all use createJetStreamClusterExplicit()
// or RunBasicJetStreamServer() and require a running JetStream cluster/server.
namespace ZB.MOM.NatsNet.Server.Tests.JetStream;
/// <summary>
/// Tests for JetStream atomic batch publishing.
/// Mirrors server/jetstream_batching_test.go.
/// All tests are deferred pending JetStream server infrastructure.
/// </summary>
public sealed class JetStreamBatchingTests
{
    // Test IDs T:716–T:744 map 1:1 onto the TestJetStreamAtomicBatchPublish* /
    // TestJetStreamRollupIsolatedRead functions in server/jetstream_batching_test.go.
    // Bodies are intentionally empty: each [Fact(Skip=...)] records the port
    // status until JetStream cluster/server test infrastructure exists.
    [Fact(Skip = "deferred: requires running JetStream cluster")] // T:716
    public void JetStreamAtomicBatchPublish_RequiresRunningServer() { }
    [Fact(Skip = "deferred: requires running JetStream cluster")] // T:717
    public void JetStreamAtomicBatchPublishEmptyAck_RequiresRunningServer() { }
    [Fact(Skip = "deferred: requires running JetStream cluster")] // T:718
    public void JetStreamAtomicBatchPublishCommitEob_RequiresRunningServer() { }
    [Fact(Skip = "deferred: requires running JetStream cluster")] // T:719
    public void JetStreamAtomicBatchPublishLimits_RequiresRunningServer() { }
    [Fact(Skip = "deferred: requires running JetStream cluster")] // T:720
    public void JetStreamAtomicBatchPublishDedupeNotAllowed_RequiresRunningServer() { }
    [Fact(Skip = "deferred: requires running JetStream cluster")] // T:721
    public void JetStreamAtomicBatchPublishSourceAndMirror_RequiresRunningServer() { }
    [Fact(Skip = "deferred: requires running JetStream cluster")] // T:722
    public void JetStreamAtomicBatchPublishCleanup_RequiresRunningServer() { }
    [Fact(Skip = "deferred: requires running JetStream cluster")] // T:723
    public void JetStreamAtomicBatchPublishConfigOpts_RequiresRunningServer() { }
    [Fact(Skip = "deferred: requires running JetStream cluster")] // T:724
    public void JetStreamAtomicBatchPublishDenyHeaders_RequiresRunningServer() { }
    [Fact(Skip = "deferred: requires running JetStream cluster")] // T:725
    public void JetStreamAtomicBatchPublishStageAndCommit_RequiresRunningServer() { }
    [Fact(Skip = "deferred: requires running JetStream cluster")] // T:726
    public void JetStreamAtomicBatchPublishHighLevelRollback_RequiresRunningServer() { }
    [Fact(Skip = "deferred: requires running JetStream cluster")] // T:727
    public void JetStreamAtomicBatchPublishExpectedPerSubject_RequiresRunningServer() { }
    [Fact(Skip = "deferred: requires running JetStream cluster")] // T:728
    public void JetStreamAtomicBatchPublishSingleServerRecovery_RequiresRunningServer() { }
    [Fact(Skip = "deferred: requires running JetStream cluster")] // T:729
    public void JetStreamAtomicBatchPublishSingleServerRecoveryCommitEob_RequiresRunningServer() { }
    [Fact(Skip = "deferred: requires running JetStream cluster")] // T:730
    public void JetStreamAtomicBatchPublishEncode_RequiresRunningServer() { }
    [Fact(Skip = "deferred: requires running JetStream cluster")] // T:731
    public void JetStreamAtomicBatchPublishProposeOne_RequiresRunningServer() { }
    [Fact(Skip = "deferred: requires running JetStream cluster")] // T:732
    public void JetStreamAtomicBatchPublishProposeMultiple_RequiresRunningServer() { }
    [Fact(Skip = "deferred: requires running JetStream cluster")] // T:733
    public void JetStreamAtomicBatchPublishProposeOnePartialBatch_RequiresRunningServer() { }
    [Fact(Skip = "deferred: requires running JetStream cluster")] // T:734
    public void JetStreamAtomicBatchPublishProposeMultiplePartialBatches_RequiresRunningServer() { }
    [Fact(Skip = "deferred: requires running JetStream cluster")] // T:735
    public void JetStreamAtomicBatchPublishContinuousBatchesStillMoveAppliedUp_RequiresRunningServer() { }
    [Fact(Skip = "deferred: requires running JetStream cluster")] // T:736
    public void JetStreamAtomicBatchPublishPartiallyAppliedBatchOnRecovery_RequiresRunningServer() { }
    [Fact(Skip = "deferred: requires running JetStream cluster")] // T:737
    public void JetStreamRollupIsolatedRead_RequiresRunningServer() { }
    [Fact(Skip = "deferred: requires running JetStream cluster")] // T:738
    public void JetStreamAtomicBatchPublishAdvisories_RequiresRunningServer() { }
    [Fact(Skip = "deferred: requires running JetStream cluster")] // T:739
    public void JetStreamAtomicBatchPublishExpectedSeq_RequiresRunningServer() { }
    [Fact(Skip = "deferred: requires running JetStream cluster")] // T:740
    public void JetStreamAtomicBatchPublishPartialBatchInSharedAppendEntry_RequiresRunningServer() { }
    [Fact(Skip = "deferred: requires running JetStream cluster")] // T:741
    public void JetStreamAtomicBatchPublishRejectPartialBatchOnLeaderChange_RequiresRunningServer() { }
    [Fact(Skip = "deferred: requires running JetStream cluster")] // T:742
    public void JetStreamAtomicBatchPublishPersistModeAsync_RequiresRunningServer() { }
    [Fact(Skip = "deferred: requires running JetStream cluster")] // T:743
    public void JetStreamAtomicBatchPublishExpectedLastSubjectSequence_RequiresRunningServer() { }
    [Fact(Skip = "deferred: requires running JetStream cluster")] // T:744
    public void JetStreamAtomicBatchPublishCommitUnsupported_RequiresRunningServer() { }
}

View File

@@ -0,0 +1,50 @@
// Copyright 2020-2025 The NATS Authors
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Mirrors server/jetstream_errors_test.go in the NATS server Go source.
//
// All 4 tests are deferred:
// T:1381 — TestIsNatsErr: uses IsNatsErr(error, ...) where the Go version accepts
// arbitrary error interface values (including plain errors.New("x") which
// evaluates to false). The .NET JsApiErrors.IsNatsError only accepts JsApiError?
// and the "NewJS*" factory constructors (NewJSRestoreSubscribeFailedError etc.)
// that populate Description templates from tags have not been ported yet.
// T:1382 — TestApiError_Error: uses ApiErrors[JSClusterNotActiveErr].Error() — the Go
// ApiErrors map and per-error .Error() method (returns "description (errCode)")
// differs from the .NET JsApiErrors.ClusterNotActive.ToString() convention.
// T:1383 — TestApiError_NewWithTags: uses NewJSRestoreSubscribeFailedError with tag
// substitution — factory constructors not yet ported.
// T:1384 — TestApiError_NewWithUnless: uses NewJSStreamRestoreError, Unless() helper,
// NewJSPeerRemapError — not yet ported.
namespace ZB.MOM.NatsNet.Server.Tests.JetStream;
/// <summary>
/// Tests for JetStream API error types and IsNatsErr helper.
/// Mirrors server/jetstream_errors_test.go.
/// All tests deferred pending port of Go factory constructors and tag-substitution system.
/// </summary>
public sealed class JetStreamErrorsTests
{
    /// <summary>Mirrors TestIsNatsErr — deferred; see the file-header rationale for T:1381.</summary>
    [Fact(Skip = "deferred: NewJS* factory constructors and IsNatsErr(error) not yet ported")] // T:1381
    public void IsNatsErr_ShouldSucceed() { }
    /// <summary>Mirrors TestApiError_Error — deferred; see the file-header rationale for T:1382.</summary>
    [Fact(Skip = "deferred: ApiErrors map and .Error() method not yet ported")] // T:1382
    public void ApiError_Error_ShouldSucceed() { }
    /// <summary>Mirrors TestApiError_NewWithTags — deferred; see the file-header rationale for T:1383.</summary>
    [Fact(Skip = "deferred: NewJSRestoreSubscribeFailedError with tag substitution not yet ported")] // T:1383
    public void ApiError_NewWithTags_ShouldSucceed() { }
    /// <summary>Mirrors TestApiError_NewWithUnless — deferred; see the file-header rationale for T:1384.</summary>
    [Fact(Skip = "deferred: NewJSStreamRestoreError / Unless() helper not yet ported")] // T:1384
    public void ApiError_NewWithUnless_ShouldSucceed() { }
}

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,436 @@
// Copyright 2024-2025 The NATS Authors
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Mirrors server/jetstream_versioning_test.go in the NATS server Go source.
using Shouldly;
namespace ZB.MOM.NatsNet.Server.Tests.JetStream;
/// <summary>
/// Unit tests for JetStream API level versioning helpers.
/// Mirrors server/jetstream_versioning_test.go.
/// Tests 1803–1808 (TestJetStreamMetadataMutations, TestJetStreamMetadataStreamRestoreAndRestart,
/// TestJetStreamMetadataStreamRestoreAndRestartCluster, TestJetStreamApiErrorOnRequiredApiLevel,
/// TestJetStreamApiErrorOnRequiredApiLevelDirectGet, TestJetStreamApiErrorOnRequiredApiLevelPullConsumerNextMsg)
/// all require a running JetStream server and are deferred.
/// </summary>
public sealed class JetStreamVersioningTests
{
// -------------------------------------------------------------------------
// Helpers (mirrors module-level helpers in Go test file)
// -------------------------------------------------------------------------
/// <summary>Builds a metadata map whose required-API-level key holds <paramref name="featureLevel"/>.</summary>
private static Dictionary<string, string> MetadataAtLevel(string featureLevel)
{
    return new Dictionary<string, string>
    {
        [JetStreamVersioning.JsRequiredLevelMetadataKey] = featureLevel,
    };
}

/// <summary>Metadata map carrying the sentinel "previous-level" value used by the copy tests.</summary>
private static Dictionary<string, string> MetadataPrevious() => MetadataAtLevel("previous-level");
// -------------------------------------------------------------------------
// T:1791 — TestGetAndSupportsRequiredApiLevel
// -------------------------------------------------------------------------
/// <summary>
/// Table-driven check of GetRequiredApiLevel (raw metadata value or "") and
/// SupportsRequiredApiLevel (numeric levels up to the server's JsApiLevel are
/// supported; non-numeric values are not).
/// </summary>
[Fact] // T:1791
public void GetAndSupportsRequiredApiLevel_VariousInputs_ReturnsExpected()
{
    // getRequiredApiLevel: absent metadata yields "", otherwise the raw value.
    foreach (var (metadata, expected) in new (Dictionary<string, string>?, string)[]
    {
        (null, string.Empty),
        (new Dictionary<string, string>(), string.Empty),
        (MetadataAtLevel("1"), "1"),
        (MetadataAtLevel("text"), "text"),
    })
    {
        JetStreamVersioning.GetRequiredApiLevel(metadata).ShouldBe(expected);
    }

    // supportsRequiredApiLevel: missing/numeric-in-range levels pass, text fails.
    foreach (var (metadata, supported) in new (Dictionary<string, string>?, bool)[]
    {
        (null, true),
        (new Dictionary<string, string>(), true),
        (MetadataAtLevel("1"), true),
        (MetadataAtLevel(JetStreamVersioning.JsApiLevel.ToString()), true),
        (MetadataAtLevel("text"), false),
    })
    {
        JetStreamVersioning.SupportsRequiredApiLevel(metadata).ShouldBe(supported);
    }
}
// -------------------------------------------------------------------------
// T:1792 — TestJetStreamSetStaticStreamMetadata
// -------------------------------------------------------------------------
/// <summary>
/// SetStaticStreamMetadata must stamp the required API level implied by each
/// stream feature (overwriting any user-provided value) and must never stamp a
/// level above the server's own JsApiLevel.
/// </summary>
[Fact] // T:1792
public void SetStaticStreamMetadata_VariousConfigs_SetsCorrectApiLevel()
{
    var testCases = new (string Label, StreamConfig Config, string Level)[]
    {
        ("empty", new StreamConfig(), "0"),
        ("overwrite-user-provided", new StreamConfig { Metadata = MetadataPrevious() }, "0"),
        ("AllowMsgTTL", new StreamConfig { AllowMsgTTL = true }, "1"),
        ("SubjectDeleteMarkerTTL", new StreamConfig { SubjectDeleteMarkerTTL = TimeSpan.FromSeconds(1) }, "1"),
        ("AllowMsgCounter", new StreamConfig { AllowMsgCounter = true }, "2"),
        ("AllowAtomicPublish", new StreamConfig { AllowAtomicPublish = true }, "2"),
        ("AllowMsgSchedules", new StreamConfig { AllowMsgSchedules = true }, "2"),
        ("AsyncPersistMode", new StreamConfig { PersistMode = PersistModeType.AsyncPersistMode }, "2"),
    };
    foreach (var tc in testCases)
    {
        JetStreamVersioning.SetStaticStreamMetadata(tc.Config);
        var actual = tc.Config.Metadata![JetStreamVersioning.JsRequiredLevelMetadataKey];
        actual.ShouldBe(tc.Level, $"case: {tc.Label}");
        // The stamped level must never exceed what this server supports.
        int.Parse(actual).ShouldBeLessThanOrEqualTo(JetStreamVersioning.JsApiLevel,
            customMessage: $"case: {tc.Label}");
    }
}
// -------------------------------------------------------------------------
// T:1793 — TestJetStreamSetStaticStreamMetadataRemoveDynamicFields
// -------------------------------------------------------------------------
/// <summary>
/// SetStaticStreamMetadata must strip the dynamic (per-server) version/level
/// keys from the metadata, leaving only the static required-level key.
/// </summary>
[Fact] // T:1793
public void SetStaticStreamMetadata_RemovesDynamicFields()
{
    var metadata = new Dictionary<string, string>
    {
        [JetStreamVersioning.JsServerVersionMetadataKey] = "dynamic-version",
        [JetStreamVersioning.JsServerLevelMetadataKey] = "dynamic-level",
    };
    var config = new StreamConfig { Metadata = metadata };

    JetStreamVersioning.SetStaticStreamMetadata(config);

    config.Metadata.ShouldNotContainKey(JetStreamVersioning.JsServerVersionMetadataKey);
    config.Metadata.ShouldNotContainKey(JetStreamVersioning.JsServerLevelMetadataKey);
    config.Metadata[JetStreamVersioning.JsRequiredLevelMetadataKey].ShouldBe("0");
}
// -------------------------------------------------------------------------
// T:1794 — TestJetStreamSetDynamicStreamMetadata
// -------------------------------------------------------------------------
/// <summary>
/// SetDynamicStreamMetadata must return a copy carrying the server
/// version/level keys while leaving the input configuration untouched.
/// </summary>
[Fact] // T:1794
public void SetDynamicStreamMetadata_DoesNotMutateOriginal_AddsVersionFields()
{
    var original = new StreamConfig { Metadata = MetadataAtLevel("0") };

    var augmented = JetStreamVersioning.SetDynamicStreamMetadata(original);

    // The input config must not have gained the dynamic fields.
    original.Metadata.ShouldNotContainKey(JetStreamVersioning.JsServerVersionMetadataKey);
    original.Metadata.ShouldNotContainKey(JetStreamVersioning.JsServerLevelMetadataKey);
    // The returned copy carries the static key plus both dynamic fields.
    augmented.Metadata![JetStreamVersioning.JsRequiredLevelMetadataKey].ShouldBe("0");
    augmented.Metadata[JetStreamVersioning.JsServerVersionMetadataKey].ShouldBe(ServerConstants.Version);
    augmented.Metadata[JetStreamVersioning.JsServerLevelMetadataKey]
        .ShouldBe(JetStreamVersioning.JsApiLevel.ToString());
}
// -------------------------------------------------------------------------
// T:1795 — TestJetStreamCopyStreamMetadata
// -------------------------------------------------------------------------
[Fact] // T:1795
public void CopyStreamMetadata_VariousScenarios_CopiesRequiredLevelKey()
{
    static bool HasRequiredLevel(StreamConfig c) =>
        c.Metadata?.ContainsKey(JetStreamVersioning.JsRequiredLevelMetadataKey) == true;

    // no-previous-ignore: a null prevCfg leaves no required-level key behind.
    var cfg1 = new StreamConfig { Metadata = MetadataAtLevel("-1") };
    JetStreamVersioning.CopyStreamMetadata(cfg1, null);
    HasRequiredLevel(cfg1).ShouldBeFalse();

    // nil-previous-metadata-ignore: a prevCfg with null Metadata behaves the same.
    var cfg2 = new StreamConfig { Metadata = MetadataAtLevel("-1") };
    JetStreamVersioning.CopyStreamMetadata(cfg2, new StreamConfig { Metadata = null });
    HasRequiredLevel(cfg2).ShouldBeFalse();

    // nil-current-metadata-ignore: cfg with null Metadata must not throw; key is copied in.
    var cfg3 = new StreamConfig { Metadata = null };
    JetStreamVersioning.CopyStreamMetadata(cfg3, new StreamConfig { Metadata = MetadataPrevious() });
    cfg3.Metadata![JetStreamVersioning.JsRequiredLevelMetadataKey].ShouldBe("previous-level");

    // copy-previous: the key from prevCfg overwrites the one already in cfg.
    var cfg4 = new StreamConfig { Metadata = MetadataAtLevel("-1") };
    JetStreamVersioning.CopyStreamMetadata(cfg4, new StreamConfig { Metadata = MetadataPrevious() });
    cfg4.Metadata![JetStreamVersioning.JsRequiredLevelMetadataKey].ShouldBe("previous-level");

    // delete-missing-fields: an empty prev metadata dict removes the key from cfg.
    var cfg5 = new StreamConfig { Metadata = MetadataAtLevel("-1") };
    JetStreamVersioning.CopyStreamMetadata(cfg5, new StreamConfig { Metadata = new Dictionary<string, string>() });
    HasRequiredLevel(cfg5).ShouldBeFalse();
}
// -------------------------------------------------------------------------
// T:1796 — TestJetStreamCopyStreamMetadataRemoveDynamicFields
// -------------------------------------------------------------------------
[Fact] // T:1796
public void CopyStreamMetadata_RemovesDynamicFields()
{
    // Builds metadata containing only the dynamic (server-populated) keys.
    static Dictionary<string, string> DynamicOnlyMetadata() => new Dictionary<string, string>
    {
        [JetStreamVersioning.JsServerVersionMetadataKey] = "dynamic-version",
        [JetStreamVersioning.JsServerLevelMetadataKey] = "dynamic-level",
    };

    // With a null prevCfg every entry is removed, so the dictionary itself is nulled.
    var cfg = new StreamConfig { Metadata = DynamicOnlyMetadata() };
    JetStreamVersioning.CopyStreamMetadata(cfg, null);
    cfg.Metadata.ShouldBeNull();

    // With a prevCfg carrying a required level: dynamic keys removed, level copied over.
    var cfg2 = new StreamConfig { Metadata = DynamicOnlyMetadata() };
    var prev = new StreamConfig { Metadata = MetadataAtLevel("0") };
    JetStreamVersioning.CopyStreamMetadata(cfg2, prev);
    cfg2.Metadata.ShouldNotBeNull();
    cfg2.Metadata!.ShouldNotContainKey(JetStreamVersioning.JsServerVersionMetadataKey);
    cfg2.Metadata.ShouldNotContainKey(JetStreamVersioning.JsServerLevelMetadataKey);
    cfg2.Metadata[JetStreamVersioning.JsRequiredLevelMetadataKey].ShouldBe("0");
}
// -------------------------------------------------------------------------
// T:1797 — TestJetStreamSetStaticConsumerMetadata
// -------------------------------------------------------------------------
[Fact] // T:1797
public void SetStaticConsumerMetadata_VariousConfigs_SetsCorrectApiLevel()
{
    // Epoch + 1 second. NOTE(review): the Go reference presumably uses time.Unix(1, 0)
    // for this value — the old comment said Unix(0, 0), which would be the zero time;
    // confirm against the upstream test.
    var pauseUntil = new DateTime(1970, 1, 1, 0, 0, 1, DateTimeKind.Utc);
    var pauseUntilZero = default(DateTime);
    (string Desc, ConsumerConfig Cfg, string ExpectedLevel)[] cases =
    {
        ("empty", new ConsumerConfig(), "0"),
        ("overwrite-user-provided", new ConsumerConfig { Metadata = MetadataPrevious() }, "0"),
        ("PauseUntil/zero", new ConsumerConfig { PauseUntil = pauseUntilZero }, "0"),
        ("PauseUntil", new ConsumerConfig { PauseUntil = pauseUntil }, "1"),
        ("Pinned", new ConsumerConfig
        {
            PriorityPolicy = PriorityPolicy.PriorityPinnedClient,
            PriorityGroups = new[] { "a" },
        }, "1"),
    };
    foreach (var c in cases)
    {
        JetStreamVersioning.SetStaticConsumerMetadata(c.Cfg);
        var level = c.Cfg.Metadata![JetStreamVersioning.JsRequiredLevelMetadataKey];
        level.ShouldBe(c.ExpectedLevel, $"case: {c.Desc}");
        // A static required level may never exceed the server's own API level.
        int.Parse(level).ShouldBeLessThanOrEqualTo(JetStreamVersioning.JsApiLevel,
            customMessage: $"case: {c.Desc}");
    }
}
// -------------------------------------------------------------------------
// T:1797-extra — SetStaticConsumerMetadata_RequiresLevel1_ForPriorityFeatures
// Missing case: PriorityPolicy != PriorityNone with empty PriorityGroups should
// still require API level 1.
// -------------------------------------------------------------------------
[Theory]
[InlineData("PolicyOnly")]
public void SetStaticConsumerMetadata_RequiresLevel1_ForPriorityFeatures(string desc)
{
    // A non-None priority policy with no priority groups must still demand API level 1.
    var cfg = new ConsumerConfig { PriorityPolicy = PriorityPolicy.PriorityPinnedClient };

    JetStreamVersioning.SetStaticConsumerMetadata(cfg);

    var level = cfg.Metadata![JetStreamVersioning.JsRequiredLevelMetadataKey];
    level.ShouldBe("1", $"case: {desc}");
    // Required level must stay within the server's advertised API level.
    int.Parse(level).ShouldBeLessThanOrEqualTo(JetStreamVersioning.JsApiLevel,
        customMessage: $"case: {desc}");
}
// -------------------------------------------------------------------------
// T:1798 — TestJetStreamSetStaticConsumerMetadataRemoveDynamicFields
// -------------------------------------------------------------------------
[Fact] // T:1798
public void SetStaticConsumerMetadata_RemovesDynamicFields()
{
    // Arrange: seed only the dynamic (server-populated) metadata keys.
    var metadata = new Dictionary<string, string>();
    metadata[JetStreamVersioning.JsServerVersionMetadataKey] = "dynamic-version";
    metadata[JetStreamVersioning.JsServerLevelMetadataKey] = "dynamic-level";
    var cfg = new ConsumerConfig { Metadata = metadata };

    JetStreamVersioning.SetStaticConsumerMetadata(cfg);

    // Dynamic keys are stripped; the static required-level key is written as "0".
    cfg.Metadata.ShouldNotContainKey(JetStreamVersioning.JsServerVersionMetadataKey);
    cfg.Metadata.ShouldNotContainKey(JetStreamVersioning.JsServerLevelMetadataKey);
    cfg.Metadata[JetStreamVersioning.JsRequiredLevelMetadataKey].ShouldBe("0");
}
// -------------------------------------------------------------------------
// T:1799 — TestJetStreamSetDynamicConsumerMetadata
// -------------------------------------------------------------------------
[Fact] // T:1799
public void SetDynamicConsumerMetadata_DoesNotMutateOriginal_AddsVersionFields()
{
    var original = new ConsumerConfig { Metadata = MetadataAtLevel("0") };

    var copy = JetStreamVersioning.SetDynamicConsumerMetadata(original);

    // The input config is left untouched: no dynamic keys injected.
    original.Metadata.ShouldNotContainKey(JetStreamVersioning.JsServerVersionMetadataKey);
    original.Metadata.ShouldNotContainKey(JetStreamVersioning.JsServerLevelMetadataKey);
    // The returned copy carries both dynamic keys and preserves the required level.
    copy.Metadata![JetStreamVersioning.JsRequiredLevelMetadataKey].ShouldBe("0");
    copy.Metadata[JetStreamVersioning.JsServerVersionMetadataKey].ShouldBe(ServerConstants.Version);
    copy.Metadata[JetStreamVersioning.JsServerLevelMetadataKey].ShouldBe(JetStreamVersioning.JsApiLevel.ToString());
}
// -------------------------------------------------------------------------
// T:1800 — TestJetStreamSetDynamicConsumerInfoMetadata
// -------------------------------------------------------------------------
[Fact] // T:1800
public void SetDynamicConsumerInfoMetadata_DoesNotMutateOriginal_AddsVersionFields()
{
    var ci = new ConsumerInfo { Config = new ConsumerConfig { Metadata = MetadataAtLevel("0") } };

    var newCi = JetStreamVersioning.SetDynamicConsumerInfoMetadata(ci);

    // A distinct ConsumerInfo instance must come back, never the input itself.
    ReferenceEquals(ci, newCi).ShouldBeFalse();
    // The original's config is untouched: no dynamic keys injected.
    ci.Config!.Metadata.ShouldNotContainKey(JetStreamVersioning.JsServerVersionMetadataKey);
    ci.Config.Metadata.ShouldNotContainKey(JetStreamVersioning.JsServerLevelMetadataKey);
    // The copy's config gains both dynamic keys and keeps the required level.
    newCi.Config!.Metadata![JetStreamVersioning.JsRequiredLevelMetadataKey].ShouldBe("0");
    newCi.Config.Metadata[JetStreamVersioning.JsServerVersionMetadataKey].ShouldBe(ServerConstants.Version);
    newCi.Config.Metadata[JetStreamVersioning.JsServerLevelMetadataKey].ShouldBe(JetStreamVersioning.JsApiLevel.ToString());
}
// -------------------------------------------------------------------------
// T:1801 — TestJetStreamCopyConsumerMetadata
// -------------------------------------------------------------------------
[Fact] // T:1801
public void CopyConsumerMetadata_VariousScenarios_CopiesRequiredLevelKey()
{
    static bool HasRequiredLevel(ConsumerConfig c) =>
        c.Metadata?.ContainsKey(JetStreamVersioning.JsRequiredLevelMetadataKey) == true;

    // no-previous-ignore: a null prevCfg leaves no required-level key behind.
    var cfg1 = new ConsumerConfig { Metadata = MetadataAtLevel("-1") };
    JetStreamVersioning.CopyConsumerMetadata(cfg1, null);
    HasRequiredLevel(cfg1).ShouldBeFalse();

    // nil-previous-metadata-ignore: a prevCfg with null Metadata behaves the same.
    var cfg2 = new ConsumerConfig { Metadata = MetadataAtLevel("-1") };
    JetStreamVersioning.CopyConsumerMetadata(cfg2, new ConsumerConfig { Metadata = null });
    HasRequiredLevel(cfg2).ShouldBeFalse();

    // nil-current-metadata-ignore: cfg with null Metadata must not throw; key is copied in.
    var cfg3 = new ConsumerConfig { Metadata = null };
    JetStreamVersioning.CopyConsumerMetadata(cfg3, new ConsumerConfig { Metadata = MetadataPrevious() });
    cfg3.Metadata![JetStreamVersioning.JsRequiredLevelMetadataKey].ShouldBe("previous-level");

    // copy-previous: the key from prevCfg overwrites the one already in cfg.
    var cfg4 = new ConsumerConfig { Metadata = MetadataAtLevel("-1") };
    JetStreamVersioning.CopyConsumerMetadata(cfg4, new ConsumerConfig { Metadata = MetadataPrevious() });
    cfg4.Metadata![JetStreamVersioning.JsRequiredLevelMetadataKey].ShouldBe("previous-level");

    // delete-missing-fields: an empty prev metadata dict removes the key from cfg.
    var cfg5 = new ConsumerConfig { Metadata = MetadataAtLevel("-1") };
    JetStreamVersioning.CopyConsumerMetadata(cfg5,
        new ConsumerConfig { Metadata = new Dictionary<string, string>() });
    HasRequiredLevel(cfg5).ShouldBeFalse();
}
// -------------------------------------------------------------------------
// T:1802 — TestJetStreamCopyConsumerMetadataRemoveDynamicFields
// -------------------------------------------------------------------------
[Fact] // T:1802
public void CopyConsumerMetadata_RemovesDynamicFields()
{
    // Builds metadata containing only the dynamic (server-populated) keys.
    static Dictionary<string, string> DynamicOnlyMetadata() => new Dictionary<string, string>
    {
        [JetStreamVersioning.JsServerVersionMetadataKey] = "dynamic-version",
        [JetStreamVersioning.JsServerLevelMetadataKey] = "dynamic-level",
    };

    // With a null prevCfg every entry is removed, so the dictionary itself is nulled.
    var cfg = new ConsumerConfig { Metadata = DynamicOnlyMetadata() };
    JetStreamVersioning.CopyConsumerMetadata(cfg, null);
    cfg.Metadata.ShouldBeNull();

    // With a prevCfg carrying a required level: dynamic keys removed, level copied over.
    var cfg2 = new ConsumerConfig { Metadata = DynamicOnlyMetadata() };
    var prev = new ConsumerConfig { Metadata = MetadataAtLevel("0") };
    JetStreamVersioning.CopyConsumerMetadata(cfg2, prev);
    cfg2.Metadata.ShouldNotBeNull();
    cfg2.Metadata!.ShouldNotContainKey(JetStreamVersioning.JsServerVersionMetadataKey);
    cfg2.Metadata.ShouldNotContainKey(JetStreamVersioning.JsServerLevelMetadataKey);
    cfg2.Metadata[JetStreamVersioning.JsRequiredLevelMetadataKey].ShouldBe("0");
}
// -------------------------------------------------------------------------
// T:1803 — TestJetStreamMetadataMutations — deferred: requires RunBasicJetStreamServer
// -------------------------------------------------------------------------
// Deferred port of Go TestJetStreamMetadataMutations: needs RunBasicJetStreamServer-style infrastructure.
[Fact(Skip = "deferred: requires running JetStream server")] // T:1803
public void JetStreamMetadataMutations_RequiresRunningServer() { }
// -------------------------------------------------------------------------
// T:1804 — TestJetStreamMetadataStreamRestoreAndRestart — deferred
// -------------------------------------------------------------------------
// Deferred port of Go TestJetStreamMetadataStreamRestoreAndRestart: needs a running server.
[Fact(Skip = "deferred: requires running JetStream server")] // T:1804
public void JetStreamMetadataStreamRestoreAndRestart_RequiresRunningServer() { }
// -------------------------------------------------------------------------
// T:1805 — TestJetStreamMetadataStreamRestoreAndRestartCluster — deferred
// -------------------------------------------------------------------------
// Deferred port of Go TestJetStreamMetadataStreamRestoreAndRestartCluster: needs a running cluster.
[Fact(Skip = "deferred: requires running JetStream cluster")] // T:1805
public void JetStreamMetadataStreamRestoreAndRestartCluster_RequiresRunningServer() { }
// -------------------------------------------------------------------------
// T:1806 — TestJetStreamApiErrorOnRequiredApiLevel — deferred
// -------------------------------------------------------------------------
// Deferred port of Go TestJetStreamApiErrorOnRequiredApiLevel: needs a running server.
[Fact(Skip = "deferred: requires running JetStream server")] // T:1806
public void JetStreamApiErrorOnRequiredApiLevel_RequiresRunningServer() { }
// -------------------------------------------------------------------------
// T:1807 — TestJetStreamApiErrorOnRequiredApiLevelDirectGet — deferred
// -------------------------------------------------------------------------
// Deferred port of Go TestJetStreamApiErrorOnRequiredApiLevelDirectGet: needs a running server.
[Fact(Skip = "deferred: requires running JetStream server")] // T:1807
public void JetStreamApiErrorOnRequiredApiLevelDirectGet_RequiresRunningServer() { }
// -------------------------------------------------------------------------
// T:1808 — TestJetStreamApiErrorOnRequiredApiLevelPullConsumerNextMsg — deferred
// -------------------------------------------------------------------------
// Deferred port of Go TestJetStreamApiErrorOnRequiredApiLevelPullConsumerNextMsg: needs a running server.
[Fact(Skip = "deferred: requires running JetStream server")] // T:1808
public void JetStreamApiErrorOnRequiredApiLevelPullConsumerNextMsg_RequiresRunningServer() { }
}

View File

@@ -0,0 +1,575 @@
// Copyright 2012-2026 The NATS Authors
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Mirrors server/store_test.go (MemStore permutation only; file store permutations deferred).
using System.Text;
using Shouldly;
using ZB.MOM.NatsNet.Server;
namespace ZB.MOM.NatsNet.Server.Tests.JetStream;
/// <summary>
/// Unit tests for IStreamStore contract, exercised against JetStreamMemStore.
/// Mirrors server/store_test.go (memory permutations only).
/// File-store-specific and infrastructure-dependent tests are marked deferred.
/// </summary>
public class StorageEngineTests
{
// -----------------------------------------------------------------------
// Helpers
// -----------------------------------------------------------------------
// Creates an in-memory store for cfg; every test in this class exercises the
// MemStore permutation only (file store permutations are deferred).
private static JetStreamMemStore NewMemStore(StreamConfig cfg)
{
    cfg.Storage = StorageType.MemoryStorage;
    var store = new JetStreamMemStore(cfg);
    return store;
}
private static byte[] Bytes(string s) => Encoding.UTF8.GetBytes(s);
// -----------------------------------------------------------------------
// TestStoreDeleteSlice (T:2943)
// -----------------------------------------------------------------------
[Fact] // T:2943
public void StoreDeleteSlice_ShouldSucceed()
{
    // Reference: golang/nats-server/server/store_test.go:TestStoreDeleteSlice line 147
    var slice = new DeleteSlice(new ulong[] { 2 });

    // Range must visit exactly the single deleted sequence.
    var visited = new List<ulong>();
    slice.Range(seq =>
    {
        visited.Add(seq);
        return true;
    });
    visited.Count.ShouldBe(1);
    visited[0].ShouldBe(2UL);

    // GetState reports that sequence as both first and last, with a count of one.
    var (first, last, num) = slice.GetState();
    first.ShouldBe(2UL);
    last.ShouldBe(2UL);
    num.ShouldBe(1UL);
}
// -----------------------------------------------------------------------
// TestStoreDeleteRange (T:2944)
// -----------------------------------------------------------------------
[Fact] // T:2944
public void StoreDeleteRange_ShouldSucceed()
{
    // Reference: golang/nats-server/server/store_test.go:TestStoreDeleteRange line 163
    var range = new DeleteRange { First = 2, Num = 1 };

    // Range must visit exactly the single deleted sequence.
    var visited = new List<ulong>();
    range.Range(seq =>
    {
        visited.Add(seq);
        return true;
    });
    visited.Count.ShouldBe(1);
    visited[0].ShouldBe(2UL);

    // GetState reports that sequence as both first and last, with a count of one.
    var (first, last, num) = range.GetState();
    first.ShouldBe(2UL);
    last.ShouldBe(2UL);
    num.ShouldBe(1UL);
}
// -----------------------------------------------------------------------
// TestStoreSubjectStateConsistency (T:2945) — MemStore permutation only
// -----------------------------------------------------------------------
[Fact] // T:2945
public void StoreSubjectStateConsistency_ShouldSucceed()
{
    // Reference: golang/nats-server/server/store_test.go:TestStoreSubjectStateConsistency line 179
    // Fix: removed two never-invoked local functions (ExpectFirstSeq/ExpectLastSeq)
    // left over from the port; ExpectFirstSeq dereferenced a `.Sm` member on the
    // LoadNextMsg result, which every other call site treats as a (sm, seq) tuple.
    var fs = NewMemStore(new StreamConfig { Name = "TEST", Subjects = new[] { "foo" } });
    // Per-subject state for "foo", or an empty state when the subject holds no messages.
    SimpleState GetSubjectState()
    {
        var ss = fs.SubjectsState("foo");
        ss.TryGetValue("foo", out var result);
        return result ?? new SimpleState();
    }
    // Reusable message buffer handed to LoadNextMsg/LoadLastMsg.
    var smp = new StoreMsg();
    // Publish 4 messages.
    for (var i = 0; i < 4; i++)
        fs.StoreMsg("foo", null, null, 0);
    var ss = GetSubjectState();
    ss.Msgs.ShouldBe(4UL);
    ss.First.ShouldBe(1UL);
    ss.Last.ShouldBe(4UL);
    // Verify first/last via LoadNextMsg / LoadLastMsg.
    var (firstSm, firstSeq) = fs.LoadNextMsg("foo", false, 0, smp);
    firstSm.ShouldNotBeNull();
    firstSeq.ShouldBe(1UL);
    var lastSm = fs.LoadLastMsg("foo", smp);
    lastSm!.Seq.ShouldBe(4UL);
    // Remove first message.
    var (removed, _) = fs.RemoveMsg(1);
    removed.ShouldBeTrue();
    ss = GetSubjectState();
    ss.Msgs.ShouldBe(3UL);
    ss.First.ShouldBe(2UL);
    ss.Last.ShouldBe(4UL);
    (firstSm, firstSeq) = fs.LoadNextMsg("foo", false, 0, smp);
    firstSm.ShouldNotBeNull();
    firstSeq.ShouldBe(2UL);
    lastSm = fs.LoadLastMsg("foo", smp);
    lastSm!.Seq.ShouldBe(4UL);
    // Remove last message.
    (removed, _) = fs.RemoveMsg(4);
    removed.ShouldBeTrue();
    ss = GetSubjectState();
    ss.Msgs.ShouldBe(2UL);
    ss.First.ShouldBe(2UL);
    ss.Last.ShouldBe(3UL);
    (firstSm, firstSeq) = fs.LoadNextMsg("foo", false, 0, smp);
    firstSm.ShouldNotBeNull();
    firstSeq.ShouldBe(2UL);
    lastSm = fs.LoadLastMsg("foo", smp);
    lastSm!.Seq.ShouldBe(3UL);
    // Remove seq 2 — seq 3 becomes both first and last.
    (removed, _) = fs.RemoveMsg(2);
    removed.ShouldBeTrue();
    ss = GetSubjectState();
    ss.Msgs.ShouldBe(1UL);
    ss.First.ShouldBe(3UL);
    ss.Last.ShouldBe(3UL);
    (firstSm, firstSeq) = fs.LoadNextMsg("foo", false, 0, smp);
    firstSm.ShouldNotBeNull();
    firstSeq.ShouldBe(3UL);
    lastSm = fs.LoadLastMsg("foo", smp);
    lastSm!.Seq.ShouldBe(3UL);
    // Publish 3 more (seqs 5, 6, 7).
    for (var i = 0; i < 3; i++)
        fs.StoreMsg("foo", null, null, 0);
    ss = GetSubjectState();
    ss.Msgs.ShouldBe(4UL);
    ss.First.ShouldBe(3UL);
    ss.Last.ShouldBe(7UL);
    // Remove seq 7 and seq 3.
    (removed, _) = fs.RemoveMsg(7);
    removed.ShouldBeTrue();
    (removed, _) = fs.RemoveMsg(3);
    removed.ShouldBeTrue();
    // Remove seq 5 (the now-first) — only seq 6 remains.
    (removed, _) = fs.RemoveMsg(5);
    removed.ShouldBeTrue();
    ss = GetSubjectState();
    ss.Msgs.ShouldBe(1UL);
    ss.First.ShouldBe(6UL);
    ss.Last.ShouldBe(6UL);
    (firstSm, firstSeq) = fs.LoadNextMsg("foo", false, 0, smp);
    firstSm.ShouldNotBeNull();
    firstSeq.ShouldBe(6UL);
    lastSm = fs.LoadLastMsg("foo", smp);
    lastSm!.Seq.ShouldBe(6UL);
    // Store + immediately remove seq 8, then store seq 9.
    fs.StoreMsg("foo", null, null, 0);
    (removed, _) = fs.RemoveMsg(8);
    removed.ShouldBeTrue();
    fs.StoreMsg("foo", null, null, 0);
    ss = GetSubjectState();
    ss.Msgs.ShouldBe(2UL);
    ss.First.ShouldBe(6UL);
    ss.Last.ShouldBe(9UL);
    (firstSm, firstSeq) = fs.LoadNextMsg("foo", false, 0, smp);
    firstSm.ShouldNotBeNull();
    firstSeq.ShouldBe(6UL);
    lastSm = fs.LoadLastMsg("foo", smp);
    lastSm!.Seq.ShouldBe(9UL);
    fs.Stop();
}
// -----------------------------------------------------------------------
// TestStoreMaxMsgsPerUpdateBug (T:2947) — MemStore permutation only
// -----------------------------------------------------------------------
[Fact] // T:2947
public void StoreMaxMsgsPerUpdateBug_ShouldSucceed()
{
    // Reference: golang/nats-server/server/store_test.go:TestStoreMaxMsgsPerUpdateBug line 405
    var cfg = new StreamConfig
    {
        Name = "TEST",
        Subjects = new[] { "foo" },
        MaxMsgsPer = 0,
    };
    var store = NewMemStore(cfg);
    for (var n = 0; n < 5; n++)
        store.StoreMsg("foo", null, null, 0);

    var state = store.State();
    state.Msgs.ShouldBe(5UL);
    state.FirstSeq.ShouldBe(1UL);
    state.LastSeq.ShouldBe(5UL);

    // Tighten per-subject limit from 0 (unlimited) to 1: only the newest message survives.
    cfg.MaxMsgsPer = 1;
    store.UpdateConfig(cfg);
    state = store.State();
    state.Msgs.ShouldBe(1UL);
    state.FirstSeq.ShouldBe(5UL);
    state.LastSeq.ShouldBe(5UL);

    // An invalid value (< -1) is clamped to -1 in the caller's config by UpdateConfig.
    cfg.MaxMsgsPer = -2;
    store.UpdateConfig(cfg);
    cfg.MaxMsgsPer.ShouldBe(-1L);
    store.Stop();
}
// -----------------------------------------------------------------------
// TestStoreCompactCleansUpDmap (T:2948) — MemStore permutation only
// -----------------------------------------------------------------------
[Fact] // T:2948
public void StoreCompactCleansUpDmap_ShouldSucceed()
{
    // Reference: golang/nats-server/server/store_test.go:TestStoreCompactCleansUpDmap line 449
    // Exercise compact sequences 2, 3 and 4.
    foreach (var cseq in new ulong[] { 2, 3, 4 })
    {
        var store = NewMemStore(new StreamConfig
        {
            Name = "TEST",
            Subjects = new[] { "foo" },
            MaxMsgsPer = 0,
        });
        // Three messages, then delete the middle one to create an interior delete.
        for (var n = 0; n < 3; n++)
            store.StoreMsg("foo", null, null, 0);
        var (removed, _) = store.RemoveMsg(2);
        removed.ShouldBeTrue();
        // The delete map now tracks exactly one sequence (seq 2).
        var state = store.State();
        state.NumDeleted.ShouldBe(1);
        // Compacting must clear the interior delete from the dmap.
        var (_, err) = store.Compact(cseq);
        err.ShouldBeNull();
        state = store.State();
        state.NumDeleted.ShouldBe(0);
        // First seq never drops below 3: seq 1 is compacted away and seq 2 was deleted.
        state.FirstSeq.ShouldBe(Math.Max(3UL, cseq));
        state.LastSeq.ShouldBe(3UL);
        store.Stop();
    }
}
// -----------------------------------------------------------------------
// TestStoreTruncateCleansUpDmap (T:2949) — MemStore permutation only
// -----------------------------------------------------------------------
[Fact] // T:2949
public void StoreTruncateCleansUpDmap_ShouldSucceed()
{
    // Reference: golang/nats-server/server/store_test.go:TestStoreTruncateCleansUpDmap line 500
    // Exercise truncate sequences 0 and 1.
    foreach (var tseq in new ulong[] { 0, 1 })
    {
        var store = NewMemStore(new StreamConfig
        {
            Name = "TEST",
            Subjects = new[] { "foo" },
            MaxMsgsPer = 0,
        });
        // Three messages, then delete the middle one to create an interior delete.
        for (var n = 0; n < 3; n++)
            store.StoreMsg("foo", null, null, 0);
        var (removed, _) = store.RemoveMsg(2);
        removed.ShouldBeTrue();
        var state = store.State();
        state.NumDeleted.ShouldBe(1);
        // Truncating behind the interior delete must clear it from the dmap.
        store.Truncate(tseq);
        state = store.State();
        state.NumDeleted.ShouldBe(0);
        state.FirstSeq.ShouldBe(Math.Min(1UL, tseq));
        state.LastSeq.ShouldBe(tseq);
        store.Stop();
    }
}
// -----------------------------------------------------------------------
// TestStorePurgeExZero (T:2950) — MemStore permutation only
// -----------------------------------------------------------------------
[Fact] // T:2950
public void StorePurgeExZero_ShouldSucceed()
{
    // Reference: golang/nats-server/server/store_test.go:TestStorePurgeExZero line 552
    var store = NewMemStore(new StreamConfig { Name = "TEST", Subjects = new[] { "foo" } });

    // Plain Purge() on an empty stream: first advances to 1 while last stays 0.
    var (_, purgeErr) = store.Purge();
    purgeErr.ShouldBeNull();
    var state = store.State();
    state.FirstSeq.ShouldBe(1UL);
    state.LastSeq.ShouldBe(0UL);

    // PurgeEx with seq=0 must behave exactly like Purge().
    var (_, purgeExErr) = store.PurgeEx(string.Empty, 0, 0);
    purgeExErr.ShouldBeNull();
    state = store.State();
    state.FirstSeq.ShouldBe(1UL);
    state.LastSeq.ShouldBe(0UL);
    store.Stop();
}
// -----------------------------------------------------------------------
// TestStoreGetSeqFromTimeWithInteriorDeletesGap (T:2955) — MemStore permutation only
// -----------------------------------------------------------------------
[Fact] // T:2955
public void StoreGetSeqFromTimeWithInteriorDeletesGap_ShouldSucceed()
{
    // Reference: golang/nats-server/server/store_test.go:TestStoreGetSeqFromTimeWithInteriorDeletesGap line 874
    // Go captures the store timestamp of seq 2 and queries with time.Unix(0, start).
    var store = NewMemStore(new StreamConfig { Name = "zzz", Subjects = new[] { "foo" } });
    long start = 0;
    for (var n = 0; n < 10; n++)
    {
        var (_, ts) = store.StoreMsg("foo", null, null, 0);
        if (n == 1)
            start = ts; // exact timestamp of seq 2
    }
    // Interior delete gap covering seqs 4-7.
    for (var seq = 4UL; seq <= 7UL; seq++)
        store.RemoveMsg(seq);
    // NOTE(review): store timestamps appear to be Unix nanoseconds (StoreDiscardNew
    // builds ts as milliseconds * 1e6), so dividing by 100 yields .NET ticks; the
    // epoch offset converts that to an absolute UTC DateTime — confirm the unit.
    const long UnixEpochTicks = 621355968000000000L;
    var queryTime = new DateTime(start / 100L + UnixEpochTicks, DateTimeKind.Utc);
    store.GetSeqFromTime(queryTime).ShouldBe(2UL);
    store.Stop();
}
// -----------------------------------------------------------------------
// TestStoreGetSeqFromTimeWithTrailingDeletes (T:2956) — MemStore permutation only
// -----------------------------------------------------------------------
[Fact] // T:2956
public void StoreGetSeqFromTimeWithTrailingDeletes_ShouldSucceed()
{
    // Reference: golang/nats-server/server/store_test.go:TestStoreGetSeqFromTimeWithTrailingDeletes line 900
    // Go captures the store timestamp of seq 2 and queries with time.Unix(0, start).
    var store = NewMemStore(new StreamConfig { Name = "zzz", Subjects = new[] { "foo" } });
    long start = 0;
    for (var n = 0; n < 3; n++)
    {
        var (_, ts) = store.StoreMsg("foo", null, null, 0);
        if (n == 1)
            start = ts; // exact timestamp of seq 2
    }
    // Trailing delete: remove the final message.
    store.RemoveMsg(3);
    // NOTE(review): store timestamps appear to be Unix nanoseconds (StoreDiscardNew
    // builds ts as milliseconds * 1e6), so dividing by 100 yields .NET ticks; the
    // epoch offset converts that to an absolute UTC DateTime — confirm the unit.
    const long UnixEpochTicks = 621355968000000000L;
    var queryTime = new DateTime(start / 100L + UnixEpochTicks, DateTimeKind.Utc);
    store.GetSeqFromTime(queryTime).ShouldBe(2UL);
    store.Stop();
}
// -----------------------------------------------------------------------
// TestFileStoreMultiLastSeqsAndLoadLastMsgWithLazySubjectState (T:2957)
// MemStore permutation only
// -----------------------------------------------------------------------
[Fact] // T:2957
public void FileStoreMultiLastSeqsAndLoadLastMsgWithLazySubjectState_ShouldSucceed()
{
    // Reference: golang/nats-server/server/store_test.go:TestFileStoreMultiLastSeqsAndLoadLastMsgWithLazySubjectState line 921
    var store = NewMemStore(new StreamConfig { Name = "zzz", Subjects = new[] { "foo" } });
    for (var n = 0; n < 3; n++)
        store.StoreMsg("foo", null, null, 0);

    // Last sequence for "foo" is 3.
    var (seqs, err) = store.MultiLastSeqs(new[] { "foo" }, 0, 0);
    err.ShouldBeNull();
    seqs!.Length.ShouldBe(1);
    seqs![0].ShouldBe(3UL);

    // Removing the tail message makes seq 2 the new last.
    var (removed, _) = store.RemoveMsg(3);
    removed.ShouldBeTrue();
    (seqs, err) = store.MultiLastSeqs(new[] { "foo" }, 0, 0);
    err.ShouldBeNull();
    seqs!.Length.ShouldBe(1);
    seqs![0].ShouldBe(2UL);

    // A new publish becomes seq 4 and is the last message.
    store.StoreMsg("foo", null, null, 0);
    var sm = store.LoadLastMsg("foo", null);
    sm.ShouldNotBeNull();
    sm!.Seq.ShouldBe(4UL);

    // Removing seq 4 falls back to seq 2 (seq 3 was removed earlier).
    (removed, _) = store.RemoveMsg(4);
    removed.ShouldBeTrue();
    sm = store.LoadLastMsg("foo", null);
    sm.ShouldNotBeNull();
    sm!.Seq.ShouldBe(2UL);
    store.Stop();
}
// -----------------------------------------------------------------------
// TestStoreDiscardNew (T:2954) — MemStore permutation only
// -----------------------------------------------------------------------
[Fact] // T:2954
public void StoreDiscardNew_ShouldSucceed()
{
    // Reference: golang/nats-server/server/store_test.go:TestStoreDiscardNew line 788
    // Helper that runs the discard-new test for a given config modifier.
    // NOTE(review): expectedErr only selects throw-vs-success; the thrown exception's
    // identity is never compared against expectedErr — confirm whether the Go
    // reference asserts the specific error.
    void Test(Action<StreamConfig> updateConfig, Exception? expectedErr)
    {
        var cfg = new StreamConfig
        {
            Name = "zzz",
            Subjects = new[] { "foo" },
            Discard = DiscardPolicy.DiscardNew,
        };
        updateConfig(cfg);
        cfg.Storage = StorageType.MemoryStorage;
        var fs = new JetStreamMemStore(cfg);
        // Unix nanoseconds (milliseconds * 1e6) used as the raw store timestamp.
        var ts = DateTimeOffset.UtcNow.ToUnixTimeMilliseconds() * 1_000_000L;
        var expectedSeq = 1UL;
        // Asserts the store holds exactly one message at expectedSeq.
        void RequireState()
        {
            var state = fs.State();
            state.Msgs.ShouldBe(1UL);
            state.FirstSeq.ShouldBe(expectedSeq);
            state.LastSeq.ShouldBe(expectedSeq);
        }
        fs.StoreMsg("foo", null, null, 0);
        // StoreRawMsg with discardNewCheck=true: honored only when no limit blocks it.
        if (expectedErr == null)
        {
            fs.StoreRawMsg("foo", null, null, 0, ts, 0, true);
            expectedSeq++;
        }
        else
        {
            Should.Throw<Exception>(() => fs.StoreRawMsg("foo", null, null, 0, ts, 0, true));
        }
        RequireState();
        // StoreRawMsg with discardNewCheck=false (followers must always accept)
        fs.StoreRawMsg("foo", null, null, 0, ts, 0, false);
        expectedSeq++;
        // For MaxMsgsPer we stay at 1 msg; otherwise 2 msgs
        if (cfg.MaxMsgsPer > 0)
        {
            RequireState();
        }
        else
        {
            var state = fs.State();
            state.Msgs.ShouldBe(2UL);
            state.FirstSeq.ShouldBe(expectedSeq - 1);
            state.LastSeq.ShouldBe(expectedSeq);
        }
        fs.Stop();
    }
    // Each case: config mutation + whether the discard-new check should reject.
    Test(cfg => cfg.MaxMsgs = 1, StoreErrors.ErrMaxMsgs);
    Test(cfg => cfg.MaxBytes = 33, StoreErrors.ErrMaxBytes);
    Test(cfg => cfg.MaxMsgsPer = 1, null);
    Test(cfg => { cfg.DiscardNewPer = true; cfg.MaxMsgsPer = 1; }, StoreErrors.ErrMaxMsgsPerSubject);
    Test(cfg => { cfg.MaxMsgs = 1; cfg.MaxMsgsPer = 1; }, null);
    Test(cfg => { cfg.MaxBytes = 33; cfg.MaxMsgsPer = 1; }, null);
    Test(cfg => { cfg.DiscardNewPer = true; cfg.MaxMsgs = 1; cfg.MaxMsgsPer = 1; }, StoreErrors.ErrMaxMsgsPerSubject);
    Test(cfg => { cfg.DiscardNewPer = true; cfg.MaxBytes = 33; cfg.MaxMsgsPer = 1; }, StoreErrors.ErrMaxMsgsPerSubject);
}
}

View File

@@ -3,6 +3,7 @@
using Shouldly;
using ZB.MOM.NatsNet.Server;
using ZB.MOM.NatsNet.Server.Config;
namespace ZB.MOM.NatsNet.Server.Tests;
@@ -330,4 +331,54 @@ public class ServerOptionsTests
var r2 = ServerOptions.MergeOptions(null, flagOpts);
r2.Port.ShouldBe(5678);
}
/// <summary>
/// Mirrors TestListenMonitoringDefault — when Host is set without HTTPHost,
/// SetBaselineOptions should copy Host to HTTPHost.
/// </summary>
[Fact] // T:2524
public void ListenMonitoringDefault_ShouldSetHttpHostToHost()
{
    const string host = "10.0.1.22";
    var opts = new ServerOptions { Host = host };

    opts.SetBaselineOptions();

    // Host is preserved, mirrored into HttpHost, and the port takes its default.
    opts.Host.ShouldBe(host);
    opts.HttpHost.ShouldBe(host);
    opts.Port.ShouldBe(ServerConstants.DefaultPort);
}
/// <summary>
/// Mirrors TestGetStorageSize — StorageSizeJsonConverter.Parse converts K/M/G/T suffixes
/// and returns 0 for empty input; invalid suffixes throw.
/// </summary>
[Fact] // T:2576
public void GetStorageSize_ShouldParseSuffixes()
{
    // Each suffix multiplies by successive powers of 1024; empty input yields 0.
    var expectations = new (string Input, long Expected)[]
    {
        ("1K", 1024L),
        ("1M", 1048576L),
        ("1G", 1073741824L),
        ("1T", 1099511627776L),
        ("", 0L),
    };
    foreach (var (input, expected) in expectations)
        StorageSizeJsonConverter.Parse(input).ShouldBe(expected);

    // Unknown suffixes are rejected.
    Should.Throw<FormatException>(() => StorageSizeJsonConverter.Parse("1L"));
    Should.Throw<FormatException>(() => StorageSizeJsonConverter.Parse("TT"));
}
/// <summary>
/// Mirrors TestClusterNameAndGatewayNameConflict — when Cluster.Name != Gateway.Name,
/// ValidateOptions should return ErrClusterNameConfigConflict.
/// </summary>
[Fact] // T:2571
public void ClusterNameAndGatewayNameConflict_ShouldReturnConflictError()
{
    // Port -1 disables listening so validation focuses purely on the name conflict.
    var cluster = new ClusterOpts { Name = "A", Port = -1 };
    var gateway = new GatewayOpts { Name = "B", Port = -1 };
    var opts = new ServerOptions { Cluster = cluster, Gateway = gateway };

    var err = NatsServer.ValidateOptions(opts);

    err.ShouldNotBeNull();
    err.ShouldBe(ServerErrors.ErrClusterNameConfigConflict);
}
}

View File

@@ -1,10 +0,0 @@
namespace ZB.MOM.NatsNet.Server.Tests;
public class UnitTest1
{
    // Template placeholder generated by `dotnet new xunit`; it carries no
    // assertions and only proves the test project builds and executes.
    [Fact]
    public void Test1()
    {
    }
}

View File

@@ -33,7 +33,7 @@ CREATE TABLE IF NOT EXISTS features (
go_line_number INTEGER,
go_line_count INTEGER,
status TEXT NOT NULL DEFAULT 'not_started'
CHECK (status IN ('not_started', 'stub', 'complete', 'verified', 'n_a')),
CHECK (status IN ('not_started', 'stub', 'complete', 'verified', 'n_a', 'deferred')),
dotnet_project TEXT,
dotnet_class TEXT,
dotnet_method TEXT,
@@ -54,7 +54,7 @@ CREATE TABLE IF NOT EXISTS unit_tests (
go_line_number INTEGER,
go_line_count INTEGER,
status TEXT NOT NULL DEFAULT 'not_started'
CHECK (status IN ('not_started', 'stub', 'complete', 'verified', 'n_a')),
CHECK (status IN ('not_started', 'stub', 'complete', 'verified', 'n_a', 'deferred')),
dotnet_project TEXT,
dotnet_class TEXT,
dotnet_method TEXT,

Binary file not shown.

View File

@@ -1,29 +1,27 @@
# NATS .NET Porting Status Report
Generated: 2026-02-26 22:49:14 UTC
Generated: 2026-02-27 09:45:24 UTC
## Modules (12 total)
| Status | Count |
|--------|-------|
| complete | 11 |
| not_started | 1 |
| verified | 12 |
## Features (3673 total)
| Status | Count |
|--------|-------|
| complete | 3596 |
| n_a | 77 |
| deferred | 3394 |
| verified | 279 |
## Unit Tests (3257 total)
| Status | Count |
|--------|-------|
| complete | 319 |
| n_a | 181 |
| not_started | 2533 |
| stub | 224 |
| deferred | 2680 |
| n_a | 187 |
| verified | 390 |
## Library Mappings (36 total)
@@ -34,4 +32,4 @@ Generated: 2026-02-26 22:49:14 UTC
## Overall Progress
**4184/6942 items complete (60.3%)**
**868/6942 items complete (12.5%)**

39
reports/report_0862038.md Normal file
View File

@@ -0,0 +1,39 @@
# NATS .NET Porting Status Report
Generated: 2026-02-26 23:53:55 UTC
## Modules (12 total)
| Status | Count |
|--------|-------|
| not_started | 1 |
| verified | 11 |
## Features (3673 total)
| Status | Count |
|--------|-------|
| complete | 3368 |
| n_a | 26 |
| verified | 279 |
## Unit Tests (3257 total)
| Status | Count |
|--------|-------|
| complete | 205 |
| n_a | 187 |
| not_started | 2527 |
| stub | 224 |
| verified | 114 |
## Library Mappings (36 total)
| Status | Count |
|--------|-------|
| mapped | 36 |
## Overall Progress
**4190/6942 items complete (60.4%)**

39
reports/report_0950580.md Normal file
View File

@@ -0,0 +1,39 @@
# NATS .NET Porting Status Report
Generated: 2026-02-27 00:40:06 UTC
## Modules (12 total)
| Status | Count |
|--------|-------|
| not_started | 1 |
| verified | 11 |
## Features (3673 total)
| Status | Count |
|--------|-------|
| complete | 3368 |
| n_a | 26 |
| verified | 279 |
## Unit Tests (3257 total)
| Status | Count |
|--------|-------|
| complete | 252 |
| deferred | 484 |
| n_a | 187 |
| not_started | 2220 |
| verified | 114 |
## Library Mappings (36 total)
| Status | Count |
|--------|-------|
| mapped | 36 |
## Overall Progress
**4237/6942 items complete (61.0%)**

35
reports/report_0a6e6bf.md Normal file
View File

@@ -0,0 +1,35 @@
# NATS .NET Porting Status Report
Generated: 2026-02-27 09:43:12 UTC
## Modules (12 total)
| Status | Count |
|--------|-------|
| verified | 12 |
## Features (3673 total)
| Status | Count |
|--------|-------|
| deferred | 3394 |
| verified | 279 |
## Unit Tests (3257 total)
| Status | Count |
|--------|-------|
| deferred | 2680 |
| n_a | 187 |
| verified | 390 |
## Library Mappings (36 total)
| Status | Count |
|--------|-------|
| mapped | 36 |
## Overall Progress
**868/6942 items complete (12.5%)**

35
reports/report_11ec33d.md Normal file
View File

@@ -0,0 +1,35 @@
# NATS .NET Porting Status Report
Generated: 2026-02-27 09:34:53 UTC
## Modules (12 total)
| Status | Count |
|--------|-------|
| verified | 12 |
## Features (3673 total)
| Status | Count |
|--------|-------|
| deferred | 3394 |
| verified | 279 |
## Unit Tests (3257 total)
| Status | Count |
|--------|-------|
| deferred | 2680 |
| n_a | 187 |
| verified | 390 |
## Library Mappings (36 total)
| Status | Count |
|--------|-------|
| mapped | 36 |
## Overall Progress
**868/6942 items complete (12.5%)**

35
reports/report_1c5921d.md Normal file
View File

@@ -0,0 +1,35 @@
# NATS .NET Porting Status Report
Generated: 2026-02-27 02:53:53 UTC
## Modules (12 total)
| Status | Count |
|--------|-------|
| verified | 12 |
## Features (3673 total)
| Status | Count |
|--------|-------|
| deferred | 3394 |
| verified | 279 |
## Unit Tests (3257 total)
| Status | Count |
|--------|-------|
| deferred | 2680 |
| n_a | 187 |
| verified | 390 |
## Library Mappings (36 total)
| Status | Count |
|--------|-------|
| mapped | 36 |
## Overall Progress
**868/6942 items complete (12.5%)**

35
reports/report_21bb760.md Normal file
View File

@@ -0,0 +1,35 @@
# NATS .NET Porting Status Report
Generated: 2026-02-27 09:44:06 UTC
## Modules (12 total)
| Status | Count |
|--------|-------|
| verified | 12 |
## Features (3673 total)
| Status | Count |
|--------|-------|
| deferred | 3394 |
| verified | 279 |
## Unit Tests (3257 total)
| Status | Count |
|--------|-------|
| deferred | 2680 |
| n_a | 187 |
| verified | 390 |
## Library Mappings (36 total)
| Status | Count |
|--------|-------|
| mapped | 36 |
## Overall Progress
**868/6942 items complete (12.5%)**

35
reports/report_2a900bf.md Normal file
View File

@@ -0,0 +1,35 @@
# NATS .NET Porting Status Report
Generated: 2026-02-27 09:43:52 UTC
## Modules (12 total)
| Status | Count |
|--------|-------|
| verified | 12 |
## Features (3673 total)
| Status | Count |
|--------|-------|
| deferred | 3394 |
| verified | 279 |
## Unit Tests (3257 total)
| Status | Count |
|--------|-------|
| deferred | 2680 |
| n_a | 187 |
| verified | 390 |
## Library Mappings (36 total)
| Status | Count |
|--------|-------|
| mapped | 36 |
## Overall Progress
**868/6942 items complete (12.5%)**

39
reports/report_364329c.md Normal file
View File

@@ -0,0 +1,39 @@
# NATS .NET Porting Status Report
Generated: 2026-02-27 00:15:57 UTC
## Modules (12 total)
| Status | Count |
|--------|-------|
| not_started | 1 |
| verified | 11 |
## Features (3673 total)
| Status | Count |
|--------|-------|
| complete | 3368 |
| n_a | 26 |
| verified | 279 |
## Unit Tests (3257 total)
| Status | Count |
|--------|-------|
| complete | 214 |
| deferred | 215 |
| n_a | 187 |
| not_started | 2527 |
| verified | 114 |
## Library Mappings (36 total)
| Status | Count |
|--------|-------|
| mapped | 36 |
## Overall Progress
**4199/6942 items complete (60.5%)**

34
reports/report_3e35ffa.md Normal file
View File

@@ -0,0 +1,34 @@
# NATS .NET Porting Status Report
Generated: 2026-02-27 01:21:30 UTC
## Modules (12 total)
| Status | Count |
|--------|-------|
| verified | 12 |
## Features (3673 total)
| Status | Count |
|--------|-------|
| verified | 3673 |
## Unit Tests (3257 total)
| Status | Count |
|--------|-------|
| deferred | 2680 |
| n_a | 187 |
| verified | 390 |
## Library Mappings (36 total)
| Status | Count |
|--------|-------|
| mapped | 36 |
## Overall Progress
**4262/6942 items complete (61.4%)**

35
reports/report_3f6c5f2.md Normal file
View File

@@ -0,0 +1,35 @@
# NATS .NET Porting Status Report
Generated: 2026-02-27 09:40:28 UTC
## Modules (12 total)
| Status | Count |
|--------|-------|
| verified | 12 |
## Features (3673 total)
| Status | Count |
|--------|-------|
| deferred | 3394 |
| verified | 279 |
## Unit Tests (3257 total)
| Status | Count |
|--------|-------|
| deferred | 2680 |
| n_a | 187 |
| verified | 390 |
## Library Mappings (36 total)
| Status | Count |
|--------|-------|
| mapped | 36 |
## Overall Progress
**868/6942 items complete (12.5%)**

35
reports/report_4901249.md Normal file
View File

@@ -0,0 +1,35 @@
# NATS .NET Porting Status Report
Generated: 2026-02-27 09:43:59 UTC
## Modules (12 total)
| Status | Count |
|--------|-------|
| verified | 12 |
## Features (3673 total)
| Status | Count |
|--------|-------|
| deferred | 3394 |
| verified | 279 |
## Unit Tests (3257 total)
| Status | Count |
|--------|-------|
| deferred | 2680 |
| n_a | 187 |
| verified | 390 |
## Library Mappings (36 total)
| Status | Count |
|--------|-------|
| mapped | 36 |
## Overall Progress
**868/6942 items complete (12.5%)**

35
reports/report_4ba6b26.md Normal file
View File

@@ -0,0 +1,35 @@
# NATS .NET Porting Status Report
Generated: 2026-02-27 09:45:24 UTC
## Modules (12 total)
| Status | Count |
|--------|-------|
| verified | 12 |
## Features (3673 total)
| Status | Count |
|--------|-------|
| deferred | 3394 |
| verified | 279 |
## Unit Tests (3257 total)
| Status | Count |
|--------|-------|
| deferred | 2680 |
| n_a | 187 |
| verified | 390 |
## Library Mappings (36 total)
| Status | Count |
|--------|-------|
| mapped | 36 |
## Overall Progress
**868/6942 items complete (12.5%)**

34
reports/report_6a1df6b.md Normal file
View File

@@ -0,0 +1,34 @@
# NATS .NET Porting Status Report
Generated: 2026-02-27 01:17:26 UTC
## Modules (12 total)
| Status | Count |
|--------|-------|
| verified | 12 |
## Features (3673 total)
| Status | Count |
|--------|-------|
| verified | 3673 |
## Unit Tests (3257 total)
| Status | Count |
|--------|-------|
| deferred | 2680 |
| n_a | 187 |
| verified | 390 |
## Library Mappings (36 total)
| Status | Count |
|--------|-------|
| mapped | 36 |
## Overall Progress
**4262/6942 items complete (61.4%)**

39
reports/report_6e90eea.md Normal file
View File

@@ -0,0 +1,39 @@
# NATS .NET Porting Status Report
Generated: 2026-02-27 01:02:01 UTC
## Modules (12 total)
| Status | Count |
|--------|-------|
| not_started | 1 |
| verified | 11 |
## Features (3673 total)
| Status | Count |
|--------|-------|
| complete | 3368 |
| n_a | 26 |
| verified | 279 |
## Unit Tests (3257 total)
| Status | Count |
|--------|-------|
| complete | 276 |
| deferred | 554 |
| n_a | 187 |
| not_started | 2126 |
| verified | 114 |
## Library Mappings (36 total)
| Status | Count |
|--------|-------|
| mapped | 36 |
## Overall Progress
**4261/6942 items complete (61.4%)**

37
reports/report_7750b46.md Normal file
View File

@@ -0,0 +1,37 @@
# NATS .NET Porting Status Report
Generated: 2026-02-26 23:50:50 UTC
## Modules (12 total)
| Status | Count |
|--------|-------|
| complete | 11 |
| not_started | 1 |
## Features (3673 total)
| Status | Count |
|--------|-------|
| complete | 3596 |
| n_a | 77 |
## Unit Tests (3257 total)
| Status | Count |
|--------|-------|
| complete | 319 |
| n_a | 181 |
| not_started | 2533 |
| stub | 224 |
## Library Mappings (36 total)
| Status | Count |
|--------|-------|
| mapped | 36 |
## Overall Progress
**4184/6942 items complete (60.3%)**

40
reports/report_8b63a6f.md Normal file
View File

@@ -0,0 +1,40 @@
# NATS .NET Porting Status Report
Generated: 2026-02-27 00:00:19 UTC
## Modules (12 total)
| Status | Count |
|--------|-------|
| not_started | 1 |
| verified | 11 |
## Features (3673 total)
| Status | Count |
|--------|-------|
| complete | 3368 |
| n_a | 26 |
| verified | 279 |
## Unit Tests (3257 total)
| Status | Count |
|--------|-------|
| complete | 208 |
| deferred | 74 |
| n_a | 187 |
| not_started | 2527 |
| stub | 147 |
| verified | 114 |
## Library Mappings (36 total)
| Status | Count |
|--------|-------|
| mapped | 36 |
## Overall Progress
**4193/6942 items complete (60.4%)**

39
reports/report_917cd33.md Normal file
View File

@@ -0,0 +1,39 @@
# NATS .NET Porting Status Report
Generated: 2026-02-27 00:35:59 UTC
## Modules (12 total)
| Status | Count |
|--------|-------|
| not_started | 1 |
| verified | 11 |
## Features (3673 total)
| Status | Count |
|--------|-------|
| complete | 3368 |
| n_a | 26 |
| verified | 279 |
## Unit Tests (3257 total)
| Status | Count |
|--------|-------|
| complete | 252 |
| deferred | 235 |
| n_a | 187 |
| not_started | 2469 |
| verified | 114 |
## Library Mappings (36 total)
| Status | Count |
|--------|-------|
| mapped | 36 |
## Overall Progress
**4237/6942 items complete (61.0%)**

40
reports/report_91f86b9.md Normal file
View File

@@ -0,0 +1,40 @@
# NATS .NET Porting Status Report
Generated: 2026-02-27 00:07:45 UTC
## Modules (12 total)
| Status | Count |
|--------|-------|
| not_started | 1 |
| verified | 11 |
## Features (3673 total)
| Status | Count |
|--------|-------|
| complete | 3368 |
| n_a | 26 |
| verified | 279 |
## Unit Tests (3257 total)
| Status | Count |
|--------|-------|
| complete | 209 |
| deferred | 201 |
| n_a | 187 |
| not_started | 2527 |
| stub | 19 |
| verified | 114 |
## Library Mappings (36 total)
| Status | Count |
|--------|-------|
| mapped | 36 |
## Overall Progress
**4194/6942 items complete (60.4%)**

34
reports/report_9552f6e.md Normal file
View File

@@ -0,0 +1,34 @@
# NATS .NET Porting Status Report
Generated: 2026-02-27 01:14:38 UTC
## Modules (12 total)
| Status | Count |
|--------|-------|
| verified | 12 |
## Features (3673 total)
| Status | Count |
|--------|-------|
| verified | 3673 |
## Unit Tests (3257 total)
| Status | Count |
|--------|-------|
| deferred | 2680 |
| n_a | 187 |
| verified | 390 |
## Library Mappings (36 total)
| Status | Count |
|--------|-------|
| mapped | 36 |
## Overall Progress
**4262/6942 items complete (61.4%)**

35
reports/report_97be7a2.md Normal file
View File

@@ -0,0 +1,35 @@
# NATS .NET Porting Status Report
Generated: 2026-02-27 09:37:37 UTC
## Modules (12 total)
| Status | Count |
|--------|-------|
| verified | 12 |
## Features (3673 total)
| Status | Count |
|--------|-------|
| deferred | 3394 |
| verified | 279 |
## Unit Tests (3257 total)
| Status | Count |
|--------|-------|
| deferred | 2680 |
| n_a | 187 |
| verified | 390 |
## Library Mappings (36 total)
| Status | Count |
|--------|-------|
| mapped | 36 |
## Overall Progress
**868/6942 items complete (12.5%)**

37
reports/report_a0c9c00.md Normal file
View File

@@ -0,0 +1,37 @@
# NATS .NET Porting Status Report
Generated: 2026-02-26 23:38:28 UTC
## Modules (12 total)
| Status | Count |
|--------|-------|
| complete | 11 |
| not_started | 1 |
## Features (3673 total)
| Status | Count |
|--------|-------|
| complete | 3596 |
| n_a | 77 |
## Unit Tests (3257 total)
| Status | Count |
|--------|-------|
| complete | 319 |
| n_a | 181 |
| not_started | 2533 |
| stub | 224 |
## Library Mappings (36 total)
| Status | Count |
|--------|-------|
| mapped | 36 |
## Overall Progress
**4184/6942 items complete (60.3%)**

35
reports/report_a99092d.md Normal file
View File

@@ -0,0 +1,35 @@
# NATS .NET Porting Status Report
Generated: 2026-02-27 09:38:59 UTC
## Modules (12 total)
| Status | Count |
|--------|-------|
| verified | 12 |
## Features (3673 total)
| Status | Count |
|--------|-------|
| deferred | 3394 |
| verified | 279 |
## Unit Tests (3257 total)
| Status | Count |
|--------|-------|
| deferred | 2680 |
| n_a | 187 |
| verified | 390 |
## Library Mappings (36 total)
| Status | Count |
|--------|-------|
| mapped | 36 |
## Overall Progress
**868/6942 items complete (12.5%)**

37
reports/report_d09de1c.md Normal file
View File

@@ -0,0 +1,37 @@
# NATS .NET Porting Status Report
Generated: 2026-02-26 23:49:24 UTC
## Modules (12 total)
| Status | Count |
|--------|-------|
| complete | 11 |
| not_started | 1 |
## Features (3673 total)
| Status | Count |
|--------|-------|
| complete | 3596 |
| n_a | 77 |
## Unit Tests (3257 total)
| Status | Count |
|--------|-------|
| complete | 319 |
| n_a | 181 |
| not_started | 2533 |
| stub | 224 |
## Library Mappings (36 total)
| Status | Count |
|--------|-------|
| mapped | 36 |
## Overall Progress
**4184/6942 items complete (60.3%)**

40
reports/report_f0b4138.md Normal file
View File

@@ -0,0 +1,40 @@
# NATS .NET Porting Status Report
Generated: 2026-02-27 00:04:02 UTC
## Modules (12 total)
| Status | Count |
|--------|-------|
| not_started | 1 |
| verified | 11 |
## Features (3673 total)
| Status | Count |
|--------|-------|
| complete | 3368 |
| n_a | 26 |
| verified | 279 |
## Unit Tests (3257 total)
| Status | Count |
|--------|-------|
| complete | 208 |
| deferred | 162 |
| n_a | 187 |
| not_started | 2527 |
| stub | 59 |
| verified | 114 |
## Library Mappings (36 total)
| Status | Count |
|--------|-------|
| mapped | 36 |
## Overall Progress
**4193/6942 items complete (60.4%)**

39
reports/report_f0faaff.md Normal file
View File

@@ -0,0 +1,39 @@
# NATS .NET Porting Status Report
Generated: 2026-02-27 01:10:05 UTC
## Modules (12 total)
| Status | Count |
|--------|-------|
| not_started | 1 |
| verified | 11 |
## Features (3673 total)
| Status | Count |
|--------|-------|
| complete | 3368 |
| n_a | 26 |
| verified | 279 |
## Unit Tests (3257 total)
| Status | Count |
|--------|-------|
| complete | 276 |
| deferred | 554 |
| n_a | 187 |
| not_started | 2126 |
| verified | 114 |
## Library Mappings (36 total)
| Status | Count |
|--------|-------|
| mapped | 36 |
## Overall Progress
**4261/6942 items complete (61.4%)**

View File

@@ -0,0 +1,191 @@
using System.CommandLine;
using NatsNet.PortTracker.Data;
namespace NatsNet.PortTracker.Commands;
/// <summary>
/// Shared infrastructure for the batch-update / batch-map subcommands:
/// common CLI options, ID-spec parsing, parameterized WHERE-clause
/// construction, and a dry-run-by-default preview/execute pipeline.
/// </summary>
public static class BatchFilters
{
    /// <summary>Option selecting explicit row IDs via ranges and/or lists.</summary>
    public static Option<string?> IdsOption() => new("--ids")
    {
        Description = "ID range: 100-200, 1,5,10, or mixed 1-5,10,20-25"
    };

    /// <summary>Option restricting the batch to one module's rows.</summary>
    public static Option<int?> ModuleOption() => new("--module")
    {
        Description = "Filter by module ID"
    };

    /// <summary>Option restricting the batch to rows currently in a given status.</summary>
    public static Option<string?> StatusOption() => new("--status")
    {
        Description = "Filter by current status"
    };

    /// <summary>Opt-in switch that turns the dry-run preview into a real update.</summary>
    public static Option<bool> ExecuteOption() => new("--execute")
    {
        Description = "Actually apply changes (default is dry-run preview)",
        DefaultValueFactory = _ => false
    };

    /// <summary>Registers the standard filter options on <paramref name="cmd"/>.</summary>
    /// <param name="includeModuleFilter">Whether the entity has a module column worth filtering on.</param>
    public static void AddFilterOptions(Command cmd, bool includeModuleFilter)
    {
        cmd.Add(IdsOption());
        if (includeModuleFilter)
            cmd.Add(ModuleOption());
        cmd.Add(StatusOption());
        cmd.Add(ExecuteOption());
    }

    /// <summary>
    /// Parses an ID spec such as "1-5,10,20-25" into a flat list of IDs.
    /// Malformed parts — non-numeric tokens, unparsable ranges, or descending
    /// ranges like "9-3" — are skipped with a console warning instead of
    /// aborting the whole batch. Null/blank input yields an empty list.
    /// </summary>
    public static List<int> ParseIds(string? idsSpec)
    {
        if (string.IsNullOrWhiteSpace(idsSpec)) return [];
        var ids = new List<int>();
        foreach (var part in idsSpec.Split(',', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries))
        {
            if (part.Contains('-'))
            {
                var range = part.Split('-', 2);
                if (int.TryParse(range[0], out var start) && int.TryParse(range[1], out var end))
                {
                    if (start > end)
                    {
                        // A descending range used to expand to nothing silently;
                        // warn so typos like "200-100" show up in the preview.
                        Console.WriteLine($"Warning: empty range '{part}' (start > end), skipping.");
                        continue;
                    }
                    for (var i = start; i <= end; i++)
                        ids.Add(i);
                }
                else
                {
                    Console.WriteLine($"Warning: invalid range '{part}', skipping.");
                }
            }
            else if (int.TryParse(part, out var id))
            {
                ids.Add(id);
            }
            else
            {
                Console.WriteLine($"Warning: invalid ID '{part}', skipping.");
            }
        }
        return ids;
    }

    /// <summary>
    /// Builds a parameterized SQL WHERE clause from the optional filters.
    /// Returns an empty clause (match everything) when no filter is supplied;
    /// otherwise the clause starts with " WHERE ". All filter values are bound
    /// via named parameters — only column names (from trusted call sites) are
    /// interpolated into the SQL text.
    /// </summary>
    public static (string whereClause, List<(string name, object? value)> parameters) BuildWhereClause(
        string? idsSpec, int? moduleId, string? status, string idColumn = "id", string moduleColumn = "module_id")
    {
        var clauses = new List<string>();
        var parameters = new List<(string name, object? value)>();
        if (!string.IsNullOrWhiteSpace(idsSpec))
        {
            var ids = ParseIds(idsSpec);
            if (ids.Count > 0)
            {
                var placeholders = new List<string>();
                for (var i = 0; i < ids.Count; i++)
                {
                    placeholders.Add($"@id{i}");
                    parameters.Add(($"@id{i}", ids[i]));
                }
                clauses.Add($"{idColumn} IN ({string.Join(", ", placeholders)})");
            }
        }
        if (moduleId is not null)
        {
            clauses.Add($"{moduleColumn} = @moduleFilter");
            parameters.Add(("@moduleFilter", moduleId));
        }
        if (!string.IsNullOrWhiteSpace(status))
        {
            clauses.Add("status = @statusFilter");
            parameters.Add(("@statusFilter", status));
        }
        if (clauses.Count == 0)
            return ("", parameters);
        return (" WHERE " + string.Join(" AND ", clauses), parameters);
    }

    /// <summary>
    /// Counts the rows matched by <paramref name="whereClause"/>, prints a
    /// preview table, and — only when <paramref name="execute"/> is true —
    /// applies <paramref name="updateSetClause"/> inside a transaction.
    /// Update and filter parameter names must not collide (callers use
    /// @new*/@set* for updates and @id*/@*Filter for filters).
    /// </summary>
    public static void PreviewOrExecute(
        Database db,
        string table,
        string displayColumns,
        string updateSetClause,
        List<(string name, object? value)> updateParams,
        string whereClause,
        List<(string name, object? value)> filterParams,
        bool execute)
    {
        // Count matching rows first so a dry run can report "nothing matched".
        var countSql = $"SELECT COUNT(*) FROM {table}{whereClause}";
        var count = db.ExecuteScalar<long>(countSql, filterParams.ToArray());
        if (count == 0)
        {
            Console.WriteLine("No items match the specified filters.");
            return;
        }
        // Preview.
        // NOTE(review): ORDER BY hard-codes "id" even though BuildWhereClause
        // accepts a custom idColumn — fine for current callers, which use "id".
        var previewSql = $"SELECT {displayColumns} FROM {table}{whereClause} ORDER BY id";
        var rows = db.Query(previewSql, filterParams.ToArray());
        if (!execute)
        {
            Console.WriteLine($"Would affect {count} items:");
            Console.WriteLine();
            PrintPreviewTable(rows);
            Console.WriteLine();
            Console.WriteLine("Add --execute to apply these changes.");
            return;
        }
        // Execute: update parameters first, then filters, all bound by name.
        var allParams = new List<(string name, object? value)>();
        allParams.AddRange(updateParams);
        allParams.AddRange(filterParams);
        var updateSql = $"UPDATE {table} SET {updateSetClause}{whereClause}";
        var affected = db.ExecuteInTransaction(updateSql, allParams.ToArray());
        Console.WriteLine($"Updated {affected} items.");
    }

    /// <summary>
    /// Prints rows as a fixed-width table. Column widths grow to fit content
    /// up to a 40-character cap; at most 50 rows are displayed.
    /// </summary>
    private static void PrintPreviewTable(List<Dictionary<string, object?>> rows)
    {
        if (rows.Count == 0) return;
        var columns = rows[0].Keys.ToList();
        var widths = columns.Select(c => c.Length).ToList();
        foreach (var row in rows)
        {
            for (var i = 0; i < columns.Count; i++)
            {
                var val = row[columns[i]]?.ToString() ?? "";
                // Grow toward the 40-char cap, but never shrink a width a long
                // header already established (the old code could shrink it and
                // misalign the table when a >40-char value appeared).
                var capped = Math.Min(val.Length, 40);
                if (capped > widths[i]) widths[i] = capped;
            }
        }
        // Header
        var header = string.Join(" ", columns.Select((c, i) => Truncate(c, widths[i]).PadRight(widths[i])));
        Console.WriteLine(header);
        Console.WriteLine(new string('-', header.Length));
        // Rows (cap at 50 for preview)
        var displayRows = rows.Take(50).ToList();
        foreach (var row in displayRows)
        {
            var line = string.Join(" ", columns.Select((c, i) =>
                Truncate(row[c]?.ToString() ?? "", widths[i]).PadRight(widths[i])));
            Console.WriteLine(line);
        }
        if (rows.Count > 50)
            Console.WriteLine($" ... and {rows.Count - 50} more");
    }

    /// <summary>Truncates <paramref name="s"/> to <paramref name="maxLen"/> chars, marking cuts with "..".</summary>
    private static string Truncate(string s, int maxLen)
    {
        if (s.Length <= maxLen) return s;
        // Guard degenerate widths: the old code threw for maxLen < 2 because
        // s[..(maxLen - 2)] produces a negative range end.
        if (maxLen < 2) return maxLen <= 0 ? "" : s[..maxLen];
        return s[..(maxLen - 2)] + "..";
    }
}

View File

@@ -171,10 +171,124 @@ public static class FeatureCommands
featureCommand.Add(updateCmd);
featureCommand.Add(mapCmd);
featureCommand.Add(naCmd);
featureCommand.Add(CreateBatchUpdate(dbOption));
featureCommand.Add(CreateBatchMap(dbOption));
return featureCommand;
}
/// <summary>
/// Builds the `feature batch-update` command: bulk status (and optionally
/// notes) changes over features selected by --ids / --module / --status,
/// previewed by default and applied only with --execute.
/// </summary>
private static Command CreateBatchUpdate(Option<string> dbOption)
{
    var cmd = new Command("batch-update", "Bulk update feature status");
    var idsOption = BatchFilters.IdsOption();
    var moduleOption = BatchFilters.ModuleOption();
    var statusOption = BatchFilters.StatusOption();
    var executeOption = BatchFilters.ExecuteOption();
    var statusToSet = new Option<string>("--set-status") { Description = "New status to set", Required = true };
    var notesToSet = new Option<string?>("--set-notes") { Description = "Notes to set" };
    foreach (var option in new Option[] { idsOption, moduleOption, statusOption, executeOption, statusToSet, notesToSet })
        cmd.Add(option);
    cmd.SetAction(parseResult =>
    {
        var dbPath = parseResult.GetValue(dbOption)!;
        var idsSpec = parseResult.GetValue(idsOption);
        var moduleFilter = parseResult.GetValue(moduleOption);
        var statusFilter = parseResult.GetValue(statusOption);
        var apply = parseResult.GetValue(executeOption);
        var targetStatus = parseResult.GetValue(statusToSet)!;
        var targetNotes = parseResult.GetValue(notesToSet);

        // Refuse an unfiltered batch — it would touch every feature row.
        var hasFilter = !string.IsNullOrWhiteSpace(idsSpec)
            || moduleFilter is not null
            || !string.IsNullOrWhiteSpace(statusFilter);
        if (!hasFilter)
        {
            Console.WriteLine("Error: at least one filter (--ids, --module, --status) is required.");
            return;
        }

        using var db = new Database(dbPath);
        var (whereClause, filterParams) = BatchFilters.BuildWhereClause(idsSpec, moduleFilter, statusFilter);

        var setClauses = new List<string> { "status = @newStatus" };
        var updateParams = new List<(string, object?)> { ("@newStatus", targetStatus) };
        if (targetNotes is not null)
        {
            setClauses.Add("notes = @newNotes");
            updateParams.Add(("@newNotes", targetNotes));
        }

        BatchFilters.PreviewOrExecute(db, "features",
            "id, name, status, module_id, notes",
            string.Join(", ", setClauses), updateParams,
            whereClause, filterParams, apply);
    });
    return cmd;
}
/// <summary>
/// Builds the `feature batch-map` command: bulk assignment of the .NET
/// project/class/method mapping columns over filtered features, dry-run by
/// default.
/// </summary>
private static Command CreateBatchMap(Option<string> dbOption)
{
    var cmd = new Command("batch-map", "Bulk map features to .NET methods");
    var idsOption = BatchFilters.IdsOption();
    var moduleOption = BatchFilters.ModuleOption();
    var statusOption = BatchFilters.StatusOption();
    var executeOption = BatchFilters.ExecuteOption();
    var projectOption = new Option<string?>("--set-project") { Description = ".NET project" };
    var classOption = new Option<string?>("--set-class") { Description = ".NET class" };
    var methodOption = new Option<string?>("--set-method") { Description = ".NET method" };
    foreach (var option in new Option[] { idsOption, moduleOption, statusOption, executeOption, projectOption, classOption, methodOption })
        cmd.Add(option);
    cmd.SetAction(parseResult =>
    {
        var dbPath = parseResult.GetValue(dbOption)!;
        var idsSpec = parseResult.GetValue(idsOption);
        var moduleFilter = parseResult.GetValue(moduleOption);
        var statusFilter = parseResult.GetValue(statusOption);
        var apply = parseResult.GetValue(executeOption);
        var project = parseResult.GetValue(projectOption);
        var cls = parseResult.GetValue(classOption);
        var method = parseResult.GetValue(methodOption);

        // Refuse an unfiltered batch — it would touch every feature row.
        var hasFilter = !string.IsNullOrWhiteSpace(idsSpec)
            || moduleFilter is not null
            || !string.IsNullOrWhiteSpace(statusFilter);
        if (!hasFilter)
        {
            Console.WriteLine("Error: at least one filter (--ids, --module, --status) is required.");
            return;
        }
        if (project is null && cls is null && method is null)
        {
            Console.WriteLine("Error: at least one of --set-project, --set-class, --set-method is required.");
            return;
        }

        using var db = new Database(dbPath);
        var (whereClause, filterParams) = BatchFilters.BuildWhereClause(idsSpec, moduleFilter, statusFilter);

        var setClauses = new List<string>();
        var updateParams = new List<(string, object?)>();
        // Collects one "column = @param" assignment per supplied option.
        void AddAssignment(string column, string param, object? value)
        {
            setClauses.Add($"{column} = {param}");
            updateParams.Add((param, value));
        }
        if (project is not null) AddAssignment("dotnet_project", "@setProject", project);
        if (cls is not null) AddAssignment("dotnet_class", "@setClass", cls);
        if (method is not null) AddAssignment("dotnet_method", "@setMethod", method);

        BatchFilters.PreviewOrExecute(db, "features",
            "id, name, status, dotnet_project, dotnet_class, dotnet_method",
            string.Join(", ", setClauses), updateParams,
            whereClause, filterParams, apply);
    });
    return cmd;
}
private static string Truncate(string? s, int maxLen)
{
if (s is null) return "";

View File

@@ -86,10 +86,118 @@ public static class LibraryCommands
libraryCommand.Add(listCmd);
libraryCommand.Add(mapCmd);
libraryCommand.Add(suggestCmd);
libraryCommand.Add(CreateBatchUpdate(dbOption));
libraryCommand.Add(CreateBatchMap(dbOption));
return libraryCommand;
}
/// <summary>
/// Builds the `library batch-update` command: bulk status (and optionally
/// usage-notes) changes over library mappings selected by --ids / --status,
/// previewed by default and applied only with --execute.
/// </summary>
private static Command CreateBatchUpdate(Option<string> dbOption)
{
    var cmd = new Command("batch-update", "Bulk update library status");
    var idsOption = BatchFilters.IdsOption();
    var statusOption = BatchFilters.StatusOption();
    var executeOption = BatchFilters.ExecuteOption();
    var statusToSet = new Option<string>("--set-status") { Description = "New status to set", Required = true };
    var notesToSet = new Option<string?>("--set-notes") { Description = "Usage notes to set" };
    foreach (var option in new Option[] { idsOption, statusOption, executeOption, statusToSet, notesToSet })
        cmd.Add(option);
    cmd.SetAction(parseResult =>
    {
        var dbPath = parseResult.GetValue(dbOption)!;
        var idsSpec = parseResult.GetValue(idsOption);
        var statusFilter = parseResult.GetValue(statusOption);
        var apply = parseResult.GetValue(executeOption);
        var targetStatus = parseResult.GetValue(statusToSet)!;
        var targetNotes = parseResult.GetValue(notesToSet);

        // Refuse an unfiltered batch — it would touch every mapping row.
        if (string.IsNullOrWhiteSpace(idsSpec) && string.IsNullOrWhiteSpace(statusFilter))
        {
            Console.WriteLine("Error: at least one filter (--ids, --status) is required.");
            return;
        }

        using var db = new Database(dbPath);
        var (whereClause, filterParams) = BatchFilters.BuildWhereClause(idsSpec, null, statusFilter);

        var setClauses = new List<string> { "status = @newStatus" };
        var updateParams = new List<(string, object?)> { ("@newStatus", targetStatus) };
        if (targetNotes is not null)
        {
            setClauses.Add("dotnet_usage_notes = @newNotes");
            updateParams.Add(("@newNotes", targetNotes));
        }

        BatchFilters.PreviewOrExecute(db, "library_mappings",
            "id, go_import_path, status, dotnet_usage_notes",
            string.Join(", ", setClauses), updateParams,
            whereClause, filterParams, apply);
    });
    return cmd;
}
/// <summary>
/// Builds the `library batch-map` command: bulk assignment of the .NET
/// package/namespace/usage-notes mapping columns over filtered library
/// mappings, dry-run by default.
/// </summary>
private static Command CreateBatchMap(Option<string> dbOption)
{
    var cmd = new Command("batch-map", "Bulk map libraries to .NET packages");
    var idsOption = BatchFilters.IdsOption();
    var statusOption = BatchFilters.StatusOption();
    var executeOption = BatchFilters.ExecuteOption();
    var packageOption = new Option<string?>("--set-package") { Description = ".NET NuGet package" };
    var namespaceOption = new Option<string?>("--set-namespace") { Description = ".NET namespace" };
    var notesOption = new Option<string?>("--set-notes") { Description = "Usage notes" };
    foreach (var option in new Option[] { idsOption, statusOption, executeOption, packageOption, namespaceOption, notesOption })
        cmd.Add(option);
    cmd.SetAction(parseResult =>
    {
        var dbPath = parseResult.GetValue(dbOption)!;
        var idsSpec = parseResult.GetValue(idsOption);
        var statusFilter = parseResult.GetValue(statusOption);
        var apply = parseResult.GetValue(executeOption);
        var package = parseResult.GetValue(packageOption);
        var ns = parseResult.GetValue(namespaceOption);
        var notes = parseResult.GetValue(notesOption);

        // Refuse an unfiltered batch — it would touch every mapping row.
        if (string.IsNullOrWhiteSpace(idsSpec) && string.IsNullOrWhiteSpace(statusFilter))
        {
            Console.WriteLine("Error: at least one filter (--ids, --status) is required.");
            return;
        }
        if (package is null && ns is null && notes is null)
        {
            Console.WriteLine("Error: at least one of --set-package, --set-namespace, --set-notes is required.");
            return;
        }

        using var db = new Database(dbPath);
        var (whereClause, filterParams) = BatchFilters.BuildWhereClause(idsSpec, null, statusFilter);

        var setClauses = new List<string>();
        var updateParams = new List<(string, object?)>();
        // Collects one "column = @param" assignment per supplied option.
        void AddAssignment(string column, string param, object? value)
        {
            setClauses.Add($"{column} = {param}");
            updateParams.Add((param, value));
        }
        if (package is not null) AddAssignment("dotnet_package", "@setPackage", package);
        if (ns is not null) AddAssignment("dotnet_namespace", "@setNamespace", ns);
        if (notes is not null) AddAssignment("dotnet_usage_notes", "@setNotes", notes);

        BatchFilters.PreviewOrExecute(db, "library_mappings",
            "id, go_import_path, status, dotnet_package, dotnet_namespace",
            string.Join(", ", setClauses), updateParams,
            whereClause, filterParams, apply);
    });
    return cmd;
}
private static string Truncate(string? s, int maxLen)
{
if (s is null) return "";

View File

@@ -147,7 +147,115 @@ public static class ModuleCommands
moduleCommand.Add(updateCmd);
moduleCommand.Add(mapCmd);
moduleCommand.Add(naCmd);
moduleCommand.Add(CreateBatchUpdate(dbOption));
moduleCommand.Add(CreateBatchMap(dbOption));
return moduleCommand;
}
/// <summary>
/// Builds the `module batch-update` command: bulk status (and optionally
/// notes) changes over modules selected by --ids / --status, previewed by
/// default and applied only with --execute.
/// </summary>
private static Command CreateBatchUpdate(Option<string> dbOption)
{
    var cmd = new Command("batch-update", "Bulk update module status");
    var idsOption = BatchFilters.IdsOption();
    var statusOption = BatchFilters.StatusOption();
    var executeOption = BatchFilters.ExecuteOption();
    var statusToSet = new Option<string>("--set-status") { Description = "New status to set", Required = true };
    var notesToSet = new Option<string?>("--set-notes") { Description = "Notes to set" };
    foreach (var option in new Option[] { idsOption, statusOption, executeOption, statusToSet, notesToSet })
        cmd.Add(option);
    cmd.SetAction(parseResult =>
    {
        var dbPath = parseResult.GetValue(dbOption)!;
        var idsSpec = parseResult.GetValue(idsOption);
        var statusFilter = parseResult.GetValue(statusOption);
        var apply = parseResult.GetValue(executeOption);
        var targetStatus = parseResult.GetValue(statusToSet)!;
        var targetNotes = parseResult.GetValue(notesToSet);

        // Refuse an unfiltered batch — it would touch every module row.
        if (string.IsNullOrWhiteSpace(idsSpec) && string.IsNullOrWhiteSpace(statusFilter))
        {
            Console.WriteLine("Error: at least one filter (--ids, --status) is required.");
            return;
        }

        using var db = new Database(dbPath);
        var (whereClause, filterParams) = BatchFilters.BuildWhereClause(idsSpec, null, statusFilter);

        var setClauses = new List<string> { "status = @newStatus" };
        var updateParams = new List<(string, object?)> { ("@newStatus", targetStatus) };
        if (targetNotes is not null)
        {
            setClauses.Add("notes = @newNotes");
            updateParams.Add(("@newNotes", targetNotes));
        }

        BatchFilters.PreviewOrExecute(db, "modules",
            "id, name, status, notes",
            string.Join(", ", setClauses), updateParams,
            whereClause, filterParams, apply);
    });
    return cmd;
}
/// <summary>
/// Builds the `module batch-map` command: bulk assignment of the .NET
/// project/namespace/class mapping columns over filtered modules, dry-run by
/// default.
/// </summary>
private static Command CreateBatchMap(Option<string> dbOption)
{
    var cmd = new Command("batch-map", "Bulk map modules to .NET projects");
    var idsOption = BatchFilters.IdsOption();
    var statusOption = BatchFilters.StatusOption();
    var executeOption = BatchFilters.ExecuteOption();
    var projectOption = new Option<string?>("--set-project") { Description = ".NET project" };
    var namespaceOption = new Option<string?>("--set-namespace") { Description = ".NET namespace" };
    var classOption = new Option<string?>("--set-class") { Description = ".NET class" };
    foreach (var option in new Option[] { idsOption, statusOption, executeOption, projectOption, namespaceOption, classOption })
        cmd.Add(option);
    cmd.SetAction(parseResult =>
    {
        var dbPath = parseResult.GetValue(dbOption)!;
        var idsSpec = parseResult.GetValue(idsOption);
        var statusFilter = parseResult.GetValue(statusOption);
        var apply = parseResult.GetValue(executeOption);
        var project = parseResult.GetValue(projectOption);
        var ns = parseResult.GetValue(namespaceOption);
        var cls = parseResult.GetValue(classOption);

        // Refuse an unfiltered batch — it would touch every module row.
        if (string.IsNullOrWhiteSpace(idsSpec) && string.IsNullOrWhiteSpace(statusFilter))
        {
            Console.WriteLine("Error: at least one filter (--ids, --status) is required.");
            return;
        }
        if (project is null && ns is null && cls is null)
        {
            Console.WriteLine("Error: at least one of --set-project, --set-namespace, --set-class is required.");
            return;
        }

        using var db = new Database(dbPath);
        var (whereClause, filterParams) = BatchFilters.BuildWhereClause(idsSpec, null, statusFilter);

        var setClauses = new List<string>();
        var updateParams = new List<(string, object?)>();
        // Collects one "column = @param" assignment per supplied option.
        void AddAssignment(string column, string param, object? value)
        {
            setClauses.Add($"{column} = {param}");
            updateParams.Add((param, value));
        }
        if (project is not null) AddAssignment("dotnet_project", "@setProject", project);
        if (ns is not null) AddAssignment("dotnet_namespace", "@setNamespace", ns);
        if (cls is not null) AddAssignment("dotnet_class", "@setClass", cls);

        BatchFilters.PreviewOrExecute(db, "modules",
            "id, name, status, dotnet_project, dotnet_namespace, dotnet_class",
            string.Join(", ", setClauses), updateParams,
            whereClause, filterParams, apply);
    });
    return cmd;
}
}

View File

@@ -131,10 +131,124 @@ public static class TestCommands
testCommand.Add(showCmd);
testCommand.Add(updateCmd);
testCommand.Add(mapCmd);
testCommand.Add(CreateBatchUpdate(dbOption));
testCommand.Add(CreateBatchMap(dbOption));
return testCommand;
}
/// <summary>
/// Builds the "batch-update" subcommand: sets a new status (and optionally
/// notes) on every unit test matched by the --ids / --module / --status
/// filters. Dry-run unless --execute is supplied (handled by
/// BatchFilters.PreviewOrExecute).
/// </summary>
private static Command CreateBatchUpdate(Option<string> dbOption)
{
    var idsOption = BatchFilters.IdsOption();
    var moduleOption = BatchFilters.ModuleOption();
    var statusOption = BatchFilters.StatusOption();
    var executeOption = BatchFilters.ExecuteOption();
    var setStatusOption = new Option<string>("--set-status") { Description = "New status to set", Required = true };
    var setNotesOption = new Option<string?>("--set-notes") { Description = "Notes to set" };

    var command = new Command("batch-update", "Bulk update test status")
    {
        idsOption,
        moduleOption,
        statusOption,
        executeOption,
        setStatusOption,
        setNotesOption,
    };

    command.SetAction(parseResult =>
    {
        var dbPath = parseResult.GetValue(dbOption)!;
        var ids = parseResult.GetValue(idsOption);
        var moduleFilter = parseResult.GetValue(moduleOption);
        var statusFilter = parseResult.GetValue(statusOption);
        var execute = parseResult.GetValue(executeOption);
        var targetStatus = parseResult.GetValue(setStatusOption)!;
        var targetNotes = parseResult.GetValue(setNotesOption);

        // Refuse to run unfiltered: an unconstrained UPDATE would touch every row.
        var hasFilter = !string.IsNullOrWhiteSpace(ids)
            || moduleFilter is not null
            || !string.IsNullOrWhiteSpace(statusFilter);
        if (!hasFilter)
        {
            Console.WriteLine("Error: at least one filter (--ids, --module, --status) is required.");
            return;
        }

        using var db = new Database(dbPath);
        var (whereClause, filterParams) = BatchFilters.BuildWhereClause(ids, moduleFilter, statusFilter);

        // Status is always assigned; notes only when the caller provided them.
        var assignments = new List<string> { "status = @newStatus" };
        var assignmentParams = new List<(string, object?)> { ("@newStatus", targetStatus) };
        if (targetNotes is not null)
        {
            assignments.Add("notes = @newNotes");
            assignmentParams.Add(("@newNotes", targetNotes));
        }

        BatchFilters.PreviewOrExecute(db, "unit_tests",
            "id, name, status, module_id, notes",
            string.Join(", ", assignments), assignmentParams,
            whereClause, filterParams, execute);
    });
    return command;
}
/// <summary>
/// Builds the "batch-map" subcommand: assigns .NET test project/class/method
/// mapping columns on every unit test matched by the --ids / --module /
/// --status filters. Requires at least one filter and at least one --set-*
/// option; dry-run unless --execute is supplied (handled by
/// BatchFilters.PreviewOrExecute).
/// </summary>
private static Command CreateBatchMap(Option<string> dbOption)
{
    var idsOption = BatchFilters.IdsOption();
    var moduleOption = BatchFilters.ModuleOption();
    var statusOption = BatchFilters.StatusOption();
    var executeOption = BatchFilters.ExecuteOption();
    var setProjectOption = new Option<string?>("--set-project") { Description = ".NET test project" };
    var setClassOption = new Option<string?>("--set-class") { Description = ".NET test class" };
    var setMethodOption = new Option<string?>("--set-method") { Description = ".NET test method" };

    var command = new Command("batch-map", "Bulk map tests to .NET test methods")
    {
        idsOption,
        moduleOption,
        statusOption,
        executeOption,
        setProjectOption,
        setClassOption,
        setMethodOption,
    };

    command.SetAction(parseResult =>
    {
        var dbPath = parseResult.GetValue(dbOption)!;
        var ids = parseResult.GetValue(idsOption);
        var moduleFilter = parseResult.GetValue(moduleOption);
        var statusFilter = parseResult.GetValue(statusOption);
        var execute = parseResult.GetValue(executeOption);
        var project = parseResult.GetValue(setProjectOption);
        var cls = parseResult.GetValue(setClassOption);
        var method = parseResult.GetValue(setMethodOption);

        // Refuse to run unfiltered: an unconstrained UPDATE would touch every row.
        if (string.IsNullOrWhiteSpace(ids) && moduleFilter is null && string.IsNullOrWhiteSpace(statusFilter))
        {
            Console.WriteLine("Error: at least one filter (--ids, --module, --status) is required.");
            return;
        }
        // With no --set-* option there is nothing to write.
        if (project is null && cls is null && method is null)
        {
            Console.WriteLine("Error: at least one of --set-project, --set-class, --set-method is required.");
            return;
        }

        using var db = new Database(dbPath);
        var (whereClause, filterParams) = BatchFilters.BuildWhereClause(ids, moduleFilter, statusFilter);

        // Only the columns the caller asked for are included in the SET list.
        var assignments = new List<string>();
        var assignmentParams = new List<(string, object?)>();
        if (project is not null)
        {
            assignments.Add("dotnet_project = @setProject");
            assignmentParams.Add(("@setProject", project));
        }
        if (cls is not null)
        {
            assignments.Add("dotnet_class = @setClass");
            assignmentParams.Add(("@setClass", cls));
        }
        if (method is not null)
        {
            assignments.Add("dotnet_method = @setMethod");
            assignmentParams.Add(("@setMethod", method));
        }

        BatchFilters.PreviewOrExecute(db, "unit_tests",
            "id, name, status, dotnet_project, dotnet_class, dotnet_method",
            string.Join(", ", assignments), assignmentParams,
            whereClause, filterParams, execute);
    });
    return command;
}
private static string Truncate(string? s, int maxLen)
{
if (s is null) return "";

View File

@@ -70,6 +70,26 @@ public sealed class Database : IDisposable
return results;
}
/// <summary>
/// Executes a single non-query SQL statement inside its own transaction,
/// committing on success and rolling back on any failure.
/// </summary>
/// <param name="sql">The SQL statement to execute (parameterized).</param>
/// <param name="parameters">Name/value pairs bound to the command; a null
/// value is bound as <see cref="DBNull.Value"/>.</param>
/// <returns>The number of rows affected, as reported by ExecuteNonQuery.</returns>
public int ExecuteInTransaction(string sql, params (string name, object? value)[] parameters)
{
    using var transaction = _connection.BeginTransaction();
    try
    {
        using var cmd = CreateCommand(sql);
        cmd.Transaction = transaction;
        for (var i = 0; i < parameters.Length; i++)
        {
            var (name, value) = parameters[i];
            cmd.Parameters.AddWithValue(name, value ?? DBNull.Value);
        }
        var rowsAffected = cmd.ExecuteNonQuery();
        transaction.Commit();
        return rowsAffected;
    }
    catch
    {
        // Undo any partial work, then rethrow with the original stack trace.
        transaction.Rollback();
        throw;
    }
}
// Releases the underlying database connection; the Database instance
// must not be used after this is called.
public void Dispose()
{
_connection.Dispose();