New project Driver.Historian.Wonderware.Client (net10 x64) implements both Core.Abstractions.IHistorianDataSource (read paths consumed by the server's IHistoryRouter) and Core.AlarmHistorian.IAlarmHistorianWriter (alarm-event drain consumed by SqliteStoreAndForwardSink) against the sidecar's PR 3.3 pipe protocol. Wire-format files (Framing/MessageKind, Hello, Contracts, FrameReader, FrameWriter) are byte-identical mirrors of the sidecar's net48 originals — the sidecar can't be referenced as a ProjectReference because of the runtime/bitness gap, so we duplicate and pin the wire bytes via tests. PipeChannel owns one bidirectional NamedPipeClientStream + Hello handshake + serializes calls. Single in-flight at a time (semaphore); transport failures trigger one in-flight reconnect-and-retry before propagating. Connect is abstracted behind a Func<CancellationToken, Task<Stream>> so tests inject in-process pipes. WonderwareHistorianClient maps: - HistorianSampleDto.Quality (raw OPC DA byte) → OPC UA StatusCode uint via QualityMapper (port of HistorianQualityMapper from sidecar). - HistorianAggregateSampleDto.Value=null → BadNoData (0x800E0000). - WriteAlarmEventsReply.PerEventOk[i]=true → Ack, false → RetryPlease. Whole-call failure or transport exception → RetryPlease for every event in the batch (drain worker handles backoff). - AlarmHistorianEvent → AlarmHistorianEventDto with severity bucketed via AlarmSeverity-to-ushort mapping (Low=250, Medium=500, High=700, Crit=900). GetHealthSnapshot tracks transport success + sidecar-reported failure separately; ConsecutiveFailures rises on operation-level errors, not just transport drops. 
10 round-trip tests via FakeSidecarServer (in-process net10 fake using the client's own framing): byte→uint quality mapping, null-bucket BadNoData, at-time order preservation, event-field round-trip, sidecar error surfacing, WriteBatch per-event status, whole-call retry-please mapping, Hello shared-secret rejection, transport-drop reconnect-and-retry, health snapshot counters. PR 3.W will register this client as IHistorianDataSource + IAlarmHistorianWriter in OpcUaServerService DI. Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
314 lines
12 KiB
C#
using MessagePack;
|
|
using Shouldly;
|
|
using Xunit;
|
|
using ZB.MOM.WW.OtOpcUa.Core.Abstractions;
|
|
using ZB.MOM.WW.OtOpcUa.Core.AlarmHistorian;
|
|
using ZB.MOM.WW.OtOpcUa.Driver.Historian.Wonderware.Client.Ipc;
|
|
|
|
namespace ZB.MOM.WW.OtOpcUa.Driver.Historian.Wonderware.Client.Tests;
|
|
|
|
/// <summary>
/// End-to-end tests for <see cref="WonderwareHistorianClient"/>. Every interface method is
/// exercised over a real named pipe against the in-process <see cref="FakeSidecarServer"/>,
/// which reuses the client's own byte-identical framing code. Coverage: byte→uint quality
/// mapping, BadNoData propagation for null aggregate buckets, alarm-write per-event status
/// flow, Hello handshake rejection on a bad secret, and reconnect after a transport drop.
/// </summary>
public sealed class WonderwareHistorianClientTests
{
    private const string Secret = "test-secret-123";

    // Every test talks to its own pipe so parallel xUnit execution never cross-connects.
    private static string UniquePipeName() => $"otopcua-historian-test-{Guid.NewGuid():N}";

    // Short timeouts keep a hung pipe from stalling the suite.
    private static WonderwareHistorianClientOptions OptsFor(string pipe)
    {
        return new WonderwareHistorianClientOptions(
            PipeName: pipe,
            SharedSecret: Secret,
            PeerName: "test",
            ConnectTimeout: TimeSpan.FromSeconds(2),
            CallTimeout: TimeSpan.FromSeconds(2));
    }

    [Fact]
    public async Task ReadRawAsync_RoundTripsSamples_AndMapsQualityByteToOpcUaStatusCode()
    {
        string pipeName = UniquePipeName();
        var goodStamp = new DateTime(2026, 4, 29, 12, 0, 0, DateTimeKind.Utc);
        var badStamp = new DateTime(2026, 4, 29, 12, 0, 1, DateTimeKind.Utc);

        await using var sidecar = new FakeSidecarServer(pipeName, Secret)
        {
            OnReadRaw = req => new ReadRawReply
            {
                Success = true,
                Samples =
                [
                    new HistorianSampleDto
                    {
                        ValueBytes = MessagePackSerializer.Serialize<object>(42.0),
                        Quality = 192, // Good
                        TimestampUtcTicks = goodStamp.Ticks,
                    },
                    new HistorianSampleDto
                    {
                        ValueBytes = MessagePackSerializer.Serialize<object>(43.5),
                        Quality = 8, // Bad_NotConnected
                        TimestampUtcTicks = badStamp.Ticks,
                    },
                ],
            },
        };
        await sidecar.StartAsync();

        await using var sut = new WonderwareHistorianClient(OptsFor(pipeName));
        var reply = await sut.ReadRawAsync("Tank.Level",
            new DateTime(2026, 4, 29, 0, 0, 0, DateTimeKind.Utc),
            new DateTime(2026, 4, 30, 0, 0, 0, DateTimeKind.Utc),
            100, CancellationToken.None);

        reply.ContinuationPoint.ShouldBeNull();
        reply.Samples.Count.ShouldBe(2);
        reply.Samples[0].StatusCode.ShouldBe(0x00000000u); // Good
        reply.Samples[0].SourceTimestampUtc.ShouldBe(goodStamp);
        reply.Samples[1].StatusCode.ShouldBe(0x808A0000u); // Bad_NotConnected
    }

    [Fact]
    public async Task ReadProcessedAsync_NullBuckets_MapToBadNoData()
    {
        string pipeName = UniquePipeName();
        var bucketStart = new DateTime(2026, 4, 29, 0, 0, 0, DateTimeKind.Utc);

        await using var sidecar = new FakeSidecarServer(pipeName, Secret)
        {
            OnReadProcessed = _ => new ReadProcessedReply
            {
                Success = true,
                Buckets =
                [
                    new HistorianAggregateSampleDto { Value = 50.0, TimestampUtcTicks = bucketStart.Ticks },
                    // A null-valued bucket models "no data in this interval".
                    new HistorianAggregateSampleDto { Value = null, TimestampUtcTicks = new DateTime(2026, 4, 29, 0, 1, 0, DateTimeKind.Utc).Ticks },
                ],
            },
        };
        await sidecar.StartAsync();

        await using var sut = new WonderwareHistorianClient(OptsFor(pipeName));
        var reply = await sut.ReadProcessedAsync("Tank.Level",
            bucketStart,
            new DateTime(2026, 4, 29, 0, 2, 0, DateTimeKind.Utc),
            TimeSpan.FromMinutes(1), HistoryAggregateType.Average, CancellationToken.None);

        reply.Samples.Count.ShouldBe(2);
        reply.Samples[0].StatusCode.ShouldBe(0x00000000u); // Good
        reply.Samples[0].Value.ShouldBe(50.0);
        reply.Samples[1].StatusCode.ShouldBe(0x800E0000u); // BadNoData
        reply.Samples[1].Value.ShouldBeNull();
    }

    [Fact]
    public async Task ReadAtTimeAsync_PreservesTimestampOrder()
    {
        string pipeName = UniquePipeName();
        var first = new DateTime(2026, 4, 29, 1, 0, 0, DateTimeKind.Utc);
        var second = new DateTime(2026, 4, 29, 2, 0, 0, DateTimeKind.Utc);

        await using var sidecar = new FakeSidecarServer(pipeName, Secret)
        {
            // Echo one Good sample back per requested timestamp, in request order.
            OnReadAtTime = req => new ReadAtTimeReply
            {
                Success = true,
                Samples = req.TimestampsUtcTicks
                    .Select(ticks => new HistorianSampleDto { Quality = 192, TimestampUtcTicks = ticks })
                    .ToArray(),
            },
        };
        await sidecar.StartAsync();

        await using var sut = new WonderwareHistorianClient(OptsFor(pipeName));
        var reply = await sut.ReadAtTimeAsync("Tank.Level", new[] { first, second }, CancellationToken.None);

        reply.Samples.Count.ShouldBe(2);
        reply.Samples[0].SourceTimestampUtc.ShouldBe(first);
        reply.Samples[1].SourceTimestampUtc.ShouldBe(second);
    }

    [Fact]
    public async Task ReadEventsAsync_PreservesEventFields()
    {
        string pipeName = UniquePipeName();
        string eventId = Guid.NewGuid().ToString("N");

        await using var sidecar = new FakeSidecarServer(pipeName, Secret)
        {
            OnReadEvents = _ => new ReadEventsReply
            {
                Success = true,
                Events =
                [
                    new HistorianEventDto
                    {
                        EventId = eventId, Source = "Tank.HiHi",
                        EventTimeUtcTicks = new DateTime(2026, 4, 29, 1, 0, 0, DateTimeKind.Utc).Ticks,
                        ReceivedTimeUtcTicks = new DateTime(2026, 4, 29, 1, 0, 1, DateTimeKind.Utc).Ticks,
                        DisplayText = "Level high-high", Severity = 800,
                    },
                ],
            },
        };
        await sidecar.StartAsync();

        await using var sut = new WonderwareHistorianClient(OptsFor(pipeName));
        var reply = await sut.ReadEventsAsync("Tank.HiHi",
            new DateTime(2026, 4, 29, 0, 0, 0, DateTimeKind.Utc),
            new DateTime(2026, 4, 30, 0, 0, 0, DateTimeKind.Utc),
            100, CancellationToken.None);

        reply.Events.Count.ShouldBe(1);
        reply.Events[0].EventId.ShouldBe(eventId);
        reply.Events[0].SourceName.ShouldBe("Tank.HiHi");
        reply.Events[0].Message.ShouldBe("Level high-high");
        reply.Events[0].Severity.ShouldBe((ushort)800);
    }

    [Fact]
    public async Task ReadRawAsync_ServerError_ThrowsInvalidOperation()
    {
        string pipeName = UniquePipeName();
        await using var sidecar = new FakeSidecarServer(pipeName, Secret)
        {
            OnReadRaw = _ => new ReadRawReply { Success = false, Error = "historian unreachable" },
        };
        await sidecar.StartAsync();

        await using var sut = new WonderwareHistorianClient(OptsFor(pipeName));

        // A sidecar-level failure surfaces to the caller with the sidecar's error text.
        var ex = await Should.ThrowAsync<InvalidOperationException>(() =>
            sut.ReadRawAsync("Tag", DateTime.UtcNow, DateTime.UtcNow, 100, CancellationToken.None));
        ex.Message.ShouldContain("historian unreachable");
    }

    [Fact]
    public async Task WriteBatchAsync_PerEventOk_MapsToAckOrRetryPlease()
    {
        string pipeName = UniquePipeName();
        await using var sidecar = new FakeSidecarServer(pipeName, Secret)
        {
            // Reject exactly the event with id "ev-fail"; accept the rest.
            OnWriteAlarmEvents = req => new WriteAlarmEventsReply
            {
                Success = true,
                PerEventOk = req.Events.Select(e => e.EventId != "ev-fail").ToArray(),
            },
        };
        await sidecar.StartAsync();

        await using var sut = new WonderwareHistorianClient(OptsFor(pipeName));
        var batch = new[]
        {
            new AlarmHistorianEvent("ev-1", "Tank/HiHi", "HiHi", "LimitAlarm", AlarmSeverity.High, "Activated", "msg", "operator", null, DateTime.UtcNow),
            new AlarmHistorianEvent("ev-fail", "Tank/HiHi", "HiHi", "LimitAlarm", AlarmSeverity.High, "Acknowledged", "msg", "operator", null, DateTime.UtcNow),
        };

        var outcomes = await sut.WriteBatchAsync(batch, CancellationToken.None);

        outcomes.Count.ShouldBe(2);
        outcomes[0].ShouldBe(HistorianWriteOutcome.Ack);
        outcomes[1].ShouldBe(HistorianWriteOutcome.RetryPlease);
    }

    [Fact]
    public async Task WriteBatchAsync_WholeCallFailure_ReturnsRetryPleaseForEveryEvent()
    {
        string pipeName = UniquePipeName();
        await using var sidecar = new FakeSidecarServer(pipeName, Secret)
        {
            OnWriteAlarmEvents = _ => new WriteAlarmEventsReply
            {
                Success = false,
                Error = "historian event-store down",
                PerEventOk = new bool[2],
            },
        };
        await sidecar.StartAsync();

        await using var sut = new WonderwareHistorianClient(OptsFor(pipeName));
        var batch = new[]
        {
            new AlarmHistorianEvent("ev-1", "Tank/HiHi", "HiHi", "LimitAlarm", AlarmSeverity.High, "Activated", "msg", "u", null, DateTime.UtcNow),
            new AlarmHistorianEvent("ev-2", "Tank/HiHi", "HiHi", "LimitAlarm", AlarmSeverity.High, "Cleared", "msg", "u", null, DateTime.UtcNow),
        };

        // Whole-call failure must NOT throw: the drain worker expects RetryPlease per event.
        var outcomes = await sut.WriteBatchAsync(batch, CancellationToken.None);

        outcomes.Count.ShouldBe(2);
        outcomes[0].ShouldBe(HistorianWriteOutcome.RetryPlease);
        outcomes[1].ShouldBe(HistorianWriteOutcome.RetryPlease);
    }

    [Fact]
    public async Task Hello_BadSecret_ThrowsUnauthorizedAccess()
    {
        string pipeName = UniquePipeName();
        await using var sidecar = new FakeSidecarServer(pipeName, "different-secret");
        await sidecar.StartAsync();

        await using var sut = new WonderwareHistorianClient(OptsFor(pipeName));

        var ex = await Should.ThrowAsync<UnauthorizedAccessException>(() =>
            sut.ReadRawAsync("Tag", DateTime.UtcNow, DateTime.UtcNow, 100, CancellationToken.None));
        ex.Message.ShouldContain("shared-secret-mismatch");
    }

    [Fact]
    public async Task Reconnect_AfterTransportDrop_RetriesOnce()
    {
        string pipeName = UniquePipeName();
        await using var sidecar = new FakeSidecarServer(pipeName, Secret)
        {
            // First connection drops right after the handshake → client retries on next call.
            DisconnectAfterHandshake = true,
            OnReadRaw = req => new ReadRawReply
            {
                Success = true,
                Samples = [new HistorianSampleDto { Quality = 192, TimestampUtcTicks = req.StartUtcTicks }],
            },
        };
        await sidecar.StartAsync();

        await using var sut = new WonderwareHistorianClient(OptsFor(pipeName));

        // First call: handshake then drop. The channel reconnects internally and the second
        // attempt within the same InvokeAsync succeeds — the caller observes a single
        // ReadRawAsync that returns one sample.
        var reply = await sut.ReadRawAsync("Tag",
            new DateTime(2026, 4, 29, 0, 0, 0, DateTimeKind.Utc),
            new DateTime(2026, 4, 30, 0, 0, 0, DateTimeKind.Utc),
            100, CancellationToken.None);

        reply.Samples.Count.ShouldBe(1);
    }

    [Fact]
    public async Task GetHealthSnapshot_TracksSuccessAndFailureCounts()
    {
        string pipeName = UniquePipeName();
        bool shouldFail = false;
        await using var sidecar = new FakeSidecarServer(pipeName, Secret)
        {
            OnReadRaw = _ => shouldFail
                ? new ReadRawReply { Success = false, Error = "boom" }
                : new ReadRawReply { Success = true },
        };
        await sidecar.StartAsync();

        await using var sut = new WonderwareHistorianClient(OptsFor(pipeName));

        // One successful call...
        await sut.ReadRawAsync("Tag", DateTime.UtcNow, DateTime.UtcNow, 1, CancellationToken.None);

        // ...then one sidecar-reported (operation-level) failure.
        shouldFail = true;
        await Should.ThrowAsync<InvalidOperationException>(() =>
            sut.ReadRawAsync("Tag", DateTime.UtcNow, DateTime.UtcNow, 1, CancellationToken.None));

        var snapshot = sut.GetHealthSnapshot();
        snapshot.TotalQueries.ShouldBe(2);
        snapshot.TotalSuccesses.ShouldBe(1);
        snapshot.TotalFailures.ShouldBe(1);
        snapshot.ConsecutiveFailures.ShouldBe(1);
        snapshot.LastError.ShouldNotBeNull();
        // Operation-level failure must not be mistaken for a transport drop.
        snapshot.ProcessConnectionOpen.ShouldBeTrue();
    }
}
|