using CliFx;
using CliFx.Attributes;
using CliFx.Exceptions;
using CliFx.Infrastructure;
using Opc.Ua;
using Opc.Ua.Client;

namespace OpcUaCli.Commands;

/// <summary>
/// Reads historical data from an OPC UA node, either as raw values
/// (paged via continuation points) or as server-computed aggregates.
/// </summary>
[Command("historyread", Description = "Read historical data from a node")]
public class HistoryReadCommand : ICommand
{
    [CommandOption("url", 'u', Description = "OPC UA server endpoint URL", IsRequired = true)]
    public string Url { get; init; } = default!;

    [CommandOption("node", 'n', Description = "Node ID (e.g. ns=1;s=TestMachine_001.TestHistoryValue)", IsRequired = true)]
    public string NodeId { get; init; } = default!;

    [CommandOption("start", Description = "Start time (ISO 8601 or date string, default: 24 hours ago)")]
    public string? StartTime { get; init; }

    [CommandOption("end", Description = "End time (ISO 8601 or date string, default: now)")]
    public string? EndTime { get; init; }

    [CommandOption("max", Description = "Maximum number of values to return")]
    public int MaxValues { get; init; } = 1000;

    // Description now lists Start/End, which MapAggregateName supports and the
    // unknown-aggregate error message already advertises.
    [CommandOption("aggregate", Description = "Aggregate function: Average, Minimum, Maximum, Count, Start, End")]
    public string? Aggregate { get; init; }

    [CommandOption("interval", Description = "Processing interval in milliseconds for aggregates")]
    public double IntervalMs { get; init; } = 3600000;

    /// <summary>
    /// Connects to the server, resolves the time window, and dispatches to a
    /// raw or processed (aggregate) history read.
    /// </summary>
    public async ValueTask ExecuteAsync(IConsole console)
    {
        // (uint)MaxValues below would silently wrap for negative values;
        // reject non-positive counts up front with a friendly CLI error.
        if (MaxValues <= 0)
            throw new CommandException($"--max must be a positive number (got {MaxValues}).");

        using var session = await OpcUaHelper.ConnectAsync(Url);
        var nodeId = new NodeId(NodeId);

        var start = ParseTimeUtc(StartTime, DateTime.UtcNow.AddHours(-24), "--start");
        var end = ParseTimeUtc(EndTime, DateTime.UtcNow, "--end");

        if (string.IsNullOrEmpty(Aggregate))
        {
            await ReadRawAsync(session, console, nodeId, start, end);
        }
        else
        {
            await ReadProcessedAsync(session, console, nodeId, start, end);
        }
    }

    /// <summary>
    /// Parses a user-supplied time string to UTC, or returns <paramref name="fallback"/>
    /// when the option was omitted. Converts a bad format into a CliFx
    /// <see cref="CommandException"/> instead of surfacing a raw FormatException.
    /// </summary>
    private static DateTime ParseTimeUtc(string? text, DateTime fallback, string optionName)
    {
        if (string.IsNullOrEmpty(text))
            return fallback;

        try
        {
            return DateTime.Parse(text).ToUniversalTime();
        }
        catch (FormatException)
        {
            throw new CommandException($"Invalid {optionName} value: '{text}'. Use ISO 8601 or a parseable date string.");
        }
    }

    /// <summary>
    /// Reads raw history values in the window, paging with continuation points
    /// until the server is exhausted or <see cref="MaxValues"/> is reached.
    /// </summary>
    private async Task ReadRawAsync(Session session, IConsole console, NodeId nodeId, DateTime start, DateTime end)
    {
        var details = new ReadRawModifiedDetails
        {
            StartTime = start,
            EndTime = end,
            NumValuesPerNode = (uint)MaxValues,
            IsReadModified = false,
            ReturnBounds = false
        };
        var nodesToRead = new HistoryReadValueIdCollection
        {
            new HistoryReadValueId { NodeId = nodeId }
        };

        await console.Output.WriteLineAsync(
            $"History for {NodeId} ({start:yyyy-MM-dd HH:mm} → {end:yyyy-MM-dd HH:mm})");
        await WriteTableHeaderAsync(console);

        int totalValues = 0;
        byte[]? continuationPoint = null;
        do
        {
            if (continuationPoint != null)
                nodesToRead[0].ContinuationPoint = continuationPoint;

            // BUG FIX: the 4th argument is releaseContinuationPoints. The original
            // passed `continuationPoint != null`, which told the server to FREE the
            // continuation point and return no data on every follow-up call, so
            // paging always stopped after the first page. It must be false while
            // paging; true is only used to abandon a read early (see below).
            session.HistoryRead(
                null,
                new ExtensionObject(details),
                TimestampsToReturn.Source,
                false,
                nodesToRead,
                out var results,
                out _);

            if (results == null || results.Count == 0)
                break;

            var result = results[0];
            if (StatusCode.IsBad(result.StatusCode))
            {
                await console.Error.WriteLineAsync($"HistoryRead failed: {result.StatusCode}");
                break;
            }
            if (result.HistoryData == null)
            {
                await console.Error.WriteLineAsync($"No history data returned (status: {result.StatusCode})");
                break;
            }

            if (result.HistoryData is ExtensionObject ext && ext.Body is HistoryData historyData)
            {
                foreach (var dv in historyData.DataValues)
                {
                    await WriteValueRowAsync(console, dv);
                    totalValues++;
                }
            }

            continuationPoint = result.ContinuationPoint;
        } while (continuationPoint != null && continuationPoint.Length > 0 && totalValues < MaxValues);

        // If we stopped early (MaxValues reached) with an open continuation point,
        // release it so the server can free its per-session resources.
        if (continuationPoint != null && continuationPoint.Length > 0)
        {
            nodesToRead[0].ContinuationPoint = continuationPoint;
            session.HistoryRead(
                null,
                new ExtensionObject(details),
                TimestampsToReturn.Source,
                true, // releaseContinuationPoints: discard, no data requested
                nodesToRead,
                out _,
                out _);
        }

        await console.Output.WriteLineAsync();
        await console.Output.WriteLineAsync($"{totalValues} values returned.");
    }

    /// <summary>
    /// Reads server-computed aggregate values (Average, Minimum, …) over the
    /// window at the configured processing interval.
    /// </summary>
    private async Task ReadProcessedAsync(Session session, IConsole console, NodeId nodeId, DateTime start, DateTime end)
    {
        var aggregateId = MapAggregateName(Aggregate!);
        if (aggregateId == null)
        {
            await console.Error.WriteLineAsync($"Unknown aggregate: {Aggregate}. Supported: Average, Minimum, Maximum, Count, Start, End");
            return;
        }

        var details = new ReadProcessedDetails
        {
            StartTime = start,
            EndTime = end,
            ProcessingInterval = IntervalMs,
            AggregateType = new NodeIdCollection { aggregateId }
        };
        var nodesToRead = new HistoryReadValueIdCollection
        {
            new HistoryReadValueId { NodeId = nodeId }
        };

        session.HistoryRead(
            null,
            new ExtensionObject(details),
            TimestampsToReturn.Source,
            false,
            nodesToRead,
            out var results,
            out _);

        await console.Output.WriteLineAsync(
            $"History for {NodeId} ({Aggregate}, interval={IntervalMs}ms)");
        await WriteTableHeaderAsync(console);

        int totalValues = 0;
        if (results != null && results.Count > 0)
        {
            var result = results[0];
            if (StatusCode.IsBad(result.StatusCode))
            {
                await console.Error.WriteLineAsync($"HistoryRead failed: {result.StatusCode}");
                return;
            }

            if (result.HistoryData is ExtensionObject ext && ext.Body is HistoryData historyData)
            {
                foreach (var dv in historyData.DataValues)
                {
                    await WriteValueRowAsync(console, dv);
                    totalValues++;
                }
            }
        }

        await console.Output.WriteLineAsync();
        await console.Output.WriteLineAsync($"{totalValues} values returned.");
    }

    // Blank line + column headers shared by the raw and processed outputs.
    private static async Task WriteTableHeaderAsync(IConsole console)
    {
        await console.Output.WriteLineAsync();
        await console.Output.WriteLineAsync($"{"Timestamp",-35} {"Value",-15} {"Status"}");
    }

    // One formatted row per DataValue; shared by the raw and processed outputs.
    private static async Task WriteValueRowAsync(IConsole console, DataValue dv)
    {
        var status = StatusCode.IsGood(dv.StatusCode) ? "Good"
            : StatusCode.IsBad(dv.StatusCode) ? "Bad"
            : "Uncertain";
        await console.Output.WriteLineAsync(
            $"{dv.SourceTimestamp.ToString("O"),-35} {dv.Value,-15} {status}");
    }

    /// <summary>
    /// Maps a case-insensitive aggregate name (with common aliases) to its
    /// well-known OPC UA AggregateFunction node, or null if unrecognized.
    /// </summary>
    private static NodeId? MapAggregateName(string name)
    {
        return name.ToLowerInvariant() switch
        {
            "average" => ObjectIds.AggregateFunction_Average,
            "minimum" or "min" => ObjectIds.AggregateFunction_Minimum,
            "maximum" or "max" => ObjectIds.AggregateFunction_Maximum,
            "count" => ObjectIds.AggregateFunction_Count,
            "start" or "first" => ObjectIds.AggregateFunction_Start,
            "end" or "last" => ObjectIds.AggregateFunction_End,
            _ => null
        };
    }
}