LmxProxy is no longer needed. Moved the entire lmxproxy/ workspace, DCL adapter files, and related docs to deprecated/. Removed LmxProxy registration from DataConnectionFactory, project reference from DCL, protocol option from UI, and cleaned up all requirement docs.
194 lines
7.5 KiB
C#
194 lines
7.5 KiB
C#
using System;
|
|
using System.Threading.Tasks;
|
|
using Polly;
|
|
using Polly.Timeout;
|
|
using Serilog;
|
|
|
|
namespace ZB.MOM.WW.LmxProxy.Host.Services
|
|
{
|
|
/// <summary>
/// Provides Polly-based retry, circuit-breaker, timeout, and bulkhead policies
/// for resilient read, write, and connection operations.
/// </summary>
public static class RetryPolicies
{
    private static readonly ILogger Logger = Log.ForContext(typeof(RetryPolicies));

    /// <summary>
    /// Creates a retry policy with exponential backoff (1s, 2s, 4s) for read operations.
    /// <see cref="ArgumentException"/> and <see cref="InvalidOperationException"/> are treated
    /// as non-transient and are not retried.
    /// </summary>
    public static IAsyncPolicy<T> CreateReadPolicy<T>()
    {
        return Policy<T>
            .Handle<Exception>(ex => !(ex is ArgumentException || ex is InvalidOperationException))
            .WaitAndRetryAsync(
                3,
                // Backoff: 2^(attempt-1) seconds => 1s, 2s, 4s.
                retryAttempt => TimeSpan.FromSeconds(Math.Pow(2, retryAttempt - 1)),
                (outcome, timespan, retryCount, context) =>
                {
                    // Only exceptions are handled above, so Exception is expected to be
                    // populated; declared nullable to match DelegateResult<T>.
                    Exception? exception = outcome.Exception;
                    Logger.Warning(exception,
                        "Read operation retry {RetryCount} after {DelayMs}ms. Operation: {Operation}",
                        retryCount,
                        timespan.TotalMilliseconds,
                        context.ContainsKey("Operation") ? context["Operation"] : "Unknown");
                });
    }

    /// <summary>
    /// Creates a retry policy with exponential backoff (2s, 4s, 8s) for write operations.
    /// <see cref="ArgumentException"/> and <see cref="InvalidOperationException"/> are treated
    /// as non-transient and are not retried.
    /// </summary>
    public static IAsyncPolicy CreateWritePolicy()
    {
        return Policy
            .Handle<Exception>(ex => !(ex is ArgumentException || ex is InvalidOperationException))
            .WaitAndRetryAsync(
                3,
                // Backoff: 2^attempt seconds => 2s, 4s, 8s (longer than reads, since
                // writes are more expensive to re-issue).
                retryAttempt => TimeSpan.FromSeconds(Math.Pow(2, retryAttempt)),
                (exception, timespan, retryCount, context) =>
                {
                    Logger.Warning(exception,
                        "Write operation retry {RetryCount} after {DelayMs}ms. Operation: {Operation}",
                        retryCount,
                        timespan.TotalMilliseconds,
                        context.ContainsKey("Operation") ? context["Operation"] : "Unknown");
                });
    }

    /// <summary>
    /// Creates a retry policy for connection operations: 5 attempts with longer,
    /// capped exponential delays (2s, 4s, 8s, 16s, 32s). All exceptions are retried.
    /// </summary>
    public static IAsyncPolicy CreateConnectionPolicy()
    {
        return Policy
            .Handle<Exception>()
            .WaitAndRetryAsync(
                5,
                retryAttempt =>
                {
                    // 2s, 4s, 8s, 16s, 32s; Math.Min guards the cap if the attempt
                    // count is ever raised above 5.
                    var delay = TimeSpan.FromSeconds(Math.Min(32, Math.Pow(2, retryAttempt)));
                    return delay;
                },
                (exception, timespan, retryCount, context) =>
                {
                    Logger.Warning(exception,
                        "Connection retry {RetryCount} after {DelayMs}ms",
                        retryCount,
                        timespan.TotalMilliseconds);
                });
    }

    /// <summary>
    /// Creates a circuit breaker that opens for 30 seconds after 5 consecutive
    /// handled failures, protecting downstream resources from repeated load.
    /// </summary>
    public static IAsyncPolicy<T> CreateCircuitBreakerPolicy<T>()
    {
        return Policy<T>
            .Handle<Exception>()
            .CircuitBreakerAsync(
                5,
                TimeSpan.FromSeconds(30),
                (result, timespan) =>
                {
                    Logger.Error(result.Exception,
                        "Circuit breaker opened for {BreakDurationSeconds}s due to repeated failures",
                        timespan.TotalSeconds);
                },
                () => { Logger.Information("Circuit breaker reset - resuming normal operations"); },
                () => { Logger.Information("Circuit breaker half-open - testing operation"); });
    }

    /// <summary>
    /// Creates a combined policy: read retry wrapped around a circuit breaker.
    /// </summary>
    public static IAsyncPolicy<T> CreateCombinedPolicy<T>()
    {
        IAsyncPolicy<T> retry = CreateReadPolicy<T>();
        IAsyncPolicy<T> circuitBreaker = CreateCircuitBreakerPolicy<T>();

        // Retry is the OUTER policy, circuit breaker the INNER one: every individual
        // failed attempt passes through the breaker and counts toward its failure
        // threshold. Once the breaker opens, remaining retries fail fast with
        // BrokenCircuitException instead of hammering the downstream resource.
        return Policy.WrapAsync(retry, circuitBreaker);
    }

    /// <summary>
    /// Creates a pessimistic timeout policy for operations that do not honor
    /// cancellation. The timed-out delegate keeps running in the background; its
    /// eventual fault is observed (and logged) so it cannot surface as an
    /// unobserved task exception.
    /// </summary>
    public static IAsyncPolicy CreateTimeoutPolicy(TimeSpan timeout)
    {
        return Policy
            .TimeoutAsync(
                timeout,
                TimeoutStrategy.Pessimistic,
                (context, timespan, task) =>
                {
                    Logger.Warning(
                        "Operation timed out after {TimeoutMs}ms. Operation: {Operation}",
                        timespan.TotalMilliseconds,
                        context.ContainsKey("Operation") ? context["Operation"] : "Unknown");

                    // BUGFIX: do NOT await the abandoned task here. Polly awaits this
                    // callback before throwing TimeoutRejectedException, so awaiting the
                    // still-running delegate would block the timeout path until the slow
                    // operation finished - defeating the timeout entirely. Instead,
                    // attach a fire-and-forget continuation that observes (and logs) a
                    // later fault, then return immediately.
                    task?.ContinueWith(
                        t => Logger.Debug(t.Exception,
                            "Previously timed-out operation completed with a fault"),
                        TaskContinuationOptions.OnlyOnFaulted);

                    return Task.CompletedTask;
                });
    }

    /// <summary>
    /// Creates a bulkhead policy limiting concurrent executions to
    /// <paramref name="maxParallelization"/>, with up to
    /// <paramref name="maxQueuingActions"/> operations queued; further operations
    /// are rejected (logged via the rejection callback).
    /// </summary>
    public static IAsyncPolicy CreateBulkheadPolicy(int maxParallelization, int maxQueuingActions = 100)
    {
        return Policy
            .BulkheadAsync(
                maxParallelization,
                maxQueuingActions,
                context =>
                {
                    Logger.Warning(
                        "Bulkhead rejected operation. Max parallelization: {MaxParallel}, Queue: {MaxQueue}",
                        maxParallelization,
                        maxQueuingActions);
                    return Task.CompletedTask;
                });
    }
}
|
|
|
|
/// <summary>
/// Convenience extensions for executing delegates through a policy while tagging
/// the Polly <see cref="Context"/> with an operation name for log correlation.
/// </summary>
public static class RetryPolicyExtensions
{
    /// <summary>
    /// Executes <paramref name="operation"/> through <paramref name="policy"/>,
    /// recording <paramref name="operationName"/> under the "Operation" context key.
    /// </summary>
    public static async Task<T> ExecuteWithRetryAsync<T>(
        this IAsyncPolicy<T> policy,
        Func<Task<T>> operation,
        string operationName)
    {
        var executionContext = new Context();
        executionContext["Operation"] = operationName;
        return await policy.ExecuteAsync(_ => operation(), executionContext);
    }

    /// <summary>
    /// Executes <paramref name="operation"/> through <paramref name="policy"/>,
    /// recording <paramref name="operationName"/> under the "Operation" context key.
    /// </summary>
    public static async Task ExecuteWithRetryAsync(
        this IAsyncPolicy policy,
        Func<Task> operation,
        string operationName)
    {
        var executionContext = new Context();
        executionContext["Operation"] = operationName;
        await policy.ExecuteAsync(_ => operation(), executionContext);
    }
}
|
|
}
|