Implement in-process multi-dataset sync isolation across core, network, persistence, and tests
All checks were successful
NuGet Package Publish / nuget (push) Successful in 1m14s

This commit is contained in:
Joseph Doherty
2026-02-22 11:58:34 -05:00
parent c06b56172a
commit 8e97061ab8
60 changed files with 4519 additions and 559 deletions

View File

@@ -112,6 +112,7 @@ public class ConsoleInteractiveService : BackgroundService
System.Console.WriteLine("Commands:");
System.Console.WriteLine(" [p]ut, [g]et, [d]elete, [f]ind, [l]ist peers, [q]uit");
System.Console.WriteLine(" [n]ew (auto), [s]pam (5x), [c]ount, [t]odos");
System.Console.WriteLine(" log [count], ts [count] (append telemetry load)");
System.Console.WriteLine(" [h]ealth, cac[h]e");
System.Console.WriteLine(" [r]esolver [lww|merge], [demo] conflict");
}
@@ -156,8 +157,12 @@ public class ConsoleInteractiveService : BackgroundService
{
int userCount = _db.Users.FindAll().Count();
int todoCount = _db.TodoLists.FindAll().Count();
int logCount = _db.Logs.FindAll().Count();
int timeseriesCount = _db.Timeseries.FindAll().Count();
System.Console.WriteLine($"Collection 'Users': {userCount} documents");
System.Console.WriteLine($"Collection 'TodoLists': {todoCount} documents");
System.Console.WriteLine($"Collection 'Logs': {logCount} documents");
System.Console.WriteLine($"Collection 'Timeseries': {timeseriesCount} documents");
}
else if (input.StartsWith("p"))
{
@@ -212,6 +217,42 @@ public class ConsoleInteractiveService : BackgroundService
var results = _db.Users.Find(u => u.Age > 28);
foreach (var u in results) System.Console.WriteLine($"Found: {u.Name} ({u.Age})");
}
else if (input.StartsWith("log", StringComparison.OrdinalIgnoreCase))
{
int count = ParseCount(input, 100);
for (var i = 0; i < count; i++)
{
var entry = new TelemetryLogEntry
{
Id = Guid.NewGuid().ToString("N"),
Level = i % 25 == 0 ? "Warning" : "Information",
Message = $"sample-log-{DateTimeOffset.UtcNow:O}-{i}",
CreatedUtc = DateTime.UtcNow
};
await _db.Logs.InsertAsync(entry);
}
await _db.SaveChangesAsync();
System.Console.WriteLine($"Appended {count} log entries.");
}
else if (input.StartsWith("ts", StringComparison.OrdinalIgnoreCase))
{
int count = ParseCount(input, 100);
for (var i = 0; i < count; i++)
{
var point = new TimeseriesPoint
{
Id = Guid.NewGuid().ToString("N"),
Metric = i % 2 == 0 ? "cpu" : "latency",
Value = Random.Shared.NextDouble() * 100,
RecordedUtc = DateTime.UtcNow
};
await _db.Timeseries.InsertAsync(point);
}
await _db.SaveChangesAsync();
System.Console.WriteLine($"Appended {count} timeseries points.");
}
else if (input.StartsWith("h"))
{
var health = await _healthCheck.CheckAsync();
@@ -283,6 +324,13 @@ public class ConsoleInteractiveService : BackgroundService
}
}
/// <summary>
/// Extracts the optional count argument from a console command such as "log 250" or "ts 50".
/// </summary>
/// <param name="input">The raw console input line, command token first.</param>
/// <param name="fallback">The value returned when no valid positive count is supplied.</param>
/// <returns>The parsed positive count, or <paramref name="fallback"/> when the argument is absent, non-numeric, or not positive.</returns>
private static int ParseCount(string input, int fallback)
{
    string[] parts = input.Split(' ', StringSplitOptions.RemoveEmptyEntries);
    if (parts.Length < 2) return fallback;
    // CA1305: parse the machine-entered count with the invariant culture so the
    // result does not vary with the host's regional number-format settings.
    return int.TryParse(parts[1], System.Globalization.NumberStyles.Integer,
            System.Globalization.CultureInfo.InvariantCulture, out int parsed) && parsed > 0
        ? parsed
        : fallback;
}
private async Task RunConflictDemo()
{
System.Console.WriteLine("\n=== Conflict Resolution Demo ===");
@@ -355,4 +403,4 @@ public class ConsoleInteractiveService : BackgroundService
System.Console.WriteLine("\n✓ Demo complete. Run 'todos' to see all lists.\n");
}
}
}

View File

@@ -4,6 +4,7 @@ using Microsoft.Extensions.Hosting;
using Microsoft.Extensions.Logging;
using Serilog;
using System.Text.Json;
using ZB.MOM.WW.CBDDC.Core;
using ZB.MOM.WW.CBDDC.Core.Network;
using ZB.MOM.WW.CBDDC.Core.Storage;
using ZB.MOM.WW.CBDDC.Core.Sync;
@@ -62,11 +63,22 @@ internal class Program
Directory.CreateDirectory(dataPath);
string databasePath = Path.Combine(dataPath, $"{nodeId}.rocksdb");
string surrealDatabase = nodeId.Replace("-", "_", StringComparison.Ordinal);
var multiDatasetOptions = builder.Configuration
.GetSection("CBDDC:MultiDataset")
.Get<MultiDatasetRuntimeOptions>()
?? new MultiDatasetRuntimeOptions
{
EnableMultiDatasetSync = true,
EnableDatasetPrimary = true,
EnableDatasetLogs = true,
EnableDatasetTimeseries = true
};
// Register CBDDC services with embedded Surreal (RocksDB).
builder.Services.AddSingleton<ICBDDCSurrealSchemaInitializer, SampleSurrealSchemaInitializer>();
builder.Services.AddSingleton<SampleDbContext>();
builder.Services.AddCBDDCCore()
builder.Services
.AddCBDDCCore()
.AddCBDDCSurrealEmbedded<SampleDocumentStore>(_ => new CBDDCSurrealEmbeddedOptions
{
Endpoint = "rocksdb://local",
@@ -74,8 +86,30 @@ internal class Program
Namespace = "cbddc_sample",
Database = surrealDatabase
})
.AddCBDDCSurrealEmbeddedDataset(DatasetId.Primary, options =>
{
options.InterestingCollections = ["Users", "TodoLists"];
})
.AddCBDDCSurrealEmbeddedDataset(DatasetId.Logs, options =>
{
options.InterestingCollections = ["Logs"];
})
.AddCBDDCSurrealEmbeddedDataset(DatasetId.Timeseries, options =>
{
options.InterestingCollections = ["Timeseries"];
})
.AddCBDDCNetwork<StaticPeerNodeConfigurationProvider>(); // useHostedService = true by default
if (multiDatasetOptions.EnableMultiDatasetSync)
builder.Services.AddCBDDCMultiDataset(options =>
{
options.EnableMultiDatasetSync = multiDatasetOptions.EnableMultiDatasetSync;
options.EnableDatasetPrimary = multiDatasetOptions.EnableDatasetPrimary;
options.EnableDatasetLogs = multiDatasetOptions.EnableDatasetLogs;
options.EnableDatasetTimeseries = multiDatasetOptions.EnableDatasetTimeseries;
options.AdditionalDatasets = multiDatasetOptions.AdditionalDatasets.ToList();
});
builder.Services.AddHostedService<ConsoleInteractiveService>(); // Runs the Input Loop
var host = builder.Build();

View File

@@ -11,6 +11,8 @@ public class SampleDbContext : IDisposable
{
private const string UsersTable = "sample_users";
private const string TodoListsTable = "sample_todo_lists";
private const string LogsTable = "sample_logs";
private const string TimeseriesTable = "sample_timeseries";
private readonly bool _ownsClient;
@@ -28,6 +30,8 @@ public class SampleDbContext : IDisposable
Users = new SampleSurrealCollection<User>(UsersTable, u => u.Id, SurrealEmbeddedClient, SchemaInitializer);
TodoLists = new SampleSurrealCollection<TodoList>(TodoListsTable, t => t.Id, SurrealEmbeddedClient, SchemaInitializer);
Logs = new SampleSurrealCollection<TelemetryLogEntry>(LogsTable, e => e.Id, SurrealEmbeddedClient, SchemaInitializer);
Timeseries = new SampleSurrealCollection<TimeseriesPoint>(TimeseriesTable, p => p.Id, SurrealEmbeddedClient, SchemaInitializer);
OplogEntries = new SampleSurrealReadOnlyCollection<SampleOplogEntry>(
CBDDCSurrealSchemaNames.OplogEntriesTable,
SurrealEmbeddedClient,
@@ -57,6 +61,8 @@ public class SampleDbContext : IDisposable
Users = new SampleSurrealCollection<User>(UsersTable, u => u.Id, SurrealEmbeddedClient, SchemaInitializer);
TodoLists = new SampleSurrealCollection<TodoList>(TodoListsTable, t => t.Id, SurrealEmbeddedClient, SchemaInitializer);
Logs = new SampleSurrealCollection<TelemetryLogEntry>(LogsTable, e => e.Id, SurrealEmbeddedClient, SchemaInitializer);
Timeseries = new SampleSurrealCollection<TimeseriesPoint>(TimeseriesTable, p => p.Id, SurrealEmbeddedClient, SchemaInitializer);
OplogEntries = new SampleSurrealReadOnlyCollection<SampleOplogEntry>(
CBDDCSurrealSchemaNames.OplogEntriesTable,
SurrealEmbeddedClient,
@@ -88,6 +94,16 @@ public class SampleDbContext : IDisposable
/// </summary>
public SampleSurrealReadOnlyCollection<SampleOplogEntry> OplogEntries { get; private set; }
/// <summary>
/// Gets the append-only telemetry logs collection.
/// </summary>
public SampleSurrealCollection<TelemetryLogEntry> Logs { get; private set; }
/// <summary>
/// Gets the append-only timeseries collection.
/// </summary>
public SampleSurrealCollection<TimeseriesPoint> Timeseries { get; private set; }
/// <summary>
/// Ensures schema changes are applied before persisting updates.
/// </summary>
@@ -102,6 +118,8 @@ public class SampleDbContext : IDisposable
{
Users.Dispose();
TodoLists.Dispose();
Logs.Dispose();
Timeseries.Dispose();
if (_ownsClient) SurrealEmbeddedClient.Dispose();
}
@@ -126,6 +144,8 @@ public sealed class SampleSurrealSchemaInitializer : ICBDDCSurrealSchemaInitiali
private const string SampleSchemaSql = """
DEFINE TABLE OVERWRITE sample_users SCHEMALESS CHANGEFEED 7d;
DEFINE TABLE OVERWRITE sample_todo_lists SCHEMALESS CHANGEFEED 7d;
DEFINE TABLE OVERWRITE sample_logs SCHEMALESS CHANGEFEED 7d;
DEFINE TABLE OVERWRITE sample_timeseries SCHEMALESS CHANGEFEED 7d;
""";
private readonly ICBDDCSurrealEmbeddedClient _client;
private int _initialized;

View File

@@ -14,6 +14,8 @@ public class SampleDocumentStore : SurrealDocumentStore<SampleDbContext>
{
private const string UsersCollection = "Users";
private const string TodoListsCollection = "TodoLists";
private const string LogsCollection = "Logs";
private const string TimeseriesCollection = "Timeseries";
/// <summary>
/// Initializes a new instance of the <see cref="SampleDocumentStore"/> class.
@@ -40,6 +42,8 @@ public class SampleDocumentStore : SurrealDocumentStore<SampleDbContext>
{
WatchCollection(UsersCollection, context.Users, u => u.Id);
WatchCollection(TodoListsCollection, context.TodoLists, t => t.Id);
WatchCollection(LogsCollection, context.Logs, entry => entry.Id);
WatchCollection(TimeseriesCollection, context.Timeseries, point => point.Id);
}
/// <inheritdoc />
@@ -71,6 +75,8 @@ public class SampleDocumentStore : SurrealDocumentStore<SampleDbContext>
{
UsersCollection => SerializeEntity(await _context.Users.FindByIdAsync(key, cancellationToken)),
TodoListsCollection => SerializeEntity(await _context.TodoLists.FindByIdAsync(key, cancellationToken)),
LogsCollection => SerializeEntity(await _context.Logs.FindByIdAsync(key, cancellationToken)),
TimeseriesCollection => SerializeEntity(await _context.Timeseries.FindByIdAsync(key, cancellationToken)),
_ => null
};
}
@@ -106,6 +112,12 @@ public class SampleDocumentStore : SurrealDocumentStore<SampleDbContext>
TodoListsCollection => (await _context.TodoLists.FindAllAsync(cancellationToken))
.Select(t => (t.Id, SerializeEntity(t)!.Value))
.ToList(),
LogsCollection => (await _context.Logs.FindAllAsync(cancellationToken))
.Select(entry => (entry.Id, SerializeEntity(entry)!.Value))
.ToList(),
TimeseriesCollection => (await _context.Timeseries.FindAllAsync(cancellationToken))
.Select(point => (point.Id, SerializeEntity(point)!.Value))
.ToList(),
_ => []
};
}
@@ -137,6 +149,26 @@ public class SampleDocumentStore : SurrealDocumentStore<SampleDbContext>
await _context.TodoLists.UpdateAsync(todo, cancellationToken);
break;
case LogsCollection:
var logEntry = content.Deserialize<TelemetryLogEntry>() ??
throw new InvalidOperationException("Failed to deserialize telemetry log.");
logEntry.Id = key;
if (await _context.Logs.FindByIdAsync(key, cancellationToken) == null)
await _context.Logs.InsertAsync(logEntry, cancellationToken);
else
await _context.Logs.UpdateAsync(logEntry, cancellationToken);
break;
case TimeseriesCollection:
var point = content.Deserialize<TimeseriesPoint>() ??
throw new InvalidOperationException("Failed to deserialize timeseries point.");
point.Id = key;
if (await _context.Timeseries.FindByIdAsync(key, cancellationToken) == null)
await _context.Timeseries.InsertAsync(point, cancellationToken);
else
await _context.Timeseries.UpdateAsync(point, cancellationToken);
break;
default:
throw new NotSupportedException($"Collection '{collection}' is not supported for sync.");
}
@@ -152,6 +184,12 @@ public class SampleDocumentStore : SurrealDocumentStore<SampleDbContext>
case TodoListsCollection:
await _context.TodoLists.DeleteAsync(key, cancellationToken);
break;
case LogsCollection:
await _context.Logs.DeleteAsync(key, cancellationToken);
break;
case TimeseriesCollection:
await _context.Timeseries.DeleteAsync(key, cancellationToken);
break;
default:
_logger.LogWarning("Attempted to remove entity from unsupported collection: {Collection}", collection);
break;

View File

@@ -0,0 +1,57 @@
using System.ComponentModel.DataAnnotations;
namespace ZB.MOM.WW.CBDDC.Sample.Console;
/// <summary>
/// Append-only telemetry log entry used for high-volume sync scenarios.
/// </summary>
/// <summary>
/// Append-only telemetry log record used to drive high-volume sync scenarios.
/// </summary>
public class TelemetryLogEntry
{
    /// <summary>
    /// Gets or sets the unique identifier; defaults to a 32-character hex ("N" format) GUID.
    /// </summary>
    [Key]
    public string Id { get; set; } = Guid.NewGuid().ToString("N");

    /// <summary>
    /// Gets or sets the severity level; defaults to "Information".
    /// </summary>
    public string Level { get; set; } = "Information";

    /// <summary>
    /// Gets or sets the human-readable log text; empty by default.
    /// </summary>
    public string Message { get; set; } = string.Empty;

    /// <summary>
    /// Gets or sets the creation timestamp, captured in UTC.
    /// </summary>
    public DateTime CreatedUtc { get; set; } = DateTime.UtcNow;
}
/// <summary>
/// Append-only timeseries metric point used for telemetry sync scenarios.
/// </summary>
/// <summary>
/// Append-only metric sample used to drive telemetry timeseries sync scenarios.
/// </summary>
public class TimeseriesPoint
{
    /// <summary>
    /// Gets or sets the unique identifier; defaults to a 32-character hex ("N" format) GUID.
    /// </summary>
    [Key]
    public string Id { get; set; } = Guid.NewGuid().ToString("N");

    /// <summary>
    /// Gets or sets the name of the metric being sampled; defaults to "cpu".
    /// </summary>
    public string Metric { get; set; } = "cpu";

    /// <summary>
    /// Gets or sets the sampled measurement; zero until assigned.
    /// </summary>
    public double Value { get; set; }

    /// <summary>
    /// Gets or sets the sample timestamp, captured in UTC.
    /// </summary>
    public DateTime RecordedUtc { get; set; } = DateTime.UtcNow;
}

View File

@@ -28,13 +28,19 @@
"BackupPath": "backups/",
"BusyTimeoutMs": 5000
},
"Sync": {
"SyncIntervalMs": 5000,
"BatchSize": 100,
"EnableOfflineQueue": true,
"MaxQueueSize": 1000
},
"Logging": {
"Sync": {
"SyncIntervalMs": 5000,
"BatchSize": 100,
"EnableOfflineQueue": true,
"MaxQueueSize": 1000
},
"MultiDataset": {
"EnableMultiDatasetSync": true,
"EnableDatasetPrimary": true,
"EnableDatasetLogs": true,
"EnableDatasetTimeseries": true
},
"Logging": {
"LogLevel": "Information",
"LogFilePath": "logs/cbddc.log",
"MaxLogFileSizeMb": 10,
@@ -48,4 +54,4 @@
}
]
}
}
}