diff --git a/samples/Foundatio.SampleJob/Extensions/NumberExtensions.cs b/samples/Foundatio.SampleJob/Extensions/NumberExtensions.cs
index 861042e..9e126eb 100644
--- a/samples/Foundatio.SampleJob/Extensions/NumberExtensions.cs
+++ b/samples/Foundatio.SampleJob/Extensions/NumberExtensions.cs
@@ -1,30 +1,27 @@
-using System;
+namespace Foundatio.Extensions;
 
-namespace Foundatio.Extensions
+internal static class NumericExtensions
 {
-    internal static class NumericExtensions
+    public static string ToOrdinal(this int num)
     {
-        public static string ToOrdinal(this int num)
+        switch (num % 100)
         {
-            switch (num % 100)
-            {
-                case 11:
-                case 12:
-                case 13:
-                    return num.ToString("#,###0") + "th";
-            }
+            case 11:
+            case 12:
+            case 13:
+                return num.ToString("#,###0") + "th";
+        }
 
-            switch (num % 10)
-            {
-                case 1:
-                    return num.ToString("#,###0") + "st";
-                case 2:
-                    return num.ToString("#,###0") + "nd";
-                case 3:
-                    return num.ToString("#,###0") + "rd";
-                default:
-                    return num.ToString("#,###0") + "th";
-            }
+        switch (num % 10)
+        {
+            case 1:
+                return num.ToString("#,###0") + "st";
+            case 2:
+                return num.ToString("#,###0") + "nd";
+            case 3:
+                return num.ToString("#,###0") + "rd";
+            default:
+                return num.ToString("#,###0") + "th";
         }
     }
 }
diff --git a/samples/Foundatio.SampleJob/Extensions/TaskExtensions.cs b/samples/Foundatio.SampleJob/Extensions/TaskExtensions.cs
index 25722ae..d848079 100644
--- a/samples/Foundatio.SampleJob/Extensions/TaskExtensions.cs
+++ b/samples/Foundatio.SampleJob/Extensions/TaskExtensions.cs
@@ -1,22 +1,20 @@
-using System;
-using System.Diagnostics;
+using System.Diagnostics;
 using System.Runtime.CompilerServices;
 using System.Threading.Tasks;
 
-namespace Foundatio.Extensions
+namespace Foundatio.Extensions;
+
+internal static class TaskExtensions
 {
-    internal static class TaskExtensions
+    [DebuggerStepThrough]
+    public static ConfiguredTaskAwaitable AnyContext(this Task task)
     {
-        [DebuggerStepThrough]
-        public static ConfiguredTaskAwaitable AnyContext(this Task task)
-        {
-            return task.ConfigureAwait(continueOnCapturedContext: false);
-        }
+        return task.ConfigureAwait(continueOnCapturedContext: false);
+    }
 
-        [DebuggerStepThrough]
-        public static ConfiguredTaskAwaitable<T> AnyContext<T>(this Task<T> task)
-        {
-            return task.ConfigureAwait(continueOnCapturedContext: false);
-        }
+    [DebuggerStepThrough]
+    public static ConfiguredTaskAwaitable<T> AnyContext<T>(this Task<T> task)
+    {
+        return task.ConfigureAwait(continueOnCapturedContext: false);
     }
 }
 }
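For readers skimming these helper diffs: ToOrdinal formats an integer with an English ordinal suffix, and AnyContext() is shorthand for ConfigureAwait(false). Below is a minimal usage sketch; the ExtensionUsageSketch class and PrintOrdinalAsync method are hypothetical illustrations, not part of the sample project.

    using System;
    using System.Threading.Tasks;
    using Foundatio.Extensions;

    internal static class ExtensionUsageSketch
    {
        // Hypothetical helper: awaiting with AnyContext() avoids resuming on a captured
        // synchronization context, and ToOrdinal() turns 1 into "1st", 12 into "12th",
        // and 23 into "23rd".
        public static async Task PrintOrdinalAsync(Task<int> pendingCount)
        {
            int count = await pendingCount.AnyContext(); // same as ConfigureAwait(false)
            Console.WriteLine(count.ToOrdinal());
        }
    }
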
diff --git a/samples/Foundatio.SampleJob/PingQueueJob.cs b/samples/Foundatio.SampleJob/PingQueueJob.cs
index b100748..84fe0e2 100644
--- a/samples/Foundatio.SampleJob/PingQueueJob.cs
+++ b/samples/Foundatio.SampleJob/PingQueueJob.cs
@@ -11,47 +11,46 @@
 using Foundatio.Utility;
 using Microsoft.Extensions.Logging;
 
-namespace Foundatio.SampleJob
+namespace Foundatio.SampleJob;
+
+public class PingQueueJob : QueueJobBase<PingRequest>
 {
-    public class PingQueueJob : QueueJobBase<PingRequest>
-    {
-        private readonly ILockProvider _locker;
-        private int _runCount;
+    private readonly ILockProvider _locker;
+    private int _runCount;
 
-        public PingQueueJob(IQueue<PingRequest> queue, ILoggerFactory loggerFactory, ICacheClient cacheClient, IMessageBus messageBus) : base(queue, loggerFactory)
-        {
-            AutoComplete = true;
-            _locker = new CacheLockProvider(cacheClient, messageBus, loggerFactory);
-        }
+    public PingQueueJob(IQueue<PingRequest> queue, ILoggerFactory loggerFactory, ICacheClient cacheClient, IMessageBus messageBus) : base(queue, loggerFactory)
+    {
+        AutoComplete = true;
+        _locker = new CacheLockProvider(cacheClient, messageBus, loggerFactory);
+    }
 
-        public int RunCount => _runCount;
+    public int RunCount => _runCount;
 
-        protected override Task<ILock> GetQueueEntryLockAsync(IQueueEntry<PingRequest> queueEntry, CancellationToken cancellationToken = new CancellationToken())
-        {
-            return _locker.AcquireAsync(String.Concat("pull:", queueEntry.Value.Id),
-                TimeSpan.FromMinutes(30),
-                TimeSpan.FromSeconds(1));
-        }
+    protected override Task<ILock> GetQueueEntryLockAsync(IQueueEntry<PingRequest> queueEntry, CancellationToken cancellationToken = new CancellationToken())
+    {
+        return _locker.AcquireAsync(String.Concat("pull:", queueEntry.Value.Id),
+            TimeSpan.FromMinutes(30),
+            TimeSpan.FromSeconds(1));
+    }
 
-        protected override async Task<JobResult> ProcessQueueEntryAsync(QueueEntryContext<PingRequest> context)
-        {
-            Interlocked.Increment(ref _runCount);
+    protected override async Task<JobResult> ProcessQueueEntryAsync(QueueEntryContext<PingRequest> context)
+    {
+        Interlocked.Increment(ref _runCount);
 
-            if (_logger.IsEnabled(LogLevel.Information))
-                _logger.LogInformation("Got {RunCount} ping. Sending pong!", RunCount.ToOrdinal());
-            await SystemClock.SleepAsync(TimeSpan.FromMilliseconds(1)).AnyContext();
+        if (_logger.IsEnabled(LogLevel.Information))
+            _logger.LogInformation("Got {RunCount} ping. Sending pong!", RunCount.ToOrdinal());
+        await SystemClock.SleepAsync(TimeSpan.FromMilliseconds(1)).AnyContext();
 
-            if (RandomData.GetBool(context.QueueEntry.Value.PercentChanceOfException))
-                throw new ApplicationException("Boom!");
+        if (RandomData.GetBool(context.QueueEntry.Value.PercentChanceOfException))
+            throw new ApplicationException("Boom!");
 
-            return JobResult.Success;
-        }
+        return JobResult.Success;
     }
+}
 
-    public class PingRequest
-    {
-        public string Data { get; set; }
-        public string Id { get; set; }
-        public int PercentChanceOfException { get; set; } = 0;
-    }
+public class PingRequest
+{
+    public string Data { get; set; }
+    public string Id { get; set; }
+    public int PercentChanceOfException { get; set; } = 0;
 }
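PingQueueJob acquires a distributed lock per queue entry (keyed "pull:{Id}") so only one worker processes a given PingRequest at a time, and AutoComplete marks entries complete when processing succeeds. A rough standalone sketch of the same CacheLockProvider call pattern follows; it assumes the in-memory cache client and message bus as stand-ins for the Redis-backed services registered in SampleServiceProvider, and the LockUsageSketch/ProcessWithLockAsync names are hypothetical.

    using System;
    using System.Threading.Tasks;
    using Foundatio.Caching;
    using Foundatio.Lock;
    using Foundatio.Messaging;

    public static class LockUsageSketch
    {
        public static async Task ProcessWithLockAsync(string entryId)
        {
            // In-memory stand-ins; the sample wires up Redis-backed equivalents via DI.
            ILockProvider locker = new CacheLockProvider(new InMemoryCacheClient(), new InMemoryMessageBus());

            // Same arguments as GetQueueEntryLockAsync above: hold the lock for up to
            // 30 minutes, but give up after waiting 1 second to acquire it.
            var entryLock = await locker.AcquireAsync("pull:" + entryId, TimeSpan.FromMinutes(30), TimeSpan.FromSeconds(1));
            if (entryLock == null)
                return; // assumed behavior: acquisition timed out, another worker owns the entry

            try
            {
                // ... process the queue entry ...
            }
            finally
            {
                await entryLock.ReleaseAsync();
            }
        }
    }
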
diff --git a/samples/Foundatio.SampleJob/Program.cs b/samples/Foundatio.SampleJob/Program.cs
index 78f41db..81a83ea 100644
--- a/samples/Foundatio.SampleJob/Program.cs
+++ b/samples/Foundatio.SampleJob/Program.cs
@@ -1,36 +1,33 @@
-using System;
-using System.Threading.Tasks;
-using Foundatio.Jobs;
+using Foundatio.Jobs;
 using Foundatio.Messaging;
 using Microsoft.Extensions.DependencyInjection;
 using Microsoft.Extensions.Logging;
 
-namespace Foundatio.SampleJob
-{
-    public class Program
-    {
-        private static ILogger _logger;
+namespace Foundatio.SampleJob;
 
-        public static int Main()
-        {
-            var loggerFactory = LoggerFactory.Create(builder => builder.AddConsole());
-            _logger = loggerFactory.CreateLogger("MessageBus");
+public class Program
+{
+    private static ILogger _logger;
 
-            var serviceProvider = SampleServiceProvider.Create(loggerFactory);
-            var jobOptions = JobOptions.GetDefaults<PingQueueJob>(() => serviceProvider.GetRequiredService<PingQueueJob>());
-            var messageBus = serviceProvider.GetRequiredService<IMessageBus>();
-            messageBus.SubscribeAsync<EchoMessage>(m => HandleEchoMessage(m)).GetAwaiter().GetResult();
-            return new JobRunner(jobOptions).RunInConsoleAsync().GetAwaiter().GetResult();
-        }
+    public static int Main()
+    {
+        var loggerFactory = LoggerFactory.Create(builder => builder.AddConsole());
+        _logger = loggerFactory.CreateLogger("MessageBus");
 
-        private static void HandleEchoMessage(EchoMessage m)
-        {
-            _logger.LogInformation($"Got message: {m.Message}");
-        }
+        var serviceProvider = SampleServiceProvider.Create(loggerFactory);
+        var jobOptions = JobOptions.GetDefaults<PingQueueJob>(() => serviceProvider.GetRequiredService<PingQueueJob>());
+        var messageBus = serviceProvider.GetRequiredService<IMessageBus>();
+        messageBus.SubscribeAsync<EchoMessage>(m => HandleEchoMessage(m)).GetAwaiter().GetResult();
+        return new JobRunner(jobOptions).RunInConsoleAsync().GetAwaiter().GetResult();
     }
 
-    public class EchoMessage
+    private static void HandleEchoMessage(EchoMessage m)
     {
-        public string Message { get; set; }
+        _logger.LogInformation($"Got message: {m.Message}");
     }
 }
+
+public class EchoMessage
+{
+    public string Message { get; set; }
+}
diff --git a/samples/Foundatio.SampleJob/SampleServiceProvider.cs b/samples/Foundatio.SampleJob/SampleServiceProvider.cs
index d460b21..3dd998f 100644
--- a/samples/Foundatio.SampleJob/SampleServiceProvider.cs
+++ b/samples/Foundatio.SampleJob/SampleServiceProvider.cs
@@ -2,35 +2,33 @@
 using Foundatio.Caching;
 using Foundatio.Lock;
 using Foundatio.Messaging;
-using Foundatio.Metrics;
 using Foundatio.Queues;
 using Microsoft.Extensions.DependencyInjection;
 using Microsoft.Extensions.Logging;
 using StackExchange.Redis;
 
-namespace Foundatio.SampleJob
+namespace Foundatio.SampleJob;
+
+public class SampleServiceProvider
 {
-    public class SampleServiceProvider
+    public static IServiceProvider Create(ILoggerFactory loggerFactory)
     {
-        public static IServiceProvider Create(ILoggerFactory loggerFactory)
-        {
-            var container = new ServiceCollection();
+        var container = new ServiceCollection();
 
-            if (loggerFactory != null)
-            {
-                container.AddSingleton(loggerFactory);
-                container.AddSingleton(typeof(ILogger<>), typeof(Logger<>));
-            }
+        if (loggerFactory != null)
+        {
+            container.AddSingleton(loggerFactory);
+            container.AddSingleton(typeof(ILogger<>), typeof(Logger<>));
+        }
 
-            var muxer = ConnectionMultiplexer.Connect("localhost", o => o.LoggerFactory = loggerFactory);
-            container.AddSingleton(muxer);
-            container.AddSingleton<IQueue<PingRequest>>(s => new RedisQueue<PingRequest>(o => o.ConnectionMultiplexer(muxer).RetryDelay(TimeSpan.FromSeconds(1)).WorkItemTimeout(TimeSpan.FromSeconds(5)).LoggerFactory(loggerFactory)));
-            container.AddSingleton<ICacheClient>(s => new RedisCacheClient(o => o.ConnectionMultiplexer(muxer).LoggerFactory(loggerFactory)));
-            container.AddSingleton<IMessageBus>(s => new RedisMessageBus(o => o.Subscriber(muxer.GetSubscriber()).LoggerFactory(loggerFactory).MapMessageTypeToClassName()));
-            container.AddSingleton<ILockProvider>(s => new CacheLockProvider(s.GetRequiredService<ICacheClient>(), s.GetRequiredService<IMessageBus>(), loggerFactory));
-            container.AddTransient<PingQueueJob>();
+        var muxer = ConnectionMultiplexer.Connect("localhost", o => o.LoggerFactory = loggerFactory);
+        container.AddSingleton(muxer);
+        container.AddSingleton<IQueue<PingRequest>>(s => new RedisQueue<PingRequest>(o => o.ConnectionMultiplexer(muxer).RetryDelay(TimeSpan.FromSeconds(1)).WorkItemTimeout(TimeSpan.FromSeconds(5)).LoggerFactory(loggerFactory)));
+        container.AddSingleton<ICacheClient>(s => new RedisCacheClient(o => o.ConnectionMultiplexer(muxer).LoggerFactory(loggerFactory)));
+        container.AddSingleton<IMessageBus>(s => new RedisMessageBus(o => o.Subscriber(muxer.GetSubscriber()).LoggerFactory(loggerFactory).MapMessageTypeToClassName()));
+        container.AddSingleton<ILockProvider>(s => new CacheLockProvider(s.GetRequiredService<ICacheClient>(), s.GetRequiredService<IMessageBus>(), loggerFactory));
+        container.AddTransient<PingQueueJob>();
 
-            return container.BuildServiceProvider();
-        }
+        return container.BuildServiceProvider();
     }
 }
diff --git a/samples/Foundatio.SampleJobClient/Program.cs b/samples/Foundatio.SampleJobClient/Program.cs
index 6ad86e2..cdbb9e4 100644
--- a/samples/Foundatio.SampleJobClient/Program.cs
+++ b/samples/Foundatio.SampleJobClient/Program.cs
@@ -9,219 +9,218 @@
 using Microsoft.Extensions.Logging;
 using StackExchange.Redis;
 
-namespace 
Foundatio.SampleJobClient +namespace Foundatio.SampleJobClient; + +public class Program { - public class Program + private static IQueue _queue; + private static IMessageBus _messageBus; + private static TestLoggerFactory _loggerFactory; + private static ILogger _logger; + private static bool _isRunning = true; + private static CancellationTokenSource _continuousEnqueueTokenSource = new(); + + public static void Main(string[] args) { - private static IQueue _queue; - private static IMessageBus _messageBus; - private static TestLoggerFactory _loggerFactory; - private static ILogger _logger; - private static bool _isRunning = true; - private static CancellationTokenSource _continuousEnqueueTokenSource = new(); - - public static void Main(string[] args) - { - _loggerFactory = new TestLoggerFactory(); - _loggerFactory.SetLogLevel(LogLevel.Trace); - _loggerFactory.MaxLogEntriesToStore = Console.WindowHeight - (OPTIONS_MENU_LINE_COUNT + SEPERATOR_LINE_COUNT) - 1; - _logger = _loggerFactory.CreateLogger(); + _loggerFactory = new TestLoggerFactory(); + _loggerFactory.SetLogLevel(LogLevel.Trace); + _loggerFactory.MaxLogEntriesToStore = Console.WindowHeight - (OPTIONS_MENU_LINE_COUNT + SEPERATOR_LINE_COUNT) - 1; + _logger = _loggerFactory.CreateLogger(); - var muxer = ConnectionMultiplexer.Connect("localhost", o => o.LoggerFactory = _loggerFactory); - _queue = new RedisQueue(new RedisQueueOptions { ConnectionMultiplexer = muxer }); - _messageBus = new RedisMessageBus(o => o.Subscriber(muxer.GetSubscriber()).LoggerFactory(_loggerFactory).MapMessageTypeToClassName()); + var muxer = ConnectionMultiplexer.Connect("localhost", o => o.LoggerFactory = _loggerFactory); + _queue = new RedisQueue(new RedisQueueOptions { ConnectionMultiplexer = muxer }); + _messageBus = new RedisMessageBus(o => o.Subscriber(muxer.GetSubscriber()).LoggerFactory(_loggerFactory).MapMessageTypeToClassName()); - MonitorKeyPress(); - DrawLoop(); - } + MonitorKeyPress(); + DrawLoop(); + } + + private static void EnqueuePing(int count) + { + for (int i = 0; i < count; i++) + _queue.EnqueueAsync(new PingRequest { Data = "b", PercentChanceOfException = 0 }).GetAwaiter().GetResult(); + + if (_logger.IsEnabled(LogLevel.Information)) + _logger.LogInformation("Enqueued {Count} ping requests", count); + } - private static void EnqueuePing(int count) + private static void EnqueueContinuousPings(int count, CancellationToken token) + { + do { for (int i = 0; i < count; i++) _queue.EnqueueAsync(new PingRequest { Data = "b", PercentChanceOfException = 0 }).GetAwaiter().GetResult(); if (_logger.IsEnabled(LogLevel.Information)) _logger.LogInformation("Enqueued {Count} ping requests", count); - } + } while (!token.IsCancellationRequested); + } - private static void EnqueueContinuousPings(int count, CancellationToken token) + private static void HandleKey(ConsoleKey key) + { + if (key == ConsoleKey.D1) { - do - { - for (int i = 0; i < count; i++) - _queue.EnqueueAsync(new PingRequest { Data = "b", PercentChanceOfException = 0 }).GetAwaiter().GetResult(); - - if (_logger.IsEnabled(LogLevel.Information)) - _logger.LogInformation("Enqueued {Count} ping requests", count); - } while (!token.IsCancellationRequested); + EnqueuePing(1); } - - private static void HandleKey(ConsoleKey key) + else if (key == ConsoleKey.D2) { - if (key == ConsoleKey.D1) - { - EnqueuePing(1); - } - else if (key == ConsoleKey.D2) - { - EnqueuePing(100); - } - else if (key == ConsoleKey.D3) - { - if (_continuousEnqueueTokenSource.IsCancellationRequested) - 
_continuousEnqueueTokenSource = new CancellationTokenSource(); - - _logger.LogWarning("Starting continuous ping..."); - Task.Run(() => EnqueueContinuousPings(25, _continuousEnqueueTokenSource.Token), _continuousEnqueueTokenSource.Token); - } - else if (key == ConsoleKey.M) - { - _messageBus.PublishAsync(new EchoMessage { Message = "Hello World!" }).GetAwaiter().GetResult(); - } - else if (key == ConsoleKey.Q) - { - _isRunning = false; - } - else if (key == ConsoleKey.S) - { - _logger.LogWarning("Cancelling continuous ping."); - _continuousEnqueueTokenSource.Cancel(); - } + EnqueuePing(100); } - - private static void MonitorKeyPress() + else if (key == ConsoleKey.D3) { - Task.Run(() => - { - while (_isRunning) - { - while (!Console.KeyAvailable) - { - SystemClock.Sleep(250); - } - var key = Console.ReadKey(true).Key; + if (_continuousEnqueueTokenSource.IsCancellationRequested) + _continuousEnqueueTokenSource = new CancellationTokenSource(); - HandleKey(key); - } - }); + _logger.LogWarning("Starting continuous ping..."); + Task.Run(() => EnqueueContinuousPings(25, _continuousEnqueueTokenSource.Token), _continuousEnqueueTokenSource.Token); } - - private static void DrawLoop() + else if (key == ConsoleKey.M) { - Console.CursorVisible = false; - - while (_isRunning) - { - ClearConsoleLines(0, OPTIONS_MENU_LINE_COUNT + SEPERATOR_LINE_COUNT + _loggerFactory.MaxLogEntriesToStore); - - DrawOptionsMenu(); - DrawLogMessages(); - - Console.SetCursorPosition(0, OPTIONS_MENU_LINE_COUNT + 1); - - SystemClock.Sleep(250); - } + _messageBus.PublishAsync(new EchoMessage { Message = "Hello World!" }).GetAwaiter().GetResult(); } - - private const int OPTIONS_MENU_LINE_COUNT = 5; - private const int SEPERATOR_LINE_COUNT = 2; - private static void DrawOptionsMenu() + else if (key == ConsoleKey.Q) { - Console.SetCursorPosition(0, 0); - Console.ForegroundColor = ConsoleColor.Yellow; - Console.Write("1: "); - Console.ForegroundColor = ConsoleColor.White; - Console.Write("Enqueue 1"); - Console.ForegroundColor = ConsoleColor.DarkGray; - Console.Write(" | "); - Console.ForegroundColor = ConsoleColor.Yellow; - Console.Write("2: "); - Console.ForegroundColor = ConsoleColor.White; - Console.Write("Enqueue 100"); - Console.ForegroundColor = ConsoleColor.DarkGray; - Console.Write(" | "); - Console.ForegroundColor = ConsoleColor.Yellow; - Console.Write("3: "); - Console.ForegroundColor = ConsoleColor.White; - Console.WriteLine("Enqueue continuous"); - - Console.ForegroundColor = ConsoleColor.Yellow; - Console.Write("M: "); - Console.ForegroundColor = ConsoleColor.White; - Console.WriteLine("Send echo message"); - - Console.WriteLine(); - - Console.ForegroundColor = ConsoleColor.Yellow; - Console.Write("S: "); - Console.ForegroundColor = ConsoleColor.White; - Console.WriteLine("Stop"); - Console.ForegroundColor = ConsoleColor.Yellow; - Console.Write("Q: "); - Console.ForegroundColor = ConsoleColor.White; - Console.WriteLine("Quit"); + _isRunning = false; } + else if (key == ConsoleKey.S) + { + _logger.LogWarning("Cancelling continuous ping."); + _continuousEnqueueTokenSource.Cancel(); + } + } - private static void DrawLogMessages() + private static void MonitorKeyPress() + { + Task.Run(() => { - Console.SetCursorPosition(0, OPTIONS_MENU_LINE_COUNT + SEPERATOR_LINE_COUNT); - foreach (var logEntry in _loggerFactory.LogEntries.ToArray()) + while (_isRunning) { - var originalColor = Console.ForegroundColor; - Console.ForegroundColor = GetColor(logEntry); - Console.WriteLine(logEntry); - Console.ForegroundColor = originalColor; 
+ while (!Console.KeyAvailable) + { + SystemClock.Sleep(250); + } + var key = Console.ReadKey(true).Key; + + HandleKey(key); } - } + }); + } - private static void ClearConsoleLines(int startLine = 0, int endLine = -1) + private static void DrawLoop() + { + Console.CursorVisible = false; + + while (_isRunning) { - if (endLine < 0) - endLine = Console.WindowHeight - 2; + ClearConsoleLines(0, OPTIONS_MENU_LINE_COUNT + SEPERATOR_LINE_COUNT + _loggerFactory.MaxLogEntriesToStore); - int currentLine = Console.CursorTop; - int currentPosition = Console.CursorLeft; + DrawOptionsMenu(); + DrawLogMessages(); - for (int i = startLine; i <= endLine; i++) - { - Console.SetCursorPosition(0, i); - Console.Write(new string(' ', Console.WindowWidth)); - } + Console.SetCursorPosition(0, OPTIONS_MENU_LINE_COUNT + 1); - Console.SetCursorPosition(currentPosition, currentLine); + SystemClock.Sleep(250); } + } - private static ConsoleColor GetColor(LogEntry logEntry) - { - switch (logEntry.LogLevel) - { - case LogLevel.Debug: - return ConsoleColor.Gray; - case LogLevel.Error: - return ConsoleColor.Yellow; - case LogLevel.Information: - return ConsoleColor.White; - case LogLevel.Trace: - return ConsoleColor.DarkGray; - case LogLevel.Warning: - return ConsoleColor.Magenta; - case LogLevel.Critical: - return ConsoleColor.Red; - } + private const int OPTIONS_MENU_LINE_COUNT = 5; + private const int SEPERATOR_LINE_COUNT = 2; + private static void DrawOptionsMenu() + { + Console.SetCursorPosition(0, 0); + Console.ForegroundColor = ConsoleColor.Yellow; + Console.Write("1: "); + Console.ForegroundColor = ConsoleColor.White; + Console.Write("Enqueue 1"); + Console.ForegroundColor = ConsoleColor.DarkGray; + Console.Write(" | "); + Console.ForegroundColor = ConsoleColor.Yellow; + Console.Write("2: "); + Console.ForegroundColor = ConsoleColor.White; + Console.Write("Enqueue 100"); + Console.ForegroundColor = ConsoleColor.DarkGray; + Console.Write(" | "); + Console.ForegroundColor = ConsoleColor.Yellow; + Console.Write("3: "); + Console.ForegroundColor = ConsoleColor.White; + Console.WriteLine("Enqueue continuous"); + + Console.ForegroundColor = ConsoleColor.Yellow; + Console.Write("M: "); + Console.ForegroundColor = ConsoleColor.White; + Console.WriteLine("Send echo message"); + + Console.WriteLine(); + + Console.ForegroundColor = ConsoleColor.Yellow; + Console.Write("S: "); + Console.ForegroundColor = ConsoleColor.White; + Console.WriteLine("Stop"); + Console.ForegroundColor = ConsoleColor.Yellow; + Console.Write("Q: "); + Console.ForegroundColor = ConsoleColor.White; + Console.WriteLine("Quit"); + } - return ConsoleColor.White; + private static void DrawLogMessages() + { + Console.SetCursorPosition(0, OPTIONS_MENU_LINE_COUNT + SEPERATOR_LINE_COUNT); + foreach (var logEntry in _loggerFactory.LogEntries.ToArray()) + { + var originalColor = Console.ForegroundColor; + Console.ForegroundColor = GetColor(logEntry); + Console.WriteLine(logEntry); + Console.ForegroundColor = originalColor; } } - public class EchoMessage + private static void ClearConsoleLines(int startLine = 0, int endLine = -1) { - public string Message { get; set; } + if (endLine < 0) + endLine = Console.WindowHeight - 2; + + int currentLine = Console.CursorTop; + int currentPosition = Console.CursorLeft; + + for (int i = startLine; i <= endLine; i++) + { + Console.SetCursorPosition(0, i); + Console.Write(new string(' ', Console.WindowWidth)); + } + + Console.SetCursorPosition(currentPosition, currentLine); } - public class PingRequest + private static 
ConsoleColor GetColor(LogEntry logEntry) { - public string Data { get; set; } - public string Id { get; set; } = Guid.NewGuid().ToString("N"); - public int PercentChanceOfException { get; set; } = 0; + switch (logEntry.LogLevel) + { + case LogLevel.Debug: + return ConsoleColor.Gray; + case LogLevel.Error: + return ConsoleColor.Yellow; + case LogLevel.Information: + return ConsoleColor.White; + case LogLevel.Trace: + return ConsoleColor.DarkGray; + case LogLevel.Warning: + return ConsoleColor.Magenta; + case LogLevel.Critical: + return ConsoleColor.Red; + } + + return ConsoleColor.White; } } + +public class EchoMessage +{ + public string Message { get; set; } +} + +public class PingRequest +{ + public string Data { get; set; } + public string Id { get; set; } = Guid.NewGuid().ToString("N"); + public int PercentChanceOfException { get; set; } = 0; +} diff --git a/src/Foundatio.Redis/Cache/RedisCacheClient.cs b/src/Foundatio.Redis/Cache/RedisCacheClient.cs index 5ff6aee..bc286a7 100644 --- a/src/Foundatio.Redis/Cache/RedisCacheClient.cs +++ b/src/Foundatio.Redis/Cache/RedisCacheClient.cs @@ -11,174 +11,151 @@ using Microsoft.Extensions.Logging.Abstractions; using StackExchange.Redis; -namespace Foundatio.Caching +namespace Foundatio.Caching; + +public sealed class RedisCacheClient : ICacheClient, IHaveSerializer { - public sealed class RedisCacheClient : ICacheClient, IHaveSerializer - { - private readonly RedisCacheClientOptions _options; - private readonly ILogger _logger; + private readonly RedisCacheClientOptions _options; + private readonly ILogger _logger; - private readonly AsyncLock _lock = new(); - private bool _scriptsLoaded; + private readonly AsyncLock _lock = new(); + private bool _scriptsLoaded; - private LoadedLuaScript _incrementWithExpire; - private LoadedLuaScript _removeIfEqual; - private LoadedLuaScript _replaceIfEqual; - private LoadedLuaScript _setIfHigher; - private LoadedLuaScript _setIfLower; + private LoadedLuaScript _incrementWithExpire; + private LoadedLuaScript _removeIfEqual; + private LoadedLuaScript _replaceIfEqual; + private LoadedLuaScript _setIfHigher; + private LoadedLuaScript _setIfLower; - public RedisCacheClient(RedisCacheClientOptions options) - { - _options = options; - options.Serializer = options.Serializer ?? DefaultSerializer.Instance; - _logger = options.LoggerFactory?.CreateLogger(typeof(RedisCacheClient)) ?? NullLogger.Instance; - options.ConnectionMultiplexer.ConnectionRestored += ConnectionMultiplexerOnConnectionRestored; - } + public RedisCacheClient(RedisCacheClientOptions options) + { + _options = options; + options.Serializer = options.Serializer ?? DefaultSerializer.Instance; + _logger = options.LoggerFactory?.CreateLogger(typeof(RedisCacheClient)) ?? 
NullLogger.Instance; + options.ConnectionMultiplexer.ConnectionRestored += ConnectionMultiplexerOnConnectionRestored; + } - public RedisCacheClient(Builder config) - : this(config(new RedisCacheClientOptionsBuilder()).Build()) { } + public RedisCacheClient(Builder config) + : this(config(new RedisCacheClientOptionsBuilder()).Build()) { } - public IDatabase Database => _options.ConnectionMultiplexer.GetDatabase(); + public IDatabase Database => _options.ConnectionMultiplexer.GetDatabase(); - public Task RemoveAsync(string key) - { - return Database.KeyDeleteAsync(key); - } + public Task RemoveAsync(string key) + { + return Database.KeyDeleteAsync(key); + } - public async Task RemoveIfEqualAsync(string key, T expected) - { - if (String.IsNullOrEmpty(key)) - throw new ArgumentNullException(nameof(key), "Key cannot be null or empty."); + public async Task RemoveIfEqualAsync(string key, T expected) + { + if (String.IsNullOrEmpty(key)) + throw new ArgumentNullException(nameof(key), "Key cannot be null or empty."); - await LoadScriptsAsync().AnyContext(); + await LoadScriptsAsync().AnyContext(); - var expectedValue = expected.ToRedisValue(_options.Serializer); - var redisResult = await Database.ScriptEvaluateAsync(_removeIfEqual, new { key = (RedisKey)key, expected = expectedValue }).AnyContext(); - var result = (int)redisResult; + var expectedValue = expected.ToRedisValue(_options.Serializer); + var redisResult = await Database.ScriptEvaluateAsync(_removeIfEqual, new { key = (RedisKey)key, expected = expectedValue }).AnyContext(); + var result = (int)redisResult; - return result > 0; - } + return result > 0; + } - public async Task RemoveAllAsync(IEnumerable keys = null) + public async Task RemoveAllAsync(IEnumerable keys = null) + { + if (keys == null) { - if (keys == null) + var endpoints = _options.ConnectionMultiplexer.GetEndPoints(); + if (endpoints.Length == 0) + return 0; + + foreach (var endpoint in endpoints) { - var endpoints = _options.ConnectionMultiplexer.GetEndPoints(); - if (endpoints.Length == 0) - return 0; + var server = _options.ConnectionMultiplexer.GetServer(endpoint); + if (server.IsReplica) + continue; - foreach (var endpoint in endpoints) + try { - var server = _options.ConnectionMultiplexer.GetServer(endpoint); - if (server.IsReplica) - continue; - - try - { - await server.FlushDatabaseAsync().AnyContext(); - continue; - } - catch (Exception) { } - - try - { - await foreach (var key in server.KeysAsync().ConfigureAwait(false)) - await Database.KeyDeleteAsync(key).AnyContext(); - } - catch (Exception) { } + await server.FlushDatabaseAsync().AnyContext(); + continue; } - } - else - { - var redisKeys = keys.Where(k => !String.IsNullOrEmpty(k)).Select(k => (RedisKey)k).ToArray(); - if (redisKeys.Length > 0) - return (int)await Database.KeyDeleteAsync(redisKeys).AnyContext(); - } + catch (Exception) { } - return 0; + try + { + await foreach (var key in server.KeysAsync().ConfigureAwait(false)) + await Database.KeyDeleteAsync(key).AnyContext(); + } + catch (Exception) { } + } } - - public async Task RemoveByPrefixAsync(string prefix) + else { - const int chunkSize = 2500; - string regex = $"{prefix}*"; + var redisKeys = keys.Where(k => !String.IsNullOrEmpty(k)).Select(k => (RedisKey)k).ToArray(); + if (redisKeys.Length > 0) + return (int)await Database.KeyDeleteAsync(redisKeys).AnyContext(); + } - int total = 0; - int index = 0; + return 0; + } - (int cursor, string[] keys) = await ScanKeysAsync(regex, index, chunkSize).AnyContext(); + public async Task 
RemoveByPrefixAsync(string prefix) + { + const int chunkSize = 2500; + string regex = $"{prefix}*"; - while (keys.Length != 0 || index < chunkSize) - { - total += await RemoveAllAsync(keys).AnyContext(); - index += chunkSize; - (cursor, keys) = await ScanKeysAsync(regex, cursor, chunkSize).AnyContext(); - } + int total = 0; + int index = 0; - return total; - } + (int cursor, string[] keys) = await ScanKeysAsync(regex, index, chunkSize).AnyContext(); - /// - /// Scan for keys matching the prefix - /// - /// SCAN, SSCAN, HSCAN and ZSCAN return a two elements multi-bulk reply, where the first element - /// is a string representing an unsigned 64 bit number (the cursor), and the second element is a multi-bulk - /// with an array of elements. - private async Task<(int, string[])> ScanKeysAsync(string prefix, int index, int chunkSize) + while (keys.Length != 0 || index < chunkSize) { - var result = await Database.ExecuteAsync("scan", index, "match", prefix, "count", chunkSize).AnyContext(); - var value = (RedisResult[])result; - return ((int)value![0], (string[])value[1]); + total += await RemoveAllAsync(keys).AnyContext(); + index += chunkSize; + (cursor, keys) = await ScanKeysAsync(regex, cursor, chunkSize).AnyContext(); } - private static readonly RedisValue _nullValue = "@@NULL"; - - public async Task> GetAsync(string key) - { - if (String.IsNullOrEmpty(key)) - throw new ArgumentNullException(nameof(key), "Key cannot be null or empty."); - - var redisValue = await Database.StringGetAsync(key, _options.ReadMode).AnyContext(); - return RedisValueToCacheValue(redisValue); - } + return total; + } - private CacheValue> RedisValuesToCacheValue(RedisValue[] redisValues) - { - var result = new List(); - foreach (var redisValue in redisValues) - { - if (!redisValue.HasValue) - continue; - if (redisValue == _nullValue) - continue; + /// + /// Scan for keys matching the prefix + /// + /// SCAN, SSCAN, HSCAN and ZSCAN return a two elements multi-bulk reply, where the first element + /// is a string representing an unsigned 64 bit number (the cursor), and the second element is a multi-bulk + /// with an array of elements. 
+ private async Task<(int, string[])> ScanKeysAsync(string prefix, int index, int chunkSize) + { + var result = await Database.ExecuteAsync("scan", index, "match", prefix, "count", chunkSize).AnyContext(); + var value = (RedisResult[])result; + return ((int)value![0], (string[])value[1]); + } - try - { - var value = redisValue.ToValueOfType(_options.Serializer); - result.Add(value); - } - catch (Exception ex) - { - if (_logger.IsEnabled(LogLevel.Error)) - _logger.LogError(ex, "Unable to deserialize value {Value} to type {Type}", redisValue, typeof(T).FullName); + private static readonly RedisValue _nullValue = "@@NULL"; - if (_options.ShouldThrowOnSerializationError) - throw; - } - } + public async Task> GetAsync(string key) + { + if (String.IsNullOrEmpty(key)) + throw new ArgumentNullException(nameof(key), "Key cannot be null or empty."); - return new CacheValue>(result, true); - } + var redisValue = await Database.StringGetAsync(key, _options.ReadMode).AnyContext(); + return RedisValueToCacheValue(redisValue); + } - private CacheValue RedisValueToCacheValue(RedisValue redisValue) + private CacheValue> RedisValuesToCacheValue(RedisValue[] redisValues) + { + var result = new List(); + foreach (var redisValue in redisValues) { - if (!redisValue.HasValue) return CacheValue.NoValue; - if (redisValue == _nullValue) return CacheValue.Null; + if (!redisValue.HasValue) + continue; + if (redisValue == _nullValue) + continue; try { var value = redisValue.ToValueOfType(_options.Serializer); - return new CacheValue(value, true); + result.Add(value); } catch (Exception ex) { @@ -187,435 +164,457 @@ private CacheValue RedisValueToCacheValue(RedisValue redisValue) if (_options.ShouldThrowOnSerializationError) throw; - - return CacheValue.NoValue; } } - public async Task>> GetAllAsync(IEnumerable keys) - { - string[] keyArray = keys.ToArray(); - var values = await Database.StringGetAsync(keyArray.Select(k => (RedisKey)k).ToArray(), _options.ReadMode).AnyContext(); + return new CacheValue>(result, true); + } - var result = new Dictionary>(); - for (int i = 0; i < keyArray.Length; i++) - result.Add(keyArray[i], RedisValueToCacheValue(values[i])); + private CacheValue RedisValueToCacheValue(RedisValue redisValue) + { + if (!redisValue.HasValue) return CacheValue.NoValue; + if (redisValue == _nullValue) return CacheValue.Null; - return result; + try + { + var value = redisValue.ToValueOfType(_options.Serializer); + return new CacheValue(value, true); } - - public async Task>> GetListAsync(string key, int? page = null, int pageSize = 100) + catch (Exception ex) { - if (String.IsNullOrEmpty(key)) - throw new ArgumentNullException(nameof(key), "Key cannot be null or empty."); + if (_logger.IsEnabled(LogLevel.Error)) + _logger.LogError(ex, "Unable to deserialize value {Value} to type {Type}", redisValue, typeof(T).FullName); - if (page.HasValue && page.Value < 1) - throw new ArgumentNullException(nameof(page), "Page cannot be less than 1."); + if (_options.ShouldThrowOnSerializationError) + throw; - if (!page.HasValue) - { - var set = await Database.SortedSetRangeByScoreAsync(key, flags: _options.ReadMode).AnyContext(); - return RedisValuesToCacheValue(set); - } - else - { - long start = ((page.Value - 1) * pageSize); - long end = start + pageSize - 1; - var set = await Database.SortedSetRangeByRankAsync(key, start, end, flags: _options.ReadMode).AnyContext(); - return RedisValuesToCacheValue(set); - } + return CacheValue.NoValue; } + } - public async Task AddAsync(string key, T value, TimeSpan? 
expiresIn = null) - { - if (String.IsNullOrEmpty(key)) - throw new ArgumentNullException(nameof(key), "Key cannot be null or empty."); - - if (expiresIn?.Ticks < 0) - { - if (_logger.IsEnabled(LogLevel.Trace)) _logger.LogTrace("Removing expired key: {Key}", key); + public async Task>> GetAllAsync(IEnumerable keys) + { + string[] keyArray = keys.ToArray(); + var values = await Database.StringGetAsync(keyArray.Select(k => (RedisKey)k).ToArray(), _options.ReadMode).AnyContext(); - await this.RemoveAsync(key).AnyContext(); - return false; - } + var result = new Dictionary>(); + for (int i = 0; i < keyArray.Length; i++) + result.Add(keyArray[i], RedisValueToCacheValue(values[i])); - return await InternalSetAsync(key, value, expiresIn, When.NotExists).AnyContext(); - } + return result; + } - public async Task ListAddAsync(string key, IEnumerable values, TimeSpan? expiresIn = null) - { - if (String.IsNullOrEmpty(key)) - throw new ArgumentNullException(nameof(key), "Key cannot be null or empty."); + public async Task>> GetListAsync(string key, int? page = null, int pageSize = 100) + { + if (String.IsNullOrEmpty(key)) + throw new ArgumentNullException(nameof(key), "Key cannot be null or empty."); - if (values == null) - throw new ArgumentNullException(nameof(values)); + if (page.HasValue && page.Value < 1) + throw new ArgumentNullException(nameof(page), "Page cannot be less than 1."); - if (expiresIn?.Ticks < 0) - { - if (_logger.IsEnabled(LogLevel.Trace)) _logger.LogTrace("Removing expired key: {Key}", key); + if (!page.HasValue) + { + var set = await Database.SortedSetRangeByScoreAsync(key, flags: _options.ReadMode).AnyContext(); + return RedisValuesToCacheValue(set); + } + else + { + long start = ((page.Value - 1) * pageSize); + long end = start + pageSize - 1; + var set = await Database.SortedSetRangeByRankAsync(key, start, end, flags: _options.ReadMode).AnyContext(); + return RedisValuesToCacheValue(set); + } + } - await this.RemoveAsync(key).AnyContext(); - return default; - } + public async Task AddAsync(string key, T value, TimeSpan? expiresIn = null) + { + if (String.IsNullOrEmpty(key)) + throw new ArgumentNullException(nameof(key), "Key cannot be null or empty."); - long highestScore = 0; - try - { - var items = await Database.SortedSetRangeByRankWithScoresAsync(key, 0, 0, order: Order.Descending); - highestScore = items.Length > 0 ? (long)items.First().Score : 0; - } - catch (RedisServerException ex) when (ex.Message.StartsWith("WRONGTYPE")) - { - // convert legacy set to sortedset - var oldItems = await Database.SetMembersAsync(key).AnyContext(); - await Database.KeyDeleteAsync(key).AnyContext(); + if (expiresIn?.Ticks < 0) + { + if (_logger.IsEnabled(LogLevel.Trace)) _logger.LogTrace("Removing expired key: {Key}", key); - if (values is string) - { - var oldItemValues = new List(); - foreach (var oldItem in RedisValuesToCacheValue(oldItems).Value) - oldItemValues.Add(oldItem); + await RemoveAsync(key).AnyContext(); + return false; + } - highestScore = await ListAddAsync(key, oldItemValues).AnyContext(); - } - else - { - var oldItemValues = new List(); - foreach (var oldItem in RedisValuesToCacheValue(oldItems).Value) - oldItemValues.Add(oldItem); + return await InternalSetAsync(key, value, expiresIn, When.NotExists).AnyContext(); + } - highestScore = await ListAddAsync(key, oldItemValues).AnyContext(); - } - } + public async Task ListAddAsync(string key, IEnumerable values, TimeSpan? 
expiresIn = null) + { + if (String.IsNullOrEmpty(key)) + throw new ArgumentNullException(nameof(key), "Key cannot be null or empty."); - var redisValues = new List(); - if (values is string stringValue) - { - redisValues.Add(new SortedSetEntry(stringValue.ToRedisValue(_options.Serializer), highestScore + 1)); - } - else - { - var valuesArray = values.ToArray(); - for (int i = 0; i < valuesArray.Length; i++) - redisValues.Add(new SortedSetEntry(valuesArray[i].ToRedisValue(_options.Serializer), highestScore + i + 1)); - } + if (values == null) + throw new ArgumentNullException(nameof(values)); - long result = await Database.SortedSetAddAsync(key, redisValues.ToArray()).AnyContext(); - if (result > 0 && expiresIn.HasValue) - await SetExpirationAsync(key, expiresIn.Value).AnyContext(); + if (expiresIn?.Ticks < 0) + { + if (_logger.IsEnabled(LogLevel.Trace)) _logger.LogTrace("Removing expired key: {Key}", key); - return result; + await RemoveAsync(key).AnyContext(); + return default; } - public async Task ListRemoveAsync(string key, IEnumerable values, TimeSpan? expiresIn = null) + long highestScore = 0; + try { - if (String.IsNullOrEmpty(key)) - throw new ArgumentNullException(nameof(key), "Key cannot be null or empty."); - - if (values == null) - throw new ArgumentNullException(nameof(values)); + var items = await Database.SortedSetRangeByRankWithScoresAsync(key, 0, 0, order: Order.Descending); + highestScore = items.Length > 0 ? (long)items.First().Score : 0; + } + catch (RedisServerException ex) when (ex.Message.StartsWith("WRONGTYPE")) + { + // convert legacy set to sortedset + var oldItems = await Database.SetMembersAsync(key).AnyContext(); + await Database.KeyDeleteAsync(key).AnyContext(); - if (expiresIn?.Ticks < 0) + if (values is string) { - if (_logger.IsEnabled(LogLevel.Trace)) _logger.LogTrace("Removing expired key: {Key}", key); + var oldItemValues = new List(); + foreach (var oldItem in RedisValuesToCacheValue(oldItems).Value) + oldItemValues.Add(oldItem); - await this.RemoveAsync(key).AnyContext(); - return default; + highestScore = await ListAddAsync(key, oldItemValues).AnyContext(); } - - var redisValues = new List(); - if (values is string stringValue) - redisValues.Add(stringValue.ToRedisValue(_options.Serializer)); else - foreach (var value in values) - redisValues.Add(value.ToRedisValue(_options.Serializer)); - - try { - long result = await Database.SortedSetRemoveAsync(key, redisValues.ToArray()).AnyContext(); - if (result > 0 && expiresIn.HasValue) - await SetExpirationAsync(key, expiresIn.Value).AnyContext(); + var oldItemValues = new List(); + foreach (var oldItem in RedisValuesToCacheValue(oldItems).Value) + oldItemValues.Add(oldItem); - return result; + highestScore = await ListAddAsync(key, oldItemValues).AnyContext(); } - catch (RedisServerException ex) when (ex.Message.StartsWith("WRONGTYPE")) - { - // convert legacy set to sortedset - var oldItems = await Database.SetMembersAsync(key).AnyContext(); - await Database.KeyDeleteAsync(key).AnyContext(); + } - if (values is string) - { - var oldItemValues = new List(); - foreach (var oldItem in RedisValuesToCacheValue(oldItems).Value) - oldItemValues.Add(oldItem); + var redisValues = new List(); + if (values is string stringValue) + { + redisValues.Add(new SortedSetEntry(stringValue.ToRedisValue(_options.Serializer), highestScore + 1)); + } + else + { + var valuesArray = values.ToArray(); + for (int i = 0; i < valuesArray.Length; i++) + redisValues.Add(new 
SortedSetEntry(valuesArray[i].ToRedisValue(_options.Serializer), highestScore + i + 1)); + } - await ListAddAsync(key, oldItemValues).AnyContext(); - } - else - { - var oldItemValues = new List(); - foreach (var oldItem in RedisValuesToCacheValue(oldItems).Value) - oldItemValues.Add(oldItem); + long result = await Database.SortedSetAddAsync(key, redisValues.ToArray()).AnyContext(); + if (result > 0 && expiresIn.HasValue) + await SetExpirationAsync(key, expiresIn.Value).AnyContext(); - await ListAddAsync(key, oldItemValues).AnyContext(); - } + return result; + } - // try again - return await ListRemoveAsync(key, values).AnyContext(); - } - } + public async Task ListRemoveAsync(string key, IEnumerable values, TimeSpan? expiresIn = null) + { + if (String.IsNullOrEmpty(key)) + throw new ArgumentNullException(nameof(key), "Key cannot be null or empty."); - public Task SetAsync(string key, T value, TimeSpan? expiresIn = null) + if (values == null) + throw new ArgumentNullException(nameof(values)); + + if (expiresIn?.Ticks < 0) { - if (String.IsNullOrEmpty(key)) - throw new ArgumentNullException(nameof(key), "Key cannot be null or empty."); + if (_logger.IsEnabled(LogLevel.Trace)) _logger.LogTrace("Removing expired key: {Key}", key); - return InternalSetAsync(key, value, expiresIn); + await RemoveAsync(key).AnyContext(); + return default; } - public async Task SetIfHigherAsync(string key, double value, TimeSpan? expiresIn = null) + var redisValues = new List(); + if (values is string stringValue) + redisValues.Add(stringValue.ToRedisValue(_options.Serializer)); + else + foreach (var value in values) + redisValues.Add(value.ToRedisValue(_options.Serializer)); + + try { - if (String.IsNullOrEmpty(key)) - throw new ArgumentNullException(nameof(key), "Key cannot be null or empty."); + long result = await Database.SortedSetRemoveAsync(key, redisValues.ToArray()).AnyContext(); + if (result > 0 && expiresIn.HasValue) + await SetExpirationAsync(key, expiresIn.Value).AnyContext(); - await LoadScriptsAsync().AnyContext(); + return result; + } + catch (RedisServerException ex) when (ex.Message.StartsWith("WRONGTYPE")) + { + // convert legacy set to sortedset + var oldItems = await Database.SetMembersAsync(key).AnyContext(); + await Database.KeyDeleteAsync(key).AnyContext(); - if (expiresIn.HasValue) + if (values is string) { - var result = await Database.ScriptEvaluateAsync(_setIfHigher, new { key = (RedisKey)key, value, expires = (int)expiresIn.Value.TotalMilliseconds }).AnyContext(); - return (double)result; + var oldItemValues = new List(); + foreach (var oldItem in RedisValuesToCacheValue(oldItems).Value) + oldItemValues.Add(oldItem); + + await ListAddAsync(key, oldItemValues).AnyContext(); } else { - var result = await Database.ScriptEvaluateAsync(_setIfHigher, new { key = (RedisKey)key, value, expires = RedisValue.EmptyString }).AnyContext(); - return (double)result; + var oldItemValues = new List(); + foreach (var oldItem in RedisValuesToCacheValue(oldItems).Value) + oldItemValues.Add(oldItem); + + await ListAddAsync(key, oldItemValues).AnyContext(); } + + // try again + return await ListRemoveAsync(key, values).AnyContext(); } + } - public async Task SetIfHigherAsync(string key, long value, TimeSpan? expiresIn = null) - { - if (String.IsNullOrEmpty(key)) - throw new ArgumentNullException(nameof(key), "Key cannot be null or empty."); + public Task SetAsync(string key, T value, TimeSpan? 
expiresIn = null) + { + if (String.IsNullOrEmpty(key)) + throw new ArgumentNullException(nameof(key), "Key cannot be null or empty."); - await LoadScriptsAsync().AnyContext(); + return InternalSetAsync(key, value, expiresIn); + } - if (expiresIn.HasValue) - { - var result = await Database.ScriptEvaluateAsync(_setIfHigher, new { key = (RedisKey)key, value, expires = (int)expiresIn.Value.TotalMilliseconds }).AnyContext(); - return (long)result; - } - else - { - var result = await Database.ScriptEvaluateAsync(_setIfHigher, new { key = (RedisKey)key, value, expires = RedisValue.EmptyString }).AnyContext(); - return (long)result; - } - } + public async Task SetIfHigherAsync(string key, double value, TimeSpan? expiresIn = null) + { + if (String.IsNullOrEmpty(key)) + throw new ArgumentNullException(nameof(key), "Key cannot be null or empty."); + + await LoadScriptsAsync().AnyContext(); - public async Task SetIfLowerAsync(string key, double value, TimeSpan? expiresIn = null) + if (expiresIn.HasValue) + { + var result = await Database.ScriptEvaluateAsync(_setIfHigher, new { key = (RedisKey)key, value, expires = (int)expiresIn.Value.TotalMilliseconds }).AnyContext(); + return (double)result; + } + else { - if (String.IsNullOrEmpty(key)) - throw new ArgumentNullException(nameof(key), "Key cannot be null or empty."); + var result = await Database.ScriptEvaluateAsync(_setIfHigher, new { key = (RedisKey)key, value, expires = RedisValue.EmptyString }).AnyContext(); + return (double)result; + } + } - await LoadScriptsAsync().AnyContext(); + public async Task SetIfHigherAsync(string key, long value, TimeSpan? expiresIn = null) + { + if (String.IsNullOrEmpty(key)) + throw new ArgumentNullException(nameof(key), "Key cannot be null or empty."); - if (expiresIn.HasValue) - { - var result = await Database.ScriptEvaluateAsync(_setIfLower, new { key = (RedisKey)key, value, expires = (int)expiresIn.Value.TotalMilliseconds }).AnyContext(); - return (double)result; - } - else - { - var result = await Database.ScriptEvaluateAsync(_setIfLower, new { key = (RedisKey)key, value, expires = RedisValue.EmptyString }).AnyContext(); - return (double)result; - } - } + await LoadScriptsAsync().AnyContext(); - public async Task SetIfLowerAsync(string key, long value, TimeSpan? expiresIn = null) + if (expiresIn.HasValue) + { + var result = await Database.ScriptEvaluateAsync(_setIfHigher, new { key = (RedisKey)key, value, expires = (int)expiresIn.Value.TotalMilliseconds }).AnyContext(); + return (long)result; + } + else { - if (String.IsNullOrEmpty(key)) - throw new ArgumentNullException(nameof(key), "Key cannot be null or empty."); + var result = await Database.ScriptEvaluateAsync(_setIfHigher, new { key = (RedisKey)key, value, expires = RedisValue.EmptyString }).AnyContext(); + return (long)result; + } + } - await LoadScriptsAsync().AnyContext(); + public async Task SetIfLowerAsync(string key, double value, TimeSpan? 
expiresIn = null) + { + if (String.IsNullOrEmpty(key)) + throw new ArgumentNullException(nameof(key), "Key cannot be null or empty."); - if (expiresIn.HasValue) - { - var result = await Database.ScriptEvaluateAsync(_setIfLower, new { key = (RedisKey)key, value, expires = (int)expiresIn.Value.TotalMilliseconds }).AnyContext(); - return (long)result; - } - else - { - var result = await Database.ScriptEvaluateAsync(_setIfLower, new { key = (RedisKey)key, value, expires = RedisValue.EmptyString }).AnyContext(); - return (long)result; - } - } + await LoadScriptsAsync().AnyContext(); - private Task InternalSetAsync(string key, T value, TimeSpan? expiresIn = null, When when = When.Always, CommandFlags flags = CommandFlags.None) + if (expiresIn.HasValue) { - var redisValue = value.ToRedisValue(_options.Serializer); - return Database.StringSetAsync(key, redisValue, expiresIn, when, flags); + var result = await Database.ScriptEvaluateAsync(_setIfLower, new { key = (RedisKey)key, value, expires = (int)expiresIn.Value.TotalMilliseconds }).AnyContext(); + return (double)result; } - - public async Task SetAllAsync(IDictionary values, TimeSpan? expiresIn = null) + else { - if (values == null || values.Count == 0) - return 0; + var result = await Database.ScriptEvaluateAsync(_setIfLower, new { key = (RedisKey)key, value, expires = RedisValue.EmptyString }).AnyContext(); + return (double)result; + } + } - var tasks = new List>(); - foreach (var pair in values) - tasks.Add(Database.StringSetAsync(pair.Key, pair.Value.ToRedisValue(_options.Serializer), expiresIn)); + public async Task SetIfLowerAsync(string key, long value, TimeSpan? expiresIn = null) + { + if (String.IsNullOrEmpty(key)) + throw new ArgumentNullException(nameof(key), "Key cannot be null or empty."); - bool[] results = await Task.WhenAll(tasks).AnyContext(); - return results.Count(r => r); - } + await LoadScriptsAsync().AnyContext(); - public Task ReplaceAsync(string key, T value, TimeSpan? expiresIn = null) + if (expiresIn.HasValue) { - if (String.IsNullOrEmpty(key)) - throw new ArgumentNullException(nameof(key), "Key cannot be null or empty."); - - return InternalSetAsync(key, value, expiresIn, When.Exists); + var result = await Database.ScriptEvaluateAsync(_setIfLower, new { key = (RedisKey)key, value, expires = (int)expiresIn.Value.TotalMilliseconds }).AnyContext(); + return (long)result; } - - public async Task ReplaceIfEqualAsync(string key, T value, T expected, TimeSpan? expiresIn = null) + else { - if (String.IsNullOrEmpty(key)) - throw new ArgumentNullException(nameof(key), "Key cannot be null or empty."); + var result = await Database.ScriptEvaluateAsync(_setIfLower, new { key = (RedisKey)key, value, expires = RedisValue.EmptyString }).AnyContext(); + return (long)result; + } + } - await LoadScriptsAsync().AnyContext(); + private Task InternalSetAsync(string key, T value, TimeSpan? 
expiresIn = null, When when = When.Always, CommandFlags flags = CommandFlags.None) + { + var redisValue = value.ToRedisValue(_options.Serializer); + return Database.StringSetAsync(key, redisValue, expiresIn, when, flags); + } - var redisValue = value.ToRedisValue(_options.Serializer); - var expectedValue = expected.ToRedisValue(_options.Serializer); - RedisResult redisResult; - if (expiresIn.HasValue) - redisResult = await Database.ScriptEvaluateAsync(_replaceIfEqual, new { key = (RedisKey)key, value = redisValue, expected = expectedValue, expires = (int)expiresIn.Value.TotalMilliseconds }).AnyContext(); - else - redisResult = await Database.ScriptEvaluateAsync(_replaceIfEqual, new { key = (RedisKey)key, value = redisValue, expected = expectedValue, expires = "" }).AnyContext(); + public async Task SetAllAsync(IDictionary values, TimeSpan? expiresIn = null) + { + if (values == null || values.Count == 0) + return 0; - var result = (int)redisResult; + var tasks = new List>(); + foreach (var pair in values) + tasks.Add(Database.StringSetAsync(pair.Key, pair.Value.ToRedisValue(_options.Serializer), expiresIn)); - return result > 0; - } + bool[] results = await Task.WhenAll(tasks).AnyContext(); + return results.Count(r => r); + } - public async Task IncrementAsync(string key, double amount = 1, TimeSpan? expiresIn = null) - { - if (String.IsNullOrEmpty(key)) - throw new ArgumentNullException(nameof(key), "Key cannot be null or empty."); + public Task ReplaceAsync(string key, T value, TimeSpan? expiresIn = null) + { + if (String.IsNullOrEmpty(key)) + throw new ArgumentNullException(nameof(key), "Key cannot be null or empty."); - if (expiresIn?.Ticks < 0) - { - await this.RemoveAsync(key).AnyContext(); - return -1; - } + return InternalSetAsync(key, value, expiresIn, When.Exists); + } - if (expiresIn.HasValue) - { - await LoadScriptsAsync().AnyContext(); - var result = await Database.ScriptEvaluateAsync(_incrementWithExpire, new { key = (RedisKey)key, value = amount, expires = (int)expiresIn.Value.TotalMilliseconds }).AnyContext(); - return (double)result; - } + public async Task ReplaceIfEqualAsync(string key, T value, T expected, TimeSpan? expiresIn = null) + { + if (String.IsNullOrEmpty(key)) + throw new ArgumentNullException(nameof(key), "Key cannot be null or empty."); - return await Database.StringIncrementAsync(key, amount).AnyContext(); - } + await LoadScriptsAsync().AnyContext(); - public async Task IncrementAsync(string key, long amount = 1, TimeSpan? 
expiresIn = null) - { - if (String.IsNullOrEmpty(key)) - throw new ArgumentNullException(nameof(key), "Key cannot be null or empty."); + var redisValue = value.ToRedisValue(_options.Serializer); + var expectedValue = expected.ToRedisValue(_options.Serializer); + RedisResult redisResult; + if (expiresIn.HasValue) + redisResult = await Database.ScriptEvaluateAsync(_replaceIfEqual, new { key = (RedisKey)key, value = redisValue, expected = expectedValue, expires = (int)expiresIn.Value.TotalMilliseconds }).AnyContext(); + else + redisResult = await Database.ScriptEvaluateAsync(_replaceIfEqual, new { key = (RedisKey)key, value = redisValue, expected = expectedValue, expires = "" }).AnyContext(); - if (expiresIn?.Ticks < 0) - { - await this.RemoveAsync(key).AnyContext(); - return -1; - } + var result = (int)redisResult; - if (expiresIn.HasValue) - { - await LoadScriptsAsync().AnyContext(); - var result = await Database.ScriptEvaluateAsync(_incrementWithExpire, new { key = (RedisKey)key, value = amount, expires = (int)expiresIn.Value.TotalMilliseconds }).AnyContext(); - return (long)result; - } + return result > 0; + } - return await Database.StringIncrementAsync(key, amount).AnyContext(); - } + public async Task IncrementAsync(string key, double amount = 1, TimeSpan? expiresIn = null) + { + if (String.IsNullOrEmpty(key)) + throw new ArgumentNullException(nameof(key), "Key cannot be null or empty."); - public Task ExistsAsync(string key) + if (expiresIn?.Ticks < 0) { - if (String.IsNullOrEmpty(key)) - throw new ArgumentNullException(nameof(key), "Key cannot be null or empty."); - - return Database.KeyExistsAsync(key); + await RemoveAsync(key).AnyContext(); + return -1; } - public Task GetExpirationAsync(string key) + if (expiresIn.HasValue) { - if (String.IsNullOrEmpty(key)) - throw new ArgumentNullException(nameof(key), "Key cannot be null or empty."); + await LoadScriptsAsync().AnyContext(); + var result = await Database.ScriptEvaluateAsync(_incrementWithExpire, new { key = (RedisKey)key, value = amount, expires = (int)expiresIn.Value.TotalMilliseconds }).AnyContext(); + return (double)result; + } - return Database.KeyTimeToLiveAsync(key); + return await Database.StringIncrementAsync(key, amount).AnyContext(); + } + + public async Task IncrementAsync(string key, long amount = 1, TimeSpan? 
expiresIn = null) + { + if (String.IsNullOrEmpty(key)) + throw new ArgumentNullException(nameof(key), "Key cannot be null or empty."); + + if (expiresIn?.Ticks < 0) + { + await RemoveAsync(key).AnyContext(); + return -1; } - public Task SetExpirationAsync(string key, TimeSpan expiresIn) + if (expiresIn.HasValue) { - if (String.IsNullOrEmpty(key)) - throw new ArgumentNullException(nameof(key), "Key cannot be null or empty."); + await LoadScriptsAsync().AnyContext(); + var result = await Database.ScriptEvaluateAsync(_incrementWithExpire, new { key = (RedisKey)key, value = amount, expires = (int)expiresIn.Value.TotalMilliseconds }).AnyContext(); + return (long)result; + } + + return await Database.StringIncrementAsync(key, amount).AnyContext(); + } - if (expiresIn.Ticks < 0) - return this.RemoveAsync(key); + public Task ExistsAsync(string key) + { + if (String.IsNullOrEmpty(key)) + throw new ArgumentNullException(nameof(key), "Key cannot be null or empty."); - return Database.KeyExpireAsync(key, expiresIn); - } + return Database.KeyExistsAsync(key); + } + + public Task GetExpirationAsync(string key) + { + if (String.IsNullOrEmpty(key)) + throw new ArgumentNullException(nameof(key), "Key cannot be null or empty."); + + return Database.KeyTimeToLiveAsync(key); + } - private async Task LoadScriptsAsync() + public Task SetExpirationAsync(string key, TimeSpan expiresIn) + { + if (String.IsNullOrEmpty(key)) + throw new ArgumentNullException(nameof(key), "Key cannot be null or empty."); + + if (expiresIn.Ticks < 0) + return RemoveAsync(key); + + return Database.KeyExpireAsync(key, expiresIn); + } + + private async Task LoadScriptsAsync() + { + if (_scriptsLoaded) + return; + + using (await _lock.LockAsync().AnyContext()) { if (_scriptsLoaded) return; - using (await _lock.LockAsync().AnyContext()) - { - if (_scriptsLoaded) - return; - - var incrementWithExpire = LuaScript.Prepare(IncrementWithScript); - var removeIfEqual = LuaScript.Prepare(RemoveIfEqualScript); - var replaceIfEqual = LuaScript.Prepare(ReplaceIfEqualScript); - var setIfHigher = LuaScript.Prepare(SetIfHigherScript); - var setIfLower = LuaScript.Prepare(SetIfLowerScript); + var incrementWithExpire = LuaScript.Prepare(IncrementWithScript); + var removeIfEqual = LuaScript.Prepare(RemoveIfEqualScript); + var replaceIfEqual = LuaScript.Prepare(ReplaceIfEqualScript); + var setIfHigher = LuaScript.Prepare(SetIfHigherScript); + var setIfLower = LuaScript.Prepare(SetIfLowerScript); - foreach (var endpoint in _options.ConnectionMultiplexer.GetEndPoints()) - { - var server = _options.ConnectionMultiplexer.GetServer(endpoint); - if (server.IsReplica) - continue; - - _incrementWithExpire = await incrementWithExpire.LoadAsync(server).AnyContext(); - _removeIfEqual = await removeIfEqual.LoadAsync(server).AnyContext(); - _replaceIfEqual = await replaceIfEqual.LoadAsync(server).AnyContext(); - _setIfHigher = await setIfHigher.LoadAsync(server).AnyContext(); - _setIfLower = await setIfLower.LoadAsync(server).AnyContext(); - } + foreach (var endpoint in _options.ConnectionMultiplexer.GetEndPoints()) + { + var server = _options.ConnectionMultiplexer.GetServer(endpoint); + if (server.IsReplica) + continue; - _scriptsLoaded = true; + _incrementWithExpire = await incrementWithExpire.LoadAsync(server).AnyContext(); + _removeIfEqual = await removeIfEqual.LoadAsync(server).AnyContext(); + _replaceIfEqual = await replaceIfEqual.LoadAsync(server).AnyContext(); + _setIfHigher = await setIfHigher.LoadAsync(server).AnyContext(); + _setIfLower = await 
setIfLower.LoadAsync(server).AnyContext(); } - } - - private void ConnectionMultiplexerOnConnectionRestored(object sender, ConnectionFailedEventArgs connectionFailedEventArgs) - { - if (_logger.IsEnabled(LogLevel.Information)) _logger.LogInformation("Redis connection restored."); - _scriptsLoaded = false; - } - public void Dispose() - { - _options.ConnectionMultiplexer.ConnectionRestored -= ConnectionMultiplexerOnConnectionRestored; + _scriptsLoaded = true; } + } - ISerializer IHaveSerializer.Serializer => _options.Serializer; + private void ConnectionMultiplexerOnConnectionRestored(object sender, ConnectionFailedEventArgs connectionFailedEventArgs) + { + if (_logger.IsEnabled(LogLevel.Information)) _logger.LogInformation("Redis connection restored."); + _scriptsLoaded = false; + } - private static readonly string IncrementWithScript = EmbeddedResourceLoader.GetEmbeddedResource("Foundatio.Redis.Scripts.IncrementWithExpire.lua"); - private static readonly string RemoveIfEqualScript = EmbeddedResourceLoader.GetEmbeddedResource("Foundatio.Redis.Scripts.RemoveIfEqual.lua"); - private static readonly string ReplaceIfEqualScript = EmbeddedResourceLoader.GetEmbeddedResource("Foundatio.Redis.Scripts.ReplaceIfEqual.lua"); - private static readonly string SetIfHigherScript = EmbeddedResourceLoader.GetEmbeddedResource("Foundatio.Redis.Scripts.SetIfHigher.lua"); - private static readonly string SetIfLowerScript = EmbeddedResourceLoader.GetEmbeddedResource("Foundatio.Redis.Scripts.SetIfLower.lua"); + public void Dispose() + { + _options.ConnectionMultiplexer.ConnectionRestored -= ConnectionMultiplexerOnConnectionRestored; } + + ISerializer IHaveSerializer.Serializer => _options.Serializer; + + private static readonly string IncrementWithScript = EmbeddedResourceLoader.GetEmbeddedResource("Foundatio.Redis.Scripts.IncrementWithExpire.lua"); + private static readonly string RemoveIfEqualScript = EmbeddedResourceLoader.GetEmbeddedResource("Foundatio.Redis.Scripts.RemoveIfEqual.lua"); + private static readonly string ReplaceIfEqualScript = EmbeddedResourceLoader.GetEmbeddedResource("Foundatio.Redis.Scripts.ReplaceIfEqual.lua"); + private static readonly string SetIfHigherScript = EmbeddedResourceLoader.GetEmbeddedResource("Foundatio.Redis.Scripts.SetIfHigher.lua"); + private static readonly string SetIfLowerScript = EmbeddedResourceLoader.GetEmbeddedResource("Foundatio.Redis.Scripts.SetIfLower.lua"); } diff --git a/src/Foundatio.Redis/Cache/RedisCacheClientOptions.cs b/src/Foundatio.Redis/Cache/RedisCacheClientOptions.cs index 35328dc..9a80b69 100644 --- a/src/Foundatio.Redis/Cache/RedisCacheClientOptions.cs +++ b/src/Foundatio.Redis/Cache/RedisCacheClientOptions.cs @@ -1,41 +1,40 @@ using StackExchange.Redis; -namespace Foundatio.Caching +namespace Foundatio.Caching; + +public class RedisCacheClientOptions : SharedOptions { - public class RedisCacheClientOptions : SharedOptions - { - public IConnectionMultiplexer ConnectionMultiplexer { get; set; } + public IConnectionMultiplexer ConnectionMultiplexer { get; set; } - /// - /// Whether or not an error when deserializing a cache value should result in an exception being thrown or if it should just return an empty cache value - /// - public bool ShouldThrowOnSerializationError { get; set; } = true; + /// + /// Whether or not an error when deserializing a cache value should result in an exception being thrown or if it should just return an empty cache value + /// + public bool ShouldThrowOnSerializationError { get; set; } = true; - /// - /// The behaviour 
required when performing read operations from cache - /// - public CommandFlags ReadMode { get; set; } = CommandFlags.None; + /// + /// The behaviour required when performing read operations from cache + /// + public CommandFlags ReadMode { get; set; } = CommandFlags.None; +} + +public class RedisCacheClientOptionsBuilder : SharedOptionsBuilder +{ + public RedisCacheClientOptionsBuilder ConnectionMultiplexer(IConnectionMultiplexer connectionMultiplexer) + { + Target.ConnectionMultiplexer = connectionMultiplexer; + return this; + } + + public RedisCacheClientOptionsBuilder ShouldThrowOnSerializationError(bool shouldThrow) + { + Target.ShouldThrowOnSerializationError = shouldThrow; + return this; } - public class RedisCacheClientOptionsBuilder : SharedOptionsBuilder + public RedisCacheClientOptionsBuilder ReadMode(CommandFlags commandFlags) { - public RedisCacheClientOptionsBuilder ConnectionMultiplexer(IConnectionMultiplexer connectionMultiplexer) - { - Target.ConnectionMultiplexer = connectionMultiplexer; - return this; - } - - public RedisCacheClientOptionsBuilder ShouldThrowOnSerializationError(bool shouldThrow) - { - Target.ShouldThrowOnSerializationError = shouldThrow; - return this; - } - - public RedisCacheClientOptionsBuilder ReadMode(CommandFlags commandFlags) - { - Target.ReadMode = commandFlags; - return this; - } + Target.ReadMode = commandFlags; + return this; } } diff --git a/src/Foundatio.Redis/Cache/RedisHybridCacheClient.cs b/src/Foundatio.Redis/Cache/RedisHybridCacheClient.cs index d450f71..6fc6e91 100644 --- a/src/Foundatio.Redis/Cache/RedisHybridCacheClient.cs +++ b/src/Foundatio.Redis/Cache/RedisHybridCacheClient.cs @@ -1,32 +1,31 @@ using Foundatio.Messaging; -namespace Foundatio.Caching -{ +namespace Foundatio.Caching; - public class RedisHybridCacheClient : HybridCacheClient - { - public RedisHybridCacheClient(RedisHybridCacheClientOptions options, InMemoryCacheClientOptions localOptions = null) - : base(new RedisCacheClient(o => o - .ConnectionMultiplexer(options.ConnectionMultiplexer) - .Serializer(options.Serializer) - .LoggerFactory(options.LoggerFactory) - .ShouldThrowOnSerializationError(options.ShouldThrowOnSerializationError) - .ReadMode(options.ReadMode)), - new RedisMessageBus(o => o - .Subscriber(options.ConnectionMultiplexer.GetSubscriber()) - .Topic(options.RedisChannelName) - .Serializer(options.Serializer) - .LoggerFactory(options.LoggerFactory)), localOptions, options.LoggerFactory) - { } - public RedisHybridCacheClient(Builder config, Builder localConfig = null) - : this(config(new RedisHybridCacheClientOptionsBuilder()).Build(), localConfig(new InMemoryCacheClientOptionsBuilder()).Build()) { } +public class RedisHybridCacheClient : HybridCacheClient +{ + public RedisHybridCacheClient(RedisHybridCacheClientOptions options, InMemoryCacheClientOptions localOptions = null) + : base(new RedisCacheClient(o => o + .ConnectionMultiplexer(options.ConnectionMultiplexer) + .Serializer(options.Serializer) + .LoggerFactory(options.LoggerFactory) + .ShouldThrowOnSerializationError(options.ShouldThrowOnSerializationError) + .ReadMode(options.ReadMode)), + new RedisMessageBus(o => o + .Subscriber(options.ConnectionMultiplexer.GetSubscriber()) + .Topic(options.RedisChannelName) + .Serializer(options.Serializer) + .LoggerFactory(options.LoggerFactory)), localOptions, options.LoggerFactory) + { } - public override void Dispose() - { - base.Dispose(); - _distributedCache.Dispose(); - _messageBus.Dispose(); - } + public RedisHybridCacheClient(Builder config, Builder 
localConfig = null) + : this(config(new RedisHybridCacheClientOptionsBuilder()).Build(), localConfig(new InMemoryCacheClientOptionsBuilder()).Build()) { } + + public override void Dispose() + { + base.Dispose(); + _distributedCache.Dispose(); + _messageBus.Dispose(); } } diff --git a/src/Foundatio.Redis/Cache/RedisHybridCacheClientOptions.cs b/src/Foundatio.Redis/Cache/RedisHybridCacheClientOptions.cs index 1c24dd2..3e39423 100644 --- a/src/Foundatio.Redis/Cache/RedisHybridCacheClientOptions.cs +++ b/src/Foundatio.Redis/Cache/RedisHybridCacheClientOptions.cs @@ -1,32 +1,31 @@ using StackExchange.Redis; -namespace Foundatio.Caching +namespace Foundatio.Caching; + +public class RedisHybridCacheClientOptions : RedisCacheClientOptions { - public class RedisHybridCacheClientOptions : RedisCacheClientOptions + public string RedisChannelName { get; set; } = "cache-messages"; +} + +public class RedisHybridCacheClientOptionsBuilder : + SharedOptionsBuilder +{ + + public RedisHybridCacheClientOptionsBuilder ConnectionMultiplexer(IConnectionMultiplexer connectionMultiplexer) { - public string RedisChannelName { get; set; } = "cache-messages"; + Target.ConnectionMultiplexer = connectionMultiplexer; + return this; } - public class RedisHybridCacheClientOptionsBuilder : - SharedOptionsBuilder + public RedisHybridCacheClientOptionsBuilder RedisChannelName(string redisChannelName) { + Target.RedisChannelName = redisChannelName; + return this; + } - public RedisHybridCacheClientOptionsBuilder ConnectionMultiplexer(IConnectionMultiplexer connectionMultiplexer) - { - Target.ConnectionMultiplexer = connectionMultiplexer; - return this; - } - - public RedisHybridCacheClientOptionsBuilder RedisChannelName(string redisChannelName) - { - Target.RedisChannelName = redisChannelName; - return this; - } - - public RedisHybridCacheClientOptionsBuilder ShouldThrowOnSerializationError(bool shouldThrow) - { - Target.ShouldThrowOnSerializationError = shouldThrow; - return this; - } + public RedisHybridCacheClientOptionsBuilder ShouldThrowOnSerializationError(bool shouldThrow) + { + Target.ShouldThrowOnSerializationError = shouldThrow; + return this; } } diff --git a/src/Foundatio.Redis/Extensions/RedisExtensions.cs b/src/Foundatio.Redis/Extensions/RedisExtensions.cs index fda32fa..0b05f19 100644 --- a/src/Foundatio.Redis/Extensions/RedisExtensions.cs +++ b/src/Foundatio.Redis/Extensions/RedisExtensions.cs @@ -4,113 +4,112 @@ using Foundatio.Utility; using StackExchange.Redis; -namespace Foundatio.Redis +namespace Foundatio.Redis; + +internal static class RedisValueExtensions { - internal static class RedisValueExtensions - { - private static readonly RedisValue _nullValue = "@@NULL"; + private static readonly RedisValue _nullValue = "@@NULL"; - public static T ToValueOfType(this RedisValue redisValue, ISerializer serializer) - { - T value; - var type = typeof(T); + public static T ToValueOfType(this RedisValue redisValue, ISerializer serializer) + { + T value; + var type = typeof(T); - if (type == TypeHelper.BoolType || type == TypeHelper.StringType || type.IsNumeric()) - value = (T)Convert.ChangeType(redisValue, type); - else if (type == TypeHelper.NullableBoolType || type.IsNullableNumeric()) - value = redisValue.IsNull ? 
default : (T)Convert.ChangeType(redisValue, Nullable.GetUnderlyingType(type)); - else - return serializer.Deserialize((byte[])redisValue); + if (type == TypeHelper.BoolType || type == TypeHelper.StringType || type.IsNumeric()) + value = (T)Convert.ChangeType(redisValue, type); + else if (type == TypeHelper.NullableBoolType || type.IsNullableNumeric()) + value = redisValue.IsNull ? default : (T)Convert.ChangeType(redisValue, Nullable.GetUnderlyingType(type)); + else + return serializer.Deserialize((byte[])redisValue); - return value; - } + return value; + } - public static RedisValue ToRedisValue(this T value, ISerializer serializer) - { - var redisValue = _nullValue; - if (value == null) - return redisValue; + public static RedisValue ToRedisValue(this T value, ISerializer serializer) + { + var redisValue = _nullValue; + if (value == null) + return redisValue; - var t = typeof(T); - if (t == TypeHelper.StringType) - redisValue = value.ToString(); - else if (t == TypeHelper.BoolType) - redisValue = Convert.ToBoolean(value); - else if (t == TypeHelper.ByteType) - redisValue = Convert.ToInt16(value); - else if (t == TypeHelper.Int16Type) - redisValue = Convert.ToInt16(value); - else if (t == TypeHelper.Int32Type) - redisValue = Convert.ToInt32(value); - else if (t == TypeHelper.Int64Type) - redisValue = Convert.ToInt64(value); - else if (t == TypeHelper.DoubleType) - redisValue = Convert.ToDouble(value); - else if (t == TypeHelper.StringType) - redisValue = value.ToString(); - else if (t == TypeHelper.CharType) - redisValue = Convert.ToString(value); - else if (t == TypeHelper.SByteType) - redisValue = Convert.ToSByte(value); - else if (t == TypeHelper.UInt16Type) - redisValue = Convert.ToUInt32(value); - else if (t == TypeHelper.UInt32Type) - redisValue = Convert.ToUInt32(value); - else if (t == TypeHelper.UInt64Type) - redisValue = Convert.ToUInt64(value); - else if (t == TypeHelper.SingleType) - redisValue = Convert.ToSingle(value); - //else if (type == TypeHelper.DecimalType) - // redisValue = Convert.ToDecimal(value); - //else if (type == TypeHelper.DateTimeType) - // redisValue = Convert.ToDateTime(value); - else if (t == TypeHelper.ByteArrayType) - redisValue = value as byte[]; - else - redisValue = serializer.SerializeToBytes(value); + var t = typeof(T); + if (t == TypeHelper.StringType) + redisValue = value.ToString(); + else if (t == TypeHelper.BoolType) + redisValue = Convert.ToBoolean(value); + else if (t == TypeHelper.ByteType) + redisValue = Convert.ToInt16(value); + else if (t == TypeHelper.Int16Type) + redisValue = Convert.ToInt16(value); + else if (t == TypeHelper.Int32Type) + redisValue = Convert.ToInt32(value); + else if (t == TypeHelper.Int64Type) + redisValue = Convert.ToInt64(value); + else if (t == TypeHelper.DoubleType) + redisValue = Convert.ToDouble(value); + else if (t == TypeHelper.StringType) + redisValue = value.ToString(); + else if (t == TypeHelper.CharType) + redisValue = Convert.ToString(value); + else if (t == TypeHelper.SByteType) + redisValue = Convert.ToSByte(value); + else if (t == TypeHelper.UInt16Type) + redisValue = Convert.ToUInt32(value); + else if (t == TypeHelper.UInt32Type) + redisValue = Convert.ToUInt32(value); + else if (t == TypeHelper.UInt64Type) + redisValue = Convert.ToUInt64(value); + else if (t == TypeHelper.SingleType) + redisValue = Convert.ToSingle(value); + //else if (type == TypeHelper.DecimalType) + // redisValue = Convert.ToDecimal(value); + //else if (type == TypeHelper.DateTimeType) + // redisValue = Convert.ToDateTime(value); + 
else if (t == TypeHelper.ByteArrayType)
+            redisValue = value as byte[];
+        else
+            redisValue = serializer.SerializeToBytes(value);
-            return redisValue;
-        }
+        return redisValue;
     }
+}
-    public static class RedisExtensions
+public static class RedisExtensions
+{
+    public static bool IsCluster(this IConnectionMultiplexer muxer)
     {
-        public static bool IsCluster(this IConnectionMultiplexer muxer)
-        {
-            var configuration = ConfigurationOptions.Parse(muxer.Configuration);
-            if (configuration.Proxy == Proxy.Twemproxy)
-                return true;
+        var configuration = ConfigurationOptions.Parse(muxer.Configuration);
+        if (configuration.Proxy == Proxy.Twemproxy)
+            return true;
-            int standaloneCount = 0, clusterCount = 0, sentinelCount = 0;
-            foreach (var endPoint in muxer.GetEndPoints())
+        int standaloneCount = 0, clusterCount = 0, sentinelCount = 0;
+        foreach (var endPoint in muxer.GetEndPoints())
+        {
+            var server = muxer.GetServer(endPoint);
+            if (server.IsConnected)
             {
-                var server = muxer.GetServer(endPoint);
-                if (server.IsConnected)
+                // count the server types
+                switch (server.ServerType)
                 {
-                    // count the server types
-                    switch (server.ServerType)
-                    {
-                        case ServerType.Twemproxy:
-                        case ServerType.Standalone:
-                            standaloneCount++;
-                            break;
-                        case ServerType.Sentinel:
-                            sentinelCount++;
-                            break;
-                        case ServerType.Cluster:
-                            clusterCount++;
-                            break;
-                    }
+                    case ServerType.Twemproxy:
+                    case ServerType.Standalone:
+                        standaloneCount++;
+                        break;
+                    case ServerType.Sentinel:
+                        sentinelCount++;
+                        break;
+                    case ServerType.Cluster:
+                        clusterCount++;
+                        break;
                 }
             }
+        }
-            if (clusterCount != 0)
-                return true;
+        if (clusterCount != 0)
+            return true;
-            if (standaloneCount == 0 && sentinelCount > 0)
-                return true;
+        if (standaloneCount == 0 && sentinelCount > 0)
+            return true;
-            return false;
-        }
+        return false;
     }
 }
diff --git a/src/Foundatio.Redis/Extensions/TaskExtensions.cs b/src/Foundatio.Redis/Extensions/TaskExtensions.cs
index f8fa69d..f0fd963 100644
--- a/src/Foundatio.Redis/Extensions/TaskExtensions.cs
+++ b/src/Foundatio.Redis/Extensions/TaskExtensions.cs
@@ -4,26 +4,25 @@
 using System.Threading.Tasks;
 using Foundatio.AsyncEx;
-namespace Foundatio.Extensions
+namespace Foundatio.Extensions;
+
+internal static class TaskExtensions
 {
-    internal static class TaskExtensions
+    [DebuggerStepThrough]
+    public static ConfiguredTaskAwaitable<TResult> AnyContext<TResult>(this Task<TResult> task)
     {
-        [DebuggerStepThrough]
-        public static ConfiguredTaskAwaitable<TResult> AnyContext<TResult>(this Task<TResult> task)
-        {
-            return task.ConfigureAwait(continueOnCapturedContext: false);
-        }
+        return task.ConfigureAwait(continueOnCapturedContext: false);
+    }
-        [DebuggerStepThrough]
-        public static ConfiguredTaskAwaitable AnyContext(this Task task)
-        {
-            return task.ConfigureAwait(continueOnCapturedContext: false);
-        }
+    [DebuggerStepThrough]
+    public static ConfiguredTaskAwaitable AnyContext(this Task task)
+    {
+        return task.ConfigureAwait(continueOnCapturedContext: false);
+    }
-        [DebuggerStepThrough]
-        public static ConfiguredTaskAwaitable<TResult> AnyContext<TResult>(this AwaitableDisposable<TResult> task) where TResult : IDisposable
-        {
-            return task.ConfigureAwait(continueOnCapturedContext: false);
-        }
+    [DebuggerStepThrough]
+    public static ConfiguredTaskAwaitable<TResult> AnyContext<TResult>(this AwaitableDisposable<TResult> task) where TResult : IDisposable
+    {
+        return task.ConfigureAwait(continueOnCapturedContext: false);
     }
 }
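A quick note on the two extension files above: AnyContext() is only a ConfigureAwait(false) wrapper so library code never resumes on a captured synchronization context, and the public IsCluster() extension inspects the multiplexer's endpoints to decide whether the connection is backed by a cluster or sentinel topology. The sketch below is a minimal illustration of how that pattern is consumed inside the library; the CheckTopologyAsync helper and the connection string are assumptions for illustration only (AnyContext itself is internal to Foundatio.Redis), not part of this changeset.

using System;
using System.Threading.Tasks;
using Foundatio.Extensions;
using Foundatio.Redis;
using StackExchange.Redis;

internal static class TopologySample
{
    // Hypothetical helper: connect, then use the IsCluster() extension from this
    // changeset, awaiting with AnyContext() so no synchronization context is captured.
    internal static async Task<bool> CheckTopologyAsync(string connectionString)
    {
        var muxer = await ConnectionMultiplexer.ConnectAsync(connectionString).AnyContext();
        bool isCluster = muxer.IsCluster();
        Console.WriteLine("Cluster or sentinel topology: {0}", isCluster);
        return isCluster;
    }
}

diff --git a/src/Foundatio.Redis/Extensions/TimespanExtensions.cs b/src/Foundatio.Redis/Extensions/TimespanExtensions.cs
index ad90fa5..3576c5a 100644
--- a/src/Foundatio.Redis/Extensions/TimespanExtensions.cs
+++ 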
b/src/Foundatio.Redis/Extensions/TimespanExtensions.cs @@ -1,12 +1,11 @@ using System; -namespace Foundatio.Extensions +namespace Foundatio.Extensions; + +internal static class TimespanExtensions { - internal static class TimespanExtensions + public static TimeSpan Min(this TimeSpan source, TimeSpan other) { - public static TimeSpan Min(this TimeSpan source, TimeSpan other) - { - return source.Ticks > other.Ticks ? other : source; - } + return source.Ticks > other.Ticks ? other : source; } } diff --git a/src/Foundatio.Redis/Extensions/TypeExtensions.cs b/src/Foundatio.Redis/Extensions/TypeExtensions.cs index e1c03ad..79b1176 100644 --- a/src/Foundatio.Redis/Extensions/TypeExtensions.cs +++ b/src/Foundatio.Redis/Extensions/TypeExtensions.cs @@ -1,55 +1,54 @@ using System; using Foundatio.Utility; -namespace Foundatio.Extensions +namespace Foundatio.Extensions; + +internal static class TypeExtensions { - internal static class TypeExtensions + public static bool IsNumeric(this Type type) { - public static bool IsNumeric(this Type type) - { - if (type.IsArray) - return false; - - if (type == TypeHelper.ByteType || - type == TypeHelper.DecimalType || - type == TypeHelper.DoubleType || - type == TypeHelper.Int16Type || - type == TypeHelper.Int32Type || - type == TypeHelper.Int64Type || - type == TypeHelper.SByteType || - type == TypeHelper.SingleType || - type == TypeHelper.UInt16Type || - type == TypeHelper.UInt32Type || - type == TypeHelper.UInt64Type) - return true; + if (type.IsArray) + return false; - switch (Type.GetTypeCode(type)) - { - case TypeCode.Byte: - case TypeCode.Decimal: - case TypeCode.Double: - case TypeCode.Int16: - case TypeCode.Int32: - case TypeCode.Int64: - case TypeCode.SByte: - case TypeCode.Single: - case TypeCode.UInt16: - case TypeCode.UInt32: - case TypeCode.UInt64: - return true; - } + if (type == TypeHelper.ByteType || + type == TypeHelper.DecimalType || + type == TypeHelper.DoubleType || + type == TypeHelper.Int16Type || + type == TypeHelper.Int32Type || + type == TypeHelper.Int64Type || + type == TypeHelper.SByteType || + type == TypeHelper.SingleType || + type == TypeHelper.UInt16Type || + type == TypeHelper.UInt32Type || + type == TypeHelper.UInt64Type) + return true; - return false; + switch (Type.GetTypeCode(type)) + { + case TypeCode.Byte: + case TypeCode.Decimal: + case TypeCode.Double: + case TypeCode.Int16: + case TypeCode.Int32: + case TypeCode.Int64: + case TypeCode.SByte: + case TypeCode.Single: + case TypeCode.UInt16: + case TypeCode.UInt32: + case TypeCode.UInt64: + return true; } + return false; + } - public static bool IsNullableNumeric(this Type type) - { - if (type.IsArray) - return false; - var t = Nullable.GetUnderlyingType(type); - return t != null && t.IsNumeric(); - } + public static bool IsNullableNumeric(this Type type) + { + if (type.IsArray) + return false; + + var t = Nullable.GetUnderlyingType(type); + return t != null && t.IsNumeric(); } } diff --git a/src/Foundatio.Redis/Messaging/RedisMessageBus.cs b/src/Foundatio.Redis/Messaging/RedisMessageBus.cs index 9facdec..abb8455 100644 --- a/src/Foundatio.Redis/Messaging/RedisMessageBus.cs +++ b/src/Foundatio.Redis/Messaging/RedisMessageBus.cs @@ -10,128 +10,127 @@ using Microsoft.Extensions.Logging; using StackExchange.Redis; -namespace Foundatio.Messaging +namespace Foundatio.Messaging; + +public class RedisMessageBus : MessageBusBase { - public class RedisMessageBus : MessageBusBase - { - private readonly AsyncLock _lock = new(); - private bool _isSubscribed; - private 
ChannelMessageQueue _channelMessageQueue = null; + private readonly AsyncLock _lock = new(); + private bool _isSubscribed; + private ChannelMessageQueue _channelMessageQueue = null; + + public RedisMessageBus(RedisMessageBusOptions options) : base(options) { } - public RedisMessageBus(RedisMessageBusOptions options) : base(options) { } + public RedisMessageBus(Builder config) + : this(config(new RedisMessageBusOptionsBuilder()).Build()) { } - public RedisMessageBus(Builder config) - : this(config(new RedisMessageBusOptionsBuilder()).Build()) { } + protected override async Task EnsureTopicSubscriptionAsync(CancellationToken cancellationToken) + { + if (_isSubscribed) + return; - protected override async Task EnsureTopicSubscriptionAsync(CancellationToken cancellationToken) + using (await _lock.LockAsync().AnyContext()) { if (_isSubscribed) return; - using (await _lock.LockAsync().AnyContext()) - { - if (_isSubscribed) - return; - - bool isTraceLogLevelEnabled = _logger.IsEnabled(LogLevel.Trace); - if (isTraceLogLevelEnabled) _logger.LogTrace("Subscribing to topic: {Topic}", _options.Topic); - _channelMessageQueue = await _options.Subscriber.SubscribeAsync(RedisChannel.Literal(_options.Topic)).AnyContext(); - _channelMessageQueue.OnMessage(OnMessage); - _isSubscribed = true; - if (isTraceLogLevelEnabled) _logger.LogTrace("Subscribed to topic: {Topic}", _options.Topic); - } + bool isTraceLogLevelEnabled = _logger.IsEnabled(LogLevel.Trace); + if (isTraceLogLevelEnabled) _logger.LogTrace("Subscribing to topic: {Topic}", _options.Topic); + _channelMessageQueue = await _options.Subscriber.SubscribeAsync(RedisChannel.Literal(_options.Topic)).AnyContext(); + _channelMessageQueue.OnMessage(OnMessage); + _isSubscribed = true; + if (isTraceLogLevelEnabled) _logger.LogTrace("Subscribed to topic: {Topic}", _options.Topic); } + } + + private async Task OnMessage(ChannelMessage channelMessage) + { + if (_logger.IsEnabled(LogLevel.Trace)) + _logger.LogTrace("OnMessage({Channel})", channelMessage.Channel); - private async Task OnMessage(ChannelMessage channelMessage) + if (_subscribers.IsEmpty || !channelMessage.Message.HasValue) { if (_logger.IsEnabled(LogLevel.Trace)) - _logger.LogTrace("OnMessage({Channel})", channelMessage.Channel); + _logger.LogTrace("No subscribers ({Channel})", channelMessage.Channel); + return; + } - if (_subscribers.IsEmpty || !channelMessage.Message.HasValue) + IMessage message; + try + { + var envelope = _serializer.Deserialize((byte[])channelMessage.Message); + message = new Message(envelope.Data, DeserializeMessageBody) { - if (_logger.IsEnabled(LogLevel.Trace)) - _logger.LogTrace("No subscribers ({Channel})", channelMessage.Channel); - return; - } + Type = envelope.Type, + ClrType = GetMappedMessageType(envelope.Type), + CorrelationId = envelope.CorrelationId, + UniqueId = envelope.UniqueId + }; + + foreach (var property in envelope.Properties) + message.Properties.Add(property.Key, property.Value); + } + catch (Exception ex) + { + _logger.LogWarning(ex, "OnMessage({Channel}) Error deserializing message: {Message}", channelMessage.Channel, ex.Message); + return; + } - IMessage message; - try - { - var envelope = _serializer.Deserialize((byte[])channelMessage.Message); - message = new Message(envelope.Data, DeserializeMessageBody) - { - Type = envelope.Type, - ClrType = GetMappedMessageType(envelope.Type), - CorrelationId = envelope.CorrelationId, - UniqueId = envelope.UniqueId - }; - - foreach (var property in envelope.Properties) - message.Properties.Add(property.Key, 
property.Value); - } - catch (Exception ex) - { - _logger.LogWarning(ex, "OnMessage({Channel}) Error deserializing message: {Message}", channelMessage.Channel, ex.Message); - return; - } + await SendMessageToSubscribersAsync(message).AnyContext(); + } - await SendMessageToSubscribersAsync(message).AnyContext(); + protected override async Task PublishImplAsync(string messageType, object message, MessageOptions options, CancellationToken cancellationToken) + { + var mappedType = GetMappedMessageType(messageType); + if (options.DeliveryDelay.HasValue && options.DeliveryDelay.Value > TimeSpan.Zero) + { + if (_logger.IsEnabled(LogLevel.Trace)) _logger.LogTrace("Schedule delayed message: {MessageType} ({Delay}ms)", messageType, options.DeliveryDelay.Value.TotalMilliseconds); + await AddDelayedMessageAsync(mappedType, message, options.DeliveryDelay.Value).AnyContext(); + return; } - protected override async Task PublishImplAsync(string messageType, object message, MessageOptions options, CancellationToken cancellationToken) + if (_logger.IsEnabled(LogLevel.Trace)) _logger.LogTrace("Message Publish: {MessageType}", messageType); + byte[] bodyData = SerializeMessageBody(messageType, message); + byte[] data = _serializer.SerializeToBytes(new RedisMessageEnvelope { - var mappedType = GetMappedMessageType(messageType); - if (options.DeliveryDelay.HasValue && options.DeliveryDelay.Value > TimeSpan.Zero) - { - if (_logger.IsEnabled(LogLevel.Trace)) _logger.LogTrace("Schedule delayed message: {MessageType} ({Delay}ms)", messageType, options.DeliveryDelay.Value.TotalMilliseconds); - await AddDelayedMessageAsync(mappedType, message, options.DeliveryDelay.Value).AnyContext(); - return; - } + Type = messageType, + Data = bodyData, + CorrelationId = options.CorrelationId, + UniqueId = options.UniqueId, + Properties = options.Properties.ToDictionary(kvp => kvp.Key, kvp => kvp.Value) + }); - if (_logger.IsEnabled(LogLevel.Trace)) _logger.LogTrace("Message Publish: {MessageType}", messageType); - byte[] bodyData = SerializeMessageBody(messageType, message); - byte[] data = _serializer.SerializeToBytes(new RedisMessageEnvelope - { - Type = messageType, - Data = bodyData, - CorrelationId = options.CorrelationId, - UniqueId = options.UniqueId, - Properties = options.Properties.ToDictionary(kvp => kvp.Key, kvp => kvp.Value) - }); + // TODO: Use ILockProvider to lock on UniqueId to ensure it doesn't get duplicated - // TODO: Use ILockProvider to lock on UniqueId to ensure it doesn't get duplicated + await Run.WithRetriesAsync(() => _options.Subscriber.PublishAsync(RedisChannel.Literal(_options.Topic), data, CommandFlags.FireAndForget), logger: _logger, cancellationToken: cancellationToken).AnyContext(); + } - await Run.WithRetriesAsync(() => _options.Subscriber.PublishAsync(RedisChannel.Literal(_options.Topic), data, CommandFlags.FireAndForget), logger: _logger, cancellationToken: cancellationToken).AnyContext(); - } + public override void Dispose() + { + base.Dispose(); - public override void Dispose() + if (_isSubscribed) { - base.Dispose(); - - if (_isSubscribed) + using (_lock.Lock()) { - using (_lock.Lock()) - { - if (!_isSubscribed) - return; - - bool isTraceLogLevelEnabled = _logger.IsEnabled(LogLevel.Trace); - if (isTraceLogLevelEnabled) _logger.LogTrace("Unsubscribing from topic {Topic}", _options.Topic); - _channelMessageQueue?.Unsubscribe(CommandFlags.FireAndForget); - _channelMessageQueue = null; - _isSubscribed = false; - if (isTraceLogLevelEnabled) _logger.LogTrace("Unsubscribed from topic {Topic}", 
_options.Topic);
-                    }
+                if (!_isSubscribed)
+                    return;
+
+                bool isTraceLogLevelEnabled = _logger.IsEnabled(LogLevel.Trace);
+                if (isTraceLogLevelEnabled) _logger.LogTrace("Unsubscribing from topic {Topic}", _options.Topic);
+                _channelMessageQueue?.Unsubscribe(CommandFlags.FireAndForget);
+                _channelMessageQueue = null;
+                _isSubscribed = false;
+                if (isTraceLogLevelEnabled) _logger.LogTrace("Unsubscribed from topic {Topic}", _options.Topic);
             }
         }
     }
+}
-    public class RedisMessageEnvelope
-    {
-        public string UniqueId { get; set; }
-        public string CorrelationId { get; set; }
-        public string Type { get; set; }
-        public byte[] Data { get; set; }
-        public Dictionary Properties { get; set; } = new Dictionary();
-    }
+public class RedisMessageEnvelope
+{
+    public string UniqueId { get; set; }
+    public string CorrelationId { get; set; }
+    public string Type { get; set; }
+    public byte[] Data { get; set; }
+    public Dictionary Properties { get; set; } = new Dictionary();
 }
diff --git a/src/Foundatio.Redis/Messaging/RedisMessageBusOptions.cs b/src/Foundatio.Redis/Messaging/RedisMessageBusOptions.cs
index 16dd635..1b2e6e2 100644
--- a/src/Foundatio.Redis/Messaging/RedisMessageBusOptions.cs
+++ b/src/Foundatio.Redis/Messaging/RedisMessageBusOptions.cs
@@ -1,19 +1,17 @@
-using System;
-using StackExchange.Redis;
+using StackExchange.Redis;
-namespace Foundatio.Messaging
+namespace Foundatio.Messaging;
+
+public class RedisMessageBusOptions : SharedMessageBusOptions
 {
-    public class RedisMessageBusOptions : SharedMessageBusOptions
-    {
-        public ISubscriber Subscriber { get; set; }
-    }
+    public ISubscriber Subscriber { get; set; }
+}
-    public class RedisMessageBusOptionsBuilder : SharedMessageBusOptionsBuilder<RedisMessageBusOptions, RedisMessageBusOptionsBuilder>
+public class RedisMessageBusOptionsBuilder : SharedMessageBusOptionsBuilder<RedisMessageBusOptions, RedisMessageBusOptionsBuilder>
+{
+    public RedisMessageBusOptionsBuilder Subscriber(ISubscriber subscriber)
     {
-        public RedisMessageBusOptionsBuilder Subscriber(ISubscriber subscriber)
-        {
-            Target.Subscriber = subscriber;
-            return this;
-        }
+        Target.Subscriber = subscriber;
+        return this;
     }
 }
diff --git a/src/Foundatio.Redis/Metrics/RedisMetricsClient.cs b/src/Foundatio.Redis/Metrics/RedisMetricsClient.cs
index cc328b2..d7a8585 100644
--- a/src/Foundatio.Redis/Metrics/RedisMetricsClient.cs
+++ b/src/Foundatio.Redis/Metrics/RedisMetricsClient.cs
@@ -1,19 +1,17 @@
-using System;
-using Foundatio.Caching;
+using Foundatio.Caching;
-namespace Foundatio.Metrics
+namespace Foundatio.Metrics;
+
+public class RedisMetricsClient : CacheBucketMetricsClientBase
 {
-    public class RedisMetricsClient : CacheBucketMetricsClientBase
-    {
-        public RedisMetricsClient(RedisMetricsClientOptions options) : base(new RedisCacheClient(o => o.ConnectionMultiplexer(options.ConnectionMultiplexer).LoggerFactory(options.LoggerFactory)), options) { }
+    public RedisMetricsClient(RedisMetricsClientOptions options) : base(new RedisCacheClient(o => o.ConnectionMultiplexer(options.ConnectionMultiplexer).LoggerFactory(options.LoggerFactory)), options) { }
-        public RedisMetricsClient(Builder<RedisMetricsClientOptionsBuilder, RedisMetricsClientOptions> config)
-            : this(config(new RedisMetricsClientOptionsBuilder()).Build()) { }
+    public RedisMetricsClient(Builder<RedisMetricsClientOptionsBuilder, RedisMetricsClientOptions> config)
+        : this(config(new RedisMetricsClientOptionsBuilder()).Build()) { }
-        public override void Dispose()
-        {
-            base.Dispose();
-            _cache.Dispose();
-        }
+    public override void Dispose()
+    {
+        base.Dispose();
+        _cache.Dispose();
     }
 }
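The message bus and metrics wrappers above follow the same options-builder pattern as the cache client earlier in this changeset. The following is a rough usage sketch, not something introduced by this diff: the connection string, topic name, and PingMessage type are illustrative assumptions, and the SubscribeAsync/PublishAsync/CounterAsync calls are the standard Foundatio IMessageBus/IMetricsClient surface assumed to be available.

using System;
using System.Threading.Tasks;
using Foundatio.Messaging;
using Foundatio.Metrics;
using StackExchange.Redis;

public static class RedisWireUpSample
{
    // Illustrative message type; not part of this changeset.
    public class PingMessage
    {
        public string Text { get; set; }
    }

    public static async Task RunAsync()
    {
        // One multiplexer is shared by all Foundatio Redis components.
        var muxer = await ConnectionMultiplexer.ConnectAsync("localhost:6379");

        // RedisMessageBus: Subscriber() and Topic() are builder methods used in this diff.
        var messageBus = new RedisMessageBus(o => o
            .Subscriber(muxer.GetSubscriber())
            .Topic("sample-messages"));

        // RedisMetricsClient: ConnectionMultiplexer() is the builder call shown in this diff.
        var metrics = new RedisMetricsClient(o => o.ConnectionMultiplexer(muxer));

        await messageBus.SubscribeAsync<PingMessage>(msg => Console.WriteLine(msg.Text));
        await messageBus.PublishAsync(new PingMessage { Text = "pong" });
        await metrics.CounterAsync("pings");

        // Both wrappers own disposable resources; Dispose() when finished.
        metrics.Dispose();
        messageBus.Dispose();
    }
}

diff --git a/src/Foundatio.Redis/Metrics/RedisMetricsClientOptions.cs b/src/Foundatio.Redis/Metrics/RedisMetricsClientOptions.cs
index 63ad1f9..afb6f86 100644
--- 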
a/src/Foundatio.Redis/Metrics/RedisMetricsClientOptions.cs +++ b/src/Foundatio.Redis/Metrics/RedisMetricsClientOptions.cs @@ -1,19 +1,17 @@ -using System; -using StackExchange.Redis; +using StackExchange.Redis; -namespace Foundatio.Metrics +namespace Foundatio.Metrics; + +public class RedisMetricsClientOptions : SharedMetricsClientOptions { - public class RedisMetricsClientOptions : SharedMetricsClientOptions - { - public IConnectionMultiplexer ConnectionMultiplexer { get; set; } - } + public IConnectionMultiplexer ConnectionMultiplexer { get; set; } +} - public class RedisMetricsClientOptionsBuilder : SharedMetricsClientOptionsBuilder +public class RedisMetricsClientOptionsBuilder : SharedMetricsClientOptionsBuilder +{ + public RedisMetricsClientOptionsBuilder ConnectionMultiplexer(IConnectionMultiplexer connectionMultiplexer) { - public RedisMetricsClientOptionsBuilder ConnectionMultiplexer(IConnectionMultiplexer connectionMultiplexer) - { - Target.ConnectionMultiplexer = connectionMultiplexer; - return this; - } + Target.ConnectionMultiplexer = connectionMultiplexer; + return this; } } diff --git a/src/Foundatio.Redis/Queues/RedisQueue.cs b/src/Foundatio.Redis/Queues/RedisQueue.cs index c897670..8ea9b4a 100644 --- a/src/Foundatio.Redis/Queues/RedisQueue.cs +++ b/src/Foundatio.Redis/Queues/RedisQueue.cs @@ -15,819 +15,818 @@ using StackExchange.Redis; #pragma warning disable 4014 -namespace Foundatio.Queues +namespace Foundatio.Queues; + +public class RedisQueue : QueueBase> where T : class { - public class RedisQueue : QueueBase> where T : class - { - private readonly AsyncLock _lock = new(); - private readonly AsyncAutoResetEvent _autoResetEvent = new(); - private readonly ISubscriber _subscriber; - private readonly RedisCacheClient _cache; - private long _enqueuedCount; - private long _dequeuedCount; - private long _completedCount; - private long _abandonedCount; - private long _workerErrorCount; - private long _workItemTimeoutCount; - private readonly ILockProvider _maintenanceLockProvider; - private Task _maintenanceTask; - private bool _isSubscribed; - private readonly TimeSpan _payloadTimeToLive; - private bool _scriptsLoaded; - private readonly string _listPrefix; - - private LoadedLuaScript _dequeueId; - - public RedisQueue(RedisQueueOptions options) : base(options) - { - if (options.ConnectionMultiplexer == null) - throw new ArgumentException("ConnectionMultiplexer is required."); - - options.ConnectionMultiplexer.ConnectionRestored += ConnectionMultiplexerOnConnectionRestored; - _cache = new RedisCacheClient(new RedisCacheClientOptions { ConnectionMultiplexer = options.ConnectionMultiplexer, Serializer = _serializer }); - - _payloadTimeToLive = GetPayloadTtl(); - _subscriber = _options.ConnectionMultiplexer.GetSubscriber(); - - _listPrefix = _options.ConnectionMultiplexer.IsCluster() ? "{q:" + _options.Name + "}" : $"q:{_options.Name}"; - _queueListName = $"{_listPrefix}:in"; - _workListName = $"{_listPrefix}:work"; - _waitListName = $"{_listPrefix}:wait"; - _deadListName = $"{_listPrefix}:dead"; - - // min is 1 second, max is 1 minute - var interval = _options.WorkItemTimeout > TimeSpan.FromSeconds(1) ? _options.WorkItemTimeout.Min(TimeSpan.FromMinutes(1)) : TimeSpan.FromSeconds(1); - _maintenanceLockProvider = new ThrottlingLockProvider(_cache, 1, interval); - - _logger.LogInformation("Queue {QueueId} created. 
Retries: {Retries} Retry Delay: {RetryDelay:g}, Maintenance Interval: {MaintenanceInterval:g}", QueueId, _options.Retries, _options.RetryDelay, interval); - } + private readonly AsyncLock _lock = new(); + private readonly AsyncAutoResetEvent _autoResetEvent = new(); + private readonly ISubscriber _subscriber; + private readonly RedisCacheClient _cache; + private long _enqueuedCount; + private long _dequeuedCount; + private long _completedCount; + private long _abandonedCount; + private long _workerErrorCount; + private long _workItemTimeoutCount; + private readonly ILockProvider _maintenanceLockProvider; + private Task _maintenanceTask; + private bool _isSubscribed; + private readonly TimeSpan _payloadTimeToLive; + private bool _scriptsLoaded; + private readonly string _listPrefix; + + private LoadedLuaScript _dequeueId; + + public RedisQueue(RedisQueueOptions options) : base(options) + { + if (options.ConnectionMultiplexer == null) + throw new ArgumentException("ConnectionMultiplexer is required."); - public RedisQueue(Builder, RedisQueueOptions> config) - : this(config(new RedisQueueOptionsBuilder()).Build()) { } + options.ConnectionMultiplexer.ConnectionRestored += ConnectionMultiplexerOnConnectionRestored; + _cache = new RedisCacheClient(new RedisCacheClientOptions { ConnectionMultiplexer = options.ConnectionMultiplexer, Serializer = _serializer }); - public IDatabase Database => _options.ConnectionMultiplexer.GetDatabase(); + _payloadTimeToLive = GetPayloadTtl(); + _subscriber = _options.ConnectionMultiplexer.GetSubscriber(); - protected override Task EnsureQueueCreatedAsync(CancellationToken cancellationToken = default) => Task.CompletedTask; + _listPrefix = _options.ConnectionMultiplexer.IsCluster() ? "{q:" + _options.Name + "}" : $"q:{_options.Name}"; + _queueListName = $"{_listPrefix}:in"; + _workListName = $"{_listPrefix}:work"; + _waitListName = $"{_listPrefix}:wait"; + _deadListName = $"{_listPrefix}:dead"; - private bool IsMaintenanceRunning => !_options.RunMaintenanceTasks || _maintenanceTask != null && !_maintenanceTask.IsCanceled && !_maintenanceTask.IsFaulted && !_maintenanceTask.IsCompleted; - private async Task EnsureMaintenanceRunningAsync() - { - if (_queueDisposedCancellationTokenSource.IsCancellationRequested || IsMaintenanceRunning) - return; + // min is 1 second, max is 1 minute + var interval = _options.WorkItemTimeout > TimeSpan.FromSeconds(1) ? _options.WorkItemTimeout.Min(TimeSpan.FromMinutes(1)) : TimeSpan.FromSeconds(1); + _maintenanceLockProvider = new ThrottlingLockProvider(_cache, 1, interval); - using (await _lock.LockAsync(_queueDisposedCancellationTokenSource.Token).AnyContext()) - { - if (_queueDisposedCancellationTokenSource.IsCancellationRequested || _maintenanceTask != null) - return; + _logger.LogInformation("Queue {QueueId} created. 
Retries: {Retries} Retry Delay: {RetryDelay:g}, Maintenance Interval: {MaintenanceInterval:g}", QueueId, _options.Retries, _options.RetryDelay, interval); + } - _logger.LogTrace("Starting maintenance for {Name}.", _options.Name); - _maintenanceTask = Task.Run(() => DoMaintenanceWorkLoopAsync()); - } - } + public RedisQueue(Builder, RedisQueueOptions> config) + : this(config(new RedisQueueOptionsBuilder()).Build()) { } + + public IDatabase Database => _options.ConnectionMultiplexer.GetDatabase(); + + protected override Task EnsureQueueCreatedAsync(CancellationToken cancellationToken = default) => Task.CompletedTask; - private async Task EnsureTopicSubscriptionAsync() + private bool IsMaintenanceRunning => !_options.RunMaintenanceTasks || _maintenanceTask != null && !_maintenanceTask.IsCanceled && !_maintenanceTask.IsFaulted && !_maintenanceTask.IsCompleted; + private async Task EnsureMaintenanceRunningAsync() + { + if (_queueDisposedCancellationTokenSource.IsCancellationRequested || IsMaintenanceRunning) + return; + + using (await _lock.LockAsync(_queueDisposedCancellationTokenSource.Token).AnyContext()) { - if (_queueDisposedCancellationTokenSource.IsCancellationRequested || _isSubscribed) + if (_queueDisposedCancellationTokenSource.IsCancellationRequested || _maintenanceTask != null) return; - using (await _lock.LockAsync(_queueDisposedCancellationTokenSource.Token).AnyContext()) - { - if (_queueDisposedCancellationTokenSource.IsCancellationRequested || _isSubscribed) - return; - - _logger.LogTrace("Subscribing to enqueue messages for {Name}.", _options.Name); - await _subscriber.SubscribeAsync(RedisChannel.Literal(GetTopicName()), OnTopicMessage).AnyContext(); - _isSubscribed = true; - _logger.LogTrace("Subscribed to enqueue messages for {Name}.", _options.Name); - } + _logger.LogTrace("Starting maintenance for {Name}.", _options.Name); + _maintenanceTask = Task.Run(() => DoMaintenanceWorkLoopAsync()); } + } - protected override Task GetQueueStatsImplAsync() + private async Task EnsureTopicSubscriptionAsync() + { + if (_queueDisposedCancellationTokenSource.IsCancellationRequested || _isSubscribed) + return; + + using (await _lock.LockAsync(_queueDisposedCancellationTokenSource.Token).AnyContext()) { - var queued = Database.ListLengthAsync(_queueListName); - var wait = Database.ListLengthAsync(_waitListName); - var working = Database.ListLengthAsync(_workListName); - var deadLetter = Database.ListLengthAsync(_deadListName); + if (_queueDisposedCancellationTokenSource.IsCancellationRequested || _isSubscribed) + return; - return Task.WhenAll(queued, wait, working, deadLetter) - .ContinueWith(t => new QueueStats - { - Queued = queued.Result + wait.Result, - Working = working.Result, - Deadletter = deadLetter.Result, - Enqueued = _enqueuedCount, - Dequeued = _dequeuedCount, - Completed = _completedCount, - Abandoned = _abandonedCount, - Errors = _workerErrorCount, - Timeouts = _workItemTimeoutCount - }, TaskContinuationOptions.OnlyOnRanToCompletion); + _logger.LogTrace("Subscribing to enqueue messages for {Name}.", _options.Name); + await _subscriber.SubscribeAsync(RedisChannel.Literal(GetTopicName()), OnTopicMessage).AnyContext(); + _isSubscribed = true; + _logger.LogTrace("Subscribed to enqueue messages for {Name}.", _options.Name); } + } - protected override QueueStats GetMetricsQueueStats() - { - long queued = Database.ListLength(_queueListName); - long wait = Database.ListLength(_waitListName); - long working = Database.ListLength(_workListName); - long deadLetter = 
Database.ListLength(_deadListName); + protected override Task GetQueueStatsImplAsync() + { + var queued = Database.ListLengthAsync(_queueListName); + var wait = Database.ListLengthAsync(_waitListName); + var working = Database.ListLengthAsync(_workListName); + var deadLetter = Database.ListLengthAsync(_deadListName); - return new QueueStats + return Task.WhenAll(queued, wait, working, deadLetter) + .ContinueWith(t => new QueueStats { - Queued = queued + wait, - Working = working, - Deadletter = deadLetter, + Queued = queued.Result + wait.Result, + Working = working.Result, + Deadletter = deadLetter.Result, Enqueued = _enqueuedCount, Dequeued = _dequeuedCount, Completed = _completedCount, Abandoned = _abandonedCount, Errors = _workerErrorCount, Timeouts = _workItemTimeoutCount - }; - } + }, TaskContinuationOptions.OnlyOnRanToCompletion); + } - private readonly string _queueListName; - private readonly string _workListName; - private readonly string _waitListName; - private readonly string _deadListName; + protected override QueueStats GetMetricsQueueStats() + { + long queued = Database.ListLength(_queueListName); + long wait = Database.ListLength(_waitListName); + long working = Database.ListLength(_workListName); + long deadLetter = Database.ListLength(_deadListName); + + return new QueueStats + { + Queued = queued + wait, + Working = working, + Deadletter = deadLetter, + Enqueued = _enqueuedCount, + Dequeued = _dequeuedCount, + Completed = _completedCount, + Abandoned = _abandonedCount, + Errors = _workerErrorCount, + Timeouts = _workItemTimeoutCount + }; + } - private string GetPayloadKey(string id) - { - return String.Concat(_listPrefix, ":", id); - } + private readonly string _queueListName; + private readonly string _workListName; + private readonly string _waitListName; + private readonly string _deadListName; - private TimeSpan GetPayloadTtl() - { - var ttl = TimeSpan.Zero; - for (int attempt = 1; attempt <= _options.Retries + 1; attempt++) - ttl = ttl.Add(GetRetryDelay(attempt)); + private string GetPayloadKey(string id) + { + return String.Concat(_listPrefix, ":", id); + } - // minimum of 7 days for payload - return TimeSpan.FromMilliseconds(Math.Max(ttl.TotalMilliseconds * 1.5, TimeSpan.FromDays(7).TotalMilliseconds)); - } + private TimeSpan GetPayloadTtl() + { + var ttl = TimeSpan.Zero; + for (int attempt = 1; attempt <= _options.Retries + 1; attempt++) + ttl = ttl.Add(GetRetryDelay(attempt)); - private string GetAttemptsKey(string id) - { - return String.Concat(_listPrefix, ":", id, ":attempts"); - } + // minimum of 7 days for payload + return TimeSpan.FromMilliseconds(Math.Max(ttl.TotalMilliseconds * 1.5, TimeSpan.FromDays(7).TotalMilliseconds)); + } - private TimeSpan GetAttemptsTtl() - { - return _payloadTimeToLive; - } + private string GetAttemptsKey(string id) + { + return String.Concat(_listPrefix, ":", id, ":attempts"); + } - private string GetEnqueuedTimeKey(string id) - { - return String.Concat(_listPrefix, ":", id, ":enqueued"); - } + private TimeSpan GetAttemptsTtl() + { + return _payloadTimeToLive; + } - private string GetDequeuedTimeKey(string id) - { - return String.Concat(_listPrefix, ":", id, ":dequeued"); - } + private string GetEnqueuedTimeKey(string id) + { + return String.Concat(_listPrefix, ":", id, ":enqueued"); + } - private string GetRenewedTimeKey(string id) - { - return String.Concat(_listPrefix, ":", id, ":renewed"); - } + private string GetDequeuedTimeKey(string id) + { + return String.Concat(_listPrefix, ":", id, ":dequeued"); + } - private TimeSpan 
GetWorkItemTimeoutTimeTtl() - { - return TimeSpan.FromMilliseconds(Math.Max(_options.WorkItemTimeout.TotalMilliseconds * 1.5, TimeSpan.FromHours(1).TotalMilliseconds)); - } + private string GetRenewedTimeKey(string id) + { + return String.Concat(_listPrefix, ":", id, ":renewed"); + } - private string GetWaitTimeKey(string id) - { - return String.Concat(_listPrefix, ":", id, ":wait"); - } + private TimeSpan GetWorkItemTimeoutTimeTtl() + { + return TimeSpan.FromMilliseconds(Math.Max(_options.WorkItemTimeout.TotalMilliseconds * 1.5, TimeSpan.FromHours(1).TotalMilliseconds)); + } - private TimeSpan GetWaitTimeTtl() - { - return _payloadTimeToLive; - } + private string GetWaitTimeKey(string id) + { + return String.Concat(_listPrefix, ":", id, ":wait"); + } + + private TimeSpan GetWaitTimeTtl() + { + return _payloadTimeToLive; + } + + private string GetTopicName() + { + return String.Concat(_listPrefix, ":in"); + } + + protected override async Task EnqueueImplAsync(T data, QueueEntryOptions options) + { + string id = Guid.NewGuid().ToString("N"); + if (_logger.IsEnabled(LogLevel.Debug)) _logger.LogDebug("Queue {Name} enqueue item: {EntryId}", _options.Name, id); + + if (options.DeliveryDelay.HasValue && options.DeliveryDelay.Value > TimeSpan.Zero) + throw new NotSupportedException("DeliveryDelay is not supported in the Redis queue implementation."); - private string GetTopicName() + bool isTraceLogLevelEnabled = _logger.IsEnabled(LogLevel.Trace); + if (!await OnEnqueuingAsync(data, options).AnyContext()) { - return String.Concat(_listPrefix, ":in"); + if (isTraceLogLevelEnabled) _logger.LogTrace("Aborting enqueue item: {EntryId}", id); + return null; } - protected override async Task EnqueueImplAsync(T data, QueueEntryOptions options) + var now = SystemClock.UtcNow; + var envelope = new RedisPayloadEnvelope { - string id = Guid.NewGuid().ToString("N"); - if (_logger.IsEnabled(LogLevel.Debug)) _logger.LogDebug("Queue {Name} enqueue item: {EntryId}", _options.Name, id); + Properties = options.Properties, + CorrelationId = options.CorrelationId, + Value = data + }; + bool success = await Run.WithRetriesAsync(() => _cache.AddAsync(GetPayloadKey(id), envelope, _payloadTimeToLive), logger: _logger).AnyContext(); + if (!success) + throw new InvalidOperationException("Attempt to set payload failed."); - if (options.DeliveryDelay.HasValue && options.DeliveryDelay.Value > TimeSpan.Zero) - throw new NotSupportedException("DeliveryDelay is not supported in the Redis queue implementation."); + await Run.WithRetriesAsync(() => Task.WhenAll( + _cache.SetAsync(GetEnqueuedTimeKey(id), now.Ticks, _payloadTimeToLive), + Database.ListLeftPushAsync(_queueListName, id) + ), logger: _logger).AnyContext(); - bool isTraceLogLevelEnabled = _logger.IsEnabled(LogLevel.Trace); - if (!await OnEnqueuingAsync(data, options).AnyContext()) - { - if (isTraceLogLevelEnabled) _logger.LogTrace("Aborting enqueue item: {EntryId}", id); - return null; - } - - var now = SystemClock.UtcNow; - var envelope = new RedisPayloadEnvelope - { - Properties = options.Properties, - CorrelationId = options.CorrelationId, - Value = data - }; - bool success = await Run.WithRetriesAsync(() => _cache.AddAsync(GetPayloadKey(id), envelope, _payloadTimeToLive), logger: _logger).AnyContext(); - if (!success) - throw new InvalidOperationException("Attempt to set payload failed."); + try + { + _autoResetEvent.Set(); + await Run.WithRetriesAsync(() => _subscriber.PublishAsync(RedisChannel.Literal(GetTopicName()), id), logger: _logger).AnyContext(); + } + 
catch (Exception ex) + { + if (isTraceLogLevelEnabled) _logger.LogTrace(ex, "Error publishing topic message"); + } - await Run.WithRetriesAsync(() => Task.WhenAll( - _cache.SetAsync(GetEnqueuedTimeKey(id), now.Ticks, _payloadTimeToLive), - Database.ListLeftPushAsync(_queueListName, id) - ), logger: _logger).AnyContext(); + Interlocked.Increment(ref _enqueuedCount); + var entry = new QueueEntry(id, null, data, this, now, 0); + await OnEnqueuedAsync(entry).AnyContext(); - try - { - _autoResetEvent.Set(); - await Run.WithRetriesAsync(() => _subscriber.PublishAsync(RedisChannel.Literal(GetTopicName()), id), logger: _logger).AnyContext(); - } - catch (Exception ex) - { - if (isTraceLogLevelEnabled) _logger.LogTrace(ex, "Error publishing topic message"); - } + if (isTraceLogLevelEnabled) _logger.LogTrace("Enqueue done"); + return id; + } - Interlocked.Increment(ref _enqueuedCount); - var entry = new QueueEntry(id, null, data, this, now, 0); - await OnEnqueuedAsync(entry).AnyContext(); + private readonly List _workers = []; - if (isTraceLogLevelEnabled) _logger.LogTrace("Enqueue done"); - return id; - } + protected override void StartWorkingImpl(Func, CancellationToken, Task> handler, bool autoComplete, CancellationToken cancellationToken) + { + if (handler == null) + throw new ArgumentNullException(nameof(handler)); - private readonly List _workers = []; + _logger.LogTrace("Queue {Name} start working", _options.Name); - protected override void StartWorkingImpl(Func, CancellationToken, Task> handler, bool autoComplete, CancellationToken cancellationToken) + _workers.Add(Task.Run(async () => { - if (handler == null) - throw new ArgumentNullException(nameof(handler)); - - _logger.LogTrace("Queue {Name} start working", _options.Name); + using var linkedCancellationToken = GetLinkedDisposableCancellationTokenSource(cancellationToken); + _logger.LogTrace("WorkerLoop Start {Name}", _options.Name); - _workers.Add(Task.Run(async () => + while (!linkedCancellationToken.IsCancellationRequested) { - using var linkedCancellationToken = GetLinkedDisposableCancellationTokenSource(cancellationToken); - _logger.LogTrace("WorkerLoop Start {Name}", _options.Name); + _logger.LogTrace("WorkerLoop Signaled {Name}", _options.Name); - while (!linkedCancellationToken.IsCancellationRequested) + IQueueEntry queueEntry = null; + try { - _logger.LogTrace("WorkerLoop Signaled {Name}", _options.Name); - - IQueueEntry queueEntry = null; - try - { - queueEntry = await DequeueImplAsync(linkedCancellationToken.Token).AnyContext(); - } - catch (Exception ex) - { - _logger.LogError(ex, "Error on Dequeue: {Message}", ex.Message); - } - - if (linkedCancellationToken.IsCancellationRequested || queueEntry == null) - continue; + queueEntry = await DequeueImplAsync(linkedCancellationToken.Token).AnyContext(); + } + catch (Exception ex) + { + _logger.LogError(ex, "Error on Dequeue: {Message}", ex.Message); + } - try - { - await handler(queueEntry, linkedCancellationToken.Token).AnyContext(); - } - catch (Exception ex) - { - Interlocked.Increment(ref _workerErrorCount); - _logger.LogError(ex, "Worker error: {Message}", ex.Message); + if (linkedCancellationToken.IsCancellationRequested || queueEntry == null) + continue; - if (!queueEntry.IsAbandoned && !queueEntry.IsCompleted) - { - try - { - await queueEntry.AbandonAsync().AnyContext(); - } - catch (Exception abandonEx) - { - _logger.LogError(abandonEx, "Worker error abandoning queue entry: {Message}", abandonEx.Message); - } - } - } + try + { + await handler(queueEntry, 
linkedCancellationToken.Token).AnyContext(); + } + catch (Exception ex) + { + Interlocked.Increment(ref _workerErrorCount); + _logger.LogError(ex, "Worker error: {Message}", ex.Message); - if (autoComplete && !queueEntry.IsAbandoned && !queueEntry.IsCompleted) + if (!queueEntry.IsAbandoned && !queueEntry.IsCompleted) { try { - await Run.WithRetriesAsync(() => queueEntry.CompleteAsync(), cancellationToken: linkedCancellationToken.Token, logger: _logger).AnyContext(); + await queueEntry.AbandonAsync().AnyContext(); } - catch (Exception ex) + catch (Exception abandonEx) { - _logger.LogError(ex, "Worker error attempting to auto complete entry: {Message}", ex.Message); + _logger.LogError(abandonEx, "Worker error abandoning queue entry: {Message}", abandonEx.Message); } } } - _logger.LogTrace("Worker exiting: {Name} Cancel Requested: {IsCancellationRequested}", _options.Name, linkedCancellationToken.IsCancellationRequested); - }, GetLinkedDisposableCancellationTokenSource(cancellationToken).Token)); - } - - protected override async Task> DequeueImplAsync(CancellationToken linkedCancellationToken) - { - _logger.LogTrace("Queue {Name} dequeuing item...", _options.Name); - - if (!IsMaintenanceRunning) - await EnsureMaintenanceRunningAsync().AnyContext(); - if (!_isSubscribed) - await EnsureTopicSubscriptionAsync().AnyContext(); - - var value = await DequeueIdAsync(linkedCancellationToken).AnyContext(); - if (_logger.IsEnabled(LogLevel.Trace)) _logger.LogTrace("Initial list value: {Value}", value.IsNullOrEmpty ? "" : value.ToString()); - - while (value.IsNullOrEmpty && !linkedCancellationToken.IsCancellationRequested) - { - _logger.LogTrace("Waiting to dequeue item..."); - var sw = Stopwatch.StartNew(); - - try + if (autoComplete && !queueEntry.IsAbandoned && !queueEntry.IsCompleted) { - using var timeoutCancellationTokenSource = new CancellationTokenSource(10000); - using var dequeueCancellationTokenSource = CancellationTokenSource.CreateLinkedTokenSource(linkedCancellationToken, timeoutCancellationTokenSource.Token); - await _autoResetEvent.WaitAsync(dequeueCancellationTokenSource.Token).AnyContext(); + try + { + await Run.WithRetriesAsync(() => queueEntry.CompleteAsync(), cancellationToken: linkedCancellationToken.Token, logger: _logger).AnyContext(); + } + catch (Exception ex) + { + _logger.LogError(ex, "Worker error attempting to auto complete entry: {Message}", ex.Message); + } } - catch (OperationCanceledException) { } + } - sw.Stop(); - if (_logger.IsEnabled(LogLevel.Trace)) _logger.LogTrace("Waited for dequeue: {Elapsed}", sw.Elapsed.ToString()); + _logger.LogTrace("Worker exiting: {Name} Cancel Requested: {IsCancellationRequested}", _options.Name, linkedCancellationToken.IsCancellationRequested); + }, GetLinkedDisposableCancellationTokenSource(cancellationToken).Token)); + } - value = await DequeueIdAsync(linkedCancellationToken).AnyContext(); - if (_logger.IsEnabled(LogLevel.Trace)) _logger.LogTrace("List value: {Value}", value.IsNullOrEmpty ? 
"" : value.ToString()); - } + protected override async Task> DequeueImplAsync(CancellationToken linkedCancellationToken) + { + _logger.LogTrace("Queue {Name} dequeuing item...", _options.Name); - if (value.IsNullOrEmpty) - return null; + if (!IsMaintenanceRunning) + await EnsureMaintenanceRunningAsync().AnyContext(); + if (!_isSubscribed) + await EnsureTopicSubscriptionAsync().AnyContext(); - try - { - var entry = await GetQueueEntryAsync(value).AnyContext(); - if (entry == null) - return null; + var value = await DequeueIdAsync(linkedCancellationToken).AnyContext(); + if (_logger.IsEnabled(LogLevel.Trace)) _logger.LogTrace("Initial list value: {Value}", value.IsNullOrEmpty ? "" : value.ToString()); - Interlocked.Increment(ref _dequeuedCount); - await OnDequeuedAsync(entry).AnyContext(); + while (value.IsNullOrEmpty && !linkedCancellationToken.IsCancellationRequested) + { + _logger.LogTrace("Waiting to dequeue item..."); + var sw = Stopwatch.StartNew(); - _logger.LogDebug("Dequeued item: {Value}", value); - return entry; - } - catch (Exception ex) + try { - _logger.LogError(ex, "Error getting dequeued item payload: {Value}", value); - throw; + using var timeoutCancellationTokenSource = new CancellationTokenSource(10000); + using var dequeueCancellationTokenSource = CancellationTokenSource.CreateLinkedTokenSource(linkedCancellationToken, timeoutCancellationTokenSource.Token); + await _autoResetEvent.WaitAsync(dequeueCancellationTokenSource.Token).AnyContext(); } - } + catch (OperationCanceledException) { } - public override async Task RenewLockAsync(IQueueEntry entry) - { - if (_logger.IsEnabled(LogLevel.Debug)) _logger.LogDebug("Queue {Name} renew lock item: {EntryId}", _options.Name, entry.Id); - await Run.WithRetriesAsync(() => _cache.SetAsync(GetRenewedTimeKey(entry.Id), SystemClock.UtcNow.Ticks, GetWorkItemTimeoutTimeTtl()), logger: _logger).AnyContext(); - await OnLockRenewedAsync(entry).AnyContext(); - if (_logger.IsEnabled(LogLevel.Trace)) _logger.LogTrace("Renew lock done: {EntryId}", entry.Id); + sw.Stop(); + if (_logger.IsEnabled(LogLevel.Trace)) _logger.LogTrace("Waited for dequeue: {Elapsed}", sw.Elapsed.ToString()); + + value = await DequeueIdAsync(linkedCancellationToken).AnyContext(); + if (_logger.IsEnabled(LogLevel.Trace)) _logger.LogTrace("List value: {Value}", value.IsNullOrEmpty ? 
"" : value.ToString()); } - private async Task> GetQueueEntryAsync(string workId) + if (value.IsNullOrEmpty) + return null; + + try { - var payload = await Run.WithRetriesAsync(() => _cache.GetAsync>(GetPayloadKey(workId)), logger: _logger).AnyContext(); - if (payload.IsNull) - { - if (_logger.IsEnabled(LogLevel.Error)) _logger.LogError("Error getting queue payload: {WorkId}", workId); - await Database.ListRemoveAsync(_workListName, workId).AnyContext(); + var entry = await GetQueueEntryAsync(value).AnyContext(); + if (entry == null) return null; - } - var enqueuedTimeTicks = Run.WithRetriesAsync(() => _cache.GetAsync(GetEnqueuedTimeKey(workId), 0), logger: _logger); - var attemptsValue = Run.WithRetriesAsync(() => _cache.GetAsync(GetAttemptsKey(workId), 0), logger: _logger); - await Task.WhenAll(enqueuedTimeTicks, attemptsValue).AnyContext(); + Interlocked.Increment(ref _dequeuedCount); + await OnDequeuedAsync(entry).AnyContext(); - var queueEntry = new QueueEntry(workId, payload.Value.CorrelationId, payload.Value.Value, this, new DateTime(enqueuedTimeTicks.Result, DateTimeKind.Utc), attemptsValue.Result + 1); - - if (payload.Value.Properties != null) - { - foreach (var property in payload.Value.Properties) - queueEntry.Properties.Add(property.Key, property.Value); - } - - return queueEntry; + _logger.LogDebug("Dequeued item: {Value}", value); + return entry; } - - private async Task DequeueIdAsync(CancellationToken linkedCancellationToken) + catch (Exception ex) { - try - { - return await Run.WithRetriesAsync(async () => - { - var timeout = GetWorkItemTimeoutTimeTtl(); - long now = SystemClock.UtcNow.Ticks; - - await LoadScriptsAsync().AnyContext(); - var result = await Database.ScriptEvaluateAsync(_dequeueId, new - { - queueListName = (RedisKey)_queueListName, - workListName = (RedisKey)_workListName, - listPrefix = _listPrefix, - now, - timeout = timeout.TotalMilliseconds - }).AnyContext(); - return result.ToString(); - }, 3, TimeSpan.FromMilliseconds(100), linkedCancellationToken, _logger).AnyContext(); - } - catch (Exception ex) - { - _logger.LogError(ex, "Queue {Name} dequeue id async error: {Error}", _options.Name, ex.Message); - return RedisValue.Null; - } + _logger.LogError(ex, "Error getting dequeued item payload: {Value}", value); + throw; } + } + + public override async Task RenewLockAsync(IQueueEntry entry) + { + if (_logger.IsEnabled(LogLevel.Debug)) _logger.LogDebug("Queue {Name} renew lock item: {EntryId}", _options.Name, entry.Id); + await Run.WithRetriesAsync(() => _cache.SetAsync(GetRenewedTimeKey(entry.Id), SystemClock.UtcNow.Ticks, GetWorkItemTimeoutTimeTtl()), logger: _logger).AnyContext(); + await OnLockRenewedAsync(entry).AnyContext(); + if (_logger.IsEnabled(LogLevel.Trace)) _logger.LogTrace("Renew lock done: {EntryId}", entry.Id); + } - public override async Task CompleteAsync(IQueueEntry entry) + private async Task> GetQueueEntryAsync(string workId) + { + var payload = await Run.WithRetriesAsync(() => _cache.GetAsync>(GetPayloadKey(workId)), logger: _logger).AnyContext(); + if (payload.IsNull) { - if (_logger.IsEnabled(LogLevel.Debug)) _logger.LogDebug("Queue {Name} complete item: {EntryId}", _options.Name, entry.Id); - if (entry.IsAbandoned || entry.IsCompleted) - { - //_logger.LogDebug("Queue {Name} item already abandoned or completed: {EntryId}", _options.Name, entry.Id); - throw new InvalidOperationException("Queue entry has already been completed or abandoned."); - } + if (_logger.IsEnabled(LogLevel.Error)) _logger.LogError("Error getting queue payload: 
{WorkId}", workId); + await Database.ListRemoveAsync(_workListName, workId).AnyContext(); + return null; + } - long result = await Run.WithRetriesAsync(() => Database.ListRemoveAsync(_workListName, entry.Id), logger: _logger).AnyContext(); - if (result == 0) - { - _logger.LogDebug("Queue {Name} item not in work list: {EntryId}", _options.Name, entry.Id); - throw new InvalidOperationException("Queue entry not in work list, it may have been auto abandoned."); - } + var enqueuedTimeTicks = Run.WithRetriesAsync(() => _cache.GetAsync(GetEnqueuedTimeKey(workId), 0), logger: _logger); + var attemptsValue = Run.WithRetriesAsync(() => _cache.GetAsync(GetAttemptsKey(workId), 0), logger: _logger); + await Task.WhenAll(enqueuedTimeTicks, attemptsValue).AnyContext(); - await Run.WithRetriesAsync(() => Task.WhenAll( - Database.KeyDeleteAsync(GetPayloadKey(entry.Id)), - Database.KeyDeleteAsync(GetAttemptsKey(entry.Id)), - Database.KeyDeleteAsync(GetEnqueuedTimeKey(entry.Id)), - Database.KeyDeleteAsync(GetDequeuedTimeKey(entry.Id)), - Database.KeyDeleteAsync(GetRenewedTimeKey(entry.Id)), - Database.KeyDeleteAsync(GetWaitTimeKey(entry.Id)) - ), logger: _logger).AnyContext(); + var queueEntry = new QueueEntry(workId, payload.Value.CorrelationId, payload.Value.Value, this, new DateTime(enqueuedTimeTicks.Result, DateTimeKind.Utc), attemptsValue.Result + 1); - Interlocked.Increment(ref _completedCount); - entry.MarkCompleted(); - await OnCompletedAsync(entry).AnyContext(); - if (_logger.IsEnabled(LogLevel.Trace)) _logger.LogTrace("Complete done: {EntryId}", entry.Id); + if (payload.Value.Properties != null) + { + foreach (var property in payload.Value.Properties) + queueEntry.Properties.Add(property.Key, property.Value); } - public override async Task AbandonAsync(IQueueEntry entry) + return queueEntry; + } + + private async Task DequeueIdAsync(CancellationToken linkedCancellationToken) + { + try { - _logger.LogDebug("Queue {Name}:{QueueId} abandon item: {EntryId}", _options.Name, QueueId, entry.Id); - if (entry.IsAbandoned || entry.IsCompleted) + return await Run.WithRetriesAsync(async () => { - _logger.LogError("Queue {Name}:{QueueId} unable to abandon item because already abandoned or completed: {EntryId}", _options.Name, QueueId, entry.Id); - throw new InvalidOperationException("Queue entry has already been completed or abandoned."); - } - - string attemptsCacheKey = GetAttemptsKey(entry.Id); - var attemptsCachedValue = await Run.WithRetriesAsync(() => _cache.GetAsync(attemptsCacheKey), logger: _logger).AnyContext(); - int attempts = 1; - if (attemptsCachedValue.HasValue) - attempts = attemptsCachedValue.Value + 1; + var timeout = GetWorkItemTimeoutTimeTtl(); + long now = SystemClock.UtcNow.Ticks; - var retryDelay = GetRetryDelay(attempts); - _logger.LogInformation("Item: {EntryId}, Retry attempts: {RetryAttempts}, Retries Allowed: {Retries}, Retry Delay: {RetryDelay:g}", entry.Id, attempts - 1, _options.Retries, retryDelay); + await LoadScriptsAsync().AnyContext(); + var result = await Database.ScriptEvaluateAsync(_dequeueId, new + { + queueListName = (RedisKey)_queueListName, + workListName = (RedisKey)_workListName, + listPrefix = _listPrefix, + now, + timeout = timeout.TotalMilliseconds + }).AnyContext(); + return result.ToString(); + }, 3, TimeSpan.FromMilliseconds(100), linkedCancellationToken, _logger).AnyContext(); + } + catch (Exception ex) + { + _logger.LogError(ex, "Queue {Name} dequeue id async error: {Error}", _options.Name, ex.Message); + return RedisValue.Null; + } + } - if (attempts > 
_options.Retries) - { - _logger.LogInformation("Exceeded retry limit moving to deadletter: {EntryId}", entry.Id); + public override async Task CompleteAsync(IQueueEntry entry) + { + if (_logger.IsEnabled(LogLevel.Debug)) _logger.LogDebug("Queue {Name} complete item: {EntryId}", _options.Name, entry.Id); + if (entry.IsAbandoned || entry.IsCompleted) + { + //_logger.LogDebug("Queue {Name} item already abandoned or completed: {EntryId}", _options.Name, entry.Id); + throw new InvalidOperationException("Queue entry has already been completed or abandoned."); + } - var tx = Database.CreateTransaction(); - tx.AddCondition(Condition.KeyExists(GetRenewedTimeKey(entry.Id))); - tx.ListRemoveAsync(_workListName, entry.Id); - tx.ListLeftPushAsync(_deadListName, entry.Id); - tx.KeyDeleteAsync(GetRenewedTimeKey(entry.Id)); - tx.KeyExpireAsync(GetPayloadKey(entry.Id), _options.DeadLetterTimeToLive); - bool success = await Run.WithRetriesAsync(() => tx.ExecuteAsync(), logger: _logger).AnyContext(); - if (!success) - throw new InvalidOperationException("Queue entry not in work list, it may have been auto abandoned."); + long result = await Run.WithRetriesAsync(() => Database.ListRemoveAsync(_workListName, entry.Id), logger: _logger).AnyContext(); + if (result == 0) + { + _logger.LogDebug("Queue {Name} item not in work list: {EntryId}", _options.Name, entry.Id); + throw new InvalidOperationException("Queue entry not in work list, it may have been auto abandoned."); + } - await Run.WithRetriesAsync(() => Task.WhenAll( - _cache.IncrementAsync(attemptsCacheKey, 1, GetAttemptsTtl()), - Database.KeyDeleteAsync(GetDequeuedTimeKey(entry.Id)), - Database.KeyDeleteAsync(GetWaitTimeKey(entry.Id)) - ), logger: _logger).AnyContext(); - } - else if (retryDelay > TimeSpan.Zero) - { - _logger.LogInformation("Adding item to wait list for future retry: {EntryId}", entry.Id); + await Run.WithRetriesAsync(() => Task.WhenAll( + Database.KeyDeleteAsync(GetPayloadKey(entry.Id)), + Database.KeyDeleteAsync(GetAttemptsKey(entry.Id)), + Database.KeyDeleteAsync(GetEnqueuedTimeKey(entry.Id)), + Database.KeyDeleteAsync(GetDequeuedTimeKey(entry.Id)), + Database.KeyDeleteAsync(GetRenewedTimeKey(entry.Id)), + Database.KeyDeleteAsync(GetWaitTimeKey(entry.Id)) + ), logger: _logger).AnyContext(); - await Run.WithRetriesAsync(() => Task.WhenAll( - _cache.SetAsync(GetWaitTimeKey(entry.Id), SystemClock.UtcNow.Add(retryDelay).Ticks, GetWaitTimeTtl()), - _cache.IncrementAsync(attemptsCacheKey, 1, GetAttemptsTtl()) - ), logger: _logger).AnyContext(); + Interlocked.Increment(ref _completedCount); + entry.MarkCompleted(); + await OnCompletedAsync(entry).AnyContext(); + if (_logger.IsEnabled(LogLevel.Trace)) _logger.LogTrace("Complete done: {EntryId}", entry.Id); + } - var tx = Database.CreateTransaction(); - tx.AddCondition(Condition.KeyExists(GetRenewedTimeKey(entry.Id))); - tx.ListRemoveAsync(_workListName, entry.Id); - tx.ListLeftPushAsync(_waitListName, entry.Id); - tx.KeyDeleteAsync(GetRenewedTimeKey(entry.Id)); - bool success = await Run.WithRetriesAsync(() => tx.ExecuteAsync()).AnyContext(); - if (!success) - throw new InvalidOperationException("Queue entry not in work list, it may have been auto abandoned."); + public override async Task AbandonAsync(IQueueEntry entry) + { + _logger.LogDebug("Queue {Name}:{QueueId} abandon item: {EntryId}", _options.Name, QueueId, entry.Id); + if (entry.IsAbandoned || entry.IsCompleted) + { + _logger.LogError("Queue {Name}:{QueueId} unable to abandon item because already abandoned or completed: {EntryId}", 
_options.Name, QueueId, entry.Id); + throw new InvalidOperationException("Queue entry has already been completed or abandoned."); + } - await Run.WithRetriesAsync(() => Database.KeyDeleteAsync(GetDequeuedTimeKey(entry.Id)), logger: _logger).AnyContext(); - } - else - { - _logger.LogInformation("Adding item back to queue for retry: {EntryId}", entry.Id); + string attemptsCacheKey = GetAttemptsKey(entry.Id); + var attemptsCachedValue = await Run.WithRetriesAsync(() => _cache.GetAsync(attemptsCacheKey), logger: _logger).AnyContext(); + int attempts = 1; + if (attemptsCachedValue.HasValue) + attempts = attemptsCachedValue.Value + 1; - await Run.WithRetriesAsync(() => _cache.IncrementAsync(attemptsCacheKey, 1, GetAttemptsTtl()), logger: _logger).AnyContext(); + var retryDelay = GetRetryDelay(attempts); + _logger.LogInformation("Item: {EntryId}, Retry attempts: {RetryAttempts}, Retries Allowed: {Retries}, Retry Delay: {RetryDelay:g}", entry.Id, attempts - 1, _options.Retries, retryDelay); - var tx = Database.CreateTransaction(); - tx.AddCondition(Condition.KeyExists(GetRenewedTimeKey(entry.Id))); - tx.ListRemoveAsync(_workListName, entry.Id); - tx.ListLeftPushAsync(_queueListName, entry.Id); - tx.KeyDeleteAsync(GetRenewedTimeKey(entry.Id)); - bool success = await Run.WithRetriesAsync(() => tx.ExecuteAsync(), logger: _logger).AnyContext(); - if (!success) - throw new InvalidOperationException("Queue entry not in work list, it may have been auto abandoned."); + if (attempts > _options.Retries) + { + _logger.LogInformation("Exceeded retry limit moving to deadletter: {EntryId}", entry.Id); - await Run.WithRetriesAsync(() => Task.WhenAll( - Database.KeyDeleteAsync(GetDequeuedTimeKey(entry.Id)), - // This should pulse the monitor. - _subscriber.PublishAsync(RedisChannel.Literal(GetTopicName()), entry.Id) - ), logger: _logger).AnyContext(); - } + var tx = Database.CreateTransaction(); + tx.AddCondition(Condition.KeyExists(GetRenewedTimeKey(entry.Id))); + tx.ListRemoveAsync(_workListName, entry.Id); + tx.ListLeftPushAsync(_deadListName, entry.Id); + tx.KeyDeleteAsync(GetRenewedTimeKey(entry.Id)); + tx.KeyExpireAsync(GetPayloadKey(entry.Id), _options.DeadLetterTimeToLive); + bool success = await Run.WithRetriesAsync(() => tx.ExecuteAsync(), logger: _logger).AnyContext(); + if (!success) + throw new InvalidOperationException("Queue entry not in work list, it may have been auto abandoned."); - Interlocked.Increment(ref _abandonedCount); - entry.MarkAbandoned(); - await OnAbandonedAsync(entry).AnyContext(); - _logger.LogInformation("Abandon complete: {EntryId}", entry.Id); + await Run.WithRetriesAsync(() => Task.WhenAll( + _cache.IncrementAsync(attemptsCacheKey, 1, GetAttemptsTtl()), + Database.KeyDeleteAsync(GetDequeuedTimeKey(entry.Id)), + Database.KeyDeleteAsync(GetWaitTimeKey(entry.Id)) + ), logger: _logger).AnyContext(); } - - private TimeSpan GetRetryDelay(int attempts) + else if (retryDelay > TimeSpan.Zero) { - if (_options.RetryDelay <= TimeSpan.Zero) - { - return TimeSpan.Zero; - } + _logger.LogInformation("Adding item to wait list for future retry: {EntryId}", entry.Id); - int maxMultiplier = _options.RetryMultipliers.Length > 0 ? _options.RetryMultipliers.Last() : 1; - int multiplier = attempts <= _options.RetryMultipliers.Length ? 
_options.RetryMultipliers[attempts - 1] : maxMultiplier; - return TimeSpan.FromMilliseconds(_options.RetryDelay.TotalMilliseconds * multiplier); - } + await Run.WithRetriesAsync(() => Task.WhenAll( + _cache.SetAsync(GetWaitTimeKey(entry.Id), SystemClock.UtcNow.Add(retryDelay).Ticks, GetWaitTimeTtl()), + _cache.IncrementAsync(attemptsCacheKey, 1, GetAttemptsTtl()) + ), logger: _logger).AnyContext(); - protected override Task> GetDeadletterItemsImplAsync(CancellationToken cancellationToken) - { - throw new NotImplementedException(); - } + var tx = Database.CreateTransaction(); + tx.AddCondition(Condition.KeyExists(GetRenewedTimeKey(entry.Id))); + tx.ListRemoveAsync(_workListName, entry.Id); + tx.ListLeftPushAsync(_waitListName, entry.Id); + tx.KeyDeleteAsync(GetRenewedTimeKey(entry.Id)); + bool success = await Run.WithRetriesAsync(() => tx.ExecuteAsync()).AnyContext(); + if (!success) + throw new InvalidOperationException("Queue entry not in work list, it may have been auto abandoned."); - public override async Task DeleteQueueAsync() - { - if (_logger.IsEnabled(LogLevel.Trace)) _logger.LogTrace("Deleting queue: {Name}", _options.Name); - await Task.WhenAll( - DeleteListAsync(_queueListName), - DeleteListAsync(_workListName), - DeleteListAsync(_waitListName), - DeleteListAsync(_deadListName) - ).AnyContext(); - - _enqueuedCount = 0; - _dequeuedCount = 0; - _completedCount = 0; - _abandonedCount = 0; - _workerErrorCount = 0; + await Run.WithRetriesAsync(() => Database.KeyDeleteAsync(GetDequeuedTimeKey(entry.Id)), logger: _logger).AnyContext(); } - - private async Task DeleteListAsync(string name) + else { - var itemIds = await Database.ListRangeAsync(name).AnyContext(); - var tasks = new List(); - foreach (var id in itemIds) - { - tasks.AddRange(new Task[] { - Database.KeyDeleteAsync(GetPayloadKey(id)), - Database.KeyDeleteAsync(GetAttemptsKey(id)), - Database.KeyDeleteAsync(GetEnqueuedTimeKey(id)), - Database.KeyDeleteAsync(GetDequeuedTimeKey(id)), - Database.KeyDeleteAsync(GetRenewedTimeKey(id)), - Database.KeyDeleteAsync(GetWaitTimeKey(id)) - }); - } + _logger.LogInformation("Adding item back to queue for retry: {EntryId}", entry.Id); - tasks.Add(Database.KeyDeleteAsync(name)); - await Task.WhenAll(tasks).AnyContext(); - } + await Run.WithRetriesAsync(() => _cache.IncrementAsync(attemptsCacheKey, 1, GetAttemptsTtl()), logger: _logger).AnyContext(); - private async Task TrimDeadletterItemsAsync(int maxItems) - { - var itemIds = (await Database.ListRangeAsync(_deadListName).AnyContext()).Skip(maxItems); - var tasks = new List(); - foreach (var id in itemIds) - { - tasks.AddRange(new Task[] { - Database.KeyDeleteAsync(GetPayloadKey(id)), - Database.KeyDeleteAsync(GetAttemptsKey(id)), - Database.KeyDeleteAsync(GetEnqueuedTimeKey(id)), - Database.KeyDeleteAsync(GetDequeuedTimeKey(id)), - Database.KeyDeleteAsync(GetRenewedTimeKey(id)), - Database.KeyDeleteAsync(GetWaitTimeKey(id)), - Database.ListRemoveAsync(_queueListName, id), - Database.ListRemoveAsync(_workListName, id), - Database.ListRemoveAsync(_waitListName, id), - Database.ListRemoveAsync(_deadListName, id) - }); - } + var tx = Database.CreateTransaction(); + tx.AddCondition(Condition.KeyExists(GetRenewedTimeKey(entry.Id))); + tx.ListRemoveAsync(_workListName, entry.Id); + tx.ListLeftPushAsync(_queueListName, entry.Id); + tx.KeyDeleteAsync(GetRenewedTimeKey(entry.Id)); + bool success = await Run.WithRetriesAsync(() => tx.ExecuteAsync(), logger: _logger).AnyContext(); + if (!success) + throw new InvalidOperationException("Queue entry not 
in work list, it may have been auto abandoned."); - await Task.WhenAll(tasks).AnyContext(); + await Run.WithRetriesAsync(() => Task.WhenAll( + Database.KeyDeleteAsync(GetDequeuedTimeKey(entry.Id)), + // This should pulse the monitor. + _subscriber.PublishAsync(RedisChannel.Literal(GetTopicName()), entry.Id) + ), logger: _logger).AnyContext(); } - private void OnTopicMessage(RedisChannel redisChannel, RedisValue redisValue) - { - if (_logger.IsEnabled(LogLevel.Trace)) _logger.LogTrace("Queue OnMessage {Name}: {Value}", _options.Name, redisValue); - _autoResetEvent.Set(); - } + Interlocked.Increment(ref _abandonedCount); + entry.MarkAbandoned(); + await OnAbandonedAsync(entry).AnyContext(); + _logger.LogInformation("Abandon complete: {EntryId}", entry.Id); + } - private void ConnectionMultiplexerOnConnectionRestored(object sender, ConnectionFailedEventArgs connectionFailedEventArgs) + private TimeSpan GetRetryDelay(int attempts) + { + if (_options.RetryDelay <= TimeSpan.Zero) { - if (_logger.IsEnabled(LogLevel.Information)) _logger.LogInformation("Redis connection restored."); - _scriptsLoaded = false; - _autoResetEvent.Set(); + return TimeSpan.Zero; } - public async Task DoMaintenanceWorkAsync() - { - if (_queueDisposedCancellationTokenSource.IsCancellationRequested) - return; + int maxMultiplier = _options.RetryMultipliers.Length > 0 ? _options.RetryMultipliers.Last() : 1; + int multiplier = attempts <= _options.RetryMultipliers.Length ? _options.RetryMultipliers[attempts - 1] : maxMultiplier; + return TimeSpan.FromMilliseconds(_options.RetryDelay.TotalMilliseconds * multiplier); + } - _logger.LogTrace("Starting DoMaintenance: Name: {Name} Id: {Id}", _options.Name, QueueId); - var utcNow = SystemClock.UtcNow; + protected override Task> GetDeadletterItemsImplAsync(CancellationToken cancellationToken) + { + throw new NotImplementedException(); + } - try - { - var workIds = await Database.ListRangeAsync(_workListName).AnyContext(); - foreach (var workId in workIds) - { - if (_queueDisposedCancellationTokenSource.IsCancellationRequested) - return; + public override async Task DeleteQueueAsync() + { + if (_logger.IsEnabled(LogLevel.Trace)) _logger.LogTrace("Deleting queue: {Name}", _options.Name); + await Task.WhenAll( + DeleteListAsync(_queueListName), + DeleteListAsync(_workListName), + DeleteListAsync(_waitListName), + DeleteListAsync(_deadListName) + ).AnyContext(); + + _enqueuedCount = 0; + _dequeuedCount = 0; + _completedCount = 0; + _abandonedCount = 0; + _workerErrorCount = 0; + } - var renewedTimeTicks = await _cache.GetAsync(GetRenewedTimeKey(workId)).AnyContext(); - if (!renewedTimeTicks.HasValue) - { - _logger.LogTrace("Skipping {WorkId}: no renewed time", workId); - continue; - } + private async Task DeleteListAsync(string name) + { + var itemIds = await Database.ListRangeAsync(name).AnyContext(); + var tasks = new List(); + foreach (var id in itemIds) + { + tasks.AddRange(new Task[] { + Database.KeyDeleteAsync(GetPayloadKey(id)), + Database.KeyDeleteAsync(GetAttemptsKey(id)), + Database.KeyDeleteAsync(GetEnqueuedTimeKey(id)), + Database.KeyDeleteAsync(GetDequeuedTimeKey(id)), + Database.KeyDeleteAsync(GetRenewedTimeKey(id)), + Database.KeyDeleteAsync(GetWaitTimeKey(id)) + }); + } + + tasks.Add(Database.KeyDeleteAsync(name)); + await Task.WhenAll(tasks).AnyContext(); + } - var renewedTime = new DateTime(renewedTimeTicks.Value); - _logger.LogTrace("{WorkId}: Renewed time {RenewedTime:o}", workId, renewedTime); + private async Task TrimDeadletterItemsAsync(int maxItems) + { + var 
itemIds = (await Database.ListRangeAsync(_deadListName).AnyContext()).Skip(maxItems); + var tasks = new List(); + foreach (var id in itemIds) + { + tasks.AddRange(new Task[] { + Database.KeyDeleteAsync(GetPayloadKey(id)), + Database.KeyDeleteAsync(GetAttemptsKey(id)), + Database.KeyDeleteAsync(GetEnqueuedTimeKey(id)), + Database.KeyDeleteAsync(GetDequeuedTimeKey(id)), + Database.KeyDeleteAsync(GetRenewedTimeKey(id)), + Database.KeyDeleteAsync(GetWaitTimeKey(id)), + Database.ListRemoveAsync(_queueListName, id), + Database.ListRemoveAsync(_workListName, id), + Database.ListRemoveAsync(_waitListName, id), + Database.ListRemoveAsync(_deadListName, id) + }); + } + + await Task.WhenAll(tasks).AnyContext(); + } - if (utcNow.Subtract(renewedTime) <= _options.WorkItemTimeout) - continue; + private void OnTopicMessage(RedisChannel redisChannel, RedisValue redisValue) + { + if (_logger.IsEnabled(LogLevel.Trace)) _logger.LogTrace("Queue OnMessage {Name}: {Value}", _options.Name, redisValue); + _autoResetEvent.Set(); + } - _logger.LogInformation("{WorkId} Auto abandon item. Renewed: {RenewedTime:o} Current: {UtcNow:o} Timeout: {WorkItemTimeout:g} QueueId: {QueueId}", workId, renewedTime, utcNow, _options.WorkItemTimeout, QueueId); - var entry = await GetQueueEntryAsync(workId).AnyContext(); - if (entry == null) - { - _logger.LogError("{WorkId} Error getting queue entry for work item timeout", workId); - continue; - } + private void ConnectionMultiplexerOnConnectionRestored(object sender, ConnectionFailedEventArgs connectionFailedEventArgs) + { + if (_logger.IsEnabled(LogLevel.Information)) _logger.LogInformation("Redis connection restored."); + _scriptsLoaded = false; + _autoResetEvent.Set(); + } - _logger.LogError("{WorkId} AbandonAsync", workId); - await AbandonAsync(entry).AnyContext(); - Interlocked.Increment(ref _workItemTimeoutCount); - } - } - catch (Exception ex) - { - _logger.LogError(ex, "Error checking for work item timeouts: {Message}", ex.Message); - } + public async Task DoMaintenanceWorkAsync() + { + if (_queueDisposedCancellationTokenSource.IsCancellationRequested) + return; - if (_queueDisposedCancellationTokenSource.IsCancellationRequested) - return; + _logger.LogTrace("Starting DoMaintenance: Name: {Name} Id: {Id}", _options.Name, QueueId); + var utcNow = SystemClock.UtcNow; - try + try + { + var workIds = await Database.ListRangeAsync(_workListName).AnyContext(); + foreach (var workId in workIds) { - var waitIds = await Database.ListRangeAsync(_waitListName).AnyContext(); - foreach (var waitId in waitIds) - { - if (_queueDisposedCancellationTokenSource.IsCancellationRequested) - return; - - var waitTimeTicks = await _cache.GetAsync(GetWaitTimeKey(waitId)).AnyContext(); - _logger.LogTrace("{WaitId}: Wait time {WaitTime}", waitId, waitTimeTicks); + if (_queueDisposedCancellationTokenSource.IsCancellationRequested) + return; - if (waitTimeTicks.HasValue && waitTimeTicks.Value > utcNow.Ticks) - continue; + var renewedTimeTicks = await _cache.GetAsync(GetRenewedTimeKey(workId)).AnyContext(); + if (!renewedTimeTicks.HasValue) + { + _logger.LogTrace("Skipping {WorkId}: no renewed time", workId); + continue; + } - _logger.LogTrace("{WaitId}: Getting retry lock", waitId); - _logger.LogDebug("{WaitId}: Adding item back to queue for retry", waitId); + var renewedTime = new DateTime(renewedTimeTicks.Value); + _logger.LogTrace("{WorkId}: Renewed time {RenewedTime:o}", workId, renewedTime); - var tx = Database.CreateTransaction(); - tx.ListRemoveAsync(_waitListName, waitId); - 
tx.ListLeftPushAsync(_queueListName, waitId); - tx.KeyDeleteAsync(GetWaitTimeKey(waitId)); - bool success = await Run.WithRetriesAsync(() => tx.ExecuteAsync(), logger: _logger).AnyContext(); - if (!success) - throw new Exception("Unable to move item to queue list."); + if (utcNow.Subtract(renewedTime) <= _options.WorkItemTimeout) + continue; - await Run.WithRetriesAsync(() => _subscriber.PublishAsync(RedisChannel.Literal(GetTopicName()), waitId), cancellationToken: _queueDisposedCancellationTokenSource.Token, logger: _logger).AnyContext(); + _logger.LogInformation("{WorkId} Auto abandon item. Renewed: {RenewedTime:o} Current: {UtcNow:o} Timeout: {WorkItemTimeout:g} QueueId: {QueueId}", workId, renewedTime, utcNow, _options.WorkItemTimeout, QueueId); + var entry = await GetQueueEntryAsync(workId).AnyContext(); + if (entry == null) + { + _logger.LogError("{WorkId} Error getting queue entry for work item timeout", workId); + continue; } + + _logger.LogError("{WorkId} AbandonAsync", workId); + await AbandonAsync(entry).AnyContext(); + Interlocked.Increment(ref _workItemTimeoutCount); } - catch (Exception ex) - { - _logger.LogError(ex, "Error adding items back to the queue after the retry delay: {Message}", ex.Message); - } + } + catch (Exception ex) + { + _logger.LogError(ex, "Error checking for work item timeouts: {Message}", ex.Message); + } - if (_queueDisposedCancellationTokenSource.IsCancellationRequested) - return; + if (_queueDisposedCancellationTokenSource.IsCancellationRequested) + return; - try - { - await TrimDeadletterItemsAsync(_options.DeadLetterMaxItems).AnyContext(); - } - catch (Exception ex) + try + { + var waitIds = await Database.ListRangeAsync(_waitListName).AnyContext(); + foreach (var waitId in waitIds) { - _logger.LogError(ex, "Error trimming deadletter items: {0}", ex.Message); + if (_queueDisposedCancellationTokenSource.IsCancellationRequested) + return; + + var waitTimeTicks = await _cache.GetAsync(GetWaitTimeKey(waitId)).AnyContext(); + _logger.LogTrace("{WaitId}: Wait time {WaitTime}", waitId, waitTimeTicks); + + if (waitTimeTicks.HasValue && waitTimeTicks.Value > utcNow.Ticks) + continue; + + _logger.LogTrace("{WaitId}: Getting retry lock", waitId); + _logger.LogDebug("{WaitId}: Adding item back to queue for retry", waitId); + + var tx = Database.CreateTransaction(); + tx.ListRemoveAsync(_waitListName, waitId); + tx.ListLeftPushAsync(_queueListName, waitId); + tx.KeyDeleteAsync(GetWaitTimeKey(waitId)); + bool success = await Run.WithRetriesAsync(() => tx.ExecuteAsync(), logger: _logger).AnyContext(); + if (!success) + throw new Exception("Unable to move item to queue list."); + + await Run.WithRetriesAsync(() => _subscriber.PublishAsync(RedisChannel.Literal(GetTopicName()), waitId), cancellationToken: _queueDisposedCancellationTokenSource.Token, logger: _logger).AnyContext(); } + } + catch (Exception ex) + { + _logger.LogError(ex, "Error adding items back to the queue after the retry delay: {Message}", ex.Message); + } + + if (_queueDisposedCancellationTokenSource.IsCancellationRequested) + return; - _logger.LogTrace("Finished DoMaintenance: Name: {Name} Id: {Id} Duration: {Duration:g}", _options.Name, QueueId, SystemClock.UtcNow.Subtract(utcNow)); + try + { + await TrimDeadletterItemsAsync(_options.DeadLetterMaxItems).AnyContext(); + } + catch (Exception ex) + { + _logger.LogError(ex, "Error trimming deadletter items: {0}", ex.Message); } - private async Task DoMaintenanceWorkLoopAsync() + _logger.LogTrace("Finished DoMaintenance: Name: {Name} Id: {Id} Duration: 
{Duration:g}", _options.Name, QueueId, SystemClock.UtcNow.Subtract(utcNow)); + } + + private async Task DoMaintenanceWorkLoopAsync() + { + while (!_queueDisposedCancellationTokenSource.IsCancellationRequested) { - while (!_queueDisposedCancellationTokenSource.IsCancellationRequested) - { - _logger.LogTrace("Requesting Maintenance Lock. Name: {Name} Id: {Id}", _options.Name, QueueId); + _logger.LogTrace("Requesting Maintenance Lock. Name: {Name} Id: {Id}", _options.Name, QueueId); - var utcNow = SystemClock.UtcNow; - using var linkedCancellationToken = GetLinkedDisposableCancellationTokenSource(new CancellationTokenSource(TimeSpan.FromSeconds(30)).Token); - bool gotLock = await _maintenanceLockProvider.TryUsingAsync($"{_options.Name}-maintenance", DoMaintenanceWorkAsync, cancellationToken: linkedCancellationToken.Token).AnyContext(); + var utcNow = SystemClock.UtcNow; + using var linkedCancellationToken = GetLinkedDisposableCancellationTokenSource(new CancellationTokenSource(TimeSpan.FromSeconds(30)).Token); + bool gotLock = await _maintenanceLockProvider.TryUsingAsync($"{_options.Name}-maintenance", DoMaintenanceWorkAsync, cancellationToken: linkedCancellationToken.Token).AnyContext(); - _logger.LogTrace("{Status} Maintenance Lock. Name: {Name} Id: {Id} Time To Acquire: {AcquireDuration:g}", gotLock ? "Acquired" : "Failed to acquire", _options.Name, QueueId, SystemClock.UtcNow.Subtract(utcNow)); - } + _logger.LogTrace("{Status} Maintenance Lock. Name: {Name} Id: {Id} Time To Acquire: {AcquireDuration:g}", gotLock ? "Acquired" : "Failed to acquire", _options.Name, QueueId, SystemClock.UtcNow.Subtract(utcNow)); } + } - private async Task LoadScriptsAsync() + private async Task LoadScriptsAsync() + { + if (_scriptsLoaded) + return; + + using (await _lock.LockAsync().AnyContext()) { if (_scriptsLoaded) return; - using (await _lock.LockAsync().AnyContext()) - { - if (_scriptsLoaded) - return; - - var dequeueId = LuaScript.Prepare(DequeueIdScript); - - foreach (var endpoint in _options.ConnectionMultiplexer.GetEndPoints()) - { - var server = _options.ConnectionMultiplexer.GetServer(endpoint); - if (server.IsReplica) - continue; + var dequeueId = LuaScript.Prepare(DequeueIdScript); - _dequeueId = await dequeueId.LoadAsync(server).AnyContext(); - } + foreach (var endpoint in _options.ConnectionMultiplexer.GetEndPoints()) + { + var server = _options.ConnectionMultiplexer.GetServer(endpoint); + if (server.IsReplica) + continue; - _scriptsLoaded = true; + _dequeueId = await dequeueId.LoadAsync(server).AnyContext(); } + + _scriptsLoaded = true; } + } - public override void Dispose() - { - base.Dispose(); - _options.ConnectionMultiplexer.ConnectionRestored -= ConnectionMultiplexerOnConnectionRestored; + public override void Dispose() + { + base.Dispose(); + _options.ConnectionMultiplexer.ConnectionRestored -= ConnectionMultiplexerOnConnectionRestored; - if (_isSubscribed) + if (_isSubscribed) + { + lock (_lock.Lock()) { - lock (_lock.Lock()) + if (_isSubscribed) { - if (_isSubscribed) - { - _logger.LogTrace("Unsubscribing from topic {Topic}", GetTopicName()); - _subscriber.Unsubscribe(RedisChannel.Literal(GetTopicName()), OnTopicMessage, CommandFlags.FireAndForget); - _isSubscribed = false; - _logger.LogTrace("Unsubscribed from topic {Topic}", GetTopicName()); - } + _logger.LogTrace("Unsubscribing from topic {Topic}", GetTopicName()); + _subscriber.Unsubscribe(RedisChannel.Literal(GetTopicName()), OnTopicMessage, CommandFlags.FireAndForget); + _isSubscribed = false; + _logger.LogTrace("Unsubscribed 
from topic {Topic}", GetTopicName()); } } + } - _logger.LogTrace("Got {WorkerCount} workers to cleanup", _workers.Count); - foreach (var worker in _workers) - { - if (worker.IsCompleted) - continue; - - _logger.LogTrace("Attempting to cleanup worker"); - if (!worker.Wait(TimeSpan.FromSeconds(5))) - _logger.LogError("Failed waiting for worker to stop"); - } + _logger.LogTrace("Got {WorkerCount} workers to cleanup", _workers.Count); + foreach (var worker in _workers) + { + if (worker.IsCompleted) + continue; - _cache.Dispose(); + _logger.LogTrace("Attempting to cleanup worker"); + if (!worker.Wait(TimeSpan.FromSeconds(5))) + _logger.LogError("Failed waiting for worker to stop"); } - private static readonly string DequeueIdScript = EmbeddedResourceLoader.GetEmbeddedResource("Foundatio.Redis.Scripts.DequeueId.lua"); + _cache.Dispose(); } - public class RedisPayloadEnvelope - { - public string CorrelationId { get; set; } - public IDictionary Properties { get; set; } - public T Value { get; set; } - } + private static readonly string DequeueIdScript = EmbeddedResourceLoader.GetEmbeddedResource("Foundatio.Redis.Scripts.DequeueId.lua"); +} + +public class RedisPayloadEnvelope +{ + public string CorrelationId { get; set; } + public IDictionary Properties { get; set; } + public T Value { get; set; } } diff --git a/src/Foundatio.Redis/Queues/RedisQueueOptions.cs b/src/Foundatio.Redis/Queues/RedisQueueOptions.cs index 6a200f8..1dd95c5 100644 --- a/src/Foundatio.Redis/Queues/RedisQueueOptions.cs +++ b/src/Foundatio.Redis/Queues/RedisQueueOptions.cs @@ -1,55 +1,54 @@ using System; using StackExchange.Redis; -namespace Foundatio.Queues +namespace Foundatio.Queues; + +// TODO: Make queue settings immutable and stored in redis so that multiple clients can't have different settings. +public class RedisQueueOptions : SharedQueueOptions where T : class +{ + public IConnectionMultiplexer ConnectionMultiplexer { get; set; } + public TimeSpan RetryDelay { get; set; } = TimeSpan.FromMinutes(1); + public int[] RetryMultipliers { get; set; } = { 1, 3, 5, 10 }; + public TimeSpan DeadLetterTimeToLive { get; set; } = TimeSpan.FromDays(1); + public int DeadLetterMaxItems { get; set; } = 100; + public bool RunMaintenanceTasks { get; set; } = true; +} + +public class RedisQueueOptionsBuilder : SharedQueueOptionsBuilder, RedisQueueOptionsBuilder> where T : class { - // TODO: Make queue settings immutable and stored in redis so that multiple clients can't have different settings. 
- public class RedisQueueOptions : SharedQueueOptions where T : class + public RedisQueueOptionsBuilder ConnectionMultiplexer(IConnectionMultiplexer connectionMultiplexer) + { + Target.ConnectionMultiplexer = connectionMultiplexer; + return this; + } + + public RedisQueueOptionsBuilder RetryDelay(TimeSpan retryDelay) + { + Target.RetryDelay = retryDelay; + return this; + } + + public RedisQueueOptionsBuilder RetryMultipliers(int[] retryMultipliers) + { + Target.RetryMultipliers = retryMultipliers; + return this; + } + + public RedisQueueOptionsBuilder DeadLetterTimeToLive(TimeSpan deadLetterTimeToLive) + { + Target.DeadLetterTimeToLive = deadLetterTimeToLive; + return this; + } + + public RedisQueueOptionsBuilder DeadLetterMaxItems(int deadLetterMaxItems) { - public IConnectionMultiplexer ConnectionMultiplexer { get; set; } - public TimeSpan RetryDelay { get; set; } = TimeSpan.FromMinutes(1); - public int[] RetryMultipliers { get; set; } = { 1, 3, 5, 10 }; - public TimeSpan DeadLetterTimeToLive { get; set; } = TimeSpan.FromDays(1); - public int DeadLetterMaxItems { get; set; } = 100; - public bool RunMaintenanceTasks { get; set; } = true; + Target.DeadLetterMaxItems = deadLetterMaxItems; + return this; } - public class RedisQueueOptionsBuilder : SharedQueueOptionsBuilder, RedisQueueOptionsBuilder> where T : class + public RedisQueueOptionsBuilder RunMaintenanceTasks(bool runMaintenanceTasks) { - public RedisQueueOptionsBuilder ConnectionMultiplexer(IConnectionMultiplexer connectionMultiplexer) - { - Target.ConnectionMultiplexer = connectionMultiplexer; - return this; - } - - public RedisQueueOptionsBuilder RetryDelay(TimeSpan retryDelay) - { - Target.RetryDelay = retryDelay; - return this; - } - - public RedisQueueOptionsBuilder RetryMultipliers(int[] retryMultipliers) - { - Target.RetryMultipliers = retryMultipliers; - return this; - } - - public RedisQueueOptionsBuilder DeadLetterTimeToLive(TimeSpan deadLetterTimeToLive) - { - Target.DeadLetterTimeToLive = deadLetterTimeToLive; - return this; - } - - public RedisQueueOptionsBuilder DeadLetterMaxItems(int deadLetterMaxItems) - { - Target.DeadLetterMaxItems = deadLetterMaxItems; - return this; - } - - public RedisQueueOptionsBuilder RunMaintenanceTasks(bool runMaintenanceTasks) - { - Target.RunMaintenanceTasks = runMaintenanceTasks; - return this; - } + Target.RunMaintenanceTasks = runMaintenanceTasks; + return this; } } diff --git a/src/Foundatio.Redis/Storage/RedisFileStorage.cs b/src/Foundatio.Redis/Storage/RedisFileStorage.cs index ad59a80..e9921fb 100644 --- a/src/Foundatio.Redis/Storage/RedisFileStorage.cs +++ b/src/Foundatio.Redis/Storage/RedisFileStorage.cs @@ -12,334 +12,333 @@ using Microsoft.Extensions.Logging.Abstractions; using StackExchange.Redis; -namespace Foundatio.Storage +namespace Foundatio.Storage; + +public class RedisFileStorage : IFileStorage { - public class RedisFileStorage : IFileStorage + private readonly RedisFileStorageOptions _options; + private readonly ISerializer _serializer; + private readonly ILogger _logger; + private readonly string _fileSpecContainer; + + public RedisFileStorage(RedisFileStorageOptions options) { - private readonly RedisFileStorageOptions _options; - private readonly ISerializer _serializer; - private readonly ILogger _logger; - private readonly string _fileSpecContainer; + if (options.ConnectionMultiplexer == null) + throw new ArgumentException("ConnectionMultiplexer is required."); - public RedisFileStorage(RedisFileStorageOptions options) - { - if (options.ConnectionMultiplexer == 
null) - throw new ArgumentException("ConnectionMultiplexer is required."); + _serializer = options.Serializer ?? DefaultSerializer.Instance; + _logger = options.LoggerFactory?.CreateLogger(GetType()) ?? NullLogger.Instance; - _serializer = options.Serializer ?? DefaultSerializer.Instance; - _logger = options.LoggerFactory?.CreateLogger(GetType()) ?? NullLogger.Instance; + options.ConnectionMultiplexer.ConnectionRestored += ConnectionMultiplexerOnConnectionRestored; + _fileSpecContainer = $"{options.ContainerName}-filespecs"; + _options = options; + } - options.ConnectionMultiplexer.ConnectionRestored += ConnectionMultiplexerOnConnectionRestored; - _fileSpecContainer = $"{options.ContainerName}-filespecs"; - _options = options; - } + public RedisFileStorage(Builder config) + : this(config(new RedisFileStorageOptionsBuilder()).Build()) { } - public RedisFileStorage(Builder config) - : this(config(new RedisFileStorageOptionsBuilder()).Build()) { } + ISerializer IHaveSerializer.Serializer => _serializer; + public IDatabase Database => _options.ConnectionMultiplexer.GetDatabase(); - ISerializer IHaveSerializer.Serializer => _serializer; - public IDatabase Database => _options.ConnectionMultiplexer.GetDatabase(); + public void Dispose() + { + _options.ConnectionMultiplexer.ConnectionRestored -= ConnectionMultiplexerOnConnectionRestored; + } - public void Dispose() - { - _options.ConnectionMultiplexer.ConnectionRestored -= ConnectionMultiplexerOnConnectionRestored; - } + [Obsolete($"Use {nameof(GetFileStreamAsync)} with {nameof(FileAccess)} instead to define read or write behaviour of stream")] + public Task GetFileStreamAsync(string path, CancellationToken cancellationToken = default) + => GetFileStreamAsync(path, StreamMode.Read, cancellationToken); - [Obsolete($"Use {nameof(GetFileStreamAsync)} with {nameof(FileAccess)} instead to define read or write behaviour of stream")] - public Task GetFileStreamAsync(string path, CancellationToken cancellationToken = default) - => GetFileStreamAsync(path, StreamMode.Read, cancellationToken); + public async Task GetFileStreamAsync(string path, StreamMode streamMode, CancellationToken cancellationToken = default) + { + if (String.IsNullOrEmpty(path)) + throw new ArgumentNullException(nameof(path)); - public async Task GetFileStreamAsync(string path, StreamMode streamMode, CancellationToken cancellationToken = default) - { - if (String.IsNullOrEmpty(path)) - throw new ArgumentNullException(nameof(path)); + if (streamMode is StreamMode.Write) + throw new NotSupportedException($"Stream mode {streamMode} is not supported."); - if (streamMode is StreamMode.Write) - throw new NotSupportedException($"Stream mode {streamMode} is not supported."); + string normalizedPath = NormalizePath(path); + _logger.LogTrace("Getting file stream for {Path}", normalizedPath); - string normalizedPath = NormalizePath(path); - _logger.LogTrace("Getting file stream for {Path}", normalizedPath); + var fileContent = await Run.WithRetriesAsync(() => Database.HashGetAsync(_options.ContainerName, normalizedPath), + cancellationToken: cancellationToken, logger: _logger).AnyContext(); - var fileContent = await Run.WithRetriesAsync(() => Database.HashGetAsync(_options.ContainerName, normalizedPath), - cancellationToken: cancellationToken, logger: _logger).AnyContext(); + if (fileContent.IsNull) + { + _logger.LogError("Unable to get file stream for {Path}: File Not Found", normalizedPath); + return null; + } - if (fileContent.IsNull) - { - _logger.LogError("Unable to get file stream for 
{Path}: File Not Found", normalizedPath); - return null; - } + return new MemoryStream(fileContent); + } - return new MemoryStream(fileContent); - } + public async Task GetFileInfoAsync(string path) + { + if (String.IsNullOrEmpty(path)) + throw new ArgumentNullException(nameof(path)); - public async Task GetFileInfoAsync(string path) + string normalizedPath = NormalizePath(path); + _logger.LogTrace("Getting file info for {Path}", normalizedPath); + + var fileSpec = await Run.WithRetriesAsync(() => Database.HashGetAsync(_fileSpecContainer, normalizedPath), logger: _logger).AnyContext(); + if (!fileSpec.HasValue) { - if (String.IsNullOrEmpty(path)) - throw new ArgumentNullException(nameof(path)); + _logger.LogError("Unable to get file info for {Path}: File Not Found", normalizedPath); + return null; + } - string normalizedPath = NormalizePath(path); - _logger.LogTrace("Getting file info for {Path}", normalizedPath); + return _serializer.Deserialize((byte[])fileSpec); + } - var fileSpec = await Run.WithRetriesAsync(() => Database.HashGetAsync(_fileSpecContainer, normalizedPath), logger: _logger).AnyContext(); - if (!fileSpec.HasValue) - { - _logger.LogError("Unable to get file info for {Path}: File Not Found", normalizedPath); - return null; - } + public Task ExistsAsync(string path) + { + if (String.IsNullOrEmpty(path)) + throw new ArgumentNullException(nameof(path)); - return _serializer.Deserialize((byte[])fileSpec); - } + string normalizedPath = NormalizePath(path); + _logger.LogTrace("Checking if {Path} exists", normalizedPath); - public Task ExistsAsync(string path) - { - if (String.IsNullOrEmpty(path)) - throw new ArgumentNullException(nameof(path)); + return Run.WithRetriesAsync(() => Database.HashExistsAsync(_fileSpecContainer, normalizedPath), logger: _logger); + } - string normalizedPath = NormalizePath(path); - _logger.LogTrace("Checking if {Path} exists", normalizedPath); + public async Task SaveFileAsync(string path, Stream stream, CancellationToken cancellationToken = default) + { + if (String.IsNullOrEmpty(path)) + throw new ArgumentNullException(nameof(path)); + if (stream == null) + throw new ArgumentNullException(nameof(stream)); - return Run.WithRetriesAsync(() => Database.HashExistsAsync(_fileSpecContainer, normalizedPath), logger: _logger); - } + string normalizedPath = NormalizePath(path); + _logger.LogTrace("Saving {Path}", normalizedPath); - public async Task SaveFileAsync(string path, Stream stream, CancellationToken cancellationToken = default) + try { - if (String.IsNullOrEmpty(path)) - throw new ArgumentNullException(nameof(path)); - if (stream == null) - throw new ArgumentNullException(nameof(stream)); + var database = Database; - string normalizedPath = NormalizePath(path); - _logger.LogTrace("Saving {Path}", normalizedPath); + using var memory = new MemoryStream(); + await stream.CopyToAsync(memory, 0x14000, cancellationToken).AnyContext(); + var saveFileTask = database.HashSetAsync(_options.ContainerName, normalizedPath, memory.ToArray()); + long fileSize = memory.Length; + memory.Seek(0, SeekOrigin.Begin); + memory.SetLength(0); - try + _serializer.Serialize(new FileSpec { - var database = Database; - - using var memory = new MemoryStream(); - await stream.CopyToAsync(memory, 0x14000, cancellationToken).AnyContext(); - var saveFileTask = database.HashSetAsync(_options.ContainerName, normalizedPath, memory.ToArray()); - long fileSize = memory.Length; - memory.Seek(0, SeekOrigin.Begin); - memory.SetLength(0); - - _serializer.Serialize(new FileSpec - { - Path = 
normalizedPath, - Created = DateTime.UtcNow, - Modified = DateTime.UtcNow, - Size = fileSize - }, memory); - var saveSpecTask = database.HashSetAsync(_fileSpecContainer, normalizedPath, memory.ToArray()); - await Run.WithRetriesAsync(() => Task.WhenAll(saveFileTask, saveSpecTask), - cancellationToken: cancellationToken, logger: _logger).AnyContext(); - return true; - } - catch (Exception ex) - { - _logger.LogError(ex, "Error saving {Path}: {Message}", normalizedPath, ex.Message); - return false; - } + Path = normalizedPath, + Created = DateTime.UtcNow, + Modified = DateTime.UtcNow, + Size = fileSize + }, memory); + var saveSpecTask = database.HashSetAsync(_fileSpecContainer, normalizedPath, memory.ToArray()); + await Run.WithRetriesAsync(() => Task.WhenAll(saveFileTask, saveSpecTask), + cancellationToken: cancellationToken, logger: _logger).AnyContext(); + return true; } - - public async Task RenameFileAsync(string path, string newPath, CancellationToken cancellationToken = default) + catch (Exception ex) { - if (String.IsNullOrEmpty(path)) - throw new ArgumentNullException(nameof(path)); - if (String.IsNullOrEmpty(newPath)) - throw new ArgumentNullException(nameof(newPath)); + _logger.LogError(ex, "Error saving {Path}: {Message}", normalizedPath, ex.Message); + return false; + } + } - string normalizedPath = NormalizePath(path); - string normalizedNewPath = NormalizePath(newPath); - _logger.LogInformation("Renaming {Path} to {NewPath}", normalizedPath, normalizedNewPath); + public async Task RenameFileAsync(string path, string newPath, CancellationToken cancellationToken = default) + { + if (String.IsNullOrEmpty(path)) + throw new ArgumentNullException(nameof(path)); + if (String.IsNullOrEmpty(newPath)) + throw new ArgumentNullException(nameof(newPath)); - try - { - var stream = await GetFileStreamAsync(normalizedPath, StreamMode.Read, cancellationToken).AnyContext(); - return await DeleteFileAsync(normalizedPath, cancellationToken).AnyContext() && - await SaveFileAsync(normalizedNewPath, stream, cancellationToken).AnyContext(); - } - catch (Exception ex) - { - _logger.LogError(ex, "Error renaming {Path} to {NewPath}: {Message}", normalizedPath, newPath, ex.Message); - return false; - } - } + string normalizedPath = NormalizePath(path); + string normalizedNewPath = NormalizePath(newPath); + _logger.LogInformation("Renaming {Path} to {NewPath}", normalizedPath, normalizedNewPath); - public async Task CopyFileAsync(string path, string targetPath, CancellationToken cancellationToken = default) + try { - if (String.IsNullOrEmpty(path)) - throw new ArgumentNullException(nameof(path)); - if (String.IsNullOrEmpty(targetPath)) - throw new ArgumentNullException(nameof(targetPath)); + var stream = await GetFileStreamAsync(normalizedPath, StreamMode.Read, cancellationToken).AnyContext(); + return await DeleteFileAsync(normalizedPath, cancellationToken).AnyContext() && + await SaveFileAsync(normalizedNewPath, stream, cancellationToken).AnyContext(); + } + catch (Exception ex) + { + _logger.LogError(ex, "Error renaming {Path} to {NewPath}: {Message}", normalizedPath, newPath, ex.Message); + return false; + } + } - string normalizedPath = NormalizePath(path); - string normalizedTargetPath = NormalizePath(targetPath); - _logger.LogInformation("Copying {Path} to {TargetPath}", normalizedPath, normalizedTargetPath); + public async Task CopyFileAsync(string path, string targetPath, CancellationToken cancellationToken = default) + { + if (String.IsNullOrEmpty(path)) + throw new 
ArgumentNullException(nameof(path)); + if (String.IsNullOrEmpty(targetPath)) + throw new ArgumentNullException(nameof(targetPath)); - try - { - using var stream = await GetFileStreamAsync(normalizedPath, StreamMode.Read, cancellationToken).AnyContext(); - if (stream == null) - return false; + string normalizedPath = NormalizePath(path); + string normalizedTargetPath = NormalizePath(targetPath); + _logger.LogInformation("Copying {Path} to {TargetPath}", normalizedPath, normalizedTargetPath); - return await SaveFileAsync(normalizedTargetPath, stream, cancellationToken).AnyContext(); - } - catch (Exception ex) - { - _logger.LogError(ex, "Error copying {Path} to {TargetPath}: {Message}", normalizedPath, normalizedTargetPath, ex.Message); + try + { + using var stream = await GetFileStreamAsync(normalizedPath, StreamMode.Read, cancellationToken).AnyContext(); + if (stream == null) return false; - } - } - public async Task DeleteFileAsync(string path, CancellationToken cancellationToken = default) + return await SaveFileAsync(normalizedTargetPath, stream, cancellationToken).AnyContext(); + } + catch (Exception ex) { - if (String.IsNullOrEmpty(path)) - throw new ArgumentNullException(nameof(path)); + _logger.LogError(ex, "Error copying {Path} to {TargetPath}: {Message}", normalizedPath, normalizedTargetPath, ex.Message); + return false; + } + } - string normalizedPath = NormalizePath(path); - _logger.LogTrace("Deleting {Path}", normalizedPath); + public async Task DeleteFileAsync(string path, CancellationToken cancellationToken = default) + { + if (String.IsNullOrEmpty(path)) + throw new ArgumentNullException(nameof(path)); - var database = Database; - var deleteSpecTask = database.HashDeleteAsync(_fileSpecContainer, normalizedPath); - var deleteFileTask = database.HashDeleteAsync(_options.ContainerName, normalizedPath); - await Run.WithRetriesAsync(() => Task.WhenAll(deleteSpecTask, deleteFileTask), cancellationToken: cancellationToken, logger: _logger).AnyContext(); - return true; - } + string normalizedPath = NormalizePath(path); + _logger.LogTrace("Deleting {Path}", normalizedPath); - public async Task DeleteFilesAsync(string searchPattern = null, CancellationToken cancellationToken = default) - { - var files = await GetFileListAsync(searchPattern, cancellationToken: cancellationToken).AnyContext(); - int count = 0; + var database = Database; + var deleteSpecTask = database.HashDeleteAsync(_fileSpecContainer, normalizedPath); + var deleteFileTask = database.HashDeleteAsync(_options.ContainerName, normalizedPath); + await Run.WithRetriesAsync(() => Task.WhenAll(deleteSpecTask, deleteFileTask), cancellationToken: cancellationToken, logger: _logger).AnyContext(); + return true; + } - _logger.LogInformation("Deleting {FileCount} files matching {SearchPattern}", files, searchPattern); - foreach (var file in files) - { - await DeleteFileAsync(file.Path, cancellationToken).AnyContext(); - count++; - } - _logger.LogTrace("Finished deleting {FileCount} files matching {SearchPattern}", count, searchPattern); + public async Task DeleteFilesAsync(string searchPattern = null, CancellationToken cancellationToken = default) + { + var files = await GetFileListAsync(searchPattern, cancellationToken: cancellationToken).AnyContext(); + int count = 0; - return count; + _logger.LogInformation("Deleting {FileCount} files matching {SearchPattern}", files, searchPattern); + foreach (var file in files) + { + await DeleteFileAsync(file.Path, cancellationToken).AnyContext(); + count++; } + _logger.LogTrace("Finished 
deleting {FileCount} files matching {SearchPattern}", count, searchPattern); + + return count; + } - private Task> GetFileListAsync(string searchPattern = null, int? limit = null, int? skip = null, CancellationToken cancellationToken = default) + private Task> GetFileListAsync(string searchPattern = null, int? limit = null, int? skip = null, CancellationToken cancellationToken = default) + { + if (limit is <= 0) + return Task.FromResult(new List()); + + searchPattern = NormalizePath(searchPattern); + string prefix = searchPattern; + Regex patternRegex = null; + int wildcardPos = searchPattern?.IndexOf('*') ?? -1; + if (searchPattern != null && wildcardPos >= 0) { - if (limit is <= 0) - return Task.FromResult(new List()); - - searchPattern = NormalizePath(searchPattern); - string prefix = searchPattern; - Regex patternRegex = null; - int wildcardPos = searchPattern?.IndexOf('*') ?? -1; - if (searchPattern != null && wildcardPos >= 0) - { - patternRegex = new Regex($"^{Regex.Escape(searchPattern).Replace("\\*", ".*?")}$"); - int slashPos = searchPattern.LastIndexOf('/'); - prefix = slashPos >= 0 ? searchPattern.Substring(0, slashPos) : String.Empty; - } - - prefix ??= String.Empty; - int pageSize = limit ?? Int32.MaxValue; - - _logger.LogTrace( - s => s.Property("SearchPattern", searchPattern).Property("Limit", limit).Property("Skip", skip), - "Getting file list matching {Prefix} and {Pattern}...", prefix, patternRegex - ); - - return Task.FromResult(Database.HashScan(_fileSpecContainer, $"{prefix}*") - .Select(entry => _serializer.Deserialize((byte[])entry.Value)) - .Where(fileSpec => patternRegex == null || patternRegex.IsMatch(fileSpec.Path)) - .Take(pageSize) - .ToList() - ); + patternRegex = new Regex($"^{Regex.Escape(searchPattern).Replace("\\*", ".*?")}$"); + int slashPos = searchPattern.LastIndexOf('/'); + prefix = slashPos >= 0 ? searchPattern.Substring(0, slashPos) : String.Empty; } - public async Task GetPagedFileListAsync(int pageSize = 100, string searchPattern = null, CancellationToken cancellationToken = default) - { - if (pageSize <= 0) - return PagedFileListResult.Empty; + prefix ??= String.Empty; + int pageSize = limit ?? 
Int32.MaxValue; - var criteria = GetRequestCriteria(searchPattern); - var result = new PagedFileListResult(r => Task.FromResult(GetFiles(criteria, 1, pageSize))); - await result.NextPageAsync().AnyContext(); - return result; - } + _logger.LogTrace( + s => s.Property("SearchPattern", searchPattern).Property("Limit", limit).Property("Skip", skip), + "Getting file list matching {Prefix} and {Pattern}...", prefix, patternRegex + ); - private NextPageResult GetFiles(SearchCriteria criteria, int page, int pageSize) - { - int pagingLimit = pageSize; - int skip = (page - 1) * pagingLimit; - if (pagingLimit < Int32.MaxValue) - pagingLimit++; - - _logger.LogTrace( - s => s.Property("Limit", pagingLimit).Property("Skip", skip), - "Getting files matching {Prefix} and {Pattern}...", criteria.Prefix, criteria.Pattern - ); - - var list = Database.HashScan(_fileSpecContainer, $"{criteria.Prefix}*") - .Select(entry => _serializer.Deserialize((byte[])entry.Value)) - .Where(fileSpec => criteria.Pattern == null || criteria.Pattern.IsMatch(fileSpec.Path)) - .Skip(skip) - .Take(pagingLimit) - .ToList(); - - bool hasMore = false; - if (list.Count == pagingLimit) - { - hasMore = true; - list.RemoveAt(pagingLimit - 1); - } + return Task.FromResult(Database.HashScan(_fileSpecContainer, $"{prefix}*") + .Select(entry => _serializer.Deserialize((byte[])entry.Value)) + .Where(fileSpec => patternRegex == null || patternRegex.IsMatch(fileSpec.Path)) + .Take(pageSize) + .ToList() + ); + } - return new NextPageResult - { - Success = true, - HasMore = hasMore, - Files = list, - NextPageFunc = hasMore ? _ => Task.FromResult(GetFiles(criteria, page + 1, pageSize)) : null - }; - } + public async Task GetPagedFileListAsync(int pageSize = 100, string searchPattern = null, CancellationToken cancellationToken = default) + { + if (pageSize <= 0) + return PagedFileListResult.Empty; - private string NormalizePath(string path) - { - return path?.Replace('\\', '/'); - } + var criteria = GetRequestCriteria(searchPattern); + var result = new PagedFileListResult(r => Task.FromResult(GetFiles(criteria, 1, pageSize))); + await result.NextPageAsync().AnyContext(); + return result; + } - private class SearchCriteria + private NextPageResult GetFiles(SearchCriteria criteria, int page, int pageSize) + { + int pagingLimit = pageSize; + int skip = (page - 1) * pagingLimit; + if (pagingLimit < Int32.MaxValue) + pagingLimit++; + + _logger.LogTrace( + s => s.Property("Limit", pagingLimit).Property("Skip", skip), + "Getting files matching {Prefix} and {Pattern}...", criteria.Prefix, criteria.Pattern + ); + + var list = Database.HashScan(_fileSpecContainer, $"{criteria.Prefix}*") + .Select(entry => _serializer.Deserialize((byte[])entry.Value)) + .Where(fileSpec => criteria.Pattern == null || criteria.Pattern.IsMatch(fileSpec.Path)) + .Skip(skip) + .Take(pagingLimit) + .ToList(); + + bool hasMore = false; + if (list.Count == pagingLimit) { - public string Prefix { get; set; } - public Regex Pattern { get; set; } + hasMore = true; + list.RemoveAt(pagingLimit - 1); } - private SearchCriteria GetRequestCriteria(string searchPattern) + return new NextPageResult { - if (String.IsNullOrEmpty(searchPattern)) - return new SearchCriteria { Prefix = String.Empty }; + Success = true, + HasMore = hasMore, + Files = list, + NextPageFunc = hasMore ? 
_ => Task.FromResult(GetFiles(criteria, page + 1, pageSize)) : null + }; + } - string normalizedSearchPattern = NormalizePath(searchPattern); - int wildcardPos = normalizedSearchPattern.IndexOf('*'); - bool hasWildcard = wildcardPos >= 0; + private string NormalizePath(string path) + { + return path?.Replace('\\', '/'); + } - string prefix = normalizedSearchPattern; - Regex patternRegex = null; + private class SearchCriteria + { + public string Prefix { get; set; } + public Regex Pattern { get; set; } + } - if (hasWildcard) - { - patternRegex = new Regex($"^{Regex.Escape(normalizedSearchPattern).Replace("\\*", ".*?")}$"); - int slashPos = normalizedSearchPattern.LastIndexOf('/'); - prefix = slashPos >= 0 ? normalizedSearchPattern.Substring(0, slashPos) : String.Empty; - } + private SearchCriteria GetRequestCriteria(string searchPattern) + { + if (String.IsNullOrEmpty(searchPattern)) + return new SearchCriteria { Prefix = String.Empty }; - return new SearchCriteria - { - Prefix = prefix, - Pattern = patternRegex - }; - } + string normalizedSearchPattern = NormalizePath(searchPattern); + int wildcardPos = normalizedSearchPattern.IndexOf('*'); + bool hasWildcard = wildcardPos >= 0; + + string prefix = normalizedSearchPattern; + Regex patternRegex = null; - private void ConnectionMultiplexerOnConnectionRestored(object sender, ConnectionFailedEventArgs connectionFailedEventArgs) + if (hasWildcard) { - _logger.LogInformation("Redis connection restored"); + patternRegex = new Regex($"^{Regex.Escape(normalizedSearchPattern).Replace("\\*", ".*?")}$"); + int slashPos = normalizedSearchPattern.LastIndexOf('/'); + prefix = slashPos >= 0 ? normalizedSearchPattern.Substring(0, slashPos) : String.Empty; } + + return new SearchCriteria + { + Prefix = prefix, + Pattern = patternRegex + }; + } + + private void ConnectionMultiplexerOnConnectionRestored(object sender, ConnectionFailedEventArgs connectionFailedEventArgs) + { + _logger.LogInformation("Redis connection restored"); } } diff --git a/src/Foundatio.Redis/Storage/RedisFileStorageOptions.cs b/src/Foundatio.Redis/Storage/RedisFileStorageOptions.cs index 091ef98..dba8e62 100644 --- a/src/Foundatio.Redis/Storage/RedisFileStorageOptions.cs +++ b/src/Foundatio.Redis/Storage/RedisFileStorageOptions.cs @@ -1,26 +1,25 @@ using System; using StackExchange.Redis; -namespace Foundatio.Storage +namespace Foundatio.Storage; + +public class RedisFileStorageOptions : SharedOptions { - public class RedisFileStorageOptions : SharedOptions + public IConnectionMultiplexer ConnectionMultiplexer { get; set; } + public string ContainerName { get; set; } = "storage"; +} + +public class RedisFileStorageOptionsBuilder : SharedOptionsBuilder +{ + public RedisFileStorageOptionsBuilder ConnectionMultiplexer(IConnectionMultiplexer connectionMultiplexer) { - public IConnectionMultiplexer ConnectionMultiplexer { get; set; } - public string ContainerName { get; set; } = "storage"; + Target.ConnectionMultiplexer = connectionMultiplexer; + return this; } - public class RedisFileStorageOptionsBuilder : SharedOptionsBuilder + public RedisFileStorageOptionsBuilder ContainerName(string containerName) { - public RedisFileStorageOptionsBuilder ConnectionMultiplexer(IConnectionMultiplexer connectionMultiplexer) - { - Target.ConnectionMultiplexer = connectionMultiplexer; - return this; - } - - public RedisFileStorageOptionsBuilder ContainerName(string containerName) - { - Target.ContainerName = containerName ?? 
throw new ArgumentNullException(nameof(containerName)); - return this; - } + Target.ContainerName = containerName ?? throw new ArgumentNullException(nameof(containerName)); + return this; } } diff --git a/src/Foundatio.Redis/Utility/EmbeddedResourceLoader.cs b/src/Foundatio.Redis/Utility/EmbeddedResourceLoader.cs index e893a51..fa11c5f 100644 --- a/src/Foundatio.Redis/Utility/EmbeddedResourceLoader.cs +++ b/src/Foundatio.Redis/Utility/EmbeddedResourceLoader.cs @@ -1,17 +1,16 @@ using System.IO; using System.Reflection; -namespace Foundatio.Redis.Utility +namespace Foundatio.Redis.Utility; + +internal static class EmbeddedResourceLoader { - internal static class EmbeddedResourceLoader + internal static string GetEmbeddedResource(string name) { - internal static string GetEmbeddedResource(string name) - { - var assembly = typeof(EmbeddedResourceLoader).GetTypeInfo().Assembly; + var assembly = typeof(EmbeddedResourceLoader).GetTypeInfo().Assembly; - using var stream = assembly.GetManifestResourceStream(name); - using var streamReader = new StreamReader(stream); - return streamReader.ReadToEnd(); - } + using var stream = assembly.GetManifestResourceStream(name); + using var streamReader = new StreamReader(stream); + return streamReader.ReadToEnd(); } } diff --git a/tests/Foundatio.Benchmarks/Caching/CacheBenchmarks.cs b/tests/Foundatio.Benchmarks/Caching/CacheBenchmarks.cs index 5c95ce1..d099bfd 100644 --- a/tests/Foundatio.Benchmarks/Caching/CacheBenchmarks.cs +++ b/tests/Foundatio.Benchmarks/Caching/CacheBenchmarks.cs @@ -4,97 +4,96 @@ using Foundatio.Messaging; using StackExchange.Redis; -namespace Foundatio.Benchmarks.Caching +namespace Foundatio.Benchmarks.Caching; + +public class CacheBenchmarks { - public class CacheBenchmarks + private const int ITEM_COUNT = 1000; + private readonly ICacheClient _inMemoryCache = new InMemoryCacheClient(new InMemoryCacheClientOptions()); + private readonly ICacheClient _redisCache; + private readonly ICacheClient _hybridCacheClient; + + public CacheBenchmarks() { - private const int ITEM_COUNT = 1000; - private readonly ICacheClient _inMemoryCache = new InMemoryCacheClient(new InMemoryCacheClientOptions()); - private readonly ICacheClient _redisCache; - private readonly ICacheClient _hybridCacheClient; + var muxer = ConnectionMultiplexer.Connect("localhost"); + _redisCache = new RedisCacheClient(new RedisCacheClientOptions { ConnectionMultiplexer = muxer }); + _redisCache.RemoveAllAsync().GetAwaiter().GetResult(); + _hybridCacheClient = new HybridCacheClient(_redisCache, new RedisMessageBus(new RedisMessageBusOptions { Subscriber = muxer.GetSubscriber(), Topic = "test-cache" })); + } - public CacheBenchmarks() - { - var muxer = ConnectionMultiplexer.Connect("localhost"); - _redisCache = new RedisCacheClient(new RedisCacheClientOptions { ConnectionMultiplexer = muxer }); - _redisCache.RemoveAllAsync().GetAwaiter().GetResult(); - _hybridCacheClient = new HybridCacheClient(_redisCache, new RedisMessageBus(new RedisMessageBusOptions { Subscriber = muxer.GetSubscriber(), Topic = "test-cache" })); - } + [Benchmark] + public void ProcessInMemoryCache() + { + Process(_inMemoryCache); + } - [Benchmark] - public void ProcessInMemoryCache() - { - Process(_inMemoryCache); - } + [Benchmark] + public void ProcessRedisCache() + { + Process(_redisCache); + } - [Benchmark] - public void ProcessRedisCache() - { - Process(_redisCache); - } + [Benchmark] + public void ProcessHybridRedisCache() + { + Process(_hybridCacheClient); + } - [Benchmark] - public void 
ProcessHybridRedisCache() - { - Process(_hybridCacheClient); - } + [Benchmark] + public void ProcessInMemoryCacheWithConstantInvalidation() + { + Process(_inMemoryCache, true); + } - [Benchmark] - public void ProcessInMemoryCacheWithConstantInvalidation() - { - Process(_inMemoryCache, true); - } + [Benchmark] + public void ProcessRedisCacheWithConstantInvalidation() + { + Process(_redisCache, true); + } + + [Benchmark] + public void ProcessHybridRedisCacheWithConstantInvalidation() + { + Process(_hybridCacheClient, true); + } - [Benchmark] - public void ProcessRedisCacheWithConstantInvalidation() + private void Process(ICacheClient cache, bool useSingleKey = false) + { + try { - Process(_redisCache, true); + for (int i = 0; i < ITEM_COUNT; i++) + { + string key = useSingleKey ? "test" : String.Concat("test", i); + cache.SetAsync(key, new CacheItem { Id = i }, TimeSpan.FromHours(1)).GetAwaiter().GetResult(); + } } - - [Benchmark] - public void ProcessHybridRedisCacheWithConstantInvalidation() + catch (Exception ex) { - Process(_hybridCacheClient, true); + Console.WriteLine(ex); } - private void Process(ICacheClient cache, bool useSingleKey = false) + try { - try - { - for (int i = 0; i < ITEM_COUNT; i++) - { - string key = useSingleKey ? "test" : String.Concat("test", i); - cache.SetAsync(key, new CacheItem { Id = i }, TimeSpan.FromHours(1)).GetAwaiter().GetResult(); - } - } - catch (Exception ex) + for (int i = 0; i < ITEM_COUNT; i++) { - Console.WriteLine(ex); + string key = useSingleKey ? "test" : String.Concat("test", i); + var entry = cache.GetAsync(key).GetAwaiter().GetResult(); } - try + for (int i = 0; i < ITEM_COUNT; i++) { - for (int i = 0; i < ITEM_COUNT; i++) - { - string key = useSingleKey ? "test" : String.Concat("test", i); - var entry = cache.GetAsync(key).GetAwaiter().GetResult(); - } - - for (int i = 0; i < ITEM_COUNT; i++) - { - string key = useSingleKey ? "test" : "test0"; - var entry = cache.GetAsync(key).GetAwaiter().GetResult(); - } - } - catch (Exception ex) - { - Console.WriteLine(ex); + string key = useSingleKey ? 
"test" : "test0"; + var entry = cache.GetAsync(key).GetAwaiter().GetResult(); } } + catch (Exception ex) + { + Console.WriteLine(ex); + } } +} - public class CacheItem - { - public int Id { get; set; } - } +public class CacheItem +{ + public int Id { get; set; } } diff --git a/tests/Foundatio.Benchmarks/Program.cs b/tests/Foundatio.Benchmarks/Program.cs index 193af10..3acc126 100644 --- a/tests/Foundatio.Benchmarks/Program.cs +++ b/tests/Foundatio.Benchmarks/Program.cs @@ -5,29 +5,28 @@ using Foundatio.Benchmarks.Caching; using Foundatio.Benchmarks.Queues; -namespace Foundatio.Benchmarks +namespace Foundatio.Benchmarks; + +public class Program { - public class Program + public static void Main(string[] args) { - public static void Main(string[] args) - { - var summary = BenchmarkRunner.Run(); - Console.WriteLine(summary.ToString()); + var summary = BenchmarkRunner.Run(); + Console.WriteLine(summary.ToString()); - summary = BenchmarkRunner.Run(); - Console.WriteLine(summary.ToString()); + summary = BenchmarkRunner.Run(); + Console.WriteLine(summary.ToString()); - summary = BenchmarkRunner.Run(); - Console.WriteLine(summary.ToString()); - Console.ReadKey(); - } + summary = BenchmarkRunner.Run(); + Console.WriteLine(summary.ToString()); + Console.ReadKey(); } +} - public class BenchmarkConfig : ManualConfig +public class BenchmarkConfig : ManualConfig +{ + public BenchmarkConfig() { - public BenchmarkConfig() - { - AddJob(Job.Default.WithWarmupCount(1).WithIterationCount(1)); - } + AddJob(Job.Default.WithWarmupCount(1).WithIterationCount(1)); } } diff --git a/tests/Foundatio.Benchmarks/Queues/JobQueueBenchmarks.cs b/tests/Foundatio.Benchmarks/Queues/JobQueueBenchmarks.cs index c360196..e6eb386 100644 --- a/tests/Foundatio.Benchmarks/Queues/JobQueueBenchmarks.cs +++ b/tests/Foundatio.Benchmarks/Queues/JobQueueBenchmarks.cs @@ -6,74 +6,73 @@ using Microsoft.Extensions.Logging; using StackExchange.Redis; -namespace Foundatio.Benchmarks.Queues +namespace Foundatio.Benchmarks.Queues; + +public class JobQueueBenchmarks { - public class JobQueueBenchmarks - { - private const int ITEM_COUNT = 1000; - private readonly IQueue _inMemoryQueue = new InMemoryQueue(); - private readonly IQueue _redisQueue = new RedisQueue(o => o.ConnectionMultiplexer(ConnectionMultiplexer.Connect("localhost"))); + private const int ITEM_COUNT = 1000; + private readonly IQueue _inMemoryQueue = new InMemoryQueue(); + private readonly IQueue _redisQueue = new RedisQueue(o => o.ConnectionMultiplexer(ConnectionMultiplexer.Connect("localhost"))); - [IterationSetup] - public void Setup() - { - _inMemoryQueue.DeleteQueueAsync().GetAwaiter().GetResult(); - _redisQueue.DeleteQueueAsync().GetAwaiter().GetResult(); - } + [IterationSetup] + public void Setup() + { + _inMemoryQueue.DeleteQueueAsync().GetAwaiter().GetResult(); + _redisQueue.DeleteQueueAsync().GetAwaiter().GetResult(); + } - [IterationSetup(Target = nameof(RunInMemoryJobUntilEmptyAsync))] - public Task EnqueueInMemoryQueueAsync() - { - return EnqueueQueueAsync(_inMemoryQueue); - } + [IterationSetup(Target = nameof(RunInMemoryJobUntilEmptyAsync))] + public Task EnqueueInMemoryQueueAsync() + { + return EnqueueQueueAsync(_inMemoryQueue); + } - [Benchmark] - public Task RunInMemoryJobUntilEmptyAsync() - { - return RunJobUntilEmptyAsync(_inMemoryQueue); - } + [Benchmark] + public Task RunInMemoryJobUntilEmptyAsync() + { + return RunJobUntilEmptyAsync(_inMemoryQueue); + } - [IterationSetup(Target = nameof(RunRedisQueueJobUntilEmptyAsync))] - public Task EnqueueRedisQueueAsync() 
- { - return EnqueueQueueAsync(_redisQueue); - } + [IterationSetup(Target = nameof(RunRedisQueueJobUntilEmptyAsync))] + public Task EnqueueRedisQueueAsync() + { + return EnqueueQueueAsync(_redisQueue); + } - [Benchmark] - public Task RunRedisQueueJobUntilEmptyAsync() - { - return RunJobUntilEmptyAsync(_redisQueue); - } + [Benchmark] + public Task RunRedisQueueJobUntilEmptyAsync() + { + return RunJobUntilEmptyAsync(_redisQueue); + } - private async Task EnqueueQueueAsync(IQueue queue) + private async Task EnqueueQueueAsync(IQueue queue) + { + try { - try - { - for (int i = 0; i < ITEM_COUNT; i++) - await queue.EnqueueAsync(new QueueItem { Id = i }); - } - catch (Exception ex) - { - Console.WriteLine(ex); - } + for (int i = 0; i < ITEM_COUNT; i++) + await queue.EnqueueAsync(new QueueItem { Id = i }); } - - private Task RunJobUntilEmptyAsync(IQueue queue) + catch (Exception ex) { - var job = new BenchmarkJobQueue(queue); - return job.RunUntilEmptyAsync(); + Console.WriteLine(ex); } } - public class BenchmarkJobQueue : QueueJobBase + private Task RunJobUntilEmptyAsync(IQueue queue) { - public BenchmarkJobQueue(Lazy> queue, ILoggerFactory loggerFactory = null) : base(queue, loggerFactory) { } + var job = new BenchmarkJobQueue(queue); + return job.RunUntilEmptyAsync(); + } +} - public BenchmarkJobQueue(IQueue queue, ILoggerFactory loggerFactory = null) : base(queue, loggerFactory) { } +public class BenchmarkJobQueue : QueueJobBase +{ + public BenchmarkJobQueue(Lazy> queue, ILoggerFactory loggerFactory = null) : base(queue, loggerFactory) { } - protected override Task ProcessQueueEntryAsync(QueueEntryContext context) - { - return Task.FromResult(JobResult.Success); - } + public BenchmarkJobQueue(IQueue queue, ILoggerFactory loggerFactory = null) : base(queue, loggerFactory) { } + + protected override Task ProcessQueueEntryAsync(QueueEntryContext context) + { + return Task.FromResult(JobResult.Success); } } diff --git a/tests/Foundatio.Benchmarks/Queues/QueueBenchmarks.cs b/tests/Foundatio.Benchmarks/Queues/QueueBenchmarks.cs index f7114e5..5ef1aab 100644 --- a/tests/Foundatio.Benchmarks/Queues/QueueBenchmarks.cs +++ b/tests/Foundatio.Benchmarks/Queues/QueueBenchmarks.cs @@ -3,74 +3,73 @@ using Foundatio.Queues; using StackExchange.Redis; -namespace Foundatio.Benchmarks.Queues +namespace Foundatio.Benchmarks.Queues; + +public class QueueBenchmarks { - public class QueueBenchmarks + private const int ITEM_COUNT = 1000; + private readonly IQueue _inMemoryQueue = new InMemoryQueue(); + private readonly IQueue _redisQueue = new RedisQueue(o => o.ConnectionMultiplexer(ConnectionMultiplexer.Connect("localhost"))); + + [IterationSetup] + public void Setup() { - private const int ITEM_COUNT = 1000; - private readonly IQueue _inMemoryQueue = new InMemoryQueue(); - private readonly IQueue _redisQueue = new RedisQueue(o => o.ConnectionMultiplexer(ConnectionMultiplexer.Connect("localhost"))); + _inMemoryQueue.DeleteQueueAsync().GetAwaiter().GetResult(); + _redisQueue.DeleteQueueAsync().GetAwaiter().GetResult(); + } - [IterationSetup] - public void Setup() - { - _inMemoryQueue.DeleteQueueAsync().GetAwaiter().GetResult(); - _redisQueue.DeleteQueueAsync().GetAwaiter().GetResult(); - } + [IterationSetup(Target = nameof(DequeueInMemoryQueue))] + [Benchmark] + public void EnqueueInMemoryQueue() + { + EnqueueQueue(_inMemoryQueue); + } - [IterationSetup(Target = nameof(DequeueInMemoryQueue))] - [Benchmark] - public void EnqueueInMemoryQueue() - { - EnqueueQueue(_inMemoryQueue); - } + [Benchmark] + public void 
DequeueInMemoryQueue() + { + DequeueQueue(_inMemoryQueue); + } - [Benchmark] - public void DequeueInMemoryQueue() - { - DequeueQueue(_inMemoryQueue); - } + [IterationSetup(Target = nameof(DequeueRedisQueue))] + [Benchmark] + public void EnqueueRedisQueue() + { + EnqueueQueue(_redisQueue); + } - [IterationSetup(Target = nameof(DequeueRedisQueue))] - [Benchmark] - public void EnqueueRedisQueue() + [Benchmark] + public void DequeueRedisQueue() + { + DequeueQueue(_redisQueue); + } + + private void EnqueueQueue(IQueue queue) + { + try { - EnqueueQueue(_redisQueue); + for (int i = 0; i < ITEM_COUNT; i++) + queue.EnqueueAsync(new QueueItem { Id = i }).GetAwaiter().GetResult(); } - - [Benchmark] - public void DequeueRedisQueue() + catch (Exception ex) { - DequeueQueue(_redisQueue); + Console.WriteLine(ex); } + } - private void EnqueueQueue(IQueue queue) + private void DequeueQueue(IQueue queue) + { + try { - try + for (int i = 0; i < ITEM_COUNT; i++) { - for (int i = 0; i < ITEM_COUNT; i++) - queue.EnqueueAsync(new QueueItem { Id = i }).GetAwaiter().GetResult(); - } - catch (Exception ex) - { - Console.WriteLine(ex); + var entry = queue.DequeueAsync(TimeSpan.Zero).GetAwaiter().GetResult(); + entry.CompleteAsync().GetAwaiter().GetResult(); } } - - private void DequeueQueue(IQueue queue) + catch (Exception ex) { - try - { - for (int i = 0; i < ITEM_COUNT; i++) - { - var entry = queue.DequeueAsync(TimeSpan.Zero).GetAwaiter().GetResult(); - entry.CompleteAsync().GetAwaiter().GetResult(); - } - } - catch (Exception ex) - { - Console.WriteLine(ex); - } + Console.WriteLine(ex); } } } diff --git a/tests/Foundatio.Benchmarks/Queues/QueueItem.cs b/tests/Foundatio.Benchmarks/Queues/QueueItem.cs index 0b2b9f8..e16ca50 100644 --- a/tests/Foundatio.Benchmarks/Queues/QueueItem.cs +++ b/tests/Foundatio.Benchmarks/Queues/QueueItem.cs @@ -1,9 +1,6 @@ -using System; +namespace Foundatio.Benchmarks.Queues; -namespace Foundatio.Benchmarks.Queues +public class QueueItem { - public class QueueItem - { - public int Id { get; set; } - } + public int Id { get; set; } } diff --git a/tests/Foundatio.Redis.Tests/Caching/RedisCacheClientTests.cs b/tests/Foundatio.Redis.Tests/Caching/RedisCacheClientTests.cs index 82c827c..a030257 100644 --- a/tests/Foundatio.Redis.Tests/Caching/RedisCacheClientTests.cs +++ b/tests/Foundatio.Redis.Tests/Caching/RedisCacheClientTests.cs @@ -9,270 +9,269 @@ using Xunit; using Xunit.Abstractions; -namespace Foundatio.Redis.Tests.Caching +namespace Foundatio.Redis.Tests.Caching; + +public class RedisCacheClientTests : CacheClientTestsBase { - public class RedisCacheClientTests : CacheClientTestsBase + public RedisCacheClientTests(ITestOutputHelper output) : base(output) { - public RedisCacheClientTests(ITestOutputHelper output) : base(output) - { - var muxer = SharedConnection.GetMuxer(Log); - muxer.FlushAllAsync().GetAwaiter().GetResult(); - } + var muxer = SharedConnection.GetMuxer(Log); + muxer.FlushAllAsync().GetAwaiter().GetResult(); + } - protected override ICacheClient GetCacheClient(bool shouldThrowOnSerializationError = true) - { - return new RedisCacheClient(o => o.ConnectionMultiplexer(SharedConnection.GetMuxer(Log)).LoggerFactory(Log).ShouldThrowOnSerializationError(shouldThrowOnSerializationError)); - } + protected override ICacheClient GetCacheClient(bool shouldThrowOnSerializationError = true) + { + return new RedisCacheClient(o => o.ConnectionMultiplexer(SharedConnection.GetMuxer(Log)).LoggerFactory(Log).ShouldThrowOnSerializationError(shouldThrowOnSerializationError)); + } - [Fact] 
- public override Task CanGetAllAsync() - { - return base.CanGetAllAsync(); - } + [Fact] + public override Task CanGetAllAsync() + { + return base.CanGetAllAsync(); + } - [Fact] - public override Task CanGetAllWithOverlapAsync() - { - return base.CanGetAllWithOverlapAsync(); - } + [Fact] + public override Task CanGetAllWithOverlapAsync() + { + return base.CanGetAllWithOverlapAsync(); + } - [Fact] - public override Task CanSetAsync() - { - return base.CanSetAsync(); - } + [Fact] + public override Task CanSetAsync() + { + return base.CanSetAsync(); + } - [Fact] - public override Task CanSetAndGetValueAsync() - { - return base.CanSetAndGetValueAsync(); - } + [Fact] + public override Task CanSetAndGetValueAsync() + { + return base.CanSetAndGetValueAsync(); + } - [Fact] - public override Task CanAddAsync() - { - return base.CanAddAsync(); - } + [Fact] + public override Task CanAddAsync() + { + return base.CanAddAsync(); + } - [Fact] - public override Task CanAddConcurrentlyAsync() - { - return base.CanAddConcurrentlyAsync(); - } + [Fact] + public override Task CanAddConcurrentlyAsync() + { + return base.CanAddConcurrentlyAsync(); + } - [Fact] - public override Task CanGetAsync() - { - return base.CanGetAsync(); - } + [Fact] + public override Task CanGetAsync() + { + return base.CanGetAsync(); + } - [Fact] - public override Task CanTryGetAsync() - { - return base.CanTryGetAsync(); - } + [Fact] + public override Task CanTryGetAsync() + { + return base.CanTryGetAsync(); + } - [Fact] - public override Task CanUseScopedCachesAsync() - { - return base.CanUseScopedCachesAsync(); - } + [Fact] + public override Task CanUseScopedCachesAsync() + { + return base.CanUseScopedCachesAsync(); + } - [Fact] - public override Task CanSetAndGetObjectAsync() - { - return base.CanSetAndGetObjectAsync(); - } + [Fact] + public override Task CanSetAndGetObjectAsync() + { + return base.CanSetAndGetObjectAsync(); + } - [Fact] - public override Task CanRemoveByPrefixAsync() - { - return base.CanRemoveByPrefixAsync(); - } + [Fact] + public override Task CanRemoveByPrefixAsync() + { + return base.CanRemoveByPrefixAsync(); + } - [Theory] - [InlineData(50)] - [InlineData(500)] - [InlineData(5000)] - [InlineData(50000)] - public override Task CanRemoveByPrefixMultipleEntriesAsync(int count) - { - return base.CanRemoveByPrefixMultipleEntriesAsync(count); - } + [Theory] + [InlineData(50)] + [InlineData(500)] + [InlineData(5000)] + [InlineData(50000)] + public override Task CanRemoveByPrefixMultipleEntriesAsync(int count) + { + return base.CanRemoveByPrefixMultipleEntriesAsync(count); + } - [Fact] - public override Task CanSetExpirationAsync() - { - return base.CanSetExpirationAsync(); - } + [Fact] + public override Task CanSetExpirationAsync() + { + return base.CanSetExpirationAsync(); + } - [Fact] - public override Task CanIncrementAsync() - { - return base.CanIncrementAsync(); - } + [Fact] + public override Task CanIncrementAsync() + { + return base.CanIncrementAsync(); + } - [Fact] - public override Task CanIncrementAndExpireAsync() - { - return base.CanIncrementAndExpireAsync(); - } + [Fact] + public override Task CanIncrementAndExpireAsync() + { + return base.CanIncrementAndExpireAsync(); + } - [Fact] - public override Task CanGetAndSetDateTimeAsync() - { - return base.CanGetAndSetDateTimeAsync(); - } + [Fact] + public override Task CanGetAndSetDateTimeAsync() + { + return base.CanGetAndSetDateTimeAsync(); + } - [Fact] - public override Task CanRemoveIfEqual() - { - return base.CanRemoveIfEqual(); - } + [Fact] + public 
override Task CanRemoveIfEqual() + { + return base.CanRemoveIfEqual(); + } - [Fact] - public override Task CanReplaceIfEqual() - { - return base.CanReplaceIfEqual(); - } + [Fact] + public override Task CanReplaceIfEqual() + { + return base.CanReplaceIfEqual(); + } - [Fact] - public override Task CanRoundTripLargeNumbersAsync() - { - return base.CanRoundTripLargeNumbersAsync(); - } + [Fact] + public override Task CanRoundTripLargeNumbersAsync() + { + return base.CanRoundTripLargeNumbersAsync(); + } - [Fact] - public override Task CanRoundTripLargeNumbersWithExpirationAsync() - { - return base.CanRoundTripLargeNumbersWithExpirationAsync(); - } + [Fact] + public override Task CanRoundTripLargeNumbersWithExpirationAsync() + { + return base.CanRoundTripLargeNumbersWithExpirationAsync(); + } - [Fact] - public override Task CanManageListsAsync() - { - return base.CanManageListsAsync(); - } + [Fact] + public override Task CanManageListsAsync() + { + return base.CanManageListsAsync(); + } - [Fact] - public async Task CanUpgradeListType() - { - var db = SharedConnection.GetMuxer(Log).GetDatabase(); - var cache = GetCacheClient(); - if (cache == null) - return; + [Fact] + public async Task CanUpgradeListType() + { + var db = SharedConnection.GetMuxer(Log).GetDatabase(); + var cache = GetCacheClient(); + if (cache == null) + return; - using (cache) - { - var items = new List(); - for (int i = 1; i < 20001; i++) - items.Add(Guid.NewGuid().ToString()); + using (cache) + { + var items = new List(); + for (int i = 1; i < 20001; i++) + items.Add(Guid.NewGuid().ToString()); - await cache.RemoveAllAsync(); + await cache.RemoveAllAsync(); - await db.SetAddAsync("mylist", items.ToArray()); - await cache.ListAddAsync("mylist", new[] { "newitem1", "newitem2" }); - await cache.ListAddAsync("mylist", "newitem3"); + await db.SetAddAsync("mylist", items.ToArray()); + await cache.ListAddAsync("mylist", new[] { "newitem1", "newitem2" }); + await cache.ListAddAsync("mylist", "newitem3"); - var listItems = await cache.GetListAsync("mylist"); - Assert.Equal(items.Count + 3, listItems.Value.Count); + var listItems = await cache.GetListAsync("mylist"); + Assert.Equal(items.Count + 3, listItems.Value.Count); - await cache.RemoveAllAsync(); + await cache.RemoveAllAsync(); - await db.SetAddAsync("mylist", items.ToArray()); - await cache.ListRemoveAsync("mylist", (string)items[10]); + await db.SetAddAsync("mylist", items.ToArray()); + await cache.ListRemoveAsync("mylist", (string)items[10]); - listItems = await cache.GetListAsync("mylist"); - Assert.Equal(items.Count - 1, listItems.Value.Count); - } + listItems = await cache.GetListAsync("mylist"); + Assert.Equal(items.Count - 1, listItems.Value.Count); } + } - [Fact] - public async Task CanManageLargeListsAsync() - { - var cache = GetCacheClient(); - if (cache == null) - return; - - using (cache) - { - await cache.RemoveAllAsync(); - - var items = new List(); - // test paging through items in list - for (int i = 1; i < 20001; i++) - items.Add(Guid.NewGuid().ToString()); - - foreach (var batch in Batch(items, 1000)) - await cache.ListAddAsync("largelist", batch); - - var pagedResult = await cache.GetListAsync("largelist", 1, 5); - Assert.NotNull(pagedResult); - Assert.Equal(5, pagedResult.Value.Count); - Assert.Equal(pagedResult.Value.ToArray(), new[] { items[0], items[1], items[2], items[3], items[4] }); - - pagedResult = await cache.GetListAsync("largelist", 2, 5); - Assert.NotNull(pagedResult); - Assert.Equal(5, pagedResult.Value.Count); - 
Assert.Equal(pagedResult.Value.ToArray(), new[] { items[5], items[6], items[7], items[8], items[9] }); - - string newGuid1 = Guid.NewGuid().ToString(); - string newGuid2 = Guid.NewGuid().ToString(); - await cache.ListAddAsync("largelist", new[] { newGuid1, newGuid2 }); - - int page = (20000 / 5) + 1; - pagedResult = await cache.GetListAsync("largelist", page, 5); - Assert.NotNull(pagedResult); - Assert.Equal(2, pagedResult.Value.Count); - Assert.Equal(pagedResult.Value.ToArray(), new[] { newGuid1, newGuid2 }); - - long result = await cache.ListAddAsync("largelist", Guid.NewGuid().ToString()); - Assert.Equal(1, result); - - result = await cache.ListRemoveAsync("largelist", items[1]); - Assert.Equal(1, result); - - pagedResult = await cache.GetListAsync("largelist", 1, 5); - Assert.NotNull(pagedResult); - Assert.Equal(5, pagedResult.Value.Count); - Assert.Equal(pagedResult.Value.ToArray(), new[] { items[0], items[2], items[3], items[4], items[5] }); - } - } + [Fact] + public async Task CanManageLargeListsAsync() + { + var cache = GetCacheClient(); + if (cache == null) + return; - [Fact(Skip = "Performance Test")] - public override Task MeasureThroughputAsync() + using (cache) { - return base.MeasureThroughputAsync(); + await cache.RemoveAllAsync(); + + var items = new List(); + // test paging through items in list + for (int i = 1; i < 20001; i++) + items.Add(Guid.NewGuid().ToString()); + + foreach (var batch in Batch(items, 1000)) + await cache.ListAddAsync("largelist", batch); + + var pagedResult = await cache.GetListAsync("largelist", 1, 5); + Assert.NotNull(pagedResult); + Assert.Equal(5, pagedResult.Value.Count); + Assert.Equal(pagedResult.Value.ToArray(), new[] { items[0], items[1], items[2], items[3], items[4] }); + + pagedResult = await cache.GetListAsync("largelist", 2, 5); + Assert.NotNull(pagedResult); + Assert.Equal(5, pagedResult.Value.Count); + Assert.Equal(pagedResult.Value.ToArray(), new[] { items[5], items[6], items[7], items[8], items[9] }); + + string newGuid1 = Guid.NewGuid().ToString(); + string newGuid2 = Guid.NewGuid().ToString(); + await cache.ListAddAsync("largelist", new[] { newGuid1, newGuid2 }); + + int page = (20000 / 5) + 1; + pagedResult = await cache.GetListAsync("largelist", page, 5); + Assert.NotNull(pagedResult); + Assert.Equal(2, pagedResult.Value.Count); + Assert.Equal(pagedResult.Value.ToArray(), new[] { newGuid1, newGuid2 }); + + long result = await cache.ListAddAsync("largelist", Guid.NewGuid().ToString()); + Assert.Equal(1, result); + + result = await cache.ListRemoveAsync("largelist", items[1]); + Assert.Equal(1, result); + + pagedResult = await cache.GetListAsync("largelist", 1, 5); + Assert.NotNull(pagedResult); + Assert.Equal(5, pagedResult.Value.Count); + Assert.Equal(pagedResult.Value.ToArray(), new[] { items[0], items[2], items[3], items[4], items[5] }); } + } - [Fact(Skip = "Performance Test")] - public override Task MeasureSerializerSimpleThroughputAsync() - { - return base.MeasureSerializerSimpleThroughputAsync(); - } + [Fact(Skip = "Performance Test")] + public override Task MeasureThroughputAsync() + { + return base.MeasureThroughputAsync(); + } + + [Fact(Skip = "Performance Test")] + public override Task MeasureSerializerSimpleThroughputAsync() + { + return base.MeasureSerializerSimpleThroughputAsync(); + } - [Fact(Skip = "Performance Test")] - public override Task MeasureSerializerComplexThroughputAsync() + [Fact(Skip = "Performance Test")] + public override Task MeasureSerializerComplexThroughputAsync() + { + return 
base.MeasureSerializerComplexThroughputAsync(); + } + + private IEnumerable> Batch(IList source, int size) + { + if (size <= 0) throw new ArgumentOutOfRangeException(nameof(size)); + var enumerator = source.GetEnumerator(); + for (int i = 0; i < source.Count; i += size) { - return base.MeasureSerializerComplexThroughputAsync(); + enumerator.MoveNext(); + yield return GetChunk(i, Math.Min(i + size, source.Count)); } - - private IEnumerable> Batch(IList source, int size) + IEnumerable GetChunk(int from, int toExclusive) { - if (size <= 0) throw new ArgumentOutOfRangeException(nameof(size)); - var enumerator = source.GetEnumerator(); - for (int i = 0; i < source.Count; i += size) + for (int j = from; j < toExclusive; j++) { enumerator.MoveNext(); - yield return GetChunk(i, Math.Min(i + size, source.Count)); - } - IEnumerable GetChunk(int from, int toExclusive) - { - for (int j = from; j < toExclusive; j++) - { - enumerator.MoveNext(); - yield return source[j]; - } + yield return source[j]; } } } diff --git a/tests/Foundatio.Redis.Tests/Caching/RedisHybridCacheClientTests.cs b/tests/Foundatio.Redis.Tests/Caching/RedisHybridCacheClientTests.cs index 65871a2..d519919 100644 --- a/tests/Foundatio.Redis.Tests/Caching/RedisHybridCacheClientTests.cs +++ b/tests/Foundatio.Redis.Tests/Caching/RedisHybridCacheClientTests.cs @@ -1,5 +1,4 @@ -using System; -using System.Threading.Tasks; +using System.Threading.Tasks; using Foundatio.Caching; using Foundatio.Redis.Tests.Extensions; using Foundatio.Tests.Caching; @@ -7,113 +6,112 @@ using Xunit; using Xunit.Abstractions; -namespace Foundatio.Redis.Tests.Caching +namespace Foundatio.Redis.Tests.Caching; + +public class RedisHybridCacheClientTests : HybridCacheClientTests { - public class RedisHybridCacheClientTests : HybridCacheClientTests - { - public RedisHybridCacheClientTests(ITestOutputHelper output) : base(output) - { - var muxer = SharedConnection.GetMuxer(Log); - muxer.FlushAllAsync().GetAwaiter().GetResult(); - } - - protected override ICacheClient GetCacheClient(bool shouldThrowOnSerializationError = true) - { - return new RedisHybridCacheClient(o => o - .ConnectionMultiplexer(SharedConnection.GetMuxer(Log)) - .LoggerFactory(Log).ShouldThrowOnSerializationError(shouldThrowOnSerializationError), - localConfig => localConfig - .CloneValues(true) - .ShouldThrowOnSerializationError(shouldThrowOnSerializationError)); - } - - [Fact] - public override Task CanSetAndGetValueAsync() - { - return base.CanSetAndGetValueAsync(); - } - - [Fact] - public override Task CanSetAndGetObjectAsync() - { - return base.CanSetAndGetObjectAsync(); - } - - [Fact] - public override Task CanTryGetAsync() - { - return base.CanTryGetAsync(); - } - - [Fact] - public override Task CanRemoveByPrefixAsync() - { - return base.CanRemoveByPrefixAsync(); - } - - [Theory] - [InlineData(50)] - [InlineData(500)] - [InlineData(5000)] - [InlineData(50000)] - public override Task CanRemoveByPrefixMultipleEntriesAsync(int count) - { - return base.CanRemoveByPrefixMultipleEntriesAsync(count); - } - - [Fact] - public override Task CanUseScopedCachesAsync() - { - return base.CanUseScopedCachesAsync(); - } - - [Fact] - public override Task CanSetExpirationAsync() - { - return base.CanSetExpirationAsync(); - } - - [Fact] - public override Task CanManageListsAsync() - { - return base.CanManageListsAsync(); - } - - [Fact] - public override Task WillUseLocalCache() - { - return base.WillUseLocalCache(); - } - - [Fact(Skip = "Skipping for now until we figure out a timing issue")] - public override 
Task WillExpireRemoteItems() - { - Log.MinimumLevel = LogLevel.Trace; - return base.WillExpireRemoteItems(); - } - - [Fact] - public override Task WillWorkWithSets() - { - return base.WillWorkWithSets(); - } - - [Fact(Skip = "Performance Test")] - public override Task MeasureThroughputAsync() - { - return base.MeasureThroughputAsync(); - } - - [Fact(Skip = "Performance Test")] - public override Task MeasureSerializerSimpleThroughputAsync() - { - return base.MeasureSerializerSimpleThroughputAsync(); - } - - [Fact(Skip = "Performance Test")] - public override Task MeasureSerializerComplexThroughputAsync() - { - return base.MeasureSerializerComplexThroughputAsync(); - } + public RedisHybridCacheClientTests(ITestOutputHelper output) : base(output) + { + var muxer = SharedConnection.GetMuxer(Log); + muxer.FlushAllAsync().GetAwaiter().GetResult(); + } + + protected override ICacheClient GetCacheClient(bool shouldThrowOnSerializationError = true) + { + return new RedisHybridCacheClient(o => o + .ConnectionMultiplexer(SharedConnection.GetMuxer(Log)) + .LoggerFactory(Log).ShouldThrowOnSerializationError(shouldThrowOnSerializationError), + localConfig => localConfig + .CloneValues(true) + .ShouldThrowOnSerializationError(shouldThrowOnSerializationError)); + } + + [Fact] + public override Task CanSetAndGetValueAsync() + { + return base.CanSetAndGetValueAsync(); + } + + [Fact] + public override Task CanSetAndGetObjectAsync() + { + return base.CanSetAndGetObjectAsync(); + } + + [Fact] + public override Task CanTryGetAsync() + { + return base.CanTryGetAsync(); + } + + [Fact] + public override Task CanRemoveByPrefixAsync() + { + return base.CanRemoveByPrefixAsync(); + } + + [Theory] + [InlineData(50)] + [InlineData(500)] + [InlineData(5000)] + [InlineData(50000)] + public override Task CanRemoveByPrefixMultipleEntriesAsync(int count) + { + return base.CanRemoveByPrefixMultipleEntriesAsync(count); + } + + [Fact] + public override Task CanUseScopedCachesAsync() + { + return base.CanUseScopedCachesAsync(); + } + + [Fact] + public override Task CanSetExpirationAsync() + { + return base.CanSetExpirationAsync(); + } + + [Fact] + public override Task CanManageListsAsync() + { + return base.CanManageListsAsync(); + } + + [Fact] + public override Task WillUseLocalCache() + { + return base.WillUseLocalCache(); + } + + [Fact(Skip = "Skipping for now until we figure out a timing issue")] + public override Task WillExpireRemoteItems() + { + Log.MinimumLevel = LogLevel.Trace; + return base.WillExpireRemoteItems(); + } + + [Fact] + public override Task WillWorkWithSets() + { + return base.WillWorkWithSets(); + } + + [Fact(Skip = "Performance Test")] + public override Task MeasureThroughputAsync() + { + return base.MeasureThroughputAsync(); + } + + [Fact(Skip = "Performance Test")] + public override Task MeasureSerializerSimpleThroughputAsync() + { + return base.MeasureSerializerSimpleThroughputAsync(); + } + + [Fact(Skip = "Performance Test")] + public override Task MeasureSerializerComplexThroughputAsync() + { + return base.MeasureSerializerComplexThroughputAsync(); } } diff --git a/tests/Foundatio.Redis.Tests/Extensions/ConnectionMuliplexerExtensions.cs b/tests/Foundatio.Redis.Tests/Extensions/ConnectionMuliplexerExtensions.cs index 702abdd..72ef254 100644 --- a/tests/Foundatio.Redis.Tests/Extensions/ConnectionMuliplexerExtensions.cs +++ b/tests/Foundatio.Redis.Tests/Extensions/ConnectionMuliplexerExtensions.cs @@ -1,40 +1,38 @@ -using System; -using System.Threading.Tasks; +using System.Threading.Tasks; using 
StackExchange.Redis; -namespace Foundatio.Redis.Tests.Extensions +namespace Foundatio.Redis.Tests.Extensions; + +public static class ConnectionMultiplexerExtensions { - public static class ConnectionMultiplexerExtensions + public static async Task FlushAllAsync(this ConnectionMultiplexer muxer) { - public static async Task FlushAllAsync(this ConnectionMultiplexer muxer) - { - var endpoints = muxer.GetEndPoints(); - if (endpoints.Length == 0) - return; + var endpoints = muxer.GetEndPoints(); + if (endpoints.Length == 0) + return; - foreach (var endpoint in endpoints) - { - var server = muxer.GetServer(endpoint); - if (!server.IsReplica) - await server.FlushAllDatabasesAsync(); - } - } - - public static async Task CountAllKeysAsync(this ConnectionMultiplexer muxer) + foreach (var endpoint in endpoints) { - var endpoints = muxer.GetEndPoints(); - if (endpoints.Length == 0) - return 0; + var server = muxer.GetServer(endpoint); + if (!server.IsReplica) + await server.FlushAllDatabasesAsync(); + } + } - long count = 0; - foreach (var endpoint in endpoints) - { - var server = muxer.GetServer(endpoint); - if (!server.IsReplica) - count += await server.DatabaseSizeAsync(); - } + public static async Task CountAllKeysAsync(this ConnectionMultiplexer muxer) + { + var endpoints = muxer.GetEndPoints(); + if (endpoints.Length == 0) + return 0; - return count; + long count = 0; + foreach (var endpoint in endpoints) + { + var server = muxer.GetServer(endpoint); + if (!server.IsReplica) + count += await server.DatabaseSizeAsync(); } + + return count; } } diff --git a/tests/Foundatio.Redis.Tests/Jobs/RedisJobQueueTests.cs b/tests/Foundatio.Redis.Tests/Jobs/RedisJobQueueTests.cs index 58ed966..43229bc 100644 --- a/tests/Foundatio.Redis.Tests/Jobs/RedisJobQueueTests.cs +++ b/tests/Foundatio.Redis.Tests/Jobs/RedisJobQueueTests.cs @@ -6,42 +6,41 @@ using Xunit; using Xunit.Abstractions; -namespace Foundatio.Redis.Tests.Jobs +namespace Foundatio.Redis.Tests.Jobs; + +public class RedisJobQueueTests : JobQueueTestsBase { - public class RedisJobQueueTests : JobQueueTestsBase + public RedisJobQueueTests(ITestOutputHelper output) : base(output) { - public RedisJobQueueTests(ITestOutputHelper output) : base(output) - { - var muxer = SharedConnection.GetMuxer(Log); - muxer.FlushAllAsync().GetAwaiter().GetResult(); - } + var muxer = SharedConnection.GetMuxer(Log); + muxer.FlushAllAsync().GetAwaiter().GetResult(); + } - protected override IQueue GetSampleWorkItemQueue(int retries, TimeSpan retryDelay) - { - return new RedisQueue(o => o - .ConnectionMultiplexer(SharedConnection.GetMuxer(Log)) - .Retries(retries) - .RetryDelay(retryDelay) - .LoggerFactory(Log) - ); - } + protected override IQueue GetSampleWorkItemQueue(int retries, TimeSpan retryDelay) + { + return new RedisQueue(o => o + .ConnectionMultiplexer(SharedConnection.GetMuxer(Log)) + .Retries(retries) + .RetryDelay(retryDelay) + .LoggerFactory(Log) + ); + } - [Fact] - public override Task CanRunMultipleQueueJobsAsync() - { - return base.CanRunMultipleQueueJobsAsync(); - } + [Fact] + public override Task CanRunMultipleQueueJobsAsync() + { + return base.CanRunMultipleQueueJobsAsync(); + } - [Fact] - public override Task CanRunQueueJobWithLockFailAsync() - { - return base.CanRunQueueJobWithLockFailAsync(); - } + [Fact] + public override Task CanRunQueueJobWithLockFailAsync() + { + return base.CanRunQueueJobWithLockFailAsync(); + } - [Fact] - public override Task CanRunQueueJobAsync() - { - return base.CanRunQueueJobAsync(); - } + [Fact] + public override Task 
CanRunQueueJobAsync() + { + return base.CanRunQueueJobAsync(); } } diff --git a/tests/Foundatio.Redis.Tests/Locks/RedisLockTests.cs b/tests/Foundatio.Redis.Tests/Locks/RedisLockTests.cs index 4fe7301..3e646f0 100644 --- a/tests/Foundatio.Redis.Tests/Locks/RedisLockTests.cs +++ b/tests/Foundatio.Redis.Tests/Locks/RedisLockTests.cs @@ -12,103 +12,102 @@ using Xunit; using Xunit.Abstractions; -namespace Foundatio.Redis.Tests.Locks +namespace Foundatio.Redis.Tests.Locks; + +public class RedisLockTests : LockTestBase, IDisposable { - public class RedisLockTests : LockTestBase, IDisposable + private readonly ICacheClient _cache; + private readonly IMessageBus _messageBus; + + public RedisLockTests(ITestOutputHelper output) : base(output) + { + var muxer = SharedConnection.GetMuxer(Log); + muxer.FlushAllAsync().GetAwaiter().GetResult(); + _cache = new RedisCacheClient(o => o.ConnectionMultiplexer(muxer).LoggerFactory(Log)); + _messageBus = new RedisMessageBus(o => o.Subscriber(muxer.GetSubscriber()).Topic("test-lock").LoggerFactory(Log)); + } + + protected override ILockProvider GetThrottlingLockProvider(int maxHits, TimeSpan period) + { + return new ThrottlingLockProvider(_cache, maxHits, period, Log); + } + + protected override ILockProvider GetLockProvider() + { + return new CacheLockProvider(_cache, _messageBus, Log); + } + + [Fact] + public override Task CanAcquireLocksInParallel() + { + return base.CanAcquireLocksInParallel(); + } + + [Fact] + public override Task CanAcquireAndReleaseLockAsync() + { + return base.CanAcquireAndReleaseLockAsync(); + } + + [Fact] + public override Task LockWillTimeoutAsync() + { + return base.LockWillTimeoutAsync(); + } + + [Fact] + public async Task LockWontTimeoutEarly() + { + Log.SetLogLevel(LogLevel.Trace); + Log.SetLogLevel(LogLevel.Trace); + Log.SetLogLevel(LogLevel.Trace); + + var locker = GetLockProvider(); + if (locker == null) + return; + + _logger.LogInformation("Acquiring lock #1"); + var testLock = await locker.AcquireAsync("test", timeUntilExpires: TimeSpan.FromSeconds(1)); + _logger.LogInformation(testLock != null ? "Acquired lock #1" : "Unable to acquire lock #1"); + Assert.NotNull(testLock); + + _logger.LogInformation("Acquiring lock #2"); + var testLock2 = await locker.AcquireAsync("test", acquireTimeout: TimeSpan.FromMilliseconds(500)); + Assert.Null(testLock2); + + _logger.LogInformation("Renew lock #1"); + await testLock.RenewAsync(timeUntilExpires: TimeSpan.FromSeconds(1)); + + _logger.LogInformation("Acquiring lock #3"); + testLock = await locker.AcquireAsync("test", acquireTimeout: TimeSpan.FromMilliseconds(500)); + Assert.Null(testLock); + + var sw = Stopwatch.StartNew(); + _logger.LogInformation("Acquiring lock #4"); + testLock = await locker.AcquireAsync("test", acquireTimeout: TimeSpan.FromSeconds(5)); + sw.Stop(); + _logger.LogInformation(testLock != null ? 
"Acquired lock #3" : "Unable to acquire lock #4"); + Assert.NotNull(testLock); + Assert.True(sw.ElapsedMilliseconds > 400); + } + + [RetryFact] + public override Task WillThrottleCallsAsync() + { + return base.WillThrottleCallsAsync(); + } + + [Fact] + public override Task LockOneAtATimeAsync() + { + return base.LockOneAtATimeAsync(); + } + + public void Dispose() { - private readonly ICacheClient _cache; - private readonly IMessageBus _messageBus; - - public RedisLockTests(ITestOutputHelper output) : base(output) - { - var muxer = SharedConnection.GetMuxer(Log); - muxer.FlushAllAsync().GetAwaiter().GetResult(); - _cache = new RedisCacheClient(o => o.ConnectionMultiplexer(muxer).LoggerFactory(Log)); - _messageBus = new RedisMessageBus(o => o.Subscriber(muxer.GetSubscriber()).Topic("test-lock").LoggerFactory(Log)); - } - - protected override ILockProvider GetThrottlingLockProvider(int maxHits, TimeSpan period) - { - return new ThrottlingLockProvider(_cache, maxHits, period, Log); - } - - protected override ILockProvider GetLockProvider() - { - return new CacheLockProvider(_cache, _messageBus, Log); - } - - [Fact] - public override Task CanAcquireLocksInParallel() - { - return base.CanAcquireLocksInParallel(); - } - - [Fact] - public override Task CanAcquireAndReleaseLockAsync() - { - return base.CanAcquireAndReleaseLockAsync(); - } - - [Fact] - public override Task LockWillTimeoutAsync() - { - return base.LockWillTimeoutAsync(); - } - - [Fact] - public async Task LockWontTimeoutEarly() - { - Log.SetLogLevel(LogLevel.Trace); - Log.SetLogLevel(LogLevel.Trace); - Log.SetLogLevel(LogLevel.Trace); - - var locker = GetLockProvider(); - if (locker == null) - return; - - _logger.LogInformation("Acquiring lock #1"); - var testLock = await locker.AcquireAsync("test", timeUntilExpires: TimeSpan.FromSeconds(1)); - _logger.LogInformation(testLock != null ? "Acquired lock #1" : "Unable to acquire lock #1"); - Assert.NotNull(testLock); - - _logger.LogInformation("Acquiring lock #2"); - var testLock2 = await locker.AcquireAsync("test", acquireTimeout: TimeSpan.FromMilliseconds(500)); - Assert.Null(testLock2); - - _logger.LogInformation("Renew lock #1"); - await testLock.RenewAsync(timeUntilExpires: TimeSpan.FromSeconds(1)); - - _logger.LogInformation("Acquiring lock #3"); - testLock = await locker.AcquireAsync("test", acquireTimeout: TimeSpan.FromMilliseconds(500)); - Assert.Null(testLock); - - var sw = Stopwatch.StartNew(); - _logger.LogInformation("Acquiring lock #4"); - testLock = await locker.AcquireAsync("test", acquireTimeout: TimeSpan.FromSeconds(5)); - sw.Stop(); - _logger.LogInformation(testLock != null ? 
"Acquired lock #3" : "Unable to acquire lock #4"); - Assert.NotNull(testLock); - Assert.True(sw.ElapsedMilliseconds > 400); - } - - [RetryFact] - public override Task WillThrottleCallsAsync() - { - return base.WillThrottleCallsAsync(); - } - - [Fact] - public override Task LockOneAtATimeAsync() - { - return base.LockOneAtATimeAsync(); - } - - public void Dispose() - { - _cache.Dispose(); - _messageBus.Dispose(); - var muxer = SharedConnection.GetMuxer(Log); - muxer.FlushAllAsync().GetAwaiter().GetResult(); - } + _cache.Dispose(); + _messageBus.Dispose(); + var muxer = SharedConnection.GetMuxer(Log); + muxer.FlushAllAsync().GetAwaiter().GetResult(); } } diff --git a/tests/Foundatio.Redis.Tests/Messaging/RedisMessageBusTests.cs b/tests/Foundatio.Redis.Tests/Messaging/RedisMessageBusTests.cs index e946645..7655d08 100644 --- a/tests/Foundatio.Redis.Tests/Messaging/RedisMessageBusTests.cs +++ b/tests/Foundatio.Redis.Tests/Messaging/RedisMessageBusTests.cs @@ -12,181 +12,180 @@ using Xunit; using Xunit.Abstractions; -namespace Foundatio.Redis.Tests.Messaging +namespace Foundatio.Redis.Tests.Messaging; + +public class RedisMessageBusTests : MessageBusTestBase { - public class RedisMessageBusTests : MessageBusTestBase + public RedisMessageBusTests(ITestOutputHelper output) : base(output) { - public RedisMessageBusTests(ITestOutputHelper output) : base(output) - { - var muxer = SharedConnection.GetMuxer(Log); - muxer.FlushAllAsync().GetAwaiter().GetResult(); - } + var muxer = SharedConnection.GetMuxer(Log); + muxer.FlushAllAsync().GetAwaiter().GetResult(); + } - protected override IMessageBus GetMessageBus(Func config = null) + protected override IMessageBus GetMessageBus(Func config = null) + { + return new RedisMessageBus(o => { - return new RedisMessageBus(o => - { - o.Subscriber(SharedConnection.GetMuxer(Log).GetSubscriber()); - o.Topic("test-messages"); - o.LoggerFactory(Log); - if (config != null) - config(o.Target); - - return o; - }); - } + o.Subscriber(SharedConnection.GetMuxer(Log).GetSubscriber()); + o.Topic("test-messages"); + o.LoggerFactory(Log); + if (config != null) + config(o.Target); - [Fact] - public override Task CanUseMessageOptionsAsync() - { - return base.CanUseMessageOptionsAsync(); - } + return o; + }); + } - [Fact] - public override Task CanSendMessageAsync() - { - return base.CanSendMessageAsync(); - } + [Fact] + public override Task CanUseMessageOptionsAsync() + { + return base.CanUseMessageOptionsAsync(); + } - [Fact] - public override Task CanHandleNullMessageAsync() - { - return base.CanHandleNullMessageAsync(); - } + [Fact] + public override Task CanSendMessageAsync() + { + return base.CanSendMessageAsync(); + } - [Fact] - public override Task CanSendDerivedMessageAsync() - { - return base.CanSendDerivedMessageAsync(); - } + [Fact] + public override Task CanHandleNullMessageAsync() + { + return base.CanHandleNullMessageAsync(); + } - [Fact] - public override Task CanSendMappedMessageAsync() - { - return base.CanSendMappedMessageAsync(); - } + [Fact] + public override Task CanSendDerivedMessageAsync() + { + return base.CanSendDerivedMessageAsync(); + } - [Fact] - public override Task CanSendDelayedMessageAsync() - { - return base.CanSendDelayedMessageAsync(); - } + [Fact] + public override Task CanSendMappedMessageAsync() + { + return base.CanSendMappedMessageAsync(); + } - [Fact] - public override Task CanSubscribeConcurrentlyAsync() - { - return base.CanSubscribeConcurrentlyAsync(); - } + [Fact] + public override Task CanSendDelayedMessageAsync() + { + return 
base.CanSendDelayedMessageAsync(); + } - [Fact] - public override Task CanReceiveMessagesConcurrentlyAsync() - { - return base.CanReceiveMessagesConcurrentlyAsync(); - } + [Fact] + public override Task CanSubscribeConcurrentlyAsync() + { + return base.CanSubscribeConcurrentlyAsync(); + } - [Fact] - public override Task CanSendMessageToMultipleSubscribersAsync() - { - return base.CanSendMessageToMultipleSubscribersAsync(); - } + [Fact] + public override Task CanReceiveMessagesConcurrentlyAsync() + { + return base.CanReceiveMessagesConcurrentlyAsync(); + } - [Fact] - public override Task CanTolerateSubscriberFailureAsync() - { - return base.CanTolerateSubscriberFailureAsync(); - } + [Fact] + public override Task CanSendMessageToMultipleSubscribersAsync() + { + return base.CanSendMessageToMultipleSubscribersAsync(); + } - [Fact] - public override Task WillOnlyReceiveSubscribedMessageTypeAsync() - { - return base.WillOnlyReceiveSubscribedMessageTypeAsync(); - } + [Fact] + public override Task CanTolerateSubscriberFailureAsync() + { + return base.CanTolerateSubscriberFailureAsync(); + } - [Fact] - public override Task WillReceiveDerivedMessageTypesAsync() - { - return base.WillReceiveDerivedMessageTypesAsync(); - } + [Fact] + public override Task WillOnlyReceiveSubscribedMessageTypeAsync() + { + return base.WillOnlyReceiveSubscribedMessageTypeAsync(); + } - [Fact] - public override Task CanSubscribeToAllMessageTypesAsync() - { - return base.CanSubscribeToAllMessageTypesAsync(); - } + [Fact] + public override Task WillReceiveDerivedMessageTypesAsync() + { + return base.WillReceiveDerivedMessageTypesAsync(); + } - [Fact] - public override Task CanSubscribeToRawMessagesAsync() - { - return base.CanSubscribeToRawMessagesAsync(); - } + [Fact] + public override Task CanSubscribeToAllMessageTypesAsync() + { + return base.CanSubscribeToAllMessageTypesAsync(); + } - [Fact] - public override Task CanCancelSubscriptionAsync() - { - return base.CanCancelSubscriptionAsync(); - } + [Fact] + public override Task CanSubscribeToRawMessagesAsync() + { + return base.CanSubscribeToRawMessagesAsync(); + } - [Fact] - public override Task WontKeepMessagesWithNoSubscribersAsync() - { - return base.WontKeepMessagesWithNoSubscribersAsync(); - } + [Fact] + public override Task CanCancelSubscriptionAsync() + { + return base.CanCancelSubscriptionAsync(); + } - [Fact] - public override Task CanReceiveFromMultipleSubscribersAsync() - { - return base.CanReceiveFromMultipleSubscribersAsync(); - } + [Fact] + public override Task WontKeepMessagesWithNoSubscribersAsync() + { + return base.WontKeepMessagesWithNoSubscribersAsync(); + } - [Fact] - public override void CanDisposeWithNoSubscribersOrPublishers() - { - base.CanDisposeWithNoSubscribersOrPublishers(); - } + [Fact] + public override Task CanReceiveFromMultipleSubscribersAsync() + { + return base.CanReceiveFromMultipleSubscribersAsync(); + } - [Fact] - public async Task CanDisposeCacheAndQueueAndReceiveSubscribedMessages() - { - var muxer = SharedConnection.GetMuxer(Log); - var messageBus1 = new RedisMessageBus(new RedisMessageBusOptions { Subscriber = muxer.GetSubscriber(), Topic = "test-messages", LoggerFactory = Log }); + [Fact] + public override void CanDisposeWithNoSubscribersOrPublishers() + { + base.CanDisposeWithNoSubscribersOrPublishers(); + } + + [Fact] + public async Task CanDisposeCacheAndQueueAndReceiveSubscribedMessages() + { + var muxer = SharedConnection.GetMuxer(Log); + var messageBus1 = new RedisMessageBus(new RedisMessageBusOptions { Subscriber = 
muxer.GetSubscriber(), Topic = "test-messages", LoggerFactory = Log }); - var cache = new RedisCacheClient(new RedisCacheClientOptions { ConnectionMultiplexer = muxer }); - Assert.NotNull(cache); + var cache = new RedisCacheClient(new RedisCacheClientOptions { ConnectionMultiplexer = muxer }); + Assert.NotNull(cache); - var queue = new RedisQueue(new RedisQueueOptions - { - ConnectionMultiplexer = muxer, - LoggerFactory = Log - }); - Assert.NotNull(queue); + var queue = new RedisQueue(new RedisQueueOptions + { + ConnectionMultiplexer = muxer, + LoggerFactory = Log + }); + Assert.NotNull(queue); - using (messageBus1) + using (messageBus1) + { + using (cache) { - using (cache) + using (queue) { - using (queue) + await cache.SetAsync("test", "test", TimeSpan.FromSeconds(10)); + await queue.DequeueAsync(new CancellationToken(true)); + + var countdown = new AsyncCountdownEvent(2); + await messageBus1.SubscribeAsync(msg => { - await cache.SetAsync("test", "test", TimeSpan.FromSeconds(10)); - await queue.DequeueAsync(new CancellationToken(true)); - - var countdown = new AsyncCountdownEvent(2); - await messageBus1.SubscribeAsync(msg => - { - Assert.Equal("Hello", msg.Data); - countdown.Signal(); - }); - - await messageBus1.PublishAsync(new SimpleMessageA { Data = "Hello" }); - await countdown.WaitAsync(TimeSpan.FromSeconds(2)); - Assert.Equal(1, countdown.CurrentCount); - - cache.Dispose(); - queue.Dispose(); - - await messageBus1.PublishAsync(new SimpleMessageA { Data = "Hello" }); - await countdown.WaitAsync(TimeSpan.FromSeconds(2)); - Assert.Equal(0, countdown.CurrentCount); - } + Assert.Equal("Hello", msg.Data); + countdown.Signal(); + }); + + await messageBus1.PublishAsync(new SimpleMessageA { Data = "Hello" }); + await countdown.WaitAsync(TimeSpan.FromSeconds(2)); + Assert.Equal(1, countdown.CurrentCount); + + cache.Dispose(); + queue.Dispose(); + + await messageBus1.PublishAsync(new SimpleMessageA { Data = "Hello" }); + await countdown.WaitAsync(TimeSpan.FromSeconds(2)); + Assert.Equal(0, countdown.CurrentCount); } } } diff --git a/tests/Foundatio.Redis.Tests/Metrics/RedisMetricsTests.cs b/tests/Foundatio.Redis.Tests/Metrics/RedisMetricsTests.cs index ca2a82a..94540c0 100644 --- a/tests/Foundatio.Redis.Tests/Metrics/RedisMetricsTests.cs +++ b/tests/Foundatio.Redis.Tests/Metrics/RedisMetricsTests.cs @@ -7,112 +7,111 @@ using Xunit; using Xunit.Abstractions; -namespace Foundatio.Redis.Tests.Metrics +namespace Foundatio.Redis.Tests.Metrics; + +public class RedisMetricsTests : MetricsClientTestBase, IDisposable { - public class RedisMetricsTests : MetricsClientTestBase, IDisposable + public RedisMetricsTests(ITestOutputHelper output) : base(output) { - public RedisMetricsTests(ITestOutputHelper output) : base(output) - { - var muxer = SharedConnection.GetMuxer(Log); - muxer.FlushAllAsync().GetAwaiter().GetResult(); - } + var muxer = SharedConnection.GetMuxer(Log); + muxer.FlushAllAsync().GetAwaiter().GetResult(); + } #pragma warning disable CS0618 // Type or member is obsolete - public override IMetricsClient GetMetricsClient(bool buffered = false) - { - return new RedisMetricsClient(o => o.ConnectionMultiplexer(SharedConnection.GetMuxer(Log)).Buffered(buffered).LoggerFactory(Log)); - } + public override IMetricsClient GetMetricsClient(bool buffered = false) + { + return new RedisMetricsClient(o => o.ConnectionMultiplexer(SharedConnection.GetMuxer(Log)).Buffered(buffered).LoggerFactory(Log)); + } #pragma warning restore CS0618 // Type or member is obsolete - [Fact] - public override Task 
CanSetGaugesAsync() - { - return base.CanSetGaugesAsync(); - } + [Fact] + public override Task CanSetGaugesAsync() + { + return base.CanSetGaugesAsync(); + } - [Fact] - public override Task CanIncrementCounterAsync() - { - return base.CanIncrementCounterAsync(); - } + [Fact] + public override Task CanIncrementCounterAsync() + { + return base.CanIncrementCounterAsync(); + } - [RetryFact] - public override Task CanWaitForCounterAsync() - { - return base.CanWaitForCounterAsync(); - } + [RetryFact] + public override Task CanWaitForCounterAsync() + { + return base.CanWaitForCounterAsync(); + } - [Fact] - public override Task CanGetBufferedQueueMetricsAsync() - { - return base.CanGetBufferedQueueMetricsAsync(); - } + [Fact] + public override Task CanGetBufferedQueueMetricsAsync() + { + return base.CanGetBufferedQueueMetricsAsync(); + } - [Fact] - public override Task CanIncrementBufferedCounterAsync() - { - return base.CanIncrementBufferedCounterAsync(); - } + [Fact] + public override Task CanIncrementBufferedCounterAsync() + { + return base.CanIncrementBufferedCounterAsync(); + } - [Fact] - public override Task CanSendBufferedMetricsAsync() - { - return base.CanSendBufferedMetricsAsync(); - } + [Fact] + public override Task CanSendBufferedMetricsAsync() + { + return base.CanSendBufferedMetricsAsync(); + } - [Fact] - public async Task SendGaugesAsync() - { - using var metrics = GetMetricsClient(); - if (!(metrics is IMetricsClientStats stats)) - return; - - int max = 1000; - for (int index = 0; index <= max; index++) - { - metrics.Gauge("mygauge", index); - metrics.Timer("mygauge", index); - } - - Assert.Equal(max, (await stats.GetGaugeStatsAsync("mygauge")).Last); - } + [Fact] + public async Task SendGaugesAsync() + { + using var metrics = GetMetricsClient(); + if (!(metrics is IMetricsClientStats stats)) + return; - [Fact] - public async Task SendGaugesBufferedAsync() + int max = 1000; + for (int index = 0; index <= max; index++) { - using var metrics = GetMetricsClient(true); - if (!(metrics is IMetricsClientStats stats)) - return; + metrics.Gauge("mygauge", index); + metrics.Timer("mygauge", index); + } - int max = 1000; - for (int index = 0; index <= max; index++) - { - metrics.Gauge("mygauge", index); - metrics.Timer("mygauge", index); - } + Assert.Equal(max, (await stats.GetGaugeStatsAsync("mygauge")).Last); + } - if (metrics is IBufferedMetricsClient bufferedMetrics) - await bufferedMetrics.FlushAsync(); + [Fact] + public async Task SendGaugesBufferedAsync() + { + using var metrics = GetMetricsClient(true); + if (!(metrics is IMetricsClientStats stats)) + return; - Assert.Equal(max, (await stats.GetGaugeStatsAsync("mygauge")).Last); + int max = 1000; + for (int index = 0; index <= max; index++) + { + metrics.Gauge("mygauge", index); + metrics.Timer("mygauge", index); } - [Fact] - public async Task SendRedisAsync() - { - var db = SharedConnection.GetMuxer(Log).GetDatabase(); + if (metrics is IBufferedMetricsClient bufferedMetrics) + await bufferedMetrics.FlushAsync(); - int max = 1000; - for (int index = 0; index <= max; index++) - { - await db.SetAddAsync("test", index); - } - } + Assert.Equal(max, (await stats.GetGaugeStatsAsync("mygauge")).Last); + } - public void Dispose() + [Fact] + public async Task SendRedisAsync() + { + var db = SharedConnection.GetMuxer(Log).GetDatabase(); + + int max = 1000; + for (int index = 0; index <= max; index++) { - var muxer = SharedConnection.GetMuxer(Log); - muxer.FlushAllAsync().GetAwaiter().GetResult(); + await db.SetAddAsync("test", index); } } + + 
public void Dispose() + { + var muxer = SharedConnection.GetMuxer(Log); + muxer.FlushAllAsync().GetAwaiter().GetResult(); + } } diff --git a/tests/Foundatio.Redis.Tests/Queues/RedisQueueTests.cs b/tests/Foundatio.Redis.Tests/Queues/RedisQueueTests.cs index e06b3b1..da49eeb 100644 --- a/tests/Foundatio.Redis.Tests/Queues/RedisQueueTests.cs +++ b/tests/Foundatio.Redis.Tests/Queues/RedisQueueTests.cs @@ -24,794 +24,793 @@ #pragma warning disable 4014 -namespace Foundatio.Redis.Tests.Queues +namespace Foundatio.Redis.Tests.Queues; + +public class RedisQueueTests : QueueTestBase { - public class RedisQueueTests : QueueTestBase + public RedisQueueTests(ITestOutputHelper output) : base(output) { - public RedisQueueTests(ITestOutputHelper output) : base(output) - { - var muxer = SharedConnection.GetMuxer(Log); - while (muxer.CountAllKeysAsync().GetAwaiter().GetResult() != 0) - muxer.FlushAllAsync().GetAwaiter().GetResult(); - } + var muxer = SharedConnection.GetMuxer(Log); + while (muxer.CountAllKeysAsync().GetAwaiter().GetResult() != 0) + muxer.FlushAllAsync().GetAwaiter().GetResult(); + } - protected override IQueue GetQueue(int retries = 1, TimeSpan? workItemTimeout = null, TimeSpan? retryDelay = null, int[] retryMultipliers = null, int deadLetterMaxItems = 100, bool runQueueMaintenance = true) - { - var queue = new RedisQueue(o => o - .ConnectionMultiplexer(SharedConnection.GetMuxer(Log)) - .Retries(retries) - .RetryDelay(retryDelay.GetValueOrDefault(TimeSpan.FromMinutes(1))) - .RetryMultipliers(retryMultipliers ?? new[] { 1, 3, 5, 10 }) - .DeadLetterMaxItems(deadLetterMaxItems) - .WorkItemTimeout(workItemTimeout.GetValueOrDefault(TimeSpan.FromMinutes(5))) - .RunMaintenanceTasks(runQueueMaintenance) - .LoggerFactory(Log) - ); - - _logger.LogDebug("Queue Id: {queueId}", queue.QueueId); - return queue; - } + protected override IQueue GetQueue(int retries = 1, TimeSpan? workItemTimeout = null, TimeSpan? retryDelay = null, int[] retryMultipliers = null, int deadLetterMaxItems = 100, bool runQueueMaintenance = true) + { + var queue = new RedisQueue(o => o + .ConnectionMultiplexer(SharedConnection.GetMuxer(Log)) + .Retries(retries) + .RetryDelay(retryDelay.GetValueOrDefault(TimeSpan.FromMinutes(1))) + .RetryMultipliers(retryMultipliers ?? 
new[] { 1, 3, 5, 10 }) + .DeadLetterMaxItems(deadLetterMaxItems) + .WorkItemTimeout(workItemTimeout.GetValueOrDefault(TimeSpan.FromMinutes(5))) + .RunMaintenanceTasks(runQueueMaintenance) + .LoggerFactory(Log) + ); + + _logger.LogDebug("Queue Id: {queueId}", queue.QueueId); + return queue; + } - [Fact] - public override Task CanQueueAndDequeueWorkItemAsync() - { - return base.CanQueueAndDequeueWorkItemAsync(); - } + [Fact] + public override Task CanQueueAndDequeueWorkItemAsync() + { + return base.CanQueueAndDequeueWorkItemAsync(); + } - [Fact] - public override Task CanDequeueWithCancelledTokenAsync() - { - return base.CanDequeueWithCancelledTokenAsync(); - } + [Fact] + public override Task CanDequeueWithCancelledTokenAsync() + { + return base.CanDequeueWithCancelledTokenAsync(); + } - [Fact] - public override Task CanDequeueEfficientlyAsync() - { - return base.CanDequeueEfficientlyAsync(); - } + [Fact] + public override Task CanDequeueEfficientlyAsync() + { + return base.CanDequeueEfficientlyAsync(); + } - [Fact] - public override Task CanResumeDequeueEfficientlyAsync() - { - return base.CanResumeDequeueEfficientlyAsync(); - } + [Fact] + public override Task CanResumeDequeueEfficientlyAsync() + { + return base.CanResumeDequeueEfficientlyAsync(); + } - [Fact] - public override Task CanQueueAndDequeueMultipleWorkItemsAsync() - { - return base.CanQueueAndDequeueMultipleWorkItemsAsync(); - } + [Fact] + public override Task CanQueueAndDequeueMultipleWorkItemsAsync() + { + return base.CanQueueAndDequeueMultipleWorkItemsAsync(); + } - [Fact] - public override Task WillNotWaitForItemAsync() - { - return base.WillNotWaitForItemAsync(); - } + [Fact] + public override Task WillNotWaitForItemAsync() + { + return base.WillNotWaitForItemAsync(); + } - [Fact] - public override Task WillWaitForItemAsync() - { - return base.WillWaitForItemAsync(); - } + [Fact] + public override Task WillWaitForItemAsync() + { + return base.WillWaitForItemAsync(); + } - [Fact] - public override Task DequeueWaitWillGetSignaledAsync() - { - return base.DequeueWaitWillGetSignaledAsync(); - } + [Fact] + public override Task DequeueWaitWillGetSignaledAsync() + { + return base.DequeueWaitWillGetSignaledAsync(); + } - [Fact] - public override Task CanUseQueueWorkerAsync() - { - return base.CanUseQueueWorkerAsync(); - } + [Fact] + public override Task CanUseQueueWorkerAsync() + { + return base.CanUseQueueWorkerAsync(); + } - [Fact] - public override Task CanRenewLockAsync() - { - return base.CanRenewLockAsync(); - } + [Fact] + public override Task CanRenewLockAsync() + { + return base.CanRenewLockAsync(); + } - [Fact] - public override Task CanHandleErrorInWorkerAsync() - { - return base.CanHandleErrorInWorkerAsync(); - } + [Fact] + public override Task CanHandleErrorInWorkerAsync() + { + return base.CanHandleErrorInWorkerAsync(); + } - [Fact] - public override Task WorkItemsWillTimeoutAsync() - { - return base.WorkItemsWillTimeoutAsync(); - } + [Fact] + public override Task WorkItemsWillTimeoutAsync() + { + return base.WorkItemsWillTimeoutAsync(); + } - [Fact] - public override Task WorkItemsWillGetMovedToDeadletterAsync() - { - return base.WorkItemsWillGetMovedToDeadletterAsync(); - } + [Fact] + public override Task WorkItemsWillGetMovedToDeadletterAsync() + { + return base.WorkItemsWillGetMovedToDeadletterAsync(); + } - [Fact] - public override Task CanAutoCompleteWorkerAsync() - { - return base.CanAutoCompleteWorkerAsync(); - } + [Fact] + public override Task CanAutoCompleteWorkerAsync() + { + return 
base.CanAutoCompleteWorkerAsync(); + } - [RetryFact] - public override Task CanHaveMultipleQueueInstancesAsync() - { - return base.CanHaveMultipleQueueInstancesAsync(); - } + [RetryFact] + public override Task CanHaveMultipleQueueInstancesAsync() + { + return base.CanHaveMultipleQueueInstancesAsync(); + } - [Fact] - public override Task CanDelayRetryAsync() - { - return base.CanDelayRetryAsync(); - } + [Fact] + public override Task CanDelayRetryAsync() + { + return base.CanDelayRetryAsync(); + } - [Fact] - public override Task CanRunWorkItemWithMetricsAsync() - { - return base.CanRunWorkItemWithMetricsAsync(); - } + [Fact] + public override Task CanRunWorkItemWithMetricsAsync() + { + return base.CanRunWorkItemWithMetricsAsync(); + } - [Fact] - public override Task CanAbandonQueueEntryOnceAsync() - { - return base.CanAbandonQueueEntryOnceAsync(); - } + [Fact] + public override Task CanAbandonQueueEntryOnceAsync() + { + return base.CanAbandonQueueEntryOnceAsync(); + } - [Fact] - public override Task CanCompleteQueueEntryOnceAsync() - { - return base.CanCompleteQueueEntryOnceAsync(); - } + [Fact] + public override Task CanCompleteQueueEntryOnceAsync() + { + return base.CanCompleteQueueEntryOnceAsync(); + } - [Fact] - public override Task CanHandleAutoAbandonInWorker() - { - return base.CanHandleAutoAbandonInWorker(); - } + [Fact] + public override Task CanHandleAutoAbandonInWorker() + { + return base.CanHandleAutoAbandonInWorker(); + } - [Fact(Skip = "Need to fix some sort of left over metrics from other tests issue")] - public override Task CanUseQueueOptionsAsync() - { - return base.CanUseQueueOptionsAsync(); - } + [Fact(Skip = "Need to fix some sort of left over metrics from other tests issue")] + public override Task CanUseQueueOptionsAsync() + { + return base.CanUseQueueOptionsAsync(); + } - [RetryFact] - public override async Task CanDequeueWithLockingAsync() - { - var muxer = SharedConnection.GetMuxer(Log); - using var cache = new RedisCacheClient(new RedisCacheClientOptions { ConnectionMultiplexer = muxer, LoggerFactory = Log }); - using var messageBus = new RedisMessageBus(new RedisMessageBusOptions { Subscriber = muxer.GetSubscriber(), Topic = "test-queue", LoggerFactory = Log }); - var distributedLock = new CacheLockProvider(cache, messageBus, Log); - await CanDequeueWithLockingImpAsync(distributedLock); - } + [RetryFact] + public override async Task CanDequeueWithLockingAsync() + { + var muxer = SharedConnection.GetMuxer(Log); + using var cache = new RedisCacheClient(new RedisCacheClientOptions { ConnectionMultiplexer = muxer, LoggerFactory = Log }); + using var messageBus = new RedisMessageBus(new RedisMessageBusOptions { Subscriber = muxer.GetSubscriber(), Topic = "test-queue", LoggerFactory = Log }); + var distributedLock = new CacheLockProvider(cache, messageBus, Log); + await CanDequeueWithLockingImpAsync(distributedLock); + } + + [Fact] + public override async Task CanHaveMultipleQueueInstancesWithLockingAsync() + { + var muxer = SharedConnection.GetMuxer(Log); + using var cache = new RedisCacheClient(new RedisCacheClientOptions { ConnectionMultiplexer = muxer, LoggerFactory = Log }); + using var messageBus = new RedisMessageBus(new RedisMessageBusOptions { Subscriber = muxer.GetSubscriber(), Topic = "test-queue", LoggerFactory = Log }); + var distributedLock = new CacheLockProvider(cache, messageBus, Log); + await CanHaveMultipleQueueInstancesWithLockingImplAsync(distributedLock); + } - [Fact] - public override async Task CanHaveMultipleQueueInstancesWithLockingAsync() + 
[Fact] + public async Task VerifyCacheKeysAreCorrect() + { + var queue = GetQueue(retries: 3, workItemTimeout: TimeSpan.FromSeconds(2), retryDelay: TimeSpan.Zero, runQueueMaintenance: false); + if (queue == null) + return; + + using (queue) { var muxer = SharedConnection.GetMuxer(Log); - using var cache = new RedisCacheClient(new RedisCacheClientOptions { ConnectionMultiplexer = muxer, LoggerFactory = Log }); - using var messageBus = new RedisMessageBus(new RedisMessageBusOptions { Subscriber = muxer.GetSubscriber(), Topic = "test-queue", LoggerFactory = Log }); - var distributedLock = new CacheLockProvider(cache, messageBus, Log); - await CanHaveMultipleQueueInstancesWithLockingImplAsync(distributedLock); + var db = muxer.GetDatabase(); + string listPrefix = muxer.IsCluster() ? "{q:SimpleWorkItem}" : "q:SimpleWorkItem"; + + string id = await queue.EnqueueAsync(new SimpleWorkItem { Data = "blah", Id = 1 }); + Assert.True(await db.KeyExistsAsync("q:SimpleWorkItem:" + id)); + Assert.Equal(1, await db.ListLengthAsync($"{listPrefix}:in")); + Assert.True(await db.KeyExistsAsync("q:SimpleWorkItem:" + id + ":enqueued")); + Assert.Equal(3, await muxer.CountAllKeysAsync()); + + _logger.LogInformation("-----"); + + Assert.False(await db.KeyExistsAsync("q:SimpleWorkItem:" + id + ":renewed")); + var workItem = await queue.DequeueAsync(); + Assert.True(await db.KeyExistsAsync("q:SimpleWorkItem:" + id)); + Assert.Equal(0, await db.ListLengthAsync($"{listPrefix}:in")); + Assert.Equal(1, await db.ListLengthAsync($"{listPrefix}:work")); + Assert.True(await db.KeyExistsAsync("q:SimpleWorkItem:" + id + ":enqueued")); + Assert.True(await db.KeyExistsAsync("q:SimpleWorkItem:" + id + ":renewed")); + Assert.True(await db.KeyExistsAsync("q:SimpleWorkItem:" + id + ":dequeued")); + Assert.Equal(5, await muxer.CountAllKeysAsync()); + + await Task.Delay(TimeSpan.FromSeconds(4)); + + Assert.True(await db.KeyExistsAsync("q:SimpleWorkItem:" + id)); + Assert.Equal(0, await db.ListLengthAsync($"{listPrefix}:in")); + Assert.Equal(1, await db.ListLengthAsync($"{listPrefix}:work")); + Assert.True(await db.KeyExistsAsync("q:SimpleWorkItem:" + id + ":enqueued")); + Assert.True(await db.KeyExistsAsync("q:SimpleWorkItem:" + id + ":renewed")); + Assert.True(await db.KeyExistsAsync("q:SimpleWorkItem:" + id + ":dequeued")); + Assert.Equal(5, await muxer.CountAllKeysAsync()); + + _logger.LogInformation("-----"); + + await workItem.CompleteAsync(); + Assert.False(await db.KeyExistsAsync("q:SimpleWorkItem:" + id)); + Assert.False(await db.KeyExistsAsync("q:SimpleWorkItem:" + id + ":enqueued")); + Assert.False(await db.KeyExistsAsync("q:SimpleWorkItem:" + id + ":renewed")); + Assert.False(await db.KeyExistsAsync("q:SimpleWorkItem:" + id + ":dequeued")); + Assert.Equal(0, await db.ListLengthAsync($"{listPrefix}:in")); + Assert.Equal(0, await db.ListLengthAsync($"{listPrefix}:work")); + Assert.Equal(0, await muxer.CountAllKeysAsync()); } + } - [Fact] - public async Task VerifyCacheKeysAreCorrect() - { - var queue = GetQueue(retries: 3, workItemTimeout: TimeSpan.FromSeconds(2), retryDelay: TimeSpan.Zero, runQueueMaintenance: false); - if (queue == null) - return; + [Fact] + public async Task VerifyCacheKeysAreCorrectAfterAbandon() + { + var queue = GetQueue(retries: 2, workItemTimeout: TimeSpan.FromMilliseconds(100), retryDelay: TimeSpan.Zero, runQueueMaintenance: false) as RedisQueue; + if (queue == null) + return; + using (TestSystemClock.Install()) + { using (queue) { var muxer = SharedConnection.GetMuxer(Log); var db = 
muxer.GetDatabase(); string listPrefix = muxer.IsCluster() ? "{q:SimpleWorkItem}" : "q:SimpleWorkItem"; - string id = await queue.EnqueueAsync(new SimpleWorkItem { Data = "blah", Id = 1 }); + string id = await queue.EnqueueAsync(new SimpleWorkItem + { + Data = "blah", + Id = 1 + }); + _logger.LogTrace("SimpleWorkItem Id: {0}", id); + + var workItem = await queue.DequeueAsync(); + await workItem.AbandonAsync(); Assert.True(await db.KeyExistsAsync("q:SimpleWorkItem:" + id)); Assert.Equal(1, await db.ListLengthAsync($"{listPrefix}:in")); + Assert.Equal(0, await db.ListLengthAsync($"{listPrefix}:work")); + Assert.False(await db.KeyExistsAsync("q:SimpleWorkItem:" + id + ":dequeued")); Assert.True(await db.KeyExistsAsync("q:SimpleWorkItem:" + id + ":enqueued")); - Assert.Equal(3, await muxer.CountAllKeysAsync()); + Assert.False(await db.KeyExistsAsync("q:SimpleWorkItem:" + id + ":renewed")); + Assert.Equal(1, await db.StringGetAsync("q:SimpleWorkItem:" + id + ":attempts")); + Assert.Equal(4, await muxer.CountAllKeysAsync()); - _logger.LogInformation("-----"); + workItem = await queue.DequeueAsync(); + Assert.True(await db.KeyExistsAsync("q:SimpleWorkItem:" + id)); + Assert.Equal(0, await db.ListLengthAsync($"{listPrefix}:in")); + Assert.Equal(1, await db.ListLengthAsync($"{listPrefix}:work")); + Assert.True(await db.KeyExistsAsync("q:SimpleWorkItem:" + id + ":dequeued")); + Assert.True(await db.KeyExistsAsync("q:SimpleWorkItem:" + id + ":enqueued")); + Assert.True(await db.KeyExistsAsync("q:SimpleWorkItem:" + id + ":renewed")); + Assert.Equal(1, await db.StringGetAsync("q:SimpleWorkItem:" + id + ":attempts")); + Assert.Equal(6, await muxer.CountAllKeysAsync()); + // let the work item timeout and become auto abandoned. + TestSystemClock.AddTime(TimeSpan.FromMilliseconds(250)); + await queue.DoMaintenanceWorkAsync(); + Assert.True(await db.KeyExistsAsync("q:SimpleWorkItem:" + id)); + Assert.Equal(1, await db.ListLengthAsync($"{listPrefix}:in")); + Assert.Equal(0, await db.ListLengthAsync($"{listPrefix}:work")); + Assert.False(await db.KeyExistsAsync("q:SimpleWorkItem:" + id + ":dequeued")); + Assert.True(await db.KeyExistsAsync("q:SimpleWorkItem:" + id + ":enqueued")); Assert.False(await db.KeyExistsAsync("q:SimpleWorkItem:" + id + ":renewed")); + Assert.Equal(2, await db.StringGetAsync("q:SimpleWorkItem:" + id + ":attempts")); + Assert.Equal(1, (await queue.GetQueueStatsAsync()).Timeouts); + Assert.InRange(await muxer.CountAllKeysAsync(), 3, 4); + + // should go to deadletter now + workItem = await queue.DequeueAsync(); + await workItem.AbandonAsync(); + Assert.True(await db.KeyExistsAsync("q:SimpleWorkItem:" + id)); + Assert.Equal(0, await db.ListLengthAsync($"{listPrefix}:in")); + Assert.Equal(0, await db.ListLengthAsync($"{listPrefix}:work")); + Assert.Equal(1, await db.ListLengthAsync($"{listPrefix}:dead")); + Assert.False(await db.KeyExistsAsync("q:SimpleWorkItem:" + id + ":dequeued")); + Assert.True(await db.KeyExistsAsync("q:SimpleWorkItem:" + id + ":enqueued")); + Assert.False(await db.KeyExistsAsync("q:SimpleWorkItem:" + id + ":renewed")); + Assert.Equal(3, await db.StringGetAsync("q:SimpleWorkItem:" + id + ":attempts")); + Assert.InRange(await muxer.CountAllKeysAsync(), 4, 5); + } + } + } + + [Fact] + public async Task VerifyCacheKeysAreCorrectAfterAbandonWithRetryDelay() + { + var queue = GetQueue(retries: 2, workItemTimeout: TimeSpan.FromMilliseconds(100), retryDelay: TimeSpan.FromMilliseconds(250), runQueueMaintenance: false) as RedisQueue; + if (queue == null) + return; + + using 
(TestSystemClock.Install()) + { + using (queue) + { + var muxer = SharedConnection.GetMuxer(Log); + var db = muxer.GetDatabase(); + string listPrefix = muxer.IsCluster() ? "{q:SimpleWorkItem}" : "q:SimpleWorkItem"; + + string id = await queue.EnqueueAsync(new SimpleWorkItem + { + Data = "blah", + Id = 1 + }); var workItem = await queue.DequeueAsync(); + await workItem.AbandonAsync(); Assert.True(await db.KeyExistsAsync("q:SimpleWorkItem:" + id)); Assert.Equal(0, await db.ListLengthAsync($"{listPrefix}:in")); - Assert.Equal(1, await db.ListLengthAsync($"{listPrefix}:work")); + Assert.Equal(0, await db.ListLengthAsync($"{listPrefix}:work")); + Assert.Equal(1, await db.ListLengthAsync($"{listPrefix}:wait")); + Assert.False(await db.KeyExistsAsync("q:SimpleWorkItem:" + id + ":dequeued")); Assert.True(await db.KeyExistsAsync("q:SimpleWorkItem:" + id + ":enqueued")); - Assert.True(await db.KeyExistsAsync("q:SimpleWorkItem:" + id + ":renewed")); - Assert.True(await db.KeyExistsAsync("q:SimpleWorkItem:" + id + ":dequeued")); + Assert.False(await db.KeyExistsAsync("q:SimpleWorkItem:" + id + ":renewed")); + Assert.Equal(1, await db.StringGetAsync("q:SimpleWorkItem:" + id + ":attempts")); + Assert.True(await db.KeyExistsAsync("q:SimpleWorkItem:" + id + ":wait")); Assert.Equal(5, await muxer.CountAllKeysAsync()); - await Task.Delay(TimeSpan.FromSeconds(4)); + TestSystemClock.AddTime(TimeSpan.FromSeconds(1)); + await queue.DoMaintenanceWorkAsync(); + Assert.True(await db.KeyExistsAsync("q:SimpleWorkItem:" + id)); + Assert.Equal(1, await db.ListLengthAsync($"{listPrefix}:in")); + Assert.Equal(0, await db.ListLengthAsync($"{listPrefix}:work")); + Assert.Equal(0, await db.ListLengthAsync($"{listPrefix}:wait")); + Assert.False(await db.KeyExistsAsync("q:SimpleWorkItem:" + id + ":dequeued")); + Assert.True(await db.KeyExistsAsync("q:SimpleWorkItem:" + id + ":enqueued")); + Assert.False(await db.KeyExistsAsync("q:SimpleWorkItem:" + id + ":renewed")); + Assert.Equal(1, await db.StringGetAsync("q:SimpleWorkItem:" + id + ":attempts")); + Assert.False(await db.KeyExistsAsync("q:SimpleWorkItem:" + id + ":wait")); + Assert.InRange(await muxer.CountAllKeysAsync(), 4, 5); + workItem = await queue.DequeueAsync(); Assert.True(await db.KeyExistsAsync("q:SimpleWorkItem:" + id)); Assert.Equal(0, await db.ListLengthAsync($"{listPrefix}:in")); Assert.Equal(1, await db.ListLengthAsync($"{listPrefix}:work")); + Assert.True(await db.KeyExistsAsync("q:SimpleWorkItem:" + id + ":dequeued")); Assert.True(await db.KeyExistsAsync("q:SimpleWorkItem:" + id + ":enqueued")); Assert.True(await db.KeyExistsAsync("q:SimpleWorkItem:" + id + ":renewed")); - Assert.True(await db.KeyExistsAsync("q:SimpleWorkItem:" + id + ":dequeued")); - Assert.Equal(5, await muxer.CountAllKeysAsync()); - - _logger.LogInformation("-----"); + Assert.Equal(1, await db.StringGetAsync("q:SimpleWorkItem:" + id + ":attempts")); + Assert.InRange(await muxer.CountAllKeysAsync(), 6, 7); await workItem.CompleteAsync(); Assert.False(await db.KeyExistsAsync("q:SimpleWorkItem:" + id)); Assert.False(await db.KeyExistsAsync("q:SimpleWorkItem:" + id + ":enqueued")); - Assert.False(await db.KeyExistsAsync("q:SimpleWorkItem:" + id + ":renewed")); Assert.False(await db.KeyExistsAsync("q:SimpleWorkItem:" + id + ":dequeued")); Assert.Equal(0, await db.ListLengthAsync($"{listPrefix}:in")); Assert.Equal(0, await db.ListLengthAsync($"{listPrefix}:work")); - Assert.Equal(0, await muxer.CountAllKeysAsync()); + Assert.InRange(await muxer.CountAllKeysAsync(), 0, 1); } } + } - [Fact] - 
public async Task VerifyCacheKeysAreCorrectAfterAbandon() + [Fact] + public async Task CanTrimDeadletterItems() + { + var queue = GetQueue(retries: 0, workItemTimeout: TimeSpan.FromMilliseconds(50), deadLetterMaxItems: 3, runQueueMaintenance: false) as RedisQueue; + if (queue == null) + return; + + using (queue) { - var queue = GetQueue(retries: 2, workItemTimeout: TimeSpan.FromMilliseconds(100), retryDelay: TimeSpan.Zero, runQueueMaintenance: false) as RedisQueue; - if (queue == null) - return; + var muxer = SharedConnection.GetMuxer(Log); + var db = muxer.GetDatabase(); + string listPrefix = muxer.IsCluster() ? "{q:SimpleWorkItem}" : "q:SimpleWorkItem"; - using (TestSystemClock.Install()) + var workItemIds = new List(); + for (int i = 0; i < 10; i++) { - using (queue) - { - var muxer = SharedConnection.GetMuxer(Log); - var db = muxer.GetDatabase(); - string listPrefix = muxer.IsCluster() ? "{q:SimpleWorkItem}" : "q:SimpleWorkItem"; - - string id = await queue.EnqueueAsync(new SimpleWorkItem - { - Data = "blah", - Id = 1 - }); - _logger.LogTrace("SimpleWorkItem Id: {0}", id); - - var workItem = await queue.DequeueAsync(); - await workItem.AbandonAsync(); - Assert.True(await db.KeyExistsAsync("q:SimpleWorkItem:" + id)); - Assert.Equal(1, await db.ListLengthAsync($"{listPrefix}:in")); - Assert.Equal(0, await db.ListLengthAsync($"{listPrefix}:work")); - Assert.False(await db.KeyExistsAsync("q:SimpleWorkItem:" + id + ":dequeued")); - Assert.True(await db.KeyExistsAsync("q:SimpleWorkItem:" + id + ":enqueued")); - Assert.False(await db.KeyExistsAsync("q:SimpleWorkItem:" + id + ":renewed")); - Assert.Equal(1, await db.StringGetAsync("q:SimpleWorkItem:" + id + ":attempts")); - Assert.Equal(4, await muxer.CountAllKeysAsync()); - - workItem = await queue.DequeueAsync(); - Assert.True(await db.KeyExistsAsync("q:SimpleWorkItem:" + id)); - Assert.Equal(0, await db.ListLengthAsync($"{listPrefix}:in")); - Assert.Equal(1, await db.ListLengthAsync($"{listPrefix}:work")); - Assert.True(await db.KeyExistsAsync("q:SimpleWorkItem:" + id + ":dequeued")); - Assert.True(await db.KeyExistsAsync("q:SimpleWorkItem:" + id + ":enqueued")); - Assert.True(await db.KeyExistsAsync("q:SimpleWorkItem:" + id + ":renewed")); - Assert.Equal(1, await db.StringGetAsync("q:SimpleWorkItem:" + id + ":attempts")); - Assert.Equal(6, await muxer.CountAllKeysAsync()); - - // let the work item timeout and become auto abandoned. 
- TestSystemClock.AddTime(TimeSpan.FromMilliseconds(250)); - await queue.DoMaintenanceWorkAsync(); - Assert.True(await db.KeyExistsAsync("q:SimpleWorkItem:" + id)); - Assert.Equal(1, await db.ListLengthAsync($"{listPrefix}:in")); - Assert.Equal(0, await db.ListLengthAsync($"{listPrefix}:work")); - Assert.False(await db.KeyExistsAsync("q:SimpleWorkItem:" + id + ":dequeued")); - Assert.True(await db.KeyExistsAsync("q:SimpleWorkItem:" + id + ":enqueued")); - Assert.False(await db.KeyExistsAsync("q:SimpleWorkItem:" + id + ":renewed")); - Assert.Equal(2, await db.StringGetAsync("q:SimpleWorkItem:" + id + ":attempts")); - Assert.Equal(1, (await queue.GetQueueStatsAsync()).Timeouts); - Assert.InRange(await muxer.CountAllKeysAsync(), 3, 4); - - // should go to deadletter now - workItem = await queue.DequeueAsync(); - await workItem.AbandonAsync(); - Assert.True(await db.KeyExistsAsync("q:SimpleWorkItem:" + id)); - Assert.Equal(0, await db.ListLengthAsync($"{listPrefix}:in")); - Assert.Equal(0, await db.ListLengthAsync($"{listPrefix}:work")); - Assert.Equal(1, await db.ListLengthAsync($"{listPrefix}:dead")); - Assert.False(await db.KeyExistsAsync("q:SimpleWorkItem:" + id + ":dequeued")); - Assert.True(await db.KeyExistsAsync("q:SimpleWorkItem:" + id + ":enqueued")); - Assert.False(await db.KeyExistsAsync("q:SimpleWorkItem:" + id + ":renewed")); - Assert.Equal(3, await db.StringGetAsync("q:SimpleWorkItem:" + id + ":attempts")); - Assert.InRange(await muxer.CountAllKeysAsync(), 4, 5); - } + string id = await queue.EnqueueAsync(new SimpleWorkItem { Data = "blah", Id = i }); + _logger.LogTrace(id); + workItemIds.Add(id); } - } - [Fact] - public async Task VerifyCacheKeysAreCorrectAfterAbandonWithRetryDelay() - { - var queue = GetQueue(retries: 2, workItemTimeout: TimeSpan.FromMilliseconds(100), retryDelay: TimeSpan.FromMilliseconds(250), runQueueMaintenance: false) as RedisQueue; - if (queue == null) - return; - - using (TestSystemClock.Install()) + for (int i = 0; i < 10; i++) { - using (queue) - { - var muxer = SharedConnection.GetMuxer(Log); - var db = muxer.GetDatabase(); - string listPrefix = muxer.IsCluster() ? 
"{q:SimpleWorkItem}" : "q:SimpleWorkItem"; - - string id = await queue.EnqueueAsync(new SimpleWorkItem - { - Data = "blah", - Id = 1 - }); - var workItem = await queue.DequeueAsync(); - await workItem.AbandonAsync(); - Assert.True(await db.KeyExistsAsync("q:SimpleWorkItem:" + id)); - Assert.Equal(0, await db.ListLengthAsync($"{listPrefix}:in")); - Assert.Equal(0, await db.ListLengthAsync($"{listPrefix}:work")); - Assert.Equal(1, await db.ListLengthAsync($"{listPrefix}:wait")); - Assert.False(await db.KeyExistsAsync("q:SimpleWorkItem:" + id + ":dequeued")); - Assert.True(await db.KeyExistsAsync("q:SimpleWorkItem:" + id + ":enqueued")); - Assert.False(await db.KeyExistsAsync("q:SimpleWorkItem:" + id + ":renewed")); - Assert.Equal(1, await db.StringGetAsync("q:SimpleWorkItem:" + id + ":attempts")); - Assert.True(await db.KeyExistsAsync("q:SimpleWorkItem:" + id + ":wait")); - Assert.Equal(5, await muxer.CountAllKeysAsync()); - - TestSystemClock.AddTime(TimeSpan.FromSeconds(1)); - await queue.DoMaintenanceWorkAsync(); - Assert.True(await db.KeyExistsAsync("q:SimpleWorkItem:" + id)); - Assert.Equal(1, await db.ListLengthAsync($"{listPrefix}:in")); - Assert.Equal(0, await db.ListLengthAsync($"{listPrefix}:work")); - Assert.Equal(0, await db.ListLengthAsync($"{listPrefix}:wait")); - Assert.False(await db.KeyExistsAsync("q:SimpleWorkItem:" + id + ":dequeued")); - Assert.True(await db.KeyExistsAsync("q:SimpleWorkItem:" + id + ":enqueued")); - Assert.False(await db.KeyExistsAsync("q:SimpleWorkItem:" + id + ":renewed")); - Assert.Equal(1, await db.StringGetAsync("q:SimpleWorkItem:" + id + ":attempts")); - Assert.False(await db.KeyExistsAsync("q:SimpleWorkItem:" + id + ":wait")); - Assert.InRange(await muxer.CountAllKeysAsync(), 4, 5); - - workItem = await queue.DequeueAsync(); - Assert.True(await db.KeyExistsAsync("q:SimpleWorkItem:" + id)); - Assert.Equal(0, await db.ListLengthAsync($"{listPrefix}:in")); - Assert.Equal(1, await db.ListLengthAsync($"{listPrefix}:work")); - Assert.True(await db.KeyExistsAsync("q:SimpleWorkItem:" + id + ":dequeued")); - Assert.True(await db.KeyExistsAsync("q:SimpleWorkItem:" + id + ":enqueued")); - Assert.True(await db.KeyExistsAsync("q:SimpleWorkItem:" + id + ":renewed")); - Assert.Equal(1, await db.StringGetAsync("q:SimpleWorkItem:" + id + ":attempts")); - Assert.InRange(await muxer.CountAllKeysAsync(), 6, 7); - - await workItem.CompleteAsync(); - Assert.False(await db.KeyExistsAsync("q:SimpleWorkItem:" + id)); - Assert.False(await db.KeyExistsAsync("q:SimpleWorkItem:" + id + ":enqueued")); - Assert.False(await db.KeyExistsAsync("q:SimpleWorkItem:" + id + ":dequeued")); - Assert.Equal(0, await db.ListLengthAsync($"{listPrefix}:in")); - Assert.Equal(0, await db.ListLengthAsync($"{listPrefix}:work")); - Assert.InRange(await muxer.CountAllKeysAsync(), 0, 1); - } + var workItem = await queue.DequeueAsync(); + await workItem.AbandonAsync(); + _logger.LogTrace("Abandoning: " + workItem.Id); } - } - [Fact] - public async Task CanTrimDeadletterItems() - { - var queue = GetQueue(retries: 0, workItemTimeout: TimeSpan.FromMilliseconds(50), deadLetterMaxItems: 3, runQueueMaintenance: false) as RedisQueue; - if (queue == null) - return; + workItemIds.Reverse(); + await queue.DoMaintenanceWorkAsync(); - using (queue) + foreach (object id in workItemIds.Take(3)) { - var muxer = SharedConnection.GetMuxer(Log); - var db = muxer.GetDatabase(); - string listPrefix = muxer.IsCluster() ? 
"{q:SimpleWorkItem}" : "q:SimpleWorkItem"; - - var workItemIds = new List(); - for (int i = 0; i < 10; i++) - { - string id = await queue.EnqueueAsync(new SimpleWorkItem { Data = "blah", Id = i }); - _logger.LogTrace(id); - workItemIds.Add(id); - } + _logger.LogTrace("Checking: " + id); + Assert.True(await db.KeyExistsAsync("q:SimpleWorkItem:" + id)); + } - for (int i = 0; i < 10; i++) - { - var workItem = await queue.DequeueAsync(); - await workItem.AbandonAsync(); - _logger.LogTrace("Abandoning: " + workItem.Id); - } + Assert.Equal(0, await db.ListLengthAsync($"{listPrefix}:in")); + Assert.Equal(0, await db.ListLengthAsync($"{listPrefix}:work")); + Assert.Equal(0, await db.ListLengthAsync($"{listPrefix}:wait")); + Assert.Equal(3, await db.ListLengthAsync($"{listPrefix}:dead")); + Assert.InRange(await muxer.CountAllKeysAsync(), 10, 11); + } + } - workItemIds.Reverse(); - await queue.DoMaintenanceWorkAsync(); + [Fact] + public async Task VerifyFirstDequeueTimeout() + { - foreach (object id in workItemIds.Take(3)) - { - _logger.LogTrace("Checking: " + id); - Assert.True(await db.KeyExistsAsync("q:SimpleWorkItem:" + id)); - } + var workItemTimeout = TimeSpan.FromMilliseconds(100); + var itemData = "blah"; + var itemId = 1; - Assert.Equal(0, await db.ListLengthAsync($"{listPrefix}:in")); - Assert.Equal(0, await db.ListLengthAsync($"{listPrefix}:work")); - Assert.Equal(0, await db.ListLengthAsync($"{listPrefix}:wait")); - Assert.Equal(3, await db.ListLengthAsync($"{listPrefix}:dead")); - Assert.InRange(await muxer.CountAllKeysAsync(), 10, 11); - } - } + var queue = GetQueue(retries: 0, workItemTimeout: workItemTimeout, retryDelay: TimeSpan.Zero, runQueueMaintenance: false) as RedisQueue; + if (queue == null) + return; - [Fact] - public async Task VerifyFirstDequeueTimeout() + using (queue) { + await queue.DeleteQueueAsync(); - var workItemTimeout = TimeSpan.FromMilliseconds(100); - var itemData = "blah"; - var itemId = 1; + // Start DequeueAsync but allow it to yield. + var itemTask = queue.DequeueAsync(); - var queue = GetQueue(retries: 0, workItemTimeout: workItemTimeout, retryDelay: TimeSpan.Zero, runQueueMaintenance: false) as RedisQueue; - if (queue == null) - return; + // Wait longer than the workItemTimeout. + // This is the period between a queue having DequeueAsync called on it and the first item being enqueued. + await SystemClock.SleepAsync(workItemTimeout.Add(TimeSpan.FromMilliseconds(1))); - using (queue) + // Add an item. DequeueAsync can now return. + string id = await queue.EnqueueAsync(new SimpleWorkItem { - await queue.DeleteQueueAsync(); - - // Start DequeueAsync but allow it to yield. - var itemTask = queue.DequeueAsync(); + Data = itemData, + Id = itemId + }); - // Wait longer than the workItemTimeout. - // This is the period between a queue having DequeueAsync called on it and the first item being enqueued. - await SystemClock.SleepAsync(workItemTimeout.Add(TimeSpan.FromMilliseconds(1))); + // Run DoMaintenanceWorkAsync to verify that our item will not be auto-abandoned. + await queue.DoMaintenanceWorkAsync(); - // Add an item. DequeueAsync can now return. - string id = await queue.EnqueueAsync(new SimpleWorkItem - { - Data = itemData, - Id = itemId - }); + // Completing the item will throw if the item is abandoned. + var item = await itemTask; + await item.CompleteAsync(); - // Run DoMaintenanceWorkAsync to verify that our item will not be auto-abandoned. 
- await queue.DoMaintenanceWorkAsync(); + var value = item.Value; + Assert.NotNull(value); + Assert.Equal(itemData, value.Data); + Assert.Equal(itemId, value.Id); + } + } - // Completing the item will throw if the item is abandoned. - var item = await itemTask; - await item.CompleteAsync(); + // test to reproduce issue #64 - https://github.com/FoundatioFx/Foundatio.Redis/issues/64 + //[Fact(Skip ="This test needs to simulate database timeout which makes the runtime ~5 sec which might be too big to be run automatically")] + [Fact] + public async Task DatabaseTimeoutDuringDequeueHandledCorectly_Issue64() + { + // not using GetQueue() here because I need to change the ops timeout in the redis connection string + const int OPS_TIMEOUT_MS = 100; + string connectionString = Configuration.GetConnectionString("RedisConnectionString") + $",syncTimeout={OPS_TIMEOUT_MS},asyncTimeout={OPS_TIMEOUT_MS}"; ; + var muxer = await ConnectionMultiplexer.ConnectAsync(connectionString); - var value = item.Value; - Assert.NotNull(value); - Assert.Equal(itemData, value.Data); - Assert.Equal(itemId, value.Id); - } - } + const string QUEUE_NAME = "Test"; + var queue = new RedisQueue(o => o + .ConnectionMultiplexer(muxer) + .LoggerFactory(Log) + .Name(QUEUE_NAME) + .RunMaintenanceTasks(false) + ); - // test to reproduce issue #64 - https://github.com/FoundatioFx/Foundatio.Redis/issues/64 - //[Fact(Skip ="This test needs to simulate database timeout which makes the runtime ~5 sec which might be too big to be run automatically")] - [Fact] - public async Task DatabaseTimeoutDuringDequeueHandledCorectly_Issue64() + using (queue) { - // not using GetQueue() here because I need to change the ops timeout in the redis connection string - const int OPS_TIMEOUT_MS = 100; - string connectionString = Configuration.GetConnectionString("RedisConnectionString") + $",syncTimeout={OPS_TIMEOUT_MS},asyncTimeout={OPS_TIMEOUT_MS}"; ; - var muxer = await ConnectionMultiplexer.ConnectAsync(connectionString); - - const string QUEUE_NAME = "Test"; - var queue = new RedisQueue(o => o - .ConnectionMultiplexer(muxer) - .LoggerFactory(Log) - .Name(QUEUE_NAME) - .RunMaintenanceTasks(false) - ); + await queue.DeleteQueueAsync(); - using (queue) - { - await queue.DeleteQueueAsync(); - - // enqueue item to queue, no reader yet - await queue.EnqueueAsync(new SimpleWorkItem()); + // enqueue item to queue, no reader yet + await queue.EnqueueAsync(new SimpleWorkItem()); - // create database, we want to cause delay in redis to reproduce the issue - var database = muxer.GetDatabase(); + // create database, we want to cause delay in redis to reproduce the issue + var database = muxer.GetDatabase(); - // sync / async ops timeout is not working as described: https://stackexchange.github.io/StackExchange.Redis/Configuration - // it should have timed out after 100 ms but it actually takes a lot more time to time out so we have to use longer delay until this issue is resolved - // value can be up to 1,000,000 - 1 - //const int DELAY_TIME_USEC = 200000; // 200 msec - //string databaseDelayScript = $"local usecnow = tonumber(redis.call(\"time\")[2]); while ((((tonumber(redis.call(\"time\")[2]) - usecnow) + 1000000) % 1000000) < {DELAY_TIME_USEC}) do end"; + // sync / async ops timeout is not working as described: https://stackexchange.github.io/StackExchange.Redis/Configuration + // it should have timed out after 100 ms but it actually takes a lot more time to time out so we have to use longer delay until this issue is resolved + // value can be up to 1,000,000 
- 1 + //const int DELAY_TIME_USEC = 200000; // 200 msec + //string databaseDelayScript = $"local usecnow = tonumber(redis.call(\"time\")[2]); while ((((tonumber(redis.call(\"time\")[2]) - usecnow) + 1000000) % 1000000) < {DELAY_TIME_USEC}) do end"; - const int DELAY_TIME_SEC = 5; - string databaseDelayScript = $@" + const int DELAY_TIME_SEC = 5; + string databaseDelayScript = $@" local now = tonumber(redis.call(""time"")[1]); while ((((tonumber(redis.call(""time"")[1]) - now))) < {DELAY_TIME_SEC}) " + - "do end"; +"do end"; - // db will be busy for DELAY_TIME_USEC which will cause timeout on the dequeue to follow - database.ScriptEvaluateAsync(databaseDelayScript); + // db will be busy for DELAY_TIME_USEC which will cause timeout on the dequeue to follow + database.ScriptEvaluateAsync(databaseDelayScript); - var completion = new TaskCompletionSource(); - await queue.StartWorkingAsync(async (item) => - { - await item.CompleteAsync(); - completion.SetResult(true); - }); + var completion = new TaskCompletionSource(); + await queue.StartWorkingAsync(async (item) => + { + await item.CompleteAsync(); + completion.SetResult(true); + }); - // wait for the databaseDelayScript to finish - await Task.Delay(DELAY_TIME_SEC * 1000); + // wait for the databaseDelayScript to finish + await Task.Delay(DELAY_TIME_SEC * 1000); - // item should've either time out at some iterations and after databaseDelayScript is done be received - // or it might have moved to work, in this case we want to make sure the correct keys were created - var stopwatch = Stopwatch.StartNew(); - bool success = false; - while (stopwatch.Elapsed.TotalSeconds < 10) - { + // item should've either time out at some iterations and after databaseDelayScript is done be received + // or it might have moved to work, in this case we want to make sure the correct keys were created + var stopwatch = Stopwatch.StartNew(); + bool success = false; + while (stopwatch.Elapsed.TotalSeconds < 10) + { - string workListName = $"q:{QUEUE_NAME}:work"; - long workListLen = await database.ListLengthAsync(new RedisKey(workListName)); - var item = await database.ListLeftPopAsync(workListName); - string dequeuedItemKey = String.Concat("q:", QUEUE_NAME, ":", item, ":dequeued"); - bool dequeuedItemKeyExists = await database.KeyExistsAsync(new RedisKey(dequeuedItemKey)); - if (workListLen == 1) - { - Assert.True(dequeuedItemKeyExists); - success = true; - break; - } - - var timeoutCancellationTokenSource = new CancellationTokenSource(); - var completedTask = await Task.WhenAny(completion.Task, Task.Delay(TimeSpan.FromMilliseconds(100), timeoutCancellationTokenSource.Token)); - if (completion.Task == completedTask) - { - success = true; - break; - } + string workListName = $"q:{QUEUE_NAME}:work"; + long workListLen = await database.ListLengthAsync(new RedisKey(workListName)); + var item = await database.ListLeftPopAsync(workListName); + string dequeuedItemKey = String.Concat("q:", QUEUE_NAME, ":", item, ":dequeued"); + bool dequeuedItemKeyExists = await database.KeyExistsAsync(new RedisKey(dequeuedItemKey)); + if (workListLen == 1) + { + Assert.True(dequeuedItemKeyExists); + success = true; + break; } - Assert.True(success); + var timeoutCancellationTokenSource = new CancellationTokenSource(); + var completedTask = await Task.WhenAny(completion.Task, Task.Delay(TimeSpan.FromMilliseconds(100), timeoutCancellationTokenSource.Token)); + if (completion.Task == completedTask) + { + success = true; + break; + } } + + Assert.True(success); } + } - // TODO: Need to write 
tests that verify the cache data is correct after each operation. + // TODO: Need to write tests that verify the cache data is correct after each operation. - [Fact(Skip = "Performance Test")] - public async Task MeasureThroughputWithRandomFailures() + [Fact(Skip = "Performance Test")] + public async Task MeasureThroughputWithRandomFailures() + { + var queue = GetQueue(retries: 3, workItemTimeout: TimeSpan.FromSeconds(2), retryDelay: TimeSpan.Zero); + if (queue == null) + return; + + using (queue) { - var queue = GetQueue(retries: 3, workItemTimeout: TimeSpan.FromSeconds(2), retryDelay: TimeSpan.Zero); - if (queue == null) - return; + await queue.DeleteQueueAsync(); - using (queue) + const int workItemCount = 1000; + for (int i = 0; i < workItemCount; i++) { - await queue.DeleteQueueAsync(); - - const int workItemCount = 1000; - for (int i = 0; i < workItemCount; i++) + await queue.EnqueueAsync(new SimpleWorkItem { - await queue.EnqueueAsync(new SimpleWorkItem - { - Data = "Hello" - }); - } - Assert.Equal(workItemCount, (await queue.GetQueueStatsAsync()).Queued); - - var metrics = new InMemoryMetricsClient(new InMemoryMetricsClientOptions()); - var workItem = await queue.DequeueAsync(TimeSpan.Zero); - while (workItem != null) - { - Assert.Equal("Hello", workItem.Value.Data); - if (RandomData.GetBool(10)) - await workItem.AbandonAsync(); - else - await workItem.CompleteAsync(); - - metrics.Counter("work"); - workItem = await queue.DequeueAsync(TimeSpan.FromMilliseconds(100)); - } - _logger.LogTrace((await metrics.GetCounterStatsAsync("work")).ToString()); + Data = "Hello" + }); + } + Assert.Equal(workItemCount, (await queue.GetQueueStatsAsync()).Queued); - var stats = await queue.GetQueueStatsAsync(); - Assert.True(stats.Dequeued >= workItemCount); - Assert.Equal(workItemCount, stats.Completed + stats.Deadletter); - Assert.Equal(0, stats.Queued); + var metrics = new InMemoryMetricsClient(new InMemoryMetricsClientOptions()); + var workItem = await queue.DequeueAsync(TimeSpan.Zero); + while (workItem != null) + { + Assert.Equal("Hello", workItem.Value.Data); + if (RandomData.GetBool(10)) + await workItem.AbandonAsync(); + else + await workItem.CompleteAsync(); - var muxer = SharedConnection.GetMuxer(Log); - _logger.LogTrace("# Keys: {0}", muxer.CountAllKeysAsync()); + metrics.Counter("work"); + workItem = await queue.DequeueAsync(TimeSpan.FromMilliseconds(100)); } + _logger.LogTrace((await metrics.GetCounterStatsAsync("work")).ToString()); + + var stats = await queue.GetQueueStatsAsync(); + Assert.True(stats.Dequeued >= workItemCount); + Assert.Equal(workItemCount, stats.Completed + stats.Deadletter); + Assert.Equal(0, stats.Queued); + + var muxer = SharedConnection.GetMuxer(Log); + _logger.LogTrace("# Keys: {0}", muxer.CountAllKeysAsync()); } + } + + [Fact(Skip = "Performance Test")] + public async Task MeasureThroughput() + { + var queue = GetQueue(retries: 3, workItemTimeout: TimeSpan.FromSeconds(2), retryDelay: TimeSpan.FromSeconds(1)); + if (queue == null) + return; - [Fact(Skip = "Performance Test")] - public async Task MeasureThroughput() + using (queue) { - var queue = GetQueue(retries: 3, workItemTimeout: TimeSpan.FromSeconds(2), retryDelay: TimeSpan.FromSeconds(1)); - if (queue == null) - return; + await queue.DeleteQueueAsync(); - using (queue) + const int workItemCount = 1000; + for (int i = 0; i < workItemCount; i++) { - await queue.DeleteQueueAsync(); - - const int workItemCount = 1000; - for (int i = 0; i < workItemCount; i++) + await queue.EnqueueAsync(new SimpleWorkItem { - 
await queue.EnqueueAsync(new SimpleWorkItem - { - Data = "Hello" - }); - } - Assert.Equal(workItemCount, (await queue.GetQueueStatsAsync()).Queued); + Data = "Hello" + }); + } + Assert.Equal(workItemCount, (await queue.GetQueueStatsAsync()).Queued); - var metrics = new InMemoryMetricsClient(new InMemoryMetricsClientOptions()); - var workItem = await queue.DequeueAsync(TimeSpan.Zero); - while (workItem != null) - { - Assert.Equal("Hello", workItem.Value.Data); - await workItem.CompleteAsync(); - metrics.Counter("work"); + var metrics = new InMemoryMetricsClient(new InMemoryMetricsClientOptions()); + var workItem = await queue.DequeueAsync(TimeSpan.Zero); + while (workItem != null) + { + Assert.Equal("Hello", workItem.Value.Data); + await workItem.CompleteAsync(); + metrics.Counter("work"); - workItem = await queue.DequeueAsync(TimeSpan.Zero); - } - _logger.LogTrace((await metrics.GetCounterStatsAsync("work")).ToString()); + workItem = await queue.DequeueAsync(TimeSpan.Zero); + } + _logger.LogTrace((await metrics.GetCounterStatsAsync("work")).ToString()); - var stats = await queue.GetQueueStatsAsync(); - Assert.Equal(workItemCount, stats.Dequeued); - Assert.Equal(workItemCount, stats.Completed); - Assert.Equal(0, stats.Queued); + var stats = await queue.GetQueueStatsAsync(); + Assert.Equal(workItemCount, stats.Dequeued); + Assert.Equal(workItemCount, stats.Completed); + Assert.Equal(0, stats.Queued); - var muxer = SharedConnection.GetMuxer(Log); - _logger.LogTrace("# Keys: {0}", muxer.CountAllKeysAsync()); - } + var muxer = SharedConnection.GetMuxer(Log); + _logger.LogTrace("# Keys: {0}", muxer.CountAllKeysAsync()); } + } + + [Fact(Skip = "Performance Test")] + public async Task MeasureWorkerThroughput() + { + var queue = GetQueue(retries: 3, workItemTimeout: TimeSpan.FromSeconds(2), retryDelay: TimeSpan.FromSeconds(1)); + if (queue == null) + return; - [Fact(Skip = "Performance Test")] - public async Task MeasureWorkerThroughput() + using (queue) { - var queue = GetQueue(retries: 3, workItemTimeout: TimeSpan.FromSeconds(2), retryDelay: TimeSpan.FromSeconds(1)); - if (queue == null) - return; + await queue.DeleteQueueAsync(); - using (queue) + const int workItemCount = 1; + for (int i = 0; i < workItemCount; i++) { - await queue.DeleteQueueAsync(); - - const int workItemCount = 1; - for (int i = 0; i < workItemCount; i++) - { - await queue.EnqueueAsync(new SimpleWorkItem - { - Data = "Hello" - }); - } - Assert.Equal(workItemCount, (await queue.GetQueueStatsAsync()).Queued); - - var countdown = new AsyncCountdownEvent(workItemCount); - var metrics = new InMemoryMetricsClient(new InMemoryMetricsClientOptions()); - await queue.StartWorkingAsync(async workItem => + await queue.EnqueueAsync(new SimpleWorkItem { - Assert.Equal("Hello", workItem.Value.Data); - await workItem.CompleteAsync(); - metrics.Counter("work"); - countdown.Signal(); + Data = "Hello" }); + } + Assert.Equal(workItemCount, (await queue.GetQueueStatsAsync()).Queued); - await countdown.WaitAsync(TimeSpan.FromMinutes(1)); - Assert.Equal(0, countdown.CurrentCount); - _logger.LogTrace((await metrics.GetCounterStatsAsync("work")).ToString()); + var countdown = new AsyncCountdownEvent(workItemCount); + var metrics = new InMemoryMetricsClient(new InMemoryMetricsClientOptions()); + await queue.StartWorkingAsync(async workItem => + { + Assert.Equal("Hello", workItem.Value.Data); + await workItem.CompleteAsync(); + metrics.Counter("work"); + countdown.Signal(); + }); - var stats = await queue.GetQueueStatsAsync(); - 
Assert.Equal(workItemCount, stats.Dequeued); - Assert.Equal(workItemCount, stats.Completed); - Assert.Equal(0, stats.Queued); + await countdown.WaitAsync(TimeSpan.FromMinutes(1)); + Assert.Equal(0, countdown.CurrentCount); + _logger.LogTrace((await metrics.GetCounterStatsAsync("work")).ToString()); - var muxer = SharedConnection.GetMuxer(Log); - _logger.LogTrace("# Keys: {0}", muxer.CountAllKeysAsync()); - } - } + var stats = await queue.GetQueueStatsAsync(); + Assert.Equal(workItemCount, stats.Dequeued); + Assert.Equal(workItemCount, stats.Completed); + Assert.Equal(0, stats.Queued); - [Fact] - public override Task VerifyRetryAttemptsAsync() - { - return base.VerifyRetryAttemptsAsync(); + var muxer = SharedConnection.GetMuxer(Log); + _logger.LogTrace("# Keys: {0}", muxer.CountAllKeysAsync()); } + } - [Fact] - public override Task VerifyDelayedRetryAttemptsAsync() - { - return base.VerifyDelayedRetryAttemptsAsync(); - } + [Fact] + public override Task VerifyRetryAttemptsAsync() + { + return base.VerifyRetryAttemptsAsync(); + } - [Fact] - public async Task CanHaveDifferentMessageTypeInQueueWithSameNameAsync() - { - await HandlerCommand1Async(); - await HandlerCommand2Async(); + [Fact] + public override Task VerifyDelayedRetryAttemptsAsync() + { + return base.VerifyDelayedRetryAttemptsAsync(); + } - await Task.Delay(1000); + [Fact] + public async Task CanHaveDifferentMessageTypeInQueueWithSameNameAsync() + { + await HandlerCommand1Async(); + await HandlerCommand2Async(); - await Publish1Async(); - await Publish2Async(); - } + await Task.Delay(1000); - private IQueue CreateQueue(bool allQueuesTheSameName = true) where T : class - { - var name = typeof(T).FullName.Trim().Replace(".", string.Empty).ToLower(); + await Publish1Async(); + await Publish2Async(); + } - if (allQueuesTheSameName) - name = "cmd"; + private IQueue CreateQueue(bool allQueuesTheSameName = true) where T : class + { + var name = typeof(T).FullName.Trim().Replace(".", string.Empty).ToLower(); - var queue = new RedisQueue(o => o - .ConnectionMultiplexer(SharedConnection.GetMuxer(Log)) - .Name(name) - .LoggerFactory(Log) - ); + if (allQueuesTheSameName) + name = "cmd"; - _logger.LogDebug("Queue Id: {queueId}", queue.QueueId); - return queue; - } + var queue = new RedisQueue(o => o + .ConnectionMultiplexer(SharedConnection.GetMuxer(Log)) + .Name(name) + .LoggerFactory(Log) + ); - private Task HandlerCommand1Async() - { - var q = CreateQueue(); + _logger.LogDebug("Queue Id: {queueId}", queue.QueueId); + return queue; + } - return q.StartWorkingAsync((entry, token) => - { - _logger.LogInformation($"{SystemClock.UtcNow:O}: Handler1\t{entry.Value.GetType().Name} {entry.Value.Id}"); - Assert.InRange(entry.Value.Id, 100, 199); - return Task.CompletedTask; - }); - } + private Task HandlerCommand1Async() + { + var q = CreateQueue(); - private Task HandlerCommand2Async() + return q.StartWorkingAsync((entry, token) => { - var q = CreateQueue(); + _logger.LogInformation($"{SystemClock.UtcNow:O}: Handler1\t{entry.Value.GetType().Name} {entry.Value.Id}"); + Assert.InRange(entry.Value.Id, 100, 199); + return Task.CompletedTask; + }); + } - return q.StartWorkingAsync((entry, token) => - { - _logger.LogInformation($"{SystemClock.UtcNow:O}: Handler2\t{entry.Value.GetType().Name} {entry.Value.Id}"); - Assert.InRange(entry.Value.Id, 200, 299); - return Task.CompletedTask; - }, true); - } + private Task HandlerCommand2Async() + { + var q = CreateQueue(); - private async Task Publish1Async() + return q.StartWorkingAsync((entry, token) => { - var q 
= CreateQueue(); + _logger.LogInformation($"{SystemClock.UtcNow:O}: Handler2\t{entry.Value.GetType().Name} {entry.Value.Id}"); + Assert.InRange(entry.Value.Id, 200, 299); + return Task.CompletedTask; + }, true); + } - for (var i = 0; i < 10; i++) - { - var cmd = new Command1(100 + i); - _logger.LogInformation($"{DateTime.UtcNow:O}: Publish\tCommand1 {cmd.Id}"); - await q.EnqueueAsync(cmd); - } - } + private async Task Publish1Async() + { + var q = CreateQueue(); - private async Task Publish2Async() + for (var i = 0; i < 10; i++) { - var q = CreateQueue(); - - for (var i = 0; i < 10; i++) - { - var cmd = new Command2(200 + i); - _logger.LogInformation($"{DateTime.UtcNow:O}: Publish\tCommand2 {cmd.Id}"); - await q.EnqueueAsync(cmd); - } + var cmd = new Command1(100 + i); + _logger.LogInformation($"{DateTime.UtcNow:O}: Publish\tCommand1 {cmd.Id}"); + await q.EnqueueAsync(cmd); } + } - public class Command1 + private async Task Publish2Async() + { + var q = CreateQueue(); + + for (var i = 0; i < 10; i++) { - public Command1() { } + var cmd = new Command2(200 + i); + _logger.LogInformation($"{DateTime.UtcNow:O}: Publish\tCommand2 {cmd.Id}"); + await q.EnqueueAsync(cmd); + } + } - public Command1(int id) - { - Id = id; - } + public class Command1 + { + public Command1() { } - public int Id { get; set; } + public Command1(int id) + { + Id = id; } - public class Command2 - { - public Command2() { } + public int Id { get; set; } + } - public Command2(int id) - { - Id = id; - } + public class Command2 + { + public Command2() { } - public int Id { get; set; } + public Command2(int id) + { + Id = id; } + + public int Id { get; set; } } } diff --git a/tests/Foundatio.Redis.Tests/SharedConnection.cs b/tests/Foundatio.Redis.Tests/SharedConnection.cs index 3ad7ebd..5694af1 100644 --- a/tests/Foundatio.Redis.Tests/SharedConnection.cs +++ b/tests/Foundatio.Redis.Tests/SharedConnection.cs @@ -3,22 +3,21 @@ using Microsoft.Extensions.Logging; using StackExchange.Redis; -namespace Foundatio.Redis.Tests +namespace Foundatio.Redis.Tests; + +public static class SharedConnection { - public static class SharedConnection - { - private static ConnectionMultiplexer _muxer; + private static ConnectionMultiplexer _muxer; - public static ConnectionMultiplexer GetMuxer(ILoggerFactory loggerFactory) - { - string connectionString = Configuration.GetConnectionString("RedisConnectionString"); - if (String.IsNullOrEmpty(connectionString)) - return null; + public static ConnectionMultiplexer GetMuxer(ILoggerFactory loggerFactory) + { + string connectionString = Configuration.GetConnectionString("RedisConnectionString"); + if (String.IsNullOrEmpty(connectionString)) + return null; - if (_muxer == null) - _muxer = ConnectionMultiplexer.Connect(connectionString, o => o.LoggerFactory = loggerFactory); + if (_muxer == null) + _muxer = ConnectionMultiplexer.Connect(connectionString, o => o.LoggerFactory = loggerFactory); - return _muxer; - } + return _muxer; } } diff --git a/tests/Foundatio.Redis.Tests/Storage/RedisFileStorageTests.cs b/tests/Foundatio.Redis.Tests/Storage/RedisFileStorageTests.cs index 0edc101..6754c54 100644 --- a/tests/Foundatio.Redis.Tests/Storage/RedisFileStorageTests.cs +++ b/tests/Foundatio.Redis.Tests/Storage/RedisFileStorageTests.cs @@ -5,127 +5,126 @@ using Xunit; using Xunit.Abstractions; -namespace Foundatio.Redis.Tests.Storage +namespace Foundatio.Redis.Tests.Storage; + +public class RedisFileStorageTests : FileStorageTestsBase { - public class RedisFileStorageTests : FileStorageTestsBase - { - public 
RedisFileStorageTests(ITestOutputHelper output) : base(output) - { - var muxer = SharedConnection.GetMuxer(Log); - muxer.FlushAllAsync().GetAwaiter().GetResult(); - } - - protected override IFileStorage GetStorage() - { - return new RedisFileStorage(o => o.ConnectionMultiplexer(SharedConnection.GetMuxer(Log)).LoggerFactory(Log)); - } - - [Fact] - public override Task CanGetEmptyFileListOnMissingDirectoryAsync() - { - return base.CanGetEmptyFileListOnMissingDirectoryAsync(); - } - - [Fact] - public override Task CanGetFileListForSingleFolderAsync() - { - return base.CanGetFileListForSingleFolderAsync(); - } - - [Fact] - public override Task CanGetPagedFileListForSingleFolderAsync() - { - return base.CanGetPagedFileListForSingleFolderAsync(); - } - - [Fact] - public override Task CanGetFileInfoAsync() - { - return base.CanGetFileInfoAsync(); - } - - [Fact] - public override Task CanGetNonExistentFileInfoAsync() - { - return base.CanGetNonExistentFileInfoAsync(); - } - - [Fact] - public override Task CanSaveFilesAsync() - { - return base.CanSaveFilesAsync(); - } - - [Fact] - public override Task CanManageFilesAsync() - { - return base.CanManageFilesAsync(); - } - - [Fact] - public override Task CanRenameFilesAsync() - { - return base.CanRenameFilesAsync(); - } - - [Fact] - public override Task CanConcurrentlyManageFilesAsync() - { - return base.CanConcurrentlyManageFilesAsync(); - } - - [Fact] - public override void CanUseDataDirectory() - { - base.CanUseDataDirectory(); - } - - [Fact] - public override Task CanDeleteEntireFolderAsync() - { - return base.CanDeleteEntireFolderAsync(); - } - - [Fact] - public override Task CanDeleteEntireFolderWithWildcardAsync() - { - return base.CanDeleteEntireFolderWithWildcardAsync(); - } - - [Fact] - public override Task CanDeleteFolderWithMultiFolderWildcardsAsync() - { - return base.CanDeleteFolderWithMultiFolderWildcardsAsync(); - } - - [Fact] - public override Task CanDeleteSpecificFilesAsync() - { - return base.CanDeleteSpecificFilesAsync(); - } - - [Fact] - public override Task CanDeleteNestedFolderAsync() - { - return base.CanDeleteNestedFolderAsync(); - } - - [Fact] - public override Task CanDeleteSpecificFilesInNestedFolderAsync() - { - return base.CanDeleteSpecificFilesInNestedFolderAsync(); - } - - [Fact] - public override Task CanRoundTripSeekableStreamAsync() - { - return base.CanRoundTripSeekableStreamAsync(); - } - - [Fact] - public override Task WillRespectStreamOffsetAsync() - { - return base.WillRespectStreamOffsetAsync(); - } + public RedisFileStorageTests(ITestOutputHelper output) : base(output) + { + var muxer = SharedConnection.GetMuxer(Log); + muxer.FlushAllAsync().GetAwaiter().GetResult(); + } + + protected override IFileStorage GetStorage() + { + return new RedisFileStorage(o => o.ConnectionMultiplexer(SharedConnection.GetMuxer(Log)).LoggerFactory(Log)); + } + + [Fact] + public override Task CanGetEmptyFileListOnMissingDirectoryAsync() + { + return base.CanGetEmptyFileListOnMissingDirectoryAsync(); + } + + [Fact] + public override Task CanGetFileListForSingleFolderAsync() + { + return base.CanGetFileListForSingleFolderAsync(); + } + + [Fact] + public override Task CanGetPagedFileListForSingleFolderAsync() + { + return base.CanGetPagedFileListForSingleFolderAsync(); + } + + [Fact] + public override Task CanGetFileInfoAsync() + { + return base.CanGetFileInfoAsync(); + } + + [Fact] + public override Task CanGetNonExistentFileInfoAsync() + { + return base.CanGetNonExistentFileInfoAsync(); + } + + [Fact] + public override Task 
CanSaveFilesAsync() + { + return base.CanSaveFilesAsync(); + } + + [Fact] + public override Task CanManageFilesAsync() + { + return base.CanManageFilesAsync(); + } + + [Fact] + public override Task CanRenameFilesAsync() + { + return base.CanRenameFilesAsync(); + } + + [Fact] + public override Task CanConcurrentlyManageFilesAsync() + { + return base.CanConcurrentlyManageFilesAsync(); + } + + [Fact] + public override void CanUseDataDirectory() + { + base.CanUseDataDirectory(); + } + + [Fact] + public override Task CanDeleteEntireFolderAsync() + { + return base.CanDeleteEntireFolderAsync(); + } + + [Fact] + public override Task CanDeleteEntireFolderWithWildcardAsync() + { + return base.CanDeleteEntireFolderWithWildcardAsync(); + } + + [Fact] + public override Task CanDeleteFolderWithMultiFolderWildcardsAsync() + { + return base.CanDeleteFolderWithMultiFolderWildcardsAsync(); + } + + [Fact] + public override Task CanDeleteSpecificFilesAsync() + { + return base.CanDeleteSpecificFilesAsync(); + } + + [Fact] + public override Task CanDeleteNestedFolderAsync() + { + return base.CanDeleteNestedFolderAsync(); + } + + [Fact] + public override Task CanDeleteSpecificFilesInNestedFolderAsync() + { + return base.CanDeleteSpecificFilesInNestedFolderAsync(); + } + + [Fact] + public override Task CanRoundTripSeekableStreamAsync() + { + return base.CanRoundTripSeekableStreamAsync(); + } + + [Fact] + public override Task WillRespectStreamOffsetAsync() + { + return base.WillRespectStreamOffsetAsync(); } }
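
Every test file above is converted the same way as the rest of the patch: the block-scoped namespace wrapper becomes a C# 10 file-scoped namespace declaration and the members lose one level of indentation, with no change to the types themselves. The following is a minimal sketch of the target shape only; the ExampleTests class and its namespace are hypothetical and do not appear in this repository.

using System;

// File-scoped namespace form (C# 10 and later). The declaration applies to
// every type in the file, and only one such declaration is allowed per file.
// The pre-conversion form wrapped the class in
// "namespace Foundatio.Redis.Tests.Example { ... }" with one extra indent level.
namespace Foundatio.Redis.Tests.Example;

public class ExampleTests
{
    // Same member body as before the conversion; only the namespace style
    // and the surrounding indentation change.
    public string Describe() => $"Converted at {DateTime.UtcNow:O}";
}

A file-scoped namespace cannot coexist with a block-scoped one in the same file, which is why each file in the diff is converted wholesale rather than class by class.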