From 87242314350eac3089970f21968464fab30a4201 Mon Sep 17 00:00:00 2001 From: Marc Gravell Date: Tue, 9 Sep 2025 14:44:14 +0100 Subject: [PATCH 01/27] Fix for FT.CURSOR in cluster; requires single server - update SE.Redis ref to allow new GetServer(RedisKey) usage - add utility API to capture an IServer and database if using cluster - create internal AggregationResult subclass that includes the IServer - capture server in Aggregate[Async] - create new overloads for CursorDel[Async] and CursorRead[Async] that take AggregationResult, and push consumers towards that overload - use captured server/database when appropriate - use the new API from tests - add new I[Async]Enumerable API for simplicity: AggregateEnumerable[Async] - add tests for new API - use cluster env from cursor tests --- Directory.Packages.props | 2 +- src/NRedisStack/Auxiliary.cs | 10 ++ .../PublicAPI/PublicAPI.Unshipped.txt | 6 + src/NRedisStack/ResponseParser.cs | 14 +++ src/NRedisStack/Search/AggregationRequest.cs | 2 + src/NRedisStack/Search/AggregationResult.cs | 19 ++- src/NRedisStack/Search/SearchCommands.cs | 86 ++++++++++++- src/NRedisStack/Search/SearchCommandsAsync.cs | 95 +++++++++++++- tests/NRedisStack.Tests/Search/SearchTests.cs | 118 +++++++++++++----- 9 files changed, 312 insertions(+), 40 deletions(-) diff --git a/Directory.Packages.props b/Directory.Packages.props index aa207a94..7467f144 100644 --- a/Directory.Packages.props +++ b/Directory.Packages.props @@ -12,7 +12,7 @@ - + diff --git a/src/NRedisStack/Auxiliary.cs b/src/NRedisStack/Auxiliary.cs index 33b7857b..25f456f3 100644 --- a/src/NRedisStack/Auxiliary.cs +++ b/src/NRedisStack/Auxiliary.cs @@ -67,12 +67,22 @@ public static RedisResult Execute(this IDatabase db, SerializedCommand command) return db.Execute(command.Command, command.Args); } + internal static RedisResult Execute(this IServer server, int?
db, SerializedCommand command) + { + return server.Execute(db, command.Command, command.Args); + } + public static async Task ExecuteAsync(this IDatabaseAsync db, SerializedCommand command) { ((IDatabase)db).SetInfoInPipeline(); return await db.ExecuteAsync(command.Command, command.Args); } + internal static async Task ExecuteAsync(this IServer server, int? db, SerializedCommand command) + { + return await server.ExecuteAsync(db, command.Command, command.Args); + } + public static List ExecuteBroadcast(this IDatabase db, string command) => db.ExecuteBroadcast(new SerializedCommand(command)); diff --git a/src/NRedisStack/PublicAPI/PublicAPI.Unshipped.txt b/src/NRedisStack/PublicAPI/PublicAPI.Unshipped.txt index 7dc5c581..8461e73c 100644 --- a/src/NRedisStack/PublicAPI/PublicAPI.Unshipped.txt +++ b/src/NRedisStack/PublicAPI/PublicAPI.Unshipped.txt @@ -1 +1,7 @@ #nullable enable +NRedisStack.SearchCommands.AggregateEnumerable(string! index, NRedisStack.Search.AggregationRequest! query) -> System.Collections.Generic.IEnumerable! +NRedisStack.SearchCommands.CursorDel(NRedisStack.Search.AggregationResult! result) -> bool +NRedisStack.SearchCommands.CursorRead(NRedisStack.Search.AggregationResult! result, int? count = null) -> NRedisStack.Search.AggregationResult! +NRedisStack.SearchCommandsAsync.AggregateEnumerableAsync(string! index, NRedisStack.Search.AggregationRequest! query) -> System.Collections.Generic.IAsyncEnumerable! +NRedisStack.SearchCommandsAsync.CursorDelAsync(NRedisStack.Search.AggregationResult! result) -> System.Threading.Tasks.Task! +NRedisStack.SearchCommandsAsync.CursorReadAsync(NRedisStack.Search.AggregationResult! result, int? count = null) -> System.Threading.Tasks.Task! 
diff --git a/src/NRedisStack/ResponseParser.cs b/src/NRedisStack/ResponseParser.cs index 8382771a..8687aad9 100644 --- a/src/NRedisStack/ResponseParser.cs +++ b/src/NRedisStack/ResponseParser.cs @@ -737,6 +737,20 @@ public static AggregationResult ToAggregationResult(this RedisResult result, Agg } } + internal static AggregationResult ToAggregationResult(this RedisResult result, string indexName, AggregationRequest query, IServer? server, int? database) + { + if (query.IsWithCursor()) + { + var results = (RedisResult[])result!; + + return new AggregationResult.WithCursorAggregationResult(indexName, results[0], (long)results[1], server, database); + } + else + { + return new(result); + } + } + public static Dictionary[] ToDictionarys(this RedisResult result) { var resArr = (RedisResult[])result!; diff --git a/src/NRedisStack/Search/AggregationRequest.cs b/src/NRedisStack/Search/AggregationRequest.cs index cd3bcd8a..8cac4f77 100644 --- a/src/NRedisStack/Search/AggregationRequest.cs +++ b/src/NRedisStack/Search/AggregationRequest.cs @@ -128,6 +128,7 @@ public AggregationRequest Cursor(int? count = null, long? maxIdle = null) if (count != null) { + Count = count; args.Add(SearchArgs.COUNT); args.Add(count); } @@ -139,6 +140,7 @@ public AggregationRequest Cursor(int? count = null, long? maxIdle = null) } return this; } + internal int? 
Count { get; set; } public AggregationRequest Params(Dictionary nameValue) { diff --git a/src/NRedisStack/Search/AggregationResult.cs b/src/NRedisStack/Search/AggregationResult.cs index 3eb4827d..6cb23add 100644 --- a/src/NRedisStack/Search/AggregationResult.cs +++ b/src/NRedisStack/Search/AggregationResult.cs @@ -3,15 +3,29 @@ namespace NRedisStack.Search; -public sealed class AggregationResult +public class AggregationResult { + // internal subclass for WITHCURSOR calls, which need to be issued to the same connection + internal sealed class WithCursorAggregationResult : AggregationResult + { + internal WithCursorAggregationResult(string indexName, RedisResult result, long cursorId, IServer? server, + int? database) : base(result, cursorId) + { + IndexName = indexName; + Server = server; + Database = database; + } + public string IndexName { get; } + public IServer? Server { get; } + public int? Database { get; } + } + public long TotalResults { get; } private readonly Dictionary[] _results; private Dictionary[]? _resultsAsRedisValues; public long CursorId { get; } - internal AggregationResult(RedisResult result, long cursorId = -1) { var arr = (RedisResult[])result!; @@ -45,7 +59,6 @@ internal AggregationResult(RedisResult result, long cursorId = -1) CursorId = cursorId; } - /// /// takes a Redis multi-bulk array represented by a RedisResult[] and recursively processes its elements. /// For each element in the array, it checks if it's another multi-bulk array, and if so, it recursively calls itself. 
diff --git a/src/NRedisStack/Search/SearchCommands.cs b/src/NRedisStack/Search/SearchCommands.cs index 381cb13b..fc9c7413 100644 --- a/src/NRedisStack/Search/SearchCommands.cs +++ b/src/NRedisStack/Search/SearchCommands.cs @@ -1,4 +1,6 @@ +using System.ComponentModel; using NRedisStack.Search; +using NRedisStack.Search.Aggregation; using NRedisStack.Search.DataTypes; using StackExchange.Redis; namespace NRedisStack; @@ -16,8 +18,54 @@ public RedisResult[] _List() public AggregationResult Aggregate(string index, AggregationRequest query) { SetDefaultDialectIfUnset(query); - var result = db.Execute(SearchCommandBuilder.Aggregate(index, query)); - return result.ToAggregationResult(query); + IServer? server = null; + int? database = null; + + var command = SearchCommandBuilder.Aggregate(index, query); + if (query.IsWithCursor()) + { + // we can issue this anywhere, but follow-up calls need to be on the same server + server = GetRandomServerForCluster(db, out database); + } + + RedisResult result; + if (server is not null) + { + result = server.Execute(database, command); + } + else + { + result = db.Execute(command); + } + + return result.ToAggregationResult(index, query, server, database); + } + + public IEnumerable AggregateEnumerable(string index, AggregationRequest query) + { + if (!query.IsWithCursor()) query.Cursor(); + + var result = Aggregate(index, query); + try + { + while (true) + { + var count = checked((int)result.TotalResults); + for (int i = 0; i < count; i++) + { + yield return result.GetRow(i); + } + if (result.CursorId == 0) break; + result = CursorRead(result, query.Count); + } + } + finally + { + if (result.CursorId != 0) + { + CursorDel(result); + } + } } /// @@ -72,17 +120,51 @@ public bool Create(string indexName, Schema schema) } /// + [Obsolete("When possible, use CursorDelAsync(AggregationResult, int?) 
instead.")] + [Browsable(false), EditorBrowsable(EditorBrowsableState.Never)] public bool CursorDel(string indexName, long cursorId) { return db.Execute(SearchCommandBuilder.CursorDel(indexName, cursorId)).OKtoBoolean(); } + public bool CursorDel(AggregationResult result) + { + if (result is not AggregationResult.WithCursorAggregationResult withCursor) + { + throw new ArgumentException( + message: $"{nameof(CursorDelAsync)} must be called with a value returned from a previous call to {nameof(AggregateAsync)} with a cursor.", + paramName: nameof(result)); + } + + var command = SearchCommandBuilder.CursorDel(withCursor.IndexName, withCursor.CursorId); + var resp = withCursor.Server is { } server + ? server.Execute(withCursor.Database, command) + : db.Execute(command); + return resp.OKtoBoolean(); + } + /// + [Obsolete("When possible, use CursorReadAsync(AggregationResult, int?) instead.")] + [Browsable(false), EditorBrowsable(EditorBrowsableState.Never)] public AggregationResult CursorRead(string indexName, long cursorId, int? count = null) { var resp = db.Execute(SearchCommandBuilder.CursorRead(indexName, cursorId, count)).ToArray(); return new(resp[0], (long)resp[1]); } + + public AggregationResult CursorRead(AggregationResult result, int? count = null) + { + if (result is not AggregationResult.WithCursorAggregationResult withCursor) + { + throw new ArgumentException(message: $"{nameof(CursorReadAsync)} must be called with a value returned from a previous call to {nameof(AggregateAsync)} with a cursor.", paramName: nameof(result)); + } + var command = SearchCommandBuilder.CursorRead(withCursor.IndexName, withCursor.CursorId, count); + var rawResult = withCursor.Server is { } server + ? 
server.Execute(withCursor.Database, command) + : db.Execute(command); + var resp = rawResult.ToArray(); + return new AggregationResult.WithCursorAggregationResult(withCursor.IndexName, resp[0], (long)resp[1], withCursor.Server, withCursor.Database); + } /// public long DictAdd(string dict, params string[] terms) diff --git a/src/NRedisStack/Search/SearchCommandsAsync.cs b/src/NRedisStack/Search/SearchCommandsAsync.cs index f4757af3..d75dc337 100644 --- a/src/NRedisStack/Search/SearchCommandsAsync.cs +++ b/src/NRedisStack/Search/SearchCommandsAsync.cs @@ -1,4 +1,6 @@ +using System.ComponentModel; using NRedisStack.Search; +using NRedisStack.Search.Aggregation; using NRedisStack.Search.DataTypes; using StackExchange.Redis; namespace NRedisStack; @@ -40,20 +42,71 @@ public async Task _ListAsync() return (await _db.ExecuteAsync(SearchCommandBuilder._List())).ToArray(); } + internal static IServer? GetRandomServerForCluster(IDatabaseAsync db, out int? database) + { + var server = db.Multiplexer.GetServer(key: default(RedisKey)); + // ReSharper disable once ConditionIsAlwaysTrueOrFalseAccordingToNullableAPIContract + if (server is null || server.ServerType != ServerType.Cluster) + { + database = null; + return null; + } + // This is vexingly misplaced, but: it doesn't actually matter for cluster + database = db is IDatabase nonAsync ? nonAsync.Database : null; + return server; + } + /// public async Task AggregateAsync(string index, AggregationRequest query) { SetDefaultDialectIfUnset(query); - var result = await _db.ExecuteAsync(SearchCommandBuilder.Aggregate(index, query)); + IServer? server = null; + int? 
database = null; + + var command = SearchCommandBuilder.Aggregate(index, query); if (query.IsWithCursor()) { - var results = (RedisResult[])result!; + // we can issue this anywhere, but follow-up calls need to be on the same server + server = GetRandomServerForCluster(_db, out database); + } - return new(results[0], (long)results[1]); + RedisResult result; + if (server is not null) + { + result = await server.ExecuteAsync(database, command); } else { - return new(result); + result = await _db.ExecuteAsync(command); + } + + return result.ToAggregationResult(index, query, server, database); + } + + public async IAsyncEnumerable AggregateEnumerableAsync(string index, AggregationRequest query) + { + if (!query.IsWithCursor()) query.Cursor(); + + var result = await AggregateAsync(index, query); + try + { + while (true) + { + var count = checked((int)result.TotalResults); + for (int i = 0; i < count; i++) + { + yield return result.GetRow(i); + } + if (result.CursorId == 0) break; + result = await CursorReadAsync(result, query.Count); + } + } + finally + { + if (result.CursorId != 0) + { + await CursorDelAsync(result); + } } } @@ -108,18 +161,52 @@ public async Task CreateAsync(string indexName, Schema schema) } /// + [Obsolete("When possible, use CursorDelAsync(AggregationResult, int?) 
instead.")] + [Browsable(false), EditorBrowsable(EditorBrowsableState.Never)] public async Task CursorDelAsync(string indexName, long cursorId) { return (await _db.ExecuteAsync(SearchCommandBuilder.CursorDel(indexName, cursorId))).OKtoBoolean(); } + public async Task CursorDelAsync(AggregationResult result) + { + if (result is not AggregationResult.WithCursorAggregationResult withCursor) + { + throw new ArgumentException( + message: $"{nameof(CursorDelAsync)} must be called with a value returned from a previous call to {nameof(AggregateAsync)} with a cursor.", + paramName: nameof(result)); + } + + var command = SearchCommandBuilder.CursorDel(withCursor.IndexName, withCursor.CursorId); + var pending = withCursor.Server is { } server + ? server.ExecuteAsync(withCursor.Database, command) + : _db.ExecuteAsync(command); + return (await pending).OKtoBoolean(); + } + /// + [Obsolete("When possible, use CursorReadAsync(AggregationResult, int?) instead.")] + [Browsable(false), EditorBrowsable(EditorBrowsableState.Never)] public async Task CursorReadAsync(string indexName, long cursorId, int? count = null) { var resp = (await _db.ExecuteAsync(SearchCommandBuilder.CursorRead(indexName, cursorId, count))).ToArray(); return new(resp[0], (long)resp[1]); } + public async Task CursorReadAsync(AggregationResult result, int? count = null) + { + if (result is not AggregationResult.WithCursorAggregationResult withCursor) + { + throw new ArgumentException(message: $"{nameof(CursorReadAsync)} must be called with a value returned from a previous call to {nameof(AggregateAsync)} with a cursor.", paramName: nameof(result)); + } + var command = SearchCommandBuilder.CursorRead(withCursor.IndexName, withCursor.CursorId, count); + var pending = withCursor.Server is { } server + ? 
server.ExecuteAsync(withCursor.Database, command) + : _db.ExecuteAsync(command); + var resp = (await pending).ToArray(); + return new AggregationResult.WithCursorAggregationResult(withCursor.IndexName, resp[0], (long)resp[1], withCursor.Server, withCursor.Database); + } + /// public async Task DictAddAsync(string dict, params string[] terms) { diff --git a/tests/NRedisStack.Tests/Search/SearchTests.cs b/tests/NRedisStack.Tests/Search/SearchTests.cs index 119faeff..286a6c8b 100644 --- a/tests/NRedisStack.Tests/Search/SearchTests.cs +++ b/tests/NRedisStack.Tests/Search/SearchTests.cs @@ -1095,7 +1095,7 @@ public async Task TestDialectConfigAsync(string endpointId) } [SkippableTheory] - [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))] + [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public async Task TestCursor(string endpointId) { IDatabase db = GetCleanDatabase(endpointId); @@ -1125,21 +1125,17 @@ public async Task TestCursor(string endpointId) Assert.Equal(0.0, row.Value.GetDouble("nosuchcol")); Assert.Null(row.Value.GetString("nosuchcol")); - res = ft.CursorRead(index, res.CursorId, 1); + res = ft.CursorRead(res, 1); Row? 
row2 = res.GetRow(0); Assert.NotNull(row2); Assert.Equal("abc", row2.Value.GetString("name")); Assert.Equal(10, row2.Value.GetLong("sum")); - Assert.True(ft.CursorDel(index, res.CursorId)); + Assert.True(ft.CursorDel(res)); - try - { - ft.CursorRead(index, res.CursorId, 1); - Assert.True(false); - } - catch (RedisException) { } + var ex = Assert.Throws(() => ft.CursorRead(res, 1)); + Assert.Contains("Cursor not found", ex.Message, StringComparison.OrdinalIgnoreCase); _ = new AggregationRequest() .GroupBy("@name", Reducers.Sum("@count").As("sum")) @@ -1148,16 +1144,49 @@ public async Task TestCursor(string endpointId) await Task.Delay(1000).ConfigureAwait(false); - try - { - ft.CursorRead(index, res.CursorId, 1); - Assert.True(false); - } - catch (RedisException) { } + ex = Assert.Throws(() => ft.CursorRead(res, 1)); + Assert.Contains("Cursor not found", ex.Message, StringComparison.OrdinalIgnoreCase); } [SkippableTheory] - [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))] + [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] + public void TestCursorEnumerable(string endpointId) + { + IDatabase db = GetCleanDatabase(endpointId); + var ft = db.FT(); + Schema sc = new(); + sc.AddTextField("name", 1.0, sortable: true); + sc.AddNumericField("count", sortable: true); + ft.Create(index, FTCreateParams.CreateParams(), sc); + AddDocument(db, new Document("data1").Set("name", "abc").Set("count", 10)); + AddDocument(db, new Document("data2").Set("name", "def").Set("count", 5)); + AddDocument(db, new Document("data3").Set("name", "def").Set("count", 25)); + + AggregationRequest r = new AggregationRequest() + .GroupBy("@name", Reducers.Sum("@count").As("sum")) + .SortBy(10, SortedField.Desc("@sum")) + .Cursor(1, 3000); + + // actual search + using var iter = ft.AggregateEnumerable(index, r).GetEnumerator(); + Assert.True(iter.MoveNext()); + var row = iter.Current; + 
Assert.Equal("def", row.GetString("name")); + Assert.Equal(30, row.GetLong("sum")); + Assert.Equal(30.0, row.GetDouble("sum")); + + Assert.Equal(0L, row.GetLong("nosuchcol")); + Assert.Equal(0.0, row.GetDouble("nosuchcol")); + Assert.Null(row.GetString("nosuchcol")); + + Assert.True(iter.MoveNext()); + row = iter.Current; + Assert.Equal("abc", row.GetString("name")); + Assert.Equal(10, row.GetLong("sum")); + } + + [SkippableTheory] + [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public async Task TestCursorAsync(string endpointId) { IDatabase db = GetCleanDatabase(endpointId); @@ -1187,21 +1216,17 @@ public async Task TestCursorAsync(string endpointId) Assert.Equal(0.0, row.Value.GetDouble("nosuchcol")); Assert.Null(row.Value.GetString("nosuchcol")); - res = await ft.CursorReadAsync(index, res.CursorId, 1); + res = await ft.CursorReadAsync(res, 1); Row? row2 = res.GetRow(0); Assert.NotNull(row2); Assert.Equal("abc", row2.Value.GetString("name")); Assert.Equal(10, row2.Value.GetLong("sum")); - Assert.True(await ft.CursorDelAsync(index, res.CursorId)); + Assert.True(await ft.CursorDelAsync(res)); - try - { - await ft.CursorReadAsync(index, res.CursorId, 1); - Assert.True(false); - } - catch (RedisException) { } + var ex = await Assert.ThrowsAsync(async () => await ft.CursorReadAsync(res, 1)); + Assert.Contains("Cursor not found", ex.Message, StringComparison.OrdinalIgnoreCase); _ = new AggregationRequest() .GroupBy("@name", Reducers.Sum("@count").As("sum")) @@ -1210,12 +1235,45 @@ public async Task TestCursorAsync(string endpointId) await Task.Delay(1000).ConfigureAwait(false); - try - { - await ft.CursorReadAsync(index, res.CursorId, 1); - Assert.True(false); - } - catch (RedisException) { } + ex = await Assert.ThrowsAsync(async () => await ft.CursorReadAsync(res, 1)); + Assert.Contains("Cursor not found", ex.Message, StringComparison.OrdinalIgnoreCase); + } + + [SkippableTheory] + 
[MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] + public async Task TestCursorEnumerableAsync(string endpointId) + { + IDatabase db = GetCleanDatabase(endpointId); + var ft = db.FT(); + Schema sc = new(); + sc.AddTextField("name", 1.0, sortable: true); + sc.AddNumericField("count", sortable: true); + ft.Create(index, FTCreateParams.CreateParams(), sc); + AddDocument(db, new Document("data1").Set("name", "abc").Set("count", 10)); + AddDocument(db, new Document("data2").Set("name", "def").Set("count", 5)); + AddDocument(db, new Document("data3").Set("name", "def").Set("count", 25)); + + AggregationRequest r = new AggregationRequest() + .GroupBy("@name", Reducers.Sum("@count").As("sum")) + .SortBy(10, SortedField.Desc("@sum")) + .Cursor(1, 3000); + + // actual search + await using var iter = ft.AggregateEnumerableAsync(index, r).GetAsyncEnumerator(); + Assert.True(await iter.MoveNextAsync()); + var row = iter.Current; + Assert.Equal("def", row.GetString("name")); + Assert.Equal(30, row.GetLong("sum")); + Assert.Equal(30.0, row.GetDouble("sum")); + + Assert.Equal(0L, row.GetLong("nosuchcol")); + Assert.Equal(0.0, row.GetDouble("nosuchcol")); + Assert.Null(row.GetString("nosuchcol")); + + Assert.True(await iter.MoveNextAsync()); + row = iter.Current; + Assert.Equal("abc", row.GetString("name")); + Assert.Equal(10, row.GetLong("sum")); } [SkipIfRedisTheory(Is.Enterprise)] From 3986900bea0b1a75bb8fd94121f0bbfb339278a7 Mon Sep 17 00:00:00 2001 From: Marc Gravell Date: Tue, 9 Sep 2025 15:06:43 +0100 Subject: [PATCH 02/27] update interfaces --- .../PublicAPI/PublicAPI.Unshipped.txt | 8 +++- src/NRedisStack/Search/ISearchCommands.cs | 38 +++++++++++++++++-- .../Search/ISearchCommandsAsync.cs | 32 ++++++++++++++++ src/NRedisStack/Search/SearchCommands.cs | 4 +- src/NRedisStack/Search/SearchCommandsAsync.cs | 4 +- tests/NRedisStack.Tests/Search/SearchTests.cs | 4 +- 6 files changed, 80 insertions(+), 10 deletions(-) diff 
--git a/src/NRedisStack/PublicAPI/PublicAPI.Unshipped.txt b/src/NRedisStack/PublicAPI/PublicAPI.Unshipped.txt index 8461e73c..00dcb3fb 100644 --- a/src/NRedisStack/PublicAPI/PublicAPI.Unshipped.txt +++ b/src/NRedisStack/PublicAPI/PublicAPI.Unshipped.txt @@ -1,7 +1,13 @@ #nullable enable +NRedisStack.ISearchCommands.AggregateEnumerable(string! index, NRedisStack.Search.AggregationRequest! query) -> System.Collections.Generic.IEnumerable! +NRedisStack.ISearchCommands.CursorDel(NRedisStack.Search.AggregationResult! result) -> bool +NRedisStack.ISearchCommands.CursorRead(NRedisStack.Search.AggregationResult! result, int? count = null) -> NRedisStack.Search.AggregationResult! +NRedisStack.ISearchCommandsAsync.AggregateAsyncEnumerable(string! index, NRedisStack.Search.AggregationRequest! query) -> System.Collections.Generic.IAsyncEnumerable! +NRedisStack.ISearchCommandsAsync.CursorDelAsync(NRedisStack.Search.AggregationResult! result) -> System.Threading.Tasks.Task! +NRedisStack.ISearchCommandsAsync.CursorReadAsync(NRedisStack.Search.AggregationResult! result, int? count = null) -> System.Threading.Tasks.Task! NRedisStack.SearchCommands.AggregateEnumerable(string! index, NRedisStack.Search.AggregationRequest! query) -> System.Collections.Generic.IEnumerable! NRedisStack.SearchCommands.CursorDel(NRedisStack.Search.AggregationResult! result) -> bool NRedisStack.SearchCommands.CursorRead(NRedisStack.Search.AggregationResult! result, int? count = null) -> NRedisStack.Search.AggregationResult! -NRedisStack.SearchCommandsAsync.AggregateEnumerableAsync(string! index, NRedisStack.Search.AggregationRequest! query) -> System.Collections.Generic.IAsyncEnumerable! +NRedisStack.SearchCommandsAsync.AggregateAsyncEnumerable(string! index, NRedisStack.Search.AggregationRequest! query) -> System.Collections.Generic.IAsyncEnumerable! NRedisStack.SearchCommandsAsync.CursorDelAsync(NRedisStack.Search.AggregationResult! result) -> System.Threading.Tasks.Task! 
NRedisStack.SearchCommandsAsync.CursorReadAsync(NRedisStack.Search.AggregationResult! result, int? count = null) -> System.Threading.Tasks.Task! diff --git a/src/NRedisStack/Search/ISearchCommands.cs b/src/NRedisStack/Search/ISearchCommands.cs index 86408bfe..98d695ab 100644 --- a/src/NRedisStack/Search/ISearchCommands.cs +++ b/src/NRedisStack/Search/ISearchCommands.cs @@ -1,4 +1,6 @@ +using System.ComponentModel; using NRedisStack.Search; +using NRedisStack.Search.Aggregation; using NRedisStack.Search.DataTypes; using StackExchange.Redis; @@ -18,11 +20,20 @@ public interface ISearchCommands /// Run a search query on an index, and perform aggregate transformations on the results. /// /// The index name. - /// The query + /// The query. /// An object /// AggregationResult Aggregate(string index, AggregationRequest query); + /// + /// Run a search query on an index, and perform aggregate transformations on the results. + /// + /// The index name. + /// The query. + /// A sequence of values. + /// + IEnumerable AggregateEnumerable(string index, AggregationRequest query); + /// /// Add an alias to an index. /// @@ -92,22 +103,43 @@ public interface ISearchCommands /// /// Delete a cursor from the index. /// - /// The index name + /// The index name. /// The cursor's ID. /// if it has been deleted, if it did not exist. /// + [Obsolete("When possible, use CursorDel(AggregationResult) instead.")] + [Browsable(false), EditorBrowsable(EditorBrowsableState.Never)] bool CursorDel(string indexName, long cursorId); + + /// + /// Delete a cursor from the index. + /// + /// The result of a previous call to Aggregate or CursorRead. + /// if it has been deleted, if it did not exist. + /// + bool CursorDel(AggregationResult result); /// /// Read next results from an existing cursor. /// - /// The index name + /// The index name. /// The cursor's ID. /// Limit the amount of returned results. 
/// A AggregationResult object with the results /// + [Obsolete("When possible, use AggregateEnumerable or CursorRead(AggregationResult, int?) instead.")] + [Browsable(false), EditorBrowsable(EditorBrowsableState.Never)] AggregationResult CursorRead(string indexName, long cursorId, int? count = null); + /// + /// Read next results from an existing cursor. + /// + /// The result of a previous call to Aggregate or CursorRead. + /// Limit the amount of returned results. + /// A AggregationResult object with the results + /// + public AggregationResult CursorRead(AggregationResult result, int? count = null); + /// /// Add terms to a dictionary. /// diff --git a/src/NRedisStack/Search/ISearchCommandsAsync.cs b/src/NRedisStack/Search/ISearchCommandsAsync.cs index f9088dfc..345b494f 100644 --- a/src/NRedisStack/Search/ISearchCommandsAsync.cs +++ b/src/NRedisStack/Search/ISearchCommandsAsync.cs @@ -1,4 +1,6 @@ +using System.ComponentModel; using NRedisStack.Search; +using NRedisStack.Search.Aggregation; using NRedisStack.Search.DataTypes; using StackExchange.Redis; @@ -22,6 +24,15 @@ public interface ISearchCommandsAsync /// Task AggregateAsync(string index, AggregationRequest query); + /// + /// Run a search query on an index, and perform aggregate transformations on the results. + /// + /// The index name. + /// The query. + /// A sequence of values. + /// + IAsyncEnumerable AggregateAsyncEnumerable(string index, AggregationRequest query); + /// /// Add an alias to an index. /// @@ -95,8 +106,18 @@ public interface ISearchCommandsAsync /// The cursor's ID. /// if it has been deleted, if it did not exist. /// + [Obsolete("When possible, use CursorDelAsync(AggregationResult, int?) instead.")] + [Browsable(false), EditorBrowsable(EditorBrowsableState.Never)] Task CursorDelAsync(string indexName, long cursorId); + /// + /// Delete a cursor from the index. + /// + /// The result of a previous call to AggregateAsync or CursorReadAsync. 
+ /// if it has been deleted, if it did not exist. /// + Task CursorDelAsync(AggregationResult result); + /// /// Read next results from an existing cursor. /// @@ -105,8 +126,19 @@ public interface ISearchCommandsAsync /// Limit the amount of returned results. /// A AggregationResult object with the results /// + [Obsolete("When possible, use AggregateAsyncEnumerable or CursorReadAsync(AggregationResult, int?) instead.")] + [Browsable(false), EditorBrowsable(EditorBrowsableState.Never)] Task CursorReadAsync(string indexName, long cursorId, int? count = null); + /// + /// Read next results from an existing cursor. + /// + /// The result of a previous AggregateAsync or CursorReadAsync call. + /// Limit the amount of returned results. + /// A AggregationResult object with the results + /// + Task CursorReadAsync(AggregationResult result, int? count = null); + /// /// Add terms to a dictionary. /// diff --git a/src/NRedisStack/Search/SearchCommands.cs b/src/NRedisStack/Search/SearchCommands.cs index fc9c7413..ee08c12d 100644 --- a/src/NRedisStack/Search/SearchCommands.cs +++ b/src/NRedisStack/Search/SearchCommands.cs @@ -120,7 +120,7 @@ public bool Create(string indexName, Schema schema) } /// - [Obsolete("When possible, use CursorDelAsync(AggregationResult, int?) instead.")] + [Obsolete("When possible, use CursorDel(AggregationResult) instead.")] [Browsable(false), EditorBrowsable(EditorBrowsableState.Never)] public bool CursorDel(string indexName, long cursorId) { @@ -144,7 +144,7 @@ public bool CursorDel(AggregationResult result) } /// - [Obsolete("When possible, use CursorReadAsync(AggregationResult, int?) instead.")] + [Obsolete("When possible, use AggregateEnumerable or CursorRead(AggregationResult, int?) instead.")] [Browsable(false), EditorBrowsable(EditorBrowsableState.Never)] public AggregationResult CursorRead(string indexName, long cursorId, int?
count = null) { diff --git a/src/NRedisStack/Search/SearchCommandsAsync.cs b/src/NRedisStack/Search/SearchCommandsAsync.cs index d75dc337..1f471182 100644 --- a/src/NRedisStack/Search/SearchCommandsAsync.cs +++ b/src/NRedisStack/Search/SearchCommandsAsync.cs @@ -83,7 +83,7 @@ public async Task AggregateAsync(string index, AggregationReq return result.ToAggregationResult(index, query, server, database); } - public async IAsyncEnumerable AggregateEnumerableAsync(string index, AggregationRequest query) + public async IAsyncEnumerable AggregateAsyncEnumerable(string index, AggregationRequest query) { if (!query.IsWithCursor()) query.Cursor(); @@ -185,7 +185,7 @@ public async Task CursorDelAsync(AggregationResult result) } /// - [Obsolete("When possible, use CursorReadAsync(AggregationResult, int?) instead.")] + [Obsolete("When possible, use AggregateAsyncEnumerable or CursorReadAsync(AggregationResult, int?) instead.")] [Browsable(false), EditorBrowsable(EditorBrowsableState.Never)] public async Task CursorReadAsync(string indexName, long cursorId, int? count = null) { diff --git a/tests/NRedisStack.Tests/Search/SearchTests.cs b/tests/NRedisStack.Tests/Search/SearchTests.cs index 286a6c8b..7d653aa9 100644 --- a/tests/NRedisStack.Tests/Search/SearchTests.cs +++ b/tests/NRedisStack.Tests/Search/SearchTests.cs @@ -1205,7 +1205,7 @@ public async Task TestCursorAsync(string endpointId) .Cursor(1, 3000); // actual search - AggregationResult res = ft.Aggregate(index, r); + AggregationResult res = await ft.AggregateAsync(index, r); Row? 
row = res.GetRow(0); Assert.NotNull(row); Assert.Equal("def", row.Value.GetString("name")); @@ -1259,7 +1259,7 @@ public async Task TestCursorEnumerableAsync(string endpointId) .Cursor(1, 3000); // actual search - await using var iter = ft.AggregateEnumerableAsync(index, r).GetAsyncEnumerator(); + await using var iter = ft.AggregateAsyncEnumerable(index, r).GetAsyncEnumerator(); Assert.True(await iter.MoveNextAsync()); var row = iter.Current; Assert.Equal("def", row.GetString("name")); From c9be7e5cec7d8f35504f04a2136ef87f5dcf8aaf Mon Sep 17 00:00:00 2001 From: Marc Gravell Date: Tue, 9 Sep 2025 15:21:09 +0100 Subject: [PATCH 03/27] dotnet format --- src/NRedisStack/Search/ISearchCommands.cs | 2 +- src/NRedisStack/Search/SearchCommands.cs | 4 ++-- tests/NRedisStack.Tests/Search/SearchTests.cs | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/src/NRedisStack/Search/ISearchCommands.cs b/src/NRedisStack/Search/ISearchCommands.cs index 98d695ab..df132424 100644 --- a/src/NRedisStack/Search/ISearchCommands.cs +++ b/src/NRedisStack/Search/ISearchCommands.cs @@ -110,7 +110,7 @@ public interface ISearchCommands [Obsolete("When possible, use CursorDel(AggregationResult) instead.")] [Browsable(false), EditorBrowsable(EditorBrowsableState.Never)] bool CursorDel(string indexName, long cursorId); - + /// /// Delete a cursor from the index. /// diff --git a/src/NRedisStack/Search/SearchCommands.cs b/src/NRedisStack/Search/SearchCommands.cs index ee08c12d..64a8e416 100644 --- a/src/NRedisStack/Search/SearchCommands.cs +++ b/src/NRedisStack/Search/SearchCommands.cs @@ -27,7 +27,7 @@ public AggregationResult Aggregate(string index, AggregationRequest query) // we can issue this anywhere, but follow-up calls need to be on the same server server = GetRandomServerForCluster(db, out database); } - + RedisResult result; if (server is not null) { @@ -151,7 +151,7 @@ public AggregationResult CursorRead(string indexName, long cursorId, int? 
count var resp = db.Execute(SearchCommandBuilder.CursorRead(indexName, cursorId, count)).ToArray(); return new(resp[0], (long)resp[1]); } - + public AggregationResult CursorRead(AggregationResult result, int? count = null) { if (result is not AggregationResult.WithCursorAggregationResult withCursor) diff --git a/tests/NRedisStack.Tests/Search/SearchTests.cs b/tests/NRedisStack.Tests/Search/SearchTests.cs index 7d653aa9..19adcd06 100644 --- a/tests/NRedisStack.Tests/Search/SearchTests.cs +++ b/tests/NRedisStack.Tests/Search/SearchTests.cs @@ -1238,7 +1238,7 @@ public async Task TestCursorAsync(string endpointId) ex = await Assert.ThrowsAsync(async () => await ft.CursorReadAsync(res, 1)); Assert.Contains("Cursor not found", ex.Message, StringComparison.OrdinalIgnoreCase); } - + [SkippableTheory] [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public async Task TestCursorEnumerableAsync(string endpointId) From c6ecb13fb77cf890aa802a24d7a586ad280c272a Mon Sep 17 00:00:00 2001 From: Marc Gravell Date: Wed, 10 Sep 2025 12:26:19 +0100 Subject: [PATCH 04/27] use correct routing in AddDocument --- tests/NRedisStack.Tests/Search/SearchTests.cs | 57 +++++++++---------- 1 file changed, 26 insertions(+), 31 deletions(-) diff --git a/tests/NRedisStack.Tests/Search/SearchTests.cs b/tests/NRedisStack.Tests/Search/SearchTests.cs index 19adcd06..c4011e68 100644 --- a/tests/NRedisStack.Tests/Search/SearchTests.cs +++ b/tests/NRedisStack.Tests/Search/SearchTests.cs @@ -19,41 +19,27 @@ public class SearchTests(EndpointsFixture endpointsFixture) : AbstractNRedisStac private void AddDocument(IDatabase db, Document doc) { - string key = doc.Id; - var properties = doc.GetProperties(); - // HashEntry[] hash = new HashEntry[properties.Count()]; - // for(int i = 0; i < properties.Count(); i++) - // { - // var property = properties.ElementAt(i); - // hash[i] = new HashEntry(property.Key, property.Value); - // } - // db.HashSet(key, hash); - 
var nameValue = new List() { key }; - foreach (var item in properties) - { - nameValue.Add(item.Key); - nameValue.Add(item.Value); - } - db.Execute("HSET", nameValue); + var hash = doc.GetProperties() + .Select(pair => new HashEntry(pair.Key, pair.Value)) + .ToArray(); + db.HashSet(doc.Id, hash); } private void AddDocument(IDatabase db, string key, Dictionary objDictionary) { Dictionary strDictionary = new(); - // HashEntry[] hash = new HashEntry[objDictionary.Count()]; - // for(int i = 0; i < objDictionary.Count(); i++) - // { - // var property = objDictionary.ElementAt(i); - // hash[i] = new HashEntry(property.Key, property.Value.ToString()); - // } - // db.HashSet(key, hash); - var nameValue = new List() { key }; - foreach (var item in objDictionary) - { - nameValue.Add(item.Key); - nameValue.Add(item.Value); - } - db.Execute("HSET", nameValue); + var hash = objDictionary + .Select(pair => new HashEntry(pair.Key, pair.Value switch + { + string s => (RedisValue)s, + byte[] b => b, + int i => i, + long l => l, + double d => d, + _ => throw new ArgumentException($"Unsupported type: {pair.Value.GetType()}"), + })) + .ToArray(); + db.HashSet(key, hash); } [SkipIfRedisTheory(Is.Enterprise)] @@ -1444,7 +1430,16 @@ public void TestDropIndex(string endpointId) { Assert.Contains("no such index", ex.Message, StringComparison.OrdinalIgnoreCase); } - Assert.Equal("100", db.Execute("DBSIZE").ToString()); + + var count = 0L; + foreach (var server in db.Multiplexer.GetServers()) + { + if (!server.IsReplica) + { + count += server.DatabaseSize(); + } + } + Assert.Equal(100, count); } [SkippableTheory] From 30f99a4a4b12a1906d11fa9283f2aa971320e1d0 Mon Sep 17 00:00:00 2001 From: Marc Gravell Date: Wed, 10 Sep 2025 12:28:56 +0100 Subject: [PATCH 05/27] .gitignore - docker containers --- .gitignore | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/.gitignore b/.gitignore index 2476cd6b..60d644b6 100644 --- a/.gitignore +++ b/.gitignore @@ -410,3 +410,8 @@ 
tests/NRedisStack.Tests/redis_credentials/redis_user.crt # global.json global.json tests/NRedisStack.Tests/lcov.net8.0.info + +# docker containers +tests/dockers/cluster/ +tests/dockers/standalone/ +tests/dockers/all/ From db6d6d70eb549425e7dbc6958a239025f19a16b1 Mon Sep 17 00:00:00 2001 From: Marc Gravell Date: Wed, 10 Sep 2025 13:02:29 +0100 Subject: [PATCH 06/27] dotnet format --- tests/NRedisStack.Tests/Search/SearchTests.cs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/NRedisStack.Tests/Search/SearchTests.cs b/tests/NRedisStack.Tests/Search/SearchTests.cs index c4011e68..f70474bf 100644 --- a/tests/NRedisStack.Tests/Search/SearchTests.cs +++ b/tests/NRedisStack.Tests/Search/SearchTests.cs @@ -39,7 +39,7 @@ private void AddDocument(IDatabase db, string key, Dictionary ob _ => throw new ArgumentException($"Unsupported type: {pair.Value.GetType()}"), })) .ToArray(); - db.HashSet(key, hash); + db.HashSet(key, hash); } [SkipIfRedisTheory(Is.Enterprise)] From de30e25744eca42d06fd8828302cc0151d6e7c7e Mon Sep 17 00:00:00 2001 From: Marc Gravell Date: Wed, 10 Sep 2025 14:33:20 +0100 Subject: [PATCH 07/27] - enable all-environments over almost all FT tests - workaround DBSIZE usage - use IP in endpoints.json to prevent double-counting of servers - compensate for NumDocs oddity on cluster, and don't test detailed numbers (which vary by shard) --- tests/NRedisStack.Tests/Search/SearchTests.cs | 432 +++++++++++------- tests/dockers/endpoints.json | 14 +- 2 files changed, 273 insertions(+), 173 deletions(-) diff --git a/tests/NRedisStack.Tests/Search/SearchTests.cs b/tests/NRedisStack.Tests/Search/SearchTests.cs index f70474bf..cfe4db3d 100644 --- a/tests/NRedisStack.Tests/Search/SearchTests.cs +++ b/tests/NRedisStack.Tests/Search/SearchTests.cs @@ -9,10 +9,12 @@ using System.Runtime.InteropServices; using NetTopologySuite.IO; using NetTopologySuite.Geometries; +using Xunit.Abstractions; namespace NRedisStack.Tests.Search; -public class 
SearchTests(EndpointsFixture endpointsFixture) : AbstractNRedisStackTest(endpointsFixture), IDisposable +public class SearchTests(EndpointsFixture endpointsFixture, ITestOutputHelper log) + : AbstractNRedisStackTest(endpointsFixture, log), IDisposable { // private readonly string key = "SEARCH_TESTS"; private readonly string index = "TEST_INDEX"; @@ -89,7 +91,7 @@ public async Task TestAggregationRequestVerbatimAsync(string endpointId) } [SkippableTheory] - [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))] + [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public void TestAggregationRequestTimeout(string endpointId) { IDatabase db = GetCleanDatabase(endpointId); @@ -111,7 +113,7 @@ public void TestAggregationRequestTimeout(string endpointId) } [SkippableTheory] - [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))] + [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public async Task TestAggregationRequestTimeoutAsync(string endpointId) { IDatabase db = GetCleanDatabase(endpointId); @@ -133,7 +135,7 @@ public async Task TestAggregationRequestTimeoutAsync(string endpointId) } [SkippableTheory] - [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))] + [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public void TestAggregations(string endpointId) { IDatabase db = GetCleanDatabase(endpointId); @@ -172,7 +174,7 @@ public void TestAggregations(string endpointId) } [SkippableTheory] - [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))] + [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public async Task TestAggregationsAsync(string endpointId) { IDatabase db = 
GetCleanDatabase(endpointId); @@ -212,7 +214,7 @@ public async Task TestAggregationsAsync(string endpointId) [SkippableTheory] - [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))] + [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public void TestAggregationsLoad(string endpointId) { IDatabase db = GetCleanDatabase(endpointId); @@ -240,7 +242,7 @@ public void TestAggregationsLoad(string endpointId) } [SkippableTheory] - [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))] + [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public async Task TestAggregationsLoadAsync(string endpointId) { IDatabase db = GetCleanDatabase(endpointId); @@ -270,7 +272,7 @@ public async Task TestAggregationsLoadAsync(string endpointId) [SkippableTheory] - [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))] + [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public void TestAggregationRequestParamsDialect(string endpointId) { IDatabase db = GetCleanDatabase(endpointId); @@ -301,7 +303,7 @@ public void TestAggregationRequestParamsDialect(string endpointId) } [SkippableTheory] - [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))] + [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public async Task TestAggregationRequestParamsDialectAsync(string endpointId) { IDatabase db = GetCleanDatabase(endpointId); @@ -333,7 +335,7 @@ public async Task TestAggregationRequestParamsDialectAsync(string endpointId) } [SkippableTheory] - [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))] + [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = 
typeof(EndpointsFixture.Env))] public void TestAggregationRequestParamsWithDefaultDialect(string endpointId) { IDatabase db = GetCleanDatabase(endpointId); @@ -364,7 +366,7 @@ public void TestAggregationRequestParamsWithDefaultDialect(string endpointId) } [SkippableTheory] - [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))] + [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public async Task TestAggregationRequestParamsWithDefaultDialectAsync(string endpointId) { IDatabase db = GetCleanDatabase(endpointId); @@ -403,7 +405,7 @@ public void TestDefaultDialectError() } [SkippableTheory] - [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))] + [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public void TestAlias(string endpointId) { IDatabase db = GetCleanDatabase(endpointId); @@ -432,7 +434,7 @@ public void TestAlias(string endpointId) } [SkippableTheory] - [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))] + [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public async Task TestAliasAsync(string endpointId) { IDatabase db = GetCleanDatabase(endpointId); @@ -461,7 +463,7 @@ public async Task TestAliasAsync(string endpointId) } [SkippableTheory] - [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))] + [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public void TestApplyAndFilterAggregations(string endpointId) { IDatabase db = GetCleanDatabase(endpointId); @@ -503,7 +505,7 @@ public void TestApplyAndFilterAggregations(string endpointId) } [SkippableTheory] - [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))] + 
[MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public void TestCreate(string endpointId) { IDatabase db = GetCleanDatabase(endpointId); @@ -535,7 +537,7 @@ public void TestCreate(string endpointId) } [SkippableTheory] - [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))] + [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public async Task TestCreateAsync(string endpointId) { IDatabase db = GetCleanDatabase(endpointId); @@ -561,7 +563,7 @@ public async Task TestCreateAsync(string endpointId) } [SkippableTheory] - [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))] + [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public void CreateNoParams(string endpointId) { IDatabase db = GetCleanDatabase(endpointId); @@ -589,7 +591,7 @@ public void CreateNoParams(string endpointId) } [SkippableTheory] - [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))] + [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public async Task CreateNoParamsAsync(string endpointId) { IDatabase db = GetCleanDatabase(endpointId); @@ -617,7 +619,7 @@ public async Task CreateNoParamsAsync(string endpointId) } [SkippableTheory] - [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))] + [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public void CreateWithFieldNames(string endpointId) { IDatabase db = GetCleanDatabase(endpointId); @@ -646,7 +648,7 @@ public void CreateWithFieldNames(string endpointId) } [SkipIfRedisTheory(Comparison.LessThan, "7.9.0")] - [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))] 
+ [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public void FailWhenAttributeNotExist(string endpointId) { IDatabase db = GetCleanDatabase(endpointId); @@ -659,7 +661,7 @@ public void FailWhenAttributeNotExist(string endpointId) } [SkippableTheory] - [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))] + [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public async Task CreateWithFieldNamesAsync(string endpointId) { IDatabase db = GetCleanDatabase(endpointId); @@ -688,7 +690,7 @@ public async Task CreateWithFieldNamesAsync(string endpointId) } [SkipIfRedisTheory(Comparison.LessThan, "7.9.0")] - [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))] + [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public async Task FailWhenAttributeNotExistAsync(string endpointId) { IDatabase db = GetCleanDatabase(endpointId); @@ -701,7 +703,7 @@ public async Task FailWhenAttributeNotExistAsync(string endpointId) } [SkipIfRedisTheory(Is.Enterprise)] - [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))] + [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public void AlterAdd(string endpointId) { IDatabase db = GetCleanDatabase(endpointId); @@ -715,10 +717,24 @@ public void AlterAdd(string endpointId) var fields = new HashEntry("title", "hello world"); //fields.("title", "hello world"); + Assert.Equal(0, DatabaseSize(db, out int replicas)); + Log($"Replicas: {replicas}"); for (int i = 0; i < 100; i++) { db.HashSet($"doc{i}", fields.Name, fields.Value); } + Assert.Equal(100, DatabaseSize(db)); + var info = ft.Info(index); + Assert.Equal(index, info.IndexName); + if (endpointId == EndpointsFixture.Env.Cluster) + { + 
Assert.True(info.NumDocs is 100 or 200, $"NumDocs: {info.NumDocs}"); + } + else + { + Assert.Equal(100, info.NumDocs); + } + SearchResult res = ft.Search(index, new("hello world")); Assert.Equal(100, res.TotalResults); @@ -733,7 +749,9 @@ public void AlterAdd(string endpointId) SearchResult res2 = ft.Search(index, new("@tags:{tagA}")); Assert.Equal(100, res2.TotalResults); - var info = ft.Info(index); + Assert.Equal(100, DatabaseSize(db)); + + info = ft.Info(index); Assert.Equal(index, info.IndexName); Assert.Empty(info.IndexOption); // Assert.Equal(,info.IndexDefinition); @@ -741,31 +759,40 @@ public void AlterAdd(string endpointId) Assert.Equal("TAG", info.Attributes[1]["type"].ToString()); Assert.Equal("name", info.Attributes[2]["attribute"].ToString()); - Assert.Equal(100, info.NumDocs); - Assert.NotNull(info.MaxDocId); - Assert.Equal(102, info.NumTerms); - Assert.True(info.NumRecords >= 200); - Assert.True(info.InvertedSzMebibytes < 1); // TODO: check this line and all the <1 lines - Assert.Equal(0, info.VectorIndexSzMebibytes); - Assert.Equal(208, info.TotalInvertedIndexBlocks); - Assert.True(info.OffsetVectorsSzMebibytes < 1); - Assert.True(info.DocTableSizeMebibytes < 1); - Assert.Equal(0, info.SortableValueSizeMebibytes); - Assert.True(info.KeyTableSizeMebibytes < 1); - Assert.Equal(8, (int)info.RecordsPerDocAvg); - Assert.True(info.BytesPerRecordAvg > 5); - Assert.True(info.OffsetsPerTermAvg > 0.8); - Assert.Equal(8, info.OffsetBitsPerRecordAvg); - Assert.Equal(0, info.HashIndexingFailures); - Assert.Equal(0, info.Indexing); - Assert.Equal(1, info.PercentIndexed); - Assert.Equal(4, info.NumberOfUses); - Assert.Equal(7, info.GcStats.Count); - Assert.Equal(4, info.CursorStats.Count); + if (endpointId == EndpointsFixture.Env.Cluster) + { + Assert.True(info.NumDocs is 100 or 200, $"NumDocs: {info.NumDocs}"); + } + else + { + Assert.Equal(100, info.NumDocs); + + // these numbers don't make sense when considering a shard + Assert.NotNull(info.MaxDocId); + 
Assert.Equal(102, info.NumTerms); + Assert.True(info.NumRecords >= 200); + Assert.True(info.InvertedSzMebibytes < 1); // TODO: check this line and all the <1 lines + Assert.Equal(0, info.VectorIndexSzMebibytes); + Assert.Equal(208, info.TotalInvertedIndexBlocks); + Assert.True(info.OffsetVectorsSzMebibytes < 1); + Assert.True(info.DocTableSizeMebibytes < 1); + Assert.Equal(0, info.SortableValueSizeMebibytes); + Assert.True(info.KeyTableSizeMebibytes < 1); + Assert.Equal(8, (int)info.RecordsPerDocAvg); + Assert.True(info.BytesPerRecordAvg > 5); + Assert.True(info.OffsetsPerTermAvg > 0.8); + Assert.Equal(8, info.OffsetBitsPerRecordAvg); + Assert.Equal(0, info.HashIndexingFailures); + Assert.Equal(0, info.Indexing); + Assert.Equal(1, info.PercentIndexed); + Assert.Equal(5, info.NumberOfUses); + Assert.Equal(7, info.GcStats.Count); + Assert.Equal(4, info.CursorStats.Count); + } } [SkipIfRedisTheory(Is.Enterprise)] - [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))] + [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public async Task AlterAddAsync(string endpointId) { IDatabase db = GetCleanDatabase(endpointId); @@ -785,6 +812,16 @@ public async Task AlterAddAsync(string endpointId) } SearchResult res = ft.Search(index, new("hello world")); Assert.Equal(100, res.TotalResults); + var info = ft.Info(index); + Assert.Equal(index, info.IndexName); + if (endpointId == EndpointsFixture.Env.Cluster) + { + Assert.True(info.NumDocs is 100 or 200, $"NumDocs: {info.NumDocs}"); + } + else + { + Assert.Equal(100, info.NumDocs); + } Assert.True(await ft.AlterAsync(index, new Schema().AddTagField("tags").AddTextField("name", weight: 0.5))); for (int i = 0; i < 100; i++) @@ -797,36 +834,47 @@ public async Task AlterAddAsync(string endpointId) SearchResult res2 = ft.Search(index, new("@tags:{tagA}")); Assert.Equal(100, res2.TotalResults); - var info = await ft.InfoAsync(index); + 
Assert.Equal(100, await DatabaseSizeAsync(db)); + + info = await ft.InfoAsync(index); Assert.Equal(index, info.IndexName); Assert.Equal("title", info.Attributes[0]["identifier"].ToString()); Assert.Equal("TAG", info.Attributes[1]["type"].ToString()); Assert.Equal("name", info.Attributes[2]["attribute"].ToString()); - Assert.Equal(100, info.NumDocs); - Assert.Equal("300", info.MaxDocId); - Assert.Equal(102, info.NumTerms); - Assert.True(info.NumRecords >= 200); - Assert.True(info.InvertedSzMebibytes < 1); // TODO: check this line and all the <1 lines - Assert.Equal(0, info.VectorIndexSzMebibytes); - Assert.Equal(208, info.TotalInvertedIndexBlocks); - Assert.True(info.OffsetVectorsSzMebibytes < 1); - Assert.True(info.DocTableSizeMebibytes < 1); - Assert.Equal(0, info.SortableValueSizeMebibytes); - Assert.True(info.KeyTableSizeMebibytes < 1); - Assert.Equal(8, (int)info.RecordsPerDocAvg); - Assert.True(info.BytesPerRecordAvg > 5); - Assert.True(info.OffsetsPerTermAvg > 0.8); - Assert.Equal(8, info.OffsetBitsPerRecordAvg); - Assert.Equal(0, info.HashIndexingFailures); - Assert.Equal(0, info.Indexing); - Assert.Equal(1, info.PercentIndexed); - Assert.Equal(4, info.NumberOfUses); - Assert.Equal(7, info.GcStats.Count); - Assert.Equal(4, info.CursorStats.Count); + if (endpointId == EndpointsFixture.Env.Cluster) + { + Assert.True(info.NumDocs is 100 or 200, $"NumDocs: {info.NumDocs}"); + } + else + { + Assert.Equal(100, info.NumDocs); + + // these numbers don't make sense when considering a shard + Assert.Equal("300", info.MaxDocId); + Assert.Equal(102, info.NumTerms); + Assert.True(info.NumRecords >= 200); + Assert.True(info.InvertedSzMebibytes < 1); // TODO: check this line and all the <1 lines + Assert.Equal(0, info.VectorIndexSzMebibytes); + Assert.Equal(208, info.TotalInvertedIndexBlocks); + Assert.True(info.OffsetVectorsSzMebibytes < 1); + Assert.True(info.DocTableSizeMebibytes < 1); + Assert.Equal(0, info.SortableValueSizeMebibytes); + 
Assert.True(info.KeyTableSizeMebibytes < 1); + Assert.Equal(8, (int)info.RecordsPerDocAvg); + Assert.True(info.BytesPerRecordAvg > 5); + Assert.True(info.OffsetsPerTermAvg > 0.8); + Assert.Equal(8, info.OffsetBitsPerRecordAvg); + Assert.Equal(0, info.HashIndexingFailures); + Assert.Equal(0, info.Indexing); + Assert.Equal(1, info.PercentIndexed); + Assert.Equal(5, info.NumberOfUses); + Assert.Equal(7, info.GcStats.Count); + Assert.Equal(4, info.CursorStats.Count); + } } [SkipIfRedisTheory(Is.Enterprise)] - [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))] + [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public void AlterAddSortable(string endpointId) { IDatabase db = GetCleanDatabase(endpointId); @@ -865,31 +913,40 @@ public void AlterAddSortable(string endpointId) Assert.Equal("title", info.Attributes[0]["identifier"].ToString()); Assert.Equal("TAG", info.Attributes[1]["type"].ToString()); Assert.Equal("name", info.Attributes[2]["attribute"].ToString()); - Assert.Equal(100, info.NumDocs); - Assert.NotNull(info.MaxDocId); - Assert.Equal(102, info.NumTerms); - Assert.True(info.NumRecords >= 200); - Assert.True(info.InvertedSzMebibytes < 1); // TODO: check this line and all the <1 lines - Assert.Equal(0, info.VectorIndexSzMebibytes); - Assert.Equal(208, info.TotalInvertedIndexBlocks); - Assert.True(info.OffsetVectorsSzMebibytes < 1); - Assert.True(info.DocTableSizeMebibytes < 1); - Assert.Equal(0, info.SortableValueSizeMebibytes); - Assert.True(info.KeyTableSizeMebibytes < 1); - Assert.Equal(8, (int)info.RecordsPerDocAvg); - Assert.True(info.BytesPerRecordAvg > 5); - Assert.True(info.OffsetsPerTermAvg > 0.8); - Assert.Equal(8, info.OffsetBitsPerRecordAvg); - Assert.Equal(0, info.HashIndexingFailures); - Assert.Equal(0, info.Indexing); - Assert.Equal(1, info.PercentIndexed); - Assert.Equal(4, info.NumberOfUses); - Assert.Equal(7, info.GcStats.Count); - 
Assert.Equal(4, info.CursorStats.Count); + if (endpointId == EndpointsFixture.Env.Cluster) + { + Assert.True(info.NumDocs is 100 or 200, $"NumDocs: {info.NumDocs}"); + } + else + { + Assert.Equal(100, info.NumDocs); + + // these numbers don't make sense when considering a shard + Assert.NotNull(info.MaxDocId); + Assert.Equal(102, info.NumTerms); + Assert.True(info.NumRecords >= 200); + Assert.True(info.InvertedSzMebibytes < 1); // TODO: check this line and all the <1 lines + Assert.Equal(0, info.VectorIndexSzMebibytes); + Assert.Equal(208, info.TotalInvertedIndexBlocks); + Assert.True(info.OffsetVectorsSzMebibytes < 1); + Assert.True(info.DocTableSizeMebibytes < 1); + Assert.Equal(0, info.SortableValueSizeMebibytes); + Assert.True(info.KeyTableSizeMebibytes < 1); + Assert.Equal(8, (int)info.RecordsPerDocAvg); + Assert.True(info.BytesPerRecordAvg > 5); + Assert.True(info.OffsetsPerTermAvg > 0.8); + Assert.Equal(8, info.OffsetBitsPerRecordAvg); + Assert.Equal(0, info.HashIndexingFailures); + Assert.Equal(0, info.Indexing); + Assert.Equal(1, info.PercentIndexed); + Assert.Equal(4, info.NumberOfUses); + Assert.Equal(7, info.GcStats.Count); + Assert.Equal(4, info.CursorStats.Count); + } } [SkipIfRedisTheory(Comparison.LessThan, "7.3.0")] - [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))] + [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public void InfoWithIndexEmptyAndIndexMissing(string endpointId) { IDatabase db = GetCleanDatabase(endpointId); @@ -924,7 +981,7 @@ public void InfoWithIndexEmptyAndIndexMissing(string endpointId) } [SkipIfRedisTheory(Is.Enterprise)] - [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))] + [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public async Task AlterAddSortableAsync(string endpointId) { IDatabase db = 
GetCleanDatabase(endpointId); @@ -961,27 +1018,36 @@ public async Task AlterAddSortableAsync(string endpointId) Assert.Equal("title", info.Attributes[0]["identifier"].ToString()); Assert.Equal("TAG", info.Attributes[1]["type"].ToString()); Assert.Equal("name", info.Attributes[2]["attribute"].ToString()); - Assert.Equal(100, info.NumDocs); - Assert.Equal("300", info.MaxDocId); - Assert.Equal(102, info.NumTerms); - Assert.True(info.NumRecords >= 200); - Assert.True(info.InvertedSzMebibytes < 1); // TODO: check this line and all the <1 lines - Assert.Equal(0, info.VectorIndexSzMebibytes); - Assert.Equal(208, info.TotalInvertedIndexBlocks); - Assert.True(info.OffsetVectorsSzMebibytes < 1); - Assert.True(info.DocTableSizeMebibytes < 1); - Assert.Equal(0, info.SortableValueSizeMebibytes); - Assert.True(info.KeyTableSizeMebibytes < 1); - Assert.Equal(8, (int)info.RecordsPerDocAvg); - Assert.True(info.BytesPerRecordAvg > 5); - Assert.True(info.OffsetsPerTermAvg > 0.8); - Assert.Equal(8, info.OffsetBitsPerRecordAvg); - Assert.Equal(0, info.HashIndexingFailures); - Assert.Equal(0, info.Indexing); - Assert.Equal(1, info.PercentIndexed); - Assert.Equal(4, info.NumberOfUses); - Assert.Equal(7, info.GcStats.Count); - Assert.Equal(4, info.CursorStats.Count); + if (endpointId == EndpointsFixture.Env.Cluster) + { + Assert.True(info.NumDocs is 100 or 200, $"NumDocs: {info.NumDocs}"); + } + else + { + Assert.Equal(100, info.NumDocs); + + // these numbers don't make sense when considering a shard + Assert.Equal("300", info.MaxDocId); + Assert.Equal(102, info.NumTerms); + Assert.True(info.NumRecords >= 200); + Assert.True(info.InvertedSzMebibytes < 1); // TODO: check this line and all the <1 lines + Assert.Equal(0, info.VectorIndexSzMebibytes); + Assert.Equal(208, info.TotalInvertedIndexBlocks); + Assert.True(info.OffsetVectorsSzMebibytes < 1); + Assert.True(info.DocTableSizeMebibytes < 1); + Assert.Equal(0, info.SortableValueSizeMebibytes); + Assert.True(info.KeyTableSizeMebibytes < 
1); + Assert.Equal(8, (int)info.RecordsPerDocAvg); + Assert.True(info.BytesPerRecordAvg > 5); + Assert.True(info.OffsetsPerTermAvg > 0.8); + Assert.Equal(8, info.OffsetBitsPerRecordAvg); + Assert.Equal(0, info.HashIndexingFailures); + Assert.Equal(0, info.Indexing); + Assert.Equal(1, info.PercentIndexed); + Assert.Equal(4, info.NumberOfUses); + Assert.Equal(7, info.GcStats.Count); + Assert.Equal(4, info.CursorStats.Count); + } } // TODO : fix with FT.CONFIG response change @@ -1355,7 +1421,13 @@ public void TestAggregationGroupBy(string endpointId) req = new AggregationRequest("redis").GroupBy( "@parent", Reducers.FirstValue("@title").As("first")); - res = ft.Aggregate("idx", req).GetRow(0); + var agg = ft.Aggregate("idx", req); + Log($"results: {agg.TotalResults}"); + for (int i = 0 ; i < agg.TotalResults; i++) + { + Log($"parent: {agg.GetRow(i)["parent"]}, first: {agg.GetRow(i)["first"]}"); + } + res = agg.GetRow(0); Assert.Equal("redis", res["parent"]); Assert.Equal("RediSearch", res["first"]); @@ -1382,7 +1454,7 @@ public void TestAggregationGroupBy(string endpointId) [SkippableTheory] - [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))] + [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public void TestDictionary(string endpointId) { IDatabase db = GetCleanDatabase(endpointId); @@ -1401,7 +1473,7 @@ public void TestDictionary(string endpointId) } [SkippableTheory] - [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))] + [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public void TestDropIndex(string endpointId) { IDatabase db = GetCleanDatabase(endpointId); @@ -1431,19 +1503,46 @@ public void TestDropIndex(string endpointId) Assert.Contains("no such index", ex.Message, StringComparison.OrdinalIgnoreCase); } + Assert.Equal(100, DatabaseSize(db)); + } + + 
private int DatabaseSize(IDatabase db) => DatabaseSize(db, out _); + + private int DatabaseSize(IDatabase db, out int replicaCount) + { + replicaCount = 0; var count = 0L; foreach (var server in db.Multiplexer.GetServers()) { - if (!server.IsReplica) + if (server.IsReplica) + { + replicaCount++; + } + else { count += server.DatabaseSize(); } } - Assert.Equal(100, count); + + return checked((int)count); + } + + private async Task DatabaseSizeAsync(IDatabase db) + { + var count = 0L; + foreach (var server in db.Multiplexer.GetServers()) + { + if (!server.IsReplica) + { + count += await server.DatabaseSizeAsync(); + } + } + + return checked((int)count); } [SkippableTheory] - [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))] + [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public async Task TestDropIndexAsync(string endpointId) { IDatabase db = GetCleanDatabase(endpointId); @@ -1472,11 +1571,12 @@ public async Task TestDropIndexAsync(string endpointId) { Assert.Contains("no such index", ex.Message, StringComparison.OrdinalIgnoreCase); } - Assert.Equal("100", db.Execute("DBSIZE").ToString()); + + Assert.Equal(100, DatabaseSize(db)); } [SkippableTheory] - [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))] + [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public void dropIndexDD(string endpointId) { IDatabase db = GetCleanDatabase(endpointId); @@ -1498,11 +1598,11 @@ public void dropIndexDD(string endpointId) RedisResult[] keys = (RedisResult[])db.Execute("KEYS", "*")!; Assert.Empty(keys); - Assert.Equal("0", db.Execute("DBSIZE").ToString()); + Assert.Equal(0, DatabaseSize(db)); } [SkippableTheory] - [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))] + [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), 
MemberType = typeof(EndpointsFixture.Env))] public async Task dropIndexDDAsync(string endpointId) { IDatabase db = GetCleanDatabase(endpointId); @@ -1524,11 +1624,11 @@ public async Task dropIndexDDAsync(string endpointId) RedisResult[] keys = (RedisResult[])db.Execute("KEYS", "*")!; Assert.Empty(keys); - Assert.Equal("0", db.Execute("DBSIZE").ToString()); + Assert.Equal(0, DatabaseSize(db)); } [SkippableTheory] - [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))] + [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public async Task TestDictionaryAsync(string endpointId) { IDatabase db = GetCleanDatabase(endpointId); @@ -1548,7 +1648,7 @@ public async Task TestDictionaryAsync(string endpointId) readonly string explainQuery = "@f3:f3_val @f2:f2_val @f1:f1_val"; [SkippableTheory] - [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))] + [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public void TestExplain(string endpointId) { IDatabase db = GetCleanDatabase(endpointId); @@ -1572,7 +1672,7 @@ public void TestExplain(string endpointId) } [SkippableTheory] - [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))] + [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public async Task TestExplainAsync(string endpointId) { IDatabase db = GetCleanDatabase(endpointId); @@ -1641,7 +1741,7 @@ public async Task TestExplainCliAsync(string endpointId) } [SkippableTheory] - [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))] + [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public void TestExplainWithDefaultDialect(string endpointId) { IDatabase db = GetCleanDatabase(endpointId); @@ 
-1658,7 +1758,7 @@ public void TestExplainWithDefaultDialect(string endpointId) } [SkippableTheory] - [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))] + [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public async Task TestExplainWithDefaultDialectAsync(string endpointId) { IDatabase db = GetCleanDatabase(endpointId); @@ -1675,7 +1775,7 @@ public async Task TestExplainWithDefaultDialectAsync(string endpointId) } [SkippableTheory] - [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))] + [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public void TestSynonym(string endpointId) { IDatabase db = GetCleanDatabase(endpointId); @@ -1701,7 +1801,7 @@ public void TestSynonym(string endpointId) } [SkippableTheory] - [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))] + [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public async Task TestSynonymAsync(string endpointId) { IDatabase db = GetCleanDatabase(endpointId); @@ -1740,7 +1840,7 @@ public void TestModulePrefixs() } [SkippableTheory] - [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))] + [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public async Task GetTagFieldSyncAsync(string endpointId) { IDatabase db = GetCleanDatabase(endpointId); @@ -1798,7 +1898,7 @@ public async Task GetTagFieldSyncAsync(string endpointId) } [SkippableTheory] - [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))] + [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public async Task TestGetTagFieldWithNonDefaultSeparatorSyncAsync(string 
endpointId) { IDatabase db = GetCleanDatabase(endpointId); @@ -1923,7 +2023,7 @@ public void TestFTCreateParamsCommandBuilderNoStopwords() } [SkippableTheory] - [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))] + [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public void TestFilters(string endpointId) { IDatabase db = GetCleanDatabase(endpointId); @@ -1975,7 +2075,7 @@ public void TestFilters(string endpointId) } [SkippableTheory] - [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))] + [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public async Task TestFiltersAsync(string endpointId) { IDatabase db = GetCleanDatabase(endpointId); @@ -2232,7 +2332,7 @@ public void TestFieldsCommandBuilder() } [SkippableTheory] - [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))] + [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public void TestLimit(string endpointId) { IDatabase db = GetCleanDatabase(endpointId); @@ -2252,7 +2352,7 @@ public void TestLimit(string endpointId) } [SkippableTheory] - [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))] + [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public async Task TestLimitAsync(string endpointId) { IDatabase db = GetCleanDatabase(endpointId); @@ -2322,7 +2422,7 @@ public void TestVectorCount_Issue70() } [SkippableTheory] - [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))] + [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public void VectorSimilaritySearch(string endpointId) { IDatabase db = 
GetCleanDatabase(endpointId); @@ -2365,7 +2465,7 @@ public void VectorSimilaritySearch(string endpointId) } [SkippableTheory] - [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))] + [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public void QueryingVectorFields(string endpointId) { IDatabase db = GetCleanDatabase(endpointId); @@ -2412,7 +2512,7 @@ public async Task TestVectorFieldJson_Issue102Async() } [SkippableTheory] - [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))] + [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public void TestQueryAddParam_DefaultDialect(string endpointId) { IDatabase db = GetCleanDatabase(endpointId); @@ -2431,7 +2531,7 @@ public void TestQueryAddParam_DefaultDialect(string endpointId) } [SkippableTheory] - [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))] + [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public async Task TestQueryAddParam_DefaultDialectAsync(string endpointId) { IDatabase db = GetCleanDatabase(endpointId); @@ -2450,7 +2550,7 @@ public async Task TestQueryAddParam_DefaultDialectAsync(string endpointId) } [SkippableTheory] - [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))] + [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public void TestQueryParamsWithParams_DefaultDialect(string endpointId) { IDatabase db = GetCleanDatabase(endpointId); @@ -2478,7 +2578,7 @@ public void TestQueryParamsWithParams_DefaultDialect(string endpointId) } [SkippableTheory] - [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))] + 
[MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public void TestBasicSpellCheck(string endpointId) { IDatabase db = GetCleanDatabase(endpointId); @@ -2498,7 +2598,7 @@ public void TestBasicSpellCheck(string endpointId) } [SkippableTheory] - [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))] + [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public async Task TestBasicSpellCheckAsync(string endpointId) { IDatabase db = GetCleanDatabase(endpointId); @@ -2518,7 +2618,7 @@ public async Task TestBasicSpellCheckAsync(string endpointId) } [SkippableTheory] - [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))] + [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public void TestCrossTermDictionary(string endpointId) { IDatabase db = GetCleanDatabase(endpointId); @@ -2542,7 +2642,7 @@ public void TestCrossTermDictionary(string endpointId) } [SkippableTheory] - [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))] + [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public async Task TestCrossTermDictionaryAsync(string endpointId) { IDatabase db = GetCleanDatabase(endpointId); @@ -2610,7 +2710,7 @@ public async Task TestDialectBoundAsync() } [SkippableTheory] - [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))] + [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public async Task TestQueryParamsWithParams_DefaultDialectAsync(string endpointId) { IDatabase db = GetCleanDatabase(endpointId); @@ -2846,7 +2946,7 @@ public async Task getSuggestionLengthAndDeleteSuggestionAsync() } [SkipIfRedisTheory(Is.Enterprise, 
Comparison.LessThan, "7.9")] - [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))] + [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public void TestProfileSearch(string endpointId) { IDatabase db = GetCleanDatabase(endpointId); @@ -2870,7 +2970,7 @@ public void TestProfileSearch(string endpointId) } [SkipIfRedisTheory(Is.Enterprise, Comparison.LessThan, "7.9")] - [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))] + [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public async Task TestProfileSearchAsync(string endpointId) { IDatabase db = GetCleanDatabase(endpointId); @@ -2893,7 +2993,7 @@ public async Task TestProfileSearchAsync(string endpointId) } [SkipIfRedisTheory(Is.Enterprise, Comparison.GreaterThanOrEqual, "7.9")] - [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))] + [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public void TestProfileSearch_WithoutCoordinator(string endpointId) { IDatabase db = GetCleanDatabase(endpointId); @@ -2913,7 +3013,7 @@ public void TestProfileSearch_WithoutCoordinator(string endpointId) } [SkipIfRedisTheory(Is.Enterprise, Comparison.GreaterThanOrEqual, "7.9")] - [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))] + [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public async Task TestProfileSearchAsync_WithoutCoordinator(string endpointId) { IDatabase db = GetCleanDatabase(endpointId); @@ -2933,7 +3033,7 @@ public async Task TestProfileSearchAsync_WithoutCoordinator(string endpointId) } [SkipIfRedisTheory(Is.Enterprise, Comparison.LessThan, "7.9")] - [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = 
typeof(EndpointsFixture.Env))] + [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public void TestProfile(string endpointId) { IDatabase db = GetCleanDatabase(endpointId); @@ -2969,7 +3069,7 @@ public void TestProfile(string endpointId) } [SkipIfRedisTheory(Is.Enterprise, Comparison.LessThan, "7.9")] - [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))] + [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public async Task TestProfileAsync(string endpointId) { IDatabase db = GetCleanDatabase(endpointId); @@ -3005,7 +3105,7 @@ public async Task TestProfileAsync(string endpointId) } [SkipIfRedisTheory(Is.Enterprise, Comparison.GreaterThanOrEqual, "7.9")] - [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))] + [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public void TestProfile_WithoutCoordinator(string endpointId) { IDatabase db = GetCleanDatabase(endpointId); @@ -3035,7 +3135,7 @@ public void TestProfile_WithoutCoordinator(string endpointId) } [SkipIfRedisTheory(Is.Enterprise, Comparison.GreaterThanOrEqual, "7.9")] - [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))] + [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public async Task TestProfileAsync_WithoutCoordinator(string endpointId) { IDatabase db = GetCleanDatabase(endpointId); @@ -3065,7 +3165,7 @@ public async Task TestProfileAsync_WithoutCoordinator(string endpointId) } [SkipIfRedisTheory(Is.Enterprise, Comparison.LessThan, "7.3.240")] - [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))] + [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public void 
TestProfileIssue306(string endpointId) { IDatabase db = GetCleanDatabase(endpointId); @@ -3095,7 +3195,7 @@ public void TestProfileIssue306(string endpointId) } [SkipIfRedisTheory(Is.Enterprise, Comparison.LessThan, "7.3.240")] - [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))] + [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public async Task TestProfileAsyncIssue306(string endpointId) { IDatabase db = GetCleanDatabase(endpointId); @@ -3136,7 +3236,7 @@ public void TestProfileCommandBuilder() } [SkippableTheory] - [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))] + [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public void Issue175(string endpointId) { IDatabase db = GetCleanDatabase(endpointId); @@ -3154,7 +3254,7 @@ public void Issue175(string endpointId) } [SkipIfRedisTheory(Comparison.LessThan, "7.2.1")] - [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))] + [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public void GeoShapeFilterSpherical(string endpointId) { IDatabase db = GetCleanDatabase(endpointId); @@ -3219,7 +3319,7 @@ public void GeoShapeFilterSpherical(string endpointId) } [SkipIfRedisTheory(Comparison.LessThan, "7.2.1")] - [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))] + [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public async Task GeoShapeFilterSphericalAsync(string endpointId) { IDatabase db = GetCleanDatabase(endpointId); @@ -3284,7 +3384,7 @@ public async Task GeoShapeFilterSphericalAsync(string endpointId) } [SkipIfRedisTheory(Comparison.LessThan, "7.2.1")] - [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), 
MemberType = typeof(EndpointsFixture.Env))] + [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public void GeoShapeFilterFlat(string endpointId) { IDatabase db = GetCleanDatabase(endpointId); @@ -3338,7 +3438,7 @@ public void GeoShapeFilterFlat(string endpointId) } [SkipIfRedisTheory(Comparison.LessThan, "7.2.1")] - [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))] + [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public async Task GeoShapeFilterFlatAsync(string endpointId) { IDatabase db = GetCleanDatabase(endpointId); @@ -3405,7 +3505,7 @@ public void Issue230() } [SkipIfRedisTheory(Comparison.LessThan, "7.3.240")] - [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))] + [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public void TestNumericInDialect4(string endpointId) { IDatabase db = GetCleanDatabase(endpointId); @@ -3428,7 +3528,7 @@ public void TestNumericInDialect4(string endpointId) } [SkipIfRedisTheory(Comparison.LessThan, "7.3.240")] - [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))] + [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public void TestNumericOperatorsInDialect4(string endpointId) { IDatabase db = GetCleanDatabase(endpointId); @@ -3458,7 +3558,7 @@ public void TestNumericOperatorsInDialect4(string endpointId) } [SkipIfRedisTheory(Comparison.LessThan, "7.3.240")] - [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))] + [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public void TestNumericLogicalOperatorsInDialect4(string endpointId) { IDatabase db = GetCleanDatabase(endpointId); @@ 
-3512,7 +3612,7 @@ public void TestDocumentLoad_Issue352() /// https://redis.io/docs/latest/commands/ft.search/#:~:text=If%20a%20relevant%20key%20expires,the%20total%20number%20of%20results. /// [SkippableTheory] - [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))] + [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public async Task TestDocumentLoadWithDB_Issue352(string endpointId) { IDatabase db = GetCleanDatabase(endpointId); diff --git a/tests/dockers/endpoints.json b/tests/dockers/endpoints.json index 1afd1fda..3b0e1994 100644 --- a/tests/dockers/endpoints.json +++ b/tests/dockers/endpoints.json @@ -1,17 +1,17 @@ { "standalone":{ "endpoints": [ - "localhost:6379" + "127.0.0.1:6379" ] }, "cluster": { "endpoints": [ - "localhost:16379", - "localhost:16380", - "localhost:16381", - "localhost:16382", - "localhost:16383", - "localhost:16384" + "127.0.0.1:16379", + "127.0.0.1:16380", + "127.0.0.1:16381", + "127.0.0.1:16382", + "127.0.0.1:16383", + "127.0.0.1:16384" ] } } \ No newline at end of file From 25eccdb72c31088a87b14440eed9b211905f52da Mon Sep 17 00:00:00 2001 From: Marc Gravell Date: Wed, 10 Sep 2025 14:57:11 +0100 Subject: [PATCH 08/27] don't hit disconnected servers when crawling endpoints --- tests/NRedisStack.Tests/AbstractNRedisStackTest.cs | 2 +- tests/NRedisStack.Tests/Search/SearchTests.cs | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/NRedisStack.Tests/AbstractNRedisStackTest.cs b/tests/NRedisStack.Tests/AbstractNRedisStackTest.cs index 5f2063db..2e694da8 100644 --- a/tests/NRedisStack.Tests/AbstractNRedisStackTest.cs +++ b/tests/NRedisStack.Tests/AbstractNRedisStackTest.cs @@ -52,7 +52,7 @@ protected IDatabase GetCleanDatabase(string endpointId = EndpointsFixture.Env.St { var server = redis.GetServer(endPoint); - if (server.IsReplica) continue; + if (server.IsReplica || !server.IsConnected) continue; 
server.Execute("FLUSHALL"); } diff --git a/tests/NRedisStack.Tests/Search/SearchTests.cs b/tests/NRedisStack.Tests/Search/SearchTests.cs index cfe4db3d..61d6fc81 100644 --- a/tests/NRedisStack.Tests/Search/SearchTests.cs +++ b/tests/NRedisStack.Tests/Search/SearchTests.cs @@ -1514,7 +1514,7 @@ private int DatabaseSize(IDatabase db, out int replicaCount) var count = 0L; foreach (var server in db.Multiplexer.GetServers()) { - if (server.IsReplica) + if (server.IsReplica || !server.IsConnected) { replicaCount++; } @@ -1532,7 +1532,7 @@ private async Task DatabaseSizeAsync(IDatabase db) var count = 0L; foreach (var server in db.Multiplexer.GetServers()) { - if (!server.IsReplica) + if (!server.IsReplica && server.IsConnected) { count += await server.DatabaseSizeAsync(); } From 584198885ed91db97b138dbc86233cbff3369264 Mon Sep 17 00:00:00 2001 From: Marc Gravell Date: Wed, 10 Sep 2025 15:06:46 +0100 Subject: [PATCH 09/27] dotnet format --- tests/NRedisStack.Tests/Search/SearchTests.cs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/NRedisStack.Tests/Search/SearchTests.cs b/tests/NRedisStack.Tests/Search/SearchTests.cs index 61d6fc81..119acbc8 100644 --- a/tests/NRedisStack.Tests/Search/SearchTests.cs +++ b/tests/NRedisStack.Tests/Search/SearchTests.cs @@ -1423,7 +1423,7 @@ public void TestAggregationGroupBy(string endpointId) "@parent", Reducers.FirstValue("@title").As("first")); var agg = ft.Aggregate("idx", req); Log($"results: {agg.TotalResults}"); - for (int i = 0 ; i < agg.TotalResults; i++) + for (int i = 0; i < agg.TotalResults; i++) { Log($"parent: {agg.GetRow(i)["parent"]}, first: {agg.GetRow(i)["first"]}"); } From 2aa9e630b98f0dbf8665a38c29e17896b9ac943a Mon Sep 17 00:00:00 2001 From: Marc Gravell Date: Wed, 10 Sep 2025 15:36:49 +0100 Subject: [PATCH 10/27] more search test tweaks --- tests/NRedisStack.Tests/Search/SearchTests.cs | 23 +++++++++++++++++-- 1 file changed, 21 insertions(+), 2 deletions(-) diff --git 
a/tests/NRedisStack.Tests/Search/SearchTests.cs b/tests/NRedisStack.Tests/Search/SearchTests.cs index 119acbc8..03fbb10d 100644 --- a/tests/NRedisStack.Tests/Search/SearchTests.cs +++ b/tests/NRedisStack.Tests/Search/SearchTests.cs @@ -485,6 +485,7 @@ public void TestApplyAndFilterAggregations(string endpointId) AddDocument(db, new Document("data4").Set("name", "abc").Set("subj1", 30).Set("subj2", 20)); AddDocument(db, new Document("data5").Set("name", "def").Set("subj1", 65).Set("subj2", 45)); AddDocument(db, new Document("data6").Set("name", "ghi").Set("subj1", 70).Set("subj2", 70)); + Assert.Equal(6, DatabaseSize(db)); // in part, this is to allow replication to catch up AggregationRequest r = new AggregationRequest().Apply("(@subj1+@subj2)/2", "attemptavg") .GroupBy("@name", Reducers.Avg("@attemptavg").As("avgscore")) @@ -522,6 +523,7 @@ public void TestCreate(string endpointId) db.HashSet("pupil:4444", [new("first", "Pat"), new("last", "Shu"), new("age", "21")]); db.HashSet("student:5555", [new("first", "Joen"), new("last", "Ko"), new("age", "20")]); db.HashSet("teacher:6666", [new("first", "Pat"), new("last", "Rod"), new("age", "20")]); + Assert.Equal(7, DatabaseSize(db)); // in part, this is to allow replication to catch up var noFilters = ft.Search(index, new()); Assert.Equal(4, noFilters.TotalResults); @@ -552,6 +554,8 @@ public async Task TestCreateAsync(string endpointId) db.HashSet("pupil:4444", [new("first", "Pat"), new("last", "Shu"), new("age", "21")]); db.HashSet("student:5555", [new("first", "Joen"), new("last", "Ko"), new("age", "20")]); db.HashSet("teacher:6666", [new("first", "Pat"), new("last", "Rod"), new("age", "20")]); + Assert.Equal(7, await DatabaseSizeAsync(db)); // in part, this is to allow replication to catch up + var noFilters = ft.Search(index, new()); Assert.Equal(4, noFilters.TotalResults); var res1 = ft.Search(index, new("@first:Jo*")); @@ -576,6 +580,7 @@ public void CreateNoParams(string endpointId) db.HashSet("student:3333", 
[new("first", "El"), new("last", "Mark"), new("age", 17)]); db.HashSet("pupil:4444", [new("first", "Pat"), new("last", "Shu"), new("age", 21)]); db.HashSet("student:5555", [new("first", "Joen"), new("last", "Ko"), new("age", 20)]); + Assert.Equal(4, DatabaseSize(db)); // in part, this is to allow replication to catch up SearchResult noFilters = ft.Search(index, new()); Assert.Equal(4, noFilters.TotalResults); @@ -604,6 +609,7 @@ public async Task CreateNoParamsAsync(string endpointId) db.HashSet("student:3333", [new("first", "El"), new("last", "Mark"), new("age", 17)]); db.HashSet("pupil:4444", [new("first", "Pat"), new("last", "Shu"), new("age", 21)]); db.HashSet("student:5555", [new("first", "Joen"), new("last", "Ko"), new("age", 20)]); + Assert.Equal(4, await DatabaseSizeAsync(db)); // in part, this is to allow replication to catch up SearchResult noFilters = ft.Search(index, new()); Assert.Equal(4, noFilters.TotalResults); @@ -1159,6 +1165,7 @@ public async Task TestCursor(string endpointId) AddDocument(db, new Document("data1").Set("name", "abc").Set("count", 10)); AddDocument(db, new Document("data2").Set("name", "def").Set("count", 5)); AddDocument(db, new Document("data3").Set("name", "def").Set("count", 25)); + Assert.Equal(3, DatabaseSize(db)); // in part, this is to allow replication to catch up AggregationRequest r = new AggregationRequest() .GroupBy("@name", Reducers.Sum("@count").As("sum")) @@ -1213,6 +1220,7 @@ public void TestCursorEnumerable(string endpointId) AddDocument(db, new Document("data1").Set("name", "abc").Set("count", 10)); AddDocument(db, new Document("data2").Set("name", "def").Set("count", 5)); AddDocument(db, new Document("data3").Set("name", "def").Set("count", 25)); + Assert.Equal(3, DatabaseSize(db)); // in part, this is to allow replication to catch up AggregationRequest r = new AggregationRequest() .GroupBy("@name", Reducers.Sum("@count").As("sum")) @@ -1250,6 +1258,7 @@ public async Task TestCursorAsync(string endpointId) 
AddDocument(db, new Document("data1").Set("name", "abc").Set("count", 10)); AddDocument(db, new Document("data2").Set("name", "def").Set("count", 5)); AddDocument(db, new Document("data3").Set("name", "def").Set("count", 25)); + Assert.Equal(3, await DatabaseSizeAsync(db)); // in part, this is to allow replication to catch up AggregationRequest r = new AggregationRequest() .GroupBy("@name", Reducers.Sum("@count").As("sum")) @@ -1304,6 +1313,7 @@ public async Task TestCursorEnumerableAsync(string endpointId) AddDocument(db, new Document("data1").Set("name", "abc").Set("count", 10)); AddDocument(db, new Document("data2").Set("name", "def").Set("count", 5)); AddDocument(db, new Document("data3").Set("name", "def").Set("count", 25)); + Assert.Equal(3, await DatabaseSizeAsync(db)); // in part, this is to allow replication to catch up AggregationRequest r = new AggregationRequest() .GroupBy("@name", Reducers.Sum("@count").As("sum")) @@ -2265,7 +2275,7 @@ public void TestQueryCommandBuilderScore() IDatabase db = GetCleanDatabase(); var ft = db.FT(); - db.Execute("JSON.SET", "doc:1", "$", "[{\"arr\": [1, 2, 3]}, {\"val\": \"hello\"}, {\"val\": \"world\"}]"); + db.Execute("JSON.SET", (RedisKey)"doc:1", "$", "[{\"arr\": [1, 2, 3]}, {\"val\": \"hello\"}, {\"val\": \"world\"}]"); db.Execute("FT.CREATE", "idx", "ON", "JSON", "PREFIX", "1", "doc:", "SCHEMA", "$..arr", "AS", "arr", "NUMERIC", "$..val", "AS", "val", "TEXT"); // sleep: Thread.Sleep(2000); @@ -2343,6 +2353,7 @@ public void TestLimit(string endpointId) Document doc2 = new("doc2", new() { { "t1", "b" }, { "t2", "a" } }); AddDocument(db, doc1); AddDocument(db, doc2); + Assert.Equal(2, DatabaseSize(db)); // in part, this is to allow replication to catch up var req = new AggregationRequest("*").SortBy("@t1").Limit(1); var res = ft.Aggregate("idx", req); @@ -2363,6 +2374,7 @@ public async Task TestLimitAsync(string endpointId) Document doc2 = new("doc2", new() { { "t1", "b" }, { "t2", "a" } }); AddDocument(db, doc1); 
AddDocument(db, doc2); + Assert.Equal(2, await DatabaseSizeAsync(db)); // in part, this is to allow replication to catch up var req = new AggregationRequest("*").SortBy("@t1").Limit(1, 1); var res = await ft.AggregateAsync("idx", req); @@ -2447,7 +2459,7 @@ public void VectorSimilaritySearch(string endpointId) float[] vec = [2, 2, 2, 2]; byte[] queryVec = MemoryMarshal.Cast(vec).ToArray(); - + Assert.Equal(4, DatabaseSize(db)); // in part, this is to allow replication to catch up var query = new Query("*=>[KNN 3 @vector $query_vec]") .AddParam("query_vec", queryVec) .SetSortBy("__vector_score") @@ -2485,6 +2497,7 @@ public void QueryingVectorFields(string endpointId) db.HashSet("b", "v", "aaaabaaa"); db.HashSet("c", "v", "aaaaabaa"); + Assert.Equal(3, DatabaseSize(db)); // in part, this is to allow replication to catch up var q = new Query("*=>[KNN 2 @v $vec]").ReturnFields("__v_score").Dialect(2); var res = ft.Search("idx", q.AddParam("vec", "aaaaaaaa")); Assert.Equal(2, res.TotalResults); @@ -2525,6 +2538,7 @@ public void TestQueryAddParam_DefaultDialect(string endpointId) db.HashSet("2", "numval", 2); db.HashSet("3", "numval", 3); + Assert.Equal(3, DatabaseSize(db)); // in part, this is to allow replication to catch up Query query = new Query("@numval:[$min $max]").AddParam("min", 1).AddParam("max", 2); var res = ft.Search("idx", query); Assert.Equal(2, res.TotalResults); @@ -2544,6 +2558,7 @@ public async Task TestQueryAddParam_DefaultDialectAsync(string endpointId) db.HashSet("2", "numval", 2); db.HashSet("3", "numval", 3); + Assert.Equal(3, await DatabaseSizeAsync(db)); // in part, this is to allow replication to catch up Query query = new Query("@numval:[$min $max]").AddParam("min", 1).AddParam("max", 2); var res = await ft.SearchAsync("idx", query); Assert.Equal(2, res.TotalResults); @@ -2563,6 +2578,7 @@ public void TestQueryParamsWithParams_DefaultDialect(string endpointId) db.HashSet("2", "numval", 2); db.HashSet("3", "numval", 3); + Assert.Equal(3, 
DatabaseSize(db)); // in part, this is to allow replication to catch up Query query = new Query("@numval:[$min $max]").AddParam("min", 1).AddParam("max", 2); var res = ft.Search("idx", query); Assert.Equal(2, res.TotalResults); @@ -2590,6 +2606,7 @@ public void TestBasicSpellCheck(string endpointId) db.HashSet("doc1", [new("name", "name2"), new("body", "body2")]); db.HashSet("doc1", [new("name", "name2"), new("body", "name2")]); + Assert.Equal(1, DatabaseSize(db)); // in part, this is to allow replication to catch up var reply = ft.SpellCheck(index, "name"); Assert.Single(reply.Keys); Assert.Equal("name", reply.Keys.First()); @@ -2610,6 +2627,7 @@ public async Task TestBasicSpellCheckAsync(string endpointId) db.HashSet("doc1", [new("name", "name2"), new("body", "body2")]); db.HashSet("doc1", [new("name", "name2"), new("body", "name2")]); + Assert.Equal(1, await DatabaseSizeAsync(db)); // in part, this is to allow replication to catch up var reply = await ft.SpellCheckAsync(index, "name"); Assert.Single(reply.Keys); Assert.Equal("name", reply.Keys.First()); @@ -2723,6 +2741,7 @@ public async Task TestQueryParamsWithParams_DefaultDialectAsync(string endpointI db.HashSet("2", "numval", 2); db.HashSet("3", "numval", 3); + Assert.Equal(3, DatabaseSize(db)); // in part, this is to allow replication to catch up Query query = new Query("@numval:[$min $max]").AddParam("min", 1).AddParam("max", 2); var res = await ft.SearchAsync("idx", query); Assert.Equal(2, res.TotalResults); From 9525f387bb130ecb2a64a483ef476e80ab927311 Mon Sep 17 00:00:00 2001 From: Marc Gravell Date: Wed, 10 Sep 2025 15:40:09 +0100 Subject: [PATCH 11/27] rev SE.Redis for RedisValue fix --- Directory.Packages.props | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Directory.Packages.props b/Directory.Packages.props index 7467f144..6625890b 100644 --- a/Directory.Packages.props +++ b/Directory.Packages.props @@ -12,7 +12,7 @@ - + From aba8dda1235186d21a66a06b20015e34ba4f79ac Mon Sep 17 
00:00:00 2001 From: Marc Gravell Date: Wed, 10 Sep 2025 15:59:43 +0100 Subject: [PATCH 12/27] more test fixes --- tests/NRedisStack.Tests/Search/SearchTests.cs | 37 ++++++++++++++++--- 1 file changed, 32 insertions(+), 5 deletions(-) diff --git a/tests/NRedisStack.Tests/Search/SearchTests.cs b/tests/NRedisStack.Tests/Search/SearchTests.cs index 03fbb10d..4b51b483 100644 --- a/tests/NRedisStack.Tests/Search/SearchTests.cs +++ b/tests/NRedisStack.Tests/Search/SearchTests.cs @@ -223,21 +223,26 @@ public void TestAggregationsLoad(string endpointId) ft.Create("idx", new(), sc); AddDocument(db, new Document("doc1").Set("t1", "hello").Set("t2", "world")); + Assert.Equal(1, DatabaseSize(db)); // in part, this is to allow replication to catch up // load t1 var req = new AggregationRequest("*").Load(new FieldName("t1")); var res = ft.Aggregate("idx", req); + Assert.NotNull(res[0]?["t1"]); Assert.Equal("hello", res[0]!["t1"].ToString()); // load t2 req = new AggregationRequest("*").Load(new FieldName("t2")); res = ft.Aggregate("idx", req); + Assert.NotNull(res[0]?["t2"]); Assert.Equal("world", res[0]!["t2"]); // load all req = new AggregationRequest("*").LoadAll(); res = ft.Aggregate("idx", req); + Assert.NotNull(res[0]?["t1"]); Assert.Equal("hello", res[0]!["t1"].ToString()); + Assert.NotNull(res[0]?["t2"]); Assert.Equal("world", res[0]!["t2"]); } @@ -251,26 +256,29 @@ public async Task TestAggregationsLoadAsync(string endpointId) await ft.CreateAsync("idx", new(), sc); AddDocument(db, new Document("doc1").Set("t1", "hello").Set("t2", "world")); + Assert.Equal(1, await DatabaseSizeAsync(db)); // in part, this is to allow replication to catch up // load t1 var req = new AggregationRequest("*").Load(new FieldName("t1")); var res = await ft.AggregateAsync("idx", req); + Assert.NotNull(res[0]?["t1"]); Assert.Equal("hello", res[0]!["t1"].ToString()); // load t2 req = new AggregationRequest("*").Load(new FieldName("t2")); res = await ft.AggregateAsync("idx", req); + 
Assert.NotNull(res[0]?["t2"]); Assert.Equal("world", res[0]!["t2"]); // load all req = new AggregationRequest("*").LoadAll(); res = await ft.AggregateAsync("idx", req); + Assert.NotNull(res[0]?["t1"]); Assert.Equal("hello", res[0]!["t1"].ToString()); + Assert.NotNull(res[0]?["t2"]); Assert.Equal("world", res[0]!["t2"]); } - - [SkippableTheory] [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public void TestAggregationRequestParamsDialect(string endpointId) @@ -418,7 +426,16 @@ public void TestAlias(string endpointId) doc.Add("field1", "value"); AddDocument(db, "doc1", doc); - Assert.True(ft.AliasAdd("ALIAS1", index)); + try + { + Assert.True(ft.AliasAdd("ALIAS1", index)); + } + catch (RedisServerException rse) + { + Skip.If(rse.Message.StartsWith("CROSSSLOT"), "legacy failure"); + throw; + } + SearchResult res1 = ft.Search("ALIAS1", new Query("*").ReturnFields("field1")); Assert.Equal(1, res1.TotalResults); Assert.Equal("value", res1.Documents[0]["field1"]); @@ -447,7 +464,16 @@ public async Task TestAliasAsync(string endpointId) doc.Add("field1", "value"); AddDocument(db, "doc1", doc); - Assert.True(await ft.AliasAddAsync("ALIAS1", index)); + try + { + Assert.True(await ft.AliasAddAsync("ALIAS1", index)); + } + catch (RedisServerException rse) + { + Skip.If(rse.Message.StartsWith("CROSSSLOT"), "legacy failure"); + throw; + } + SearchResult res1 = ft.Search("ALIAS1", new Query("*").ReturnFields("field1")); Assert.Equal(1, res1.TotalResults); Assert.Equal("value", res1.Documents[0]["field1"]); @@ -642,6 +668,7 @@ public void CreateWithFieldNames(string endpointId) db.HashSet("pupil:4444", [new("first", "Pat"), new("last", "Shu"), new("age", "21")]); db.HashSet("student:5555", [new("first", "Joen"), new("last", "Ko"), new("age", "20")]); db.HashSet("teacher:6666", [new("first", "Pat"), new("last", "Rod"), new("age", "20")]); + Assert.Equal(7, DatabaseSize(db)); // in part, this is to allow replication to catch up 
SearchResult noFilters = ft.Search(index, new()); Assert.Equal(5, noFilters.TotalResults); @@ -3631,7 +3658,7 @@ public void TestDocumentLoad_Issue352() /// https://redis.io/docs/latest/commands/ft.search/#:~:text=If%20a%20relevant%20key%20expires,the%20total%20number%20of%20results. /// [SkippableTheory] - [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] + [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))] public async Task TestDocumentLoadWithDB_Issue352(string endpointId) { IDatabase db = GetCleanDatabase(endpointId); From 41aab7e1fd1491279bbc6dc92a455095993e3709 Mon Sep 17 00:00:00 2001 From: Marc Gravell Date: Wed, 10 Sep 2025 16:04:54 +0100 Subject: [PATCH 13/27] dotnet format... again --- tests/NRedisStack.Tests/Search/SearchTests.cs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/NRedisStack.Tests/Search/SearchTests.cs b/tests/NRedisStack.Tests/Search/SearchTests.cs index 4b51b483..e4c58e0d 100644 --- a/tests/NRedisStack.Tests/Search/SearchTests.cs +++ b/tests/NRedisStack.Tests/Search/SearchTests.cs @@ -430,7 +430,7 @@ public void TestAlias(string endpointId) { Assert.True(ft.AliasAdd("ALIAS1", index)); } - catch (RedisServerException rse) + catch (RedisServerException rse) { Skip.If(rse.Message.StartsWith("CROSSSLOT"), "legacy failure"); throw; @@ -468,7 +468,7 @@ public async Task TestAliasAsync(string endpointId) { Assert.True(await ft.AliasAddAsync("ALIAS1", index)); } - catch (RedisServerException rse) + catch (RedisServerException rse) { Skip.If(rse.Message.StartsWith("CROSSSLOT"), "legacy failure"); throw; From ec6f1eac0582536398d123b8ce5942ce5b06d576 Mon Sep 17 00:00:00 2001 From: Marc Gravell Date: Wed, 10 Sep 2025 16:15:05 +0100 Subject: [PATCH 14/27] fix routing of dictionary methods --- src/NRedisStack/Search/SearchCommandBuilder.cs | 6 +++--- tests/NRedisStack.Tests/Search/SearchTests.cs | 1 + 2 files changed, 4 
insertions(+), 3 deletions(-) diff --git a/src/NRedisStack/Search/SearchCommandBuilder.cs b/src/NRedisStack/Search/SearchCommandBuilder.cs index b2206a07..12fa9f07 100644 --- a/src/NRedisStack/Search/SearchCommandBuilder.cs +++ b/src/NRedisStack/Search/SearchCommandBuilder.cs @@ -91,7 +91,7 @@ public static SerializedCommand DictAdd(string dict, params string[] terms) throw new ArgumentOutOfRangeException("At least one term must be provided"); } - var args = new List(terms.Length + 1) { dict }; + var args = new List(terms.Length + 1) { (RedisKey)dict }; foreach (var t in terms) { args.Add(t); @@ -107,7 +107,7 @@ public static SerializedCommand DictDel(string dict, params string[] terms) throw new ArgumentOutOfRangeException("At least one term must be provided"); } - var args = new List(terms.Length + 1) { dict }; + var args = new List(terms.Length + 1) { (RedisKey)dict }; foreach (var t in terms) { args.Add(t); @@ -118,7 +118,7 @@ public static SerializedCommand DictDel(string dict, params string[] terms) public static SerializedCommand DictDump(string dict) { - return new(FT.DICTDUMP, dict); + return new(FT.DICTDUMP, (RedisKey)dict); } public static SerializedCommand DropIndex(string indexName, bool dd = false) diff --git a/tests/NRedisStack.Tests/Search/SearchTests.cs b/tests/NRedisStack.Tests/Search/SearchTests.cs index e4c58e0d..8589a629 100644 --- a/tests/NRedisStack.Tests/Search/SearchTests.cs +++ b/tests/NRedisStack.Tests/Search/SearchTests.cs @@ -2679,6 +2679,7 @@ public void TestCrossTermDictionary(string endpointId) } }; + Assert.Equal(0, DatabaseSize(db)); // in part, this is to allow replication to catch up Assert.Equal(expected, ft.SpellCheck(index, "Tooni toque kerfuffle", new FTSpellCheckParams() From 38000736af800224325382ca750d1d06a3a8151f Mon Sep 17 00:00:00 2001 From: Marc Gravell Date: Wed, 10 Sep 2025 16:20:53 +0100 Subject: [PATCH 15/27] actually: not a key --- src/NRedisStack/Search/SearchCommandBuilder.cs | 6 +++--- 
tests/NRedisStack.Tests/Search/SearchTests.cs | 7 +++++-- 2 files changed, 8 insertions(+), 5 deletions(-) diff --git a/src/NRedisStack/Search/SearchCommandBuilder.cs b/src/NRedisStack/Search/SearchCommandBuilder.cs index 12fa9f07..b2206a07 100644 --- a/src/NRedisStack/Search/SearchCommandBuilder.cs +++ b/src/NRedisStack/Search/SearchCommandBuilder.cs @@ -91,7 +91,7 @@ public static SerializedCommand DictAdd(string dict, params string[] terms) throw new ArgumentOutOfRangeException("At least one term must be provided"); } - var args = new List(terms.Length + 1) { (RedisKey)dict }; + var args = new List(terms.Length + 1) { dict }; foreach (var t in terms) { args.Add(t); @@ -107,7 +107,7 @@ public static SerializedCommand DictDel(string dict, params string[] terms) throw new ArgumentOutOfRangeException("At least one term must be provided"); } - var args = new List(terms.Length + 1) { (RedisKey)dict }; + var args = new List(terms.Length + 1) { dict }; foreach (var t in terms) { args.Add(t); @@ -118,7 +118,7 @@ public static SerializedCommand DictDel(string dict, params string[] terms) public static SerializedCommand DictDump(string dict) { - return new(FT.DICTDUMP, (RedisKey)dict); + return new(FT.DICTDUMP, dict); } public static SerializedCommand DropIndex(string indexName, bool dd = false) diff --git a/tests/NRedisStack.Tests/Search/SearchTests.cs b/tests/NRedisStack.Tests/Search/SearchTests.cs index 8589a629..d5b4125a 100644 --- a/tests/NRedisStack.Tests/Search/SearchTests.cs +++ b/tests/NRedisStack.Tests/Search/SearchTests.cs @@ -1498,7 +1498,7 @@ public void TestDictionary(string endpointId) var ft = db.FT(); Assert.Equal(3L, ft.DictAdd("dict", "bar", "foo", "hello world")); - + Assert.Equal(0, DatabaseSize(db)); // in part, this is to allow replication to catch up var dumResult = ft.DictDump("dict"); int i = 0; Assert.Equal("bar", dumResult[i++].ToString()); @@ -1506,6 +1506,7 @@ public void TestDictionary(string endpointId) Assert.Equal("hello world", 
dumResult[i].ToString()); Assert.Equal(3L, ft.DictDel("dict", "foo", "bar", "hello world")); + Assert.Equal(0, DatabaseSize(db)); // in part, this is to allow replication to catch up Assert.Empty(ft.DictDump("dict")); } @@ -1672,7 +1673,7 @@ public async Task TestDictionaryAsync(string endpointId) var ft = db.FT(); Assert.Equal(3L, await ft.DictAddAsync("dict", "bar", "foo", "hello world")); - + Assert.Equal(0, await DatabaseSizeAsync(db)); // in part, this is to allow replication to catch up var dumResult = await ft.DictDumpAsync("dict"); int i = 0; Assert.Equal("bar", dumResult[i++].ToString()); @@ -1680,6 +1681,7 @@ public async Task TestDictionaryAsync(string endpointId) Assert.Equal("hello world", dumResult[i].ToString()); Assert.Equal(3L, await ft.DictDelAsync("dict", "foo", "bar", "hello world")); + Assert.Equal(0, await DatabaseSizeAsync(db)); // in part, this is to allow replication to catch up Assert.Empty((await ft.DictDumpAsync("dict"))); } @@ -2704,6 +2706,7 @@ public async Task TestCrossTermDictionaryAsync(string endpointId) } }; + Assert.Equal(0, DatabaseSize(db)); // in part, this is to allow replication to catch up Assert.Equal(expected, await ft.SpellCheckAsync(index, "Tooni toque kerfuffle", new FTSpellCheckParams() From 2dfbb609dbb4eb94b6cb344ccc1753dd742a537d Mon Sep 17 00:00:00 2001 From: Marc Gravell Date: Wed, 10 Sep 2025 16:46:58 +0100 Subject: [PATCH 16/27] try to add more replication stability --- tests/NRedisStack.Tests/Search/SearchTests.cs | 96 ++++++++++++------- 1 file changed, 59 insertions(+), 37 deletions(-) diff --git a/tests/NRedisStack.Tests/Search/SearchTests.cs b/tests/NRedisStack.Tests/Search/SearchTests.cs index d5b4125a..4ea829aa 100644 --- a/tests/NRedisStack.Tests/Search/SearchTests.cs +++ b/tests/NRedisStack.Tests/Search/SearchTests.cs @@ -44,6 +44,24 @@ private void AddDocument(IDatabase db, string key, Dictionary ob db.HashSet(key, hash); } + private void AssertDatabaseSize(IDatabase db, int expected) + { + // in 
part, this is to allow replication to catch up + for (int i = 0; i < 10; i++) + { + Assert.Equal(expected, DatabaseSize(db)); + } + } + + private async Task AssertDatabaseSizeAsync(IDatabase db, int expected) + { + // in part, this is to allow replication to catch up + for (int i = 0; i < 10; i++) + { + Assert.Equal(expected, await DatabaseSizeAsync(db)); + } + } + [SkipIfRedisTheory(Is.Enterprise)] [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public void TestAggregationRequestVerbatim(string endpointId) @@ -223,7 +241,7 @@ public void TestAggregationsLoad(string endpointId) ft.Create("idx", new(), sc); AddDocument(db, new Document("doc1").Set("t1", "hello").Set("t2", "world")); - Assert.Equal(1, DatabaseSize(db)); // in part, this is to allow replication to catch up + AssertDatabaseSize(db, 1); // load t1 var req = new AggregationRequest("*").Load(new FieldName("t1")); @@ -256,7 +274,7 @@ public async Task TestAggregationsLoadAsync(string endpointId) await ft.CreateAsync("idx", new(), sc); AddDocument(db, new Document("doc1").Set("t1", "hello").Set("t2", "world")); - Assert.Equal(1, await DatabaseSizeAsync(db)); // in part, this is to allow replication to catch up + await AssertDatabaseSizeAsync(db, 1); // load t1 var req = new AggregationRequest("*").Load(new FieldName("t1")); @@ -511,13 +529,17 @@ public void TestApplyAndFilterAggregations(string endpointId) AddDocument(db, new Document("data4").Set("name", "abc").Set("subj1", 30).Set("subj2", 20)); AddDocument(db, new Document("data5").Set("name", "def").Set("subj1", 65).Set("subj2", 45)); AddDocument(db, new Document("data6").Set("name", "ghi").Set("subj1", 70).Set("subj2", 70)); - Assert.Equal(6, DatabaseSize(db)); // in part, this is to allow replication to catch up + AssertDatabaseSize(db, 6); AggregationRequest r = new AggregationRequest().Apply("(@subj1+@subj2)/2", "attemptavg") .GroupBy("@name", Reducers.Avg("@attemptavg").As("avgscore")) 
.Filter("@avgscore>=50") .SortBy(10, SortedField.Asc("@name")); + // abc: 20+70 => 45, 30+20 => 25, filtered out + // def: 60+40 => 50, 65+45 => 55, avg 52.5 + // ghi: 50+80 => 65, 70+70 => 70, avg 67.5 + // actual search AggregationResult res = ft.Aggregate(index, r); Assert.Equal(2, res.TotalResults); @@ -549,7 +571,7 @@ public void TestCreate(string endpointId) db.HashSet("pupil:4444", [new("first", "Pat"), new("last", "Shu"), new("age", "21")]); db.HashSet("student:5555", [new("first", "Joen"), new("last", "Ko"), new("age", "20")]); db.HashSet("teacher:6666", [new("first", "Pat"), new("last", "Rod"), new("age", "20")]); - Assert.Equal(7, DatabaseSize(db)); // in part, this is to allow replication to catch up + AssertDatabaseSize(db, 7); var noFilters = ft.Search(index, new()); Assert.Equal(4, noFilters.TotalResults); @@ -580,7 +602,7 @@ public async Task TestCreateAsync(string endpointId) db.HashSet("pupil:4444", [new("first", "Pat"), new("last", "Shu"), new("age", "21")]); db.HashSet("student:5555", [new("first", "Joen"), new("last", "Ko"), new("age", "20")]); db.HashSet("teacher:6666", [new("first", "Pat"), new("last", "Rod"), new("age", "20")]); - Assert.Equal(7, await DatabaseSizeAsync(db)); // in part, this is to allow replication to catch up + await AssertDatabaseSizeAsync(db, 7); var noFilters = ft.Search(index, new()); Assert.Equal(4, noFilters.TotalResults); @@ -606,7 +628,7 @@ public void CreateNoParams(string endpointId) db.HashSet("student:3333", [new("first", "El"), new("last", "Mark"), new("age", 17)]); db.HashSet("pupil:4444", [new("first", "Pat"), new("last", "Shu"), new("age", 21)]); db.HashSet("student:5555", [new("first", "Joen"), new("last", "Ko"), new("age", 20)]); - Assert.Equal(4, DatabaseSize(db)); // in part, this is to allow replication to catch up + AssertDatabaseSize(db, 4); SearchResult noFilters = ft.Search(index, new()); Assert.Equal(4, noFilters.TotalResults); @@ -635,7 +657,7 @@ public async Task CreateNoParamsAsync(string 
endpointId) db.HashSet("student:3333", [new("first", "El"), new("last", "Mark"), new("age", 17)]); db.HashSet("pupil:4444", [new("first", "Pat"), new("last", "Shu"), new("age", 21)]); db.HashSet("student:5555", [new("first", "Joen"), new("last", "Ko"), new("age", 20)]); - Assert.Equal(4, await DatabaseSizeAsync(db)); // in part, this is to allow replication to catch up + await AssertDatabaseSizeAsync(db, 4); SearchResult noFilters = ft.Search(index, new()); Assert.Equal(4, noFilters.TotalResults); @@ -668,7 +690,7 @@ public void CreateWithFieldNames(string endpointId) db.HashSet("pupil:4444", [new("first", "Pat"), new("last", "Shu"), new("age", "21")]); db.HashSet("student:5555", [new("first", "Joen"), new("last", "Ko"), new("age", "20")]); db.HashSet("teacher:6666", [new("first", "Pat"), new("last", "Rod"), new("age", "20")]); - Assert.Equal(7, DatabaseSize(db)); // in part, this is to allow replication to catch up + AssertDatabaseSize(db, 7); SearchResult noFilters = ft.Search(index, new()); Assert.Equal(5, noFilters.TotalResults); @@ -750,13 +772,12 @@ public void AlterAdd(string endpointId) var fields = new HashEntry("title", "hello world"); //fields.("title", "hello world"); - Assert.Equal(0, DatabaseSize(db, out int replicas)); - Log($"Replicas: {replicas}"); + AssertDatabaseSize(db, 0); for (int i = 0; i < 100; i++) { db.HashSet($"doc{i}", fields.Name, fields.Value); } - Assert.Equal(100, DatabaseSize(db)); + AssertDatabaseSize(db, 100); var info = ft.Info(index); Assert.Equal(index, info.IndexName); if (endpointId == EndpointsFixture.Env.Cluster) @@ -782,7 +803,7 @@ public void AlterAdd(string endpointId) SearchResult res2 = ft.Search(index, new("@tags:{tagA}")); Assert.Equal(100, res2.TotalResults); - Assert.Equal(100, DatabaseSize(db)); + AssertDatabaseSize(db, 100); info = ft.Info(index); Assert.Equal(index, info.IndexName); @@ -867,7 +888,7 @@ public async Task AlterAddAsync(string endpointId) SearchResult res2 = ft.Search(index, new("@tags:{tagA}")); 
Assert.Equal(100, res2.TotalResults); - Assert.Equal(100, await DatabaseSizeAsync(db)); + await AssertDatabaseSizeAsync(db, 100); info = await ft.InfoAsync(index); Assert.Equal(index, info.IndexName); @@ -1192,7 +1213,7 @@ public async Task TestCursor(string endpointId) AddDocument(db, new Document("data1").Set("name", "abc").Set("count", 10)); AddDocument(db, new Document("data2").Set("name", "def").Set("count", 5)); AddDocument(db, new Document("data3").Set("name", "def").Set("count", 25)); - Assert.Equal(3, DatabaseSize(db)); // in part, this is to allow replication to catch up + AssertDatabaseSize(db, 3); AggregationRequest r = new AggregationRequest() .GroupBy("@name", Reducers.Sum("@count").As("sum")) @@ -1247,7 +1268,7 @@ public void TestCursorEnumerable(string endpointId) AddDocument(db, new Document("data1").Set("name", "abc").Set("count", 10)); AddDocument(db, new Document("data2").Set("name", "def").Set("count", 5)); AddDocument(db, new Document("data3").Set("name", "def").Set("count", 25)); - Assert.Equal(3, DatabaseSize(db)); // in part, this is to allow replication to catch up + AssertDatabaseSize(db, 3); AggregationRequest r = new AggregationRequest() .GroupBy("@name", Reducers.Sum("@count").As("sum")) @@ -1285,7 +1306,7 @@ public async Task TestCursorAsync(string endpointId) AddDocument(db, new Document("data1").Set("name", "abc").Set("count", 10)); AddDocument(db, new Document("data2").Set("name", "def").Set("count", 5)); AddDocument(db, new Document("data3").Set("name", "def").Set("count", 25)); - Assert.Equal(3, await DatabaseSizeAsync(db)); // in part, this is to allow replication to catch up + await AssertDatabaseSizeAsync(db, 3); AggregationRequest r = new AggregationRequest() .GroupBy("@name", Reducers.Sum("@count").As("sum")) @@ -1340,7 +1361,7 @@ public async Task TestCursorEnumerableAsync(string endpointId) AddDocument(db, new Document("data1").Set("name", "abc").Set("count", 10)); AddDocument(db, new Document("data2").Set("name", 
"def").Set("count", 5)); AddDocument(db, new Document("data3").Set("name", "def").Set("count", 25)); - Assert.Equal(3, await DatabaseSizeAsync(db)); // in part, this is to allow replication to catch up + await AssertDatabaseSizeAsync(db, 3); AggregationRequest r = new AggregationRequest() .GroupBy("@name", Reducers.Sum("@count").As("sum")) @@ -1498,7 +1519,7 @@ public void TestDictionary(string endpointId) var ft = db.FT(); Assert.Equal(3L, ft.DictAdd("dict", "bar", "foo", "hello world")); - Assert.Equal(0, DatabaseSize(db)); // in part, this is to allow replication to catch up + AssertDatabaseSize(db, 0); var dumResult = ft.DictDump("dict"); int i = 0; Assert.Equal("bar", dumResult[i++].ToString()); @@ -1506,7 +1527,7 @@ public void TestDictionary(string endpointId) Assert.Equal("hello world", dumResult[i].ToString()); Assert.Equal(3L, ft.DictDel("dict", "foo", "bar", "hello world")); - Assert.Equal(0, DatabaseSize(db)); // in part, this is to allow replication to catch up + AssertDatabaseSize(db, 0); Assert.Empty(ft.DictDump("dict")); } @@ -1541,7 +1562,7 @@ public void TestDropIndex(string endpointId) Assert.Contains("no such index", ex.Message, StringComparison.OrdinalIgnoreCase); } - Assert.Equal(100, DatabaseSize(db)); + AssertDatabaseSize(db, 100); } private int DatabaseSize(IDatabase db) => DatabaseSize(db, out _); @@ -1610,7 +1631,7 @@ public async Task TestDropIndexAsync(string endpointId) Assert.Contains("no such index", ex.Message, StringComparison.OrdinalIgnoreCase); } - Assert.Equal(100, DatabaseSize(db)); + AssertDatabaseSize(db, 100); } [SkippableTheory] @@ -1636,7 +1657,7 @@ public void dropIndexDD(string endpointId) RedisResult[] keys = (RedisResult[])db.Execute("KEYS", "*")!; Assert.Empty(keys); - Assert.Equal(0, DatabaseSize(db)); + AssertDatabaseSize(db, 0); } [SkippableTheory] @@ -1662,7 +1683,7 @@ public async Task dropIndexDDAsync(string endpointId) RedisResult[] keys = (RedisResult[])db.Execute("KEYS", "*")!; Assert.Empty(keys); - 
Assert.Equal(0, DatabaseSize(db)); + AssertDatabaseSize(db, 0); } [SkippableTheory] @@ -1673,7 +1694,7 @@ public async Task TestDictionaryAsync(string endpointId) var ft = db.FT(); Assert.Equal(3L, await ft.DictAddAsync("dict", "bar", "foo", "hello world")); - Assert.Equal(0, await DatabaseSizeAsync(db)); // in part, this is to allow replication to catch up + await AssertDatabaseSizeAsync(db, 0); var dumResult = await ft.DictDumpAsync("dict"); int i = 0; Assert.Equal("bar", dumResult[i++].ToString()); @@ -1681,7 +1702,7 @@ public async Task TestDictionaryAsync(string endpointId) Assert.Equal("hello world", dumResult[i].ToString()); Assert.Equal(3L, await ft.DictDelAsync("dict", "foo", "bar", "hello world")); - Assert.Equal(0, await DatabaseSizeAsync(db)); // in part, this is to allow replication to catch up + await AssertDatabaseSizeAsync(db, 0); Assert.Empty((await ft.DictDumpAsync("dict"))); } @@ -2382,7 +2403,7 @@ public void TestLimit(string endpointId) Document doc2 = new("doc2", new() { { "t1", "b" }, { "t2", "a" } }); AddDocument(db, doc1); AddDocument(db, doc2); - Assert.Equal(2, DatabaseSize(db)); // in part, this is to allow replication to catch up + AssertDatabaseSize(db, 2); var req = new AggregationRequest("*").SortBy("@t1").Limit(1); var res = ft.Aggregate("idx", req); @@ -2403,7 +2424,7 @@ public async Task TestLimitAsync(string endpointId) Document doc2 = new("doc2", new() { { "t1", "b" }, { "t2", "a" } }); AddDocument(db, doc1); AddDocument(db, doc2); - Assert.Equal(2, await DatabaseSizeAsync(db)); // in part, this is to allow replication to catch up + await AssertDatabaseSizeAsync(db, 2); var req = new AggregationRequest("*").SortBy("@t1").Limit(1, 1); var res = await ft.AggregateAsync("idx", req); @@ -2488,7 +2509,7 @@ public void VectorSimilaritySearch(string endpointId) float[] vec = [2, 2, 2, 2]; byte[] queryVec = MemoryMarshal.Cast(vec).ToArray(); - Assert.Equal(4, DatabaseSize(db)); // in part, this is to allow replication to catch up + 
AssertDatabaseSize(db, 4); var query = new Query("*=>[KNN 3 @vector $query_vec]") .AddParam("query_vec", queryVec) .SetSortBy("__vector_score") @@ -2526,7 +2547,7 @@ public void QueryingVectorFields(string endpointId) db.HashSet("b", "v", "aaaabaaa"); db.HashSet("c", "v", "aaaaabaa"); - Assert.Equal(3, DatabaseSize(db)); // in part, this is to allow replication to catch up + AssertDatabaseSize(db, 3); var q = new Query("*=>[KNN 2 @v $vec]").ReturnFields("__v_score").Dialect(2); var res = ft.Search("idx", q.AddParam("vec", "aaaaaaaa")); Assert.Equal(2, res.TotalResults); @@ -2567,7 +2588,7 @@ public void TestQueryAddParam_DefaultDialect(string endpointId) db.HashSet("2", "numval", 2); db.HashSet("3", "numval", 3); - Assert.Equal(3, DatabaseSize(db)); // in part, this is to allow replication to catch up + AssertDatabaseSize(db, 3); Query query = new Query("@numval:[$min $max]").AddParam("min", 1).AddParam("max", 2); var res = ft.Search("idx", query); Assert.Equal(2, res.TotalResults); @@ -2587,7 +2608,7 @@ public async Task TestQueryAddParam_DefaultDialectAsync(string endpointId) db.HashSet("2", "numval", 2); db.HashSet("3", "numval", 3); - Assert.Equal(3, await DatabaseSizeAsync(db)); // in part, this is to allow replication to catch up + await AssertDatabaseSizeAsync(db, 3); Query query = new Query("@numval:[$min $max]").AddParam("min", 1).AddParam("max", 2); var res = await ft.SearchAsync("idx", query); Assert.Equal(2, res.TotalResults); @@ -2607,7 +2628,7 @@ public void TestQueryParamsWithParams_DefaultDialect(string endpointId) db.HashSet("2", "numval", 2); db.HashSet("3", "numval", 3); - Assert.Equal(3, DatabaseSize(db)); // in part, this is to allow replication to catch up + AssertDatabaseSize(db, 3); Query query = new Query("@numval:[$min $max]").AddParam("min", 1).AddParam("max", 2); var res = ft.Search("idx", query); Assert.Equal(2, res.TotalResults); @@ -2635,7 +2656,7 @@ public void TestBasicSpellCheck(string endpointId) db.HashSet("doc1", [new("name", 
"name2"), new("body", "body2")]); db.HashSet("doc1", [new("name", "name2"), new("body", "name2")]); - Assert.Equal(1, DatabaseSize(db)); // in part, this is to allow replication to catch up + AssertDatabaseSize(db, 1); var reply = ft.SpellCheck(index, "name"); Assert.Single(reply.Keys); Assert.Equal("name", reply.Keys.First()); @@ -2656,7 +2677,7 @@ public async Task TestBasicSpellCheckAsync(string endpointId) db.HashSet("doc1", [new("name", "name2"), new("body", "body2")]); db.HashSet("doc1", [new("name", "name2"), new("body", "name2")]); - Assert.Equal(1, await DatabaseSizeAsync(db)); // in part, this is to allow replication to catch up + await AssertDatabaseSizeAsync(db, 1); var reply = await ft.SpellCheckAsync(index, "name"); Assert.Single(reply.Keys); Assert.Equal("name", reply.Keys.First()); @@ -2681,7 +2702,8 @@ public void TestCrossTermDictionary(string endpointId) } }; - Assert.Equal(0, DatabaseSize(db)); // in part, this is to allow replication to catch up + AssertDatabaseSize(db, 0); + Assert.Equal(expected, ft.SpellCheck(index, "Tooni toque kerfuffle", new FTSpellCheckParams() @@ -2706,7 +2728,7 @@ public async Task TestCrossTermDictionaryAsync(string endpointId) } }; - Assert.Equal(0, DatabaseSize(db)); // in part, this is to allow replication to catch up + AssertDatabaseSize(db, 0); Assert.Equal(expected, await ft.SpellCheckAsync(index, "Tooni toque kerfuffle", new FTSpellCheckParams() @@ -2772,7 +2794,7 @@ public async Task TestQueryParamsWithParams_DefaultDialectAsync(string endpointI db.HashSet("2", "numval", 2); db.HashSet("3", "numval", 3); - Assert.Equal(3, DatabaseSize(db)); // in part, this is to allow replication to catch up + AssertDatabaseSize(db, 3); Query query = new Query("@numval:[$min $max]").AddParam("min", 1).AddParam("max", 2); var res = await ft.SearchAsync("idx", query); Assert.Equal(2, res.TotalResults); From 5d05a64c1deb6cf4702c03b63e85fa89fcca438c Mon Sep 17 00:00:00 2001 From: Marc Gravell Date: Wed, 10 Sep 2025 17:05:20 +0100 
Subject: [PATCH 17/27] TestApplyAndFilterAggregations - loop attempt --- tests/NRedisStack.Tests/Search/SearchTests.cs | 42 ++++++++++++------- 1 file changed, 26 insertions(+), 16 deletions(-) diff --git a/tests/NRedisStack.Tests/Search/SearchTests.cs b/tests/NRedisStack.Tests/Search/SearchTests.cs index 4ea829aa..afd6476d 100644 --- a/tests/NRedisStack.Tests/Search/SearchTests.cs +++ b/tests/NRedisStack.Tests/Search/SearchTests.cs @@ -531,28 +531,38 @@ public void TestApplyAndFilterAggregations(string endpointId) AddDocument(db, new Document("data6").Set("name", "ghi").Set("subj1", 70).Set("subj2", 70)); AssertDatabaseSize(db, 6); - AggregationRequest r = new AggregationRequest().Apply("(@subj1+@subj2)/2", "attemptavg") - .GroupBy("@name", Reducers.Avg("@attemptavg").As("avgscore")) - .Filter("@avgscore>=50") - .SortBy(10, SortedField.Asc("@name")); + int maxAttempts = endpointId == EndpointsFixture.Env.Cluster ? 10 : 3; + for (int attempt = 1; attempt <= maxAttempts; attempt++) + { + AggregationRequest r = new AggregationRequest().Apply("(@subj1+@subj2)/2", "attemptavg") + .GroupBy("@name", Reducers.Avg("@attemptavg").As("avgscore")) + .Filter("@avgscore>=50") + .SortBy(10, SortedField.Asc("@name")); - // abc: 20+70 => 45, 30+20 => 25, filtered out - // def: 60+40 => 50, 65+45 => 55, avg 52.5 - // ghi: 50+80 => 65, 70+70 => 70, avg 67.5 + // abc: 20+70 => 45, 30+20 => 25, filtered out + // def: 60+40 => 50, 65+45 => 55, avg 52.5 + // ghi: 50+80 => 65, 70+70 => 70, avg 67.5 - // actual search - AggregationResult res = ft.Aggregate(index, r); - Assert.Equal(2, res.TotalResults); + // actual search + AggregationResult res = ft.Aggregate(index, r); + Assert.Equal(2, res.TotalResults); - Row r1 = res.GetRow(0); - Assert.Equal("def", r1.GetString("name")); - Assert.Equal(52.5, r1.GetDouble("avgscore"), 0); + Row r1 = res.GetRow(0); + Row r2 = res.GetRow(1); + Log($"Attempt {attempt} of {maxAttempts}: avgscore {r2.GetDouble("avgscore")}"); + if 
(!IsNear(r2.GetDouble("avgscore"), 67.5)) continue; // this test can be flakey on cluster - Row r2 = res.GetRow(1); - Assert.Equal("ghi", r2.GetString("name")); - Assert.Equal(67.5, r2.GetDouble("avgscore"), 0); + Assert.Equal("def", r1.GetString("name")); + Assert.Equal(52.5, r1.GetDouble("avgscore"), 0); + + Assert.Equal("ghi", r2.GetString("name")); + Assert.Equal(67.5, r2.GetDouble("avgscore"), 0); + break; // success! + } } + private static bool IsNear(double a, double b, double epsilon = 0.1) => Math.Abs(a - b) < epsilon; + [SkippableTheory] [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public void TestCreate(string endpointId) From d6e81aa5ff06d0002f9bc8569128059d2b0e4831 Mon Sep 17 00:00:00 2001 From: Marc Gravell Date: Wed, 10 Sep 2025 17:06:29 +0100 Subject: [PATCH 18/27] only continue on last attempt! --- tests/NRedisStack.Tests/Search/SearchTests.cs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/NRedisStack.Tests/Search/SearchTests.cs b/tests/NRedisStack.Tests/Search/SearchTests.cs index afd6476d..bbfef1dd 100644 --- a/tests/NRedisStack.Tests/Search/SearchTests.cs +++ b/tests/NRedisStack.Tests/Search/SearchTests.cs @@ -550,7 +550,7 @@ public void TestApplyAndFilterAggregations(string endpointId) Row r1 = res.GetRow(0); Row r2 = res.GetRow(1); Log($"Attempt {attempt} of {maxAttempts}: avgscore {r2.GetDouble("avgscore")}"); - if (!IsNear(r2.GetDouble("avgscore"), 67.5)) continue; // this test can be flakey on cluster + if (attempt != maxAttempts && !IsNear(r2.GetDouble("avgscore"), 67.5)) continue; // this test can be flakey on cluster Assert.Equal("def", r1.GetString("name")); Assert.Equal(52.5, r1.GetDouble("avgscore"), 0); From cec6e594f07c969981f525da93341b0032aba054 Mon Sep 17 00:00:00 2001 From: Marc Gravell Date: Thu, 11 Sep 2025 11:13:38 +0100 Subject: [PATCH 19/27] allow even more time in TestApplyAndFilterAggregations --- 
tests/NRedisStack.Tests/Search/SearchTests.cs | 556 ++++++++++-------- 1 file changed, 318 insertions(+), 238 deletions(-) diff --git a/tests/NRedisStack.Tests/Search/SearchTests.cs b/tests/NRedisStack.Tests/Search/SearchTests.cs index bbfef1dd..a619c342 100644 --- a/tests/NRedisStack.Tests/Search/SearchTests.cs +++ b/tests/NRedisStack.Tests/Search/SearchTests.cs @@ -1,4 +1,4 @@ -#pragma warning disable CS0618, CS0612 // allow testing obsolete methods +#pragma warning disable CS0618, CS0612 // allow testing obsolete methods using Xunit; using StackExchange.Redis; using NRedisStack.RedisStackCommands; @@ -79,7 +79,7 @@ public void TestAggregationRequestVerbatim(string endpointId) Assert.Equal(1, res.TotalResults); r = new AggregationRequest("kitti") - .Verbatim(); + .Verbatim(); res = ft.Aggregate(index, r); Assert.Equal(0, res.TotalResults); @@ -102,7 +102,7 @@ public async Task TestAggregationRequestVerbatimAsync(string endpointId) Assert.Equal(1, res.TotalResults); r = new AggregationRequest("kitti") - .Verbatim(); + .Verbatim(); res = await ft.AggregateAsync(index, r); Assert.Equal(0, res.TotalResults); @@ -123,8 +123,8 @@ public void TestAggregationRequestTimeout(string endpointId) AddDocument(db, new Document("data3").Set("name", "def").Set("count", 25)); AggregationRequest r = new AggregationRequest() - .GroupBy("@name", Reducers.Sum("@count").As("sum")) - .Timeout(5000); + .GroupBy("@name", Reducers.Sum("@count").As("sum")) + .Timeout(5000); AggregationResult res = ft.Aggregate(index, r); Assert.Equal(2, res.TotalResults); @@ -145,8 +145,8 @@ public async Task TestAggregationRequestTimeoutAsync(string endpointId) AddDocument(db, new Document("data3").Set("name", "def").Set("count", 25)); AggregationRequest r = new AggregationRequest() - .GroupBy("@name", Reducers.Sum("@count").As("sum")) - .Timeout(5000); + .GroupBy("@name", Reducers.Sum("@count").As("sum")) + .Timeout(5000); AggregationResult res = await ft.AggregateAsync(index, r); Assert.Equal(2, 
res.TotalResults); @@ -171,7 +171,7 @@ public void TestAggregations(string endpointId) AggregationRequest r = new AggregationRequest() .GroupBy("@name", Reducers.Sum("@count").As("sum")) - .SortBy(10, SortedField.Desc("@sum")); + .SortBy(10, SortedField.Desc("@sum")); // actual search var res = ft.Aggregate(index, r); @@ -210,7 +210,7 @@ public async Task TestAggregationsAsync(string endpointId) AggregationRequest r = new AggregationRequest() .GroupBy("@name", Reducers.Sum("@count").As("sum")) - .SortBy(10, SortedField.Desc("@sum")); + .SortBy(10, SortedField.Desc("@sum")); // actual search var res = await ft.AggregateAsync(index, r); @@ -316,9 +316,9 @@ public void TestAggregationRequestParamsDialect(string endpointId) parameters.Add("count", "10"); AggregationRequest r = new AggregationRequest("$name") - .GroupBy("@name", Reducers.Sum("@count").As("sum")) - .Params(parameters) - .Dialect(2); // From documentation - To use PARAMS, DIALECT must be set to 2 + .GroupBy("@name", Reducers.Sum("@count").As("sum")) + .Params(parameters) + .Dialect(2); // From documentation - To use PARAMS, DIALECT must be set to 2 AggregationResult res = ft.Aggregate(index, r); Assert.Equal(1, res.TotalResults); @@ -348,9 +348,9 @@ public async Task TestAggregationRequestParamsDialectAsync(string endpointId) AggregationRequest r = new AggregationRequest("$name") - .GroupBy("@name", Reducers.Sum("@count").As("sum")) - .Params(parameters) - .Dialect(2); // From documentation - To use PARAMS, DIALECT must be set to 2 + .GroupBy("@name", Reducers.Sum("@count").As("sum")) + .Params(parameters) + .Dialect(2); // From documentation - To use PARAMS, DIALECT must be set to 2 AggregationResult res = await ft.AggregateAsync(index, r); Assert.Equal(1, res.TotalResults); @@ -379,9 +379,9 @@ public void TestAggregationRequestParamsWithDefaultDialect(string endpointId) parameters.Add("count", "10"); AggregationRequest r = new AggregationRequest("$name") - .GroupBy("@name", 
Reducers.Sum("@count").As("sum")) - .Params(parameters); // From documentation - To use PARAMS, DIALECT must be set to 2 - // which is the default as we set in the constructor (FT(2)) + .GroupBy("@name", Reducers.Sum("@count").As("sum")) + .Params(parameters); // From documentation - To use PARAMS, DIALECT must be set to 2 + // which is the default as we set in the constructor (FT(2)) AggregationResult res = ft.Aggregate(index, r); Assert.Equal(1, res.TotalResults); @@ -410,9 +410,9 @@ public async Task TestAggregationRequestParamsWithDefaultDialectAsync(string end parameters.Add("count", "10"); AggregationRequest r = new AggregationRequest("$name") - .GroupBy("@name", Reducers.Sum("@count").As("sum")) - .Params(parameters); // From documentation - To use PARAMS, DIALECT must be set to 2 - // which is the default as we set in the constructor (FT(2)) + .GroupBy("@name", Reducers.Sum("@count").As("sum")) + .Params(parameters); // From documentation - To use PARAMS, DIALECT must be set to 2 + // which is the default as we set in the constructor (FT(2)) AggregationResult res = await ft.AggregateAsync(index, r); Assert.Equal(1, res.TotalResults); @@ -550,7 +550,11 @@ public void TestApplyAndFilterAggregations(string endpointId) Row r1 = res.GetRow(0); Row r2 = res.GetRow(1); Log($"Attempt {attempt} of {maxAttempts}: avgscore {r2.GetDouble("avgscore")}"); - if (attempt != maxAttempts && !IsNear(r2.GetDouble("avgscore"), 67.5)) continue; // this test can be flakey on cluster + if (attempt != maxAttempts && !IsNear(r2.GetDouble("avgscore"), 67.5)) + { + Thread.Sleep(400); // allow extra cluster replication time + continue; + } Assert.Equal("def", r1.GetString("name")); Assert.Equal(52.5, r1.GetDouble("avgscore"), 0); @@ -764,7 +768,8 @@ public async Task FailWhenAttributeNotExistAsync(string endpointId) .AddField(new TextField(FieldName.Of("last"))); Assert.True(await ft.CreateAsync(index, FTCreateParams.CreateParams().Prefix("student:", "pupil:"), sc)); - 
RedisServerException exc = await Assert.ThrowsAsync(async () => await ft.SearchAsync(index, new("@first:Jo*"))); + RedisServerException exc = + await Assert.ThrowsAsync(async () => await ft.SearchAsync(index, new("@first:Jo*"))); } [SkipIfRedisTheory(Is.Enterprise)] @@ -787,6 +792,7 @@ public void AlterAdd(string endpointId) { db.HashSet($"doc{i}", fields.Name, fields.Value); } + AssertDatabaseSize(db, 100); var info = ft.Info(index); Assert.Equal(index, info.IndexName); @@ -805,11 +811,15 @@ public void AlterAdd(string endpointId) Assert.True(ft.Alter(index, new Schema().AddTagField("tags").AddTextField("name", weight: 0.5))); for (int i = 0; i < 100; i++) { - var fields2 = new HashEntry[] { new("name", "name" + i), - new("tags", $"tagA,tagB,tag{i}") }; + var fields2 = new HashEntry[] + { + new("name", "name" + i), + new("tags", $"tagA,tagB,tag{i}") + }; // assertTrue(client.updateDocument(string.format("doc%d", i), 1.0, fields2)); db.HashSet($"doc{i}", fields2); } + SearchResult res2 = ft.Search(index, new("@tags:{tagA}")); Assert.Equal(100, res2.TotalResults); @@ -874,6 +884,7 @@ public async Task AlterAddAsync(string endpointId) { db.HashSet($"doc{i}", fields.Name, fields.Value); } + SearchResult res = ft.Search(index, new("hello world")); Assert.Equal(100, res.TotalResults); var info = ft.Info(index); @@ -890,11 +901,15 @@ public async Task AlterAddAsync(string endpointId) Assert.True(await ft.AlterAsync(index, new Schema().AddTagField("tags").AddTextField("name", weight: 0.5))); for (int i = 0; i < 100; i++) { - var fields2 = new HashEntry[] { new("name", "name" + i), - new("tags", $"tagA,tagB,tag{i}") }; + var fields2 = new HashEntry[] + { + new("name", "name" + i), + new("tags", $"tagA,tagB,tag{i}") + }; // assertTrue(client.updateDocument(string.format("doc%d", i), 1.0, fields2)); db.HashSet($"doc{i}", fields2); } + SearchResult res2 = ft.Search(index, new("@tags:{tagA}")); Assert.Equal(100, res2.TotalResults); @@ -956,17 +971,22 @@ public void 
AlterAddSortable(string endpointId) { db.HashSet($"doc{i}", fields.Name, fields.Value); } + SearchResult res = ft.Search(index, new("hello world")); Assert.Equal(100, res.TotalResults); Assert.True(ft.Alter(index, new Schema().AddTagField("tags").AddTextField("name", weight: 0.5))); for (int i = 0; i < 100; i++) { - var fields2 = new HashEntry[] { new("name", "name" + i), - new("tags", $"tagA,tagB,tag{i}") }; + var fields2 = new HashEntry[] + { + new("name", "name" + i), + new("tags", $"tagA,tagB,tag{i}") + }; // assertTrue(client.updateDocument(string.format("doc%d", i), 1.0, fields2)); db.HashSet($"doc{i}", fields2); } + SearchResult res2 = ft.Search(index, new("@tags:{tagA}")); Assert.Equal(100, res2.TotalResults); @@ -1063,17 +1083,22 @@ public async Task AlterAddSortableAsync(string endpointId) { db.HashSet($"doc{i}", fields.Name, fields.Value); } + SearchResult res = ft.Search(index, new("hello world")); Assert.Equal(100, res.TotalResults); Assert.True(await ft.AlterAsync(index, new Schema().AddTagField("tags").AddTextField("name", weight: 0.5))); for (int i = 0; i < 100; i++) { - var fields2 = new HashEntry[] { new("name", "name" + i), - new("tags", $"tagA,tagB,tag{i}") }; + var fields2 = new HashEntry[] + { + new("name", "name" + i), + new("tags", $"tagA,tagB,tag{i}") + }; // assertTrue(client.updateDocument(string.format("doc%d", i), 1.0, fields2)); db.HashSet($"doc{i}", fields2); } + SearchResult res2 = ft.Search(index, new("@tags:{tagA}")); Assert.Equal(100, res2.TotalResults); @@ -1148,7 +1173,13 @@ public void configOnTimeout(string endpointId) Assert.True(ft.ConfigSet("ON_TIMEOUT", "fail")); Assert.Equal("fail", ft.ConfigGet("ON_TIMEOUT")["ON_TIMEOUT"]); - try { ft.ConfigSet("ON_TIMEOUT", "null"); } catch (RedisServerException) { } + try + { + ft.ConfigSet("ON_TIMEOUT", "null"); + } + catch (RedisServerException) + { + } } // TODO : fix with FT.CONFIG response change @@ -1161,7 +1192,13 @@ public async Task configOnTimeoutAsync(string endpointId) 
Assert.True(await ft.ConfigSetAsync("ON_TIMEOUT", "fail")); Assert.Equal("fail", (await ft.ConfigGetAsync("ON_TIMEOUT"))["ON_TIMEOUT"]); - try { ft.ConfigSet("ON_TIMEOUT", "null"); } catch (RedisServerException) { } + try + { + ft.ConfigSet("ON_TIMEOUT", "null"); + } + catch (RedisServerException) + { + } } // TODO : fix with FT.CONFIG response change @@ -1405,30 +1442,34 @@ public void TestAggregationGroupBy(string endpointId) // Creating the index definition and schema ft.Create("idx", new(), new Schema().AddNumericField("random_num") - .AddTextField("title") - .AddTextField("body") - .AddTextField("parent")); + .AddTextField("title") + .AddTextField("body") + .AddTextField("parent")); // Indexing a document - AddDocument(db, "search", new(){ - { "title", "RediSearch" }, - { "body", "Redisearch impements a search engine on top of redis" }, - { "parent", "redis" }, - { "random_num", 10 }}); + AddDocument(db, "search", new() + { + { "title", "RediSearch" }, + { "body", "Redisearch impements a search engine on top of redis" }, + { "parent", "redis" }, + { "random_num", 10 } + }); AddDocument(db, "ai", new() { - { "title", "RedisAI" }, - { "body", "RedisAI executes Deep Learning/Machine Learning models and managing their data." }, - { "parent", "redis" }, - { "random_num", 3 }}); + { "title", "RedisAI" }, + { "body", "RedisAI executes Deep Learning/Machine Learning models and managing their data." }, + { "parent", "redis" }, + { "random_num", 3 } + }); AddDocument(db, "json", new() { - { "title", "RedisJson" }, - { "body", "RedisJSON implements ECMA-404 The JSON Data Interchange Standard as a native data type." }, - { "parent", "redis" }, - { "random_num", 8 }}); + { "title", "RedisJson" }, + { "body", "RedisJSON implements ECMA-404 The JSON Data Interchange Standard as a native data type." 
}, + { "parent", "redis" }, + { "random_num", 8 } + }); var req = new AggregationRequest("redis").GroupBy("@parent", Reducers.Count()); var res = ft.Aggregate("idx", req).GetRow(0); @@ -1475,7 +1516,7 @@ public void TestAggregationGroupBy(string endpointId) "@parent", Reducers.Quantile("@random_num", 0.5)); res = ft.Aggregate("idx", req).GetRow(0); Assert.Equal("redis", res["parent"]); - Assert.Equal(8, res.GetLong("__generated_aliasquantilerandom_num,0.5")); // median of 3,8,10 + Assert.Equal(8, res.GetLong("__generated_aliasquantilerandom_num,0.5")); // median of 3,8,10 req = new AggregationRequest("redis").GroupBy( "@parent", Reducers.ToList("@title")); @@ -1495,6 +1536,7 @@ public void TestAggregationGroupBy(string endpointId) { Log($"parent: {agg.GetRow(i)["parent"]}, first: {agg.GetRow(i)["first"]}"); } + res = agg.GetRow(0); Assert.Equal("redis", res["parent"]); Assert.Equal("RediSearch", res["first"]); @@ -1511,8 +1553,8 @@ public void TestAggregationGroupBy(string endpointId) Assert.Contains(actual[1].ToString(), possibleValues); req = new AggregationRequest("redis") - .Load(new FieldName("__key")) - .GroupBy("@parent", Reducers.ToList("__key").As("docs")); + .Load(new FieldName("__key")) + .GroupBy("@parent", Reducers.ToList("__key").As("docs")); res = db.FT().Aggregate("idx", req).GetRow(0); actual = (List)res.Get("docs"); @@ -1717,6 +1759,7 @@ public async Task TestDictionaryAsync(string endpointId) } readonly string explainQuery = "@f3:f3_val @f2:f2_val @f1:f1_val"; + [SkippableTheory] [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public void TestExplain(string endpointId) @@ -1737,8 +1780,6 @@ public void TestExplain(string endpointId) res = ft.Explain(index, explainQuery, 2); Assert.NotNull(res); Assert.False(res.Length == 0); - - } [SkippableTheory] @@ -2036,36 +2077,40 @@ public void TestFTCreateParamsCommandBuilder() .AddTagField("category", separator: ";"); var ftCreateParams = 
FTCreateParams.CreateParams().On(IndexDataType.JSON) - .AddPrefix("doc:") - .Filter("@category:{red}") - .Language("English") - .LanguageField("play") - .Score(1.0) - .ScoreField("chapter") - .PayloadField("txt") - .MaxTextFields() - .NoOffsets() - .Temporary(10) - .NoHighlights() - .NoFields() - .NoFreqs() - .Stopwords(new[] { "foo", "bar" }) - .SkipInitialScan(); + .AddPrefix("doc:") + .Filter("@category:{red}") + .Language("English") + .LanguageField("play") + .Score(1.0) + .ScoreField("chapter") + .PayloadField("txt") + .MaxTextFields() + .NoOffsets() + .Temporary(10) + .NoHighlights() + .NoFields() + .NoFreqs() + .Stopwords(new[] { "foo", "bar" }) + .SkipInitialScan(); var builedCommand = SearchCommandBuilder.Create(index, ftCreateParams, sc); - var expectedArgs = new object[] { "TEST_INDEX", "ON", "JSON", "PREFIX", 1, - "doc:", "FILTER", "@category:{red}", "LANGUAGE", - "English", "LANGUAGE_FIELD", "play", "SCORE", 1, - "SCORE_FIELD", "chapter", "PAYLOAD_FIELD", "txt", - "MAXTEXTFIELDS", "NOOFFSETS", "TEMPORARY", 10, - "NOHL", "NOFIELDS", "NOFREQS", "STOPWORDS", 2, - "foo", "bar", "SKIPINITIALSCAN", "SCHEMA", "title", - "TEXT", "category", "TAG", "SEPARATOR", ";" }; + var expectedArgs = new object[] + { + "TEST_INDEX", "ON", "JSON", "PREFIX", 1, + "doc:", "FILTER", "@category:{red}", "LANGUAGE", + "English", "LANGUAGE_FIELD", "play", "SCORE", 1, + "SCORE_FIELD", "chapter", "PAYLOAD_FIELD", "txt", + "MAXTEXTFIELDS", "NOOFFSETS", "TEMPORARY", 10, + "NOHL", "NOFIELDS", "NOFREQS", "STOPWORDS", 2, + "foo", "bar", "SKIPINITIALSCAN", "SCHEMA", "title", + "TEXT", "category", "TAG", "SEPARATOR", ";" + }; for (int i = 0; i < expectedArgs.Length; i++) { Assert.Equal(expectedArgs[i].ToString(), builedCommand.Args[i].ToString()); } + Assert.Equal("FT.CREATE", builedCommand.Command.ToString()); } @@ -2080,8 +2125,11 @@ public void TestFTCreateParamsCommandBuilderNoStopwords() var ftCreateParams = FTCreateParams.CreateParams().NoStopwords(); - var expectedArgs = new 
object[] { "TEST_INDEX", "STOPWORDS", 0, "SCHEMA", "title", - "TEXT", "category", "TAG", "SEPARATOR", ";" }; + var expectedArgs = new object[] + { + "TEST_INDEX", "STOPWORDS", 0, "SCHEMA", "title", + "TEXT", "category", "TAG", "SEPARATOR", ";" + }; var builedCommand = SearchCommandBuilder.Create(index, ftCreateParams, sc); @@ -2089,6 +2137,7 @@ public void TestFTCreateParamsCommandBuilderNoStopwords() { Assert.Equal(expectedArgs[i].ToString(), builedCommand.Args[i].ToString()); } + Assert.Equal("FT.CREATE", builedCommand.Command.ToString()); } @@ -2108,16 +2157,16 @@ public void TestFilters(string endpointId) // Add the two documents to the index AddDocument(db, "doc1", new() { - { "txt", "foo bar" }, - { "num", "3.141" }, - { "loc", "-0.441,51.458" } - }); + { "txt", "foo bar" }, + { "num", "3.141" }, + { "loc", "-0.441,51.458" } + }); AddDocument(db, "doc2", new() { - { "txt", "foo baz" }, - { "num", "2" }, - { "loc", "-0.1,51.2" } - }); + { "txt", "foo baz" }, + { "num", "2" }, + { "loc", "-0.1,51.2" } + }); // WaitForIndex(client, ft.IndexName ?? "idx"); // Test numerical filter @@ -2160,16 +2209,16 @@ public async Task TestFiltersAsync(string endpointId) // Add the two documents to the index AddDocument(db, "doc1", new() { - { "txt", "foo bar" }, - { "num", "3.141" }, - { "loc", "-0.441,51.458" } - }); + { "txt", "foo bar" }, + { "num", "3.141" }, + { "loc", "-0.441,51.458" } + }); AddDocument(db, "doc2", new() { - { "txt", "foo baz" }, - { "num", "2" }, - { "loc", "-0.1,51.2" } - }); + { "txt", "foo baz" }, + { "num", "2" }, + { "loc", "-0.1,51.2" } + }); // WaitForIndex(client, ft.IndexName ?? 
"idx"); // Test numerical filter @@ -2200,90 +2249,94 @@ public async Task TestFiltersAsync(string endpointId) public void TestQueryCommandBuilder() { var testQuery = new Query("foo").HighlightFields(new Query.HighlightTags("", ""), "txt") - .SetVerbatim() - .SetNoStopwords() - .SetWithScores() - .SetPayload("txt") - .SetLanguage("English") - .SetScorer("TFIDF") - //.SetExplainScore() - .SetWithPayloads() - .SetSortBy("txt", true) - .Limit(0, 11) - .SummarizeFields(20, 3, ";", "txt") - .LimitKeys("key1", "key2") - .LimitFields("txt") - .ReturnFields("txt") - .AddParam("name", "value") - .Dialect(1) - .Slop(0) - .Timeout(1000) - .SetInOrder() - .SetExpander("myexpander"); + .SetVerbatim() + .SetNoStopwords() + .SetWithScores() + .SetPayload("txt") + .SetLanguage("English") + .SetScorer("TFIDF") + //.SetExplainScore() + .SetWithPayloads() + .SetSortBy("txt", true) + .Limit(0, 11) + .SummarizeFields(20, 3, ";", "txt") + .LimitKeys("key1", "key2") + .LimitFields("txt") + .ReturnFields("txt") + .AddParam("name", "value") + .Dialect(1) + .Slop(0) + .Timeout(1000) + .SetInOrder() + .SetExpander("myexpander"); var buildCommand = SearchCommandBuilder.Search("idx", testQuery); - var expectedArgs = new List {"idx", - "foo", - "VERBATIM", - "NOSTOPWORDS", - "WITHSCORES", - "WITHPAYLOADS", - "LANGUAGE", - "English", - "SCORER", - "TFIDF", - "INFIELDS", - "1", - "txt", - "SORTBY", - "txt", - "ASC", - "PAYLOAD", - "txt", - "LIMIT", - "0", - "11", - "HIGHLIGHT", - "FIELDS", - "1", - "txt", - "TAGS", - "", - "", - "SUMMARIZE", - "FIELDS", - "1", - "txt", - "FRAGS", - "3", - "LEN", - "20", - "SEPARATOR", - ";", - "INKEYS", - "2", - "key1", - "key2", - "RETURN", - "1", - "txt", - "PARAMS", - "2", - "name", - "value", - "DIALECT", - "1", - "SLOP", - "0", - "TIMEOUT", - "1000", - "INORDER", - "EXPANDER", - "myexpander"}; + var expectedArgs = new List + { + "idx", + "foo", + "VERBATIM", + "NOSTOPWORDS", + "WITHSCORES", + "WITHPAYLOADS", + "LANGUAGE", + "English", + "SCORER", + "TFIDF", 
+ "INFIELDS", + "1", + "txt", + "SORTBY", + "txt", + "ASC", + "PAYLOAD", + "txt", + "LIMIT", + "0", + "11", + "HIGHLIGHT", + "FIELDS", + "1", + "txt", + "TAGS", + "", + "", + "SUMMARIZE", + "FIELDS", + "1", + "txt", + "FRAGS", + "3", + "LEN", + "20", + "SEPARATOR", + ";", + "INKEYS", + "2", + "key1", + "key2", + "RETURN", + "1", + "txt", + "PARAMS", + "2", + "name", + "value", + "DIALECT", + "1", + "SLOP", + "0", + "TIMEOUT", + "1000", + "INORDER", + "EXPANDER", + "myexpander" + }; for (int i = 0; i < buildCommand.Args.Count(); i++) { Assert.Equal(expectedArgs[i].ToString(), buildCommand.Args[i].ToString()); } + Assert.Equal("FT.SEARCH", buildCommand.Command); // test that the command not throw an exception: var db = GetCleanDatabase(); @@ -2297,27 +2350,31 @@ public void TestQueryCommandBuilder() public void TestQueryCommandBuilderReturnField() { var testQuery = new Query("foo").HighlightFields("txt") - .ReturnFields(new FieldName("txt")) - .SetNoContent(); + .ReturnFields(new FieldName("txt")) + .SetNoContent(); var buildCommand = SearchCommandBuilder.Search("idx", testQuery); - var expectedArgs = new List {"idx", - "foo", - "NOCONTENT", - "HIGHLIGHT", - "FIELDS", - "1", - "txt", - "RETURN", - "1", - "txt"}; + var expectedArgs = new List + { + "idx", + "foo", + "NOCONTENT", + "HIGHLIGHT", + "FIELDS", + "1", + "txt", + "RETURN", + "1", + "txt" + }; Assert.Equal(expectedArgs.Count(), buildCommand.Args.Count()); for (int i = 0; i < buildCommand.Args.Count(); i++) { Assert.Equal(expectedArgs[i].ToString(), buildCommand.Args[i].ToString()); } + Assert.Equal("FT.SEARCH", buildCommand.Command); // test that the command not throw an exception: @@ -2335,8 +2392,10 @@ public void TestQueryCommandBuilderScore() IDatabase db = GetCleanDatabase(); var ft = db.FT(); - db.Execute("JSON.SET", (RedisKey)"doc:1", "$", "[{\"arr\": [1, 2, 3]}, {\"val\": \"hello\"}, {\"val\": \"world\"}]"); - db.Execute("FT.CREATE", "idx", "ON", "JSON", "PREFIX", "1", "doc:", "SCHEMA", "$..arr", 
"AS", "arr", "NUMERIC", "$..val", "AS", "val", "TEXT"); + db.Execute("JSON.SET", (RedisKey)"doc:1", "$", + "[{\"arr\": [1, 2, 3]}, {\"val\": \"hello\"}, {\"val\": \"world\"}]"); + db.Execute("FT.CREATE", "idx", "ON", "JSON", "PREFIX", "1", "doc:", "SCHEMA", "$..arr", "AS", "arr", "NUMERIC", + "$..val", "AS", "val", "TEXT"); // sleep: Thread.Sleep(2000); @@ -2357,7 +2416,8 @@ public void TestFieldsCommandBuilder() .AddTagField(FieldName.Of("tag"), true, true, true, ";", true, true) .AddVectorField("vec", VectorField.VectorAlgo.FLAT, new() { { "dim", 10 } }); var buildCommand = SearchCommandBuilder.Create("idx", new(), sc); - var expectedArgs = new List { + var expectedArgs = new List + { "idx", "SCHEMA", "txt", @@ -2490,6 +2550,7 @@ public void TestVectorCount_Issue70() { Assert.Equal(expected[i].ToString(), actual.Args[i].ToString()); } + Assert.Equal(expected.Count(), actual.Args.Length); } @@ -2506,12 +2567,13 @@ public void VectorSimilaritySearch(string endpointId) json.Set("vec:3", "$", "{\"vector\":[3,3,3,3]}"); json.Set("vec:4", "$", "{\"vector\":[4,4,4,4]}"); - var schema = new Schema().AddVectorField(FieldName.Of("$.vector").As("vector"), VectorField.VectorAlgo.FLAT, new() - { - ["TYPE"] = "FLOAT32", - ["DIM"] = "4", - ["DISTANCE_METRIC"] = "L2", - }); + var schema = new Schema().AddVectorField(FieldName.Of("$.vector").As("vector"), VectorField.VectorAlgo.FLAT, + new() + { + ["TYPE"] = "FLOAT32", + ["DIM"] = "4", + ["DISTANCE_METRIC"] = "L2", + }); var idxDef = new FTCreateParams().On(IndexDataType.JSON).Prefix("vec:"); Assert.True(ft.Create("vss_idx", idxDef, schema)); @@ -2521,9 +2583,9 @@ public void VectorSimilaritySearch(string endpointId) AssertDatabaseSize(db, 4); var query = new Query("*=>[KNN 3 @vector $query_vec]") - .AddParam("query_vec", queryVec) - .SetSortBy("__vector_score") - .Dialect(2); + .AddParam("query_vec", queryVec) + .SetSortBy("__vector_score") + .Dialect(2); var res = ft.Search("vss_idx", query); Assert.Equal(3, res.TotalResults); 
@@ -2715,10 +2777,10 @@ public void TestCrossTermDictionary(string endpointId) AssertDatabaseSize(db, 0); Assert.Equal(expected, ft.SpellCheck(index, - "Tooni toque kerfuffle", - new FTSpellCheckParams() - .IncludeTerm("slang") - .ExcludeTerm("slang"))); + "Tooni toque kerfuffle", + new FTSpellCheckParams() + .IncludeTerm("slang") + .ExcludeTerm("slang"))); } [SkippableTheory] @@ -2740,10 +2802,10 @@ public async Task TestCrossTermDictionaryAsync(string endpointId) AssertDatabaseSize(db, 0); Assert.Equal(expected, await ft.SpellCheckAsync(index, - "Tooni toque kerfuffle", - new FTSpellCheckParams() - .IncludeTerm("slang") - .ExcludeTerm("slang"))); + "Tooni toque kerfuffle", + new FTSpellCheckParams() + .IncludeTerm("slang") + .ExcludeTerm("slang"))); } [Fact] @@ -2765,7 +2827,8 @@ public async Task TestDistanceBoundAsync() ft.Create(index, new(), new Schema().AddTextField("name").AddTextField("body")); // distance suppose to be between 1 and 4 - await Assert.ThrowsAsync(async () => await ft.SpellCheckAsync(index, "name", new FTSpellCheckParams().Distance(0))); + await Assert.ThrowsAsync(async () => + await ft.SpellCheckAsync(index, "name", new FTSpellCheckParams().Distance(0))); } [Fact] @@ -2787,7 +2850,8 @@ public async Task TestDialectBoundAsync() ft.Create(index, new(), new Schema().AddTextField("t")); // dialect 0 is not valid - await Assert.ThrowsAsync(async () => await ft.SpellCheckAsync(index, "name", new FTSpellCheckParams().Dialect(0))); + await Assert.ThrowsAsync(async () => + await ft.SpellCheckAsync(index, "name", new FTSpellCheckParams().Dialect(0))); } [SkippableTheory] @@ -3039,7 +3103,7 @@ public void TestProfileSearch(string endpointId) db.HashSet("doc1", [ new("t1", "foo"), - new("t2", "bar") + new("t2", "bar") ]); var profile = ft.ProfileOnSearch(index, new("foo")); @@ -3063,7 +3127,7 @@ public async Task TestProfileSearchAsync(string endpointId) db.HashSet("doc1", [ new("t1", "foo"), - new("t2", "bar") + new("t2", "bar") ]); var profile = 
await ft.ProfileOnSearchAsync(index, new("foo")); @@ -3086,7 +3150,7 @@ public void TestProfileSearch_WithoutCoordinator(string endpointId) db.HashSet("doc1", [ new("t1", "foo"), - new("t2", "bar") + new("t2", "bar") ]); var profile = ft.ProfileSearch(index, new("foo")); @@ -3106,7 +3170,7 @@ public async Task TestProfileSearchAsync_WithoutCoordinator(string endpointId) db.HashSet("doc1", [ new("t1", "foo"), - new("t2", "bar") + new("t2", "bar") ]); var profile = await ft.ProfileSearchAsync(index, new("foo")); @@ -3283,7 +3347,8 @@ public async Task TestProfileAsyncIssue306(string endpointId) IDatabase db = GetCleanDatabase(endpointId); var ft = db.FT(); - await ft.CreateAsync(index, new Schema().AddTextField("t", sortable: true)); // Calling FT.CREATR without FTCreateParams + await ft.CreateAsync(index, + new Schema().AddTextField("t", sortable: true)); // Calling FT.CREATR without FTCreateParams db.HashSet("1", "t", "hello"); db.HashSet("2", "t", "world"); @@ -3327,10 +3392,10 @@ public void Issue175(string endpointId) var sortable = true; var ftParams = new FTCreateParams() - .On(IndexDataType.JSON) - .Prefix("doc:"); + .On(IndexDataType.JSON) + .Prefix("doc:"); var schema = new Schema().AddTagField("tag", sortable, false, false, "|") - .AddTextField("text", 1, sortable); + .AddTextField("text", 1, sortable); Assert.True(ft.Create("myIndex", ftParams, schema)); } @@ -3410,7 +3475,8 @@ public async Task GeoShapeFilterSphericalAsync(string endpointId) WKTReader reader = new(); GeometryFactory factory = new(); - Assert.True(await ft.CreateAsync(index, new Schema().AddGeoShapeField("geom", GeoShapeField.CoordinateSystem.SPHERICAL))); + Assert.True(await ft.CreateAsync(index, + new Schema().AddGeoShapeField("geom", GeoShapeField.CoordinateSystem.SPHERICAL))); // Create polygons Polygon small = factory.CreatePolygon([ @@ -3439,7 +3505,8 @@ public async Task GeoShapeFilterSphericalAsync(string endpointId) new(34.9000, 29.7000) ]); - var res = await 
ft.SearchAsync(index, new Query($"@geom:[within $poly]").AddParam("poly", within.ToString()).Dialect(3)); + var res = await ft.SearchAsync(index, + new Query($"@geom:[within $poly]").AddParam("poly", within.ToString()).Dialect(3)); Assert.Equal(1, res.TotalResults); Assert.Single(res.Documents); Assert.Equal(small, reader.Read(res.Documents[0]["geom"].ToString())); @@ -3452,7 +3519,8 @@ public async Task GeoShapeFilterSphericalAsync(string endpointId) new(34.9002, 29.7002) ]); - res = await ft.SearchAsync(index, new Query($"@geom:[contains $poly]").AddParam("poly", contains.ToString()).Dialect(3)); + res = await ft.SearchAsync(index, + new Query($"@geom:[contains $poly]").AddParam("poly", contains.ToString()).Dialect(3)); Assert.Equal(2, res.TotalResults); Assert.Equal(2, res.Documents.Count); @@ -3460,7 +3528,8 @@ public async Task GeoShapeFilterSphericalAsync(string endpointId) Point point = factory.CreatePoint(new Coordinate(34.9010, 29.7010)); db.HashSet("point", "geom", point.ToString()); - res = await ft.SearchAsync(index, new Query($"@geom:[within $poly]").AddParam("poly", within.ToString()).Dialect(3)); + res = await ft.SearchAsync(index, + new Query($"@geom:[within $poly]").AddParam("poly", within.ToString()).Dialect(3)); Assert.Equal(2, res.TotalResults); Assert.Equal(2, res.Documents.Count); } @@ -3479,23 +3548,24 @@ public void GeoShapeFilterFlat(string endpointId) // polygon type Polygon small = factory.CreatePolygon([ new(1, 1), - new(1, 100), new(100, 100), new(100, 1), new(1, 1) + new(1, 100), new(100, 100), new(100, 1), new(1, 1) ]); db.HashSet("small", "geom", small.ToString()); Polygon large = factory.CreatePolygon([ new(1, 1), - new(1, 200), new(200, 200), new(200, 1), new(1, 1) + new(1, 200), new(200, 200), new(200, 1), new(1, 1) ]); db.HashSet("large", "geom", large.ToString()); // within condition Polygon within = factory.CreatePolygon([ new(0, 0), - new(0, 150), new(150, 150), new(150, 0), new(0, 0) + new(0, 150), new(150, 150), new(150, 0), 
new(0, 0) ]); - SearchResult res = ft.Search(index, new Query("@geom:[within $poly]").AddParam("poly", within.ToString()).Dialect(3)); + SearchResult res = ft.Search(index, + new Query("@geom:[within $poly]").AddParam("poly", within.ToString()).Dialect(3)); Assert.Equal(1, res.TotalResults); Assert.Single(res.Documents); Assert.Equal(small, reader.Read(res.Documents[0]["geom"].ToString())); @@ -3503,7 +3573,7 @@ public void GeoShapeFilterFlat(string endpointId) // contains condition Polygon contains = factory.CreatePolygon([ new(2, 2), - new(2, 50), new(50, 50), new(50, 2), new(2, 2) + new(2, 50), new(50, 50), new(50, 2), new(2, 2) ]); res = ft.Search(index, new Query("@geom:[contains $poly]").AddParam("poly", contains.ToString()).Dialect(3)); @@ -3528,28 +3598,30 @@ public async Task GeoShapeFilterFlatAsync(string endpointId) WKTReader reader = new(); GeometryFactory factory = new(); - Assert.True(await ft.CreateAsync(index, new Schema().AddGeoShapeField("geom", GeoShapeField.CoordinateSystem.FLAT))); + Assert.True(await ft.CreateAsync(index, + new Schema().AddGeoShapeField("geom", GeoShapeField.CoordinateSystem.FLAT))); // polygon type Polygon small = factory.CreatePolygon([ new(1, 1), - new(1, 100), new(100, 100), new(100, 1), new(1, 1) + new(1, 100), new(100, 100), new(100, 1), new(1, 1) ]); db.HashSet("small", "geom", small.ToString()); Polygon large = factory.CreatePolygon([ new(1, 1), - new(1, 200), new(200, 200), new(200, 1), new(1, 1) + new(1, 200), new(200, 200), new(200, 1), new(1, 1) ]); db.HashSet("large", "geom", large.ToString()); // within condition Polygon within = factory.CreatePolygon([ new(0, 0), - new(0, 150), new(150, 150), new(150, 0), new(0, 0) + new(0, 150), new(150, 150), new(150, 0), new(0, 0) ]); - SearchResult res = await ft.SearchAsync(index, new Query("@geom:[within $poly]").AddParam("poly", within.ToString()).Dialect(3)); + SearchResult res = await ft.SearchAsync(index, + new Query("@geom:[within $poly]").AddParam("poly", 
within.ToString()).Dialect(3)); Assert.Equal(1, res.TotalResults); Assert.Single(res.Documents); Assert.Equal(small, reader.Read(res.Documents[0]["geom"].ToString())); @@ -3557,10 +3629,11 @@ public async Task GeoShapeFilterFlatAsync(string endpointId) // contains condition Polygon contains = factory.CreatePolygon([ new(2, 2), - new(2, 50), new(50, 50), new(50, 2), new(2, 2) + new(2, 50), new(50, 50), new(50, 2), new(2, 2) ]); - res = await ft.SearchAsync(index, new Query("@geom:[contains $poly]").AddParam("poly", contains.ToString()).Dialect(3)); + res = await ft.SearchAsync(index, + new Query("@geom:[contains $poly]").AddParam("poly", contains.ToString()).Dialect(3)); Assert.Equal(2, res.TotalResults); Assert.Equal(2, res.Documents.Count); @@ -3568,7 +3641,8 @@ public async Task GeoShapeFilterFlatAsync(string endpointId) Point point = factory.CreatePoint(new Coordinate(10, 10)); db.HashSet("point", "geom", point.ToString()); - res = await ft.SearchAsync(index, new Query("@geom:[within $poly]").AddParam("poly", within.ToString()).Dialect(3)); + res = await ft.SearchAsync(index, + new Query("@geom:[within $poly]").AddParam("poly", within.ToString()).Dialect(3)); Assert.Equal(2, res.TotalResults); Assert.Equal(2, res.Documents.Count); } @@ -3577,12 +3651,14 @@ public async Task GeoShapeFilterFlatAsync(string endpointId) public void Issue230() { var request = new AggregationRequest("*", 3).Filter("@StatusId==1") - .GroupBy("@CreatedDay", Reducers.CountDistinct("@UserId"), Reducers.Count().As("count")); + .GroupBy("@CreatedDay", Reducers.CountDistinct("@UserId"), Reducers.Count().As("count")); var buildCommand = SearchCommandBuilder.Aggregate("idx:users", request); // expected: FT.AGGREGATE idx:users * FILTER @StatusId==1 GROUPBY 1 @CreatedDay REDUCE COUNT_DISTINCT 1 @UserId REDUCE COUNT 0 AS count DIALECT 3 Assert.Equal("FT.AGGREGATE", buildCommand.Command); - Assert.Equal(["idx:users", "*", "FILTER", "@StatusId==1", "GROUPBY", 1, "@CreatedDay", "REDUCE", 
"COUNT_DISTINCT", 1, "@UserId", "REDUCE", "COUNT", 0, "AS", "count", "DIALECT", 3 + Assert.Equal([ + "idx:users", "*", "FILTER", "@StatusId==1", "GROUPBY", 1, "@CreatedDay", "REDUCE", "COUNT_DISTINCT", 1, + "@UserId", "REDUCE", "COUNT", 0, "AS", "count", "DIALECT", 3 ], buildCommand.Args); } @@ -3636,7 +3712,6 @@ public void TestNumericOperatorsInDialect4(string endpointId) Assert.Equal(1, ft.Search(index, new("@version:[-inf 124]")).TotalResults); Assert.Equal(1, ft.Search(index, new Query("@version<=124").Dialect(4)).TotalResults); - } [SkipIfRedisTheory(Comparison.LessThan, "7.3.240")] @@ -3700,7 +3775,8 @@ public async Task TestDocumentLoadWithDB_Issue352(string endpointId) IDatabase db = GetCleanDatabase(endpointId); var ft = db.FT(); - Schema sc = new Schema().AddTextField("firstText", 1.0).AddTextField("lastText", 1.0).AddNumericField("ageNumeric"); + Schema sc = new Schema().AddTextField("firstText", 1.0).AddTextField("lastText", 1.0) + .AddNumericField("ageNumeric"); Assert.True(ft.Create(index, FTCreateParams.CreateParams(), sc)); Document? droppedDocument = null; @@ -3744,7 +3820,11 @@ public async Task TestDocumentLoadWithDB_Issue352(string endpointId) List tasks = []; // try with 3 different tasks simultaneously to increase the chance of hitting it - for (int i = 0; i < 3; i++) { tasks.Add(Task.Run(checker)); } + for (int i = 0; i < 3; i++) + { + tasks.Add(Task.Run(checker)); + } + Task checkTask = Task.WhenAll(tasks); await Task.WhenAny(checkTask, Task.Delay(1000)); var keyTtl = db.KeyTimeToLive("student:22222"); @@ -3757,4 +3837,4 @@ public async Task TestDocumentLoadWithDB_Issue352(string endpointId) // Without fix for Issue352, document load in this case fails %100 with my local test runs,, and %100 success with fixed version. // The results in pipeline should be the same. 
} -} +} \ No newline at end of file From 6cef256ac77861952d4bf29eb263f901f381e755 Mon Sep 17 00:00:00 2001 From: Marc Gravell Date: Thu, 11 Sep 2025 11:24:16 +0100 Subject: [PATCH 20/27] fix CI mstest on .net9 --- tests/NRedisStack.Tests/NRedisStack.Tests.csproj | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/NRedisStack.Tests/NRedisStack.Tests.csproj b/tests/NRedisStack.Tests/NRedisStack.Tests.csproj index 5d756454..83bd48e5 100644 --- a/tests/NRedisStack.Tests/NRedisStack.Tests.csproj +++ b/tests/NRedisStack.Tests/NRedisStack.Tests.csproj @@ -22,7 +22,7 @@ - + runtime; build; native; contentfiles; analyzers; buildtransitive all From e68ba11072bbc0a636663500f2a28918b44e6aeb Mon Sep 17 00:00:00 2001 From: Marc Gravell Date: Thu, 11 Sep 2025 11:48:13 +0100 Subject: [PATCH 21/27] grandfather many cluster tests pre 8 --- tests/NRedisStack.Tests/Search/SearchTests.cs | 33 +++++++++++++++++++ 1 file changed, 33 insertions(+) diff --git a/tests/NRedisStack.Tests/Search/SearchTests.cs b/tests/NRedisStack.Tests/Search/SearchTests.cs index a619c342..39583a20 100644 --- a/tests/NRedisStack.Tests/Search/SearchTests.cs +++ b/tests/NRedisStack.Tests/Search/SearchTests.cs @@ -19,6 +19,15 @@ public class SearchTests(EndpointsFixture endpointsFixture, ITestOutputHelper lo // private readonly string key = "SEARCH_TESTS"; private readonly string index = "TEST_INDEX"; + private static void SkipClusterPre8(string endpointId) + { + // Many of the FT.* commands are ... more awkward pre 8 when using cluster. Rather than + // fight eventual-consistency/timing issues: grandfather the existing behaviour, and start + // afresh from v8, where things behave much more predictably and reasonably. 
+ Skip.If(endpointId == EndpointsFixture.Env.Cluster + && EndpointsFixture.RedisVersion.Major < 8, "Ignoring cluster tests for FT.SEARCH pre Redis 8.0"); + } + private void AddDocument(IDatabase db, Document doc) { var hash = doc.GetProperties() @@ -235,6 +244,7 @@ public async Task TestAggregationsAsync(string endpointId) [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public void TestAggregationsLoad(string endpointId) { + SkipClusterPre8(endpointId); IDatabase db = GetCleanDatabase(endpointId); var ft = db.FT(); var sc = new Schema().AddTextField("t1").AddTextField("t2"); @@ -632,6 +642,7 @@ public async Task TestCreateAsync(string endpointId) [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public void CreateNoParams(string endpointId) { + SkipClusterPre8(endpointId); IDatabase db = GetCleanDatabase(endpointId); var ft = db.FT(); @@ -690,6 +701,7 @@ public async Task CreateNoParamsAsync(string endpointId) [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public void CreateWithFieldNames(string endpointId) { + SkipClusterPre8(endpointId); IDatabase db = GetCleanDatabase(endpointId); var ft = db.FT(); Schema sc = new Schema().AddField(new TextField(FieldName.Of("first").As("given"))) @@ -869,6 +881,7 @@ public void AlterAdd(string endpointId) [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public async Task AlterAddAsync(string endpointId) { + SkipClusterPre8(endpointId); IDatabase db = GetCleanDatabase(endpointId); var ft = db.FT(); Schema sc = new Schema().AddTextField("title", 1.0); @@ -956,6 +969,7 @@ public async Task AlterAddAsync(string endpointId) [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public void AlterAddSortable(string endpointId) { + SkipClusterPre8(endpointId); IDatabase db = 
GetCleanDatabase(endpointId); var ft = db.FT(); Schema sc = new Schema().AddTextField("title", 1.0, sortable: true); @@ -1251,6 +1265,7 @@ public async Task TestDialectConfigAsync(string endpointId) [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public async Task TestCursor(string endpointId) { + SkipClusterPre8(endpointId); IDatabase db = GetCleanDatabase(endpointId); var ft = db.FT(); Schema sc = new(); @@ -1306,6 +1321,7 @@ public async Task TestCursor(string endpointId) [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public void TestCursorEnumerable(string endpointId) { + SkipClusterPre8(endpointId); IDatabase db = GetCleanDatabase(endpointId); var ft = db.FT(); Schema sc = new(); @@ -1344,6 +1360,7 @@ public void TestCursorEnumerable(string endpointId) [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public async Task TestCursorAsync(string endpointId) { + SkipClusterPre8(endpointId); IDatabase db = GetCleanDatabase(endpointId); var ft = db.FT(); Schema sc = new(); @@ -1399,6 +1416,7 @@ public async Task TestCursorAsync(string endpointId) [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public async Task TestCursorEnumerableAsync(string endpointId) { + SkipClusterPre8(endpointId); IDatabase db = GetCleanDatabase(endpointId); var ft = db.FT(); Schema sc = new(); @@ -1587,6 +1605,7 @@ public void TestDictionary(string endpointId) [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public void TestDropIndex(string endpointId) { + SkipClusterPre8(endpointId); IDatabase db = GetCleanDatabase(endpointId); var ft = db.FT(); Schema sc = new Schema().AddTextField("title", 1.0); @@ -1656,6 +1675,7 @@ private async Task DatabaseSizeAsync(IDatabase db) [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), 
MemberType = typeof(EndpointsFixture.Env))] public async Task TestDropIndexAsync(string endpointId) { + SkipClusterPre8(endpointId); IDatabase db = GetCleanDatabase(endpointId); var ft = db.FT(); Schema sc = new Schema().AddTextField("title", 1.0); @@ -1690,6 +1710,7 @@ public async Task TestDropIndexAsync(string endpointId) [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public void dropIndexDD(string endpointId) { + SkipClusterPre8(endpointId); IDatabase db = GetCleanDatabase(endpointId); var ft = db.FT(); Schema sc = new Schema().AddTextField("title", 1.0); @@ -1716,6 +1737,7 @@ public void dropIndexDD(string endpointId) [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public async Task dropIndexDDAsync(string endpointId) { + SkipClusterPre8(endpointId); IDatabase db = GetCleanDatabase(endpointId); var ft = db.FT(); Schema sc = new Schema().AddTextField("title", 1.0); @@ -2145,6 +2167,7 @@ public void TestFTCreateParamsCommandBuilderNoStopwords() [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public void TestFilters(string endpointId) { + SkipClusterPre8(endpointId); IDatabase db = GetCleanDatabase(endpointId); var ft = db.FT(); // Create the index with the same fields as in the original test @@ -2465,6 +2488,7 @@ public void TestFieldsCommandBuilder() [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public void TestLimit(string endpointId) { + SkipClusterPre8(endpointId); IDatabase db = GetCleanDatabase(endpointId); var ft = db.FT(); @@ -2486,6 +2510,7 @@ public void TestLimit(string endpointId) [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public async Task TestLimitAsync(string endpointId) { + SkipClusterPre8(endpointId); IDatabase db = GetCleanDatabase(endpointId); var ft = db.FT(); @@ 
-2602,6 +2627,7 @@ public void VectorSimilaritySearch(string endpointId) [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public void QueryingVectorFields(string endpointId) { + SkipClusterPre8(endpointId); IDatabase db = GetCleanDatabase(endpointId); var ft = db.FT(); var json = db.JSON(); @@ -2690,6 +2716,7 @@ public async Task TestQueryAddParam_DefaultDialectAsync(string endpointId) [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public void TestQueryParamsWithParams_DefaultDialect(string endpointId) { + SkipClusterPre8(endpointId); IDatabase db = GetCleanDatabase(endpointId); var ft = db.FT(2); @@ -2761,6 +2788,7 @@ public async Task TestBasicSpellCheckAsync(string endpointId) [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public void TestCrossTermDictionary(string endpointId) { + SkipClusterPre8(endpointId); IDatabase db = GetCleanDatabase(endpointId); var ft = db.FT(); @@ -2787,6 +2815,7 @@ public void TestCrossTermDictionary(string endpointId) [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public async Task TestCrossTermDictionaryAsync(string endpointId) { + SkipClusterPre8(endpointId); IDatabase db = GetCleanDatabase(endpointId); var ft = db.FT(); @@ -2858,6 +2887,7 @@ await Assert.ThrowsAsync(async () => [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public async Task TestQueryParamsWithParams_DefaultDialectAsync(string endpointId) { + SkipClusterPre8(endpointId); IDatabase db = GetCleanDatabase(endpointId); var ft = db.FT(2); @@ -3344,6 +3374,7 @@ public void TestProfileIssue306(string endpointId) [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public async Task TestProfileAsyncIssue306(string endpointId) { + 
SkipClusterPre8(endpointId); IDatabase db = GetCleanDatabase(endpointId); var ft = db.FT(); @@ -3404,6 +3435,7 @@ public void Issue175(string endpointId) [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public void GeoShapeFilterSpherical(string endpointId) { + SkipClusterPre8(endpointId); IDatabase db = GetCleanDatabase(endpointId); var ft = db.FT(); @@ -3469,6 +3501,7 @@ public void GeoShapeFilterSpherical(string endpointId) [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public async Task GeoShapeFilterSphericalAsync(string endpointId) { + SkipClusterPre8(endpointId); IDatabase db = GetCleanDatabase(endpointId); var ft = db.FT(); From d6f6020b69ad0dff6ed7ab030aaf16d0408a5ef8 Mon Sep 17 00:00:00 2001 From: Marc Gravell Date: Thu, 11 Sep 2025 11:48:36 +0100 Subject: [PATCH 22/27] update local docker file --- tests/dockers/docker-compose.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/dockers/docker-compose.yml b/tests/dockers/docker-compose.yml index 98ef921e..e63da254 100644 --- a/tests/dockers/docker-compose.yml +++ b/tests/dockers/docker-compose.yml @@ -3,7 +3,7 @@ services: redis: - image: ${CLIENT_LIBS_TEST_IMAGE:-redislabs/client-libs-test:rs-7.4.0-v1} + image: ${CLIENT_LIBS_TEST_IMAGE:-redislabs/client-libs-test:8.2.1-pre} container_name: redis-standalone environment: - TLS_ENABLED=yes @@ -21,7 +21,7 @@ services: - all cluster: - image: ${CLIENT_LIBS_TEST_IMAGE:-redislabs/client-libs-test:rs-7.4.0-v1} + image: ${CLIENT_LIBS_TEST_IMAGE:-redislabs/client-libs-test:8.2.1-pre} container_name: redis-cluster environment: - REDIS_CLUSTER=yes @@ -38,4 +38,4 @@ services: - "./cluster:/redis/work" profiles: - cluster - - all \ No newline at end of file + - all From c7097eed3a03fa103ce42b0fd1ee100d9b6932f4 Mon Sep 17 00:00:00 2001 From: Marc Gravell Date: Thu, 11 Sep 2025 12:00:17 +0100 Subject: [PATCH 23/27] skip a bunch more 
tests on cluster pre 8 --- tests/NRedisStack.Tests/Search/SearchTests.cs | 24 +++++++++++++++++++ 1 file changed, 24 insertions(+) diff --git a/tests/NRedisStack.Tests/Search/SearchTests.cs b/tests/NRedisStack.Tests/Search/SearchTests.cs index 39583a20..be9c7433 100644 --- a/tests/NRedisStack.Tests/Search/SearchTests.cs +++ b/tests/NRedisStack.Tests/Search/SearchTests.cs @@ -165,6 +165,7 @@ public async Task TestAggregationRequestTimeoutAsync(string endpointId) [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public void TestAggregations(string endpointId) { + SkipClusterPre8(endpointId); IDatabase db = GetCleanDatabase(endpointId); var ft = db.FT(); Schema sc = new(); @@ -204,6 +205,7 @@ public void TestAggregations(string endpointId) [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public async Task TestAggregationsAsync(string endpointId) { + SkipClusterPre8(endpointId); IDatabase db = GetCleanDatabase(endpointId); var ft = db.FT(); Schema sc = new(); @@ -278,6 +280,7 @@ public void TestAggregationsLoad(string endpointId) [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public async Task TestAggregationsLoadAsync(string endpointId) { + SkipClusterPre8(endpointId); IDatabase db = GetCleanDatabase(endpointId); var ft = db.FT(); var sc = new Schema().AddTextField("t1").AddTextField("t2"); @@ -520,6 +523,7 @@ public async Task TestAliasAsync(string endpointId) [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public void TestApplyAndFilterAggregations(string endpointId) { + SkipClusterPre8(endpointId); IDatabase db = GetCleanDatabase(endpointId); var ft = db.FT(); Schema sc = new(); @@ -614,6 +618,7 @@ public void TestCreate(string endpointId) [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public async 
Task TestCreateAsync(string endpointId) { + SkipClusterPre8(endpointId); IDatabase db = GetCleanDatabase(endpointId); var ft = db.FT(); var schema = new Schema().AddTextField("first").AddTextField("last").AddNumericField("age"); @@ -672,6 +677,7 @@ public void CreateNoParams(string endpointId) [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public async Task CreateNoParamsAsync(string endpointId) { + SkipClusterPre8(endpointId); IDatabase db = GetCleanDatabase(endpointId); var ft = db.FT(); @@ -745,6 +751,7 @@ public void FailWhenAttributeNotExist(string endpointId) [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public async Task CreateWithFieldNamesAsync(string endpointId) { + SkipClusterPre8(endpointId); IDatabase db = GetCleanDatabase(endpointId); var ft = db.FT(); Schema sc = new Schema().AddField(new TextField(FieldName.Of("first").As("given"))) @@ -788,6 +795,7 @@ public async Task FailWhenAttributeNotExistAsync(string endpointId) [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public void AlterAdd(string endpointId) { + SkipClusterPre8(endpointId); IDatabase db = GetCleanDatabase(endpointId); var ft = db.FT(); Schema sc = new Schema().AddTextField("title", 1.0); @@ -1082,6 +1090,7 @@ public void InfoWithIndexEmptyAndIndexMissing(string endpointId) [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public async Task AlterAddSortableAsync(string endpointId) { + SkipClusterPre8(endpointId); IDatabase db = GetCleanDatabase(endpointId); var ft = db.FT(); Schema sc = new Schema().AddTextField("title", 1.0, sortable: true); @@ -1585,6 +1594,7 @@ public void TestAggregationGroupBy(string endpointId) [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public void TestDictionary(string endpointId) { + 
SkipClusterPre8(endpointId); IDatabase db = GetCleanDatabase(endpointId); var ft = db.FT(); @@ -1764,6 +1774,7 @@ public async Task dropIndexDDAsync(string endpointId) [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public async Task TestDictionaryAsync(string endpointId) { + SkipClusterPre8(endpointId); IDatabase db = GetCleanDatabase(endpointId); var ft = db.FT(); @@ -1976,6 +1987,7 @@ public void TestModulePrefixs() [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public async Task GetTagFieldSyncAsync(string endpointId) { + SkipClusterPre8(endpointId); IDatabase db = GetCleanDatabase(endpointId); var ft = db.FT(); Schema sc = new Schema() @@ -2034,6 +2046,7 @@ public async Task GetTagFieldSyncAsync(string endpointId) [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public async Task TestGetTagFieldWithNonDefaultSeparatorSyncAsync(string endpointId) { + SkipClusterPre8(endpointId); IDatabase db = GetCleanDatabase(endpointId); var ft = db.FT(); Schema sc = new Schema() @@ -2220,6 +2233,7 @@ public void TestFilters(string endpointId) [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public async Task TestFiltersAsync(string endpointId) { + SkipClusterPre8(endpointId); IDatabase db = GetCleanDatabase(endpointId); var ft = db.FT(); // Create the index with the same fields as in the original test @@ -2583,6 +2597,7 @@ public void TestVectorCount_Issue70() [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public void VectorSimilaritySearch(string endpointId) { + SkipClusterPre8(endpointId); IDatabase db = GetCleanDatabase(endpointId); var ft = db.FT(); var json = db.JSON(); @@ -2676,6 +2691,7 @@ public async Task TestVectorFieldJson_Issue102Async() [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), 
MemberType = typeof(EndpointsFixture.Env))] public void TestQueryAddParam_DefaultDialect(string endpointId) { + SkipClusterPre8(endpointId); IDatabase db = GetCleanDatabase(endpointId); var ft = db.FT(2); @@ -2696,6 +2712,7 @@ public void TestQueryAddParam_DefaultDialect(string endpointId) [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public async Task TestQueryAddParam_DefaultDialectAsync(string endpointId) { + SkipClusterPre8(endpointId); IDatabase db = GetCleanDatabase(endpointId); var ft = db.FT(2); @@ -3172,6 +3189,7 @@ public async Task TestProfileSearchAsync(string endpointId) [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public void TestProfileSearch_WithoutCoordinator(string endpointId) { + SkipClusterPre8(endpointId); IDatabase db = GetCleanDatabase(endpointId); var ft = db.FT(); @@ -3192,6 +3210,7 @@ public void TestProfileSearch_WithoutCoordinator(string endpointId) [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public async Task TestProfileSearchAsync_WithoutCoordinator(string endpointId) { + SkipClusterPre8(endpointId); IDatabase db = GetCleanDatabase(endpointId); var ft = db.FT(); @@ -3284,6 +3303,7 @@ public async Task TestProfileAsync(string endpointId) [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public void TestProfile_WithoutCoordinator(string endpointId) { + SkipClusterPre8(endpointId); IDatabase db = GetCleanDatabase(endpointId); var ft = db.FT(); @@ -3314,6 +3334,7 @@ public void TestProfile_WithoutCoordinator(string endpointId) [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public async Task TestProfileAsync_WithoutCoordinator(string endpointId) { + SkipClusterPre8(endpointId); IDatabase db = GetCleanDatabase(endpointId); var ft = db.FT(); @@ -3344,6 +3365,7 @@ public 
async Task TestProfileAsync_WithoutCoordinator(string endpointId) [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public void TestProfileIssue306(string endpointId) { + SkipClusterPre8(endpointId); IDatabase db = GetCleanDatabase(endpointId); var ft = db.FT(); @@ -3571,6 +3593,7 @@ public async Task GeoShapeFilterSphericalAsync(string endpointId) [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public void GeoShapeFilterFlat(string endpointId) { + SkipClusterPre8(endpointId); IDatabase db = GetCleanDatabase(endpointId); var ft = db.FT(); WKTReader reader = new(); @@ -3626,6 +3649,7 @@ public void GeoShapeFilterFlat(string endpointId) [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public async Task GeoShapeFilterFlatAsync(string endpointId) { + SkipClusterPre8(endpointId); IDatabase db = GetCleanDatabase(endpointId); var ft = db.FT(); WKTReader reader = new(); From cb9c8df46e20a9758e7a32813413bfa99dd3d362 Mon Sep 17 00:00:00 2001 From: Marc Gravell Date: Thu, 11 Sep 2025 12:03:56 +0100 Subject: [PATCH 24/27] skip TestCreate on cluster < 8 --- tests/NRedisStack.Tests/Search/SearchTests.cs | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/NRedisStack.Tests/Search/SearchTests.cs b/tests/NRedisStack.Tests/Search/SearchTests.cs index be9c7433..0c1f101a 100644 --- a/tests/NRedisStack.Tests/Search/SearchTests.cs +++ b/tests/NRedisStack.Tests/Search/SearchTests.cs @@ -585,6 +585,7 @@ public void TestApplyAndFilterAggregations(string endpointId) [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))] public void TestCreate(string endpointId) { + SkipClusterPre8(endpointId); IDatabase db = GetCleanDatabase(endpointId); var ft = db.FT(); var schema = new Schema().AddTextField("first").AddTextField("last").AddNumericField("age"); From 
1a494f0debd3a00b19d64b9e2d0b5adeedd129dc Mon Sep 17 00:00:00 2001 From: Marc Gravell Date: Wed, 17 Sep 2025 10:01:09 +0100 Subject: [PATCH 25/27] Update tests/dockers/docker-compose.yml Co-authored-by: atakavci --- tests/dockers/docker-compose.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/dockers/docker-compose.yml b/tests/dockers/docker-compose.yml index e63da254..652885f2 100644 --- a/tests/dockers/docker-compose.yml +++ b/tests/dockers/docker-compose.yml @@ -3,7 +3,7 @@ services: redis: - image: ${CLIENT_LIBS_TEST_IMAGE:-redislabs/client-libs-test:8.2.1-pre} + image: ${CLIENT_LIBS_TEST_IMAGE:-redislabs/client-libs-test:8.2.1} container_name: redis-standalone environment: - TLS_ENABLED=yes From e49b48038c95718102826dca7438cd4047ea4d68 Mon Sep 17 00:00:00 2001 From: Marc Gravell Date: Wed, 17 Sep 2025 10:06:54 +0100 Subject: [PATCH 26/27] clarify that the enumerable APIs may involve multiple operations --- src/NRedisStack/Search/ISearchCommandsAsync.cs | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/src/NRedisStack/Search/ISearchCommandsAsync.cs b/src/NRedisStack/Search/ISearchCommandsAsync.cs index 345b494f..13d5e3f2 100644 --- a/src/NRedisStack/Search/ISearchCommandsAsync.cs +++ b/src/NRedisStack/Search/ISearchCommandsAsync.cs @@ -16,7 +16,8 @@ public interface ISearchCommandsAsync Task _ListAsync(); /// - /// Run a search query on an index, and perform aggregate transformations on the results. + /// Run a search query on an index, and perform aggregate transformations on the results. This operates + /// as a cursor and may involve multiple commands to the server. /// /// The index name. /// The query @@ -25,7 +26,8 @@ public interface ISearchCommandsAsync Task AggregateAsync(string index, AggregationRequest query); /// - /// Run a search query on an index, and perform aggregate transformations on the results. + /// Run a search query on an index, and perform aggregate transformations on the results. 
This operates + /// as a cursor and may involve multiple commands to the server. /// /// The index name. /// The query. From 59045a5cda72fcb4f6b3537c08d55f72284aab4c Mon Sep 17 00:00:00 2001 From: Marc Gravell Date: Wed, 17 Sep 2025 10:11:11 +0100 Subject: [PATCH 27/27] clarify why/when the old cursor API will fail --- src/NRedisStack/Search/ISearchCommands.cs | 4 ++-- src/NRedisStack/Search/ISearchCommandsAsync.cs | 4 ++-- src/NRedisStack/Search/SearchCommands.cs | 4 ++-- src/NRedisStack/Search/SearchCommandsAsync.cs | 4 ++-- 4 files changed, 8 insertions(+), 8 deletions(-) diff --git a/src/NRedisStack/Search/ISearchCommands.cs b/src/NRedisStack/Search/ISearchCommands.cs index df132424..a3c4c387 100644 --- a/src/NRedisStack/Search/ISearchCommands.cs +++ b/src/NRedisStack/Search/ISearchCommands.cs @@ -107,7 +107,7 @@ public interface ISearchCommands /// The cursor's ID. /// if it has been deleted, if it did not exist. /// - [Obsolete("When possible, use CursorDel(AggregationResult) instead.")] + [Obsolete("When possible, use CursorDel(AggregationResult) instead. This legacy API will not work correctly on CLUSTER environments, but will continue to work for single-node deployments.")] [Browsable(false), EditorBrowsable(EditorBrowsableState.Never)] bool CursorDel(string indexName, long cursorId); @@ -127,7 +127,7 @@ public interface ISearchCommands /// Limit the amount of returned results. /// A AggregationResult object with the results /// - [Obsolete("When possible, use AggregateEnumerable or CursorRead(AggregationResult, int?) instead.")] + [Obsolete("When possible, use AggregateEnumerable or CursorRead(AggregationResult, int?) instead. This legacy API will not work correctly on CLUSTER environments, but will continue to work for single-node deployments.")] [Browsable(false), EditorBrowsable(EditorBrowsableState.Never)] AggregationResult CursorRead(string indexName, long cursorId, int? 
 count = null); diff --git a/src/NRedisStack/Search/ISearchCommandsAsync.cs b/src/NRedisStack/Search/ISearchCommandsAsync.cs index 13d5e3f2..9ecad397 100644 --- a/src/NRedisStack/Search/ISearchCommandsAsync.cs +++ b/src/NRedisStack/Search/ISearchCommandsAsync.cs @@ -108,7 +108,7 @@ public interface ISearchCommandsAsync /// The cursor's ID. /// if it has been deleted, if it did not exist. /// - [Obsolete("When possible, use CursorDelAsync(AggregationResult, int?) instead.")] + [Obsolete("When possible, use CursorDelAsync(AggregationResult) instead. This legacy API will not work correctly on CLUSTER environments, but will continue to work for single-node deployments.")] [Browsable(false), EditorBrowsable(EditorBrowsableState.Never)] Task CursorDelAsync(string indexName, long cursorId); @@ -128,7 +128,7 @@ public interface ISearchCommandsAsync /// Limit the amount of returned results. /// A AggregationResult object with the results /// - [Obsolete("When possible, use AggregateAsyncEnumerable or CursorReadAsync(AggregationResult, int?) instead.")] + [Obsolete("When possible, use AggregateEnumerableAsync or CursorReadAsync(AggregationResult, int?) instead. This legacy API will not work correctly on CLUSTER environments, but will continue to work for single-node deployments.")] [Browsable(false), EditorBrowsable(EditorBrowsableState.Never)] Task CursorReadAsync(string indexName, long cursorId, int? count = null); diff --git a/src/NRedisStack/Search/SearchCommands.cs b/src/NRedisStack/Search/SearchCommands.cs index 64a8e416..d7112de2 100644 --- a/src/NRedisStack/Search/SearchCommands.cs +++ b/src/NRedisStack/Search/SearchCommands.cs @@ -120,7 +120,7 @@ public bool Create(string indexName, Schema schema) } /// - [Obsolete("When possible, use CursorDel(AggregationResult) instead.")] + [Obsolete("When possible, use CursorDel(AggregationResult) instead. 
This legacy API will not work correctly on CLUSTER environments, but will continue to work for single-node deployments.")] [Browsable(false), EditorBrowsable(EditorBrowsableState.Never)] public bool CursorDel(string indexName, long cursorId) { @@ -144,7 +144,7 @@ public bool CursorDel(AggregationResult result) } /// - [Obsolete("When possible, use CusorReadEnumerable or CursorRead(AggregationResult, int?) instead.")] + [Obsolete("When possible, use AggregateEnumerable or CursorRead(AggregationResult, int?) instead. This legacy API will not work correctly on CLUSTER environments, but will continue to work for single-node deployments.")] [Browsable(false), EditorBrowsable(EditorBrowsableState.Never)] public AggregationResult CursorRead(string indexName, long cursorId, int? count = null) { diff --git a/src/NRedisStack/Search/SearchCommandsAsync.cs b/src/NRedisStack/Search/SearchCommandsAsync.cs index 1f471182..b26979ec 100644 --- a/src/NRedisStack/Search/SearchCommandsAsync.cs +++ b/src/NRedisStack/Search/SearchCommandsAsync.cs @@ -161,7 +161,7 @@ public async Task CreateAsync(string indexName, Schema schema) } /// - [Obsolete("When possible, use CursorDelAsync(AggregationResult, int?) instead.")] + [Obsolete("When possible, use CursorDelAsync(AggregationResult) instead. This legacy API will not work correctly on CLUSTER environments, but will continue to work for single-node deployments.")] [Browsable(false), EditorBrowsable(EditorBrowsableState.Never)] public async Task CursorDelAsync(string indexName, long cursorId) { @@ -185,7 +185,7 @@ public async Task CursorDelAsync(AggregationResult result) } /// - [Obsolete("When possible, use AggregateAsyncEnumerable or CursorReadAsync(AggregationResult, int?) instead.")] + [Obsolete("When possible, use AggregateEnumerableAsync or CursorReadAsync(AggregationResult, int?) instead. 
This legacy API will not work correctly on CLUSTER environments, but will continue to work for single-node deployments.")] [Browsable(false), EditorBrowsable(EditorBrowsableState.Never)] public async Task CursorReadAsync(string indexName, long cursorId, int? count = null) {