diff --git a/.gitignore b/.gitignore
index 2476cd6b..60d644b6 100644
--- a/.gitignore
+++ b/.gitignore
@@ -410,3 +410,8 @@ tests/NRedisStack.Tests/redis_credentials/redis_user.crt
# global.json
global.json
tests/NRedisStack.Tests/lcov.net8.0.info
+
+# docker containers
+tests/dockers/cluster/
+tests/dockers/standalone/
+tests/dockers/all/
diff --git a/Directory.Packages.props b/Directory.Packages.props
index aa207a94..6625890b 100644
--- a/Directory.Packages.props
+++ b/Directory.Packages.props
@@ -12,7 +12,7 @@
-
+
diff --git a/src/NRedisStack/Auxiliary.cs b/src/NRedisStack/Auxiliary.cs
index 33b7857b..25f456f3 100644
--- a/src/NRedisStack/Auxiliary.cs
+++ b/src/NRedisStack/Auxiliary.cs
@@ -67,12 +67,22 @@ public static RedisResult Execute(this IDatabase db, SerializedCommand command)
return db.Execute(command.Command, command.Args);
}
+ internal static RedisResult Execute(this IServer server, int? db, SerializedCommand command)
+ {
+ return server.Execute(db, command.Command, command.Args);
+ }
+
public static async Task ExecuteAsync(this IDatabaseAsync db, SerializedCommand command)
{
((IDatabase)db).SetInfoInPipeline();
return await db.ExecuteAsync(command.Command, command.Args);
}
+ internal static async Task ExecuteAsync(this IServer server, int? db, SerializedCommand command)
+ {
+ return await server.ExecuteAsync(db, command.Command, command.Args);
+ }
+
public static List ExecuteBroadcast(this IDatabase db, string command)
=> db.ExecuteBroadcast(new SerializedCommand(command));
diff --git a/src/NRedisStack/PublicAPI/PublicAPI.Unshipped.txt b/src/NRedisStack/PublicAPI/PublicAPI.Unshipped.txt
index 7dc5c581..00dcb3fb 100644
--- a/src/NRedisStack/PublicAPI/PublicAPI.Unshipped.txt
+++ b/src/NRedisStack/PublicAPI/PublicAPI.Unshipped.txt
@@ -1 +1,13 @@
#nullable enable
+NRedisStack.ISearchCommands.AggregateEnumerable(string! index, NRedisStack.Search.AggregationRequest! query) -> System.Collections.Generic.IEnumerable!
+NRedisStack.ISearchCommands.CursorDel(NRedisStack.Search.AggregationResult! result) -> bool
+NRedisStack.ISearchCommands.CursorRead(NRedisStack.Search.AggregationResult! result, int? count = null) -> NRedisStack.Search.AggregationResult!
+NRedisStack.ISearchCommandsAsync.AggregateAsyncEnumerable(string! index, NRedisStack.Search.AggregationRequest! query) -> System.Collections.Generic.IAsyncEnumerable!
+NRedisStack.ISearchCommandsAsync.CursorDelAsync(NRedisStack.Search.AggregationResult! result) -> System.Threading.Tasks.Task!
+NRedisStack.ISearchCommandsAsync.CursorReadAsync(NRedisStack.Search.AggregationResult! result, int? count = null) -> System.Threading.Tasks.Task!
+NRedisStack.SearchCommands.AggregateEnumerable(string! index, NRedisStack.Search.AggregationRequest! query) -> System.Collections.Generic.IEnumerable!
+NRedisStack.SearchCommands.CursorDel(NRedisStack.Search.AggregationResult! result) -> bool
+NRedisStack.SearchCommands.CursorRead(NRedisStack.Search.AggregationResult! result, int? count = null) -> NRedisStack.Search.AggregationResult!
+NRedisStack.SearchCommandsAsync.AggregateAsyncEnumerable(string! index, NRedisStack.Search.AggregationRequest! query) -> System.Collections.Generic.IAsyncEnumerable!
+NRedisStack.SearchCommandsAsync.CursorDelAsync(NRedisStack.Search.AggregationResult! result) -> System.Threading.Tasks.Task!
+NRedisStack.SearchCommandsAsync.CursorReadAsync(NRedisStack.Search.AggregationResult! result, int? count = null) -> System.Threading.Tasks.Task!
diff --git a/src/NRedisStack/ResponseParser.cs b/src/NRedisStack/ResponseParser.cs
index 8382771a..8687aad9 100644
--- a/src/NRedisStack/ResponseParser.cs
+++ b/src/NRedisStack/ResponseParser.cs
@@ -737,6 +737,20 @@ public static AggregationResult ToAggregationResult(this RedisResult result, Agg
}
}
+ internal static AggregationResult ToAggregationResult(this RedisResult result, string indexName, AggregationRequest query, IServer? server, int? database)
+ {
+ if (query.IsWithCursor())
+ {
+ var results = (RedisResult[])result!;
+
+ return new AggregationResult.WithCursorAggregationResult(indexName, results[0], (long)results[1], server, database);
+ }
+ else
+ {
+ return new(result);
+ }
+ }
+
public static Dictionary[] ToDictionarys(this RedisResult result)
{
var resArr = (RedisResult[])result!;
diff --git a/src/NRedisStack/Search/AggregationRequest.cs b/src/NRedisStack/Search/AggregationRequest.cs
index cd3bcd8a..8cac4f77 100644
--- a/src/NRedisStack/Search/AggregationRequest.cs
+++ b/src/NRedisStack/Search/AggregationRequest.cs
@@ -128,6 +128,7 @@ public AggregationRequest Cursor(int? count = null, long? maxIdle = null)
if (count != null)
{
+ Count = count;
args.Add(SearchArgs.COUNT);
args.Add(count);
}
@@ -139,6 +140,7 @@ public AggregationRequest Cursor(int? count = null, long? maxIdle = null)
}
return this;
}
+ internal int? Count { get; set; }
public AggregationRequest Params(Dictionary nameValue)
{
diff --git a/src/NRedisStack/Search/AggregationResult.cs b/src/NRedisStack/Search/AggregationResult.cs
index 3eb4827d..6cb23add 100644
--- a/src/NRedisStack/Search/AggregationResult.cs
+++ b/src/NRedisStack/Search/AggregationResult.cs
@@ -3,15 +3,29 @@
namespace NRedisStack.Search;
-public sealed class AggregationResult
+public class AggregationResult
{
+ // internal subclass for WITHCURSOR calls, which need to be issued to the same connection
+ internal sealed class WithCursorAggregationResult : AggregationResult
+ {
+ internal WithCursorAggregationResult(string indexName, RedisResult result, long cursorId, IServer? server,
+ int? database) : base(result, cursorId)
+ {
+ IndexName = indexName;
+ Server = server;
+ Database = database;
+ }
+ public string IndexName { get; }
+ public IServer? Server { get; }
+ public int? Database { get; }
+ }
+
public long TotalResults { get; }
private readonly Dictionary[] _results;
private Dictionary[]? _resultsAsRedisValues;
public long CursorId { get; }
-
internal AggregationResult(RedisResult result, long cursorId = -1)
{
var arr = (RedisResult[])result!;
@@ -45,7 +59,6 @@ internal AggregationResult(RedisResult result, long cursorId = -1)
CursorId = cursorId;
}
-
///
/// takes a Redis multi-bulk array represented by a RedisResult[] and recursively processes its elements.
/// For each element in the array, it checks if it's another multi-bulk array, and if so, it recursively calls itself.
diff --git a/src/NRedisStack/Search/ISearchCommands.cs b/src/NRedisStack/Search/ISearchCommands.cs
index 86408bfe..a3c4c387 100644
--- a/src/NRedisStack/Search/ISearchCommands.cs
+++ b/src/NRedisStack/Search/ISearchCommands.cs
@@ -1,4 +1,6 @@
+using System.ComponentModel;
using NRedisStack.Search;
+using NRedisStack.Search.Aggregation;
using NRedisStack.Search.DataTypes;
using StackExchange.Redis;
@@ -18,11 +20,20 @@ public interface ISearchCommands
/// Run a search query on an index, and perform aggregate transformations on the results.
///
/// The index name.
- /// The query
+ /// The query.
/// An object
///
AggregationResult Aggregate(string index, AggregationRequest query);
+ ///
+ /// Run a search query on an index, and perform aggregate transformations on the results.
+ ///
+ /// The index name.
+ /// The query.
+ /// A sequence of values.
+ ///
+ IEnumerable AggregateEnumerable(string index, AggregationRequest query);
+
///
/// Add an alias to an index.
///
@@ -92,22 +103,43 @@ public interface ISearchCommands
///
/// Delete a cursor from the index.
///
- /// The index name
+ /// The index name.
/// The cursor's ID.
/// if it has been deleted, if it did not exist.
///
+ [Obsolete("When possible, use CursorDel(AggregationResult) instead. This legacy API will not work correctly on CLUSTER environments, but will continue to work for single-node deployments.")]
+ [Browsable(false), EditorBrowsable(EditorBrowsableState.Never)]
bool CursorDel(string indexName, long cursorId);
+ ///
+ /// Delete a cursor from the index.
+ ///
+ /// The result of a previous call to Aggregate or CursorRead.
+ /// if it has been deleted, if it did not exist.
+ ///
+ bool CursorDel(AggregationResult result);
+
///
/// Read next results from an existing cursor.
///
- /// The index name
+ /// The index name.
/// The cursor's ID.
/// Limit the amount of returned results.
/// A AggregationResult object with the results
///
+ [Obsolete("When possible, use AggregateEnumerable or CursorRead(AggregationResult, int?) instead. This legacy API will not work correctly on CLUSTER environments, but will continue to work for single-node deployments.")]
+ [Browsable(false), EditorBrowsable(EditorBrowsableState.Never)]
AggregationResult CursorRead(string indexName, long cursorId, int? count = null);
+ ///
+ /// Read next results from an existing cursor.
+ ///
+ /// The result of a previous call to Aggregate or CursorRead.
+ /// Limit the amount of returned results.
+ /// A AggregationResult object with the results
+ ///
+ public AggregationResult CursorRead(AggregationResult result, int? count = null);
+
///
/// Add terms to a dictionary.
///
diff --git a/src/NRedisStack/Search/ISearchCommandsAsync.cs b/src/NRedisStack/Search/ISearchCommandsAsync.cs
index f9088dfc..9ecad397 100644
--- a/src/NRedisStack/Search/ISearchCommandsAsync.cs
+++ b/src/NRedisStack/Search/ISearchCommandsAsync.cs
@@ -1,4 +1,6 @@
+using System.ComponentModel;
using NRedisStack.Search;
+using NRedisStack.Search.Aggregation;
using NRedisStack.Search.DataTypes;
using StackExchange.Redis;
@@ -14,7 +16,8 @@ public interface ISearchCommandsAsync
Task _ListAsync();
///
- /// Run a search query on an index, and perform aggregate transformations on the results.
+ /// Run a search query on an index, and perform aggregate transformations on the results. This operates
+ /// as a cursor and may involve multiple commands to the server.
///
/// The index name.
/// The query
@@ -22,6 +25,16 @@ public interface ISearchCommandsAsync
///
Task AggregateAsync(string index, AggregationRequest query);
+ ///
+ /// Run a search query on an index, and perform aggregate transformations on the results. This operates
+ /// as a cursor and may involve multiple commands to the server.
+ ///
+ /// The index name.
+ /// The query.
+ /// A sequence of values.
+ ///
+ IAsyncEnumerable AggregateAsyncEnumerable(string index, AggregationRequest query);
+
///
/// Add an alias to an index.
///
@@ -95,8 +108,18 @@ public interface ISearchCommandsAsync
/// The cursor's ID.
/// if it has been deleted, if it did not exist.
///
+ [Obsolete("When possible, use CursorDelAsync(AggregationResult, int?) instead. This legacy API will not work correctly on CLUSTER environments, but will continue to work for single-node deployments.")]
+ [Browsable(false), EditorBrowsable(EditorBrowsableState.Never)]
Task CursorDelAsync(string indexName, long cursorId);
+ ///
+ /// Delete a cursor from the index.
+ ///
+ /// The result of a previous call to AggregateAsync or CursorReadAsync.
+ /// if it has been deleted, if it did not exist.
+ ///
+ Task CursorDelAsync(AggregationResult result);
+
///
/// Read next results from an existing cursor.
///
@@ -105,8 +128,19 @@ public interface ISearchCommandsAsync
/// Limit the amount of returned results.
/// A AggregationResult object with the results
///
+ [Obsolete("When possible, use AggregateAsyncEnumerable or CursorReadAsync(AggregationResult, int?) instead. This legacy API will not work correctly on CLUSTER environments, but will continue to work for single-node deployments.")]
+ [Browsable(false), EditorBrowsable(EditorBrowsableState.Never)]
Task CursorReadAsync(string indexName, long cursorId, int? count = null);
+ ///
+ /// Read next results from an existing cursor.
+ ///
+ /// The result of a previous AggregateAsync or CursorReadAsync call.
+ /// Limit the amount of returned results.
+ /// A AggregationResult object with the results
+ ///
+ Task CursorReadAsync(AggregationResult result, int? count = null);
+
///
/// Add terms to a dictionary.
///
diff --git a/src/NRedisStack/Search/SearchCommands.cs b/src/NRedisStack/Search/SearchCommands.cs
index 381cb13b..d7112de2 100644
--- a/src/NRedisStack/Search/SearchCommands.cs
+++ b/src/NRedisStack/Search/SearchCommands.cs
@@ -1,4 +1,6 @@
+using System.ComponentModel;
using NRedisStack.Search;
+using NRedisStack.Search.Aggregation;
using NRedisStack.Search.DataTypes;
using StackExchange.Redis;
namespace NRedisStack;
@@ -16,8 +18,54 @@ public RedisResult[] _List()
public AggregationResult Aggregate(string index, AggregationRequest query)
{
SetDefaultDialectIfUnset(query);
- var result = db.Execute(SearchCommandBuilder.Aggregate(index, query));
- return result.ToAggregationResult(query);
+ IServer? server = null;
+ int? database = null;
+
+ var command = SearchCommandBuilder.Aggregate(index, query);
+ if (query.IsWithCursor())
+ {
+ // we can issue this anywhere, but follow-up calls need to be on the same server
+ server = GetRandomServerForCluster(db, out database);
+ }
+
+ RedisResult result;
+ if (server is not null)
+ {
+ result = server.Execute(database, command);
+ }
+ else
+ {
+ result = db.Execute(command);
+ }
+
+ return result.ToAggregationResult(index, query, server, database);
+ }
+
+ public IEnumerable AggregateEnumerable(string index, AggregationRequest query)
+ {
+ if (!query.IsWithCursor()) query.Cursor();
+
+ var result = Aggregate(index, query);
+ try
+ {
+ while (true)
+ {
+ var count = checked((int)result.TotalResults);
+ for (int i = 0; i < count; i++)
+ {
+ yield return result.GetRow(i);
+ }
+ if (result.CursorId == 0) break;
+ result = CursorRead(result, query.Count);
+ }
+ }
+ finally
+ {
+ if (result.CursorId != 0)
+ {
+ CursorDel(result);
+ }
+ }
}
///
@@ -72,18 +120,52 @@ public bool Create(string indexName, Schema schema)
}
///
+ [Obsolete("When possible, use CursorDel(AggregationResult) instead. This legacy API will not work correctly on CLUSTER environments, but will continue to work for single-node deployments.")]
+ [Browsable(false), EditorBrowsable(EditorBrowsableState.Never)]
public bool CursorDel(string indexName, long cursorId)
{
return db.Execute(SearchCommandBuilder.CursorDel(indexName, cursorId)).OKtoBoolean();
}
+ public bool CursorDel(AggregationResult result)
+ {
+ if (result is not AggregationResult.WithCursorAggregationResult withCursor)
+ {
+ throw new ArgumentException(
+ message: $"{nameof(CursorDel)} must be called with a value returned from a previous call to {nameof(Aggregate)} with a cursor.",
+ paramName: nameof(result));
+ }
+
+ var command = SearchCommandBuilder.CursorDel(withCursor.IndexName, withCursor.CursorId);
+ var resp = withCursor.Server is { } server
+ ? server.Execute(withCursor.Database, command)
+ : db.Execute(command);
+ return resp.OKtoBoolean();
+ }
+
///
+ [Obsolete("When possible, use CusorReadEnumerable or CursorRead(AggregationResult, int?) instead. This legacy API will not work correctly on CLUSTER environments, but will continue to work for single-node deployments.")]
+ [Browsable(false), EditorBrowsable(EditorBrowsableState.Never)]
public AggregationResult CursorRead(string indexName, long cursorId, int? count = null)
{
var resp = db.Execute(SearchCommandBuilder.CursorRead(indexName, cursorId, count)).ToArray();
return new(resp[0], (long)resp[1]);
}
+ public AggregationResult CursorRead(AggregationResult result, int? count = null)
+ {
+ if (result is not AggregationResult.WithCursorAggregationResult withCursor)
+ {
+ throw new ArgumentException(message: $"{nameof(CursorRead)} must be called with a value returned from a previous call to {nameof(Aggregate)} with a cursor.", paramName: nameof(result));
+ }
+ var command = SearchCommandBuilder.CursorRead(withCursor.IndexName, withCursor.CursorId, count);
+ var rawResult = withCursor.Server is { } server
+ ? server.Execute(withCursor.Database, command)
+ : db.Execute(command);
+ var resp = rawResult.ToArray();
+ return new AggregationResult.WithCursorAggregationResult(withCursor.IndexName, resp[0], (long)resp[1], withCursor.Server, withCursor.Database);
+ }
+
///
public long DictAdd(string dict, params string[] terms)
{
diff --git a/src/NRedisStack/Search/SearchCommandsAsync.cs b/src/NRedisStack/Search/SearchCommandsAsync.cs
index f4757af3..b26979ec 100644
--- a/src/NRedisStack/Search/SearchCommandsAsync.cs
+++ b/src/NRedisStack/Search/SearchCommandsAsync.cs
@@ -1,4 +1,6 @@
+using System.ComponentModel;
using NRedisStack.Search;
+using NRedisStack.Search.Aggregation;
using NRedisStack.Search.DataTypes;
using StackExchange.Redis;
namespace NRedisStack;
@@ -40,20 +42,71 @@ public async Task _ListAsync()
return (await _db.ExecuteAsync(SearchCommandBuilder._List())).ToArray();
}
+ internal static IServer? GetRandomServerForCluster(IDatabaseAsync db, out int? database)
+ {
+ var server = db.Multiplexer.GetServer(key: default(RedisKey));
+ // ReSharper disable once ConditionIsAlwaysTrueOrFalseAccordingToNullableAPIContract
+ if (server is null || server.ServerType != ServerType.Cluster)
+ {
+ database = null;
+ return null;
+ }
+ // This is vexingly misplaced, but: it doesn't actually matter for cluster
+ database = db is IDatabase nonAsync ? nonAsync.Database : null;
+ return server;
+ }
+
///
public async Task AggregateAsync(string index, AggregationRequest query)
{
SetDefaultDialectIfUnset(query);
- var result = await _db.ExecuteAsync(SearchCommandBuilder.Aggregate(index, query));
+ IServer? server = null;
+ int? database = null;
+
+ var command = SearchCommandBuilder.Aggregate(index, query);
if (query.IsWithCursor())
{
- var results = (RedisResult[])result!;
+ // we can issue this anywhere, but follow-up calls need to be on the same server
+ server = GetRandomServerForCluster(_db, out database);
+ }
- return new(results[0], (long)results[1]);
+ RedisResult result;
+ if (server is not null)
+ {
+ result = await server.ExecuteAsync(database, command);
}
else
{
- return new(result);
+ result = await _db.ExecuteAsync(command);
+ }
+
+ return result.ToAggregationResult(index, query, server, database);
+ }
+
+ public async IAsyncEnumerable AggregateAsyncEnumerable(string index, AggregationRequest query)
+ {
+ if (!query.IsWithCursor()) query.Cursor();
+
+ var result = await AggregateAsync(index, query);
+ try
+ {
+ while (true)
+ {
+ var count = checked((int)result.TotalResults);
+ for (int i = 0; i < count; i++)
+ {
+ yield return result.GetRow(i);
+ }
+ if (result.CursorId == 0) break;
+ result = await CursorReadAsync(result, query.Count);
+ }
+ }
+ finally
+ {
+ if (result.CursorId != 0)
+ {
+ await CursorDelAsync(result);
+ }
}
}
@@ -108,18 +161,52 @@ public async Task CreateAsync(string indexName, Schema schema)
}
///
+ [Obsolete("When possible, use CursorDelAsync(AggregationResult, int?) instead. This legacy API will not work correctly on CLUSTER environments, but will continue to work for single-node deployments.")]
+ [Browsable(false), EditorBrowsable(EditorBrowsableState.Never)]
public async Task CursorDelAsync(string indexName, long cursorId)
{
return (await _db.ExecuteAsync(SearchCommandBuilder.CursorDel(indexName, cursorId))).OKtoBoolean();
}
+ public async Task CursorDelAsync(AggregationResult result)
+ {
+ if (result is not AggregationResult.WithCursorAggregationResult withCursor)
+ {
+ throw new ArgumentException(
+ message: $"{nameof(CursorDelAsync)} must be called with a value returned from a previous call to {nameof(AggregateAsync)} with a cursor.",
+ paramName: nameof(result));
+ }
+
+ var command = SearchCommandBuilder.CursorDel(withCursor.IndexName, withCursor.CursorId);
+ var pending = withCursor.Server is { } server
+ ? server.ExecuteAsync(withCursor.Database, command)
+ : _db.ExecuteAsync(command);
+ return (await pending).OKtoBoolean();
+ }
+
///
+ [Obsolete("When possible, use AggregateAsyncEnumerable or CursorReadAsync(AggregationResult, int?) instead. This legacy API will not work correctly on CLUSTER environments, but will continue to work for single-node deployments.")]
+ [Browsable(false), EditorBrowsable(EditorBrowsableState.Never)]
public async Task CursorReadAsync(string indexName, long cursorId, int? count = null)
{
var resp = (await _db.ExecuteAsync(SearchCommandBuilder.CursorRead(indexName, cursorId, count))).ToArray();
return new(resp[0], (long)resp[1]);
}
+ public async Task CursorReadAsync(AggregationResult result, int? count = null)
+ {
+ if (result is not AggregationResult.WithCursorAggregationResult withCursor)
+ {
+ throw new ArgumentException(message: $"{nameof(CursorReadAsync)} must be called with a value returned from a previous call to {nameof(AggregateAsync)} with a cursor.", paramName: nameof(result));
+ }
+ var command = SearchCommandBuilder.CursorRead(withCursor.IndexName, withCursor.CursorId, count);
+ var pending = withCursor.Server is { } server
+ ? server.ExecuteAsync(withCursor.Database, command)
+ : _db.ExecuteAsync(command);
+ var resp = (await pending).ToArray();
+ return new AggregationResult.WithCursorAggregationResult(withCursor.IndexName, resp[0], (long)resp[1], withCursor.Server, withCursor.Database);
+ }
+
///
public async Task DictAddAsync(string dict, params string[] terms)
{
diff --git a/tests/NRedisStack.Tests/AbstractNRedisStackTest.cs b/tests/NRedisStack.Tests/AbstractNRedisStackTest.cs
index 5f2063db..2e694da8 100644
--- a/tests/NRedisStack.Tests/AbstractNRedisStackTest.cs
+++ b/tests/NRedisStack.Tests/AbstractNRedisStackTest.cs
@@ -52,7 +52,7 @@ protected IDatabase GetCleanDatabase(string endpointId = EndpointsFixture.Env.St
{
var server = redis.GetServer(endPoint);
- if (server.IsReplica) continue;
+ if (server.IsReplica || !server.IsConnected) continue;
server.Execute("FLUSHALL");
}
diff --git a/tests/NRedisStack.Tests/NRedisStack.Tests.csproj b/tests/NRedisStack.Tests/NRedisStack.Tests.csproj
index 5d756454..83bd48e5 100644
--- a/tests/NRedisStack.Tests/NRedisStack.Tests.csproj
+++ b/tests/NRedisStack.Tests/NRedisStack.Tests.csproj
@@ -22,7 +22,7 @@
-
+
runtime; build; native; contentfiles; analyzers; buildtransitive
all
diff --git a/tests/NRedisStack.Tests/Search/SearchTests.cs b/tests/NRedisStack.Tests/Search/SearchTests.cs
index 119faeff..0c1f101a 100644
--- a/tests/NRedisStack.Tests/Search/SearchTests.cs
+++ b/tests/NRedisStack.Tests/Search/SearchTests.cs
@@ -1,4 +1,4 @@
-#pragma warning disable CS0618, CS0612 // allow testing obsolete methods
+#pragma warning disable CS0618, CS0612 // allow testing obsolete methods
using Xunit;
using StackExchange.Redis;
using NRedisStack.RedisStackCommands;
@@ -9,51 +9,66 @@
using System.Runtime.InteropServices;
using NetTopologySuite.IO;
using NetTopologySuite.Geometries;
+using Xunit.Abstractions;
namespace NRedisStack.Tests.Search;
-public class SearchTests(EndpointsFixture endpointsFixture) : AbstractNRedisStackTest(endpointsFixture), IDisposable
+public class SearchTests(EndpointsFixture endpointsFixture, ITestOutputHelper log)
+ : AbstractNRedisStackTest(endpointsFixture, log), IDisposable
{
// private readonly string key = "SEARCH_TESTS";
private readonly string index = "TEST_INDEX";
+ private static void SkipClusterPre8(string endpointId)
+ {
+ // Many of the FT.* commands are ... more awkward pre 8 when using cluster. Rather than
+ // fight eventual-consistency/timing issues: grandfather the existing behaviour, and start
+ // afresh from v8, where things behave much more predictably and reasonably.
+ Skip.If(endpointId == EndpointsFixture.Env.Cluster
+ && EndpointsFixture.RedisVersion.Major < 8, "Ignoring cluster tests for FT.SEARCH pre Redis 8.0");
+ }
+
private void AddDocument(IDatabase db, Document doc)
{
- string key = doc.Id;
- var properties = doc.GetProperties();
- // HashEntry[] hash = new HashEntry[properties.Count()];
- // for(int i = 0; i < properties.Count(); i++)
- // {
- // var property = properties.ElementAt(i);
- // hash[i] = new HashEntry(property.Key, property.Value);
- // }
- // db.HashSet(key, hash);
- var nameValue = new List() { key };
- foreach (var item in properties)
- {
- nameValue.Add(item.Key);
- nameValue.Add(item.Value);
- }
- db.Execute("HSET", nameValue);
+ var hash = doc.GetProperties()
+ .Select(pair => new HashEntry(pair.Key, pair.Value))
+ .ToArray();
+ db.HashSet(doc.Id, hash);
}
private void AddDocument(IDatabase db, string key, Dictionary objDictionary)
{
Dictionary strDictionary = new();
- // HashEntry[] hash = new HashEntry[objDictionary.Count()];
- // for(int i = 0; i < objDictionary.Count(); i++)
- // {
- // var property = objDictionary.ElementAt(i);
- // hash[i] = new HashEntry(property.Key, property.Value.ToString());
- // }
- // db.HashSet(key, hash);
- var nameValue = new List() { key };
- foreach (var item in objDictionary)
- {
- nameValue.Add(item.Key);
- nameValue.Add(item.Value);
+ var hash = objDictionary
+ .Select(pair => new HashEntry(pair.Key, pair.Value switch
+ {
+ string s => (RedisValue)s,
+ byte[] b => b,
+ int i => i,
+ long l => l,
+ double d => d,
+ _ => throw new ArgumentException($"Unsupported type: {pair.Value.GetType()}"),
+ }))
+ .ToArray();
+ db.HashSet(key, hash);
+ }
+
+ private void AssertDatabaseSize(IDatabase db, int expected)
+ {
+ // in part, this is to allow replication to catch up
+ for (int i = 0; i < 10; i++)
+ {
+ Assert.Equal(expected, DatabaseSize(db));
+ }
+ }
+
+ private async Task AssertDatabaseSizeAsync(IDatabase db, int expected)
+ {
+ // in part, this is to allow replication to catch up
+ for (int i = 0; i < 10; i++)
+ {
+ Assert.Equal(expected, await DatabaseSizeAsync(db));
}
- db.Execute("HSET", nameValue);
}
[SkipIfRedisTheory(Is.Enterprise)]
@@ -73,7 +88,7 @@ public void TestAggregationRequestVerbatim(string endpointId)
Assert.Equal(1, res.TotalResults);
r = new AggregationRequest("kitti")
- .Verbatim();
+ .Verbatim();
res = ft.Aggregate(index, r);
Assert.Equal(0, res.TotalResults);
@@ -96,14 +111,14 @@ public async Task TestAggregationRequestVerbatimAsync(string endpointId)
Assert.Equal(1, res.TotalResults);
r = new AggregationRequest("kitti")
- .Verbatim();
+ .Verbatim();
res = await ft.AggregateAsync(index, r);
Assert.Equal(0, res.TotalResults);
}
[SkippableTheory]
- [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))]
+ [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))]
public void TestAggregationRequestTimeout(string endpointId)
{
IDatabase db = GetCleanDatabase(endpointId);
@@ -117,15 +132,15 @@ public void TestAggregationRequestTimeout(string endpointId)
AddDocument(db, new Document("data3").Set("name", "def").Set("count", 25));
AggregationRequest r = new AggregationRequest()
- .GroupBy("@name", Reducers.Sum("@count").As("sum"))
- .Timeout(5000);
+ .GroupBy("@name", Reducers.Sum("@count").As("sum"))
+ .Timeout(5000);
AggregationResult res = ft.Aggregate(index, r);
Assert.Equal(2, res.TotalResults);
}
[SkippableTheory]
- [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))]
+ [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))]
public async Task TestAggregationRequestTimeoutAsync(string endpointId)
{
IDatabase db = GetCleanDatabase(endpointId);
@@ -139,17 +154,18 @@ public async Task TestAggregationRequestTimeoutAsync(string endpointId)
AddDocument(db, new Document("data3").Set("name", "def").Set("count", 25));
AggregationRequest r = new AggregationRequest()
- .GroupBy("@name", Reducers.Sum("@count").As("sum"))
- .Timeout(5000);
+ .GroupBy("@name", Reducers.Sum("@count").As("sum"))
+ .Timeout(5000);
AggregationResult res = await ft.AggregateAsync(index, r);
Assert.Equal(2, res.TotalResults);
}
[SkippableTheory]
- [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))]
+ [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))]
public void TestAggregations(string endpointId)
{
+ SkipClusterPre8(endpointId);
IDatabase db = GetCleanDatabase(endpointId);
var ft = db.FT();
Schema sc = new();
@@ -165,7 +181,7 @@ public void TestAggregations(string endpointId)
AggregationRequest r = new AggregationRequest()
.GroupBy("@name", Reducers.Sum("@count").As("sum"))
- .SortBy(10, SortedField.Desc("@sum"));
+ .SortBy(10, SortedField.Desc("@sum"));
// actual search
var res = ft.Aggregate(index, r);
@@ -186,9 +202,10 @@ public void TestAggregations(string endpointId)
}
[SkippableTheory]
- [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))]
+ [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))]
public async Task TestAggregationsAsync(string endpointId)
{
+ SkipClusterPre8(endpointId);
IDatabase db = GetCleanDatabase(endpointId);
var ft = db.FT();
Schema sc = new();
@@ -204,7 +221,7 @@ public async Task TestAggregationsAsync(string endpointId)
AggregationRequest r = new AggregationRequest()
.GroupBy("@name", Reducers.Sum("@count").As("sum"))
- .SortBy(10, SortedField.Desc("@sum"));
+ .SortBy(10, SortedField.Desc("@sum"));
// actual search
var res = await ft.AggregateAsync(index, r);
@@ -226,65 +243,75 @@ public async Task TestAggregationsAsync(string endpointId)
[SkippableTheory]
- [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))]
+ [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))]
public void TestAggregationsLoad(string endpointId)
{
+ SkipClusterPre8(endpointId);
IDatabase db = GetCleanDatabase(endpointId);
var ft = db.FT();
var sc = new Schema().AddTextField("t1").AddTextField("t2");
ft.Create("idx", new(), sc);
AddDocument(db, new Document("doc1").Set("t1", "hello").Set("t2", "world"));
+ AssertDatabaseSize(db, 1);
// load t1
var req = new AggregationRequest("*").Load(new FieldName("t1"));
var res = ft.Aggregate("idx", req);
+ Assert.NotNull(res[0]?["t1"]);
Assert.Equal("hello", res[0]!["t1"].ToString());
// load t2
req = new AggregationRequest("*").Load(new FieldName("t2"));
res = ft.Aggregate("idx", req);
+ Assert.NotNull(res[0]?["t2"]);
Assert.Equal("world", res[0]!["t2"]);
// load all
req = new AggregationRequest("*").LoadAll();
res = ft.Aggregate("idx", req);
+ Assert.NotNull(res[0]?["t1"]);
Assert.Equal("hello", res[0]!["t1"].ToString());
+ Assert.NotNull(res[0]?["t2"]);
Assert.Equal("world", res[0]!["t2"]);
}
[SkippableTheory]
- [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))]
+ [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))]
public async Task TestAggregationsLoadAsync(string endpointId)
{
+ SkipClusterPre8(endpointId);
IDatabase db = GetCleanDatabase(endpointId);
var ft = db.FT();
var sc = new Schema().AddTextField("t1").AddTextField("t2");
await ft.CreateAsync("idx", new(), sc);
AddDocument(db, new Document("doc1").Set("t1", "hello").Set("t2", "world"));
+ await AssertDatabaseSizeAsync(db, 1);
// load t1
var req = new AggregationRequest("*").Load(new FieldName("t1"));
var res = await ft.AggregateAsync("idx", req);
+ Assert.NotNull(res[0]?["t1"]);
Assert.Equal("hello", res[0]!["t1"].ToString());
// load t2
req = new AggregationRequest("*").Load(new FieldName("t2"));
res = await ft.AggregateAsync("idx", req);
+ Assert.NotNull(res[0]?["t2"]);
Assert.Equal("world", res[0]!["t2"]);
// load all
req = new AggregationRequest("*").LoadAll();
res = await ft.AggregateAsync("idx", req);
+ Assert.NotNull(res[0]?["t1"]);
Assert.Equal("hello", res[0]!["t1"].ToString());
+ Assert.NotNull(res[0]?["t2"]);
Assert.Equal("world", res[0]!["t2"]);
}
-
-
[SkippableTheory]
- [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))]
+ [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))]
public void TestAggregationRequestParamsDialect(string endpointId)
{
IDatabase db = GetCleanDatabase(endpointId);
@@ -302,9 +329,9 @@ public void TestAggregationRequestParamsDialect(string endpointId)
parameters.Add("count", "10");
AggregationRequest r = new AggregationRequest("$name")
- .GroupBy("@name", Reducers.Sum("@count").As("sum"))
- .Params(parameters)
- .Dialect(2); // From documentation - To use PARAMS, DIALECT must be set to 2
+ .GroupBy("@name", Reducers.Sum("@count").As("sum"))
+ .Params(parameters)
+ .Dialect(2); // From documentation - To use PARAMS, DIALECT must be set to 2
AggregationResult res = ft.Aggregate(index, r);
Assert.Equal(1, res.TotalResults);
@@ -315,7 +342,7 @@ public void TestAggregationRequestParamsDialect(string endpointId)
}
[SkippableTheory]
- [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))]
+ [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))]
public async Task TestAggregationRequestParamsDialectAsync(string endpointId)
{
IDatabase db = GetCleanDatabase(endpointId);
@@ -334,9 +361,9 @@ public async Task TestAggregationRequestParamsDialectAsync(string endpointId)
AggregationRequest r = new AggregationRequest("$name")
- .GroupBy("@name", Reducers.Sum("@count").As("sum"))
- .Params(parameters)
- .Dialect(2); // From documentation - To use PARAMS, DIALECT must be set to 2
+ .GroupBy("@name", Reducers.Sum("@count").As("sum"))
+ .Params(parameters)
+ .Dialect(2); // From documentation - To use PARAMS, DIALECT must be set to 2
AggregationResult res = await ft.AggregateAsync(index, r);
Assert.Equal(1, res.TotalResults);
@@ -347,7 +374,7 @@ public async Task TestAggregationRequestParamsDialectAsync(string endpointId)
}
[SkippableTheory]
- [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))]
+ [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))]
public void TestAggregationRequestParamsWithDefaultDialect(string endpointId)
{
IDatabase db = GetCleanDatabase(endpointId);
@@ -365,9 +392,9 @@ public void TestAggregationRequestParamsWithDefaultDialect(string endpointId)
parameters.Add("count", "10");
AggregationRequest r = new AggregationRequest("$name")
- .GroupBy("@name", Reducers.Sum("@count").As("sum"))
- .Params(parameters); // From documentation - To use PARAMS, DIALECT must be set to 2
- // which is the default as we set in the constructor (FT(2))
+ .GroupBy("@name", Reducers.Sum("@count").As("sum"))
+ .Params(parameters); // From documentation - To use PARAMS, DIALECT must be set to 2
+ // which is the default as we set in the constructor (FT(2))
AggregationResult res = ft.Aggregate(index, r);
Assert.Equal(1, res.TotalResults);
@@ -378,7 +405,7 @@ public void TestAggregationRequestParamsWithDefaultDialect(string endpointId)
}
[SkippableTheory]
- [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))]
+ [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))]
public async Task TestAggregationRequestParamsWithDefaultDialectAsync(string endpointId)
{
IDatabase db = GetCleanDatabase(endpointId);
@@ -396,9 +423,9 @@ public async Task TestAggregationRequestParamsWithDefaultDialectAsync(string end
parameters.Add("count", "10");
AggregationRequest r = new AggregationRequest("$name")
- .GroupBy("@name", Reducers.Sum("@count").As("sum"))
- .Params(parameters); // From documentation - To use PARAMS, DIALECT must be set to 2
- // which is the default as we set in the constructor (FT(2))
+ .GroupBy("@name", Reducers.Sum("@count").As("sum"))
+ .Params(parameters); // From documentation - To use PARAMS, DIALECT must be set to 2
+ // which is the default as we set in the constructor (FT(2))
AggregationResult res = await ft.AggregateAsync(index, r);
Assert.Equal(1, res.TotalResults);
@@ -417,7 +444,7 @@ public void TestDefaultDialectError()
}
[SkippableTheory]
- [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))]
+ [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))]
public void TestAlias(string endpointId)
{
IDatabase db = GetCleanDatabase(endpointId);
@@ -430,7 +457,16 @@ public void TestAlias(string endpointId)
doc.Add("field1", "value");
AddDocument(db, "doc1", doc);
- Assert.True(ft.AliasAdd("ALIAS1", index));
+ try
+ {
+ Assert.True(ft.AliasAdd("ALIAS1", index));
+ }
+ catch (RedisServerException rse)
+ {
+ Skip.If(rse.Message.StartsWith("CROSSSLOT"), "legacy failure");
+ throw;
+ }
+
SearchResult res1 = ft.Search("ALIAS1", new Query("*").ReturnFields("field1"));
Assert.Equal(1, res1.TotalResults);
Assert.Equal("value", res1.Documents[0]["field1"]);
@@ -446,7 +482,7 @@ public void TestAlias(string endpointId)
}
[SkippableTheory]
- [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))]
+ [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))]
public async Task TestAliasAsync(string endpointId)
{
IDatabase db = GetCleanDatabase(endpointId);
@@ -459,7 +495,16 @@ public async Task TestAliasAsync(string endpointId)
doc.Add("field1", "value");
AddDocument(db, "doc1", doc);
- Assert.True(await ft.AliasAddAsync("ALIAS1", index));
+ try
+ {
+ Assert.True(await ft.AliasAddAsync("ALIAS1", index));
+ }
+ catch (RedisServerException rse)
+ {
+ Skip.If(rse.Message.StartsWith("CROSSSLOT"), "legacy failure");
+ throw;
+ }
+
SearchResult res1 = ft.Search("ALIAS1", new Query("*").ReturnFields("field1"));
Assert.Equal(1, res1.TotalResults);
Assert.Equal("value", res1.Documents[0]["field1"]);
@@ -475,9 +520,10 @@ public async Task TestAliasAsync(string endpointId)
}
[SkippableTheory]
- [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))]
+ [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))]
public void TestApplyAndFilterAggregations(string endpointId)
{
+ SkipClusterPre8(endpointId);
IDatabase db = GetCleanDatabase(endpointId);
var ft = db.FT();
Schema sc = new();
@@ -497,29 +543,49 @@ public void TestApplyAndFilterAggregations(string endpointId)
AddDocument(db, new Document("data4").Set("name", "abc").Set("subj1", 30).Set("subj2", 20));
AddDocument(db, new Document("data5").Set("name", "def").Set("subj1", 65).Set("subj2", 45));
AddDocument(db, new Document("data6").Set("name", "ghi").Set("subj1", 70).Set("subj2", 70));
+ AssertDatabaseSize(db, 6);
- AggregationRequest r = new AggregationRequest().Apply("(@subj1+@subj2)/2", "attemptavg")
- .GroupBy("@name", Reducers.Avg("@attemptavg").As("avgscore"))
- .Filter("@avgscore>=50")
- .SortBy(10, SortedField.Asc("@name"));
-
- // actual search
- AggregationResult res = ft.Aggregate(index, r);
- Assert.Equal(2, res.TotalResults);
+ int maxAttempts = endpointId == EndpointsFixture.Env.Cluster ? 10 : 3;
+ for (int attempt = 1; attempt <= maxAttempts; attempt++)
+ {
+ AggregationRequest r = new AggregationRequest().Apply("(@subj1+@subj2)/2", "attemptavg")
+ .GroupBy("@name", Reducers.Avg("@attemptavg").As("avgscore"))
+ .Filter("@avgscore>=50")
+ .SortBy(10, SortedField.Asc("@name"));
+
+ // abc: 20+70 => 45, 30+20 => 25, filtered out
+ // def: 60+40 => 50, 65+45 => 55, avg 52.5
+ // ghi: 50+80 => 65, 70+70 => 70, avg 67.5
+
+ // actual search
+ AggregationResult res = ft.Aggregate(index, r);
+ Assert.Equal(2, res.TotalResults);
+
+ Row r1 = res.GetRow(0);
+ Row r2 = res.GetRow(1);
+ Log($"Attempt {attempt} of {maxAttempts}: avgscore {r2.GetDouble("avgscore")}");
+ if (attempt != maxAttempts && !IsNear(r2.GetDouble("avgscore"), 67.5))
+ {
+ Thread.Sleep(400); // allow extra cluster replication time
+ continue;
+ }
- Row r1 = res.GetRow(0);
- Assert.Equal("def", r1.GetString("name"));
- Assert.Equal(52.5, r1.GetDouble("avgscore"), 0);
+ Assert.Equal("def", r1.GetString("name"));
+ Assert.Equal(52.5, r1.GetDouble("avgscore"), 0);
- Row r2 = res.GetRow(1);
- Assert.Equal("ghi", r2.GetString("name"));
- Assert.Equal(67.5, r2.GetDouble("avgscore"), 0);
+ Assert.Equal("ghi", r2.GetString("name"));
+ Assert.Equal(67.5, r2.GetDouble("avgscore"), 0);
+ break; // success!
+ }
}
+ private static bool IsNear(double a, double b, double epsilon = 0.1) => Math.Abs(a - b) < epsilon;
+
[SkippableTheory]
- [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))]
+ [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))]
public void TestCreate(string endpointId)
{
+ SkipClusterPre8(endpointId);
IDatabase db = GetCleanDatabase(endpointId);
var ft = db.FT();
var schema = new Schema().AddTextField("first").AddTextField("last").AddNumericField("age");
@@ -534,6 +600,7 @@ public void TestCreate(string endpointId)
db.HashSet("pupil:4444", [new("first", "Pat"), new("last", "Shu"), new("age", "21")]);
db.HashSet("student:5555", [new("first", "Joen"), new("last", "Ko"), new("age", "20")]);
db.HashSet("teacher:6666", [new("first", "Pat"), new("last", "Rod"), new("age", "20")]);
+ AssertDatabaseSize(db, 7);
var noFilters = ft.Search(index, new());
Assert.Equal(4, noFilters.TotalResults);
@@ -549,9 +616,10 @@ public void TestCreate(string endpointId)
}
[SkippableTheory]
- [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))]
+ [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))]
public async Task TestCreateAsync(string endpointId)
{
+ SkipClusterPre8(endpointId);
IDatabase db = GetCleanDatabase(endpointId);
var ft = db.FT();
var schema = new Schema().AddTextField("first").AddTextField("last").AddNumericField("age");
@@ -564,6 +632,8 @@ public async Task TestCreateAsync(string endpointId)
db.HashSet("pupil:4444", [new("first", "Pat"), new("last", "Shu"), new("age", "21")]);
db.HashSet("student:5555", [new("first", "Joen"), new("last", "Ko"), new("age", "20")]);
db.HashSet("teacher:6666", [new("first", "Pat"), new("last", "Rod"), new("age", "20")]);
+ await AssertDatabaseSizeAsync(db, 7);
+
var noFilters = ft.Search(index, new());
Assert.Equal(4, noFilters.TotalResults);
var res1 = ft.Search(index, new("@first:Jo*"));
@@ -575,9 +645,10 @@ public async Task TestCreateAsync(string endpointId)
}
[SkippableTheory]
- [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))]
+ [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))]
public void CreateNoParams(string endpointId)
{
+ SkipClusterPre8(endpointId);
IDatabase db = GetCleanDatabase(endpointId);
var ft = db.FT();
@@ -588,6 +659,7 @@ public void CreateNoParams(string endpointId)
db.HashSet("student:3333", [new("first", "El"), new("last", "Mark"), new("age", 17)]);
db.HashSet("pupil:4444", [new("first", "Pat"), new("last", "Shu"), new("age", 21)]);
db.HashSet("student:5555", [new("first", "Joen"), new("last", "Ko"), new("age", 20)]);
+ AssertDatabaseSize(db, 4);
SearchResult noFilters = ft.Search(index, new());
Assert.Equal(4, noFilters.TotalResults);
@@ -603,9 +675,10 @@ public void CreateNoParams(string endpointId)
}
[SkippableTheory]
- [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))]
+ [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))]
public async Task CreateNoParamsAsync(string endpointId)
{
+ SkipClusterPre8(endpointId);
IDatabase db = GetCleanDatabase(endpointId);
var ft = db.FT();
@@ -616,6 +689,7 @@ public async Task CreateNoParamsAsync(string endpointId)
db.HashSet("student:3333", [new("first", "El"), new("last", "Mark"), new("age", 17)]);
db.HashSet("pupil:4444", [new("first", "Pat"), new("last", "Shu"), new("age", 21)]);
db.HashSet("student:5555", [new("first", "Joen"), new("last", "Ko"), new("age", 20)]);
+ await AssertDatabaseSizeAsync(db, 4);
SearchResult noFilters = ft.Search(index, new());
Assert.Equal(4, noFilters.TotalResults);
@@ -631,9 +705,10 @@ public async Task CreateNoParamsAsync(string endpointId)
}
[SkippableTheory]
- [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))]
+ [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))]
public void CreateWithFieldNames(string endpointId)
{
+ SkipClusterPre8(endpointId);
IDatabase db = GetCleanDatabase(endpointId);
var ft = db.FT();
Schema sc = new Schema().AddField(new TextField(FieldName.Of("first").As("given")))
@@ -648,6 +723,7 @@ public void CreateWithFieldNames(string endpointId)
db.HashSet("pupil:4444", [new("first", "Pat"), new("last", "Shu"), new("age", "21")]);
db.HashSet("student:5555", [new("first", "Joen"), new("last", "Ko"), new("age", "20")]);
db.HashSet("teacher:6666", [new("first", "Pat"), new("last", "Rod"), new("age", "20")]);
+ AssertDatabaseSize(db, 7);
SearchResult noFilters = ft.Search(index, new());
Assert.Equal(5, noFilters.TotalResults);
@@ -660,7 +736,7 @@ public void CreateWithFieldNames(string endpointId)
}
[SkipIfRedisTheory(Comparison.LessThan, "7.9.0")]
- [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))]
+ [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))]
public void FailWhenAttributeNotExist(string endpointId)
{
IDatabase db = GetCleanDatabase(endpointId);
@@ -673,9 +749,10 @@ public void FailWhenAttributeNotExist(string endpointId)
}
[SkippableTheory]
- [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))]
+ [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))]
public async Task CreateWithFieldNamesAsync(string endpointId)
{
+ SkipClusterPre8(endpointId);
IDatabase db = GetCleanDatabase(endpointId);
var ft = db.FT();
Schema sc = new Schema().AddField(new TextField(FieldName.Of("first").As("given")))
@@ -702,7 +779,7 @@ public async Task CreateWithFieldNamesAsync(string endpointId)
}
[SkipIfRedisTheory(Comparison.LessThan, "7.9.0")]
- [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))]
+ [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))]
public async Task FailWhenAttributeNotExistAsync(string endpointId)
{
IDatabase db = GetCleanDatabase(endpointId);
@@ -711,13 +788,15 @@ public async Task FailWhenAttributeNotExistAsync(string endpointId)
.AddField(new TextField(FieldName.Of("last")));
Assert.True(await ft.CreateAsync(index, FTCreateParams.CreateParams().Prefix("student:", "pupil:"), sc));
- RedisServerException exc = await Assert.ThrowsAsync<RedisServerException>(async () => await ft.SearchAsync(index, new("@first:Jo*")));
+ RedisServerException exc =
+ await Assert.ThrowsAsync<RedisServerException>(async () => await ft.SearchAsync(index, new("@first:Jo*")));
}
[SkipIfRedisTheory(Is.Enterprise)]
- [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))]
+ [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))]
public void AlterAdd(string endpointId)
{
+ SkipClusterPre8(endpointId);
IDatabase db = GetCleanDatabase(endpointId);
var ft = db.FT();
Schema sc = new Schema().AddTextField("title", 1.0);
@@ -729,25 +808,45 @@ public void AlterAdd(string endpointId)
var fields = new HashEntry("title", "hello world");
//fields.("title", "hello world");
+ AssertDatabaseSize(db, 0);
for (int i = 0; i < 100; i++)
{
db.HashSet($"doc{i}", fields.Name, fields.Value);
}
+
+ AssertDatabaseSize(db, 100);
+ var info = ft.Info(index);
+ Assert.Equal(index, info.IndexName);
+ if (endpointId == EndpointsFixture.Env.Cluster)
+ {
+ Assert.True(info.NumDocs is 100 or 200, $"NumDocs: {info.NumDocs}");
+ }
+ else
+ {
+ Assert.Equal(100, info.NumDocs);
+ }
+
SearchResult res = ft.Search(index, new("hello world"));
Assert.Equal(100, res.TotalResults);
Assert.True(ft.Alter(index, new Schema().AddTagField("tags").AddTextField("name", weight: 0.5)));
for (int i = 0; i < 100; i++)
{
- var fields2 = new HashEntry[] { new("name", "name" + i),
- new("tags", $"tagA,tagB,tag{i}") };
+ var fields2 = new HashEntry[]
+ {
+ new("name", "name" + i),
+ new("tags", $"tagA,tagB,tag{i}")
+ };
// assertTrue(client.updateDocument(string.format("doc%d", i), 1.0, fields2));
db.HashSet($"doc{i}", fields2);
}
+
SearchResult res2 = ft.Search(index, new("@tags:{tagA}"));
Assert.Equal(100, res2.TotalResults);
- var info = ft.Info(index);
+ AssertDatabaseSize(db, 100);
+
+ info = ft.Info(index);
Assert.Equal(index, info.IndexName);
Assert.Empty(info.IndexOption);
// Assert.Equal(,info.IndexDefinition);
@@ -755,33 +854,43 @@ public void AlterAdd(string endpointId)
Assert.Equal("TAG", info.Attributes[1]["type"].ToString());
Assert.Equal("name", info.Attributes[2]["attribute"].ToString());
- Assert.Equal(100, info.NumDocs);
- Assert.NotNull(info.MaxDocId);
- Assert.Equal(102, info.NumTerms);
- Assert.True(info.NumRecords >= 200);
- Assert.True(info.InvertedSzMebibytes < 1); // TODO: check this line and all the <1 lines
- Assert.Equal(0, info.VectorIndexSzMebibytes);
- Assert.Equal(208, info.TotalInvertedIndexBlocks);
- Assert.True(info.OffsetVectorsSzMebibytes < 1);
- Assert.True(info.DocTableSizeMebibytes < 1);
- Assert.Equal(0, info.SortableValueSizeMebibytes);
- Assert.True(info.KeyTableSizeMebibytes < 1);
- Assert.Equal(8, (int)info.RecordsPerDocAvg);
- Assert.True(info.BytesPerRecordAvg > 5);
- Assert.True(info.OffsetsPerTermAvg > 0.8);
- Assert.Equal(8, info.OffsetBitsPerRecordAvg);
- Assert.Equal(0, info.HashIndexingFailures);
- Assert.Equal(0, info.Indexing);
- Assert.Equal(1, info.PercentIndexed);
- Assert.Equal(4, info.NumberOfUses);
- Assert.Equal(7, info.GcStats.Count);
- Assert.Equal(4, info.CursorStats.Count);
+ if (endpointId == EndpointsFixture.Env.Cluster)
+ {
+ Assert.True(info.NumDocs is 100 or 200, $"NumDocs: {info.NumDocs}");
+ }
+ else
+ {
+ Assert.Equal(100, info.NumDocs);
+
+ // these numbers don't make sense when considering a shard
+ Assert.NotNull(info.MaxDocId);
+ Assert.Equal(102, info.NumTerms);
+ Assert.True(info.NumRecords >= 200);
+ Assert.True(info.InvertedSzMebibytes < 1); // TODO: check this line and all the <1 lines
+ Assert.Equal(0, info.VectorIndexSzMebibytes);
+ Assert.Equal(208, info.TotalInvertedIndexBlocks);
+ Assert.True(info.OffsetVectorsSzMebibytes < 1);
+ Assert.True(info.DocTableSizeMebibytes < 1);
+ Assert.Equal(0, info.SortableValueSizeMebibytes);
+ Assert.True(info.KeyTableSizeMebibytes < 1);
+ Assert.Equal(8, (int)info.RecordsPerDocAvg);
+ Assert.True(info.BytesPerRecordAvg > 5);
+ Assert.True(info.OffsetsPerTermAvg > 0.8);
+ Assert.Equal(8, info.OffsetBitsPerRecordAvg);
+ Assert.Equal(0, info.HashIndexingFailures);
+ Assert.Equal(0, info.Indexing);
+ Assert.Equal(1, info.PercentIndexed);
+ Assert.Equal(5, info.NumberOfUses);
+ Assert.Equal(7, info.GcStats.Count);
+ Assert.Equal(4, info.CursorStats.Count);
+ }
}
[SkipIfRedisTheory(Is.Enterprise)]
- [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))]
+ [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))]
public async Task AlterAddAsync(string endpointId)
{
+ SkipClusterPre8(endpointId);
IDatabase db = GetCleanDatabase(endpointId);
var ft = db.FT();
Schema sc = new Schema().AddTextField("title", 1.0);
@@ -797,52 +906,79 @@ public async Task AlterAddAsync(string endpointId)
{
db.HashSet($"doc{i}", fields.Name, fields.Value);
}
+
SearchResult res = ft.Search(index, new("hello world"));
Assert.Equal(100, res.TotalResults);
+ var info = ft.Info(index);
+ Assert.Equal(index, info.IndexName);
+ if (endpointId == EndpointsFixture.Env.Cluster)
+ {
+ Assert.True(info.NumDocs is 100 or 200, $"NumDocs: {info.NumDocs}");
+ }
+ else
+ {
+ Assert.Equal(100, info.NumDocs);
+ }
Assert.True(await ft.AlterAsync(index, new Schema().AddTagField("tags").AddTextField("name", weight: 0.5)));
for (int i = 0; i < 100; i++)
{
- var fields2 = new HashEntry[] { new("name", "name" + i),
- new("tags", $"tagA,tagB,tag{i}") };
+ var fields2 = new HashEntry[]
+ {
+ new("name", "name" + i),
+ new("tags", $"tagA,tagB,tag{i}")
+ };
// assertTrue(client.updateDocument(string.format("doc%d", i), 1.0, fields2));
db.HashSet($"doc{i}", fields2);
}
+
SearchResult res2 = ft.Search(index, new("@tags:{tagA}"));
Assert.Equal(100, res2.TotalResults);
- var info = await ft.InfoAsync(index);
+ await AssertDatabaseSizeAsync(db, 100);
+
+ info = await ft.InfoAsync(index);
Assert.Equal(index, info.IndexName);
Assert.Equal("title", info.Attributes[0]["identifier"].ToString());
Assert.Equal("TAG", info.Attributes[1]["type"].ToString());
Assert.Equal("name", info.Attributes[2]["attribute"].ToString());
- Assert.Equal(100, info.NumDocs);
- Assert.Equal("300", info.MaxDocId);
- Assert.Equal(102, info.NumTerms);
- Assert.True(info.NumRecords >= 200);
- Assert.True(info.InvertedSzMebibytes < 1); // TODO: check this line and all the <1 lines
- Assert.Equal(0, info.VectorIndexSzMebibytes);
- Assert.Equal(208, info.TotalInvertedIndexBlocks);
- Assert.True(info.OffsetVectorsSzMebibytes < 1);
- Assert.True(info.DocTableSizeMebibytes < 1);
- Assert.Equal(0, info.SortableValueSizeMebibytes);
- Assert.True(info.KeyTableSizeMebibytes < 1);
- Assert.Equal(8, (int)info.RecordsPerDocAvg);
- Assert.True(info.BytesPerRecordAvg > 5);
- Assert.True(info.OffsetsPerTermAvg > 0.8);
- Assert.Equal(8, info.OffsetBitsPerRecordAvg);
- Assert.Equal(0, info.HashIndexingFailures);
- Assert.Equal(0, info.Indexing);
- Assert.Equal(1, info.PercentIndexed);
- Assert.Equal(4, info.NumberOfUses);
- Assert.Equal(7, info.GcStats.Count);
- Assert.Equal(4, info.CursorStats.Count);
+ if (endpointId == EndpointsFixture.Env.Cluster)
+ {
+ Assert.True(info.NumDocs is 100 or 200, $"NumDocs: {info.NumDocs}");
+ }
+ else
+ {
+ Assert.Equal(100, info.NumDocs);
+
+ // these numbers don't make sense when considering a shard
+ Assert.Equal("300", info.MaxDocId);
+ Assert.Equal(102, info.NumTerms);
+ Assert.True(info.NumRecords >= 200);
+ Assert.True(info.InvertedSzMebibytes < 1); // TODO: check this line and all the <1 lines
+ Assert.Equal(0, info.VectorIndexSzMebibytes);
+ Assert.Equal(208, info.TotalInvertedIndexBlocks);
+ Assert.True(info.OffsetVectorsSzMebibytes < 1);
+ Assert.True(info.DocTableSizeMebibytes < 1);
+ Assert.Equal(0, info.SortableValueSizeMebibytes);
+ Assert.True(info.KeyTableSizeMebibytes < 1);
+ Assert.Equal(8, (int)info.RecordsPerDocAvg);
+ Assert.True(info.BytesPerRecordAvg > 5);
+ Assert.True(info.OffsetsPerTermAvg > 0.8);
+ Assert.Equal(8, info.OffsetBitsPerRecordAvg);
+ Assert.Equal(0, info.HashIndexingFailures);
+ Assert.Equal(0, info.Indexing);
+ Assert.Equal(1, info.PercentIndexed);
+ Assert.Equal(5, info.NumberOfUses);
+ Assert.Equal(7, info.GcStats.Count);
+ Assert.Equal(4, info.CursorStats.Count);
+ }
}
[SkipIfRedisTheory(Is.Enterprise)]
- [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))]
+ [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))]
public void AlterAddSortable(string endpointId)
{
+ SkipClusterPre8(endpointId);
IDatabase db = GetCleanDatabase(endpointId);
var ft = db.FT();
Schema sc = new Schema().AddTextField("title", 1.0, sortable: true);
@@ -858,17 +994,22 @@ public void AlterAddSortable(string endpointId)
{
db.HashSet($"doc{i}", fields.Name, fields.Value);
}
+
SearchResult res = ft.Search(index, new("hello world"));
Assert.Equal(100, res.TotalResults);
Assert.True(ft.Alter(index, new Schema().AddTagField("tags").AddTextField("name", weight: 0.5)));
for (int i = 0; i < 100; i++)
{
- var fields2 = new HashEntry[] { new("name", "name" + i),
- new("tags", $"tagA,tagB,tag{i}") };
+ var fields2 = new HashEntry[]
+ {
+ new("name", "name" + i),
+ new("tags", $"tagA,tagB,tag{i}")
+ };
// assertTrue(client.updateDocument(string.format("doc%d", i), 1.0, fields2));
db.HashSet($"doc{i}", fields2);
}
+
SearchResult res2 = ft.Search(index, new("@tags:{tagA}"));
Assert.Equal(100, res2.TotalResults);
@@ -879,31 +1020,40 @@ public void AlterAddSortable(string endpointId)
Assert.Equal("title", info.Attributes[0]["identifier"].ToString());
Assert.Equal("TAG", info.Attributes[1]["type"].ToString());
Assert.Equal("name", info.Attributes[2]["attribute"].ToString());
- Assert.Equal(100, info.NumDocs);
- Assert.NotNull(info.MaxDocId);
- Assert.Equal(102, info.NumTerms);
- Assert.True(info.NumRecords >= 200);
- Assert.True(info.InvertedSzMebibytes < 1); // TODO: check this line and all the <1 lines
- Assert.Equal(0, info.VectorIndexSzMebibytes);
- Assert.Equal(208, info.TotalInvertedIndexBlocks);
- Assert.True(info.OffsetVectorsSzMebibytes < 1);
- Assert.True(info.DocTableSizeMebibytes < 1);
- Assert.Equal(0, info.SortableValueSizeMebibytes);
- Assert.True(info.KeyTableSizeMebibytes < 1);
- Assert.Equal(8, (int)info.RecordsPerDocAvg);
- Assert.True(info.BytesPerRecordAvg > 5);
- Assert.True(info.OffsetsPerTermAvg > 0.8);
- Assert.Equal(8, info.OffsetBitsPerRecordAvg);
- Assert.Equal(0, info.HashIndexingFailures);
- Assert.Equal(0, info.Indexing);
- Assert.Equal(1, info.PercentIndexed);
- Assert.Equal(4, info.NumberOfUses);
- Assert.Equal(7, info.GcStats.Count);
- Assert.Equal(4, info.CursorStats.Count);
+ if (endpointId == EndpointsFixture.Env.Cluster)
+ {
+ Assert.True(info.NumDocs is 100 or 200, $"NumDocs: {info.NumDocs}");
+ }
+ else
+ {
+ Assert.Equal(100, info.NumDocs);
+
+ // these numbers don't make sense when considering a shard
+ Assert.NotNull(info.MaxDocId);
+ Assert.Equal(102, info.NumTerms);
+ Assert.True(info.NumRecords >= 200);
+ Assert.True(info.InvertedSzMebibytes < 1); // TODO: check this line and all the <1 lines
+ Assert.Equal(0, info.VectorIndexSzMebibytes);
+ Assert.Equal(208, info.TotalInvertedIndexBlocks);
+ Assert.True(info.OffsetVectorsSzMebibytes < 1);
+ Assert.True(info.DocTableSizeMebibytes < 1);
+ Assert.Equal(0, info.SortableValueSizeMebibytes);
+ Assert.True(info.KeyTableSizeMebibytes < 1);
+ Assert.Equal(8, (int)info.RecordsPerDocAvg);
+ Assert.True(info.BytesPerRecordAvg > 5);
+ Assert.True(info.OffsetsPerTermAvg > 0.8);
+ Assert.Equal(8, info.OffsetBitsPerRecordAvg);
+ Assert.Equal(0, info.HashIndexingFailures);
+ Assert.Equal(0, info.Indexing);
+ Assert.Equal(1, info.PercentIndexed);
+ Assert.Equal(4, info.NumberOfUses);
+ Assert.Equal(7, info.GcStats.Count);
+ Assert.Equal(4, info.CursorStats.Count);
+ }
}
[SkipIfRedisTheory(Comparison.LessThan, "7.3.0")]
- [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))]
+ [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))]
public void InfoWithIndexEmptyAndIndexMissing(string endpointId)
{
IDatabase db = GetCleanDatabase(endpointId);
@@ -938,9 +1088,10 @@ public void InfoWithIndexEmptyAndIndexMissing(string endpointId)
}
[SkipIfRedisTheory(Is.Enterprise)]
- [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))]
+ [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))]
public async Task AlterAddSortableAsync(string endpointId)
{
+ SkipClusterPre8(endpointId);
IDatabase db = GetCleanDatabase(endpointId);
var ft = db.FT();
Schema sc = new Schema().AddTextField("title", 1.0, sortable: true);
@@ -956,17 +1107,22 @@ public async Task AlterAddSortableAsync(string endpointId)
{
db.HashSet($"doc{i}", fields.Name, fields.Value);
}
+
SearchResult res = ft.Search(index, new("hello world"));
Assert.Equal(100, res.TotalResults);
Assert.True(await ft.AlterAsync(index, new Schema().AddTagField("tags").AddTextField("name", weight: 0.5)));
for (int i = 0; i < 100; i++)
{
- var fields2 = new HashEntry[] { new("name", "name" + i),
- new("tags", $"tagA,tagB,tag{i}") };
+ var fields2 = new HashEntry[]
+ {
+ new("name", "name" + i),
+ new("tags", $"tagA,tagB,tag{i}")
+ };
// assertTrue(client.updateDocument(string.format("doc%d", i), 1.0, fields2));
db.HashSet($"doc{i}", fields2);
}
+
SearchResult res2 = ft.Search(index, new("@tags:{tagA}"));
Assert.Equal(100, res2.TotalResults);
@@ -975,27 +1131,36 @@ public async Task AlterAddSortableAsync(string endpointId)
Assert.Equal("title", info.Attributes[0]["identifier"].ToString());
Assert.Equal("TAG", info.Attributes[1]["type"].ToString());
Assert.Equal("name", info.Attributes[2]["attribute"].ToString());
- Assert.Equal(100, info.NumDocs);
- Assert.Equal("300", info.MaxDocId);
- Assert.Equal(102, info.NumTerms);
- Assert.True(info.NumRecords >= 200);
- Assert.True(info.InvertedSzMebibytes < 1); // TODO: check this line and all the <1 lines
- Assert.Equal(0, info.VectorIndexSzMebibytes);
- Assert.Equal(208, info.TotalInvertedIndexBlocks);
- Assert.True(info.OffsetVectorsSzMebibytes < 1);
- Assert.True(info.DocTableSizeMebibytes < 1);
- Assert.Equal(0, info.SortableValueSizeMebibytes);
- Assert.True(info.KeyTableSizeMebibytes < 1);
- Assert.Equal(8, (int)info.RecordsPerDocAvg);
- Assert.True(info.BytesPerRecordAvg > 5);
- Assert.True(info.OffsetsPerTermAvg > 0.8);
- Assert.Equal(8, info.OffsetBitsPerRecordAvg);
- Assert.Equal(0, info.HashIndexingFailures);
- Assert.Equal(0, info.Indexing);
- Assert.Equal(1, info.PercentIndexed);
- Assert.Equal(4, info.NumberOfUses);
- Assert.Equal(7, info.GcStats.Count);
- Assert.Equal(4, info.CursorStats.Count);
+ if (endpointId == EndpointsFixture.Env.Cluster)
+ {
+ Assert.True(info.NumDocs is 100 or 200, $"NumDocs: {info.NumDocs}");
+ }
+ else
+ {
+ Assert.Equal(100, info.NumDocs);
+
+ // these numbers don't make sense when considering a shard
+ Assert.Equal("300", info.MaxDocId);
+ Assert.Equal(102, info.NumTerms);
+ Assert.True(info.NumRecords >= 200);
+ Assert.True(info.InvertedSzMebibytes < 1); // TODO: check this line and all the <1 lines
+ Assert.Equal(0, info.VectorIndexSzMebibytes);
+ Assert.Equal(208, info.TotalInvertedIndexBlocks);
+ Assert.True(info.OffsetVectorsSzMebibytes < 1);
+ Assert.True(info.DocTableSizeMebibytes < 1);
+ Assert.Equal(0, info.SortableValueSizeMebibytes);
+ Assert.True(info.KeyTableSizeMebibytes < 1);
+ Assert.Equal(8, (int)info.RecordsPerDocAvg);
+ Assert.True(info.BytesPerRecordAvg > 5);
+ Assert.True(info.OffsetsPerTermAvg > 0.8);
+ Assert.Equal(8, info.OffsetBitsPerRecordAvg);
+ Assert.Equal(0, info.HashIndexingFailures);
+ Assert.Equal(0, info.Indexing);
+ Assert.Equal(1, info.PercentIndexed);
+ Assert.Equal(4, info.NumberOfUses);
+ Assert.Equal(7, info.GcStats.Count);
+ Assert.Equal(4, info.CursorStats.Count);
+ }
}
// TODO : fix with FT.CONFIG response change
@@ -1032,7 +1197,13 @@ public void configOnTimeout(string endpointId)
Assert.True(ft.ConfigSet("ON_TIMEOUT", "fail"));
Assert.Equal("fail", ft.ConfigGet("ON_TIMEOUT")["ON_TIMEOUT"]);
- try { ft.ConfigSet("ON_TIMEOUT", "null"); } catch (RedisServerException) { }
+ try
+ {
+ ft.ConfigSet("ON_TIMEOUT", "null");
+ }
+ catch (RedisServerException)
+ {
+ }
}
// TODO : fix with FT.CONFIG response change
@@ -1045,7 +1216,13 @@ public async Task configOnTimeoutAsync(string endpointId)
Assert.True(await ft.ConfigSetAsync("ON_TIMEOUT", "fail"));
Assert.Equal("fail", (await ft.ConfigGetAsync("ON_TIMEOUT"))["ON_TIMEOUT"]);
- try { ft.ConfigSet("ON_TIMEOUT", "null"); } catch (RedisServerException) { }
+ try
+ {
+ ft.ConfigSet("ON_TIMEOUT", "null");
+ }
+ catch (RedisServerException)
+ {
+ }
}
// TODO : fix with FT.CONFIG response change
@@ -1095,9 +1272,10 @@ public async Task TestDialectConfigAsync(string endpointId)
}
[SkippableTheory]
- [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))]
+ [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))]
public async Task TestCursor(string endpointId)
{
+ SkipClusterPre8(endpointId);
IDatabase db = GetCleanDatabase(endpointId);
var ft = db.FT();
Schema sc = new();
@@ -1107,6 +1285,7 @@ public async Task TestCursor(string endpointId)
AddDocument(db, new Document("data1").Set("name", "abc").Set("count", 10));
AddDocument(db, new Document("data2").Set("name", "def").Set("count", 5));
AddDocument(db, new Document("data3").Set("name", "def").Set("count", 25));
+ AssertDatabaseSize(db, 3);
AggregationRequest r = new AggregationRequest()
.GroupBy("@name", Reducers.Sum("@count").As("sum"))
@@ -1125,21 +1304,17 @@ public async Task TestCursor(string endpointId)
Assert.Equal(0.0, row.Value.GetDouble("nosuchcol"));
Assert.Null(row.Value.GetString("nosuchcol"));
- res = ft.CursorRead(index, res.CursorId, 1);
+ res = ft.CursorRead(res, 1);
Row? row2 = res.GetRow(0);
Assert.NotNull(row2);
Assert.Equal("abc", row2.Value.GetString("name"));
Assert.Equal(10, row2.Value.GetLong("sum"));
- Assert.True(ft.CursorDel(index, res.CursorId));
+ Assert.True(ft.CursorDel(res));
- try
- {
- ft.CursorRead(index, res.CursorId, 1);
- Assert.True(false);
- }
- catch (RedisException) { }
+ var ex = Assert.Throws(() => ft.CursorRead(res, 1));
+ Assert.Contains("Cursor not found", ex.Message, StringComparison.OrdinalIgnoreCase);
_ = new AggregationRequest()
.GroupBy("@name", Reducers.Sum("@count").As("sum"))
@@ -1148,18 +1323,54 @@ public async Task TestCursor(string endpointId)
await Task.Delay(1000).ConfigureAwait(false);
- try
- {
- ft.CursorRead(index, res.CursorId, 1);
- Assert.True(false);
- }
- catch (RedisException) { }
+ ex = Assert.Throws(() => ft.CursorRead(res, 1));
+ Assert.Contains("Cursor not found", ex.Message, StringComparison.OrdinalIgnoreCase);
}
[SkippableTheory]
- [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))]
+ [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))]
+ public void TestCursorEnumerable(string endpointId)
+ {
+ SkipClusterPre8(endpointId);
+ IDatabase db = GetCleanDatabase(endpointId);
+ var ft = db.FT();
+ Schema sc = new();
+ sc.AddTextField("name", 1.0, sortable: true);
+ sc.AddNumericField("count", sortable: true);
+ ft.Create(index, FTCreateParams.CreateParams(), sc);
+ AddDocument(db, new Document("data1").Set("name", "abc").Set("count", 10));
+ AddDocument(db, new Document("data2").Set("name", "def").Set("count", 5));
+ AddDocument(db, new Document("data3").Set("name", "def").Set("count", 25));
+ AssertDatabaseSize(db, 3);
+
+ AggregationRequest r = new AggregationRequest()
+ .GroupBy("@name", Reducers.Sum("@count").As("sum"))
+ .SortBy(10, SortedField.Desc("@sum"))
+ .Cursor(1, 3000);
+
+ // actual search
+ using var iter = ft.AggregateEnumerable(index, r).GetEnumerator();
+ Assert.True(iter.MoveNext());
+ var row = iter.Current;
+ Assert.Equal("def", row.GetString("name"));
+ Assert.Equal(30, row.GetLong("sum"));
+ Assert.Equal(30.0, row.GetDouble("sum"));
+
+ Assert.Equal(0L, row.GetLong("nosuchcol"));
+ Assert.Equal(0.0, row.GetDouble("nosuchcol"));
+ Assert.Null(row.GetString("nosuchcol"));
+
+ Assert.True(iter.MoveNext());
+ row = iter.Current;
+ Assert.Equal("abc", row.GetString("name"));
+ Assert.Equal(10, row.GetLong("sum"));
+ }
+
+ [SkippableTheory]
+ [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))]
public async Task TestCursorAsync(string endpointId)
{
+ SkipClusterPre8(endpointId);
IDatabase db = GetCleanDatabase(endpointId);
var ft = db.FT();
Schema sc = new();
@@ -1169,6 +1380,7 @@ public async Task TestCursorAsync(string endpointId)
AddDocument(db, new Document("data1").Set("name", "abc").Set("count", 10));
AddDocument(db, new Document("data2").Set("name", "def").Set("count", 5));
AddDocument(db, new Document("data3").Set("name", "def").Set("count", 25));
+ await AssertDatabaseSizeAsync(db, 3);
AggregationRequest r = new AggregationRequest()
.GroupBy("@name", Reducers.Sum("@count").As("sum"))
@@ -1176,7 +1388,7 @@ public async Task TestCursorAsync(string endpointId)
.Cursor(1, 3000);
// actual search
- AggregationResult res = ft.Aggregate(index, r);
+ AggregationResult res = await ft.AggregateAsync(index, r);
Row? row = res.GetRow(0);
Assert.NotNull(row);
Assert.Equal("def", row.Value.GetString("name"));
@@ -1187,21 +1399,17 @@ public async Task TestCursorAsync(string endpointId)
Assert.Equal(0.0, row.Value.GetDouble("nosuchcol"));
Assert.Null(row.Value.GetString("nosuchcol"));
- res = await ft.CursorReadAsync(index, res.CursorId, 1);
+ res = await ft.CursorReadAsync(res, 1);
Row? row2 = res.GetRow(0);
Assert.NotNull(row2);
Assert.Equal("abc", row2.Value.GetString("name"));
Assert.Equal(10, row2.Value.GetLong("sum"));
- Assert.True(await ft.CursorDelAsync(index, res.CursorId));
+ Assert.True(await ft.CursorDelAsync(res));
- try
- {
- await ft.CursorReadAsync(index, res.CursorId, 1);
- Assert.True(false);
- }
- catch (RedisException) { }
+ var ex = await Assert.ThrowsAsync(async () => await ft.CursorReadAsync(res, 1));
+ Assert.Contains("Cursor not found", ex.Message, StringComparison.OrdinalIgnoreCase);
_ = new AggregationRequest()
.GroupBy("@name", Reducers.Sum("@count").As("sum"))
@@ -1210,12 +1418,47 @@ public async Task TestCursorAsync(string endpointId)
await Task.Delay(1000).ConfigureAwait(false);
- try
- {
- await ft.CursorReadAsync(index, res.CursorId, 1);
- Assert.True(false);
- }
- catch (RedisException) { }
+ ex = await Assert.ThrowsAsync(async () => await ft.CursorReadAsync(res, 1));
+ Assert.Contains("Cursor not found", ex.Message, StringComparison.OrdinalIgnoreCase);
+ }
+
+ [SkippableTheory]
+ [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))]
+ public async Task TestCursorEnumerableAsync(string endpointId)
+ {
+ SkipClusterPre8(endpointId);
+ IDatabase db = GetCleanDatabase(endpointId);
+ var ft = db.FT();
+ Schema sc = new();
+ sc.AddTextField("name", 1.0, sortable: true);
+ sc.AddNumericField("count", sortable: true);
+ ft.Create(index, FTCreateParams.CreateParams(), sc);
+ AddDocument(db, new Document("data1").Set("name", "abc").Set("count", 10));
+ AddDocument(db, new Document("data2").Set("name", "def").Set("count", 5));
+ AddDocument(db, new Document("data3").Set("name", "def").Set("count", 25));
+ await AssertDatabaseSizeAsync(db, 3);
+
+ AggregationRequest r = new AggregationRequest()
+ .GroupBy("@name", Reducers.Sum("@count").As("sum"))
+ .SortBy(10, SortedField.Desc("@sum"))
+ .Cursor(1, 3000);
+
+ // actual search
+ await using var iter = ft.AggregateAsyncEnumerable(index, r).GetAsyncEnumerator();
+ Assert.True(await iter.MoveNextAsync());
+ var row = iter.Current;
+ Assert.Equal("def", row.GetString("name"));
+ Assert.Equal(30, row.GetLong("sum"));
+ Assert.Equal(30.0, row.GetDouble("sum"));
+
+ Assert.Equal(0L, row.GetLong("nosuchcol"));
+ Assert.Equal(0.0, row.GetDouble("nosuchcol"));
+ Assert.Null(row.GetString("nosuchcol"));
+
+ Assert.True(await iter.MoveNextAsync());
+ row = iter.Current;
+ Assert.Equal("abc", row.GetString("name"));
+ Assert.Equal(10, row.GetLong("sum"));
}
[SkipIfRedisTheory(Is.Enterprise)]
@@ -1227,30 +1470,34 @@ public void TestAggregationGroupBy(string endpointId)
// Creating the index definition and schema
ft.Create("idx", new(), new Schema().AddNumericField("random_num")
- .AddTextField("title")
- .AddTextField("body")
- .AddTextField("parent"));
+ .AddTextField("title")
+ .AddTextField("body")
+ .AddTextField("parent"));
// Indexing a document
- AddDocument(db, "search", new(){
- { "title", "RediSearch" },
- { "body", "Redisearch impements a search engine on top of redis" },
- { "parent", "redis" },
- { "random_num", 10 }});
+ AddDocument(db, "search", new()
+ {
+ { "title", "RediSearch" },
+ { "body", "Redisearch impements a search engine on top of redis" },
+ { "parent", "redis" },
+ { "random_num", 10 }
+ });
AddDocument(db, "ai", new()
{
- { "title", "RedisAI" },
- { "body", "RedisAI executes Deep Learning/Machine Learning models and managing their data." },
- { "parent", "redis" },
- { "random_num", 3 }});
+ { "title", "RedisAI" },
+ { "body", "RedisAI executes Deep Learning/Machine Learning models and managing their data." },
+ { "parent", "redis" },
+ { "random_num", 3 }
+ });
AddDocument(db, "json", new()
{
- { "title", "RedisJson" },
- { "body", "RedisJSON implements ECMA-404 The JSON Data Interchange Standard as a native data type." },
- { "parent", "redis" },
- { "random_num", 8 }});
+ { "title", "RedisJson" },
+ { "body", "RedisJSON implements ECMA-404 The JSON Data Interchange Standard as a native data type." },
+ { "parent", "redis" },
+ { "random_num", 8 }
+ });
var req = new AggregationRequest("redis").GroupBy("@parent", Reducers.Count());
var res = ft.Aggregate("idx", req).GetRow(0);
@@ -1297,7 +1544,7 @@ public void TestAggregationGroupBy(string endpointId)
"@parent", Reducers.Quantile("@random_num", 0.5));
res = ft.Aggregate("idx", req).GetRow(0);
Assert.Equal("redis", res["parent"]);
- Assert.Equal(8, res.GetLong("__generated_aliasquantilerandom_num,0.5")); // median of 3,8,10
+ Assert.Equal(8, res.GetLong("__generated_aliasquantilerandom_num,0.5")); // median of 3,8,10
req = new AggregationRequest("redis").GroupBy(
"@parent", Reducers.ToList("@title"));
@@ -1311,7 +1558,14 @@ public void TestAggregationGroupBy(string endpointId)
req = new AggregationRequest("redis").GroupBy(
"@parent", Reducers.FirstValue("@title").As("first"));
- res = ft.Aggregate("idx", req).GetRow(0);
+ var agg = ft.Aggregate("idx", req);
+ Log($"results: {agg.TotalResults}");
+ for (int i = 0; i < agg.TotalResults; i++)
+ {
+ Log($"parent: {agg.GetRow(i)["parent"]}, first: {agg.GetRow(i)["first"]}");
+ }
+
+ res = agg.GetRow(0);
Assert.Equal("redis", res["parent"]);
Assert.Equal("RediSearch", res["first"]);
@@ -1327,8 +1581,8 @@ public void TestAggregationGroupBy(string endpointId)
Assert.Contains(actual[1].ToString(), possibleValues);
req = new AggregationRequest("redis")
- .Load(new FieldName("__key"))
- .GroupBy("@parent", Reducers.ToList("__key").As("docs"));
+ .Load(new FieldName("__key"))
+ .GroupBy("@parent", Reducers.ToList("__key").As("docs"));
res = db.FT().Aggregate("idx", req).GetRow(0);
actual = (List)res.Get("docs");
@@ -1338,14 +1592,15 @@ public void TestAggregationGroupBy(string endpointId)
[SkippableTheory]
- [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))]
+ [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))]
public void TestDictionary(string endpointId)
{
+ SkipClusterPre8(endpointId);
IDatabase db = GetCleanDatabase(endpointId);
var ft = db.FT();
Assert.Equal(3L, ft.DictAdd("dict", "bar", "foo", "hello world"));
-
+ AssertDatabaseSize(db, 0);
var dumResult = ft.DictDump("dict");
int i = 0;
Assert.Equal("bar", dumResult[i++].ToString());
@@ -1353,13 +1608,15 @@ public void TestDictionary(string endpointId)
Assert.Equal("hello world", dumResult[i].ToString());
Assert.Equal(3L, ft.DictDel("dict", "foo", "bar", "hello world"));
+ AssertDatabaseSize(db, 0);
Assert.Empty(ft.DictDump("dict"));
}
[SkippableTheory]
- [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))]
+ [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))]
public void TestDropIndex(string endpointId)
{
+ SkipClusterPre8(endpointId);
IDatabase db = GetCleanDatabase(endpointId);
var ft = db.FT();
Schema sc = new Schema().AddTextField("title", 1.0);
@@ -1386,13 +1643,50 @@ public void TestDropIndex(string endpointId)
{
Assert.Contains("no such index", ex.Message, StringComparison.OrdinalIgnoreCase);
}
- Assert.Equal("100", db.Execute("DBSIZE").ToString());
+
+ AssertDatabaseSize(db, 100);
+ }
+
+ private int DatabaseSize(IDatabase db) => DatabaseSize(db, out _);
+
+ private int DatabaseSize(IDatabase db, out int replicaCount)
+ {
+ replicaCount = 0;
+ var count = 0L;
+ foreach (var server in db.Multiplexer.GetServers())
+ {
+ if (server.IsReplica || !server.IsConnected)
+ {
+ replicaCount++;
+ }
+ else
+ {
+ count += server.DatabaseSize();
+ }
+ }
+
+ return checked((int)count);
+ }
+
+ private async Task DatabaseSizeAsync(IDatabase db)
+ {
+ var count = 0L;
+ foreach (var server in db.Multiplexer.GetServers())
+ {
+ if (!server.IsReplica && server.IsConnected)
+ {
+ count += await server.DatabaseSizeAsync();
+ }
+ }
+
+ return checked((int)count);
}
[SkippableTheory]
- [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))]
+ [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))]
public async Task TestDropIndexAsync(string endpointId)
{
+ SkipClusterPre8(endpointId);
IDatabase db = GetCleanDatabase(endpointId);
var ft = db.FT();
Schema sc = new Schema().AddTextField("title", 1.0);
@@ -1419,13 +1713,15 @@ public async Task TestDropIndexAsync(string endpointId)
{
Assert.Contains("no such index", ex.Message, StringComparison.OrdinalIgnoreCase);
}
- Assert.Equal("100", db.Execute("DBSIZE").ToString());
+
+ AssertDatabaseSize(db, 100);
}
[SkippableTheory]
- [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))]
+ [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))]
public void dropIndexDD(string endpointId)
{
+ SkipClusterPre8(endpointId);
IDatabase db = GetCleanDatabase(endpointId);
var ft = db.FT();
Schema sc = new Schema().AddTextField("title", 1.0);
@@ -1445,13 +1741,14 @@ public void dropIndexDD(string endpointId)
RedisResult[] keys = (RedisResult[])db.Execute("KEYS", "*")!;
Assert.Empty(keys);
- Assert.Equal("0", db.Execute("DBSIZE").ToString());
+ AssertDatabaseSize(db, 0);
}
[SkippableTheory]
- [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))]
+ [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))]
public async Task dropIndexDDAsync(string endpointId)
{
+ SkipClusterPre8(endpointId);
IDatabase db = GetCleanDatabase(endpointId);
var ft = db.FT();
Schema sc = new Schema().AddTextField("title", 1.0);
@@ -1471,18 +1768,19 @@ public async Task dropIndexDDAsync(string endpointId)
RedisResult[] keys = (RedisResult[])db.Execute("KEYS", "*")!;
Assert.Empty(keys);
- Assert.Equal("0", db.Execute("DBSIZE").ToString());
+ AssertDatabaseSize(db, 0);
}
[SkippableTheory]
- [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))]
+ [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))]
public async Task TestDictionaryAsync(string endpointId)
{
+ SkipClusterPre8(endpointId);
IDatabase db = GetCleanDatabase(endpointId);
var ft = db.FT();
Assert.Equal(3L, await ft.DictAddAsync("dict", "bar", "foo", "hello world"));
-
+ await AssertDatabaseSizeAsync(db, 0);
var dumResult = await ft.DictDumpAsync("dict");
int i = 0;
Assert.Equal("bar", dumResult[i++].ToString());
@@ -1490,12 +1788,14 @@ public async Task TestDictionaryAsync(string endpointId)
Assert.Equal("hello world", dumResult[i].ToString());
Assert.Equal(3L, await ft.DictDelAsync("dict", "foo", "bar", "hello world"));
+ await AssertDatabaseSizeAsync(db, 0);
Assert.Empty((await ft.DictDumpAsync("dict")));
}
readonly string explainQuery = "@f3:f3_val @f2:f2_val @f1:f1_val";
+
[SkippableTheory]
- [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))]
+ [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))]
public void TestExplain(string endpointId)
{
IDatabase db = GetCleanDatabase(endpointId);
@@ -1514,12 +1814,10 @@ public void TestExplain(string endpointId)
res = ft.Explain(index, explainQuery, 2);
Assert.NotNull(res);
Assert.False(res.Length == 0);
-
-
}
[SkippableTheory]
- [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))]
+ [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))]
public async Task TestExplainAsync(string endpointId)
{
IDatabase db = GetCleanDatabase(endpointId);
@@ -1588,7 +1886,7 @@ public async Task TestExplainCliAsync(string endpointId)
}
[SkippableTheory]
- [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))]
+ [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))]
public void TestExplainWithDefaultDialect(string endpointId)
{
IDatabase db = GetCleanDatabase(endpointId);
@@ -1605,7 +1903,7 @@ public void TestExplainWithDefaultDialect(string endpointId)
}
[SkippableTheory]
- [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))]
+ [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))]
public async Task TestExplainWithDefaultDialectAsync(string endpointId)
{
IDatabase db = GetCleanDatabase(endpointId);
@@ -1622,7 +1920,7 @@ public async Task TestExplainWithDefaultDialectAsync(string endpointId)
}
[SkippableTheory]
- [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))]
+ [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))]
public void TestSynonym(string endpointId)
{
IDatabase db = GetCleanDatabase(endpointId);
@@ -1648,7 +1946,7 @@ public void TestSynonym(string endpointId)
}
[SkippableTheory]
- [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))]
+ [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))]
public async Task TestSynonymAsync(string endpointId)
{
IDatabase db = GetCleanDatabase(endpointId);
@@ -1687,9 +1985,10 @@ public void TestModulePrefixs()
}
[SkippableTheory]
- [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))]
+ [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))]
public async Task GetTagFieldSyncAsync(string endpointId)
{
+ SkipClusterPre8(endpointId);
IDatabase db = GetCleanDatabase(endpointId);
var ft = db.FT();
Schema sc = new Schema()
@@ -1745,9 +2044,10 @@ public async Task GetTagFieldSyncAsync(string endpointId)
}
[SkippableTheory]
- [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))]
+ [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))]
public async Task TestGetTagFieldWithNonDefaultSeparatorSyncAsync(string endpointId)
{
+ SkipClusterPre8(endpointId);
IDatabase db = GetCleanDatabase(endpointId);
var ft = db.FT();
Schema sc = new Schema()
@@ -1813,36 +2113,40 @@ public void TestFTCreateParamsCommandBuilder()
.AddTagField("category", separator: ";");
var ftCreateParams = FTCreateParams.CreateParams().On(IndexDataType.JSON)
- .AddPrefix("doc:")
- .Filter("@category:{red}")
- .Language("English")
- .LanguageField("play")
- .Score(1.0)
- .ScoreField("chapter")
- .PayloadField("txt")
- .MaxTextFields()
- .NoOffsets()
- .Temporary(10)
- .NoHighlights()
- .NoFields()
- .NoFreqs()
- .Stopwords(new[] { "foo", "bar" })
- .SkipInitialScan();
+ .AddPrefix("doc:")
+ .Filter("@category:{red}")
+ .Language("English")
+ .LanguageField("play")
+ .Score(1.0)
+ .ScoreField("chapter")
+ .PayloadField("txt")
+ .MaxTextFields()
+ .NoOffsets()
+ .Temporary(10)
+ .NoHighlights()
+ .NoFields()
+ .NoFreqs()
+ .Stopwords(new[] { "foo", "bar" })
+ .SkipInitialScan();
var builedCommand = SearchCommandBuilder.Create(index, ftCreateParams, sc);
- var expectedArgs = new object[] { "TEST_INDEX", "ON", "JSON", "PREFIX", 1,
- "doc:", "FILTER", "@category:{red}", "LANGUAGE",
- "English", "LANGUAGE_FIELD", "play", "SCORE", 1,
- "SCORE_FIELD", "chapter", "PAYLOAD_FIELD", "txt",
- "MAXTEXTFIELDS", "NOOFFSETS", "TEMPORARY", 10,
- "NOHL", "NOFIELDS", "NOFREQS", "STOPWORDS", 2,
- "foo", "bar", "SKIPINITIALSCAN", "SCHEMA", "title",
- "TEXT", "category", "TAG", "SEPARATOR", ";" };
+ var expectedArgs = new object[]
+ {
+ "TEST_INDEX", "ON", "JSON", "PREFIX", 1,
+ "doc:", "FILTER", "@category:{red}", "LANGUAGE",
+ "English", "LANGUAGE_FIELD", "play", "SCORE", 1,
+ "SCORE_FIELD", "chapter", "PAYLOAD_FIELD", "txt",
+ "MAXTEXTFIELDS", "NOOFFSETS", "TEMPORARY", 10,
+ "NOHL", "NOFIELDS", "NOFREQS", "STOPWORDS", 2,
+ "foo", "bar", "SKIPINITIALSCAN", "SCHEMA", "title",
+ "TEXT", "category", "TAG", "SEPARATOR", ";"
+ };
for (int i = 0; i < expectedArgs.Length; i++)
{
Assert.Equal(expectedArgs[i].ToString(), builedCommand.Args[i].ToString());
}
+
Assert.Equal("FT.CREATE", builedCommand.Command.ToString());
}
@@ -1857,8 +2161,11 @@ public void TestFTCreateParamsCommandBuilderNoStopwords()
var ftCreateParams = FTCreateParams.CreateParams().NoStopwords();
- var expectedArgs = new object[] { "TEST_INDEX", "STOPWORDS", 0, "SCHEMA", "title",
- "TEXT", "category", "TAG", "SEPARATOR", ";" };
+ var expectedArgs = new object[]
+ {
+ "TEST_INDEX", "STOPWORDS", 0, "SCHEMA", "title",
+ "TEXT", "category", "TAG", "SEPARATOR", ";"
+ };
var builedCommand = SearchCommandBuilder.Create(index, ftCreateParams, sc);
@@ -1866,13 +2173,15 @@ public void TestFTCreateParamsCommandBuilderNoStopwords()
{
Assert.Equal(expectedArgs[i].ToString(), builedCommand.Args[i].ToString());
}
+
Assert.Equal("FT.CREATE", builedCommand.Command.ToString());
}
[SkippableTheory]
- [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))]
+ [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))]
public void TestFilters(string endpointId)
{
+ SkipClusterPre8(endpointId);
IDatabase db = GetCleanDatabase(endpointId);
var ft = db.FT();
// Create the index with the same fields as in the original test
@@ -1885,16 +2194,16 @@ public void TestFilters(string endpointId)
// Add the two documents to the index
AddDocument(db, "doc1", new()
{
- { "txt", "foo bar" },
- { "num", "3.141" },
- { "loc", "-0.441,51.458" }
- });
+ { "txt", "foo bar" },
+ { "num", "3.141" },
+ { "loc", "-0.441,51.458" }
+ });
AddDocument(db, "doc2", new()
{
- { "txt", "foo baz" },
- { "num", "2" },
- { "loc", "-0.1,51.2" }
- });
+ { "txt", "foo baz" },
+ { "num", "2" },
+ { "loc", "-0.1,51.2" }
+ });
// WaitForIndex(client, ft.IndexName ?? "idx");
// Test numerical filter
@@ -1922,9 +2231,10 @@ public void TestFilters(string endpointId)
}
[SkippableTheory]
- [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))]
+ [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))]
public async Task TestFiltersAsync(string endpointId)
{
+ SkipClusterPre8(endpointId);
IDatabase db = GetCleanDatabase(endpointId);
var ft = db.FT();
// Create the index with the same fields as in the original test
@@ -1937,16 +2247,16 @@ public async Task TestFiltersAsync(string endpointId)
// Add the two documents to the index
AddDocument(db, "doc1", new()
{
- { "txt", "foo bar" },
- { "num", "3.141" },
- { "loc", "-0.441,51.458" }
- });
+ { "txt", "foo bar" },
+ { "num", "3.141" },
+ { "loc", "-0.441,51.458" }
+ });
AddDocument(db, "doc2", new()
{
- { "txt", "foo baz" },
- { "num", "2" },
- { "loc", "-0.1,51.2" }
- });
+ { "txt", "foo baz" },
+ { "num", "2" },
+ { "loc", "-0.1,51.2" }
+ });
// WaitForIndex(client, ft.IndexName ?? "idx");
// Test numerical filter
@@ -1977,90 +2287,94 @@ public async Task TestFiltersAsync(string endpointId)
public void TestQueryCommandBuilder()
{
var testQuery = new Query("foo").HighlightFields(new Query.HighlightTags("", " "), "txt")
- .SetVerbatim()
- .SetNoStopwords()
- .SetWithScores()
- .SetPayload("txt")
- .SetLanguage("English")
- .SetScorer("TFIDF")
- //.SetExplainScore()
- .SetWithPayloads()
- .SetSortBy("txt", true)
- .Limit(0, 11)
- .SummarizeFields(20, 3, ";", "txt")
- .LimitKeys("key1", "key2")
- .LimitFields("txt")
- .ReturnFields("txt")
- .AddParam("name", "value")
- .Dialect(1)
- .Slop(0)
- .Timeout(1000)
- .SetInOrder()
- .SetExpander("myexpander");
+ .SetVerbatim()
+ .SetNoStopwords()
+ .SetWithScores()
+ .SetPayload("txt")
+ .SetLanguage("English")
+ .SetScorer("TFIDF")
+ //.SetExplainScore()
+ .SetWithPayloads()
+ .SetSortBy("txt", true)
+ .Limit(0, 11)
+ .SummarizeFields(20, 3, ";", "txt")
+ .LimitKeys("key1", "key2")
+ .LimitFields("txt")
+ .ReturnFields("txt")
+ .AddParam("name", "value")
+ .Dialect(1)
+ .Slop(0)
+ .Timeout(1000)
+ .SetInOrder()
+ .SetExpander("myexpander");
var buildCommand = SearchCommandBuilder.Search("idx", testQuery);
- var expectedArgs = new List {"idx",
- "foo",
- "VERBATIM",
- "NOSTOPWORDS",
- "WITHSCORES",
- "WITHPAYLOADS",
- "LANGUAGE",
- "English",
- "SCORER",
- "TFIDF",
- "INFIELDS",
- "1",
- "txt",
- "SORTBY",
- "txt",
- "ASC",
- "PAYLOAD",
- "txt",
- "LIMIT",
- "0",
- "11",
- "HIGHLIGHT",
- "FIELDS",
- "1",
- "txt",
- "TAGS",
- "",
- " ",
- "SUMMARIZE",
- "FIELDS",
- "1",
- "txt",
- "FRAGS",
- "3",
- "LEN",
- "20",
- "SEPARATOR",
- ";",
- "INKEYS",
- "2",
- "key1",
- "key2",
- "RETURN",
- "1",
- "txt",
- "PARAMS",
- "2",
- "name",
- "value",
- "DIALECT",
- "1",
- "SLOP",
- "0",
- "TIMEOUT",
- "1000",
- "INORDER",
- "EXPANDER",
- "myexpander"};
+ var expectedArgs = new List
+ {
+ "idx",
+ "foo",
+ "VERBATIM",
+ "NOSTOPWORDS",
+ "WITHSCORES",
+ "WITHPAYLOADS",
+ "LANGUAGE",
+ "English",
+ "SCORER",
+ "TFIDF",
+ "INFIELDS",
+ "1",
+ "txt",
+ "SORTBY",
+ "txt",
+ "ASC",
+ "PAYLOAD",
+ "txt",
+ "LIMIT",
+ "0",
+ "11",
+ "HIGHLIGHT",
+ "FIELDS",
+ "1",
+ "txt",
+ "TAGS",
+ "",
+ " ",
+ "SUMMARIZE",
+ "FIELDS",
+ "1",
+ "txt",
+ "FRAGS",
+ "3",
+ "LEN",
+ "20",
+ "SEPARATOR",
+ ";",
+ "INKEYS",
+ "2",
+ "key1",
+ "key2",
+ "RETURN",
+ "1",
+ "txt",
+ "PARAMS",
+ "2",
+ "name",
+ "value",
+ "DIALECT",
+ "1",
+ "SLOP",
+ "0",
+ "TIMEOUT",
+ "1000",
+ "INORDER",
+ "EXPANDER",
+ "myexpander"
+ };
for (int i = 0; i < buildCommand.Args.Count(); i++)
{
Assert.Equal(expectedArgs[i].ToString(), buildCommand.Args[i].ToString());
}
+
Assert.Equal("FT.SEARCH", buildCommand.Command);
// test that the command not throw an exception:
var db = GetCleanDatabase();
@@ -2074,27 +2388,31 @@ public void TestQueryCommandBuilder()
public void TestQueryCommandBuilderReturnField()
{
var testQuery = new Query("foo").HighlightFields("txt")
- .ReturnFields(new FieldName("txt"))
- .SetNoContent();
+ .ReturnFields(new FieldName("txt"))
+ .SetNoContent();
var buildCommand = SearchCommandBuilder.Search("idx", testQuery);
- var expectedArgs = new List {"idx",
- "foo",
- "NOCONTENT",
- "HIGHLIGHT",
- "FIELDS",
- "1",
- "txt",
- "RETURN",
- "1",
- "txt"};
+ var expectedArgs = new List
+ {
+ "idx",
+ "foo",
+ "NOCONTENT",
+ "HIGHLIGHT",
+ "FIELDS",
+ "1",
+ "txt",
+ "RETURN",
+ "1",
+ "txt"
+ };
Assert.Equal(expectedArgs.Count(), buildCommand.Args.Count());
for (int i = 0; i < buildCommand.Args.Count(); i++)
{
Assert.Equal(expectedArgs[i].ToString(), buildCommand.Args[i].ToString());
}
+
Assert.Equal("FT.SEARCH", buildCommand.Command);
// test that the command not throw an exception:
@@ -2112,8 +2430,10 @@ public void TestQueryCommandBuilderScore()
IDatabase db = GetCleanDatabase();
var ft = db.FT();
- db.Execute("JSON.SET", "doc:1", "$", "[{\"arr\": [1, 2, 3]}, {\"val\": \"hello\"}, {\"val\": \"world\"}]");
- db.Execute("FT.CREATE", "idx", "ON", "JSON", "PREFIX", "1", "doc:", "SCHEMA", "$..arr", "AS", "arr", "NUMERIC", "$..val", "AS", "val", "TEXT");
+ db.Execute("JSON.SET", (RedisKey)"doc:1", "$",
+ "[{\"arr\": [1, 2, 3]}, {\"val\": \"hello\"}, {\"val\": \"world\"}]");
+ db.Execute("FT.CREATE", "idx", "ON", "JSON", "PREFIX", "1", "doc:", "SCHEMA", "$..arr", "AS", "arr", "NUMERIC",
+ "$..val", "AS", "val", "TEXT");
// sleep:
Thread.Sleep(2000);
@@ -2134,7 +2454,8 @@ public void TestFieldsCommandBuilder()
.AddTagField(FieldName.Of("tag"), true, true, true, ";", true, true)
.AddVectorField("vec", VectorField.VectorAlgo.FLAT, new() { { "dim", 10 } });
var buildCommand = SearchCommandBuilder.Create("idx", new(), sc);
- var expectedArgs = new List {
+ var expectedArgs = new List
+ {
"idx",
"SCHEMA",
"txt",
@@ -2179,9 +2500,10 @@ public void TestFieldsCommandBuilder()
}
[SkippableTheory]
- [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))]
+ [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))]
public void TestLimit(string endpointId)
{
+ SkipClusterPre8(endpointId);
IDatabase db = GetCleanDatabase(endpointId);
var ft = db.FT();
@@ -2190,6 +2512,7 @@ public void TestLimit(string endpointId)
Document doc2 = new("doc2", new() { { "t1", "b" }, { "t2", "a" } });
AddDocument(db, doc1);
AddDocument(db, doc2);
+ AssertDatabaseSize(db, 2);
var req = new AggregationRequest("*").SortBy("@t1").Limit(1);
var res = ft.Aggregate("idx", req);
@@ -2199,9 +2522,10 @@ public void TestLimit(string endpointId)
}
[SkippableTheory]
- [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))]
+ [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))]
public async Task TestLimitAsync(string endpointId)
{
+ SkipClusterPre8(endpointId);
IDatabase db = GetCleanDatabase(endpointId);
var ft = db.FT();
@@ -2210,6 +2534,7 @@ public async Task TestLimitAsync(string endpointId)
Document doc2 = new("doc2", new() { { "t1", "b" }, { "t2", "a" } });
AddDocument(db, doc1);
AddDocument(db, doc2);
+ await AssertDatabaseSizeAsync(db, 2);
var req = new AggregationRequest("*").SortBy("@t1").Limit(1, 1);
var res = await ft.AggregateAsync("idx", req);
@@ -2265,13 +2590,15 @@ public void TestVectorCount_Issue70()
{
Assert.Equal(expected[i].ToString(), actual.Args[i].ToString());
}
+
Assert.Equal(expected.Count(), actual.Args.Length);
}
[SkippableTheory]
- [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))]
+ [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))]
public void VectorSimilaritySearch(string endpointId)
{
+ SkipClusterPre8(endpointId);
IDatabase db = GetCleanDatabase(endpointId);
var ft = db.FT();
var json = db.JSON();
@@ -2281,12 +2608,13 @@ public void VectorSimilaritySearch(string endpointId)
json.Set("vec:3", "$", "{\"vector\":[3,3,3,3]}");
json.Set("vec:4", "$", "{\"vector\":[4,4,4,4]}");
- var schema = new Schema().AddVectorField(FieldName.Of("$.vector").As("vector"), VectorField.VectorAlgo.FLAT, new()
- {
- ["TYPE"] = "FLOAT32",
- ["DIM"] = "4",
- ["DISTANCE_METRIC"] = "L2",
- });
+ var schema = new Schema().AddVectorField(FieldName.Of("$.vector").As("vector"), VectorField.VectorAlgo.FLAT,
+ new()
+ {
+ ["TYPE"] = "FLOAT32",
+ ["DIM"] = "4",
+ ["DISTANCE_METRIC"] = "L2",
+ });
var idxDef = new FTCreateParams().On(IndexDataType.JSON).Prefix("vec:");
Assert.True(ft.Create("vss_idx", idxDef, schema));
@@ -2294,11 +2622,11 @@ public void VectorSimilaritySearch(string endpointId)
float[] vec = [2, 2, 2, 2];
byte[] queryVec = MemoryMarshal.Cast(vec).ToArray();
-
+ AssertDatabaseSize(db, 4);
var query = new Query("*=>[KNN 3 @vector $query_vec]")
- .AddParam("query_vec", queryVec)
- .SetSortBy("__vector_score")
- .Dialect(2);
+ .AddParam("query_vec", queryVec)
+ .SetSortBy("__vector_score")
+ .Dialect(2);
var res = ft.Search("vss_idx", query);
Assert.Equal(3, res.TotalResults);
@@ -2312,9 +2640,10 @@ public void VectorSimilaritySearch(string endpointId)
}
[SkippableTheory]
- [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))]
+ [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))]
public void QueryingVectorFields(string endpointId)
{
+ SkipClusterPre8(endpointId);
IDatabase db = GetCleanDatabase(endpointId);
var ft = db.FT();
var json = db.JSON();
@@ -2332,6 +2661,7 @@ public void QueryingVectorFields(string endpointId)
db.HashSet("b", "v", "aaaabaaa");
db.HashSet("c", "v", "aaaaabaa");
+ AssertDatabaseSize(db, 3);
var q = new Query("*=>[KNN 2 @v $vec]").ReturnFields("__v_score").Dialect(2);
var res = ft.Search("idx", q.AddParam("vec", "aaaaaaaa"));
Assert.Equal(2, res.TotalResults);
@@ -2359,9 +2689,10 @@ public async Task TestVectorFieldJson_Issue102Async()
}
[SkippableTheory]
- [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))]
+ [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))]
public void TestQueryAddParam_DefaultDialect(string endpointId)
{
+ SkipClusterPre8(endpointId);
IDatabase db = GetCleanDatabase(endpointId);
var ft = db.FT(2);
@@ -2372,15 +2703,17 @@ public void TestQueryAddParam_DefaultDialect(string endpointId)
db.HashSet("2", "numval", 2);
db.HashSet("3", "numval", 3);
+ AssertDatabaseSize(db, 3);
Query query = new Query("@numval:[$min $max]").AddParam("min", 1).AddParam("max", 2);
var res = ft.Search("idx", query);
Assert.Equal(2, res.TotalResults);
}
[SkippableTheory]
- [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))]
+ [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))]
public async Task TestQueryAddParam_DefaultDialectAsync(string endpointId)
{
+ SkipClusterPre8(endpointId);
IDatabase db = GetCleanDatabase(endpointId);
var ft = db.FT(2);
@@ -2391,15 +2724,17 @@ public async Task TestQueryAddParam_DefaultDialectAsync(string endpointId)
db.HashSet("2", "numval", 2);
db.HashSet("3", "numval", 3);
+ await AssertDatabaseSizeAsync(db, 3);
Query query = new Query("@numval:[$min $max]").AddParam("min", 1).AddParam("max", 2);
var res = await ft.SearchAsync("idx", query);
Assert.Equal(2, res.TotalResults);
}
[SkippableTheory]
- [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))]
+ [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))]
public void TestQueryParamsWithParams_DefaultDialect(string endpointId)
{
+ SkipClusterPre8(endpointId);
IDatabase db = GetCleanDatabase(endpointId);
var ft = db.FT(2);
@@ -2410,6 +2745,7 @@ public void TestQueryParamsWithParams_DefaultDialect(string endpointId)
db.HashSet("2", "numval", 2);
db.HashSet("3", "numval", 3);
+ AssertDatabaseSize(db, 3);
Query query = new Query("@numval:[$min $max]").AddParam("min", 1).AddParam("max", 2);
var res = ft.Search("idx", query);
Assert.Equal(2, res.TotalResults);
@@ -2425,7 +2761,7 @@ public void TestQueryParamsWithParams_DefaultDialect(string endpointId)
}
[SkippableTheory]
- [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))]
+ [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))]
public void TestBasicSpellCheck(string endpointId)
{
IDatabase db = GetCleanDatabase(endpointId);
@@ -2437,6 +2773,7 @@ public void TestBasicSpellCheck(string endpointId)
db.HashSet("doc1", [new("name", "name2"), new("body", "body2")]);
db.HashSet("doc1", [new("name", "name2"), new("body", "name2")]);
+ AssertDatabaseSize(db, 1);
var reply = ft.SpellCheck(index, "name");
Assert.Single(reply.Keys);
Assert.Equal("name", reply.Keys.First());
@@ -2445,7 +2782,7 @@ public void TestBasicSpellCheck(string endpointId)
}
[SkippableTheory]
- [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))]
+ [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))]
public async Task TestBasicSpellCheckAsync(string endpointId)
{
IDatabase db = GetCleanDatabase(endpointId);
@@ -2457,6 +2794,7 @@ public async Task TestBasicSpellCheckAsync(string endpointId)
db.HashSet("doc1", [new("name", "name2"), new("body", "body2")]);
db.HashSet("doc1", [new("name", "name2"), new("body", "name2")]);
+ await AssertDatabaseSizeAsync(db, 1);
var reply = await ft.SpellCheckAsync(index, "name");
Assert.Single(reply.Keys);
Assert.Equal("name", reply.Keys.First());
@@ -2465,9 +2803,10 @@ public async Task TestBasicSpellCheckAsync(string endpointId)
}
[SkippableTheory]
- [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))]
+ [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))]
public void TestCrossTermDictionary(string endpointId)
{
+ SkipClusterPre8(endpointId);
IDatabase db = GetCleanDatabase(endpointId);
var ft = db.FT();
@@ -2481,17 +2820,20 @@ public void TestCrossTermDictionary(string endpointId)
}
};
+ AssertDatabaseSize(db, 0);
+
Assert.Equal(expected, ft.SpellCheck(index,
- "Tooni toque kerfuffle",
- new FTSpellCheckParams()
- .IncludeTerm("slang")
- .ExcludeTerm("slang")));
+ "Tooni toque kerfuffle",
+ new FTSpellCheckParams()
+ .IncludeTerm("slang")
+ .ExcludeTerm("slang")));
}
[SkippableTheory]
- [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))]
+ [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))]
public async Task TestCrossTermDictionaryAsync(string endpointId)
{
+ SkipClusterPre8(endpointId);
IDatabase db = GetCleanDatabase(endpointId);
var ft = db.FT();
@@ -2505,11 +2847,12 @@ public async Task TestCrossTermDictionaryAsync(string endpointId)
}
};
+ AssertDatabaseSize(db, 0);
Assert.Equal(expected, await ft.SpellCheckAsync(index,
- "Tooni toque kerfuffle",
- new FTSpellCheckParams()
- .IncludeTerm("slang")
- .ExcludeTerm("slang")));
+ "Tooni toque kerfuffle",
+ new FTSpellCheckParams()
+ .IncludeTerm("slang")
+ .ExcludeTerm("slang")));
}
[Fact]
@@ -2531,7 +2874,8 @@ public async Task TestDistanceBoundAsync()
ft.Create(index, new(), new Schema().AddTextField("name").AddTextField("body"));
// distance suppose to be between 1 and 4
- await Assert.ThrowsAsync(async () => await ft.SpellCheckAsync(index, "name", new FTSpellCheckParams().Distance(0)));
+ await Assert.ThrowsAsync(async () =>
+ await ft.SpellCheckAsync(index, "name", new FTSpellCheckParams().Distance(0)));
}
[Fact]
@@ -2553,13 +2897,15 @@ public async Task TestDialectBoundAsync()
ft.Create(index, new(), new Schema().AddTextField("t"));
// dialect 0 is not valid
- await Assert.ThrowsAsync(async () => await ft.SpellCheckAsync(index, "name", new FTSpellCheckParams().Dialect(0)));
+ await Assert.ThrowsAsync(async () =>
+ await ft.SpellCheckAsync(index, "name", new FTSpellCheckParams().Dialect(0)));
}
[SkippableTheory]
- [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))]
+ [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))]
public async Task TestQueryParamsWithParams_DefaultDialectAsync(string endpointId)
{
+ SkipClusterPre8(endpointId);
IDatabase db = GetCleanDatabase(endpointId);
var ft = db.FT(2);
@@ -2570,6 +2916,7 @@ public async Task TestQueryParamsWithParams_DefaultDialectAsync(string endpointI
db.HashSet("2", "numval", 2);
db.HashSet("3", "numval", 3);
+ AssertDatabaseSize(db, 3);
Query query = new Query("@numval:[$min $max]").AddParam("min", 1).AddParam("max", 2);
var res = await ft.SearchAsync("idx", query);
Assert.Equal(2, res.TotalResults);
@@ -2793,7 +3140,7 @@ public async Task getSuggestionLengthAndDeleteSuggestionAsync()
}
[SkipIfRedisTheory(Is.Enterprise, Comparison.LessThan, "7.9")]
- [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))]
+ [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))]
public void TestProfileSearch(string endpointId)
{
IDatabase db = GetCleanDatabase(endpointId);
@@ -2804,7 +3151,7 @@ public void TestProfileSearch(string endpointId)
db.HashSet("doc1", [
new("t1", "foo"),
- new("t2", "bar")
+ new("t2", "bar")
]);
var profile = ft.ProfileOnSearch(index, new("foo"));
@@ -2817,7 +3164,7 @@ public void TestProfileSearch(string endpointId)
}
[SkipIfRedisTheory(Is.Enterprise, Comparison.LessThan, "7.9")]
- [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))]
+ [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))]
public async Task TestProfileSearchAsync(string endpointId)
{
IDatabase db = GetCleanDatabase(endpointId);
@@ -2828,7 +3175,7 @@ public async Task TestProfileSearchAsync(string endpointId)
db.HashSet("doc1", [
new("t1", "foo"),
- new("t2", "bar")
+ new("t2", "bar")
]);
var profile = await ft.ProfileOnSearchAsync(index, new("foo"));
@@ -2840,9 +3187,10 @@ public async Task TestProfileSearchAsync(string endpointId)
}
[SkipIfRedisTheory(Is.Enterprise, Comparison.GreaterThanOrEqual, "7.9")]
- [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))]
+ [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))]
public void TestProfileSearch_WithoutCoordinator(string endpointId)
{
+ SkipClusterPre8(endpointId);
IDatabase db = GetCleanDatabase(endpointId);
var ft = db.FT();
@@ -2851,7 +3199,7 @@ public void TestProfileSearch_WithoutCoordinator(string endpointId)
db.HashSet("doc1", [
new("t1", "foo"),
- new("t2", "bar")
+ new("t2", "bar")
]);
var profile = ft.ProfileSearch(index, new("foo"));
@@ -2860,9 +3208,10 @@ public void TestProfileSearch_WithoutCoordinator(string endpointId)
}
[SkipIfRedisTheory(Is.Enterprise, Comparison.GreaterThanOrEqual, "7.9")]
- [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))]
+ [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))]
public async Task TestProfileSearchAsync_WithoutCoordinator(string endpointId)
{
+ SkipClusterPre8(endpointId);
IDatabase db = GetCleanDatabase(endpointId);
var ft = db.FT();
@@ -2871,7 +3220,7 @@ public async Task TestProfileSearchAsync_WithoutCoordinator(string endpointId)
db.HashSet("doc1", [
new("t1", "foo"),
- new("t2", "bar")
+ new("t2", "bar")
]);
var profile = await ft.ProfileSearchAsync(index, new("foo"));
@@ -2880,7 +3229,7 @@ public async Task TestProfileSearchAsync_WithoutCoordinator(string endpointId)
}
[SkipIfRedisTheory(Is.Enterprise, Comparison.LessThan, "7.9")]
- [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))]
+ [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))]
public void TestProfile(string endpointId)
{
IDatabase db = GetCleanDatabase(endpointId);
@@ -2916,7 +3265,7 @@ public void TestProfile(string endpointId)
}
[SkipIfRedisTheory(Is.Enterprise, Comparison.LessThan, "7.9")]
- [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))]
+ [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))]
public async Task TestProfileAsync(string endpointId)
{
IDatabase db = GetCleanDatabase(endpointId);
@@ -2952,9 +3301,10 @@ public async Task TestProfileAsync(string endpointId)
}
[SkipIfRedisTheory(Is.Enterprise, Comparison.GreaterThanOrEqual, "7.9")]
- [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))]
+ [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))]
public void TestProfile_WithoutCoordinator(string endpointId)
{
+ SkipClusterPre8(endpointId);
IDatabase db = GetCleanDatabase(endpointId);
var ft = db.FT();
@@ -2982,9 +3332,10 @@ public void TestProfile_WithoutCoordinator(string endpointId)
}
[SkipIfRedisTheory(Is.Enterprise, Comparison.GreaterThanOrEqual, "7.9")]
- [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))]
+ [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))]
public async Task TestProfileAsync_WithoutCoordinator(string endpointId)
{
+ SkipClusterPre8(endpointId);
IDatabase db = GetCleanDatabase(endpointId);
var ft = db.FT();
@@ -3012,9 +3363,10 @@ public async Task TestProfileAsync_WithoutCoordinator(string endpointId)
}
[SkipIfRedisTheory(Is.Enterprise, Comparison.LessThan, "7.3.240")]
- [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))]
+ [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))]
public void TestProfileIssue306(string endpointId)
{
+ SkipClusterPre8(endpointId);
IDatabase db = GetCleanDatabase(endpointId);
var ft = db.FT();
@@ -3042,13 +3394,15 @@ public void TestProfileIssue306(string endpointId)
}
[SkipIfRedisTheory(Is.Enterprise, Comparison.LessThan, "7.3.240")]
- [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))]
+ [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))]
public async Task TestProfileAsyncIssue306(string endpointId)
{
+ SkipClusterPre8(endpointId);
IDatabase db = GetCleanDatabase(endpointId);
var ft = db.FT();
- await ft.CreateAsync(index, new Schema().AddTextField("t", sortable: true)); // Calling FT.CREATR without FTCreateParams
+ await ft.CreateAsync(index,
+ new Schema().AddTextField("t", sortable: true)); // Calling FT.CREATE without FTCreateParams
db.HashSet("1", "t", "hello");
db.HashSet("2", "t", "world");
@@ -3083,7 +3437,7 @@ public void TestProfileCommandBuilder()
}
[SkippableTheory]
- [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))]
+ [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))]
public void Issue175(string endpointId)
{
IDatabase db = GetCleanDatabase(endpointId);
@@ -3092,18 +3446,19 @@ public void Issue175(string endpointId)
var sortable = true;
var ftParams = new FTCreateParams()
- .On(IndexDataType.JSON)
- .Prefix("doc:");
+ .On(IndexDataType.JSON)
+ .Prefix("doc:");
var schema = new Schema().AddTagField("tag", sortable, false, false, "|")
- .AddTextField("text", 1, sortable);
+ .AddTextField("text", 1, sortable);
Assert.True(ft.Create("myIndex", ftParams, schema));
}
[SkipIfRedisTheory(Comparison.LessThan, "7.2.1")]
- [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))]
+ [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))]
public void GeoShapeFilterSpherical(string endpointId)
{
+ SkipClusterPre8(endpointId);
IDatabase db = GetCleanDatabase(endpointId);
var ft = db.FT();
@@ -3166,16 +3521,18 @@ public void GeoShapeFilterSpherical(string endpointId)
}
[SkipIfRedisTheory(Comparison.LessThan, "7.2.1")]
- [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))]
+ [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))]
public async Task GeoShapeFilterSphericalAsync(string endpointId)
{
+ SkipClusterPre8(endpointId);
IDatabase db = GetCleanDatabase(endpointId);
var ft = db.FT();
WKTReader reader = new();
GeometryFactory factory = new();
- Assert.True(await ft.CreateAsync(index, new Schema().AddGeoShapeField("geom", GeoShapeField.CoordinateSystem.SPHERICAL)));
+ Assert.True(await ft.CreateAsync(index,
+ new Schema().AddGeoShapeField("geom", GeoShapeField.CoordinateSystem.SPHERICAL)));
// Create polygons
Polygon small = factory.CreatePolygon([
@@ -3204,7 +3561,8 @@ public async Task GeoShapeFilterSphericalAsync(string endpointId)
new(34.9000, 29.7000)
]);
- var res = await ft.SearchAsync(index, new Query($"@geom:[within $poly]").AddParam("poly", within.ToString()).Dialect(3));
+ var res = await ft.SearchAsync(index,
+ new Query($"@geom:[within $poly]").AddParam("poly", within.ToString()).Dialect(3));
Assert.Equal(1, res.TotalResults);
Assert.Single(res.Documents);
Assert.Equal(small, reader.Read(res.Documents[0]["geom"].ToString()));
@@ -3217,7 +3575,8 @@ public async Task GeoShapeFilterSphericalAsync(string endpointId)
new(34.9002, 29.7002)
]);
- res = await ft.SearchAsync(index, new Query($"@geom:[contains $poly]").AddParam("poly", contains.ToString()).Dialect(3));
+ res = await ft.SearchAsync(index,
+ new Query($"@geom:[contains $poly]").AddParam("poly", contains.ToString()).Dialect(3));
Assert.Equal(2, res.TotalResults);
Assert.Equal(2, res.Documents.Count);
@@ -3225,15 +3584,17 @@ public async Task GeoShapeFilterSphericalAsync(string endpointId)
Point point = factory.CreatePoint(new Coordinate(34.9010, 29.7010));
db.HashSet("point", "geom", point.ToString());
- res = await ft.SearchAsync(index, new Query($"@geom:[within $poly]").AddParam("poly", within.ToString()).Dialect(3));
+ res = await ft.SearchAsync(index,
+ new Query($"@geom:[within $poly]").AddParam("poly", within.ToString()).Dialect(3));
Assert.Equal(2, res.TotalResults);
Assert.Equal(2, res.Documents.Count);
}
[SkipIfRedisTheory(Comparison.LessThan, "7.2.1")]
- [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))]
+ [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))]
public void GeoShapeFilterFlat(string endpointId)
{
+ SkipClusterPre8(endpointId);
IDatabase db = GetCleanDatabase(endpointId);
var ft = db.FT();
WKTReader reader = new();
@@ -3244,23 +3605,24 @@ public void GeoShapeFilterFlat(string endpointId)
// polygon type
Polygon small = factory.CreatePolygon([
new(1, 1),
- new(1, 100), new(100, 100), new(100, 1), new(1, 1)
+ new(1, 100), new(100, 100), new(100, 1), new(1, 1)
]);
db.HashSet("small", "geom", small.ToString());
Polygon large = factory.CreatePolygon([
new(1, 1),
- new(1, 200), new(200, 200), new(200, 1), new(1, 1)
+ new(1, 200), new(200, 200), new(200, 1), new(1, 1)
]);
db.HashSet("large", "geom", large.ToString());
// within condition
Polygon within = factory.CreatePolygon([
new(0, 0),
- new(0, 150), new(150, 150), new(150, 0), new(0, 0)
+ new(0, 150), new(150, 150), new(150, 0), new(0, 0)
]);
- SearchResult res = ft.Search(index, new Query("@geom:[within $poly]").AddParam("poly", within.ToString()).Dialect(3));
+ SearchResult res = ft.Search(index,
+ new Query("@geom:[within $poly]").AddParam("poly", within.ToString()).Dialect(3));
Assert.Equal(1, res.TotalResults);
Assert.Single(res.Documents);
Assert.Equal(small, reader.Read(res.Documents[0]["geom"].ToString()));
@@ -3268,7 +3630,7 @@ public void GeoShapeFilterFlat(string endpointId)
// contains condition
Polygon contains = factory.CreatePolygon([
new(2, 2),
- new(2, 50), new(50, 50), new(50, 2), new(2, 2)
+ new(2, 50), new(50, 50), new(50, 2), new(2, 2)
]);
res = ft.Search(index, new Query("@geom:[contains $poly]").AddParam("poly", contains.ToString()).Dialect(3));
@@ -3285,36 +3647,39 @@ public void GeoShapeFilterFlat(string endpointId)
}
[SkipIfRedisTheory(Comparison.LessThan, "7.2.1")]
- [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))]
+ [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))]
public async Task GeoShapeFilterFlatAsync(string endpointId)
{
+ SkipClusterPre8(endpointId);
IDatabase db = GetCleanDatabase(endpointId);
var ft = db.FT();
WKTReader reader = new();
GeometryFactory factory = new();
- Assert.True(await ft.CreateAsync(index, new Schema().AddGeoShapeField("geom", GeoShapeField.CoordinateSystem.FLAT)));
+ Assert.True(await ft.CreateAsync(index,
+ new Schema().AddGeoShapeField("geom", GeoShapeField.CoordinateSystem.FLAT)));
// polygon type
Polygon small = factory.CreatePolygon([
new(1, 1),
- new(1, 100), new(100, 100), new(100, 1), new(1, 1)
+ new(1, 100), new(100, 100), new(100, 1), new(1, 1)
]);
db.HashSet("small", "geom", small.ToString());
Polygon large = factory.CreatePolygon([
new(1, 1),
- new(1, 200), new(200, 200), new(200, 1), new(1, 1)
+ new(1, 200), new(200, 200), new(200, 1), new(1, 1)
]);
db.HashSet("large", "geom", large.ToString());
// within condition
Polygon within = factory.CreatePolygon([
new(0, 0),
- new(0, 150), new(150, 150), new(150, 0), new(0, 0)
+ new(0, 150), new(150, 150), new(150, 0), new(0, 0)
]);
- SearchResult res = await ft.SearchAsync(index, new Query("@geom:[within $poly]").AddParam("poly", within.ToString()).Dialect(3));
+ SearchResult res = await ft.SearchAsync(index,
+ new Query("@geom:[within $poly]").AddParam("poly", within.ToString()).Dialect(3));
Assert.Equal(1, res.TotalResults);
Assert.Single(res.Documents);
Assert.Equal(small, reader.Read(res.Documents[0]["geom"].ToString()));
@@ -3322,10 +3687,11 @@ public async Task GeoShapeFilterFlatAsync(string endpointId)
// contains condition
Polygon contains = factory.CreatePolygon([
new(2, 2),
- new(2, 50), new(50, 50), new(50, 2), new(2, 2)
+ new(2, 50), new(50, 50), new(50, 2), new(2, 2)
]);
- res = await ft.SearchAsync(index, new Query("@geom:[contains $poly]").AddParam("poly", contains.ToString()).Dialect(3));
+ res = await ft.SearchAsync(index,
+ new Query("@geom:[contains $poly]").AddParam("poly", contains.ToString()).Dialect(3));
Assert.Equal(2, res.TotalResults);
Assert.Equal(2, res.Documents.Count);
@@ -3333,7 +3699,8 @@ public async Task GeoShapeFilterFlatAsync(string endpointId)
Point point = factory.CreatePoint(new Coordinate(10, 10));
db.HashSet("point", "geom", point.ToString());
- res = await ft.SearchAsync(index, new Query("@geom:[within $poly]").AddParam("poly", within.ToString()).Dialect(3));
+ res = await ft.SearchAsync(index,
+ new Query("@geom:[within $poly]").AddParam("poly", within.ToString()).Dialect(3));
Assert.Equal(2, res.TotalResults);
Assert.Equal(2, res.Documents.Count);
}
@@ -3342,17 +3709,19 @@ public async Task GeoShapeFilterFlatAsync(string endpointId)
public void Issue230()
{
var request = new AggregationRequest("*", 3).Filter("@StatusId==1")
- .GroupBy("@CreatedDay", Reducers.CountDistinct("@UserId"), Reducers.Count().As("count"));
+ .GroupBy("@CreatedDay", Reducers.CountDistinct("@UserId"), Reducers.Count().As("count"));
var buildCommand = SearchCommandBuilder.Aggregate("idx:users", request);
// expected: FT.AGGREGATE idx:users * FILTER @StatusId==1 GROUPBY 1 @CreatedDay REDUCE COUNT_DISTINCT 1 @UserId REDUCE COUNT 0 AS count DIALECT 3
Assert.Equal("FT.AGGREGATE", buildCommand.Command);
- Assert.Equal(["idx:users", "*", "FILTER", "@StatusId==1", "GROUPBY", 1, "@CreatedDay", "REDUCE", "COUNT_DISTINCT", 1, "@UserId", "REDUCE", "COUNT", 0, "AS", "count", "DIALECT", 3
+ Assert.Equal([
+ "idx:users", "*", "FILTER", "@StatusId==1", "GROUPBY", 1, "@CreatedDay", "REDUCE", "COUNT_DISTINCT", 1,
+ "@UserId", "REDUCE", "COUNT", 0, "AS", "count", "DIALECT", 3
], buildCommand.Args);
}
[SkipIfRedisTheory(Comparison.LessThan, "7.3.240")]
- [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))]
+ [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))]
public void TestNumericInDialect4(string endpointId)
{
IDatabase db = GetCleanDatabase(endpointId);
@@ -3375,7 +3744,7 @@ public void TestNumericInDialect4(string endpointId)
}
[SkipIfRedisTheory(Comparison.LessThan, "7.3.240")]
- [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))]
+ [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))]
public void TestNumericOperatorsInDialect4(string endpointId)
{
IDatabase db = GetCleanDatabase(endpointId);
@@ -3401,11 +3770,10 @@ public void TestNumericOperatorsInDialect4(string endpointId)
Assert.Equal(1, ft.Search(index, new("@version:[-inf 124]")).TotalResults);
Assert.Equal(1, ft.Search(index, new Query("@version<=124").Dialect(4)).TotalResults);
-
}
[SkipIfRedisTheory(Comparison.LessThan, "7.3.240")]
- [MemberData(nameof(EndpointsFixture.Env.StandaloneOnly), MemberType = typeof(EndpointsFixture.Env))]
+ [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))]
public void TestNumericLogicalOperatorsInDialect4(string endpointId)
{
IDatabase db = GetCleanDatabase(endpointId);
@@ -3465,7 +3833,8 @@ public async Task TestDocumentLoadWithDB_Issue352(string endpointId)
IDatabase db = GetCleanDatabase(endpointId);
var ft = db.FT();
- Schema sc = new Schema().AddTextField("firstText", 1.0).AddTextField("lastText", 1.0).AddNumericField("ageNumeric");
+ Schema sc = new Schema().AddTextField("firstText", 1.0).AddTextField("lastText", 1.0)
+ .AddNumericField("ageNumeric");
Assert.True(ft.Create(index, FTCreateParams.CreateParams(), sc));
Document? droppedDocument = null;
@@ -3509,7 +3878,11 @@ public async Task TestDocumentLoadWithDB_Issue352(string endpointId)
List tasks = [];
// try with 3 different tasks simultaneously to increase the chance of hitting it
- for (int i = 0; i < 3; i++) { tasks.Add(Task.Run(checker)); }
+ for (int i = 0; i < 3; i++)
+ {
+ tasks.Add(Task.Run(checker));
+ }
+
Task checkTask = Task.WhenAll(tasks);
await Task.WhenAny(checkTask, Task.Delay(1000));
var keyTtl = db.KeyTimeToLive("student:22222");
@@ -3522,4 +3895,4 @@ public async Task TestDocumentLoadWithDB_Issue352(string endpointId)
// Without fix for Issue352, document load in this case fails %100 with my local test runs,, and %100 success with fixed version.
// The results in pipeline should be the same.
}
-}
+}
\ No newline at end of file
diff --git a/tests/dockers/docker-compose.yml b/tests/dockers/docker-compose.yml
index 98ef921e..652885f2 100644
--- a/tests/dockers/docker-compose.yml
+++ b/tests/dockers/docker-compose.yml
@@ -3,7 +3,7 @@
services:
redis:
- image: ${CLIENT_LIBS_TEST_IMAGE:-redislabs/client-libs-test:rs-7.4.0-v1}
+ image: ${CLIENT_LIBS_TEST_IMAGE:-redislabs/client-libs-test:8.2.1}
container_name: redis-standalone
environment:
- TLS_ENABLED=yes
@@ -21,7 +21,7 @@ services:
- all
cluster:
- image: ${CLIENT_LIBS_TEST_IMAGE:-redislabs/client-libs-test:rs-7.4.0-v1}
+ image: ${CLIENT_LIBS_TEST_IMAGE:-redislabs/client-libs-test:8.2.1-pre}
container_name: redis-cluster
environment:
- REDIS_CLUSTER=yes
@@ -38,4 +38,4 @@ services:
- "./cluster:/redis/work"
profiles:
- cluster
- - all
\ No newline at end of file
+ - all
diff --git a/tests/dockers/endpoints.json b/tests/dockers/endpoints.json
index 1afd1fda..3b0e1994 100644
--- a/tests/dockers/endpoints.json
+++ b/tests/dockers/endpoints.json
@@ -1,17 +1,17 @@
{
"standalone":{
"endpoints": [
- "localhost:6379"
+ "127.0.0.1:6379"
]
},
"cluster": {
"endpoints": [
- "localhost:16379",
- "localhost:16380",
- "localhost:16381",
- "localhost:16382",
- "localhost:16383",
- "localhost:16384"
+ "127.0.0.1:16379",
+ "127.0.0.1:16380",
+ "127.0.0.1:16381",
+ "127.0.0.1:16382",
+ "127.0.0.1:16383",
+ "127.0.0.1:16384"
]
}
}
\ No newline at end of file