diff --git a/.gitignore b/.gitignore
index 45739b38..25913044 100644
--- a/.gitignore
+++ b/.gitignore
@@ -396,4 +396,7 @@ FodyWeavers.xsd
# JetBrains Rider
*.sln.iml
-.idea
\ No newline at end of file
+.idea
+tests/NRedisStack.Tests/lcov.net7.0.info
+tests/NRedisStack.Tests/lcov.net6.0.info
+tests/NRedisStack.Tests/lcov.info
\ No newline at end of file
diff --git a/.vscode/settings.json b/.vscode/settings.json
index 7a73a41b..b26a2359 100644
--- a/.vscode/settings.json
+++ b/.vscode/settings.json
@@ -1,2 +1,4 @@
{
+ "dotnet-test-explorer.testArguments": "/p:CollectCoverage=true /p:CoverletOutputFormat=lcov /p:CoverletOutput=./lcov.info",
+ "dotnet-test-explorer.testProjectPath": "**/*NRedisStack.Tests.csproj"
}
\ No newline at end of file
diff --git a/.vscode/tasks.json b/.vscode/tasks.json
index 9dc653aa..0542bb3a 100644
--- a/.vscode/tasks.json
+++ b/.vscode/tasks.json
@@ -36,6 +36,24 @@
"${workspaceFolder}/tests/NRedisStack.Tests/NRedisStack.Tests.csproj"
],
"problemMatcher": "$msCompile"
+ },
+ {
+ "label": "test",
+ "dependsOn": [],
+ "command": "dotnet",
+ "type": "process",
+ "args": [
+ "test",
+ "${workspaceFolder}/tests/NRedisStack.Tests/NRedisStack.Tests.csproj",
+ "/p:CollectCoverage=true",
+ "/p:CoverletOutputFormat=lcov",
+ "/p:CoverletOutput=./lcov.info"
+ ],
+ "problemMatcher": "$msCompile",
+ "group": {
+ "kind": "test",
+ "isDefault": true
+ }
}
]
}
\ No newline at end of file
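[review note] The "test" task added above is the VS Code counterpart of running `dotnet test tests/NRedisStack.Tests/NRedisStack.Tests.csproj /p:CollectCoverage=true /p:CoverletOutputFormat=lcov /p:CoverletOutput=./lcov.info` from the repository root; the lcov*.info files it writes are exactly the ones ignored in the .gitignore change above.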
diff --git a/src/NRedisStack/CuckooFilter/DataTypes/CuckooInformation.cs b/src/NRedisStack/CuckooFilter/DataTypes/CuckooInformation.cs
index cb5d31ae..74be500c 100644
--- a/src/NRedisStack/CuckooFilter/DataTypes/CuckooInformation.cs
+++ b/src/NRedisStack/CuckooFilter/DataTypes/CuckooInformation.cs
@@ -8,12 +8,12 @@ public class CuckooInformation
{
public long Size { get; private set; }
public long NumberOfBuckets { get; private set; }
- public long NumberOfFilter { get; private set; }
+ public long NumberOfFilters { get; private set; }
public long NumberOfItemsInserted { get; private set; }
public long NumberOfItemsDeleted { get; private set; }
public long BucketSize { get; private set; }
public long ExpansionRate { get; private set; }
- public long MaxIteration { get; private set; }
+ public long MaxIterations { get; private set; }
internal CuckooInformation(long size, long numberOfBuckets, long numberOfFilter,
long numberOfItemsInserted, long numberOfItemsDeleted,
@@ -21,12 +21,12 @@ internal CuckooInformation(long size, long numberOfBuckets, long numberOfFilter,
{
Size = size;
NumberOfBuckets = numberOfBuckets;
- NumberOfFilter = numberOfFilter;
+ NumberOfFilters = numberOfFilter;
NumberOfItemsInserted = numberOfItemsInserted;
NumberOfItemsDeleted = numberOfItemsDeleted;
BucketSize = bucketSize;
ExpansionRate = expansionRate;
- MaxIteration = maxIteration;
+ MaxIterations = maxIteration;
}
}
}
\ No newline at end of file
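[review note] NumberOfFilter -> NumberOfFilters and MaxIteration -> MaxIterations are breaking renames for anyone consuming CuckooInformation. A minimal usage sketch of the new names, assuming the db.CF() entry point exercised by the tests in this PR:

    var cf = db.CF();                            // Cuckoo filter commands
    cf.Add("cf-key", "item");                    // CF.ADD creates the filter on first use
    CuckooInformation info = cf.Info("cf-key");  // CF.INFO
    Console.WriteLine($"{info.NumberOfFilters} filter(s), max {info.MaxIterations} iterations");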
diff --git a/src/NRedisStack/Graph/DataTypes/Edge.cs b/src/NRedisStack/Graph/DataTypes/Edge.cs
index 24e7df5b..3e446f22 100644
--- a/src/NRedisStack/Graph/DataTypes/Edge.cs
+++ b/src/NRedisStack/Graph/DataTypes/Edge.cs
@@ -26,6 +26,7 @@ public class Edge : GraphEntity
/// </summary>
public long Destination { get; set; }
+ // TODO: check if this is needed:
/// <summary>
/// Overriden from the base `Equals` implementation. In addition to the expected behavior of checking
/// reference equality, we'll also fall back and check to see if the: Source, Destination, and RelationshipType
diff --git a/src/NRedisStack/Graph/DataTypes/GraphEntity.cs b/src/NRedisStack/Graph/DataTypes/GraphEntity.cs
index d902c06b..58a5391e 100644
--- a/src/NRedisStack/Graph/DataTypes/GraphEntity.cs
+++ b/src/NRedisStack/Graph/DataTypes/GraphEntity.cs
@@ -12,6 +12,7 @@ public abstract class GraphEntity
public IDictionary<string, object> PropertyMap = new Dictionary<string, object>();
+ // TODO: check if this is needed:
/// <summary>
/// Overriden Equals that considers the equality of the entity ID as well as the equality of the
/// properties that each entity has.
diff --git a/src/NRedisStack/Graph/DataTypes/Node.cs b/src/NRedisStack/Graph/DataTypes/Node.cs
index 05a30e86..92fda49b 100644
--- a/src/NRedisStack/Graph/DataTypes/Node.cs
+++ b/src/NRedisStack/Graph/DataTypes/Node.cs
@@ -16,6 +16,7 @@ public Node()
Labels = new List<string>();
}
+ // TODO: check if this is needed:
/// <summary>
/// Overriden member that checks to see if the names of the labels of a node are equal
/// (in addition to base `Equals` functionality).
diff --git a/src/NRedisStack/Graph/DataTypes/Path.cs b/src/NRedisStack/Graph/DataTypes/Path.cs
index d3f5a68c..0efa1dde 100644
--- a/src/NRedisStack/Graph/DataTypes/Path.cs
+++ b/src/NRedisStack/Graph/DataTypes/Path.cs
@@ -20,12 +20,7 @@ public Path(IList<Node> nodes, IList<Edge> edges)
Edges = new ReadOnlyCollection<Edge>(edges);
}
-
- /// <summary>
- /// How many edges exist on this path.
- /// </summary>
- public int Length => Edges.Count;
-
+ // TODO: check if this is needed:
/// <summary>
/// Overriden `Equals` method that will consider the equality of the Nodes and Edges between two paths.
/// </summary>
@@ -73,6 +68,7 @@ public override int GetHashCode()
}
}
+ // TODO: check if this is needed:
/// <summary>
/// Overridden `ToString` method that will emit a string based on the string values of the nodes and edges
/// on the path.
diff --git a/src/NRedisStack/Graph/GraphCommandBuilder.cs b/src/NRedisStack/Graph/GraphCommandBuilder.cs
index d7dfef81..ed56b4dd 100644
--- a/src/NRedisStack/Graph/GraphCommandBuilder.cs
+++ b/src/NRedisStack/Graph/GraphCommandBuilder.cs
@@ -8,14 +8,6 @@ public static class GraphCommandBuilder
{
internal static readonly object CompactQueryFlag = "--COMPACT";
- /// <inheritdoc/>
- public static SerializedCommand Query(string graphName, string query, IDictionary<string, object> parameters, long? timeout = null)
- {
- var preparedQuery = PrepareQuery(query, parameters);
-
- return Query(graphName, preparedQuery, timeout);
- }
-
/// <inheritdoc/>
public static SerializedCommand Query(string graphName, string query, long? timeout = null)
{
@@ -25,14 +17,6 @@ public static SerializedCommand Query(string graphName, string query, long? time
return new SerializedCommand(GRAPH.QUERY, args);
}
- /// <inheritdoc/>
- public static SerializedCommand RO_Query(string graphName, string query, IDictionary<string, object> parameters, long? timeout = null)
- {
- var preparedQuery = PrepareQuery(query, parameters);
-
- return RO_Query(graphName, preparedQuery, timeout);
- }
-
/// <inheritdoc/>
public static SerializedCommand RO_Query(string graphName, string query, long? timeout = null)
{
diff --git a/src/NRedisStack/Graph/GraphCommands.cs b/src/NRedisStack/Graph/GraphCommands.cs
index 4a04e6b9..3779b961 100644
--- a/src/NRedisStack/Graph/GraphCommands.cs
+++ b/src/NRedisStack/Graph/GraphCommands.cs
@@ -18,16 +18,6 @@ public GraphCommands(IDatabase db)
private readonly IDictionary<string, GraphCache> _graphCaches = new Dictionary<string, GraphCache>();
- private GraphCache GetGraphCache(string graphName)
- {
- if (!_graphCaches.ContainsKey(graphName))
- {
- _graphCaches.Add(graphName, new GraphCache(graphName, this));
- }
-
- return _graphCaches[graphName];
- }
-
/// <inheritdoc/>
public ResultSet Query(string graphName, string query, IDictionary<string, object> parameters, long? timeout = null)
{
diff --git a/src/NRedisStack/Graph/Header.cs b/src/NRedisStack/Graph/Header.cs
index fe8915cb..ea9a6259 100644
--- a/src/NRedisStack/Graph/Header.cs
+++ b/src/NRedisStack/Graph/Header.cs
@@ -41,6 +41,7 @@ internal Header(RedisResult result)
}
}
+ // TODO: check if this is needed:
public override bool Equals(object? obj)
{
if (obj == null) return this == null;
diff --git a/src/NRedisStack/Graph/Point.cs b/src/NRedisStack/Graph/Point.cs
index 4bb562fb..a1f9a226 100644
--- a/src/NRedisStack/Graph/Point.cs
+++ b/src/NRedisStack/Graph/Point.cs
@@ -23,6 +23,7 @@ public Point(List values)
this.longitude = values[1];
}
+ // TODO: check if this is needed:
public override bool Equals(object? obj)
{
if (obj == null) return this == null;
diff --git a/src/NRedisStack/Graph/Record.cs b/src/NRedisStack/Graph/Record.cs
index be2346ec..84a9346c 100644
--- a/src/NRedisStack/Graph/Record.cs
+++ b/src/NRedisStack/Graph/Record.cs
@@ -58,6 +58,7 @@ internal Record(List<string> header, List<object> values)
public int Size => Header.Count;
+ // TODO: check if this is needed:
public override bool Equals(object? obj)
{
if (obj == null) return this == null;
diff --git a/src/NRedisStack/Json/IJsonCommands.cs b/src/NRedisStack/Json/IJsonCommands.cs
index 80be63ef..80b2af7e 100644
--- a/src/NRedisStack/Json/IJsonCommands.cs
+++ b/src/NRedisStack/Json/IJsonCommands.cs
@@ -394,16 +394,6 @@ public interface IJsonCommands
///
Task<T?> GetAsync<T>(RedisKey key, string path = "$");
- /// <summary>
- /// retrieves a group of items stored in redis, appropriate if the path will resolve to multiple records.
- /// </summary>
- /// <param name="key">The key to pull from.</param>
- /// <param name="path">The path to pull.</param>
- /// <typeparam name="T">The type.</typeparam>
- /// <returns>An enumerable of the requested tyep</returns>
- ///
- Task<IEnumerable<T?>> GetEnumerableAsync<T>(RedisKey key, string path = "$");
-
/// <summary>
/// Gets the provided path from multiple keys
/// </summary>
diff --git a/src/NRedisStack/Json/JsonCommandBuilder.cs b/src/NRedisStack/Json/JsonCommandBuilder.cs
index 2baeec29..3f06ede9 100644
--- a/src/NRedisStack/Json/JsonCommandBuilder.cs
+++ b/src/NRedisStack/Json/JsonCommandBuilder.cs
@@ -19,12 +19,6 @@ public static SerializedCommand Resp(RedisKey key, string? path = null)
return new SerializedCommand(JSON.RESP, key, path);
}
- public static SerializedCommand Set(RedisKey key, RedisValue path, object obj, When when = When.Always)
- {
- string json = JsonSerializer.Serialize(obj);
- return Set(key, path, json, when);
- }
-
public static SerializedCommand Set(RedisKey key, RedisValue path, RedisValue json, When when = When.Always)
{
return when switch
@@ -137,8 +131,6 @@ public static SerializedCommand Del(RedisKey key, string? path = null)
return new SerializedCommand(JSON.DEL, args);
}
- public static SerializedCommand Forget(RedisKey key, string? path = null) => Del(key, path);
-
public static SerializedCommand Get(RedisKey key, RedisValue? indent = null, RedisValue? newLine = null, RedisValue? space = null, RedisValue? path = null)
{
List<object> args = new List<object>() { key };

diff --git a/src/NRedisStack/Search/FTCreateParams.cs b/src/NRedisStack/Search/FTCreateParams.cs
--- a/src/NRedisStack/Search/FTCreateParams.cs
+++ b/src/NRedisStack/Search/FTCreateParams.cs
@@ ... @@
- public FTCreateParams PayloadField(byte[] payloadAttribute)
+ public FTCreateParams PayloadField(string payloadAttribute)
{
- Array.Copy(this.payloadField, payloadAttribute, payloadAttribute.Length);
+ // TODO: check if this is correct
+ // Array.Copy(this.payloadField, payloadAttribute, payloadAttribute.Length);
+ this.payloadField = payloadAttribute;
return this;
}
+
/// <summary>
/// Forces RediSearch to encode indexes as if there were more than 32 text attributes.
/// </summary>
@@ -192,7 +195,7 @@ public FTCreateParams NoFreqs()
/// <summary>
/// Sets the index with a custom stopword list, to be ignored during indexing and search time.
/// </summary>
- public FTCreateParams topwords(params string[] stopwords)
+ public FTCreateParams Stopwords(params string[] stopwords)
{
this.stopwords = stopwords.ToList();
return this;
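[review note] topwords -> Stopwords and PayloadField(byte[]) -> PayloadField(string) also change the public surface. A short sketch of the corrected builder, mirroring the TestFTCreateParamsCommandBuilder test added below (argument values are illustrative):

    var parameters = FTCreateParams.CreateParams()
        .Stopwords("foo", "bar")   // previously misspelled as topwords(...)
        .PayloadField("txt");      // now takes the attribute name as a string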
diff --git a/src/NRedisStack/Search/SearchCommandBuilder.cs b/src/NRedisStack/Search/SearchCommandBuilder.cs
index 490785bc..057f60fe 100644
--- a/src/NRedisStack/Search/SearchCommandBuilder.cs
+++ b/src/NRedisStack/Search/SearchCommandBuilder.cs
@@ -18,7 +18,7 @@ public static SerializedCommand Aggregate(string index, AggregationRequest query
List<object> args = new List<object> { index };
foreach (var arg in query.GetArgs())
{
- args.Add(arg.ToString()!);
+ if (arg != null) args.Add(arg.ToString()!);
}
return new SerializedCommand(FT.AGGREGATE, args);
}
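[review note] The added guard makes Aggregate skip null aggregation arguments instead of throwing a NullReferenceException from arg.ToString(); null entries are now silently dropped from the serialized FT.AGGREGATE command.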
diff --git a/src/NRedisStack/Tdigest/DataTypes/TdigestInformation.cs b/src/NRedisStack/Tdigest/DataTypes/TdigestInformation.cs
index e57fa3b4..b564ccec 100644
--- a/src/NRedisStack/Tdigest/DataTypes/TdigestInformation.cs
+++ b/src/NRedisStack/Tdigest/DataTypes/TdigestInformation.cs
@@ -12,14 +12,14 @@ public class TdigestInformation
public long UnmergedNodes { get; private set; }
public double MergedWeight { get; private set; }
public double UnmergedWeight { get; private set; }
- public double SumWeights { get; private set; }
+ public double Observations { get; private set; }
public long TotalCompressions { get; private set; }
public long MemoryUsage { get; private set; }
internal TdigestInformation(long compression, long capacity, long mergedNodes,
long unmergedNodes, double mergedWeight,
- double unmergedWeight, double sumWeights, long totalCompressions, long memoryUsage)
+ double unmergedWeight, double observations, long totalCompressions, long memoryUsage)
{
Compression = compression;
@@ -28,7 +28,7 @@ internal TdigestInformation(long compression, long capacity, long mergedNodes,
UnmergedNodes = unmergedNodes;
MergedWeight = mergedWeight;
UnmergedWeight = unmergedWeight;
- SumWeights = sumWeights;
+ Observations = observations;
TotalCompressions = totalCompressions;
MemoryUsage = memoryUsage;
}
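[review note] SumWeights -> Observations matches the "Observations" field reported by TDIGEST.INFO in recent RedisBloom builds. A sketch of the renamed property, assuming the db.TDIGEST() entry point used by the tests below:

    var tdigest = db.TDIGEST();
    tdigest.Create("t-key", 100);                     // compression = 100
    TdigestInformation info = tdigest.Info("t-key");  // TDIGEST.INFO
    Console.WriteLine($"observations = {info.Observations}"); // formerly SumWeights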
diff --git a/tests/NRedisStack.Tests/CuckooFilter/CuckooTests.cs b/tests/NRedisStack.Tests/CuckooFilter/CuckooTests.cs
index 47bd95fb..e32be109 100644
--- a/tests/NRedisStack.Tests/CuckooFilter/CuckooTests.cs
+++ b/tests/NRedisStack.Tests/CuckooFilter/CuckooTests.cs
@@ -193,7 +193,14 @@ public void TestInfo()
var info = cf.Info(key);
Assert.NotNull(info);
+ Assert.Equal(info.BucketSize, (long)2);
+ Assert.Equal(info.ExpansionRate, (long)1);
+ Assert.Equal(info.MaxIterations, (long)20);
+ Assert.Equal(info.NumberOfBuckets, (long)512);
+ Assert.Equal(info.NumberOfFilters, (long)1);
+ Assert.Equal(info.NumberOfItemsDeleted, (long)0);
Assert.Equal(info.NumberOfItemsInserted, (long)1);
+ Assert.Equal(info.Size, (long)1080);
Assert.Throws<RedisServerException>(() => cf.Info("notExistKey"));
}
@@ -209,7 +216,16 @@ public async Task TestInfoAsync()
var info = await cf.InfoAsync(key);
Assert.NotNull(info);
+ Assert.Equal(info.BucketSize, (long)2);
+ Assert.Equal(info.ExpansionRate, (long)1);
+ Assert.Equal(info.MaxIterations, (long)20);
+ Assert.Equal(info.NumberOfBuckets, (long)512);
+ Assert.Equal(info.NumberOfFilters, (long)1);
+ Assert.Equal(info.NumberOfItemsDeleted, (long)0);
Assert.Equal(info.NumberOfItemsInserted, (long)1);
+ Assert.Equal(info.Size, (long)1080);
+
+
await Assert.ThrowsAsync<RedisServerException>(() => cf.InfoAsync("notExistKey"));
}
diff --git a/tests/NRedisStack.Tests/Graph/GraphTests.cs b/tests/NRedisStack.Tests/Graph/GraphTests.cs
index 0a402478..211d6702 100644
--- a/tests/NRedisStack.Tests/Graph/GraphTests.cs
+++ b/tests/NRedisStack.Tests/Graph/GraphTests.cs
@@ -46,6 +46,9 @@ public void TestCreateNode()
Assert.NotNull(stats.QueryInternalExecutionTime);
Assert.Equal(0, resultSet.Count);
+
+ // delete
+ graph.Delete("social");
}
[Fact]
@@ -993,6 +996,9 @@ public async Task TestCreateNodeAsync()
Assert.NotNull(stats.QueryInternalExecutionTime);
Assert.Equal(0, resultSet.Count);
+
+ // delete
+ await graph.DeleteAsync("social");
}
[Fact]
@@ -1927,6 +1933,37 @@ public async Task TestModulePrefixs1Async()
}
+ [Fact]
+ public void TestEquals()
+ {
+ IDatabase db = redisFixture.Redis.GetDatabase();
+ db.Execute("FLUSHALL");
+ var edge1 = new Edge();
+ var edge1Copy = new Edge();
+ var edge2 = new Edge();
+ var node1 = new Node();
+ var node1Copy = new Node();
+ var node2 = new Node();
+ edge1.Id = 1;
+ edge1Copy.Id = 1;
+ edge2.Id = 2;
+ node1.Id = 1;
+ node1Copy.Id = 1;
+ node2.Id = 2;
+ Assert.False(edge1.Equals(edge2));
+ Assert.False(node1.Equals(node2));
+ Assert.True(edge1.Equals(edge1Copy));
+ Assert.True(node1.Equals(node1Copy));
+
+ var path = new NRedisStack.Graph.DataTypes.Path(new List<Node>() { node1, node2 },
+ new List<Edge>() { edge1, edge2 });
+ var pathCopy = new NRedisStack.Graph.DataTypes.Path(new List<Node>() { node1, node2 },
+ new List<Edge>() { edge1, edge2 });
+ var path2 = new NRedisStack.Graph.DataTypes.Path(new List<Node>() { node1, node2 },
+ new List<Edge>() { edge1 });
+ Assert.True(path.Equals(pathCopy));
+ Assert.False(path.Equals(path2));
+ }
#endregion
}
\ No newline at end of file
diff --git a/tests/NRedisStack.Tests/Json/JsonTests.cs b/tests/NRedisStack.Tests/Json/JsonTests.cs
index 1396881c..a76f6cc4 100644
--- a/tests/NRedisStack.Tests/Json/JsonTests.cs
+++ b/tests/NRedisStack.Tests/Json/JsonTests.cs
@@ -540,6 +540,24 @@ public void ArrayPop()
Assert.Equal("\"Ally\"", result[0].ToString());
}
+ [Fact]
+ public async Task ArrayPopAsync()
+ {
+ IJsonCommands commands = new JsonCommands(redisFixture.Redis.GetDatabase());
+ var keys = CreateKeyNames(2);
+ var key = keys[0];
+ var simpleKey = keys[1];
+ await commands.SetAsync(key, "$", new { name = "Alice", nicknames = new[] { "Al", "Ali", "Ally" } });
+ await commands.SetAsync(simpleKey, "$", new[] { "Al", "Ali", "Ally" });
+
+ var result = await commands.ArrPopAsync(key, "$.nicknames", 1);
+ Assert.Equal("\"Ali\"", result[0].ToString());
+ result = await commands.ArrPopAsync(key, "$.nicknames");
+ Assert.Equal("\"Ally\"", result[0].ToString());
+ result = await commands.ArrPopAsync(simpleKey);
+ Assert.Equal("\"Ally\"", result[0].ToString());
+ }
+
[Fact]
public void ArrayTrim()
{
@@ -700,7 +718,7 @@ public async Task GetAsync()
var result = await commands.GetAsync<Person>(key);
Assert.Equal("Alice", result!.Name);
Assert.Equal(35, result.Age);
- var people = commands.GetEnumerable<Person>(complexKey, "$..a").ToArray();
+ var people = (commands.GetEnumerable<Person>(complexKey, "$..a")).ToArray();
Assert.Equal(2, people.Length);
Assert.Equal("Alice", people[0]!.Name);
Assert.Equal(35, people[0]!.Age);
@@ -988,4 +1006,26 @@ public async Task TestSetFromDirectoryAsync()
Assert.Equal(jsons[6], actual.ToString());
Directory.Delete("BaseDir", true);
}
+
+ [Fact]
+ public void TestJsonCommandBuilder()
+ {
+ var getBuild1 = JsonCommandBuilder.Get("key", "indent", "newline", "space", "path");
+ var getBuild2 = JsonCommandBuilder.Get("key", new string[] { "path1", "path2", "path3" }, "indent", "newline", "space");
+ var expectedArgs1 = new object[] { "key", "INDENT", "indent", "NEWLINE", "newline", "SPACE", "space", "path" };
+ var expectedArgs2 = new object[] { "key", "INDENT", "indent", "NEWLINE", "newline", "SPACE", "space", "path1", "path2", "path3" };
+
+
+ for (int i = 0; i < expectedArgs1.Length; i++)
+ {
+ Assert.Equal(expectedArgs1[i].ToString(), getBuild1.Args[i].ToString());
+ }
+ Assert.Equal("JSON.GET", getBuild1.Command);
+
+ for (int i = 0; i < expectedArgs2.Length; i++)
+ {
+ Assert.Equal(expectedArgs2[i].ToString(), getBuild2.Args[i].ToString());
+ }
+ Assert.Equal("JSON.GET", getBuild2.Command);
+ }
}
\ No newline at end of file
diff --git a/tests/NRedisStack.Tests/Search/SearchTests.cs b/tests/NRedisStack.Tests/Search/SearchTests.cs
index dcc66323..5be01a74 100644
--- a/tests/NRedisStack.Tests/Search/SearchTests.cs
+++ b/tests/NRedisStack.Tests/Search/SearchTests.cs
@@ -6,6 +6,7 @@
using NRedisStack.Search;
using static NRedisStack.Search.Schema;
using NRedisStack.Search.Aggregation;
+using NRedisStack.Literals.Enums;
namespace NRedisStack.Tests.Search;
@@ -169,7 +170,7 @@ public void TestAggregations()
AddDocument(db, new Document("data3").Set("name", "def").Set("count", 25));
AggregationRequest r = new AggregationRequest()
- .GroupBy("@name", Reducers.Sum("@count").As ("sum"))
+ .GroupBy("@name", Reducers.Sum("@count").As("sum"))
.SortBy(10, SortedField.Desc("@sum"));
// actual search
@@ -192,6 +193,106 @@ public void TestAggregations()
Assert.Equal(10, r2.GetLong("sum"));
}
+ [Fact]
+ public async Task TestAggregationsAsync()
+ {
+ IDatabase db = redisFixture.Redis.GetDatabase();
+ await db.ExecuteAsync("FLUSHALL");
+ var ft = db.FT();
+ Schema sc = new Schema();
+ sc.AddTextField("name", 1.0, true);
+ sc.AddNumericField("count", true);
+ await ft.CreateAsync(index, FTCreateParams.CreateParams(), sc);
+ // client.AddDocument(new Document("data1").Set("name", "abc").Set("count", 10));
+ // client.AddDocument(new Document("data2").Set("name", "def").Set("count", 5));
+ // client.AddDocument(new Document("data3").Set("name", "def").Set("count", 25));
+ AddDocument(db, new Document("data1").Set("name", "abc").Set("count", 10));
+ AddDocument(db, new Document("data2").Set("name", "def").Set("count", 5));
+ AddDocument(db, new Document("data3").Set("name", "def").Set("count", 25));
+
+ AggregationRequest r = new AggregationRequest()
+ .GroupBy("@name", Reducers.Sum("@count").As("sum"))
+ .SortBy(10, SortedField.Desc("@sum"));
+
+ // actual search
+ var res = await ft.AggregateAsync(index, r);
+ Assert.Equal(2, res.TotalResults);
+
+ Row r1 = res.GetRow(0);
+ Assert.NotNull(r1);
+ Assert.Equal("def", r1.GetString("name"));
+ Assert.Equal(30, r1.GetLong("sum"));
+ Assert.Equal(30, r1.GetDouble("sum"), 0);
+
+ Assert.Equal(0L, r1.GetLong("nosuchcol"));
+ Assert.Equal(0.0, r1.GetDouble("nosuchcol"), 0);
+ Assert.Null(r1.GetString("nosuchcol"));
+
+ Row r2 = res.GetRow(1);
+ Assert.NotNull(r2);
+ Assert.Equal("abc", r2.GetString("name"));
+ Assert.Equal(10, r2.GetLong("sum"));
+ }
+
+
+ [Fact]
+ public void TestAggregationsLoad()
+ {
+ IDatabase db = redisFixture.Redis.GetDatabase();
+ db.Execute("FLUSHALL");
+ var ft = db.FT();
+ var sc = new Schema().AddTextField("t1").AddTextField("t2");
+ ft.Create("idx", new FTCreateParams(), sc);
+
+ AddDocument(db, new Document("doc1").Set("t1", "hello").Set("t2", "world"));
+
+ // load t1
+ var req = new AggregationRequest("*").Load(new FieldName("t1"));
+ var res = ft.Aggregate("idx", req);
+ Assert.Equal(res[0]["t1"].ToString(), "hello");
+
+ // load t2
+ req = new AggregationRequest("*").Load(new FieldName("t2"));
+ res = ft.Aggregate("idx", req);
+ Assert.Equal(res[0]["t2"], "world");
+
+ // load all
+ req = new AggregationRequest("*").LoadAll();
+ res = ft.Aggregate("idx", req);
+ Assert.Equal(res[0]["t1"].ToString(), "hello");
+ Assert.Equal(res[0]["t2"], "world");
+ }
+
+ [Fact]
+ public async Task TestAggregationsLoadAsync()
+ {
+ IDatabase db = redisFixture.Redis.GetDatabase();
+ await db.ExecuteAsync("FLUSHALL");
+ var ft = db.FT();
+ var sc = new Schema().AddTextField("t1").AddTextField("t2");
+ await ft.CreateAsync("idx", new FTCreateParams(), sc);
+
+ AddDocument(db, new Document("doc1").Set("t1", "hello").Set("t2", "world"));
+
+ // load t1
+ var req = new AggregationRequest("*").Load(new FieldName("t1"));
+ var res = await ft.AggregateAsync("idx", req);
+ Assert.Equal(res[0]["t1"].ToString(), "hello");
+
+ // load t2
+ req = new AggregationRequest("*").Load(new FieldName("t2"));
+ res = await ft.AggregateAsync("idx", req);
+ Assert.Equal(res[0]["t2"], "world");
+
+ // load all
+ req = new AggregationRequest("*").LoadAll();
+ res = await ft.AggregateAsync("idx", req);
+ Assert.Equal(res[0]["t1"].ToString(), "hello");
+ Assert.Equal(res[0]["t2"], "world");
+ }
+
+
+
[Fact]
public void TestAggregationRequestParamsDialect()
{
@@ -398,12 +499,12 @@ public async Task TestCreateAsync()
var parameters = FTCreateParams.CreateParams().Filter("@age>16").Prefix("student:", "pupil:");
Assert.True(await ft.CreateAsync(index, parameters, schema));
db.HashSet("profesor:5555", new HashEntry[] { new("first", "Albert"), new("last", "Blue"), new("age", "55") });
- db.HashSet("student:1111", new HashEntry[] { new("first", "Joe"), new("last", "Dod"), new("age", "18") });
- db.HashSet("pupil:2222", new HashEntry[] { new("first", "Jen"), new("last", "Rod"), new("age", "14") });
- db.HashSet("student:3333", new HashEntry[] { new("first", "El"), new("last", "Mark"), new("age", "17") });
- db.HashSet("pupil:4444", new HashEntry[] { new("first", "Pat"), new("last", "Shu"), new("age", "21") });
- db.HashSet("student:5555", new HashEntry[] { new("first", "Joen"), new("last", "Ko"), new("age", "20") });
- db.HashSet("teacher:6666", new HashEntry[] { new("first", "Pat"), new("last", "Rod"), new("age", "20") });
+ db.HashSet("student:1111", new HashEntry[] { new("first", "Joe"), new("last", "Dod"), new("age", "18") });
+ db.HashSet("pupil:2222", new HashEntry[] { new("first", "Jen"), new("last", "Rod"), new("age", "14") });
+ db.HashSet("student:3333", new HashEntry[] { new("first", "El"), new("last", "Mark"), new("age", "17") });
+ db.HashSet("pupil:4444", new HashEntry[] { new("first", "Pat"), new("last", "Shu"), new("age", "21") });
+ db.HashSet("student:5555", new HashEntry[] { new("first", "Joen"), new("last", "Ko"), new("age", "20") });
+ db.HashSet("teacher:6666", new HashEntry[] { new("first", "Pat"), new("last", "Rod"), new("age", "20") });
var noFilters = ft.Search(index, new Query());
Assert.Equal(4, noFilters.TotalResults);
var res1 = ft.Search(index, new Query("@first:Jo*"));
@@ -426,7 +527,7 @@ public void CreateNoParams()
db.HashSet("student:1111", new HashEntry[] { new("first", "Joe"), new("last", "Dod"), new("age", 18) });
db.HashSet("student:3333", new HashEntry[] { new("first", "El"), new("last", "Mark"), new("age", 17) });
- db.HashSet("pupil:4444", new HashEntry[] { new("first", "Pat"), new("last", "Shu"), new("age", 21) });
+ db.HashSet("pupil:4444", new HashEntry[] { new("first", "Pat"), new("last", "Shu"), new("age", 21) });
db.HashSet("student:5555", new HashEntry[] { new("first", "Joen"), new("last", "Ko"), new("age", 20) });
SearchResult noFilters = ft.Search(index, new Query());
@@ -454,7 +555,7 @@ public async Task CreateNoParamsAsync()
db.HashSet("student:1111", new HashEntry[] { new("first", "Joe"), new("last", "Dod"), new("age", 18) });
db.HashSet("student:3333", new HashEntry[] { new("first", "El"), new("last", "Mark"), new("age", 17) });
- db.HashSet("pupil:4444", new HashEntry[] { new("first", "Pat"), new("last", "Shu"), new("age", 21) });
+ db.HashSet("pupil:4444", new HashEntry[] { new("first", "Pat"), new("last", "Shu"), new("age", 21) });
db.HashSet("student:5555", new HashEntry[] { new("first", "Joen"), new("last", "Ko"), new("age", 20) });
SearchResult noFilters = ft.Search(index, new Query());
@@ -481,13 +582,13 @@ public void CreateWithFieldNames()
Assert.True(ft.Create(index, FTCreateParams.CreateParams().Prefix("student:", "pupil:"), sc));
- db.HashSet("profesor:5555", new HashEntry[] { new("first", "Albert"), new("last", "Blue"), new("age", "55") });
- db.HashSet("student:1111", new HashEntry[] { new("first", "Joe"), new("last", "Dod"), new("age", "18") });
- db.HashSet("pupil:2222", new HashEntry[] { new("first", "Jen"), new("last", "Rod"), new("age", "14") });
- db.HashSet("student:3333", new HashEntry[] { new("first", "El"), new("last", "Mark"), new("age", "17") });
- db.HashSet("pupil:4444", new HashEntry[] { new("first", "Pat"), new("last", "Shu"), new("age", "21") });
- db.HashSet("student:5555", new HashEntry[] { new("first", "Joen"), new("last", "Ko"), new("age", "20") });
- db.HashSet("teacher:6666", new HashEntry[] { new("first", "Pat"), new("last", "Rod"), new("age", "20") });
+ db.HashSet("profesor:5555", new HashEntry[] { new("first", "Albert"), new("last", "Blue"), new("age", "55") });
+ db.HashSet("student:1111", new HashEntry[] { new("first", "Joe"), new("last", "Dod"), new("age", "18") });
+ db.HashSet("pupil:2222", new HashEntry[] { new("first", "Jen"), new("last", "Rod"), new("age", "14") });
+ db.HashSet("student:3333", new HashEntry[] { new("first", "El"), new("last", "Mark"), new("age", "17") });
+ db.HashSet("pupil:4444", new HashEntry[] { new("first", "Pat"), new("last", "Shu"), new("age", "21") });
+ db.HashSet("student:5555", new HashEntry[] { new("first", "Joen"), new("last", "Ko"), new("age", "20") });
+ db.HashSet("teacher:6666", new HashEntry[] { new("first", "Pat"), new("last", "Rod"), new("age", "20") });
SearchResult noFilters = ft.Search(index, new Query());
Assert.Equal(5, noFilters.TotalResults);
@@ -513,13 +614,13 @@ public async Task CreateWithFieldNamesAsync()
Assert.True(await ft.CreateAsync(index, FTCreateParams.CreateParams().Prefix("student:", "pupil:"), sc));
- db.HashSet("profesor:5555", new HashEntry[] { new("first", "Albert"), new("last", "Blue"), new("age", "55") });
- db.HashSet("student:1111", new HashEntry[] { new("first", "Joe"), new("last", "Dod"), new("age", "18") });
- db.HashSet("pupil:2222", new HashEntry[] { new("first", "Jen"), new("last", "Rod"), new("age", "14") });
- db.HashSet("student:3333", new HashEntry[] { new("first", "El"), new("last", "Mark"), new("age", "17") });
- db.HashSet("pupil:4444", new HashEntry[] { new("first", "Pat"), new("last", "Shu"), new("age", "21") });
- db.HashSet("student:5555", new HashEntry[] { new("first", "Joen"), new("last", "Ko"), new("age", "20") });
- db.HashSet("teacher:6666", new HashEntry[] { new("first", "Pat"), new("last", "Rod"), new("age", "20") });
+ db.HashSet("profesor:5555", new HashEntry[] { new("first", "Albert"), new("last", "Blue"), new("age", "55") });
+ db.HashSet("student:1111", new HashEntry[] { new("first", "Joe"), new("last", "Dod"), new("age", "18") });
+ db.HashSet("pupil:2222", new HashEntry[] { new("first", "Jen"), new("last", "Rod"), new("age", "14") });
+ db.HashSet("student:3333", new HashEntry[] { new("first", "El"), new("last", "Mark"), new("age", "17") });
+ db.HashSet("pupil:4444", new HashEntry[] { new("first", "Pat"), new("last", "Shu"), new("age", "21") });
+ db.HashSet("student:5555", new HashEntry[] { new("first", "Joen"), new("last", "Ko"), new("age", "20") });
+ db.HashSet("teacher:6666", new HashEntry[] { new("first", "Pat"), new("last", "Rod"), new("age", "20") });
SearchResult noFilters = await ft.SearchAsync(index, new Query());
Assert.Equal(5, noFilters.TotalResults);
@@ -568,6 +669,28 @@ public void AlterAdd()
Assert.Equal("title", (info.Attributes[0]["identifier"]).ToString());
Assert.Equal("TAG", (info.Attributes[1]["type"]).ToString());
Assert.Equal("name", (info.Attributes[2]["attribute"]).ToString());
+ Assert.Equal(100, info.NumDocs);
+ Assert.Equal("300", info.MaxDocId);
+ Assert.Equal(102, info.NumTerms);
+ Assert.True(info.NumRecords == 800 || info.NumRecords == 802); // TODO: should this be 800?
+ Assert.True(info.InvertedSzMebibytes < 1); // TODO: check this line and all the <1 lines
+ Assert.Equal(0, info.VectorIndexSzMebibytes);
+ Assert.Equal(208, info.TotalInvertedIndexBlocks);
+ Assert.True(info.OffsetVectorsSzMebibytes < 1);
+ Assert.True(info.DocTableSizeMebibytes < 1);
+ Assert.Equal(0, info.SortableValueSizeMebibytes);
+ Assert.True(info.KeyTableSizeMebibytes < 1);
+ Assert.Equal(8, (int)info.RecordsPerDocAvg);
+ Assert.True(info.BytesPerRecordAvg > 5);
+ Assert.True(info.OffsetsPerTermAvg > 0.8);
+ Assert.Equal(8, info.OffsetBitsPerRecordAvg);
+ Assert.Equal(0, info.HashIndexingFailures);
+ Assert.True(info.TotalIndexingTime > 0);
+ Assert.Equal(0, info.Indexing);
+ Assert.Equal(1, info.PercentIndexed);
+ Assert.Equal(4, info.NumberOfUses);
+ Assert.Equal(7, info.GcStats.Count);
+ Assert.Equal(4, info.CursorStats.Count);
}
[Fact]
@@ -604,6 +727,28 @@ public async Task AlterAddAsync()
Assert.Equal("title", (info.Attributes[0]["identifier"]).ToString());
Assert.Equal("TAG", (info.Attributes[1]["type"]).ToString());
Assert.Equal("name", (info.Attributes[2]["attribute"]).ToString());
+ Assert.Equal(100, info.NumDocs);
+ Assert.Equal("300", info.MaxDocId);
+ Assert.Equal(102, info.NumTerms);
+ Assert.True(info.NumRecords == 800 || info.NumRecords == 802); // TODO: should this be 800?
+ Assert.True(info.InvertedSzMebibytes < 1); // TODO: check this line and all the <1 lines
+ Assert.Equal(0, info.VectorIndexSzMebibytes);
+ Assert.Equal(208, info.TotalInvertedIndexBlocks);
+ Assert.True(info.OffsetVectorsSzMebibytes < 1);
+ Assert.True(info.DocTableSizeMebibytes < 1);
+ Assert.Equal(0, info.SortableValueSizeMebibytes);
+ Assert.True(info.KeyTableSizeMebibytes < 1);
+ Assert.Equal(8, (int)info.RecordsPerDocAvg);
+ Assert.True(info.BytesPerRecordAvg > 5);
+ Assert.True(info.OffsetsPerTermAvg > 0.8);
+ Assert.Equal(8, info.OffsetBitsPerRecordAvg);
+ Assert.Equal(0, info.HashIndexingFailures);
+ Assert.True(info.TotalIndexingTime > 0);
+ Assert.Equal(0, info.Indexing);
+ Assert.Equal(1, info.PercentIndexed);
+ Assert.Equal(4, info.NumberOfUses);
+ Assert.Equal(7, info.GcStats.Count);
+ Assert.Equal(4, info.CursorStats.Count);
}
[Fact]
@@ -831,9 +976,9 @@ public void TestDictionary()
var dumResult = ft.DictDump("dict");
int i = 0;
- Assert.Equal("bar",dumResult[i++].ToString());
- Assert.Equal("foo",dumResult[i++].ToString());
- Assert.Equal("hello world",dumResult[i].ToString());
+ Assert.Equal("bar", dumResult[i++].ToString());
+ Assert.Equal("foo", dumResult[i++].ToString());
+ Assert.Equal("hello world", dumResult[i].ToString());
Assert.Equal(3L, ft.DictDel("dict", "foo", "bar", "hello world"));
Assert.Equal(ft.DictDump("dict").Length, 0);
@@ -926,7 +1071,7 @@ public void dropIndexDD()
Assert.True(ft.DropIndex(index, true));
- RedisResult[] keys = (RedisResult[]) db.Execute("KEYS", "*");
+ RedisResult[] keys = (RedisResult[])db.Execute("KEYS", "*");
Assert.True(keys.Length == 0);
Assert.Equal("0", db.Execute("DBSIZE").ToString());
}
@@ -952,7 +1097,7 @@ public async Task dropIndexDDAsync()
Assert.True(await ft.DropIndexAsync(index, true));
- RedisResult[] keys = (RedisResult[]) db.Execute("KEYS", "*");
+ RedisResult[] keys = (RedisResult[])db.Execute("KEYS", "*");
Assert.True(keys.Length == 0);
Assert.Equal("0", db.Execute("DBSIZE").ToString());
}
@@ -968,9 +1113,9 @@ public async Task TestDictionaryAsync()
var dumResult = await ft.DictDumpAsync("dict");
int i = 0;
- Assert.Equal("bar",dumResult[i++].ToString());
- Assert.Equal("foo",dumResult[i++].ToString());
- Assert.Equal("hello world",dumResult[i].ToString());
+ Assert.Equal("bar", dumResult[i++].ToString());
+ Assert.Equal("foo", dumResult[i++].ToString());
+ Assert.Equal("hello world", dumResult[i].ToString());
Assert.Equal(3L, await ft.DictDelAsync("dict", "foo", "bar", "hello world"));
Assert.Equal((await ft.DictDumpAsync("dict")).Length, 0);
@@ -1190,25 +1335,249 @@ public async Task TestGetTagFieldWithNonDefaultSeparatorSyncAsync()
Assert.Equal(SyncRes[i++].ToString(), "yellow");
}
+
[Fact]
- public void TestModulePrefixs1()
+ public void TestFTCreateParamsCommandBuilder()
{
- {
- var conn = ConnectionMultiplexer.Connect("localhost");
- IDatabase db = conn.GetDatabase();
+ IDatabase db = redisFixture.Redis.GetDatabase();
+ db.Execute("FLUSHALL");
+ var ft = db.FT();
+ Schema sc = new Schema()
+ .AddTextField("title", 1.0)
+ .AddTagField("category", separator: ";");
- var ft = db.FT();
- // ...
- conn.Dispose();
+ var ftCreateParams = FTCreateParams.CreateParams().On(IndexDataType.Json)
+ .AddPrefix("doc:")
+ .Filter("@category:{red}")
+ .Language("English")
+ .LanguageField("play")
+ .Score(1.0)
+ .ScoreField("chapter")
+ .PayloadField("txt")
+ .MaxTextFields()
+ .NoOffsets()
+ .Temporary(10)
+ .NoHL()
+ .NoFields()
+ .NoFreqs()
+ .Stopwords(new[] { "foo", "bar" })
+ .SkipInitialScan();
+
+ var builtCommand = SearchCommandBuilder.Create(index, ftCreateParams, sc);
+ var expectedArgs = new object[] { "TEST_INDEX", "PREFIX", 1,
+ "doc:", "FILTER", "@category:{red}", "LANGUAGE",
+ "English", "LANGUAGE_FIELD", "play", "SCORE", 1,
+ "SCORE_FIELD", "chapter", "PAYLOAD_FIELD", "txt",
+ "MAXTEXTFIELDS", "NOOFFSETS", "TEMPORARY", 10,
+ "NOHL", "NOFIELDS", "NOFREQS", "STOPWORDS", 2,
+ "foo", "bar", "SKIPINITIALSCAN", "SCHEMA", "title",
+ "TEXT", "category", "TAG", "SEPARATOR", ";" };
+
+ for (int i = 0; i < expectedArgs.Length; i++)
+ {
+ Assert.Equal(expectedArgs[i].ToString(), builtCommand.Args[i].ToString());
}
+ Assert.Equal("FT.CREATE", builedCommand.Command.ToString());
+ }
+
+ [Fact]
+ public void TestFTCreateParamsCommandBuilderNoStopwords()
+ {
+ IDatabase db = redisFixture.Redis.GetDatabase();
+ db.Execute("FLUSHALL");
+ var ft = db.FT();
+ Schema sc = new Schema()
+ .AddTextField("title", 1.0)
+ .AddTagField("category", separator: ";");
+
+ var ftCreateParams = FTCreateParams.CreateParams().NoStopwords();
+
+ var expectedArgs = new object[] { "TEST_INDEX", "STOPWORDS", 0, "SCHEMA", "title",
+ "TEXT", "category", "TAG", "SEPARATOR", ";" };
+ var builtCommand = SearchCommandBuilder.Create(index, ftCreateParams, sc);
+
+ for (int i = 0; i < expectedArgs.Length; i++)
{
- var conn = ConnectionMultiplexer.Connect("localhost");
- IDatabase db = conn.GetDatabase();
+ Assert.Equal(expectedArgs[i].ToString(), builtCommand.Args[i].ToString());
+ }
+ Assert.Equal("FT.CREATE", builedCommand.Command.ToString());
+ }
+
+ [Fact]
+ public void TestFilters()
+ {
+ IDatabase db = redisFixture.Redis.GetDatabase();
+ db.Execute("FLUSHALL");
+ var ft = db.FT();
+ // Create the index with the same fields as in the original test
+ var sc = new Schema()
+ .AddTextField("txt")
+ .AddNumericField("num")
+ .AddGeoField("loc");
+ ft.Create("idx", new FTCreateParams(), sc);
+
+ // Add the two documents to the index
+ AddDocument(db, "doc1", new Dictionary {
+ { "txt", "foo bar" },
+ { "num", "3.141" },
+ { "loc", "-0.441,51.458" }
+ });
+ AddDocument(db, "doc2", new Dictionary {
+ { "txt", "foo baz" },
+ { "num", "2" },
+ { "loc", "-0.1,51.2" }
+ });
+ // WaitForIndex(client, ft.IndexName ?? "idx");
+
+ // Test numerical filter
+ var q1 = new Query("foo").AddFilter(new Query.NumericFilter("num", 0, 2));
+ var q2 = new Query("foo").AddFilter(new Query.NumericFilter("num", 2, true, double.MaxValue, false));
+ q1.NoContent = q2.NoContent = true;
+ var res1 = ft.Search("idx", q1);
+ var res2 = ft.Search("idx", q2);
- var ft = db.FT();
- // ...
- conn.Dispose();
+ Assert.Equal(1, res1.TotalResults);
+ Assert.Equal(1, res2.TotalResults);
+ Assert.Equal("doc2", res1.Documents[0].Id);
+ Assert.Equal("doc1", res2.Documents[0].Id);
+
+ // Test geo filter
+ q1 = new Query("foo").AddFilter(new Query.GeoFilter("loc", -0.44, 51.45, 10, Query.GeoFilter.KILOMETERS));
+ q2 = new Query("foo").AddFilter(new Query.GeoFilter("loc", -0.44, 51.45, 100, Query.GeoFilter.KILOMETERS));
+ q1.NoContent = q2.NoContent = true;
+ res1 = ft.Search("idx", q1);
+ res2 = ft.Search("idx", q2);
+
+ Assert.Equal(1, res1.TotalResults);
+ Assert.Equal(2, res2.TotalResults);
+ Assert.Equal("doc1", res1.Documents[0].Id);
+ }
+
+ [Fact]
+ public async Task TestFiltersAsync()
+ {
+ IDatabase db = redisFixture.Redis.GetDatabase();
+ db.Execute("FLUSHALL");
+ var ft = db.FT();
+ // Create the index with the same fields as in the original test
+ var sc = new Schema()
+ .AddTextField("txt")
+ .AddNumericField("num")
+ .AddGeoField("loc");
+ await ft.CreateAsync("idx", new FTCreateParams(), sc);
+
+ // Add the two documents to the index
+ AddDocument(db, "doc1", new Dictionary {
+ { "txt", "foo bar" },
+ { "num", "3.141" },
+ { "loc", "-0.441,51.458" }
+ });
+ AddDocument(db, "doc2", new Dictionary {
+ { "txt", "foo baz" },
+ { "num", "2" },
+ { "loc", "-0.1,51.2" }
+ });
+ // WaitForIndex(client, ft.IndexName ?? "idx");
+
+ // Test numerical filter
+ var q1 = new Query("foo").AddFilter(new Query.NumericFilter("num", 0, 2));
+ var q2 = new Query("foo").AddFilter(new Query.NumericFilter("num", 2, true, double.MaxValue, false));
+ q1.NoContent = q2.NoContent = true;
+ var res1 = await ft.SearchAsync("idx", q1);
+ var res2 = await ft.SearchAsync("idx", q2);
+
+ Assert.Equal(1, res1.TotalResults);
+ Assert.Equal(1, res2.TotalResults);
+ Assert.Equal("doc2", res1.Documents[0].Id);
+ Assert.Equal("doc1", res2.Documents[0].Id);
+
+ // Test geo filter
+ q1 = new Query("foo").AddFilter(new Query.GeoFilter("loc", -0.44, 51.45, 10, Query.GeoFilter.KILOMETERS));
+ q2 = new Query("foo").AddFilter(new Query.GeoFilter("loc", -0.44, 51.45, 100, Query.GeoFilter.KILOMETERS));
+ q1.NoContent = q2.NoContent = true;
+ res1 = await ft.SearchAsync("idx", q1);
+ res2 = await ft.SearchAsync("idx", q2);
+
+ Assert.Equal(1, res1.TotalResults);
+ Assert.Equal(2, res2.TotalResults);
+ Assert.Equal("doc1", res1.Documents[0].Id);
+ }
+
+ [Fact]
+ public void TestFieldsCommandBuilder()
+ {
+ IDatabase db = redisFixture.Redis.GetDatabase();
+ db.Execute("FLUSHALL");
+ var ft = db.FT();
+ // Create the index with the same fields as in the original test
+ var sc = new Schema()
+ .AddTextField("txt", 1.0, true, true, true, "dm:en", true, true)
+ .AddNumericField("num", true, true)
+ .AddGeoField("loc", true, true)
+ .AddTagField("tag",true,true, true, ";", true, true)
+ .AddVectorField("vec", VectorField.VectorAlgo.FLAT, null);
+ var buildCommand = SearchCommandBuilder.Create("idx", new FTCreateParams(), sc);
+ var expectedArgs = new List<object> {
+ "idx",
+ "SCHEMA",
+ "txt",
+ "TEXT",
+ "SORTABLE",
+ "UNF",
+ "NOSTEM",
+ "NOINDEX",
+ "PHONETIC",
+ "dm:en",
+ "WITHSUFFIXTRIE",
+ "num",
+ "NUMERIC",
+ "SORTABLE",
+ "NOINDEX",
+ "loc",
+ "GEO",
+ "SORTABLE",
+ "NOINDEX",
+ "tag",
+ "TAG",
+ "SORTABLE",
+ "UNF",
+ "NOINDEX",
+ "WITHSUFFIXTRIE",
+ "SEPARATOR",
+ ";",
+ "CASESENSITIVE",
+ "vec",
+ "VECTOR",
+ "FLAT"
+ };
+
+ Assert.Equal("FT.CREATE", buildCommand.Command);
+ for (int i = 0; i < expectedArgs.Count; i++)
+ {
+ Assert.Equal(expectedArgs[i], buildCommand.Args[i]);
}
}
-}
\ No newline at end of file
+
+ [Fact]
+ public void TestModulePrefixs1()
+ {
+ {
+ var conn = ConnectionMultiplexer.Connect("localhost");
+ IDatabase db = conn.GetDatabase();
+
+ var ft = db.FT();
+ // ...
+ conn.Dispose();
+ }
+
+ {
+ var conn = ConnectionMultiplexer.Connect("localhost");
+ IDatabase db = conn.GetDatabase();
+
+ var ft = db.FT();
+ // ...
+ conn.Dispose();
+ }
+ }
+ }
\ No newline at end of file
diff --git a/tests/NRedisStack.Tests/Tdigest/TdigestTests.cs b/tests/NRedisStack.Tests/Tdigest/TdigestTests.cs
index 4865017e..eb8d43a1 100644
--- a/tests/NRedisStack.Tests/Tdigest/TdigestTests.cs
+++ b/tests/NRedisStack.Tests/Tdigest/TdigestTests.cs
@@ -40,7 +40,15 @@ public void TestCreateSimple()
Assert.True(tdigest.Create(key));
var info = tdigest.Info(key);
+ Assert.Equal(610, info.Capacity);
Assert.Equal(100, info.Compression);
+ Assert.Equal(9768, info.MemoryUsage);
+ Assert.Equal(0, info.MergedNodes);
+ Assert.Equal(0, info.MergedWeight);
+ Assert.Equal(0, info.Observations);
+ Assert.Equal(0, info.TotalCompressions);
+ Assert.Equal(0, info.UnmergedWeight);
+ Assert.Equal(0, info.UnmergedNodes);
}
[Fact]
@@ -53,7 +61,15 @@ public async Task TestCreateSimpleAsync()
Assert.True(await tdigest.CreateAsync(key));
var info = await tdigest.InfoAsync(key);
+ Assert.Equal(610, info.Capacity);
Assert.Equal(100, info.Compression);
+ Assert.Equal(9768, info.MemoryUsage);
+ Assert.Equal(0, info.MergedNodes);
+ Assert.Equal(0, info.MergedWeight);
+ Assert.Equal(0, info.Observations);
+ Assert.Equal(0, info.TotalCompressions);
+ Assert.Equal(0, info.UnmergedWeight);
+ Assert.Equal(0, info.UnmergedNodes);
}
[Fact]
diff --git a/tests/NRedisStack.Tests/TopK/TopKTests.cs b/tests/NRedisStack.Tests/TopK/TopKTests.cs
index 0594e967..653e4175 100644
--- a/tests/NRedisStack.Tests/TopK/TopKTests.cs
+++ b/tests/NRedisStack.Tests/TopK/TopKTests.cs
@@ -21,28 +21,35 @@ public void CreateTopKFilter()
{
IDatabase db = redisFixture.Redis.GetDatabase();
db.Execute("FLUSHALL");
+ var topk = db.TOPK();
- db.TOPK().Reserve("aaa", 30, 2000, 7, 0.925);
+ topk.Reserve("aaa", 30, 2000, 7, 0.925);
- var res = db.TOPK().Add("aaa", "bb", "cc");
+ var res = topk.Add("aaa", "bb", "cc");
Assert.True(res[0].IsNull && res[1].IsNull);
- Assert.Equal(db.TOPK().Query("aaa", "bb", "gg", "cc"), new bool[] { true, false, true });
+ Assert.Equal(topk.Query("aaa", "bb", "gg", "cc"), new bool[] { true, false, true });
- Assert.Equal(db.TOPK().Count("aaa", "bb", "gg", "cc"), new long[] { 1, 0, 1 });
+ Assert.Equal(topk.Count("aaa", "bb", "gg", "cc"), new long[] { 1, 0, 1 });
- var res2 = db.TOPK().List("aaa");
+ var res2 = topk.List("aaa");
Assert.Equal(res2[0].ToString(), "bb");
Assert.Equal(res2[1].ToString(), "cc");
var tuple = new Tuple<RedisValue, long>("ff", 10);
- var del = db.TOPK().IncrBy("aaa", tuple);
- Assert.True(db.TOPK().IncrBy("aaa", tuple)[0].IsNull);
+ var del = topk.IncrBy("aaa", tuple);
+ Assert.True(topk.IncrBy("aaa", tuple)[0].IsNull);
- res2 = db.TOPK().List("aaa");
+ res2 = topk.List("aaa");
Assert.Equal(res2[0].ToString(), "ff");
Assert.Equal(res2[1].ToString(), "bb");
Assert.Equal(res2[2].ToString(), "cc");
+
+ var info = topk.Info("aaa");
+ Assert.Equal(info.Decay, 0.925);
+ Assert.Equal(info.Depth, 7);
+ Assert.Equal(info.K, 30);
+ Assert.Equal(info.Width, 2000);
}
[Fact]
@@ -50,27 +57,34 @@ public async Task CreateTopKFilterAsync()
{
IDatabase db = redisFixture.Redis.GetDatabase();
db.Execute("FLUSHALL");
+ var topk = db.TOPK();
- db.TOPK().ReserveAsync("aaa", 30, 2000, 7, 0.925);
+ await topk.ReserveAsync("aaa", 30, 2000, 7, 0.925);
- var res = await db.TOPK().AddAsync("aaa", "bb", "cc");
+ var res = await topk.AddAsync("aaa", "bb", "cc");
Assert.True(res[0].IsNull && res[1].IsNull);
- Assert.Equal(await db.TOPK().QueryAsync("aaa", "bb", "gg", "cc"), new bool[] { true, false, true });
+ Assert.Equal(await topk.QueryAsync("aaa", "bb", "gg", "cc"), new bool[] { true, false, true });
- Assert.Equal(await db.TOPK().CountAsync("aaa", "bb", "gg", "cc"), new long[] { 1, 0, 1 });
+ Assert.Equal(await topk.CountAsync("aaa", "bb", "gg", "cc"), new long[] { 1, 0, 1 });
- var res2 = await db.TOPK().ListAsync("aaa");
+ var res2 = await topk.ListAsync("aaa");
Assert.Equal(res2[0].ToString(), "bb");
Assert.Equal(res2[1].ToString(), "cc");
var tuple = new Tuple<RedisValue, long>("ff", 10);
- Assert.True((await db.TOPK().IncrByAsync("aaa", tuple))[0].IsNull);
+ Assert.True((await topk.IncrByAsync("aaa", tuple))[0].IsNull);
- res2 = await db.TOPK().ListAsync("aaa");
+ res2 = await topk.ListAsync("aaa");
Assert.Equal(res2[0].ToString(), "ff");
Assert.Equal(res2[1].ToString(), "bb");
Assert.Equal(res2[2].ToString(), "cc");
+
+ var info = await topk.InfoAsync("aaa");
+ Assert.Equal(info.Decay, 0.925);
+ Assert.Equal(info.Depth, 7);
+ Assert.Equal(info.K, 30);
+ Assert.Equal(info.Width, 2000);
}
[Fact]