From ce12a54ef5790e1cdb568206825136a6b0369ea4 Mon Sep 17 00:00:00 2001 From: Avi Avni Date: Wed, 31 Aug 2022 12:30:46 +0300 Subject: [PATCH 1/7] review --- src/NRedisStack/Auxiliary.cs | 2 +- src/NRedisStack/Bloom/BloomCommands.cs | 6 +- src/NRedisStack/Bloom/Literals/CommandArgs.cs | 12 +- src/NRedisStack/ModulPrefixes.cs | 112 ++---------------- src/NRedisStack/ResponseParser.cs | 5 +- tests/NRedisStack.Tests/Bloom/BloomTests.cs | 7 +- tests/NRedisStack.Tests/RedisFixture.cs | 2 +- tests/NRedisStack.Tests/TopK/TopKTests.cs | 35 ++++++ 8 files changed, 61 insertions(+), 120 deletions(-) diff --git a/src/NRedisStack/Auxiliary.cs b/src/NRedisStack/Auxiliary.cs index 365e97e2..474489ce 100644 --- a/src/NRedisStack/Auxiliary.cs +++ b/src/NRedisStack/Auxiliary.cs @@ -6,7 +6,7 @@ public static class Auxiliary { public static List MergeArgs(RedisKey key, params RedisValue[] items) { - var args = new List { key }; + var args = new List(items.Length + 1) { key }; foreach (var item in items) args.Add(item); return args; } diff --git a/src/NRedisStack/Bloom/BloomCommands.cs b/src/NRedisStack/Bloom/BloomCommands.cs index aa63ab9d..2cbec6cd 100644 --- a/src/NRedisStack/Bloom/BloomCommands.cs +++ b/src/NRedisStack/Bloom/BloomCommands.cs @@ -72,8 +72,7 @@ public async Task ExistsAsync(RedisKey key, RedisValue item) /// public BloomInformation Info(RedisKey key) { - var info = _db.Execute(BF.INFO, key); - return ResponseParser.ToBloomInfo(info); + return _db.Execute(BF.INFO, key).ToBloomInfo(); } /// @@ -85,7 +84,7 @@ public BloomInformation Info(RedisKey key) public async Task InfoAsync(RedisKey key) { var info = await _db.ExecuteAsync(BF.INFO, key); - return ResponseParser.ToBloomInfo(info); + return info.ToBloomInfo(); } /// @@ -108,6 +107,7 @@ public bool[] Insert(RedisKey key, RedisValue[] items, int? capacity = null, double? error = null, int? 
expansion = null, bool nocreate = false, bool nonscaling = false) { + // TODO: extract common logic to a new method if (items.Length < 1) throw new ArgumentOutOfRangeException(nameof(items)); diff --git a/src/NRedisStack/Bloom/Literals/CommandArgs.cs b/src/NRedisStack/Bloom/Literals/CommandArgs.cs index b86fd67d..9825cec3 100644 --- a/src/NRedisStack/Bloom/Literals/CommandArgs.cs +++ b/src/NRedisStack/Bloom/Literals/CommandArgs.cs @@ -2,11 +2,11 @@ namespace NRedisStack.Literals { internal class BloomArgs { - public static string CAPACITY => "CAPACITY"; - public static string ERROR => "ERROR"; - public static string EXPANSION => "EXPANSION"; - public static string NOCREATE => "NOCREATE"; - public static string NONSCALING => "NONSCALING"; - public static string ITEMS => "ITEMS"; + public const string CAPACITY = "CAPACITY"; + public const string ERROR = "ERROR"; + public const string EXPANSION = "EXPANSION"; + public const string NOCREATE = "NOCREATE"; + public const string NONSCALING = "NONSCALING"; + public const string ITEMS = "ITEMS"; } } \ No newline at end of file diff --git a/src/NRedisStack/ModulPrefixes.cs b/src/NRedisStack/ModulPrefixes.cs index bf76c002..fd748a00 100644 --- a/src/NRedisStack/ModulPrefixes.cs +++ b/src/NRedisStack/ModulPrefixes.cs @@ -4,116 +4,20 @@ namespace NRedisStack.RedisStackCommands { public static class ModulPrefixes { - static bool bloomCreated = false; - static BloomCommands bloomCommands; + static public BloomCommands BF(this IDatabase db) => new BloomCommands(db); - static bool cuckooCreated = false; - static CuckooCommands cuckooCommands; + static public CuckooCommands CF(this IDatabase db) => new CuckooCommands(db); - static bool cmsCreated = false; - static CmsCommands cmsCommands; + static public CmsCommands CMS(this IDatabase db) => new CmsCommands(db); - static bool topKCreated = false; - static TopKCommands topKCommands; + static public TopKCommands TOPK(this IDatabase db) => new TopKCommands(db); - static bool tdigestCreated = false; - static TdigestCommands tdigestCommands; + static public TdigestCommands TDIGEST(this IDatabase db) => new TdigestCommands(db); - static bool searchCreated = false; - static SearchCommands searchCommands; + static public SearchCommands FT(this IDatabase db) => new SearchCommands(db); - static bool jsonCreated = false; - static JsonCommands jsonCommands; + static public JsonCommands JSON(this IDatabase db) => new JsonCommands(db); - static bool timeSeriesCreated = false; - static TimeSeriesCommands timeSeriesCommands; - - static public BloomCommands BF(this IDatabase db) - { - if (!bloomCreated) - { - bloomCommands = new BloomCommands(db); - bloomCreated = true; - } - - return bloomCommands; - } - - static public CuckooCommands CF(this IDatabase db) - { - if (!cuckooCreated) - { - cuckooCommands = new CuckooCommands(db); - cuckooCreated = true; - } - - return cuckooCommands; - } - - static public CmsCommands CMS(this IDatabase db) - { - if (!cmsCreated) - { - cmsCommands = new CmsCommands(db); - cmsCreated = true; - } - - return cmsCommands; - } - - static public TopKCommands TOPK(this IDatabase db) - { - if (!topKCreated) - { - topKCommands = new TopKCommands(db); - topKCreated = true; - } - - return topKCommands; - } - - static public TdigestCommands TDIGEST(this IDatabase db) - { - if (!tdigestCreated) - { - tdigestCommands = new TdigestCommands(db); - tdigestCreated = true; - } - - return tdigestCommands; - } - - static public SearchCommands FT(this IDatabase db) - { - if (!searchCreated) - { - searchCommands = 
new SearchCommands(db); - searchCreated = true; - } - - return searchCommands; - } - - static public JsonCommands JSON(this IDatabase db) - { - if (!jsonCreated) - { - jsonCommands = new JsonCommands(db); - jsonCreated = true; - } - - return jsonCommands; - } - - static public TimeSeriesCommands TS(this IDatabase db) - { - if (!jsonCreated) - { - timeSeriesCommands = new TimeSeriesCommands(db); - timeSeriesCreated = true; - } - - return timeSeriesCommands; - } + static public TimeSeriesCommands TS(this IDatabase db) => new TimeSeriesCommands(db); } } \ No newline at end of file diff --git a/src/NRedisStack/ResponseParser.cs b/src/NRedisStack/ResponseParser.cs index 02b0da44..2c138b08 100644 --- a/src/NRedisStack/ResponseParser.cs +++ b/src/NRedisStack/ResponseParser.cs @@ -12,7 +12,7 @@ namespace NRedisStack { - public static class ResponseParser + internal static class ResponseParser { public static bool OKtoBoolean(RedisResult result) { @@ -222,7 +222,7 @@ public static IReadOnlyList ToRuleArray(RedisResult result) return DuplicatePolicyExtensions.AsPolicy(policyStatus.ToUpper()); } - public static BloomInformation ToBloomInfo(RedisResult result) //TODO: Think about a different implementation, because if the output of BF.INFO changes or even just the names of the labels then the parsing will not work + public static BloomInformation ToBloomInfo(this RedisResult result) //TODO: Think about a different implementation, because if the output of BF.INFO changes or even just the names of the labels then the parsing will not work { long capacity, size, numberOfFilters, numberOfItemsInserted, expansionRate; capacity = size = numberOfFilters = numberOfItemsInserted = expansionRate = -1; @@ -231,6 +231,7 @@ public static IReadOnlyList ToRuleArray(RedisResult result) for (int i = 0; i < redisResults.Length; ++i) { string? 
label = redisResults[i++].ToString(); + // string.Compare(label, "Capacity", true) switch (label) { case "Capacity": diff --git a/tests/NRedisStack.Tests/Bloom/BloomTests.cs b/tests/NRedisStack.Tests/Bloom/BloomTests.cs index f9c6ce6f..5379d146 100644 --- a/tests/NRedisStack.Tests/Bloom/BloomTests.cs +++ b/tests/NRedisStack.Tests/Bloom/BloomTests.cs @@ -34,14 +34,15 @@ public void TestReserveBasic() public async Task TestReserveBasicAsync() { IDatabase db = redisFixture.Redis.GetDatabase(); + var bf = db.BF(); db.Execute("FLUSHALL"); await db.BF().ReserveAsync(key, 0.001, 100L); - Assert.True(await (db.BF().AddAsync(key, "item1"))); - Assert.True(await db.BF().ExistsAsync(key, "item1")); - Assert.False(await db.BF().ExistsAsync(key, "item2")); + Assert.True(await (bf.AddAsync(key, "item1"))); + Assert.True(await bf.ExistsAsync(key, "item1")); + Assert.False(await bf.ExistsAsync(key, "item2")); } [Fact] diff --git a/tests/NRedisStack.Tests/RedisFixture.cs b/tests/NRedisStack.Tests/RedisFixture.cs index af5c4155..55ecc956 100644 --- a/tests/NRedisStack.Tests/RedisFixture.cs +++ b/tests/NRedisStack.Tests/RedisFixture.cs @@ -9,7 +9,7 @@ public class RedisFixture : IDisposable public void Dispose() { - //Redis.Close(); + Redis.Close(); } public ConnectionMultiplexer Redis { get; private set; } diff --git a/tests/NRedisStack.Tests/TopK/TopKTests.cs b/tests/NRedisStack.Tests/TopK/TopKTests.cs index 98e6c5ed..23041efe 100644 --- a/tests/NRedisStack.Tests/TopK/TopKTests.cs +++ b/tests/NRedisStack.Tests/TopK/TopKTests.cs @@ -72,4 +72,39 @@ public async Task CreateTopKFilterAsync() Assert.Equal(res2[1].ToString(), "bb"); Assert.Equal(res2[2].ToString(), "cc"); } + + [Fact] + public void TestModulePrefixs() + { + IDatabase db1 = redisFixture.Redis.GetDatabase(); + IDatabase db2 = redisFixture.Redis.GetDatabase(); + + var ft1 = db1.FT(); + var ft2 = db2.FT(); + + Assert.NotEqual(ft1.GetHashCode(), ft2.GetHashCode()); + } + + [Fact] + public void TestModulePrefixs1() + { + { + var conn = ConnectionMultiplexer.Connect("localhost"); + IDatabase db = conn.GetDatabase(); + + var ft = db.FT(); + // ... + conn.Dispose(); + } + + { + var conn = ConnectionMultiplexer.Connect("localhost"); + IDatabase db = conn.GetDatabase(); + + var ft = db.FT(); + // ... 
+ conn.Dispose(); + } + + } } \ No newline at end of file From 84b9789f9a167e06579503db20c985e7f16fef6b Mon Sep 17 00:00:00 2001 From: shacharPash Date: Wed, 31 Aug 2022 15:02:19 +0300 Subject: [PATCH 2/7] Changing the way the function is called --- src/NRedisStack/ResponseParser.cs | 58 ++-- tests/NRedisStack.Tests/Bloom/BloomTests.cs | 139 +++++----- .../CountMinSketch/CmsTests.cs | 130 +++++---- .../CuckooFilter/CuckooTests.cs | 198 ++++++++------ tests/NRedisStack.Tests/Json/JsonTests.cs | 10 +- .../NRedisStack.Tests/Tdigest/TdigestTests.cs | 256 ++++++++++-------- .../TimeSeries/TestAPI/TestAdd.cs | 99 ++++--- .../TimeSeries/TestAPI/TestAddAsync.cs | 99 ++++--- .../TimeSeries/TestAPI/TestAlter.cs | 25 +- .../TimeSeries/TestAPI/TestAlterAsync.cs | 25 +- .../TimeSeries/TestAPI/TestCreate.cs | 38 ++- .../TimeSeries/TestAPI/TestCreateAsync.cs | 36 ++- .../TimeSeries/TestAPI/TestDecrBy.cs | 39 +-- .../TimeSeries/TestAPI/TestDecrByAsync.cs | 39 +-- .../TimeSeries/TestAPI/TestDel.cs | 18 +- .../TimeSeries/TestAPI/TestDelAsync.cs | 18 +- .../TimeSeries/TestAPI/TestGet.cs | 15 +- .../TimeSeries/TestAPI/TestGetAsync.cs | 15 +- .../TimeSeries/TestAPI/TestIncrBy.cs | 39 +-- .../TimeSeries/TestAPI/TestIncrByAsync.cs | 39 +-- .../TimeSeries/TestAPI/TestMADD.cs | 19 +- .../TimeSeries/TestAPI/TestMAddAsync.cs | 19 +- .../TimeSeries/TestAPI/TestMGet.cs | 21 +- .../TimeSeries/TestAPI/TestMGetAsync.cs | 21 +- .../TimeSeries/TestAPI/TestMRange.cs | 98 ++++--- .../TimeSeries/TestAPI/TestMRangeAsync.cs | 98 ++++--- .../TimeSeries/TestAPI/TestMRevRange.cs | 100 ++++--- .../TimeSeries/TestAPI/TestMRevRangeAsync.cs | 100 ++++--- .../TimeSeries/TestAPI/TestQueryIndex.cs | 9 +- .../TimeSeries/TestAPI/TestQueryIndexAsync.cs | 9 +- .../TimeSeries/TestAPI/TestRange.cs | 130 +++++---- .../TimeSeries/TestAPI/TestRangeAsync.cs | 130 +++++---- .../TimeSeries/TestAPI/TestRevRange.cs | 44 +-- .../TimeSeries/TestAPI/TestRevRangeAsync.cs | 44 +-- .../TimeSeries/TestAPI/TestRules.cs | 48 ++-- .../TimeSeries/TestAPI/TestRulesAsync.cs | 48 ++-- .../TestTimeSeriesInformation.cs | 18 +- .../TimeSeries/TimeSeriesTests.cs | 4 +- 38 files changed, 1284 insertions(+), 1011 deletions(-) diff --git a/src/NRedisStack/ResponseParser.cs b/src/NRedisStack/ResponseParser.cs index 2c138b08..dec73326 100644 --- a/src/NRedisStack/ResponseParser.cs +++ b/src/NRedisStack/ResponseParser.cs @@ -14,12 +14,12 @@ namespace NRedisStack { internal static class ResponseParser { - public static bool OKtoBoolean(RedisResult result) + public static bool OKtoBoolean(this RedisResult result) { return result.ToString() == "OK"; } - public static bool[] ToBooleanArray(RedisResult result) + public static bool[] ToBooleanArray(this RedisResult result) { RedisResult[]? 
redisResults = ToArray(result); @@ -32,7 +32,7 @@ public static bool[] ToBooleanArray(RedisResult result) return boolArr; } - public static RedisResult[] ToArray(RedisResult result) + public static RedisResult[] ToArray(this RedisResult result) { var redisResults = (RedisResult[]?)result; if (redisResults != null) @@ -40,21 +40,21 @@ public static RedisResult[] ToArray(RedisResult result) throw new ArgumentNullException(nameof(redisResults)); } - public static long ToLong(RedisResult result) + public static long ToLong(this RedisResult result) { if ((long?)result == null) throw new ArgumentNullException(nameof(result)); return (long)result; } - public static double ToDouble(RedisResult result) + public static double ToDouble(this RedisResult result) { if ((double?)result == null) throw new ArgumentNullException(nameof(result)); return (double)result; } - public static double[] ToDoubleArray(RedisResult result) + public static double[] ToDoubleArray(this RedisResult result) { List redisResults = new List(); foreach (var res in ToArray(result)) @@ -65,7 +65,7 @@ public static double[] ToDoubleArray(RedisResult result) return redisResults.ToArray(); } - public static long[] ToLongArray(RedisResult result) + public static long[] ToLongArray(this RedisResult result) { List redisResults = new List(); foreach (var res in ToArray(result)) @@ -76,13 +76,13 @@ public static long[] ToLongArray(RedisResult result) return redisResults.ToArray(); } - public static TimeStamp ToTimeStamp(RedisResult result) + public static TimeStamp ToTimeStamp(this RedisResult result) { if (result.Type == ResultType.None) return null; return new TimeStamp((long)result); } - public static IReadOnlyList ToTimeStampArray(RedisResult result) + public static IReadOnlyList ToTimeStampArray(this RedisResult result) { RedisResult[] redisResults = (RedisResult[])result; var list = new List(redisResults.Length); @@ -91,21 +91,21 @@ public static IReadOnlyList ToTimeStampArray(RedisResult result) return list; } - public static TimeSeriesTuple? ToTimeSeriesTuple(RedisResult result) + public static TimeSeriesTuple? 
ToTimeSeriesTuple(this RedisResult result) { RedisResult[] redisResults = ToArray(result); if (redisResults.Length == 0) return null; return new TimeSeriesTuple(ToTimeStamp(redisResults[0]), (double)redisResults[1]); } - public static Tuple ToScanDumpTuple(RedisResult result) + public static Tuple ToScanDumpTuple(this RedisResult result) { RedisResult[] redisResults = ToArray(result); if (redisResults == null || redisResults.Length == 0) return null; return new Tuple((long)redisResults[0], (Byte[])redisResults[1]); } - public static HashEntry ToHashEntry(RedisResult result) + public static HashEntry ToHashEntry(this RedisResult result) { RedisResult[] redisResults = ToArray(result); if (redisResults.Length < 2) @@ -114,7 +114,7 @@ public static HashEntry ToHashEntry(RedisResult result) return new HashEntry((RedisValue)(redisResults[0]), ((RedisValue)redisResults[1])); } - public static HashEntry[] ToHashEntryArray(RedisResult result) + public static HashEntry[] ToHashEntryArray(this RedisResult result) { RedisResult[] redisResults = ToArray(result); @@ -126,7 +126,7 @@ public static HashEntry[] ToHashEntryArray(RedisResult result) return hash; } - public static IReadOnlyList ToTimeSeriesTupleArray(RedisResult result) + public static IReadOnlyList ToTimeSeriesTupleArray(this RedisResult result) { RedisResult[] redisResults = (RedisResult[])result; var list = new List(redisResults.Length); @@ -135,7 +135,7 @@ public static IReadOnlyList ToTimeSeriesTupleArray(RedisResult return list; } - public static IReadOnlyList ToLabelArray(RedisResult result) + public static IReadOnlyList ToLabelArray(this RedisResult result) { RedisResult[] redisResults = (RedisResult[])result; var list = new List(redisResults.Length); @@ -148,7 +148,7 @@ public static IReadOnlyList ToLabelArray(RedisResult result) return list; } - // public static IReadOnlyList ToCunckArray(RedisResult result) + // public static IReadOnlyList ToCunckArray(this RedisResult result) // { // RedisResult[] redisResults = (RedisResult[])result; // var list = new List(redisResults.Length); @@ -161,7 +161,7 @@ public static IReadOnlyList ToLabelArray(RedisResult result) // return list; // } - public static IReadOnlyList<(string key, IReadOnlyList labels, TimeSeriesTuple value)> ParseMGetResponse(RedisResult result) + public static IReadOnlyList<(string key, IReadOnlyList labels, TimeSeriesTuple value)> ParseMGetResponse(this RedisResult result) { RedisResult[] redisResults = (RedisResult[])result; var list = new List<(string key, IReadOnlyList labels, TimeSeriesTuple values)>(redisResults.Length); @@ -177,7 +177,7 @@ public static IReadOnlyList ToLabelArray(RedisResult result) return list; } - public static IReadOnlyList<(string key, IReadOnlyList labels, IReadOnlyList values)> ParseMRangeResponse(RedisResult result) + public static IReadOnlyList<(string key, IReadOnlyList labels, IReadOnlyList values)> ParseMRangeResponse(this RedisResult result) { RedisResult[] redisResults = (RedisResult[])result; var list = new List<(string key, IReadOnlyList labels, IReadOnlyList values)>(redisResults.Length); @@ -193,7 +193,7 @@ public static IReadOnlyList ToLabelArray(RedisResult result) return list; } - public static TimeSeriesRule ToRule(RedisResult result) + public static TimeSeriesRule ToRule(this RedisResult result) { RedisResult[] redisResults = (RedisResult[])result; string destKey = (string)redisResults[0]; @@ -202,7 +202,7 @@ public static TimeSeriesRule ToRule(RedisResult result) return new TimeSeriesRule(destKey, bucketTime, aggregation); } 
- public static IReadOnlyList ToRuleArray(RedisResult result) + public static IReadOnlyList ToRuleArray(this RedisResult result) { RedisResult[] redisResults = (RedisResult[])result; var list = new List(); @@ -211,7 +211,7 @@ public static IReadOnlyList ToRuleArray(RedisResult result) return list; } - public static TsDuplicatePolicy? ToPolicy(RedisResult result) + public static TsDuplicatePolicy? ToPolicy(this RedisResult result) { var policyStatus = (string)result; if (String.IsNullOrEmpty(policyStatus) || policyStatus == "(nil)") @@ -255,7 +255,7 @@ public static IReadOnlyList ToRuleArray(RedisResult result) return new BloomInformation(capacity, size, numberOfFilters, numberOfItemsInserted, expansionRate); } - public static CuckooInformation ToCuckooInfo(RedisResult result) //TODO: Think about a different implementation, because if the output of BF.INFO changes or even just the names of the labels then the parsing will not work + public static CuckooInformation ToCuckooInfo(this RedisResult result) //TODO: Think about a different implementation, because if the output of BF.INFO changes or even just the names of the labels then the parsing will not work { long size, numberOfBuckets, numberOfFilter, numberOfItemsInserted, numberOfItemsDeleted, bucketSize, expansionRate, maxIteration; @@ -303,7 +303,7 @@ public static IReadOnlyList ToRuleArray(RedisResult result) numberOfItemsDeleted, bucketSize, expansionRate, maxIteration); } - public static CmsInformation ToCmsInfo(RedisResult result) //TODO: Think about a different implementation, because if the output of CMS.INFO changes or even just the names of the labels then the parsing will not work + public static CmsInformation ToCmsInfo(this RedisResult result) //TODO: Think about a different implementation, because if the output of CMS.INFO changes or even just the names of the labels then the parsing will not work { long width, depth, count; @@ -332,7 +332,7 @@ public static IReadOnlyList ToRuleArray(RedisResult result) return new CmsInformation(width, depth, count); } - public static TopKInformation ToTopKInfo(RedisResult result) //TODO: Think about a different implementation, because if the output of CMS.INFO changes or even just the names of the labels then the parsing will not work + public static TopKInformation ToTopKInfo(this RedisResult result) //TODO: Think about a different implementation, because if the output of CMS.INFO changes or even just the names of the labels then the parsing will not work { long k, width, depth; double decay; @@ -366,7 +366,7 @@ public static IReadOnlyList ToRuleArray(RedisResult result) return new TopKInformation(k, width, depth, decay); } - public static TdigestInformation ToTdigestInfo(RedisResult result) //TODO: Think about a different implementation, because if the output of CMS.INFO changes or even just the names of the labels then the parsing will not work + public static TdigestInformation ToTdigestInfo(this RedisResult result) //TODO: Think about a different implementation, because if the output of CMS.INFO changes or even just the names of the labels then the parsing will not work { long compression, capacity, mergedNodes, unmergedNodes, totalCompressions; double mergedWeight, unmergedWeight; @@ -411,7 +411,7 @@ public static IReadOnlyList ToRuleArray(RedisResult result) mergedWeight, unmergedWeight, totalCompressions); } - public static TimeSeriesInformation ToTimeSeriesInfo(RedisResult result) + public static TimeSeriesInformation ToTimeSeriesInfo(this RedisResult result) { long totalSamples = 
-1, memoryUsage = -1, retentionTime = -1, chunkSize = -1, chunkCount = -1; TimeStamp? firstTimestamp = null, lastTimestamp = null; @@ -480,7 +480,7 @@ public static TimeSeriesInformation ToTimeSeriesInfo(RedisResult result) lastTimestamp, retentionTime, chunkCount, chunkSize, labels, sourceKey, rules, duplicatePolicy, keySelfName, chunks); } - public static IReadOnlyList ToTimeSeriesChunkArray(RedisResult result) + public static IReadOnlyList ToTimeSeriesChunkArray(this RedisResult result) { RedisResult[] redisResults = (RedisResult[])result; var list = new List(); @@ -489,7 +489,7 @@ public static IReadOnlyList ToTimeSeriesChunkArray(RedisResult return list; } - public static TimeSeriesChunck ToTimeSeriesChunk(RedisResult result) + public static TimeSeriesChunck ToTimeSeriesChunk(this RedisResult result) { long startTimestamp = -1, endTimestamp = -1, samples = -1, size = -1; string bytesPerSample = ""; @@ -522,7 +522,7 @@ public static TimeSeriesChunck ToTimeSeriesChunk(RedisResult result) } - public static IReadOnlyList ToStringArray(RedisResult result) + public static IReadOnlyList ToStringArray(this RedisResult result) { RedisResult[] redisResults = ToArray(result); diff --git a/tests/NRedisStack.Tests/Bloom/BloomTests.cs b/tests/NRedisStack.Tests/Bloom/BloomTests.cs index 5379d146..bbee25e3 100644 --- a/tests/NRedisStack.Tests/Bloom/BloomTests.cs +++ b/tests/NRedisStack.Tests/Bloom/BloomTests.cs @@ -21,24 +21,24 @@ public void TestReserveBasic() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var bf = db.BF(); - db.BF().Reserve(key, 0.001, 100L); + bf.Reserve(key, 0.001, 100L); - Assert.True((db.BF().Add(key, "item1"))); - Assert.True(db.BF().Exists(key, "item1")); - Assert.False(db.BF().Exists(key, "item2")); + Assert.True((bf.Add(key, "item1"))); + Assert.True(bf.Exists(key, "item1")); + Assert.False(bf.Exists(key, "item2")); } [Fact] public async Task TestReserveBasicAsync() { IDatabase db = redisFixture.Redis.GetDatabase(); - var bf = db.BF(); db.Execute("FLUSHALL"); + var bf = db.BF(); - - await db.BF().ReserveAsync(key, 0.001, 100L); + await bf.ReserveAsync(key, 0.001, 100L); Assert.True(await (bf.AddAsync(key, "item1"))); Assert.True(await bf.ExistsAsync(key, "item1")); @@ -50,10 +50,10 @@ public void TestAddWhenExist() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var bf = db.BF(); - - Assert.True((db.BF().Add(key, "item1"))); // first time - Assert.False(db.BF().Add(key, "item1")); // second time + Assert.True((bf.Add(key, "item1"))); // first time + Assert.False(bf.Add(key, "item1")); // second time } [Fact] @@ -61,10 +61,11 @@ public async Task TestAddWhenExistAsync() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var bf = db.BF(); - Assert.True(await db.BF().AddAsync(key, "item1")); // first time - Assert.False(await db.BF().AddAsync(key, "item1")); // second time + Assert.True(await bf.AddAsync(key, "item1")); // first time + Assert.False(await bf.AddAsync(key, "item1")); // second time } [Fact] @@ -72,10 +73,11 @@ public void TestAddExists() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var bf = db.BF(); - db.BF().Add(key, "item1"); - Assert.True(db.BF().Exists(key, "item1")); + bf.Add(key, "item1"); + Assert.True(bf.Exists(key, "item1")); } [Fact] @@ -83,10 +85,11 @@ public async Task TestAddExistsAsync() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var bf = db.BF(); - await db.BF().AddAsync(key, "item1"); - 
Assert.True(await db.BF().ExistsAsync(key, "item1")); + await bf.AddAsync(key, "item1"); + Assert.True(await bf.ExistsAsync(key, "item1")); } [Fact] @@ -94,13 +97,14 @@ public void TestAddExistsMulti() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var bf = db.BF(); var items = new RedisValue[] { "foo", "bar", "baz" }; var items2 = new RedisValue[] { "newElement", "bar", "baz" }; - var result = db.BF().MAdd(key, items); + var result = bf.MAdd(key, items); Assert.Equal(new bool[] { true, true, true }, result); - result = db.BF().MAdd(key, items2); + result = bf.MAdd(key, items2); Assert.Equal(new bool[] { true, false, false }, result); } @@ -109,13 +113,14 @@ public async Task TestAddExistsMultiAsync() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var bf = db.BF(); var items = new RedisValue[] { "foo", "bar", "baz" }; var items2 = new RedisValue[] { "newElement", "bar", "baz" }; - var result = await db.BF().MAddAsync(key, items); + var result = await bf.MAddAsync(key, items); Assert.Equal(new bool[] { true, true, true }, result); - result = await db.BF().MAddAsync(key, items2); + result = await bf.MAddAsync(key, items2); Assert.Equal(new bool[] { true, false, false }, result); } @@ -124,27 +129,28 @@ public void TestExample() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var bf = db.BF(); // Simple bloom filter using default module settings - db.BF().Add("simpleBloom", "Mark"); + bf.Add("simpleBloom", "Mark"); // Does "Mark" now exist? - db.BF().Exists("simpleBloom", "Mark"); // true - db.BF().Exists("simpleBloom", "Farnsworth"); // False + bf.Exists("simpleBloom", "Mark"); // true + bf.Exists("simpleBloom", "Farnsworth"); // False // If you have a long list of items to check/add, you can use the // "multi" methods var items = new RedisValue[] { "foo", "bar", "baz", "bat", "bag" }; - db.BF().MAdd("simpleBloom", items); + bf.MAdd("simpleBloom", items); // Check if they exist: var allItems = new RedisValue[] { "foo", "bar", "baz", "bat", "Mark", "nonexist" }; - var rv = db.BF().MExists("simpleBloom", allItems); + var rv = bf.MExists("simpleBloom", allItems); // All items except the last one will be 'true' Assert.Equal(new bool[] { true, true, true, true, true, false }, rv); // Reserve a "customized" bloom filter - db.BF().Reserve("specialBloom", 0.0001, 10000); - db.BF().Add("specialBloom", "foo"); + bf.Reserve("specialBloom", 0.0001, 10000); + bf.Add("specialBloom", "foo"); } [Fact] @@ -152,27 +158,28 @@ public async Task TestExampleAsync() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var bf = db.BF(); // Simple bloom filter using default module settings - await db.BF().AddAsync("simpleBloom", "Mark"); + await bf.AddAsync("simpleBloom", "Mark"); // Does "Mark" now exist? 
- await db.BF().ExistsAsync("simpleBloom", "Mark"); // true - await db.BF().ExistsAsync("simpleBloom", "Farnsworth"); // False + await bf.ExistsAsync("simpleBloom", "Mark"); // true + await bf.ExistsAsync("simpleBloom", "Farnsworth"); // False // If you have a long list of items to check/add, you can use the // "multi" methods var items = new RedisValue[] { "foo", "bar", "baz", "bat", "bag" }; - await db.BF().MAddAsync("simpleBloom", items); + await bf.MAddAsync("simpleBloom", items); // Check if they exist: var allItems = new RedisValue[] { "foo", "bar", "baz", "bat", "Mark", "nonexist" }; - var rv = await db.BF().MExistsAsync("simpleBloom", allItems); + var rv = await bf.MExistsAsync("simpleBloom", allItems); // All items except the last one will be 'true' Assert.Equal(new bool[] { true, true, true, true, true, false }, rv); // Reserve a "customized" bloom filter - await db.BF().ReserveAsync("specialBloom", 0.0001, 10000); - await db.BF().AddAsync("specialBloom", "foo"); + await bf.ReserveAsync("specialBloom", 0.0001, 10000); + await bf.AddAsync("specialBloom", "foo"); } [Fact] @@ -180,14 +187,15 @@ public void TestInsert() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var bf = db.BF(); RedisValue[] items = new RedisValue[] { "item1", "item2", "item3" }; - db.BF().Insert("key", items); + bf.Insert("key", items); - Assert.True(db.BF().Exists("key", "item1")); - Assert.True(db.BF().Exists("key", "item2")); - Assert.True(db.BF().Exists("key", "item3")); + Assert.True(bf.Exists("key", "item1")); + Assert.True(bf.Exists("key", "item2")); + Assert.True(bf.Exists("key", "item3")); } [Fact] @@ -195,14 +203,15 @@ public async Task TestInsertAsync() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var bf = db.BF(); RedisValue[] items = new RedisValue[] { "item1", "item2", "item3" }; - await db.BF().InsertAsync("key", items); + await bf.InsertAsync("key", items); - Assert.True(await db.BF().ExistsAsync("key", "item1")); - Assert.True(await db.BF().ExistsAsync("key", "item2")); - Assert.True(await db.BF().ExistsAsync("key", "item3")); + Assert.True(await bf.ExistsAsync("key", "item1")); + Assert.True(await bf.ExistsAsync("key", "item2")); + Assert.True(await bf.ExistsAsync("key", "item3")); } [Fact] @@ -210,9 +219,10 @@ public void TestExistsNonExist() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var bf = db.BF(); RedisValue item = new RedisValue("item"); - Assert.False(db.BF().Exists("NonExistKey", item)); + Assert.False(bf.Exists("NonExistKey", item)); } [Fact] @@ -220,9 +230,10 @@ public async Task TestExistsNonExistAsync() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var bf = db.BF(); RedisValue item = new RedisValue("item"); - Assert.False(await db.BF().ExistsAsync("NonExistKey", item)); + Assert.False(await bf.ExistsAsync("NonExistKey", item)); } [Fact] @@ -230,14 +241,15 @@ public void TestInfo() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var bf = db.BF(); - db.BF().Add(key, "item"); - var info = db.BF().Info(key); + bf.Add(key, "item"); + var info = bf.Info(key); Assert.NotNull(info); Assert.Equal(info.NumberOfItemsInserted, (long)1); - Assert.Throws(() => db.BF().Info("notExistKey")); + Assert.Throws(() => bf.Info("notExistKey")); } [Fact] @@ -245,14 +257,15 @@ public async Task TestInfoAsync() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var bf = db.BF(); - await db.BF().AddAsync(key, "item"); - var info 
= await db.BF().InfoAsync(key); + await bf.AddAsync(key, "item"); + var info = await bf.InfoAsync(key); Assert.NotNull(info); Assert.Equal(info.NumberOfItemsInserted, (long)1); - await Assert.ThrowsAsync(() => db.BF().InfoAsync("notExistKey")); + await Assert.ThrowsAsync(() => bf.InfoAsync("notExistKey")); } [Fact] @@ -260,23 +273,24 @@ public void TestScanDumpAndLoadChunk() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var bf = db.BF(); - db.BF().Reserve("bloom-dump", 0.1, 10); - db.BF().Add("bloom-dump", "a"); + bf.Reserve("bloom-dump", 0.1, 10); + bf.Add("bloom-dump", "a"); long iterator = 0; while (true) { - var chunkData = db.BF().ScanDump("bloom-dump", iterator); + var chunkData = bf.ScanDump("bloom-dump", iterator); iterator = chunkData.Item1; if (iterator == 0) break; - Assert.True(db.BF().LoadChunk("bloom-load", iterator, chunkData.Item2)); + Assert.True(bf.LoadChunk("bloom-load", iterator, chunkData.Item2)); } // check for properties - Assert.Equal(db.BF().Info("bloom-dump").NumberOfItemsInserted, db.BF().Info("bloom-load").NumberOfItemsInserted); + Assert.Equal(bf.Info("bloom-dump").NumberOfItemsInserted, bf.Info("bloom-load").NumberOfItemsInserted); // check for existing items - Assert.True(db.BF().Exists("bloom-load", "a")); + Assert.True(bf.Exists("bloom-load", "a")); } [Fact] @@ -284,22 +298,23 @@ public async Task TestScanDumpAndLoadChunkAsync() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var bf = db.BF(); - await db.BF().ReserveAsync("bloom-dump", 0.1, 10); - await db.BF().AddAsync("bloom-dump", "a"); + await bf.ReserveAsync("bloom-dump", 0.1, 10); + await bf.AddAsync("bloom-dump", "a"); long iterator = 0; while (true) { - var chunkData = await db.BF().ScanDumpAsync("bloom-dump", iterator); + var chunkData = await bf.ScanDumpAsync("bloom-dump", iterator); iterator = chunkData.Item1; if (iterator == 0) break; - Assert.True(await db.BF().LoadChunkAsync("bloom-load", iterator, chunkData.Item2)); + Assert.True(await bf.LoadChunkAsync("bloom-load", iterator, chunkData.Item2)); } // check for properties - Assert.Equal((await db.BF().InfoAsync("bloom-dump")).NumberOfItemsInserted, (await db.BF().InfoAsync("bloom-load")).NumberOfItemsInserted); + Assert.Equal((await bf.InfoAsync("bloom-dump")).NumberOfItemsInserted, (await bf.InfoAsync("bloom-load")).NumberOfItemsInserted); // check for existing items - Assert.True(await db.BF().ExistsAsync("bloom-load", "a")); + Assert.True(await bf.ExistsAsync("bloom-load", "a")); } } \ No newline at end of file diff --git a/tests/NRedisStack.Tests/CountMinSketch/CmsTests.cs b/tests/NRedisStack.Tests/CountMinSketch/CmsTests.cs index 2487ac4d..dbe3f46b 100644 --- a/tests/NRedisStack.Tests/CountMinSketch/CmsTests.cs +++ b/tests/NRedisStack.Tests/CountMinSketch/CmsTests.cs @@ -21,9 +21,10 @@ public void TestInitByDim() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var cms = db.CMS(); - db.CMS().InitByDim(key, 16, 4); - var info = db.CMS().Info(key); + cms.InitByDim(key, 16, 4); + var info = cms.Info(key); Assert.Equal(16, info.Width); Assert.Equal(4, info.Depth); @@ -35,9 +36,10 @@ public async Task TestInitByDimAsync() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var cms = db.CMS(); - await db.CMS().InitByDimAsync(key, 16, 4); - var info = await db.CMS().InfoAsync(key); + await cms.InitByDimAsync(key, 16, 4); + var info = await cms.InfoAsync(key); Assert.Equal(16, info.Width); Assert.Equal(4, info.Depth); @@ -49,9 +51,10 
@@ public void TestInitByProb() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var cms = db.CMS(); - db.CMS().InitByProb(key, 0.01, 0.01); - var info = db.CMS().Info(key); + cms.InitByProb(key, 0.01, 0.01); + var info = cms.Info(key); Assert.Equal(200, info.Width); Assert.Equal(7, info.Depth); @@ -63,9 +66,10 @@ public async Task TestInitByProbAsync() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var cms = db.CMS(); - await db.CMS().InitByProbAsync(key, 0.01, 0.01); - var info = await db.CMS().InfoAsync(key); + await cms.InitByProbAsync(key, 0.01, 0.01); + var info = await cms.InfoAsync(key); Assert.Equal(200, info.Width); Assert.Equal(7, info.Depth); @@ -77,9 +81,10 @@ public void TestKeyAlreadyExists() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var cms = db.CMS(); - db.CMS().InitByDim("dup", 16, 4); - Assert.Throws(() => db.CMS().InitByDim("dup", 8, 6)); + cms.InitByDim("dup", 16, 4); + Assert.Throws(() => cms.InitByDim("dup", 8, 6)); } [Fact] @@ -87,9 +92,10 @@ public async Task TestKeyAlreadyExistsAsync() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var cms = db.CMS(); - await db.CMS().InitByDimAsync("dup", 16, 4); - await Assert.ThrowsAsync(() => db.CMS().InitByDimAsync("dup", 8, 6)); + await cms.InitByDimAsync("dup", 16, 4); + await Assert.ThrowsAsync(() => cms.InitByDimAsync("dup", 8, 6)); } [Fact] @@ -97,12 +103,13 @@ public void TestIncrBy() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var cms = db.CMS(); - db.CMS().InitByDim(key, 1000, 5); - var resp = db.CMS().IncrBy(key, "foo", 5); + cms.InitByDim(key, 1000, 5); + var resp = cms.IncrBy(key, "foo", 5); Assert.Equal(5, resp); - var info = db.CMS().Info(key); + var info = cms.Info(key); Assert.Equal(1000, info.Width); Assert.Equal(5, info.Depth); Assert.Equal(5, info.Count); @@ -114,12 +121,13 @@ public async Task TestIncrByAsync() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var cms = db.CMS(); - await db.CMS().InitByDimAsync(key, 1000, 5); - var resp = await db.CMS().IncrByAsync(key, "foo", 5); + await cms.InitByDimAsync(key, 1000, 5); + var resp = await cms.IncrByAsync(key, "foo", 5); Assert.Equal(5, resp); - var info = await db.CMS().InfoAsync(key); + var info = await cms.InfoAsync(key); Assert.Equal(1000, info.Width); Assert.Equal(5, info.Depth); Assert.Equal(5, info.Count); @@ -131,18 +139,19 @@ public void TestIncrByMultipleArgs() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var cms = db.CMS(); - db.CMS().InitByDim(key, 1000, 5); - db.CMS().IncrBy(key, "foo", 5L); + cms.InitByDim(key, 1000, 5); + cms.IncrBy(key, "foo", 5L); var itemIncrements = new Tuple[2]; itemIncrements[0] = new Tuple("foo", 5); itemIncrements[1] = new Tuple("bar", 15); - var resp = db.CMS().IncrBy(key, itemIncrements); + var resp = cms.IncrBy(key, itemIncrements); Assert.Equal(new long[] { 10, 15 }, resp); - var info = db.CMS().Info(key); + var info = cms.Info(key); Assert.Equal(1000, info.Width); Assert.Equal(5, info.Depth); Assert.Equal(25, info.Count); @@ -153,18 +162,19 @@ public async Task TestIncrByMultipleArgsAsync() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var cms = db.CMS(); - await db.CMS().InitByDimAsync(key, 1000, 5); - await db.CMS().IncrByAsync(key, "foo", 5L); + await cms.InitByDimAsync(key, 1000, 5); + await cms.IncrByAsync(key, "foo", 5L); var itemIncrements = new Tuple[2]; 
itemIncrements[0] = new Tuple("foo", 5); itemIncrements[1] = new Tuple("bar", 15); - var resp = await db.CMS().IncrByAsync(key, itemIncrements); + var resp = await cms.IncrByAsync(key, itemIncrements); Assert.Equal(new long[] { 10, 15 }, resp); - var info = await db.CMS().InfoAsync(key); + var info = await cms.InfoAsync(key); Assert.Equal(1000, info.Width); Assert.Equal(5, info.Depth); Assert.Equal(25, info.Count); @@ -176,15 +186,16 @@ public void TestQuery() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); - db.CMS().InitByDim(key, 1000, 5); + var cms = db.CMS(); + cms.InitByDim(key, 1000, 5); var itemIncrements = new Tuple[2]; itemIncrements[0] = new Tuple("foo", 10); itemIncrements[1] = new Tuple("bar", 15); - db.CMS().IncrBy(key, itemIncrements); + cms.IncrBy(key, itemIncrements); - var resp = db.CMS().Query(key, "foo", "bar"); + var resp = cms.Query(key, "foo", "bar"); Assert.Equal(new long[] { 10, 15 }, resp); } @@ -193,15 +204,16 @@ public async Task TestQueryAsync() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); - await db.CMS().InitByDimAsync(key, 1000, 5); + var cms = db.CMS(); + await cms.InitByDimAsync(key, 1000, 5); var itemIncrements = new Tuple[2]; itemIncrements[0] = new Tuple("foo", 10); itemIncrements[1] = new Tuple("bar", 15); - await db.CMS().IncrByAsync(key, itemIncrements); + await cms.IncrByAsync(key, itemIncrements); - var resp = await db.CMS().QueryAsync(key, new RedisValue[] { "foo", "bar" }); + var resp = await cms.QueryAsync(key, new RedisValue[] { "foo", "bar" }); Assert.Equal(new long[] { 10, 15 }, resp); } @@ -210,45 +222,46 @@ public void TestMerge() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var cms = db.CMS(); - db.CMS().InitByDim("A", 1000, 5); - db.CMS().InitByDim("B", 1000, 5); - db.CMS().InitByDim("C", 1000, 5); + cms.InitByDim("A", 1000, 5); + cms.InitByDim("B", 1000, 5); + cms.InitByDim("C", 1000, 5); var aValues = new Tuple[3]; aValues[0] = new Tuple("foo", 5); aValues[1] = new Tuple("bar", 3); aValues[2] = new Tuple("baz", 9); - db.CMS().IncrBy("A", aValues); + cms.IncrBy("A", aValues); var bValues = new Tuple[3]; bValues[0] = new Tuple("foo", 2); bValues[1] = new Tuple("bar", 3); bValues[2] = new Tuple("baz", 1); - db.CMS().IncrBy("B", bValues); + cms.IncrBy("B", bValues); - var q1 = db.CMS().Query("A", new RedisValue[] { "foo", "bar", "baz" }); + var q1 = cms.Query("A", new RedisValue[] { "foo", "bar", "baz" }); Assert.Equal(new long[] { 5L, 3L, 9L }, q1); - var q2 = db.CMS().Query("B", new RedisValue[] { "foo", "bar", "baz" }); + var q2 = cms.Query("B", new RedisValue[] { "foo", "bar", "baz" }); Assert.Equal(new long[] { 2L, 3L, 1L }, q2); - db.CMS().Merge("C", 2, new RedisValue[] { "A", "B" }); + cms.Merge("C", 2, new RedisValue[] { "A", "B" }); - var q3 = db.CMS().Query("C", new RedisValue[] { "foo", "bar", "baz" }); + var q3 = cms.Query("C", new RedisValue[] { "foo", "bar", "baz" }); Assert.Equal(new long[] { 7L, 6L, 10L }, q3); - db.CMS().Merge("C", 2, new RedisValue[] { "A", "B" }, new long[] { 1, 2 }); + cms.Merge("C", 2, new RedisValue[] { "A", "B" }, new long[] { 1, 2 }); - var q4 = db.CMS().Query("C", new RedisValue[] { "foo", "bar", "baz" }); + var q4 = cms.Query("C", new RedisValue[] { "foo", "bar", "baz" }); Assert.Equal(new long[] { 9L, 9L, 11L }, q4); - db.CMS().Merge("C", 2, new RedisValue[] { "A", "B" }, new long[] { 2, 3 }); + cms.Merge("C", 2, new RedisValue[] { "A", "B" }, new long[] { 2, 3 }); - var q5 = db.CMS().Query("C", new 
RedisValue[] { "foo", "bar", "baz" }); + var q5 = cms.Query("C", new RedisValue[] { "foo", "bar", "baz" }); Assert.Equal(new long[] { 16L, 15L, 21L }, q5); } @@ -258,45 +271,46 @@ public async Task TestMergeAsync() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var cms = db.CMS(); - await db.CMS().InitByDimAsync("A", 1000, 5); - await db.CMS().InitByDimAsync("B", 1000, 5); - await db.CMS().InitByDimAsync("C", 1000, 5); + await cms.InitByDimAsync("A", 1000, 5); + await cms.InitByDimAsync("B", 1000, 5); + await cms.InitByDimAsync("C", 1000, 5); var aValues = new Tuple[3]; aValues[0] = new Tuple("foo", 5); aValues[1] = new Tuple("bar", 3); aValues[2] = new Tuple("baz", 9); - await db.CMS().IncrByAsync("A", aValues); + await cms.IncrByAsync("A", aValues); var bValues = new Tuple[3]; bValues[0] = new Tuple("foo", 2); bValues[1] = new Tuple("bar", 3); bValues[2] = new Tuple("baz", 1); - await db.CMS().IncrByAsync("B", bValues); + await cms.IncrByAsync("B", bValues); - var q1 = await db.CMS().QueryAsync("A", new RedisValue[] { "foo", "bar", "baz" }); + var q1 = await cms.QueryAsync("A", new RedisValue[] { "foo", "bar", "baz" }); Assert.Equal(new long[] { 5L, 3L, 9L }, q1); - var q2 = await db.CMS().QueryAsync("B", new RedisValue[] { "foo", "bar", "baz" }); + var q2 = await cms.QueryAsync("B", new RedisValue[] { "foo", "bar", "baz" }); Assert.Equal(new long[] { 2L, 3L, 1L }, q2); - await db.CMS().MergeAsync("C", 2, new RedisValue[] { "A", "B" }); + await cms.MergeAsync("C", 2, new RedisValue[] { "A", "B" }); - var q3 = await db.CMS().QueryAsync("C", new RedisValue[] { "foo", "bar", "baz" }); + var q3 = await cms.QueryAsync("C", new RedisValue[] { "foo", "bar", "baz" }); Assert.Equal(new long[] { 7L, 6L, 10L }, q3); - await db.CMS().MergeAsync("C", 2, new RedisValue[] { "A", "B" }, new long[] { 1, 2 }); + await cms.MergeAsync("C", 2, new RedisValue[] { "A", "B" }, new long[] { 1, 2 }); - var q4 = await db.CMS().QueryAsync("C", new RedisValue[] { "foo", "bar", "baz" }); + var q4 = await cms.QueryAsync("C", new RedisValue[] { "foo", "bar", "baz" }); Assert.Equal(new long[] { 9L, 9L, 11L }, q4); - await db.CMS().MergeAsync("C", 2, new RedisValue[] { "A", "B" }, new long[] { 2, 3 }); + await cms.MergeAsync("C", 2, new RedisValue[] { "A", "B" }, new long[] { 2, 3 }); - var q5 = await db.CMS().QueryAsync("C", new RedisValue[] { "foo", "bar", "baz" }); + var q5 = await cms.QueryAsync("C", new RedisValue[] { "foo", "bar", "baz" }); Assert.Equal(new long[] { 16L, 15L, 21L }, q5); } } diff --git a/tests/NRedisStack.Tests/CuckooFilter/CuckooTests.cs b/tests/NRedisStack.Tests/CuckooFilter/CuckooTests.cs index 1c8c1605..ede6f88e 100644 --- a/tests/NRedisStack.Tests/CuckooFilter/CuckooTests.cs +++ b/tests/NRedisStack.Tests/CuckooFilter/CuckooTests.cs @@ -21,25 +21,27 @@ public void TestReserveBasic() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); - Assert.True(db.CF().Reserve(key, 100L)); - Assert.Throws(() => db.CF().Reserve(key, 100L)); + var cf = db.CF(); + Assert.True(cf.Reserve(key, 100L)); + Assert.Throws(() => cf.Reserve(key, 100L)); - Assert.True((db.CF().Add(key, "item1"))); - Assert.True(db.CF().Exists(key, "item1")); - Assert.False(db.CF().Exists(key, "item2")); + Assert.True((cf.Add(key, "item1"))); + Assert.True(cf.Exists(key, "item1")); + Assert.False(cf.Exists(key, "item2")); } [Fact] public async Task TestReserveBasicAsync() { IDatabase db = redisFixture.Redis.GetDatabase(); - db.ExecuteAsync("FLUSHALL"); - Assert.True(await 
db.CF().ReserveAsync(key, 100L)); - Assert.ThrowsAsync(async () => await db.CF().ReserveAsync(key, 100L)); + db.Execute("FLUSHALL"); + var cf = db.CF(); + Assert.True(await cf.ReserveAsync(key, 100L)); + Assert.ThrowsAsync(async () => await cf.ReserveAsync(key, 100L)); - Assert.True(await (db.CF().AddAsync(key, "item1"))); - Assert.True(await db.CF().ExistsAsync(key, "item1")); - Assert.False(await db.CF().ExistsAsync(key, "item2")); + Assert.True(await (cf.AddAsync(key, "item1"))); + Assert.True(await cf.ExistsAsync(key, "item1")); + Assert.False(await cf.ExistsAsync(key, "item2")); } [Fact] @@ -47,9 +49,10 @@ public void TestAddExists() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var cf = db.CF(); - Assert.True(db.CF().Add(key, "item1")); - Assert.True(db.CF().Exists(key, "item1")); + Assert.True(cf.Add(key, "item1")); + Assert.True(cf.Exists(key, "item1")); } [Fact] @@ -57,9 +60,10 @@ public async Task TestAddExistsAsync() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var cf = db.CF(); - Assert.True(await db.CF().AddAsync(key, "item1")); - Assert.True(await db.CF().ExistsAsync(key, "item1")); + Assert.True(await cf.AddAsync(key, "item1")); + Assert.True(await cf.ExistsAsync(key, "item1")); } [Fact] @@ -67,10 +71,11 @@ public void TestAddNX() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var cf = db.CF(); - Assert.True(db.CF().AddNX(key, "item1")); - Assert.False(db.CF().AddNX(key, "item1")); - Assert.True(db.CF().Exists(key, "item1")); + Assert.True(cf.AddNX(key, "item1")); + Assert.False(cf.AddNX(key, "item1")); + Assert.True(cf.Exists(key, "item1")); } [Fact] @@ -78,10 +83,11 @@ public async Task TestAddNXAsync() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var cf = db.CF(); - Assert.True(await db.CF().AddNXAsync(key, "item1")); - Assert.False(await db.CF().AddNXAsync(key, "item1")); - Assert.True(await db.CF().ExistsAsync(key, "item1")); + Assert.True(await cf.AddNXAsync(key, "item1")); + Assert.False(await cf.AddNXAsync(key, "item1")); + Assert.True(await cf.ExistsAsync(key, "item1")); } [Fact] @@ -89,8 +95,9 @@ public void TestCountFilterDoesNotExist() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var cf = db.CF(); - Assert.Equal(db.CF().Count("notExistFilter", "notExistItem"), 0); + Assert.Equal(cf.Count("notExistFilter", "notExistItem"), 0); } [Fact] @@ -98,8 +105,9 @@ public async Task TestCountFilterDoesNotExistAsync() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var cf = db.CF(); - Assert.Equal(await db.CF().CountAsync("notExistFilter", "notExistItem"), 0); + Assert.Equal(await cf.CountAsync("notExistFilter", "notExistItem"), 0); } [Fact] @@ -107,9 +115,10 @@ public void TestCountFilterExist() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var cf = db.CF(); - db.CF().Insert(key, new RedisValue[]{"foo"}); - Assert.Equal(db.CF().Count(key, "notExistItem"), 0); + cf.Insert(key, new RedisValue[] { "foo" }); + Assert.Equal(cf.Count(key, "notExistItem"), 0); } [Fact] @@ -117,9 +126,10 @@ public async Task TestCountFilterExistAsync() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var cf = db.CF(); - await db.CF().InsertAsync(key, new RedisValue[]{"foo"}); - Assert.Equal(await db.CF().CountAsync(key, "notExistItem"), 0); + await cf.InsertAsync(key, new RedisValue[] { "foo" }); + Assert.Equal(await cf.CountAsync(key, "notExistItem"), 0); } 
[Fact] @@ -127,9 +137,10 @@ public void TestCountItemExist() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var cf = db.CF(); - db.CF().Insert(key, new RedisValue[]{"foo"}); - Assert.Equal(db.CF().Count(key, "foo"), 1); + cf.Insert(key, new RedisValue[] { "foo" }); + Assert.Equal(cf.Count(key, "foo"), 1); } [Fact] @@ -137,9 +148,10 @@ public async Task TestCountItemExistAsync() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var cf = db.CF(); - await db.CF().InsertAsync(key, new RedisValue[]{"foo"}); - Assert.Equal(await db.CF().CountAsync(key, "foo"), 1); + await cf.InsertAsync(key, new RedisValue[] { "foo" }); + Assert.Equal(await cf.CountAsync(key, "foo"), 1); } [Fact] @@ -147,12 +159,13 @@ public void TestDelete() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var cf = db.CF(); - db.CF().Add(key, "item"); - Assert.False(db.CF().Del(key, "notExistsItem")); - Assert.True(db.CF().Del(key, "item")); + cf.Add(key, "item"); + Assert.False(cf.Del(key, "notExistsItem")); + Assert.True(cf.Del(key, "item")); - Assert.Throws( () => db.CF().Del("notExistKey", "item")); + Assert.Throws(() => cf.Del("notExistKey", "item")); } [Fact] @@ -160,12 +173,13 @@ public async Task TestDeleteAsync() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var cf = db.CF(); - await db.CF().AddAsync(key, "item"); - Assert.False(await db.CF().DelAsync(key, "notExistsItem")); - Assert.True(await db.CF().DelAsync(key, "item")); + await cf.AddAsync(key, "item"); + Assert.False(await cf.DelAsync(key, "notExistsItem")); + Assert.True(await cf.DelAsync(key, "item")); - await Assert.ThrowsAsync( () => db.CF().DelAsync("notExistKey", "item")); + await Assert.ThrowsAsync(() => cf.DelAsync("notExistKey", "item")); } [Fact] @@ -173,14 +187,15 @@ public void TestInfo() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var cf = db.CF(); - db.CF().Add(key, "item"); - var info = db.CF().Info(key); + cf.Add(key, "item"); + var info = cf.Info(key); Assert.NotNull(info); Assert.Equal(info.NumberOfItemsInserted, (long)1); - Assert.Throws( () => db.CF().Info("notExistKey")); + Assert.Throws(() => cf.Info("notExistKey")); } [Fact] @@ -188,14 +203,15 @@ public async Task TestInfoAsync() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var cf = db.CF(); - await db.CF().AddAsync(key, "item"); - var info = await db.CF().InfoAsync(key); + await cf.AddAsync(key, "item"); + var info = await cf.InfoAsync(key); Assert.NotNull(info); Assert.Equal(info.NumberOfItemsInserted, (long)1); - await Assert.ThrowsAsync( () => db.CF().InfoAsync("notExistKey")); + await Assert.ThrowsAsync(() => cf.InfoAsync("notExistKey")); } [Fact] @@ -203,14 +219,15 @@ public void TestInsert() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var cf = db.CF(); RedisValue[] items = new RedisValue[] { "item1", "item2", "item3" }; - db.CF().Insert("key", items); + cf.Insert("key", items); - Assert.True(db.CF().Exists("key", "item1")); - Assert.True(db.CF().Exists("key", "item2")); - Assert.True(db.CF().Exists("key", "item3")); + Assert.True(cf.Exists("key", "item1")); + Assert.True(cf.Exists("key", "item2")); + Assert.True(cf.Exists("key", "item3")); } [Fact] @@ -218,14 +235,15 @@ public async Task TestInsertAsync() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var cf = db.CF(); RedisValue[] items = new RedisValue[] { "item1", "item2", "item3" }; - 
await db.CF().InsertAsync("key", items); + await cf.InsertAsync("key", items); - Assert.True(await db.CF().ExistsAsync("key", "item1")); - Assert.True(await db.CF().ExistsAsync("key", "item2")); - Assert.True(await db.CF().ExistsAsync("key", "item3")); + Assert.True(await cf.ExistsAsync("key", "item1")); + Assert.True(await cf.ExistsAsync("key", "item2")); + Assert.True(await cf.ExistsAsync("key", "item3")); } [Fact] @@ -233,21 +251,22 @@ public void TestInsertNX() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var cf = db.CF(); RedisValue[] items = new RedisValue[] { "item1", "item2", "item3" }; - var result = db.CF().InsertNX(key, items); - var trues = new bool[] {true, true, true}; + var result = cf.InsertNX(key, items); + var trues = new bool[] { true, true, true }; Assert.Equal(result, trues); - Assert.True(db.CF().Exists(key, "item1")); - Assert.True(db.CF().Exists(key, "item2")); - Assert.True(db.CF().Exists(key, "item3")); + Assert.True(cf.Exists(key, "item1")); + Assert.True(cf.Exists(key, "item2")); + Assert.True(cf.Exists(key, "item3")); - Assert.Equal(db.CF().MExists(key, items), trues); + Assert.Equal(cf.MExists(key, items), trues); - result = db.CF().InsertNX(key, items); - Assert.Equal(result, new bool[] {false, false, false}); + result = cf.InsertNX(key, items); + Assert.Equal(result, new bool[] { false, false, false }); } [Fact] @@ -255,21 +274,22 @@ public async Task TestInsertNXAsync() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var cf = db.CF(); RedisValue[] items = new RedisValue[] { "item1", "item2", "item3" }; - var result = await db.CF().InsertNXAsync(key, items); - var trues = new bool[] {true, true, true}; + var result = await cf.InsertNXAsync(key, items); + var trues = new bool[] { true, true, true }; Assert.Equal(result, trues); - Assert.True(await db.CF().ExistsAsync(key, "item1")); - Assert.True(await db.CF().ExistsAsync(key, "item2")); - Assert.True(await db.CF().ExistsAsync(key, "item3")); + Assert.True(await cf.ExistsAsync(key, "item1")); + Assert.True(await cf.ExistsAsync(key, "item2")); + Assert.True(await cf.ExistsAsync(key, "item3")); - Assert.Equal(await db.CF().MExistsAsync(key, items), trues); + Assert.Equal(await cf.MExistsAsync(key, items), trues); - result = await db.CF().InsertNXAsync(key, items); - Assert.Equal(result, new bool[] {false, false, false}); + result = await cf.InsertNXAsync(key, items); + Assert.Equal(result, new bool[] { false, false, false }); } [Fact] @@ -277,9 +297,10 @@ public void TestExistsNonExist() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var cf = db.CF(); RedisValue item = new RedisValue("item"); - Assert.False(db.CF().Exists("NonExistKey", item)); + Assert.False(cf.Exists("NonExistKey", item)); } [Fact] @@ -287,9 +308,10 @@ public async Task TestExistsNonExistAsync() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var cf = db.CF(); RedisValue item = new RedisValue("item"); - Assert.False(await db.CF().ExistsAsync("NonExistKey", item)); + Assert.False(await cf.ExistsAsync("NonExistKey", item)); } [Fact] @@ -297,23 +319,24 @@ public void TestScanDumpAndLoadChunk() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var cf = db.CF(); - db.CF().Reserve("cuckoo",100, 50); - db.CF().Add("cuckoo-dump", "a"); + cf.Reserve("cuckoo", 100, 50); + cf.Add("cuckoo-dump", "a"); long iterator = 0; - while(true) + while (true) { - var chunkData = db.CF().ScanDump("cuckoo-dump", iterator); 
+ var chunkData = cf.ScanDump("cuckoo-dump", iterator); iterator = chunkData.Item1; - if(iterator == 0) break; - Assert.True(db.CF().LoadChunk("cuckoo-load", iterator, chunkData.Item2)); + if (iterator == 0) break; + Assert.True(cf.LoadChunk("cuckoo-load", iterator, chunkData.Item2)); } // check for properties - Assert.Equal(db.CF().Info("cuckoo-dump").NumberOfItemsInserted, db.CF().Info("cuckoo-load").NumberOfItemsInserted); + Assert.Equal(cf.Info("cuckoo-dump").NumberOfItemsInserted, cf.Info("cuckoo-load").NumberOfItemsInserted); // check for existing items - Assert.True(db.CF().Exists("cuckoo-load", "a")); + Assert.True(cf.Exists("cuckoo-load", "a")); } [Fact] @@ -321,22 +344,23 @@ public async Task TestScanDumpAndLoadChunkAsync() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var cf = db.CF(); - await db.CF().ReserveAsync("cuckoo",100, 50); - await db.CF().AddAsync("cuckoo-dump", "a"); + await cf.ReserveAsync("cuckoo", 100, 50); + await cf.AddAsync("cuckoo-dump", "a"); long iterator = 0; - while(true) + while (true) { - var chunkData = await db.CF().ScanDumpAsync("cuckoo-dump", iterator); + var chunkData = await cf.ScanDumpAsync("cuckoo-dump", iterator); iterator = chunkData.Item1; - if(iterator == 0) break; - Assert.True(await db.CF().LoadChunkAsync("cuckoo-load", iterator, chunkData.Item2)); + if (iterator == 0) break; + Assert.True(await cf.LoadChunkAsync("cuckoo-load", iterator, chunkData.Item2)); } // check for properties - Assert.Equal((await db.CF().InfoAsync("cuckoo-dump")).NumberOfItemsInserted, (await db.CF().InfoAsync("cuckoo-load")).NumberOfItemsInserted); + Assert.Equal((await cf.InfoAsync("cuckoo-dump")).NumberOfItemsInserted, (await cf.InfoAsync("cuckoo-load")).NumberOfItemsInserted); // check for existing items - Assert.True(await db.CF().ExistsAsync("cuckoo-load", "a")); + Assert.True(await cf.ExistsAsync("cuckoo-load", "a")); } } \ No newline at end of file diff --git a/tests/NRedisStack.Tests/Json/JsonTests.cs b/tests/NRedisStack.Tests/Json/JsonTests.cs index 2f65a0eb..c62876cd 100644 --- a/tests/NRedisStack.Tests/Json/JsonTests.cs +++ b/tests/NRedisStack.Tests/Json/JsonTests.cs @@ -41,10 +41,11 @@ public void TestJsonSetNotExist() // var obj = new Person { Name = "Shachar", Age = 23 }; // IDatabase db = redisFixture.Redis.GetDatabase(); // db.Execute("FLUSHALL"); + // var cf = db.JSON(); - // db.JSON().Set(key, "$", obj); + // json.Set(key, "$", obj); // string expected = "{\"Name\":\"Shachar\",\"Age\":23}"; - // var result = db.JSON().Get(key).ToString(); + // var result = json.Get(key).ToString(); // if(result == null) // throw new ArgumentNullException(nameof(result)); @@ -57,11 +58,12 @@ public void TestJsonSetNotExist() // var obj = new Person { Name = "Shachar", Age = 23 }; // IDatabase db = redisFixture.Redis.GetDatabase(); // db.Execute("FLUSHALL"); + // var cf = db.JSON(); - // db.JSON().Set(key, "$", obj); + // json.Set(key, "$", obj); // var expected = "[222111\"Shachar\"222]"; - // var result = db.JSON().Get(key, "111", "222", "333", "$.Name"); + // var result = json.Get(key, "111", "222", "333", "$.Name"); // // if(result == null) // // throw new ArgumentNullException(nameof(result)); // Assert.Equal(result.ToString(), expected); diff --git a/tests/NRedisStack.Tests/Tdigest/TdigestTests.cs b/tests/NRedisStack.Tests/Tdigest/TdigestTests.cs index fc70dae9..5ad9d18c 100644 --- a/tests/NRedisStack.Tests/Tdigest/TdigestTests.cs +++ b/tests/NRedisStack.Tests/Tdigest/TdigestTests.cs @@ -16,16 +16,16 @@ public void Dispose() 
redisFixture.Redis.GetDatabase().KeyDelete(key); } - private void AssertMergedUnmergedNodes(IDatabase db, string key, int mergedNodes, int unmergedNodes) + private void AssertMergedUnmergedNodes(TdigestCommands tdigest, string key, int mergedNodes, int unmergedNodes) { - var info = db.TDIGEST().Info(key); + var info = tdigest.Info(key); Assert.Equal((long)mergedNodes, info.MergedNodes); Assert.Equal((long)unmergedNodes, info.UnmergedNodes); } - private void AssertTotalWeight(IDatabase db, string key, double totalWeight) + private void AssertTotalWeight(TdigestCommands tdigest, string key, double totalWeight) { - var info = db.TDIGEST().Info(key); + var info = tdigest.Info(key); Assert.Equal(totalWeight, info.MergedWeight + info.UnmergedWeight); //Assert.Equal(totalWeight, 0.01); } @@ -35,10 +35,11 @@ public void TestCreateSimple() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var tdigest = db.TDIGEST(); - Assert.True(db.TDIGEST().Create(key)); + Assert.True(tdigest.Create(key)); - var info = db.TDIGEST().Info(key); + var info = tdigest.Info(key); Assert.Equal(100, info.Compression); } @@ -47,10 +48,11 @@ public async Task TestCreateSimpleAsync() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var tdigest = db.TDIGEST(); - Assert.True(await db.TDIGEST().CreateAsync(key)); + Assert.True(await tdigest.CreateAsync(key)); - var info = await db.TDIGEST().InfoAsync(key); + var info = await tdigest.InfoAsync(key); Assert.Equal(100, info.Compression); } @@ -59,13 +61,14 @@ public void TestCreateAndInfo() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var tdigest = db.TDIGEST(); for (int i = 100; i < 1000; i += 100) { string myKey = "td-" + i; - Assert.True(db.TDIGEST().Create(myKey, i)); + Assert.True(tdigest.Create(myKey, i)); - var info = db.TDIGEST().Info(myKey); + var info = tdigest.Info(myKey); Assert.Equal(i, info.Compression); } } @@ -75,13 +78,14 @@ public async Task TestCreateAndInfoAsync() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var tdigest = db.TDIGEST(); for (int i = 100; i < 1000; i += 100) { string myKey = "td-" + i; - Assert.True(await db.TDIGEST().CreateAsync(myKey, i)); + Assert.True(await tdigest.CreateAsync(myKey, i)); - var info = await db.TDIGEST().InfoAsync(myKey); + var info = await tdigest.InfoAsync(myKey); Assert.Equal(i, info.Compression); } } @@ -91,19 +95,20 @@ public void TestReset() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var tdigest = db.TDIGEST(); - db.TDIGEST().Create("reset", 100); - AssertMergedUnmergedNodes(db, "reset", 0, 0); + tdigest.Create("reset", 100); + AssertMergedUnmergedNodes(tdigest, "reset", 0, 0); // on empty - Assert.True(db.TDIGEST().Reset("reset")); - AssertMergedUnmergedNodes(db, "reset", 0, 0); + Assert.True(tdigest.Reset("reset")); + AssertMergedUnmergedNodes(tdigest, "reset", 0, 0); - db.TDIGEST().Add("reset", RandomValueWeight(), RandomValueWeight(), RandomValueWeight()); - AssertMergedUnmergedNodes(db, "reset", 0, 3); + tdigest.Add("reset", RandomValueWeight(), RandomValueWeight(), RandomValueWeight()); + AssertMergedUnmergedNodes(tdigest, "reset", 0, 3); - Assert.True(db.TDIGEST().Reset("reset")); - AssertMergedUnmergedNodes(db, "reset", 0, 0); + Assert.True(tdigest.Reset("reset")); + AssertMergedUnmergedNodes(tdigest, "reset", 0, 0); } [Fact] @@ -111,19 +116,20 @@ public async Task TestResetAsync() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var 
tdigest = db.TDIGEST(); - await db.TDIGEST().CreateAsync("reset", 100); - AssertMergedUnmergedNodes(db, "reset", 0, 0); + await tdigest.CreateAsync("reset", 100); + AssertMergedUnmergedNodes(tdigest, "reset", 0, 0); // on empty - Assert.True(await db.TDIGEST().ResetAsync("reset")); - AssertMergedUnmergedNodes(db, "reset", 0, 0); + Assert.True(await tdigest.ResetAsync("reset")); + AssertMergedUnmergedNodes(tdigest, "reset", 0, 0); - await db.TDIGEST().AddAsync("reset", RandomValueWeight(), RandomValueWeight(), RandomValueWeight()); - AssertMergedUnmergedNodes(db, "reset", 0, 3); + await tdigest.AddAsync("reset", RandomValueWeight(), RandomValueWeight(), RandomValueWeight()); + AssertMergedUnmergedNodes(tdigest, "reset", 0, 3); - Assert.True(await db.TDIGEST().ResetAsync("reset")); - AssertMergedUnmergedNodes(db, "reset", 0, 0); + Assert.True(await tdigest.ResetAsync("reset")); + AssertMergedUnmergedNodes(tdigest, "reset", 0, 0); } [Fact] @@ -131,14 +137,15 @@ public void TestAdd() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var tdigest = db.TDIGEST(); - db.TDIGEST().Create("tdadd", 100); + tdigest.Create("tdadd", 100); - Assert.True(db.TDIGEST().Add("tdadd", RandomValueWeight())); - AssertMergedUnmergedNodes(db, "tdadd", 0, 1); + Assert.True(tdigest.Add("tdadd", RandomValueWeight())); + AssertMergedUnmergedNodes(tdigest, "tdadd", 0, 1); - Assert.True(db.TDIGEST().Add("tdadd", RandomValueWeight(), RandomValueWeight(), RandomValueWeight(), RandomValueWeight())); - AssertMergedUnmergedNodes(db, "tdadd", 0, 5); + Assert.True(tdigest.Add("tdadd", RandomValueWeight(), RandomValueWeight(), RandomValueWeight(), RandomValueWeight())); + AssertMergedUnmergedNodes(tdigest, "tdadd", 0, 5); } [Fact] @@ -146,14 +153,15 @@ public async Task TestAddAsync() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var tdigest = db.TDIGEST(); - await db.TDIGEST().CreateAsync("tdadd", 100); + await tdigest.CreateAsync("tdadd", 100); - Assert.True(await db.TDIGEST().AddAsync("tdadd", RandomValueWeight())); - AssertMergedUnmergedNodes(db, "tdadd", 0, 1); + Assert.True(await tdigest.AddAsync("tdadd", RandomValueWeight())); + AssertMergedUnmergedNodes(tdigest, "tdadd", 0, 1); - Assert.True(await db.TDIGEST().AddAsync("tdadd", RandomValueWeight(), RandomValueWeight(), RandomValueWeight(), RandomValueWeight())); - AssertMergedUnmergedNodes(db, "tdadd", 0, 5); + Assert.True(await tdigest.AddAsync("tdadd", RandomValueWeight(), RandomValueWeight(), RandomValueWeight(), RandomValueWeight())); + AssertMergedUnmergedNodes(tdigest, "tdadd", 0, 5); } @@ -162,18 +170,19 @@ public void TestMerge() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var tdigest = db.TDIGEST(); - db.TDIGEST().Create("td2", 100); - db.TDIGEST().Create("td4m", 100); + tdigest.Create("td2", 100); + tdigest.Create("td4m", 100); - Assert.True(db.TDIGEST().Merge("td2", "td4m")); - AssertMergedUnmergedNodes(db, "td2", 0, 0); + Assert.True(tdigest.Merge("td2", "td4m")); + AssertMergedUnmergedNodes(tdigest, "td2", 0, 0); - db.TDIGEST().Add("td2", DefinedValueWeight(1, 1), DefinedValueWeight(1, 1), DefinedValueWeight(1, 1)); - db.TDIGEST().Add("td4m", DefinedValueWeight(1, 100), DefinedValueWeight(1, 100)); + tdigest.Add("td2", DefinedValueWeight(1, 1), DefinedValueWeight(1, 1), DefinedValueWeight(1, 1)); + tdigest.Add("td4m", DefinedValueWeight(1, 100), DefinedValueWeight(1, 100)); - Assert.True(db.TDIGEST().Merge("td2", "td4m")); - AssertMergedUnmergedNodes(db, "td2", 3, 2); 
+ Assert.True(tdigest.Merge("td2", "td4m")); + AssertMergedUnmergedNodes(tdigest, "td2", 3, 2); } @@ -182,18 +191,19 @@ public async Task TestMergeAsync() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var tdigest = db.TDIGEST(); - await db.TDIGEST().CreateAsync("td2", 100); - await db.TDIGEST().CreateAsync("td4m", 100); + await tdigest.CreateAsync("td2", 100); + await tdigest.CreateAsync("td4m", 100); - Assert.True(await db.TDIGEST().MergeAsync("td2", "td4m")); - AssertMergedUnmergedNodes(db, "td2", 0, 0); + Assert.True(await tdigest.MergeAsync("td2", "td4m")); + AssertMergedUnmergedNodes(tdigest, "td2", 0, 0); - await db.TDIGEST().AddAsync("td2", DefinedValueWeight(1, 1), DefinedValueWeight(1, 1), DefinedValueWeight(1, 1)); - await db.TDIGEST().AddAsync("td4m", DefinedValueWeight(1, 100), DefinedValueWeight(1, 100)); + await tdigest.AddAsync("td2", DefinedValueWeight(1, 1), DefinedValueWeight(1, 1), DefinedValueWeight(1, 1)); + await tdigest.AddAsync("td4m", DefinedValueWeight(1, 100), DefinedValueWeight(1, 100)); - Assert.True(await db.TDIGEST().MergeAsync("td2", "td4m")); - AssertMergedUnmergedNodes(db, "td2", 3, 2); + Assert.True(await tdigest.MergeAsync("td2", "td4m")); + AssertMergedUnmergedNodes(tdigest, "td2", 3, 2); } [Fact] @@ -201,18 +211,19 @@ public void TestMergeStore() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var tdigest = db.TDIGEST(); - db.TDIGEST().Create("from1", 100); - db.TDIGEST().Create("from2", 200); + tdigest.Create("from1", 100); + tdigest.Create("from2", 200); - db.TDIGEST().Add("from1", 1, 1); - db.TDIGEST().Add("from2", 1, 10); + tdigest.Add("from1", 1, 1); + tdigest.Add("from2", 1, 10); - Assert.True(db.TDIGEST().MergeStore("to", 2, 100, "from1", "from2")); - AssertTotalWeight(db, "to", 11d); + Assert.True(tdigest.MergeStore("to", 2, 100, "from1", "from2")); + AssertTotalWeight(tdigest, "to", 11d); - Assert.True(db.TDIGEST().MergeStore("to50", 2, 50, "from1", "from2")); - Assert.Equal(50, db.TDIGEST().Info("to50").Compression); + Assert.True(tdigest.MergeStore("to50", 2, 50, "from1", "from2")); + Assert.Equal(50, tdigest.Info("to50").Compression); } [Fact] @@ -220,18 +231,19 @@ public async Task TestMergeStoreAsync() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var tdigest = db.TDIGEST(); - await db.TDIGEST().CreateAsync("from1", 100); - await db.TDIGEST().CreateAsync("from2", 200); + await tdigest.CreateAsync("from1", 100); + await tdigest.CreateAsync("from2", 200); - await db.TDIGEST().AddAsync("from1", 1, 1); - await db.TDIGEST().AddAsync("from2", 1, 10); + await tdigest.AddAsync("from1", 1, 1); + await tdigest.AddAsync("from2", 1, 10); - Assert.True(await db.TDIGEST().MergeStoreAsync("to", 2, 100, "from1", "from2")); - AssertTotalWeight(db, "to", 11d); + Assert.True(await tdigest.MergeStoreAsync("to", 2, 100, "from1", "from2")); + AssertTotalWeight(tdigest, "to", 11d); - Assert.True(await db.TDIGEST().MergeStoreAsync("to50", 2, 50, "from1", "from2")); - Assert.Equal(50, (await db.TDIGEST().InfoAsync("to50")).Compression); + Assert.True(await tdigest.MergeStoreAsync("to50", 2, 50, "from1", "from2")); + Assert.Equal(50, (await tdigest.InfoAsync("to50")).Compression); } [Fact] @@ -239,13 +251,14 @@ public void TestCDF() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var tdigest = db.TDIGEST(); - db.TDIGEST().Create("tdcdf", 100); - Assert.Equal(double.NaN, db.TDIGEST().CDF("tdcdf", 50)); + tdigest.Create("tdcdf", 100); + 
Assert.Equal(double.NaN, tdigest.CDF("tdcdf", 50)); - db.TDIGEST().Add("tdcdf", DefinedValueWeight(1, 1), DefinedValueWeight(1, 1), DefinedValueWeight(1, 1)); - db.TDIGEST().Add("tdcdf", DefinedValueWeight(100, 1), DefinedValueWeight(100, 1)); - Assert.Equal(0.6, db.TDIGEST().CDF("tdcdf", 50)); + tdigest.Add("tdcdf", DefinedValueWeight(1, 1), DefinedValueWeight(1, 1), DefinedValueWeight(1, 1)); + tdigest.Add("tdcdf", DefinedValueWeight(100, 1), DefinedValueWeight(100, 1)); + Assert.Equal(0.6, tdigest.CDF("tdcdf", 50)); } [Fact] @@ -253,13 +266,14 @@ public async Task TestCDFAsync() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var tdigest = db.TDIGEST(); - await db.TDIGEST().CreateAsync("tdcdf", 100); - Assert.Equal(double.NaN, await db.TDIGEST().CDFAsync("tdcdf", 50)); + await tdigest.CreateAsync("tdcdf", 100); + Assert.Equal(double.NaN, await tdigest.CDFAsync("tdcdf", 50)); - await db.TDIGEST().AddAsync("tdcdf", DefinedValueWeight(1, 1), DefinedValueWeight(1, 1), DefinedValueWeight(1, 1)); - await db.TDIGEST().AddAsync("tdcdf", DefinedValueWeight(100, 1), DefinedValueWeight(100, 1)); - Assert.Equal(0.6, await db.TDIGEST().CDFAsync("tdcdf", 50)); + await tdigest.AddAsync("tdcdf", DefinedValueWeight(1, 1), DefinedValueWeight(1, 1), DefinedValueWeight(1, 1)); + await tdigest.AddAsync("tdcdf", DefinedValueWeight(100, 1), DefinedValueWeight(100, 1)); + Assert.Equal(0.6, await tdigest.CDFAsync("tdcdf", 50)); } [Fact] @@ -267,14 +281,15 @@ public void TestQuantile() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var tdigest = db.TDIGEST(); - db.TDIGEST().Create("tdqnt", 100); - var resDelete = db.TDIGEST().Quantile("tdqnt", 0.5); - Assert.Equal(new double[] { double.NaN }, db.TDIGEST().Quantile("tdqnt", 0.5)); + tdigest.Create("tdqnt", 100); + var resDelete = tdigest.Quantile("tdqnt", 0.5); + Assert.Equal(new double[] { double.NaN }, tdigest.Quantile("tdqnt", 0.5)); - db.TDIGEST().Add("tdqnt", DefinedValueWeight(1, 1), DefinedValueWeight(1, 1), DefinedValueWeight(1, 1)); - db.TDIGEST().Add("tdqnt", DefinedValueWeight(100, 1), DefinedValueWeight(100, 1)); - Assert.Equal(new double[] { 1 }, db.TDIGEST().Quantile("tdqnt", 0.5)); + tdigest.Add("tdqnt", DefinedValueWeight(1, 1), DefinedValueWeight(1, 1), DefinedValueWeight(1, 1)); + tdigest.Add("tdqnt", DefinedValueWeight(100, 1), DefinedValueWeight(100, 1)); + Assert.Equal(new double[] { 1 }, tdigest.Quantile("tdqnt", 0.5)); } [Fact] @@ -282,14 +297,15 @@ public async Task TestQuantileAsync() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var tdigest = db.TDIGEST(); - db.TDIGEST().Create("tdqnt", 100); - var resDelete = await db.TDIGEST().QuantileAsync("tdqnt", 0.5); - Assert.Equal(new double[] { double.NaN }, await db.TDIGEST().QuantileAsync("tdqnt", 0.5)); + tdigest.Create("tdqnt", 100); + var resDelete = await tdigest.QuantileAsync("tdqnt", 0.5); + Assert.Equal(new double[] { double.NaN }, await tdigest.QuantileAsync("tdqnt", 0.5)); - await db.TDIGEST().AddAsync("tdqnt", DefinedValueWeight(1, 1), DefinedValueWeight(1, 1), DefinedValueWeight(1, 1)); - await db.TDIGEST().AddAsync("tdqnt", DefinedValueWeight(100, 1), DefinedValueWeight(100, 1)); - Assert.Equal(new double[] { 1 }, await db.TDIGEST().QuantileAsync("tdqnt", 0.5)); + await tdigest.AddAsync("tdqnt", DefinedValueWeight(1, 1), DefinedValueWeight(1, 1), DefinedValueWeight(1, 1)); + await tdigest.AddAsync("tdqnt", DefinedValueWeight(100, 1), DefinedValueWeight(100, 1)); + Assert.Equal(new double[] { 1 
}, await tdigest.QuantileAsync("tdqnt", 0.5)); } [Fact] @@ -297,15 +313,16 @@ public void TestMinAndMax() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var tdigest = db.TDIGEST(); - db.TDIGEST().Create(key, 100); - Assert.Equal(double.MaxValue, db.TDIGEST().Min(key)); - Assert.Equal(-double.MaxValue, db.TDIGEST().Max(key)); + tdigest.Create(key, 100); + Assert.Equal(double.MaxValue, tdigest.Min(key)); + Assert.Equal(-double.MaxValue, tdigest.Max(key)); - db.TDIGEST().Add(key, DefinedValueWeight(2, 1)); - db.TDIGEST().Add(key, DefinedValueWeight(5, 1)); - Assert.Equal(2d, db.TDIGEST().Min(key)); - Assert.Equal(5d, db.TDIGEST().Max(key)); + tdigest.Add(key, DefinedValueWeight(2, 1)); + tdigest.Add(key, DefinedValueWeight(5, 1)); + Assert.Equal(2d, tdigest.Min(key)); + Assert.Equal(5d, tdigest.Max(key)); } [Fact] @@ -313,15 +330,16 @@ public async Task TestMinAndMaxAsync() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var tdigest = db.TDIGEST(); - await db.TDIGEST().CreateAsync(key, 100); - Assert.Equal(double.MaxValue, await db.TDIGEST().MinAsync(key)); - Assert.Equal(-double.MaxValue, await db.TDIGEST().MaxAsync(key)); + await tdigest.CreateAsync(key, 100); + Assert.Equal(double.MaxValue, await tdigest.MinAsync(key)); + Assert.Equal(-double.MaxValue, await tdigest.MaxAsync(key)); - await db.TDIGEST().AddAsync(key, DefinedValueWeight(2, 1)); - await db.TDIGEST().AddAsync(key, DefinedValueWeight(5, 1)); - Assert.Equal(2d, await db.TDIGEST().MinAsync(key)); - Assert.Equal(5d, await db.TDIGEST().MaxAsync(key)); + await tdigest.AddAsync(key, DefinedValueWeight(2, 1)); + await tdigest.AddAsync(key, DefinedValueWeight(5, 1)); + Assert.Equal(2d, await tdigest.MinAsync(key)); + Assert.Equal(5d, await tdigest.MaxAsync(key)); } [Fact] @@ -329,18 +347,19 @@ public void TestTrimmedMean() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var tdigest = db.TDIGEST(); - db.TDIGEST().Create(key, 500); + tdigest.Create(key, 500); for (int i = 0; i < 20; i++) { - db.TDIGEST().Add(key, new Tuple(i, 1)); + tdigest.Add(key, new Tuple(i, 1)); } - Assert.Equal(9.5, db.TDIGEST().TrimmedMean(key, 0.1, 0.9)); - Assert.Equal(9.5, db.TDIGEST().TrimmedMean(key, 0.0, 1.0)); - Assert.Equal(4.5, db.TDIGEST().TrimmedMean(key, 0.0, 0.5)); - Assert.Equal(14.5, db.TDIGEST().TrimmedMean(key, 0.5, 1.0)); + Assert.Equal(9.5, tdigest.TrimmedMean(key, 0.1, 0.9)); + Assert.Equal(9.5, tdigest.TrimmedMean(key, 0.0, 1.0)); + Assert.Equal(4.5, tdigest.TrimmedMean(key, 0.0, 0.5)); + Assert.Equal(14.5, tdigest.TrimmedMean(key, 0.5, 1.0)); } [Fact] @@ -348,18 +367,19 @@ public async Task TestTrimmedMeanAsync() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var tdigest = db.TDIGEST(); - await db.TDIGEST().CreateAsync(key, 500); + await tdigest.CreateAsync(key, 500); for (int i = 0; i < 20; i++) { - await db.TDIGEST().AddAsync(key, new Tuple(i, 1)); + await tdigest.AddAsync(key, new Tuple(i, 1)); } - Assert.Equal(9.5, await db.TDIGEST().TrimmedMeanAsync(key, 0.1, 0.9)); - Assert.Equal(9.5, await db.TDIGEST().TrimmedMeanAsync(key, 0.0, 1.0)); - Assert.Equal(4.5, await db.TDIGEST().TrimmedMeanAsync(key, 0.0, 0.5)); - Assert.Equal(14.5, await db.TDIGEST().TrimmedMeanAsync(key, 0.5, 1.0)); + Assert.Equal(9.5, await tdigest.TrimmedMeanAsync(key, 0.1, 0.9)); + Assert.Equal(9.5, await tdigest.TrimmedMeanAsync(key, 0.0, 1.0)); + Assert.Equal(4.5, await tdigest.TrimmedMeanAsync(key, 0.0, 0.5)); + Assert.Equal(14.5, await 
tdigest.TrimmedMeanAsync(key, 0.5, 1.0)); } static Tuple RandomValueWeight() diff --git a/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestAdd.cs b/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestAdd.cs index 477b37ee..e0d5c6f3 100644 --- a/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestAdd.cs +++ b/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestAdd.cs @@ -25,10 +25,11 @@ public void TestAddNotExistingTimeSeries() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var ts = db.TS(); TimeStamp now = DateTime.UtcNow; - Assert.Equal(now, db.TS().Add(key, now, 1.1)); - TimeSeriesInformation info = db.TS().Info(key); + Assert.Equal(now, ts.Add(key, now, 1.1)); + TimeSeriesInformation info = ts.Info(key); Assert.Equal(now, info.FirstTimeStamp); Assert.Equal(now, info.LastTimeStamp); } @@ -38,11 +39,12 @@ public void TestAddExistingTimeSeries() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var ts = db.TS(); - db.TS().Create(key); + ts.Create(key); TimeStamp now = DateTime.UtcNow; - Assert.Equal(now, db.TS().Add(key, now, 1.1)); - TimeSeriesInformation info = db.TS().Info(key); + Assert.Equal(now, ts.Add(key, now, 1.1)); + TimeSeriesInformation info = ts.Info(key); Assert.Equal(now, info.FirstTimeStamp); Assert.Equal(now, info.LastTimeStamp); } @@ -52,9 +54,10 @@ public void TestAddStar() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var ts = db.TS(); - db.TS().Add(key, "*", 1.1); - TimeSeriesInformation info = db.TS().Info(key); + ts.Add(key, "*", 1.1); + TimeSeriesInformation info = ts.Info(key); Assert.True(info.FirstTimeStamp > 0); Assert.Equal(info.FirstTimeStamp, info.LastTimeStamp); } @@ -64,10 +67,11 @@ public void TestAddWithRetentionTime() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var ts = db.TS(); TimeStamp now = DateTime.UtcNow; long retentionTime = 5000; - Assert.Equal(now, db.TS().Add(key, now, 1.1, retentionTime: retentionTime)); - TimeSeriesInformation info = db.TS().Info(key); + Assert.Equal(now, ts.Add(key, now, 1.1, retentionTime: retentionTime)); + TimeSeriesInformation info = ts.Info(key); Assert.Equal(now, info.FirstTimeStamp); Assert.Equal(now, info.LastTimeStamp); Assert.Equal(retentionTime, info.RetentionTime); @@ -78,11 +82,12 @@ public void TestAddWithLabels() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var ts = db.TS(); TimeStamp now = DateTime.UtcNow; TimeSeriesLabel label = new TimeSeriesLabel("key", "value"); var labels = new List { label }; - Assert.Equal(now, db.TS().Add(key, now, 1.1, labels: labels)); - TimeSeriesInformation info = db.TS().Info(key); + Assert.Equal(now, ts.Add(key, now, 1.1, labels: labels)); + TimeSeriesInformation info = ts.Info(key); Assert.Equal(now, info.FirstTimeStamp); Assert.Equal(now, info.LastTimeStamp); Assert.Equal(labels, info.Labels); @@ -93,10 +98,11 @@ public void TestAddWithUncompressed() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); - db.TS().Create(key); + var ts = db.TS(); + ts.Create(key); TimeStamp now = DateTime.UtcNow; - Assert.Equal(now, db.TS().Add(key, now, 1.1, uncompressed: true)); - TimeSeriesInformation info = db.TS().Info(key); + Assert.Equal(now, ts.Add(key, now, 1.1, uncompressed: true)); + TimeSeriesInformation info = ts.Info(key); Assert.Equal(now, info.FirstTimeStamp); Assert.Equal(now, info.LastTimeStamp); } @@ -106,9 +112,10 @@ public void TestAddWithChunkSize() { IDatabase db = redisFixture.Redis.GetDatabase(); 
db.Execute("FLUSHALL"); + var ts = db.TS(); TimeStamp now = DateTime.UtcNow; - Assert.Equal(now, db.TS().Add(key, now, 1.1, chunkSizeBytes: 128)); - TimeSeriesInformation info = db.TS().Info(key); + Assert.Equal(now, ts.Add(key, now, 1.1, chunkSizeBytes: 128)); + TimeSeriesInformation info = ts.Info(key); Assert.Equal(now, info.FirstTimeStamp); Assert.Equal(now, info.LastTimeStamp); Assert.Equal(128, info.ChunkSize); @@ -119,9 +126,10 @@ public void TestAddWithDuplicatePolicyBlock() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var ts = db.TS(); TimeStamp now = DateTime.UtcNow; - Assert.Equal(now, db.TS().Add(key, now, 1.1)); - Assert.Throws(() => db.TS().Add(key, now, 1.2)); + Assert.Equal(now, ts.Add(key, now, 1.1)); + Assert.Throws(() => ts.Add(key, now, 1.2)); } [Fact] @@ -129,15 +137,16 @@ public void TestAddWithDuplicatePolicyMin() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var ts = db.TS(); TimeStamp now = DateTime.UtcNow; - Assert.Equal(now, db.TS().Add(key, now, 1.1)); + Assert.Equal(now, ts.Add(key, now, 1.1)); // Insert a bigger number and check that it did not change the value. - Assert.Equal(now, db.TS().Add(key, now, 1.2, duplicatePolicy: TsDuplicatePolicy.MIN)); - Assert.Equal(1.1, db.TS().Range(key, now, now)[0].Val); + Assert.Equal(now, ts.Add(key, now, 1.2, duplicatePolicy: TsDuplicatePolicy.MIN)); + Assert.Equal(1.1, ts.Range(key, now, now)[0].Val); // Insert a smaller number and check that it changed. - Assert.Equal(now, db.TS().Add(key, now, 1.0, duplicatePolicy: TsDuplicatePolicy.MIN)); - Assert.Equal(1.0, db.TS().Range(key, now, now)[0].Val); + Assert.Equal(now, ts.Add(key, now, 1.0, duplicatePolicy: TsDuplicatePolicy.MIN)); + Assert.Equal(1.0, ts.Range(key, now, now)[0].Val); } [Fact] @@ -145,15 +154,16 @@ public void TestAddWithDuplicatePolicyMax() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var ts = db.TS(); TimeStamp now = DateTime.UtcNow; - Assert.Equal(now, db.TS().Add(key, now, 1.1)); + Assert.Equal(now, ts.Add(key, now, 1.1)); // Insert a smaller number and check that it did not change the value. - Assert.Equal(now, db.TS().Add(key, now, 1.0, duplicatePolicy: TsDuplicatePolicy.MAX)); - Assert.Equal(1.1, db.TS().Range(key, now, now)[0].Val); + Assert.Equal(now, ts.Add(key, now, 1.0, duplicatePolicy: TsDuplicatePolicy.MAX)); + Assert.Equal(1.1, ts.Range(key, now, now)[0].Val); // Insert a bigger number and check that it changed. 
- Assert.Equal(now, db.TS().Add(key, now, 1.2, duplicatePolicy: TsDuplicatePolicy.MAX)); - Assert.Equal(1.2, db.TS().Range(key, now, now)[0].Val); + Assert.Equal(now, ts.Add(key, now, 1.2, duplicatePolicy: TsDuplicatePolicy.MAX)); + Assert.Equal(1.2, ts.Range(key, now, now)[0].Val); } [Fact] @@ -161,10 +171,11 @@ public void TestAddWithDuplicatePolicySum() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var ts = db.TS(); TimeStamp now = DateTime.UtcNow; - Assert.Equal(now, db.TS().Add(key, now, 1.1)); - Assert.Equal(now, db.TS().Add(key, now, 1.0, duplicatePolicy: TsDuplicatePolicy.SUM)); - Assert.Equal(2.1, db.TS().Range(key, now, now)[0].Val); + Assert.Equal(now, ts.Add(key, now, 1.1)); + Assert.Equal(now, ts.Add(key, now, 1.0, duplicatePolicy: TsDuplicatePolicy.SUM)); + Assert.Equal(2.1, ts.Range(key, now, now)[0].Val); } [Fact] @@ -172,10 +183,11 @@ public void TestAddWithDuplicatePolicyFirst() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var ts = db.TS(); TimeStamp now = DateTime.UtcNow; - Assert.Equal(now, db.TS().Add(key, now, 1.1)); - Assert.Equal(now, db.TS().Add(key, now, 1.0, duplicatePolicy: TsDuplicatePolicy.FIRST)); - Assert.Equal(1.1, db.TS().Range(key, now, now)[0].Val); + Assert.Equal(now, ts.Add(key, now, 1.1)); + Assert.Equal(now, ts.Add(key, now, 1.0, duplicatePolicy: TsDuplicatePolicy.FIRST)); + Assert.Equal(1.1, ts.Range(key, now, now)[0].Val); } [Fact] @@ -183,10 +195,11 @@ public void TestAddWithDuplicatePolicyLast() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var ts = db.TS(); TimeStamp now = DateTime.UtcNow; - Assert.Equal(now, db.TS().Add(key, now, 1.1)); - Assert.Equal(now, db.TS().Add(key, now, 1.0, duplicatePolicy: TsDuplicatePolicy.LAST)); - Assert.Equal(1.0, db.TS().Range(key, now, now)[0].Val); + Assert.Equal(now, ts.Add(key, now, 1.1)); + Assert.Equal(now, ts.Add(key, now, 1.0, duplicatePolicy: TsDuplicatePolicy.LAST)); + Assert.Equal(1.0, ts.Range(key, now, now)[0].Val); } [Fact] @@ -197,10 +210,11 @@ public void TestOldAdd() TimeStamp new_dt = DateTime.UtcNow; IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); - db.TS().Create(key); - db.TS().Add(key, new_dt, 1.1); + var ts = db.TS(); + ts.Create(key); + ts.Add(key, new_dt, 1.1); // Adding old event - Assert.Equal(old_dt, db.TS().Add(key, old_dt, 1.1)); + Assert.Equal(old_dt, ts.Add(key, old_dt, 1.1)); } [Fact] @@ -209,9 +223,10 @@ public void TestWrongParameters() double value = 1.1; IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); - var ex = Assert.Throws(() => db.TS().Add(key, "+", value)); + var ts = db.TS(); + var ex = Assert.Throws(() => ts.Add(key, "+", value)); Assert.Equal("ERR TSDB: invalid timestamp", ex.Message); - ex = Assert.Throws(() => db.TS().Add(key, "-", value)); + ex = Assert.Throws(() => ts.Add(key, "-", value)); Assert.Equal("ERR TSDB: invalid timestamp", ex.Message); } } diff --git a/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestAddAsync.cs b/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestAddAsync.cs index ce5bd07b..1f3c3817 100644 --- a/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestAddAsync.cs +++ b/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestAddAsync.cs @@ -19,10 +19,11 @@ public async Task TestAddNotExistingTimeSeries() var key = CreateKeyName(); var db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var ts = db.TS(); TimeStamp timeStamp = DateTime.UtcNow; - Assert.Equal(timeStamp, await db.TS().AddAsync(key, timeStamp, 
1.1)); + Assert.Equal(timeStamp, await ts.AddAsync(key, timeStamp, 1.1)); - var info = await db.TS().InfoAsync(key); + var info = await ts.InfoAsync(key); Assert.Equal(timeStamp, info.FirstTimeStamp); Assert.Equal(timeStamp, info.LastTimeStamp); } @@ -33,11 +34,12 @@ public async Task TestAddExistingTimeSeries() var key = CreateKeyName(); var db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); - await db.TS().CreateAsync(key); + var ts = db.TS(); + await ts.CreateAsync(key); TimeStamp timeStamp = DateTime.UtcNow; - Assert.Equal(timeStamp, await db.TS().AddAsync(key, timeStamp, 1.1)); + Assert.Equal(timeStamp, await ts.AddAsync(key, timeStamp, 1.1)); - var info = await db.TS().InfoAsync(key); + var info = await ts.InfoAsync(key); Assert.Equal(timeStamp, info.FirstTimeStamp); Assert.Equal(timeStamp, info.LastTimeStamp); } @@ -48,8 +50,9 @@ public async Task TestAddStar() var key = CreateKeyName(); var db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); - await db.TS().AddAsync(key, "*", 1.1); - var info = await db.TS().InfoAsync(key); + var ts = db.TS(); + await ts.AddAsync(key, "*", 1.1); + var info = await ts.InfoAsync(key); Assert.True(info.FirstTimeStamp > 0); Assert.Equal(info.FirstTimeStamp, info.LastTimeStamp); } @@ -60,11 +63,12 @@ public async Task TestAddWithRetentionTime() var key = CreateKeyName(); var db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var ts = db.TS(); TimeStamp timeStamp = DateTime.UtcNow; long retentionTime = 5000; - Assert.Equal(timeStamp, await db.TS().AddAsync(key, timeStamp, 1.1, retentionTime: retentionTime)); + Assert.Equal(timeStamp, await ts.AddAsync(key, timeStamp, 1.1, retentionTime: retentionTime)); - var info = await db.TS().InfoAsync(key); + var info = await ts.InfoAsync(key); Assert.Equal(timeStamp, info.FirstTimeStamp); Assert.Equal(timeStamp, info.LastTimeStamp); Assert.Equal(retentionTime, info.RetentionTime); @@ -76,12 +80,13 @@ public async Task TestAddWithLabels() var key = CreateKeyName(); var db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var ts = db.TS(); TimeStamp timeStamp = DateTime.UtcNow; var label = new TimeSeriesLabel("key", "value"); var labels = new List { label }; - Assert.Equal(timeStamp, await db.TS().AddAsync(key, timeStamp, 1.1, labels: labels)); + Assert.Equal(timeStamp, await ts.AddAsync(key, timeStamp, 1.1, labels: labels)); - var info = await db.TS().InfoAsync(key); + var info = await ts.InfoAsync(key); Assert.Equal(timeStamp, info.FirstTimeStamp); Assert.Equal(timeStamp, info.LastTimeStamp); Assert.Equal(labels, info.Labels); @@ -93,9 +98,10 @@ public async Task TestAddWithChunkSize() var key = CreateKeyName(); var db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var ts = db.TS(); TimeStamp timeStamp = DateTime.UtcNow; - Assert.Equal(timeStamp, await db.TS().AddAsync(key, timeStamp, 1.1, chunkSizeBytes: 128)); - var info = await db.TS().InfoAsync(key); + Assert.Equal(timeStamp, await ts.AddAsync(key, timeStamp, 1.1, chunkSizeBytes: 128)); + var info = await ts.InfoAsync(key); Assert.Equal(timeStamp, info.FirstTimeStamp); Assert.Equal(timeStamp, info.LastTimeStamp); Assert.Equal(128, info.ChunkSize); @@ -107,11 +113,12 @@ public async Task TestAddWithUncompressed() var key = CreateKeyName(); var db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); - await db.TS().CreateAsync(key); + var ts = db.TS(); + await ts.CreateAsync(key); TimeStamp timeStamp = DateTime.UtcNow; - Assert.Equal(timeStamp, await db.TS().AddAsync(key, timeStamp, 1.1, 
uncompressed: true)); + Assert.Equal(timeStamp, await ts.AddAsync(key, timeStamp, 1.1, uncompressed: true)); - var info = await db.TS().InfoAsync(key); + var info = await ts.InfoAsync(key); Assert.Equal(timeStamp, info.FirstTimeStamp); Assert.Equal(timeStamp, info.LastTimeStamp); } @@ -122,9 +129,10 @@ public async Task TestAddWithDuplicatePolicyBlock() var key = CreateKeyName(); var db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var ts = db.TS(); TimeStamp timeStamp = DateTime.UtcNow; - Assert.Equal(timeStamp, await db.TS().AddAsync(key, timeStamp, 1.1)); - await Assert.ThrowsAsync(async () => await db.TS().AddAsync(key, timeStamp, 1.2)); + Assert.Equal(timeStamp, await ts.AddAsync(key, timeStamp, 1.1)); + await Assert.ThrowsAsync(async () => await ts.AddAsync(key, timeStamp, 1.2)); } [Fact] @@ -133,17 +141,18 @@ public async Task TestAddWithDuplicatePolicyMin() var key = CreateKeyName(); var db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var ts = db.TS(); TimeStamp timeStamp = DateTime.UtcNow; - Assert.Equal(timeStamp, await db.TS().AddAsync(key, timeStamp, 1.1)); + Assert.Equal(timeStamp, await ts.AddAsync(key, timeStamp, 1.1)); // Insert a bigger number and check that it did not change the value. - Assert.Equal(timeStamp, await db.TS().AddAsync(key, timeStamp, 1.2, duplicatePolicy: TsDuplicatePolicy.MIN)); - IReadOnlyList results = await db.TS().RangeAsync(key, timeStamp, timeStamp); + Assert.Equal(timeStamp, await ts.AddAsync(key, timeStamp, 1.2, duplicatePolicy: TsDuplicatePolicy.MIN)); + IReadOnlyList results = await ts.RangeAsync(key, timeStamp, timeStamp); Assert.Equal(1.1, results[0].Val); // Insert a smaller number and check that it changed. - Assert.Equal(timeStamp, await db.TS().AddAsync(key, timeStamp, 1.0, duplicatePolicy: TsDuplicatePolicy.MIN)); - results = await db.TS().RangeAsync(key, timeStamp, timeStamp); + Assert.Equal(timeStamp, await ts.AddAsync(key, timeStamp, 1.0, duplicatePolicy: TsDuplicatePolicy.MIN)); + results = await ts.RangeAsync(key, timeStamp, timeStamp); Assert.Equal(1.0, results[0].Val); } @@ -153,16 +162,17 @@ public async Task TestAddWithDuplicatePolicyMax() var key = CreateKeyName(); var db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var ts = db.TS(); TimeStamp timeStamp = DateTime.UtcNow; - Assert.Equal(timeStamp, await db.TS().AddAsync(key, timeStamp, 1.1)); + Assert.Equal(timeStamp, await ts.AddAsync(key, timeStamp, 1.1)); // Insert a smaller number and check that it did not change the value. - Assert.Equal(timeStamp, await db.TS().AddAsync(key, timeStamp, 1.0, duplicatePolicy: TsDuplicatePolicy.MAX)); - IReadOnlyList results = await db.TS().RangeAsync(key, timeStamp, timeStamp); + Assert.Equal(timeStamp, await ts.AddAsync(key, timeStamp, 1.0, duplicatePolicy: TsDuplicatePolicy.MAX)); + IReadOnlyList results = await ts.RangeAsync(key, timeStamp, timeStamp); Assert.Equal(1.1, results[0].Val); // Insert a bigger number and check that it changed. 
- Assert.Equal(timeStamp, await db.TS().AddAsync(key, timeStamp, 1.2, duplicatePolicy: TsDuplicatePolicy.MAX)); - results = await db.TS().RangeAsync(key, timeStamp, timeStamp); + Assert.Equal(timeStamp, await ts.AddAsync(key, timeStamp, 1.2, duplicatePolicy: TsDuplicatePolicy.MAX)); + results = await ts.RangeAsync(key, timeStamp, timeStamp); Assert.Equal(1.2, results[0].Val); } @@ -172,10 +182,11 @@ public async Task TestAddWithDuplicatePolicySum() var key = CreateKeyName(); var db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var ts = db.TS(); TimeStamp timeStamp = DateTime.UtcNow; - Assert.Equal(timeStamp, await db.TS().AddAsync(key, timeStamp, 1.1)); - Assert.Equal(timeStamp, await db.TS().AddAsync(key, timeStamp, 1.0, duplicatePolicy: TsDuplicatePolicy.SUM)); - IReadOnlyList results = await db.TS().RangeAsync(key, timeStamp, timeStamp); + Assert.Equal(timeStamp, await ts.AddAsync(key, timeStamp, 1.1)); + Assert.Equal(timeStamp, await ts.AddAsync(key, timeStamp, 1.0, duplicatePolicy: TsDuplicatePolicy.SUM)); + IReadOnlyList results = await ts.RangeAsync(key, timeStamp, timeStamp); Assert.Equal(2.1, results[0].Val); } @@ -185,10 +196,11 @@ public async Task TestAddWithDuplicatePolicyFirst() var key = CreateKeyName(); var db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var ts = db.TS(); TimeStamp timeStamp = DateTime.UtcNow; - Assert.Equal(timeStamp, await db.TS().AddAsync(key, timeStamp, 1.1)); - Assert.Equal(timeStamp, await db.TS().AddAsync(key, timeStamp, 1.0, duplicatePolicy: TsDuplicatePolicy.FIRST)); - IReadOnlyList results = await db.TS().RangeAsync(key, timeStamp, timeStamp); + Assert.Equal(timeStamp, await ts.AddAsync(key, timeStamp, 1.1)); + Assert.Equal(timeStamp, await ts.AddAsync(key, timeStamp, 1.0, duplicatePolicy: TsDuplicatePolicy.FIRST)); + IReadOnlyList results = await ts.RangeAsync(key, timeStamp, timeStamp); Assert.Equal(1.1, results[0].Val); } @@ -198,10 +210,11 @@ public async Task TestAddWithDuplicatePolicyLast() var key = CreateKeyName(); var db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var ts = db.TS(); TimeStamp timeStamp = DateTime.UtcNow; - Assert.Equal(timeStamp, await db.TS().AddAsync(key, timeStamp, 1.1)); - Assert.Equal(timeStamp, await db.TS().AddAsync(key, timeStamp, 1.0, duplicatePolicy: TsDuplicatePolicy.LAST)); - IReadOnlyList results = await db.TS().RangeAsync(key, timeStamp, timeStamp); + Assert.Equal(timeStamp, await ts.AddAsync(key, timeStamp, 1.1)); + Assert.Equal(timeStamp, await ts.AddAsync(key, timeStamp, 1.0, duplicatePolicy: TsDuplicatePolicy.LAST)); + IReadOnlyList results = await ts.RangeAsync(key, timeStamp, timeStamp); Assert.Equal(1.0, results[0].Val); } @@ -211,13 +224,14 @@ public async Task TestOldAdd() var key = CreateKeyName(); var db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var ts = db.TS(); var dateTime = DateTime.UtcNow; TimeStamp oldTimeStamp = dateTime.AddSeconds(-1); TimeStamp newTimeStamp = dateTime; - await db.TS().CreateAsync(key); - await db.TS().AddAsync(key, newTimeStamp, 1.1); + await ts.CreateAsync(key); + await ts.AddAsync(key, newTimeStamp, 1.1); // Adding old event - Assert.Equal(oldTimeStamp, await db.TS().AddAsync(key, oldTimeStamp, 1.1)); + Assert.Equal(oldTimeStamp, await ts.AddAsync(key, oldTimeStamp, 1.1)); } [Fact] @@ -227,10 +241,11 @@ public async Task TestWrongParameters() var value = 1.1; var db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); - var ex = await Assert.ThrowsAsync(async () => await db.TS().AddAsync(key, 
"+", value)); + var ts = db.TS(); + var ex = await Assert.ThrowsAsync(async () => await ts.AddAsync(key, "+", value)); Assert.Equal("ERR TSDB: invalid timestamp", ex.Message); - ex = await Assert.ThrowsAsync(async () => await db.TS().AddAsync(key, "-", value)); + ex = await Assert.ThrowsAsync(async () => await ts.AddAsync(key, "-", value)); Assert.Equal("ERR TSDB: invalid timestamp", ex.Message); } } diff --git a/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestAlter.cs b/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestAlter.cs index 9c9e32a4..0d335c2a 100644 --- a/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestAlter.cs +++ b/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestAlter.cs @@ -25,9 +25,10 @@ public void TestAlterRetentionTime() long retentionTime = 5000; IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); - db.TS().Create(key); - Assert.True(db.TS().Alter(key, retentionTime: retentionTime)); - TimeSeriesInformation info = db.TS().Info(key); + var ts = db.TS(); + ts.Create(key); + Assert.True(ts.Alter(key, retentionTime: retentionTime)); + TimeSeriesInformation info = ts.Info(key); Assert.Equal(retentionTime, info.RetentionTime); } @@ -38,13 +39,14 @@ public void TestAlterLabels() var labels = new List { label }; IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); - db.TS().Create(key); - Assert.True(db.TS().Alter(key, labels: labels)); - TimeSeriesInformation info = db.TS().Info(key); + var ts = db.TS(); + ts.Create(key); + Assert.True(ts.Alter(key, labels: labels)); + TimeSeriesInformation info = ts.Info(key); Assert.Equal(labels, info.Labels); labels.Clear(); - Assert.True(db.TS().Alter(key, labels: labels)); - info = db.TS().Info(key); + Assert.True(ts.Alter(key, labels: labels)); + info = ts.Info(key); Assert.Equal(labels, info.Labels); } @@ -53,9 +55,10 @@ public void TestAlterPolicyAndChunk() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); - db.TS().Create(key); - Assert.True(db.TS().Alter(key, chunkSizeBytes: 128, duplicatePolicy: TsDuplicatePolicy.MIN)); - TimeSeriesInformation info = db.TS().Info(key); + var ts = db.TS(); + ts.Create(key); + Assert.True(ts.Alter(key, chunkSizeBytes: 128, duplicatePolicy: TsDuplicatePolicy.MIN)); + TimeSeriesInformation info = ts.Info(key); Assert.Equal(info.ChunkSize, 128); Assert.Equal(info.DuplicatePolicy, TsDuplicatePolicy.MIN); } diff --git a/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestAlterAsync.cs b/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestAlterAsync.cs index b3ad0f4c..9e9903b1 100644 --- a/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestAlterAsync.cs +++ b/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestAlterAsync.cs @@ -18,10 +18,11 @@ public async Task TestAlterRetentionTime() long retentionTime = 5000; var db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); - await db.TS().CreateAsync(key); - Assert.True(await db.TS().AlterAsync(key, retentionTime: retentionTime)); + var ts = db.TS(); + await ts.CreateAsync(key); + Assert.True(await ts.AlterAsync(key, retentionTime: retentionTime)); - var info = await db.TS().InfoAsync(key); + var info = await ts.InfoAsync(key); Assert.Equal(retentionTime, info.RetentionTime); } @@ -31,18 +32,19 @@ public async Task TestAlterLabels() var key = CreateKeyName(); var db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var ts = db.TS(); var label = new TimeSeriesLabel("key", "value"); var labels = new List { label }; - await db.TS().CreateAsync(key); - Assert.True(await db.TS().AlterAsync(key, labels: 
labels)); + await ts.CreateAsync(key); + Assert.True(await ts.AlterAsync(key, labels: labels)); - var info = await db.TS().InfoAsync(key); + var info = await ts.InfoAsync(key); Assert.Equal(labels, info.Labels); labels.Clear(); - Assert.True(await db.TS().AlterAsync(key, labels: labels)); + Assert.True(await ts.AlterAsync(key, labels: labels)); - info = await db.TS().InfoAsync(key); + info = await ts.InfoAsync(key); Assert.Equal(labels, info.Labels); } @@ -52,9 +54,10 @@ public async Task TestAlterPolicyAndChunkAsync() var key = CreateKeyName(); var db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); - db.TS().Create(key); - Assert.True(await db.TS().AlterAsync(key, chunkSizeBytes: 128, duplicatePolicy: TsDuplicatePolicy.MIN)); - TimeSeriesInformation info = db.TS().Info(key); + var ts = db.TS(); + ts.Create(key); + Assert.True(await ts.AlterAsync(key, chunkSizeBytes: 128, duplicatePolicy: TsDuplicatePolicy.MIN)); + TimeSeriesInformation info = ts.Info(key); Assert.Equal(info.ChunkSize, 128); Assert.Equal(info.DuplicatePolicy, TsDuplicatePolicy.MIN); } diff --git a/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestCreate.cs b/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestCreate.cs index e3f6e29e..c2f2e995 100644 --- a/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestCreate.cs +++ b/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestCreate.cs @@ -25,8 +25,9 @@ public void TestCreateOK() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); - Assert.True(db.TS().Create(key)); - TimeSeriesInformation info = db.TS().Info(key); + var ts = db.TS(); + Assert.True(ts.Create(key)); + TimeSeriesInformation info = ts.Info(key); } [Fact] @@ -35,8 +36,9 @@ public void TestCreateRetentionTime() long retentionTime = 5000; IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); - Assert.True(db.TS().Create(key, retentionTime: retentionTime)); - TimeSeriesInformation info = db.TS().Info(key); + var ts = db.TS(); + Assert.True(ts.Create(key, retentionTime: retentionTime)); + TimeSeriesInformation info = ts.Info(key); Assert.Equal(retentionTime, info.RetentionTime); } @@ -47,8 +49,9 @@ public void TestCreateLabels() var labels = new List { label }; IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); - Assert.True(db.TS().Create(key, labels: labels)); - TimeSeriesInformation info = db.TS().Info(key); + var ts = db.TS(); + Assert.True(ts.Create(key, labels: labels)); + TimeSeriesInformation info = ts.Info(key); Assert.Equal(labels, info.Labels); } @@ -58,8 +61,9 @@ public void TestCreateEmptyLabels() var labels = new List(); IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); - Assert.True(db.TS().Create(key, labels: labels)); - TimeSeriesInformation info = db.TS().Info(key); + var ts = db.TS(); + Assert.True(ts.Create(key, labels: labels)); + TimeSeriesInformation info = ts.Info(key); Assert.Equal(labels, info.Labels); } @@ -68,7 +72,8 @@ public void TestCreateUncompressed() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); - Assert.True(db.TS().Create(key, uncompressed: true)); + var ts = db.TS(); + Assert.True(ts.Create(key, uncompressed: true)); } [Fact] @@ -76,7 +81,8 @@ public void TestCreatehDuplicatePolicyFirst() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); - Assert.True(db.TS().Create(key, duplicatePolicy: TsDuplicatePolicy.FIRST)); + var ts = db.TS(); + Assert.True(ts.Create(key, duplicatePolicy: TsDuplicatePolicy.FIRST)); } [Fact] @@ -84,7 +90,8 @@ public void 
TestCreatehDuplicatePolicyLast() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); - Assert.True(db.TS().Create(key, duplicatePolicy: TsDuplicatePolicy.LAST)); + var ts = db.TS(); + Assert.True(ts.Create(key, duplicatePolicy: TsDuplicatePolicy.LAST)); } [Fact] @@ -92,7 +99,8 @@ public void TestCreatehDuplicatePolicyMin() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); - Assert.True(db.TS().Create(key, duplicatePolicy: TsDuplicatePolicy.MIN)); + var ts = db.TS(); + Assert.True(ts.Create(key, duplicatePolicy: TsDuplicatePolicy.MIN)); } [Fact] @@ -100,7 +108,8 @@ public void TestCreatehDuplicatePolicyMax() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); - Assert.True(db.TS().Create(key, duplicatePolicy: TsDuplicatePolicy.MAX)); + var ts = db.TS(); + Assert.True(ts.Create(key, duplicatePolicy: TsDuplicatePolicy.MAX)); } [Fact] @@ -108,7 +117,8 @@ public void TestCreatehDuplicatePolicySum() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); - Assert.True(db.TS().Create(key, duplicatePolicy: TsDuplicatePolicy.SUM)); + var ts = db.TS(); + Assert.True(ts.Create(key, duplicatePolicy: TsDuplicatePolicy.SUM)); } } } diff --git a/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestCreateAsync.cs b/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestCreateAsync.cs index 92725f17..7cc66c52 100644 --- a/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestCreateAsync.cs +++ b/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestCreateAsync.cs @@ -17,7 +17,8 @@ public async Task TestCreateOK() var key = CreateKeyName(); var db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); - Assert.True(await db.TS().CreateAsync(key)); + var ts = db.TS(); + Assert.True(await ts.CreateAsync(key)); } [Fact] @@ -27,9 +28,10 @@ public async Task TestCreateRetentionTime() long retentionTime = 5000; var db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); - Assert.True(await db.TS().CreateAsync(key, retentionTime: retentionTime)); + var ts = db.TS(); + Assert.True(await ts.CreateAsync(key, retentionTime: retentionTime)); - var info = await db.TS().InfoAsync(key); + var info = await ts.InfoAsync(key); Assert.Equal(retentionTime, info.RetentionTime); } @@ -41,9 +43,10 @@ public async Task TestCreateLabels() var labels = new List { label }; var db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); - Assert.True(await db.TS().CreateAsync(key, labels: labels)); + var ts = db.TS(); + Assert.True(await ts.CreateAsync(key, labels: labels)); - var info = await db.TS().InfoAsync(key); + var info = await ts.InfoAsync(key); Assert.Equal(labels, info.Labels); } @@ -54,9 +57,10 @@ public async Task TestCreateEmptyLabels() var labels = new List(); var db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); - Assert.True(await db.TS().CreateAsync(key, labels: labels)); + var ts = db.TS(); + Assert.True(await ts.CreateAsync(key, labels: labels)); - var info = await db.TS().InfoAsync(key); + var info = await ts.InfoAsync(key); Assert.Equal(labels, info.Labels); } @@ -66,7 +70,8 @@ public async Task TestCreateUncompressed() var key = CreateKeyName(); var db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); - Assert.True(await db.TS().CreateAsync(key, uncompressed: true)); + var ts = db.TS(); + Assert.True(await ts.CreateAsync(key, uncompressed: true)); } [Fact] @@ -75,7 +80,8 @@ public async void TestCreatehDuplicatePolicyFirst() var key = CreateKeyName(); var db = redisFixture.Redis.GetDatabase(); 
db.Execute("FLUSHALL"); - Assert.True(await db.TS().CreateAsync(key, duplicatePolicy: TsDuplicatePolicy.FIRST)); + var ts = db.TS(); + Assert.True(await ts.CreateAsync(key, duplicatePolicy: TsDuplicatePolicy.FIRST)); } [Fact] @@ -84,7 +90,8 @@ public async void TestCreatehDuplicatePolicyLast() var key = CreateKeyName(); var db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); - Assert.True(await db.TS().CreateAsync(key, duplicatePolicy: TsDuplicatePolicy.LAST)); + var ts = db.TS(); + Assert.True(await ts.CreateAsync(key, duplicatePolicy: TsDuplicatePolicy.LAST)); } [Fact] @@ -93,7 +100,8 @@ public async void TestCreatehDuplicatePolicyMin() var key = CreateKeyName(); var db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); - Assert.True(await db.TS().CreateAsync(key, duplicatePolicy: TsDuplicatePolicy.MIN)); + var ts = db.TS(); + Assert.True(await ts.CreateAsync(key, duplicatePolicy: TsDuplicatePolicy.MIN)); } [Fact] @@ -102,7 +110,8 @@ public async void TestCreatehDuplicatePolicyMax() var key = CreateKeyName(); var db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); - Assert.True(await db.TS().CreateAsync(key, duplicatePolicy: TsDuplicatePolicy.MAX)); + var ts = db.TS(); + Assert.True(await ts.CreateAsync(key, duplicatePolicy: TsDuplicatePolicy.MAX)); } [Fact] @@ -111,7 +120,8 @@ public async void TestCreatehDuplicatePolicySum() var key = CreateKeyName(); var db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); - Assert.True(await db.TS().CreateAsync(key, duplicatePolicy: TsDuplicatePolicy.SUM)); + var ts = db.TS(); + Assert.True(await ts.CreateAsync(key, duplicatePolicy: TsDuplicatePolicy.SUM)); } } } diff --git a/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestDecrBy.cs b/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestDecrBy.cs index a7fe22ff..e61ca926 100644 --- a/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestDecrBy.cs +++ b/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestDecrBy.cs @@ -24,8 +24,9 @@ public void TestDefaultDecrBy() double value = 5.5; IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); - Assert.True(db.TS().DecrBy(key, -value) > 0); - Assert.Equal(value, db.TS().Get(key).Val); + var ts = db.TS(); + Assert.True(ts.DecrBy(key, -value) > 0); + Assert.Equal(value, ts.Get(key).Val); } [Fact] @@ -34,8 +35,9 @@ public void TestStarDecrBy() double value = 5.5; IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); - Assert.True(db.TS().DecrBy(key, -value, timestamp: "*") > 0); - Assert.Equal(value, db.TS().Get(key).Val); + var ts = db.TS(); + Assert.True(ts.DecrBy(key, -value, timestamp: "*") > 0); + Assert.Equal(value, ts.Get(key).Val); } [Fact] @@ -44,9 +46,10 @@ public void TestDecrByTimeStamp() double value = 5.5; IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var ts = db.TS(); TimeStamp timeStamp = DateTime.UtcNow; - Assert.Equal(timeStamp, db.TS().DecrBy(key, -value, timestamp: timeStamp)); - Assert.Equal(new TimeSeriesTuple(timeStamp, value), db.TS().Get(key)); + Assert.Equal(timeStamp, ts.DecrBy(key, -value, timestamp: timeStamp)); + Assert.Equal(new TimeSeriesTuple(timeStamp, value), ts.Get(key)); } [Fact] @@ -56,9 +59,10 @@ public void TestDefaultDecrByWithRetentionTime() long retentionTime = 5000; IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); - Assert.True(db.TS().DecrBy(key, -value, retentionTime: retentionTime) > 0); - Assert.Equal(value, db.TS().Get(key).Val); - TimeSeriesInformation info = db.TS().Info(key); + var ts = db.TS(); + 
Assert.True(ts.DecrBy(key, -value, retentionTime: retentionTime) > 0); + Assert.Equal(value, ts.Get(key).Val); + TimeSeriesInformation info = ts.Info(key); Assert.Equal(retentionTime, info.RetentionTime); } @@ -69,10 +73,11 @@ public void TestDefaultDecrByWithLabels() TimeSeriesLabel label = new TimeSeriesLabel("key", "value"); IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var ts = db.TS(); var labels = new List { label }; - Assert.True(db.TS().DecrBy(key, -value, labels: labels) > 0); - Assert.Equal(value, db.TS().Get(key).Val); - TimeSeriesInformation info = db.TS().Info(key); + Assert.True(ts.DecrBy(key, -value, labels: labels) > 0); + Assert.Equal(value, ts.Get(key).Val); + TimeSeriesInformation info = ts.Info(key); Assert.Equal(labels, info.Labels); } @@ -82,8 +87,9 @@ public void TestDefaultDecrByWithUncompressed() double value = 5.5; IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); - Assert.True(db.TS().DecrBy(key, -value, uncompressed: true) > 0); - Assert.Equal(value, db.TS().Get(key).Val); + var ts = db.TS(); + Assert.True(ts.DecrBy(key, -value, uncompressed: true) > 0); + Assert.Equal(value, ts.Get(key).Val); } [Fact] @@ -92,9 +98,10 @@ public void TestWrongParameters() double value = 5.5; IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); - var ex = Assert.Throws(() => db.TS().DecrBy(key, value, timestamp: "+")); + var ts = db.TS(); + var ex = Assert.Throws(() => ts.DecrBy(key, value, timestamp: "+")); Assert.Equal("ERR TSDB: invalid timestamp", ex.Message); - ex = Assert.Throws(() => db.TS().DecrBy(key, value, timestamp: "-")); + ex = Assert.Throws(() => ts.DecrBy(key, value, timestamp: "-")); Assert.Equal("ERR TSDB: invalid timestamp", ex.Message); } } diff --git a/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestDecrByAsync.cs b/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestDecrByAsync.cs index 34ef7180..d302080f 100644 --- a/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestDecrByAsync.cs +++ b/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestDecrByAsync.cs @@ -19,9 +19,10 @@ public async Task TestDefaultDecrBy() var value = 5.5; var db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); - Assert.True(await db.TS().DecrByAsync(key, -value) > 0); + var ts = db.TS(); + Assert.True(await ts.DecrByAsync(key, -value) > 0); - var result = await db.TS().GetAsync(key); + var result = await ts.GetAsync(key); Assert.Equal(value, result.Val); } @@ -32,9 +33,10 @@ public async Task TestStarDecrBy() var value = 5.5; var db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); - Assert.True(await db.TS().DecrByAsync(key, -value, timestamp: "*") > 0); + var ts = db.TS(); + Assert.True(await ts.DecrByAsync(key, -value, timestamp: "*") > 0); - var result = await db.TS().GetAsync(key); + var result = await ts.GetAsync(key); Assert.Equal(value, result.Val); } @@ -45,9 +47,10 @@ public async Task TestDecrByTimeStamp() var value = 5.5; var db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var ts = db.TS(); TimeStamp timeStamp = DateTime.UtcNow; - Assert.Equal(timeStamp, await db.TS().DecrByAsync(key, -value, timestamp: timeStamp)); - Assert.Equal(new TimeSeriesTuple(timeStamp, value), await db.TS().GetAsync(key)); + Assert.Equal(timeStamp, await ts.DecrByAsync(key, -value, timestamp: timeStamp)); + Assert.Equal(new TimeSeriesTuple(timeStamp, value), await ts.GetAsync(key)); } [Fact] @@ -58,12 +61,13 @@ public async Task TestDefaultDecrByWithRetentionTime() long retentionTime = 5000; var db = 
redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); - Assert.True(await db.TS().DecrByAsync(key, -value, retentionTime: retentionTime) > 0); + var ts = db.TS(); + Assert.True(await ts.DecrByAsync(key, -value, retentionTime: retentionTime) > 0); - var result = await db.TS().GetAsync(key); + var result = await ts.GetAsync(key); Assert.Equal(value, result.Val); - var info = await db.TS().InfoAsync(key); + var info = await ts.InfoAsync(key); Assert.Equal(retentionTime, info.RetentionTime); } @@ -75,13 +79,14 @@ public async Task TestDefaultDecrByWithLabels() var label = new TimeSeriesLabel("key", "value"); var db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var ts = db.TS(); var labels = new List { label }; - Assert.True(await db.TS().DecrByAsync(key, -value, labels: labels) > 0); + Assert.True(await ts.DecrByAsync(key, -value, labels: labels) > 0); - var result = await db.TS().GetAsync(key); + var result = await ts.GetAsync(key); Assert.Equal(value, result.Val); - var info = await db.TS().InfoAsync(key); + var info = await ts.InfoAsync(key); Assert.Equal(labels, info.Labels); } @@ -92,9 +97,10 @@ public async Task TestDefaultDecrByWithUncompressed() var value = 5.5; var db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); - Assert.True(await db.TS().DecrByAsync(key, -value, uncompressed: true) > 0); + var ts = db.TS(); + Assert.True(await ts.DecrByAsync(key, -value, uncompressed: true) > 0); - var result = await db.TS().GetAsync(key); + var result = await ts.GetAsync(key); Assert.Equal(value, result.Val); } @@ -105,10 +111,11 @@ public async Task TestWrongParameters() var value = 5.5; var db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); - var ex = await Assert.ThrowsAsync(async () => await db.TS().DecrByAsync(key, value, timestamp: "+")); + var ts = db.TS(); + var ex = await Assert.ThrowsAsync(async () => await ts.DecrByAsync(key, value, timestamp: "+")); Assert.Equal("ERR TSDB: invalid timestamp", ex.Message); - ex = await Assert.ThrowsAsync(async () => await db.TS().DecrByAsync(key, value, timestamp: "-")); + ex = await Assert.ThrowsAsync(async () => await ts.DecrByAsync(key, value, timestamp: "-")); Assert.Equal("ERR TSDB: invalid timestamp", ex.Message); } } diff --git a/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestDel.cs b/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestDel.cs index fff0bcec..84f25501 100644 --- a/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestDel.cs +++ b/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestDel.cs @@ -19,13 +19,13 @@ public void Dispose() redisFixture.Redis.GetDatabase().KeyDelete(key); } - private List CreateData(IDatabase db, int timeBucket) + private List CreateData(TimeSeriesCommands ts, int timeBucket) //TODO: check this { var tuples = new List(); for (int i = 0; i < 10; i++) { - TimeStamp ts = db.TS().Add(key, i * timeBucket, i); - tuples.Add(new TimeSeriesTuple(ts, i)); + TimeStamp timeStamp = ts.Add(key, i * timeBucket, i); + tuples.Add(new TimeSeriesTuple(timeStamp, i)); } return tuples; } @@ -35,7 +35,8 @@ public void TestDelNotExists() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); - var ex = Assert.Throws(() => db.TS().Del(key, "-", "+")); + var ts = db.TS(); + var ex = Assert.Throws(() => ts.Del(key, "-", "+")); Assert.Equal("ERR TSDB: the key does not exist", ex.Message); } @@ -44,15 +45,16 @@ public void TestDelRange() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); - var tuples = CreateData(db, 50); + var ts = db.TS(); + var tuples = 
CreateData(ts, 50); TimeStamp from = tuples[0].Time; TimeStamp to = tuples[5].Time; - Assert.Equal(6, db.TS().Del(key, from, to)); + Assert.Equal(6, ts.Del(key, from, to)); // check that the operation deleted the timestamps - IReadOnlyList res = db.TS().Range(key, from, to); + IReadOnlyList res = ts.Range(key, from, to); Assert.Equal(0, res.Count); - Assert.NotNull(db.TS().Get(key)); + Assert.NotNull(ts.Get(key)); } } } diff --git a/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestDelAsync.cs b/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestDelAsync.cs index 19957736..b29bdd69 100644 --- a/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestDelAsync.cs +++ b/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestDelAsync.cs @@ -11,13 +11,13 @@ public class TestDelAsync : AbstractNRedisStackTest { public TestDelAsync(RedisFixture redisFixture) : base(redisFixture) { } - private async Task> CreateData(IDatabase db, string key, int timeBucket) + private async Task> CreateData(TimeSeriesCommands ts, string key, int timeBucket) { var tuples = new List(); for (var i = 0; i < 10; i++) { - var ts = await db.TS().AddAsync(key, i * timeBucket, i); - tuples.Add(new TimeSeriesTuple(ts, i)); + var timeStamp = await ts.AddAsync(key, i * timeBucket, i); + tuples.Add(new TimeSeriesTuple(timeStamp, i)); } return tuples; } @@ -28,7 +28,8 @@ public async Task TestDelNotExists() var key = CreateKeyName(); IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); - var ex = await Assert.ThrowsAsync(async () => await db.TS().DelAsync(key, "-", "+")); + var ts = db.TS(); + var ex = await Assert.ThrowsAsync(async () => await ts.DelAsync(key, "-", "+")); Assert.Equal("ERR TSDB: the key does not exist", ex.Message); } @@ -37,16 +38,17 @@ public async Task TestDelRange() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var ts = db.TS(); var key = CreateKeyName(); - var tuples = await CreateData(db, key, 50); + var tuples = await CreateData(ts, key, 50); TimeStamp from = tuples[0].Time; TimeStamp to = tuples[5].Time; - Assert.Equal(6, await db.TS().DelAsync(key, from, to)); + Assert.Equal(6, await ts.DelAsync(key, from, to)); // check that the operation deleted the timestamps - IReadOnlyList res = await db.TS().RangeAsync(key, from, to); + IReadOnlyList res = await ts.RangeAsync(key, from, to); Assert.Equal(0, res.Count); - Assert.NotNull(await db.TS().GetAsync(key)); + Assert.NotNull(await ts.GetAsync(key)); } } } diff --git a/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestGet.cs b/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestGet.cs index 1533e794..c1d62fbf 100644 --- a/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestGet.cs +++ b/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestGet.cs @@ -23,7 +23,8 @@ public void TestGetNotExists() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); - var ex = Assert.Throws(() => db.TS().Get(key)); + var ts = db.TS(); + var ex = Assert.Throws(() => ts.Get(key)); Assert.Equal("ERR TSDB: the key does not exist", ex.Message); } @@ -32,8 +33,9 @@ public void TestEmptyGet() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); - db.TS().Create(key); - Assert.Null(db.TS().Get(key)); + var ts = db.TS(); + ts.Create(key); + Assert.Null(ts.Get(key)); } [Fact] @@ -43,9 +45,10 @@ public void TestAddAndGet() TimeSeriesTuple expected = new TimeSeriesTuple(now, 1.1); IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); - db.TS().Create(key); - db.TS().Add(key, now, 1.1); - TimeSeriesTuple actual = 
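With the generic parameters that were lost from this hunk restored, the reworked CreateData helper in TestDel takes the command object directly rather than an IDatabase; roughly (a reconstruction, the List/TimeSeriesTuple annotations are inferred from the surrounding assertions, and key is the fixture's key field):

    private List<TimeSeriesTuple> CreateData(TimeSeriesCommands ts, int timeBucket)
    {
        var tuples = new List<TimeSeriesTuple>();
        for (int i = 0; i < 10; i++)
        {
            // TS.ADD returns the timestamp that was actually stored
            TimeStamp timeStamp = ts.Add(key, i * timeBucket, i);
            tuples.Add(new TimeSeriesTuple(timeStamp, i));
        }
        return tuples;
    }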
db.TS().Get(key); + var ts = db.TS(); + ts.Create(key); + ts.Add(key, now, 1.1); + TimeSeriesTuple actual = ts.Get(key); Assert.Equal(expected, actual); } } diff --git a/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestGetAsync.cs b/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestGetAsync.cs index bf33fcd5..4b069e61 100644 --- a/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestGetAsync.cs +++ b/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestGetAsync.cs @@ -17,7 +17,8 @@ public async Task TestGetNotExists() var key = CreateKeyName(); var db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); - var ex = await Assert.ThrowsAsync(async () => await db.TS().GetAsync(key)); + var ts = db.TS(); + var ex = await Assert.ThrowsAsync(async () => await ts.GetAsync(key)); Assert.Equal("ERR TSDB: the key does not exist", ex.Message); } @@ -27,8 +28,9 @@ public async Task TestEmptyGet() var key = CreateKeyName(); var db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); - await db.TS().CreateAsync(key); - Assert.Null(await db.TS().GetAsync(key)); + var ts = db.TS(); + await ts.CreateAsync(key); + Assert.Null(await ts.GetAsync(key)); } [Fact] @@ -39,9 +41,10 @@ public async Task TestAddAndGet() var expected = new TimeSeriesTuple(now, 1.1); var db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); - await db.TS().CreateAsync(key); - await db.TS().AddAsync(key, now, 1.1); - var actual = await db.TS().GetAsync(key); + var ts = db.TS(); + await ts.CreateAsync(key); + await ts.AddAsync(key, now, 1.1); + var actual = await ts.GetAsync(key); Assert.Equal(expected, actual); } } diff --git a/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestIncrBy.cs b/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestIncrBy.cs index 9673be58..0626fe6a 100644 --- a/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestIncrBy.cs +++ b/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestIncrBy.cs @@ -24,8 +24,9 @@ public void TestDefaultIncrBy() double value = 5.5; IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); - Assert.True(db.TS().IncrBy(key, value) > 0); - Assert.Equal(value, db.TS().Get(key).Val); + var ts = db.TS(); + Assert.True(ts.IncrBy(key, value) > 0); + Assert.Equal(value, ts.Get(key).Val); } [Fact] @@ -34,8 +35,9 @@ public void TestStarIncrBy() double value = 5.5; IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); - Assert.True(db.TS().IncrBy(key, value, timestamp: "*") > 0); - Assert.Equal(value, db.TS().Get(key).Val); + var ts = db.TS(); + Assert.True(ts.IncrBy(key, value, timestamp: "*") > 0); + Assert.Equal(value, ts.Get(key).Val); } [Fact] @@ -44,9 +46,10 @@ public void TestIncrByTimeStamp() double value = 5.5; IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var ts = db.TS(); TimeStamp timeStamp = DateTime.UtcNow; - Assert.Equal(timeStamp, db.TS().IncrBy(key, value, timestamp: timeStamp)); - Assert.Equal(new TimeSeriesTuple(timeStamp, value), db.TS().Get(key)); + Assert.Equal(timeStamp, ts.IncrBy(key, value, timestamp: timeStamp)); + Assert.Equal(new TimeSeriesTuple(timeStamp, value), ts.Get(key)); } [Fact] @@ -56,9 +59,10 @@ public void TestDefaultIncrByWithRetentionTime() long retentionTime = 5000; IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); - Assert.True(db.TS().IncrBy(key, value, retentionTime: retentionTime) > 0); - Assert.Equal(value, db.TS().Get(key).Val); - TimeSeriesInformation info = db.TS().Info(key); + var ts = db.TS(); + Assert.True(ts.IncrBy(key, value, retentionTime: retentionTime) > 0); + 
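The TestGet/TestGetAsync hunks above cover the three TS.GET behaviours: a missing key raises "ERR TSDB: the key does not exist", an empty series returns null, and a series with data returns its latest sample. Condensed (xUnit assertions as in the tests; the key name is illustrative and db/ts are set up as in the earlier sketch):

    var key = "ts:get-demo";
    // calling ts.Get(key) here would throw the server error above, because the key does not exist yet
    ts.Create(key);
    Assert.Null(ts.Get(key));                                  // series exists but holds no samples
    TimeStamp now = DateTime.UtcNow;
    ts.Add(key, now, 1.1);
    Assert.Equal(new TimeSeriesTuple(now, 1.1), ts.Get(key));  // latest sample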
Assert.Equal(value, ts.Get(key).Val); + TimeSeriesInformation info = ts.Info(key); Assert.Equal(retentionTime, info.RetentionTime); } @@ -69,10 +73,11 @@ public void TestDefaultIncrByWithLabels() TimeSeriesLabel label = new TimeSeriesLabel("key", "value"); IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var ts = db.TS(); var labels = new List { label }; - Assert.True(db.TS().IncrBy(key, value, labels: labels) > 0); - Assert.Equal(value, db.TS().Get(key).Val); - TimeSeriesInformation info = db.TS().Info(key); + Assert.True(ts.IncrBy(key, value, labels: labels) > 0); + Assert.Equal(value, ts.Get(key).Val); + TimeSeriesInformation info = ts.Info(key); Assert.Equal(labels, info.Labels); } @@ -82,8 +87,9 @@ public void TestDefaultIncrByWithUncompressed() double value = 5.5; IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); - Assert.True(db.TS().IncrBy(key, value, uncompressed: true) > 0); - Assert.Equal(value, db.TS().Get(key).Val); + var ts = db.TS(); + Assert.True(ts.IncrBy(key, value, uncompressed: true) > 0); + Assert.Equal(value, ts.Get(key).Val); } [Fact] @@ -92,9 +98,10 @@ public void TestWrongParameters() double value = 5.5; IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); - var ex = Assert.Throws(() => db.TS().IncrBy(key, value, timestamp: "+")); + var ts = db.TS(); + var ex = Assert.Throws(() => ts.IncrBy(key, value, timestamp: "+")); Assert.Equal("ERR TSDB: invalid timestamp", ex.Message); - ex = Assert.Throws(() => db.TS().IncrBy(key, value, timestamp: "-")); + ex = Assert.Throws(() => ts.IncrBy(key, value, timestamp: "-")); Assert.Equal("ERR TSDB: invalid timestamp", ex.Message); } } diff --git a/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestIncrByAsync.cs b/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestIncrByAsync.cs index 5bf80603..c066e036 100644 --- a/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestIncrByAsync.cs +++ b/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestIncrByAsync.cs @@ -19,9 +19,10 @@ public async Task TestDefaultIncrBy() var value = 5.5; var db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); - Assert.True(await db.TS().IncrByAsync(key, value) > 0); + var ts = db.TS(); + Assert.True(await ts.IncrByAsync(key, value) > 0); - var result = await db.TS().GetAsync(key); + var result = await ts.GetAsync(key); Assert.Equal(value, result.Val); } @@ -32,9 +33,10 @@ public async Task TestStarIncrBy() var value = 5.5; var db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); - Assert.True(await db.TS().IncrByAsync(key, value, timestamp: "*") > 0); + var ts = db.TS(); + Assert.True(await ts.IncrByAsync(key, value, timestamp: "*") > 0); - var result = await db.TS().GetAsync(key); + var result = await ts.GetAsync(key); Assert.Equal(value, result.Val); } @@ -45,9 +47,10 @@ public async Task TestIncrByTimeStamp() var value = 5.5; var db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var ts = db.TS(); TimeStamp timeStamp = DateTime.UtcNow; - Assert.Equal(timeStamp, await db.TS().IncrByAsync(key, value, timestamp: timeStamp)); - Assert.Equal(new TimeSeriesTuple(timeStamp, value), await db.TS().GetAsync(key)); + Assert.Equal(timeStamp, await ts.IncrByAsync(key, value, timestamp: timeStamp)); + Assert.Equal(new TimeSeriesTuple(timeStamp, value), await ts.GetAsync(key)); } [Fact] @@ -58,12 +61,13 @@ public async Task TestDefaultIncrByWithRetentionTime() long retentionTime = 5000; var db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); - Assert.True(await 
db.TS().IncrByAsync(key, value, retentionTime: retentionTime) > 0); + var ts = db.TS(); + Assert.True(await ts.IncrByAsync(key, value, retentionTime: retentionTime) > 0); - var result = await db.TS().GetAsync(key); + var result = await ts.GetAsync(key); Assert.Equal(value, result.Val); - var info = await db.TS().InfoAsync(key); + var info = await ts.InfoAsync(key); Assert.Equal(retentionTime, info.RetentionTime); } @@ -75,13 +79,14 @@ public async Task TestDefaultIncrByWithLabels() var label = new TimeSeriesLabel("key", "value"); var db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var ts = db.TS(); var labels = new List { label }; - Assert.True(await db.TS().IncrByAsync(key, value, labels: labels) > 0); + Assert.True(await ts.IncrByAsync(key, value, labels: labels) > 0); - var result = await db.TS().GetAsync(key); + var result = await ts.GetAsync(key); Assert.Equal(value, result.Val); - var info = await db.TS().InfoAsync(key); + var info = await ts.InfoAsync(key); Assert.Equal(labels, info.Labels); } @@ -92,9 +97,10 @@ public async Task TestDefaultIncrByWithUncompressed() var value = 5.5; var db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); - Assert.True(await db.TS().IncrByAsync(key, value, uncompressed: true) > 0); + var ts = db.TS(); + Assert.True(await ts.IncrByAsync(key, value, uncompressed: true) > 0); - var result = await db.TS().GetAsync(key); + var result = await ts.GetAsync(key); Assert.Equal(value, result.Val); } @@ -105,10 +111,11 @@ public async Task TestWrongParameters() var value = 5.5; var db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); - var ex = await Assert.ThrowsAsync(async () => await db.TS().IncrByAsync(key, value, timestamp: "+")); + var ts = db.TS(); + var ex = await Assert.ThrowsAsync(async () => await ts.IncrByAsync(key, value, timestamp: "+")); Assert.Equal("ERR TSDB: invalid timestamp", ex.Message); - ex = await Assert.ThrowsAsync(async () => await db.TS().IncrByAsync(key, value, timestamp: "-")); + ex = await Assert.ThrowsAsync(async () => await ts.IncrByAsync(key, value, timestamp: "-")); Assert.Equal("ERR TSDB: invalid timestamp", ex.Message); } } diff --git a/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestMADD.cs b/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestMADD.cs index a35268ca..0f7d25cb 100644 --- a/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestMADD.cs +++ b/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestMADD.cs @@ -28,23 +28,24 @@ public void TestStarMADD() IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var ts = db.TS(); foreach (string key in keys) { - db.TS().Create(key); + ts.Create(key); } List<(string, TimeStamp, double)> sequence = new List<(string, TimeStamp, double)>(keys.Length); foreach (var keyname in keys) { sequence.Add((keyname, "*", 1.1)); } - var response = db.TS().MAdd(sequence); + var response = ts.MAdd(sequence); Assert.Equal(keys.Length, response.Count); foreach (var key in keys) { - TimeSeriesInformation info = db.TS().Info(key); + TimeSeriesInformation info = ts.Info(key); Assert.True(info.FirstTimeStamp > 0); Assert.Equal(info.FirstTimeStamp, info.LastTimeStamp); } @@ -56,10 +57,11 @@ public void TestSuccessfulMADD() IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var ts = db.TS(); foreach (string key in keys) { - db.TS().Create(key); + ts.Create(key); } List<(string, TimeStamp, double)> sequence = new List<(string, TimeStamp, double)>(keys.Length); @@ -70,7 +72,7 @@ public void TestSuccessfulMADD() timestamps.Add(now); 
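TS.INCRBY accepts the same optional arguments these IncrBy tests exercise one at a time: timestamp, retentionTime, labels and uncompressed; the underlying Redis command also allows them together, which is not shown in the tests, so the combined call below is only a sketch (key, label and values illustrative; data types namespace assumed, setup as in the earlier sketch):

    var labels = new List<TimeSeriesLabel> { new TimeSeriesLabel("sensor", "temp") };
    ts.IncrBy("ts:incr-demo", 5.5,
              timestamp: "*",          // let the server assign the timestamp
              retentionTime: 5000,     // keep roughly five seconds of samples
              labels: labels,
              uncompressed: true);     // store this chunk uncompressed
    TimeSeriesInformation info = ts.Info("ts:incr-demo");
    // info.RetentionTime == 5000 and info.Labels contains the "sensor" label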
sequence.Add((keyname, now, 1.1)); } - var response = db.TS().MAdd(sequence); + var response = ts.MAdd(sequence); Assert.Equal(timestamps.Count, response.Count); for (int i = 0; i < response.Count; i++) @@ -84,10 +86,11 @@ public void TestOverrideMADD() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var ts = db.TS(); foreach (string key in keys) { - db.TS().Create(key); + ts.Create(key); } List oldTimeStamps = new List(); @@ -101,7 +104,7 @@ public void TestOverrideMADD() { sequence.Add((keyname, DateTime.UtcNow, 1.1)); } - db.TS().MAdd(sequence); + ts.MAdd(sequence); sequence.Clear(); @@ -110,7 +113,7 @@ public void TestOverrideMADD() { sequence.Add((keys[i], oldTimeStamps[i], 1.1)); } - var response = db.TS().MAdd(sequence); + var response = ts.MAdd(sequence); Assert.Equal(oldTimeStamps.Count, response.Count); for (int i = 0; i < response.Count; i++) diff --git a/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestMAddAsync.cs b/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestMAddAsync.cs index 6932ee99..6a2e6578 100644 --- a/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestMAddAsync.cs +++ b/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestMAddAsync.cs @@ -20,10 +20,11 @@ public async Task TestStarMADD() IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var ts = db.TS(); foreach (string key in keys) { - await db.TS().CreateAsync(key); + await ts.CreateAsync(key); } List<(string, TimeStamp, double)> sequence = new List<(string, TimeStamp, double)>(keys.Length); @@ -31,13 +32,13 @@ public async Task TestStarMADD() { sequence.Add((keyname, "*", 1.1)); } - var response = await db.TS().MAddAsync(sequence); + var response = await ts.MAddAsync(sequence); Assert.Equal(keys.Length, response.Count); foreach (var key in keys) { - TimeSeriesInformation info = await db.TS().InfoAsync(key); + TimeSeriesInformation info = await ts.InfoAsync(key); Assert.True(info.FirstTimeStamp > 0); Assert.Equal(info.FirstTimeStamp, info.LastTimeStamp); } @@ -50,10 +51,11 @@ public async Task TestSuccessfulMAdd() var keys = CreateKeyNames(2); var db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var ts = db.TS(); foreach (var key in keys) { - await db.TS().CreateAsync(key); + await ts.CreateAsync(key); } var sequence = new List<(string, TimeStamp, double)>(keys.Length); @@ -65,7 +67,7 @@ public async Task TestSuccessfulMAdd() sequence.Add((keyname, now, 1.1)); } - var response = await db.TS().MAddAsync(sequence); + var response = await ts.MAddAsync(sequence); Assert.Equal(timestamps.Count, response.Count); for (var i = 0; i < response.Count; i++) { @@ -79,10 +81,11 @@ public async Task TestOverrideMAdd() var keys = CreateKeyNames(2); var db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var ts = db.TS(); foreach (var key in keys) { - await db.TS().CreateAsync(key); + await ts.CreateAsync(key); } var oldTimeStamps = new List(); @@ -97,7 +100,7 @@ public async Task TestOverrideMAdd() sequence.Add((keyname, DateTime.UtcNow, 1.1)); } - await db.TS().MAddAsync(sequence); + await ts.MAddAsync(sequence); sequence.Clear(); // Override the same events should not throw an error @@ -106,7 +109,7 @@ public async Task TestOverrideMAdd() sequence.Add((keys[i], oldTimeStamps[i], 1.1)); } - var response = await db.TS().MAddAsync(sequence); + var response = await ts.MAddAsync(sequence); Assert.Equal(oldTimeStamps.Count, response.Count); for (int i = 0; i < response.Count; i++) diff --git a/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestMGet.cs 
b/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestMGet.cs index 0f68bbbf..014e6c9c 100644 --- a/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestMGet.cs +++ b/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestMGet.cs @@ -27,17 +27,18 @@ public void TestMGetQuery() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var ts = db.TS(); var label1 = new TimeSeriesLabel("MGET_TESTS_1", "value"); var label2 = new TimeSeriesLabel("MGET_TESTS_2", "value2"); var labels1 = new List { label1, label2 }; var labels2 = new List { label1 }; - TimeStamp ts1 = db.TS().Add(keys[0], "*", 1.1, labels: labels1); + TimeStamp ts1 = ts.Add(keys[0], "*", 1.1, labels: labels1); TimeSeriesTuple tuple1 = new TimeSeriesTuple(ts1, 1.1); - TimeStamp ts2 = db.TS().Add(keys[1], "*", 2.2, labels: labels2); + TimeStamp ts2 = ts.Add(keys[1], "*", 2.2, labels: labels2); TimeSeriesTuple tuple2 = new TimeSeriesTuple(ts2, 2.2); - var results = db.TS().MGet(new List { "MGET_TESTS_1=value" }); + var results = ts.MGet(new List { "MGET_TESTS_1=value" }); Assert.Equal(2, results.Count); Assert.Equal(keys[0], results[0].key); Assert.Equal(tuple1, results[0].value); @@ -53,18 +54,19 @@ public void TestMGetQueryWithLabels() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var ts = db.TS(); var label1 = new TimeSeriesLabel("MGET_TESTS_1", "value"); var label2 = new TimeSeriesLabel("MGET_TESTS_2", "value2"); var labels1 = new List { label1, label2 }; var labels2 = new List { label1 }; - TimeStamp ts1 = db.TS().Add(keys[0], "*", 1.1, labels: labels1); + TimeStamp ts1 = ts.Add(keys[0], "*", 1.1, labels: labels1); TimeSeriesTuple tuple1 = new TimeSeriesTuple(ts1, 1.1); - TimeStamp ts2 = db.TS().Add(keys[1], "*", 2.2, labels: labels2); + TimeStamp ts2 = ts.Add(keys[1], "*", 2.2, labels: labels2); TimeSeriesTuple tuple2 = new TimeSeriesTuple(ts2, 2.2); - var results = db.TS().MGet(new List { "MGET_TESTS_1=value" }, withLabels: true); + var results = ts.MGet(new List { "MGET_TESTS_1=value" }, withLabels: true); Assert.Equal(2, results.Count); Assert.Equal(keys[0], results[0].key); Assert.Equal(tuple1, results[0].value); @@ -79,18 +81,19 @@ public void TestMGetQuerySelectedLabels() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var ts = db.TS(); var label1 = new TimeSeriesLabel("MGET_TESTS_1", "value"); var label2 = new TimeSeriesLabel("MGET_TESTS_2", "value2"); var labels1 = new List { label1, label2 }; var labels2 = new List { label1 }; - TimeStamp ts1 = db.TS().Add(keys[0], "*", 1.1, labels: labels1); + TimeStamp ts1 = ts.Add(keys[0], "*", 1.1, labels: labels1); TimeSeriesTuple tuple1 = new TimeSeriesTuple(ts1, 1.1); - TimeStamp ts2 = db.TS().Add(keys[1], "*", 2.2, labels: labels2); + TimeStamp ts2 = ts.Add(keys[1], "*", 2.2, labels: labels2); TimeSeriesTuple tuple2 = new TimeSeriesTuple(ts2, 2.2); - var results = db.TS().MGet(new List { "MGET_TESTS_1=value" }, selectedLabels: new List{"MGET_TESTS_1"}); + var results = ts.MGet(new List { "MGET_TESTS_1=value" }, selectedLabels: new List { "MGET_TESTS_1" }); Assert.Equal(2, results.Count); Assert.Equal(keys[0], results[0].key); Assert.Equal(tuple1, results[0].value); diff --git a/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestMGetAsync.cs b/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestMGetAsync.cs index a3626957..6b9bfc4d 100644 --- a/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestMGetAsync.cs +++ b/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestMGetAsync.cs @@ -16,18 +16,19 @@ public async Task TestMGetQuery() var 
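TS.MADD, which the TestMADD/TestMAddAsync changes above migrate to the cached handle, takes one (key, timestamp, value) tuple per sample and returns one timestamp per sample written. Sketch (keys illustrative; the series are created first, as the tests do):

    ts.Create("ts:a");
    ts.Create("ts:b");
    var sequence = new List<(string, TimeStamp, double)>
    {
        ("ts:a", "*", 1.1),   // "*" lets the server choose the timestamp
        ("ts:b", "*", 1.1),
    };
    var response = ts.MAdd(sequence);
    // response.Count == sequence.Count; each entry is the timestamp that was stored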
keys = CreateKeyNames(2); var db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var ts = db.TS(); var label1 = new TimeSeriesLabel(keys[0], "value"); var label2 = new TimeSeriesLabel(keys[1], "value2"); var labels1 = new List { label1, label2 }; var labels2 = new List { label1 }; - var ts1 = await db.TS().AddAsync(keys[0], "*", 1.1, labels: labels1); + var ts1 = await ts.AddAsync(keys[0], "*", 1.1, labels: labels1); var tuple1 = new TimeSeriesTuple(ts1, 1.1); - var ts2 = await db.TS().AddAsync(keys[1], "*", 2.2, labels: labels2); + var ts2 = await ts.AddAsync(keys[1], "*", 2.2, labels: labels2); var tuple2 = new TimeSeriesTuple(ts2, 2.2); - var results = await db.TS().MGetAsync(new List { $"{keys[0]}=value" }); + var results = await ts.MGetAsync(new List { $"{keys[0]}=value" }); Assert.Equal(2, results.Count); Assert.Equal(keys[0], results[0].key); Assert.Equal(tuple1, results[0].value); @@ -43,18 +44,19 @@ public async Task TestMGetQueryWithLabels() var keys = CreateKeyNames(2); var db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var ts = db.TS(); var label1 = new TimeSeriesLabel(keys[0], "value"); var label2 = new TimeSeriesLabel(keys[1], "value2"); var labels1 = new List { label1, label2 }; var labels2 = new List { label1 }; - var ts1 = await db.TS().AddAsync(keys[0], "*", 1.1, labels: labels1); + var ts1 = await ts.AddAsync(keys[0], "*", 1.1, labels: labels1); var tuple1 = new TimeSeriesTuple(ts1, 1.1); - var ts2 = await db.TS().AddAsync(keys[1], "*", 2.2, labels: labels2); + var ts2 = await ts.AddAsync(keys[1], "*", 2.2, labels: labels2); var tuple2 = new TimeSeriesTuple(ts2, 2.2); - var results = await db.TS().MGetAsync(new List { $"{keys[0]}=value" }, withLabels: true); + var results = await ts.MGetAsync(new List { $"{keys[0]}=value" }, withLabels: true); Assert.Equal(2, results.Count); Assert.Equal(keys[0], results[0].key); Assert.Equal(tuple1, results[0].value); @@ -70,18 +72,19 @@ public async Task TestMGetQuerySelectedLabelsAsync() var keys = CreateKeyNames(2); var db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var ts = db.TS(); var label1 = new TimeSeriesLabel("MGET_TESTS_1", "value"); var label2 = new TimeSeriesLabel("MGET_TESTS_2", "value2"); var labels1 = new List { label1, label2 }; var labels2 = new List { label1 }; - TimeStamp ts1 = await db.TS().AddAsync(keys[0], "*", 1.1, labels: labels1); + TimeStamp ts1 = await ts.AddAsync(keys[0], "*", 1.1, labels: labels1); TimeSeriesTuple tuple1 = new TimeSeriesTuple(ts1, 1.1); - TimeStamp ts2 = await db.TS().AddAsync(keys[1], "*", 2.2, labels: labels2); + TimeStamp ts2 = await ts.AddAsync(keys[1], "*", 2.2, labels: labels2); TimeSeriesTuple tuple2 = new TimeSeriesTuple(ts2, 2.2); - var results = await db.TS().MGetAsync(new List { "MGET_TESTS_1=value" }, selectedLabels: new List{"MGET_TESTS_1"}); + var results = await ts.MGetAsync(new List { "MGET_TESTS_1=value" }, selectedLabels: new List { "MGET_TESTS_1" }); Assert.Equal(2, results.Count); Assert.Equal(keys[0], results[0].key); Assert.Equal(tuple1, results[0].value); diff --git a/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestMRange.cs b/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestMRange.cs index 0eaf81db..49f64e8a 100644 --- a/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestMRange.cs +++ b/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestMRange.cs @@ -22,18 +22,18 @@ public void Dispose() } } - private List CreateData(IDatabase db, int timeBucket) + private List CreateData(TimeSeriesCommands ts, int timeBucket) { var tuples = new 
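TS.MGET, exercised just above, selects series by a "label=value" filter and returns the latest sample of each; withLabels: true adds the full label set, while selectedLabels limits the reply to the named labels. Sketch (keys and labels illustrative; TS.ADD creates each series with the given labels):

    var teamCto = new List<TimeSeriesLabel> { new TimeSeriesLabel("team", "CTO") };
    var teamAut = new List<TimeSeriesLabel> { new TimeSeriesLabel("team", "AUT") };
    ts.Add("ts:a", "*", 1.1, labels: teamCto);
    ts.Add("ts:b", "*", 2.2, labels: teamAut);

    var bare     = ts.MGet(new List<string> { "team=CTO" });                     // key + latest sample, no labels
    var full     = ts.MGet(new List<string> { "team=CTO" }, withLabels: true);   // labels included
    var selected = ts.MGet(new List<string> { "team=AUT" },
                           selectedLabels: new List<string> { "team" });         // only the "team" label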
List(); for (int i = 0; i < 10; i++) { - TimeStamp ts = new TimeStamp(i * timeBucket); + TimeStamp timeStamp = new TimeStamp(i * timeBucket); foreach (var key in keys) { - db.TS().Add(key, ts, i); + ts.Add(key, timeStamp, i); } - tuples.Add(new TimeSeriesTuple(ts, i)); + tuples.Add(new TimeSeriesTuple(timeStamp, i)); } return tuples; } @@ -43,15 +43,16 @@ public void TestSimpleMRange() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var ts = db.TS(); TimeSeriesLabel label = new TimeSeriesLabel("MRANGEkey", "MRANGEvalue"); var labels = new List { label }; foreach (string key in keys) { - db.TS().Create(key, labels: labels); + ts.Create(key, labels: labels); } - var tuples = CreateData(db, 50); - var results = db.TS().MRange("-", "+", new List { "MRANGEkey=MRANGEvalue" }); + var tuples = CreateData(ts, 50); + var results = ts.MRange("-", "+", new List { "MRANGEkey=MRANGEvalue" }); Assert.Equal(keys.Length, results.Count); for (int i = 0; i < results.Count; i++) { @@ -65,15 +66,16 @@ public void TestMRangeWithLabels() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var ts = db.TS(); TimeSeriesLabel label = new TimeSeriesLabel("key", "MRangeWithLabels"); var labels = new List { label }; foreach (string key in keys) { - db.TS().Create(key, labels: labels); + ts.Create(key, labels: labels); } - var tuples = CreateData(db, 50); - var results = db.TS().MRange("-", "+", new List { "key=MRangeWithLabels" }, withLabels: true); + var tuples = CreateData(ts, 50); + var results = ts.MRange("-", "+", new List { "key=MRangeWithLabels" }, withLabels: true); Assert.Equal(keys.Length, results.Count); for (int i = 0; i < results.Count; i++) { @@ -88,20 +90,21 @@ public void TestMRangeSelectLabels() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var ts = db.TS(); TimeSeriesLabel label1 = new TimeSeriesLabel("key", "MRangeSelectLabels"); TimeSeriesLabel[] labels = new TimeSeriesLabel[] { new TimeSeriesLabel("team", "CTO"), new TimeSeriesLabel("team", "AUT") }; for (int i = 0; i < keys.Length; i++) { - db.TS().Create(keys[i], labels: new List { label1, labels[i] }); + ts.Create(keys[i], labels: new List { label1, labels[i] }); } - var tuples = CreateData(db, 50); + var tuples = CreateData(ts, 50); // selectLabels and withlabels are mutualy exclusive. 
- var ex = Assert.Throws(() => db.TS().MRange("-", "+", new List { "key=MRangeSelectLabels" }, + var ex = Assert.Throws(() => ts.MRange("-", "+", new List { "key=MRangeSelectLabels" }, withLabels: true, selectLabels: new List { "team" })); Assert.Equal("withLabels and selectLabels cannot be specified together.", ex.Message); - var results = db.TS().MRange("-", "+", new List { "key=MRangeSelectLabels" }, selectLabels: new List { "team" }); + var results = ts.MRange("-", "+", new List { "key=MRangeSelectLabels" }, selectLabels: new List { "team" }); Assert.Equal(keys.Length, results.Count); for (int i = 0; i < results.Count; i++) { @@ -116,11 +119,12 @@ public void TestMRangeFilter() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var ts = db.TS(); TimeSeriesLabel label = new TimeSeriesLabel("key", "MRangeFilter"); var labels = new List { label }; - db.TS().Create(keys[0], labels: labels); - var tuples = CreateData(db, 50); - var results = db.TS().MRange("-", "+", new List { "key=MRangeFilter" }); + ts.Create(keys[0], labels: labels); + var tuples = CreateData(ts, 50); + var results = ts.MRange("-", "+", new List { "key=MRangeFilter" }); Assert.Equal(1, results.Count); Assert.Equal(keys[0], results[0].key); Assert.Equal(0, results[0].labels.Count); @@ -132,16 +136,17 @@ public void TestMRangeCount() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var ts = db.TS(); TimeSeriesLabel label = new TimeSeriesLabel("key", "MRangeCount"); var labels = new List { label }; foreach (string key in keys) { - db.TS().Create(key, labels: labels); + ts.Create(key, labels: labels); } - var tuples = CreateData(db, 50); + var tuples = CreateData(ts, 50); long count = 5; - var results = db.TS().MRange("-", "+", new List { "key=MRangeCount" }, count: count); + var results = ts.MRange("-", "+", new List { "key=MRangeCount" }, count: count); Assert.Equal(keys.Length, results.Count); for (int i = 0; i < results.Count; i++) { @@ -156,15 +161,16 @@ public void TestMRangeAggregation() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var ts = db.TS(); TimeSeriesLabel label = new TimeSeriesLabel("key", "MRangeAggregation"); var labels = new List { label }; foreach (string key in keys) { - db.TS().Create(key, labels: labels); + ts.Create(key, labels: labels); } - var tuples = CreateData(db, 50); - var results = db.TS().MRange("-", "+", new List { "key=MRangeAggregation" }, aggregation: TsAggregation.Min, timeBucket: 50); + var tuples = CreateData(ts, 50); + var results = ts.MRange("-", "+", new List { "key=MRangeAggregation" }, aggregation: TsAggregation.Min, timeBucket: 50); Assert.Equal(keys.Length, results.Count); for (int i = 0; i < results.Count; i++) { @@ -179,20 +185,21 @@ public void TestMRangeAlign() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var ts = db.TS(); TimeSeriesLabel label = new TimeSeriesLabel("key", "MRangeAlign"); var labels = new List { label }; - db.TS().Create(keys[0], labels: labels); - CreateData(db, 50); + ts.Create(keys[0], labels: labels); + CreateData(ts, 50); var expected = new List { new TimeSeriesTuple(0,1), new TimeSeriesTuple(50,1), new TimeSeriesTuple(100,1) }; - var results = db.TS().MRange(0, "+", new List { "key=MRangeAlign" }, align: "-", aggregation: TsAggregation.Count, timeBucket: 10, count: 3); + var results = ts.MRange(0, "+", new List { "key=MRangeAlign" }, align: "-", aggregation: TsAggregation.Count, timeBucket: 10, count: 3); Assert.Equal(1, 
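As the comment in TestMRangeSelectLabels notes, withLabels and selectLabels are mutually exclusive on MRANGE; passing both makes the client throw "withLabels and selectLabels cannot be specified together.", which is the message this test asserts. Only one of the two forms is accepted, e.g. (filter illustrative, setup as in the earlier sketches):

    // ts.MRange("-", "+", new List<string> { "team=CTO" },
    //           withLabels: true, selectLabels: new List<string> { "team" });   // throws the message above
    var results = ts.MRange("-", "+", new List<string> { "team=CTO" },
                            selectLabels: new List<string> { "team" });          // only the "team" label per series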
results.Count); Assert.Equal(keys[0], results[0].key); Assert.Equal(expected, results[0].values); - results = db.TS().MRange(1, 500, new List { "key=MRangeAlign" }, align: "+", aggregation: TsAggregation.Count, timeBucket: 10, count: 1); + results = ts.MRange(1, 500, new List { "key=MRangeAlign" }, align: "+", aggregation: TsAggregation.Count, timeBucket: 10, count: 1); Assert.Equal(expected[1], results[0].values[0]); } @@ -201,15 +208,16 @@ public void TestMissingFilter() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var ts = db.TS(); TimeSeriesLabel label = new TimeSeriesLabel("key", "MissingFilter"); var labels = new List { label }; foreach (string key in keys) { - db.TS().Create(key, labels: labels); + ts.Create(key, labels: labels); } - var tuples = CreateData(db, 50); - var ex = Assert.Throws(() => db.TS().MRange("-", "+", new List())); + var tuples = CreateData(ts, 50); + var ex = Assert.Throws(() => ts.MRange("-", "+", new List())); Assert.Equal("There should be at least one filter on MRANGE/MREVRANGE", ex.Message); } @@ -218,15 +226,16 @@ public void TestMissingTimeBucket() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var ts = db.TS(); TimeSeriesLabel label = new TimeSeriesLabel("key", "MissingTimeBucket"); var labels = new List { label }; foreach (string key in keys) { - db.TS().Create(key, labels: labels); + ts.Create(key, labels: labels); } - var tuples = CreateData(db, 50); - var ex = Assert.Throws(() => db.TS().MRange("-", "+", new List { "key=MissingTimeBucket" }, aggregation: TsAggregation.Avg)); + var tuples = CreateData(ts, 50); + var ex = Assert.Throws(() => ts.MRange("-", "+", new List { "key=MissingTimeBucket" }, aggregation: TsAggregation.Avg)); Assert.Equal("RANGE Aggregation should have timeBucket value", ex.Message); } @@ -235,15 +244,16 @@ public void TestMRangeGroupby() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var ts = db.TS(); for (int i = 0; i < keys.Length; i++) { var label1 = new TimeSeriesLabel("key", "MRangeGroupby"); var label2 = new TimeSeriesLabel("group", i.ToString()); - db.TS().Create(keys[i], labels: new List { label1, label2 }); + ts.Create(keys[i], labels: new List { label1, label2 }); } - var tuples = CreateData(db, 50); - var results = db.TS().MRange("-", "+", new List { "key=MRangeGroupby" }, withLabels: true, groupbyTuple: ("group", TsReduce.Min)); + var tuples = CreateData(ts, 50); + var results = ts.MRange("-", "+", new List { "key=MRangeGroupby" }, withLabels: true, groupbyTuple: ("group", TsReduce.Min)); Assert.Equal(keys.Length, results.Count); for (int i = 0; i < results.Count; i++) { @@ -260,14 +270,15 @@ public void TestMRangeReduce() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var ts = db.TS(); foreach (var key in keys) { var label = new TimeSeriesLabel("key", "MRangeReduce"); - db.TS().Create(key, labels: new List { label }); + ts.Create(key, labels: new List { label }); } - var tuples = CreateData(db, 50); - var results = db.TS().MRange("-", "+", new List { "key=MRangeReduce" }, withLabels: true, groupbyTuple: ("key", TsReduce.Sum)); + var tuples = CreateData(ts, 50); + var results = ts.MRange("-", "+", new List { "key=MRangeReduce" }, withLabels: true, groupbyTuple: ("key", TsReduce.Sum)); Assert.Equal(1, results.Count); Assert.Equal("key=MRangeReduce", results[0].key); Assert.Equal(new TimeSeriesLabel("key", "MRangeReduce"), results[0].labels[0]); @@ -284,21 +295,22 @@ public void TestMRangeFilterBy() 
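The remaining MRange tests cover the server-side options: count, aggregation with a timeBucket, align, groupbyTuple with a reducer, and value filtering. Each call below mirrors one of those tests (filter label and bucket size illustrative; TsAggregation/TsReduce are assumed to come from the library's enums namespace):

    var perBucket = ts.MRange("-", "+", new List<string> { "team=CTO" },
                              aggregation: TsAggregation.Min, timeBucket: 50);   // MIN per 50 ms bucket
    var grouped   = ts.MRange("-", "+", new List<string> { "team=CTO" },
                              withLabels: true,
                              groupbyTuple: ("team", TsReduce.Sum));             // one reduced series per "team" value
    var inRange   = ts.MRange("-", "+", new List<string> { "team=CTO" },
                              filterByValue: (0, 2));                            // keep samples whose value lies in [0, 2]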
{ IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var ts = db.TS(); TimeSeriesLabel label = new TimeSeriesLabel("key", "MRangeFilterBy"); var labels = new List { label }; foreach (string key in keys) { - db.TS().Create(key, labels: labels); + ts.Create(key, labels: labels); } - var tuples = CreateData(db, 50); - var results = db.TS().MRange("-", "+", new List { "key=MRangeFilterBy" }, filterByValue: (0, 2)); + var tuples = CreateData(ts, 50); + var results = ts.MRange("-", "+", new List { "key=MRangeFilterBy" }, filterByValue: (0, 2)); for (int i = 0; i < results.Count; i++) { Assert.Equal(tuples.GetRange(0, 3), results[i].values); } - results = db.TS().MRange("-", "+", new List { "key=MRangeFilterBy" }, filterByTs: new List { 0 }, filterByValue: (0, 2)); + results = ts.MRange("-", "+", new List { "key=MRangeFilterBy" }, filterByTs: new List { 0 }, filterByValue: (0, 2)); for (int i = 0; i < results.Count; i++) { Assert.Equal(tuples.GetRange(0, 1), results[i].values); diff --git a/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestMRangeAsync.cs b/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestMRangeAsync.cs index 0beca62f..dd326e12 100644 --- a/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestMRangeAsync.cs +++ b/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestMRangeAsync.cs @@ -13,18 +13,18 @@ public class TestMRangeAsync : AbstractNRedisStackTest { public TestMRangeAsync(RedisFixture redisFixture) : base(redisFixture) { } - private async Task> CreateData(IDatabase db, string[] keys, int timeBucket) + private async Task> CreateData(TimeSeriesCommands ts, string[] keys, int timeBucket) { var tuples = new List(); for (var i = 0; i < 10; i++) { - var ts = new TimeStamp(i * timeBucket); + var timeStamp = new TimeStamp(i * timeBucket); foreach (var key in keys) { - await db.TS().AddAsync(key, ts, i); + await ts.AddAsync(key, timeStamp, i); } - tuples.Add(new TimeSeriesTuple(ts, i)); + tuples.Add(new TimeSeriesTuple(timeStamp, i)); } return tuples; @@ -36,15 +36,16 @@ public async Task TestSimpleMRange() var keys = CreateKeyNames(2); var db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var ts = db.TS(); var label = new TimeSeriesLabel(keys[0], "value"); var labels = new List { label }; foreach (var key in keys) { - await db.TS().CreateAsync(key, labels: labels); + await ts.CreateAsync(key, labels: labels); } - var tuples = await CreateData(db, keys, 50); - var results = db.TS().MRange("-", "+", new List { $"{keys[0]}=value" }); + var tuples = await CreateData(ts, keys, 50); + var results = ts.MRange("-", "+", new List { $"{keys[0]}=value" }); Assert.Equal(keys.Length, results.Count); for (var i = 0; i < results.Count; i++) { @@ -59,15 +60,16 @@ public async Task TestMRangeWithLabels() var keys = CreateKeyNames(2); var db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var ts = db.TS(); var label = new TimeSeriesLabel(keys[0], "value"); var labels = new List { label }; foreach (var key in keys) { - await db.TS().CreateAsync(key, labels: labels); + await ts.CreateAsync(key, labels: labels); } - var tuples = await CreateData(db, keys, 50); - var results = await db.TS().MRangeAsync("-", "+", new List { $"{keys[0]}=value" }, withLabels: true); + var tuples = await CreateData(ts, keys, 50); + var results = await ts.MRangeAsync("-", "+", new List { $"{keys[0]}=value" }, withLabels: true); Assert.Equal(keys.Length, results.Count); for (var i = 0; i < results.Count; i++) { @@ -83,24 +85,25 @@ public async Task TestMRangeSelectLabels() var keys = 
CreateKeyNames(2); IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var ts = db.TS(); TimeSeriesLabel label1 = new TimeSeriesLabel(keys[0], "value"); TimeSeriesLabel[] labels = new TimeSeriesLabel[] { new TimeSeriesLabel("team", "CTO"), new TimeSeriesLabel("team", "AUT") }; for (int i = 0; i < keys.Length; i++) { - await db.TS().CreateAsync(keys[i], labels: new List { label1, labels[i] }); + await ts.CreateAsync(keys[i], labels: new List { label1, labels[i] }); } - var tuples = await CreateData(db, keys, 50); + var tuples = await CreateData(ts, keys, 50); // selectLabels and withlabels are mutualy exclusive. var ex = await Assert.ThrowsAsync(async () => { - await db.TS().MRangeAsync("-", "+", + await ts.MRangeAsync("-", "+", new List { "key=MRangeSelectLabels" }, withLabels: true, selectLabels: new List { "team" }); }); Assert.Equal("withLabels and selectLabels cannot be specified together.", ex.Message); - var results = await db.TS().MRangeAsync("-", "+", new List { $"{keys[0]}=value" }, selectLabels: new List { "team" }); + var results = await ts.MRangeAsync("-", "+", new List { $"{keys[0]}=value" }, selectLabels: new List { "team" }); Assert.Equal(keys.Length, results.Count); for (int i = 0; i < results.Count; i++) { @@ -116,11 +119,12 @@ public async Task TestMRangeFilter() var keys = CreateKeyNames(2); var db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var ts = db.TS(); var label = new TimeSeriesLabel(keys[0], "value"); var labels = new List { label }; - await db.TS().CreateAsync(keys[0], labels: labels); - var tuples = await CreateData(db, keys, 50); - var results = await db.TS().MRangeAsync("-", "+", new List { $"{keys[0]}=value" }); + await ts.CreateAsync(keys[0], labels: labels); + var tuples = await CreateData(ts, keys, 50); + var results = await ts.MRangeAsync("-", "+", new List { $"{keys[0]}=value" }); Assert.Equal(1, results.Count); Assert.Equal(keys[0], results[0].key); Assert.Equal(0, results[0].labels.Count); @@ -133,16 +137,17 @@ public async Task TestMRangeCount() var keys = CreateKeyNames(2); var db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var ts = db.TS(); var label = new TimeSeriesLabel(keys[0], "value"); var labels = new List { label }; foreach (var key in keys) { - await db.TS().CreateAsync(key, labels: labels); + await ts.CreateAsync(key, labels: labels); } - var tuples = await CreateData(db, keys, 50); + var tuples = await CreateData(ts, keys, 50); var count = 5L; - var results = await db.TS().MRangeAsync("-", "+", new List { $"{keys[0]}=value" }, count: count); + var results = await ts.MRangeAsync("-", "+", new List { $"{keys[0]}=value" }, count: count); Assert.Equal(keys.Length, results.Count); for (var i = 0; i < results.Count; i++) { @@ -158,15 +163,16 @@ public async Task TestMRangeAggregation() var keys = CreateKeyNames(2); var db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var ts = db.TS(); var label = new TimeSeriesLabel(keys[0], "value"); var labels = new List { label }; foreach (var key in keys) { - await db.TS().CreateAsync(key, labels: labels); + await ts.CreateAsync(key, labels: labels); } - var tuples = await CreateData(db, keys, 50); - var results = await db.TS().MRangeAsync("-", "+", new List { $"{keys[0]}=value" }, aggregation: TsAggregation.Min, timeBucket: 50); + var tuples = await CreateData(ts, keys, 50); + var results = await ts.MRangeAsync("-", "+", new List { $"{keys[0]}=value" }, aggregation: TsAggregation.Min, timeBucket: 50); Assert.Equal(keys.Length, 
results.Count); for (var i = 0; i < results.Count; i++) { @@ -182,20 +188,21 @@ public async Task TestMRangeAlign() var keys = CreateKeyNames(2); var db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var ts = db.TS(); var label = new TimeSeriesLabel(keys[0], "value"); var labels = new List { label }; - db.TS().Create(keys[0], labels: labels); - await CreateData(db, keys, 50); + ts.Create(keys[0], labels: labels); + await CreateData(ts, keys, 50); var expected = new List { new TimeSeriesTuple(0,1), new TimeSeriesTuple(50,1), new TimeSeriesTuple(100,1) }; - var results = await db.TS().MRangeAsync(0, "+", new List { $"{keys[0]}=value" }, align: "-", aggregation: TsAggregation.Count, timeBucket: 10, count: 3); + var results = await ts.MRangeAsync(0, "+", new List { $"{keys[0]}=value" }, align: "-", aggregation: TsAggregation.Count, timeBucket: 10, count: 3); Assert.Equal(1, results.Count); Assert.Equal(keys[0], results[0].key); Assert.Equal(expected, results[0].values); - results = await db.TS().MRangeAsync(0, 500, new List { $"{keys[0]}=value" }, align: "+", aggregation: TsAggregation.Count, timeBucket: 10, count: 1); + results = await ts.MRangeAsync(0, 500, new List { $"{keys[0]}=value" }, align: "+", aggregation: TsAggregation.Count, timeBucket: 10, count: 1); Assert.Equal(expected[0], results[0].values[0]); } @@ -205,15 +212,16 @@ public async Task TestMissingFilter() var keys = CreateKeyNames(2); var db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var ts = db.TS(); var label = new TimeSeriesLabel(keys[0], "value"); var labels = new List { label }; foreach (var key in keys) { - await db.TS().CreateAsync(key, labels: labels); + await ts.CreateAsync(key, labels: labels); } - var tuples = await CreateData(db, keys, 50); - var ex = await Assert.ThrowsAsync(async () => await db.TS().MRangeAsync("-", "+", new List())); + var tuples = await CreateData(ts, keys, 50); + var ex = await Assert.ThrowsAsync(async () => await ts.MRangeAsync("-", "+", new List())); Assert.Equal("There should be at least one filter on MRANGE/MREVRANGE", ex.Message); } @@ -223,17 +231,18 @@ public async Task TestMissingTimeBucket() var keys = CreateKeyNames(2); var db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var ts = db.TS(); var label = new TimeSeriesLabel(keys[0], "value"); var labels = new List { label }; foreach (var key in keys) { - await db.TS().CreateAsync(key, labels: labels); + await ts.CreateAsync(key, labels: labels); } - var tuples = await CreateData(db, keys, 50); + var tuples = await CreateData(ts, keys, 50); var ex = await Assert.ThrowsAsync(async () => { - await db.TS().MRangeAsync("-", "+", + await ts.MRangeAsync("-", "+", filter: new List() { $"key=value" }, aggregation: TsAggregation.Avg); }); @@ -245,15 +254,16 @@ public async Task TestMRangeGroupby() var keys = CreateKeyNames(2); var db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var ts = db.TS(); for (int i = 0; i < keys.Length; i++) { var label1 = new TimeSeriesLabel(keys[0], "value"); var label2 = new TimeSeriesLabel("group", i.ToString()); - await db.TS().CreateAsync(keys[i], labels: new List { label1, label2 }); + await ts.CreateAsync(keys[i], labels: new List { label1, label2 }); } - var tuples = await CreateData(db, keys, 50); - var results = await db.TS().MRangeAsync("-", "+", new List { $"{keys[0]}=value" }, withLabels: true, groupbyTuple: ("group", TsReduce.Min)); + var tuples = await CreateData(ts, keys, 50); + var results = await ts.MRangeAsync("-", "+", new List { 
$"{keys[0]}=value" }, withLabels: true, groupbyTuple: ("group", TsReduce.Min)); Assert.Equal(keys.Length, results.Count); for (int i = 0; i < results.Count; i++) { @@ -271,14 +281,15 @@ public async Task TestMRangeReduce() var keys = CreateKeyNames(2); var db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var ts = db.TS(); foreach (var key in keys) { var label = new TimeSeriesLabel(keys[0], "value"); - await db.TS().CreateAsync(key, labels: new List { label }); + await ts.CreateAsync(key, labels: new List { label }); } - var tuples = await CreateData(db, keys, 50); - var results = await db.TS().MRangeAsync("-", "+", new List { $"{keys[0]}=value" }, withLabels: true, groupbyTuple: (keys[0], TsReduce.Sum)); + var tuples = await CreateData(ts, keys, 50); + var results = await ts.MRangeAsync("-", "+", new List { $"{keys[0]}=value" }, withLabels: true, groupbyTuple: (keys[0], TsReduce.Sum)); Assert.Equal(1, results.Count); Assert.Equal($"{keys[0]}=value", results[0].key); Assert.Equal(new TimeSeriesLabel(keys[0], "value"), results[0].labels[0]); @@ -296,21 +307,22 @@ public async Task TestMRangeFilterBy() var keys = CreateKeyNames(2); var db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var ts = db.TS(); TimeSeriesLabel label = new TimeSeriesLabel(keys[0], "value"); var labels = new List { label }; foreach (string key in keys) { - await db.TS().CreateAsync(key, labels: labels); + await ts.CreateAsync(key, labels: labels); } - var tuples = await CreateData(db, keys, 50); - var results = await db.TS().MRangeAsync("-", "+", new List { $"{keys[0]}=value" }, filterByValue: (0, 2)); + var tuples = await CreateData(ts, keys, 50); + var results = await ts.MRangeAsync("-", "+", new List { $"{keys[0]}=value" }, filterByValue: (0, 2)); for (int i = 0; i < results.Count; i++) { Assert.Equal(tuples.GetRange(0, 3), results[i].values); } - results = await db.TS().MRangeAsync("-", "+", new List { $"{keys[0]}=value" }, filterByTs: new List { 0 }, filterByValue: (0, 2)); + results = await ts.MRangeAsync("-", "+", new List { $"{keys[0]}=value" }, filterByTs: new List { 0 }, filterByValue: (0, 2)); for (int i = 0; i < results.Count; i++) { Assert.Equal(tuples.GetRange(0, 1), results[i].values); diff --git a/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestMRevRange.cs b/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestMRevRange.cs index 9ab4c336..4a58bad4 100644 --- a/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestMRevRange.cs +++ b/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestMRevRange.cs @@ -12,19 +12,19 @@ public class TestMRevRange : AbstractNRedisStackTest { public TestMRevRange(RedisFixture redisFixture) : base(redisFixture) { } - private List CreateData(IDatabase db, string[] keys, int timeBucket) + private List CreateData(TimeSeriesCommands ts, string[] keys, int timeBucket) { var tuples = new List(); for (var i = 0; i < 10; i++) { - var ts = new TimeStamp(i * timeBucket); + var timeStamp = new TimeStamp(i * timeBucket); foreach (var key in keys) { - db.TS().Add(key, ts, i); + ts.Add(key, timeStamp, i); } - tuples.Add(new TimeSeriesTuple(ts, i)); + tuples.Add(new TimeSeriesTuple(timeStamp, i)); } return tuples; } @@ -35,15 +35,16 @@ public void TestSimpleMRevRange() var keys = CreateKeyNames(2); var db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var ts = db.TS(); var label = new TimeSeriesLabel(keys[0], "value"); var labels = new List { label }; foreach (var key in keys) { - db.TS().Create(key, labels: labels); + ts.Create(key, labels: labels); } - var tuples 
= CreateData(db, keys, 50); - var results = db.TS().MRevRange("-", "+", new List { $"{keys[0]}=value" }); + var tuples = CreateData(ts, keys, 50); + var results = ts.MRevRange("-", "+", new List { $"{keys[0]}=value" }); Assert.Equal(keys.Length, results.Count); for (var i = 0; i < results.Count; i++) { @@ -52,22 +53,23 @@ public void TestSimpleMRevRange() Assert.Equal(ReverseData(tuples), results[i].values); } } - + [Fact] public void TestMRevRangeWithLabels() { var keys = CreateKeyNames(2); var db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var ts = db.TS(); var label = new TimeSeriesLabel(keys[0], "value"); var labels = new List { label }; foreach (var key in keys) { - db.TS().Create(key, labels: labels); + ts.Create(key, labels: labels); } - var tuples = CreateData(db, keys, 50); - var results = db.TS().MRevRange("-", "+", new List { $"{keys[0]}=value" }, withLabels: true); + var tuples = CreateData(ts, keys, 50); + var results = ts.MRevRange("-", "+", new List { $"{keys[0]}=value" }, withLabels: true); Assert.Equal(keys.Length, results.Count); for (var i = 0; i < results.Count; i++) @@ -84,15 +86,16 @@ public void TestMRevRangeSelectLabels() var keys = CreateKeyNames(2); IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var ts = db.TS(); TimeSeriesLabel label1 = new TimeSeriesLabel("key", "MRangeSelectLabels"); TimeSeriesLabel[] labels = new TimeSeriesLabel[] { new TimeSeriesLabel("team", "CTO"), new TimeSeriesLabel("team", "AUT") }; for (int i = 0; i < keys.Length; i++) { - db.TS().Create(keys[i], labels: new List { label1, labels[i] }); + ts.Create(keys[i], labels: new List { label1, labels[i] }); } - var tuples = CreateData(db, keys, 50); - var results = db.TS().MRevRange("-", "+", new List { "key=MRangeSelectLabels" }, selectLabels: new List { "team" }); + var tuples = CreateData(ts, keys, 50); + var results = ts.MRevRange("-", "+", new List { "key=MRangeSelectLabels" }, selectLabels: new List { "team" }); Assert.Equal(keys.Length, results.Count); for (int i = 0; i < results.Count; i++) { @@ -108,11 +111,12 @@ public void TestMRevRangeFilter() var keys = CreateKeyNames(2); var db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var ts = db.TS(); var label = new TimeSeriesLabel(keys[0], "value"); var labels = new List { label }; - db.TS().Create(keys[0], labels: labels); - var tuples = CreateData(db, keys, 50); - var results = db.TS().MRevRange("-", "+", new List { $"{keys[0]}=value" }); + ts.Create(keys[0], labels: labels); + var tuples = CreateData(ts, keys, 50); + var results = ts.MRevRange("-", "+", new List { $"{keys[0]}=value" }); Assert.Equal(1, results.Count); Assert.Equal(keys[0], results[0].key); Assert.Equal(0, results[0].labels.Count); @@ -125,16 +129,17 @@ public void TestMRevRangeCount() var keys = CreateKeyNames(2); var db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var ts = db.TS(); var label = new TimeSeriesLabel(keys[0], "value"); var labels = new List { label }; foreach (var key in keys) { - db.TS().Create(key, labels: labels); + ts.Create(key, labels: labels); } - var tuples = CreateData(db, keys, 50); + var tuples = CreateData(ts, keys, 50); var count = 5L; - var results = db.TS().MRevRange("-", "+", new List { $"{keys[0]}=value" }, count: count); + var results = ts.MRevRange("-", "+", new List { $"{keys[0]}=value" }, count: count); Assert.Equal(keys.Length, results.Count); for (var i = 0; i < results.Count; i++) { @@ -150,15 +155,16 @@ public void TestMRevRangeAggregation() var keys = 
CreateKeyNames(2); var db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var ts = db.TS(); var label = new TimeSeriesLabel(keys[0], "value"); var labels = new List { label }; foreach (var key in keys) { - db.TS().Create(key, labels: labels); + ts.Create(key, labels: labels); } - var tuples = CreateData(db, keys, 50); - var results = db.TS().MRevRange("-", "+", new List { $"{keys[0]}=value" }, aggregation: TsAggregation.Min, timeBucket: 50); + var tuples = CreateData(ts, keys, 50); + var results = ts.MRevRange("-", "+", new List { $"{keys[0]}=value" }, aggregation: TsAggregation.Min, timeBucket: 50); Assert.Equal(keys.Length, results.Count); for (var i = 0; i < results.Count; i++) { @@ -174,20 +180,21 @@ public void TestMRevRangeAlign() var keys = CreateKeyNames(2); var db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var ts = db.TS(); var label = new TimeSeriesLabel(keys[0], "value"); var labels = new List { label }; - db.TS().Create(keys[0], labels: labels); - CreateData(db, keys, 50); + ts.Create(keys[0], labels: labels); + CreateData(ts, keys, 50); var expected = new List { new TimeSeriesTuple(450,1), new TimeSeriesTuple(400,1), new TimeSeriesTuple(350,1) }; - var results = db.TS().MRevRange(0, "+", new List { $"{keys[0]}=value" }, align: "-", aggregation: TsAggregation.Count, timeBucket: 10, count: 3); + var results = ts.MRevRange(0, "+", new List { $"{keys[0]}=value" }, align: "-", aggregation: TsAggregation.Count, timeBucket: 10, count: 3); Assert.Equal(1, results.Count); Assert.Equal(keys[0], results[0].key); Assert.Equal(expected, results[0].values); - results = db.TS().MRevRange(0, 500, new List { $"{keys[0]}=value" }, align: "+", aggregation: TsAggregation.Count, timeBucket: 10, count: 1); + results = ts.MRevRange(0, 500, new List { $"{keys[0]}=value" }, align: "+", aggregation: TsAggregation.Count, timeBucket: 10, count: 1); Assert.Equal(expected[0], results[0].values[0]); } @@ -197,15 +204,16 @@ public void TestMissingFilter() var keys = CreateKeyNames(2); var db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var ts = db.TS(); var label = new TimeSeriesLabel(keys[0], "value"); var labels = new List { label }; foreach (var key in keys) { - db.TS().Create(key, labels: labels); + ts.Create(key, labels: labels); } - var tuples = CreateData(db, keys, 50); - var ex = Assert.Throws(() => db.TS().MRevRange("-", "+", new List())); + var tuples = CreateData(ts, keys, 50); + var ex = Assert.Throws(() => ts.MRevRange("-", "+", new List())); Assert.Equal("There should be at least one filter on MRANGE/MREVRANGE", ex.Message); } @@ -215,15 +223,16 @@ public void TestMissingTimeBucket() var keys = CreateKeyNames(2); var db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var ts = db.TS(); var label = new TimeSeriesLabel(keys[0], "value"); var labels = new List { label }; foreach (var key in keys) { - db.TS().Create(key, labels: labels); + ts.Create(key, labels: labels); } - var tuples = CreateData(db, keys, 50); - var ex = Assert.Throws(() => db.TS().MRevRange("-", "+", new List { "key=MissingTimeBucket" }, aggregation: TsAggregation.Avg)); + var tuples = CreateData(ts, keys, 50); + var ex = Assert.Throws(() => ts.MRevRange("-", "+", new List { "key=MissingTimeBucket" }, aggregation: TsAggregation.Avg)); Assert.Equal("RANGE Aggregation should have timeBucket value", ex.Message); } @@ -233,15 +242,16 @@ public void TestMRevRangeGroupby() var keys = CreateKeyNames(2); var db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + 
var ts = db.TS(); for (int i = 0; i < keys.Length; i++) { var label1 = new TimeSeriesLabel(keys[0], "value"); var label2 = new TimeSeriesLabel("group", i.ToString()); - db.TS().Create(keys[i], labels: new List { label1, label2 }); + ts.Create(keys[i], labels: new List { label1, label2 }); } - var tuples = CreateData(db, keys, 50); - var results = db.TS().MRevRange("-", "+", new List { $"{keys[0]}=value" }, withLabels: true, groupbyTuple: ("group", TsReduce.Min)); + var tuples = CreateData(ts, keys, 50); + var results = ts.MRevRange("-", "+", new List { $"{keys[0]}=value" }, withLabels: true, groupbyTuple: ("group", TsReduce.Min)); Assert.Equal(keys.Length, results.Count); for (var i = 0; i < results.Count; i++) { @@ -251,7 +261,7 @@ public void TestMRevRangeGroupby() Assert.Equal(new TimeSeriesLabel("__source__", keys[i]), results[i].labels[2]); Assert.Equal(ReverseData(tuples), results[i].values); } - } + } [Fact] public void TestMRevRangeReduce() @@ -259,14 +269,15 @@ public void TestMRevRangeReduce() var keys = CreateKeyNames(2); var db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var ts = db.TS(); foreach (var key in keys) { var label = new TimeSeriesLabel(keys[0], "value"); - db.TS().Create(key, labels: new List { label }); + ts.Create(key, labels: new List { label }); } - var tuples = CreateData(db, keys, 50); - var results = db.TS().MRevRange("-", "+", new List { $"{keys[0]}=value" }, withLabels: true, groupbyTuple: (keys[0], TsReduce.Sum)); + var tuples = CreateData(ts, keys, 50); + var results = ts.MRevRange("-", "+", new List { $"{keys[0]}=value" }, withLabels: true, groupbyTuple: (keys[0], TsReduce.Sum)); Assert.Equal(1, results.Count); Assert.Equal($"{keys[0]}=value", results[0].key); Assert.Equal(new TimeSeriesLabel(keys[0], "value"), results[0].labels[0]); @@ -285,21 +296,22 @@ public void TestMRevRangeFilterBy() var keys = CreateKeyNames(2); var db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var ts = db.TS(); var label = new TimeSeriesLabel(keys[0], "value"); var labels = new List { label }; foreach (string key in keys) { - db.TS().Create(key, labels: labels); + ts.Create(key, labels: labels); } - var tuples = CreateData(db, keys, 50); - var results = db.TS().MRevRange("-", "+", new List { "key=MRangeFilterBy" }, filterByValue: (0, 2)); + var tuples = CreateData(ts, keys, 50); + var results = ts.MRevRange("-", "+", new List { "key=MRangeFilterBy" }, filterByValue: (0, 2)); for (int i = 0; i < results.Count; i++) { Assert.Equal(ReverseData(tuples.GetRange(0, 3)), results[i].values); } - results = db.TS().MRevRange("-", "+", new List { "key=MRangeFilterBy" }, filterByTs: new List { 0 }, filterByValue: (0, 2)); + results = ts.MRevRange("-", "+", new List { "key=MRangeFilterBy" }, filterByTs: new List { 0 }, filterByValue: (0, 2)); for (int i = 0; i < results.Count; i++) { Assert.Equal(ReverseData(tuples.GetRange(0, 1)), results[i].values); diff --git a/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestMRevRangeAsync.cs b/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestMRevRangeAsync.cs index 884a44eb..297dfff2 100644 --- a/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestMRevRangeAsync.cs +++ b/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestMRevRangeAsync.cs @@ -13,18 +13,18 @@ public class TestMRevRangeAsync : AbstractNRedisStackTest { public TestMRevRangeAsync(RedisFixture redisFixture) : base(redisFixture) { } - private async Task> CreateData(IDatabase db, string[] keys, int timeBucket) + private async Task> CreateData(TimeSeriesCommands ts, 
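TS.MREVRANGE takes the same arguments as TS.MRANGE but returns each matched series newest-first, which is why these tests compare against ReverseData(tuples). For instance (same setup and filter as the earlier sketches):

    var forward  = ts.MRange("-", "+", new List<string> { "team=CTO" });
    var backward = ts.MRevRange("-", "+", new List<string> { "team=CTO" });
    // backward[0].values holds the same samples as forward[0].values, in reverse chronological order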
+    private async Task<List<TimeSeriesTuple>> CreateData(TimeSeriesCommands ts, string[] keys, int timeBucket)
    {
        var tuples = new List<TimeSeriesTuple>();
        for (var i = 0; i < 10; i++)
        {
-            var ts = new TimeStamp(i * timeBucket);
+            var timeStamp = new TimeStamp(i * timeBucket);
            foreach (var key in keys)
            {
-                await db.TS().AddAsync(key, ts, i);
+                await ts.AddAsync(key, timeStamp, i);
            }
-            tuples.Add(new TimeSeriesTuple(ts, i));
+            tuples.Add(new TimeSeriesTuple(timeStamp, i));
        }
        return tuples;
@@ -36,15 +36,16 @@ public async Task TestSimpleMRevRange()
        var keys = CreateKeyNames(2);
        var db = redisFixture.Redis.GetDatabase();
        db.Execute("FLUSHALL");
+        var ts = db.TS();
        var label = new TimeSeriesLabel(keys[0], "value");
        var labels = new List<TimeSeriesLabel> { label };
        foreach (var key in keys)
        {
-            await db.TS().CreateAsync(key, labels: labels);
+            await ts.CreateAsync(key, labels: labels);
        }
-        var tuples = await CreateData(db, keys, 50);
-        var results = await db.TS().MRevRangeAsync("-", "+", new List<string> { $"{keys[0]}=value" });
+        var tuples = await CreateData(ts, keys, 50);
+        var results = await ts.MRevRangeAsync("-", "+", new List<string> { $"{keys[0]}=value" });
        Assert.Equal(keys.Length, results.Count);
        for (var i = 0; i < results.Count; i++)
        {
@@ -60,15 +61,16 @@ public async Task TestMRevRangeWithLabels()
        var keys = CreateKeyNames(2);
        var db = redisFixture.Redis.GetDatabase();
        db.Execute("FLUSHALL");
+        var ts = db.TS();
        var label = new TimeSeriesLabel(keys[0], "value");
        var labels = new List<TimeSeriesLabel> { label };
        foreach (var key in keys)
        {
-            await db.TS().CreateAsync(key, labels: labels);
+            await ts.CreateAsync(key, labels: labels);
        }
-        var tuples = await CreateData(db, keys, 50);
-        var results = await db.TS().MRevRangeAsync("-", "+", new List<string> { $"{keys[0]}=value" }, withLabels: true);
+        var tuples = await CreateData(ts, keys, 50);
+        var results = await ts.MRevRangeAsync("-", "+", new List<string> { $"{keys[0]}=value" }, withLabels: true);
        Assert.Equal(keys.Length, results.Count);
        for (var i = 0; i < results.Count; i++)
        {
@@ -84,15 +86,16 @@ public async Task TestMRevRangeSelectLabels()
        var keys = CreateKeyNames(2);
        IDatabase db = redisFixture.Redis.GetDatabase();
        db.Execute("FLUSHALL");
+        var ts = db.TS();
        TimeSeriesLabel label1 = new TimeSeriesLabel(keys[0], "value");
        TimeSeriesLabel[] labels = new TimeSeriesLabel[] { new TimeSeriesLabel("team", "CTO"), new TimeSeriesLabel("team", "AUT") };
        for (int i = 0; i < keys.Length; i++)
        {
-            await db.TS().CreateAsync(keys[i], labels: new List<TimeSeriesLabel> { label1, labels[i] });
+            await ts.CreateAsync(keys[i], labels: new List<TimeSeriesLabel> { label1, labels[i] });
        }
-        var tuples = await CreateData(db, keys, 50);
-        var results = await db.TS().MRevRangeAsync("-", "+", new List<string> { $"{keys[0]}=value" }, selectLabels: new List<string> { "team" });
+        var tuples = await CreateData(ts, keys, 50);
+        var results = await ts.MRevRangeAsync("-", "+", new List<string> { $"{keys[0]}=value" }, selectLabels: new List<string> { "team" });
        Assert.Equal(keys.Length, results.Count);
        for (int i = 0; i < results.Count; i++)
        {
@@ -108,11 +111,12 @@ public async Task TestMRevRangeFilter()
        var keys = CreateKeyNames(2);
        var db = redisFixture.Redis.GetDatabase();
        db.Execute("FLUSHALL");
+        var ts = db.TS();
        var label = new TimeSeriesLabel(keys[0], "value");
        var labels = new List<TimeSeriesLabel> { label };
-        await db.TS().CreateAsync(keys[0], labels: labels);
-        var tuples = await CreateData(db, keys, 50);
-        var results = await db.TS().MRevRangeAsync("-", "+", new List<string> { $"{keys[0]}=value" });
+        await ts.CreateAsync(keys[0], labels: labels);
+        var tuples = await CreateData(ts, keys, 50);
+        var results = await ts.MRevRangeAsync("-", "+", new List<string> { $"{keys[0]}=value" });
        Assert.Equal(1,
results.Count); Assert.Equal(keys[0], results[0].key); Assert.Equal(0, results[0].labels.Count); @@ -125,16 +129,17 @@ public async Task TestMRevRangeCount() var keys = CreateKeyNames(2); var db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var ts = db.TS(); var label = new TimeSeriesLabel(keys[0], "value"); var labels = new List { label }; foreach (var key in keys) { - await db.TS().CreateAsync(key, labels: labels); + await ts.CreateAsync(key, labels: labels); } - var tuples = await CreateData(db, keys, 50); + var tuples = await CreateData(ts, keys, 50); var count = 5L; - var results = await db.TS().MRevRangeAsync("-", "+", new List { $"{keys[0]}=value" }, count: count); + var results = await ts.MRevRangeAsync("-", "+", new List { $"{keys[0]}=value" }, count: count); Assert.Equal(keys.Length, results.Count); for (var i = 0; i < results.Count; i++) { @@ -150,15 +155,16 @@ public async Task TestMRangeAggregation() var keys = CreateKeyNames(2); var db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var ts = db.TS(); var label = new TimeSeriesLabel(keys[0], "value"); var labels = new List { label }; foreach (var key in keys) { - await db.TS().CreateAsync(key, labels: labels); + await ts.CreateAsync(key, labels: labels); } - var tuples = await CreateData(db, keys, 50); - var results = await db.TS().MRevRangeAsync("-", "+", new List { $"{keys[0]}=value" }, aggregation: TsAggregation.Min, timeBucket: 50); + var tuples = await CreateData(ts, keys, 50); + var results = await ts.MRevRangeAsync("-", "+", new List { $"{keys[0]}=value" }, aggregation: TsAggregation.Min, timeBucket: 50); Assert.Equal(keys.Length, results.Count); for (var i = 0; i < results.Count; i++) { @@ -174,20 +180,21 @@ public async Task TestMRevRangeAlign() var keys = CreateKeyNames(2); var db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var ts = db.TS(); var label = new TimeSeriesLabel(keys[0], "value"); var labels = new List { label }; - db.TS().Create(keys[0], labels: labels); - await CreateData(db, keys, 50); + ts.Create(keys[0], labels: labels); + await CreateData(ts, keys, 50); var expected = new List { new TimeSeriesTuple(450,1), new TimeSeriesTuple(400,1), new TimeSeriesTuple(350,1) }; - var results = await db.TS().MRevRangeAsync(0, "+", new List { $"{keys[0]}=value" }, align: "-", aggregation: TsAggregation.Count, timeBucket: 10, count: 3); + var results = await ts.MRevRangeAsync(0, "+", new List { $"{keys[0]}=value" }, align: "-", aggregation: TsAggregation.Count, timeBucket: 10, count: 3); Assert.Equal(1, results.Count); Assert.Equal(keys[0], results[0].key); Assert.Equal(expected, results[0].values); - results = await db.TS().MRevRangeAsync(0, 500, new List { $"{keys[0]}=value" }, align: "+", aggregation: TsAggregation.Count, timeBucket: 10, count: 1); + results = await ts.MRevRangeAsync(0, 500, new List { $"{keys[0]}=value" }, align: "+", aggregation: TsAggregation.Count, timeBucket: 10, count: 1); Assert.Equal(expected[0], results[0].values[0]); } @@ -197,15 +204,16 @@ public async Task TestMissingFilter() var keys = CreateKeyNames(2); var db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var ts = db.TS(); var label = new TimeSeriesLabel(keys[0], "value"); var labels = new List { label }; foreach (var key in keys) { - await db.TS().CreateAsync(key, labels: labels); + await ts.CreateAsync(key, labels: labels); } - var tuples = await CreateData(db, keys, 50); - var ex = await Assert.ThrowsAsync(async () => await db.TS().MRevRangeAsync("-", "+", new 
List())); + var tuples = await CreateData(ts, keys, 50); + var ex = await Assert.ThrowsAsync(async () => await ts.MRevRangeAsync("-", "+", new List())); Assert.Equal("There should be at least one filter on MRANGE/MREVRANGE", ex.Message); } @@ -215,17 +223,18 @@ public async Task TestMissingTimeBucket() var keys = CreateKeyNames(2); var db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var ts = db.TS(); var label = new TimeSeriesLabel(keys[0], "value"); var labels = new List { label }; foreach (var key in keys) { - await db.TS().CreateAsync(key, labels: labels); + await ts.CreateAsync(key, labels: labels); } - var tuples = await CreateData(db, keys, 50); + var tuples = await CreateData(ts, keys, 50); var ex = await Assert.ThrowsAsync(async () => { - await db.TS().MRevRangeAsync("-", "+", + await ts.MRevRangeAsync("-", "+", filter: new List() { $"key=value" }, aggregation: TsAggregation.Avg); }); @@ -238,17 +247,18 @@ public async Task TestMRevRangeGroupby() var keys = CreateKeyNames(2); var db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var ts = db.TS(); for (int i = 0; i < keys.Length; i++) { var label1 = new TimeSeriesLabel(keys[0], "value"); var label2 = new TimeSeriesLabel("group", i.ToString()); - await db.TS().CreateAsync(keys[i], labels: new List { label1, label2 }); + await ts.CreateAsync(keys[i], labels: new List { label1, label2 }); } - var tuples = await CreateData(db, keys, 50); - var results = await db.TS().MRevRangeAsync("-", "+", new List { $"{keys[0]}=value" }, withLabels: true, groupbyTuple: ("group", TsReduce.Min)); + var tuples = await CreateData(ts, keys, 50); + var results = await ts.MRevRangeAsync("-", "+", new List { $"{keys[0]}=value" }, withLabels: true, groupbyTuple: ("group", TsReduce.Min)); Assert.Equal(keys.Length, results.Count); - for (var i = 0; i < results.Count && i < results[i].labels.Count ; i++) + for (var i = 0; i < results.Count && i < results[i].labels.Count; i++) { Assert.Equal("group=" + i, results[i].key); Assert.Equal(new TimeSeriesLabel("group", i.ToString()), results[i].labels[0]); @@ -264,21 +274,22 @@ public async Task TestMRevRangeReduce() var keys = CreateKeyNames(2); var db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var ts = db.TS(); foreach (var key in keys) { var label = new TimeSeriesLabel(keys[0], "value"); - await db.TS().CreateAsync(key, labels: new List { label }); + await ts.CreateAsync(key, labels: new List { label }); } - var tuples = await CreateData(db, keys, 50); - var results = await db.TS().MRevRangeAsync("-", "+", new List { $"{keys[0]}=value" }, withLabels: true, groupbyTuple: (keys[0], TsReduce.Sum)); + var tuples = await CreateData(ts, keys, 50); + var results = await ts.MRevRangeAsync("-", "+", new List { $"{keys[0]}=value" }, withLabels: true, groupbyTuple: (keys[0], TsReduce.Sum)); Assert.Equal(1, results.Count); Assert.Equal($"{keys[0]}=value", results[0].key); Assert.Equal(new TimeSeriesLabel(keys[0], "value"), results[0].labels.FirstOrDefault()); Assert.Equal(new TimeSeriesLabel("__reducer__", "sum"), results[0].labels[1]); Assert.Equal(new TimeSeriesLabel("__source__", string.Join(",", keys)), results[0].labels[2]); tuples = ReverseData(tuples); - for (int i = 0; i < results[0].values.Count ; i++) + for (int i = 0; i < results[0].values.Count; i++) { Assert.Equal(tuples[i].Val * 2, results[0].values[i].Val); } @@ -290,21 +301,22 @@ public async Task TestMRevRangeFilterBy() var keys = CreateKeyNames(2); var db = redisFixture.Redis.GetDatabase(); 
db.Execute("FLUSHALL"); + var ts = db.TS(); TimeSeriesLabel label = new TimeSeriesLabel(keys[0], "value"); var labels = new List { label }; foreach (string key in keys) { - await db.TS().CreateAsync(key, labels: labels); + await ts.CreateAsync(key, labels: labels); } - var tuples = await CreateData(db, keys, 50); - var results = await db.TS().MRevRangeAsync("-", "+", new List { $"{keys[0]}=value" }, filterByValue: (0, 2)); + var tuples = await CreateData(ts, keys, 50); + var results = await ts.MRevRangeAsync("-", "+", new List { $"{keys[0]}=value" }, filterByValue: (0, 2)); for (int i = 0; i < results.Count; i++) { Assert.Equal(ReverseData(tuples.GetRange(0, 3)), results[i].values); } - results = await db.TS().MRevRangeAsync("-", "+", new List { $"{keys[0]}=value" }, filterByTs: new List { 0 }, filterByValue: (0, 2)); + results = await ts.MRevRangeAsync("-", "+", new List { $"{keys[0]}=value" }, filterByTs: new List { 0 }, filterByValue: (0, 2)); for (int i = 0; i < results.Count; i++) { Assert.Equal(ReverseData(tuples.GetRange(0, 1)), results[i].values); diff --git a/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestQueryIndex.cs b/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestQueryIndex.cs index cf23baf2..b4692c74 100644 --- a/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestQueryIndex.cs +++ b/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestQueryIndex.cs @@ -25,15 +25,16 @@ public void TestTSQueryIndex() { var db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var ts = db.TS(); var label1 = new TimeSeriesLabel("QUERYINDEX_TESTS_1", "value"); var label2 = new TimeSeriesLabel("QUERYINDEX_TESTS_2", "value2"); var labels1 = new List { label1, label2 }; var labels2 = new List { label1 }; - db.TS().Create(keys[0], labels: labels1); - db.TS().Create(keys[1], labels: labels2); - Assert.Equal(keys, db.TS().QueryIndex(new List { "QUERYINDEX_TESTS_1=value" })); - Assert.Equal(new List { keys[0] }, db.TS().QueryIndex(new List { "QUERYINDEX_TESTS_2=value2" })); + ts.Create(keys[0], labels: labels1); + ts.Create(keys[1], labels: labels2); + Assert.Equal(keys, ts.QueryIndex(new List { "QUERYINDEX_TESTS_1=value" })); + Assert.Equal(new List { keys[0] }, ts.QueryIndex(new List { "QUERYINDEX_TESTS_2=value2" })); } } } diff --git a/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestQueryIndexAsync.cs b/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestQueryIndexAsync.cs index 7aa949c7..f950daed 100644 --- a/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestQueryIndexAsync.cs +++ b/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestQueryIndexAsync.cs @@ -16,15 +16,16 @@ public async Task TestTSQueryIndex() var keys = CreateKeyNames(2); var db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var ts = db.TS(); var label1 = new TimeSeriesLabel(keys[0], "value"); var label2 = new TimeSeriesLabel(keys[1], "value2"); var labels1 = new List { label1, label2 }; var labels2 = new List { label1 }; - await db.TS().CreateAsync(keys[0], labels: labels1); - await db.TS().CreateAsync(keys[1], labels: labels2); - Assert.Equal(keys, db.TS().QueryIndex(new List { $"{keys[0]}=value" })); - Assert.Equal(new List { keys[0] }, db.TS().QueryIndex(new List { $"{keys[1]}=value2" })); + await ts.CreateAsync(keys[0], labels: labels1); + await ts.CreateAsync(keys[1], labels: labels2); + Assert.Equal(keys, ts.QueryIndex(new List { $"{keys[0]}=value" })); + Assert.Equal(new List { keys[0] }, ts.QueryIndex(new List { $"{keys[1]}=value2" })); } } } diff --git a/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestRange.cs 
b/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestRange.cs index 44383c70..b23e1173 100644 --- a/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestRange.cs +++ b/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestRange.cs @@ -19,13 +19,13 @@ public void Dispose() redisFixture.Redis.GetDatabase().KeyDelete(key); } - private List CreateData(IDatabase db, int timeBucket) + private List CreateData(TimeSeriesCommands ts, int timeBucket) { var tuples = new List(); for (int i = 0; i < 10; i++) { - TimeStamp ts = db.TS().Add(key, i * timeBucket, i); - tuples.Add(new TimeSeriesTuple(ts, i)); + TimeStamp timeStamp = ts.Add(key, i * timeBucket, i); + tuples.Add(new TimeSeriesTuple(timeStamp, i)); } return tuples; } @@ -35,8 +35,9 @@ public void TestSimpleRange() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); - var tuples = CreateData(db, 50); - Assert.Equal(tuples, db.TS().Range(key, "-", "+")); + var ts = db.TS(); + var tuples = CreateData(ts, 50); + Assert.Equal(tuples, ts.Range(key, "-", "+")); } [Fact] @@ -44,8 +45,9 @@ public void TestRangeCount() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); - var tuples = CreateData(db, 50); - Assert.Equal(tuples.GetRange(0, 5), db.TS().Range(key, "-", "+", count: 5)); + var ts = db.TS(); + var tuples = CreateData(ts, 50); + Assert.Equal(tuples.GetRange(0, 5), ts.Range(key, "-", "+", count: 5)); } [Fact] @@ -53,8 +55,9 @@ public void TestRangeAggregation() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); - var tuples = CreateData(db, 50); - Assert.Equal(tuples, db.TS().Range(key, "-", "+", aggregation: TsAggregation.Min, timeBucket: 50)); + var ts = db.TS(); + var tuples = CreateData(ts, 50); + Assert.Equal(tuples, ts.Range(key, "-", "+", aggregation: TsAggregation.Min, timeBucket: 50)); } [Fact] @@ -62,6 +65,7 @@ public void TestRangeAlign() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var ts = db.TS(); var tuples = new List() { new TimeSeriesTuple(1, 10), @@ -72,7 +76,7 @@ public void TestRangeAlign() foreach (var tuple in tuples) { - db.TS().Add(key, tuple.Time, tuple.Val); + ts.Add(key, tuple.Time, tuple.Val); } // Aligh start @@ -82,7 +86,7 @@ public void TestRangeAlign() new TimeSeriesTuple(11, 1), new TimeSeriesTuple(21, 1) }; - Assert.Equal(resStart, db.TS().Range(key, 1, 30, align: "-", aggregation: TsAggregation.Count, timeBucket: 10)); + Assert.Equal(resStart, ts.Range(key, 1, 30, align: "-", aggregation: TsAggregation.Count, timeBucket: 10)); // Aligh end var resEnd = new List() @@ -91,10 +95,10 @@ public void TestRangeAlign() new TimeSeriesTuple(10, 1), new TimeSeriesTuple(20, 1) }; - Assert.Equal(resEnd, db.TS().Range(key, 1, 30, align: "+", aggregation: TsAggregation.Count, timeBucket: 10)); + Assert.Equal(resEnd, ts.Range(key, 1, 30, align: "+", aggregation: TsAggregation.Count, timeBucket: 10)); // Align 1 - Assert.Equal(resStart, db.TS().Range(key, 1, 30, align: 1, aggregation: TsAggregation.Count, timeBucket: 10)); + Assert.Equal(resStart, ts.Range(key, 1, 30, align: 1, aggregation: TsAggregation.Count, timeBucket: 10)); } [Fact] @@ -102,8 +106,9 @@ public void TestMissingTimeBucket() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); - var tuples = CreateData(db, 50); - var ex = Assert.Throws(() => db.TS().Range(key, "-", "+", aggregation: TsAggregation.Avg)); + var ts = db.TS(); + var tuples = CreateData(ts, 50); + var ex = Assert.Throws(() => ts.Range(key, "-", "+", aggregation: TsAggregation.Avg)); 
Assert.Equal("RANGE Aggregation should have timeBucket value", ex.Message); } @@ -112,17 +117,18 @@ public void TestFilterBy() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); - var tuples = CreateData(db, 50); + var ts = db.TS(); + var tuples = CreateData(ts, 50); - var res = db.TS().Range(key, "-", "+", filterByValue: (0, 2)); // The first 3 tuples + var res = ts.Range(key, "-", "+", filterByValue: (0, 2)); // The first 3 tuples Assert.Equal(3, res.Count); Assert.Equal(tuples.GetRange(0, 3), res); var filterTs = new List { 0, 50, 100 }; // Also the first 3 tuples - res = db.TS().Range(key, "-", "+", filterByTs: filterTs); + res = ts.Range(key, "-", "+", filterByTs: filterTs); Assert.Equal(tuples.GetRange(0, 3), res); - res = db.TS().Range(key, "-", "+", filterByTs: filterTs, filterByValue: (2, 5)); // The third tuple + res = ts.Range(key, "-", "+", filterByTs: filterTs, filterByValue: (2, 5)); // The third tuple Assert.Equal(tuples.GetRange(2, 1), res); } @@ -131,33 +137,34 @@ public void latest() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); - db.TS().Create("ts1"); - db.TS().Create("ts2"); - db.TS().CreateRule("ts1", new TimeSeriesRule("ts2", 10, TsAggregation.Sum)); - db.TS().Add("ts1", 1, 1); - db.TS().Add("ts1", 2, 3); - db.TS().Add("ts1", 11, 7); - db.TS().Add("ts1", 13, 1); - var range = db.TS().Range("ts1", 0, 20); + var ts = db.TS(); + ts.Create("ts1"); + ts.Create("ts2"); + ts.CreateRule("ts1", new TimeSeriesRule("ts2", 10, TsAggregation.Sum)); + ts.Add("ts1", 1, 1); + ts.Add("ts1", 2, 3); + ts.Add("ts1", 11, 7); + ts.Add("ts1", 13, 1); + var range = ts.Range("ts1", 0, 20); Assert.Equal(4, range.Count); var compact = new TimeSeriesTuple(0, 4); var latest = new TimeSeriesTuple(10, 8); // get - Assert.Equal(compact, db.TS().Get("ts2")); + Assert.Equal(compact, ts.Get("ts2")); - Assert.Equal(latest, db.TS().Get("ts2", true)); + Assert.Equal(latest, ts.Get("ts2", true)); // range - Assert.Equal(new List() { compact }, db.TS().Range("ts2", 0, 10)); + Assert.Equal(new List() { compact }, ts.Range("ts2", 0, 10)); - Assert.Equal(new List() { compact, latest }, db.TS().Range("ts2", 0, 10, true)); + Assert.Equal(new List() { compact, latest }, ts.Range("ts2", 0, 10, true)); // revrange - Assert.Equal(new List() { compact }, db.TS().RevRange("ts2", 0, 10)); + Assert.Equal(new List() { compact }, ts.RevRange("ts2", 0, 10)); - Assert.Equal(new List() { latest, compact }, db.TS().RevRange("ts2", 0, 10, true)); + Assert.Equal(new List() { latest, compact }, ts.RevRange("ts2", 0, 10, true)); } [Fact] @@ -165,16 +172,17 @@ public void TestAlignTimestamp() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); - db.TS().Create("ts1"); - db.TS().Create("ts2"); - db.TS().Create("ts3"); - db.TS().CreateRule("ts1", new TimeSeriesRule("ts2", 10, TsAggregation.Count), 0); - db.TS().CreateRule("ts1", new TimeSeriesRule("ts3", 10, TsAggregation.Count), 1); - db.TS().Add("ts1", 1, 1); - db.TS().Add("ts1", 10, 3); - db.TS().Add("ts1", 21, 7); - Assert.Equal(2, db.TS().Range("ts2", "-", "+", aggregation: TsAggregation.Count, timeBucket: 10).Count); - Assert.Equal(1, db.TS().Range("ts3", "-", "+", aggregation: TsAggregation.Count, timeBucket: 10).Count); + var ts = db.TS(); + ts.Create("ts1"); + ts.Create("ts2"); + ts.Create("ts3"); + ts.CreateRule("ts1", new TimeSeriesRule("ts2", 10, TsAggregation.Count), 0); + ts.CreateRule("ts1", new TimeSeriesRule("ts3", 10, TsAggregation.Count), 1); + ts.Add("ts1", 1, 1); + ts.Add("ts1", 10, 3); 
+ ts.Add("ts1", 21, 7); + Assert.Equal(2, ts.Range("ts2", "-", "+", aggregation: TsAggregation.Count, timeBucket: 10).Count); + Assert.Equal(1, ts.Range("ts3", "-", "+", aggregation: TsAggregation.Count, timeBucket: 10).Count); } [Fact] @@ -182,16 +190,17 @@ public void TestBucketTimestamp() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var ts = db.TS(); - db.TS().Create("t1"); + ts.Create("t1"); - db.TS().Add("t1",15,1); - db.TS().Add("t1",17,4); - db.TS().Add("t1",51,3); - db.TS().Add("t1",73,5); - db.TS().Add("t1",75,3); + ts.Add("t1", 15, 1); + ts.Add("t1", 17, 4); + ts.Add("t1", 51, 3); + ts.Add("t1", 73, 5); + ts.Add("t1", 75, 3); - var range = db.TS().Range("t1", 0, 100, + var range = ts.Range("t1", 0, 100, align: 0, aggregation: TsAggregation.Max, timeBucket: 10); @@ -202,7 +211,7 @@ public void TestBucketTimestamp() expected.Add(new TimeSeriesTuple(70, 5.0)); Assert.Equal(range, expected); - range = db.TS().Range("t1", 0, 100, + range = ts.Range("t1", 0, 100, align: 0, aggregation: TsAggregation.Max, timeBucket: 10, @@ -220,16 +229,17 @@ public void TestEmpty() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var ts = db.TS(); - db.TS().Create("t1"); + ts.Create("t1"); - db.TS().Add("t1",15,1); - db.TS().Add("t1",17,4); - db.TS().Add("t1",51,3); - db.TS().Add("t1",73,5); - db.TS().Add("t1",75,3); + ts.Add("t1", 15, 1); + ts.Add("t1", 17, 4); + ts.Add("t1", 51, 3); + ts.Add("t1", 73, 5); + ts.Add("t1", 75, 3); - var range = db.TS().Range("t1", 0, 100, + var range = ts.Range("t1", 0, 100, align: 0, aggregation: TsAggregation.Max, timeBucket: 10); @@ -240,7 +250,7 @@ public void TestEmpty() expected.Add(new TimeSeriesTuple(70, 5.0)); Assert.Equal(range, expected); - range = db.TS().Range("t1", 0, 100, + range = ts.Range("t1", 0, 100, align: 0, aggregation: TsAggregation.Max, timeBucket: 10, @@ -256,7 +266,7 @@ public void TestEmpty() expected.Add(new TimeSeriesTuple(60, double.NaN)); expected.Add(new TimeSeriesTuple(70, 5.0)); - for (int i = 0 ; i < range.Count() ; i++) + for (int i = 0; i < range.Count(); i++) { Assert.Equal(range[i].Time.Value, expected[i].Time.Value); Assert.Equal(range[i].Val, expected[i].Val); diff --git a/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestRangeAsync.cs b/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestRangeAsync.cs index 4185f1f5..f52354b0 100644 --- a/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestRangeAsync.cs +++ b/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestRangeAsync.cs @@ -13,13 +13,13 @@ public class TestRangeAsync : AbstractNRedisStackTest { public TestRangeAsync(RedisFixture redisFixture) : base(redisFixture) { } - private async Task> CreateData(IDatabase db, string key, int timeBucket) + private async Task> CreateData(TimeSeriesCommands ts, string key, int timeBucket) { var tuples = new List(); for (var i = 0; i < 10; i++) { - var ts = await db.TS().AddAsync(key, i * timeBucket, i); - tuples.Add(new TimeSeriesTuple(ts, i)); + var timeStamp = await ts.AddAsync(key, i * timeBucket, i); + tuples.Add(new TimeSeriesTuple(timeStamp, i)); } return tuples; } @@ -30,8 +30,9 @@ public async Task TestSimpleRange() var key = CreateKeyName(); var db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); - var tuples = await CreateData(db, key, 50); - Assert.Equal(tuples, await db.TS().RangeAsync(key, "-", "+")); + var ts = db.TS(); + var tuples = await CreateData(ts, key, 50); + Assert.Equal(tuples, await ts.RangeAsync(key, "-", "+")); } [Fact] @@ -40,8 +41,9 @@ public async Task 
TestRangeCount() var key = CreateKeyName(); var db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); - var tuples = await CreateData(db, key, 50); - Assert.Equal(tuples.GetRange(0, 5), await db.TS().RangeAsync(key, "-", "+", count: 5)); + var ts = db.TS(); + var tuples = await CreateData(ts, key, 50); + Assert.Equal(tuples.GetRange(0, 5), await ts.RangeAsync(key, "-", "+", count: 5)); } [Fact] @@ -50,8 +52,9 @@ public async Task TestRangeAggregation() var key = CreateKeyName(); var db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); - var tuples = await CreateData(db, key, 50); - Assert.Equal(tuples, await db.TS().RangeAsync(key, "-", "+", aggregation: TsAggregation.Min, timeBucket: 50)); + var ts = db.TS(); + var tuples = await CreateData(ts, key, 50); + Assert.Equal(tuples, await ts.RangeAsync(key, "-", "+", aggregation: TsAggregation.Min, timeBucket: 50)); } [Fact] @@ -60,6 +63,7 @@ public async Task TestRangeAlign() var key = CreateKeyName(); IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var ts = db.TS(); var tuples = new List() { new TimeSeriesTuple(1, 10), @@ -70,7 +74,7 @@ public async Task TestRangeAlign() foreach (var tuple in tuples) { - await db.TS().AddAsync(key, tuple.Time, tuple.Val); + await ts.AddAsync(key, tuple.Time, tuple.Val); } // Aligh start @@ -80,7 +84,7 @@ public async Task TestRangeAlign() new TimeSeriesTuple(11, 1), new TimeSeriesTuple(21, 1) }; - Assert.Equal(resStart, await db.TS().RangeAsync(key, 1, 30, align: "-", aggregation: TsAggregation.Count, timeBucket: 10)); + Assert.Equal(resStart, await ts.RangeAsync(key, 1, 30, align: "-", aggregation: TsAggregation.Count, timeBucket: 10)); // Aligh end var resEnd = new List() @@ -89,10 +93,10 @@ public async Task TestRangeAlign() new TimeSeriesTuple(10, 1), new TimeSeriesTuple(20, 1) }; - Assert.Equal(resEnd, await db.TS().RangeAsync(key, 1, 30, align: "+", aggregation: TsAggregation.Count, timeBucket: 10)); + Assert.Equal(resEnd, await ts.RangeAsync(key, 1, 30, align: "+", aggregation: TsAggregation.Count, timeBucket: 10)); // Align 1 - Assert.Equal(resStart, await db.TS().RangeAsync(key, 1, 30, align: 1, aggregation: TsAggregation.Count, timeBucket: 10)); + Assert.Equal(resStart, await ts.RangeAsync(key, 1, 30, align: 1, aggregation: TsAggregation.Count, timeBucket: 10)); } [Fact] @@ -101,8 +105,9 @@ public async Task TestMissingTimeBucket() var key = CreateKeyName(); var db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); - var tuples = await CreateData(db, key, 50); - var ex = await Assert.ThrowsAsync(async () => await db.TS().RangeAsync(key, "-", "+", aggregation: TsAggregation.Avg)); + var ts = db.TS(); + var tuples = await CreateData(ts, key, 50); + var ex = await Assert.ThrowsAsync(async () => await ts.RangeAsync(key, "-", "+", aggregation: TsAggregation.Avg)); Assert.Equal("RANGE Aggregation should have timeBucket value", ex.Message); } @@ -112,17 +117,18 @@ public async Task TestFilterBy() var key = CreateKeyName(); var db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); - var tuples = await CreateData(db, key, 50); + var ts = db.TS(); + var tuples = await CreateData(ts, key, 50); - var res = await db.TS().RangeAsync(key, "-", "+", filterByValue: (0, 2)); // The first 3 tuples + var res = await ts.RangeAsync(key, "-", "+", filterByValue: (0, 2)); // The first 3 tuples Assert.Equal(3, res.Count); Assert.Equal(tuples.GetRange(0, 3), res); var filterTs = new List { 0, 50, 100 }; // Also the first 3 tuples - res = await 
db.TS().RangeAsync(key, "-", "+", filterByTs: filterTs); + res = await ts.RangeAsync(key, "-", "+", filterByTs: filterTs); Assert.Equal(tuples.GetRange(0, 3), res); - res = await db.TS().RangeAsync(key, "-", "+", filterByTs: filterTs, filterByValue: (2, 5)); // The third tuple + res = await ts.RangeAsync(key, "-", "+", filterByTs: filterTs, filterByValue: (2, 5)); // The third tuple Assert.Equal(tuples.GetRange(2, 1), res); } @@ -131,33 +137,34 @@ public async Task TestLatestAsync() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); - await db.TS().CreateAsync("ts1"); - await db.TS().CreateAsync("ts2"); - await db.TS().CreateRuleAsync("ts1", new TimeSeriesRule("ts2", 10, TsAggregation.Sum)); - await db.TS().AddAsync("ts1", 1, 1); - await db.TS().AddAsync("ts1", 2, 3); - await db.TS().AddAsync("ts1", 11, 7); - await db.TS().AddAsync("ts1", 13, 1); - var range = await db.TS().RangeAsync("ts1", 0, 20); + var ts = db.TS(); + await ts.CreateAsync("ts1"); + await ts.CreateAsync("ts2"); + await ts.CreateRuleAsync("ts1", new TimeSeriesRule("ts2", 10, TsAggregation.Sum)); + await ts.AddAsync("ts1", 1, 1); + await ts.AddAsync("ts1", 2, 3); + await ts.AddAsync("ts1", 11, 7); + await ts.AddAsync("ts1", 13, 1); + var range = await ts.RangeAsync("ts1", 0, 20); Assert.Equal(4, range.Count); var compact = new TimeSeriesTuple(0, 4); var latest = new TimeSeriesTuple(10, 8); // get - Assert.Equal(compact, await db.TS().GetAsync("ts2")); + Assert.Equal(compact, await ts.GetAsync("ts2")); - Assert.Equal(latest, await db.TS().GetAsync("ts2", true)); + Assert.Equal(latest, await ts.GetAsync("ts2", true)); // range - Assert.Equal(new List() { compact }, await db.TS().RangeAsync("ts2", 0, 10)); + Assert.Equal(new List() { compact }, await ts.RangeAsync("ts2", 0, 10)); - Assert.Equal(new List() { compact, latest }, await db.TS().RangeAsync("ts2", 0, 10, true)); + Assert.Equal(new List() { compact, latest }, await ts.RangeAsync("ts2", 0, 10, true)); // revrange - Assert.Equal(new List() { compact }, await db.TS().RevRangeAsync("ts2", 0, 10)); + Assert.Equal(new List() { compact }, await ts.RevRangeAsync("ts2", 0, 10)); - Assert.Equal(new List() { latest, compact }, await db.TS().RevRangeAsync("ts2", 0, 10, true)); + Assert.Equal(new List() { latest, compact }, await ts.RevRangeAsync("ts2", 0, 10, true)); } [Fact] @@ -165,16 +172,17 @@ public async Task TestAlignTimestampAsync() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); - db.TS().Create("ts1"); - db.TS().Create("ts2"); - db.TS().Create("ts3"); - db.TS().CreateRule("ts1", new TimeSeriesRule("ts2", 10, TsAggregation.Count), 0); - db.TS().CreateRule("ts1", new TimeSeriesRule("ts3", 10, TsAggregation.Count), 1); - db.TS().Add("ts1", 1, 1); - db.TS().Add("ts1", 10, 3); - db.TS().Add("ts1", 21, 7); - Assert.Equal(2, (await db.TS().RangeAsync("ts2", "-", "+", aggregation: TsAggregation.Count, timeBucket: 10)).Count); - Assert.Equal(1, (await db.TS().RangeAsync("ts3", "-", "+", aggregation: TsAggregation.Count, timeBucket: 10)).Count); + var ts = db.TS(); + ts.Create("ts1"); + ts.Create("ts2"); + ts.Create("ts3"); + ts.CreateRule("ts1", new TimeSeriesRule("ts2", 10, TsAggregation.Count), 0); + ts.CreateRule("ts1", new TimeSeriesRule("ts3", 10, TsAggregation.Count), 1); + ts.Add("ts1", 1, 1); + ts.Add("ts1", 10, 3); + ts.Add("ts1", 21, 7); + Assert.Equal(2, (await ts.RangeAsync("ts2", "-", "+", aggregation: TsAggregation.Count, timeBucket: 10)).Count); + Assert.Equal(1, (await ts.RangeAsync("ts3", "-", "+", aggregation: 
TsAggregation.Count, timeBucket: 10)).Count); } [Fact] @@ -182,16 +190,17 @@ public async Task TestBucketTimestampAsync() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var ts = db.TS(); - db.TS().Create("t1"); + ts.Create("t1"); - db.TS().Add("t1",15,1); - db.TS().Add("t1",17,4); - db.TS().Add("t1",51,3); - db.TS().Add("t1",73,5); - db.TS().Add("t1",75,3); + ts.Add("t1", 15, 1); + ts.Add("t1", 17, 4); + ts.Add("t1", 51, 3); + ts.Add("t1", 73, 5); + ts.Add("t1", 75, 3); - var range = await db.TS().RangeAsync("t1", 0, 100, + var range = await ts.RangeAsync("t1", 0, 100, align: 0, aggregation: TsAggregation.Max, timeBucket: 10); @@ -202,7 +211,7 @@ public async Task TestBucketTimestampAsync() expected.Add(new TimeSeriesTuple(70, 5.0)); Assert.Equal(range, expected); - range = await db.TS().RangeAsync("t1", 0, 100, + range = await ts.RangeAsync("t1", 0, 100, align: 0, aggregation: TsAggregation.Max, timeBucket: 10, @@ -220,16 +229,17 @@ public async Task TestEmptyAsync() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var ts = db.TS(); - db.TS().Create("t1"); + ts.Create("t1"); - db.TS().Add("t1",15,1); - db.TS().Add("t1",17,4); - db.TS().Add("t1",51,3); - db.TS().Add("t1",73,5); - db.TS().Add("t1",75,3); + ts.Add("t1", 15, 1); + ts.Add("t1", 17, 4); + ts.Add("t1", 51, 3); + ts.Add("t1", 73, 5); + ts.Add("t1", 75, 3); - var range = await db.TS().RangeAsync("t1", 0, 100, + var range = await ts.RangeAsync("t1", 0, 100, align: 0, aggregation: TsAggregation.Max, timeBucket: 10); @@ -240,7 +250,7 @@ public async Task TestEmptyAsync() expected.Add(new TimeSeriesTuple(70, 5.0)); Assert.Equal(range, expected); - range = await db.TS().RangeAsync("t1", 0, 100, + range = await ts.RangeAsync("t1", 0, 100, align: 0, aggregation: TsAggregation.Max, timeBucket: 10, @@ -256,7 +266,7 @@ public async Task TestEmptyAsync() expected.Add(new TimeSeriesTuple(60, double.NaN)); expected.Add(new TimeSeriesTuple(70, 5.0)); - for (int i = 0 ; i < range.Count() ; i++) + for (int i = 0; i < range.Count(); i++) { Assert.Equal(range[i].Time.Value, expected[i].Time.Value); Assert.Equal(range[i].Val, expected[i].Val); diff --git a/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestRevRange.cs b/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestRevRange.cs index 7a6ee312..5f264caf 100644 --- a/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestRevRange.cs +++ b/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestRevRange.cs @@ -12,13 +12,13 @@ public class TestRevRange : AbstractNRedisStackTest { public TestRevRange(RedisFixture redisFixture) : base(redisFixture) { } - private List CreateData(IDatabase db, string key, int timeBucket) + private List CreateData(TimeSeriesCommands ts, string key, int timeBucket) { var tuples = new List(); for (var i = 0; i < 10; i++) { - var ts = db.TS().Add(key, i * timeBucket, i); - tuples.Add(new TimeSeriesTuple(ts, i)); + var timeStamp = ts.Add(key, i * timeBucket, i); + tuples.Add(new TimeSeriesTuple(timeStamp, i)); } return tuples; } @@ -29,8 +29,9 @@ public void TestSimpleRevRange() var key = CreateKeyName(); var db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); - var tuples = CreateData(db, key, 50); - Assert.Equal(ReverseData(tuples), db.TS().RevRange(key, "-", "+")); + var ts = db.TS(); + var tuples = CreateData(ts, key, 50); + Assert.Equal(ReverseData(tuples), ts.RevRange(key, "-", "+")); } [Fact] @@ -39,8 +40,9 @@ public void TestRevRangeCount() var key = CreateKeyName(); var db = redisFixture.Redis.GetDatabase(); 
db.Execute("FLUSHALL"); - var tuples = CreateData(db, key, 50); - Assert.Equal(ReverseData(tuples).GetRange(0, 5), db.TS().RevRange(key, "-", "+", count: 5)); + var ts = db.TS(); + var tuples = CreateData(ts, key, 50); + Assert.Equal(ReverseData(tuples).GetRange(0, 5), ts.RevRange(key, "-", "+", count: 5)); } [Fact] @@ -49,8 +51,9 @@ public void TestRevRangeAggregation() var key = CreateKeyName(); var db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); - var tuples = CreateData(db, key, 50); - Assert.Equal(ReverseData(tuples), db.TS().RevRange(key, "-", "+", aggregation: TsAggregation.Min, timeBucket: 50)); + var ts = db.TS(); + var tuples = CreateData(ts, key, 50); + Assert.Equal(ReverseData(tuples), ts.RevRange(key, "-", "+", aggregation: TsAggregation.Min, timeBucket: 50)); } [Fact] @@ -59,6 +62,7 @@ public void TestRevRangeAlign() var key = CreateKeyName(); var db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var ts = db.TS(); var tuples = new List() { new TimeSeriesTuple(1, 10), @@ -69,7 +73,7 @@ public void TestRevRangeAlign() foreach (var tuple in tuples) { - db.TS().Add(key, tuple.Time, tuple.Val); + ts.Add(key, tuple.Time, tuple.Val); } // Aligh start @@ -79,7 +83,7 @@ public void TestRevRangeAlign() new TimeSeriesTuple(11, 1), new TimeSeriesTuple(1, 2) }; - Assert.Equal(resStart, db.TS().RevRange(key, 1, 30, align: "-", aggregation: TsAggregation.Count, timeBucket: 10)); + Assert.Equal(resStart, ts.RevRange(key, 1, 30, align: "-", aggregation: TsAggregation.Count, timeBucket: 10)); // Aligh end var resEnd = new List() @@ -88,10 +92,10 @@ public void TestRevRangeAlign() new TimeSeriesTuple(10, 1), new TimeSeriesTuple(0, 2) }; - Assert.Equal(resEnd, db.TS().RevRange(key, 1, 30, align: "+", aggregation: TsAggregation.Count, timeBucket: 10)); + Assert.Equal(resEnd, ts.RevRange(key, 1, 30, align: "+", aggregation: TsAggregation.Count, timeBucket: 10)); // Align 1 - Assert.Equal(resStart, db.TS().RevRange(key, 1, 30, align: 1, aggregation: TsAggregation.Count, timeBucket: 10)); + Assert.Equal(resStart, ts.RevRange(key, 1, 30, align: 1, aggregation: TsAggregation.Count, timeBucket: 10)); } [Fact] @@ -100,8 +104,9 @@ public void TestMissingTimeBucket() var key = CreateKeyName(); var db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); - var tuples = CreateData(db, key, 50); - var ex = Assert.Throws(() => db.TS().RevRange(key, "-", "+", aggregation: TsAggregation.Avg)); + var ts = db.TS(); + var tuples = CreateData(ts, key, 50); + var ex = Assert.Throws(() => ts.RevRange(key, "-", "+", aggregation: TsAggregation.Avg)); Assert.Equal("RANGE Aggregation should have timeBucket value", ex.Message); } @@ -112,17 +117,18 @@ public void TestFilterBy() var key = CreateKeyName(); var db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); - var tuples = CreateData(db, key, 50); + var ts = db.TS(); + var tuples = CreateData(ts, key, 50); - var res = db.TS().RevRange(key, "-", "+", filterByValue: (0, 2)); + var res = ts.RevRange(key, "-", "+", filterByValue: (0, 2)); Assert.Equal(3, res.Count); Assert.Equal(ReverseData(tuples.GetRange(0, 3)), res); var filterTs = new List { 0, 50, 100 }; - res = db.TS().RevRange(key, "-", "+", filterByTs: filterTs); + res = ts.RevRange(key, "-", "+", filterByTs: filterTs); Assert.Equal(ReverseData(tuples.GetRange(0, 3)), res); - res = db.TS().RevRange(key, "-", "+", filterByTs: filterTs, filterByValue: (2, 5)); + res = ts.RevRange(key, "-", "+", filterByTs: filterTs, filterByValue: (2, 5)); Assert.Equal(tuples.GetRange(2, 
1), res); } } diff --git a/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestRevRangeAsync.cs b/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestRevRangeAsync.cs index 651e27cc..d418ec09 100644 --- a/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestRevRangeAsync.cs +++ b/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestRevRangeAsync.cs @@ -13,13 +13,13 @@ public class TestRevRangeAsync : AbstractNRedisStackTest { public TestRevRangeAsync(RedisFixture redisFixture) : base(redisFixture) { } - private async Task> CreateData(IDatabase db, string key, int timeBucket) + private async Task> CreateData(TimeSeriesCommands ts, string key, int timeBucket) { var tuples = new List(); for (var i = 0; i < 10; i++) { - var ts = await db.TS().AddAsync(key, i * timeBucket, i); - tuples.Add(new TimeSeriesTuple(ts, i)); + var timeStamp = await ts.AddAsync(key, i * timeBucket, i); + tuples.Add(new TimeSeriesTuple(timeStamp, i)); } return tuples; } @@ -30,8 +30,9 @@ public async Task TestSimpleRevRange() var key = CreateKeyName(); var db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); - var tuples = await CreateData(db, key, 50); - Assert.Equal(ReverseData(tuples), await db.TS().RevRangeAsync(key, "-", "+")); + var ts = db.TS(); + var tuples = await CreateData(ts, key, 50); + Assert.Equal(ReverseData(tuples), await ts.RevRangeAsync(key, "-", "+")); } [Fact] @@ -40,8 +41,9 @@ public async Task TestRevRangeCount() var key = CreateKeyName(); var db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); - var tuples = await CreateData(db, key, 50); - Assert.Equal(ReverseData(tuples).GetRange(0, 5), await db.TS().RevRangeAsync(key, "-", "+", count: 5)); + var ts = db.TS(); + var tuples = await CreateData(ts, key, 50); + Assert.Equal(ReverseData(tuples).GetRange(0, 5), await ts.RevRangeAsync(key, "-", "+", count: 5)); } [Fact] @@ -50,8 +52,9 @@ public async Task TestRevRangeAggregation() var key = CreateKeyName(); var db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); - var tuples = await CreateData(db, key, 50); - Assert.Equal(ReverseData(tuples), await db.TS().RevRangeAsync(key, "-", "+", aggregation: TsAggregation.Min, timeBucket: 50)); + var ts = db.TS(); + var tuples = await CreateData(ts, key, 50); + Assert.Equal(ReverseData(tuples), await ts.RevRangeAsync(key, "-", "+", aggregation: TsAggregation.Min, timeBucket: 50)); } [Fact] @@ -60,6 +63,7 @@ public async Task TestRevRangeAlign() var key = CreateKeyName(); IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var ts = db.TS(); var tuples = new List() { new TimeSeriesTuple(1, 10), @@ -70,7 +74,7 @@ public async Task TestRevRangeAlign() foreach (var tuple in tuples) { - await db.TS().AddAsync(key, tuple.Time, tuple.Val); + await ts.AddAsync(key, tuple.Time, tuple.Val); } // Aligh start @@ -80,7 +84,7 @@ public async Task TestRevRangeAlign() new TimeSeriesTuple(11, 1), new TimeSeriesTuple(1, 2) }; - Assert.Equal(resStart, await db.TS().RevRangeAsync(key, 1, 30, align: "-", aggregation: TsAggregation.Count, timeBucket: 10)); + Assert.Equal(resStart, await ts.RevRangeAsync(key, 1, 30, align: "-", aggregation: TsAggregation.Count, timeBucket: 10)); // Aligh end var resEnd = new List() @@ -89,10 +93,10 @@ public async Task TestRevRangeAlign() new TimeSeriesTuple(10, 1), new TimeSeriesTuple(0, 2) }; - Assert.Equal(resEnd, await db.TS().RevRangeAsync(key, 1, 30, align: "+", aggregation: TsAggregation.Count, timeBucket: 10)); + Assert.Equal(resEnd, await ts.RevRangeAsync(key, 1, 30, align: "+", aggregation: 
TsAggregation.Count, timeBucket: 10)); // Align 1 - Assert.Equal(resStart, await db.TS().RevRangeAsync(key, 1, 30, align: 1, aggregation: TsAggregation.Count, timeBucket: 10)); + Assert.Equal(resStart, await ts.RevRangeAsync(key, 1, 30, align: 1, aggregation: TsAggregation.Count, timeBucket: 10)); } [Fact] @@ -101,8 +105,9 @@ public async Task TestMissingTimeBucket() var key = CreateKeyName(); var db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); - var tuples = await CreateData(db, key, 50); - var ex = await Assert.ThrowsAsync(async () => await db.TS().RevRangeAsync(key, "-", "+", aggregation: TsAggregation.Avg)); + var ts = db.TS(); + var tuples = await CreateData(ts, key, 50); + var ex = await Assert.ThrowsAsync(async () => await ts.RevRangeAsync(key, "-", "+", aggregation: TsAggregation.Avg)); Assert.Equal("RANGE Aggregation should have timeBucket value", ex.Message); } @@ -112,17 +117,18 @@ public async Task TestFilterBy() var key = CreateKeyName(); var db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); - var tuples = await CreateData(db, key, 50); + var ts = db.TS(); + var tuples = await CreateData(ts, key, 50); - var res = await db.TS().RevRangeAsync(key, "-", "+", filterByValue: (0, 2)); + var res = await ts.RevRangeAsync(key, "-", "+", filterByValue: (0, 2)); Assert.Equal(3, res.Count); Assert.Equal(ReverseData(tuples.GetRange(0, 3)), res); var filterTs = new List { 0, 50, 100 }; - res = await db.TS().RevRangeAsync(key, "-", "+", filterByTs: filterTs); + res = await ts.RevRangeAsync(key, "-", "+", filterByTs: filterTs); Assert.Equal(ReverseData(tuples.GetRange(0, 3)), res); - res = await db.TS().RevRangeAsync(key, "-", "+", filterByTs: filterTs, filterByValue: (2, 5)); + res = await ts.RevRangeAsync(key, "-", "+", filterByTs: filterTs, filterByValue: (2, 5)); Assert.Equal(tuples.GetRange(2, 1), res); } } diff --git a/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestRules.cs b/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestRules.cs index ceb1e5ba..b9b41e4a 100644 --- a/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestRules.cs +++ b/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestRules.cs @@ -48,10 +48,11 @@ public void TestRulesAdditionDeletion() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); - db.TS().Create(srcKey); + var ts = db.TS(); + ts.Create(srcKey); foreach (var destKey in destKeys.Values) { - db.TS().Create(destKey); + ts.Create(destKey); } long timeBucket = 50; var rules = new List(); @@ -61,16 +62,16 @@ public void TestRulesAdditionDeletion() var rule = new TimeSeriesRule(destKeys[aggregation], timeBucket, aggregation); rules.Add(rule); rulesMap[aggregation] = rule; - Assert.True(db.TS().CreateRule(srcKey, rule)); - TimeSeriesInformation info = db.TS().Info(srcKey); + Assert.True(ts.CreateRule(srcKey, rule)); + TimeSeriesInformation info = ts.Info(srcKey); Assert.Equal(rules, info.Rules); } foreach (var aggregation in destKeys.Keys) { var rule = rulesMap[aggregation]; rules.Remove(rule); - Assert.True(db.TS().DeleteRule(srcKey, rule.DestKey)); - TimeSeriesInformation info = db.TS().Info(srcKey); + Assert.True(ts.DeleteRule(srcKey, rule.DestKey)); + TimeSeriesInformation info = ts.Info(srcKey); Assert.Equal(rules, info.Rules); } } @@ -80,12 +81,13 @@ public void TestNonExistingSrc() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var ts = db.TS(); string destKey = "RULES_DEST_" + TsAggregation.Avg; - db.TS().Create(destKey); + ts.Create(destKey); TimeSeriesRule rule = new TimeSeriesRule(destKey, 
50, TsAggregation.Avg); - var ex = Assert.Throws(() => db.TS().CreateRule(srcKey, rule)); + var ex = Assert.Throws(() => ts.CreateRule(srcKey, rule)); Assert.Equal("ERR TSDB: the key does not exist", ex.Message); - ex = Assert.Throws(() => db.TS().DeleteRule(srcKey, destKey)); + ex = Assert.Throws(() => ts.DeleteRule(srcKey, destKey)); Assert.Equal("ERR TSDB: the key does not exist", ex.Message); } @@ -94,12 +96,13 @@ public void TestNonExisitingDestinaion() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); + var ts = db.TS(); string destKey = "RULES_DEST_" + TsAggregation.Avg; - db.TS().Create(srcKey); + ts.Create(srcKey); TimeSeriesRule rule = new TimeSeriesRule(destKey, 50, TsAggregation.Avg); - var ex = Assert.Throws(() => db.TS().CreateRule(srcKey, rule)); + var ex = Assert.Throws(() => ts.CreateRule(srcKey, rule)); Assert.Equal("ERR TSDB: the key does not exist", ex.Message); - ex = Assert.Throws(() => db.TS().DeleteRule(srcKey, destKey)); + ex = Assert.Throws(() => ts.DeleteRule(srcKey, destKey)); Assert.Equal("ERR TSDB: compaction rule does not exist", ex.Message); } @@ -108,22 +111,23 @@ public void TestAlignTimestamp() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); - db.TS().Create("ts1"); - db.TS().Create("ts2"); - db.TS().Create("ts3"); + var ts = db.TS(); + ts.Create("ts1"); + ts.Create("ts2"); + ts.Create("ts3"); TimeSeriesRule rule1 = new TimeSeriesRule("ts2", 10, TsAggregation.Count); - db.TS().CreateRule("ts1", rule1, 0); + ts.CreateRule("ts1", rule1, 0); TimeSeriesRule rule2 = new TimeSeriesRule("ts3", 10, TsAggregation.Count); - db.TS().CreateRule("ts1", rule2, 1); + ts.CreateRule("ts1", rule2, 1); - db.TS().Add("ts1", 1, 1); - db.TS().Add("ts1", 10, 3); - db.TS().Add("ts1", 21, 7); + ts.Add("ts1", 1, 1); + ts.Add("ts1", 10, 3); + ts.Add("ts1", 21, 7); - Assert.Equal(2, db.TS().Range("ts2", "-", "+", aggregation: TsAggregation.Count, timeBucket: 10).Count); - Assert.Equal(1, db.TS().Range("ts3", "-", "+", aggregation: TsAggregation.Count, timeBucket: 10).Count); + Assert.Equal(2, ts.Range("ts2", "-", "+", aggregation: TsAggregation.Count, timeBucket: 10).Count); + Assert.Equal(1, ts.Range("ts3", "-", "+", aggregation: TsAggregation.Count, timeBucket: 10).Count); } } } diff --git a/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestRulesAsync.cs b/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestRulesAsync.cs index cfb92f34..5d2f30a2 100644 --- a/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestRulesAsync.cs +++ b/tests/NRedisStack.Tests/TimeSeries/TestAPI/TestRulesAsync.cs @@ -20,12 +20,13 @@ public async Task TestRulesAdditionDeletion() var key = CreateKeyName(); var db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); - await db.TS().CreateAsync(key); + var ts = db.TS(); + await ts.CreateAsync(key); var aggregations = (TsAggregation[])Enum.GetValues(typeof(TsAggregation)); foreach (var aggregation in aggregations) { - await db.TS().CreateAsync($"{key}:{aggregation}"); + await ts.CreateAsync($"{key}:{aggregation}"); } var timeBucket = 50L; @@ -36,9 +37,9 @@ public async Task TestRulesAdditionDeletion() var rule = new TimeSeriesRule($"{key}:{aggregation}", timeBucket, aggregation); rules.Add(rule); rulesMap[aggregation] = rule; - Assert.True(await db.TS().CreateRuleAsync(key, rule)); + Assert.True(await ts.CreateRuleAsync(key, rule)); - var info = await db.TS().InfoAsync(key); + var info = await ts.InfoAsync(key); Assert.Equal(rules, info.Rules); } @@ -46,9 +47,9 @@ public async Task TestRulesAdditionDeletion() { var 
rule = rulesMap[aggregation]; rules.Remove(rule); - Assert.True(await db.TS().DeleteRuleAsync(key, rule.DestKey)); + Assert.True(await ts.DeleteRuleAsync(key, rule.DestKey)); - var info = await db.TS().InfoAsync(key); + var info = await ts.InfoAsync(key); Assert.Equal(rules, info.Rules); } @@ -62,12 +63,13 @@ public async Task TestNonExistingSrc() var aggKey = $"{key}:{TsAggregation.Avg}"; var db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); - await db.TS().CreateAsync(aggKey); + var ts = db.TS(); + await ts.CreateAsync(aggKey); var rule = new TimeSeriesRule(aggKey, 50, TsAggregation.Avg); - var ex = await Assert.ThrowsAsync(async () => await db.TS().CreateRuleAsync(key, rule)); + var ex = await Assert.ThrowsAsync(async () => await ts.CreateRuleAsync(key, rule)); Assert.Equal("ERR TSDB: the key does not exist", ex.Message); - ex = await Assert.ThrowsAsync(async () => await db.TS().DeleteRuleAsync(key, aggKey)); + ex = await Assert.ThrowsAsync(async () => await ts.DeleteRuleAsync(key, aggKey)); Assert.Equal("ERR TSDB: the key does not exist", ex.Message); await db.KeyDeleteAsync(aggKey); @@ -80,12 +82,13 @@ public async Task TestNonExisitingDestinaion() var aggKey = $"{key}:{TsAggregation.Avg}"; var db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); - await db.TS().CreateAsync(key); + var ts = db.TS(); + await ts.CreateAsync(key); var rule = new TimeSeriesRule(aggKey, 50, TsAggregation.Avg); - var ex = await Assert.ThrowsAsync(async () => await db.TS().CreateRuleAsync(key, rule)); + var ex = await Assert.ThrowsAsync(async () => await ts.CreateRuleAsync(key, rule)); Assert.Equal("ERR TSDB: the key does not exist", ex.Message); - ex = await Assert.ThrowsAsync(async () => await db.TS().DeleteRuleAsync(key, aggKey)); + ex = await Assert.ThrowsAsync(async () => await ts.DeleteRuleAsync(key, aggKey)); Assert.Equal("ERR TSDB: compaction rule does not exist", ex.Message); } @@ -94,22 +97,23 @@ public async Task TestAlignTimestampAsync() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); - await db.TS().CreateAsync("ts1"); - await db.TS().CreateAsync("ts2"); - await db.TS().CreateAsync("ts3"); + var ts = db.TS(); + await ts.CreateAsync("ts1"); + await ts.CreateAsync("ts2"); + await ts.CreateAsync("ts3"); TimeSeriesRule rule1 = new TimeSeriesRule("ts2", 10, TsAggregation.Count); - await db.TS().CreateRuleAsync("ts1", rule1, 0); + await ts.CreateRuleAsync("ts1", rule1, 0); TimeSeriesRule rule2 = new TimeSeriesRule("ts3", 10, TsAggregation.Count); - await db.TS().CreateRuleAsync("ts1", rule2, 1); + await ts.CreateRuleAsync("ts1", rule2, 1); - await db.TS().AddAsync("ts1", 1, 1); - await db.TS().AddAsync("ts1", 10, 3); - await db.TS().AddAsync("ts1", 21, 7); + await ts.AddAsync("ts1", 1, 1); + await ts.AddAsync("ts1", 10, 3); + await ts.AddAsync("ts1", 21, 7); - Assert.Equal(2, (await db.TS().RangeAsync("ts2", "-", "+", aggregation: TsAggregation.Count, timeBucket: 10)).Count); - Assert.Equal(1, (await db.TS().RangeAsync("ts3", "-", "+", aggregation: TsAggregation.Count, timeBucket: 10)).Count); + Assert.Equal(2, (await ts.RangeAsync("ts2", "-", "+", aggregation: TsAggregation.Count, timeBucket: 10)).Count); + Assert.Equal(1, (await ts.RangeAsync("ts3", "-", "+", aggregation: TsAggregation.Count, timeBucket: 10)).Count); } } } diff --git a/tests/NRedisStack.Tests/TimeSeries/TestDataTypes/TestTimeSeriesInformation.cs b/tests/NRedisStack.Tests/TimeSeries/TestDataTypes/TestTimeSeriesInformation.cs index 0871c0a7..e069c0b4 100644 --- 
a/tests/NRedisStack.Tests/TimeSeries/TestDataTypes/TestTimeSeriesInformation.cs +++ b/tests/NRedisStack.Tests/TimeSeries/TestDataTypes/TestTimeSeriesInformation.cs @@ -20,11 +20,12 @@ public void TestInformationSync() string key = CreateKeyName(); IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); - db.TS().Add(key, "*", 1.1); - db.TS().Add(key, "*", 1.3, duplicatePolicy: TsDuplicatePolicy.LAST); + var ts = db.TS(); + ts.Add(key, "*", 1.1); + ts.Add(key, "*", 1.3, duplicatePolicy: TsDuplicatePolicy.LAST); - TimeSeriesInformation info = db.TS().Info(key); - TimeSeriesInformation infoDebug = db.TS().Info(key, debug: true); + TimeSeriesInformation info = ts.Info(key); + TimeSeriesInformation infoDebug = ts.Info(key, debug: true); Assert.Equal(4184, info.MemoryUsage); Assert.Equal(0, info.RetentionTime); @@ -47,11 +48,12 @@ public async Task TestInformationAsync() string key = CreateKeyName(); IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); - await db.TS().AddAsync(key, "*", 1.1); - await db.TS().AddAsync(key, "*", 1.3, duplicatePolicy: TsDuplicatePolicy.LAST); + var ts = db.TS(); + await ts.AddAsync(key, "*", 1.1); + await ts.AddAsync(key, "*", 1.3, duplicatePolicy: TsDuplicatePolicy.LAST); - TimeSeriesInformation info = await db.TS().InfoAsync(key); - TimeSeriesInformation infoDebug = await db.TS().InfoAsync(key, debug: true); + TimeSeriesInformation info = await ts.InfoAsync(key); + TimeSeriesInformation infoDebug = await ts.InfoAsync(key, debug: true); Assert.Equal(4184, info.MemoryUsage); Assert.Equal(0, info.RetentionTime); diff --git a/tests/NRedisStack.Tests/TimeSeries/TimeSeriesTests.cs b/tests/NRedisStack.Tests/TimeSeries/TimeSeriesTests.cs index ea7720b2..3b1942c9 100644 --- a/tests/NRedisStack.Tests/TimeSeries/TimeSeriesTests.cs +++ b/tests/NRedisStack.Tests/TimeSeries/TimeSeriesTests.cs @@ -21,9 +21,9 @@ public void Dispose() // public void TestCreateOK() // { // IDatabase db = redisFixture.Redis.GetDatabase(); - // var result = db.TS().Create(key); + // var result = ts.Create(key); // Assert.True(result); - // //TimeSeriesInformation info = db.TS().Info(key); + // //TimeSeriesInformation info = ts.Info(key); // } } \ No newline at end of file From 0acf687910e0e0e4536b3248a0b1dc7f9b67066c Mon Sep 17 00:00:00 2001 From: shacharPash Date: Wed, 31 Aug 2022 16:58:00 +0300 Subject: [PATCH 3/7] Make ResponeParser mothod to be extension Methods and Changing the way of calling them --- src/NRedisStack/Bloom/BloomCommands.cs | 24 +++--- src/NRedisStack/CountMinSketch/CmsCommands.cs | 28 +++---- .../CuckooFilter/CuckooCommands.cs | 32 ++++---- src/NRedisStack/ResponseParser.cs | 26 +++---- src/NRedisStack/Tdigest/TdigestCommands.cs | 52 ++++++------- .../TimeSeries/TimeSeriesCommands.cs | 76 ++++++++++--------- src/NRedisStack/TopK/TopKCommands.cs | 32 ++++---- 7 files changed, 137 insertions(+), 133 deletions(-) diff --git a/src/NRedisStack/Bloom/BloomCommands.cs b/src/NRedisStack/Bloom/BloomCommands.cs index 2cbec6cd..df4d4ed6 100644 --- a/src/NRedisStack/Bloom/BloomCommands.cs +++ b/src/NRedisStack/Bloom/BloomCommands.cs @@ -149,7 +149,7 @@ public bool[] Insert(RedisKey key, RedisValue[] items, int? capacity = null, args.Add(item); } - return ResponseParser.ToBooleanArray(_db.Execute(BF.INSERT, args)); + return _db.Execute(BF.INSERT, args).ToBooleanArray(); } /// @@ -213,7 +213,7 @@ public async Task InsertAsync(RedisKey key, RedisValue[] items, int? 
cap } var result = await _db.ExecuteAsync(BF.INSERT, args); - return ResponseParser.ToBooleanArray(result); + return result.ToBooleanArray(); } /// @@ -226,7 +226,7 @@ public async Task InsertAsync(RedisKey key, RedisValue[] items, int? cap /// public bool LoadChunk(RedisKey key, long iterator, Byte[] data) { - return ResponseParser.OKtoBoolean(_db.Execute(BF.LOADCHUNK, key, iterator, data)); + return _db.Execute(BF.LOADCHUNK, key, iterator, data).OKtoBoolean(); } /// @@ -240,7 +240,7 @@ public bool LoadChunk(RedisKey key, long iterator, Byte[] data) public async Task LoadChunkAsync(RedisKey key, long iterator, Byte[] data) { var result = await _db.ExecuteAsync(BF.LOADCHUNK, key, iterator, data); - return ResponseParser.OKtoBoolean(result); + return result.OKtoBoolean(); } /// @@ -263,7 +263,7 @@ public bool[] MAdd(RedisKey key, params RedisValue[] items) args.Add(item); } - return ResponseParser.ToBooleanArray(_db.Execute(BF.MADD, args)); + return _db.Execute(BF.MADD, args).ToBooleanArray(); } /// @@ -287,7 +287,7 @@ public async Task MAddAsync(RedisKey key, params RedisValue[] items) } var result = await _db.ExecuteAsync(BF.MADD, args); - return ResponseParser.ToBooleanArray(result); + return result.ToBooleanArray(); } /// @@ -310,7 +310,7 @@ public bool[] MExists(RedisKey key, RedisValue[] items) args.Add(item); } - return ResponseParser.ToBooleanArray(_db.Execute(BF.MEXISTS, args)); + return _db.Execute(BF.MEXISTS, args).ToBooleanArray(); } @@ -335,7 +335,7 @@ public async Task MExistsAsync(RedisKey key, RedisValue[] items) } var result = await _db.ExecuteAsync(BF.MEXISTS, args); - return ResponseParser.ToBooleanArray(result); + return result.ToBooleanArray(); } @@ -366,7 +366,7 @@ public bool Reserve(RedisKey key, double errorRate, long capacity, args.Add(BloomArgs.NONSCALING); } - return ResponseParser.OKtoBoolean(_db.Execute(BF.RESERVE, args)); + return _db.Execute(BF.RESERVE, args).OKtoBoolean(); } /// @@ -397,7 +397,7 @@ public async Task ReserveAsync(RedisKey key, double errorRate, long capaci } var result = await _db.ExecuteAsync(BF.RESERVE, args); - return ResponseParser.OKtoBoolean(result); + return result.OKtoBoolean(); } /// @@ -409,7 +409,7 @@ public async Task ReserveAsync(RedisKey key, double errorRate, long capaci /// public Tuple ScanDump(RedisKey key, long iterator) { - return ResponseParser.ToScanDumpTuple(_db.Execute(BF.SCANDUMP, key, iterator)); + return _db.Execute(BF.SCANDUMP, key, iterator).ToScanDumpTuple(); } /// @@ -422,7 +422,7 @@ public Tuple ScanDump(RedisKey key, long iterator) public async Task> ScanDumpAsync(RedisKey key, long iterator) { var result = await _db.ExecuteAsync(BF.SCANDUMP, key, iterator); - return ResponseParser.ToScanDumpTuple(result); + return result.ToScanDumpTuple(); } } } diff --git a/src/NRedisStack/CountMinSketch/CmsCommands.cs b/src/NRedisStack/CountMinSketch/CmsCommands.cs index b25e672d..046aafa0 100644 --- a/src/NRedisStack/CountMinSketch/CmsCommands.cs +++ b/src/NRedisStack/CountMinSketch/CmsCommands.cs @@ -22,7 +22,7 @@ public CmsCommands(IDatabase db) /// public long IncrBy(RedisKey key, RedisValue item, long increment) { - return ResponseParser.ToLong(_db.Execute(CMS.INCRBY, key, item, increment)); + return _db.Execute(CMS.INCRBY, key, item, increment).ToLong(); } /// @@ -36,7 +36,7 @@ public long IncrBy(RedisKey key, RedisValue item, long increment) public async Task IncrByAsync(RedisKey key, RedisValue item, long increment) { var result = await _db.ExecuteAsync(CMS.INCRBY, key, item, increment); - return 
ResponseParser.ToLong(result); + return result.ToLong(); } /// @@ -58,7 +58,7 @@ public long[] IncrBy(RedisKey key, Tuple[] itemIncrements) args.Add(pair.Item1); args.Add(pair.Item2); } - return ResponseParser.ToLongArray(_db.Execute(CMS.INCRBY, args)); + return _db.Execute(CMS.INCRBY, args).ToLongArray(); } /// @@ -82,7 +82,7 @@ public async Task IncrByAsync(RedisKey key, Tuple[] it } var result = await _db.ExecuteAsync(CMS.INCRBY, args); - return ResponseParser.ToLongArray(result); + return result.ToLongArray(); } /// @@ -94,7 +94,7 @@ public async Task IncrByAsync(RedisKey key, Tuple[] it public CmsInformation Info(RedisKey key) { var info = _db.Execute(CMS.INFO, key); - return ResponseParser.ToCmsInfo(info); + return info.ToCmsInfo(); } /// @@ -106,7 +106,7 @@ public CmsInformation Info(RedisKey key) public async Task InfoAsync(RedisKey key) { var info = await _db.ExecuteAsync(CMS.INFO, key); - return ResponseParser.ToCmsInfo(info); + return info.ToCmsInfo(); } /// @@ -120,7 +120,7 @@ public async Task InfoAsync(RedisKey key) /// public bool InitByDim(RedisKey key, long width, long depth) { - return ResponseParser.OKtoBoolean(_db.Execute(CMS.INITBYDIM, key, width, depth)); + return _db.Execute(CMS.INITBYDIM, key, width, depth).OKtoBoolean(); } /// @@ -135,7 +135,7 @@ public bool InitByDim(RedisKey key, long width, long depth) public async Task InitByDimAsync(RedisKey key, long width, long depth) { var result = await _db.ExecuteAsync(CMS.INITBYDIM, key, width, depth); - return ResponseParser.OKtoBoolean(result); + return result.OKtoBoolean(); } /// @@ -148,7 +148,7 @@ public async Task InitByDimAsync(RedisKey key, long width, long depth) /// public bool InitByProb(RedisKey key, double error, double probability) { - return ResponseParser.OKtoBoolean(_db.Execute(CMS.INITBYPROB, key, error, probability)); + return _db.Execute(CMS.INITBYPROB, key, error, probability).OKtoBoolean(); } /// @@ -162,7 +162,7 @@ public bool InitByProb(RedisKey key, double error, double probability) public async Task InitByProbAsync(RedisKey key, double error, double probability) { var result = await _db.ExecuteAsync(CMS.INITBYPROB, key, error, probability); - return ResponseParser.OKtoBoolean(result); + return result.OKtoBoolean(); } /// @@ -189,7 +189,7 @@ public bool Merge(RedisValue destination, long numKeys, RedisValue[] source, lon foreach (var w in weight) args.Add(w); } - return ResponseParser.OKtoBoolean(_db.Execute(CMS.MERGE, args)); + return _db.Execute(CMS.MERGE, args).OKtoBoolean(); } /// @@ -217,7 +217,7 @@ public async Task MergeAsync(RedisValue destination, long numKeys, RedisVa } var result = await _db.ExecuteAsync(CMS.MERGE, args); - return ResponseParser.OKtoBoolean(result); + return result.OKtoBoolean(); } /// @@ -235,7 +235,7 @@ public long[] Query(RedisKey key, params RedisValue[] items) List args = new List { key }; foreach (var item in items) args.Add(item); - return ResponseParser.ToLongArray(_db.Execute(CMS.QUERY, args)); + return _db.Execute(CMS.QUERY, args).ToLongArray(); } /// @@ -254,7 +254,7 @@ public async Task QueryAsync(RedisKey key, params RedisValue[] items) foreach (var item in items) args.Add(item); var result = await _db.ExecuteAsync(CMS.QUERY, args); - return ResponseParser.ToLongArray(result); + return result.ToLongArray(); } } } diff --git a/src/NRedisStack/CuckooFilter/CuckooCommands.cs b/src/NRedisStack/CuckooFilter/CuckooCommands.cs index 79dc9def..85a81272 100644 --- a/src/NRedisStack/CuckooFilter/CuckooCommands.cs +++ b/src/NRedisStack/CuckooFilter/CuckooCommands.cs 
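// Illustrative sketch (hypothetical helper, not from the NRedisStack source): the hunks above
// and below all follow the same refactor — ResponseParser's helpers are now `this RedisResult`
// extension methods, so call sites switch from static ResponseParser.X(result) calls to fluent
// chaining on the result. Assumes the parser extensions are publicly reachable via the
// NRedisStack namespace; CF.COUNT is just the example command.
using NRedisStack;
using StackExchange.Redis;

public static class ParserCallStyleSketch
{
    public static long CountInCuckooFilter(IDatabase db, RedisKey key, RedisValue item)
    {
        RedisResult raw = db.Execute("CF.COUNT", key, item);
        // Old call style: return ResponseParser.ToLong(raw);
        // New call style: chain the extension directly on the result.
        return raw.ToLong();
    }
}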
@@ -71,7 +71,7 @@ public async Task AddNXAsync(RedisKey key, RedisValue item) /// public long Count(RedisKey key, RedisValue item) { - return ResponseParser.ToLong(_db.Execute(CF.COUNT, key, item)); + return _db.Execute(CF.COUNT, key, item).ToLong(); } /// @@ -84,7 +84,7 @@ public long Count(RedisKey key, RedisValue item) public async Task CountAsync(RedisKey key, RedisValue item) { var result = await _db.ExecuteAsync(CF.COUNT, key, item); - return ResponseParser.ToLong(result); + return result.ToLong(); } /// @@ -148,7 +148,7 @@ public async Task ExistsAsync(RedisKey key, RedisValue item) public CuckooInformation Info(RedisKey key) { var info = _db.Execute(CF.INFO, key); - return ResponseParser.ToCuckooInfo(info); + return info.ToCuckooInfo(); } /// @@ -160,7 +160,7 @@ public CuckooInformation Info(RedisKey key) public async Task InfoAsync(RedisKey key) { var info = await _db.ExecuteAsync(CF.INFO, key); - return ResponseParser.ToCuckooInfo(info); + return info.ToCuckooInfo(); } /// @@ -196,7 +196,7 @@ public bool[] Insert(RedisKey key, RedisValue[] items, int? capacity = null, boo args.Add(item); } - return ResponseParser.ToBooleanArray(_db.Execute(CF.INSERT, args)); + return _db.Execute(CF.INSERT, args).ToBooleanArray(); } /// @@ -233,7 +233,7 @@ public async Task InsertAsync(RedisKey key, RedisValue[] items, int? cap } var result = await _db.ExecuteAsync(CF.INSERT, args); - return ResponseParser.ToBooleanArray(result); + return result.ToBooleanArray(); } /// @@ -271,7 +271,7 @@ public bool[] InsertNX(RedisKey key, RedisValue[] items, int? capacity = null, b args.Add(item); } - return ResponseParser.ToBooleanArray(_db.Execute(CF.INSERTNX, args)); + return _db.Execute(CF.INSERTNX, args).ToBooleanArray(); } /// @@ -310,7 +310,7 @@ public async Task InsertNXAsync(RedisKey key, RedisValue[] items, int? c } var result = await _db.ExecuteAsync(CF.INSERTNX, args); - return ResponseParser.ToBooleanArray(result); + return result.ToBooleanArray(); } /// @@ -323,7 +323,7 @@ public async Task InsertNXAsync(RedisKey key, RedisValue[] items, int? 
c /// public bool LoadChunk(RedisKey key, long iterator, Byte[] data) { - return ResponseParser.OKtoBoolean(_db.Execute(CF.LOADCHUNK, key, iterator, data)); + return _db.Execute(CF.LOADCHUNK, key, iterator, data).OKtoBoolean(); } /// @@ -337,7 +337,7 @@ public bool LoadChunk(RedisKey key, long iterator, Byte[] data) public async Task LoadChunkAsync(RedisKey key, long iterator, Byte[] data) { var result = await _db.ExecuteAsync(CF.LOADCHUNK, key, iterator, data); - return ResponseParser.OKtoBoolean(result); + return result.OKtoBoolean(); } /// @@ -360,7 +360,7 @@ public bool[] MExists(RedisKey key, params RedisValue[] items) args.Add(item); } - return ResponseParser.ToBooleanArray(_db.Execute(CF.MEXISTS, args)); + return _db.Execute(CF.MEXISTS, args).ToBooleanArray(); } /// @@ -384,7 +384,7 @@ public async Task MExistsAsync(RedisKey key, params RedisValue[] items) } var result = await _db.ExecuteAsync(CF.MEXISTS, args); - return ResponseParser.ToBooleanArray(result); + return result.ToBooleanArray(); } /// @@ -422,7 +422,7 @@ public bool Reserve(RedisKey key, long capacity, args.Add(expansion); } - return ResponseParser.OKtoBoolean(_db.Execute(CF.RESERVE, args)); + return _db.Execute(CF.RESERVE, args).OKtoBoolean(); } /// @@ -461,7 +461,7 @@ public async Task ReserveAsync(RedisKey key, long capacity, } var result = await _db.ExecuteAsync(CF.RESERVE, args); - return ResponseParser.OKtoBoolean(result); + return result.OKtoBoolean(); } /// @@ -473,7 +473,7 @@ public async Task ReserveAsync(RedisKey key, long capacity, /// public Tuple ScanDump(RedisKey key, long iterator) { - return ResponseParser.ToScanDumpTuple(_db.Execute(CF.SCANDUMP, key, iterator)); + return _db.Execute(CF.SCANDUMP, key, iterator).ToScanDumpTuple(); } /// @@ -486,7 +486,7 @@ public Tuple ScanDump(RedisKey key, long iterator) public async Task> ScanDumpAsync(RedisKey key, long iterator) { var result = await _db.ExecuteAsync(CF.SCANDUMP, key, iterator); - return ResponseParser.ToScanDumpTuple(result); + return result.ToScanDumpTuple(); } } } \ No newline at end of file diff --git a/src/NRedisStack/ResponseParser.cs b/src/NRedisStack/ResponseParser.cs index dec73326..6cf0c475 100644 --- a/src/NRedisStack/ResponseParser.cs +++ b/src/NRedisStack/ResponseParser.cs @@ -21,7 +21,7 @@ public static bool OKtoBoolean(this RedisResult result) public static bool[] ToBooleanArray(this RedisResult result) { - RedisResult[]? redisResults = ToArray(result); + RedisResult[]? redisResults = result.ToArray(); bool[] boolArr = new bool[redisResults.Length]; for (int i = 0; i < redisResults.Length; i++) @@ -57,7 +57,7 @@ public static double ToDouble(this RedisResult result) public static double[] ToDoubleArray(this RedisResult result) { List redisResults = new List(); - foreach (var res in ToArray(result)) + foreach (var res in result.ToArray()) { redisResults.Add(ToDouble(res)); } @@ -68,7 +68,7 @@ public static double[] ToDoubleArray(this RedisResult result) public static long[] ToLongArray(this RedisResult result) { List redisResults = new List(); - foreach (var res in ToArray(result)) + foreach (var res in result.ToArray()) { redisResults.Add(ToLong(res)); } @@ -93,21 +93,21 @@ public static IReadOnlyList ToTimeStampArray(this RedisResult result) public static TimeSeriesTuple? 
ToTimeSeriesTuple(this RedisResult result) { - RedisResult[] redisResults = ToArray(result); + RedisResult[] redisResults = result.ToArray(); if (redisResults.Length == 0) return null; return new TimeSeriesTuple(ToTimeStamp(redisResults[0]), (double)redisResults[1]); } public static Tuple ToScanDumpTuple(this RedisResult result) { - RedisResult[] redisResults = ToArray(result); + RedisResult[] redisResults = result.ToArray(); if (redisResults == null || redisResults.Length == 0) return null; return new Tuple((long)redisResults[0], (Byte[])redisResults[1]); } public static HashEntry ToHashEntry(this RedisResult result) { - RedisResult[] redisResults = ToArray(result); + RedisResult[] redisResults = result.ToArray(); if (redisResults.Length < 2) throw new ArgumentOutOfRangeException(nameof(result)); @@ -116,7 +116,7 @@ public static HashEntry ToHashEntry(this RedisResult result) public static HashEntry[] ToHashEntryArray(this RedisResult result) { - RedisResult[] redisResults = ToArray(result); + RedisResult[] redisResults = result.ToArray(); var hash = new HashEntry[redisResults.Length / 2]; if (redisResults.Length == 0) return hash; @@ -226,7 +226,7 @@ public static IReadOnlyList ToRuleArray(this RedisResult result) { long capacity, size, numberOfFilters, numberOfItemsInserted, expansionRate; capacity = size = numberOfFilters = numberOfItemsInserted = expansionRate = -1; - RedisResult[] redisResults = ToArray(result); + RedisResult[] redisResults = result.ToArray(); for (int i = 0; i < redisResults.Length; ++i) { @@ -264,7 +264,7 @@ public static IReadOnlyList ToRuleArray(this RedisResult result) numberOfItemsInserted = numberOfItemsDeleted = bucketSize = expansionRate = maxIteration = -1; - RedisResult[] redisResults = ToArray(result); + RedisResult[] redisResults = result.ToArray(); for (int i = 0; i < redisResults.Length; ++i) { @@ -309,7 +309,7 @@ public static IReadOnlyList ToRuleArray(this RedisResult result) width = depth = count = -1; - RedisResult[] redisResults = ToArray(result); + RedisResult[] redisResults = result.ToArray(); for (int i = 0; i < redisResults.Length; ++i) { @@ -340,7 +340,7 @@ public static IReadOnlyList ToRuleArray(this RedisResult result) k = width = depth = -1; decay = -1.0; - RedisResult[] redisResults = ToArray(result); + RedisResult[] redisResults = result.ToArray(); for (int i = 0; i < redisResults.Length; ++i) { @@ -374,7 +374,7 @@ public static IReadOnlyList ToRuleArray(this RedisResult result) compression = capacity = mergedNodes = unmergedNodes = totalCompressions = -1; mergedWeight = unmergedWeight = -1.0; - RedisResult[] redisResults = ToArray(result); + RedisResult[] redisResults = result.ToArray(); for (int i = 0; i < redisResults.Length; ++i) { @@ -524,7 +524,7 @@ public static TimeSeriesChunck ToTimeSeriesChunk(this RedisResult result) public static IReadOnlyList ToStringArray(this RedisResult result) { - RedisResult[] redisResults = ToArray(result); + RedisResult[] redisResults = result.ToArray(); var list = new List(); if (redisResults.Length == 0) return list; diff --git a/src/NRedisStack/Tdigest/TdigestCommands.cs b/src/NRedisStack/Tdigest/TdigestCommands.cs index 22506356..1d2954c2 100644 --- a/src/NRedisStack/Tdigest/TdigestCommands.cs +++ b/src/NRedisStack/Tdigest/TdigestCommands.cs @@ -24,7 +24,7 @@ public bool Add(RedisKey key, double item, double weight) { if (weight < 0) throw new ArgumentException(nameof(weight)); - return ResponseParser.OKtoBoolean(_db.Execute(TDIGEST.ADD, key, item, weight)); + return _db.Execute(TDIGEST.ADD, key, 
item, weight).OKtoBoolean(); } /// @@ -40,7 +40,7 @@ public async Task AddAsync(RedisKey key, double item, double weight) if (weight < 0) throw new ArgumentException(nameof(weight)); var result = await _db.ExecuteAsync(TDIGEST.ADD, key, item, weight); - return ResponseParser.OKtoBoolean(result); + return result.OKtoBoolean(); } /// @@ -63,7 +63,7 @@ public bool Add(RedisKey key, params Tuple[] valueWeight) args.Add(pair.Item1); args.Add(pair.Item2); } - return ResponseParser.OKtoBoolean(_db.Execute(TDIGEST.ADD, args)); + return _db.Execute(TDIGEST.ADD, args).OKtoBoolean(); } /// @@ -86,7 +86,7 @@ public async Task AddAsync(RedisKey key, params Tuple[] va args.Add(pair.Item1); args.Add(pair.Item2); } - return ResponseParser.OKtoBoolean(await _db.ExecuteAsync(TDIGEST.ADD, args)); + return (await _db.ExecuteAsync(TDIGEST.ADD, args)).OKtoBoolean(); } /// @@ -98,7 +98,7 @@ public async Task AddAsync(RedisKey key, params Tuple[] va /// public double CDF(RedisKey key, double value) { - return ResponseParser.ToDouble(_db.Execute(TDIGEST.CDF, key, value)); + return _db.Execute(TDIGEST.CDF, key, value).ToDouble(); } /// @@ -111,7 +111,7 @@ public double CDF(RedisKey key, double value) public async Task CDFAsync(RedisKey key, double value) { var result = await _db.ExecuteAsync(TDIGEST.CDF, key, value); - return ResponseParser.ToDouble(result); + return result.ToDouble(); } /// @@ -123,7 +123,7 @@ public async Task CDFAsync(RedisKey key, double value) /// public bool Create(RedisKey key, long compression = 100) { - return ResponseParser.OKtoBoolean(_db.Execute(TDIGEST.CREATE, key, TdigestArgs.COMPRESSION, compression)); + return _db.Execute(TDIGEST.CREATE, key, TdigestArgs.COMPRESSION, compression).OKtoBoolean(); } /// @@ -135,7 +135,7 @@ public bool Create(RedisKey key, long compression = 100) /// public async Task CreateAsync(RedisKey key, long compression = 100) { - return ResponseParser.OKtoBoolean(await _db.ExecuteAsync(TDIGEST.CREATE, key, TdigestArgs.COMPRESSION, compression)); + return (await _db.ExecuteAsync(TDIGEST.CREATE, key, TdigestArgs.COMPRESSION, compression)).OKtoBoolean(); } /// @@ -146,7 +146,7 @@ public async Task CreateAsync(RedisKey key, long compression = 100) /// public TdigestInformation Info(RedisKey key) { - return ResponseParser.ToTdigestInfo(_db.Execute(TDIGEST.INFO, key)); + return _db.Execute(TDIGEST.INFO, key).ToTdigestInfo(); } /// @@ -157,7 +157,7 @@ public TdigestInformation Info(RedisKey key) /// public async Task InfoAsync(RedisKey key) { - return ResponseParser.ToTdigestInfo(await _db.ExecuteAsync(TDIGEST.INFO, key)); + return (await _db.ExecuteAsync(TDIGEST.INFO, key)).ToTdigestInfo(); } @@ -169,7 +169,7 @@ public async Task InfoAsync(RedisKey key) /// public double Max(RedisKey key) { - return ResponseParser.ToDouble(_db.Execute(TDIGEST.MAX, key)); + return _db.Execute(TDIGEST.MAX, key).ToDouble(); } /// @@ -180,7 +180,7 @@ public double Max(RedisKey key) /// public async Task MaxAsync(RedisKey key) { - return ResponseParser.ToDouble(await _db.ExecuteAsync(TDIGEST.MAX, key)); + return (await _db.ExecuteAsync(TDIGEST.MAX, key)).ToDouble(); } /// @@ -191,7 +191,7 @@ public async Task MaxAsync(RedisKey key) /// public double Min(RedisKey key) { - return ResponseParser.ToDouble(_db.Execute(TDIGEST.MIN, key)); + return _db.Execute(TDIGEST.MIN, key).ToDouble(); } /// @@ -202,7 +202,7 @@ public double Min(RedisKey key) /// public async Task MinAsync(RedisKey key) { - return ResponseParser.ToDouble(await _db.ExecuteAsync(TDIGEST.MIN, key)); + return (await 
_db.ExecuteAsync(TDIGEST.MIN, key)).ToDouble(); } /// @@ -214,7 +214,7 @@ public async Task MinAsync(RedisKey key) /// public bool Merge(RedisKey destinationKey, RedisKey sourceKey) { - return ResponseParser.OKtoBoolean(_db.Execute(TDIGEST.MERGE, destinationKey, sourceKey)); + return _db.Execute(TDIGEST.MERGE, destinationKey, sourceKey).OKtoBoolean(); } /// @@ -227,7 +227,7 @@ public bool Merge(RedisKey destinationKey, RedisKey sourceKey) public async Task MergeAsync(RedisKey destinationKey, RedisKey sourceKey) { var result = await _db.ExecuteAsync(TDIGEST.MERGE, destinationKey, sourceKey); - return ResponseParser.OKtoBoolean(result); + return result.OKtoBoolean(); } /// @@ -244,7 +244,7 @@ public bool Merge(RedisKey destinationKey, params RedisKey[] sourceKeys) var args = sourceKeys.ToList(); args.Insert(0, destinationKey); - return ResponseParser.OKtoBoolean(_db.Execute(TDIGEST.MERGE, args)); + return _db.Execute(TDIGEST.MERGE, args).OKtoBoolean(); } /// @@ -262,7 +262,7 @@ public async Task MergeAsync(RedisKey destinationKey, params RedisKey[] so args.Insert(0, destinationKey); var result = await _db.ExecuteAsync(TDIGEST.MERGE, args); - return ResponseParser.OKtoBoolean(result); + return result.OKtoBoolean(); } /// @@ -283,7 +283,7 @@ public bool MergeStore(RedisKey destinationKey, long numkeys, long compression = args.Add(TdigestArgs.COMPRESSION); args.Add(compression); - return ResponseParser.OKtoBoolean(_db.Execute(TDIGEST.MERGESTORE, args)); + return _db.Execute(TDIGEST.MERGESTORE, args).OKtoBoolean(); } /// @@ -305,7 +305,7 @@ public async Task MergeStoreAsync(RedisKey destinationKey, long numkeys, l args.Add(compression); var result = await _db.ExecuteAsync(TDIGEST.MERGESTORE, args); - return ResponseParser.OKtoBoolean(result); + return result.OKtoBoolean(); } /// @@ -323,7 +323,7 @@ public double[] Quantile(RedisKey key, params double[] quantile) var args = new List { key }; foreach (var q in quantile) args.Add(q); - return ResponseParser.ToDoubleArray(_db.Execute(TDIGEST.QUANTILE, args)); + return _db.Execute(TDIGEST.QUANTILE, args).ToDoubleArray(); } /// @@ -341,7 +341,7 @@ public async Task QuantileAsync(RedisKey key, params double[] quantile var args = new List { key }; foreach (var q in quantile) args.Add(q); - return ResponseParser.ToDoubleArray(await _db.ExecuteAsync(TDIGEST.QUANTILE, args)); + return (await _db.ExecuteAsync(TDIGEST.QUANTILE, args)).ToDoubleArray(); } /// @@ -352,7 +352,7 @@ public async Task QuantileAsync(RedisKey key, params double[] quantile /// public bool Reset(RedisKey key, params double[] quantile) { - return ResponseParser.OKtoBoolean(_db.Execute(TDIGEST.RESET, key)); + return _db.Execute(TDIGEST.RESET, key).OKtoBoolean(); } /// @@ -363,7 +363,7 @@ public bool Reset(RedisKey key, params double[] quantile) /// public async Task ResetAsync(RedisKey key, params double[] quantile) { - return ResponseParser.OKtoBoolean(await _db.ExecuteAsync(TDIGEST.RESET, key)); + return (await _db.ExecuteAsync(TDIGEST.RESET, key)).OKtoBoolean(); } /// @@ -376,7 +376,7 @@ public async Task ResetAsync(RedisKey key, params double[] quantile) /// public double TrimmedMean(RedisKey key, double lowCutQuantile, double highCutQuantile) { - return ResponseParser.ToDouble(_db.Execute(TDIGEST.TRIMMED_MEAN, key, lowCutQuantile, highCutQuantile)); + return _db.Execute(TDIGEST.TRIMMED_MEAN, key, lowCutQuantile, highCutQuantile).ToDouble(); } /// @@ -389,7 +389,7 @@ public double TrimmedMean(RedisKey key, double lowCutQuantile, double highCutQua /// public async Task 
TrimmedMeanAsync(RedisKey key, double lowCutQuantile, double highCutQuantile) { - return ResponseParser.ToDouble(await _db.ExecuteAsync(TDIGEST.TRIMMED_MEAN, key, lowCutQuantile, highCutQuantile)); + return (await _db.ExecuteAsync(TDIGEST.TRIMMED_MEAN, key, lowCutQuantile, highCutQuantile)).ToDouble(); } diff --git a/src/NRedisStack/TimeSeries/TimeSeriesCommands.cs b/src/NRedisStack/TimeSeries/TimeSeriesCommands.cs index f36e8934..f99c6eee 100644 --- a/src/NRedisStack/TimeSeries/TimeSeriesCommands.cs +++ b/src/NRedisStack/TimeSeries/TimeSeriesCommands.cs @@ -31,7 +31,7 @@ public TimeSeriesCommands(IDatabase db) public bool Create(string key, long? retentionTime = null, IReadOnlyCollection labels = null, bool? uncompressed = null, long? chunkSizeBytes = null, TsDuplicatePolicy? duplicatePolicy = null) { var args = TimeSeriesAux.BuildTsCreateArgs(key, retentionTime, labels, uncompressed, chunkSizeBytes, duplicatePolicy); - return ResponseParser.OKtoBoolean(_db.Execute(TS.CREATE, args)); + return _db.Execute(TS.CREATE, args).OKtoBoolean(); } /// @@ -49,7 +49,7 @@ public bool Create(string key, long? retentionTime = null, IReadOnlyCollection CreateAsync(string key, long? retentionTime = null, IReadOnlyCollection labels = null, bool? uncompressed = null, long? chunkSizeBytes = null, TsDuplicatePolicy? duplicatePolicy = null) { var args = TimeSeriesAux.BuildTsCreateArgs(key, retentionTime, labels, uncompressed, chunkSizeBytes, duplicatePolicy); - return ResponseParser.OKtoBoolean(await _db.ExecuteAsync(TS.CREATE, args)); + return (await _db.ExecuteAsync(TS.CREATE, args)).OKtoBoolean(); } #endregion @@ -70,7 +70,7 @@ public async Task CreateAsync(string key, long? retentionTime = null, IRea public bool Alter(string key, long? retentionTime = null, long? chunkSizeBytes = null, TsDuplicatePolicy? duplicatePolicy = null, IReadOnlyCollection? labels = null) { var args = TimeSeriesAux.BuildTsAlterArgs(key, retentionTime, chunkSizeBytes, duplicatePolicy, labels); - return ResponseParser.OKtoBoolean(_db.Execute(TS.ALTER, args)); + return _db.Execute(TS.ALTER, args).OKtoBoolean(); } /// @@ -87,7 +87,7 @@ public bool Alter(string key, long? retentionTime = null, long? chunkSizeBytes = public async Task AlterAsync(string key, long? retentionTime = null, long? chunkSizeBytes = null, TsDuplicatePolicy? duplicatePolicy = null, IReadOnlyCollection? labels = null) { var args = TimeSeriesAux.BuildTsAlterArgs(key, retentionTime, chunkSizeBytes, duplicatePolicy, labels); - return ResponseParser.OKtoBoolean(await _db.ExecuteAsync(TS.ALTER, args)); + return (await _db.ExecuteAsync(TS.ALTER, args)).OKtoBoolean(); } /// @@ -109,7 +109,7 @@ public TimeStamp Add(string key, TimeStamp timestamp, double value, long? retent long? chunkSizeBytes = null, TsDuplicatePolicy? duplicatePolicy = null) { var args = TimeSeriesAux.BuildTsAddArgs(key, timestamp, value, retentionTime, labels, uncompressed, chunkSizeBytes, duplicatePolicy); - return ResponseParser.ToTimeStamp(_db.Execute(TS.ADD, args)); + return _db.Execute(TS.ADD, args).ToTimeStamp(); } /// @@ -129,7 +129,7 @@ public TimeStamp Add(string key, TimeStamp timestamp, double value, long? retent public async Task AddAsync(string key, TimeStamp timestamp, double value, long? retentionTime = null, IReadOnlyCollection labels = null, bool? uncompressed = null, long? chunkSizeBytes = null, TsDuplicatePolicy? 
duplicatePolicy = null) { var args = TimeSeriesAux.BuildTsAddArgs(key, timestamp, value, retentionTime, labels, uncompressed, chunkSizeBytes, duplicatePolicy); - return ResponseParser.ToTimeStamp(await _db.ExecuteAsync(TS.ADD, args)); + return (await _db.ExecuteAsync(TS.ADD, args)).ToTimeStamp(); } /// @@ -141,7 +141,7 @@ public async Task AddAsync(string key, TimeStamp timestamp, double va public IReadOnlyList MAdd(IReadOnlyCollection<(string key, TimeStamp timestamp, double value)> sequence) { var args = TimeSeriesAux.BuildTsMaddArgs(sequence); - return ResponseParser.ToTimeStampArray(_db.Execute(TS.MADD, args)); + return _db.Execute(TS.MADD, args).ToTimeStampArray(); } /// @@ -153,7 +153,7 @@ public IReadOnlyList MAdd(IReadOnlyCollection<(string key, TimeStamp public async Task> MAddAsync(IReadOnlyCollection<(string key, TimeStamp timestamp, double value)> sequence) { var args = TimeSeriesAux.BuildTsMaddArgs(sequence); - return ResponseParser.ToTimeStampArray(await _db.ExecuteAsync(TS.MADD, args)); + return (await _db.ExecuteAsync(TS.MADD, args)).ToTimeStampArray(); } /// @@ -172,7 +172,7 @@ public async Task> MAddAsync(IReadOnlyCollection<(strin public TimeStamp IncrBy(string key, double value, TimeStamp? timestamp = null, long? retentionTime = null, IReadOnlyCollection? labels = null, bool? uncompressed = null, long? chunkSizeBytes = null) { var args = TimeSeriesAux.BuildTsIncrDecrByArgs(key, value, timestamp, retentionTime, labels, uncompressed, chunkSizeBytes); - return ResponseParser.ToTimeStamp(_db.Execute(TS.INCRBY, args)); + return _db.Execute(TS.INCRBY, args).ToTimeStamp(); } /// @@ -191,7 +191,7 @@ public TimeStamp IncrBy(string key, double value, TimeStamp? timestamp = null, l public async Task IncrByAsync(string key, double value, TimeStamp? timestamp = null, long? retentionTime = null, IReadOnlyCollection? labels = null, bool? uncompressed = null, long? chunkSizeBytes = null) { var args = TimeSeriesAux.BuildTsIncrDecrByArgs(key, value, timestamp, retentionTime, labels, uncompressed, chunkSizeBytes); - return ResponseParser.ToTimeStamp(await _db.ExecuteAsync(TS.INCRBY, args)); + return (await _db.ExecuteAsync(TS.INCRBY, args)).ToTimeStamp(); } /// @@ -210,7 +210,7 @@ public async Task IncrByAsync(string key, double value, TimeStamp? ti public TimeStamp DecrBy(string key, double value, TimeStamp? timestamp = null, long? retentionTime = null, IReadOnlyCollection? labels = null, bool? uncompressed = null, long? chunkSizeBytes = null) { var args = TimeSeriesAux.BuildTsIncrDecrByArgs(key, value, timestamp, retentionTime, labels, uncompressed, chunkSizeBytes); - return ResponseParser.ToTimeStamp(_db.Execute(TS.DECRBY, args)); + return _db.Execute(TS.DECRBY, args).ToTimeStamp(); } /// @@ -229,7 +229,7 @@ public TimeStamp DecrBy(string key, double value, TimeStamp? timestamp = null, l public async Task DecrByAsync(string key, double value, TimeStamp? timestamp = null, long? retentionTime = null, IReadOnlyCollection? labels = null, bool? uncompressed = null, long? chunkSizeBytes = null) { var args = TimeSeriesAux.BuildTsIncrDecrByArgs(key, value, timestamp, retentionTime, labels, uncompressed, chunkSizeBytes); - return ResponseParser.ToTimeStamp(await _db.ExecuteAsync(TS.DECRBY, args)); + return (await _db.ExecuteAsync(TS.DECRBY, args)).ToTimeStamp(); } /// @@ -244,7 +244,7 @@ public async Task DecrByAsync(string key, double value, TimeStamp? 
ti public long Del(string key, TimeStamp fromTimeStamp, TimeStamp toTimeStamp) { var args = TimeSeriesAux.BuildTsDelArgs(key, fromTimeStamp, toTimeStamp); - return ResponseParser.ToLong(_db.Execute(TS.DEL, args)); + return _db.Execute(TS.DEL, args).ToLong(); } /// @@ -259,7 +259,7 @@ public long Del(string key, TimeStamp fromTimeStamp, TimeStamp toTimeStamp) public async Task DelAsync(string key, TimeStamp fromTimeStamp, TimeStamp toTimeStamp) { var args = TimeSeriesAux.BuildTsDelArgs(key, fromTimeStamp, toTimeStamp); - return ResponseParser.ToLong(await _db.ExecuteAsync(TS.DEL, args)); + return (await _db.ExecuteAsync(TS.DEL, args)).ToLong(); } #endregion @@ -282,7 +282,7 @@ public bool CreateRule(string sourceKey, TimeSeriesRule rule, long alignTimestam var args = new List { sourceKey }; args.AddRule(rule); args.Add(alignTimestamp); - return ResponseParser.OKtoBoolean(_db.Execute(TS.CREATERULE, args)); + return _db.Execute(TS.CREATERULE, args).OKtoBoolean(); } /// @@ -301,7 +301,7 @@ public async Task CreateRuleAsync(string sourceKey, TimeSeriesRule rule, l var args = new List { sourceKey }; args.AddRule(rule); args.Add(alignTimestamp); - return ResponseParser.OKtoBoolean(await _db.ExecuteAsync(TS.CREATERULE, args)); + return (await _db.ExecuteAsync(TS.CREATERULE, args)).OKtoBoolean(); } /// @@ -314,7 +314,7 @@ public async Task CreateRuleAsync(string sourceKey, TimeSeriesRule rule, l public bool DeleteRule(string sourceKey, string destKey) { var args = new List { sourceKey, destKey }; - return ResponseParser.OKtoBoolean(_db.Execute(TS.DELETERULE, args)); + return _db.Execute(TS.DELETERULE, args).OKtoBoolean(); } /// @@ -327,7 +327,7 @@ public bool DeleteRule(string sourceKey, string destKey) public async Task DeleteRuleAsync(string sourceKey, string destKey) { var args = new List { sourceKey, destKey }; - return ResponseParser.OKtoBoolean(await _db.ExecuteAsync(TS.DELETERULE, args)); + return (await _db.ExecuteAsync(TS.DELETERULE, args)).OKtoBoolean(); } #endregion @@ -346,8 +346,9 @@ public async Task DeleteRuleAsync(string sourceKey, string destKey) /// public TimeSeriesTuple? Get(string key, bool latest = false) { - return ResponseParser.ToTimeSeriesTuple((latest) ? _db.Execute(TS.GET, key, TimeSeriesArgs.LATEST) - : _db.Execute(TS.GET, key)); + var result = (latest) ? _db.Execute(TS.GET, key, TimeSeriesArgs.LATEST) + : _db.Execute(TS.GET, key); + return result.ToTimeSeriesTuple(); } /// @@ -362,8 +363,9 @@ public async Task DeleteRuleAsync(string sourceKey, string destKey) /// public async Task GetAsync(string key, bool latest = false) { - return ResponseParser.ToTimeSeriesTuple(await ((latest) ? _db.ExecuteAsync(TS.GET, key, TimeSeriesArgs.LATEST) + var result = (await ((latest) ? _db.ExecuteAsync(TS.GET, key, TimeSeriesArgs.LATEST) : _db.ExecuteAsync(TS.GET, key))); + return result.ToTimeSeriesTuple(); } /// @@ -382,7 +384,7 @@ public async Task DeleteRuleAsync(string sourceKey, string destKey) bool? withLabels = null, IReadOnlyCollection? selectedLabels = null) { var args = TimeSeriesAux.BuildTsMgetArgs(latest, filter, withLabels, selectedLabels); - return ResponseParser.ParseMGetResponse(_db.Execute(TS.MGET, args)); + return _db.Execute(TS.MGET, args).ParseMGetResponse(); } /// @@ -401,7 +403,7 @@ public async Task DeleteRuleAsync(string sourceKey, string destKey) bool? withLabels = null, IReadOnlyCollection? 
selectedLabels = null) { var args = TimeSeriesAux.BuildTsMgetArgs(latest, filter, withLabels, selectedLabels); - return ResponseParser.ParseMGetResponse(await _db.ExecuteAsync(TS.MGET, args)); + return (await _db.ExecuteAsync(TS.MGET, args)).ParseMGetResponse(); } /// @@ -441,7 +443,7 @@ public IReadOnlyList Range(string key, latest, filterByTs, filterByValue, count, align, aggregation, timeBucket, bt, empty); - return ResponseParser.ToTimeSeriesTupleArray(_db.Execute(TS.RANGE, args)); + return _db.Execute(TS.RANGE, args).ToTimeSeriesTupleArray(); } /// @@ -481,7 +483,7 @@ public async Task> RangeAsync(string key, latest, filterByTs, filterByValue, count, align, aggregation, timeBucket, bt, empty); - return ResponseParser.ToTimeSeriesTupleArray(await _db.ExecuteAsync(TS.RANGE, args)); + return (await _db.ExecuteAsync(TS.RANGE, args)).ToTimeSeriesTupleArray(); } /// @@ -521,7 +523,7 @@ public IReadOnlyList RevRange(string key, latest, filterByTs, filterByValue, count, align, aggregation, timeBucket, bt, empty); - return ResponseParser.ToTimeSeriesTupleArray(_db.Execute(TS.REVRANGE, args)); + return _db.Execute(TS.REVRANGE, args).ToTimeSeriesTupleArray(); } /// @@ -561,7 +563,7 @@ public async Task> RevRangeAsync(string key, latest, filterByTs, filterByValue, count, align, aggregation, timeBucket, bt, empty); - return ResponseParser.ToTimeSeriesTupleArray(await _db.ExecuteAsync(TS.REVRANGE, args)); + return (await _db.ExecuteAsync(TS.REVRANGE, args)).ToTimeSeriesTupleArray(); } /// @@ -607,7 +609,7 @@ public async Task> RevRangeAsync(string key, var args = TimeSeriesAux.BuildMultiRangeArgs(fromTimeStamp, toTimeStamp, filter, latest, filterByTs, filterByValue, withLabels, selectLabels, count, align, aggregation, timeBucket, bt, empty, groupbyTuple); - return ResponseParser.ParseMRangeResponse(_db.Execute(TS.MRANGE, args)); + return _db.Execute(TS.MRANGE, args).ParseMRangeResponse(); } /// @@ -653,7 +655,7 @@ public async Task> RevRangeAsync(string key, var args = TimeSeriesAux.BuildMultiRangeArgs(fromTimeStamp, toTimeStamp, filter, latest, filterByTs, filterByValue, withLabels, selectLabels, count, align, aggregation, timeBucket, bt, empty, groupbyTuple); - return ResponseParser.ParseMRangeResponse(await _db.ExecuteAsync(TS.MRANGE, args)); + return (await _db.ExecuteAsync(TS.MRANGE, args)).ParseMRangeResponse(); } /// @@ -699,7 +701,7 @@ public async Task> RevRangeAsync(string key, var args = TimeSeriesAux.BuildMultiRangeArgs(fromTimeStamp, toTimeStamp, filter, latest, filterByTs, filterByValue, withLabels, selectLabels, count, align, aggregation, timeBucket, bt, empty, groupbyTuple); - return ResponseParser.ParseMRangeResponse(_db.Execute(TS.MREVRANGE, args)); + return _db.Execute(TS.MREVRANGE, args).ParseMRangeResponse(); } /// @@ -745,7 +747,7 @@ public async Task> RevRangeAsync(string key, var args = TimeSeriesAux.BuildMultiRangeArgs(fromTimeStamp, toTimeStamp, filter, latest, filterByTs, filterByValue, withLabels, selectLabels, count, align, aggregation, timeBucket, bt, empty, groupbyTuple); - return ResponseParser.ParseMRangeResponse(await _db.ExecuteAsync(TS.MREVRANGE, args)); + return (await _db.ExecuteAsync(TS.MREVRANGE, args)).ParseMRangeResponse(); } #endregion @@ -761,8 +763,9 @@ public async Task> RevRangeAsync(string key, /// public TimeSeriesInformation Info(string key, bool debug = false) { - return ResponseParser.ToTimeSeriesInfo((debug) ? _db.Execute(TS.INFO, key, TimeSeriesArgs.DEBUG) - : _db.Execute(TS.INFO, key)); + var result = (debug) ? 
_db.Execute(TS.INFO, key, TimeSeriesArgs.DEBUG) + : _db.Execute(TS.INFO, key); + return result.ToTimeSeriesInfo(); } /// @@ -774,8 +777,9 @@ public TimeSeriesInformation Info(string key, bool debug = false) /// public async Task InfoAsync(string key, bool debug = false) { - return ResponseParser.ToTimeSeriesInfo(await ((debug) ? _db.ExecuteAsync(TS.INFO, key, TimeSeriesArgs.DEBUG) + var result = (await ((debug) ? _db.ExecuteAsync(TS.INFO, key, TimeSeriesArgs.DEBUG) : _db.ExecuteAsync(TS.INFO, key))); + return result.ToTimeSeriesInfo(); } /// @@ -787,7 +791,7 @@ public async Task InfoAsync(string key, bool debug = fals public IReadOnlyList QueryIndex(IReadOnlyCollection filter) { var args = new List(filter); - return ResponseParser.ToStringArray(_db.Execute(TS.QUERYINDEX, args)); + return _db.Execute(TS.QUERYINDEX, args).ToStringArray(); } /// @@ -799,7 +803,7 @@ public IReadOnlyList QueryIndex(IReadOnlyCollection filter) public async Task> QueryIndexAsync(IReadOnlyCollection filter) { var args = new List(filter); - return ResponseParser.ToStringArray(await _db.ExecuteAsync(TS.QUERYINDEX, args)); + return (await _db.ExecuteAsync(TS.QUERYINDEX, args)).ToStringArray(); } #endregion diff --git a/src/NRedisStack/TopK/TopKCommands.cs b/src/NRedisStack/TopK/TopKCommands.cs index 71cba07b..477a30c8 100644 --- a/src/NRedisStack/TopK/TopKCommands.cs +++ b/src/NRedisStack/TopK/TopKCommands.cs @@ -21,14 +21,14 @@ public TopKCommands(IDatabase db) // /// // public RedisResult[] Add(RedisKey key, RedisValue item) // { - // return ResponseParser.ToArray(_db.Execute(TOPK.ADD, key, item)); + // return _db.Execute(TOPK.ADD, key, item).ToArray(); // } // /// // public async Task AddAsync(RedisKey key, RedisValue item) // { // var result = await _db.ExecuteAsync(TOPK.ADD, key, item); - // return ResponseParser.ToArray(result); + // return result.ToArray(); // } /// @@ -68,14 +68,14 @@ public TopKCommands(IDatabase db) // /// // public long Count(RedisKey key, RedisValue item) // { - // return ResponseParser.ToLong(_db.Execute(TOPK.COUNT, key, item)); + // return _db.Execute(TOPK.COUNT, key, item).ToLong(); // } // /// // public async Task CountAsync(RedisKey key, RedisValue item) // { // var result = await _db.ExecuteAsync(TOPK.COUNT, key, item); - // return ResponseParser.ToLong(result); + // return result.ToLong(); // } /// @@ -91,7 +91,7 @@ public long[] Count(RedisKey key, params RedisValue[] items) throw new ArgumentOutOfRangeException(nameof(items)); var args = Auxiliary.MergeArgs(key, items); - return ResponseParser.ToLongArray(_db.Execute(TOPK.COUNT, args)); + return _db.Execute(TOPK.COUNT, args).ToLongArray(); } /// @@ -102,7 +102,7 @@ public async Task CountAsync(RedisKey key, params RedisValue[] items) var args = Auxiliary.MergeArgs(key, items); var result = await _db.ExecuteAsync(TOPK.COUNT, args); - return ResponseParser.ToLongArray(result); + return result.ToLongArray(); } @@ -125,7 +125,7 @@ public RedisResult[] IncrBy(RedisKey key, params Tuple[] itemI args.Add(pair.Item1); args.Add(pair.Item2); } - return ResponseParser.ToArray(_db.Execute(TOPK.INCRBY, args)); + return _db.Execute(TOPK.INCRBY, args).ToArray(); } /// @@ -149,7 +149,7 @@ public async Task IncrByAsync(RedisKey key, params Tuple @@ -161,7 +161,7 @@ public async Task IncrByAsync(RedisKey key, params Tuple @@ -173,7 +173,7 @@ public TopKInformation Info(RedisKey key) public async Task InfoAsync(RedisKey key) { var info = await _db.ExecuteAsync(TOPK.INFO, key); - return ResponseParser.ToTopKInfo(info); + return info.ToTopKInfo(); } 
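// Illustrative sketch (hypothetical helper, not from the NRedisStack source): the TopK hunks
// around this point share one shape for variadic commands — guard the params array, merge the
// key and items into a single argument list, execute, then parse the reply with the RedisResult
// extensions. Assumes Auxiliary.MergeArgs and the parser extensions are publicly reachable via
// the NRedisStack namespace.
using System;
using NRedisStack;
using StackExchange.Redis;

public static class TopKCountSketch
{
    public static long[] CountItems(IDatabase db, RedisKey key, params RedisValue[] items)
    {
        if (items.Length < 1)
            throw new ArgumentOutOfRangeException(nameof(items)); // same guard as the methods above
        var args = Auxiliary.MergeArgs(key, items);               // [key, item1, item2, ...]
        return db.Execute("TOPK.COUNT", args).ToLongArray();      // fluent parse via the extension
    }
}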
/// @@ -187,7 +187,7 @@ public RedisResult[] List(RedisKey key, bool withcount = false) { var result = (withcount) ? _db.Execute(TOPK.LIST, key, "WITHCOUNT") : _db.Execute(TOPK.LIST, key); - return ResponseParser.ToArray(result); + return result.ToArray(); } /// @@ -201,7 +201,7 @@ public async Task ListAsync(RedisKey key, bool withcount = false) { var result = await ((withcount) ? _db.ExecuteAsync(TOPK.LIST, key, "WITHCOUNT") : _db.ExecuteAsync(TOPK.LIST, key)); - return ResponseParser.ToArray(result); + return result.ToArray(); } /// @@ -243,7 +243,7 @@ public bool[] Query(RedisKey key, params RedisValue[] items) var args = Auxiliary.MergeArgs(key, items); - return ResponseParser.ToBooleanArray(_db.Execute(TOPK.QUERY, args)); + return _db.Execute(TOPK.QUERY, args).ToBooleanArray(); } /// @@ -261,7 +261,7 @@ public async Task QueryAsync(RedisKey key, params RedisValue[] items) var args = Auxiliary.MergeArgs(key, items); var result = await _db.ExecuteAsync(TOPK.QUERY, args); - return ResponseParser.ToBooleanArray(result); + return result.ToBooleanArray(); } /// @@ -276,7 +276,7 @@ public async Task QueryAsync(RedisKey key, params RedisValue[] items) /// public bool Reserve(RedisKey key, long topk, long width = 7, long depth = 8, double decay = 0.9) { - return ResponseParser.OKtoBoolean(_db.Execute(TOPK.RESERVE, key, topk, width, depth, decay)); + return _db.Execute(TOPK.RESERVE, key, topk, width, depth, decay).OKtoBoolean(); } /// @@ -292,7 +292,7 @@ public bool Reserve(RedisKey key, long topk, long width = 7, long depth = 8, dou public async Task ReserveAsync(RedisKey key, long topk, long width = 7, long depth = 8, double decay = 0.9) { var result = await _db.ExecuteAsync(TOPK.RESERVE, key, topk, width, depth, decay); - return ResponseParser.OKtoBoolean(result); + return result.OKtoBoolean(); } } } From 5951d7d6ec9bdc1adf986783b4ab8432d1bb0251 Mon Sep 17 00:00:00 2001 From: shacharPash Date: Wed, 31 Aug 2022 17:50:43 +0300 Subject: [PATCH 4/7] Extract common logic to new BloomAux Class --- src/NRedisStack/Bloom/BloomAux.cs | 79 +++++++++++++++++++++++++++++++ 1 file changed, 79 insertions(+) create mode 100644 src/NRedisStack/Bloom/BloomAux.cs diff --git a/src/NRedisStack/Bloom/BloomAux.cs b/src/NRedisStack/Bloom/BloomAux.cs new file mode 100644 index 00000000..6dfaa923 --- /dev/null +++ b/src/NRedisStack/Bloom/BloomAux.cs @@ -0,0 +1,79 @@ +using System; +using System.Collections.Generic; +using NRedisStack.Literals; +using NRedisStack.Literals.Enums; +using NRedisStack.DataTypes; +using NRedisStack.Extensions; +using StackExchange.Redis; + +namespace NRedisStack +{ + public static class BloomAux + { + public static List BuildInsertArgs(RedisKey key, RedisValue[] items, int? capacity, + double? error, int? expansion, bool nocreate, bool nonscaling) + { + var args = new List { key }; + args.AddCapacity(capacity); + args.AddError(error); + args.AddExpansion(expansion); + args.AddNoCreate(nocreate); + args.AddNoScaling(nonscaling); + args.AddItems(items); + + return args; + } + + private static void AddItems(this List args, RedisValue[] items) + { + args.Add(BloomArgs.ITEMS); + foreach (var item in items) + { + args.Add(item); + } + } + + private static void AddNoScaling(this List args, bool nonscaling) + { + if (nonscaling) + { + args.Add(BloomArgs.NONSCALING); + } + } + + private static void AddNoCreate(this List args, bool nocreate) + { + if (nocreate) + { + args.Add(BloomArgs.NOCREATE); + } + } + + private static void AddExpansion(this List args, int? 
expansion) + { + if (expansion != null) + { + args.Add(BloomArgs.EXPANSION); + args.Add(expansion); + } + } + + private static void AddError(this List args, double? error) + { + if (error != null) + { + args.Add(BloomArgs.ERROR); + args.Add(error); + } + } + + private static void AddCapacity(this List args, int? capacity) + { + if (capacity != null) + { + args.Add(BloomArgs.CAPACITY); + args.Add(capacity); + } + } + } +} From 023fa3b3fd616f9a5abbea2d7f4e8503ea1e5820 Mon Sep 17 00:00:00 2001 From: shacharPash Date: Wed, 31 Aug 2022 17:53:26 +0300 Subject: [PATCH 5/7] Shorting Bloom Insert Code --- src/NRedisStack/Bloom/BloomCommands.cs | 78 +------------------ src/NRedisStack/CountMinSketch/CmsCommands.cs | 4 +- src/NRedisStack/Tdigest/TdigestCommands.cs | 20 ++--- src/NRedisStack/TopK/TopKCommands.cs | 4 +- 4 files changed, 17 insertions(+), 89 deletions(-) diff --git a/src/NRedisStack/Bloom/BloomCommands.cs b/src/NRedisStack/Bloom/BloomCommands.cs index df4d4ed6..dfc96deb 100644 --- a/src/NRedisStack/Bloom/BloomCommands.cs +++ b/src/NRedisStack/Bloom/BloomCommands.cs @@ -107,48 +107,11 @@ public bool[] Insert(RedisKey key, RedisValue[] items, int? capacity = null, double? error = null, int? expansion = null, bool nocreate = false, bool nonscaling = false) { - // TODO: extract common logic to a new method if (items.Length < 1) throw new ArgumentOutOfRangeException(nameof(items)); - List args = new List { key }; - - if (capacity != null) - { - args.Add(BloomArgs.CAPACITY); - args.Add(capacity); - } - - - if (error != null) - { - args.Add(BloomArgs.ERROR); - args.Add(error); - } - - if (expansion != null) - { - args.Add(BloomArgs.EXPANSION); - args.Add(expansion); - } - - if (nocreate) - { - args.Add(BloomArgs.NOCREATE); - - } - - if (nonscaling) - { - args.Add(BloomArgs.NONSCALING); - } - - args.Add(BloomArgs.ITEMS); - foreach (var item in items) - { - args.Add(item); - } - + var args = BloomAux.BuildInsertArgs(key, items, capacity, error, expansion, nocreate, nonscaling); + return _db.Execute(BF.INSERT, args).ToBooleanArray(); } @@ -175,42 +138,7 @@ public async Task InsertAsync(RedisKey key, RedisValue[] items, int? 
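// Illustrative sketch (hypothetical call, not from the NRedisStack source): after this commit the
// sync and async BF.INSERT paths both delegate argument building to BloomAux.BuildInsertArgs,
// which appends only the options that were actually set, then ITEMS and the items themselves.
// The key name and option values below are made up for illustration.
using NRedisStack;
using StackExchange.Redis;

public static class BloomInsertArgsSketch
{
    public static void Show()
    {
        RedisValue[] items = { "foo", "bar" };
        var args = BloomAux.BuildInsertArgs("bf:demo", items,
            capacity: 1000, error: 0.01, expansion: null,
            nocreate: false, nonscaling: true);
        // args now holds: bf:demo, CAPACITY, 1000, ERROR, 0.01, NONSCALING, ITEMS, foo, bar
        // (EXPANSION and NOCREATE are omitted because they were not requested).
    }
}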
cap if (items.Length < 1) throw new ArgumentOutOfRangeException(nameof(items)); - List args = new List { key }; - - if (capacity != null) - { - args.Add(BloomArgs.CAPACITY); - args.Add(capacity); - } - - if (error != null) - { - args.Add(BloomArgs.ERROR); - args.Add(error); - } - - if (expansion != null) - { - args.Add(BloomArgs.EXPANSION); - args.Add(expansion); - } - - if (nocreate) - { - args.Add(BloomArgs.NOCREATE); - - } - - if (nonscaling) - { - args.Add(BloomArgs.NONSCALING); - } - - args.Add(BloomArgs.ITEMS); - foreach (var item in items) - { - args.Add(item); - } + var args = BloomAux.BuildInsertArgs(key, items, capacity, error, expansion, nocreate, nonscaling); var result = await _db.ExecuteAsync(BF.INSERT, args); return result.ToBooleanArray(); diff --git a/src/NRedisStack/CountMinSketch/CmsCommands.cs b/src/NRedisStack/CountMinSketch/CmsCommands.cs index 046aafa0..2e41c866 100644 --- a/src/NRedisStack/CountMinSketch/CmsCommands.cs +++ b/src/NRedisStack/CountMinSketch/CmsCommands.cs @@ -50,7 +50,7 @@ public async Task IncrByAsync(RedisKey key, RedisValue item, long incremen public long[] IncrBy(RedisKey key, Tuple[] itemIncrements) { if (itemIncrements.Length < 1) - throw new ArgumentException(nameof(itemIncrements)); + throw new ArgumentOutOfRangeException(nameof(itemIncrements)); List args = new List { key }; foreach (var pair in itemIncrements) @@ -72,7 +72,7 @@ public long[] IncrBy(RedisKey key, Tuple[] itemIncrements) public async Task IncrByAsync(RedisKey key, Tuple[] itemIncrements) { if (itemIncrements.Length < 1) - throw new ArgumentException(nameof(itemIncrements)); + throw new ArgumentOutOfRangeException(nameof(itemIncrements)); List args = new List { key }; foreach (var pair in itemIncrements) diff --git a/src/NRedisStack/Tdigest/TdigestCommands.cs b/src/NRedisStack/Tdigest/TdigestCommands.cs index 1d2954c2..ebe413cd 100644 --- a/src/NRedisStack/Tdigest/TdigestCommands.cs +++ b/src/NRedisStack/Tdigest/TdigestCommands.cs @@ -22,7 +22,7 @@ public TdigestCommands(IDatabase db) /// public bool Add(RedisKey key, double item, double weight) { - if (weight < 0) throw new ArgumentException(nameof(weight)); + if (weight < 0) throw new ArgumentOutOfRangeException(nameof(weight)); return _db.Execute(TDIGEST.ADD, key, item, weight).OKtoBoolean(); } @@ -37,7 +37,7 @@ public bool Add(RedisKey key, double item, double weight) /// public async Task AddAsync(RedisKey key, double item, double weight) { - if (weight < 0) throw new ArgumentException(nameof(weight)); + if (weight < 0) throw new ArgumentOutOfRangeException(nameof(weight)); var result = await _db.ExecuteAsync(TDIGEST.ADD, key, item, weight); return result.OKtoBoolean(); @@ -59,7 +59,7 @@ public bool Add(RedisKey key, params Tuple[] valueWeight) foreach (var pair in valueWeight) { - if (pair.Item2 < 0) throw new ArgumentException(nameof(pair.Item2)); + if (pair.Item2 < 0) throw new ArgumentOutOfRangeException(nameof(pair.Item2)); args.Add(pair.Item1); args.Add(pair.Item2); } @@ -82,7 +82,7 @@ public async Task AddAsync(RedisKey key, params Tuple[] va foreach (var pair in valueWeight) { - if (pair.Item2 < 0) throw new ArgumentException(nameof(pair.Item2)); + if (pair.Item2 < 0) throw new ArgumentOutOfRangeException(nameof(pair.Item2)); args.Add(pair.Item1); args.Add(pair.Item2); } @@ -239,7 +239,7 @@ public async Task MergeAsync(RedisKey destinationKey, RedisKey sourceKey) /// public bool Merge(RedisKey destinationKey, params RedisKey[] sourceKeys) { - if (sourceKeys.Length < 1) throw new 
ArgumentException(nameof(sourceKeys)); + if (sourceKeys.Length < 1) throw new ArgumentOutOfRangeException(nameof(sourceKeys)); var args = sourceKeys.ToList(); args.Insert(0, destinationKey); @@ -256,7 +256,7 @@ public bool Merge(RedisKey destinationKey, params RedisKey[] sourceKeys) /// public async Task MergeAsync(RedisKey destinationKey, params RedisKey[] sourceKeys) { - if (sourceKeys.Length < 1) throw new ArgumentException(nameof(sourceKeys)); + if (sourceKeys.Length < 1) throw new ArgumentOutOfRangeException(nameof(sourceKeys)); var args = sourceKeys.ToList(); args.Insert(0, destinationKey); @@ -276,7 +276,7 @@ public async Task MergeAsync(RedisKey destinationKey, params RedisKey[] so /// public bool MergeStore(RedisKey destinationKey, long numkeys, long compression = 100, params RedisKey[] sourceKeys) { - if (sourceKeys.Length < 1) throw new ArgumentException(nameof(sourceKeys)); + if (sourceKeys.Length < 1) throw new ArgumentOutOfRangeException(nameof(sourceKeys)); var args = new List { destinationKey, numkeys }; foreach (var key in sourceKeys) args.Add(key); @@ -297,7 +297,7 @@ public bool MergeStore(RedisKey destinationKey, long numkeys, long compression = /// public async Task MergeStoreAsync(RedisKey destinationKey, long numkeys, long compression = 100, params RedisKey[] sourceKeys) { - if (sourceKeys.Length < 1) throw new ArgumentException(nameof(sourceKeys)); + if (sourceKeys.Length < 1) throw new ArgumentOutOfRangeException(nameof(sourceKeys)); var args = new List { destinationKey, numkeys }; foreach (var key in sourceKeys) args.Add(key); @@ -318,7 +318,7 @@ public async Task MergeStoreAsync(RedisKey destinationKey, long numkeys, l /// public double[] Quantile(RedisKey key, params double[] quantile) { - if (quantile.Length < 1) throw new ArgumentException(nameof(quantile)); + if (quantile.Length < 1) throw new ArgumentOutOfRangeException(nameof(quantile)); var args = new List { key }; foreach (var q in quantile) args.Add(q); @@ -336,7 +336,7 @@ public double[] Quantile(RedisKey key, params double[] quantile) /// public async Task QuantileAsync(RedisKey key, params double[] quantile) { - if (quantile.Length < 1) throw new ArgumentException(nameof(quantile)); + if (quantile.Length < 1) throw new ArgumentOutOfRangeException(nameof(quantile)); var args = new List { key }; foreach (var q in quantile) args.Add(q); diff --git a/src/NRedisStack/TopK/TopKCommands.cs b/src/NRedisStack/TopK/TopKCommands.cs index 477a30c8..24031840 100644 --- a/src/NRedisStack/TopK/TopKCommands.cs +++ b/src/NRedisStack/TopK/TopKCommands.cs @@ -117,7 +117,7 @@ public async Task CountAsync(RedisKey key, params RedisValue[] items) public RedisResult[] IncrBy(RedisKey key, params Tuple[] itemIncrements) { if (itemIncrements.Length < 1) - throw new ArgumentException(nameof(itemIncrements)); + throw new ArgumentOutOfRangeException(nameof(itemIncrements)); List args = new List { key }; foreach (var pair in itemIncrements) @@ -139,7 +139,7 @@ public RedisResult[] IncrBy(RedisKey key, params Tuple[] itemI public async Task IncrByAsync(RedisKey key, params Tuple[] itemIncrements) { if (itemIncrements.Length < 1) - throw new ArgumentException(nameof(itemIncrements)); + throw new ArgumentOutOfRangeException(nameof(itemIncrements)); List args = new List { key }; foreach (var pair in itemIncrements) From 8b1cb62c974c1e0d9992c28da9777e9059643142 Mon Sep 17 00:00:00 2001 From: shacharPash Date: Wed, 31 Aug 2022 18:07:17 +0300 Subject: [PATCH 6/7] Change Literal classes to contain const strings instead of methods that 
returns the literal string --- src/NRedisStack/Bloom/Literals/Commands.cs | 18 +++---- .../CountMinSketch/Literals/CommandArgs.cs | 8 +-- .../CountMinSketch/Literals/Commands.cs | 12 ++--- .../CuckooFilter/Literals/CommandArgs.cs | 12 ++--- .../CuckooFilter/Literals/Commands.cs | 24 ++++----- src/NRedisStack/Json/Literals/CommandArgs.cs | 6 +-- src/NRedisStack/Json/Literals/Commands.cs | 48 +++++++++--------- src/NRedisStack/Search/Literals/Commands.cs | 50 +++++++++---------- .../Tdigest/Literals/CommandArgs.cs | 2 +- src/NRedisStack/Tdigest/Literals/Commands.cs | 22 ++++---- .../TimeSeries/Literals/CommandArgs.cs | 42 ++++++++-------- .../TimeSeries/Literals/Commands.cs | 34 ++++++------- src/NRedisStack/TopK/Literals/Commands.cs | 14 +++--- 13 files changed, 143 insertions(+), 149 deletions(-) diff --git a/src/NRedisStack/Bloom/Literals/Commands.cs b/src/NRedisStack/Bloom/Literals/Commands.cs index d664647f..48092b42 100644 --- a/src/NRedisStack/Bloom/Literals/Commands.cs +++ b/src/NRedisStack/Bloom/Literals/Commands.cs @@ -2,14 +2,14 @@ { internal class BF { - public static string ADD => "BF.ADD"; - public static string EXISTS => "BF.EXISTS"; - public static string INFO => "BF.INFO"; - public static string INSERT => "BF.INSERT"; - public static string LOADCHUNK => "BF.LOADCHUNK"; - public static string MADD => "BF.MADD"; - public static string MEXISTS => "BF.MEXISTS"; - public static string RESERVE => "BF.RESERVE"; - public static string SCANDUMP => "BF.SCANDUMP"; + public const string ADD = "BF.ADD"; + public const string EXISTS = "BF.EXISTS"; + public const string INFO = "BF.INFO"; + public const string INSERT = "BF.INSERT"; + public const string LOADCHUNK = "BF.LOADCHUNK"; + public const string MADD = "BF.MADD"; + public const string MEXISTS = "BF.MEXISTS"; + public const string RESERVE = "BF.RESERVE"; + public const string SCANDUMP = "BF.SCANDUMP"; } } \ No newline at end of file diff --git a/src/NRedisStack/CountMinSketch/Literals/CommandArgs.cs b/src/NRedisStack/CountMinSketch/Literals/CommandArgs.cs index bab74ae9..5f9eb298 100644 --- a/src/NRedisStack/CountMinSketch/Literals/CommandArgs.cs +++ b/src/NRedisStack/CountMinSketch/Literals/CommandArgs.cs @@ -2,12 +2,6 @@ namespace NRedisStack.Literals { internal class CmsArgs { - public static string WEIGHTS => "WEIGHTS"; - // public static string CAPACITY => "CAPACITY"; - // public static string EXPANSION => "EXPANSION"; - // public static string NOCREATE => "NOCREATE"; - // public static string ITEMS => "ITEMS"; - // public static string BUCKETSIZE => "BUCKETSIZE"; - // public static string MAXITERATIONS => "MAXITERATIONS"; + public const string WEIGHTS = "WEIGHTS"; } } \ No newline at end of file diff --git a/src/NRedisStack/CountMinSketch/Literals/Commands.cs b/src/NRedisStack/CountMinSketch/Literals/Commands.cs index 1a0517e5..d90af8f3 100644 --- a/src/NRedisStack/CountMinSketch/Literals/Commands.cs +++ b/src/NRedisStack/CountMinSketch/Literals/Commands.cs @@ -2,11 +2,11 @@ { internal class CMS { - public static string INITBYDIM => "CMS.INITBYDIM"; - public static string INITBYPROB => "CMS.INITBYPROB"; - public static string INCRBY => "CMS.INCRBY"; - public static string QUERY => "CMS.QUERY"; - public static string MERGE => "CMS.MERGE"; - public static string INFO => "CMS.INFO"; + public const string INITBYDIM = "CMS.INITBYDIM"; + public const string INITBYPROB = "CMS.INITBYPROB"; + public const string INCRBY = "CMS.INCRBY"; + public const string QUERY = "CMS.QUERY"; + public const string MERGE = "CMS.MERGE"; + public const 
string INFO = "CMS.INFO"; } } \ No newline at end of file diff --git a/src/NRedisStack/CuckooFilter/Literals/CommandArgs.cs b/src/NRedisStack/CuckooFilter/Literals/CommandArgs.cs index 1d144237..5c7fa3a2 100644 --- a/src/NRedisStack/CuckooFilter/Literals/CommandArgs.cs +++ b/src/NRedisStack/CuckooFilter/Literals/CommandArgs.cs @@ -2,11 +2,11 @@ namespace NRedisStack.Literals { internal class CuckooArgs { - public static string CAPACITY => "CAPACITY"; - public static string EXPANSION => "EXPANSION"; - public static string NOCREATE => "NOCREATE"; - public static string ITEMS => "ITEMS"; - public static string BUCKETSIZE => "BUCKETSIZE"; - public static string MAXITERATIONS => "MAXITERATIONS"; + public const string CAPACITY = "CAPACITY"; + public const string EXPANSION = "EXPANSION"; + public const string NOCREATE = "NOCREATE"; + public const string ITEMS = "ITEMS"; + public const string BUCKETSIZE = "BUCKETSIZE"; + public const string MAXITERATIONS = "MAXITERATIONS"; } } \ No newline at end of file diff --git a/src/NRedisStack/CuckooFilter/Literals/Commands.cs b/src/NRedisStack/CuckooFilter/Literals/Commands.cs index e79b03cc..dd68e75a 100644 --- a/src/NRedisStack/CuckooFilter/Literals/Commands.cs +++ b/src/NRedisStack/CuckooFilter/Literals/Commands.cs @@ -2,17 +2,17 @@ { internal class CF { - public static string RESERVE => "CF.RESERVE"; - public static string ADD => "CF.ADD"; - public static string ADDNX => "CF.ADDNX"; - public static string INSERT => "CF.INSERT"; - public static string INSERTNX => "CF.INSERTNX"; - public static string EXISTS => "CF.EXISTS"; - public static string MEXISTS => "CF.MEXISTS"; - public static string DEL => "CF.DEL"; - public static string COUNT => "CF.COUNT"; - public static string SCANDUMP => "CF.SCANDUMP"; - public static string LOADCHUNK => "CF.LOADCHUNK"; - public static string INFO => "CF.INFO"; + public const string RESERVE = "CF.RESERVE"; + public const string ADD = "CF.ADD"; + public const string ADDNX = "CF.ADDNX"; + public const string INSERT = "CF.INSERT"; + public const string INSERTNX = "CF.INSERTNX"; + public const string EXISTS = "CF.EXISTS"; + public const string MEXISTS = "CF.MEXISTS"; + public const string DEL = "CF.DEL"; + public const string COUNT = "CF.COUNT"; + public const string SCANDUMP = "CF.SCANDUMP"; + public const string LOADCHUNK = "CF.LOADCHUNK"; + public const string INFO = "CF.INFO"; } } \ No newline at end of file diff --git a/src/NRedisStack/Json/Literals/CommandArgs.cs b/src/NRedisStack/Json/Literals/CommandArgs.cs index 3403b53f..212d1843 100644 --- a/src/NRedisStack/Json/Literals/CommandArgs.cs +++ b/src/NRedisStack/Json/Literals/CommandArgs.cs @@ -2,9 +2,9 @@ namespace NRedisStack.Literals { internal class JsonArgs { - public static string INDENT => "INDENT"; - public static string NEWLINE => "NEWLINE"; - public static string SPACE => "SPACE"; + public const string INDENT = "INDENT"; + public const string NEWLINE = "NEWLINE"; + public const string SPACE = "SPACE"; } } \ No newline at end of file diff --git a/src/NRedisStack/Json/Literals/Commands.cs b/src/NRedisStack/Json/Literals/Commands.cs index b0f267d6..159a4c81 100644 --- a/src/NRedisStack/Json/Literals/Commands.cs +++ b/src/NRedisStack/Json/Literals/Commands.cs @@ -2,29 +2,29 @@ { internal class JSON { - public static string ARRAPPEND => "JSON.ARRAPPEND"; - public static string ARRINDEX => "JSON.ARRINDEX"; - public static string ARRINSERT => "JSON.ARRINSERT"; - public static string ARRLEN => "JSON.ARRLEN"; - public static string ARRPOP => "JSON.ARRPOP"; - public 
static string ARRTRIM => "JSON.ARRTRIM"; - public static string CLEAR => "JSON.CLEAR"; - public static string DEBUG => "JSON.DEBUG"; - public static string DEBUG_HELP => "JSON.DEBUG HELP"; - public static string DEBUG_MEMORY => "JSON.DEBUG MEMORY"; - public static string DEL => "JSON.DEL"; - public static string FORGET => "JSON.FORGET"; - public static string GET => "JSON.GET"; - public static string MGET => "JSON.MGET"; - public static string NUMINCRBY => "JSON.NUMINCRBY"; - public static string NUMMULTBY => "JSON.NUMMULTBY"; - public static string OBJKEYS => "JSON.OBJKEYS"; - public static string OBJLEN => "JSON.OBJLEN"; - public static string RESP => "JSON.RESP"; - public static string SET => "JSON.SET"; - public static string STRAPPEND => "JSON.STRAPPEND"; - public static string STRLEN => "JSON.STRLEN"; - public static string TOGGLE => "JSON.TOGGLE"; - public static string TYPE => "JSON.TYPE"; + public const string ARRAPPEND = "JSON.ARRAPPEND"; + public const string ARRINDEX = "JSON.ARRINDEX"; + public const string ARRINSERT = "JSON.ARRINSERT"; + public const string ARRLEN = "JSON.ARRLEN"; + public const string ARRPOP = "JSON.ARRPOP"; + public const string ARRTRIM = "JSON.ARRTRIM"; + public const string CLEAR = "JSON.CLEAR"; + public const string DEBUG = "JSON.DEBUG"; + public const string DEBUG_HELP = "JSON.DEBUG HELP"; + public const string DEBUG_MEMORY = "JSON.DEBUG MEMORY"; + public const string DEL = "JSON.DEL"; + public const string FORGET = "JSON.FORGET"; + public const string GET = "JSON.GET"; + public const string MGET = "JSON.MGET"; + public const string NUMINCRBY = "JSON.NUMINCRBY"; + public const string NUMMULTBY = "JSON.NUMMULTBY"; + public const string OBJKEYS = "JSON.OBJKEYS"; + public const string OBJLEN = "JSON.OBJLEN"; + public const string RESP = "JSON.RESP"; + public const string SET = "JSON.SET"; + public const string STRAPPEND = "JSON.STRAPPEND"; + public const string STRLEN = "JSON.STRLEN"; + public const string TOGGLE = "JSON.TOGGLE"; + public const string TYPE = "JSON.TYPE"; } } \ No newline at end of file diff --git a/src/NRedisStack/Search/Literals/Commands.cs b/src/NRedisStack/Search/Literals/Commands.cs index 595f6912..505ad479 100644 --- a/src/NRedisStack/Search/Literals/Commands.cs +++ b/src/NRedisStack/Search/Literals/Commands.cs @@ -2,30 +2,30 @@ { internal class FT { - public static string _LIST => "FT._LIST"; - public static string AGGREGATE => "FT.AGGREGATE"; - public static string ALIASADD => "FT.ALIASADD"; - public static string ALIASDEL => "FT.ALIASDEL"; - public static string ALIASUPDATE => "FT.ALIASUPDATE"; - public static string ALTER => "FT.ALTER"; - public static string CONFIG_GET => "FT.CONFIG GET"; - public static string CONFIG_HELP => "FT.CONFIG HELP"; - public static string CONFIG_SET => "FT.CONFIG SET"; - public static string CREATE => "FT.CREATE"; - public static string CURSOR_DEL => "FT.CURSOR DEL"; - public static string CURSOR_READ => "FT.CURSOR READ"; - public static string DICTADD => "FT.DICTADD"; - public static string DICTDEL => "FT.DICTDEL"; - public static string DICTDUMP => "FT.DICTDUMP"; - public static string DROPINDEX => "FT.DROPINDEX"; - public static string EXPLAIN => "FT.EXPLAIN"; - public static string EXPLAINCLI => "FT.EXPLAINCLI"; - public static string INFO => "FT.INFO"; - public static string PROFILE => "FT.PROFILE"; - public static string SEARCH => "FT.SEARCH"; - public static string SPELLCHECK => "FT.SPELLCHECK"; - public static string SYNDUMP => "FT.SYNDUMP"; - public static string SYNUPDATE => "FT.SYNUPDATE"; - 
public static string TAGVALS => "FT.TAGVALS"; + public const string _LIST = "FT._LIST"; + public const string AGGREGATE = "FT.AGGREGATE"; + public const string ALIASADD = "FT.ALIASADD"; + public const string ALIASDEL = "FT.ALIASDEL"; + public const string ALIASUPDATE = "FT.ALIASUPDATE"; + public const string ALTER = "FT.ALTER"; + public const string CONFIG_GET = "FT.CONFIG GET"; + public const string CONFIG_HELP = "FT.CONFIG HELP"; + public const string CONFIG_SET = "FT.CONFIG SET"; + public const string CREATE = "FT.CREATE"; + public const string CURSOR_DEL = "FT.CURSOR DEL"; + public const string CURSOR_READ = "FT.CURSOR READ"; + public const string DICTADD = "FT.DICTADD"; + public const string DICTDEL = "FT.DICTDEL"; + public const string DICTDUMP = "FT.DICTDUMP"; + public const string DROPINDEX = "FT.DROPINDEX"; + public const string EXPLAIN = "FT.EXPLAIN"; + public const string EXPLAINCLI = "FT.EXPLAINCLI"; + public const string INFO = "FT.INFO"; + public const string PROFILE = "FT.PROFILE"; + public const string SEARCH = "FT.SEARCH"; + public const string SPELLCHECK = "FT.SPELLCHECK"; + public const string SYNDUMP = "FT.SYNDUMP"; + public const string SYNUPDATE = "FT.SYNUPDATE"; + public const string TAGVALS = "FT.TAGVALS"; } } \ No newline at end of file diff --git a/src/NRedisStack/Tdigest/Literals/CommandArgs.cs b/src/NRedisStack/Tdigest/Literals/CommandArgs.cs index ffcbfd00..04390e37 100644 --- a/src/NRedisStack/Tdigest/Literals/CommandArgs.cs +++ b/src/NRedisStack/Tdigest/Literals/CommandArgs.cs @@ -2,6 +2,6 @@ namespace NRedisStack.Literals { internal class TdigestArgs { - public static string COMPRESSION => "COMPRESSION"; + public const string COMPRESSION = "COMPRESSION"; } } \ No newline at end of file diff --git a/src/NRedisStack/Tdigest/Literals/Commands.cs b/src/NRedisStack/Tdigest/Literals/Commands.cs index 7baf2431..c9ef8167 100644 --- a/src/NRedisStack/Tdigest/Literals/Commands.cs +++ b/src/NRedisStack/Tdigest/Literals/Commands.cs @@ -2,16 +2,16 @@ { internal class TDIGEST { - public static string CREATE => "TDIGEST.CREATE"; - public static string RESET => "TDIGEST.RESET"; - public static string ADD => "TDIGEST.ADD"; - public static string MERGE => "TDIGEST.MERGE"; - public static string MERGESTORE => "TDIGEST.MERGESTORE"; - public static string MIN => "TDIGEST.MIN"; - public static string MAX => "TDIGEST.MAX"; - public static string QUANTILE => "TDIGEST.QUANTILE"; - public static string CDF => "TDIGEST.CDF"; - public static string TRIMMED_MEAN => "TDIGEST.TRIMMED_MEAN"; - public static string INFO => "TDIGEST.INFO"; + public const string CREATE = "TDIGEST.CREATE"; + public const string RESET = "TDIGEST.RESET"; + public const string ADD = "TDIGEST.ADD"; + public const string MERGE = "TDIGEST.MERGE"; + public const string MERGESTORE = "TDIGEST.MERGESTORE"; + public const string MIN = "TDIGEST.MIN"; + public const string MAX = "TDIGEST.MAX"; + public const string QUANTILE = "TDIGEST.QUANTILE"; + public const string CDF = "TDIGEST.CDF"; + public const string TRIMMED_MEAN = "TDIGEST.TRIMMED_MEAN"; + public const string INFO = "TDIGEST.INFO"; } } \ No newline at end of file diff --git a/src/NRedisStack/TimeSeries/Literals/CommandArgs.cs b/src/NRedisStack/TimeSeries/Literals/CommandArgs.cs index d807103a..7b17ed4a 100644 --- a/src/NRedisStack/TimeSeries/Literals/CommandArgs.cs +++ b/src/NRedisStack/TimeSeries/Literals/CommandArgs.cs @@ -2,26 +2,26 @@ namespace NRedisStack.Literals { internal class TimeSeriesArgs { - public static string RETENTION => "RETENTION"; - public 
static string LABELS => "LABELS"; - public static string UNCOMPRESSED => "UNCOMPRESSED"; - public static string COUNT => "COUNT"; - public static string AGGREGATION => "AGGREGATION"; - public static string ALIGN => "ALIGN"; - public static string FILTER => "FILTER"; - public static string WITHLABELS => "WITHLABELS"; - public static string SELECTEDLABELS => "SELECTED_LABELS"; - public static string TIMESTAMP => "TIMESTAMP"; - public static string CHUNK_SIZE => "CHUNK_SIZE"; - public static string DUPLICATE_POLICY => "DUPLICATE_POLICY"; - public static string ON_DUPLICATE => "ON_DUPLICATE"; - public static string GROPUBY => "GROUPBY"; - public static string REDUCE => "REDUCE"; - public static string FILTER_BY_TS => "FILTER_BY_TS"; - public static string FILTER_BY_VALUE => "FILTER_BY_VALUE"; - public static string LATEST => "LATEST"; - public static string DEBUG => "DEBUG"; - public static string BUCKETTIMESTAMP => "BUCKETTIMESTAMP"; - public static string EMPTY => "EMPTY"; + public const string RETENTION = "RETENTION"; + public const string LABELS = "LABELS"; + public const string UNCOMPRESSED = "UNCOMPRESSED"; + public const string COUNT = "COUNT"; + public const string AGGREGATION = "AGGREGATION"; + public const string ALIGN = "ALIGN"; + public const string FILTER = "FILTER"; + public const string WITHLABELS = "WITHLABELS"; + public const string SELECTEDLABELS = "SELECTED_LABELS"; + public const string TIMESTAMP = "TIMESTAMP"; + public const string CHUNK_SIZE = "CHUNK_SIZE"; + public const string DUPLICATE_POLICY = "DUPLICATE_POLICY"; + public const string ON_DUPLICATE = "ON_DUPLICATE"; + public const string GROPUBY = "GROUPBY"; + public const string REDUCE = "REDUCE"; + public const string FILTER_BY_TS = "FILTER_BY_TS"; + public const string FILTER_BY_VALUE = "FILTER_BY_VALUE"; + public const string LATEST = "LATEST"; + public const string DEBUG = "DEBUG"; + public const string BUCKETTIMESTAMP = "BUCKETTIMESTAMP"; + public const string EMPTY = "EMPTY"; } } diff --git a/src/NRedisStack/TimeSeries/Literals/Commands.cs b/src/NRedisStack/TimeSeries/Literals/Commands.cs index e288cf44..f0a719e4 100644 --- a/src/NRedisStack/TimeSeries/Literals/Commands.cs +++ b/src/NRedisStack/TimeSeries/Literals/Commands.cs @@ -2,22 +2,22 @@ { internal class TS { - public static string CREATE => "TS.CREATE"; - public static string ALTER => "TS.ALTER"; - public static string ADD => "TS.ADD"; - public static string MADD => "TS.MADD"; - public static string INCRBY => "TS.INCRBY"; - public static string DECRBY => "TS.DECRBY"; - public static string DEL => "TS.DEL"; - public static string CREATERULE => "TS.CREATERULE"; - public static string DELETERULE => "TS.DELETERULE"; - public static string RANGE => "TS.RANGE"; - public static string REVRANGE => "TS.REVRANGE"; - public static string MRANGE => "TS.MRANGE"; - public static string MREVRANGE => "TS.MREVRANGE"; - public static string GET => "TS.GET"; - public static string MGET => "TS.MGET"; - public static string INFO => "TS.INFO"; - public static string QUERYINDEX => "TS.QUERYINDEX"; + public const string CREATE = "TS.CREATE"; + public const string ALTER = "TS.ALTER"; + public const string ADD = "TS.ADD"; + public const string MADD = "TS.MADD"; + public const string INCRBY = "TS.INCRBY"; + public const string DECRBY = "TS.DECRBY"; + public const string DEL = "TS.DEL"; + public const string CREATERULE = "TS.CREATERULE"; + public const string DELETERULE = "TS.DELETERULE"; + public const string RANGE = "TS.RANGE"; + public const string REVRANGE = "TS.REVRANGE"; + 
public const string MRANGE = "TS.MRANGE"; + public const string MREVRANGE = "TS.MREVRANGE"; + public const string GET = "TS.GET"; + public const string MGET = "TS.MGET"; + public const string INFO = "TS.INFO"; + public const string QUERYINDEX = "TS.QUERYINDEX"; } } diff --git a/src/NRedisStack/TopK/Literals/Commands.cs b/src/NRedisStack/TopK/Literals/Commands.cs index a9aa8912..80281409 100644 --- a/src/NRedisStack/TopK/Literals/Commands.cs +++ b/src/NRedisStack/TopK/Literals/Commands.cs @@ -2,12 +2,12 @@ { internal class TOPK { - public static string RESERVE => "TOPK.RESERVE"; - public static string ADD => "TOPK.ADD"; - public static string INCRBY => "TOPK.INCRBY"; - public static string QUERY => "TOPK.QUERY"; - public static string COUNT => "TOPK.COUNT"; - public static string LIST => "TOPK.LIST"; - public static string INFO => "TOPK.INFO"; + public const string RESERVE = "TOPK.RESERVE"; + public const string ADD = "TOPK.ADD"; + public const string INCRBY = "TOPK.INCRBY"; + public const string QUERY = "TOPK.QUERY"; + public const string COUNT = "TOPK.COUNT"; + public const string LIST = "TOPK.LIST"; + public const string INFO = "TOPK.INFO"; } } \ No newline at end of file From a2bdcc8711648c1c003df330f8473ac0eebe7860 Mon Sep 17 00:00:00 2001 From: shacharPash Date: Wed, 31 Aug 2022 18:17:19 +0300 Subject: [PATCH 7/7] Add public void TestModulePrefixs() --- tests/NRedisStack.Tests/Bloom/BloomTests.cs | 36 +++++++++++++++++++ .../CountMinSketch/CmsTests.cs | 36 +++++++++++++++++++ .../CuckooFilter/CuckooTests.cs | 36 +++++++++++++++++++ tests/NRedisStack.Tests/Json/JsonTests.cs | 36 +++++++++++++++++++ tests/NRedisStack.Tests/Search/SearchTests.cs | 36 +++++++++++++++++++ .../NRedisStack.Tests/Tdigest/TdigestTests.cs | 36 +++++++++++++++++++ .../TimeSeries/TimeSeriesTests.cs | 36 +++++++++++++++++++ tests/NRedisStack.Tests/TopK/TopKTests.cs | 10 +++--- 8 files changed, 257 insertions(+), 5 deletions(-) diff --git a/tests/NRedisStack.Tests/Bloom/BloomTests.cs b/tests/NRedisStack.Tests/Bloom/BloomTests.cs index bbee25e3..33bd5135 100644 --- a/tests/NRedisStack.Tests/Bloom/BloomTests.cs +++ b/tests/NRedisStack.Tests/Bloom/BloomTests.cs @@ -317,4 +317,40 @@ public async Task TestScanDumpAndLoadChunkAsync() // check for existing items Assert.True(await bf.ExistsAsync("bloom-load", "a")); } + + + [Fact] + public void TestModulePrefixs() + { + IDatabase db1 = redisFixture.Redis.GetDatabase(); + IDatabase db2 = redisFixture.Redis.GetDatabase(); + + var bf1 = db1.FT(); + var bf2 = db2.FT(); + + Assert.NotEqual(bf1.GetHashCode(), bf2.GetHashCode()); + } + + [Fact] + public void TestModulePrefixs1() + { + { + var conn = ConnectionMultiplexer.Connect("localhost"); + IDatabase db = conn.GetDatabase(); + + var bf = db.FT(); + // ... + conn.Dispose(); + } + + { + var conn = ConnectionMultiplexer.Connect("localhost"); + IDatabase db = conn.GetDatabase(); + + var bf = db.FT(); + // ... 
+ conn.Dispose(); + } + + } } \ No newline at end of file diff --git a/tests/NRedisStack.Tests/CountMinSketch/CmsTests.cs b/tests/NRedisStack.Tests/CountMinSketch/CmsTests.cs index dbe3f46b..69d675d4 100644 --- a/tests/NRedisStack.Tests/CountMinSketch/CmsTests.cs +++ b/tests/NRedisStack.Tests/CountMinSketch/CmsTests.cs @@ -313,5 +313,41 @@ public async Task TestMergeAsync() var q5 = await cms.QueryAsync("C", new RedisValue[] { "foo", "bar", "baz" }); Assert.Equal(new long[] { 16L, 15L, 21L }, q5); } + + + [Fact] + public void TestModulePrefixs() + { + IDatabase db1 = redisFixture.Redis.GetDatabase(); + IDatabase db2 = redisFixture.Redis.GetDatabase(); + + var cms1 = db1.CMS(); + var cms2 = db2.CMS(); + + Assert.NotEqual(cms1.GetHashCode(), cms2.GetHashCode()); + } + + [Fact] + public void TestModulePrefixs1() + { + { + var conn = ConnectionMultiplexer.Connect("localhost"); + IDatabase db = conn.GetDatabase(); + + var cms = db.CMS(); + // ... + conn.Dispose(); + } + + { + var conn = ConnectionMultiplexer.Connect("localhost"); + IDatabase db = conn.GetDatabase(); + + var cms = db.CMS(); + // ... + conn.Dispose(); + } + + } } diff --git a/tests/NRedisStack.Tests/CuckooFilter/CuckooTests.cs b/tests/NRedisStack.Tests/CuckooFilter/CuckooTests.cs index ede6f88e..47bd95fb 100644 --- a/tests/NRedisStack.Tests/CuckooFilter/CuckooTests.cs +++ b/tests/NRedisStack.Tests/CuckooFilter/CuckooTests.cs @@ -363,4 +363,40 @@ public async Task TestScanDumpAndLoadChunkAsync() // check for existing items Assert.True(await cf.ExistsAsync("cuckoo-load", "a")); } + + + [Fact] + public void TestModulePrefixs() + { + IDatabase db1 = redisFixture.Redis.GetDatabase(); + IDatabase db2 = redisFixture.Redis.GetDatabase(); + + var cf1 = db1.CF(); + var cf2 = db2.CF(); + + Assert.NotEqual(cf1.GetHashCode(), cf2.GetHashCode()); + } + + [Fact] + public void TestModulePrefixs1() + { + { + var conn = ConnectionMultiplexer.Connect("localhost"); + IDatabase db = conn.GetDatabase(); + + var cf = db.CF(); + // ... + conn.Dispose(); + } + + { + var conn = ConnectionMultiplexer.Connect("localhost"); + IDatabase db = conn.GetDatabase(); + + var cf = db.CF(); + // ... + conn.Dispose(); + } + + } } \ No newline at end of file diff --git a/tests/NRedisStack.Tests/Json/JsonTests.cs b/tests/NRedisStack.Tests/Json/JsonTests.cs index c62876cd..33b0eaf7 100644 --- a/tests/NRedisStack.Tests/Json/JsonTests.cs +++ b/tests/NRedisStack.Tests/Json/JsonTests.cs @@ -68,4 +68,40 @@ public void TestJsonSetNotExist() // // throw new ArgumentNullException(nameof(result)); // Assert.Equal(result.ToString(), expected); // } + + + [Fact] + public void TestModulePrefixs() + { + IDatabase db1 = redisFixture.Redis.GetDatabase(); + IDatabase db2 = redisFixture.Redis.GetDatabase(); + + var json1 = db1.JSON(); + var json2 = db2.JSON(); + + Assert.NotEqual(json1.GetHashCode(), json2.GetHashCode()); + } + + [Fact] + public void TestModulePrefixs1() + { + { + var conn = ConnectionMultiplexer.Connect("localhost"); + IDatabase db = conn.GetDatabase(); + + var json = db.JSON(); + // ... + conn.Dispose(); + } + + { + var conn = ConnectionMultiplexer.Connect("localhost"); + IDatabase db = conn.GetDatabase(); + + var json = db.JSON(); + // ... 
+ conn.Dispose(); + } + + } } \ No newline at end of file diff --git a/tests/NRedisStack.Tests/Search/SearchTests.cs b/tests/NRedisStack.Tests/Search/SearchTests.cs index dd1ec794..ca963510 100644 --- a/tests/NRedisStack.Tests/Search/SearchTests.cs +++ b/tests/NRedisStack.Tests/Search/SearchTests.cs @@ -17,4 +17,40 @@ public void Dispose() redisFixture.Redis.GetDatabase().KeyDelete(key); } + + [Fact] + public void TestModulePrefixs() + { + IDatabase db1 = redisFixture.Redis.GetDatabase(); + IDatabase db2 = redisFixture.Redis.GetDatabase(); + + var ft1 = db1.FT(); + var ft2 = db2.FT(); + + Assert.NotEqual(ft1.GetHashCode(), ft2.GetHashCode()); + } + + [Fact] + public void TestModulePrefixs1() + { + { + var conn = ConnectionMultiplexer.Connect("localhost"); + IDatabase db = conn.GetDatabase(); + + var ft = db.FT(); + // ... + conn.Dispose(); + } + + { + var conn = ConnectionMultiplexer.Connect("localhost"); + IDatabase db = conn.GetDatabase(); + + var ft = db.FT(); + // ... + conn.Dispose(); + } + + } + } \ No newline at end of file diff --git a/tests/NRedisStack.Tests/Tdigest/TdigestTests.cs b/tests/NRedisStack.Tests/Tdigest/TdigestTests.cs index 5ad9d18c..f79cf474 100644 --- a/tests/NRedisStack.Tests/Tdigest/TdigestTests.cs +++ b/tests/NRedisStack.Tests/Tdigest/TdigestTests.cs @@ -382,6 +382,42 @@ public async Task TestTrimmedMeanAsync() Assert.Equal(14.5, await tdigest.TrimmedMeanAsync(key, 0.5, 1.0)); } + + [Fact] + public void TestModulePrefixs() + { + IDatabase db1 = redisFixture.Redis.GetDatabase(); + IDatabase db2 = redisFixture.Redis.GetDatabase(); + + var tdigest1 = db1.TDIGEST(); + var tdigest2 = db2.TDIGEST(); + + Assert.NotEqual(tdigest1.GetHashCode(), tdigest2.GetHashCode()); + } + + [Fact] + public void TestModulePrefixs1() + { + { + var conn = ConnectionMultiplexer.Connect("localhost"); + IDatabase db = conn.GetDatabase(); + + var tdigest = db.TDIGEST(); + // ... + conn.Dispose(); + } + + { + var conn = ConnectionMultiplexer.Connect("localhost"); + IDatabase db = conn.GetDatabase(); + + var tdigest = db.TDIGEST(); + // ... + conn.Dispose(); + } + + } + static Tuple RandomValueWeight() { Random random = new Random(); diff --git a/tests/NRedisStack.Tests/TimeSeries/TimeSeriesTests.cs b/tests/NRedisStack.Tests/TimeSeries/TimeSeriesTests.cs index 3b1942c9..da5221d7 100644 --- a/tests/NRedisStack.Tests/TimeSeries/TimeSeriesTests.cs +++ b/tests/NRedisStack.Tests/TimeSeries/TimeSeriesTests.cs @@ -26,4 +26,40 @@ public void Dispose() // //TimeSeriesInformation info = ts.Info(key); // } + + [Fact] + public void TestModulePrefixs() + { + IDatabase db1 = redisFixture.Redis.GetDatabase(); + IDatabase db2 = redisFixture.Redis.GetDatabase(); + + var ts1 = db1.TS(); + var ts2 = db2.TS(); + + Assert.NotEqual(ts1.GetHashCode(), ts2.GetHashCode()); + } + + [Fact] + public void TestModulePrefixs1() + { + { + var conn = ConnectionMultiplexer.Connect("localhost"); + IDatabase db = conn.GetDatabase(); + + var ts = db.TS(); + // ... + conn.Dispose(); + } + + { + var conn = ConnectionMultiplexer.Connect("localhost"); + IDatabase db = conn.GetDatabase(); + + var ts = db.TS(); + // ... 
+ conn.Dispose(); + } + + } + } \ No newline at end of file diff --git a/tests/NRedisStack.Tests/TopK/TopKTests.cs b/tests/NRedisStack.Tests/TopK/TopKTests.cs index 23041efe..0594e967 100644 --- a/tests/NRedisStack.Tests/TopK/TopKTests.cs +++ b/tests/NRedisStack.Tests/TopK/TopKTests.cs @@ -79,10 +79,10 @@ public void TestModulePrefixs() IDatabase db1 = redisFixture.Redis.GetDatabase(); IDatabase db2 = redisFixture.Redis.GetDatabase(); - var ft1 = db1.FT(); - var ft2 = db2.FT(); + var topk1 = db1.TOPK(); + var topk2 = db2.TOPK(); - Assert.NotEqual(ft1.GetHashCode(), ft2.GetHashCode()); + Assert.NotEqual(topk1.GetHashCode(), topk2.GetHashCode()); } [Fact] @@ -92,7 +92,7 @@ public void TestModulePrefixs1() var conn = ConnectionMultiplexer.Connect("localhost"); IDatabase db = conn.GetDatabase(); - var ft = db.FT(); + var topk = db.TOPK(); // ... conn.Dispose(); } @@ -101,7 +101,7 @@ public void TestModulePrefixs1() var conn = ConnectionMultiplexer.Connect("localhost"); IDatabase db = conn.GetDatabase(); - var ft = db.FT(); + var topk = db.TOPK(); // ... conn.Dispose(); }
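
Review note (not part of the patch): the TestModulePrefixs tests added across these files all exercise the same behaviour, namely that the module-prefix extension methods now build a fresh command wrapper on every call instead of returning a shared static instance. The snippet below is a minimal usage sketch, not code from this series: it assumes a Redis Stack server on localhost and the per-call extension methods introduced in ModulPrefixes.cs, and the class/method names here are hypothetical, chosen only to illustrate why the GetHashCode / instance-inequality assertions are expected to hold.

    // Hypothetical sketch, assuming the per-call db.TOPK() extension from this patch
    // and a Redis Stack instance reachable at localhost.
    using NRedisStack.RedisStackCommands;
    using StackExchange.Redis;

    public static class PrefixUsageSketch
    {
        public static void Main()
        {
            using var conn = ConnectionMultiplexer.Connect("localhost");
            IDatabase db = conn.GetDatabase();

            // Each call constructs a new lightweight wrapper bound to `db`,
            // rather than returning a cached static instance.
            var topk1 = db.TOPK();
            var topk2 = db.TOPK();
            System.Console.WriteLine(ReferenceEquals(topk1, topk2)); // expected: False
        }
    }

Presumably this is also what the TestModulePrefixs1 tests guard against: with the old cached statics, a wrapper created from the first (later disposed) ConnectionMultiplexer would have been handed back to callers using the second connection.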