Skip to content

Commit 155e2c5

Browse files
committed
fix #2740 reintroduce show_term_doc_count_error which went blatantly missing in 5.x
1 parent 0285c46 commit 155e2c5

File tree

6 files changed

+106
-68
lines changed

6 files changed

+106
-68
lines changed

src/Nest/Aggregations/AggregateJsonConverter.cs

Lines changed: 48 additions & 37 deletions
Original file line numberDiff line numberDiff line change
@@ -15,7 +15,8 @@ internal class AggregateJsonConverter : JsonConverter
1515

1616
public override bool CanWrite => false;
1717

18-
public override object ReadJson(JsonReader reader, Type objectType, object existingValue, JsonSerializer serializer) => this.ReadAggregate(reader, serializer);
18+
public override object ReadJson(JsonReader reader, Type objectType, object existingValue,
19+
JsonSerializer serializer) => this.ReadAggregate(reader, serializer);
1920

2021
public override bool CanConvert(Type objectType) => objectType == typeof(IAggregate);
2122

@@ -73,20 +74,20 @@ static AggregateJsonConverter()
7374
{
7475
AllReservedAggregationNames = typeof(Parser)
7576
.GetFields(BindingFlags.Static | BindingFlags.NonPublic | BindingFlags.Public)
76-
.Where(f=>f.IsLiteral && !f.IsInitOnly)
77+
.Where(f => f.IsLiteral && !f.IsInitOnly)
7778
#if DOTNETCORE
7879
.Select(f => (string)f.GetValue(null))
7980
#else
80-
.Select(f => (string)f.GetRawConstantValue())
81+
.Select(f => (string) f.GetRawConstantValue())
8182
#endif
8283
.ToArray();
8384

8485
var allKeys = string.Join(", ", AllReservedAggregationNames);
8586
UsingReservedAggNameFormat =
8687
"'{0}' is one of the reserved aggregation keywords"
87-
+ " we use a heuristics based response parser and using these reserved keywords"
88-
+ " could throw its heuritics off course. We are working on a solution in elasticsearch itself to make"
89-
+ " the response parsable. For now these are all the reserved keywords: "
88+
+ " we use a heuristics based response parser and using these reserved keywords"
89+
+ " could throw its heuritics off course. We are working on a solution in elasticsearch itself to make"
90+
+ " the response parsable. For now these are all the reserved keywords: "
9091
+ allKeys;
9192
}
9293

@@ -101,7 +102,7 @@ private IAggregate ReadAggregate(JsonReader reader, JsonSerializer serializer)
101102

102103
IAggregate aggregate = null;
103104

104-
var propertyName = (string)reader.Value;
105+
var propertyName = (string) reader.Value;
105106
if (_numeric.IsMatch(propertyName))
106107
aggregate = GetPercentilesAggregate(reader, serializer, oldFormat: true);
107108

@@ -115,7 +116,7 @@ private IAggregate ReadAggregate(JsonReader reader, JsonSerializer serializer)
115116
return aggregate;
116117
}
117118

118-
propertyName = (string)reader.Value;
119+
propertyName = (string) reader.Value;
119120
switch (propertyName)
120121
{
121122
case Parser.Values:
@@ -165,7 +166,7 @@ private IBucket ReadBucket(JsonReader reader, JsonSerializer serializer)
165166
return null;
166167

167168
IBucket item;
168-
var property = (string)reader.Value;
169+
var property = (string) reader.Value;
169170
switch (property)
170171
{
171172
case Parser.Key:
@@ -194,7 +195,7 @@ private Dictionary<string, object> GetMetadata(JsonReader reader)
194195
reader.Read();
195196
while (reader.TokenType != JsonToken.EndObject)
196197
{
197-
var key = (string)reader.Value;
198+
var key = (string) reader.Value;
198199
reader.Read();
199200
var value = reader.Value;
200201
meta.Add(key, value);
@@ -224,13 +225,13 @@ private IAggregate GetTopHitsAggregate(JsonReader reader, JsonSerializer seriali
224225
var maxScore = o[Parser.MaxScore].ToObject<double?>();
225226
var hits = o[Parser.Hits].Children().OfType<JObject>();
226227
reader.Read();
227-
return new TopHitsAggregate(hits, serializer) { Total = total, MaxScore = maxScore };
228+
return new TopHitsAggregate(hits, serializer) {Total = total, MaxScore = maxScore};
228229
}
229230

230231
private IAggregate GetGeoCentroidAggregate(JsonReader reader, JsonSerializer serializer)
231232
{
232233
reader.Read();
233-
var geoCentroid = new GeoCentroidAggregate { Location = serializer.Deserialize<GeoLocation>(reader) };
234+
var geoCentroid = new GeoCentroidAggregate {Location = serializer.Deserialize<GeoLocation>(reader)};
234235
reader.Read();
235236
return geoCentroid;
236237
}
@@ -269,15 +270,15 @@ private IAggregate GetPercentilesAggregate(JsonReader reader, JsonSerializer ser
269270
reader.Read();
270271
while (reader.TokenType != JsonToken.EndObject)
271272
{
272-
var propertyName = (string)reader.Value;
273+
var propertyName = (string) reader.Value;
273274
if (propertyName.Contains(Parser.AsStringSuffix))
274275
{
275276
reader.Read();
276277
reader.Read();
277278
}
278279
if (reader.TokenType != JsonToken.EndObject)
279280
{
280-
var percentileValue = (string)reader.Value;
281+
var percentileValue = (string) reader.Value;
281282
var percentile = double.Parse(percentileValue, CultureInfo.InvariantCulture);
282283
reader.Read();
283284
var value = reader.Value as double?;
@@ -298,9 +299,9 @@ private IAggregate GetSingleBucketAggregate(JsonReader reader, JsonSerializer se
298299
{
299300
reader.Read();
300301
var docCount = (reader.Value as long?).GetValueOrDefault(0);
301-
var bucket = new SingleBucketAggregate { DocCount = docCount };
302+
var bucket = new SingleBucketAggregate {DocCount = docCount};
302303
reader.Read();
303-
if (reader.TokenType == JsonToken.PropertyName && (string)reader.Value == Parser.Buckets)
304+
if (reader.TokenType == JsonToken.PropertyName && (string) reader.Value == Parser.Buckets)
304305
{
305306
var b = this.GetMultiBucketAggregate(reader, serializer) as BucketAggregate;
306307
return new BucketAggregate
@@ -319,13 +320,17 @@ private IAggregate GetStatsAggregate(JsonReader reader, JsonSerializer serialize
319320
{
320321
reader.Read();
321322
var count = (reader.Value as long?).GetValueOrDefault(0);
322-
reader.Read(); reader.Read();
323+
reader.Read();
324+
reader.Read();
323325
var min = reader.Value as double?;
324-
reader.Read(); reader.Read();
326+
reader.Read();
327+
reader.Read();
325328
var max = reader.Value as double?;
326-
reader.Read(); reader.Read();
329+
reader.Read();
330+
reader.Read();
327331
var average = reader.Value as double?;
328-
reader.Read(); reader.Read();
332+
reader.Read();
333+
reader.Read();
329334
var sum = reader.Value as double?;
330335

331336
var statsMetric = new StatsAggregate()
@@ -342,7 +347,7 @@ private IAggregate GetStatsAggregate(JsonReader reader, JsonSerializer serialize
342347
if (reader.TokenType == JsonToken.EndObject)
343348
return statsMetric;
344349

345-
var propertyName = (string)reader.Value;
350+
var propertyName = (string) reader.Value;
346351
while (reader.TokenType != JsonToken.EndObject && propertyName.Contains(Parser.AsStringSuffix))
347352
{
348353
reader.Read();
@@ -371,7 +376,8 @@ private IAggregate GetExtendedStatsAggregate(StatsAggregate statsMetric, JsonRea
371376
reader.Read();
372377
reader.Read();
373378
extendedStatsMetric.Variance = (reader.Value as double?);
374-
reader.Read(); reader.Read();
379+
reader.Read();
380+
reader.Read();
375381
extendedStatsMetric.StdDeviation = (reader.Value as double?);
376382
reader.Read();
377383

@@ -383,15 +389,15 @@ private IAggregate GetExtendedStatsAggregate(StatsAggregate statsMetric, JsonRea
383389
reader.Read();
384390
reader.Read();
385391

386-
propertyName = (string)reader.Value;
392+
propertyName = (string) reader.Value;
387393
if (propertyName == Parser.Upper)
388394
{
389395
reader.Read();
390396
bounds.Upper = reader.Value as double?;
391397
}
392398
reader.Read();
393399

394-
propertyName = (string)reader.Value;
400+
propertyName = (string) reader.Value;
395401
if (propertyName == Parser.Lower)
396402
{
397403
reader.Read();
@@ -402,7 +408,7 @@ private IAggregate GetExtendedStatsAggregate(StatsAggregate statsMetric, JsonRea
402408
reader.Read();
403409
}
404410

405-
propertyName = (string)reader.Value;
411+
propertyName = (string) reader.Value;
406412
while (reader.TokenType != JsonToken.EndObject && propertyName.Contains(Parser.AsStringSuffix))
407413
{
408414
// std_deviation_bounds is an object, so we need to skip its properties
@@ -428,7 +434,7 @@ private IReadOnlyDictionary<string, IAggregate> GetSubAggregates(JsonReader read
428434
var currentDepth = reader.Depth;
429435
do
430436
{
431-
var fieldName = (string)reader.Value;
437+
var fieldName = (string) reader.Value;
432438
reader.Read();
433439
var agg = this.ReadAggregate(reader, serializer);
434440
nestedAggs.Add(fieldName, agg);
@@ -442,14 +448,14 @@ private IReadOnlyDictionary<string, IAggregate> GetSubAggregates(JsonReader read
442448
private IAggregate GetMultiBucketAggregate(JsonReader reader, JsonSerializer serializer)
443449
{
444450
var bucket = new BucketAggregate();
445-
var propertyName = (string)reader.Value;
451+
var propertyName = (string) reader.Value;
446452
if (propertyName == Parser.DocCountErrorUpperBound)
447453
{
448454
reader.Read();
449455
bucket.DocCountErrorUpperBound = reader.Value as long?;
450456
reader.Read();
451457
}
452-
propertyName = (string)reader.Value;
458+
propertyName = (string) reader.Value;
453459
if (propertyName == Parser.SumOtherDocCount)
454460
{
455461
reader.Read();
@@ -513,7 +519,7 @@ private IAggregate GetValueAggregate(JsonReader reader, JsonSerializer serialize
513519
{
514520
if (reader.TokenType == JsonToken.PropertyName)
515521
{
516-
var propertyName = (string)reader.Value;
522+
var propertyName = (string) reader.Value;
517523

518524
if (propertyName == Parser.ValueAsString)
519525
{
@@ -523,7 +529,7 @@ private IAggregate GetValueAggregate(JsonReader reader, JsonSerializer serialize
523529

524530
if (reader.TokenType == JsonToken.PropertyName)
525531
{
526-
propertyName = (string)reader.Value;
532+
propertyName = (string) reader.Value;
527533
if (propertyName == Parser.Keys)
528534
{
529535
var keyedValueMetric = new KeyedValueAggregate
@@ -556,7 +562,7 @@ private IAggregate GetValueAggregate(JsonReader reader, JsonSerializer serialize
556562
var scriptedMetric = serializer.Deserialize(reader);
557563

558564
if (scriptedMetric != null)
559-
return new ScriptedMetricAggregate { _Value = scriptedMetric };
565+
return new ScriptedMetricAggregate {_Value = scriptedMetric};
560566

561567
reader.Read();
562568
return valueMetric;
@@ -576,13 +582,13 @@ public IBucket GetRangeBucket(JsonReader reader, JsonSerializer serializer, stri
576582
case Parser.From:
577583
reader.Read();
578584
if (reader.ValueType == typeof(double))
579-
fromDouble = (double)reader.Value;
585+
fromDouble = (double) reader.Value;
580586
reader.Read();
581587
break;
582588
case Parser.To:
583589
reader.Read();
584590
if (reader.ValueType == typeof(double))
585-
toDouble = (double)reader.Value;
591+
toDouble = (double) reader.Value;
586592
reader.Read();
587593
break;
588594
case Parser.Key:
@@ -642,19 +648,18 @@ private IBucket GetDateHistogramBucket(JsonReader reader, JsonSerializer seriali
642648
};
643649

644650
return dateHistogram;
645-
646651
}
647652

648653
private IBucket GetKeyedBucket(JsonReader reader, JsonSerializer serializer)
649654
{
650655
reader.Read();
651656
var key = reader.Value;
652657
reader.Read();
653-
var propertyName = (string)reader.Value;
658+
var propertyName = (string) reader.Value;
654659
if (propertyName == Parser.From || propertyName == Parser.To)
655660
return GetRangeBucket(reader, serializer, key as string);
656661

657-
var bucket = new KeyedBucket<object> { Key = key };
662+
var bucket = new KeyedBucket<object> {Key = key};
658663

659664
if (propertyName == Parser.KeyAsString)
660665
{
@@ -667,10 +672,16 @@ private IBucket GetKeyedBucket(JsonReader reader, JsonSerializer serializer)
667672
bucket.DocCount = docCount.GetValueOrDefault(0);
668673
reader.Read();
669674

670-
var nextProperty = (string)reader.Value;
675+
var nextProperty = (string) reader.Value;
671676
if (nextProperty == Parser.Score)
672677
return GetSignificantTermsBucket(reader, serializer, bucket);
673678

679+
if (nextProperty == Parser.DocCountErrorUpperBound)
680+
{
681+
reader.Read();
682+
bucket.DocCountErrorUpperBound = reader.Value as long?;
683+
reader.Read();
684+
}
674685
bucket.Aggregations = this.GetSubAggregates(reader, serializer);
675686
return bucket;
676687
}

src/Nest/Aggregations/AggregationsHelper.cs

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -194,7 +194,8 @@ private IEnumerable<KeyedBucket<TKey>> GetKeyedBuckets<TKey>(IEnumerable<IBucket
194194
Key = (TKey)Convert.ChangeType(bucket.Key, typeof(TKey)),
195195
KeyAsString = bucket.KeyAsString,
196196
Aggregations = bucket.Aggregations,
197-
DocCount = bucket.DocCount
197+
DocCount = bucket.DocCount,
198+
DocCountErrorUpperBound = bucket.DocCountErrorUpperBound
198199
};
199200
}
200201
}

src/Nest/Aggregations/Bucket/KeyedBucket.cs

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -13,5 +13,7 @@ public KeyedBucket(IDictionary<string, IAggregate> aggregations) : base(aggregat
1313
public TKey Key { get; set; }
1414
public string KeyAsString { get; set; }
1515
public long? DocCount { get; set; }
16+
17+
public long? DocCountErrorUpperBound { get; set; }
1618
}
1719
}

src/Nest/Aggregations/Bucket/Terms/TermsAggregation.cs

Lines changed: 10 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -41,6 +41,11 @@ public interface ITermsAggregation : IBucketAggregation
4141

4242
[JsonProperty("missing")]
4343
string Missing { get; set; }
44+
45+
[JsonProperty("show_term_doc_count_error")]
46+
bool? ShowTermDocCountError { get; set; }
47+
48+
4449
}
4550

4651
public class TermsAggregation : BucketAggregationBase, ITermsAggregation
@@ -56,6 +61,7 @@ public class TermsAggregation : BucketAggregationBase, ITermsAggregation
5661
public TermsIncludeExclude Exclude { get; set; }
5762
public TermsAggregationCollectMode? CollectMode { get; set; }
5863
public string Missing { get; set; }
64+
public bool? ShowTermDocCountError { get; set; }
5965

6066
internal TermsAggregation() { }
6167

@@ -91,6 +97,8 @@ public class TermsAggregationDescriptor<T>
9197

9298
string ITermsAggregation.Missing { get; set; }
9399

100+
bool? ITermsAggregation.ShowTermDocCountError { get; set; }
101+
94102
public TermsAggregationDescriptor<T> Field(Field field) => Assign(a => a.Field = field);
95103

96104
public TermsAggregationDescriptor<T> Field(Expression<Func<T, object>> field) => Assign(a => a.Field = field);
@@ -159,5 +167,7 @@ public TermsAggregationDescriptor<T> CollectMode(TermsAggregationCollectMode col
159167
Assign(a => a.CollectMode = collectMode);
160168

161169
public TermsAggregationDescriptor<T> Missing(string missing) => Assign(a => a.Missing = missing);
170+
171+
public TermsAggregationDescriptor<T> ShowTermDocCountError(bool? showTermDocCountError = true) => Assign(a => a.ShowTermDocCountError = showTermDocCountError);
162172
}
163173
}

0 commit comments

Comments
 (0)