@@ -6,6 +6,7 @@
 
 package org.elasticsearch.xpack.oss;
 
+import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -110,28 +111,28 @@ public Set<String> getUsedAnalyzerTypes() {
      * Return the set of used built-in char filters in the cluster.
      */
     public Set<String> getUsedBuiltInCharFilters() {
-        return usedCharFilters;
+        return usedBuiltInCharFilters;
     }
 
     /**
      * Return the set of used built-in tokenizers in the cluster.
      */
     public Set<String> getUsedBuiltInTokenizers() {
-        return usedTokenizers;
+        return usedBuiltInTokenizers;
     }
 
     /**
      * Return the set of used built-in token filters in the cluster.
      */
     public Set<String> getUsedBuiltInTokenFilters() {
-        return usedTokenFilters;
+        return usedBuiltInTokenFilters;
     }
 
     /**
      * Return the set of used built-in analyzers in the cluster.
      */
     public Set<String> getUsedBuiltInAnalyzers() {
-        return usedAnalyzers;
+        return usedBuiltInAnalyzers;
     }
 
     @Override
@@ -182,4 +183,9 @@ public int hashCode() {
             usedAnalyzers, usedBuiltInCharFilters, usedBuiltInTokenizers, usedBuiltInTokenFilters,
             usedBuiltInAnalyzers);
     }
+
+    @Override
+    public String toString() {
+        return Strings.toString(this, true, true);
+    }
 }
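The hunks above fix a set of copy-paste getter bugs (each accessor returned a similarly named field instead of its own backing set) and add a toString override that delegates to Strings.toString; assuming the usual Strings.toString(ToXContent, boolean, boolean) overload, the two flags request pretty-printed, human-readable output. The hunks below come from the matching mutateInstance test. As a minimal, self-contained sketch of the getter bug being fixed (class and field names here are illustrative, not the actual Elasticsearch code):

```java
import java.util.Set;

// Illustrative only: a trimmed-down stand-in for the usage class above.
class UsageSketch {
    private final Set<String> usedTokenizers;         // custom tokenizer types
    private final Set<String> usedBuiltInTokenizers;  // built-in tokenizers

    UsageSketch(Set<String> usedTokenizers, Set<String> usedBuiltInTokenizers) {
        this.usedTokenizers = usedTokenizers;
        this.usedBuiltInTokenizers = usedBuiltInTokenizers;
    }

    public Set<String> getUsedBuiltInTokenizers() {
        // Before the fix the method returned usedTokenizers, silently reporting
        // the wrong set; the fix makes each getter return its own backing field.
        return usedBuiltInTokenizers;
    }
}
```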
@@ -116,7 +116,7 @@ protected IndexFeatureSetUsage mutateInstance(IndexFeatureSetUsage instance) thr
                 instance.getUsedBuiltInTokenizers(), instance.getUsedBuiltInTokenFilters(),
                 instance.getUsedBuiltInAnalyzers());
         case 4:
-            Set<String> analyzers = new HashSet<>(instance.getUsedAnalyzerTypes());
+            Set<String> analyzers = new HashSet<>(instance.getUsedAnalyzerTypes());
             if (analyzers.add("english") == false) {
                 analyzers.remove("english");
             }
@@ -125,7 +125,7 @@ protected IndexFeatureSetUsage mutateInstance(IndexFeatureSetUsage instance) thr
                 instance.getUsedBuiltInCharFilters(), instance.getUsedBuiltInTokenizers(), instance.getUsedBuiltInTokenFilters(),
                 instance.getUsedBuiltInAnalyzers());
         case 5:
-            Set<String> builtInCharFilters = new HashSet<>();
+            Set<String> builtInCharFilters = new HashSet<>(instance.getUsedBuiltInCharFilters());
             if (builtInCharFilters.add("html_strip") == false) {
                 builtInCharFilters.remove("html_strip");
             }
@@ -135,7 +135,7 @@ protected IndexFeatureSetUsage mutateInstance(IndexFeatureSetUsage instance) thr
                 instance.getUsedBuiltInTokenizers(), instance.getUsedBuiltInTokenFilters(),
                 instance.getUsedBuiltInAnalyzers());
         case 6:
-            Set<String> builtInTokenizers = new HashSet<>();
+            Set<String> builtInTokenizers = new HashSet<>(instance.getUsedBuiltInTokenizers());
             if (builtInTokenizers.add("keyword") == false) {
                 builtInTokenizers.remove("keyword");
             }
@@ -144,7 +144,7 @@ protected IndexFeatureSetUsage mutateInstance(IndexFeatureSetUsage instance) thr
                 instance.getUsedBuiltInCharFilters(), builtInTokenizers, instance.getUsedBuiltInTokenFilters(),
                 instance.getUsedBuiltInAnalyzers());
         case 7:
-            Set<String> builtInTokenFilters = new HashSet<>();
+            Set<String> builtInTokenFilters = new HashSet<>(instance.getUsedBuiltInTokenFilters());
             if (builtInTokenFilters.add("trim") == false) {
                 builtInTokenFilters.remove("trim");
             }
@@ -153,7 +153,7 @@ protected IndexFeatureSetUsage mutateInstance(IndexFeatureSetUsage instance) thr
                 instance.getUsedBuiltInCharFilters(), instance.getUsedBuiltInTokenizers(), builtInTokenFilters,
                 instance.getUsedBuiltInAnalyzers());
         case 8:
-            Set<String> builtInAnalyzers = new HashSet<>();
+            Set<String> builtInAnalyzers = new HashSet<>(instance.getUsedBuiltInAnalyzers());
             if (builtInAnalyzers.add("french") == false) {
                 builtInAnalyzers.remove("french");
             }
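The test hunks above all follow the same mutateInstance contract: copy the set from the original instance first, then toggle a single element so the mutated copy differs from the source in exactly one field. Before this change, each case started from an empty set, so the "mutated" value was always just the single toggled element, which can coincide with the original set (making the instances equal) and also drops every other element the original contained. A hedged, self-contained sketch of that toggle pattern (helper and class names here are made up for illustration):

```java
import java.util.HashSet;
import java.util.Set;

final class MutateSketch {

    // Return a copy of `original` that differs from it by exactly one element.
    static Set<String> toggled(Set<String> original, String element) {
        Set<String> copy = new HashSet<>(original); // start from the original contents
        if (copy.add(element) == false) {           // element was already present...
            copy.remove(element);                   // ...so remove it to still force a difference
        }
        return copy;
    }

    public static void main(String[] args) {
        Set<String> tokenFilters = Set.of("trim", "lowercase");
        System.out.println(toggled(tokenFilters, "trim")); // [lowercase]
        System.out.println(toggled(tokenFilters, "stop")); // trim, lowercase, stop (in some order)
    }
}
```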