diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/SynonymTokenFilterFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/SynonymTokenFilterFactory.java index 75d4eca4254f8..5d6135549b882 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/SynonymTokenFilterFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/SynonymTokenFilterFactory.java @@ -30,6 +30,7 @@ import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.analysis.AbstractTokenFilterFactory; import org.elasticsearch.index.analysis.Analysis; +import org.elasticsearch.index.analysis.AnalysisMode; import org.elasticsearch.index.analysis.CharFilterFactory; import org.elasticsearch.index.analysis.CustomAnalyzer; import org.elasticsearch.index.analysis.TokenFilterFactory; @@ -50,6 +51,7 @@ public class SynonymTokenFilterFactory extends AbstractTokenFilterFactory { private final boolean lenient; protected final Settings settings; protected final Environment environment; + private final boolean updateable; SynonymTokenFilterFactory(IndexSettings indexSettings, Environment env, String name, Settings settings) { @@ -65,9 +67,15 @@ public class SynonymTokenFilterFactory extends AbstractTokenFilterFactory { this.expand = settings.getAsBoolean("expand", true); this.lenient = settings.getAsBoolean("lenient", false); this.format = settings.get("format", ""); + this.updateable = settings.getAsBoolean("updateable", false); this.environment = env; } + @Override + public AnalysisMode getAnalysisMode() { + return this.updateable ? AnalysisMode.SEARCH_TIME : AnalysisMode.ALL; + } + @Override public TokenStream create(TokenStream tokenStream) { throw new IllegalStateException("Call createPerAnalyzerSynonymFactory to specialize this factory for an analysis chain first"); @@ -98,6 +106,11 @@ public TokenFilterFactory getSynonymFilter() { // which doesn't support stacked input tokens return IDENTITY_FILTER; } + + @Override + public AnalysisMode getAnalysisMode() { + return updateable ? AnalysisMode.SEARCH_TIME : AnalysisMode.ALL; + } }; } diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/SynonymAnalyzerIT.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/SynonymAnalyzerIT.java new file mode 100644 index 0000000000000..5d4df091840ec --- /dev/null +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/SynonymAnalyzerIT.java @@ -0,0 +1,123 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.analysis.common; + +import org.elasticsearch.action.admin.indices.analyze.AnalyzeResponse; +import org.elasticsearch.action.admin.indices.analyze.AnalyzeResponse.AnalyzeToken; +import org.elasticsearch.action.admin.indices.reloadanalyzer.ReloadAnalyzersResponse; +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; +import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.InternalTestCluster; + +import java.io.FileNotFoundException; +import java.io.IOException; +import java.io.OutputStreamWriter; +import java.io.PrintWriter; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.StandardOpenOption; +import java.util.Arrays; +import java.util.Collection; +import java.util.HashSet; +import java.util.Set; + +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; + +public class SynonymAnalyzerIT extends ESIntegTestCase { + + @Override + protected Collection> nodePlugins() { + return Arrays.asList(CommonAnalysisPlugin.class); + } + + /** + * This test needs to write to the config directory, this is difficult in an external cluster so we overwrite this to force running with + * {@link InternalTestCluster} + */ + @Override + protected boolean ignoreExternalCluster() { + return true; + } + + public void testSynonymsUpdateable() throws FileNotFoundException, IOException, InterruptedException { + Path config = internalCluster().getInstance(Environment.class).configFile(); + String synonymsFileName = "synonyms.txt"; + Path synonymsFile = config.resolve(synonymsFileName); + Files.createFile(synonymsFile); + assertTrue(Files.exists(synonymsFile)); + try (PrintWriter out = new PrintWriter( + new OutputStreamWriter(Files.newOutputStream(synonymsFile, StandardOpenOption.CREATE), StandardCharsets.UTF_8))) { + out.println("foo, baz"); + } + assertAcked(client().admin().indices().prepareCreate("test").setSettings(Settings.builder() + .put("index.number_of_shards", cluster().numDataNodes() * 2) + .put("index.number_of_replicas", 1) + .put("analysis.analyzer.my_synonym_analyzer.tokenizer", "standard") + .put("analysis.analyzer.my_synonym_analyzer.filter", "my_synonym_filter") + .put("analysis.filter.my_synonym_filter.type", "synonym") + .put("analysis.filter.my_synonym_filter.updateable", "true") + .put("analysis.filter.my_synonym_filter.synonyms_path", synonymsFileName)) + .addMapping("_doc", "field", "type=text,analyzer=standard,search_analyzer=my_synonym_analyzer")); + + client().prepareIndex("test", "_doc", "1").setSource("field", "foo").get(); + assertNoFailures(client().admin().indices().prepareRefresh("test").execute().actionGet()); + + SearchResponse response = client().prepareSearch("test").setQuery(QueryBuilders.matchQuery("field", "baz")).get(); + assertHitCount(response, 1L); + response = client().prepareSearch("test").setQuery(QueryBuilders.matchQuery("field", "buzz")).get(); + assertHitCount(response, 0L); + AnalyzeResponse analyzeResponse = client().admin().indices().prepareAnalyze("test", "foo").setAnalyzer("my_synonym_analyzer").get(); + assertEquals(2, 
analyzeResponse.getTokens().size()); + assertEquals("foo", analyzeResponse.getTokens().get(0).getTerm()); + assertEquals("baz", analyzeResponse.getTokens().get(1).getTerm()); + + // now update synonyms file several times and trigger reloading + for (int i = 0; i < 10; i++) { + String testTerm = randomAlphaOfLength(10); + try (PrintWriter out = new PrintWriter( + new OutputStreamWriter(Files.newOutputStream(synonymsFile, StandardOpenOption.WRITE), StandardCharsets.UTF_8))) { + out.println("foo, baz, " + testTerm); + } + ReloadAnalyzersResponse reloadResponse = client().admin().indices().prepareReloadAnalyzers("test").execute().actionGet(); + assertNoFailures(reloadResponse); + assertEquals(cluster().numDataNodes(), reloadResponse.getSuccessfulShards()); + + analyzeResponse = client().admin().indices().prepareAnalyze("test", "foo").setAnalyzer("my_synonym_analyzer").get(); + assertEquals(3, analyzeResponse.getTokens().size()); + Set tokens = new HashSet<>(); + analyzeResponse.getTokens().stream().map(AnalyzeToken::getTerm).forEach(t -> tokens.add(t)); + assertTrue(tokens.contains("foo")); + assertTrue(tokens.contains("baz")); + assertTrue(tokens.contains(testTerm)); + + response = client().prepareSearch("test").setQuery(QueryBuilders.matchQuery("field", "baz")).get(); + assertHitCount(response, 1L); + response = client().prepareSearch("test").setQuery(QueryBuilders.matchQuery("field", testTerm)).get(); + assertHitCount(response, 1L); + } + } +} \ No newline at end of file diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/SynonymAnalyzerTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/SynonymAnalyzerTests.java new file mode 100644 index 0000000000000..e96708cfa67f9 --- /dev/null +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/SynonymAnalyzerTests.java @@ -0,0 +1,111 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.analysis.common; + +import org.elasticsearch.action.admin.indices.analyze.AnalyzeResponse; +import org.elasticsearch.action.admin.indices.analyze.AnalyzeResponse.AnalyzeToken; +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.test.ESSingleNodeTestCase; + +import java.io.FileNotFoundException; +import java.io.IOException; +import java.io.OutputStreamWriter; +import java.io.PrintWriter; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.StandardOpenOption; +import java.util.Arrays; +import java.util.Collection; +import java.util.HashSet; +import java.util.Set; + +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; + +public class SynonymAnalyzerTests extends ESSingleNodeTestCase { + + @Override + protected Collection> getPlugins() { + return Arrays.asList(CommonAnalysisPlugin.class); + } + + public void testSynonymsUpdateable() throws FileNotFoundException, IOException { + String synonymsFileName = "synonyms.txt"; + Path configDir = node().getEnvironment().configFile(); + if (Files.exists(configDir) == false) { + Files.createDirectory(configDir); + } + Path synonymsFile = configDir.resolve(synonymsFileName); + if (Files.exists(synonymsFile) == false) { + Files.createFile(synonymsFile); + } + try (PrintWriter out = new PrintWriter( + new OutputStreamWriter(Files.newOutputStream(synonymsFile, StandardOpenOption.WRITE), StandardCharsets.UTF_8))) { + out.println("foo, baz"); + } + + assertAcked(client().admin().indices().prepareCreate("test").setSettings(Settings.builder() + .put("index.number_of_shards", 5) + .put("index.number_of_replicas", 0) + .put("analysis.analyzer.my_synonym_analyzer.tokenizer", "standard") + .putList("analysis.analyzer.my_synonym_analyzer.filter", "lowercase", "my_synonym_filter") + .put("analysis.filter.my_synonym_filter.type", "synonym") + .put("analysis.filter.my_synonym_filter.updateable", "true") + .put("analysis.filter.my_synonym_filter.synonyms_path", synonymsFileName)) + .addMapping("_doc", "field", "type=text,analyzer=standard,search_analyzer=my_synonym_analyzer")); + + client().prepareIndex("test", "_doc", "1").setSource("field", "Foo").get(); + assertNoFailures(client().admin().indices().prepareRefresh("test").execute().actionGet()); + + SearchResponse response = client().prepareSearch("test").setQuery(QueryBuilders.matchQuery("field", "baz")).get(); + assertHitCount(response, 1L); + response = client().prepareSearch("test").setQuery(QueryBuilders.matchQuery("field", "buzz")).get(); + assertHitCount(response, 0L); + AnalyzeResponse analyzeResponse = client().admin().indices().prepareAnalyze("test", "foo").setAnalyzer("my_synonym_analyzer").get(); + assertEquals(2, analyzeResponse.getTokens().size()); + assertEquals("foo", analyzeResponse.getTokens().get(0).getTerm()); + assertEquals("baz", analyzeResponse.getTokens().get(1).getTerm()); + + // now update synonyms file and trigger reloading + try (PrintWriter out = new PrintWriter( + new OutputStreamWriter(Files.newOutputStream(synonymsFile, StandardOpenOption.WRITE), StandardCharsets.UTF_8))) { + out.println("foo, baz, buzz"); + 
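+            // writing the file alone does not affect the running analyzer; the reload request below picks up the new contents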
} + assertNoFailures(client().admin().indices().prepareReloadAnalyzers("test").execute().actionGet()); + + analyzeResponse = client().admin().indices().prepareAnalyze("test", "Foo").setAnalyzer("my_synonym_analyzer").get(); + assertEquals(3, analyzeResponse.getTokens().size()); + Set tokens = new HashSet<>(); + analyzeResponse.getTokens().stream().map(AnalyzeToken::getTerm).forEach(t -> tokens.add(t)); + assertTrue(tokens.contains("foo")); + assertTrue(tokens.contains("baz")); + assertTrue(tokens.contains("buzz")); + + response = client().prepareSearch("test").setQuery(QueryBuilders.matchQuery("field", "baz")).get(); + assertHitCount(response, 1L); + response = client().prepareSearch("test").setQuery(QueryBuilders.matchQuery("field", "buzz")).get(); + assertHitCount(response, 1L); + } +} \ No newline at end of file diff --git a/server/src/main/java/org/elasticsearch/action/ActionModule.java b/server/src/main/java/org/elasticsearch/action/ActionModule.java index 2cfe66372115f..337dd28e60843 100644 --- a/server/src/main/java/org/elasticsearch/action/ActionModule.java +++ b/server/src/main/java/org/elasticsearch/action/ActionModule.java @@ -125,6 +125,8 @@ import org.elasticsearch.action.admin.indices.recovery.TransportRecoveryAction; import org.elasticsearch.action.admin.indices.refresh.RefreshAction; import org.elasticsearch.action.admin.indices.refresh.TransportRefreshAction; +import org.elasticsearch.action.admin.indices.reloadanalyzer.ReloadAnalyzerAction; +import org.elasticsearch.action.admin.indices.reloadanalyzer.TransportReloadAnalyzersAction; import org.elasticsearch.action.admin.indices.rollover.RolloverAction; import org.elasticsearch.action.admin.indices.rollover.TransportRolloverAction; import org.elasticsearch.action.admin.indices.segments.IndicesSegmentsAction; @@ -509,6 +511,7 @@ public void reg actions.register(ClearScrollAction.INSTANCE, TransportClearScrollAction.class); actions.register(RecoveryAction.INSTANCE, TransportRecoveryAction.class); actions.register(NodesReloadSecureSettingsAction.INSTANCE, TransportNodesReloadSecureSettingsAction.class); + actions.register(ReloadAnalyzerAction.INSTANCE, TransportReloadAnalyzersAction.class); //Indexed scripts actions.register(PutStoredScriptAction.INSTANCE, TransportPutStoredScriptAction.class); diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java index 55bd593742667..133244e6659ac 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java @@ -49,10 +49,12 @@ import org.elasticsearch.index.analysis.AnalysisRegistry; import org.elasticsearch.index.analysis.CharFilterFactory; import org.elasticsearch.index.analysis.CustomAnalyzer; +import org.elasticsearch.index.analysis.CustomAnalyzerProvider.AnalyzerComponents; import org.elasticsearch.index.analysis.IndexAnalyzers; import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.index.analysis.NormalizingCharFilterFactory; import org.elasticsearch.index.analysis.NormalizingTokenFilterFactory; +import org.elasticsearch.index.analysis.ReloadableCustomAnalyzer; import org.elasticsearch.index.analysis.TokenFilterFactory; import org.elasticsearch.index.analysis.TokenizerFactory; import org.elasticsearch.index.mapper.KeywordFieldMapper; @@ -299,18 +301,36 @@ private 
static DetailAnalyzeResponse detailAnalyze(AnalyzeRequest request, Analy
         }
     }
 
-        CustomAnalyzer customAnalyzer = null;
-        if (analyzer instanceof CustomAnalyzer) {
-            customAnalyzer = (CustomAnalyzer) analyzer;
-        } else if (analyzer instanceof NamedAnalyzer && ((NamedAnalyzer) analyzer).analyzer() instanceof CustomAnalyzer) {
-            customAnalyzer = (CustomAnalyzer) ((NamedAnalyzer) analyzer).analyzer();
+        Analyzer customAnalyzer = null;
+        // maybe unwrap analyzer from NamedAnalyzer
+        Analyzer potentialCustomAnalyzer = analyzer;
+        if (analyzer instanceof NamedAnalyzer) {
+            potentialCustomAnalyzer = ((NamedAnalyzer) analyzer).analyzer();
+        }
+        if (potentialCustomAnalyzer instanceof CustomAnalyzer || potentialCustomAnalyzer instanceof ReloadableCustomAnalyzer) {
+            customAnalyzer = potentialCustomAnalyzer;
         }
 
         if (customAnalyzer != null) {
-            // customAnalyzer = divide charfilter, tokenizer tokenfilters
-            CharFilterFactory[] charFilterFactories = customAnalyzer.charFilters();
-            TokenizerFactory tokenizerFactory = customAnalyzer.tokenizerFactory();
-            TokenFilterFactory[] tokenFilterFactories = customAnalyzer.tokenFilters();
+            // divide charfilter, tokenizer tokenfilters
+            CharFilterFactory[] charFilterFactories;
+            TokenizerFactory tokenizerFactory;
+            TokenFilterFactory[] tokenFilterFactories;
+            String tokenizerName;
+            if (customAnalyzer instanceof CustomAnalyzer) {
+                CustomAnalyzer casted = (CustomAnalyzer) customAnalyzer;
+                charFilterFactories = casted.charFilters();
+                tokenizerFactory = casted.tokenizerFactory();
+                tokenFilterFactories = casted.tokenFilters();
+                tokenizerName = casted.getTokenizerName();
+            } else {
+                // for ReloadableCustomAnalyzer we want to make sure we get the factories from the same components object
+                AnalyzerComponents components = ((ReloadableCustomAnalyzer) customAnalyzer).getComponents();
+                charFilterFactories = components.getCharFilters();
+                tokenizerFactory = components.getTokenizerFactory();
+                tokenFilterFactories = components.getTokenFilters();
+                tokenizerName = components.getTokenizerName();
+            }
 
             String[][] charFiltersTexts = new String[charFilterFactories != null ? charFilterFactories.length : 0][request.text().length];
             TokenListCreator[] tokenFiltersTokenListCreator = new TokenListCreator[tokenFilterFactories != null ?
@@ -370,7 +390,7 @@ private static DetailAnalyzeResponse detailAnalyze(AnalyzeRequest request, Analy
             }
         }
         detailResponse = new DetailAnalyzeResponse(charFilteredLists, new DetailAnalyzeResponse.AnalyzeTokenList(
-            customAnalyzer.getTokenizerName(), tokenizerTokenListCreator.getArrayTokens()), tokenFilterLists);
+            tokenizerName, tokenizerTokenListCreator.getArrayTokens()), tokenFilterLists);
     } else {
         String name;
         if (analyzer instanceof NamedAnalyzer) {
diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/reloadanalyzer/ReloadAnalyzerAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/reloadanalyzer/ReloadAnalyzerAction.java
new file mode 100644
index 0000000000000..b432f65630b99
--- /dev/null
+++ b/server/src/main/java/org/elasticsearch/action/admin/indices/reloadanalyzer/ReloadAnalyzerAction.java
@@ -0,0 +1,37 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.action.admin.indices.reloadanalyzer; + +import org.elasticsearch.action.Action; + +public class ReloadAnalyzerAction extends Action { + + public static final ReloadAnalyzerAction INSTANCE = new ReloadAnalyzerAction(); + public static final String NAME = "indices:admin/reload_analyzers"; + + private ReloadAnalyzerAction() { + super(NAME); + } + + @Override + public ReloadAnalyzersResponse newResponse() { + return new ReloadAnalyzersResponse(); + } +} diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/reloadanalyzer/ReloadAnalyzerRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/indices/reloadanalyzer/ReloadAnalyzerRequestBuilder.java new file mode 100644 index 0000000000000..0680699af901a --- /dev/null +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/reloadanalyzer/ReloadAnalyzerRequestBuilder.java @@ -0,0 +1,35 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.action.admin.indices.reloadanalyzer; + +import org.elasticsearch.action.support.broadcast.BroadcastOperationRequestBuilder; +import org.elasticsearch.client.ElasticsearchClient; + +/** + * Builder for reloading of analyzers + */ +public class ReloadAnalyzerRequestBuilder + extends BroadcastOperationRequestBuilder { + + public ReloadAnalyzerRequestBuilder(ElasticsearchClient client, ReloadAnalyzerAction action, String... indices) { + super(client, action, new ReloadAnalyzersRequest(indices)); + } + +} diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/reloadanalyzer/ReloadAnalyzersRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/reloadanalyzer/ReloadAnalyzersRequest.java new file mode 100644 index 0000000000000..0db7b00d64f74 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/reloadanalyzer/ReloadAnalyzersRequest.java @@ -0,0 +1,57 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.action.admin.indices.reloadanalyzer; + +import org.elasticsearch.action.support.broadcast.BroadcastRequest; + +import java.util.Arrays; +import java.util.Objects; + +/** + * Request for reloading index search analyzers + */ +public class ReloadAnalyzersRequest extends BroadcastRequest { + + /** + * Constructs a new request for reloading index search analyzers for one or more indices + */ + public ReloadAnalyzersRequest(String... indices) { + super(indices); + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ReloadAnalyzersRequest that = (ReloadAnalyzersRequest) o; + return Objects.equals(indicesOptions(), that.indicesOptions()) + && Arrays.equals(indices, that.indices); + } + + @Override + public int hashCode() { + return Objects.hash(indicesOptions(), Arrays.hashCode(indices)); + } + +} diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/reloadanalyzer/ReloadAnalyzersResponse.java b/server/src/main/java/org/elasticsearch/action/admin/indices/reloadanalyzer/ReloadAnalyzersResponse.java new file mode 100644 index 0000000000000..950f82b3f14e6 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/reloadanalyzer/ReloadAnalyzersResponse.java @@ -0,0 +1,102 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.action.admin.indices.reloadanalyzer; + +import org.elasticsearch.action.support.DefaultShardOperationFailedException; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.collect.Tuple; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; + +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; + +/** + * The response object that will be returned when reloading analyzers + */ +public class ReloadAnalyzersResponse extends BroadcastResponse { + + private final Map> reloadedIndicesNodes; + + public ReloadAnalyzersResponse() { + reloadedIndicesNodes = Collections.emptyMap(); + } + + public ReloadAnalyzersResponse(int totalShards, int successfulShards, int failedShards, + List shardFailures, Map> reloadedIndicesNodes) { + super(totalShards, successfulShards, failedShards, shardFailures); + this.reloadedIndicesNodes = reloadedIndicesNodes; + } + + /** + * Override in subclass to add custom fields following the common `_shards` field + */ + @Override + protected void addCustomXContentFields(XContentBuilder builder, Params params) throws IOException { + builder.startArray("reloaded_nodes"); + for (Entry> indexNodesReloaded : reloadedIndicesNodes.entrySet()) { + builder.startObject(); + builder.field("index", indexNodesReloaded.getKey()); + builder.field("reloaded_node_ids", indexNodesReloaded.getValue()); + builder.endObject(); + } + builder.endArray(); + } + + @SuppressWarnings({ "unchecked" }) + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>("reload_analyzer", + true, arg -> { + BroadcastResponse response = (BroadcastResponse) arg[0]; + List>> results = (List>>) arg[1]; + Map> reloadedNodeIds = new HashMap<>(); + for (Tuple> result : results) { + reloadedNodeIds.put(result.v1(), result.v2()); + } + return new ReloadAnalyzersResponse(response.getTotalShards(), response.getSuccessfulShards(), response.getFailedShards(), + Arrays.asList(response.getShardFailures()), reloadedNodeIds); + }); + + @SuppressWarnings({ "unchecked" }) + private static final ConstructingObjectParser>, Void> ENTRY_PARSER = new ConstructingObjectParser<>( + "reload_analyzer.entry", true, arg -> { + String index = (String) arg[0]; + List nodeIds = (List) arg[1]; + return new Tuple<>(index, nodeIds); + }); + + static { + declareBroadcastFields(PARSER); + PARSER.declareObjectArray(constructorArg(), ENTRY_PARSER, new ParseField("reloaded_nodes")); + ENTRY_PARSER.declareString(constructorArg(), new ParseField("index")); + ENTRY_PARSER.declareStringArray(constructorArg(), new ParseField("reloaded_node_ids")); + } + + public static ReloadAnalyzersResponse fromXContent(XContentParser parser) { + return PARSER.apply(parser, null); + } +} diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/reloadanalyzer/TransportReloadAnalyzersAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/reloadanalyzer/TransportReloadAnalyzersAction.java new file mode 100644 index 0000000000000..b2b4ad3e1b02c --- /dev/null +++ 
b/server/src/main/java/org/elasticsearch/action/admin/indices/reloadanalyzer/TransportReloadAnalyzersAction.java
@@ -0,0 +1,169 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.action.admin.indices.reloadanalyzer;
+
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.elasticsearch.action.admin.indices.reloadanalyzer.TransportReloadAnalyzersAction.ReloadResult;
+import org.elasticsearch.action.support.ActionFilters;
+import org.elasticsearch.action.support.DefaultShardOperationFailedException;
+import org.elasticsearch.action.support.broadcast.node.TransportBroadcastByNodeAction;
+import org.elasticsearch.cluster.ClusterState;
+import org.elasticsearch.cluster.block.ClusterBlockException;
+import org.elasticsearch.cluster.block.ClusterBlockLevel;
+import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
+import org.elasticsearch.cluster.routing.IndexRoutingTable;
+import org.elasticsearch.cluster.routing.IndexShardRoutingTable;
+import org.elasticsearch.cluster.routing.PlainShardsIterator;
+import org.elasticsearch.cluster.routing.RoutingTable;
+import org.elasticsearch.cluster.routing.ShardRouting;
+import org.elasticsearch.cluster.routing.ShardsIterator;
+import org.elasticsearch.cluster.service.ClusterService;
+import org.elasticsearch.common.inject.Inject;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.io.stream.Streamable;
+import org.elasticsearch.index.IndexService;
+import org.elasticsearch.indices.IndicesService;
+import org.elasticsearch.threadpool.ThreadPool;
+import org.elasticsearch.transport.TransportService;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+
+/**
+ * Transport action that reloads the search-time analyzers on every node that holds at least one shard of the target indices.
+ */
+public class TransportReloadAnalyzersAction
+        extends TransportBroadcastByNodeAction<ReloadAnalyzersRequest, ReloadAnalyzersResponse, ReloadResult> {
+
+    private static final Logger logger = LogManager.getLogger(TransportReloadAnalyzersAction.class);
+    private final IndicesService indicesService;
+    // used to report the id of the node a shard-level reload ran on
+    private final ClusterService clusterService;
+
+    @Inject
+    public TransportReloadAnalyzersAction(ClusterService clusterService, TransportService transportService, IndicesService indicesService,
+            ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver) {
+        super(ReloadAnalyzerAction.NAME, clusterService, transportService, actionFilters, indexNameExpressionResolver,
+                ReloadAnalyzersRequest::new, ThreadPool.Names.MANAGEMENT, false);
+        this.indicesService = indicesService;
+        this.clusterService = clusterService;
+    }
+
+    @Override
+    protected ReloadResult readShardResult(StreamInput in) throws IOException {
+        ReloadResult reloadResult = new ReloadResult();
+        reloadResult.readFrom(in);
+        return reloadResult;
+    }
+
+    @Override
+    protected ReloadAnalyzersResponse newResponse(ReloadAnalyzersRequest request, int totalShards, int successfulShards, int failedShards,
+            List<ReloadResult> responses, List<DefaultShardOperationFailedException> shardFailures, ClusterState clusterState) {
+        Map<String, List<String>> reloadedIndicesNodes = new HashMap<>();
+        for (ReloadResult result : responses) {
+            if (reloadedIndicesNodes.containsKey(result.index)) {
+                List<String> nodes = reloadedIndicesNodes.get(result.index);
+                nodes.add(result.nodeId);
+            } else {
+                List<String> nodes = new ArrayList<>();
+                nodes.add(result.nodeId);
+                reloadedIndicesNodes.put(result.index, nodes);
+            }
+        }
+        return new ReloadAnalyzersResponse(totalShards, successfulShards, failedShards, shardFailures, reloadedIndicesNodes);
+    }
+
+    @Override
+    protected ReloadAnalyzersRequest readRequestFrom(StreamInput in) throws IOException {
+        final ReloadAnalyzersRequest request = new ReloadAnalyzersRequest();
+        request.readFrom(in);
+        return request;
+    }
+
+    @Override
+    protected ReloadResult shardOperation(ReloadAnalyzersRequest request, ShardRouting shardRouting) throws IOException {
+        logger.info("reloading analyzers for index shard " + shardRouting);
+        IndexService indexService = indicesService.indexService(shardRouting.index());
+        indexService.mapperService().reloadSearchAnalyzers(indicesService.getAnalysis());
+        return new ReloadResult(shardRouting.index().getName(), clusterService.localNode().getId());
+    }
+
+    public static final class ReloadResult implements Streamable {
+        String index;
+        String nodeId;
+
+        private ReloadResult(String index, String nodeId) {
+            this.index = index;
+            this.nodeId = nodeId;
+        }
+
+        private ReloadResult() {
+        }
+
+        @Override
+        public void readFrom(StreamInput in) throws IOException {
+            this.index = in.readString();
+            this.nodeId = in.readString();
+        }
+
+        @Override
+        public void writeTo(StreamOutput out) throws IOException {
+            out.writeString(index);
+            out.writeString(nodeId);
+        }
+    }
+
+    /**
+     * The reload request should go to only one shard per node that the index lives on
+     */
+    @Override
+    protected ShardsIterator shards(ClusterState clusterState, ReloadAnalyzersRequest request, String[] concreteIndices) {
+        RoutingTable routingTable = clusterState.routingTable();
+        List<ShardRouting> shards = new ArrayList<>();
+        for (String index : concreteIndices) {
+            Set<String> nodesCovered = new HashSet<>();
+            IndexRoutingTable indexRoutingTable = routingTable.index(index);
+            for (IndexShardRoutingTable indexShardRoutingTable : indexRoutingTable) {
+                for (ShardRouting shardRouting : indexShardRoutingTable) {
+                    if (nodesCovered.contains(shardRouting.currentNodeId()) == false) {
+                        shards.add(shardRouting);
+                        nodesCovered.add(shardRouting.currentNodeId());
+                    }
+                }
+            }
+        }
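+        // at most one shard per node was collected for each index, so every node reloads a given index's analyzers only once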
return new PlainShardsIterator(shards); + } + + @Override + protected ClusterBlockException checkGlobalBlock(ClusterState state, ReloadAnalyzersRequest request) { + return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_WRITE); + } + + @Override + protected ClusterBlockException checkRequestBlock(ClusterState state, ReloadAnalyzersRequest request, String[] concreteIndices) { + return state.blocks().indicesBlockedException(ClusterBlockLevel.METADATA_WRITE, concreteIndices); + } +} diff --git a/server/src/main/java/org/elasticsearch/client/IndicesAdminClient.java b/server/src/main/java/org/elasticsearch/client/IndicesAdminClient.java index d5a73981f29f1..7f4265a18514d 100644 --- a/server/src/main/java/org/elasticsearch/client/IndicesAdminClient.java +++ b/server/src/main/java/org/elasticsearch/client/IndicesAdminClient.java @@ -77,6 +77,9 @@ import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; import org.elasticsearch.action.admin.indices.refresh.RefreshRequestBuilder; import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; +import org.elasticsearch.action.admin.indices.reloadanalyzer.ReloadAnalyzerRequestBuilder; +import org.elasticsearch.action.admin.indices.reloadanalyzer.ReloadAnalyzersRequest; +import org.elasticsearch.action.admin.indices.reloadanalyzer.ReloadAnalyzersResponse; import org.elasticsearch.action.admin.indices.rollover.RolloverRequest; import org.elasticsearch.action.admin.indices.rollover.RolloverRequestBuilder; import org.elasticsearch.action.admin.indices.rollover.RolloverResponse; @@ -819,4 +822,19 @@ public interface IndicesAdminClient extends ElasticsearchClient { * Swaps the index pointed to by an alias given all provided conditions are satisfied */ void rolloverIndex(RolloverRequest request, ActionListener listener); + + /** + * Reloads analyzers of one or more indices. + */ + ActionFuture reloadAnalyzers(ReloadAnalyzersRequest request); + + /** + * Reloads analyzers of one or more indices. + */ + void reloadAnalyzers(ReloadAnalyzersRequest request, ActionListener listener); + + /** + * Reloads analyzers of one or more indices. + */ + ReloadAnalyzerRequestBuilder prepareReloadAnalyzers(String... indices); } diff --git a/server/src/main/java/org/elasticsearch/client/Requests.java b/server/src/main/java/org/elasticsearch/client/Requests.java index 19ad2fb397edc..8e955f496c040 100644 --- a/server/src/main/java/org/elasticsearch/client/Requests.java +++ b/server/src/main/java/org/elasticsearch/client/Requests.java @@ -52,6 +52,7 @@ import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; import org.elasticsearch.action.admin.indices.open.OpenIndexRequest; import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; +import org.elasticsearch.action.admin.indices.reloadanalyzer.ReloadAnalyzersRequest; import org.elasticsearch.action.admin.indices.segments.IndicesSegmentsRequest; import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsRequest; import org.elasticsearch.action.admin.indices.shards.IndicesShardStoresRequest; @@ -535,4 +536,13 @@ public static SnapshotsStatusRequest snapshotsStatusRequest(String repository) { return new SnapshotsStatusRequest(repository); } + /** + * A request to reload search Analyzers indices settings. + * + * @param indices The indices to update the settings for. Use {@code null} or {@code _all} to executed against all indices. + * @return The request + */ + public static ReloadAnalyzersRequest reloadAnalyzersRequest(String... 
indices) { + return new ReloadAnalyzersRequest(indices); + } } diff --git a/server/src/main/java/org/elasticsearch/client/support/AbstractClient.java b/server/src/main/java/org/elasticsearch/client/support/AbstractClient.java index e79f0567babe6..f0bdafe1b89fb 100644 --- a/server/src/main/java/org/elasticsearch/client/support/AbstractClient.java +++ b/server/src/main/java/org/elasticsearch/client/support/AbstractClient.java @@ -207,6 +207,10 @@ import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; import org.elasticsearch.action.admin.indices.refresh.RefreshRequestBuilder; import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; +import org.elasticsearch.action.admin.indices.reloadanalyzer.ReloadAnalyzerAction; +import org.elasticsearch.action.admin.indices.reloadanalyzer.ReloadAnalyzerRequestBuilder; +import org.elasticsearch.action.admin.indices.reloadanalyzer.ReloadAnalyzersRequest; +import org.elasticsearch.action.admin.indices.reloadanalyzer.ReloadAnalyzersResponse; import org.elasticsearch.action.admin.indices.rollover.RolloverAction; import org.elasticsearch.action.admin.indices.rollover.RolloverRequest; import org.elasticsearch.action.admin.indices.rollover.RolloverRequestBuilder; @@ -1520,6 +1524,11 @@ public RefreshRequestBuilder prepareRefresh(String... indices) { return new RefreshRequestBuilder(this, RefreshAction.INSTANCE).setIndices(indices); } + @Override + public ReloadAnalyzerRequestBuilder prepareReloadAnalyzers(String... indices) { + return new ReloadAnalyzerRequestBuilder(this, ReloadAnalyzerAction.INSTANCE).setIndices(indices); + } + @Override public ActionFuture stats(final IndicesStatsRequest request) { return execute(IndicesStatsAction.INSTANCE, request); @@ -1725,6 +1734,16 @@ public ActionFuture getSettings(GetSettingsRequest request) public void getSettings(GetSettingsRequest request, ActionListener listener) { execute(GetSettingsAction.INSTANCE, request, listener); } + + @Override + public ActionFuture reloadAnalyzers(ReloadAnalyzersRequest request) { + return execute(ReloadAnalyzerAction.INSTANCE, request); + } + + @Override + public void reloadAnalyzers(final ReloadAnalyzersRequest request, final ActionListener listener) { + execute(ReloadAnalyzerAction.INSTANCE, request, listener); + } } @Override diff --git a/server/src/main/java/org/elasticsearch/index/analysis/AnalysisRegistry.java b/server/src/main/java/org/elasticsearch/index/analysis/AnalysisRegistry.java index 684d36c311f8b..8da51acb30733 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/AnalysisRegistry.java +++ b/server/src/main/java/org/elasticsearch/index/analysis/AnalysisRegistry.java @@ -414,7 +414,6 @@ public IndexAnalyzers build(IndexSettings indexSettings, Map tokenizerFactoryFactories, Map charFilterFactoryFactories, Map tokenFilterFactoryFactories) { - Map analyzers = new HashMap<>(); Map normalizers = new HashMap<>(); Map whitespaceNormalizers = new HashMap<>(); @@ -456,9 +455,11 @@ public IndexAnalyzers build(IndexSettings indexSettings, return new IndexAnalyzers(indexSettings, analyzers, normalizers, whitespaceNormalizers); } - private static NamedAnalyzer produceAnalyzer(String name, AnalyzerProvider analyzerFactory, - Map tokenFilters, Map charFilters, - Map tokenizers) { + private static NamedAnalyzer produceAnalyzer(String name, + AnalyzerProvider analyzerFactory, + Map tokenFilters, + Map charFilters, + Map tokenizers) { /* * Lucene defaults positionIncrementGap to 0 in all analyzers but * Elasticsearch defaults them to 0 only before 
version 2.0
diff --git a/server/src/main/java/org/elasticsearch/index/analysis/CustomAnalyzer.java b/server/src/main/java/org/elasticsearch/index/analysis/CustomAnalyzer.java
index a41ee33564400..2c6622cf6b532 100644
--- a/server/src/main/java/org/elasticsearch/index/analysis/CustomAnalyzer.java
+++ b/server/src/main/java/org/elasticsearch/index/analysis/CustomAnalyzer.java
@@ -25,7 +25,7 @@
 
 import java.io.Reader;
 
-public final class CustomAnalyzer extends Analyzer {
+public final class CustomAnalyzer extends Analyzer implements TokenFilterComposite {
 
     private final String tokenizerName;
     private final TokenizerFactory tokenizerFactory;
diff --git a/server/src/main/java/org/elasticsearch/index/analysis/CustomAnalyzerProvider.java b/server/src/main/java/org/elasticsearch/index/analysis/CustomAnalyzerProvider.java
index 8080a6af876a4..87f1fc254c7e6 100644
--- a/server/src/main/java/org/elasticsearch/index/analysis/CustomAnalyzerProvider.java
+++ b/server/src/main/java/org/elasticsearch/index/analysis/CustomAnalyzerProvider.java
@@ -19,6 +19,7 @@
 
 package org.elasticsearch.index.analysis;
 
+import org.apache.lucene.analysis.Analyzer;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.index.IndexSettings;
 import org.elasticsearch.index.mapper.TextFieldMapper;
@@ -31,11 +32,11 @@
  * A custom analyzer that is built out of a single {@link org.apache.lucene.analysis.Tokenizer} and a list
  * of {@link org.apache.lucene.analysis.TokenFilter}s.
  */
-public class CustomAnalyzerProvider extends AbstractIndexAnalyzerProvider<CustomAnalyzer> {
+public class CustomAnalyzerProvider extends AbstractIndexAnalyzerProvider<Analyzer> {
 
     private final Settings analyzerSettings;
 
-    private CustomAnalyzer customAnalyzer;
+    private Analyzer customAnalyzer;
 
     public CustomAnalyzerProvider(IndexSettings indexSettings,
                                   String name, Settings settings) {
@@ -43,16 +44,43 @@ public CustomAnalyzerProvider(IndexSettings indexSettings,
         this.analyzerSettings = settings;
     }
 
-    void build(final Map<String, TokenizerFactory> tokenizers, final Map<String, CharFilterFactory> charFilters,
-               final Map<String, TokenFilterFactory> tokenFilters) {
+    void build(final Map<String, TokenizerFactory> tokenizers,
+               final Map<String, CharFilterFactory> charFilters,
+               final Map<String, TokenFilterFactory> tokenFilters) {
+        customAnalyzer = create(name(), analyzerSettings, tokenizers, charFilters, tokenFilters);
+    }
+
+    /**
+     * Factory method that either returns a plain {@link CustomAnalyzer} if the components used for creation support both index
+     * and search time use, or a {@link ReloadableCustomAnalyzer} if the components are intended for search time use only.
+ */ + private static Analyzer create(String name, Settings analyzerSettings, Map tokenizers, + Map charFilters, + Map tokenFilters) { + int positionIncrementGap = TextFieldMapper.Defaults.POSITION_INCREMENT_GAP; + positionIncrementGap = analyzerSettings.getAsInt("position_increment_gap", positionIncrementGap); + int offsetGap = analyzerSettings.getAsInt("offset_gap", -1); + AnalyzerComponents components = createComponents(name, analyzerSettings, tokenizers, charFilters, tokenFilters); + if (components.analysisMode().equals(AnalysisMode.SEARCH_TIME)) { + return new ReloadableCustomAnalyzer(components, positionIncrementGap, offsetGap); + } else { + return new CustomAnalyzer(components.getTokenizerName(), components.getTokenizerFactory(), components.getCharFilters(), + components.getTokenFilters(), positionIncrementGap, offsetGap); + } + } + + static AnalyzerComponents createComponents(String name, Settings analyzerSettings, + final Map tokenizers, + final Map charFilters, + final Map tokenFilters) { String tokenizerName = analyzerSettings.get("tokenizer"); if (tokenizerName == null) { - throw new IllegalArgumentException("Custom Analyzer [" + name() + "] must be configured with a tokenizer"); + throw new IllegalArgumentException("Custom Analyzer [" + name + "] must be configured with a tokenizer"); } TokenizerFactory tokenizer = tokenizers.get(tokenizerName); if (tokenizer == null) { - throw new IllegalArgumentException("Custom Analyzer [" + name() + "] failed to find tokenizer under name " + + throw new IllegalArgumentException("Custom Analyzer [" + name + "] failed to find tokenizer under name " + "[" + tokenizerName + "]"); } @@ -61,7 +89,7 @@ void build(final Map tokenizers, final Map tokenizers, final Map an throw new IllegalStateException( "default analyzer must have the name [default] but was: [" + analyzers.get(DEFAULT_ANALYZER_NAME).name() + "]"); } - this.analyzers = unmodifiableMap(analyzers); - this.normalizers = unmodifiableMap(normalizers); - this.whitespaceNormalizers = unmodifiableMap(whitespaceNormalizers); + this.analyzers = unmodifiableMap(new HashMap<>(analyzers)); + this.normalizers = unmodifiableMap(new HashMap<>(normalizers)); + this.whitespaceNormalizers = unmodifiableMap(new HashMap<>(whitespaceNormalizers)); } /** @@ -65,6 +66,13 @@ public NamedAnalyzer get(String name) { return analyzers.get(name); } + /** + * Returns an (unmodifiable) map of containing the index analyzers + */ + public Map getAnalyzers() { + return analyzers; + } + /** * Returns a normalizer mapped to the given name or null if not present */ diff --git a/server/src/main/java/org/elasticsearch/index/analysis/NamedAnalyzer.java b/server/src/main/java/org/elasticsearch/index/analysis/NamedAnalyzer.java index 4831d88f3aa1f..b2c786472fd5c 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/NamedAnalyzer.java +++ b/server/src/main/java/org/elasticsearch/index/analysis/NamedAnalyzer.java @@ -112,8 +112,8 @@ public void checkAllowedInMode(AnalysisMode mode) { return; // everything allowed if this analyzer is in ALL mode } if (this.getAnalysisMode() != mode) { - if (analyzer instanceof CustomAnalyzer) { - TokenFilterFactory[] tokenFilters = ((CustomAnalyzer) analyzer).tokenFilters(); + if (analyzer instanceof TokenFilterComposite) { + TokenFilterFactory[] tokenFilters = ((TokenFilterComposite) analyzer).tokenFilters(); List offendingFilters = new ArrayList<>(); for (TokenFilterFactory tokenFilter : tokenFilters) { if (tokenFilter.getAnalysisMode() != mode) { diff --git 
a/server/src/main/java/org/elasticsearch/index/analysis/ReloadableCustomAnalyzer.java b/server/src/main/java/org/elasticsearch/index/analysis/ReloadableCustomAnalyzer.java new file mode 100644 index 0000000000000..2fbbf5b217fa0 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/analysis/ReloadableCustomAnalyzer.java @@ -0,0 +1,165 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.analysis; + +import org.apache.lucene.analysis.Analyzer; +import org.apache.lucene.analysis.TokenStream; +import org.apache.lucene.analysis.Tokenizer; +import org.apache.lucene.util.CloseableThreadLocal; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.analysis.CustomAnalyzerProvider.AnalyzerComponents; + +import java.io.Reader; +import java.util.Map; + +public final class ReloadableCustomAnalyzer extends Analyzer implements TokenFilterComposite { + + private volatile AnalyzerComponents components; + + private CloseableThreadLocal storedComponents = new CloseableThreadLocal<>(); + + private final int positionIncrementGap; + + private final int offsetGap; + + /** + * An alternative {@link ReuseStrategy} that allows swapping the stored the analyzer components when they change. + * This is used to change e.g. token filters in search time analyzers. 
+ */ + private static final ReuseStrategy UPDATE_STRATEGY = new ReuseStrategy() { + @Override + public TokenStreamComponents getReusableComponents(Analyzer analyzer, String fieldName) { + ReloadableCustomAnalyzer custom = (ReloadableCustomAnalyzer) analyzer; + AnalyzerComponents components = custom.getStoredComponents(); + if (components == null || custom.shouldReload(components)) { + custom.setStoredComponents(custom.getComponents()); + return null; + } + TokenStreamComponents tokenStream = (TokenStreamComponents) getStoredValue(analyzer); + assert tokenStream != null; + return tokenStream; + } + + @Override + public void setReusableComponents(Analyzer analyzer, String fieldName, TokenStreamComponents tokenStream) { + setStoredValue(analyzer, tokenStream); + } + }; + + ReloadableCustomAnalyzer(AnalyzerComponents components, int positionIncrementGap, int offsetGap) { + super(UPDATE_STRATEGY); + this.components = components; + this.positionIncrementGap = positionIncrementGap; + this.offsetGap = offsetGap; + } + + public AnalyzerComponents getComponents() { + return this.components; + } + + @Override + public TokenFilterFactory[] tokenFilters() { + return this.components.getTokenFilters(); + } + + @Override + public int getPositionIncrementGap(String fieldName) { + return this.positionIncrementGap; + } + + @Override + public int getOffsetGap(String field) { + if (this.offsetGap < 0) { + return super.getOffsetGap(field); + } + return this.offsetGap; + } + + public AnalysisMode getAnalysisMode() { + return this.components.analysisMode(); + } + + @Override + protected Reader initReaderForNormalization(String fieldName, Reader reader) { + final AnalyzerComponents components = getComponents(); + for (CharFilterFactory charFilter : components.getCharFilters()) { + reader = charFilter.normalize(reader); + } + return reader; + } + + @Override + protected TokenStream normalize(String fieldName, TokenStream in) { + final AnalyzerComponents components = getComponents(); + TokenStream result = in; + for (TokenFilterFactory filter : components.getTokenFilters()) { + result = filter.normalize(result); + } + return result; + } + + private boolean shouldReload(AnalyzerComponents source) { + return this.components != source; + } + + public synchronized void reload(String name, + Settings settings, + final Map tokenizers, + final Map charFilters, + final Map tokenFilters) { + AnalyzerComponents components = CustomAnalyzerProvider.createComponents(name, settings, tokenizers, charFilters, tokenFilters); + this.components = components; + } + + @Override + public void close() { + storedComponents.close(); + } + + private void setStoredComponents(AnalyzerComponents components) { + storedComponents.set(components); + } + + private AnalyzerComponents getStoredComponents() { + return storedComponents.get(); + } + + @Override + protected TokenStreamComponents createComponents(String fieldName) { + final AnalyzerComponents components = getStoredComponents(); + Tokenizer tokenizer = components.getTokenizerFactory().create(); + TokenStream tokenStream = tokenizer; + for (TokenFilterFactory tokenFilter : components.getTokenFilters()) { + tokenStream = tokenFilter.create(tokenStream); + } + return new TokenStreamComponents(tokenizer, tokenStream); + } + + @Override + protected Reader initReader(String fieldName, Reader reader) { + final AnalyzerComponents components = getStoredComponents(); + if (components.getCharFilters() != null && components.getCharFilters().length > 0) { + for (CharFilterFactory charFilter : 
components.getCharFilters()) { + reader = charFilter.create(reader); + } + } + return reader; + } +} diff --git a/server/src/main/java/org/elasticsearch/index/analysis/TokenFilterComposite.java b/server/src/main/java/org/elasticsearch/index/analysis/TokenFilterComposite.java new file mode 100644 index 0000000000000..94ca50ba38745 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/analysis/TokenFilterComposite.java @@ -0,0 +1,28 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.analysis; + +/** + * Analyzers that provide access to their token filters should implement this + */ +public interface TokenFilterComposite { + + TokenFilterFactory[] tokenFilters(); +} diff --git a/server/src/main/java/org/elasticsearch/index/mapper/FieldTypeLookup.java b/server/src/main/java/org/elasticsearch/index/mapper/FieldTypeLookup.java index 27d061d8c2788..0dc8b6a00c09e 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/FieldTypeLookup.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/FieldTypeLookup.java @@ -86,7 +86,6 @@ public FieldTypeLookup copyAndAddAll(String type, return new FieldTypeLookup(fullName, aliases); } - /** Returns the field for the given field */ public MappedFieldType get(String field) { String concreteField = aliasToConcreteName.getOrDefault(field, field); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java b/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java index 487a6ac4789e3..fc7c94372f16c 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java @@ -37,6 +37,7 @@ import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentFactory; @@ -46,8 +47,13 @@ import org.elasticsearch.index.AbstractIndexComponent; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexSortConfig; +import org.elasticsearch.index.analysis.AnalysisRegistry; +import org.elasticsearch.index.analysis.CharFilterFactory; import org.elasticsearch.index.analysis.IndexAnalyzers; import org.elasticsearch.index.analysis.NamedAnalyzer; +import org.elasticsearch.index.analysis.ReloadableCustomAnalyzer; +import org.elasticsearch.index.analysis.TokenFilterFactory; +import org.elasticsearch.index.analysis.TokenizerFactory; import org.elasticsearch.index.mapper.Mapper.BuilderContext; import 
diff --git a/server/src/main/java/org/elasticsearch/index/mapper/FieldTypeLookup.java b/server/src/main/java/org/elasticsearch/index/mapper/FieldTypeLookup.java
index 27d061d8c2788..0dc8b6a00c09e 100644
--- a/server/src/main/java/org/elasticsearch/index/mapper/FieldTypeLookup.java
+++ b/server/src/main/java/org/elasticsearch/index/mapper/FieldTypeLookup.java
@@ -86,7 +86,6 @@ public FieldTypeLookup copyAndAddAll(String type,
         return new FieldTypeLookup(fullName, aliases);
     }
 
-    /** Returns the field for the given field */
     public MappedFieldType get(String field) {
         String concreteField = aliasToConcreteName.getOrDefault(field, field);
diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java b/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java
index 487a6ac4789e3..fc7c94372f16c 100644
--- a/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java
+++ b/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java
@@ -37,6 +37,7 @@
 import org.elasticsearch.common.regex.Regex;
 import org.elasticsearch.common.settings.Setting;
 import org.elasticsearch.common.settings.Setting.Property;
+import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
 import org.elasticsearch.common.xcontent.NamedXContentRegistry;
 import org.elasticsearch.common.xcontent.XContentFactory;
@@ -46,8 +47,13 @@
 import org.elasticsearch.index.AbstractIndexComponent;
 import org.elasticsearch.index.IndexSettings;
 import org.elasticsearch.index.IndexSortConfig;
+import org.elasticsearch.index.analysis.AnalysisRegistry;
+import org.elasticsearch.index.analysis.CharFilterFactory;
 import org.elasticsearch.index.analysis.IndexAnalyzers;
 import org.elasticsearch.index.analysis.NamedAnalyzer;
+import org.elasticsearch.index.analysis.ReloadableCustomAnalyzer;
+import org.elasticsearch.index.analysis.TokenFilterFactory;
+import org.elasticsearch.index.analysis.TokenizerFactory;
 import org.elasticsearch.index.mapper.Mapper.BuilderContext;
 import org.elasticsearch.index.query.QueryShardContext;
 import org.elasticsearch.index.similarity.SimilarityService;
@@ -843,4 +849,20 @@ protected Analyzer getWrappedAnalyzer(String fieldName) {
             return defaultAnalyzer;
         }
     }
+
+    public synchronized void reloadSearchAnalyzers(AnalysisRegistry registry) throws IOException {
+        logger.info("reloading search analyzers");
+        // rebuild the analysis factories and reload any reloadable (search-time) analyzers in place
+        final Map<String, TokenizerFactory> tokenizerFactories = registry.buildTokenizerFactories(indexSettings);
+        final Map<String, CharFilterFactory> charFilterFactories = registry.buildCharFilterFactories(indexSettings);
+        final Map<String, TokenFilterFactory> tokenFilterFactories = registry.buildTokenFilterFactories(indexSettings);
+        final Map<String, Settings> settings = indexSettings.getSettings().getGroups("index.analysis.analyzer");
+        for (NamedAnalyzer namedAnalyzer : indexAnalyzers.getAnalyzers().values()) {
+            if (namedAnalyzer.analyzer() instanceof ReloadableCustomAnalyzer) {
+                ReloadableCustomAnalyzer analyzer = (ReloadableCustomAnalyzer) namedAnalyzer.analyzer();
+                Settings analyzerSettings = settings.get(namedAnalyzer.name());
+                analyzer.reload(namedAnalyzer.name(), analyzerSettings, tokenizerFactories, charFilterFactories, tokenFilterFactories);
+            }
+        }
+    }
 }
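A caller of this new method would typically resolve the per-index MapperService first; a minimal sketch under the assumption that an IndicesService and AnalysisRegistry are available (method and variable names below are illustrative, not part of the patch):

// Hypothetical caller: reload the search-time analyzers of a single index.
void reloadForIndex(IndicesService indicesService, AnalysisRegistry analysisRegistry, Index index) throws IOException {
    IndexService indexService = indicesService.indexServiceSafe(index);
    indexService.mapperService().reloadSearchAnalyzers(analysisRegistry);
}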
diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestReloadAnalyzersAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestReloadAnalyzersAction.java
new file mode 100644
index 0000000000000..245800f0d49c1
--- /dev/null
+++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestReloadAnalyzersAction.java
@@ -0,0 +1,54 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.rest.action.admin.indices;
+
+import org.elasticsearch.action.admin.indices.reloadanalyzer.ReloadAnalyzersRequest;
+import org.elasticsearch.action.support.IndicesOptions;
+import org.elasticsearch.client.node.NodeClient;
+import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.rest.BaseRestHandler;
+import org.elasticsearch.rest.RestController;
+import org.elasticsearch.rest.RestRequest;
+import org.elasticsearch.rest.action.RestToXContentListener;
+
+import java.io.IOException;
+
+import static org.elasticsearch.client.Requests.reloadAnalyzersRequest;
+
+public class RestReloadAnalyzersAction extends BaseRestHandler {
+
+    public RestReloadAnalyzersAction(Settings settings, RestController controller) {
+        super(settings);
+        controller.registerHandler(RestRequest.Method.GET, "/{index}/_reload_search_analyzers", this);
+        controller.registerHandler(RestRequest.Method.POST, "/{index}/_reload_search_analyzers", this);
+    }
+
+    @Override
+    public String getName() {
+        return "reload_search_analyzers_action";
+    }
+
+    @Override
+    public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException {
+        ReloadAnalyzersRequest reloadAnalyzersRequest = reloadAnalyzersRequest(Strings.splitStringByCommaToArray(request.param("index")));
+        reloadAnalyzersRequest.indicesOptions(IndicesOptions.fromRequest(request, reloadAnalyzersRequest.indicesOptions()));
+        return channel -> client.admin().indices().reloadAnalyzers(reloadAnalyzersRequest, new RestToXContentListener<>(channel));
+    }
+}
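For reference, the endpoint registered above can be exercised with the low-level REST client roughly as follows; the index name and host are illustrative, and both GET and POST are accepted.

// Hypothetical usage of the /{index}/_reload_search_analyzers endpoint added above.
// Requires: org.apache.http.HttpHost, org.apache.http.util.EntityUtils,
//           org.elasticsearch.client.{Request, Response, RestClient}, java.io.IOException
static void reloadViaRest() throws IOException {
    try (RestClient restClient = RestClient.builder(new HttpHost("localhost", 9200)).build()) {
        Response response = restClient.performRequest(new Request("POST", "/my_index/_reload_search_analyzers"));
        System.out.println(EntityUtils.toString(response.getEntity()));
    }
}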
diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/FragmentBuilderHelper.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/FragmentBuilderHelper.java
index 583516c5cd4c2..813a32ff03c0b 100644
--- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/FragmentBuilderHelper.java
+++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/FragmentBuilderHelper.java
@@ -26,8 +26,8 @@
 import org.apache.lucene.search.vectorhighlight.FieldFragList.WeightedFragInfo.SubInfo;
 import org.apache.lucene.search.vectorhighlight.FragmentsBuilder;
 import org.apache.lucene.util.CollectionUtil;
-import org.elasticsearch.index.analysis.CustomAnalyzer;
 import org.elasticsearch.index.analysis.NamedAnalyzer;
+import org.elasticsearch.index.analysis.TokenFilterComposite;
 import org.elasticsearch.index.analysis.TokenFilterFactory;
 import org.elasticsearch.index.mapper.MappedFieldType;
@@ -81,9 +81,8 @@ private static boolean containsBrokenAnalysis(Analyzer analyzer) {
         if (analyzer instanceof NamedAnalyzer) {
             analyzer = ((NamedAnalyzer) analyzer).analyzer();
         }
-        if (analyzer instanceof CustomAnalyzer) {
-            final CustomAnalyzer a = (CustomAnalyzer) analyzer;
-            TokenFilterFactory[] tokenFilters = a.tokenFilters();
+        if (analyzer instanceof TokenFilterComposite) {
+            final TokenFilterFactory[] tokenFilters = ((TokenFilterComposite) analyzer).tokenFilters();
             for (TokenFilterFactory tokenFilterFactory : tokenFilters) {
                 if (tokenFilterFactory.breaksFastVectorHighlighter()) {
                     return true;
diff --git a/server/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilder.java b/server/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilder.java
index 74b9437d67821..2b1b395c574de 100644
--- a/server/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilder.java
+++ b/server/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilder.java
@@ -31,9 +31,9 @@
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.common.xcontent.XContentParser.Token;
-import org.elasticsearch.index.analysis.CustomAnalyzer;
 import org.elasticsearch.index.analysis.NamedAnalyzer;
 import org.elasticsearch.index.analysis.ShingleTokenFilterFactory;
+import org.elasticsearch.index.analysis.TokenFilterComposite;
 import org.elasticsearch.index.analysis.TokenFilterFactory;
 import org.elasticsearch.index.mapper.MapperService;
 import org.elasticsearch.index.query.QueryShardContext;
@@ -675,9 +675,8 @@ private static ShingleTokenFilterFactory.Factory getShingleFilterFactory(Analyzer analyzer) {
         if (analyzer instanceof NamedAnalyzer) {
             analyzer = ((NamedAnalyzer)analyzer).analyzer();
         }
-        if (analyzer instanceof CustomAnalyzer) {
-            final CustomAnalyzer a = (CustomAnalyzer) analyzer;
-            final TokenFilterFactory[] tokenFilters = a.tokenFilters();
+        if (analyzer instanceof TokenFilterComposite) {
+            final TokenFilterFactory[] tokenFilters = ((TokenFilterComposite) analyzer).tokenFilters();
             for (TokenFilterFactory tokenFilterFactory : tokenFilters) {
                 if (tokenFilterFactory instanceof ShingleTokenFilterFactory) {
                     return ((ShingleTokenFilterFactory)tokenFilterFactory).getInnerFactory();
diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/reloadanalyzer/ReloadAnalyzersResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/reloadanalyzer/ReloadAnalyzersResponseTests.java
new file mode 100644
index 0000000000000..126a08d21e7b0
--- /dev/null
+++ b/server/src/test/java/org/elasticsearch/action/admin/indices/reloadanalyzer/ReloadAnalyzersResponseTests.java
@@ -0,0 +1,64 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.action.admin.indices.reloadanalyzer;
+
+import org.elasticsearch.action.support.DefaultShardOperationFailedException;
+import org.elasticsearch.action.support.broadcast.AbstractBroadcastResponseTestCase;
+import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.xcontent.XContentParser;
+
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+public class ReloadAnalyzersResponseTests extends AbstractBroadcastResponseTestCase<ReloadAnalyzersResponse> {
+
+    @Override
+    protected ReloadAnalyzersResponse createTestInstance(int totalShards, int successfulShards, int failedShards,
+            List<DefaultShardOperationFailedException> failures) {
+        Map<String, List<String>> reloadedIndicesNodes = new HashMap<>();
+        int randomIndices = randomIntBetween(0, 5);
+        for (int i = 0; i < randomIndices; i++) {
+            List<String> randomNodeIds = Arrays.asList(generateRandomStringArray(5, 5, false, true));
+            reloadedIndicesNodes.put(randomAlphaOfLengthBetween(5, 10), randomNodeIds);
+        }
+        return new ReloadAnalyzersResponse(totalShards, successfulShards, failedShards, failures, reloadedIndicesNodes);
+    }
+
+    @Override
+    protected ReloadAnalyzersResponse doParseInstance(XContentParser parser) throws IOException {
+        return ReloadAnalyzersResponse.fromXContent(parser);
+    }
+
+    @Override
+    public void testToXContent() {
+        Map<String, List<String>> reloadedIndicesNodes = Collections.singletonMap("index", Collections.singletonList("nodeId"));
+        ReloadAnalyzersResponse response = new ReloadAnalyzersResponse(10, 5, 5, null, reloadedIndicesNodes);
+        String output = Strings.toString(response);
+        assertEquals(
+            "{\"_shards\":{\"total\":10,\"successful\":5,\"failed\":5},"
+                + "\"reloaded_nodes\":[{\"index\":\"index\",\"reloaded_node_ids\":[\"nodeId\"]}]"
+                + "}",
+            output);
+    }
+}
diff --git a/server/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java b/server/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java
index d8c120e492d31..6bdfc167dec8b 100644
--- a/server/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java
+++ b/server/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java
@@ -19,7 +19,9 @@
 
 package org.elasticsearch.index.mapper;
 
+import org.apache.lucene.analysis.TokenStream;
 import org.elasticsearch.ExceptionsHelper;
+import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.compress.CompressedXContent;
@@ -27,11 +29,21 @@
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.common.xcontent.XContentType;
+import org.elasticsearch.env.Environment;
 import org.elasticsearch.index.IndexService;
+import org.elasticsearch.index.IndexSettings;
+import org.elasticsearch.index.analysis.AnalysisMode;
+import org.elasticsearch.index.analysis.AnalysisRegistry;
+import org.elasticsearch.index.analysis.IndexAnalyzers;
+import org.elasticsearch.index.analysis.NamedAnalyzer;
+import org.elasticsearch.index.analysis.ReloadableCustomAnalyzer;
+import org.elasticsearch.index.analysis.TokenFilterFactory;
 import org.elasticsearch.index.mapper.KeywordFieldMapper.KeywordFieldType;
 import org.elasticsearch.index.mapper.MapperService.MergeReason;
 import org.elasticsearch.index.mapper.NumberFieldMapper.NumberFieldType;
 import org.elasticsearch.indices.InvalidTypeNameException;
+import org.elasticsearch.indices.analysis.AnalysisModule.AnalysisProvider;
+import org.elasticsearch.plugins.AnalysisPlugin;
 import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.test.ESSingleNodeTestCase;
 import org.elasticsearch.test.InternalSettingsPlugin;
@@ -39,6 +51,8 @@
 import java.io.IOException;
 import java.util.Collection;
 import java.util.Collections;
+import java.util.List;
+import java.util.Map;
 import java.util.concurrent.ExecutionException;
 
 import static org.hamcrest.CoreMatchers.containsString;
@@ -49,7 +63,7 @@ public class MapperServiceTests extends ESSingleNodeTestCase {
 
     @Override
     protected Collection<Class<? extends Plugin>> getPlugins() {
-        return Collections.singleton(InternalSettingsPlugin.class);
+        return List.of(InternalSettingsPlugin.class, ReloadableFilterPlugin.class);
     }
 
     public void testTypeNameStartsWithIllegalDot() {
@@ -434,4 +448,97 @@ public void testMappingRecoverySkipFieldNameLengthLimit() throws Throwable {
         assertEquals(testString, documentMapper.mappers().getMapper(testString).simpleName());
     }
 
+    public void testReloadSearchAnalyzers() throws IOException {
+        Settings settings = Settings.builder().put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
+            .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1)
+            .put("index.analysis.analyzer.reloadableAnalyzer.type", "custom")
+            .put("index.analysis.analyzer.reloadableAnalyzer.tokenizer", "standard")
+            .putList("index.analysis.analyzer.reloadableAnalyzer.filter", "myReloadableFilter").build();
+
+        MapperService mapperService = createIndex("test_index", settings).mapperService();
+        CompressedXContent mapping = new CompressedXContent(BytesReference.bytes(
+            XContentFactory.jsonBuilder().startObject().startObject("_doc")
+                .startObject("properties")
+                    .startObject("field")
+                        .field("type", "text")
+                        .field("analyzer", "simple")
+                        .field("search_analyzer", "reloadableAnalyzer")
+                        .field("search_quote_analyzer", "stop")
+                    .endObject()
+                    .startObject("otherField")
+                        .field("type", "text")
+                        .field("analyzer", "standard")
+                        .field("search_analyzer", "simple")
+                        .field("search_quote_analyzer", "reloadableAnalyzer")
+                    .endObject()
+                .endObject()
+            .endObject().endObject()));
+
+        mapperService.merge("_doc", mapping, MergeReason.MAPPING_UPDATE);
+        IndexAnalyzers current = mapperService.getIndexAnalyzers();
+
+        ReloadableCustomAnalyzer originalReloadableAnalyzer = (ReloadableCustomAnalyzer) current.get("reloadableAnalyzer").analyzer();
+        TokenFilterFactory[] originalTokenFilters = originalReloadableAnalyzer.getComponents().getTokenFilters();
+        assertEquals(1, originalTokenFilters.length);
+        assertEquals("myReloadableFilter", originalTokenFilters[0].name());
+
+        // now reload; this should swap the token filter factories inside the analyzer
+        mapperService.reloadSearchAnalyzers(getInstanceFromNode(AnalysisRegistry.class));
+        IndexAnalyzers updatedAnalyzers = mapperService.getIndexAnalyzers();
+        assertSame(current, updatedAnalyzers);
+        assertSame(current.getDefaultIndexAnalyzer(), updatedAnalyzers.getDefaultIndexAnalyzer());
+        assertSame(current.getDefaultSearchAnalyzer(), updatedAnalyzers.getDefaultSearchAnalyzer());
+        assertSame(current.getDefaultSearchQuoteAnalyzer(), updatedAnalyzers.getDefaultSearchQuoteAnalyzer());
+
+        assertFalse(assertSameContainedFilters(originalTokenFilters, current.get("reloadableAnalyzer")));
+        assertFalse(assertSameContainedFilters(originalTokenFilters, mapperService.fullName("field").searchAnalyzer()));
+        assertFalse(assertSameContainedFilters(originalTokenFilters, mapperService.fullName("otherField").searchQuoteAnalyzer()));
+    }
+
+    private boolean assertSameContainedFilters(TokenFilterFactory[] originalTokenFilter, NamedAnalyzer updatedAnalyzer) {
+        ReloadableCustomAnalyzer updatedReloadableAnalyzer = (ReloadableCustomAnalyzer) updatedAnalyzer.analyzer();
+        TokenFilterFactory[] newTokenFilters = updatedReloadableAnalyzer.getComponents().getTokenFilters();
+        assertEquals(originalTokenFilter.length, newTokenFilters.length);
+        int i = 0;
+        for (TokenFilterFactory tf : newTokenFilters) {
+            assertEquals(originalTokenFilter[i].name(), tf.name());
+            if (originalTokenFilter[i] != tf) {
+                return false;
+            }
+            i++;
+        }
+        return true;
+    }
+
+    public static final class ReloadableFilterPlugin extends Plugin implements AnalysisPlugin {
+
+        @Override
+        public Map<String, AnalysisProvider<TokenFilterFactory>> getTokenFilters() {
+            return Collections.singletonMap("myReloadableFilter", new AnalysisProvider<TokenFilterFactory>() {
+
+                @Override
+                public TokenFilterFactory get(IndexSettings indexSettings, Environment environment, String name, Settings settings)
+                        throws IOException {
+                    return new TokenFilterFactory() {
+
+                        @Override
+                        public String name() {
+                            return "myReloadableFilter";
+                        }
+
+                        @Override
+                        public TokenStream create(TokenStream tokenStream) {
+                            return tokenStream;
+                        }
+
+                        @Override
+                        public AnalysisMode getAnalysisMode() {
+                            return AnalysisMode.SEARCH_TIME;
+                        }
+                    };
+                }
+            });
+        }
+    }
 }