@@ -24,7 +24,6 @@
 import org.apache.lucene.util.TestUtil;
 import org.elasticsearch.Version;
 import org.elasticsearch.VersionTests;
-import org.elasticsearch.action.admin.indices.settings.get.GetSettingsResponse;
 import org.elasticsearch.action.get.GetResponse;
 import org.elasticsearch.action.search.SearchRequestBuilder;
 import org.elasticsearch.action.search.SearchResponse;
@@ -53,7 +52,6 @@
 import org.elasticsearch.search.aggregations.AggregationBuilders;
 import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
 import org.elasticsearch.search.aggregations.bucket.terms.Terms;
-import org.elasticsearch.search.sort.SortOrder;
 import org.elasticsearch.test.ESIntegTestCase;
 import org.elasticsearch.test.InternalSettingsPlugin;
 import org.elasticsearch.test.OldIndexUtils;
@@ -229,7 +227,6 @@ void assertOldIndexWorks(String index) throws Exception {
         // node startup
         upgradeIndexFolder();
         importIndex(indexName);
-        assertBasicSearchWorks(indexName);
         assertAllSearchWorks(indexName);
         assertBasicAggregationWorks(indexName);
         assertRealtimeGetWorks(indexName);
@@ -241,31 +238,6 @@ void assertOldIndexWorks(String index) throws Exception {
         unloadIndex(indexName);
     }
 
-    void assertBasicSearchWorks(String indexName) {
-        logger.info("--> testing basic search");
-        SearchRequestBuilder searchReq = client().prepareSearch(indexName).setQuery(QueryBuilders.matchAllQuery());
-        SearchResponse searchRsp = searchReq.get();
-        ElasticsearchAssertions.assertNoFailures(searchRsp);
-        long numDocs = searchRsp.getHits().getTotalHits();
-        logger.info("Found {} in old index", numDocs);
-
-        logger.info("--> testing basic search with sort");
-        searchReq.addSort("long_sort", SortOrder.ASC);
-        ElasticsearchAssertions.assertNoFailures(searchReq.get());
-
-        logger.info("--> testing exists filter");
-        searchReq = client().prepareSearch(indexName).setQuery(QueryBuilders.existsQuery("string"));
-        searchRsp = searchReq.get();
-        ElasticsearchAssertions.assertNoFailures(searchRsp);
-        assertEquals(numDocs, searchRsp.getHits().getTotalHits());
-        GetSettingsResponse getSettingsResponse = client().admin().indices().prepareGetSettings(indexName).get();
-        searchReq = client().prepareSearch(indexName)
-            .setQuery(QueryBuilders.existsQuery("field.with.dots"));
-        searchRsp = searchReq.get();
-        ElasticsearchAssertions.assertNoFailures(searchRsp);
-        assertEquals(numDocs, searchRsp.getHits().getTotalHits());
-    }
-
     boolean findPayloadBoostInExplanation(Explanation expl) {
         if (expl.getDescription().startsWith("payloadBoost=") && expl.getValue() != 1f) {
             return true;
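The transport-client assertions removed above are re-implemented against the low-level REST client in the next file. As a rough sketch of the translation (illustrative only, not part of the diff; the two client() calls below return the transport Client and the REST client respectively), the removed exists-filter check maps onto a JSON body sent through performRequest:

    // Transport client (removed above): the query is built with QueryBuilders.
    SearchResponse searchRsp = client().prepareSearch(indexName)
            .setQuery(QueryBuilders.existsQuery("string")).get();

    // Low-level REST client (added below): the same query as a raw JSON body.
    Response restRsp = client().performRequest("GET", "/" + indexName + "/_search",
            Collections.emptyMap(),
            new StringEntity("{ \"query\": { \"exists\": { \"field\": \"string\" } } }",
                    ContentType.APPLICATION_JSON));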
@@ -24,12 +24,17 @@
 import org.apache.http.entity.StringEntity;
 import org.apache.http.util.EntityUtils;
 import org.elasticsearch.Version;
+import org.elasticsearch.client.Response;
 import org.elasticsearch.common.Booleans;
+import org.elasticsearch.common.CheckedFunction;
 import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentHelper;
 import org.elasticsearch.common.xcontent.json.JsonXContent;
+import org.elasticsearch.common.xcontent.support.XContentMapValues;
 import org.elasticsearch.test.rest.ESRestTestCase;
 
 import java.io.IOException;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.Locale;
 import java.util.Map;
@@ -38,6 +43,7 @@
 
 import static java.util.Collections.emptyMap;
 import static java.util.Collections.singletonMap;
+import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
 import static org.hamcrest.Matchers.containsString;
 
 /**
@@ -64,6 +70,95 @@ protected boolean preserveReposUponCompletion() {
         return true;
     }
 
+    public void testSearch() throws Exception {
+        if (runningAgainstOldCluster) {
+            XContentBuilder mappingsAndSettings = jsonBuilder();
+            mappingsAndSettings.startObject();
+            {
+                mappingsAndSettings.startObject("settings");
+                mappingsAndSettings.field("number_of_shards", 1);
+                mappingsAndSettings.field("number_of_replicas", 0);
+                mappingsAndSettings.endObject();
+            }
+            {
+                mappingsAndSettings.startObject("mappings");
+                mappingsAndSettings.startObject("doc");
+                mappingsAndSettings.startObject("properties");
+                {
+                    mappingsAndSettings.startObject("string");
+                    mappingsAndSettings.field("type", "text");
+                    mappingsAndSettings.endObject();
+                }
+                {
+                    mappingsAndSettings.startObject("dots_in_field_names");
+                    mappingsAndSettings.field("type", "text");
+                    mappingsAndSettings.endObject();
+                }
+                mappingsAndSettings.endObject();
+                mappingsAndSettings.endObject();
+                mappingsAndSettings.endObject();
+            }
+            mappingsAndSettings.endObject();
+            client().performRequest("PUT", "/index", Collections.emptyMap(),
+                new StringEntity(mappingsAndSettings.string(), ContentType.APPLICATION_JSON));
+
+            int numDocs = randomIntBetween(2000, 3000);
+            indexRandomDocuments("index", numDocs, true, i -> {
+                return JsonXContent.contentBuilder().startObject()
+                    .field("string", randomAlphaOfLength(10))
+                    .field("int", randomInt(100))
+                    .field("float", randomFloat())
+                    // be sure to create a "proper" boolean (True, False) for the first document so that automapping is correct
+                    .field("bool", i > 0 && supportsLenientBooleans ? randomLenientBoolean() : randomBoolean())
+                    .field("field.with.dots", randomAlphaOfLength(10))
+                    // TODO a binary field
+                    .endObject();
+            });
+            client().performRequest("POST", "/_flush");
+        }
+        assertBasicSearchWorks();
+    }
+
+    void assertBasicSearchWorks() throws IOException {
+        logger.info("--> testing basic search");
+        Map<String, Object> response = toMap(client().performRequest("GET", "/index/_search"));
+        assertNoFailures(response);
+        int numDocs1 = (int) XContentMapValues.extractValue("hits.total", response);
+        logger.info("Found {} in old index", numDocs1);
+
+        logger.info("--> testing basic search with sort");
+        String searchRequestBody = "{ \"sort\": [{ \"int\" : \"asc\" }]}";
+        response = toMap(client().performRequest("GET", "/index/_search", Collections.emptyMap(),
+            new StringEntity(searchRequestBody, ContentType.APPLICATION_JSON)));
+        assertNoFailures(response);
+        int numDocs2 = (int) XContentMapValues.extractValue("hits.total", response);
+        assertEquals(numDocs1, numDocs2);
+
+        logger.info("--> testing exists filter");
+        searchRequestBody = "{ \"query\": { \"exists\" : {\"field\": \"string\"} }}";
+        response = toMap(client().performRequest("GET", "/index/_search", Collections.emptyMap(),
+            new StringEntity(searchRequestBody, ContentType.APPLICATION_JSON)));
+        assertNoFailures(response);
+        numDocs2 = (int) XContentMapValues.extractValue("hits.total", response);
+        assertEquals(numDocs1, numDocs2);
+
+        searchRequestBody = "{ \"query\": { \"exists\" : {\"field\": \"field.with.dots\"} }}";
+        response = toMap(client().performRequest("GET", "/index/_search", Collections.emptyMap(),
+            new StringEntity(searchRequestBody, ContentType.APPLICATION_JSON)));
+        assertNoFailures(response);
+        numDocs2 = (int) XContentMapValues.extractValue("hits.total", response);
+        assertEquals(numDocs1, numDocs2);
+    }
+
+    static Map<String, Object> toMap(Response response) throws IOException {
+        return XContentHelper.convertToMap(JsonXContent.jsonXContent, EntityUtils.toString(response.getEntity()), false);
+    }
+
+    static void assertNoFailures(Map<String, Object> response) {
+        int failed = (int) XContentMapValues.extractValue("_shards.failed", response);
+        assertEquals(0, failed);
+    }
+
     /**
      * Tests that a single document survives. Super basic smoke test.
      */
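The helpers above lean on XContentMapValues.extractValue, which splits a dot-separated path and descends through the nested Maps produced by toMap; that is how "hits.total" and "_shards.failed" resolve against the parsed JSON response. A minimal self-contained illustration (not part of the change; the maps are hypothetical stand-ins for a parsed response):

    // Hypothetical parsed response, equivalent to {"hits":{"total":42}}.
    Map<String, Object> hits = new HashMap<>();
    hits.put("total", 42);
    Map<String, Object> parsed = new HashMap<>();
    parsed.put("hits", hits);

    // extractValue walks "hits" then "total" through the nested maps.
    int total = (int) XContentMapValues.extractValue("hits.total", parsed);
    assertEquals(42, total);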
@@ -93,13 +188,14 @@ public void testRandomDocumentsAndSnapshot() throws IOException {
          * or not we have one. */
         shouldHaveTranslog = randomBoolean();
         logger.info("Creating {} documents", count);
-        indexRandomDocuments(index, count, true);
+        indexRandomDocuments(index, count, true, i -> jsonBuilder().startObject().field("field", "value").endObject());
         createSnapshot();
         // Explicitly flush so we're sure to have a bunch of documents in the Lucene index
         client().performRequest("POST", "/_flush");
         if (shouldHaveTranslog) {
             // Update a few documents so we are sure to have a translog
-            indexRandomDocuments(index, count / 10, false /* Flushing here would invalidate the whole thing....*/);
+            indexRandomDocuments(index, count / 10, false /* Flushing here would invalidate the whole thing....*/,
+                i -> jsonBuilder().startObject().field("field", "value").endObject());
         }
 
         // Record how many documents we built so we can compare later
@@ -137,20 +233,11 @@
 
     // TODO tests for upgrades after shrink. We've had trouble with shrink in the past.
 
-    private void indexRandomDocuments(String index, int count, boolean flushAllowed) throws IOException {
+    private void indexRandomDocuments(String index, int count, boolean flushAllowed,
+            CheckedFunction<Integer, XContentBuilder, IOException> docSupplier) throws IOException {
         for (int i = 0; i < count; i++) {
-            XContentBuilder doc = JsonXContent.contentBuilder().startObject(); {
-                doc.field("string", randomAlphaOfLength(10));
-                doc.field("int", randomInt(100));
-                doc.field("float", randomFloat());
-                // be sure to create a "proper" boolean (True, False) for the first document so that automapping is correct
-                doc.field("bool", i > 0 && supportsLenientBooleans ? randomLenientBoolean() : randomBoolean());
-                doc.field("field.with.dots", randomAlphaOfLength(10));
-                // TODO a binary field
-            }
-            doc.endObject();
             client().performRequest("POST", "/" + index + "/doc/" + i, emptyMap(),
-                new StringEntity(doc.string(), ContentType.APPLICATION_JSON));
+                new StringEntity(docSupplier.apply(i).string(), ContentType.APPLICATION_JSON));
             if (rarely()) {
                 client().performRequest("POST", "/_refresh");
             }
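The new docSupplier parameter is an org.elasticsearch.common.CheckedFunction, a Function variant whose apply may throw a checked exception; that is what allows the XContentBuilder lambdas above even though the builder methods declare IOException. A small usage sketch (illustrative only, not part of the diff; the "count" field is hypothetical):

    // apply() declares IOException, so builder calls can live in the lambda;
    // a plain java.util.function.Function would not compile here.
    CheckedFunction<Integer, XContentBuilder, IOException> docSupplier =
            i -> jsonBuilder().startObject().field("count", i).endObject();
    XContentBuilder doc = docSupplier.apply(0);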