@@ -808,7 +808,7 @@ public void writeGenericValue(@Nullable Object value) throws IOException {
         if (writer != null) {
             writer.write(this, value);
         } else {
-            throw new IOException("can not write type [" + type + "]");
+            throw new IllegalArgumentException("can not write type [" + type + "]");
         }
     }

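The hunk above changes writeGenericValue so that an unsupported value type is now reported as an IllegalArgumentException, i.e. a bug on the caller's side, while IOException stays reserved for genuine stream failures. A minimal caller-side sketch of the new behaviour (the class below is made up for illustration and assumes BytesStreamOutput, a StreamOutput implementation, is on the classpath):

    import org.elasticsearch.common.io.stream.BytesStreamOutput;

    import java.io.IOException;

    // Illustrative sketch only: shows the caller-visible difference after this change.
    public class WriteGenericValueExample {
        public static void main(String[] args) throws IOException {
            try (BytesStreamOutput out = new BytesStreamOutput()) {
                out.writeGenericValue("strings have a registered writer");   // fine
                out.writeGenericValue(new Object());                          // no writer for Object
            } catch (IllegalArgumentException e) {
                // after this change: "can not write type [class java.lang.Object]"
                // before this change the same failure surfaced as an IOException
            }
        }
    }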
@@ -28,6 +28,7 @@
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.util.Accountable;
 import org.apache.lucene.util.RamUsageEstimator;
+import org.elasticsearch.common.CheckedSupplier;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.cache.Cache;
 import org.elasticsearch.common.cache.CacheBuilder;
@@ -43,6 +44,7 @@
 import org.elasticsearch.common.util.concurrent.ConcurrentCollections;

 import java.io.Closeable;
+import java.io.IOException;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.Iterator;
@@ -115,8 +117,8 @@ public void onRemoval(RemovalNotification<Key, BytesReference> notification) {
     // NORELEASE The cacheKeyRenderer has been added in order to debug
     // https://github.com/elastic/elasticsearch/issues/32827, it should be
     // removed when this issue is solved
-    BytesReference getOrCompute(CacheEntity cacheEntity, Supplier<BytesReference> loader,
-            DirectoryReader reader, BytesReference cacheKey, Supplier<String> cacheKeyRenderer) throws Exception {
+    BytesReference getOrCompute(CacheEntity cacheEntity, CheckedSupplier<BytesReference, IOException> loader,
+            DirectoryReader reader, BytesReference cacheKey, Supplier<String> cacheKeyRenderer) throws Exception {
         assert reader.getReaderCacheHelper() != null;
         final Key key = new Key(cacheEntity, reader.getReaderCacheHelper().getKey(), cacheKey);
         Loader cacheLoader = new Loader(cacheEntity, loader);
@@ -157,10 +159,10 @@ void invalidate(CacheEntity cacheEntity, DirectoryReader reader, BytesReference
     private static class Loader implements CacheLoader<Key, BytesReference> {

         private final CacheEntity entity;
-        private final Supplier<BytesReference> loader;
+        private final CheckedSupplier<BytesReference, IOException> loader;
         private boolean loaded;

-        Loader(CacheEntity entity, Supplier<BytesReference> loader) {
+        Loader(CacheEntity entity, CheckedSupplier<BytesReference, IOException> loader) {
             this.entity = entity;
             this.loader = loader;
         }
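In the hunks above the request cache's Loader switches from java.util.function.Supplier to CheckedSupplier<BytesReference, IOException>, so the loading lambda can throw the checked IOException directly instead of wrapping it. A rough sketch of the idea, assuming the usual Elasticsearch shape of CheckedSupplier (a single get() method that declares the exception type); the class and field names here are illustrative only:

    import org.elasticsearch.common.bytes.BytesReference;
    import org.elasticsearch.common.io.stream.BytesStreamOutput;

    import java.io.IOException;

    public class CheckedSupplierSketch {

        // Rough shape of the functional interface this hunk switches to; the real
        // org.elasticsearch.common.CheckedSupplier may differ in minor details.
        @FunctionalInterface
        interface CheckedSupplier<T, E extends Exception> {
            T get() throws E;
        }

        // java.util.function.Supplier cannot declare a checked exception, so an
        // IOException thrown while serializing had to be wrapped; with a
        // CheckedSupplier the loader lambda can simply throw it.
        static final CheckedSupplier<BytesReference, IOException> LOADER = () -> {
            try (BytesStreamOutput out = new BytesStreamOutput()) {
                out.writeString("serialized shard-level result");
                return out.bytes();
            }
        };
    }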
@@ -45,7 +45,9 @@
 import org.elasticsearch.cluster.routing.RecoverySource;
 import org.elasticsearch.cluster.routing.ShardRouting;
 import org.elasticsearch.cluster.service.ClusterService;
+import org.elasticsearch.common.CheckedConsumer;
 import org.elasticsearch.common.CheckedFunction;
+import org.elasticsearch.common.CheckedSupplier;
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.breaker.CircuitBreaker;
 import org.elasticsearch.common.bytes.BytesReference;
@@ -1376,12 +1378,7 @@ public void loadIntoContext(ShardSearchRequest request, SearchContext context, Q
             () -> "Shard: " + request.shardId() + "\nSource:\n" + request.source(),
             out -> {
                 queryPhase.execute(context);
-                try {
-                    context.queryResult().writeToNoId(out);
-
-                } catch (IOException e) {
-                    throw new AssertionError("Could not serialize response", e);
-                }
+                context.queryResult().writeToNoId(out);
                 loadedFromCache[0] = false;
             });

@@ -1420,9 +1417,9 @@ public ByteSizeValue getTotalIndexingBufferBytes() {
      * @return the contents of the cache or the result of calling the loader
      */
     private BytesReference cacheShardLevelResult(IndexShard shard, DirectoryReader reader, BytesReference cacheKey,
-            Supplier<String> cacheKeyRenderer, Consumer<StreamOutput> loader) throws Exception {
+            Supplier<String> cacheKeyRenderer, CheckedConsumer<StreamOutput, IOException> loader) throws Exception {
         IndexShardCacheEntity cacheEntity = new IndexShardCacheEntity(shard);
-        Supplier<BytesReference> supplier = () -> {
+        CheckedSupplier<BytesReference, IOException> supplier = () -> {
             /* BytesStreamOutput allows to pass the expected size but by default uses
              * BigArrays.PAGE_SIZE_IN_BYTES which is 16k. A common cached result ie.
              * a date histogram with 3 buckets is ~100byte so 16k might be very wasteful
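Because cacheShardLevelResult now accepts a CheckedConsumer<StreamOutput, IOException> and builds a CheckedSupplier from it, the caller in loadIntoContext no longer needs the try/catch that rethrew IOException as an AssertionError; the exception simply propagates through the checked functional interfaces. A hedged sketch of that adaptation (class and method names invented for illustration, not taken from the PR), also showing the expected-size constructor that the in-code comment above alludes to:

    import org.elasticsearch.common.CheckedConsumer;
    import org.elasticsearch.common.CheckedSupplier;
    import org.elasticsearch.common.bytes.BytesReference;
    import org.elasticsearch.common.io.stream.BytesStreamOutput;
    import org.elasticsearch.common.io.stream.StreamOutput;

    import java.io.IOException;

    public class CheckedLoaderAdapterSketch {

        // Adapts the new loader shape (CheckedConsumer<StreamOutput, IOException>) into the
        // CheckedSupplier<BytesReference, IOException> that the request cache consumes.
        // Any IOException raised while writing propagates as-is; no AssertionError wrapping.
        static CheckedSupplier<BytesReference, IOException> adapt(CheckedConsumer<StreamOutput, IOException> loader) {
            return () -> {
                // passing an expected size avoids allocating a full 16k page for a ~100 byte
                // result, which is what the in-code comment above is getting at
                try (BytesStreamOutput out = new BytesStreamOutput(512)) {
                    loader.accept(out);
                    return out.bytes();
                }
            };
        }
    }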
@@ -349,6 +349,13 @@ public void testSimpleStreams() throws Exception {
         assertThat(jdt.getZonedDateTime().toInstant().toEpochMilli(), equalTo(123456L));
         assertThat(jdt.getZonedDateTime().getZone(), equalTo(ZoneId.of("America/Los_Angeles")));
         assertEquals(0, in.available());
+        IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, () -> out.writeGenericValue(new Object() {
+            @Override
+            public String toString() {
+                return "This object cannot be serialized by writeGeneric method";
+            }
+        }));
+        assertThat(ex.getMessage(), containsString("can not write type"));
         in.close();
         out.close();
     }
@@ -32,6 +32,7 @@
 import org.apache.lucene.search.TopDocs;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.BytesRef;
+import org.elasticsearch.common.CheckedSupplier;
 import org.elasticsearch.common.bytes.AbstractBytesReference;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.io.stream.BytesStreamOutput;
@@ -49,7 +50,6 @@
 import java.io.IOException;
 import java.util.Arrays;
 import java.util.concurrent.atomic.AtomicBoolean;
-import java.util.function.Supplier;

 public class IndicesRequestCacheTests extends ESTestCase {

@@ -331,7 +331,7 @@ public Iterable<Field> newDoc(int id, String value) {
             StringField.TYPE_STORED));
     }

-    private static class Loader implements Supplier<BytesReference> {
+    private static class Loader implements CheckedSupplier<BytesReference, IOException> {

         private final DirectoryReader reader;
         private final int id;