@@ -294,8 +294,6 @@ public ClusterState execute(ClusterState currentState) throws Exception {
List<IndexTemplateMetaData> templates =
MetaDataIndexTemplateService.findTemplates(currentState.metaData(), request.index());

Map<String, Map<String, String>> customs = new HashMap<>();

// add the request mapping
Map<String, Map<String, Object>> mappings = new HashMap<>();

@@ -513,10 +511,6 @@ public ClusterState execute(ClusterState currentState) throws Exception {
indexMetaDataBuilder.putAlias(aliasMetaData);
}

for (Map.Entry<String, Map<String, String>> customEntry : customs.entrySet()) {
indexMetaDataBuilder.putCustom(customEntry.getKey(), customEntry.getValue());
}

indexMetaDataBuilder.state(request.state());

final IndexMetaData indexMetaData;
@@ -193,7 +193,7 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp

private static final String DATA_BLOB_PREFIX = "__";

private final Settings settings;
protected final Settings settings;

private final RateLimiter snapshotRateLimiter;

@@ -66,10 +66,12 @@ public class FsRepository extends BlobStoreRepository {
Setting.boolSetting("repositories.fs.compress", false, Property.NodeScope);
private final Environment environment;

private boolean chunkSizeInitialized = false;
private ByteSizeValue chunkSize;

private final BlobPath basePath;

private boolean compressInitialized = false;
private boolean compress;

/**
@@ -100,13 +102,6 @@ public FsRepository(RepositoryMetaData metadata, Environment environment,
}
}

if (CHUNK_SIZE_SETTING.exists(metadata.settings())) {
this.chunkSize = CHUNK_SIZE_SETTING.get(metadata.settings());
} else {
this.chunkSize = REPOSITORIES_CHUNK_SIZE_SETTING.get(environment.settings());
}
this.compress = COMPRESS_SETTING.exists(metadata.settings())
? COMPRESS_SETTING.get(metadata.settings()) : REPOSITORIES_COMPRESS_SETTING.get(environment.settings());
this.basePath = BlobPath.cleanPath();
}

@@ -117,13 +112,36 @@ protected BlobStore createBlobStore() throws Exception {
return new FsBlobStore(environment.settings(), locationFile);
}

/*
* Note: this method gets called by the super constructor, so we can't rely on instance fields in this class having been initialized
* yet.
*/
@Override
protected boolean isCompress() {
if (!compressInitialized) {
this.compress = COMPRESS_SETTING.exists(metadata.settings()) ? COMPRESS_SETTING.get(metadata.settings())
: REPOSITORIES_COMPRESS_SETTING.get(settings);
compressInitialized = true;
}

return compress;
}

/*
* Note: this method gets called by the super constructor, so we can't rely on instance fields in this class having been initialized
* yet.
*/
@Override
protected ByteSizeValue chunkSize() {
if (!chunkSizeInitialized) {
if (CHUNK_SIZE_SETTING.exists(metadata.settings())) {
this.chunkSize = CHUNK_SIZE_SETTING.get(metadata.settings());
} else {
this.chunkSize = REPOSITORIES_CHUNK_SIZE_SETTING.get(settings);
}
chunkSizeInitialized = true;
}

return chunkSize;
}
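
The lazy getters above exist because, as the in-code note says, the BlobStoreRepository super constructor ends up calling isCompress() and chunkSize() before FsRepository's own field initializers have run. A minimal, self-contained sketch of that Java initialization-order pitfall and of the flag-guarded workaround (Base and Derived are illustrative stand-ins, not the real repository classes):

// Illustrative sketch only; Base/Derived stand in for the repository classes.
class Base {
    Base() {
        // Calling an overridable method from a constructor runs the subclass
        // override before the subclass's field initializers have executed.
        System.out.println("seen by Base(): " + value());
    }

    protected int value() {
        return -1;
    }
}

class Derived extends Base {
    private int eager = 42;      // still 0 while Base() is running

    private boolean initialized; // false by default until first use
    private int lazy;

    @Override
    protected int value() {
        if (!initialized) {      // compute on first use instead of in a field initializer
            lazy = 42;
            initialized = true;
        }
        // Returning 'eager' here instead would make Base() print 0.
        return lazy;             // 42, even when called from Base()
    }

    public static void main(String[] args) {
        new Derived();           // prints "seen by Base(): 42"
    }
}

The compressInitialized and chunkSizeInitialized flags in the change above play the role of initialized here: the value is computed on first call, whether that call comes from the superclass constructor or later.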

@@ -91,12 +91,12 @@ static ResultSetMetaData proxy(ResultSetMetaDataProxy handler) {
static Statement proxy(Object statement, StatementProxy handler) {
Class<? extends Statement> i = Statement.class;

if (statement instanceof PreparedStatement) {
i = PreparedStatement.class;
}
else if (statement instanceof CallableStatement) {
if (statement instanceof CallableStatement) {
i = CallableStatement.class;
}
else if (statement instanceof PreparedStatement) {
i = PreparedStatement.class;
}

return createProxy(i, handler);
}
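
The swap above matters because of the JDBC interface hierarchy: java.sql.CallableStatement extends PreparedStatement, which extends Statement, so a CallableStatement also passes the instanceof PreparedStatement test and the old ordering could never reach the CallableStatement branch. A small sketch of the same rule with the most specific interface tested first (mostSpecific is a hypothetical helper, not part of this patch):

import java.sql.CallableStatement;
import java.sql.PreparedStatement;
import java.sql.Statement;

class JdbcInterfaces {
    // Picks the most specific JDBC statement interface implemented by the argument.
    static Class<? extends Statement> mostSpecific(Object statement) {
        // CallableStatement extends PreparedStatement, so it must be tested first;
        // with the checks reversed, every CallableStatement would fall into the
        // PreparedStatement branch and the CallableStatement branch would be dead code.
        if (statement instanceof CallableStatement) {
            return CallableStatement.class;
        }
        if (statement instanceof PreparedStatement) {
            return PreparedStatement.class;
        }
        return Statement.class;
    }
}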
@@ -887,18 +887,18 @@ private List<NamedExpression> assignAliases(List<? extends NamedExpression> expr
NamedExpression expr = exprs.get(i);
NamedExpression transformed = (NamedExpression) expr.transformUp(ua -> {
Expression child = ua.child();
if (child instanceof NamedExpression) {
return child;
}
if (!child.resolved()) {
return ua;
}
if (child instanceof Cast) {
Cast c = (Cast) child;
if (c.field() instanceof NamedExpression) {
return new Alias(c.source(), ((NamedExpression) c.field()).name(), c);
}
}
if (child instanceof NamedExpression) {
return child;
}
if (!child.resolved()) {
return ua;
}
return new Alias(child.source(), child.sourceText(), child);
}, UnresolvedAlias.class);
newExpr.add(expr.equals(transformed) ? expr : transformed);
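
For context on the rule being reordered: assignAliases rewrites each expression with transformUp, which applies the rule only to nodes of the requested class, here UnresolvedAlias; moving the Cast branch ahead of the generic checks presumably lets a cast over a named field receive an Alias carrying that field's name rather than being returned untouched by an earlier branch. A rough, simplified stand-in for that transformUp contract, assuming a post-order (bottom-up) rewrite keyed on node type (this is not the actual Expression/Node API):

import java.util.ArrayList;
import java.util.List;
import java.util.function.Function;

// Simplified stand-in for the expression tree; not the real Expression/Node API.
abstract class Expr {
    final List<Expr> children;

    Expr(List<Expr> children) {
        this.children = children;
    }

    abstract Expr withChildren(List<Expr> newChildren);

    // Bottom-up rewrite: transform the children first, then apply the rule to the
    // current node, but only if it is an instance of the requested type.
    <T extends Expr> Expr transformUp(Function<T, Expr> rule, Class<T> type) {
        List<Expr> newChildren = new ArrayList<>();
        boolean changed = false;
        for (Expr child : children) {
            Expr transformed = child.transformUp(rule, type);
            changed |= transformed != child;
            newChildren.add(transformed);
        }
        Expr node = changed ? withChildren(newChildren) : this;
        return type.isInstance(node) ? rule.apply(type.cast(node)) : node;
    }
}

In the hunk above the rule is the lambda passed together with UnresolvedAlias.class, so only unresolved aliases are rewritten and every other node is left as-is.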
@@ -404,12 +404,12 @@ static String nameOf(Expression e) {
if (e instanceof DateTimeFunction) {
return nameOf(((DateTimeFunction) e).field());
}
if (e instanceof NamedExpression) {
return ((NamedExpression) e).name();
}
if (e instanceof Literal) {
return String.valueOf(e.fold());
}
if (e instanceof NamedExpression) {
return ((NamedExpression) e).name();
}
throw new SqlIllegalArgumentException("Cannot determine name for {}", e);
}

@@ -179,17 +179,13 @@ private FieldExtraction topHitFieldRef(FieldAttribute fieldAttr) {
}

private Tuple<QueryContainer, FieldExtraction> nestedHitFieldRef(FieldAttribute attr) {
// Find the nested query for this field. If there isn't one then create it
List<FieldExtraction> nestedRefs = new ArrayList<>();

String name = aliasName(attr);
String format = attr.field().getDataType() == DataType.DATETIME ? "epoch_millis" : DocValueFieldsContext.USE_DEFAULT_FORMAT;
Query q = rewriteToContainNestedField(query, attr.source(),
attr.nestedParent().name(), name, format, attr.field().isAggregatable());

SearchHitFieldRef nestedFieldRef = new SearchHitFieldRef(name, attr.field().getDataType(),
attr.field().isAggregatable(), attr.parent().name());
nestedRefs.add(nestedFieldRef);

return new Tuple<>(new QueryContainer(q, aggs, columns, aliases, pseudoFunctions, scalarFunctions, sort, limit), nestedFieldRef);
}