Original file line number Diff line number Diff line change
@@ -23,7 +23,6 @@
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.NumericDocValues;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.IndexSearcher;
@@ -38,11 +37,9 @@
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.core.internal.io.IOUtils;
import org.elasticsearch.index.fieldvisitor.FieldsVisitor;
import org.elasticsearch.index.mapper.IdFieldMapper;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.SeqNoFieldMapper;
import org.elasticsearch.index.mapper.SourceFieldMapper;
import org.elasticsearch.index.mapper.Uid;
import org.elasticsearch.index.translog.Translog;

import java.io.Closeable;
@@ -247,9 +244,8 @@ private Translog.Operation readDocAsOp(int docIndex) throws IOException {
assert assertDocSoftDeleted(leaf.reader(), segmentDocID) : "Noop but soft_deletes field is not set [" + op + "]";
} else {
final String id = fields.id();
final Term uid = new Term(IdFieldMapper.NAME, Uid.encodeId(id));
if (isTombstone) {
op = new Translog.Delete(id, uid, seqNo, primaryTerm, version);
op = new Translog.Delete(id, seqNo, primaryTerm, version);
assert assertDocSoftDeleted(leaf.reader(), segmentDocID) : "Delete op but soft_deletes field is not set [" + op + "]";
} else {
final BytesReference source = fields.source();
@@ -884,16 +884,14 @@ private Engine.DeleteResult applyDeleteOperation(Engine engine, long seqNo, long
assert opPrimaryTerm <= getOperationPrimaryTerm()
: "op term [ " + opPrimaryTerm + " ] > shard term [" + getOperationPrimaryTerm() + "]";
ensureWriteAllowed(origin);
final Term uid = new Term(IdFieldMapper.NAME, Uid.encodeId(id));
final Engine.Delete delete = prepareDelete(id, uid, seqNo, opPrimaryTerm, version,
versionType, origin, ifSeqNo, ifPrimaryTerm);
final Engine.Delete delete = prepareDelete(id, seqNo, opPrimaryTerm, version, versionType, origin, ifSeqNo, ifPrimaryTerm);
return delete(engine, delete);
}

private Engine.Delete prepareDelete(String id, Term uid, long seqNo, long primaryTerm, long version,
VersionType versionType, Engine.Operation.Origin origin,
long ifSeqNo, long ifPrimaryTerm) {
public static Engine.Delete prepareDelete(String id, long seqNo, long primaryTerm, long version, VersionType versionType,
Engine.Operation.Origin origin, long ifSeqNo, long ifPrimaryTerm) {
long startTime = System.nanoTime();
final Term uid = new Term(IdFieldMapper.NAME, Uid.encodeId(id));
return new Engine.Delete(id, uid, seqNo, primaryTerm, version, versionType,
origin, startTime, ifSeqNo, ifPrimaryTerm);
}
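Since `prepareDelete` is now `public static` and derives the Lucene uid term from the id internally, callers only need to pass the document id. A minimal sketch of the intended call pattern, mirroring the `TranslogHandler` change later in this diff; the helper class and method names here are illustrative, not part of the PR:

```java
import org.elasticsearch.index.VersionType;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.seqno.SequenceNumbers;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.translog.Translog;

// Illustrative helper: turn a replayed translog delete into an engine delete without
// touching Lucene's Term/Uid types at the call site.
final class TranslogDeleteReplay {
    static Engine.Delete toEngineDelete(Translog.Delete op, VersionType versionType, Engine.Operation.Origin origin) {
        return IndexShard.prepareDelete(op.id(), op.seqNo(), op.primaryTerm(), op.version(),
            versionType, origin, SequenceNumbers.UNASSIGNED_SEQ_NO, SequenceNumbers.UNASSIGNED_PRIMARY_TERM);
    }
}
```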
40 changes: 19 additions & 21 deletions server/src/main/java/org/elasticsearch/index/translog/Translog.java
@@ -20,7 +20,6 @@
package org.elasticsearch.index.translog;

import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.lucene.index.Term;
import org.apache.lucene.store.AlreadyClosedException;
import org.elasticsearch.Version;
import org.elasticsearch.common.Nullable;
@@ -41,7 +40,9 @@
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.VersionType;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.mapper.IdFieldMapper;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.Uid;
import org.elasticsearch.index.seqno.SequenceNumbers;
import org.elasticsearch.index.shard.AbstractIndexShardComponent;
import org.elasticsearch.index.shard.IndexShardComponent;
@@ -1279,7 +1280,6 @@ public static class Delete implements Operation {
public static final int SERIALIZATION_FORMAT = FORMAT_NO_DOC_TYPE;

private final String id;
private final Term uid;
private final long seqNo;
private final long primaryTerm;
private final long version;
@@ -1292,7 +1292,11 @@ private Delete(final StreamInput in) throws IOException {
// Can't assert that this is _doc because pre-8.0 indexes can have any name for a type
}
id = in.readString();
uid = new Term(in.readString(), in.readBytesRef());
if (format < FORMAT_NO_DOC_TYPE) {
final String docType = in.readString();
assert docType.equals(IdFieldMapper.NAME) : docType + " != " + IdFieldMapper.NAME;
in.readBytesRef(); // uid
}
this.version = in.readLong();
if (format < FORMAT_NO_VERSION_TYPE) {
in.readByte(); // versionType
Expand All @@ -1302,17 +1306,16 @@ private Delete(final StreamInput in) throws IOException {
}

public Delete(Engine.Delete delete, Engine.DeleteResult deleteResult) {
this(delete.id(), delete.uid(), deleteResult.getSeqNo(), delete.primaryTerm(), deleteResult.getVersion());
this(delete.id(), deleteResult.getSeqNo(), delete.primaryTerm(), deleteResult.getVersion());
}

/** utility for testing */
public Delete(String id, long seqNo, long primaryTerm, Term uid) {
this(id, uid, seqNo, primaryTerm, Versions.MATCH_ANY);
public Delete(String id, long seqNo, long primaryTerm) {
this(id, seqNo, primaryTerm, Versions.MATCH_ANY);
}

public Delete(String id, Term uid, long seqNo, long primaryTerm, long version) {
public Delete(String id, long seqNo, long primaryTerm, long version) {
this.id = Objects.requireNonNull(id);
this.uid = uid;
this.seqNo = seqNo;
this.primaryTerm = primaryTerm;
this.version = version;
@@ -1325,17 +1328,13 @@ public Type opType() {

@Override
public long estimateSize() {
return ((uid.field().length() + uid.text().length()) * 2) + 20;
return 4 + (id.length() * 2) + 24;
}

public String id() {
return id;
}

public Term uid() {
return this.uid;
}

@Override
public long seqNo() {
return seqNo;
@@ -1362,8 +1361,10 @@ private void write(final StreamOutput out) throws IOException {
out.writeString(MapperService.SINGLE_MAPPING_NAME);
}
out.writeString(id);
out.writeString(uid.field());
out.writeBytesRef(uid.bytes());
if (format < FORMAT_NO_DOC_TYPE) {
out.writeString(IdFieldMapper.NAME);
out.writeBytesRef(Uid.encodeId(id));
}
out.writeLong(version);
if (format < FORMAT_NO_VERSION_TYPE) {
out.writeByte(VersionType.EXTERNAL.getValue());
@@ -1385,14 +1386,12 @@ public boolean equals(Object o) {

return version == delete.version &&
seqNo == delete.seqNo &&
primaryTerm == delete.primaryTerm &&
uid.equals(delete.uid);
primaryTerm == delete.primaryTerm;
}

@Override
public int hashCode() {
int result = uid.hashCode();
result = 31 * result + Long.hashCode(seqNo);
int result = Long.hashCode(seqNo);
result = 31 * result + Long.hashCode(primaryTerm);
result = 31 * result + Long.hashCode(version);
return result;
@@ -1401,8 +1400,7 @@ public int hashCode() {
@Override
public String toString() {
return "Delete{" +
"uid=" + uid +
", seqNo=" + seqNo +
"seqNo=" + seqNo +
", primaryTerm=" + primaryTerm +
", version=" + version +
'}';
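With `uid()` removed from `Translog.Delete`, the tests below switch from asserting on the term to asserting on the id. Code that still needs the Lucene term can rebuild it from the id; a sketch under the assumption that the uid was always derived this way (as `prepareDelete` above now does), with an illustrative helper name not taken from the PR:

```java
import org.apache.lucene.index.Term;
import org.elasticsearch.index.mapper.IdFieldMapper;
import org.elasticsearch.index.mapper.Uid;
import org.elasticsearch.index.translog.Translog;

final class DeleteUids {
    // Recompute the Lucene uid term for a translog delete; equivalent to the field that
    // used to be stored on Translog.Delete itself.
    static Term uidTerm(Translog.Delete delete) {
        return new Term(IdFieldMapper.NAME, Uid.encodeId(delete.id()));
    }
}
```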
@@ -323,7 +323,7 @@ public void testSimpleOperations() throws IOException {
assertThat(snapshot.totalOperations(), equalTo(ops.size()));
}

addToTranslogAndList(translog, ops, new Translog.Delete("2", 1, primaryTerm.get(), newUid("2")));
addToTranslogAndList(translog, ops, new Translog.Delete("2", 1, primaryTerm.get()));
try (Translog.Snapshot snapshot = translog.newSnapshot()) {
assertThat(snapshot, SnapshotMatchers.equalsTo(ops));
assertThat(snapshot.totalOperations(), equalTo(ops.size()));
@@ -342,7 +342,7 @@

Translog.Delete delete = (Translog.Delete) snapshot.next();
assertNotNull(delete);
assertThat(delete.uid(), equalTo(newUid("2")));
assertThat(delete.id(), equalTo("2"));

Translog.NoOp noOp = (Translog.NoOp) snapshot.next();
assertNotNull(noOp);
@@ -423,43 +423,43 @@ public void testStats() throws IOException {
assertThat(stats.getEarliestLastModifiedAge(), greaterThan(0L));
}

translog.add(new Translog.Delete("2", 1, primaryTerm.get(), newUid("2")));
translog.add(new Translog.Delete("2", 1, primaryTerm.get()));
{
final TranslogStats stats = stats();
assertThat(stats.estimatedNumberOfOperations(), equalTo(2));
assertThat(stats.getTranslogSizeInBytes(), equalTo(200L));
assertThat(stats.getTranslogSizeInBytes(), equalTo(193L));
assertThat(stats.getUncommittedOperations(), equalTo(2));
assertThat(stats.getUncommittedSizeInBytes(), equalTo(145L));
assertThat(stats.getUncommittedSizeInBytes(), equalTo(138L));
assertThat(stats.getEarliestLastModifiedAge(), greaterThan(0L));
}

translog.add(new Translog.Delete("3", 2, primaryTerm.get(), newUid("3")));
translog.add(new Translog.Delete("3", 2, primaryTerm.get()));
{
final TranslogStats stats = stats();
assertThat(stats.estimatedNumberOfOperations(), equalTo(3));
assertThat(stats.getTranslogSizeInBytes(), equalTo(243L));
assertThat(stats.getTranslogSizeInBytes(), equalTo(229L));
assertThat(stats.getUncommittedOperations(), equalTo(3));
assertThat(stats.getUncommittedSizeInBytes(), equalTo(188L));
assertThat(stats.getUncommittedSizeInBytes(), equalTo(174L));
assertThat(stats.getEarliestLastModifiedAge(), greaterThan(0L));
}

translog.add(new Translog.NoOp(3, 1, randomAlphaOfLength(16)));
{
final TranslogStats stats = stats();
assertThat(stats.estimatedNumberOfOperations(), equalTo(4));
assertThat(stats.getTranslogSizeInBytes(), equalTo(285L));
assertThat(stats.getTranslogSizeInBytes(), equalTo(271L));
assertThat(stats.getUncommittedOperations(), equalTo(4));
assertThat(stats.getUncommittedSizeInBytes(), equalTo(230L));
assertThat(stats.getUncommittedSizeInBytes(), equalTo(216L));
assertThat(stats.getEarliestLastModifiedAge(), greaterThan(0L));
}

translog.rollGeneration();
{
final TranslogStats stats = stats();
assertThat(stats.estimatedNumberOfOperations(), equalTo(4));
assertThat(stats.getTranslogSizeInBytes(), equalTo(340L));
assertThat(stats.getTranslogSizeInBytes(), equalTo(326L));
assertThat(stats.getUncommittedOperations(), equalTo(4));
assertThat(stats.getUncommittedSizeInBytes(), equalTo(285L));
assertThat(stats.getUncommittedSizeInBytes(), equalTo(271L));
assertThat(stats.getEarliestLastModifiedAge(), greaterThan(0L));
}

@@ -469,14 +469,14 @@ public void testStats() throws IOException {
stats.writeTo(out);
final TranslogStats copy = new TranslogStats(out.bytes().streamInput());
assertThat(copy.estimatedNumberOfOperations(), equalTo(4));
assertThat(copy.getTranslogSizeInBytes(), equalTo(340L));
assertThat(copy.getTranslogSizeInBytes(), equalTo(326L));

try (XContentBuilder builder = XContentFactory.jsonBuilder()) {
builder.startObject();
copy.toXContent(builder, ToXContent.EMPTY_PARAMS);
builder.endObject();
assertThat(Strings.toString(builder), equalTo("{\"translog\":{\"operations\":4,\"size_in_bytes\":" + 340
+ ",\"uncommitted_operations\":4,\"uncommitted_size_in_bytes\":" + 285
assertThat(Strings.toString(builder), equalTo("{\"translog\":{\"operations\":4,\"size_in_bytes\":" + 326
+ ",\"uncommitted_operations\":4,\"uncommitted_size_in_bytes\":" + 271
+ ",\"earliest_last_modified_age\":" + stats.getEarliestLastModifiedAge() + "}}"));
}
}
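The adjusted expectations drop by 7 bytes per delete operation, which lines up with the current serialization format no longer writing the legacy uid. A rough accounting sketch, under the assumption that `Uid.encodeId` of a one-digit numeric id is 2 bytes; nothing below is asserted by the test itself:

```java
// Rough accounting (assumption): bytes the old delete serialization wrote that the new one skips.
final class DeleteSizeDelta {
    public static void main(String[] args) {
        int fieldName = 1 /* vint length */ + "_id".length();            // writeString(uid.field())   -> 4 bytes
        int uidBytes = 1 /* vint length */ + 2 /* Uid.encodeId("2") */;  // writeBytesRef(uid.bytes()) -> 3 bytes
        // 7 bytes per delete: 200 -> 193 and 243-200=43 -> 229-193=36 in the expectations above.
        System.out.println("saved per delete: " + (fieldName + uidBytes) + " bytes");
    }
}
```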
@@ -787,7 +787,7 @@ public void testConcurrentWritesWithVaryingSize() throws Throwable {
case DELETE:
Translog.Delete delOp = (Translog.Delete) op;
Translog.Delete expDelOp = (Translog.Delete) expectedOp;
assertEquals(expDelOp.uid(), delOp.uid());
assertEquals(expDelOp.id(), delOp.id());
assertEquals(expDelOp.version(), delOp.version());
break;
case NO_OP:
@@ -947,7 +947,7 @@ public void doRun() throws BrokenBarrierException, InterruptedException, IOExcep
op = new Translog.Index("" + id, id, primaryTerm.get(), new byte[]{(byte) id});
break;
case DELETE:
op = new Translog.Delete(Long.toString(id), id, primaryTerm.get(), newUid(Long.toString(id)));
op = new Translog.Delete(Long.toString(id), id, primaryTerm.get());
break;
case NO_OP:
op = new Translog.NoOp(id, 1, Long.toString(id));
@@ -1927,7 +1927,6 @@ public void run() {
case DELETE:
op = new Translog.Delete(
threadId + "_" + opCount,
new Term("_uid", threadId + "_" + opCount),
seqNoGenerator.getAndIncrement(),
primaryTerm.get(),
1 + randomInt(100000));
@@ -118,10 +118,8 @@ public Engine.Operation convertToEngineOp(Translog.Operation operation, Engine.O
return engineIndex;
case DELETE:
final Translog.Delete delete = (Translog.Delete) operation;
final Engine.Delete engineDelete = new Engine.Delete(delete.id(), delete.uid(), delete.seqNo(),
delete.primaryTerm(), delete.version(), versionType, origin, System.nanoTime(),
SequenceNumbers.UNASSIGNED_SEQ_NO, SequenceNumbers.UNASSIGNED_PRIMARY_TERM);
return engineDelete;
return IndexShard.prepareDelete(delete.id(), delete.seqNo(), delete.primaryTerm(), delete.version(), versionType,
origin, SequenceNumbers.UNASSIGNED_SEQ_NO, SequenceNumbers.UNASSIGNED_PRIMARY_TERM);
case NO_OP:
final Translog.NoOp noOp = (Translog.NoOp) operation;
final Engine.NoOp engineNoOp =
@@ -110,7 +110,6 @@ public static Translog.Operation rewriteOperationWithPrimaryTerm(Translog.Operat
final Translog.Delete delete = (Translog.Delete) operation;
operationWithPrimaryTerm = new Translog.Delete(
delete.id(),
delete.uid(),
delete.seqNo(),
primaryTerm,
delete.version());
@@ -6,14 +6,12 @@

package org.elasticsearch.xpack.ccr.action.bulk;

import org.apache.lucene.index.Term;
import org.elasticsearch.Version;
import org.elasticsearch.action.admin.indices.flush.FlushRequest;
import org.elasticsearch.action.support.replication.TransportWriteAction;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.common.Randomness;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.mapper.Uid;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.shard.IndexShardTestCase;
import org.elasticsearch.index.translog.Translog;
@@ -58,7 +56,7 @@ public void testPrimaryTermFromFollower() throws IOException {
break;
case DELETE:
operations.add(
new Translog.Delete(id, new Term("_id", Uid.encodeId(id)), seqNo, primaryTerm, 0));
new Translog.Delete(id, seqNo, primaryTerm, 0));
break;
case NO_OP:
operations.add(new Translog.NoOp(seqNo, primaryTerm, "test"));
@@ -101,7 +99,7 @@ public void testPrimaryResultIncludeOnlyAppliedOperations() throws Exception {
if (randomBoolean()) {
op = new Translog.Index(id, seqno++, primaryTerm, 0, SOURCE, null, -1);
} else if (randomBoolean()) {
op = new Translog.Delete(id, new Term("_id", Uid.encodeId(id)), seqno++, primaryTerm, 0);
op = new Translog.Delete(id, seqno++, primaryTerm, 0);
} else {
op = new Translog.NoOp(seqno++, primaryTerm, "test-" + i);
}