@@ -4078,9 +4078,9 @@ private static Cell reckonDelta(final Cell delta, final Cell currentCell,
       Function<Cell, byte[]> supplier) throws IOException {
     // Forward any tags found on the delta.
     List<Tag> tags = TagUtil.carryForwardTags(delta);
-    tags = TagUtil.carryForwardTTLTag(tags, mutation.getTTL());
     if (currentCell != null) {
       tags = TagUtil.carryForwardTags(tags, currentCell);
+      tags = TagUtil.carryForwardTTLTag(tags, mutation.getTTL());
       byte[] newValue = supplier.apply(currentCell);
       return ExtendedCellBuilderFactory.create(CellBuilderType.SHALLOW_COPY)
         .setRow(mutation.getRow(), 0, mutation.getRow().length)
@@ -4093,6 +4093,7 @@ private static Cell reckonDelta(final Cell delta, final Cell currentCell,
         .setTags(TagUtil.fromList(tags))
         .build();
     } else {
+      tags = TagUtil.carryForwardTTLTag(tags, mutation.getTTL());
      PrivateCellUtil.updateLatestStamp(delta, now);
      return CollectionUtils.isEmpty(tags) ? delta : PrivateCellUtil.createCell(delta, tags);
    }
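Why reordering fixes the duplicate tag: in HRegion#reckonDelta, carryForwardTags(tags, currentCell) copies every tag from the stored cell, including the TTL tag written by a previous increment, while carryForwardTTLTag strips any TTL tag already in the list before appending the mutation's own. Calling carryForwardTTLTag last, in both branches, therefore leaves exactly one TTL tag on the result cell. Below is a minimal sketch of that replace-then-append behavior the reordering relies on; it is an approximation of TagUtil.carryForwardTTLTag, not the verbatim HBase source.

// Sketch of TagUtil.carryForwardTTLTag's replace-then-append semantics
// (approximation for illustration, not the exact HBase implementation).
public static List<Tag> carryForwardTTLTag(List<Tag> tagsOrNull, long ttl) {
  if (ttl == Long.MAX_VALUE) {
    // The mutation carries no TTL: leave any carried-forward tags untouched.
    return tagsOrNull;
  }
  List<Tag> tags = (tagsOrNull != null) ? tagsOrNull : new ArrayList<>(1);
  // Drop any TTL tag carried forward from the current cell, so the list
  // ends up with exactly one TTL tag: the one from this mutation.
  tags.removeIf(tag -> tag.getType() == TagType.TTL_TAG_TYPE);
  tags.add(new ArrayBackedTag(TagType.TTL_TAG_TYPE, Bytes.toBytes(ttl)));
  return tags;
}

The hunks that follow add a client-side regression test exercising repeated increments with TTLs.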
@@ -17,6 +17,8 @@
  */
 package org.apache.hadoop.hbase.client;
 
+import static org.apache.hadoop.hbase.HConstants.RPC_CODEC_CONF_KEY;
+import static org.apache.hadoop.hbase.ipc.RpcClient.DEFAULT_CODEC_CLASS;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNotEquals;
 import static org.junit.Assert.assertNotNull;
@@ -38,7 +40,11 @@
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.PrivateCellUtil;
 import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.Tag;
+import org.apache.hadoop.hbase.TagType;
+import org.apache.hadoop.hbase.codec.KeyValueCodecWithTags;
 import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
 import org.apache.hadoop.hbase.coprocessor.MultiRowMutationEndpoint;
 import org.apache.hadoop.hbase.testclassification.LargeTests;
@@ -542,4 +548,51 @@ static void assertIncrementKey(Cell key, byte [] row, byte [] family,
   public static String filterStringSoTableNameSafe(final String str) {
     return str.replaceAll("\\[fast\\=(.*)\\]", ".FAST.is.$1");
   }
+
+  /*
+   * Test that exactly one TTL tag is present after an Increment mutation.
+   */
+  @Test
+  public void testIncrementWithTtlTags() throws Exception {
+    LOG.info("Starting " + this.name.getMethodName());
+    final TableName tableName =
+        TableName.valueOf(filterStringSoTableNameSafe(this.name.getMethodName()));
+    Table ht = TEST_UTIL.createTable(tableName, FAMILY);
+    final byte[] COLUMN = Bytes.toBytes("column");
+
+    Configuration conf = new Configuration(TEST_UTIL.getConfiguration());
+    // Set RPC_CODEC_CONF_KEY to KeyValueCodecWithTags so that scans return cell tags.
+    conf.set(RPC_CODEC_CONF_KEY, KeyValueCodecWithTags.class.getName());
+    conf.set(DEFAULT_CODEC_CLASS, "");
+    try (Connection connection = ConnectionFactory.createConnection(conf);
+        Table table = connection.getTable(tableName)) {
+      for (int i = 0; i < 10; i++) {
+        Increment inc = new Increment(ROW);
+        inc.addColumn(FAMILY, COLUMN, 1);
+        long ttl = i + 3600000;
+        inc.setTTL(ttl);
+        ht.increment(inc);
+
+        Scan scan = new Scan().withStartRow(ROW);
+        ResultScanner scanner = table.getScanner(scan);
+        int count = 0;
+        Result result;
+        while ((result = scanner.next()) != null) {
+          Cell[] cells = result.rawCells();
+          for (Cell cell : cells) {
+            List<Tag> tags = PrivateCellUtil.getTags(cell);
+            // Make sure there is only 1 tag.
+            assertEquals(1, tags.size());
+            Tag tag = tags.get(0);
+            assertEquals(TagType.TTL_TAG_TYPE, tag.getType());
+            long ttlTagValue = Bytes.toLong(tag.getValueArray(), tag.getValueOffset());
+            assertEquals(ttl, ttlTagValue);
+          }
+          count++;
+        }
+        // Make sure there is only 1 result.
+        assertEquals(1, count);
+      }
+    }
+  }
 }
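To see why every iteration of the loop observes exactly one TTL tag with the mutation's own value, it helps to trace the tag list through reckonDelta for the first two increments. This is a hand-worked trace under the replace-then-append assumption sketched earlier, with values taken from the test's ttl = i + 3600000:

// Iteration i=0: no current cell yet, so the else branch runs.
//   carryForwardTags(delta)               -> []
//   carryForwardTTLTag(tags, 3600000)     -> [TTL=3600000]
//
// Iteration i=1: the stored cell still carries TTL=3600000.
//   Old order (before this fix):
//     carryForwardTTLTag(tags, 3600001)   -> [TTL=3600001]
//     carryForwardTags(tags, currentCell) -> [TTL=3600001, TTL=3600000]  // two TTL tags
//   New order (this fix):
//     carryForwardTags(tags, currentCell) -> [TTL=3600000]
//     carryForwardTTLTag(tags, 3600001)   -> [TTL=3600001]               // old tag replaced

Note the test scans through a second connection configured with KeyValueCodecWithTags because the default client codec does not ship tags back to the client; the increments themselves can go through the plain ht handle, since setTTL travels as a mutation attribute rather than a tag.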