Skip to content

Commit ca6baa8

Browse files
apurtell and Apache9
authored and committed
HBASE-27234 Clean up error-prone warnings in hbase-examples
Close #4647

Co-authored-by: Duo Zhang <[email protected]>
Signed-off-by: Duo Zhang <[email protected]>
Signed-off-by: Viraj Jasani <[email protected]>

(cherry picked from commit 35fb37c)

Conflicts:
  hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/BulkDeleteEndpoint.java
  hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/RowCountEndpoint.java
  hbase-examples/src/test/java/org/apache/hadoop/hbase/security/provider/example/TestShadeSaslAuthenticationProvider.java
1 parent e5f551e commit ca6baa8

File tree

14 files changed

+80
-69
lines changed

14 files changed

+80
-69
lines changed

hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/HttpProxyExample.java

Lines changed: 16 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -22,6 +22,8 @@
2222

2323
import java.io.IOException;
2424
import java.net.InetSocketAddress;
25+
import java.util.Iterator;
26+
import java.util.List;
2527
import java.util.concurrent.ExecutionException;
2628
import org.apache.hadoop.conf.Configuration;
2729
import org.apache.hadoop.hbase.HBaseConfiguration;
@@ -35,6 +37,7 @@
3537
import org.apache.yetus.audience.InterfaceAudience;
3638

3739
import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
40+
import org.apache.hbase.thirdparty.com.google.common.base.Splitter;
3841
import org.apache.hbase.thirdparty.com.google.common.base.Throwables;
3942
import org.apache.hbase.thirdparty.io.netty.bootstrap.ServerBootstrap;
4043
import org.apache.hbase.thirdparty.io.netty.buffer.ByteBuf;
@@ -158,12 +161,20 @@ private void write(ChannelHandlerContext ctx, HttpResponseStatus status, String
158161
}
159162

160163
private Params parse(FullHttpRequest req) {
161-
String[] components = new QueryStringDecoder(req.uri()).path().split("/");
162-
Preconditions.checkArgument(components.length == 4, "Unrecognized uri: %s", req.uri());
164+
List<String> components =
165+
Splitter.on('/').splitToList(new QueryStringDecoder(req.uri()).path());
166+
Preconditions.checkArgument(components.size() == 4, "Unrecognized uri: %s", req.uri());
167+
Iterator<String> i = components.iterator();
163168
// path is start with '/' so split will give an empty component
164-
String[] cfAndCq = components[3].split(":");
165-
Preconditions.checkArgument(cfAndCq.length == 2, "Unrecognized uri: %s", req.uri());
166-
return new Params(components[1], components[2], cfAndCq[0], cfAndCq[1]);
169+
i.next();
170+
String table = i.next();
171+
String row = i.next();
172+
List<String> cfAndCq = Splitter.on(':').splitToList(i.next());
173+
Preconditions.checkArgument(cfAndCq.size() == 2, "Unrecognized uri: %s", req.uri());
174+
i = cfAndCq.iterator();
175+
String family = i.next();
176+
String qualifier = i.next();
177+
return new Params(table, row, family, qualifier);
167178
}
168179

169180
private void get(ChannelHandlerContext ctx, FullHttpRequest req) {

hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/MultiThreadedClientExample.java

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -30,7 +30,6 @@
3030
import java.util.concurrent.TimeUnit;
3131
import org.apache.hadoop.conf.Configured;
3232
import org.apache.hadoop.hbase.Cell;
33-
import org.apache.hadoop.hbase.Cell.Type;
3433
import org.apache.hadoop.hbase.CellBuilderFactory;
3534
import org.apache.hadoop.hbase.CellBuilderType;
3635
import org.apache.hadoop.hbase.TableName;
@@ -240,7 +239,7 @@ public Boolean call() throws Exception {
240239
byte[] rk = Bytes.toBytes(ThreadLocalRandom.current().nextLong());
241240
Put p = new Put(rk);
242241
p.add(CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY).setRow(rk).setFamily(FAMILY)
243-
.setQualifier(QUAL).setTimestamp(p.getTimestamp()).setType(Type.Put).setValue(value)
242+
.setQualifier(QUAL).setTimestamp(p.getTimestamp()).setType(Cell.Type.Put).setValue(value)
244243
.build());
245244
t.put(p);
246245
}

hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/BulkDeleteEndpoint.java

Lines changed: 2 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -31,7 +31,6 @@
3131
import org.apache.hadoop.hbase.CellUtil;
3232
import org.apache.hadoop.hbase.CoprocessorEnvironment;
3333
import org.apache.hadoop.hbase.HConstants;
34-
import org.apache.hadoop.hbase.HConstants.OperationStatusCode;
3534
import org.apache.hadoop.hbase.client.Delete;
3635
import org.apache.hadoop.hbase.client.Mutation;
3736
import org.apache.hadoop.hbase.client.Scan;
@@ -41,7 +40,6 @@
4140
import org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest;
4241
import org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.DeleteType;
4342
import org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse;
44-
import org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse.Builder;
4543
import org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteService;
4644
import org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter;
4745
import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;
@@ -155,7 +153,7 @@ public void delete(RpcController controller, BulkDeleteRequest request,
155153
}
156154
OperationStatus[] opStatus = region.batchMutate(deleteArr);
157155
for (i = 0; i < opStatus.length; i++) {
158-
if (opStatus[i].getOperationStatusCode() != OperationStatusCode.SUCCESS) {
156+
if (opStatus[i].getOperationStatusCode() != HConstants.OperationStatusCode.SUCCESS) {
159157
break;
160158
}
161159
totalRowsDeleted++;
@@ -181,7 +179,7 @@ public void delete(RpcController controller, BulkDeleteRequest request,
181179
}
182180
}
183181
}
184-
Builder responseBuilder = BulkDeleteResponse.newBuilder();
182+
BulkDeleteResponse.Builder responseBuilder = BulkDeleteResponse.newBuilder();
185183
responseBuilder.setRowsDeleted(totalRowsDeleted);
186184
if (deleteType == DeleteType.VERSION) {
187185
responseBuilder.setVersionsDeleted(totalVersionsDeleted);

hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/ExampleRegionObserverWithMetrics.java

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -109,6 +109,8 @@ private void performCostlyOperation() {
109109
// simulate the operation by sleeping.
110110
Thread.sleep(ThreadLocalRandom.current().nextLong(100));
111111
} catch (InterruptedException ignore) {
112+
// Restore the interrupt status
113+
Thread.currentThread().interrupt();
112114
}
113115
}
114116
}

hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/RowCountEndpoint.java

Lines changed: 6 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -24,6 +24,7 @@
2424
import java.util.ArrayList;
2525
import java.util.Collections;
2626
import java.util.List;
27+
import org.apache.commons.io.IOUtils;
2728
import org.apache.hadoop.hbase.Cell;
2829
import org.apache.hadoop.hbase.CellUtil;
2930
import org.apache.hadoop.hbase.CoprocessorEnvironment;
@@ -38,6 +39,8 @@
3839
import org.apache.hadoop.hbase.util.Bytes;
3940
import org.apache.yetus.audience.InterfaceAudience;
4041

42+
import org.apache.hbase.thirdparty.com.google.common.collect.Iterables;
43+
4144
/**
4245
* Sample coprocessor endpoint exposing a Service interface for counting rows and key values.
4346
* <p>
@@ -93,10 +96,7 @@ public void getRowCount(RpcController controller, ExampleProtos.CountRequest req
9396
CoprocessorRpcUtils.setControllerException(controller, ioe);
9497
} finally {
9598
if (scanner != null) {
96-
try {
97-
scanner.close();
98-
} catch (IOException ignored) {
99-
}
99+
IOUtils.closeQuietly(scanner);
100100
}
101101
}
102102
done.run(response);
@@ -117,9 +117,7 @@ public void getKeyValueCount(RpcController controller, ExampleProtos.CountReques
117117
long count = 0;
118118
do {
119119
hasMore = scanner.next(results);
120-
for (Cell kv : results) {
121-
count++;
122-
}
120+
count += Iterables.size(results);
123121
results.clear();
124122
} while (hasMore);
125123

@@ -128,10 +126,7 @@ public void getKeyValueCount(RpcController controller, ExampleProtos.CountReques
128126
CoprocessorRpcUtils.setControllerException(controller, ioe);
129127
} finally {
130128
if (scanner != null) {
131-
try {
132-
scanner.close();
133-
} catch (IOException ignored) {
134-
}
129+
IOUtils.closeQuietly(scanner);
135130
}
136131
}
137132
done.run(response);

hbase-examples/src/main/java/org/apache/hadoop/hbase/mapreduce/IndexBuilder.java

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -131,6 +131,7 @@ public static Job configureJob(Configuration conf, String[] args) throws IOExcep
131131
return job;
132132
}
133133

134+
@Override
134135
public int run(String[] args) throws Exception {
135136
Configuration conf = HBaseConfiguration.create(getConf());
136137
if (args.length < 3) {

hbase-examples/src/main/java/org/apache/hadoop/hbase/mapreduce/SampleUploader.java

Lines changed: 12 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -18,6 +18,8 @@
1818
package org.apache.hadoop.hbase.mapreduce;
1919

2020
import java.io.IOException;
21+
import java.util.Iterator;
22+
import java.util.List;
2123
import org.apache.hadoop.conf.Configuration;
2224
import org.apache.hadoop.conf.Configured;
2325
import org.apache.hadoop.fs.Path;
@@ -35,6 +37,8 @@
3537
import org.apache.hadoop.util.ToolRunner;
3638
import org.apache.yetus.audience.InterfaceAudience;
3739

40+
import org.apache.hbase.thirdparty.com.google.common.base.Splitter;
41+
3842
/**
3943
* Sample Uploader MapReduce
4044
* <p>
@@ -77,16 +81,16 @@ public void map(LongWritable key, Text line, Context context) throws IOException
7781
// Each line is comma-delimited; row,family,qualifier,value
7882

7983
// Split CSV line
80-
String[] values = line.toString().split(",");
81-
if (values.length != 4) {
84+
List<String> values = Splitter.on(',').splitToList(line.toString());
85+
if (values.size() != 4) {
8286
return;
8387
}
84-
88+
Iterator<String> i = values.iterator();
8589
// Extract each value
86-
byte[] row = Bytes.toBytes(values[0]);
87-
byte[] family = Bytes.toBytes(values[1]);
88-
byte[] qualifier = Bytes.toBytes(values[2]);
89-
byte[] value = Bytes.toBytes(values[3]);
90+
byte[] row = Bytes.toBytes(i.next());
91+
byte[] family = Bytes.toBytes(i.next());
92+
byte[] qualifier = Bytes.toBytes(i.next());
93+
byte[] value = Bytes.toBytes(i.next());
9094

9195
// Create Put
9296
Put put = new Put(row);
@@ -132,6 +136,7 @@ public static Job configureJob(Configuration conf, String[] args) throws IOExcep
132136
* @param otherArgs The command line parameters after ToolRunner handles standard.
133137
* @throws Exception When running the job fails.
134138
*/
139+
@Override
135140
public int run(String[] otherArgs) throws Exception {
136141
if (otherArgs.length != 2) {
137142
System.err.println("Wrong number of arguments: " + otherArgs.length);

hbase-examples/src/main/java/org/apache/hadoop/hbase/security/provider/example/ShadeSaslServerAuthenticationProvider.java

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -20,6 +20,7 @@
2020
import java.io.BufferedReader;
2121
import java.io.IOException;
2222
import java.io.InputStreamReader;
23+
import java.nio.charset.StandardCharsets;
2324
import java.util.Arrays;
2425
import java.util.HashMap;
2526
import java.util.Map;
@@ -86,8 +87,8 @@ Map<String, char[]> readPasswordDB(Configuration conf) throws IOException {
8687
}
8788

8889
Map<String, char[]> passwordDb = new HashMap<>();
89-
try (FSDataInputStream fdis = fs.open(passwordFile);
90-
BufferedReader reader = new BufferedReader(new InputStreamReader(fdis))) {
90+
try (FSDataInputStream fdis = fs.open(passwordFile); BufferedReader reader =
91+
new BufferedReader(new InputStreamReader(fdis, StandardCharsets.UTF_8))) {
9192
String line = null;
9293
int offset = 0;
9394
while ((line = reader.readLine()) != null) {

hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift/DemoClient.java

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -130,15 +130,15 @@ private void run() throws Exception {
130130
System.out.println("scanning tables...");
131131

132132
for (ByteBuffer name : client.getTableNames()) {
133-
System.out.println(" found: " + ClientUtils.utf8(name.array()));
133+
System.out.println(" found: " + ClientUtils.utf8(name));
134134

135135
if (name.equals(demoTable) || name.equals(disabledTable)) {
136136
if (client.isTableEnabled(name)) {
137-
System.out.println(" disabling table: " + ClientUtils.utf8(name.array()));
137+
System.out.println(" disabling table: " + ClientUtils.utf8(name));
138138
client.disableTable(name);
139139
}
140140

141-
System.out.println(" deleting table: " + ClientUtils.utf8(name.array()));
141+
System.out.println(" deleting table: " + ClientUtils.utf8(name));
142142
client.deleteTable(name);
143143
}
144144
}
@@ -326,7 +326,7 @@ private void run() throws Exception {
326326
columnNames.clear();
327327

328328
for (ColumnDescriptor col2 : client.getColumnDescriptors(demoTable).values()) {
329-
System.out.println("column with name: " + new String(col2.name.array()));
329+
System.out.println("column with name: " + ClientUtils.utf8(col2.name));
330330
System.out.println(col2.toString());
331331

332332
columnNames.add(col2.name);
@@ -358,7 +358,7 @@ private void printVersions(ByteBuffer row, List<TCell> versions) {
358358
rowStr.append("; ");
359359
}
360360

361-
System.out.println("row: " + ClientUtils.utf8(row.array()) + ", values: " + rowStr);
361+
System.out.println("row: " + ClientUtils.utf8(row) + ", values: " + rowStr);
362362
}
363363

364364
private void printRow(TRowResult rowResult) {

hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift/HttpDoAsClient.java

Lines changed: 7 additions & 23 deletions
Original file line numberDiff line numberDiff line change
@@ -26,7 +26,6 @@
2626
import java.util.Base64;
2727
import java.util.HashMap;
2828
import java.util.HashSet;
29-
import java.util.List;
3029
import java.util.Map;
3130
import java.util.Set;
3231
import javax.security.auth.Subject;
@@ -37,8 +36,6 @@
3736
import org.apache.hadoop.hbase.thrift.generated.AlreadyExists;
3837
import org.apache.hadoop.hbase.thrift.generated.ColumnDescriptor;
3938
import org.apache.hadoop.hbase.thrift.generated.Hbase;
40-
import org.apache.hadoop.hbase.thrift.generated.TCell;
41-
import org.apache.hadoop.hbase.thrift.generated.TRowResult;
4239
import org.apache.hadoop.hbase.util.Bytes;
4340
import org.apache.hadoop.hbase.util.ClientUtils;
4441
import org.apache.thrift.protocol.TBinaryProtocol;
@@ -132,13 +129,13 @@ private void run() throws Exception {
132129
//
133130
System.out.println("scanning tables...");
134131
for (ByteBuffer name : refresh(client, httpClient).getTableNames()) {
135-
System.out.println(" found: " + ClientUtils.utf8(name.array()));
136-
if (ClientUtils.utf8(name.array()).equals(ClientUtils.utf8(t))) {
132+
System.out.println(" found: " + ClientUtils.utf8(name));
133+
if (ClientUtils.utf8(name).equals(ClientUtils.utf8(t))) {
137134
if (refresh(client, httpClient).isTableEnabled(name)) {
138-
System.out.println(" disabling table: " + ClientUtils.utf8(name.array()));
135+
System.out.println(" disabling table: " + ClientUtils.utf8(name));
139136
refresh(client, httpClient).disableTable(name);
140137
}
141-
System.out.println(" deleting table: " + ClientUtils.utf8(name.array()));
138+
System.out.println(" deleting table: " + ClientUtils.utf8(name));
142139
refresh(client, httpClient).deleteTable(name);
143140
}
144141
}
@@ -170,8 +167,8 @@ private void run() throws Exception {
170167
Map<ByteBuffer, ColumnDescriptor> columnMap =
171168
refresh(client, httpClient).getColumnDescriptors(ByteBuffer.wrap(t));
172169
for (ColumnDescriptor col2 : columnMap.values()) {
173-
System.out.println(
174-
" column: " + ClientUtils.utf8(col2.name.array()) + ", maxVer: " + col2.maxVersions);
170+
System.out
171+
.println(" column: " + ClientUtils.utf8(col2.name) + ", maxVer: " + col2.maxVersions);
175172
}
176173

177174
transport.close();
@@ -208,26 +205,13 @@ private String generateTicket() throws GSSException {
208205
context.requestInteg(true);
209206

210207
final byte[] outToken = context.initSecContext(new byte[0], 0, 0);
211-
StringBuffer outputBuffer = new StringBuffer();
208+
StringBuilder outputBuffer = new StringBuilder();
212209
outputBuffer.append("Negotiate ");
213210
outputBuffer.append(Bytes.toString(Base64.getEncoder().encode(outToken)));
214211
System.out.print("Ticket is: " + outputBuffer);
215212
return outputBuffer.toString();
216213
}
217214

218-
private void printVersions(ByteBuffer row, List<TCell> versions) {
219-
StringBuilder rowStr = new StringBuilder();
220-
for (TCell cell : versions) {
221-
rowStr.append(ClientUtils.utf8(cell.value.array()));
222-
rowStr.append("; ");
223-
}
224-
System.out.println("row: " + ClientUtils.utf8(row.array()) + ", values: " + rowStr);
225-
}
226-
227-
private void printRow(TRowResult rowResult) {
228-
ClientUtils.printRow(rowResult);
229-
}
230-
231215
static Subject getSubject() throws Exception {
232216
if (!secure) {
233217
return new Subject();

0 commit comments

Comments (0)