  */
 package org.apache.hadoop.hbase.client;
 
+import static org.apache.hadoop.hbase.ipc.RpcServer.MAX_REQUEST_SIZE;
 import static org.junit.Assert.assertTrue;
-
 import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.HBaseTestingUtil;
 import org.apache.hadoop.hbase.TableName;
@@ -48,6 +48,7 @@ public class TestRequestTooBigException {
 
   @BeforeClass
   public static void setUpBeforeClass() throws Exception {
+    TEST_UTIL.getConfiguration().setInt(MAX_REQUEST_SIZE, 10000);
     TEST_UTIL.startMiniCluster();
   }
 
@@ -64,17 +65,18 @@ public void testHbasePutDeleteCell() throws Exception {
     TEST_UTIL.waitTableAvailable(tableName.getName(), 5000);
     try {
       byte[] value = new byte[2 * 2014 * 1024];
-
-      Put p = new Put(Bytes.toBytes("bigrow"));
-      // big request = 400*2 M
-      for (int i = 0; i < 400; i++) {
-        p.addColumn(family, Bytes.toBytes("someQualifier" + i), value);
-      }
-      try {
-        table.put(p);
-        assertTrue("expected RequestTooBigException", false);
-      } catch (RequestTooBigException e) {
-        assertTrue("expected RequestTooBigException", true);
+      for (int m = 0; m < 10000; m++) {
+        Put p = new Put(Bytes.toBytes("bigrow"));
+        // big request = 400*2 M
+        for (int i = 0; i < 400; i++) {
+          p.addColumn(family, Bytes.toBytes("someQualifier" + i), value);
+        }
+        try {
+          table.put(p);
+          assertTrue("expected RequestTooBigException", false);
+        } catch (RequestTooBigException e) {
+          assertTrue("expected RequestTooBigException", true);
+        }
       }
     } finally {
       table.close();
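
A side note on the assertion pattern this change keeps: the assertTrue("expected RequestTooBigException", false) / assertTrue(..., true) pair is a manual way of asserting that table.put() throws. A minimal sketch of the same check using JUnit 4.13+'s Assert.assertThrows follows; the class name, the buildOversizedPut() helper, and the import path for RequestTooBigException are assumptions for illustration, not part of the patch.

import static org.junit.Assert.assertThrows;

import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
// Import path assumed to match the one already used by the test file.
import org.apache.hadoop.hbase.exceptions.RequestTooBigException;
import org.apache.hadoop.hbase.util.Bytes;

class RequestTooBigAssertSketch {

  // Hypothetical helper mirroring the Put-building loop in the test: a single
  // Put carrying 400 multi-megabyte values, far above the 10000-byte
  // MAX_REQUEST_SIZE configured in setUpBeforeClass().
  private static Put buildOversizedPut(byte[] family, byte[] value) {
    Put p = new Put(Bytes.toBytes("bigrow"));
    for (int i = 0; i < 400; i++) {
      p.addColumn(family, Bytes.toBytes("someQualifier" + i), value);
    }
    return p;
  }

  static void assertPutRejected(Table table, byte[] family, byte[] value) {
    // assertThrows fails the test if put() completes normally or throws a
    // different exception, replacing the assertTrue(msg, false/true) pair.
    assertThrows(RequestTooBigException.class,
      () -> table.put(buildOversizedPut(family, value)));
  }
}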