diff --git a/hbase-it/pom.xml b/hbase-it/pom.xml
index 2e32864afa6f..59987810f117 100644
--- a/hbase-it/pom.xml
+++ b/hbase-it/pom.xml
@@ -196,11 +196,6 @@
junit-jupiter-params
test
-
- org.junit.vintage
- junit-vintage-engine
- test
-
org.hamcrest
hamcrest-core
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestAcidGuarantees.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestAcidGuarantees.java
index 6516494278ee..603037825c5d 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestAcidGuarantees.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestAcidGuarantees.java
@@ -28,8 +28,8 @@
import org.apache.hadoop.hbase.testclassification.IntegrationTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.util.ToolRunner;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
import org.apache.hbase.thirdparty.com.google.common.collect.Sets;
@@ -45,7 +45,7 @@
* -DnumGetters=2 -DnumScanners=2 -DnumUniqueRows=5
*
*/
-@Category(IntegrationTests.class)
+@Tag(IntegrationTests.TAG)
public class IntegrationTestAcidGuarantees extends IntegrationTestBase {
private static final int SERVER_COUNT = 1; // number of slaves for the smallest cluster
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestBackupRestore.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestBackupRestore.java
index 4a475d077fa3..ea55a5c61d76 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestBackupRestore.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestBackupRestore.java
@@ -18,7 +18,8 @@
package org.apache.hadoop.hbase;
import static org.apache.hadoop.hbase.IntegrationTestingUtility.createPreSplitLoadTestTable;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
import java.io.IOException;
import java.nio.charset.Charset;
@@ -52,11 +53,10 @@
import org.apache.hadoop.hbase.testclassification.IntegrationTests;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.util.ToolRunner;
-import org.junit.After;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -71,7 +71,7 @@
* @see HBASE-7912
* @see HBASE-14123
*/
-@Category(IntegrationTests.class)
+@Tag(IntegrationTests.TAG)
public class IntegrationTestBackupRestore extends IntegrationTestBase {
private static final String CLASS_NAME = IntegrationTestBackupRestore.class.getSimpleName();
protected static final Logger LOG = LoggerFactory.getLogger(IntegrationTestBackupRestore.class);
@@ -135,7 +135,7 @@ public void run() {
}
@Override
- @Before
+ @BeforeEach
public void setUp() throws Exception {
util = new IntegrationTestingUtility();
Configuration conf = util.getConfiguration();
@@ -151,7 +151,7 @@ public void setUp() throws Exception {
LOG.info("Cluster initialized and ready");
}
- @After
+ @AfterEach
public void tearDown() throws IOException {
LOG.info("Cleaning up after test.");
if (util.isDistributedCluster()) {
@@ -330,7 +330,7 @@ private void runTestSingle(TableName table) throws IOException {
restore(createRestoreRequest(BACKUP_ROOT_DIR, backupId, false, tablesRestoreIncMultiple, null,
true), client);
Table hTable = conn.getTable(table);
- Assert.assertEquals(util.countRows(hTable), rowsInIteration * numIterations);
+ assertEquals(util.countRows(hTable), rowsInIteration * numIterations);
hTable.close();
LOG.info("{} loop {} finished.", Thread.currentThread().getName(), (count - 1));
}
@@ -343,7 +343,7 @@ private void restoreVerifyTable(Connection conn, BackupAdmin client, TableName t
createRestoreRequest(BACKUP_ROOT_DIR, backupId, false, tablesRestoreIncMultiple, null, true),
client);
Table hTable = conn.getTable(table);
- Assert.assertEquals(expectedRows, util.countRows(hTable));
+ assertEquals(expectedRows, util.countRows(hTable));
hTable.close();
}
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestBase.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestBase.java
index 75a3025913b5..dc7384e13f1a 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestBase.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestBase.java
@@ -27,8 +27,8 @@
import org.apache.hadoop.hbase.chaos.factories.MonkeyFactory;
import org.apache.hadoop.hbase.chaos.monkies.ChaosMonkey;
import org.apache.hadoop.hbase.util.AbstractHBaseTool;
-import org.junit.After;
-import org.junit.Before;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -159,13 +159,13 @@ protected int doWork() throws Exception {
return result;
}
- @Before
+ @BeforeEach
public void setUp() throws Exception {
setUpCluster();
setUpMonkey();
}
- @After
+ @AfterEach
public void cleanUp() throws Exception {
cleanUpMonkey();
cleanUpCluster();
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestDDLMasterFailover.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestDDLMasterFailover.java
index 022be0579947..6ca28bb4ae53 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestDDLMasterFailover.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestDDLMasterFailover.java
@@ -17,6 +17,8 @@
*/
package org.apache.hadoop.hbase;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
@@ -195,8 +197,8 @@ protected void verifyNamespaces() throws IOException {
// iterating concurrent map
for (String nsName : namespaceMap.keySet()) {
try {
- Assert.assertTrue("Namespace: " + nsName + " in namespaceMap does not exist",
- admin.getNamespaceDescriptor(nsName) != null);
+ assertTrue(admin.getNamespaceDescriptor(nsName) != null,
+ "Namespace: " + nsName + " in namespaceMap does not exist");
} catch (NamespaceNotFoundException nsnfe) {
Assert
.fail("Namespace: " + nsName + " in namespaceMap does not exist: " + nsnfe.getMessage());
@@ -210,12 +212,12 @@ protected void verifyTables() throws IOException {
Admin admin = connection.getAdmin();
// iterating concurrent map
for (TableName tableName : enabledTables.keySet()) {
- Assert.assertTrue("Table: " + tableName + " in enabledTables is not enabled",
- admin.isTableEnabled(tableName));
+ assertTrue(admin.isTableEnabled(tableName),
+ "Table: " + tableName + " in enabledTables is not enabled");
}
for (TableName tableName : disabledTables.keySet()) {
- Assert.assertTrue("Table: " + tableName + " in disabledTables is not disabled",
- admin.isTableDisabled(tableName));
+ assertTrue(admin.isTableDisabled(tableName),
+ "Table: " + tableName + " in disabledTables is not disabled");
}
for (TableName tableName : deletedTables.keySet()) {
Assert.assertFalse("Table: " + tableName + " in deletedTables is not deleted",
@@ -291,7 +293,7 @@ void perform() throws IOException {
LOG.info("Creating namespace:" + nsd);
admin.createNamespace(nsd);
NamespaceDescriptor freshNamespaceDesc = admin.getNamespaceDescriptor(nsd.getName());
- Assert.assertTrue("Namespace: " + nsd + " was not created", freshNamespaceDesc != null);
+ assertTrue(freshNamespaceDesc != null, "Namespace: " + nsd + " was not created");
namespaceMap.put(nsd.getName(), freshNamespaceDesc);
LOG.info("Created namespace:" + freshNamespaceDesc);
} catch (Exception e) {
@@ -333,10 +335,10 @@ void perform() throws IOException {
modifiedNsd.setConfiguration(nsTestConfigKey, nsValueNew);
admin.modifyNamespace(modifiedNsd);
NamespaceDescriptor freshNamespaceDesc = admin.getNamespaceDescriptor(namespaceName);
- Assert.assertTrue("Namespace: " + selected + " was not modified",
- freshNamespaceDesc.getConfigurationValue(nsTestConfigKey).equals(nsValueNew));
- Assert.assertTrue("Namespace: " + namespaceName + " does not exist",
- admin.getNamespaceDescriptor(namespaceName) != null);
+ assertTrue(freshNamespaceDesc.getConfigurationValue(nsTestConfigKey).equals(nsValueNew),
+ "Namespace: " + selected + " was not modified");
+ assertTrue(admin.getNamespaceDescriptor(namespaceName) != null,
+ "Namespace: " + namespaceName + " does not exist");
namespaceMap.put(namespaceName, freshNamespaceDesc);
LOG.info("Modified namespace :" + freshNamespaceDesc);
} catch (Exception e) {
@@ -364,7 +366,7 @@ void perform() throws IOException {
try {
if (admin.getNamespaceDescriptor(namespaceName) != null) {
// the namespace still exists.
- Assert.assertTrue("Namespace: " + selected + " was not deleted", false);
+ assertTrue(false, "Namespace: " + selected + " was not deleted");
} else {
LOG.info("Deleted namespace :" + selected);
}
@@ -415,10 +417,10 @@ void perform() throws IOException {
byte[] endKey = Bytes.toBytes("row-" + Integer.MAX_VALUE);
LOG.info("Creating table:" + td);
admin.createTable(td, startKey, endKey, numRegions);
- Assert.assertTrue("Table: " + td + " was not created", admin.tableExists(tableName));
+ assertTrue(admin.tableExists(tableName), "Table: " + td + " was not created");
TableDescriptor freshTableDesc = admin.getDescriptor(tableName);
- Assert.assertTrue("After create, Table: " + tableName + " in not enabled",
- admin.isTableEnabled(tableName));
+ assertTrue(admin.isTableEnabled(tableName),
+ "After create, Table: " + tableName + " in not enabled");
enabledTables.put(tableName, freshTableDesc);
LOG.info("Created table:" + freshTableDesc);
} catch (Exception e) {
@@ -453,11 +455,10 @@ void perform() throws IOException {
TableName tableName = selected.getTableName();
LOG.info("Disabling table :" + selected);
admin.disableTable(tableName);
- Assert.assertTrue("Table: " + selected + " was not disabled",
- admin.isTableDisabled(tableName));
+ assertTrue(admin.isTableDisabled(tableName), "Table: " + selected + " was not disabled");
TableDescriptor freshTableDesc = admin.getDescriptor(tableName);
- Assert.assertTrue("After disable, Table: " + tableName + " is not disabled",
- admin.isTableDisabled(tableName));
+ assertTrue(admin.isTableDisabled(tableName),
+ "After disable, Table: " + tableName + " is not disabled");
disabledTables.put(tableName, freshTableDesc);
LOG.info("Disabled table :" + freshTableDesc);
} catch (Exception e) {
@@ -501,11 +502,10 @@ void perform() throws IOException {
TableName tableName = selected.getTableName();
LOG.info("Enabling table :" + selected);
admin.enableTable(tableName);
- Assert.assertTrue("Table: " + selected + " was not enabled",
- admin.isTableEnabled(tableName));
+ assertTrue(admin.isTableEnabled(tableName), "Table: " + selected + " was not enabled");
TableDescriptor freshTableDesc = admin.getDescriptor(tableName);
- Assert.assertTrue("After enable, Table: " + tableName + " in not enabled",
- admin.isTableEnabled(tableName));
+ assertTrue(admin.isTableEnabled(tableName),
+ "After enable, Table: " + tableName + " in not enabled");
enabledTables.put(tableName, freshTableDesc);
LOG.info("Enabled table :" + freshTableDesc);
} catch (Exception e) {
@@ -598,10 +598,10 @@ void perform() throws IOException {
admin.addColumnFamily(tableName, cfd);
// assertion
TableDescriptor freshTableDesc = admin.getDescriptor(tableName);
- Assert.assertTrue("Column family: " + cfd + " was not added",
- freshTableDesc.hasColumnFamily(cfd.getName()));
- Assert.assertTrue("After add column family, Table: " + tableName + " is not disabled",
- admin.isTableDisabled(tableName));
+ assertTrue(freshTableDesc.hasColumnFamily(cfd.getName()),
+ "Column family: " + cfd + " was not added");
+ assertTrue(admin.isTableDisabled(tableName),
+ "After add column family, Table: " + tableName + " is not disabled");
disabledTables.put(tableName, freshTableDesc);
LOG.info("Added column family: " + cfd + " to table: " + tableName);
} catch (Exception e) {
@@ -652,9 +652,8 @@ void perform() throws IOException {
freshColumnDesc.getMaxVersions(), versions);
Assert.assertEquals("Column family: " + freshColumnDesc + " was not altered",
freshColumnDesc.getMinVersions(), versions);
- Assert.assertTrue(
- "After alter versions of column family, Table: " + tableName + " is not disabled",
- admin.isTableDisabled(tableName));
+ assertTrue(admin.isTableDisabled(tableName),
+ "After alter versions of column family, Table: " + tableName + " is not disabled");
disabledTables.put(tableName, freshTableDesc);
LOG.info("Altered versions of column family: " + columnDesc + " to: " + versions
+ " in table: " + tableName);
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestIngest.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestIngest.java
index dfd6483b2efc..435dd3573227 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestIngest.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestIngest.java
@@ -17,6 +17,9 @@
*/
package org.apache.hadoop.hbase;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.fail;
+
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
@@ -30,9 +33,8 @@
import org.apache.hadoop.hbase.util.Threads;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.util.ToolRunner;
-import org.junit.Assert;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -43,7 +45,7 @@
* A base class for tests that do something with the cluster while running {@link LoadTestTool} to
* write and verify some data.
*/
-@Category(IntegrationTests.class)
+@Tag(IntegrationTests.TAG)
public class IntegrationTestIngest extends IntegrationTestBase {
public static final char HIPHEN = '-';
private static final int SERVER_COUNT = 1; // number of slaves for the smallest cluster
@@ -93,7 +95,7 @@ protected int getMinServerCount() {
protected void initTable() throws IOException {
int ret = loadTool.run(getArgsForLoadTestToolInitTable());
- Assert.assertEquals("Failed to initialize LoadTestTool", 0, ret);
+ assertEquals(0, ret, "Failed to initialize LoadTestTool");
}
@Override
@@ -173,7 +175,7 @@ protected void runIngestTest(long defaultRunTime, long keysPerServerPerIter, int
if (0 != ret) {
String errorMsg = "Load failed with error code " + ret;
LOG.error(errorMsg);
- Assert.fail(errorMsg);
+ fail(errorMsg);
}
ret = loadTool.run(getArgsForLoadTestTool("-update", String.format("60:%d:1", writeThreads),
@@ -181,7 +183,7 @@ protected void runIngestTest(long defaultRunTime, long keysPerServerPerIter, int
if (0 != ret) {
String errorMsg = "Update failed with error code " + ret;
LOG.error(errorMsg);
- Assert.fail(errorMsg);
+ fail(errorMsg);
}
ret = loadTool.run(
@@ -195,7 +197,7 @@ protected void runIngestTest(long defaultRunTime, long keysPerServerPerIter, int
if (0 != ret) {
LOG.error("Rerun of Verification failed with error code " + ret);
}
- Assert.fail(errorMsg);
+ fail(errorMsg);
}
startKey += numKeys;
}
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestIngestStripeCompactions.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestIngestStripeCompactions.java
index e39bb2c90f44..389757d424d2 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestIngestStripeCompactions.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestIngestStripeCompactions.java
@@ -31,13 +31,13 @@
import org.apache.hadoop.hbase.testclassification.IntegrationTests;
import org.apache.hadoop.hbase.util.HFileTestUtil;
import org.apache.hadoop.util.ToolRunner;
-import org.junit.experimental.categories.Category;
+import org.junit.jupiter.api.Tag;
/**
* A test class that does the same things as IntegrationTestIngest but with stripe compactions. Can
* be used with ChaosMonkey in the same manner.
*/
-@Category(IntegrationTests.class)
+@Tag(IntegrationTests.TAG)
public class IntegrationTestIngestStripeCompactions extends IntegrationTestIngest {
@Override
protected void initTable() throws IOException {
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestIngestWithACL.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestIngestWithACL.java
index 6b7566496ce9..bfa66381d9ff 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestIngestWithACL.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestIngestWithACL.java
@@ -29,7 +29,7 @@
import org.apache.hadoop.hbase.util.LoadTestTool;
import org.apache.hadoop.hbase.util.test.LoadTestDataGeneratorWithACL;
import org.apache.hadoop.util.ToolRunner;
-import org.junit.experimental.categories.Category;
+import org.junit.jupiter.api.Tag;
import org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLine;
@@ -39,7 +39,7 @@
* WRITE permissions are not read back and cells with READ permissions are read back. Every
* operation happens in the user's specific context
*/
-@Category(IntegrationTests.class)
+@Tag(IntegrationTests.TAG)
public class IntegrationTestIngestWithACL extends IntegrationTestIngest {
private static final char COLON = ':';
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestIngestWithEncryption.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestIngestWithEncryption.java
index e5aa4308651a..e56e14ceb41d 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestIngestWithEncryption.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestIngestWithEncryption.java
@@ -29,12 +29,12 @@
import org.apache.hadoop.hbase.testclassification.IntegrationTests;
import org.apache.hadoop.hbase.util.EncryptionTest;
import org.apache.hadoop.util.ToolRunner;
-import org.junit.Before;
-import org.junit.experimental.categories.Category;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Tag;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-@Category(IntegrationTests.class)
+@Tag(IntegrationTests.TAG)
public class IntegrationTestIngestWithEncryption extends IntegrationTestIngest {
private final static Logger LOG =
LoggerFactory.getLogger(IntegrationTestIngestWithEncryption.class);
@@ -63,7 +63,7 @@ public void setUpCluster() throws Exception {
initialized = true;
}
- @Before
+ @BeforeEach
@Override
public void setUp() throws Exception {
// Initialize the cluster. This invokes LoadTestTool -init_only, which
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestIngestWithMOB.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestIngestWithMOB.java
index b82056c96216..4159d45b3c1c 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestIngestWithMOB.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestIngestWithMOB.java
@@ -34,15 +34,15 @@
import org.apache.hadoop.hbase.util.LoadTestDataGeneratorWithMOB;
import org.apache.hadoop.hbase.util.LoadTestTool;
import org.apache.hadoop.util.ToolRunner;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
import org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLine;
/**
* Integration Test for MOB ingest.
*/
-@Category(IntegrationTests.class)
+@Tag(IntegrationTests.TAG)
public class IntegrationTestIngestWithMOB extends IntegrationTestIngest {
private static final char COLON = ':';
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestIngestWithTags.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestIngestWithTags.java
index 870ab1b47415..1f9b7000c6a9 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestIngestWithTags.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestIngestWithTags.java
@@ -24,9 +24,9 @@
import org.apache.hadoop.hbase.testclassification.IntegrationTests;
import org.apache.hadoop.hbase.util.LoadTestDataGeneratorWithTags;
import org.apache.hadoop.hbase.util.LoadTestTool;
-import org.junit.experimental.categories.Category;
+import org.junit.jupiter.api.Tag;
-@Category(IntegrationTests.class)
+@Tag(IntegrationTests.TAG)
public class IntegrationTestIngestWithTags extends IntegrationTestIngest {
private static final char COLON = ':';
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestIngestWithVisibilityLabels.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestIngestWithVisibilityLabels.java
index 1993d7950f6a..23b635cf4cce 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestIngestWithVisibilityLabels.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestIngestWithVisibilityLabels.java
@@ -28,9 +28,9 @@
import org.apache.hadoop.hbase.security.visibility.VisibilityTestUtil;
import org.apache.hadoop.hbase.testclassification.IntegrationTests;
import org.apache.hadoop.hbase.util.LoadTestTool;
-import org.junit.experimental.categories.Category;
+import org.junit.jupiter.api.Tag;
-@Category(IntegrationTests.class)
+@Tag(IntegrationTests.TAG)
public class IntegrationTestIngestWithVisibilityLabels extends IntegrationTestIngest {
private static final char COMMA = ',';
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestLazyCfLoading.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestLazyCfLoading.java
index 1e514b070ddf..dc29cedbbe57 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestLazyCfLoading.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestLazyCfLoading.java
@@ -17,6 +17,9 @@
*/
package org.apache.hadoop.hbase;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+
import java.security.InvalidParameterException;
import java.util.Map;
import java.util.Set;
@@ -42,11 +45,10 @@
import org.apache.hadoop.hbase.util.MultiThreadedWriter;
import org.apache.hadoop.hbase.util.RegionSplitter;
import org.apache.hadoop.hbase.util.test.LoadTestDataGenerator;
-import org.junit.After;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -54,7 +56,7 @@
* Integration test that verifies lazy CF loading during scans by doing repeated scans with this
* feature while multiple threads are continuously writing values; and verifying the result.
*/
-@Category(IntegrationTests.class)
+@Tag(IntegrationTests.TAG)
public class IntegrationTestLazyCfLoading {
private static final TableName TABLE_NAME =
TableName.valueOf(IntegrationTestLazyCfLoading.class.getSimpleName());
@@ -172,7 +174,7 @@ public Filter getScanFilter() {
}
}
- @Before
+ @BeforeEach
public void setUp() throws Exception {
LOG.info("Initializing cluster with " + NUM_SERVERS + " servers");
util.initializeCluster(NUM_SERVERS);
@@ -211,7 +213,7 @@ private void deleteTable() throws Exception {
}
}
- @After
+ @AfterEach
public void tearDown() throws Exception {
deleteTable();
LOG.info("Restoring the cluster");
@@ -267,18 +269,17 @@ public void testReadersAndWriters() throws Exception {
// Verify and count the results.
while ((result = results.next()) != null) {
boolean isOk = writer.verifyResultAgainstDataGenerator(result, true, true);
- Assert.assertTrue("Failed to verify [" + Bytes.toString(result.getRow()) + "]", isOk);
+ assertTrue(isOk, "Failed to verify [" + Bytes.toString(result.getRow()) + "]");
++resultCount;
}
long timeTaken = EnvironmentEdgeManager.currentTime() - startTs;
// Verify the result count.
long onesGennedAfterScan = dataGen.getExpectedNumberOfKeys();
- Assert.assertTrue(
- "Read " + resultCount + " keys when at most " + onesGennedAfterScan + " were generated ",
- onesGennedAfterScan >= resultCount);
+ assertTrue(onesGennedAfterScan >= resultCount,
+ "Read " + resultCount + " keys when at most " + onesGennedAfterScan + " were generated ");
if (isWriterDone) {
- Assert.assertTrue("Read " + resultCount + " keys; the writer is done and "
- + onesGennedAfterScan + " keys were generated", onesGennedAfterScan == resultCount);
+ assertTrue(onesGennedAfterScan == resultCount, "Read " + resultCount
+ + " keys; the writer is done and " + onesGennedAfterScan + " keys were generated");
} else if (onesGennedBeforeScan * 0.9 > resultCount) {
LOG.warn("Read way too few keys (" + resultCount + "/" + onesGennedBeforeScan
+ ") - there might be a problem, or the writer might just be slow");
@@ -289,8 +290,8 @@ public void testReadersAndWriters() throws Exception {
now = EnvironmentEdgeManager.currentTime();
}
}
- Assert.assertEquals("There are write failures", 0, writer.getNumWriteFailures());
- Assert.assertTrue("Writer is not done", isWriterDone);
+ assertEquals(0, writer.getNumWriteFailures(), "There are write failures");
+ assertTrue(isWriterDone, "Writer is not done");
// Assert.fail("Boom!");
connection.close();
}
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestManyRegions.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestManyRegions.java
index c2048a8da32a..6de0b2ee2b7f 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestManyRegions.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestManyRegions.java
@@ -18,7 +18,6 @@
package org.apache.hadoop.hbase;
import java.io.IOException;
-import java.util.concurrent.TimeUnit;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
@@ -29,13 +28,10 @@
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.util.RegionSplitter;
import org.apache.hadoop.hbase.util.RegionSplitter.SplitAlgorithm;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.ClassRule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-import org.junit.rules.TestRule;
-import org.junit.rules.Timeout;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -44,7 +40,7 @@
* verify it completes within a reasonable amount of time.
* @see HBASE-7220
*/
-@Category(IntegrationTests.class)
+@Tag(IntegrationTests.TAG)
public class IntegrationTestManyRegions {
private static final String CLASS_NAME = IntegrationTestManyRegions.class.getSimpleName();
@@ -69,14 +65,10 @@ public class IntegrationTestManyRegions {
protected static final int DEFAULT_TIMEOUT_MINUTES = 5;
protected static final int TIMEOUT_MINUTES =
UTIL.getConfiguration().getInt(TIMEOUT_MINUTES_KEY, DEFAULT_TIMEOUT_MINUTES);
- // This timeout is suitable since there is only single testcase in this test.
- @ClassRule
- public static final TestRule timeout = Timeout.builder()
- .withTimeout(TIMEOUT_MINUTES, TimeUnit.MINUTES).withLookingForStuckThread(true).build();
private Admin admin;
- @Before
+ @BeforeEach
public void setUp() throws Exception {
LOG.info(String.format("Initializing cluster with %d region servers.", REGION_SERVER_COUNT));
UTIL.initializeCluster(REGION_SERVER_COUNT);
@@ -92,7 +84,7 @@ public void setUp() throws Exception {
LOG.info("Cluster ready");
}
- @After
+ @AfterEach
public void tearDown() throws IOException {
LOG.info("Cleaning up after test.");
if (admin.tableExists(TABLE_NAME)) {
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestMetaReplicas.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestMetaReplicas.java
index a8c3a16d13dc..77ea221ef5a9 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestMetaReplicas.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestMetaReplicas.java
@@ -25,10 +25,10 @@
import org.apache.hadoop.hbase.zookeeper.ZKUtil;
import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
import org.apache.hadoop.hbase.zookeeper.ZNodePaths;
-import org.junit.AfterClass;
-import org.junit.BeforeClass;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
@@ -38,7 +38,7 @@
* requests on the created table - the other replicas of the meta would be used to get the location
* of the region of the created table.
*/
-@Category(IntegrationTests.class)
+@Tag(IntegrationTests.TAG)
public class IntegrationTestMetaReplicas {
/**
@@ -46,7 +46,7 @@ public class IntegrationTestMetaReplicas {
*/
private static IntegrationTestingUtility util;
- @BeforeClass
+ @BeforeAll
public static void setUp() throws Exception {
// Set up the integration test util
if (util == null) {
@@ -70,7 +70,7 @@ public static void setUp() throws Exception {
waitUntilZnodeAvailable(2);
}
- @AfterClass
+ @AfterAll
public static void teardown() throws Exception {
// Clean everything up.
util.restoreCluster();
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestMobCompaction.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestMobCompaction.java
index 8e1952a696cd..312f5287fc34 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestMobCompaction.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestMobCompaction.java
@@ -17,8 +17,8 @@
*/
package org.apache.hadoop.hbase;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
@@ -45,10 +45,10 @@
import org.apache.hadoop.hbase.testclassification.IntegrationTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.util.ToolRunner;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -71,8 +71,7 @@
*
*/
@SuppressWarnings("deprecation")
-
-@Category(IntegrationTests.class)
+@Tag(IntegrationTests.TAG)
public class IntegrationTestMobCompaction extends IntegrationTestBase {
protected static final Logger LOG = LoggerFactory.getLogger(IntegrationTestMobCompaction.class);
@@ -104,7 +103,7 @@ public class IntegrationTestMobCompaction extends IntegrationTestBase {
private static volatile boolean run = true;
@Override
- @Before
+ @BeforeEach
public void setUp() throws Exception {
util = getTestingUtil(getConf());
conf = util.getConfiguration();
@@ -129,7 +128,7 @@ private void createTestTable() throws IOException {
table = util.createTable(tableDescriptor, null);
}
- @After
+ @AfterEach
public void tearDown() throws IOException {
LOG.info("Cleaning up after test.");
if (util.isDistributedCluster()) {
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestRegionReplicaPerf.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestRegionReplicaPerf.java
index 644d89a4e314..26a27cc36374 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestRegionReplicaPerf.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestRegionReplicaPerf.java
@@ -18,9 +18,9 @@
package org.apache.hadoop.hbase;
import static java.lang.String.format;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
import com.codahale.metrics.Histogram;
import java.util.ArrayDeque;
@@ -44,7 +44,7 @@
import org.apache.hadoop.mapreduce.Counters;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.util.ToolRunner;
-import org.junit.experimental.categories.Category;
+import org.junit.jupiter.api.Tag;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -58,7 +58,7 @@
* IntegrationTestBase is incompatible with the JUnit runner. Hence no @Test annotations either. See
* {@code -help} for full list of options.
*/
-@Category(IntegrationTests.class)
+@Tag(IntegrationTests.TAG)
public class IntegrationTestRegionReplicaPerf extends IntegrationTestBase {
private static final Logger LOG = LoggerFactory.getLogger(IntegrationTestRegionReplicaPerf.class);
@@ -164,12 +164,12 @@ public void setUp() throws Exception {
// sanity check cluster
// TODO: this should reach out to master and verify online state instead
- assertEquals("Master must be configured with StochasticLoadBalancer",
- "org.apache.hadoop.hbase.master.balancer.StochasticLoadBalancer",
- conf.get("hbase.master.loadbalancer.class"));
+ assertEquals("org.apache.hadoop.hbase.master.balancer.StochasticLoadBalancer",
+ conf.get("hbase.master.loadbalancer.class"),
+ "Master must be configured with StochasticLoadBalancer");
// TODO: this should reach out to master and verify online state instead
- assertTrue("hbase.regionserver.storefile.refresh.period must be greater than zero.",
- conf.getLong("hbase.regionserver.storefile.refresh.period", 0) > 0);
+ assertTrue(conf.getLong("hbase.regionserver.storefile.refresh.period", 0) > 0,
+ "hbase.regionserver.storefile.refresh.period must be greater than zero.");
// enable client-side settings
conf.setBoolean(RpcClient.SPECIFIC_WRITE_THREAD, true);
@@ -241,7 +241,7 @@ private static double calcMean(String desc, Stat stat, List result
for (TimingResult tr : results) {
for (PerformanceEvaluation.RunResult r : tr.results) {
- assertNotNull("One of the run results is missing detailed run data.", r.hist);
+ assertNotNull(r.hist, "One of the run results is missing detailed run data.");
sum += stat.apply(r.hist);
count += 1;
LOG.debug(desc + "{" + YammerHistogramUtils.getHistogramReport(r.hist) + "}");
@@ -269,9 +269,9 @@ public void test() throws Exception {
new PerfEvalCallable(util.getAdmin(), writeOpts).call();
// one last sanity check, then send in the clowns!
- assertEquals("Table must be created with DisabledRegionSplitPolicy. Broken test.",
- DisabledRegionSplitPolicy.class.getName(),
- util.getAdmin().getDescriptor(tableName).getRegionSplitPolicyClassName());
+ assertEquals(DisabledRegionSplitPolicy.class.getName(),
+ util.getAdmin().getDescriptor(tableName).getRegionSplitPolicyClassName(),
+ "Table must be created with DisabledRegionSplitPolicy. Broken test.");
startMonkey();
// collect a baseline without region replicas.
@@ -313,16 +313,14 @@ public void test() throws Exception {
.add("withReplicasStdevMean", withReplicasStdevMean)
.add("withReplicas99.99Mean", withReplicas9999Mean).toString());
- assertTrue(
+ assertTrue(withReplicasStdevMean <= withoutReplicasStdevMean,
"Running with region replicas under chaos should have less request variance than without. "
+ "withReplicas.stdev.mean: " + withReplicasStdevMean + "ms "
- + "withoutReplicas.stdev.mean: " + withoutReplicasStdevMean + "ms.",
- withReplicasStdevMean <= withoutReplicasStdevMean);
- assertTrue(
+ + "withoutReplicas.stdev.mean: " + withoutReplicasStdevMean + "ms.");
+ assertTrue(withReplicas9999Mean <= withoutReplicas9999Mean,
"Running with region replicas under chaos should improve 99.99pct latency. "
+ "withReplicas.99.99.mean: " + withReplicas9999Mean + "ms "
- + "withoutReplicas.99.99.mean: " + withoutReplicas9999Mean + "ms.",
- withReplicas9999Mean <= withoutReplicas9999Mean);
+ + "withoutReplicas.99.99.mean: " + withoutReplicas9999Mean + "ms.");
}
public static void main(String[] args) throws Exception {
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/MockHttpApiRule.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/MockHttpApiRule.java
index be50b3137256..382efdad4b6c 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/MockHttpApiRule.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/MockHttpApiRule.java
@@ -27,7 +27,6 @@
import java.util.function.BiConsumer;
import java.util.regex.Pattern;
import javax.servlet.http.HttpServletResponse;
-import org.junit.rules.ExternalResource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -45,10 +44,9 @@
import org.apache.hbase.thirdparty.org.eclipse.jetty.util.RegexSet;
/**
- * A {@link org.junit.Rule} that manages a simple http server. The caller registers request handlers
- * to URI path regexp.
+ * A simple http server for testing. The caller registers request handlers to URI path regexp.
*/
-public class MockHttpApiRule extends ExternalResource {
+public class MockHttpApiRule {
private static final Logger LOG = LoggerFactory.getLogger(MockHttpApiRule.class);
private MockHandler handler;
@@ -100,8 +98,7 @@ public URI getURI() {
return server.getURI();
}
- @Override
- protected void before() throws Exception {
+ public void start() throws Exception {
handler = new MockHandler();
server = new Server();
final ServerConnector http = new ServerConnector(server);
@@ -113,8 +110,7 @@ protected void before() throws Exception {
server.start();
}
- @Override
- protected void after() {
+ public void close() {
try {
server.stop();
} catch (Exception e) {
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/TestIntegrationTestBase.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/TestIntegrationTestBase.java
index 469f64937200..12e26528aee9 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/TestIntegrationTestBase.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/TestIntegrationTestBase.java
@@ -17,23 +17,18 @@
*/
package org.apache.hadoop.hbase;
-import static org.junit.Assert.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertEquals;
import java.util.Properties;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.chaos.factories.MonkeyConstants;
import org.apache.hadoop.hbase.testclassification.SmallTests;
-import org.junit.ClassRule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
-@Category(SmallTests.class)
+@Tag(SmallTests.TAG)
public class TestIntegrationTestBase {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestIntegrationTestBase.class);
-
@Test
public void testMonkeyPropertiesParsing() {
final Configuration conf = new Configuration(false);
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/TestRESTApiClusterManager.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/TestRESTApiClusterManager.java
index ba5d0dae2ff4..44350058a2bc 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/TestRESTApiClusterManager.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/TestRESTApiClusterManager.java
@@ -17,7 +17,7 @@
*/
package org.apache.hadoop.hbase;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertTrue;
import java.io.IOException;
import java.util.HashMap;
@@ -26,48 +26,46 @@
import org.apache.hadoop.hbase.ClusterManager.ServiceType;
import org.apache.hadoop.hbase.RESTApiClusterManager.Service;
import org.apache.hadoop.hbase.testclassification.SmallTests;
-import org.junit.Before;
-import org.junit.BeforeClass;
-import org.junit.ClassRule;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-import org.junit.rules.TestName;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.TestInfo;
-@Category(SmallTests.class)
+@Tag(SmallTests.TAG)
public class TestRESTApiClusterManager {
- @ClassRule
- public static final HBaseClassTestRule testRule =
- HBaseClassTestRule.forClass(TestRESTApiClusterManager.class);
-
- @ClassRule
public static MockHttpApiRule mockHttpApi = new MockHttpApiRule();
- @Rule
- public final TestName testName = new TestName();
-
private static HBaseCommonTestingUtil testingUtility;
private ClusterManager clusterManager;
- @BeforeClass
- public static void beforeClass() {
+ @BeforeAll
+ public static void beforeClass() throws Exception {
+ mockHttpApi.start();
testingUtility = new HBaseCommonTestingUtil();
configureClusterManager(testingUtility.getConfiguration());
}
- @Before
- public void before() {
+ @AfterAll
+ public static void afterClass() throws Exception {
+ mockHttpApi.close();
+ }
+
+ @BeforeEach
+ public void before(TestInfo testInfo) {
mockHttpApi.clearRegistrations();
final Configuration methodConf = new Configuration(testingUtility.getConfiguration());
- methodConf.set("hbase.it.clustermanager.restapi.clustername", testName.getMethodName());
+ methodConf.set("hbase.it.clustermanager.restapi.clustername",
+ testInfo.getTestMethod().get().getName());
clusterManager = new RESTApiClusterManager();
clusterManager.setConf(methodConf);
}
@Test
- public void isRunningPositive() throws IOException {
- final String clusterName = testName.getMethodName();
+ public void isRunningPositive(TestInfo testInfo) throws IOException {
+ final String clusterName = testInfo.getTestMethod().get().getName();
final String hostName = "somehost";
final String serviceName = "hbase";
final String hostId = "some-id";
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/TestShellExecEndpointCoprocessor.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/TestShellExecEndpointCoprocessor.java
index 4496318e0e7a..be3d021c6461 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/TestShellExecEndpointCoprocessor.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/TestShellExecEndpointCoprocessor.java
@@ -17,9 +17,9 @@
*/
package org.apache.hadoop.hbase;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertTrue;
import java.io.File;
import java.io.IOException;
@@ -29,35 +29,53 @@
import java.nio.file.Paths;
import java.util.Optional;
import java.util.function.Consumer;
-import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.client.AsyncAdmin;
import org.apache.hadoop.hbase.client.AsyncConnection;
+import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.coprocessor.protobuf.generated.ShellExecEndpoint.ShellExecRequest;
import org.apache.hadoop.hbase.coprocessor.protobuf.generated.ShellExecEndpoint.ShellExecResponse;
import org.apache.hadoop.hbase.coprocessor.protobuf.generated.ShellExecEndpoint.ShellExecService;
import org.apache.hadoop.hbase.testclassification.MediumTests;
-import org.junit.ClassRule;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
/**
* Test for the {@link ShellExecEndpointCoprocessor}.
*/
-@Category(MediumTests.class)
+@Tag(MediumTests.TAG)
public class TestShellExecEndpointCoprocessor {
- @ClassRule
- public static final HBaseClassTestRule testRule =
- HBaseClassTestRule.forClass(TestShellExecEndpointCoprocessor.class);
+ private static HBaseTestingUtil testingUtility;
+ private AsyncConnection conn;
- @ClassRule
- public static final MiniClusterRule miniClusterRule =
- MiniClusterRule.newBuilder().setConfiguration(createConfiguration()).build();
+ @BeforeAll
+ public static void setUp() throws Exception {
+ testingUtility = new HBaseTestingUtil();
+ testingUtility.getConfiguration().set("hbase.coprocessor.master.classes",
+ ShellExecEndpointCoprocessor.class.getName());
+ testingUtility.startMiniCluster();
+ }
+
+ @AfterAll
+ public static void tearDown() throws Exception {
+ testingUtility.shutdownMiniCluster();
+ }
+
+ @BeforeEach
+ public void setUpConnection() throws Exception {
+ conn = ConnectionFactory.createAsyncConnection(testingUtility.getConfiguration()).get();
+ }
- @Rule
- public final ConnectionRule connectionRule =
- ConnectionRule.createAsyncConnectionRule(miniClusterRule::createAsyncConnection);
+ @AfterEach
+ public void tearDownConnection() throws IOException {
+ if (conn != null) {
+ conn.close();
+ }
+ }
@Test
public void testShellExecUnspecified() {
@@ -71,7 +89,6 @@ public void testShellExecForeground() {
}
private void testShellExecForeground(final Consumer<ShellExecRequest.Builder> consumer) {
- final AsyncConnection conn = connectionRule.getAsyncConnection();
final AsyncAdmin admin = conn.getAdmin();
final String command = "echo -n \"hello world\"";
@@ -87,10 +104,9 @@ admin. coprocessorService(ShellExecSer
@Test
public void testShellExecBackground() throws IOException {
- final AsyncConnection conn = connectionRule.getAsyncConnection();
final AsyncAdmin admin = conn.getAdmin();
- final File testDataDir = ensureTestDataDirExists(miniClusterRule.getTestingUtility());
+ final File testDataDir = ensureTestDataDirExists(testingUtility);
final File testFile = new File(testDataDir, "shell_exec_background.txt");
assertTrue(testFile.createNewFile());
assertEquals(0, testFile.length());
@@ -102,9 +118,9 @@ public void testShellExecBackground() throws IOException {
admin.<ShellExecService.Stub, ShellExecResponse> coprocessorService(ShellExecService::newStub,
(stub, controller, callback) -> stub.shellExec(controller, req, callback)).join();
- assertFalse("the response from a background task should have no exit code", resp.hasExitCode());
- assertFalse("the response from a background task should have no stdout", resp.hasStdout());
- assertFalse("the response from a background task should have no stderr", resp.hasStderr());
+ assertFalse(resp.hasExitCode(), "the response from a background task should have no exit code");
+ assertFalse(resp.hasStdout(), "the response from a background task should have no stdout");
+ assertFalse(resp.hasStderr(), "the response from a background task should have no stderr");
Waiter.waitFor(conn.getConfiguration(), 5_000, () -> testFile.length() > 0);
final String content =
@@ -121,10 +137,4 @@ private static File ensureTestDataDirExists(final HBaseTestingUtil testingUtilit
assertTrue(testDataDirFile.exists());
return testDataDirFile;
}
-
- private static Configuration createConfiguration() {
- final Configuration conf = HBaseConfiguration.create();
- conf.set("hbase.coprocessor.master.classes", ShellExecEndpointCoprocessor.class.getName());
- return conf;
- }
}
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/TestChangeSplitPolicyAction.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/TestChangeSplitPolicyAction.java
index 35aee4c52053..420aa1070edb 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/TestChangeSplitPolicyAction.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/TestChangeSplitPolicyAction.java
@@ -17,42 +17,36 @@
*/
package org.apache.hadoop.hbase.chaos.actions;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.IntegrationTestingUtility;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.testclassification.MediumTests;
-import org.junit.AfterClass;
-import org.junit.Before;
-import org.junit.BeforeClass;
-import org.junit.ClassRule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
import org.mockito.Mockito;
-@Category({ MediumTests.class })
+@Tag(MediumTests.TAG)
public class TestChangeSplitPolicyAction {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestChangeSplitPolicyAction.class);
-
private final static IntegrationTestingUtility TEST_UTIL = new IntegrationTestingUtility();
private final TableName tableName = TableName.valueOf("ChangeSplitPolicyAction");
- @BeforeClass
+ @BeforeAll
public static void setUpBeforeClass() throws Exception {
TEST_UTIL.startMiniCluster(2);
}
- @AfterClass
+ @AfterAll
public static void tearDownAfterClass() throws Exception {
TEST_UTIL.shutdownMiniCluster();
}
- @Before
+ @BeforeEach
public void setUp() throws Exception {
Admin admin = TEST_UTIL.getAdmin();
TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(tableName);
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/ipc/IntegrationTestRpcClient.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/ipc/IntegrationTestRpcClient.java
index b021de2d73e7..beb33601b92a 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/ipc/IntegrationTestRpcClient.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/ipc/IntegrationTestRpcClient.java
@@ -20,9 +20,9 @@
import static org.apache.hadoop.hbase.ipc.RpcClient.SPECIFIC_WRITE_THREAD;
import static org.apache.hadoop.hbase.ipc.TestProtobufRpcServiceImpl.SERVICE;
import static org.apache.hadoop.hbase.ipc.TestProtobufRpcServiceImpl.newBlockingStub;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
import java.io.IOException;
import java.net.InetSocketAddress;
@@ -41,9 +41,9 @@
import org.apache.hadoop.hbase.ipc.RpcServer.BlockingServiceAndInterface;
import org.apache.hadoop.hbase.testclassification.IntegrationTests;
import org.apache.hadoop.hbase.util.Threads;
-import org.junit.Ignore;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
+import org.junit.jupiter.api.Disabled;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -53,7 +53,7 @@
import org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.EchoResponseProto;
import org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestRpcServiceProtos.TestProtobufRpcProto.BlockingInterface;
-@Category(IntegrationTests.class)
+@Tag(IntegrationTests.TAG)
public class IntegrationTestRpcClient {
private static final Logger LOG = LoggerFactory.getLogger(IntegrationTestRpcClient.class);
@@ -330,7 +330,7 @@ public Void call() throws Exception {
}
@Test
- @Ignore // TODO: test fails with async client
+ @Disabled // TODO: test fails with async client
public void testRpcWithChaosMonkeyWithAsyncClient() throws Throwable {
for (int i = 0; i < numIterations; i++) {
TimeoutThread.runWithTimeout(new Callable<Void>() {
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestBulkLoad.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestBulkLoad.java
index 4828bd602f4f..19fec55a3713 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestBulkLoad.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestBulkLoad.java
@@ -17,7 +17,8 @@
*/
package org.apache.hadoop.hbase.mapreduce;
-import static org.junit.Assert.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
import java.io.DataInput;
import java.io.DataOutput;
@@ -77,8 +78,8 @@
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.util.ToolRunner;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -99,7 +100,7 @@
* hbase.IntegrationTestBulkLoad.replicaCount How many region replicas to configure for the table
* under test.
*/
-@Category(IntegrationTests.class)
+@Tag(IntegrationTests.TAG)
public class IntegrationTestBulkLoad extends IntegrationTestBase {
private static final Logger LOG = LoggerFactory.getLogger(IntegrationTestBulkLoad.class);
@@ -272,7 +273,7 @@ private void runLinkedListMRJob(int iteration) throws Exception {
HFileOutputFormat2.configureIncrementalLoad(job, admin.getDescriptor(getTablename()),
regionLocator);
// Run the job making sure it works.
- assertEquals(true, job.waitForCompletion(true));
+ assertTrue(job.waitForCompletion(true));
}
// Create a new loader.
BulkLoadHFiles loader = BulkLoadHFiles.create(conf);
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestFileBasedSFTBulkLoad.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestFileBasedSFTBulkLoad.java
index abb1ae297815..4267fdfefae3 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestFileBasedSFTBulkLoad.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestFileBasedSFTBulkLoad.java
@@ -23,8 +23,8 @@
import org.apache.hadoop.hbase.regionserver.storefiletracker.StoreFileTrackerFactory;
import org.apache.hadoop.hbase.testclassification.IntegrationTests;
import org.apache.hadoop.util.ToolRunner;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -42,7 +42,7 @@
* hbase.IntegrationTestBulkLoad.replicaCount How many region replicas to configure for the table
* under test.
*/
-@Category(IntegrationTests.class)
+@Tag(IntegrationTests.TAG)
public class IntegrationTestFileBasedSFTBulkLoad extends IntegrationTestBulkLoad {
private static final Logger LOG =
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestImportTsv.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestImportTsv.java
index e5c1fbed1a56..602758678aa9 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestImportTsv.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestImportTsv.java
@@ -18,9 +18,9 @@
package org.apache.hadoop.hbase.mapreduce;
import static java.lang.String.format;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertTrue;
import java.io.File;
import java.io.IOException;
@@ -50,12 +50,11 @@
import org.apache.hadoop.mapreduce.lib.partition.TotalOrderPartitioner;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
-import org.junit.AfterClass;
-import org.junit.BeforeClass;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-import org.junit.rules.TestName;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.TestInfo;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -65,7 +64,7 @@
/**
* Validate ImportTsv + BulkLoadFiles on a distributed cluster.
*/
-@Category(IntegrationTests.class)
+@Tag(IntegrationTests.TAG)
public class IntegrationTestImportTsv extends Configured implements Tool {
private static final String NAME = IntegrationTestImportTsv.class.getSimpleName();
@@ -77,9 +76,6 @@ public class IntegrationTestImportTsv extends Configured implements Tool {
+ "row3\t1\tc1\tc2\n" + "row4\t1\tc1\tc2\n" + "row5\t1\tc1\tc2\n" + "row6\t1\tc1\tc2\n"
+ "row7\t1\tc1\tc2\n" + "row8\t1\tc1\tc2\n" + "row9\t1\tc1\tc2\n" + "row10\t1\tc1\tc2\n";
- @Rule
- public TestName name = new TestName();
-
protected static final Set<KeyValue> simple_expected =
new TreeSet<KeyValue>(CellComparator.getInstance()) {
private static final long serialVersionUID = 1L;
@@ -113,7 +109,7 @@ public void setConf(Configuration conf) {
LOG.debug("Ignoring setConf call.");
}
- @BeforeClass
+ @BeforeAll
public static void provisionCluster() throws Exception {
if (null == util) {
util = new IntegrationTestingUtility();
@@ -125,7 +121,7 @@ public static void provisionCluster() throws Exception {
}
}
- @AfterClass
+ @AfterAll
public static void releaseCluster() throws Exception {
util.restoreCluster();
if (!util.isDistributedCluster()) {
@@ -141,8 +137,8 @@ protected void doLoadIncrementalHFiles(Path hfiles, TableName tableName) throws
String[] args = { hfiles.toString(), tableName.getNameAsString() };
LOG.info(format("Running LoadIncrememntalHFiles with args: %s", Arrays.asList(args)));
- assertEquals("Loading HFiles failed.", 0,
- ToolRunner.run(new BulkLoadHFilesTool(getConf()), args));
+ assertEquals(0, ToolRunner.run(new BulkLoadHFilesTool(getConf()), args),
+ "Loading HFiles failed.");
Table table = null;
Scan scan = new Scan();
@@ -156,14 +152,14 @@ protected void doLoadIncrementalHFiles(Path hfiles, TableName tableName) throws
while (resultsIt.hasNext() && expectedIt.hasNext()) {
Result r = resultsIt.next();
for (Cell actual : r.rawCells()) {
- assertTrue("Ran out of expected values prematurely!", expectedIt.hasNext());
+ assertTrue(expectedIt.hasNext(), "Ran out of expected values prematurely!");
KeyValue expected = expectedIt.next();
- assertEquals("Scan produced surprising result", 0,
- CellComparator.getInstance().compare(expected, actual));
+ assertEquals(0, CellComparator.getInstance().compare(expected, actual),
+ "Scan produced surprising result");
}
}
- assertFalse("Did not consume all expected values.", expectedIt.hasNext());
- assertFalse("Did not consume all scan results.", resultsIt.hasNext());
+ assertFalse(expectedIt.hasNext(), "Did not consume all expected values.");
+ assertFalse(resultsIt.hasNext(), "Did not consume all scan results.");
} finally {
if (null != table) table.close();
}
@@ -177,12 +173,12 @@ protected static void validateDeletedPartitionsFile(Configuration conf) throws I
FileSystem fs = FileSystem.get(conf);
Path partitionsFile = new Path(TotalOrderPartitioner.getPartitionFile(conf));
- assertFalse("Failed to clean up partitions file.", fs.exists(partitionsFile));
+ assertFalse(fs.exists(partitionsFile), "Failed to clean up partitions file.");
}
@Test
- public void testGenerateAndLoad() throws Exception {
- generateAndLoad(TableName.valueOf(name.getMethodName()));
+ public void testGenerateAndLoad(TestInfo testInfo) throws Exception {
+ generateAndLoad(TableName.valueOf(testInfo.getTestMethod().get().getName()));
}
void generateAndLoad(final TableName table) throws Exception {
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestTableMapReduceUtil.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestTableMapReduceUtil.java
index 5bdb8e6014af..be694b18ca8d 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestTableMapReduceUtil.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestTableMapReduceUtil.java
@@ -17,8 +17,8 @@
*/
package org.apache.hadoop.hbase.mapreduce;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assume.assumeTrue;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assumptions.assumeTrue;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
@@ -28,32 +28,32 @@
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
-import org.junit.Before;
-import org.junit.BeforeClass;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
/**
* Test that we add tmpjars correctly including the named dependencies. Runs as an integration test
* so that classpath is realistic.
*/
-@Category(IntegrationTests.class)
+@Tag(IntegrationTests.TAG)
public class IntegrationTestTableMapReduceUtil implements Configurable, Tool {
private static IntegrationTestingUtility util;
- @BeforeClass
+ @BeforeAll
public static void provisionCluster() throws Exception {
if (null == util) {
util = new IntegrationTestingUtility();
}
}
- @Before
+ @BeforeEach
public void skipMiniCluster() {
// test probably also works with a local cluster, but
// IntegrationTestingUtility doesn't support this concept.
- assumeTrue("test requires a distributed cluster.", util.isDistributedCluster());
+ assumeTrue(util.isDistributedCluster(), "test requires a distributed cluster.");
}
/**
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/mttr/IntegrationTestMTTR.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/mttr/IntegrationTestMTTR.java
index 2bb87ca8f2f6..b8ad76c239ba 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/mttr/IntegrationTestMTTR.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/mttr/IntegrationTestMTTR.java
@@ -17,8 +17,8 @@
*/
package org.apache.hadoop.hbase.mttr;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assume.assumeFalse;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assumptions.assumeFalse;
import io.opentelemetry.api.trace.Span;
import io.opentelemetry.context.Scope;
@@ -63,10 +63,10 @@
import org.apache.hadoop.hbase.trace.TraceUtil;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.LoadTestTool;
-import org.junit.AfterClass;
-import org.junit.BeforeClass;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -102,7 +102,7 @@
* At the end of the test a log line is output on the INFO level containing the timing data that was
* collected.
*/
-@Category(IntegrationTests.class)
+@Tag(IntegrationTests.TAG)
public class IntegrationTestMTTR {
/**
* Constants.
@@ -143,7 +143,7 @@ public class IntegrationTestMTTR {
*/
private static LoadTestTool loadTool;
- @BeforeClass
+ @BeforeAll
public static void setUp() throws Exception {
// Set up the integration test util
if (util == null) {
@@ -232,10 +232,10 @@ private static void setupTables() throws IOException {
// Setup the table for LoadTestTool
int ret = loadTool.run(new String[] { "-tn", loadTableName.getNameAsString(), "-init_only" });
- assertEquals("Failed to initialize LoadTestTool", 0, ret);
+ assertEquals(0, ret, "Failed to initialize LoadTestTool");
}
- @AfterClass
+ @AfterAll
public static void after() throws IOException {
// Clean everything up.
util.restoreCluster();
@@ -598,7 +598,7 @@ public Boolean call() throws Exception {
int ret = loadTool.run(new String[] { "-tn", loadTableName.getNameAsString(), "-write",
String.format("%d:%d:%d", colsPerKey, 500, writeThreads), "-num_keys",
String.valueOf(numKeys), "-skip_init" });
- assertEquals("Load failed", 0, ret);
+ assertEquals(0, ret, "Load failed");
} while (!future.isDone());
return true;
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java
index 58c329c0cd76..0b1da0568ff6 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java
@@ -112,8 +112,8 @@
import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -210,7 +210,7 @@
*
*
*/
-@Category(IntegrationTests.class)
+@Tag(IntegrationTests.TAG)
public class IntegrationTestBigLinkedList extends IntegrationTestBase {
protected static final byte[] NO_KEY = new byte[1];
protected static String TABLE_NAME_KEY = "IntegrationTestBigLinkedList.table";
@@ -1872,7 +1872,7 @@ public void testContinuousIngest() throws IOException, Exception {
}
int ret = ToolRunner.run(conf, new Loop(), new String[] { "1", "1", "2000000",
util.getDataTestDirOnTestFS("IntegrationTestBigLinkedList").toString(), "1" });
- org.junit.Assert.assertEquals(0, ret);
+ org.junit.jupiter.api.Assertions.assertEquals(0, ret);
}
private void usage() {
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedListWithVisibility.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedListWithVisibility.java
index 25640ed294d5..bc9d7af34465 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedListWithVisibility.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedListWithVisibility.java
@@ -66,8 +66,8 @@
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -91,7 +91,7 @@
* 20000 /tmp 1 10000 or ./hbase org.apache.hadoop.hbase.IntegrationTestsDriver -r
* .*IntegrationTestBigLinkedListWithVisibility.*
*/
-@Category(IntegrationTests.class)
+@Tag(IntegrationTests.TAG)
public class IntegrationTestBigLinkedListWithVisibility extends IntegrationTestBigLinkedList {
private static final String CONFIDENTIAL = "confidential";
@@ -635,7 +635,7 @@ public void testContinuousIngest() throws IOException, Exception {
new String[] { "1", "1", "20000",
util.getDataTestDirOnTestFS("IntegrationTestBigLinkedListWithVisibility").toString(), "1",
"10000" });
- org.junit.Assert.assertEquals(0, ret);
+ org.junit.jupiter.api.Assertions.assertEquals(0, ret);
}
public static void main(String[] args) throws Exception {
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestLoadAndVerify.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestLoadAndVerify.java
index 5566bd79cab0..5ca782ff0769 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestLoadAndVerify.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestLoadAndVerify.java
@@ -17,8 +17,8 @@
*/
package org.apache.hadoop.hbase.test;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
import java.io.BufferedReader;
import java.io.FileNotFoundException;
@@ -82,8 +82,8 @@
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat;
import org.apache.hadoop.util.ToolRunner;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -102,7 +102,7 @@
* hdfs and inspected later. This class can be run as a unit test, as an integration test, or from
* the command line Originally taken from Apache Bigtop.
*/
-@Category(IntegrationTests.class)
+@Tag(IntegrationTests.TAG)
public class IntegrationTestLoadAndVerify extends IntegrationTestBase {
private static final Logger LOG = LoggerFactory.getLogger(IntegrationTestLoadAndVerify.class);
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestMonkeys.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestMonkeys.java
index ea0ec46f785f..8691d7440b74 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestMonkeys.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestMonkeys.java
@@ -24,14 +24,14 @@
import org.apache.hadoop.hbase.chaos.util.Monkeys;
import org.apache.hadoop.hbase.testclassification.IntegrationTests;
import org.apache.hadoop.util.ToolRunner;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
/**
* This is an integration test for showing a simple usage of how to use {@link Monkeys} to control
* {@link ChaosMonkeyRunner}.
*/
-@Category(IntegrationTests.class)
+@Tag(IntegrationTests.TAG)
public class IntegrationTestMonkeys extends ChaosMonkeyRunner {
private static final int RUN_SECS = 15 * 1000;
private static final int WAIT_SECS = 10 * 1000;
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestTimeBoundedMultiGetRequestsWithRegionReplicas.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestTimeBoundedMultiGetRequestsWithRegionReplicas.java
index 996ddbc9f0bf..af139441050d 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestTimeBoundedMultiGetRequestsWithRegionReplicas.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestTimeBoundedMultiGetRequestsWithRegionReplicas.java
@@ -24,7 +24,7 @@
import org.apache.hadoop.hbase.testclassification.IntegrationTests;
import org.apache.hadoop.hbase.util.LoadTestTool;
import org.apache.hadoop.util.ToolRunner;
-import org.junit.experimental.categories.Category;
+import org.junit.jupiter.api.Tag;
import org.apache.hbase.thirdparty.com.google.common.collect.Lists;
@@ -43,7 +43,7 @@
* conjunction with multiget_batchsize would have different behaviors - the batch of gets goes to
* the same region or to multiple regions.
*/
-@Category(IntegrationTests.class)
+@Tag(IntegrationTests.TAG)
public class IntegrationTestTimeBoundedMultiGetRequestsWithRegionReplicas
extends IntegrationTestTimeBoundedRequestsWithRegionReplicas {
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestTimeBoundedRequestsWithRegionReplicas.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestTimeBoundedRequestsWithRegionReplicas.java
index e21f1d5e54ec..a0ada22b381a 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestTimeBoundedRequestsWithRegionReplicas.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestTimeBoundedRequestsWithRegionReplicas.java
@@ -17,6 +17,8 @@
*/
package org.apache.hadoop.hbase.test;
+import static org.junit.jupiter.api.Assertions.fail;
+
import java.io.IOException;
import java.util.List;
import java.util.concurrent.Executors;
@@ -47,8 +49,7 @@
import org.apache.hadoop.hbase.util.test.LoadTestDataGenerator;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.util.ToolRunner;
-import org.junit.Assert;
-import org.junit.experimental.categories.Category;
+import org.junit.jupiter.api.Tag;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -90,7 +91,7 @@
* -Dhbase.ipc.client.allowsInterrupt=true --monkey serverKilling
*
*/
-@Category(IntegrationTests.class)
+@Tag(IntegrationTests.TAG)
public class IntegrationTestTimeBoundedRequestsWithRegionReplicas extends IntegrationTestIngest {
private static final Logger LOG =
@@ -133,7 +134,7 @@ protected void writeData(int colsPerKey, int recordSize, int writeThreads, long
if (0 != ret) {
String errorMsg = "Load failed with error code " + ret;
LOG.error(errorMsg);
- Assert.fail(errorMsg);
+ fail(errorMsg);
}
}
@@ -221,7 +222,7 @@ public void run() {
if (0 != ret) {
String errorMsg = "Verification failed with error code " + ret;
LOG.error(errorMsg);
- Assert.fail(errorMsg);
+ fail(errorMsg);
}
} finally {
if (result != null) result.cancel(false);
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestWithCellVisibilityLoadAndVerify.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestWithCellVisibilityLoadAndVerify.java
index db9a1c40d741..8763b5b5459d 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestWithCellVisibilityLoadAndVerify.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestWithCellVisibilityLoadAndVerify.java
@@ -17,8 +17,8 @@
*/
package org.apache.hadoop.hbase.test;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
import java.io.IOException;
import java.security.PrivilegedExceptionAction;
@@ -57,7 +57,7 @@
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.ToolRunner;
-import org.junit.experimental.categories.Category;
+import org.junit.jupiter.api.Tag;
import org.apache.hbase.thirdparty.com.google.common.base.Splitter;
import org.apache.hbase.thirdparty.com.google.common.collect.Iterables;
@@ -74,7 +74,7 @@
* line. Originally taken from Apache Bigtop. Issue user names as comma seperated list. ./hbase
* IntegrationTestWithCellVisibilityLoadAndVerify -u usera,userb
*/
-@Category(IntegrationTests.class)
+@Tag(IntegrationTests.TAG)
public class IntegrationTestWithCellVisibilityLoadAndVerify extends IntegrationTestLoadAndVerify {
private static final String ERROR_STR =
"Two user names are to be specified seperated by a ',' like 'usera,userb'";
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestZKAndFSPermissions.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestZKAndFSPermissions.java
index 00bfe35983f1..ca84530598cb 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestZKAndFSPermissions.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestZKAndFSPermissions.java
@@ -17,9 +17,9 @@
*/
package org.apache.hadoop.hbase.test;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.fail;
import java.io.IOException;
import java.util.List;
@@ -45,7 +45,7 @@
import org.apache.zookeeper.data.ACL;
import org.apache.zookeeper.data.Id;
import org.apache.zookeeper.data.Stat;
-import org.junit.experimental.categories.Category;
+import org.junit.jupiter.api.Tag;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -62,7 +62,7 @@
*
* Example usage: hbase org.apache.hadoop.hbase.test.IntegrationTestZnodeACLs -h
*/
-@Category(IntegrationTests.class)
+@Tag(IntegrationTests.TAG)
public class IntegrationTestZKAndFSPermissions extends AbstractHBaseTool {
private static final Logger LOG =
LoggerFactory.getLogger(IntegrationTestZKAndFSPermissions.class);
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/trace/IntegrationTestSendTraceRequests.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/trace/IntegrationTestSendTraceRequests.java
index 481dd5ee7e81..0db2591f35cc 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/trace/IntegrationTestSendTraceRequests.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/trace/IntegrationTestSendTraceRequests.java
@@ -45,12 +45,12 @@
import org.apache.hadoop.hbase.util.AbstractHBaseTool;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.util.ToolRunner;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
import org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLine;
-@Category(IntegrationTests.class)
+@Tag(IntegrationTests.TAG)
public class IntegrationTestSendTraceRequests extends AbstractHBaseTool {
public static final String TABLE_ARG = "t";