diff --git a/hadoop-tools/hadoop-aws/pom.xml b/hadoop-tools/hadoop-aws/pom.xml
index f10eea7a4d3bf..629c56e3fa5b5 100644
--- a/hadoop-tools/hadoop-aws/pom.xml
+++ b/hadoop-tools/hadoop-aws/pom.xml
@@ -499,11 +499,6 @@
      <artifactId>wildfly-openssl</artifactId>
      <scope>runtime</scope>
    </dependency>
-    <dependency>
-      <groupId>junit</groupId>
-      <artifactId>junit</artifactId>
-      <scope>test</scope>
-    </dependency>
    <dependency>
      <groupId>org.mockito</groupId>
      <artifactId>mockito-inline</artifactId>
@@ -618,10 +613,5 @@
      <artifactId>junit-platform-launcher</artifactId>
      <scope>test</scope>
    </dependency>
-    <dependency>
-      <groupId>org.junit.vintage</groupId>
-      <artifactId>junit-vintage-engine</artifactId>
-      <scope>test</scope>
-    </dependency>
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/AbstractS3AMockTest.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/AbstractS3AMockTest.java
index 940e23026af46..81a295345a8fc 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/AbstractS3AMockTest.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/AbstractS3AMockTest.java
@@ -28,10 +28,8 @@
import org.apache.hadoop.conf.Configuration;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Rule;
-import org.junit.rules.ExpectedException;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
/**
@@ -49,14 +47,11 @@ public abstract class AbstractS3AMockTest {
.build())
.build();
- @Rule
- public ExpectedException exception = ExpectedException.none();
-
protected S3AFileSystem fs;
protected S3Client s3;
protected Configuration conf;
- @Before
+ @BeforeEach
public void setup() throws Exception {
conf = createConfiguration();
fs = new S3AFileSystem();
@@ -97,7 +92,7 @@ public S3Client getS3Client() {
return s3;
}
- @After
+ @AfterEach
public void teardown() throws Exception {
if (fs != null) {
fs.close();
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestBlockingThreadPoolExecutorService.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestBlockingThreadPoolExecutorService.java
index cf9ad877add89..baaa6eb37a4fe 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestBlockingThreadPoolExecutorService.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestBlockingThreadPoolExecutorService.java
@@ -22,10 +22,9 @@
import org.apache.hadoop.util.SemaphoredDelegatingExecutor;
import org.apache.hadoop.util.StopWatch;
-import org.junit.AfterClass;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.rules.Timeout;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.Timeout;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -35,11 +34,12 @@
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
-import static org.junit.Assert.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertEquals;
/**
* Basic test for S3A's blocking executor service.
*/
+@Timeout(60)
public class ITestBlockingThreadPoolExecutorService {
private static final Logger LOG = LoggerFactory.getLogger(
@@ -56,10 +56,7 @@ public class ITestBlockingThreadPoolExecutorService {
private static BlockingThreadPoolExecutorService tpe;
- @Rule
- public Timeout testTimeout = new Timeout(60, TimeUnit.SECONDS);
-
- @AfterClass
+ @AfterAll
public static void afterClass() throws Exception {
ensureDestroyed();
}
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestEMRFSCompatibility.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestEMRFSCompatibility.java
index 402f32d2c34da..578eeb53b8cae 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestEMRFSCompatibility.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestEMRFSCompatibility.java
@@ -19,7 +19,7 @@
package org.apache.hadoop.fs.s3a;
import org.assertj.core.api.Assertions;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
import org.apache.hadoop.fs.Path;
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AAWSCredentialsProvider.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AAWSCredentialsProvider.java
index e6f258e556417..1f842cb21ff4f 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AAWSCredentialsProvider.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AAWSCredentialsProvider.java
@@ -20,7 +20,6 @@
import java.io.IOException;
import java.nio.file.AccessDeniedException;
-import java.util.concurrent.TimeUnit;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
@@ -29,10 +28,9 @@
import org.apache.hadoop.fs.s3a.impl.InstantiationIOException;
import org.assertj.core.api.Assertions;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.rules.Timeout;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.Timeout;
import software.amazon.awssdk.auth.credentials.AwsBasicCredentials;
import software.amazon.awssdk.auth.credentials.AwsCredentials;
import software.amazon.awssdk.auth.credentials.AwsCredentialsProvider;
@@ -46,19 +44,18 @@
import static org.apache.hadoop.fs.s3a.test.PublicDatasetTestUtils.getExternalData;
import static org.apache.hadoop.fs.s3a.test.PublicDatasetTestUtils.isUsingDefaultExternalDataFile;
import static org.apache.hadoop.test.LambdaTestUtils.intercept;
-import static org.junit.Assert.*;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.fail;
/**
* Integration tests for {@link Constants#AWS_CREDENTIALS_PROVIDER} logic
* through the S3A Filesystem instantiation process.
*/
+@Timeout(60)
public class ITestS3AAWSCredentialsProvider {
private static final Logger LOG =
LoggerFactory.getLogger(ITestS3AAWSCredentialsProvider.class);
- @Rule
- public Timeout testTimeout = new Timeout(60_1000, TimeUnit.MILLISECONDS);
-
/**
* Expecting a wrapped ClassNotFoundException.
*/
@@ -219,9 +216,8 @@ public void testAnonymousProvider() throws Exception {
.describedAs("Filesystem")
.isNotNull();
FileStatus stat = fs.getFileStatus(testFile);
- assertEquals(
- "The qualified path returned by getFileStatus should be same as the original file",
- testFile, stat.getPath());
+ assertEquals(testFile, stat.getPath(),
+ "The qualified path returned by getFileStatus should be same as the original file");
}
}
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AAnalyticsAcceleratorStreamReading.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AAnalyticsAcceleratorStreamReading.java
index dff171bbdd8eb..9bdf98839ac42 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AAnalyticsAcceleratorStreamReading.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AAnalyticsAcceleratorStreamReading.java
@@ -23,8 +23,8 @@
import java.io.IOException;
import java.io.InputStream;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
import org.assertj.core.api.Assertions;
import org.apache.hadoop.conf.Configuration;
@@ -67,7 +67,7 @@ public class ITestS3AAnalyticsAcceleratorStreamReading extends AbstractS3ATestBa
private Path externalTestFile;
- @Before
+ @BeforeEach
public void setUp() throws Exception {
super.setup();
skipIfClientSideEncryption();
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AChecksum.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AChecksum.java
index f477f46ceb6c8..75266461565dc 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AChecksum.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AChecksum.java
@@ -21,7 +21,7 @@
import java.io.IOException;
import org.assertj.core.api.Assertions;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
import software.amazon.awssdk.services.s3.model.ChecksumAlgorithm;
import software.amazon.awssdk.services.s3.model.ChecksumMode;
import software.amazon.awssdk.services.s3.model.HeadObjectRequest;
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3ADSSEEncryptionWithDefaultS3Settings.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3ADSSEEncryptionWithDefaultS3Settings.java
index 732f95702d993..0b4856661b578 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3ADSSEEncryptionWithDefaultS3Settings.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3ADSSEEncryptionWithDefaultS3Settings.java
@@ -20,8 +20,8 @@
import java.io.IOException;
-import org.junit.Ignore;
import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;
import org.apache.commons.lang3.StringUtils;
@@ -96,13 +96,13 @@ protected void assertEncrypted(Path path) throws IOException {
}
@Override
- @Ignore
+ @Disabled
@Test
public void testEncryptionSettingPropagation() throws Throwable {
}
@Override
- @Ignore
+ @Disabled
@Test
public void testEncryption() throws Throwable {
}
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3ATestUtils.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3ATestUtils.java
index 88204b25e0799..b7a44d142e764 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3ATestUtils.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3ATestUtils.java
@@ -19,9 +19,9 @@
package org.apache.hadoop.fs.s3a;
import org.apache.hadoop.conf.Configuration;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -31,12 +31,12 @@
* Test the test utils. Why an integration test? it's needed to
* verify property pushdown.
*/
-public class ITestS3ATestUtils extends Assert {
+public class ITestS3ATestUtils extends Assertions {
private static final Logger LOG =
LoggerFactory.getLogger(ITestS3ATestUtils.class);
public static final String KEY = "undefined.property";
- @Before
+ @BeforeEach
public void clear() {
System.clearProperty(KEY);
}
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/MultipartTestUtils.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/MultipartTestUtils.java
index 8ece43d50a514..64eb846661608 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/MultipartTestUtils.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/MultipartTestUtils.java
@@ -27,7 +27,7 @@
import org.apache.hadoop.fs.s3a.impl.PutObjectOptions;
import org.apache.hadoop.fs.store.audit.AuditSpan;
-import org.junit.Assert;
+import org.junit.jupiter.api.Assertions;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -45,6 +45,7 @@
import static org.apache.hadoop.fs.contract.ContractTestUtils.createFile;
import static org.apache.hadoop.fs.contract.ContractTestUtils.dataset;
import static org.apache.hadoop.fs.s3a.commit.CommitConstants.MAGIC_PATH_PREFIX;
+import static org.junit.jupiter.api.Assertions.assertFalse;
/**
* Utilities for S3A multipart upload tests.
@@ -80,8 +81,7 @@ static void cleanupParts(S3AFileSystem fs, Set<IdKey> keySet) {
anyFailure = true;
}
}
- Assert.assertFalse("Failure aborting multipart upload(s), see log.",
- anyFailure);
+ assertFalse(anyFailure, "Failure aborting multipart upload(s), see log.");
}
public static IdKey createPartUpload(S3AFileSystem fs, String key, int len,
@@ -116,7 +116,7 @@ public static void assertNoUploadsAt(S3AFileSystem fs, Path path) throws
RemoteIterator<MultipartUpload> uploads = fs.listUploads(key);
while (uploads.hasNext()) {
MultipartUpload upload = uploads.next();
- Assert.fail("Found unexpected upload " + upload.key() + " " +
+ Assertions.fail("Found unexpected upload " + upload.key() + " " +
truncatedUploadId(upload.uploadId()));
}
}
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/S3ATestUtils.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/S3ATestUtils.java
index aa296d8bf7b46..771a356713018 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/S3ATestUtils.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/S3ATestUtils.java
@@ -70,7 +70,6 @@
import org.assertj.core.api.Assertions;
import org.assertj.core.api.Assumptions;
-import org.junit.Assert;
import org.junit.Assume;
import org.junit.AssumptionViolatedException;
import org.slf4j.Logger;
@@ -126,7 +125,10 @@
import static org.apache.hadoop.util.functional.FunctionalIO.uncheckIOExceptions;
import static org.apache.hadoop.util.functional.RemoteIterators.mappingRemoteIterator;
import static org.apache.hadoop.util.functional.RemoteIterators.toList;
-import static org.junit.Assert.*;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertNotEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
/**
* Utilities for the S3A tests.
@@ -1264,8 +1266,7 @@ public void assertDiffEquals(String message, long expected) {
// Log in error ensures that the details appear in the test output
LOG.error(text + " expected {}, actual {}", expected, diff);
}
- Assert.assertEquals(text,
- expected, diff);
+ assertEquals(expected, diff, text);
}
/**
@@ -1282,8 +1283,8 @@ public void assertDiffEquals(long expected) {
* @param that the other metric diff instance.
*/
public void assertDiffEquals(MetricDiff that) {
- Assert.assertEquals(this.toString() + " != " + that,
- this.diff(), that.diff());
+ assertEquals(this.diff(), that.diff(),
+ this.toString() + " != " + that);
}
/**
@@ -1337,9 +1338,9 @@ public long getStartingValue() {
* @param obj object to check
*/
public static void assertInstanceOf(Class<?> expectedClass, Object obj) {
- Assert.assertTrue(String.format("Expected instance of class %s, but is %s.",
- expectedClass, obj.getClass()),
- expectedClass.isAssignableFrom(obj.getClass()));
+ assertTrue(expectedClass.isAssignableFrom(obj.getClass()),
+ String.format("Expected instance of class %s, but is %s.",
+ expectedClass, obj.getClass()));
}
/**
@@ -1399,17 +1400,17 @@ public static void verifyFileStatus(FileStatus status,
String group,
FsPermission permission) {
String details = status.toString();
- assertFalse("Not a dir: " + details, status.isDirectory());
- assertEquals("Mod time: " + details, modTime, status.getModificationTime());
- assertEquals("File size: " + details, size, status.getLen());
- assertEquals("Block size: " + details, blockSize, status.getBlockSize());
+ assertFalse(status.isDirectory(), "Not a dir: " + details);
+ assertEquals(modTime, status.getModificationTime(), "Mod time: " + details);
+ assertEquals(size, status.getLen(), "File size: " + details);
+ assertEquals(blockSize, status.getBlockSize(), "Block size: " + details);
if (replication > 0) {
- assertEquals("Replication value: " + details, replication,
- status.getReplication());
+ assertEquals(replication, status.getReplication(),
+ "Replication value: " + details);
}
if (accessTime != 0) {
- assertEquals("Access time: " + details, accessTime,
- status.getAccessTime());
+ assertEquals(accessTime, status.getAccessTime(),
+ "Access time: " + details);
}
if (owner != null) {
-      assertEquals("Owner: " + details, owner, status.getOwner());
+      assertEquals(owner, status.getOwner(), "Owner: " + details);
@@ -1418,8 +1419,8 @@ public static void verifyFileStatus(FileStatus status,
-      assertEquals("Group: " + details, group, status.getGroup());
+      assertEquals(group, status.getGroup(), "Group: " + details);
}
if (permission != null) {
- assertEquals("Permission: " + details, permission,
- status.getPermission());
+ assertEquals(permission, status.getPermission(),
+ "Permission: " + details);
}
}
@@ -1433,19 +1434,20 @@ public static void verifyDirStatus(S3AFileStatus status,
int replication,
String owner) {
String details = status.toString();
- assertTrue("Is a dir: " + details, status.isDirectory());
- assertEquals("zero length: " + details, 0, status.getLen());
+ assertTrue(status.isDirectory(), "Is a dir: " + details);
+ assertEquals(0, status.getLen(), "zero length: " + details);
// S3AFileStatus always assigns modTime = System.currentTimeMillis()
- assertTrue("Mod time: " + details, status.getModificationTime() > 0);
- assertEquals("Replication value: " + details, replication,
- status.getReplication());
- assertEquals("Access time: " + details, 0, status.getAccessTime());
+ assertTrue(status.getModificationTime() > 0, "Mod time: " + details);
+ assertEquals(replication, status.getReplication(),
+ "Replication value: " + details);
+ assertEquals(0, status.getAccessTime(),
+ "Access time: " + details);
-    assertEquals("Owner: " + details, owner, status.getOwner());
+    assertEquals(owner, status.getOwner(), "Owner: " + details);
// S3AFileStatus always assigns group=owner
-    assertEquals("Group: " + details, owner, status.getGroup());
+    assertEquals(owner, status.getGroup(), "Group: " + details);
// S3AFileStatus always assigns permission = default
- assertEquals("Permission: " + details,
- FsPermission.getDefault(), status.getPermission());
+ assertEquals(FsPermission.getDefault(), status.getPermission(),
+ "Permission: " + details);
}
/**
@@ -1590,15 +1592,15 @@ public static void checkListingDoesNotContainPath(S3AFileSystem fs, Path filePat
fs.listFiles(filePath.getParent(), false);
while (listIter.hasNext()) {
final LocatedFileStatus lfs = listIter.next();
- assertNotEquals("Listing was not supposed to include " + filePath,
- filePath, lfs.getPath());
+ assertNotEquals(filePath, lfs.getPath(),
+ "Listing was not supposed to include " + filePath);
}
LOG.info("{}; file omitted from listFiles listing as expected.", filePath);
final FileStatus[] fileStatuses = fs.listStatus(filePath.getParent());
for (FileStatus fileStatus : fileStatuses) {
- assertNotEquals("Listing was not supposed to include " + filePath,
- filePath, fileStatus.getPath());
+ assertNotEquals(filePath, fileStatus.getPath(),
+ "Listing was not supposed to include " + filePath);
}
LOG.info("{}; file omitted from listStatus as expected.", filePath);
}
@@ -1626,10 +1628,10 @@ public static void checkListingContainsPath(S3AFileSystem fs, Path filePath)
listStatusHasIt = true;
}
}
- assertTrue("fs.listFiles didn't include " + filePath,
- listFilesHasIt);
- assertTrue("fs.listStatus didn't include " + filePath,
- listStatusHasIt);
+ assertTrue(listFilesHasIt,
+ "fs.listFiles didn't include " + filePath);
+ assertTrue(listStatusHasIt,
+ "fs.listStatus didn't include " + filePath);
}
/**
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestListing.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestListing.java
index 38993b43ebf45..01e444187785d 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestListing.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestListing.java
@@ -20,8 +20,8 @@
import java.util.NoSuchElementException;
-import org.junit.Assert;
-import org.junit.Test;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Test;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator;
@@ -45,10 +45,10 @@ public void testProvidedFileStatusIteratorEnd() throws Exception {
RemoteIterator<S3AFileStatus> it = Listing.toProvidedFileStatusIterator(
statuses);
- Assert.assertTrue("hasNext() should return true first time", it.hasNext());
- Assert.assertEquals("first element from iterator",
- s3aStatus, it.next());
- Assert.assertFalse("hasNext() should now be false", it.hasNext());
+ Assertions.assertTrue(it.hasNext(), "hasNext() should return true first time");
+ Assertions.assertEquals(s3aStatus, it.next(),
+ "first element from iterator");
+ Assertions.assertFalse(it.hasNext(), "hasNext() should now be false");
intercept(NoSuchElementException.class, it::next);
}
}
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestS3AAWSCredentialsProvider.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestS3AAWSCredentialsProvider.java
index 5d9d78aee68f2..fe926dec0c8c9 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestS3AAWSCredentialsProvider.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestS3AAWSCredentialsProvider.java
@@ -644,10 +644,10 @@ public void testConcurrentAuthentication() throws Throwable {
for (Future<AwsCredentials> result : results) {
AwsCredentials credentials = result.get();
- assertEquals("Access key from credential provider",
- "expectedAccessKey", credentials.accessKeyId());
- assertEquals("Secret key from credential provider",
- "expectedSecret", credentials.secretAccessKey());
+ assertEquals("expectedAccessKey", credentials.accessKeyId(),
+ "Access key from credential provider");
+ assertEquals("expectedSecret", credentials.secretAccessKey(),
+ "Secret key from credential provider");
}
} finally {
pool.awaitTermination(TERMINATION_TIMEOUT, TimeUnit.SECONDS);
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestS3ABlockOutputStream.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestS3ABlockOutputStream.java
index a22b55155c9f4..703da8574c70a 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestS3ABlockOutputStream.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestS3ABlockOutputStream.java
@@ -28,8 +28,8 @@
import org.apache.hadoop.fs.s3a.test.MinimalWriteOperationHelperCallbacks;
import org.apache.hadoop.fs.statistics.IOStatisticsContext;
import org.apache.hadoop.util.Progressable;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
import java.util.concurrent.ExecutorService;
@@ -76,7 +76,7 @@ private S3ABlockOutputStream.BlockOutputStreamBuilder mockS3ABuilder() {
return builder;
}
- @Before
+ @BeforeEach
public void setUp() throws Exception {
final S3ABlockOutputStream.BlockOutputStreamBuilder
builder = mockS3ABuilder();
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestS3ADeleteOnExit.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestS3ADeleteOnExit.java
index 5b8e1dc430065..695222d01cff7 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestS3ADeleteOnExit.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestS3ADeleteOnExit.java
@@ -35,7 +35,7 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
import org.mockito.ArgumentMatcher;
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestS3AEndpointParsing.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestS3AEndpointParsing.java
index 8a77c102ac67d..8be0708cad542 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestS3AEndpointParsing.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestS3AEndpointParsing.java
@@ -19,7 +19,7 @@
package org.apache.hadoop.fs.s3a;
import org.assertj.core.api.Assertions;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
import software.amazon.awssdk.regions.Region;
public class TestS3AEndpointParsing extends AbstractS3AMockTest {
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestS3AGetFileStatus.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestS3AGetFileStatus.java
index 1a2a21a6e5111..c52fb96a0e235 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestS3AGetFileStatus.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestS3AGetFileStatus.java
@@ -18,7 +18,7 @@
package org.apache.hadoop.fs.s3a;
-import static org.junit.Assert.*;
+import static org.junit.jupiter.api.Assertions.*;
import static org.mockito.ArgumentMatchers.argThat;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.when;
@@ -42,7 +42,7 @@
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.contract.ContractTestUtils;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
import org.mockito.ArgumentMatcher;
@@ -66,9 +66,9 @@ public void testFile() throws Exception {
assertEquals(objectMetadata.contentLength().longValue(), stat.getLen());
assertEquals(Date.from(objectMetadata.lastModified()).getTime(), stat.getModificationTime());
ContractTestUtils.assertNotErasureCoded(fs, path);
- assertTrue(path + " should have erasure coding unset in " +
- "FileStatus#toString(): " + stat,
- stat.toString().contains("isErasureCoded=false"));
+ assertTrue(stat.toString().contains("isErasureCoded=false"),
+ path + " should have erasure coding unset in " +
+ "FileStatus#toString(): " + stat);
}
@Test
@@ -107,9 +107,9 @@ public void testImplicitDirectory() throws Exception {
assertEquals(fs.makeQualified(path), stat.getPath());
assertTrue(stat.isDirectory());
ContractTestUtils.assertNotErasureCoded(fs, path);
- assertTrue(path + " should have erasure coding unset in " +
- "FileStatus#toString(): " + stat,
- stat.toString().contains("isErasureCoded=false"));
+ assertTrue(stat.toString().contains("isErasureCoded=false"),
+ path + " should have erasure coding unset in " +
+ "FileStatus#toString(): " + stat);
}
@Test
@@ -131,16 +131,17 @@ public void testRoot() throws Exception {
@Test
public void testNotFound() throws Exception {
- Path path = new Path("/dir");
- String key = path.toUri().getPath().substring(1);
- when(s3.headObject(argThat(correctGetMetadataRequest(BUCKET, key))))
- .thenThrow(NOT_FOUND);
- when(s3.headObject(argThat(
- correctGetMetadataRequest(BUCKET, key + "/")
- ))).thenThrow(NOT_FOUND);
- setupListMocks(Collections.emptyList(), Collections.emptyList());
- exception.expect(FileNotFoundException.class);
- fs.getFileStatus(path);
+ assertThrows(FileNotFoundException.class, () -> {
+ Path path = new Path("/dir");
+ String key = path.toUri().getPath().substring(1);
+ when(s3.headObject(argThat(correctGetMetadataRequest(BUCKET, key))))
+ .thenThrow(NOT_FOUND);
+ when(s3.headObject(argThat(
+ correctGetMetadataRequest(BUCKET, key + "/")
+ ))).thenThrow(NOT_FOUND);
+ setupListMocks(Collections.emptyList(), Collections.emptyList());
+ fs.getFileStatus(path);
+ });
}
private void setupListMocks(List<CommonPrefix> prefixes,
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestS3AInputPolicies.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestS3AInputPolicies.java
index c0c8137aaf676..a58518fc3cec9 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestS3AInputPolicies.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestS3AInputPolicies.java
@@ -18,10 +18,9 @@
package org.apache.hadoop.fs.s3a;
-import org.junit.Assert;
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.MethodSource;
import java.util.Arrays;
import java.util.Collection;
@@ -29,7 +28,6 @@
/**
* Unit test of the input policy logic, without making any S3 calls.
*/
-@RunWith(Parameterized.class)
public class TestS3AInputPolicies {
private S3AInputPolicy policy;
@@ -45,21 +43,20 @@ public class TestS3AInputPolicies {
public static final long _1MB = 1024L * 1024;
public static final long _10MB = _1MB * 10;
- public TestS3AInputPolicies(S3AInputPolicy policy,
- long targetPos,
- long length,
- long contentLength,
- long readahead,
- long expectedLimit) {
- this.policy = policy;
- this.targetPos = targetPos;
- this.length = length;
- this.contentLength = contentLength;
- this.readahead = readahead;
- this.expectedLimit = expectedLimit;
+ public void initTestS3AInputPolicies(S3AInputPolicy pPolicy,
+ long pTargetPos,
+ long pLength,
+ long pContentLength,
+ long pReadahead,
+ long pExpectedLimit) {
+ this.policy = pPolicy;
+ this.targetPos = pTargetPos;
+ this.length = pLength;
+ this.contentLength = pContentLength;
+ this.readahead = pReadahead;
+ this.expectedLimit = pExpectedLimit;
}
- @Parameterized.Parameters
public static Collection