diff --git a/aws/src/test/java/org/apache/iceberg/aws/s3/TestS3FileIO.java b/aws/src/test/java/org/apache/iceberg/aws/s3/TestS3FileIO.java index 2bcd83d244f7..c8b9dd5bbab9 100644 --- a/aws/src/test/java/org/apache/iceberg/aws/s3/TestS3FileIO.java +++ b/aws/src/test/java/org/apache/iceberg/aws/s3/TestS3FileIO.java @@ -18,10 +18,6 @@ */ package org.apache.iceberg.aws.s3; -import static org.junit.Assert.assertArrayEquals; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; import static org.mockito.AdditionalAnswers.delegatesTo; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.doReturn; @@ -29,7 +25,7 @@ import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; -import com.adobe.testing.s3mock.junit4.S3MockRule; +import com.adobe.testing.s3mock.junit5.S3MockExtension; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; @@ -61,14 +57,12 @@ import org.apache.iceberg.types.Types; import org.apache.iceberg.util.SerializableSupplier; import org.assertj.core.api.Assertions; -import org.junit.After; -import org.junit.Assert; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Ignore; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.mockito.junit.MockitoJUnitRunner; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.junit.jupiter.api.extension.RegisterExtension; import software.amazon.awssdk.core.sync.RequestBody; import software.amazon.awssdk.http.urlconnection.UrlConnectionHttpClient; import software.amazon.awssdk.regions.Region; @@ -79,10 +73,12 @@ import software.amazon.awssdk.services.s3.model.DeleteObjectsResponse; import software.amazon.awssdk.services.s3.model.S3Error; -@RunWith(MockitoJUnitRunner.class) +@ExtendWith(S3MockExtension.class) public class TestS3FileIO { - @ClassRule public static final S3MockRule S3_MOCK_RULE = S3MockRule.builder().silent().build(); - public SerializableSupplier s3 = S3_MOCK_RULE::createS3ClientV2; + @RegisterExtension + public static final S3MockExtension S3_MOCK = S3MockExtension.builder().silent().build(); + + public SerializableSupplier s3 = S3_MOCK::createS3ClientV2; private final S3Client s3mock = mock(S3Client.class, delegatesTo(s3.get())); private final Random random = new Random(1); private final int numBucketsForBatchDeletion = 3; @@ -97,7 +93,7 @@ public class TestS3FileIO { "s3.delete.batch-size", Integer.toString(batchDeletionSize)); - @Before + @BeforeEach public void before() { s3FileIO = new S3FileIO(() -> s3mock); s3FileIO.initialize(properties); @@ -108,7 +104,7 @@ public void before() { StaticClientFactory.client = s3mock; } - @After + @AfterEach public void after() { if (null != s3FileIO) { s3FileIO.close(); @@ -122,25 +118,25 @@ public void testNewInputFile() throws IOException { random.nextBytes(expected); InputFile in = s3FileIO.newInputFile(location); - assertFalse(in.exists()); + Assertions.assertThat(in.exists()).isFalse(); OutputFile out = s3FileIO.newOutputFile(location); try (OutputStream os = out.createOrOverwrite()) { IOUtils.write(expected, os); } - assertTrue(in.exists()); + Assertions.assertThat(in.exists()).isTrue(); byte[] actual; try (InputStream is = in.newStream()) { actual = IOUtils.readFully(is, expected.length); } - 
assertArrayEquals(expected, actual); + Assertions.assertThat(actual).isEqualTo(expected); s3FileIO.deleteFile(in); - assertFalse(s3FileIO.newInputFile(location).exists()); + Assertions.assertThat(s3FileIO.newInputFile(location).exists()).isFalse(); } @Test @@ -162,7 +158,7 @@ public void testDeleteFilesSingleBatchWithRemainder() { public void testDeleteEmptyList() throws IOException { String location = "s3://bucket/path/to/file.txt"; InputFile in = s3FileIO.newInputFile(location); - assertFalse(in.exists()); + Assertions.assertThat(in.exists()).isFalse(); OutputFile out = s3FileIO.newOutputFile(location); try (OutputStream os = out.createOrOverwrite()) { IOUtils.write(new byte[1024 * 1024], os); @@ -170,9 +166,9 @@ public void testDeleteEmptyList() throws IOException { s3FileIO.deleteFiles(Lists.newArrayList()); - Assert.assertTrue(s3FileIO.newInputFile(location).exists()); + Assertions.assertThat(s3FileIO.newInputFile(location).exists()).isTrue(); s3FileIO.deleteFile(in); - assertFalse(s3FileIO.newInputFile(location).exists()); + Assertions.assertThat(s3FileIO.newInputFile(location).exists()).isFalse(); } @Test @@ -207,7 +203,7 @@ private void testBatchDelete(int numObjects) { int expectedDeleteRequests = expectedNumberOfBatchesPerBucket * numBucketsForBatchDeletion; verify(s3mock, times(expectedDeleteRequests)).deleteObjects((DeleteObjectsRequest) any()); for (String path : paths) { - Assert.assertFalse(s3FileIO.newInputFile(path).exists()); + Assertions.assertThat(s3FileIO.newInputFile(path).exists()).isFalse(); } } @@ -223,7 +219,7 @@ public void testSerializeClient() { byte[] data = SerializationUtils.serialize(pre); SerializableSupplier post = SerializationUtils.deserialize(data); - assertEquals("s3", post.get().serviceName()); + Assertions.assertThat(post.get().serviceName()).isEqualTo("s3"); } @Test @@ -239,11 +235,13 @@ public void testPrefixList() { String scalePrefix = String.format("%s/%s/", prefix, scale); createRandomObjects(scalePrefix, scale); - assertEquals((long) scale, Streams.stream(s3FileIO.listPrefix(scalePrefix)).count()); + Assertions.assertThat(Streams.stream(s3FileIO.listPrefix(scalePrefix)).count()) + .isEqualTo((long) scale); }); long totalFiles = scaleSizes.stream().mapToLong(Integer::longValue).sum(); - assertEquals(totalFiles, Streams.stream(s3FileIO.listPrefix(prefix)).count()); + Assertions.assertThat(Streams.stream(s3FileIO.listPrefix(prefix)).count()) + .isEqualTo(totalFiles); } /** @@ -251,7 +249,7 @@ public void testPrefixList() { * exists through integration tests. 
   */
  @Test
-  @Ignore
+  @Disabled
  public void testPrefixDelete() {
    String prefix = "s3://bucket/path/to/delete";
    List scaleSizes = Lists.newArrayList(0, 5, 1001);
@@ -262,7 +260,8 @@ public void testPrefixDelete() {
          createRandomObjects(scalePrefix, scale);
          s3FileIO.deletePrefix(scalePrefix);
-          assertEquals(0L, Streams.stream(s3FileIO.listPrefix(scalePrefix)).count());
+          Assertions.assertThat(Streams.stream(s3FileIO.listPrefix(scalePrefix)).count())
+              .isEqualTo(0);
        });
  }
@@ -306,7 +305,7 @@ public void testMissingTableMetadata() {
          .hasMessageStartingWith("Location does not exist");
      long duration = System.currentTimeMillis() - start;
-      Assert.assertTrue("Should take less than 10 seconds", duration < 10_000);
+      Assertions.assertThat(duration < 10_000).as("Should take less than 10 seconds").isTrue();
    }
  }
@@ -321,8 +320,8 @@ public void testFileIOJsonSerialization() {
    String json = FileIOParser.toJson(s3FileIO);
    try (FileIO deserialized = FileIOParser.fromJson(json, conf)) {
-      Assert.assertTrue(deserialized instanceof S3FileIO);
-      Assert.assertEquals(s3FileIO.properties(), deserialized.properties());
+      Assertions.assertThat(deserialized).isInstanceOf(S3FileIO.class);
+      Assertions.assertThat(deserialized.properties()).isEqualTo(s3FileIO.properties());
    }
  }
@@ -334,7 +333,8 @@ public void testS3FileIOKryoSerialization() throws IOException {
    testS3FileIO.initialize(ImmutableMap.of("k1", "v1"));
    FileIO roundTripSerializedFileIO = TestHelpers.KryoHelpers.roundTripSerialize(testS3FileIO);
-    Assert.assertEquals(testS3FileIO.properties(), roundTripSerializedFileIO.properties());
+    Assertions.assertThat(roundTripSerializedFileIO.properties())
+        .isEqualTo(testS3FileIO.properties());
  }
  @Test
@@ -345,7 +345,8 @@ public void testS3FileIOWithEmptyPropsKryoSerialization() throws IOException {
    testS3FileIO.initialize(ImmutableMap.of());
    FileIO roundTripSerializedFileIO = TestHelpers.KryoHelpers.roundTripSerialize(testS3FileIO);
-    Assert.assertEquals(testS3FileIO.properties(), roundTripSerializedFileIO.properties());
+    Assertions.assertThat(roundTripSerializedFileIO.properties())
+        .isEqualTo(testS3FileIO.properties());
  }
  @Test
@@ -356,7 +357,8 @@ public void testS3FileIOJavaSerialization() throws IOException, ClassNotFoundExc
    testS3FileIO.initialize(ImmutableMap.of("k1", "v1"));
    FileIO roundTripSerializedFileIO = TestHelpers.roundTripSerialize(testS3FileIO);
-    Assert.assertEquals(testS3FileIO.properties(), roundTripSerializedFileIO.properties());
+    Assertions.assertThat(roundTripSerializedFileIO.properties())
+        .isEqualTo(testS3FileIO.properties());
  }
  private void createRandomObjects(String prefix, int count) {
diff --git a/aws/src/test/java/org/apache/iceberg/aws/s3/TestS3InputStream.java b/aws/src/test/java/org/apache/iceberg/aws/s3/TestS3InputStream.java
index 406b62c5f1dc..81edc0b41fbd 100644
--- a/aws/src/test/java/org/apache/iceberg/aws/s3/TestS3InputStream.java
+++ b/aws/src/test/java/org/apache/iceberg/aws/s3/TestS3InputStream.java
@@ -18,33 +18,33 @@
  */
 package org.apache.iceberg.aws.s3;
-import static org.junit.Assert.assertArrayEquals;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertThrows;
-
-import com.adobe.testing.s3mock.junit4.S3MockRule;
+import com.adobe.testing.s3mock.junit5.S3MockExtension;
 import java.io.IOException;
 import java.util.Arrays;
 import java.util.Random;
 import org.apache.commons.io.IOUtils;
 import org.apache.iceberg.io.RangeReadable;
 import org.apache.iceberg.io.SeekableInputStream;
-import org.junit.Before;
-import org.junit.ClassRule;
-import org.junit.Test; +import org.assertj.core.api.Assertions; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.junit.jupiter.api.extension.RegisterExtension; import software.amazon.awssdk.core.sync.RequestBody; import software.amazon.awssdk.services.s3.S3Client; import software.amazon.awssdk.services.s3.model.BucketAlreadyExistsException; import software.amazon.awssdk.services.s3.model.CreateBucketRequest; import software.amazon.awssdk.services.s3.model.PutObjectRequest; +@ExtendWith(S3MockExtension.class) public class TestS3InputStream { - @ClassRule public static final S3MockRule S3_MOCK_RULE = S3MockRule.builder().silent().build(); + @RegisterExtension + public static final S3MockExtension S3_MOCK = S3MockExtension.builder().silent().build(); - private final S3Client s3 = S3_MOCK_RULE.createS3ClientV2(); + private final S3Client s3 = S3_MOCK.createS3ClientV2(); private final Random random = new Random(1); - @Before + @BeforeEach public void before() { createBucket("bucket"); } @@ -88,7 +88,7 @@ private void readAndCheck( SeekableInputStream in, long rangeStart, int size, byte[] original, boolean buffered) throws IOException { in.seek(rangeStart); - assertEquals(rangeStart, in.getPos()); + Assertions.assertThat(in.getPos()).isEqualTo(rangeStart); long rangeEnd = rangeStart + size; byte[] actual = new byte[size]; @@ -102,8 +102,9 @@ private void readAndCheck( } } - assertEquals(rangeEnd, in.getPos()); - assertArrayEquals(Arrays.copyOfRange(original, (int) rangeStart, (int) rangeEnd), actual); + Assertions.assertThat(in.getPos()).isEqualTo(rangeEnd); + Assertions.assertThat(actual) + .isEqualTo(Arrays.copyOfRange(original, (int) rangeStart, (int) rangeEnd)); } @Test @@ -144,9 +145,8 @@ private void readAndCheckRanges( throws IOException { in.readFully(position, buffer, offset, length); - assertArrayEquals( - Arrays.copyOfRange(original, offset, offset + length), - Arrays.copyOfRange(buffer, offset, offset + length)); + Assertions.assertThat(Arrays.copyOfRange(buffer, offset, offset + length)) + .isEqualTo(Arrays.copyOfRange(original, offset, offset + length)); } @Test @@ -154,7 +154,9 @@ public void testClose() throws Exception { S3URI uri = new S3URI("s3://bucket/path/to/closed.dat"); SeekableInputStream closed = new S3InputStream(s3, uri); closed.close(); - assertThrows(IllegalStateException.class, () -> closed.seek(0)); + Assertions.assertThatThrownBy(() -> closed.seek(0)) + .isInstanceOf(IllegalStateException.class) + .hasMessage("already closed"); } @Test @@ -167,7 +169,8 @@ public void testSeek() throws Exception { try (SeekableInputStream in = new S3InputStream(s3, uri)) { in.seek(expected.length / 2); byte[] actual = IOUtils.readFully(in, expected.length / 2); - assertArrayEquals(Arrays.copyOfRange(expected, expected.length / 2, expected.length), actual); + Assertions.assertThat(actual) + .isEqualTo(Arrays.copyOfRange(expected, expected.length / 2, expected.length)); } } diff --git a/aws/src/test/java/org/apache/iceberg/aws/s3/TestS3OutputStream.java b/aws/src/test/java/org/apache/iceberg/aws/s3/TestS3OutputStream.java index b927760f6612..6f8d1d6cb988 100644 --- a/aws/src/test/java/org/apache/iceberg/aws/s3/TestS3OutputStream.java +++ b/aws/src/test/java/org/apache/iceberg/aws/s3/TestS3OutputStream.java @@ -19,9 +19,6 @@ package org.apache.iceberg.aws.s3; import static org.apache.iceberg.metrics.MetricsContext.nullMetrics; -import static org.junit.Assert.assertArrayEquals; -import static 
org.junit.Assert.assertEquals; -import static org.junit.Assert.fail; import static org.mockito.AdditionalAnswers.delegatesTo; import static org.mockito.Mockito.any; import static org.mockito.Mockito.doThrow; @@ -30,7 +27,7 @@ import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; -import com.adobe.testing.s3mock.junit4.S3MockRule; +import com.adobe.testing.s3mock.junit5.S3MockExtension; import java.io.File; import java.io.IOException; import java.io.UncheckedIOException; @@ -47,14 +44,13 @@ import java.util.stream.Stream; import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap; import org.assertj.core.api.Assertions; -import org.junit.After; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.junit.jupiter.api.extension.RegisterExtension; import org.mockito.ArgumentCaptor; import org.mockito.Mockito; -import org.mockito.junit.MockitoJUnitRunner; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import software.amazon.awssdk.core.ResponseBytes; @@ -72,15 +68,16 @@ import software.amazon.awssdk.services.s3.model.UploadPartRequest; import software.amazon.awssdk.utils.BinaryUtils; -@RunWith(MockitoJUnitRunner.class) +@ExtendWith(S3MockExtension.class) public class TestS3OutputStream { private static final Logger LOG = LoggerFactory.getLogger(TestS3OutputStream.class); private static final String BUCKET = "test-bucket"; private static final int FIVE_MBS = 5 * 1024 * 1024; - @ClassRule public static final S3MockRule S3_MOCK_RULE = S3MockRule.builder().silent().build(); + @RegisterExtension + public static final S3MockExtension S3_MOCK = S3MockExtension.builder().silent().build(); - private final S3Client s3 = S3_MOCK_RULE.createS3ClientV2(); + private final S3Client s3 = S3_MOCK.createS3ClientV2(); private final S3Client s3mock = mock(S3Client.class, delegatesTo(s3)); private final Random random = new Random(1); private final Path tmpDir = Files.createTempDirectory("s3fileio-test-"); @@ -102,13 +99,13 @@ public class TestS3OutputStream { public TestS3OutputStream() throws IOException {} - @Before + @BeforeEach public void before() { properties.setChecksumEnabled(false); createBucket(BUCKET); } - @After + @AfterEach public void after() { File newStagingDirectory = new File(newTmpDirectory); if (newStagingDirectory.exists()) { @@ -255,7 +252,8 @@ private void checkUploadPartRequestContent( for (int i = 0; i < uploadPartRequests.size(); ++i) { int offset = i * FIVE_MBS; int len = (i + 1) * FIVE_MBS - 1 > data.length ? 
data.length - offset : FIVE_MBS; - assertEquals(getDigest(data, offset, len), uploadPartRequests.get(i).contentMD5()); + Assertions.assertThat(uploadPartRequests.get(i).contentMD5()) + .isEqualTo(getDigest(data, offset, len)); } } } @@ -264,7 +262,8 @@ private void checkPutObjectRequestContent( byte[] data, ArgumentCaptor putObjectRequestArgumentCaptor) { if (properties.isChecksumEnabled()) { List putObjectRequests = putObjectRequestArgumentCaptor.getAllValues(); - assertEquals(getDigest(data, 0, data.length), putObjectRequests.get(0).contentMD5()); + Assertions.assertThat(putObjectRequests.get(0).contentMD5()) + .isEqualTo(getDigest(data, 0, data.length)); } } @@ -272,7 +271,7 @@ private void checkTags(ArgumentCaptor putObjectRequestArgument if (properties.isChecksumEnabled()) { List putObjectRequests = putObjectRequestArgumentCaptor.getAllValues(); String tagging = putObjectRequests.get(0).tagging(); - assertEquals(getTags(properties.writeTags()), tagging); + Assertions.assertThat(getTags(properties.writeTags())).isEqualTo(tagging); } } @@ -286,7 +285,7 @@ private String getDigest(byte[] data, int offset, int length) { md5.update(data, offset, length); return BinaryUtils.toBase64(md5.digest()); } catch (NoSuchAlgorithmException e) { - fail(String.format("Failed to get MD5 MessageDigest. %s", e)); + Assertions.fail("Failed to get MD5 MessageDigest. %s", e); } return null; } @@ -295,11 +294,11 @@ private void writeAndVerify(S3Client client, S3URI uri, byte[] data, boolean arr try (S3OutputStream stream = new S3OutputStream(client, uri, properties, nullMetrics())) { if (arrayWrite) { stream.write(data); - assertEquals(data.length, stream.getPos()); + Assertions.assertThat(stream.getPos()).isEqualTo(data.length); } else { for (int i = 0; i < data.length; i++) { stream.write(data[i]); - assertEquals(i + 1, stream.getPos()); + Assertions.assertThat(stream.getPos()).isEqualTo(i + 1); } } } catch (IOException e) { @@ -307,11 +306,11 @@ private void writeAndVerify(S3Client client, S3URI uri, byte[] data, boolean arr } byte[] actual = readS3Data(uri); - assertArrayEquals(data, actual); + Assertions.assertThat(actual).isEqualTo(data); // Verify all staging files are cleaned up try { - assertEquals(0, Files.list(tmpDir).count()); + Assertions.assertThat(Files.list(tmpDir)).isEmpty(); } catch (IOException e) { throw new UncheckedIOException(e); } diff --git a/aws/src/test/java/org/apache/iceberg/aws/s3/TestS3URI.java b/aws/src/test/java/org/apache/iceberg/aws/s3/TestS3URI.java index f352d46b509a..803cbe098416 100644 --- a/aws/src/test/java/org/apache/iceberg/aws/s3/TestS3URI.java +++ b/aws/src/test/java/org/apache/iceberg/aws/s3/TestS3URI.java @@ -18,8 +18,6 @@ */ package org.apache.iceberg.aws.s3; -import static org.junit.Assert.assertEquals; - import java.util.Map; import org.apache.iceberg.exceptions.ValidationException; import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap; @@ -34,9 +32,9 @@ public void testLocationParsing() { String p1 = "s3://bucket/path/to/file"; S3URI uri1 = new S3URI(p1); - assertEquals("bucket", uri1.bucket()); - assertEquals("path/to/file", uri1.key()); - assertEquals(p1, uri1.toString()); + Assertions.assertThat(uri1.bucket()).isEqualTo("bucket"); + Assertions.assertThat(uri1.key()).isEqualTo("path/to/file"); + Assertions.assertThat(uri1.toString()).isEqualTo(p1); } @Test @@ -44,9 +42,9 @@ public void testEncodedString() { String p1 = "s3://bucket/path%20to%20file"; S3URI uri1 = new S3URI(p1); - assertEquals("bucket", uri1.bucket()); - 
assertEquals("path%20to%20file", uri1.key()); - assertEquals(p1, uri1.toString()); + Assertions.assertThat(uri1.bucket()).isEqualTo("bucket"); + Assertions.assertThat(uri1.key()).isEqualTo("path%20to%20file"); + Assertions.assertThat(uri1.toString()).isEqualTo(p1); } @Test @@ -62,9 +60,9 @@ public void testOnlyBucketNameLocation() { String p1 = "s3://bucket"; S3URI url1 = new S3URI(p1); - assertEquals("bucket", url1.bucket()); - assertEquals("", url1.key()); - assertEquals(p1, url1.toString()); + Assertions.assertThat(url1.bucket()).isEqualTo("bucket"); + Assertions.assertThat(url1.key()).isEqualTo(""); + Assertions.assertThat(url1.toString()).isEqualTo(p1); } @Test @@ -72,17 +70,17 @@ public void testQueryAndFragment() { String p1 = "s3://bucket/path/to/file?query=foo#bar"; S3URI uri1 = new S3URI(p1); - assertEquals("bucket", uri1.bucket()); - assertEquals("path/to/file", uri1.key()); - assertEquals(p1, uri1.toString()); + Assertions.assertThat(uri1.bucket()).isEqualTo("bucket"); + Assertions.assertThat(uri1.key()).isEqualTo("path/to/file"); + Assertions.assertThat(uri1.toString()).isEqualTo(p1); } @Test public void testValidSchemes() { for (String scheme : Lists.newArrayList("https", "s3", "s3a", "s3n", "gs")) { S3URI uri = new S3URI(scheme + "://bucket/path/to/file"); - assertEquals("bucket", uri.bucket()); - assertEquals("path/to/file", uri.key()); + Assertions.assertThat(uri.bucket()).isEqualTo("bucket"); + Assertions.assertThat(uri.key()).isEqualTo("path/to/file"); } } @@ -92,8 +90,8 @@ public void testS3URIWithBucketToAccessPointMapping() { Map bucketToAccessPointMapping = ImmutableMap.of("bucket", "access-point"); S3URI uri1 = new S3URI(p1, bucketToAccessPointMapping); - assertEquals("access-point", uri1.bucket()); - assertEquals("path/to/file", uri1.key()); - assertEquals(p1, uri1.toString()); + Assertions.assertThat(uri1.bucket()).isEqualTo("access-point"); + Assertions.assertThat(uri1.key()).isEqualTo("path/to/file"); + Assertions.assertThat(uri1.toString()).isEqualTo(p1); } } diff --git a/aws/src/test/java/org/apache/iceberg/aws/s3/signer/TestS3RestSigner.java b/aws/src/test/java/org/apache/iceberg/aws/s3/signer/TestS3RestSigner.java index 304116c939ba..1e44e533188b 100644 --- a/aws/src/test/java/org/apache/iceberg/aws/s3/signer/TestS3RestSigner.java +++ b/aws/src/test/java/org/apache/iceberg/aws/s3/signer/TestS3RestSigner.java @@ -33,12 +33,11 @@ import org.eclipse.jetty.servlet.ServletContextHandler; import org.eclipse.jetty.servlet.ServletHolder; import org.jetbrains.annotations.NotNull; -import org.junit.AfterClass; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.TemporaryFolder; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import software.amazon.awssdk.auth.credentials.AwsBasicCredentials; import software.amazon.awssdk.auth.credentials.AwsCredentialsProvider; import software.amazon.awssdk.auth.credentials.StaticCredentialsProvider; @@ -69,18 +68,14 @@ public class TestS3RestSigner { static final AwsCredentialsProvider CREDENTIALS_PROVIDER = StaticCredentialsProvider.create( AwsBasicCredentials.create("accessKeyId", "secretAccessKey")); + private static final MinioContainer MINIO_CONTAINER = + new MinioContainer(CREDENTIALS_PROVIDER.resolveCredentials()); private static Server httpServer; private static ValidatingSigner validatingSigner; 
private S3Client s3; - @Rule public TemporaryFolder temp = new TemporaryFolder(); - - @Rule - public MinioContainer minioContainer = - new MinioContainer(CREDENTIALS_PROVIDER.resolveCredentials()); - - @BeforeClass + @BeforeAll public static void beforeClass() throws Exception { if (null == httpServer) { httpServer = initHttpServer(); @@ -99,7 +94,7 @@ public static void beforeClass() throws Exception { new CustomAwsS3V4Signer()); } - @AfterClass + @AfterAll public static void afterClass() throws Exception { assertThat(validatingSigner.icebergSigner.tokenRefreshExecutor()) .isInstanceOf(ScheduledThreadPoolExecutor.class); @@ -123,8 +118,9 @@ public static void afterClass() throws Exception { } } - @Before + @BeforeEach public void before() throws Exception { + MINIO_CONTAINER.start(); s3 = S3Client.builder() .region(REGION) @@ -133,7 +129,7 @@ public void before() throws Exception { s3ClientBuilder -> s3ClientBuilder.httpClientBuilder( software.amazon.awssdk.http.apache.ApacheHttpClient.builder())) - .endpointOverride(minioContainer.getURI()) + .endpointOverride(MINIO_CONTAINER.getURI()) .forcePathStyle(true) // OSX won't resolve subdomains .overrideConfiguration( c -> c.putAdvancedOption(SdkAdvancedClientOption.SIGNER, validatingSigner)) @@ -199,6 +195,11 @@ public void validatedCreateMultiPartUpload() { CreateMultipartUploadRequest.builder().bucket(BUCKET).key("some/multipart-key").build()); } + @AfterEach + public void after() { + MINIO_CONTAINER.stop(); + } + @Test public void validatedUploadPart() { String multipartUploadId = diff --git a/build.gradle b/build.gradle index 187ebed52f08..8b05afbb6e6a 100644 --- a/build.gradle +++ b/build.gradle @@ -486,7 +486,7 @@ project(':iceberg-aws') { testImplementation 'software.amazon.awssdk:iam' testImplementation 'software.amazon.awssdk:s3control' testImplementation project(path: ':iceberg-api', configuration: 'testArtifacts') - testImplementation("com.adobe.testing:s3mock-junit4") { + testImplementation("com.adobe.testing:s3mock-junit5") { exclude module: "spring-boot-starter-logging" exclude module: "logback-classic" exclude group: 'junit' @@ -525,6 +525,9 @@ project(':iceberg-aws') { project(':iceberg-delta-lake') { // use integration test since we can take advantages of spark 3.3 to read datafiles of delta lake table // and create some tests involving sql query. 
+ test { + useJUnitPlatform() + } configurations { integrationImplementation.extendsFrom testImplementation integrationRuntime.extendsFrom testRuntimeOnly @@ -912,6 +915,7 @@ project(':iceberg-snowflake') { runtimeOnly("net.snowflake:snowflake-jdbc") + testImplementation 'org.mockito:mockito-junit-jupiter' testImplementation project(path: ':iceberg-core', configuration: 'testArtifacts') } } diff --git a/core/src/test/java/org/apache/iceberg/TestCatalogUtil.java b/core/src/test/java/org/apache/iceberg/TestCatalogUtil.java index 421514c6b82d..33172aa7f09c 100644 --- a/core/src/test/java/org/apache/iceberg/TestCatalogUtil.java +++ b/core/src/test/java/org/apache/iceberg/TestCatalogUtil.java @@ -34,8 +34,7 @@ import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap; import org.apache.iceberg.relocated.com.google.common.collect.Maps; import org.assertj.core.api.Assertions; -import org.junit.Assert; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class TestCatalogUtil { @@ -48,8 +47,8 @@ public void loadCustomCatalog() { Catalog catalog = CatalogUtil.loadCatalog(TestCatalog.class.getName(), name, options, hadoopConf); Assertions.assertThat(catalog).isInstanceOf(TestCatalog.class); - Assert.assertEquals(name, ((TestCatalog) catalog).catalogName); - Assert.assertEquals(options, ((TestCatalog) catalog).catalogProperties); + Assertions.assertThat(((TestCatalog) catalog).catalogName).isEqualTo(name); + Assertions.assertThat(((TestCatalog) catalog).catalogProperties).isEqualTo(options); } @Test @@ -62,9 +61,9 @@ public void loadCustomCatalog_withHadoopConfig() { Catalog catalog = CatalogUtil.loadCatalog(TestCatalogConfigurable.class.getName(), name, options, hadoopConf); Assertions.assertThat(catalog).isInstanceOf(TestCatalogConfigurable.class); - Assert.assertEquals(name, ((TestCatalogConfigurable) catalog).catalogName); - Assert.assertEquals(options, ((TestCatalogConfigurable) catalog).catalogProperties); - Assert.assertEquals(hadoopConf, ((TestCatalogConfigurable) catalog).configuration); + Assertions.assertThat(((TestCatalogConfigurable) catalog).catalogName).isEqualTo(name); + Assertions.assertThat(((TestCatalogConfigurable) catalog).catalogProperties).isEqualTo(options); + Assertions.assertThat(((TestCatalogConfigurable) catalog).configuration).isEqualTo(hadoopConf); } @Test @@ -132,7 +131,7 @@ public void loadCustomFileIO_noArg() { properties.put("key", "val"); FileIO fileIO = CatalogUtil.loadFileIO(TestFileIONoArg.class.getName(), properties, null); Assertions.assertThat(fileIO).isInstanceOf(TestFileIONoArg.class); - Assert.assertEquals(properties, ((TestFileIONoArg) fileIO).map); + Assertions.assertThat(((TestFileIONoArg) fileIO).map).isEqualTo(properties); } @Test @@ -142,7 +141,7 @@ public void loadCustomFileIO_hadoopConfigConstructor() { FileIO fileIO = CatalogUtil.loadFileIO(HadoopFileIO.class.getName(), Maps.newHashMap(), configuration); Assertions.assertThat(fileIO).isInstanceOf(HadoopFileIO.class); - Assert.assertEquals("val", ((HadoopFileIO) fileIO).conf().get("key")); + Assertions.assertThat(((HadoopFileIO) fileIO).conf().get("key")).isEqualTo("val"); } @Test @@ -153,7 +152,7 @@ public void loadCustomFileIO_configurable() { CatalogUtil.loadFileIO( TestFileIOConfigurable.class.getName(), Maps.newHashMap(), configuration); Assertions.assertThat(fileIO).isInstanceOf(TestFileIOConfigurable.class); - Assert.assertEquals(configuration, ((TestFileIOConfigurable) fileIO).configuration); + Assertions.assertThat(((TestFileIOConfigurable) 
fileIO).configuration).isEqualTo(configuration); } @Test diff --git a/core/src/test/java/org/apache/iceberg/TestEnvironmentContext.java b/core/src/test/java/org/apache/iceberg/TestEnvironmentContext.java index f6cb9c699bd2..52d38fa97061 100644 --- a/core/src/test/java/org/apache/iceberg/TestEnvironmentContext.java +++ b/core/src/test/java/org/apache/iceberg/TestEnvironmentContext.java @@ -19,7 +19,7 @@ package org.apache.iceberg; import org.assertj.core.api.Assertions; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class TestEnvironmentContext { diff --git a/core/src/test/java/org/apache/iceberg/TestFixedSizeSplitScanTaskIterator.java b/core/src/test/java/org/apache/iceberg/TestFixedSizeSplitScanTaskIterator.java index b08449e3a7d3..314e805b827e 100644 --- a/core/src/test/java/org/apache/iceberg/TestFixedSizeSplitScanTaskIterator.java +++ b/core/src/test/java/org/apache/iceberg/TestFixedSizeSplitScanTaskIterator.java @@ -21,8 +21,8 @@ import java.util.List; import org.apache.iceberg.BaseFileScanTask.SplitScanTask; import org.apache.iceberg.relocated.com.google.common.collect.Lists; -import org.junit.Assert; -import org.junit.Test; +import org.assertj.core.api.Assertions; +import org.junit.jupiter.api.Test; public class TestFixedSizeSplitScanTaskIterator { @Test @@ -57,8 +57,8 @@ private static void verify(long splitSize, long fileLen, List> offset List split = offsetLenPairs.get(i); long offset = split.get(0); long length = split.get(1); - Assert.assertEquals(offset, task.start()); - Assert.assertEquals(length, task.length()); + Assertions.assertThat(task.start()).isEqualTo(offset); + Assertions.assertThat(task.length()).isEqualTo(length); } } diff --git a/core/src/test/java/org/apache/iceberg/TestMetricsTruncation.java b/core/src/test/java/org/apache/iceberg/TestMetricsTruncation.java index efb7d1da09dd..3fe974f086a1 100644 --- a/core/src/test/java/org/apache/iceberg/TestMetricsTruncation.java +++ b/core/src/test/java/org/apache/iceberg/TestMetricsTruncation.java @@ -28,8 +28,7 @@ import java.util.Comparator; import org.apache.iceberg.expressions.Literal; import org.assertj.core.api.Assertions; -import org.junit.Assert; -import org.junit.Test; +import org.junit.jupiter.api.Test; @SuppressWarnings("checkstyle:LocalVariableName") public class TestMetricsTruncation { @@ -40,25 +39,31 @@ public void testTruncateBinary() { ByteBuffer emptyByteBuffer = ByteBuffer.allocate(0); Comparator cmp = Literal.of(original).comparator(); - Assert.assertEquals( - "Truncating to a length of zero should return an empty ByteBuffer", - 0, - cmp.compare(truncateBinary(original, 0), emptyByteBuffer)); - Assert.assertEquals( - "Truncating to the original buffer's remaining size should return the original buffer", - original, - truncateBinary(original, original.remaining())); - Assert.assertEquals( - "Truncating with a length greater than the input's remaining size should return the input", - original, - truncateBinary(original, 16)); + Assertions.assertThat(cmp.compare(truncateBinary(original, 0), emptyByteBuffer)) + .as("Truncating to a length of zero should return an empty ByteBuffer") + .isEqualTo(0); + Assertions.assertThat(truncateBinary(original, original.remaining())) + .as("Truncating to the original buffer's remaining size should return the original buffer") + .isEqualTo(original); + Assertions.assertThat(truncateBinary(original, 16)) + .as( + "Truncating with a length greater than the input's remaining size should return the input") + .isEqualTo(original); ByteBuffer truncated = 
truncateBinary(original, 2); - Assert.assertTrue( - "Truncating with a length less than the input's remaining size should truncate properly", - truncated.remaining() == 2 && truncated.position() == 0); - Assert.assertTrue( - "Truncating should not modify the input buffer", - original.remaining() == 4 && original.position() == 0); + Assertions.assertThat(truncated.remaining()) + .as( + "Truncating with a length less than the input's remaining size should truncate properly") + .isEqualTo(2); + Assertions.assertThat(truncated.position()) + .as( + "Truncating with a length less than the input's remaining size should truncate properly") + .isEqualTo(0); + Assertions.assertThat(original.remaining()) + .as("Truncating should not modify the input buffer") + .isEqualTo(4); + Assertions.assertThat(original.position()) + .as("Truncating should not modify the input buffer") + .isEqualTo(0); Assertions.assertThatThrownBy(() -> truncateBinary(original, -1)) .isInstanceOf(IllegalArgumentException.class) .hasMessage("Truncate length should be non-negative"); @@ -73,22 +78,24 @@ public void testTruncateBinaryMin() { ByteBuffer test2_2 = ByteBuffer.wrap(new byte[] {(byte) 0xFF, (byte) 0xFF}); Comparator cmp = Literal.of(test1).comparator(); - Assert.assertTrue( - "Truncated lower bound should be lower than or equal to the actual lower bound", - cmp.compare(truncateBinaryMin(Literal.of(test1), 2).value(), test1) <= 0); - Assert.assertTrue( - "Output must have the first two bytes of the input", - cmp.compare(truncateBinaryMin(Literal.of(test1), 2).value(), test1_2_expected) == 0); - Assert.assertTrue( - "No truncation required as truncate length is greater than the input size", - cmp.compare(truncateBinaryMin(Literal.of(test1), 5).value(), test1) == 0); - Assert.assertTrue( - "Truncated lower bound should be lower than or equal to the actual lower bound", - cmp.compare(truncateBinaryMin(Literal.of(test2), 2).value(), test2) <= 0); - Assert.assertTrue( - "Output must have the first two bytes of the input. A lower bound exists " - + "even though the first two bytes are the max value", - cmp.compare(truncateBinaryMin(Literal.of(test2), 2).value(), test2_2) == 0); + Assertions.assertThat(cmp.compare(truncateBinaryMin(Literal.of(test1), 2).value(), test1)) + .as("Truncated lower bound should be lower than or equal to the actual lower bound") + .isLessThanOrEqualTo(0); + Assertions.assertThat( + cmp.compare(truncateBinaryMin(Literal.of(test1), 2).value(), test1_2_expected)) + .as("Output must have the first two bytes of the input") + .isEqualTo(0); + Assertions.assertThat(cmp.compare(truncateBinaryMin(Literal.of(test1), 5).value(), test1)) + .as("No truncation required as truncate length is greater than the input size") + .isEqualTo(0); + Assertions.assertThat(cmp.compare(truncateBinaryMin(Literal.of(test2), 2).value(), test2)) + .as("Truncated lower bound should be lower than or equal to the actual lower bound") + .isLessThanOrEqualTo(0); + Assertions.assertThat(cmp.compare(truncateBinaryMin(Literal.of(test2), 2).value(), test2_2)) + .as( + "Output must have the first two bytes of the input. 
A lower bound exists " + + "even though the first two bytes are the max value") + .isEqualTo(0); } @Test @@ -100,32 +107,37 @@ public void testTruncateBinaryMax() { ByteBuffer expectedOutput = ByteBuffer.wrap(new byte[] {1, 2}); Comparator cmp = Literal.of(test1).comparator(); - Assert.assertTrue( - "Truncated upper bound should be greater than or equal to the actual upper bound", - cmp.compare(truncateBinaryMax(Literal.of(test1), 2).value(), test1) >= 0); - Assert.assertTrue( - "Output must have two bytes and the second byte of the input must be incremented", - cmp.compare(truncateBinaryMax(Literal.of(test1), 2).value(), expectedOutput) == 0); - Assert.assertTrue( - "Truncated upper bound should be greater than or equal to the actual upper bound", - cmp.compare(truncateBinaryMax(Literal.of(test2), 2).value(), test2) >= 0); - Assert.assertTrue( - "Since the third byte is already the max value, output must have two bytes " - + "with the second byte incremented ", - cmp.compare(truncateBinaryMax(Literal.of(test2), 3).value(), expectedOutput) == 0); - Assert.assertTrue( - "No truncation required as truncate length is greater than the input size", - cmp.compare(truncateBinaryMax(Literal.of(test3), 5).value(), test3) == 0); - Assert.assertNull( - "An upper bound doesn't exist since the first two bytes are the max value", - truncateBinaryMax(Literal.of(test3), 2)); - Assert.assertTrue( - "Truncated upper bound should be greater than or equal to the actual upper bound", - cmp.compare(truncateBinaryMax(Literal.of(test4), 2).value(), test4) >= 0); - Assert.assertTrue( - "Since a shorter sequence is considered smaller, output must have two bytes " - + "and the second byte of the input must be incremented", - cmp.compare(truncateBinaryMax(Literal.of(test4), 2).value(), expectedOutput) == 0); + Assertions.assertThat(cmp.compare(truncateBinaryMax(Literal.of(test1), 2).value(), test1)) + .as("Truncated upper bound should be greater than or equal to the actual upper bound") + .isGreaterThanOrEqualTo(0); + Assertions.assertThat( + cmp.compare(truncateBinaryMax(Literal.of(test1), 2).value(), expectedOutput)) + .as("Output must have two bytes and the second byte of the input must be incremented") + .isEqualTo(0); + Assertions.assertThat(cmp.compare(truncateBinaryMax(Literal.of(test2), 2).value(), test2)) + .as("Truncated upper bound should be greater than or equal to the actual upper bound") + .isGreaterThanOrEqualTo(0); + Assertions.assertThat( + cmp.compare(truncateBinaryMax(Literal.of(test2), 3).value(), expectedOutput)) + .as( + "Since the third byte is already the max value, output must have two bytes " + + "with the second byte incremented ") + .isEqualTo(0); + Assertions.assertThat(cmp.compare(truncateBinaryMax(Literal.of(test3), 5).value(), test3)) + .as("No truncation required as truncate length is greater than the input size") + .isEqualTo(0); + Assertions.assertThat(truncateBinaryMax(Literal.of(test3), 2)) + .as("An upper bound doesn't exist since the first two bytes are the max value") + .isNull(); + Assertions.assertThat(cmp.compare(truncateBinaryMax(Literal.of(test4), 2).value(), test4)) + .as("Truncated upper bound should be greater than or equal to the actual upper bound") + .isGreaterThanOrEqualTo(0); + Assertions.assertThat( + cmp.compare(truncateBinaryMax(Literal.of(test4), 2).value(), expectedOutput)) + .as( + "Since a shorter sequence is considered smaller, output must have two bytes " + + "and the second byte of the input must be incremented") + .isEqualTo(0); } 
@SuppressWarnings("checkstyle:AvoidEscapedUnicodeCharacters") @@ -143,36 +155,40 @@ public void testTruncateStringMin() { String test4 = "\uD800\uDC00\uD800\uDC00"; String test4_1_expected = "\uD800\uDC00"; Comparator cmp = Literal.of(test1).comparator(); - Assert.assertTrue( - "Truncated lower bound should be lower than or equal to the actual lower bound", - cmp.compare(truncateStringMin(Literal.of(test1), 3).value(), test1) <= 0); - Assert.assertTrue( - "No truncation required as truncate length is greater than the input size", - cmp.compare(truncateStringMin(Literal.of(test1), 8).value(), test1) == 0); - Assert.assertTrue( - "Output must have the first two characters of the input", - cmp.compare(truncateStringMin(Literal.of(test1), 2).value(), test1_2_expected) == 0); - Assert.assertTrue( - "Output must have the first three characters of the input", - cmp.compare(truncateStringMin(Literal.of(test1), 3).value(), test1_3_expected) == 0); - Assert.assertTrue( - "Truncated lower bound should be lower than or equal to the actual lower bound", - cmp.compare(truncateStringMin(Literal.of(test2), 16).value(), test2) <= 0); - Assert.assertTrue( - "Output must have the first seven characters of the input", - cmp.compare(truncateStringMin(Literal.of(test2), 7).value(), test2_7_expected) == 0); - Assert.assertTrue( - "Truncated lower bound should be lower than or equal to the actual lower bound", - cmp.compare(truncateStringMin(Literal.of(test3), 2).value(), test3) <= 0); - Assert.assertTrue( - "No truncation required as truncate length is equal to the input size", - cmp.compare(truncateStringMin(Literal.of(test3), 2).value(), test3) == 0); - Assert.assertTrue( - "Truncated lower bound should be lower than or equal to the actual lower bound", - cmp.compare(truncateStringMin(Literal.of(test4), 1).value(), test4) <= 0); - Assert.assertTrue( - "Output must have the first 4 byte UTF-8 character of the input", - cmp.compare(truncateStringMin(Literal.of(test4), 1).value(), test4_1_expected) == 0); + Assertions.assertThat(cmp.compare(truncateStringMin(Literal.of(test1), 3).value(), test1)) + .as("Truncated lower bound should be lower than or equal to the actual lower bound") + .isLessThanOrEqualTo(0); + Assertions.assertThat(cmp.compare(truncateStringMin(Literal.of(test1), 8).value(), test1)) + .as("No truncation required as truncate length is greater than the input size") + .isEqualTo(0); + Assertions.assertThat( + cmp.compare(truncateStringMin(Literal.of(test1), 2).value(), test1_2_expected)) + .as("Output must have the first two characters of the input") + .isEqualTo(0); + Assertions.assertThat( + cmp.compare(truncateStringMin(Literal.of(test1), 3).value(), test1_3_expected)) + .as("Output must have the first three characters of the input") + .isEqualTo(0); + Assertions.assertThat(cmp.compare(truncateStringMin(Literal.of(test2), 16).value(), test2)) + .as("Truncated lower bound should be lower than or equal to the actual lower bound") + .isLessThanOrEqualTo(0); + Assertions.assertThat( + cmp.compare(truncateStringMin(Literal.of(test2), 7).value(), test2_7_expected)) + .as("Output must have the first seven characters of the input") + .isEqualTo(0); + Assertions.assertThat(cmp.compare(truncateStringMin(Literal.of(test3), 2).value(), test3)) + .as("Truncated lower bound should be lower than or equal to the actual lower bound") + .isLessThanOrEqualTo(0); + Assertions.assertThat(cmp.compare(truncateStringMin(Literal.of(test3), 2).value(), test3)) + .as("No truncation required as truncate length is equal to 
the input size") + .isEqualTo(0); + Assertions.assertThat(cmp.compare(truncateStringMin(Literal.of(test4), 1).value(), test4)) + .as("Truncated lower bound should be lower than or equal to the actual lower bound") + .isLessThanOrEqualTo(0); + Assertions.assertThat( + cmp.compare(truncateStringMin(Literal.of(test4), 1).value(), test4_1_expected)) + .as("Output must have the first 4 byte UTF-8 character of the input") + .isEqualTo(0); } @SuppressWarnings("checkstyle:AvoidEscapedUnicodeCharacters") @@ -199,65 +215,78 @@ public void testTruncateStringMax() { String test7_1_expected = "\uD83D\uDE03"; Comparator cmp = Literal.of(test1).comparator(); - Assert.assertTrue( - "Truncated upper bound should be greater than or equal to the actual upper bound", - cmp.compare(truncateStringMax(Literal.of(test1), 4).value(), test1) >= 0); - Assert.assertTrue( - "No truncation required as truncate length is equal to the input size", - cmp.compare(truncateStringMax(Literal.of(test1), 7).value(), test1) == 0); - Assert.assertTrue( - "Output must have two characters and the second character of the input must " - + "be incremented", - cmp.compare(truncateStringMax(Literal.of(test1), 2).value(), test1_2_expected) == 0); - Assert.assertTrue( - "Output must have three characters and the third character of the input must " - + "be incremented", - cmp.compare(truncateStringMax(Literal.of(test1), 3).value(), test1_3_expected) == 0); - Assert.assertTrue( - "No truncation required as truncate length is greater than the input size", - cmp.compare(truncateStringMax(Literal.of(test1), 8).value(), test1) == 0); - Assert.assertTrue( - "Truncated upper bound should be greater than or equal to the actual upper " + "bound", - cmp.compare(truncateStringMax(Literal.of(test2), 8).value(), test2) >= 0); - Assert.assertTrue( - "Output must have seven characters and the seventh character of the input " - + "must be incremented", - cmp.compare(truncateStringMax(Literal.of(test2), 7).value(), test2_7_expected) == 0); - Assert.assertTrue( - "Truncated upper bound should be greater than or equal to the actual upper " + "bound", - cmp.compare(truncateStringMax(Literal.of(test3), 3).value(), test3) >= 0); - Assert.assertTrue( - "Output must have three characters and the third character of the input must " - + "be incremented. The second perceivable character in this string is actually a glyph. It consists of " - + "two unicode characters", - cmp.compare(truncateStringMax(Literal.of(test3), 3).value(), test3_3_expected) == 0); - Assert.assertTrue( - "Truncated upper bound should be greater than or equal to the actual upper bound", - cmp.compare(truncateStringMax(Literal.of(test4), 1).value(), test4) >= 0); - Assert.assertTrue( - "Output must have one character. Since the first character is the max 3 byte " - + "UTF-8 character, it should be incremented to the lowest 4 byte UTF-8 character", - cmp.compare(truncateStringMax(Literal.of(test4), 1).value(), test4_1_expected) == 0); - Assert.assertNull( - "An upper bound doesn't exist since the first two characters are max UTF-8 " + "characters", - truncateStringMax(Literal.of(test5), 1)); - Assert.assertTrue( - "Truncated upper bound should be greater than or equal to the actual upper bound", - cmp.compare(truncateStringMax(Literal.of(test6), 2).value(), test6) >= 0); - Assert.assertTrue( - "Test 4 byte UTF-8 character increment. 
Output must have one character with " - + "the first character incremented", - cmp.compare(truncateStringMax(Literal.of(test6), 1).value(), test6_2_expected) == 0); - Assert.assertTrue( - "Truncated upper bound should be greater than or equal to the actual upper bound", - cmp.compare(truncateStringMax(Literal.of(test7), 2).value(), test7) >= 0); - Assert.assertTrue( - "Test input with multiple 4 byte UTF-8 character where the second unicode " - + "character should be incremented", - cmp.compare(truncateStringMax(Literal.of(test7), 2).value(), test7_2_expected) == 0); - Assert.assertTrue( - "Test input with multiple 4 byte UTF-8 character where the first unicode " - + "character should be incremented", - cmp.compare(truncateStringMax(Literal.of(test7), 1).value(), test7_1_expected) == 0); + Assertions.assertThat(cmp.compare(truncateStringMax(Literal.of(test1), 4).value(), test1)) + .as("Truncated upper bound should be greater than or equal to the actual upper bound") + .isGreaterThanOrEqualTo(0); + Assertions.assertThat(cmp.compare(truncateStringMax(Literal.of(test1), 7).value(), test1)) + .as("No truncation required as truncate length is equal to the input size") + .isEqualTo(0); + Assertions.assertThat( + cmp.compare(truncateStringMax(Literal.of(test1), 2).value(), test1_2_expected)) + .as( + "Output must have two characters and the second character of the input must " + + "be incremented") + .isEqualTo(0); + Assertions.assertThat( + cmp.compare(truncateStringMax(Literal.of(test1), 3).value(), test1_3_expected)) + .as( + "Output must have three characters and the third character of the input must " + + "be incremented") + .isEqualTo(0); + Assertions.assertThat(cmp.compare(truncateStringMax(Literal.of(test1), 8).value(), test1)) + .as("No truncation required as truncate length is greater than the input size") + .isEqualTo(0); + Assertions.assertThat(cmp.compare(truncateStringMax(Literal.of(test2), 8).value(), test2)) + .as("Truncated upper bound should be greater than or equal to the actual upper bound") + .isGreaterThanOrEqualTo(0); + Assertions.assertThat( + cmp.compare(truncateStringMax(Literal.of(test2), 7).value(), test2_7_expected)) + .as( + "Output must have seven characters and the seventh character of the input must be incremented") + .isEqualTo(0); + Assertions.assertThat(cmp.compare(truncateStringMax(Literal.of(test3), 3).value(), test3)) + .as("Truncated upper bound should be greater than or equal to the actual upper bound") + .isGreaterThanOrEqualTo(0); + Assertions.assertThat( + cmp.compare(truncateStringMax(Literal.of(test3), 3).value(), test3_3_expected)) + .as( + "Output must have three characters and the third character of the input must " + + "be incremented. The second perceivable character in this string is actually a glyph. It consists of " + + "two unicode characters") + .isEqualTo(0); + Assertions.assertThat(cmp.compare(truncateStringMax(Literal.of(test4), 1).value(), test4)) + .as("Truncated upper bound should be greater than or equal to the actual upper bound") + .isGreaterThanOrEqualTo(0); + Assertions.assertThat( + cmp.compare(truncateStringMax(Literal.of(test4), 1).value(), test4_1_expected)) + .as( + "Output must have one character. 
Since the first character is the max 3 byte " + + "UTF-8 character, it should be incremented to the lowest 4 byte UTF-8 character") + .isEqualTo(0); + Assertions.assertThat(truncateStringMax(Literal.of(test5), 1)) + .as("An upper bound doesn't exist since the first two characters are max UTF-8 characters") + .isNull(); + Assertions.assertThat(cmp.compare(truncateStringMax(Literal.of(test6), 2).value(), test6)) + .as("Truncated upper bound should be greater than or equal to the actual upper bound") + .isGreaterThanOrEqualTo(0); + Assertions.assertThat( + cmp.compare(truncateStringMax(Literal.of(test6), 1).value(), test6_2_expected)) + .as( + "Test 4 byte UTF-8 character increment. Output must have one character with " + + "the first character incremented") + .isEqualTo(0); + Assertions.assertThat(cmp.compare(truncateStringMax(Literal.of(test7), 2).value(), test7)) + .as("Truncated upper bound should be greater than or equal to the actual upper bound") + .isGreaterThanOrEqualTo(0); + Assertions.assertThat( + cmp.compare(truncateStringMax(Literal.of(test7), 2).value(), test7_2_expected)) + .as( + "Test input with multiple 4 byte UTF-8 character where the second unicode character should be incremented") + .isEqualTo(0); + Assertions.assertThat( + cmp.compare(truncateStringMax(Literal.of(test7), 1).value(), test7_1_expected)) + .as( + "Test input with multiple 4 byte UTF-8 character where the first unicode character should be incremented") + .isEqualTo(0); } } diff --git a/core/src/test/java/org/apache/iceberg/TestOffsetsBasedSplitScanTaskIterator.java b/core/src/test/java/org/apache/iceberg/TestOffsetsBasedSplitScanTaskIterator.java index 7ca14a185a05..d9ad4fb3cbbd 100644 --- a/core/src/test/java/org/apache/iceberg/TestOffsetsBasedSplitScanTaskIterator.java +++ b/core/src/test/java/org/apache/iceberg/TestOffsetsBasedSplitScanTaskIterator.java @@ -21,8 +21,8 @@ import java.util.List; import org.apache.iceberg.BaseFileScanTask.SplitScanTask; import org.apache.iceberg.relocated.com.google.common.collect.Lists; -import org.junit.Assert; -import org.junit.Test; +import org.assertj.core.api.Assertions; +import org.junit.jupiter.api.Test; public class TestOffsetsBasedSplitScanTaskIterator { @Test @@ -62,15 +62,15 @@ private static void verify( offsetRanges, TestOffsetsBasedSplitScanTaskIterator::createSplitTask); List tasks = Lists.newArrayList(splitTaskIterator); - Assert.assertEquals("Number of tasks don't match", offsetLenPairs.size(), tasks.size()); + Assertions.assertThat(tasks).as("Number of tasks don't match").hasSameSizeAs(offsetLenPairs); for (int i = 0; i < tasks.size(); i++) { FileScanTask task = tasks.get(i); List split = offsetLenPairs.get(i); long offset = split.get(0); long length = split.get(1); - Assert.assertEquals(offset, task.start()); - Assert.assertEquals(length, task.length()); + Assertions.assertThat(task.start()).isEqualTo(offset); + Assertions.assertThat(task.length()).isEqualTo(length); } } diff --git a/core/src/test/java/org/apache/iceberg/TestSchemaUnionByFieldName.java b/core/src/test/java/org/apache/iceberg/TestSchemaUnionByFieldName.java index e43a19a9299b..5aedde6ce5b0 100644 --- a/core/src/test/java/org/apache/iceberg/TestSchemaUnionByFieldName.java +++ b/core/src/test/java/org/apache/iceberg/TestSchemaUnionByFieldName.java @@ -43,8 +43,7 @@ import org.apache.iceberg.types.Types.TimestampType; import org.apache.iceberg.types.Types.UUIDType; import org.assertj.core.api.Assertions; -import org.junit.Assert; -import org.junit.Test; +import org.junit.jupiter.api.Test; 
public class TestSchemaUnionByFieldName { @@ -83,7 +82,7 @@ private static NestedField[] primitiveFields( public void testAddTopLevelPrimitives() { Schema newSchema = new Schema(primitiveFields(0, primitiveTypes())); Schema applied = new SchemaUpdate(new Schema(), 0).unionByNameWith(newSchema).apply(); - Assert.assertEquals(newSchema.asStruct(), applied.asStruct()); + Assertions.assertThat(applied.asStruct()).isEqualTo(newSchema.asStruct()); } @Test @@ -92,7 +91,7 @@ public void testAddTopLevelListOfPrimitives() { Schema newSchema = new Schema(optional(1, "aList", Types.ListType.ofOptional(2, primitiveType))); Schema applied = new SchemaUpdate(new Schema(), 0).unionByNameWith(newSchema).apply(); - Assert.assertEquals(newSchema.asStruct(), applied.asStruct()); + Assertions.assertThat(applied.asStruct()).isEqualTo(newSchema.asStruct()); } } @@ -103,7 +102,7 @@ public void testAddTopLevelMapOfPrimitives() { new Schema( optional(1, "aMap", Types.MapType.ofOptional(2, 3, primitiveType, primitiveType))); Schema applied = new SchemaUpdate(new Schema(), 0).unionByNameWith(newSchema).apply(); - Assert.assertEquals(newSchema.asStruct(), applied.asStruct()); + Assertions.assertThat(applied.asStruct()).isEqualTo(newSchema.asStruct()); } } @@ -114,7 +113,7 @@ public void testAddTopLevelStructOfPrimitives() { new Schema( optional(1, "aStruct", Types.StructType.of(optional(2, "primitive", primitiveType)))); Schema applied = new SchemaUpdate(new Schema(), 0).unionByNameWith(currentSchema).apply(); - Assert.assertEquals(currentSchema.asStruct(), applied.asStruct()); + Assertions.assertThat(applied.asStruct()).isEqualTo(currentSchema.asStruct()); } } @@ -126,7 +125,7 @@ public void testAddNestedPrimitive() { new Schema( optional(1, "aStruct", Types.StructType.of(optional(2, "primitive", primitiveType)))); Schema applied = new SchemaUpdate(currentSchema, 1).unionByNameWith(newSchema).apply(); - Assert.assertEquals(newSchema.asStruct(), applied.asStruct()); + Assertions.assertThat(applied.asStruct()).isEqualTo(newSchema.asStruct()); } } @@ -137,7 +136,7 @@ public void testAddNestedPrimitives() { new Schema( optional(1, "aStruct", Types.StructType.of(primitiveFields(1, primitiveTypes())))); Schema applied = new SchemaUpdate(currentSchema, 1).unionByNameWith(newSchema).apply(); - Assert.assertEquals(newSchema.asStruct(), applied.asStruct()); + Assertions.assertThat(applied.asStruct()).isEqualTo(newSchema.asStruct()); } @Test @@ -166,7 +165,7 @@ public void testAddNestedLists() { Types.ListType.ofOptional( 10, DecimalType.of(11, 20)))))))))))); Schema applied = new SchemaUpdate(new Schema(), 0).unionByNameWith(newSchema).apply(); - Assert.assertEquals(newSchema.asStruct(), applied.asStruct()); + Assertions.assertThat(applied.asStruct()).isEqualTo(newSchema.asStruct()); } @Test @@ -202,7 +201,7 @@ public void testAddNestedStruct() { "aString", StringType.get())))))))))))))); Schema applied = new SchemaUpdate(new Schema(), 0).unionByNameWith(newSchema).apply(); - Assert.assertEquals(newSchema.asStruct(), applied.asStruct()); + Assertions.assertThat(applied.asStruct()).isEqualTo(newSchema.asStruct()); } @Test @@ -235,7 +234,7 @@ public void testAddNestedMaps() { Types.MapType.ofOptional( 12, 13, StringType.get(), StringType.get())))))))); Schema applied = new SchemaUpdate(new Schema(), 0).unionByNameWith(newSchema).apply(); - Assert.assertEquals(newSchema.asStruct(), applied.asStruct()); + Assertions.assertThat(applied.asStruct()).isEqualTo(newSchema.asStruct()); } @Test @@ -289,8 +288,8 @@ public void 
testTypePromoteIntegerToLong() { Schema newSchema = new Schema(required(1, "aCol", LongType.get())); Schema applied = new SchemaUpdate(currentSchema, 1).unionByNameWith(newSchema).apply(); - Assert.assertEquals(1, applied.asStruct().fields().size()); - Assert.assertEquals(LongType.get(), applied.asStruct().fields().get(0).type()); + Assertions.assertThat(applied.asStruct().fields()).hasSize(1); + Assertions.assertThat(applied.asStruct().fields().get(0).type()).isEqualTo(LongType.get()); } @Test @@ -300,12 +299,9 @@ public void testTypePromoteFloatToDouble() { Schema newSchema = new Schema(required(1, "aCol", DoubleType.get())); Schema applied = new SchemaUpdate(currentSchema, 1).unionByNameWith(newSchema).apply(); - Assert.assertEquals(1, applied.asStruct().fields().size()); - Assert.assertEquals(DoubleType.get(), applied.asStruct().fields().get(0).type()); - // When attempted Assert.assertEquals(newSchema.asStruct(), applied.asStruct()); - // Got java.lang.AssertionError: - // Expected :struct<1: aCol: required double> - // Actual :struct<1: aCol: required double ()> + Assertions.assertThat(applied.asStruct()).isEqualTo(newSchema.asStruct()); + Assertions.assertThat(applied.asStruct().fields()).hasSize(1); + Assertions.assertThat(applied.asStruct().fields().get(0).type()).isEqualTo(DoubleType.get()); } @Test @@ -327,7 +323,7 @@ public void testTypePromoteDecimalToFixedScaleWithWiderPrecision() { Schema newSchema = new Schema(required(1, "aCol", DecimalType.of(22, 1))); Schema applied = new SchemaUpdate(currentSchema, 1).unionByNameWith(newSchema).apply(); - Assert.assertEquals(newSchema.asStruct(), applied.asStruct()); + Assertions.assertThat(applied.asStruct()).isEqualTo(newSchema.asStruct()); } @Test @@ -388,7 +384,7 @@ public void testAddPrimitiveToNestedStruct() { optional(5, "value", StringType.get()), optional(6, "time", TimeType.get()))))))))); - Assert.assertEquals(expected.asStruct(), applied.asStruct()); + Assertions.assertThat(applied.asStruct()).isEqualTo(expected.asStruct()); } @Test @@ -427,7 +423,7 @@ public void testMirroredSchemas() { Schema union = new SchemaUpdate(aSchema, 0).unionByNameWith(mirrored).apply(); // We don't expect the original schema to have been altered. 
- Assert.assertEquals(aSchema.asStruct(), union.asStruct()); + Assertions.assertThat(union.asStruct()).isEqualTo(aSchema.asStruct()); } @Test @@ -463,7 +459,7 @@ public void addNewTopLevelStruct() { 7, "d1", Types.StructType.of(optional(8, "d2", Types.StringType.get())))))); Schema union = new SchemaUpdate(schema, 5).unionByNameWith(observed).apply(); - Assert.assertEquals(observed.asStruct(), union.asStruct()); + Assertions.assertThat(union.asStruct()).isEqualTo(observed.asStruct()); } @Test @@ -514,7 +510,7 @@ public void testAppendNestedStruct() { StringType.get())))))))))))))); Schema applied = new SchemaUpdate(schema, 4).unionByNameWith(observed).apply(); - Assert.assertEquals(observed.asStruct(), applied.asStruct()); + Assertions.assertThat(applied.asStruct()).isEqualTo(observed.asStruct()); } @Test @@ -579,6 +575,6 @@ public void testAppendNestedLists() { "list2", ListType.ofOptional(7, StringType.get()))))))))); - Assert.assertEquals(expected.asStruct(), union.asStruct()); + Assertions.assertThat(union.asStruct()).isEqualTo(expected.asStruct()); } } diff --git a/core/src/test/java/org/apache/iceberg/avro/AvroTestHelpers.java b/core/src/test/java/org/apache/iceberg/avro/AvroTestHelpers.java index 974c29c14f86..af35e27f5b22 100644 --- a/core/src/test/java/org/apache/iceberg/avro/AvroTestHelpers.java +++ b/core/src/test/java/org/apache/iceberg/avro/AvroTestHelpers.java @@ -29,7 +29,6 @@ import org.apache.iceberg.types.Type; import org.apache.iceberg.types.Types; import org.assertj.core.api.Assertions; -import org.junit.Assert; class AvroTestHelpers { @@ -82,7 +81,7 @@ static void assertEquals(Types.StructType struct, Record expected, Record actual static void assertEquals(Types.ListType list, List expected, List actual) { Type elementType = list.elementType(); - Assert.assertEquals("List size should match", expected.size(), actual.size()); + Assertions.assertThat(actual).as("List size should match").hasSameSizeAs(expected); for (int i = 0; i < expected.size(); i += 1) { Object expectedValue = expected.get(i); @@ -95,7 +94,7 @@ static void assertEquals(Types.ListType list, List expected, List actual) static void assertEquals(Types.MapType map, Map expected, Map actual) { Type valueType = map.valueType(); - Assert.assertEquals("Map size should match", expected.size(), actual.size()); + Assertions.assertThat(actual).as("Map keys should match").hasSameSizeAs(expected); for (Object expectedKey : expected.keySet()) { Object expectedValue = expected.get(expectedKey); @@ -124,7 +123,9 @@ private static void assertEquals(Type type, Object expected, Object actual) { case FIXED: case BINARY: case DECIMAL: - Assert.assertEquals("Primitive value should be equal to expected", expected, actual); + Assertions.assertThat(actual) + .as("Primitive value should be equal to expected") + .isEqualTo(expected); break; case STRUCT: Assertions.assertThat(expected) diff --git a/delta-lake/src/integration/java/org/apache/iceberg/delta/SparkDeltaLakeSnapshotTestBase.java b/delta-lake/src/integration/java/org/apache/iceberg/delta/SparkDeltaLakeSnapshotTestBase.java index 7592b8e862d0..930481b12a48 100644 --- a/delta-lake/src/integration/java/org/apache/iceberg/delta/SparkDeltaLakeSnapshotTestBase.java +++ b/delta-lake/src/integration/java/org/apache/iceberg/delta/SparkDeltaLakeSnapshotTestBase.java @@ -23,8 +23,8 @@ import org.apache.iceberg.hive.TestHiveMetastore; import org.apache.spark.sql.SparkSession; import org.apache.spark.sql.internal.SQLConf; -import org.junit.AfterClass; -import org.junit.BeforeClass; 
+import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; @SuppressWarnings("VisibilityModifier") public abstract class SparkDeltaLakeSnapshotTestBase { @@ -32,7 +32,7 @@ public abstract class SparkDeltaLakeSnapshotTestBase { protected static HiveConf hiveConf = null; protected static SparkSession spark = null; - @BeforeClass + @BeforeAll public static void startMetastoreAndSpark() { SparkDeltaLakeSnapshotTestBase.metastore = new TestHiveMetastore(); metastore.start(); @@ -52,7 +52,7 @@ public static void startMetastoreAndSpark() { .getOrCreate(); } - @AfterClass + @AfterAll public static void stopMetastoreAndSpark() throws Exception { if (metastore != null) { metastore.stop(); diff --git a/delta-lake/src/integration/java/org/apache/iceberg/delta/TestSnapshotDeltaLakeTable.java b/delta-lake/src/integration/java/org/apache/iceberg/delta/TestSnapshotDeltaLakeTable.java index bace582c0658..52966a484e68 100644 --- a/delta-lake/src/integration/java/org/apache/iceberg/delta/TestSnapshotDeltaLakeTable.java +++ b/delta-lake/src/integration/java/org/apache/iceberg/delta/TestSnapshotDeltaLakeTable.java @@ -34,6 +34,7 @@ import java.io.IOException; import java.net.URI; import java.nio.file.Files; +import java.nio.file.Path; import java.nio.file.Paths; import java.sql.Timestamp; import java.util.Iterator; @@ -41,6 +42,7 @@ import java.util.Map; import java.util.Random; import java.util.stream.Collectors; +import java.util.stream.Stream; import org.apache.commons.codec.DecoderException; import org.apache.commons.codec.net.URLCodec; import org.apache.iceberg.Snapshot; @@ -58,15 +60,13 @@ import org.apache.spark.sql.connector.catalog.CatalogPlugin; import org.apache.spark.sql.delta.catalog.DeltaCatalog; import org.assertj.core.api.Assertions; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.TemporaryFolder; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; - -@RunWith(Parameterized.class) +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.io.TempDir; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; + public class TestSnapshotDeltaLakeTable extends SparkDeltaLakeSnapshotTestBase { private static final String SNAPSHOT_SOURCE_PROP = "snapshot_source"; private static final String DELTA_SOURCE_VALUE = "delta"; @@ -77,27 +77,24 @@ public class TestSnapshotDeltaLakeTable extends SparkDeltaLakeSnapshotTestBase { private static Dataset typeTestDataFrame; private static Dataset nestedDataFrame; - @Parameterized.Parameters(name = "Catalog Name {0} - Options {2}") - public static Object[][] parameters() { - return new Object[][] { - new Object[] { - icebergCatalogName, - SparkCatalog.class.getName(), - ImmutableMap.of( - "type", - "hive", - "default-namespace", - "default", - "parquet-enabled", - "true", - "cache-enabled", - "false" // Spark will delete tables using v1, leaving the cache out of sync - ) - } - }; + static Stream parameters() { + return Stream.of( + Arguments.of( + icebergCatalogName, + SparkCatalog.class.getName(), + ImmutableMap.of( + "type", + "hive", + "default-namespace", + "default", + "parquet-enabled", + "true", + "cache-enabled", + "false" // Spark will delete tables using v1, leaving the cache out of sync + ))); } - @Rule public TemporaryFolder temp = new TemporaryFolder(); + @TempDir private 
Path temp; public TestSnapshotDeltaLakeTable( String catalogName, String implementation, Map config) { @@ -105,7 +102,7 @@ public TestSnapshotDeltaLakeTable( spark.conf().set("spark.sql.catalog." + defaultSparkCatalog, DeltaCatalog.class.getName()); } - @BeforeClass + @BeforeAll public static void beforeClass() { spark.sql(String.format("CREATE DATABASE IF NOT EXISTS %s", NAMESPACE)); @@ -158,15 +155,16 @@ public static void beforeClass() { .withColumn("structCol3", expr("STRUCT(structCol2, mapCol3, arrayCol)")); } - @AfterClass + @AfterAll public static void afterClass() { spark.sql(String.format("DROP DATABASE IF EXISTS %s CASCADE", NAMESPACE)); } - @Test - public void testBasicSnapshotPartitioned() throws IOException { + @ParameterizedTest(name = "Catalog Name {0} - Options {2}") + @MethodSource("parameters") + public void testBasicSnapshotPartitioned() { String partitionedIdentifier = destName(defaultSparkCatalog, "partitioned_table"); - String partitionedLocation = temp.newFolder().toURI().toString(); + String partitionedLocation = temp.toFile().toURI().toString(); writeDeltaTable(nestedDataFrame, partitionedIdentifier, partitionedLocation, "id"); spark.sql("DELETE FROM " + partitionedIdentifier + " WHERE id=3"); @@ -184,10 +182,11 @@ public void testBasicSnapshotPartitioned() throws IOException { checkIcebergTableLocation(newTableIdentifier, partitionedLocation); } - @Test - public void testBasicSnapshotUnpartitioned() throws IOException { + @ParameterizedTest(name = "Catalog Name {0} - Options {2}") + @MethodSource("parameters") + public void testBasicSnapshotUnpartitioned() { String unpartitionedIdentifier = destName(defaultSparkCatalog, "unpartitioned_table"); - String unpartitionedLocation = temp.newFolder().toURI().toString(); + String unpartitionedLocation = temp.toFile().toURI().toString(); writeDeltaTable(nestedDataFrame, unpartitionedIdentifier, unpartitionedLocation, null); spark.sql("DELETE FROM " + unpartitionedIdentifier + " WHERE id=3"); @@ -205,11 +204,12 @@ public void testBasicSnapshotUnpartitioned() throws IOException { checkIcebergTableLocation(newTableIdentifier, unpartitionedLocation); } - @Test - public void testSnapshotWithNewLocation() throws IOException { + @ParameterizedTest(name = "Catalog Name {0} - Options {2}") + @MethodSource("parameters") + public void testSnapshotWithNewLocation() { String partitionedIdentifier = destName(defaultSparkCatalog, "partitioned_table"); - String partitionedLocation = temp.newFolder().toURI().toString(); - String newIcebergTableLocation = temp.newFolder().toURI().toString(); + String partitionedLocation = temp.toFile().toURI().toString(); + String newIcebergTableLocation = temp.toFile().toURI().toString(); writeDeltaTable(nestedDataFrame, partitionedIdentifier, partitionedLocation, "id"); spark.sql("DELETE FROM " + partitionedIdentifier + " WHERE id=3"); @@ -228,10 +228,11 @@ public void testSnapshotWithNewLocation() throws IOException { checkIcebergTableLocation(newTableIdentifier, newIcebergTableLocation); } - @Test - public void testSnapshotWithAdditionalProperties() throws IOException { + @ParameterizedTest(name = "Catalog Name {0} - Options {2}") + @MethodSource("parameters") + public void testSnapshotWithAdditionalProperties() { String unpartitionedIdentifier = destName(defaultSparkCatalog, "unpartitioned_table"); - String unpartitionedLocation = temp.newFolder().toURI().toString(); + String unpartitionedLocation = temp.toFile().toURI().toString(); writeDeltaTable(nestedDataFrame, unpartitionedIdentifier, 
unpartitionedLocation, null); spark.sql("DELETE FROM " + unpartitionedIdentifier + " WHERE id=3"); @@ -266,12 +267,13 @@ public void testSnapshotWithAdditionalProperties() throws IOException { unpartitionedLocation); } - @Test - public void testSnapshotTableWithExternalDataFiles() throws IOException { + @ParameterizedTest(name = "Catalog Name {0} - Options {2}") + @MethodSource("parameters") + public void testSnapshotTableWithExternalDataFiles() { String unpartitionedIdentifier = destName(defaultSparkCatalog, "unpartitioned_table"); String externalDataFilesIdentifier = destName(defaultSparkCatalog, "external_data_files_table"); - String unpartitionedLocation = temp.newFolder().toURI().toString(); - String externalDataFilesTableLocation = temp.newFolder().toURI().toString(); + String unpartitionedLocation = temp.toFile().toURI().toString(); + String externalDataFilesTableLocation = temp.toFile().toURI().toString(); writeDeltaTable(nestedDataFrame, unpartitionedIdentifier, unpartitionedLocation, null); spark.sql("DELETE FROM " + unpartitionedIdentifier + " WHERE id=3"); @@ -295,10 +297,11 @@ public void testSnapshotTableWithExternalDataFiles() throws IOException { checkDataFilePathsIntegrity(newTableIdentifier, externalDataFilesTableLocation); } - @Test - public void testSnapshotSupportedTypes() throws IOException { + @ParameterizedTest(name = "Catalog Name {0} - Options {2}") + @MethodSource("parameters") + public void testSnapshotSupportedTypes() { String typeTestIdentifier = destName(defaultSparkCatalog, "type_test_table"); - String typeTestTableLocation = temp.newFolder().toURI().toString(); + String typeTestTableLocation = temp.toFile().toURI().toString(); writeDeltaTable(typeTestDataFrame, typeTestIdentifier, typeTestTableLocation, "stringCol"); String newTableIdentifier = destName(icebergCatalogName, "iceberg_type_test_table"); @@ -313,10 +316,11 @@ public void testSnapshotSupportedTypes() throws IOException { checkIcebergTableProperties(newTableIdentifier, ImmutableMap.of(), typeTestTableLocation); } - @Test + @ParameterizedTest(name = "Catalog Name {0} - Options {2}") + @MethodSource("parameters") public void testSnapshotVacuumTable() throws IOException { String vacuumTestIdentifier = destName(defaultSparkCatalog, "vacuum_test_table"); - String vacuumTestTableLocation = temp.newFolder().toURI().toString(); + String vacuumTestTableLocation = temp.toFile().toURI().toString(); writeDeltaTable(nestedDataFrame, vacuumTestIdentifier, vacuumTestTableLocation, null); Random random = new Random(); @@ -348,10 +352,11 @@ public void testSnapshotVacuumTable() throws IOException { checkIcebergTableLocation(newTableIdentifier, vacuumTestTableLocation); } - @Test + @ParameterizedTest(name = "Catalog Name {0} - Options {2}") + @MethodSource("parameters") public void testSnapshotLogCleanTable() throws IOException { String logCleanTestIdentifier = destName(defaultSparkCatalog, "log_clean_test_table"); - String logCleanTestTableLocation = temp.newFolder().toURI().toString(); + String logCleanTestTableLocation = temp.toFile().toURI().toString(); writeDeltaTable(nestedDataFrame, logCleanTestIdentifier, logCleanTestTableLocation, "id"); Random random = new Random(); diff --git a/delta-lake/src/test/java/org/apache/iceberg/delta/TestBaseSnapshotDeltaLakeTableAction.java b/delta-lake/src/test/java/org/apache/iceberg/delta/TestBaseSnapshotDeltaLakeTableAction.java index ff33173f0e50..22e27c1e51e1 100644 --- a/delta-lake/src/test/java/org/apache/iceberg/delta/TestBaseSnapshotDeltaLakeTableAction.java +++ 
b/delta-lake/src/test/java/org/apache/iceberg/delta/TestBaseSnapshotDeltaLakeTableAction.java @@ -28,23 +28,20 @@ import org.apache.iceberg.catalog.Namespace; import org.apache.iceberg.catalog.TableIdentifier; import org.assertj.core.api.Assertions; -import org.junit.Before; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.TemporaryFolder; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.io.TempDir; public class TestBaseSnapshotDeltaLakeTableAction { - @Rule public TemporaryFolder temp1 = new TemporaryFolder(); - @Rule public TemporaryFolder temp2 = new TemporaryFolder(); + @TempDir private File sourceFolder; + @TempDir private File destFolder; private String sourceTableLocation; private final Configuration testHadoopConf = new Configuration(); private String newTableLocation; private final Catalog testCatalog = new TestCatalog(); - @Before + @BeforeEach public void before() throws IOException { - File sourceFolder = temp1.newFolder(); - File destFolder = temp2.newFolder(); sourceTableLocation = sourceFolder.toURI().toString(); newTableLocation = destFolder.toURI().toString(); } diff --git a/delta-lake/src/test/java/org/apache/iceberg/delta/TestDeltaLakeTypeToType.java b/delta-lake/src/test/java/org/apache/iceberg/delta/TestDeltaLakeTypeToType.java index 29a5c63c3d22..20989ee8c013 100644 --- a/delta-lake/src/test/java/org/apache/iceberg/delta/TestDeltaLakeTypeToType.java +++ b/delta-lake/src/test/java/org/apache/iceberg/delta/TestDeltaLakeTypeToType.java @@ -32,8 +32,8 @@ import org.apache.iceberg.types.Type; import org.apache.iceberg.types.Types; import org.assertj.core.api.Assertions; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; public class TestDeltaLakeTypeToType { private static final String optionalBooleanType = "testNullableBoolType"; @@ -48,7 +48,7 @@ public class TestDeltaLakeTypeToType { private StructType deltaShallowNullTypeSchema; private StructType deltaNullTypeSchema; - @Before + @BeforeEach public void constructDeltaLakeSchema() { deltaAtomicSchema = new StructType() diff --git a/snowflake/src/test/java/org/apache/iceberg/snowflake/JdbcSnowflakeClientTest.java b/snowflake/src/test/java/org/apache/iceberg/snowflake/JdbcSnowflakeClientTest.java index 4fd0ae0e21ed..6da321facc13 100644 --- a/snowflake/src/test/java/org/apache/iceberg/snowflake/JdbcSnowflakeClientTest.java +++ b/snowflake/src/test/java/org/apache/iceberg/snowflake/JdbcSnowflakeClientTest.java @@ -38,14 +38,17 @@ import org.apache.iceberg.jdbc.UncheckedInterruptedException; import org.apache.iceberg.jdbc.UncheckedSQLException; import org.assertj.core.api.Assertions; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.ArgumentMatchers; import org.mockito.Mock; -import org.mockito.junit.MockitoJUnitRunner; +import org.mockito.junit.jupiter.MockitoExtension; +import org.mockito.junit.jupiter.MockitoSettings; +import org.mockito.quality.Strictness; -@RunWith(MockitoJUnitRunner.class) +@ExtendWith(MockitoExtension.class) +@MockitoSettings(strictness = Strictness.LENIENT) public class JdbcSnowflakeClientTest { @Mock private Connection mockConnection; @Mock private JdbcClientPool mockClientPool; @@ -54,11 +57,10 @@ public class JdbcSnowflakeClientTest { private JdbcSnowflakeClient 
snowflakeClient; - @Before + @BeforeEach public void before() throws SQLException, InterruptedException { snowflakeClient = new JdbcSnowflakeClient(mockClientPool); snowflakeClient.setQueryHarness(mockQueryHarness); - doAnswer(invocation -> ((ClientPool.Action) invocation.getArguments()[0]).run(mockConnection)) .when(mockClientPool) .run(any(ClientPool.Action.class)); diff --git a/snowflake/src/test/java/org/apache/iceberg/snowflake/NamespaceHelpersTest.java b/snowflake/src/test/java/org/apache/iceberg/snowflake/NamespaceHelpersTest.java index 2dd7fb6ec9af..ef47f5b784fa 100644 --- a/snowflake/src/test/java/org/apache/iceberg/snowflake/NamespaceHelpersTest.java +++ b/snowflake/src/test/java/org/apache/iceberg/snowflake/NamespaceHelpersTest.java @@ -21,7 +21,7 @@ import org.apache.iceberg.catalog.Namespace; import org.apache.iceberg.catalog.TableIdentifier; import org.assertj.core.api.Assertions; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class NamespaceHelpersTest { @Test diff --git a/snowflake/src/test/java/org/apache/iceberg/snowflake/SnowflakeCatalogTest.java b/snowflake/src/test/java/org/apache/iceberg/snowflake/SnowflakeCatalogTest.java index 416f64a91e45..adda8bc65f20 100644 --- a/snowflake/src/test/java/org/apache/iceberg/snowflake/SnowflakeCatalogTest.java +++ b/snowflake/src/test/java/org/apache/iceberg/snowflake/SnowflakeCatalogTest.java @@ -33,8 +33,8 @@ import org.apache.iceberg.relocated.com.google.common.collect.Maps; import org.apache.iceberg.types.Types; import org.assertj.core.api.Assertions; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; public class SnowflakeCatalogTest { @@ -45,7 +45,7 @@ public class SnowflakeCatalogTest { private SnowflakeCatalog.FileIOFactory fakeFileIOFactory; private Map properties; - @Before + @BeforeEach public void before() { catalog = new SnowflakeCatalog(); diff --git a/versions.props b/versions.props index 4f64d8693c28..a9f390c422ba 100644 --- a/versions.props +++ b/versions.props @@ -36,7 +36,7 @@ org.junit.vintage:junit-vintage-engine = 5.9.2 org.junit.jupiter:* = 5.9.2 org.mockito:* = 4.11.0 org.apache.tez:* = 0.8.4 -com.adobe.testing:s3mock-junit4 = 2.11.0 +com.adobe.testing:s3mock-junit5 = 2.11.0 org.assertj:assertj-core = 3.24.2 org.xerial:sqlite-jdbc = 3.41.0.0 com.fasterxml.jackson.dataformat:jackson-dataformat-xml = 2.9.9
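
The hunks above apply the same small set of JUnit 4 to JUnit 5 idioms throughout: @Before/@After become @BeforeEach/@AfterEach, @BeforeClass/@AfterClass become @BeforeAll/@AfterAll, @Rule TemporaryFolder becomes @TempDir, @RunWith(Parameterized.class) becomes @ParameterizedTest with @MethodSource, and org.junit.Assert calls become AssertJ assertions. For reference, a minimal sketch of that migrated shape; the names used here (ExampleMigratedTest, temp, location, parameters) are illustrative only and do not appear in this change.

import java.nio.file.Path;
import java.util.stream.Stream;
import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.io.TempDir;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.MethodSource;

class ExampleMigratedTest {
  // @TempDir replaces the JUnit 4 @Rule TemporaryFolder.
  @TempDir Path temp;

  private String location;

  // @BeforeEach replaces @Before.
  @BeforeEach
  void before() {
    location = temp.toUri().toString();
  }

  // org.junit.Assert.* calls become AssertJ assertions.
  @Test
  void locationIsSet() {
    Assertions.assertThat(location).as("Location should be set").isNotEmpty();
  }

  // @RunWith(Parameterized.class) becomes a static Arguments factory plus @MethodSource.
  static Stream<Arguments> parameters() {
    return Stream.of(Arguments.of("catalogA"), Arguments.of("catalogB"));
  }

  @ParameterizedTest(name = "Catalog Name {0}")
  @MethodSource("parameters")
  void catalogNameIsNotBlank(String catalogName) {
    Assertions.assertThat(catalogName).isNotBlank();
  }
}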