Statically import methods from AssertJ Assertions (#10517)
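
This change replaces qualified calls such as Assertions.assertThat(...) with statically
imported assertThat(...) / assertThatThrownBy(...) across the test suites, and adds a
Checkstyle RegexpMultiline rule that rejects plain imports of org.assertj.core.api.Assertions.
The sketch below is an illustrative before/after of the pattern being enforced; it is not part
of the patch itself, and the class name ExampleTest, the check(...) method, and the literal
values are hypothetical placeholders.

    // Before: class import and qualified calls (now flagged by the new Checkstyle rule)
    import org.assertj.core.api.Assertions;

    class ExampleTest {
      void check(String value) {
        Assertions.assertThat(value).isEqualTo("expected");
        Assertions.assertThatThrownBy(() -> { throw new IllegalStateException("boom"); })
            .isInstanceOf(IllegalStateException.class);
      }
    }

    // After: static imports and unqualified calls, as applied throughout this patch
    import static org.assertj.core.api.Assertions.assertThat;
    import static org.assertj.core.api.Assertions.assertThatThrownBy;

    class ExampleTest {
      void check(String value) {
        assertThat(value).isEqualTo("expected");
        assertThatThrownBy(() -> { throw new IllegalStateException("boom"); })
            .isInstanceOf(IllegalStateException.class);
      }
    }
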
diff --git a/.baseline/checkstyle/checkstyle.xml b/.baseline/checkstyle/checkstyle.xml
index 8df46a5..c36700c 100644
--- a/.baseline/checkstyle/checkstyle.xml
+++ b/.baseline/checkstyle/checkstyle.xml
@@ -92,6 +92,10 @@
<property name="format" value="^\s*import\s+static\s+(?!\Qorg.assertj.core.api.Assertions.\E).*\.assertThatThrownBy;"/>
<property name="message" value="assertThatThrownBy() should be statically imported from org.assertj.core.api.Assertions"/>
</module>
+ <module name="RegexpMultiline">
+ <property name="format" value="^\s*import\s+\Qorg.assertj.core.api.Assertions;\E" />
+ <property name="message" value="org.assertj.core.api.Assertions should only be used with static imports" />
+ </module>
<module name="SuppressionFilter"> <!-- baseline-gradle: README.md -->
<property name="file" value="${config_loc}/checkstyle-suppressions.xml"/>
</module>
diff --git a/aliyun/src/test/java/org/apache/iceberg/aliyun/TestAliyunClientFactories.java b/aliyun/src/test/java/org/apache/iceberg/aliyun/TestAliyunClientFactories.java
index a329a3bda7..d4bd5fd 100644
--- a/aliyun/src/test/java/org/apache/iceberg/aliyun/TestAliyunClientFactories.java
+++ b/aliyun/src/test/java/org/apache/iceberg/aliyun/TestAliyunClientFactories.java
@@ -18,31 +18,32 @@
*/
package org.apache.iceberg.aliyun;
+import static org.assertj.core.api.Assertions.assertThat;
+
import com.aliyun.oss.OSS;
import java.util.Map;
import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap;
import org.apache.iceberg.relocated.com.google.common.collect.Maps;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
public class TestAliyunClientFactories {
@Test
public void testLoadDefault() {
- Assertions.assertThat(AliyunClientFactories.defaultFactory())
+ assertThat(AliyunClientFactories.defaultFactory())
.as("Default client should be singleton")
.isEqualTo(AliyunClientFactories.defaultFactory());
AliyunClientFactory defaultFactory = AliyunClientFactories.from(Maps.newHashMap());
- Assertions.assertThat(defaultFactory)
+ assertThat(defaultFactory)
.as("Should load default when factory impl not configured")
.isInstanceOf(AliyunClientFactories.DefaultAliyunClientFactory.class);
- Assertions.assertThat(defaultFactory.aliyunProperties().accessKeyId())
+ assertThat(defaultFactory.aliyunProperties().accessKeyId())
.as("Should have no Aliyun properties set")
.isNull();
- Assertions.assertThat(defaultFactory.aliyunProperties().securityToken())
+ assertThat(defaultFactory.aliyunProperties().securityToken())
.as("Should have no security token")
.isNull();
@@ -53,15 +54,15 @@
"key",
AliyunProperties.CLIENT_SECURITY_TOKEN,
"token"));
- Assertions.assertThat(defaultFactoryWithConfig)
+ assertThat(defaultFactoryWithConfig)
.as("Should load default when factory impl not configured")
.isInstanceOf(AliyunClientFactories.DefaultAliyunClientFactory.class);
- Assertions.assertThat(defaultFactoryWithConfig.aliyunProperties().accessKeyId())
+ assertThat(defaultFactoryWithConfig.aliyunProperties().accessKeyId())
.as("Should have access key set")
.isEqualTo("key");
- Assertions.assertThat(defaultFactoryWithConfig.aliyunProperties().securityToken())
+ assertThat(defaultFactoryWithConfig.aliyunProperties().securityToken())
.as("Should have security token set")
.isEqualTo("token");
}
@@ -70,7 +71,7 @@
public void testLoadCustom() {
Map<String, String> properties = Maps.newHashMap();
properties.put(AliyunProperties.CLIENT_FACTORY, CustomFactory.class.getName());
- Assertions.assertThat(AliyunClientFactories.from(properties))
+ assertThat(AliyunClientFactories.from(properties))
.as("Should load custom class")
.isInstanceOf(CustomFactory.class);
}
diff --git a/aliyun/src/test/java/org/apache/iceberg/aliyun/oss/TestOSSFileIO.java b/aliyun/src/test/java/org/apache/iceberg/aliyun/oss/TestOSSFileIO.java
index a4db1b9..dda4e75 100644
--- a/aliyun/src/test/java/org/apache/iceberg/aliyun/oss/TestOSSFileIO.java
+++ b/aliyun/src/test/java/org/apache/iceberg/aliyun/oss/TestOSSFileIO.java
@@ -18,6 +18,8 @@
*/
package org.apache.iceberg.aliyun.oss;
+import static org.assertj.core.api.Assertions.assertThat;
+
import com.aliyun.oss.OSS;
import com.aliyun.oss.OSSClient;
import com.aliyun.oss.OSSClientBuilder;
@@ -39,7 +41,6 @@
import org.apache.iceberg.relocated.com.google.common.io.ByteStreams;
import org.apache.iceberg.util.SerializableSupplier;
import org.apache.iceberg.util.SerializationUtil;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
@@ -73,33 +74,29 @@
writeOSSData(out, data);
OSSURI uri = new OSSURI(location);
- Assertions.assertThat(ossClient().get().doesObjectExist(uri.bucket(), uri.key()))
+ assertThat(ossClient().get().doesObjectExist(uri.bucket(), uri.key()))
.as("OSS file should exist")
.isTrue();
- Assertions.assertThat(out.location()).as("Should have expected location").isEqualTo(location);
- Assertions.assertThat(ossDataLength(uri)).as("Should have expected length").isEqualTo(dataSize);
- Assertions.assertThat(ossDataContent(uri, dataSize))
- .as("Should have expected content")
- .isEqualTo(data);
+ assertThat(out.location()).as("Should have expected location").isEqualTo(location);
+ assertThat(ossDataLength(uri)).as("Should have expected length").isEqualTo(dataSize);
+ assertThat(ossDataContent(uri, dataSize)).as("Should have expected content").isEqualTo(data);
}
@Test
public void testInputFile() throws IOException {
String location = randomLocation();
InputFile in = fileIO().newInputFile(location);
- Assertions.assertThat(in.exists()).as("OSS file should not exist").isFalse();
+ assertThat(in.exists()).as("OSS file should not exist").isFalse();
int dataSize = 1024 * 10;
byte[] data = randomData(dataSize);
OutputFile out = fileIO().newOutputFile(location);
writeOSSData(out, data);
- Assertions.assertThat(in.exists()).as("OSS file should exist").isTrue();
- Assertions.assertThat(in.location()).as("Should have expected location").isEqualTo(location);
- Assertions.assertThat(in.getLength()).as("Should have expected length").isEqualTo(dataSize);
- Assertions.assertThat(inFileContent(in, dataSize))
- .as("Should have expected content")
- .isEqualTo(data);
+ assertThat(in.exists()).as("OSS file should exist").isTrue();
+ assertThat(in.location()).as("Should have expected location").isEqualTo(location);
+ assertThat(in.getLength()).as("Should have expected length").isEqualTo(dataSize);
+ assertThat(inFileContent(in, dataSize)).as("Should have expected content").isEqualTo(data);
}
@Test
@@ -111,22 +108,20 @@
writeOSSData(out, data);
InputFile in = fileIO().newInputFile(location);
- Assertions.assertThat(in.exists()).as("OSS file should exist").isTrue();
+ assertThat(in.exists()).as("OSS file should exist").isTrue();
fileIO().deleteFile(in);
- Assertions.assertThat(fileIO().newInputFile(location).exists())
- .as("OSS file should not exist")
- .isFalse();
+ assertThat(fileIO().newInputFile(location).exists()).as("OSS file should not exist").isFalse();
}
@Test
public void testLoadFileIO() {
FileIO file = CatalogUtil.loadFileIO(OSS_IMPL_CLASS, ImmutableMap.of(), conf);
- Assertions.assertThat(file).as("Should be OSSFileIO").isInstanceOf(OSSFileIO.class);
+ assertThat(file).as("Should be OSSFileIO").isInstanceOf(OSSFileIO.class);
byte[] data = SerializationUtil.serializeToBytes(file);
FileIO expectedFileIO = SerializationUtil.deserializeFromBytes(data);
- Assertions.assertThat(expectedFileIO)
+ assertThat(expectedFileIO)
.as("The deserialized FileIO should be OSSFileIO")
.isInstanceOf(OSSFileIO.class);
}
@@ -143,22 +138,20 @@
SerializableSupplier<OSS> post = SerializationUtil.deserializeFromBytes(data);
OSS client = post.get();
- Assertions.assertThat(client)
- .as("Should be instance of oss client")
- .isInstanceOf(OSSClient.class);
+ assertThat(client).as("Should be instance of oss client").isInstanceOf(OSSClient.class);
OSSClient oss = (OSSClient) client;
- Assertions.assertThat(oss.getEndpoint())
+ assertThat(oss.getEndpoint())
.as("Should have expected endpoint")
.isEqualTo(new URI("http://" + endpoint));
- Assertions.assertThat(oss.getCredentialsProvider().getCredentials().getAccessKeyId())
+ assertThat(oss.getCredentialsProvider().getCredentials().getAccessKeyId())
.as("Should have expected access key")
.isEqualTo(accessKeyId);
- Assertions.assertThat(oss.getCredentialsProvider().getCredentials().getSecretAccessKey())
+ assertThat(oss.getCredentialsProvider().getCredentials().getSecretAccessKey())
.as("Should have expected secret key")
.isEqualTo(accessSecret);
- Assertions.assertThat(oss.getCredentialsProvider().getCredentials().getSecurityToken())
+ assertThat(oss.getCredentialsProvider().getCredentials().getSecurityToken())
.as("Should have no security token")
.isNull();
}
diff --git a/aliyun/src/test/java/org/apache/iceberg/aliyun/oss/TestOSSInputFile.java b/aliyun/src/test/java/org/apache/iceberg/aliyun/oss/TestOSSInputFile.java
index 8d7cf51..f987113 100644
--- a/aliyun/src/test/java/org/apache/iceberg/aliyun/oss/TestOSSInputFile.java
+++ b/aliyun/src/test/java/org/apache/iceberg/aliyun/oss/TestOSSInputFile.java
@@ -18,6 +18,8 @@
*/
package org.apache.iceberg.aliyun.oss;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
import static org.mockito.AdditionalAnswers.delegatesTo;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.reset;
@@ -36,7 +38,6 @@
import org.apache.iceberg.io.SeekableInputStream;
import org.apache.iceberg.metrics.MetricsContext;
import org.apache.iceberg.relocated.com.google.common.io.ByteStreams;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
public class TestOSSInputFile extends AliyunOSSTestBase {
@@ -60,7 +61,7 @@
@Test
public void testOSSInputFile() {
OSSURI uri = randomURI();
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() ->
new OSSInputFile(
ossClient().get(), uri, aliyunProperties, -1, MetricsContext.nullMetrics()))
@@ -74,7 +75,7 @@
InputFile inputFile =
new OSSInputFile(ossMock, uri, aliyunProperties, MetricsContext.nullMetrics());
- Assertions.assertThat(inputFile.exists()).as("OSS file should not exist").isFalse();
+ assertThat(inputFile.exists()).as("OSS file should not exist").isFalse();
verify(ossMock, times(1)).getSimplifiedObjectMeta(uri.bucket(), uri.key());
reset(ossMock);
@@ -82,7 +83,7 @@
byte[] data = randomData(dataSize);
writeOSSData(uri, data);
- Assertions.assertThat(inputFile.exists()).as("OSS file should exist").isTrue();
+ assertThat(inputFile.exists()).as("OSS file should exist").isTrue();
inputFile.exists();
verify(ossMock, times(1)).getSimplifiedObjectMeta(uri.bucket(), uri.key());
reset(ossMock);
@@ -108,17 +109,15 @@
private void readAndVerify(OSSURI uri, byte[] data) throws IOException {
InputFile inputFile =
new OSSInputFile(ossClient().get(), uri, aliyunProperties, MetricsContext.nullMetrics());
- Assertions.assertThat(inputFile.exists()).as("OSS file should exist").isTrue();
- Assertions.assertThat(inputFile.getLength())
- .as("Should have expected file length")
- .isEqualTo(data.length);
+ assertThat(inputFile.exists()).as("OSS file should exist").isTrue();
+ assertThat(inputFile.getLength()).as("Should have expected file length").isEqualTo(data.length);
byte[] actual = new byte[data.length];
try (SeekableInputStream in = inputFile.newStream()) {
ByteStreams.readFully(in, actual);
}
- Assertions.assertThat(actual).as("Should have same object content").isEqualTo(data);
+ assertThat(actual).as("Should have same object content").isEqualTo(data);
}
private void verifyLength(OSS ossClientMock, OSSURI uri, byte[] data, boolean isCache) {
@@ -132,9 +131,7 @@
new OSSInputFile(ossClientMock, uri, aliyunProperties, MetricsContext.nullMetrics());
}
inputFile.getLength();
- Assertions.assertThat(inputFile.getLength())
- .as("Should have expected file length")
- .isEqualTo(data.length);
+ assertThat(inputFile.getLength()).as("Should have expected file length").isEqualTo(data.length);
}
private OSSURI randomURI() {
diff --git a/aliyun/src/test/java/org/apache/iceberg/aliyun/oss/TestOSSInputStream.java b/aliyun/src/test/java/org/apache/iceberg/aliyun/oss/TestOSSInputStream.java
index ccbfa39..0536109 100644
--- a/aliyun/src/test/java/org/apache/iceberg/aliyun/oss/TestOSSInputStream.java
+++ b/aliyun/src/test/java/org/apache/iceberg/aliyun/oss/TestOSSInputStream.java
@@ -18,6 +18,9 @@
*/
package org.apache.iceberg.aliyun.oss;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
+
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.util.Arrays;
@@ -25,7 +28,6 @@
import java.util.concurrent.ThreadLocalRandom;
import org.apache.iceberg.io.SeekableInputStream;
import org.apache.iceberg.relocated.com.google.common.io.ByteStreams;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
public class TestOSSInputStream extends AliyunOSSTestBase {
@@ -69,7 +71,7 @@
SeekableInputStream in, long rangeStart, int size, byte[] original, boolean buffered)
throws IOException {
in.seek(rangeStart);
- Assertions.assertThat(in.getPos()).as("Should have the correct position").isEqualTo(rangeStart);
+ assertThat(in.getPos()).as("Should have the correct position").isEqualTo(rangeStart);
long rangeEnd = rangeStart + size;
byte[] actual = new byte[size];
@@ -83,9 +85,9 @@
}
}
- Assertions.assertThat(in.getPos()).as("Should have the correct position").isEqualTo(rangeEnd);
+ assertThat(in.getPos()).as("Should have the correct position").isEqualTo(rangeEnd);
- Assertions.assertThat(actual)
+ assertThat(actual)
.as("Should have expected range data")
.isEqualTo(Arrays.copyOfRange(original, (int) rangeStart, (int) rangeEnd));
}
@@ -95,7 +97,7 @@
OSSURI uri = new OSSURI(location("closed.dat"));
SeekableInputStream closed = new OSSInputStream(ossClient().get(), uri);
closed.close();
- Assertions.assertThatThrownBy(() -> closed.seek(0))
+ assertThatThrownBy(() -> closed.seek(0))
.isInstanceOf(IllegalStateException.class)
.hasMessageContaining("Cannot seek: already closed");
}
@@ -111,7 +113,7 @@
in.seek(expected.length / 2);
byte[] actual = new byte[expected.length / 2];
ByteStreams.readFully(in, actual);
- Assertions.assertThat(actual)
+ assertThat(actual)
.as("Should have expected seeking stream")
.isEqualTo(Arrays.copyOfRange(expected, expected.length / 2, expected.length));
}
diff --git a/aliyun/src/test/java/org/apache/iceberg/aliyun/oss/TestOSSOutputFile.java b/aliyun/src/test/java/org/apache/iceberg/aliyun/oss/TestOSSOutputFile.java
index 75f0951..2a06d21 100644
--- a/aliyun/src/test/java/org/apache/iceberg/aliyun/oss/TestOSSOutputFile.java
+++ b/aliyun/src/test/java/org/apache/iceberg/aliyun/oss/TestOSSOutputFile.java
@@ -18,6 +18,9 @@
*/
package org.apache.iceberg.aliyun.oss;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
+
import com.aliyun.oss.OSS;
import java.io.ByteArrayInputStream;
import java.io.IOException;
@@ -32,7 +35,6 @@
import org.apache.iceberg.io.OutputFile;
import org.apache.iceberg.metrics.MetricsContext;
import org.apache.iceberg.relocated.com.google.common.io.ByteStreams;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
public class TestOSSOutputFile extends AliyunOSSTestBase {
@@ -53,20 +55,19 @@
ByteStreams.copy(is, os);
}
- Assertions.assertThat(ossClient.doesObjectExist(uri.bucket(), uri.key()))
+ assertThat(ossClient.doesObjectExist(uri.bucket(), uri.key()))
.as("OSS file should exist")
.isTrue();
- Assertions.assertThat(ossDataLength(uri)).as("Object length should match").isEqualTo(dataSize);
+ assertThat(ossDataLength(uri)).as("Object length should match").isEqualTo(dataSize);
byte[] actual = ossDataContent(uri, dataSize);
- Assertions.assertThat(actual).as("Object content should match").isEqualTo(data);
+ assertThat(actual).as("Object content should match").isEqualTo(data);
}
@Test
public void testFromLocation() {
- Assertions.assertThatThrownBy(
- () -> OSSOutputFile.fromLocation(ossClient, null, aliyunProperties))
+ assertThatThrownBy(() -> OSSOutputFile.fromLocation(ossClient, null, aliyunProperties))
.isInstanceOf(NullPointerException.class)
.hasMessageContaining("location cannot be null");
}
@@ -81,7 +82,7 @@
OutputFile out = OSSOutputFile.fromLocation(ossClient, uri.location(), aliyunProperties);
- Assertions.assertThatThrownBy(out::create)
+ assertThatThrownBy(out::create)
.isInstanceOf(AlreadyExistsException.class)
.hasMessageContaining("Location already exists");
}
@@ -102,12 +103,12 @@
InputStream is = new ByteArrayInputStream(expect)) {
ByteStreams.copy(is, os);
}
- Assertions.assertThat(ossDataLength(uri))
+ assertThat(ossDataLength(uri))
.as(String.format("Should overwrite object length from %d to %d", dataSize, expectSize))
.isEqualTo(expectSize);
byte[] actual = ossDataContent(uri, expectSize);
- Assertions.assertThat(actual).as("Should overwrite object content").isEqualTo(expect);
+ assertThat(actual).as("Should overwrite object content").isEqualTo(expect);
}
@Test
@@ -115,7 +116,7 @@
OSSURI uri = randomURI();
OutputFile out =
new OSSOutputFile(ossClient, uri, aliyunProperties, MetricsContext.nullMetrics());
- Assertions.assertThat(out.location()).as("Location should match").isEqualTo(uri.location());
+ assertThat(out.location()).as("Location should match").isEqualTo(uri.location());
}
@Test
@@ -131,20 +132,16 @@
}
InputFile in = out.toInputFile();
- Assertions.assertThat(in)
- .as("Should be an instance of OSSInputFile")
- .isInstanceOf(OSSInputFile.class);
- Assertions.assertThat(in.exists()).as("OSS file should exist").isTrue();
- Assertions.assertThat(in.location())
- .as("Should have expected location")
- .isEqualTo(out.location());
- Assertions.assertThat(in.getLength()).as("Should have expected length").isEqualTo(dataSize);
+ assertThat(in).as("Should be an instance of OSSInputFile").isInstanceOf(OSSInputFile.class);
+ assertThat(in.exists()).as("OSS file should exist").isTrue();
+ assertThat(in.location()).as("Should have expected location").isEqualTo(out.location());
+ assertThat(in.getLength()).as("Should have expected length").isEqualTo(dataSize);
byte[] actual = new byte[dataSize];
try (InputStream as = in.newStream()) {
ByteStreams.readFully(as, actual);
}
- Assertions.assertThat(actual).as("Should have expected content").isEqualTo(data);
+ assertThat(actual).as("Should have expected content").isEqualTo(data);
}
private OSSURI randomURI() {
diff --git a/aliyun/src/test/java/org/apache/iceberg/aliyun/oss/TestOSSOutputStream.java b/aliyun/src/test/java/org/apache/iceberg/aliyun/oss/TestOSSOutputStream.java
index fadad54..069ff91 100644
--- a/aliyun/src/test/java/org/apache/iceberg/aliyun/oss/TestOSSOutputStream.java
+++ b/aliyun/src/test/java/org/apache/iceberg/aliyun/oss/TestOSSOutputStream.java
@@ -18,6 +18,7 @@
*/
package org.apache.iceberg.aliyun.oss;
+import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.AdditionalAnswers.delegatesTo;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.mock;
@@ -38,7 +39,6 @@
import org.apache.iceberg.metrics.MetricsContext;
import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap;
import org.apache.iceberg.relocated.com.google.common.io.ByteStreams;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -88,28 +88,27 @@
new OSSOutputStream(mock, uri, props, MetricsContext.nullMetrics())) {
if (arrayWrite) {
out.write(data);
- Assertions.assertThat(out.getPos()).as("OSSOutputStream position").isEqualTo(data.length);
+ assertThat(out.getPos()).as("OSSOutputStream position").isEqualTo(data.length);
} else {
for (int i = 0; i < data.length; i++) {
out.write(data[i]);
- Assertions.assertThat(out.getPos()).as("OSSOutputStream position").isEqualTo(i + 1);
+ assertThat(out.getPos()).as("OSSOutputStream position").isEqualTo(i + 1);
}
}
}
- Assertions.assertThat(ossClient.doesObjectExist(uri.bucket(), uri.key()))
+ assertThat(ossClient.doesObjectExist(uri.bucket(), uri.key()))
.as("OSS object should exist")
.isTrue();
- Assertions.assertThat(
- ossClient.getObject(uri.bucket(), uri.key()).getObjectMetadata().getContentLength())
+ assertThat(ossClient.getObject(uri.bucket(), uri.key()).getObjectMetadata().getContentLength())
.as("Object length")
.isEqualTo(data.length);
byte[] actual = ossDataContent(uri, data.length);
- Assertions.assertThat(actual).as("Object content").isEqualTo(data);
+ assertThat(actual).as("Object content").isEqualTo(data);
// Verify all staging files are cleaned up.
- Assertions.assertThat(Files.list(Paths.get(props.ossStagingDirectory())).count())
+ assertThat(Files.list(Paths.get(props.ossStagingDirectory())).count())
.as("Staging files should clean up")
.isEqualTo(0);
}
diff --git a/aliyun/src/test/java/org/apache/iceberg/aliyun/oss/TestOSSURI.java b/aliyun/src/test/java/org/apache/iceberg/aliyun/oss/TestOSSURI.java
index 932dfe5..e388ee9 100644
--- a/aliyun/src/test/java/org/apache/iceberg/aliyun/oss/TestOSSURI.java
+++ b/aliyun/src/test/java/org/apache/iceberg/aliyun/oss/TestOSSURI.java
@@ -19,10 +19,11 @@
package org.apache.iceberg.aliyun.oss;
import static com.aliyun.oss.internal.OSSUtils.OSS_RESOURCE_MANAGER;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
import org.apache.iceberg.exceptions.ValidationException;
import org.apache.iceberg.relocated.com.google.common.collect.Lists;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
public class TestOSSURI {
@@ -31,9 +32,9 @@
String location = "oss://bucket/path/to/file";
OSSURI uri = new OSSURI(location);
- Assertions.assertThat(uri.bucket()).isEqualTo("bucket");
- Assertions.assertThat(uri.key()).isEqualTo("path/to/file");
- Assertions.assertThat(uri.toString()).isEqualTo(location);
+ assertThat(uri.bucket()).isEqualTo("bucket");
+ assertThat(uri.key()).isEqualTo("path/to/file");
+ assertThat(uri.toString()).isEqualTo(location);
}
@Test
@@ -41,15 +42,15 @@
String location = "oss://bucket/path%20to%20file";
OSSURI uri = new OSSURI(location);
- Assertions.assertThat(uri.bucket()).isEqualTo("bucket");
- Assertions.assertThat(uri.key()).isEqualTo("path%20to%20file");
- Assertions.assertThat(uri.toString()).isEqualTo(location);
+ assertThat(uri.bucket()).isEqualTo("bucket");
+ assertThat(uri.key()).isEqualTo("path%20to%20file");
+ assertThat(uri.toString()).isEqualTo(location);
}
@Test
public void invalidBucket() {
- Assertions.assertThatThrownBy(() -> new OSSURI("https://test_bucket/path/to/file"))
+ assertThatThrownBy(() -> new OSSURI("https://test_bucket/path/to/file"))
.isInstanceOf(IllegalArgumentException.class)
.hasMessageContaining(
OSS_RESOURCE_MANAGER.getFormattedString("BucketNameInvalid", "test_bucket"));
@@ -58,14 +59,14 @@
@Test
public void missingKey() {
- Assertions.assertThatThrownBy(() -> new OSSURI("https://bucket/"))
+ assertThatThrownBy(() -> new OSSURI("https://bucket/"))
.isInstanceOf(ValidationException.class)
.hasMessageContaining("Missing key in OSS location");
}
@Test
public void invalidKey() {
- Assertions.assertThatThrownBy(() -> new OSSURI("https://bucket/\\path/to/file"))
+ assertThatThrownBy(() -> new OSSURI("https://bucket/\\path/to/file"))
.isInstanceOf(IllegalArgumentException.class)
.hasMessageContaining(
OSS_RESOURCE_MANAGER.getFormattedString("ObjectKeyInvalid", "\\path/to/file"));
@@ -74,7 +75,7 @@
@Test
public void relativePathing() {
- Assertions.assertThatThrownBy(() -> new OSSURI("/path/to/file"))
+ assertThatThrownBy(() -> new OSSURI("/path/to/file"))
.isInstanceOf(ValidationException.class)
.hasMessageContaining("Invalid OSS location");
}
@@ -82,7 +83,7 @@
@Test
public void invalidScheme() {
- Assertions.assertThatThrownBy(() -> new OSSURI("invalid://bucket/"))
+ assertThatThrownBy(() -> new OSSURI("invalid://bucket/"))
.isInstanceOf(ValidationException.class)
.hasMessageContaining("Invalid scheme");
}
@@ -92,9 +93,9 @@
String location = "oss://bucket/path/to/file#print";
OSSURI uri = new OSSURI(location);
- Assertions.assertThat(uri.bucket()).isEqualTo("bucket");
- Assertions.assertThat(uri.key()).isEqualTo("path/to/file");
- Assertions.assertThat(uri.toString()).isEqualTo(location);
+ assertThat(uri.bucket()).isEqualTo("bucket");
+ assertThat(uri.key()).isEqualTo("path/to/file");
+ assertThat(uri.toString()).isEqualTo(location);
}
@Test
@@ -102,17 +103,17 @@
String location = "oss://bucket/path/to/file?query=foo#bar";
OSSURI uri = new OSSURI(location);
- Assertions.assertThat(uri.bucket()).isEqualTo("bucket");
- Assertions.assertThat(uri.key()).isEqualTo("path/to/file");
- Assertions.assertThat(uri.toString()).isEqualTo(location);
+ assertThat(uri.bucket()).isEqualTo("bucket");
+ assertThat(uri.key()).isEqualTo("path/to/file");
+ assertThat(uri.toString()).isEqualTo(location);
}
@Test
public void testValidSchemes() {
for (String scheme : Lists.newArrayList("https", "oss")) {
OSSURI uri = new OSSURI(scheme + "://bucket/path/to/file");
- Assertions.assertThat(uri.bucket()).isEqualTo("bucket");
- Assertions.assertThat(uri.key()).isEqualTo("path/to/file");
+ assertThat(uri.bucket()).isEqualTo("bucket");
+ assertThat(uri.key()).isEqualTo("path/to/file");
}
}
}
diff --git a/aliyun/src/test/java/org/apache/iceberg/aliyun/oss/mock/TestLocalAliyunOSS.java b/aliyun/src/test/java/org/apache/iceberg/aliyun/oss/mock/TestLocalAliyunOSS.java
index 5a47708..a661c17 100644
--- a/aliyun/src/test/java/org/apache/iceberg/aliyun/oss/mock/TestLocalAliyunOSS.java
+++ b/aliyun/src/test/java/org/apache/iceberg/aliyun/oss/mock/TestLocalAliyunOSS.java
@@ -18,6 +18,9 @@
*/
package org.apache.iceberg.aliyun.oss.mock;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
+
import com.aliyun.oss.OSS;
import com.aliyun.oss.OSSErrorCode;
import com.aliyun.oss.OSSException;
@@ -33,7 +36,6 @@
import org.apache.iceberg.aliyun.TestUtility;
import org.apache.iceberg.aliyun.oss.AliyunOSSExtension;
import org.apache.iceberg.relocated.com.google.common.io.ByteStreams;
-import org.assertj.core.api.Assertions;
import org.assertj.core.api.Assumptions;
import org.assertj.core.api.InstanceOfAssertFactories;
import org.junit.jupiter.api.AfterEach;
@@ -51,7 +53,7 @@
private final Random random = new Random(1);
private static void assertThrows(Runnable runnable, String expectedErrorCode) {
- Assertions.assertThatThrownBy(runnable::run)
+ assertThatThrownBy(runnable::run)
.isInstanceOf(OSSException.class)
.asInstanceOf(InstanceOfAssertFactories.type(OSSException.class))
.extracting(ServiceException::getErrorCode)
@@ -74,15 +76,15 @@
.as("Aliyun integration test cannot delete existing bucket from test environment.")
.isEqualTo(AliyunOSSMockExtension.class);
- Assertions.assertThat(doesBucketExist(bucketName)).isTrue();
+ assertThat(doesBucketExist(bucketName)).isTrue();
assertThrows(() -> oss.createBucket(bucketName), OSSErrorCode.BUCKET_ALREADY_EXISTS);
oss.deleteBucket(bucketName);
- Assertions.assertThat(doesBucketExist(bucketName)).isFalse();
+ assertThat(doesBucketExist(bucketName)).isFalse();
oss.createBucket(bucketName);
- Assertions.assertThat(doesBucketExist(bucketName)).isTrue();
+ assertThat(doesBucketExist(bucketName)).isTrue();
}
@Test
@@ -108,7 +110,7 @@
oss.deleteObject(bucketName, "object2");
oss.deleteBucket(bucketName);
- Assertions.assertThat(doesBucketExist(bucketName)).isFalse();
+ assertThat(doesBucketExist(bucketName)).isFalse();
oss.createBucket(bucketName);
}
@@ -123,18 +125,18 @@
() -> oss.putObject(bucketNotExist, "object", wrap(bytes)), OSSErrorCode.NO_SUCH_BUCKET);
PutObjectResult result = oss.putObject(bucketName, "object", wrap(bytes));
- Assertions.assertThat(result.getETag()).isEqualTo(AliyunOSSMockLocalStore.md5sum(wrap(bytes)));
+ assertThat(result.getETag()).isEqualTo(AliyunOSSMockLocalStore.md5sum(wrap(bytes)));
}
@Test
public void testDoesObjectExist() {
- Assertions.assertThat(oss.doesObjectExist(bucketName, "key")).isFalse();
+ assertThat(oss.doesObjectExist(bucketName, "key")).isFalse();
byte[] bytes = new byte[4 * 1024];
random.nextBytes(bytes);
oss.putObject(bucketName, "key", wrap(bytes));
- Assertions.assertThat(oss.doesObjectExist(bucketName, "key")).isTrue();
+ assertThat(oss.doesObjectExist(bucketName, "key")).isTrue();
oss.deleteObject(bucketName, "key");
}
@@ -154,7 +156,7 @@
try (InputStream is = oss.getObject(bucketName, "key").getObjectContent()) {
ByteStreams.readFully(is, actual);
}
- Assertions.assertThat(actual).isEqualTo(bytes);
+ assertThat(actual).isEqualTo(bytes);
oss.deleteObject(bucketName, "key");
}
@@ -230,7 +232,7 @@
try (InputStream is = oss.getObject(getObjectRequest).getObjectContent()) {
ByteStreams.readFully(is, actual);
}
- Assertions.assertThat(actual).isEqualTo(testBytes);
+ assertThat(actual).isEqualTo(testBytes);
}
private InputStream wrap(byte[] data) {
diff --git a/api/src/test/java/org/apache/iceberg/TestHelpers.java b/api/src/test/java/org/apache/iceberg/TestHelpers.java
index fcb528c..329a61f 100644
--- a/api/src/test/java/org/apache/iceberg/TestHelpers.java
+++ b/api/src/test/java/org/apache/iceberg/TestHelpers.java
@@ -47,7 +47,6 @@
import org.apache.iceberg.expressions.UnboundPredicate;
import org.apache.iceberg.relocated.com.google.common.base.Preconditions;
import org.apache.iceberg.util.ByteBuffers;
-import org.assertj.core.api.Assertions;
import org.objenesis.strategy.StdInstantiatorStrategy;
public class TestHelpers {
@@ -110,9 +109,7 @@
}
public static void assertSameSchemaList(List<Schema> list1, List<Schema> list2) {
- Assertions.assertThat(list1)
- .as("Should have same number of schemas in both lists")
- .hasSameSizeAs(list2);
+ assertThat(list1).as("Should have same number of schemas in both lists").hasSameSizeAs(list2);
IntStream.range(0, list1.size())
.forEach(
@@ -151,9 +148,7 @@
}
public static void assertSameSchemaMap(Map<Integer, Schema> map1, Map<Integer, Schema> map2) {
- Assertions.assertThat(map1)
- .as("Should have same number of schemas in both maps")
- .hasSameSizeAs(map2);
+ assertThat(map1).as("Should have same number of schemas in both maps").hasSameSizeAs(map2);
map1.forEach(
(schemaId, schema1) -> {
diff --git a/api/src/test/java/org/apache/iceberg/TestPartitionSpecValidation.java b/api/src/test/java/org/apache/iceberg/TestPartitionSpecValidation.java
index eb0e741..263db42 100644
--- a/api/src/test/java/org/apache/iceberg/TestPartitionSpecValidation.java
+++ b/api/src/test/java/org/apache/iceberg/TestPartitionSpecValidation.java
@@ -19,11 +19,11 @@
package org.apache.iceberg;
import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
import org.apache.iceberg.transforms.Transforms;
import org.apache.iceberg.types.Types;
import org.apache.iceberg.types.Types.NestedField;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
public class TestPartitionSpecValidation {
@@ -38,84 +38,70 @@
@Test
public void testMultipleTimestampPartitions() {
- Assertions.assertThatThrownBy(
- () -> PartitionSpec.builderFor(SCHEMA).year("ts").year("ts").build())
+ assertThatThrownBy(() -> PartitionSpec.builderFor(SCHEMA).year("ts").year("ts").build())
.hasMessageContaining("Cannot use partition name more than once")
.isInstanceOf(IllegalArgumentException.class);
- Assertions.assertThatThrownBy(
- () -> PartitionSpec.builderFor(SCHEMA).year("ts").month("ts").build())
+ assertThatThrownBy(() -> PartitionSpec.builderFor(SCHEMA).year("ts").month("ts").build())
.hasMessageContaining("Cannot add redundant partition")
.isInstanceOf(IllegalArgumentException.class);
- Assertions.assertThatThrownBy(
- () -> PartitionSpec.builderFor(SCHEMA).year("ts").day("ts").build())
+ assertThatThrownBy(() -> PartitionSpec.builderFor(SCHEMA).year("ts").day("ts").build())
.hasMessageContaining("Cannot add redundant partition")
.isInstanceOf(IllegalArgumentException.class);
- Assertions.assertThatThrownBy(
- () -> PartitionSpec.builderFor(SCHEMA).year("ts").hour("ts").build())
+ assertThatThrownBy(() -> PartitionSpec.builderFor(SCHEMA).year("ts").hour("ts").build())
.hasMessageContaining("Cannot add redundant partition")
.isInstanceOf(IllegalArgumentException.class);
- Assertions.assertThatThrownBy(
- () -> PartitionSpec.builderFor(SCHEMA).month("ts").month("ts").build())
+ assertThatThrownBy(() -> PartitionSpec.builderFor(SCHEMA).month("ts").month("ts").build())
.hasMessageContaining("Cannot use partition name more than once")
.isInstanceOf(IllegalArgumentException.class);
- Assertions.assertThatThrownBy(
- () -> PartitionSpec.builderFor(SCHEMA).month("ts").day("ts").build())
+ assertThatThrownBy(() -> PartitionSpec.builderFor(SCHEMA).month("ts").day("ts").build())
.hasMessageContaining("Cannot add redundant partition")
.isInstanceOf(IllegalArgumentException.class);
- Assertions.assertThatThrownBy(
- () -> PartitionSpec.builderFor(SCHEMA).month("ts").hour("ts").build())
+ assertThatThrownBy(() -> PartitionSpec.builderFor(SCHEMA).month("ts").hour("ts").build())
.hasMessageContaining("Cannot add redundant partition")
.isInstanceOf(IllegalArgumentException.class);
- Assertions.assertThatThrownBy(
- () -> PartitionSpec.builderFor(SCHEMA).day("ts").day("ts").build())
+ assertThatThrownBy(() -> PartitionSpec.builderFor(SCHEMA).day("ts").day("ts").build())
.hasMessageContaining("Cannot use partition name more than once")
.isInstanceOf(IllegalArgumentException.class);
- Assertions.assertThatThrownBy(
- () -> PartitionSpec.builderFor(SCHEMA).day("ts").hour("ts").build())
+ assertThatThrownBy(() -> PartitionSpec.builderFor(SCHEMA).day("ts").hour("ts").build())
.hasMessageContaining("Cannot add redundant partition")
.isInstanceOf(IllegalArgumentException.class);
- Assertions.assertThatThrownBy(
- () -> PartitionSpec.builderFor(SCHEMA).hour("ts").hour("ts").build())
+ assertThatThrownBy(() -> PartitionSpec.builderFor(SCHEMA).hour("ts").hour("ts").build())
.hasMessageContaining("Cannot use partition name more than once")
.isInstanceOf(IllegalArgumentException.class);
}
@Test
public void testMultipleDatePartitions() {
- Assertions.assertThatThrownBy(
- () -> PartitionSpec.builderFor(SCHEMA).year("d").year("d").build())
+ assertThatThrownBy(() -> PartitionSpec.builderFor(SCHEMA).year("d").year("d").build())
.hasMessageContaining("Cannot use partition name more than once")
.isInstanceOf(IllegalArgumentException.class);
- Assertions.assertThatThrownBy(
- () -> PartitionSpec.builderFor(SCHEMA).year("d").month("d").build())
+ assertThatThrownBy(() -> PartitionSpec.builderFor(SCHEMA).year("d").month("d").build())
.hasMessageContaining("Cannot add redundant partition")
.isInstanceOf(IllegalArgumentException.class);
- Assertions.assertThatThrownBy(() -> PartitionSpec.builderFor(SCHEMA).year("d").day("d").build())
+ assertThatThrownBy(() -> PartitionSpec.builderFor(SCHEMA).year("d").day("d").build())
.hasMessageContaining("Cannot add redundant partition")
.isInstanceOf(IllegalArgumentException.class);
- Assertions.assertThatThrownBy(
- () -> PartitionSpec.builderFor(SCHEMA).month("d").month("d").build())
+ assertThatThrownBy(() -> PartitionSpec.builderFor(SCHEMA).month("d").month("d").build())
.hasMessageContaining("Cannot use partition name more than once")
.isInstanceOf(IllegalArgumentException.class);
- Assertions.assertThatThrownBy(
- () -> PartitionSpec.builderFor(SCHEMA).month("d").day("d").build())
+ assertThatThrownBy(() -> PartitionSpec.builderFor(SCHEMA).month("d").day("d").build())
.hasMessageContaining("Cannot add redundant partition")
.isInstanceOf(IllegalArgumentException.class);
- Assertions.assertThatThrownBy(() -> PartitionSpec.builderFor(SCHEMA).day("d").day("d").build())
+ assertThatThrownBy(() -> PartitionSpec.builderFor(SCHEMA).day("d").day("d").build())
.hasMessageContaining("Cannot use partition name more than once")
.isInstanceOf(IllegalArgumentException.class);
}
@@ -151,17 +137,16 @@
@Test
public void testMultipleIdentityPartitions() {
PartitionSpec.builderFor(SCHEMA).year("d").identity("id").identity("d").identity("s").build();
- Assertions.assertThatThrownBy(
- () -> PartitionSpec.builderFor(SCHEMA).identity("id").identity("id").build())
+ assertThatThrownBy(() -> PartitionSpec.builderFor(SCHEMA).identity("id").identity("id").build())
.isInstanceOf(IllegalArgumentException.class)
.hasMessageContaining("Cannot use partition name more than once");
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() -> PartitionSpec.builderFor(SCHEMA).identity("id").identity("id", "test-id").build())
.isInstanceOf(IllegalArgumentException.class)
.hasMessageContaining("Cannot add redundant partition");
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() ->
PartitionSpec.builderFor(SCHEMA)
.identity("id", "test-id")
@@ -220,34 +205,31 @@
@Test
public void testSettingPartitionTransformsWithCustomTargetNamesThatAlreadyExist() {
- Assertions.assertThatThrownBy(() -> PartitionSpec.builderFor(SCHEMA).year("ts", "another_ts"))
+ assertThatThrownBy(() -> PartitionSpec.builderFor(SCHEMA).year("ts", "another_ts"))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot create partition from name that exists in schema: another_ts");
- Assertions.assertThatThrownBy(() -> PartitionSpec.builderFor(SCHEMA).month("ts", "another_ts"))
+ assertThatThrownBy(() -> PartitionSpec.builderFor(SCHEMA).month("ts", "another_ts"))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot create partition from name that exists in schema: another_ts");
- Assertions.assertThatThrownBy(() -> PartitionSpec.builderFor(SCHEMA).day("ts", "another_ts"))
+ assertThatThrownBy(() -> PartitionSpec.builderFor(SCHEMA).day("ts", "another_ts"))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot create partition from name that exists in schema: another_ts");
- Assertions.assertThatThrownBy(() -> PartitionSpec.builderFor(SCHEMA).hour("ts", "another_ts"))
+ assertThatThrownBy(() -> PartitionSpec.builderFor(SCHEMA).hour("ts", "another_ts"))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot create partition from name that exists in schema: another_ts");
- Assertions.assertThatThrownBy(
- () -> PartitionSpec.builderFor(SCHEMA).truncate("ts", 2, "another_ts"))
+ assertThatThrownBy(() -> PartitionSpec.builderFor(SCHEMA).truncate("ts", 2, "another_ts"))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot create partition from name that exists in schema: another_ts");
- Assertions.assertThatThrownBy(
- () -> PartitionSpec.builderFor(SCHEMA).bucket("ts", 4, "another_ts"))
+ assertThatThrownBy(() -> PartitionSpec.builderFor(SCHEMA).bucket("ts", 4, "another_ts"))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot create partition from name that exists in schema: another_ts");
- Assertions.assertThatThrownBy(
- () -> PartitionSpec.builderFor(SCHEMA).identity("ts", "another_ts"))
+ assertThatThrownBy(() -> PartitionSpec.builderFor(SCHEMA).identity("ts", "another_ts"))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage(
"Cannot create identity partition sourced from different field in schema: another_ts");
@@ -255,34 +237,31 @@
@Test
public void testMissingSourceColumn() {
- Assertions.assertThatThrownBy(() -> PartitionSpec.builderFor(SCHEMA).year("missing").build())
+ assertThatThrownBy(() -> PartitionSpec.builderFor(SCHEMA).year("missing").build())
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot find source column: missing");
- Assertions.assertThatThrownBy(() -> PartitionSpec.builderFor(SCHEMA).month("missing").build())
+ assertThatThrownBy(() -> PartitionSpec.builderFor(SCHEMA).month("missing").build())
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot find source column: missing");
- Assertions.assertThatThrownBy(() -> PartitionSpec.builderFor(SCHEMA).day("missing").build())
+ assertThatThrownBy(() -> PartitionSpec.builderFor(SCHEMA).day("missing").build())
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot find source column: missing");
- Assertions.assertThatThrownBy(() -> PartitionSpec.builderFor(SCHEMA).hour("missing").build())
+ assertThatThrownBy(() -> PartitionSpec.builderFor(SCHEMA).hour("missing").build())
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot find source column: missing");
- Assertions.assertThatThrownBy(
- () -> PartitionSpec.builderFor(SCHEMA).bucket("missing", 4).build())
+ assertThatThrownBy(() -> PartitionSpec.builderFor(SCHEMA).bucket("missing", 4).build())
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot find source column: missing");
- Assertions.assertThatThrownBy(
- () -> PartitionSpec.builderFor(SCHEMA).truncate("missing", 5).build())
+ assertThatThrownBy(() -> PartitionSpec.builderFor(SCHEMA).truncate("missing", 5).build())
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot find source column: missing");
- Assertions.assertThatThrownBy(
- () -> PartitionSpec.builderFor(SCHEMA).identity("missing").build())
+ assertThatThrownBy(() -> PartitionSpec.builderFor(SCHEMA).identity("missing").build())
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot find source column: missing");
}
diff --git a/api/src/test/java/org/apache/iceberg/TestSnapshotRef.java b/api/src/test/java/org/apache/iceberg/TestSnapshotRef.java
index 21ad472..1d36b66 100644
--- a/api/src/test/java/org/apache/iceberg/TestSnapshotRef.java
+++ b/api/src/test/java/org/apache/iceberg/TestSnapshotRef.java
@@ -19,8 +19,8 @@
package org.apache.iceberg;
import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
public class TestSnapshotRef {
@@ -69,38 +69,37 @@
@Test
public void testNoTypeFailure() {
- Assertions.assertThatThrownBy(() -> SnapshotRef.builderFor(1L, null).build())
+ assertThatThrownBy(() -> SnapshotRef.builderFor(1L, null).build())
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Snapshot reference type must not be null");
}
@Test
public void testTagBuildFailures() {
- Assertions.assertThatThrownBy(() -> SnapshotRef.tagBuilder(1L).maxRefAgeMs(-1L).build())
+ assertThatThrownBy(() -> SnapshotRef.tagBuilder(1L).maxRefAgeMs(-1L).build())
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Max reference age must be greater than 0");
- Assertions.assertThatThrownBy(() -> SnapshotRef.tagBuilder(1L).minSnapshotsToKeep(2).build())
+ assertThatThrownBy(() -> SnapshotRef.tagBuilder(1L).minSnapshotsToKeep(2).build())
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Tags do not support setting minSnapshotsToKeep");
- Assertions.assertThatThrownBy(() -> SnapshotRef.tagBuilder(1L).maxSnapshotAgeMs(2L).build())
+ assertThatThrownBy(() -> SnapshotRef.tagBuilder(1L).maxSnapshotAgeMs(2L).build())
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Tags do not support setting maxSnapshotAgeMs");
}
@Test
public void testBranchBuildFailures() {
- Assertions.assertThatThrownBy(() -> SnapshotRef.branchBuilder(1L).maxSnapshotAgeMs(-1L).build())
+ assertThatThrownBy(() -> SnapshotRef.branchBuilder(1L).maxSnapshotAgeMs(-1L).build())
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Max snapshot age must be greater than 0 ms");
- Assertions.assertThatThrownBy(
- () -> SnapshotRef.branchBuilder(1L).minSnapshotsToKeep(-1).build())
+ assertThatThrownBy(() -> SnapshotRef.branchBuilder(1L).minSnapshotsToKeep(-1).build())
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Min snapshots to keep must be greater than 0");
- Assertions.assertThatThrownBy(() -> SnapshotRef.branchBuilder(1L).maxRefAgeMs(-1L).build())
+ assertThatThrownBy(() -> SnapshotRef.branchBuilder(1L).maxRefAgeMs(-1L).build())
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Max reference age must be greater than 0");
}
diff --git a/api/src/test/java/org/apache/iceberg/TestSortOrderComparators.java b/api/src/test/java/org/apache/iceberg/TestSortOrderComparators.java
index 90ac3e0..1791d8a 100644
--- a/api/src/test/java/org/apache/iceberg/TestSortOrderComparators.java
+++ b/api/src/test/java/org/apache/iceberg/TestSortOrderComparators.java
@@ -18,6 +18,8 @@
*/
package org.apache.iceberg;
+import static org.assertj.core.api.Assertions.assertThat;
+
import java.math.BigDecimal;
import java.nio.ByteBuffer;
import java.time.LocalDateTime;
@@ -31,7 +33,6 @@
import org.apache.iceberg.types.Type;
import org.apache.iceberg.types.Types;
import org.apache.iceberg.util.SerializableFunction;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
public class TestSortOrderComparators {
@@ -46,26 +47,25 @@
Comparator<StructLike> comparator = SortOrderComparators.forSchema(schema, sortOrder);
// all fields should have the same sort direction in this test class
- Assertions.assertThat(sortOrder.fields().stream().map(SortField::direction).distinct())
- .hasSize(1);
+ assertThat(sortOrder.fields().stream().map(SortField::direction).distinct()).hasSize(1);
SortDirection direction = sortOrder.fields().get(0).direction();
- Assertions.assertThat(comparator.compare(less, less)).isEqualTo(0);
- Assertions.assertThat(comparator.compare(greater, greater)).isEqualTo(0);
- Assertions.assertThat(comparator.compare(less, lessCopy)).isEqualTo(0);
+ assertThat(comparator.compare(less, less)).isEqualTo(0);
+ assertThat(comparator.compare(greater, greater)).isEqualTo(0);
+ assertThat(comparator.compare(less, lessCopy)).isEqualTo(0);
if (direction == SortDirection.ASC) {
- Assertions.assertThat(comparator.compare(less, greater)).isEqualTo(-1);
- Assertions.assertThat(comparator.compare(greater, less)).isEqualTo(1);
+ assertThat(comparator.compare(less, greater)).isEqualTo(-1);
+ assertThat(comparator.compare(greater, less)).isEqualTo(1);
// null first
- Assertions.assertThat(comparator.compare(nullValue, less)).isEqualTo(-1);
- Assertions.assertThat(comparator.compare(less, nullValue)).isEqualTo(1);
+ assertThat(comparator.compare(nullValue, less)).isEqualTo(-1);
+ assertThat(comparator.compare(less, nullValue)).isEqualTo(1);
} else {
- Assertions.assertThat(comparator.compare(less, greater)).isEqualTo(1);
- Assertions.assertThat(comparator.compare(greater, less)).isEqualTo(-1);
+ assertThat(comparator.compare(less, greater)).isEqualTo(1);
+ assertThat(comparator.compare(greater, less)).isEqualTo(-1);
// null last
- Assertions.assertThat(comparator.compare(nullValue, greater)).isEqualTo(1);
- Assertions.assertThat(comparator.compare(less, nullValue)).isEqualTo(-1);
+ assertThat(comparator.compare(nullValue, greater)).isEqualTo(1);
+ assertThat(comparator.compare(less, nullValue)).isEqualTo(-1);
}
}
@@ -196,8 +196,8 @@
TimeUnit.SECONDS.toMicros(
LocalDateTime.of(2022, 1, 10, 1, 0, 0).toEpochSecond(ZoneOffset.UTC));
- Assertions.assertThat(transform.apply(lessMicro)).isLessThan(transform.apply(greaterMicro));
- Assertions.assertThat(transform.apply(lessMicro)).isEqualTo(transform.apply(lessCopyMicro));
+ assertThat(transform.apply(lessMicro)).isLessThan(transform.apply(greaterMicro));
+ assertThat(transform.apply(lessMicro)).isEqualTo(transform.apply(lessCopyMicro));
TestHelpers.Row less = TestHelpers.Row.of("id3", lessMicro);
TestHelpers.Row greater = TestHelpers.Row.of("id2", greaterMicro);
@@ -226,8 +226,8 @@
Transform<String, Integer> bucket = Transforms.bucket(4);
SerializableFunction<String, Integer> transform = bucket.bind(Types.StringType.get());
- Assertions.assertThat(transform.apply("bbb")).isLessThan(transform.apply("aaa"));
- Assertions.assertThat(transform.apply("bbb")).isEqualTo(transform.apply("cca"));
+ assertThat(transform.apply("bbb")).isLessThan(transform.apply("aaa"));
+ assertThat(transform.apply("bbb")).isEqualTo(transform.apply("cca"));
TestHelpers.Row less = TestHelpers.Row.of("id3", "bbb");
TestHelpers.Row greater = TestHelpers.Row.of("id2", "aaa");
@@ -260,9 +260,9 @@
Transform<UUID, Integer> bucket = Transforms.bucket(4);
SerializableFunction<UUID, Integer> transform = bucket.bind(Types.UUIDType.get());
- Assertions.assertThat(transform.apply(UUID.fromString("fd02441d-1423-4a3f-8785-c7dd5647e26b")))
+ assertThat(transform.apply(UUID.fromString("fd02441d-1423-4a3f-8785-c7dd5647e26b")))
.isLessThan(transform.apply(UUID.fromString("86873e7d-1374-4493-8e1d-9095eff7046c")));
- Assertions.assertThat(transform.apply(UUID.fromString("fd02441d-1423-4a3f-8785-c7dd5647e26b")))
+ assertThat(transform.apply(UUID.fromString("fd02441d-1423-4a3f-8785-c7dd5647e26b")))
.isEqualTo(transform.apply(UUID.fromString("81873e7d-1374-4493-8e1d-9095eff7046c")));
TestHelpers.Row less =
@@ -305,9 +305,9 @@
Transform<ByteBuffer, ByteBuffer> truncate = Transforms.truncate(2);
SerializableFunction<ByteBuffer, ByteBuffer> transform = truncate.bind(Types.BinaryType.get());
- Assertions.assertThat(transform.apply(ByteBuffer.wrap(new byte[] {1, 2, 3})))
+ assertThat(transform.apply(ByteBuffer.wrap(new byte[] {1, 2, 3})))
.isLessThan(transform.apply(ByteBuffer.wrap(new byte[] {1, 3, 1})));
- Assertions.assertThat(transform.apply(ByteBuffer.wrap(new byte[] {1, 2, 3})))
+ assertThat(transform.apply(ByteBuffer.wrap(new byte[] {1, 2, 3})))
.isEqualTo(transform.apply(ByteBuffer.wrap(new byte[] {1, 2, 5, 6})));
TestHelpers.Row less = TestHelpers.Row.of("id3", ByteBuffer.wrap(new byte[] {1, 2, 3}));
@@ -414,9 +414,9 @@
Transform<ByteBuffer, ByteBuffer> bucket = Transforms.truncate(2);
SerializableFunction<ByteBuffer, ByteBuffer> transform = bucket.bind(Types.BinaryType.get());
- Assertions.assertThat(transform.apply(ByteBuffer.wrap(new byte[] {2, 3, 4})))
+ assertThat(transform.apply(ByteBuffer.wrap(new byte[] {2, 3, 4})))
.isLessThan(transform.apply(ByteBuffer.wrap(new byte[] {9, 3, 4})));
- Assertions.assertThat(transform.apply(ByteBuffer.wrap(new byte[] {2, 3, 4})))
+ assertThat(transform.apply(ByteBuffer.wrap(new byte[] {2, 3, 4})))
.isEqualTo(transform.apply(ByteBuffer.wrap(new byte[] {2, 3, 9})));
TestHelpers.Row less =
diff --git a/api/src/test/java/org/apache/iceberg/catalog/TestNamespace.java b/api/src/test/java/org/apache/iceberg/catalog/TestNamespace.java
index 62335b4..77ddf8c 100644
--- a/api/src/test/java/org/apache/iceberg/catalog/TestNamespace.java
+++ b/api/src/test/java/org/apache/iceberg/catalog/TestNamespace.java
@@ -18,46 +18,48 @@
*/
package org.apache.iceberg.catalog;
-import org.assertj.core.api.Assertions;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
+
import org.junit.jupiter.api.Test;
public class TestNamespace {
@Test
public void testWithNullAndEmpty() {
- Assertions.assertThatThrownBy(() -> Namespace.of((String[]) null))
+ assertThatThrownBy(() -> Namespace.of((String[]) null))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot create Namespace from null array");
- Assertions.assertThat(Namespace.of()).isEqualTo(Namespace.empty());
+ assertThat(Namespace.of()).isEqualTo(Namespace.empty());
}
@Test
public void testNamespace() {
String[] levels = {"a", "b", "c", "d"};
Namespace namespace = Namespace.of(levels);
- Assertions.assertThat(namespace).isNotNull();
- Assertions.assertThat(namespace.levels()).hasSize(4);
- Assertions.assertThat(namespace).hasToString("a.b.c.d");
+ assertThat(namespace).isNotNull();
+ assertThat(namespace.levels()).hasSize(4);
+ assertThat(namespace).hasToString("a.b.c.d");
for (int i = 0; i < levels.length; i++) {
- Assertions.assertThat(namespace.level(i)).isEqualTo(levels[i]);
+ assertThat(namespace.level(i)).isEqualTo(levels[i]);
}
}
@Test
public void testWithNullInLevel() {
- Assertions.assertThatThrownBy(() -> Namespace.of("a", null, "b"))
+ assertThatThrownBy(() -> Namespace.of("a", null, "b"))
.isInstanceOf(NullPointerException.class)
.hasMessage("Cannot create a namespace with a null level");
}
@Test
public void testDisallowsNamespaceWithNullByte() {
- Assertions.assertThatThrownBy(() -> Namespace.of("ac", "\u0000c", "b"))
+ assertThatThrownBy(() -> Namespace.of("ac", "\u0000c", "b"))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot create a namespace with the null-byte character");
- Assertions.assertThatThrownBy(() -> Namespace.of("ac", "c\0", "b"))
+ assertThatThrownBy(() -> Namespace.of("ac", "c\0", "b"))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot create a namespace with the null-byte character");
}
diff --git a/api/src/test/java/org/apache/iceberg/catalog/TestTableIdentifier.java b/api/src/test/java/org/apache/iceberg/catalog/TestTableIdentifier.java
index 06e3295..ca95694 100644
--- a/api/src/test/java/org/apache/iceberg/catalog/TestTableIdentifier.java
+++ b/api/src/test/java/org/apache/iceberg/catalog/TestTableIdentifier.java
@@ -19,8 +19,8 @@
package org.apache.iceberg.catalog;
import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
public class TestTableIdentifier {
@@ -54,26 +54,26 @@
@Test
public void testInvalidTableName() {
- Assertions.assertThatThrownBy(() -> TableIdentifier.of(Namespace.empty(), ""))
+ assertThatThrownBy(() -> TableIdentifier.of(Namespace.empty(), ""))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Invalid table name: null or empty");
- Assertions.assertThatThrownBy(() -> TableIdentifier.of(Namespace.empty(), null))
+ assertThatThrownBy(() -> TableIdentifier.of(Namespace.empty(), null))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Invalid table name: null or empty");
}
@Test
public void testNulls() {
- Assertions.assertThatThrownBy(() -> TableIdentifier.of((String[]) null))
+ assertThatThrownBy(() -> TableIdentifier.of((String[]) null))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot create table identifier from null array");
- Assertions.assertThatThrownBy(() -> TableIdentifier.parse(null))
+ assertThatThrownBy(() -> TableIdentifier.parse(null))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse table identifier: null");
- Assertions.assertThatThrownBy(() -> TableIdentifier.of(null, "name"))
+ assertThatThrownBy(() -> TableIdentifier.of(null, "name"))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Invalid Namespace: null");
}
diff --git a/api/src/test/java/org/apache/iceberg/expressions/TestAggregateBinding.java b/api/src/test/java/org/apache/iceberg/expressions/TestAggregateBinding.java
index 869be1c..95a9ac2 100644
--- a/api/src/test/java/org/apache/iceberg/expressions/TestAggregateBinding.java
+++ b/api/src/test/java/org/apache/iceberg/expressions/TestAggregateBinding.java
@@ -19,13 +19,13 @@
package org.apache.iceberg.expressions;
import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
import java.util.List;
import org.apache.iceberg.exceptions.ValidationException;
import org.apache.iceberg.relocated.com.google.common.collect.ImmutableList;
import org.apache.iceberg.types.Types;
import org.apache.iceberg.types.Types.StructType;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
public class TestAggregateBinding {
@@ -60,7 +60,7 @@
@Test
public void testBoundAggregateFails() {
Expression unbound = Expressions.count("x");
- Assertions.assertThatThrownBy(() -> Binder.bind(struct, Binder.bind(struct, unbound)))
+ assertThatThrownBy(() -> Binder.bind(struct, Binder.bind(struct, unbound)))
.isInstanceOf(IllegalStateException.class)
.hasMessageContaining("Found already bound aggregate");
}
@@ -79,7 +79,7 @@
@Test
public void testCaseSensitiveReference() {
Expression expr = Expressions.max("X");
- Assertions.assertThatThrownBy(() -> Binder.bind(struct, expr, true))
+ assertThatThrownBy(() -> Binder.bind(struct, expr, true))
.isInstanceOf(ValidationException.class)
.hasMessageContaining("Cannot find field 'X' in struct");
}
@@ -87,13 +87,13 @@
@Test
public void testMissingField() {
UnboundAggregate<?> unbound = Expressions.count("missing");
- Assertions.assertThatThrownBy(() -> unbound.bind(struct, false))
+ assertThatThrownBy(() -> unbound.bind(struct, false))
.isInstanceOf(ValidationException.class)
.hasMessageContaining("Cannot find field 'missing' in struct:");
}
private static <T, C> BoundAggregate<T, C> assertAndUnwrapAggregate(Expression expr) {
- Assertions.assertThat(expr).isInstanceOf(BoundAggregate.class);
+ assertThat(expr).isInstanceOf(BoundAggregate.class);
return (BoundAggregate<T, C>) expr;
}
}
diff --git a/api/src/test/java/org/apache/iceberg/expressions/TestEvaluator.java b/api/src/test/java/org/apache/iceberg/expressions/TestEvaluator.java
index 812b4cf..792e651 100644
--- a/api/src/test/java/org/apache/iceberg/expressions/TestEvaluator.java
+++ b/api/src/test/java/org/apache/iceberg/expressions/TestEvaluator.java
@@ -41,6 +41,7 @@
import static org.apache.iceberg.types.Types.NestedField.optional;
import static org.apache.iceberg.types.Types.NestedField.required;
import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
import java.util.Arrays;
import java.util.Collection;
@@ -50,7 +51,6 @@
import org.apache.iceberg.exceptions.ValidationException;
import org.apache.iceberg.types.Types;
import org.apache.iceberg.types.Types.StructType;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
public class TestEvaluator {
@@ -590,7 +590,7 @@
@Test
public void testCaseSensitiveNot() {
- Assertions.assertThatThrownBy(() -> new Evaluator(STRUCT, not(equal("X", 7)), true))
+ assertThatThrownBy(() -> new Evaluator(STRUCT, not(equal("X", 7)), true))
.isInstanceOf(ValidationException.class)
.hasMessageContaining("Cannot find field 'X' in struct");
}
@@ -678,32 +678,31 @@
@Test
public void testInExceptions() {
- Assertions.assertThatThrownBy(() -> in("x", (Literal) null))
+ assertThatThrownBy(() -> in("x", (Literal) null))
.isInstanceOf(NullPointerException.class)
.hasMessage("Cannot create expression literal from null");
- Assertions.assertThatThrownBy(() -> in("x", (Collection<?>) null))
+ assertThatThrownBy(() -> in("x", (Collection<?>) null))
.isInstanceOf(NullPointerException.class)
.hasMessage("Values cannot be null for IN predicate.");
- Assertions.assertThatThrownBy(() -> in("x", 5, 6).literal())
+ assertThatThrownBy(() -> in("x", 5, 6).literal())
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("IN predicate cannot return a literal");
- Assertions.assertThatThrownBy(() -> in("x", 1, 2, null))
+ assertThatThrownBy(() -> in("x", 1, 2, null))
.isInstanceOf(NullPointerException.class)
.hasMessage("Cannot create expression literal from null");
- Assertions.assertThatThrownBy(() -> new Evaluator(STRUCT, in("x", 7, 8, 9.1)))
+ assertThatThrownBy(() -> new Evaluator(STRUCT, in("x", 7, 8, 9.1)))
.isInstanceOf(ValidationException.class)
.hasMessageContaining("Invalid value for conversion to type int");
- Assertions.assertThatThrownBy(() -> predicate(Expression.Operation.IN, "x"))
+ assertThatThrownBy(() -> predicate(Expression.Operation.IN, "x"))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot create IN predicate without a value");
- Assertions.assertThatThrownBy(
- () -> new Evaluator(STRUCT, predicate(Expression.Operation.IN, "x", 5.1)))
+ assertThatThrownBy(() -> new Evaluator(STRUCT, predicate(Expression.Operation.IN, "x", 5.1)))
.isInstanceOf(ValidationException.class)
.hasMessageContaining("Invalid value for conversion to type int");
}
@@ -781,31 +780,31 @@
@Test
public void testNotInExceptions() {
- Assertions.assertThatThrownBy(() -> notIn("x", (Literal) null))
+ assertThatThrownBy(() -> notIn("x", (Literal) null))
.isInstanceOf(NullPointerException.class)
.hasMessage("Cannot create expression literal from null");
- Assertions.assertThatThrownBy(() -> notIn("x", (Collection<?>) null))
+ assertThatThrownBy(() -> notIn("x", (Collection<?>) null))
.isInstanceOf(NullPointerException.class)
.hasMessage("Values cannot be null for NOT_IN predicate.");
- Assertions.assertThatThrownBy(() -> notIn("x", 5, 6).literal())
+ assertThatThrownBy(() -> notIn("x", 5, 6).literal())
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("NOT_IN predicate cannot return a literal");
- Assertions.assertThatThrownBy(() -> notIn("x", 1, 2, null))
+ assertThatThrownBy(() -> notIn("x", 1, 2, null))
.isInstanceOf(NullPointerException.class)
.hasMessage("Cannot create expression literal from null");
- Assertions.assertThatThrownBy(() -> new Evaluator(STRUCT, notIn("x", 7, 8, 9.1)))
+ assertThatThrownBy(() -> new Evaluator(STRUCT, notIn("x", 7, 8, 9.1)))
.isInstanceOf(ValidationException.class)
.hasMessageContaining("Invalid value for conversion to type int");
- Assertions.assertThatThrownBy(() -> predicate(Expression.Operation.NOT_IN, "x"))
+ assertThatThrownBy(() -> predicate(Expression.Operation.NOT_IN, "x"))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot create NOT_IN predicate without a value");
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() -> new Evaluator(STRUCT, predicate(Expression.Operation.NOT_IN, "x", 5.1)))
.isInstanceOf(ValidationException.class)
.hasMessageContaining("Invalid value for conversion to type int");
diff --git a/api/src/test/java/org/apache/iceberg/expressions/TestExpressionBinding.java b/api/src/test/java/org/apache/iceberg/expressions/TestExpressionBinding.java
index 0a9cb4b..8dccc4e 100644
--- a/api/src/test/java/org/apache/iceberg/expressions/TestExpressionBinding.java
+++ b/api/src/test/java/org/apache/iceberg/expressions/TestExpressionBinding.java
@@ -31,12 +31,12 @@
import static org.apache.iceberg.expressions.Expressions.startsWith;
import static org.apache.iceberg.types.Types.NestedField.required;
import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
import org.apache.iceberg.TestHelpers;
import org.apache.iceberg.exceptions.ValidationException;
import org.apache.iceberg.types.Types;
import org.apache.iceberg.types.Types.StructType;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
public class TestExpressionBinding {
@@ -50,7 +50,7 @@
@Test
public void testMissingReference() {
Expression expr = and(equal("t", 5), equal("x", 7));
- Assertions.assertThatThrownBy(() -> Binder.bind(STRUCT, expr))
+ assertThatThrownBy(() -> Binder.bind(STRUCT, expr))
.isInstanceOf(ValidationException.class)
.hasMessageContaining("Cannot find field 't' in struct");
}
@@ -58,7 +58,7 @@
@Test
public void testBoundExpressionFails() {
Expression expr = not(equal("x", 7));
- Assertions.assertThatThrownBy(() -> Binder.bind(STRUCT, Binder.bind(STRUCT, expr)))
+ assertThatThrownBy(() -> Binder.bind(STRUCT, Binder.bind(STRUCT, expr)))
.isInstanceOf(IllegalStateException.class)
.hasMessageContaining("Found already bound predicate");
}
@@ -78,7 +78,7 @@
@Test
public void testCaseSensitiveReference() {
Expression expr = not(equal("X", 7));
- Assertions.assertThatThrownBy(() -> Binder.bind(STRUCT, expr, true))
+ assertThatThrownBy(() -> Binder.bind(STRUCT, expr, true))
.isInstanceOf(ValidationException.class)
.hasMessageContaining("Cannot find field 'X' in struct");
}
@@ -204,7 +204,7 @@
Expression bound = Binder.bind(STRUCT, equal(bucket("x", 16), 10));
TestHelpers.assertAllReferencesBound("BoundTransform", bound);
BoundPredicate<?> pred = TestHelpers.assertAndUnwrap(bound);
- Assertions.assertThat(pred.term())
+ assertThat(pred.term())
.as("Should use a BoundTransform child")
.isInstanceOf(BoundTransform.class);
BoundTransform<?, ?> transformExpr = (BoundTransform<?, ?>) pred.term();
diff --git a/api/src/test/java/org/apache/iceberg/expressions/TestExpressionHelpers.java b/api/src/test/java/org/apache/iceberg/expressions/TestExpressionHelpers.java
index debbc74..8bb03c6 100644
--- a/api/src/test/java/org/apache/iceberg/expressions/TestExpressionHelpers.java
+++ b/api/src/test/java/org/apache/iceberg/expressions/TestExpressionHelpers.java
@@ -45,13 +45,13 @@
import static org.apache.iceberg.expressions.Expressions.truncate;
import static org.apache.iceberg.expressions.Expressions.year;
import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
import java.util.concurrent.Callable;
import org.apache.iceberg.transforms.Transforms;
import org.apache.iceberg.types.Types;
import org.apache.iceberg.types.Types.NestedField;
import org.apache.iceberg.types.Types.StructType;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
public class TestExpressionHelpers {
@@ -171,14 +171,14 @@
@Test
public void testNullName() {
- Assertions.assertThatThrownBy(() -> equal((String) null, 5))
+ assertThatThrownBy(() -> equal((String) null, 5))
.isInstanceOf(NullPointerException.class)
.hasMessage("Name cannot be null");
}
@Test
public void testNullValueExpr() {
- Assertions.assertThatThrownBy(() -> equal((UnboundTerm<Integer>) null, 5))
+ assertThatThrownBy(() -> equal((UnboundTerm<Integer>) null, 5))
.isInstanceOf(NullPointerException.class)
.hasMessage("Term cannot be null");
}
@@ -222,7 +222,7 @@
}
private void assertInvalidateNaNThrows(Callable<UnboundPredicate<Double>> callable) {
- Assertions.assertThatThrownBy(callable::call)
+ assertThatThrownBy(callable::call)
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot create expression literal from NaN");
}
diff --git a/api/src/test/java/org/apache/iceberg/expressions/TestExpressionUtil.java b/api/src/test/java/org/apache/iceberg/expressions/TestExpressionUtil.java
index 9a27830..894989f 100644
--- a/api/src/test/java/org/apache/iceberg/expressions/TestExpressionUtil.java
+++ b/api/src/test/java/org/apache/iceberg/expressions/TestExpressionUtil.java
@@ -33,7 +33,6 @@
import org.apache.iceberg.relocated.com.google.common.collect.Lists;
import org.apache.iceberg.types.Types;
import org.apache.iceberg.util.DateTimeUtil;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
public class TestExpressionUtil {
@@ -114,7 +113,7 @@
@Test
public void zeroAndNegativeNumberHandling() {
- Assertions.assertThat(
+ assertThat(
ExpressionUtil.toSanitizedString(
Expressions.in(
"test",
@@ -734,7 +733,7 @@
"2022-04-29T23:70:51-07:00",
"2022-04-29T23:49:51.123456+100:00")) {
String sanitizedFilter = ExpressionUtil.toSanitizedString(Expressions.equal("test", filter));
- Assertions.assertThat(filterPattern.matcher(sanitizedFilter)).matches();
+ assertThat(filterPattern.matcher(sanitizedFilter)).matches();
}
}
@@ -989,7 +988,7 @@
}
private void assertEquals(Expression expected, Expression actual) {
- Assertions.assertThat(expected).isInstanceOf(UnboundPredicate.class);
+ assertThat(expected).isInstanceOf(UnboundPredicate.class);
assertEquals((UnboundPredicate<?>) expected, (UnboundPredicate<?>) actual);
}
@@ -1000,7 +999,7 @@
}
private void assertEquals(UnboundTerm<?> expected, UnboundTerm<?> actual) {
- Assertions.assertThat(expected)
+ assertThat(expected)
.as("Unknown expected term: " + expected)
.isOfAnyClassIn(NamedReference.class, UnboundTransform.class);
diff --git a/api/src/test/java/org/apache/iceberg/expressions/TestInclusiveManifestEvaluator.java b/api/src/test/java/org/apache/iceberg/expressions/TestInclusiveManifestEvaluator.java
index 1a0fac8..068c862 100644
--- a/api/src/test/java/org/apache/iceberg/expressions/TestInclusiveManifestEvaluator.java
+++ b/api/src/test/java/org/apache/iceberg/expressions/TestInclusiveManifestEvaluator.java
@@ -39,6 +39,7 @@
import static org.apache.iceberg.types.Types.NestedField.optional;
import static org.apache.iceberg.types.Types.NestedField.required;
import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
import java.nio.ByteBuffer;
import org.apache.iceberg.ManifestFile;
@@ -48,7 +49,6 @@
import org.apache.iceberg.exceptions.ValidationException;
import org.apache.iceberg.relocated.com.google.common.collect.ImmutableList;
import org.apache.iceberg.types.Types;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
public class TestInclusiveManifestEvaluator {
@@ -245,7 +245,7 @@
@Test
public void testMissingColumn() {
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() -> ManifestEvaluator.forRowFilter(lessThan("missing", 5), SPEC, true).eval(FILE))
.isInstanceOf(ValidationException.class)
.hasMessageContaining("Cannot find field 'missing'");
@@ -546,7 +546,7 @@
@Test
public void testCaseSensitiveIntegerNotEqRewritten() {
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() -> ManifestEvaluator.forRowFilter(not(equal("ID", 5)), SPEC, true).eval(FILE))
.isInstanceOf(ValidationException.class)
.hasMessageContaining("Cannot find field 'ID'");
diff --git a/api/src/test/java/org/apache/iceberg/expressions/TestInclusiveMetricsEvaluator.java b/api/src/test/java/org/apache/iceberg/expressions/TestInclusiveMetricsEvaluator.java
index 6c3e976..251f194 100644
--- a/api/src/test/java/org/apache/iceberg/expressions/TestInclusiveMetricsEvaluator.java
+++ b/api/src/test/java/org/apache/iceberg/expressions/TestInclusiveMetricsEvaluator.java
@@ -39,6 +39,7 @@
import static org.apache.iceberg.types.Types.NestedField.optional;
import static org.apache.iceberg.types.Types.NestedField.required;
import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
import java.util.List;
import org.apache.iceberg.DataFile;
@@ -52,7 +53,6 @@
import org.apache.iceberg.types.Types.IntegerType;
import org.apache.iceberg.types.Types.StringType;
import org.apache.iceberg.util.UnicodeUtil;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
public class TestInclusiveMetricsEvaluator {
@@ -295,7 +295,7 @@
@Test
public void testMissingColumn() {
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() -> new InclusiveMetricsEvaluator(SCHEMA, lessThan("missing", 5)).eval(FILE))
.isInstanceOf(ValidationException.class)
.hasMessageContaining("Cannot find field 'missing'");
@@ -612,7 +612,7 @@
@Test
public void testCaseSensitiveIntegerNotEqRewritten() {
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() -> new InclusiveMetricsEvaluator(SCHEMA, not(equal("ID", 5)), true).eval(FILE))
.isInstanceOf(ValidationException.class)
.hasMessageContaining("Cannot find field 'ID'");
diff --git a/api/src/test/java/org/apache/iceberg/expressions/TestPredicateBinding.java b/api/src/test/java/org/apache/iceberg/expressions/TestPredicateBinding.java
index 3d79259..a07c8fd 100644
--- a/api/src/test/java/org/apache/iceberg/expressions/TestPredicateBinding.java
+++ b/api/src/test/java/org/apache/iceberg/expressions/TestPredicateBinding.java
@@ -38,6 +38,7 @@
import static org.apache.iceberg.types.Types.NestedField.optional;
import static org.apache.iceberg.types.Types.NestedField.required;
import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
import java.math.BigDecimal;
import java.util.Arrays;
@@ -46,7 +47,6 @@
import org.apache.iceberg.exceptions.ValidationException;
import org.apache.iceberg.types.Types;
import org.apache.iceberg.types.Types.StructType;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
public class TestPredicateBinding {
@@ -79,7 +79,7 @@
StructType struct = StructType.of(required(13, "x", Types.IntegerType.get()));
UnboundPredicate<Integer> unbound = new UnboundPredicate<>(LT, ref("missing"), 6);
- Assertions.assertThatThrownBy(() -> unbound.bind(struct))
+ assertThatThrownBy(() -> unbound.bind(struct))
.isInstanceOf(ValidationException.class)
.hasMessageContaining("Cannot find field 'missing' in struct:");
}
@@ -147,7 +147,7 @@
for (Expression.Operation op : COMPARISONS) {
UnboundPredicate<String> unbound = new UnboundPredicate<>(op, ref("f"), "12.40");
- Assertions.assertThatThrownBy(() -> unbound.bind(struct))
+ assertThatThrownBy(() -> unbound.bind(struct))
.isInstanceOf(ValidationException.class)
.hasMessage("Invalid value for conversion to type float: 12.40 (java.lang.String)");
}
@@ -377,7 +377,7 @@
// string (non-compatible)
StructType strStruct = StructType.of(optional(21, "s", Types.StringType.get()));
- Assertions.assertThatThrownBy(() -> new UnboundPredicate<>(IS_NAN, ref("s")).bind(strStruct))
+ assertThatThrownBy(() -> new UnboundPredicate<>(IS_NAN, ref("s")).bind(strStruct))
.isInstanceOf(ValidationException.class)
.hasMessage("IsNaN cannot be used with a non-floating-point column");
}
@@ -406,7 +406,7 @@
// string (non-compatible)
StructType strStruct = StructType.of(optional(21, "s", Types.StringType.get()));
- Assertions.assertThatThrownBy(() -> new UnboundPredicate<>(NOT_NAN, ref("s")).bind(strStruct))
+ assertThatThrownBy(() -> new UnboundPredicate<>(NOT_NAN, ref("s")).bind(strStruct))
.isInstanceOf(ValidationException.class)
.hasMessage("NotNaN cannot be used with a non-floating-point column");
}
diff --git a/api/src/test/java/org/apache/iceberg/expressions/TestStrictMetricsEvaluator.java b/api/src/test/java/org/apache/iceberg/expressions/TestStrictMetricsEvaluator.java
index 82aaff0..a23afb1 100644
--- a/api/src/test/java/org/apache/iceberg/expressions/TestStrictMetricsEvaluator.java
+++ b/api/src/test/java/org/apache/iceberg/expressions/TestStrictMetricsEvaluator.java
@@ -37,6 +37,7 @@
import static org.apache.iceberg.types.Types.NestedField.optional;
import static org.apache.iceberg.types.Types.NestedField.required;
import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
import org.apache.iceberg.DataFile;
import org.apache.iceberg.Schema;
@@ -47,7 +48,6 @@
import org.apache.iceberg.types.Types;
import org.apache.iceberg.types.Types.IntegerType;
import org.apache.iceberg.types.Types.StringType;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
public class TestStrictMetricsEvaluator {
@@ -280,8 +280,7 @@
@Test
public void testMissingColumn() {
- Assertions.assertThatThrownBy(
- () -> new StrictMetricsEvaluator(SCHEMA, lessThan("missing", 5)).eval(FILE))
+ assertThatThrownBy(() -> new StrictMetricsEvaluator(SCHEMA, lessThan("missing", 5)).eval(FILE))
.isInstanceOf(ValidationException.class)
.hasMessageContaining("Cannot find field 'missing'");
}
diff --git a/api/src/test/java/org/apache/iceberg/expressions/TestStringLiteralConversions.java b/api/src/test/java/org/apache/iceberg/expressions/TestStringLiteralConversions.java
index f35b274..1dc2225 100644
--- a/api/src/test/java/org/apache/iceberg/expressions/TestStringLiteralConversions.java
+++ b/api/src/test/java/org/apache/iceberg/expressions/TestStringLiteralConversions.java
@@ -19,6 +19,7 @@
package org.apache.iceberg.expressions;
import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
import java.math.BigDecimal;
import java.time.DateTimeException;
@@ -32,7 +33,6 @@
import org.apache.avro.Schema;
import org.apache.avro.data.TimeConversions;
import org.apache.iceberg.types.Types;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
public class TestStringLiteralConversions {
@@ -182,7 +182,7 @@
public void testTimestampWithZoneWithoutZoneInLiteral() {
// Zone must be present in literals when converting to timestamp with zone
Literal<CharSequence> timestampStr = Literal.of("2017-08-18T14:21:01.919");
- Assertions.assertThatThrownBy(() -> timestampStr.to(Types.TimestampType.withZone()))
+ assertThatThrownBy(() -> timestampStr.to(Types.TimestampType.withZone()))
.isInstanceOf(DateTimeException.class)
.hasMessageContaining("could not be parsed");
}
@@ -191,7 +191,7 @@
public void testTimestampWithoutZoneWithZoneInLiteral() {
// Zone must not be present in literals when converting to timestamp without zone
Literal<CharSequence> timestampStr = Literal.of("2017-08-18T14:21:01.919+07:00");
- Assertions.assertThatThrownBy(() -> timestampStr.to(Types.TimestampType.withoutZone()))
+ assertThatThrownBy(() -> timestampStr.to(Types.TimestampType.withoutZone()))
.isInstanceOf(DateTimeException.class)
.hasMessageContaining("could not be parsed");
}
diff --git a/api/src/test/java/org/apache/iceberg/io/TestCloseableGroup.java b/api/src/test/java/org/apache/iceberg/io/TestCloseableGroup.java
index 4e9604f..8003c34 100644
--- a/api/src/test/java/org/apache/iceberg/io/TestCloseableGroup.java
+++ b/api/src/test/java/org/apache/iceberg/io/TestCloseableGroup.java
@@ -18,9 +18,10 @@
*/
package org.apache.iceberg.io;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
+
import java.io.Closeable;
import java.io.IOException;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
import org.mockito.Mockito;
@@ -91,7 +92,7 @@
closeableGroup.addCloseable(closeable2);
closeableGroup.addCloseable(closeable3);
- Assertions.assertThatThrownBy(closeableGroup::close).isEqualTo(ioException);
+ assertThatThrownBy(closeableGroup::close).isEqualTo(ioException);
Mockito.verify(closeable1).close();
Mockito.verify(closeable2).close();
Mockito.verifyNoInteractions(closeable3);
@@ -111,7 +112,7 @@
closeableGroup.addCloseable(closeable2);
closeableGroup.addCloseable(closeable3);
- Assertions.assertThatThrownBy(closeableGroup::close).isEqualTo(ioException);
+ assertThatThrownBy(closeableGroup::close).isEqualTo(ioException);
Mockito.verify(closeable1).close();
Mockito.verify(closeable2).close();
Mockito.verifyNoInteractions(closeable3);
@@ -126,7 +127,7 @@
CloseableGroup closeableGroup = new CloseableGroup();
closeableGroup.addCloseable(throwingAutoCloseable);
- Assertions.assertThatThrownBy(closeableGroup::close)
+ assertThatThrownBy(closeableGroup::close)
.isInstanceOf(RuntimeException.class)
.hasRootCause(generalException);
}
@@ -141,7 +142,7 @@
CloseableGroup closeableGroup = new CloseableGroup();
closeableGroup.addCloseable(throwingCloseable);
- Assertions.assertThatThrownBy(closeableGroup::close).isEqualTo(runtimeException);
+ assertThatThrownBy(closeableGroup::close).isEqualTo(runtimeException);
}
@Test
@@ -153,6 +154,6 @@
CloseableGroup closeableGroup = new CloseableGroup();
closeableGroup.addCloseable(throwingAutoCloseable);
- Assertions.assertThatThrownBy(closeableGroup::close).isEqualTo(runtimeException);
+ assertThatThrownBy(closeableGroup::close).isEqualTo(runtimeException);
}
}
diff --git a/api/src/test/java/org/apache/iceberg/io/TestCloseableIterable.java b/api/src/test/java/org/apache/iceberg/io/TestCloseableIterable.java
index 5a14874..ac60c6f 100644
--- a/api/src/test/java/org/apache/iceberg/io/TestCloseableIterable.java
+++ b/api/src/test/java/org/apache/iceberg/io/TestCloseableIterable.java
@@ -19,6 +19,7 @@
package org.apache.iceberg.io;
import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
import java.io.IOException;
import java.util.Arrays;
@@ -32,7 +33,6 @@
import org.apache.iceberg.metrics.DefaultMetricsContext;
import org.apache.iceberg.relocated.com.google.common.collect.Iterables;
import org.apache.iceberg.relocated.com.google.common.collect.Lists;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
public class TestCloseableIterable {
@@ -92,15 +92,14 @@
CloseableIterable<Integer> concat5 =
CloseableIterable.concat(Lists.newArrayList(empty, empty, empty));
- Assertions.assertThatThrownBy(() -> Iterables.getLast(concat5))
- .isInstanceOf(NoSuchElementException.class);
+ assertThatThrownBy(() -> Iterables.getLast(concat5)).isInstanceOf(NoSuchElementException.class);
}
@Test
public void testWithCompletionRunnable() throws IOException {
AtomicInteger completionCounter = new AtomicInteger(0);
List<Integer> items = Lists.newArrayList(1, 2, 3, 4, 5);
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() -> CloseableIterable.whenComplete(CloseableIterable.combine(items, () -> {}), null))
.isInstanceOf(NullPointerException.class)
.hasMessage("Invalid runnable: null");
@@ -120,9 +119,9 @@
try (CloseableIterable<Integer> iter =
CloseableIterable.whenComplete(
CloseableIterable.combine(empty, () -> {}), completionCounter::incrementAndGet)) {
- iter.forEach(val -> Assertions.assertThat(completionCounter.get()).isZero());
+ iter.forEach(val -> assertThat(completionCounter.get()).isZero());
}
- Assertions.assertThat(completionCounter.get()).isOne();
+ assertThat(completionCounter.get()).isOne();
}
@Test
@@ -142,7 +141,7 @@
AtomicInteger completionCounter = new AtomicInteger(0);
List<Integer> items = Lists.newArrayList(1, 2, 3, 4, 5);
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() -> {
try (CloseableIterable<Integer> iter =
CloseableIterable.whenComplete(
@@ -158,7 +157,7 @@
.isInstanceOf(RuntimeException.class)
.hasMessage("expected");
- Assertions.assertThat(completionCounter.get()).isOne();
+ assertThat(completionCounter.get()).isOne();
}
@Test
@@ -217,7 +216,7 @@
counter, CloseableIterable.withNoopClose(Arrays.asList(1, 2, 3, 4, 5)));
assertThat(counter.value()).isZero();
items.forEach(item -> {});
- Assertions.assertThat(counter.value()).isEqualTo(5);
+ assertThat(counter.value()).isEqualTo(5);
}
@Test
@@ -230,45 +229,42 @@
x -> x % 2 == 0);
assertThat(counter.value()).isZero();
items.forEach(item -> {});
- Assertions.assertThat(counter.value()).isEqualTo(3);
+ assertThat(counter.value()).isEqualTo(3);
}
@Test
public void countNullCheck() {
- Assertions.assertThatThrownBy(() -> CloseableIterable.count(null, CloseableIterable.empty()))
+ assertThatThrownBy(() -> CloseableIterable.count(null, CloseableIterable.empty()))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Invalid counter: null");
Counter counter = new DefaultMetricsContext().counter("x");
- Assertions.assertThatThrownBy(() -> CloseableIterable.count(counter, null))
+ assertThatThrownBy(() -> CloseableIterable.count(counter, null))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Invalid iterable: null");
}
@Test
public void countSkippedNullCheck() {
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() ->
CloseableIterable.filter(null, CloseableIterable.empty(), Predicate.isEqual(true)))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Invalid counter: null");
Counter counter = new DefaultMetricsContext().counter("x");
- Assertions.assertThatThrownBy(
- () -> CloseableIterable.filter(counter, null, Predicate.isEqual(true)))
+ assertThatThrownBy(() -> CloseableIterable.filter(counter, null, Predicate.isEqual(true)))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Invalid iterable: null");
- Assertions.assertThatThrownBy(
- () -> CloseableIterable.filter(counter, CloseableIterable.empty(), null))
+ assertThatThrownBy(() -> CloseableIterable.filter(counter, CloseableIterable.empty(), null))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Invalid predicate: null");
}
@Test
public void transformNullCheck() {
- Assertions.assertThatThrownBy(
- () -> CloseableIterable.transform(CloseableIterable.empty(), null))
+ assertThatThrownBy(() -> CloseableIterable.transform(CloseableIterable.empty(), null))
.isInstanceOf(NullPointerException.class)
.hasMessage("Invalid transform: null");
}
diff --git a/api/src/test/java/org/apache/iceberg/io/TestClosingIterator.java b/api/src/test/java/org/apache/iceberg/io/TestClosingIterator.java
index 19bca2e..0d96907 100644
--- a/api/src/test/java/org/apache/iceberg/io/TestClosingIterator.java
+++ b/api/src/test/java/org/apache/iceberg/io/TestClosingIterator.java
@@ -19,12 +19,12 @@
package org.apache.iceberg.io;
import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
public class TestClosingIterator {
@@ -67,8 +67,7 @@
@Test
public void transformNullCheck() {
- Assertions.assertThatThrownBy(
- () -> CloseableIterator.transform(CloseableIterator.empty(), null))
+ assertThatThrownBy(() -> CloseableIterator.transform(CloseableIterator.empty(), null))
.isInstanceOf(NullPointerException.class)
.hasMessage("Invalid transform: null");
}
diff --git a/api/src/test/java/org/apache/iceberg/metrics/TestDefaultCounter.java b/api/src/test/java/org/apache/iceberg/metrics/TestDefaultCounter.java
index f77da34..3e606c2 100644
--- a/api/src/test/java/org/apache/iceberg/metrics/TestDefaultCounter.java
+++ b/api/src/test/java/org/apache/iceberg/metrics/TestDefaultCounter.java
@@ -18,24 +18,26 @@
*/
package org.apache.iceberg.metrics;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
+
import org.apache.iceberg.metrics.MetricsContext.Unit;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
public class TestDefaultCounter {
@Test
public void nullCheck() {
- Assertions.assertThatThrownBy(() -> new DefaultMetricsContext().counter("test", null))
+ assertThatThrownBy(() -> new DefaultMetricsContext().counter("test", null))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Invalid count unit: null");
}
@Test
public void noop() {
- Assertions.assertThat(DefaultCounter.NOOP.unit()).isEqualTo(Unit.UNDEFINED);
- Assertions.assertThat(DefaultCounter.NOOP.isNoop()).isTrue();
- Assertions.assertThatThrownBy(DefaultCounter.NOOP::value)
+ assertThat(DefaultCounter.NOOP.unit()).isEqualTo(Unit.UNDEFINED);
+ assertThat(DefaultCounter.NOOP.isNoop()).isTrue();
+ assertThatThrownBy(DefaultCounter.NOOP::value)
.isInstanceOf(UnsupportedOperationException.class)
.hasMessage("NOOP counter has no value");
}
@@ -45,8 +47,8 @@
Counter counter = new DefaultCounter(Unit.BYTES);
counter.increment();
counter.increment(5L);
- Assertions.assertThat(counter.value()).isEqualTo(6L);
- Assertions.assertThat(counter.unit()).isEqualTo(MetricsContext.Unit.BYTES);
- Assertions.assertThat(counter.isNoop()).isFalse();
+ assertThat(counter.value()).isEqualTo(6L);
+ assertThat(counter.unit()).isEqualTo(MetricsContext.Unit.BYTES);
+ assertThat(counter.isNoop()).isFalse();
}
}
diff --git a/api/src/test/java/org/apache/iceberg/metrics/TestDefaultMetricsContext.java b/api/src/test/java/org/apache/iceberg/metrics/TestDefaultMetricsContext.java
index 3819485..68ca208 100644
--- a/api/src/test/java/org/apache/iceberg/metrics/TestDefaultMetricsContext.java
+++ b/api/src/test/java/org/apache/iceberg/metrics/TestDefaultMetricsContext.java
@@ -18,11 +18,12 @@
*/
package org.apache.iceberg.metrics;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
import static org.assertj.core.api.Assertions.withinPercentage;
import java.time.Duration;
import java.util.concurrent.TimeUnit;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
public class TestDefaultMetricsContext {
@@ -30,7 +31,7 @@
@Test
public void unsupportedCounter() {
MetricsContext metricsContext = new DefaultMetricsContext();
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() -> metricsContext.counter("test", Double.class, MetricsContext.Unit.COUNT))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Counter for type java.lang.Double is not supported");
@@ -38,8 +39,7 @@
@Test
public void intCounterNullCheck() {
- Assertions.assertThatThrownBy(
- () -> new DefaultMetricsContext().counter("name", Integer.class, null))
+ assertThatThrownBy(() -> new DefaultMetricsContext().counter("name", Integer.class, null))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Invalid count unit: null");
}
@@ -50,8 +50,8 @@
MetricsContext.Counter<Integer> counter =
metricsContext.counter("intCounter", Integer.class, MetricsContext.Unit.BYTES);
counter.increment(5);
- Assertions.assertThat(counter.value()).isEqualTo(5);
- Assertions.assertThat(counter.unit()).isEqualTo(MetricsContext.Unit.BYTES);
+ assertThat(counter.value()).isEqualTo(5);
+ assertThat(counter.unit()).isEqualTo(MetricsContext.Unit.BYTES);
}
@Test
@@ -61,15 +61,14 @@
metricsContext.counter("test", Integer.class, MetricsContext.Unit.COUNT);
counter.increment(Integer.MAX_VALUE);
counter.increment();
- Assertions.assertThatThrownBy(counter::value)
+ assertThatThrownBy(counter::value)
.isInstanceOf(ArithmeticException.class)
.hasMessage("integer overflow");
}
@Test
public void longCounterNullCheck() {
- Assertions.assertThatThrownBy(
- () -> new DefaultMetricsContext().counter("name", Long.class, null))
+ assertThatThrownBy(() -> new DefaultMetricsContext().counter("name", Long.class, null))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Invalid count unit: null");
}
@@ -80,8 +79,8 @@
MetricsContext.Counter<Long> counter =
metricsContext.counter("longCounter", Long.class, MetricsContext.Unit.COUNT);
counter.increment(5L);
- Assertions.assertThat(counter.value()).isEqualTo(5L);
- Assertions.assertThat(counter.unit()).isEqualTo(MetricsContext.Unit.COUNT);
+ assertThat(counter.value()).isEqualTo(5L);
+ assertThat(counter.unit()).isEqualTo(MetricsContext.Unit.COUNT);
}
@Test
@@ -89,7 +88,7 @@
MetricsContext metricsContext = new DefaultMetricsContext();
Timer timer = metricsContext.timer("test", TimeUnit.MICROSECONDS);
timer.record(10, TimeUnit.MINUTES);
- Assertions.assertThat(timer.totalDuration()).isEqualTo(Duration.ofMinutes(10L));
+ assertThat(timer.totalDuration()).isEqualTo(Duration.ofMinutes(10L));
}
@Test
@@ -101,18 +100,18 @@
histogram.update(i);
}
- Assertions.assertThat(histogram.count()).isEqualTo(reservoirSize);
+ assertThat(histogram.count()).isEqualTo(reservoirSize);
Histogram.Statistics statistics = histogram.statistics();
- Assertions.assertThat(statistics.size()).isEqualTo(reservoirSize);
- Assertions.assertThat(statistics.mean()).isEqualTo(500.5);
- Assertions.assertThat(statistics.stdDev()).isCloseTo(288.67499, withinPercentage(0.001));
- Assertions.assertThat(statistics.max()).isEqualTo(1000L);
- Assertions.assertThat(statistics.min()).isEqualTo(1L);
- Assertions.assertThat(statistics.percentile(0.50)).isEqualTo(500);
- Assertions.assertThat(statistics.percentile(0.75)).isEqualTo(750);
- Assertions.assertThat(statistics.percentile(0.90)).isEqualTo(900);
- Assertions.assertThat(statistics.percentile(0.95)).isEqualTo(950);
- Assertions.assertThat(statistics.percentile(0.99)).isEqualTo(990);
- Assertions.assertThat(statistics.percentile(0.999)).isEqualTo(999);
+ assertThat(statistics.size()).isEqualTo(reservoirSize);
+ assertThat(statistics.mean()).isEqualTo(500.5);
+ assertThat(statistics.stdDev()).isCloseTo(288.67499, withinPercentage(0.001));
+ assertThat(statistics.max()).isEqualTo(1000L);
+ assertThat(statistics.min()).isEqualTo(1L);
+ assertThat(statistics.percentile(0.50)).isEqualTo(500);
+ assertThat(statistics.percentile(0.75)).isEqualTo(750);
+ assertThat(statistics.percentile(0.90)).isEqualTo(900);
+ assertThat(statistics.percentile(0.95)).isEqualTo(950);
+ assertThat(statistics.percentile(0.99)).isEqualTo(990);
+ assertThat(statistics.percentile(0.999)).isEqualTo(999);
}
}
diff --git a/api/src/test/java/org/apache/iceberg/metrics/TestDefaultTimer.java b/api/src/test/java/org/apache/iceberg/metrics/TestDefaultTimer.java
index 950bbc9..a071888 100644
--- a/api/src/test/java/org/apache/iceberg/metrics/TestDefaultTimer.java
+++ b/api/src/test/java/org/apache/iceberg/metrics/TestDefaultTimer.java
@@ -20,6 +20,8 @@
import static java.util.concurrent.Executors.newFixedThreadPool;
import static java.util.concurrent.TimeUnit.SECONDS;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
import java.time.Duration;
import java.util.List;
@@ -31,14 +33,13 @@
import java.util.function.Supplier;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
public class TestDefaultTimer {
@Test
public void nullCheck() {
- Assertions.assertThatThrownBy(() -> new DefaultTimer(null))
+ assertThatThrownBy(() -> new DefaultTimer(null))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Invalid time unit: null");
}
@@ -46,20 +47,20 @@
@Test
public void nameAndUnit() {
DefaultTimer timer = new DefaultTimer(TimeUnit.MINUTES);
- Assertions.assertThat(timer.unit()).isEqualTo(TimeUnit.MINUTES);
- Assertions.assertThat(timer.isNoop()).isFalse();
+ assertThat(timer.unit()).isEqualTo(TimeUnit.MINUTES);
+ assertThat(timer.isNoop()).isFalse();
}
@Test
public void noop() {
- Assertions.assertThat(Timer.NOOP.isNoop()).isTrue();
- Assertions.assertThatThrownBy(Timer.NOOP::count)
+ assertThat(Timer.NOOP.isNoop()).isTrue();
+ assertThatThrownBy(Timer.NOOP::count)
.isInstanceOf(UnsupportedOperationException.class)
.hasMessage("NOOP timer has no count");
- Assertions.assertThatThrownBy(Timer.NOOP::totalDuration)
+ assertThatThrownBy(Timer.NOOP::totalDuration)
.isInstanceOf(UnsupportedOperationException.class)
.hasMessage("NOOP timer has no duration");
- Assertions.assertThatThrownBy(Timer.NOOP::unit)
+ assertThatThrownBy(Timer.NOOP::unit)
.isInstanceOf(UnsupportedOperationException.class)
.hasMessage("NOOP timer has no unit");
}
@@ -67,12 +68,12 @@
@Test
public void recordNegativeAmount() {
Timer timer = new DefaultTimer(TimeUnit.NANOSECONDS);
- Assertions.assertThat(timer.count()).isEqualTo(0);
- Assertions.assertThatThrownBy(() -> timer.record(-1, TimeUnit.NANOSECONDS))
+ assertThat(timer.count()).isEqualTo(0);
+ assertThatThrownBy(() -> timer.record(-1, TimeUnit.NANOSECONDS))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot record -1 NANOSECONDS: must be >= 0");
- Assertions.assertThat(timer.count()).isEqualTo(0);
- Assertions.assertThat(timer.totalDuration()).isEqualTo(Duration.ZERO);
+ assertThat(timer.count()).isEqualTo(0);
+ assertThat(timer.totalDuration()).isEqualTo(Duration.ZERO);
}
@Test
@@ -81,7 +82,7 @@
Timer.Timed timed = timer.start();
timed.stop();
// we didn't start the timer again
- Assertions.assertThatThrownBy(timed::stop)
+ assertThatThrownBy(timed::stop)
.isInstanceOf(IllegalStateException.class)
.hasMessage("stop() called multiple times");
}
@@ -89,13 +90,13 @@
@Test
public void closeableTimer() throws InterruptedException {
Timer timer = new DefaultTimer(TimeUnit.NANOSECONDS);
- Assertions.assertThat(timer.count()).isEqualTo(0);
- Assertions.assertThat(timer.totalDuration()).isEqualTo(Duration.ZERO);
+ assertThat(timer.count()).isEqualTo(0);
+ assertThat(timer.totalDuration()).isEqualTo(Duration.ZERO);
try (Timer.Timed sample = timer.start()) {
Thread.sleep(500L);
}
- Assertions.assertThat(timer.count()).isEqualTo(1);
- Assertions.assertThat(timer.totalDuration()).isGreaterThan(Duration.ZERO);
+ assertThat(timer.count()).isEqualTo(1);
+ assertThat(timer.totalDuration()).isGreaterThan(Duration.ZERO);
}
@Test
@@ -109,18 +110,18 @@
throw new RuntimeException(e);
}
};
- Assertions.assertThat(timer.count()).isEqualTo(0);
- Assertions.assertThat(timer.totalDuration()).isEqualTo(Duration.ZERO);
+ assertThat(timer.count()).isEqualTo(0);
+ assertThat(timer.totalDuration()).isEqualTo(Duration.ZERO);
timer.time(runnable);
- Assertions.assertThat(timer.count()).isEqualTo(1);
+ assertThat(timer.count()).isEqualTo(1);
Duration duration = timer.totalDuration();
- Assertions.assertThat(duration).isGreaterThan(Duration.ZERO);
+ assertThat(duration).isGreaterThan(Duration.ZERO);
timer.time(runnable);
- Assertions.assertThat(timer.count()).isEqualTo(2);
+ assertThat(timer.count()).isEqualTo(2);
Duration secondDuration = timer.totalDuration();
- Assertions.assertThat(secondDuration).isGreaterThan(duration);
+ assertThat(secondDuration).isGreaterThan(duration);
}
@Test
@@ -135,18 +136,18 @@
}
return true;
};
- Assertions.assertThat(timer.count()).isEqualTo(0);
- Assertions.assertThat(timer.totalDuration()).isEqualTo(Duration.ZERO);
+ assertThat(timer.count()).isEqualTo(0);
+ assertThat(timer.totalDuration()).isEqualTo(Duration.ZERO);
- Assertions.assertThat(timer.timeCallable(callable).booleanValue()).isTrue();
- Assertions.assertThat(timer.count()).isEqualTo(1);
+ assertThat(timer.timeCallable(callable).booleanValue()).isTrue();
+ assertThat(timer.count()).isEqualTo(1);
Duration duration = timer.totalDuration();
- Assertions.assertThat(duration).isGreaterThan(Duration.ZERO);
+ assertThat(duration).isGreaterThan(Duration.ZERO);
- Assertions.assertThat(timer.timeCallable(callable).booleanValue()).isTrue();
- Assertions.assertThat(timer.count()).isEqualTo(2);
+ assertThat(timer.timeCallable(callable).booleanValue()).isTrue();
+ assertThat(timer.count()).isEqualTo(2);
Duration secondDuration = timer.totalDuration();
- Assertions.assertThat(secondDuration).isGreaterThan(duration);
+ assertThat(secondDuration).isGreaterThan(duration);
}
@Test
@@ -161,18 +162,18 @@
}
return true;
};
- Assertions.assertThat(timer.count()).isEqualTo(0);
- Assertions.assertThat(timer.totalDuration()).isEqualTo(Duration.ZERO);
+ assertThat(timer.count()).isEqualTo(0);
+ assertThat(timer.totalDuration()).isEqualTo(Duration.ZERO);
- Assertions.assertThat(timer.time(supplier).booleanValue()).isTrue();
- Assertions.assertThat(timer.count()).isEqualTo(1);
+ assertThat(timer.time(supplier).booleanValue()).isTrue();
+ assertThat(timer.count()).isEqualTo(1);
Duration duration = timer.totalDuration();
- Assertions.assertThat(duration).isGreaterThan(Duration.ZERO);
+ assertThat(duration).isGreaterThan(Duration.ZERO);
- Assertions.assertThat(timer.time(supplier).booleanValue()).isTrue();
- Assertions.assertThat(timer.count()).isEqualTo(2);
+ assertThat(timer.time(supplier).booleanValue()).isTrue();
+ assertThat(timer.count()).isEqualTo(2);
Duration secondDuration = timer.totalDuration();
- Assertions.assertThat(secondDuration).isGreaterThan(duration);
+ assertThat(secondDuration).isGreaterThan(duration);
}
@Test
@@ -198,19 +199,19 @@
}
};
- Assertions.assertThat(timer.count()).isEqualTo(0);
- Assertions.assertThat(timer.totalDuration()).isEqualTo(Duration.ZERO);
- Assertions.assertThat(innerTimer.count()).isEqualTo(0);
- Assertions.assertThat(innerTimer.totalDuration()).isEqualTo(Duration.ZERO);
+ assertThat(timer.count()).isEqualTo(0);
+ assertThat(timer.totalDuration()).isEqualTo(Duration.ZERO);
+ assertThat(innerTimer.count()).isEqualTo(0);
+ assertThat(innerTimer.totalDuration()).isEqualTo(Duration.ZERO);
timer.time(outer);
- Assertions.assertThat(timer.count()).isEqualTo(1);
+ assertThat(timer.count()).isEqualTo(1);
Duration outerDuration = timer.totalDuration();
- Assertions.assertThat(outerDuration).isGreaterThan(Duration.ZERO);
- Assertions.assertThat(innerTimer.count()).isEqualTo(1);
+ assertThat(outerDuration).isGreaterThan(Duration.ZERO);
+ assertThat(innerTimer.count()).isEqualTo(1);
Duration innerDuration = innerTimer.totalDuration();
- Assertions.assertThat(innerDuration).isGreaterThan(Duration.ZERO);
- Assertions.assertThat(outerDuration).isGreaterThan(innerDuration);
+ assertThat(innerDuration).isGreaterThan(Duration.ZERO);
+ assertThat(outerDuration).isGreaterThan(innerDuration);
}
@Test
@@ -249,7 +250,7 @@
executor.shutdownNow();
executor.awaitTermination(5, SECONDS);
- Assertions.assertThat(timer.totalDuration()).isEqualTo(Duration.ofNanos(5 * threads));
- Assertions.assertThat(timer.count()).isEqualTo(threads);
+ assertThat(timer.totalDuration()).isEqualTo(Duration.ofNanos(5 * threads));
+ assertThat(timer.count()).isEqualTo(threads);
}
}
diff --git a/api/src/test/java/org/apache/iceberg/metrics/TestFixedReservoirHistogram.java b/api/src/test/java/org/apache/iceberg/metrics/TestFixedReservoirHistogram.java
index 25e323c..018c2c3 100644
--- a/api/src/test/java/org/apache/iceberg/metrics/TestFixedReservoirHistogram.java
+++ b/api/src/test/java/org/apache/iceberg/metrics/TestFixedReservoirHistogram.java
@@ -20,6 +20,8 @@
import static java.util.concurrent.Executors.newFixedThreadPool;
import static java.util.concurrent.TimeUnit.SECONDS;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
import static org.assertj.core.api.Assertions.withinPercentage;
import java.util.List;
@@ -28,37 +30,36 @@
import java.util.concurrent.Future;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
public class TestFixedReservoirHistogram {
@Test
public void emptyHistogram() {
FixedReservoirHistogram histogram = new FixedReservoirHistogram(100);
- Assertions.assertThat(histogram.count()).isEqualTo(0);
+ assertThat(histogram.count()).isEqualTo(0);
Histogram.Statistics statistics = histogram.statistics();
- Assertions.assertThat(statistics.size()).isEqualTo(0);
- Assertions.assertThat(statistics.mean()).isEqualTo(0.0);
- Assertions.assertThat(statistics.stdDev()).isEqualTo(0.0);
- Assertions.assertThat(statistics.max()).isEqualTo(0L);
- Assertions.assertThat(statistics.min()).isEqualTo(0L);
- Assertions.assertThat(statistics.percentile(0.50)).isEqualTo(0L);
- Assertions.assertThat(statistics.percentile(0.99)).isEqualTo(0L);
+ assertThat(statistics.size()).isEqualTo(0);
+ assertThat(statistics.mean()).isEqualTo(0.0);
+ assertThat(statistics.stdDev()).isEqualTo(0.0);
+ assertThat(statistics.max()).isEqualTo(0L);
+ assertThat(statistics.min()).isEqualTo(0L);
+ assertThat(statistics.percentile(0.50)).isEqualTo(0L);
+ assertThat(statistics.percentile(0.99)).isEqualTo(0L);
}
@Test
public void singleObservation() {
FixedReservoirHistogram histogram = new FixedReservoirHistogram(100);
histogram.update(123L);
- Assertions.assertThat(histogram.count()).isEqualTo(1);
+ assertThat(histogram.count()).isEqualTo(1);
Histogram.Statistics statistics = histogram.statistics();
- Assertions.assertThat(statistics.size()).isEqualTo(1);
- Assertions.assertThat(statistics.mean()).isEqualTo(123.0);
- Assertions.assertThat(statistics.stdDev()).isEqualTo(0.0);
- Assertions.assertThat(statistics.max()).isEqualTo(123L);
- Assertions.assertThat(statistics.min()).isEqualTo(123L);
- Assertions.assertThat(statistics.percentile(0.50)).isEqualTo(123L);
- Assertions.assertThat(statistics.percentile(0.99)).isEqualTo(123L);
+ assertThat(statistics.size()).isEqualTo(1);
+ assertThat(statistics.mean()).isEqualTo(123.0);
+ assertThat(statistics.stdDev()).isEqualTo(0.0);
+ assertThat(statistics.max()).isEqualTo(123L);
+ assertThat(statistics.min()).isEqualTo(123L);
+ assertThat(statistics.percentile(0.50)).isEqualTo(123L);
+ assertThat(statistics.percentile(0.99)).isEqualTo(123L);
}
@Test
@@ -70,8 +71,8 @@
}
Histogram.Statistics statistics = histogram.statistics();
- Assertions.assertThat(statistics.percentile(0.0)).isEqualTo(0L);
- Assertions.assertThat(statistics.percentile(1.0)).isEqualTo(99L);
+ assertThat(statistics.percentile(0.0)).isEqualTo(0L);
+ assertThat(statistics.percentile(1.0)).isEqualTo(99L);
}
@Test
@@ -84,11 +85,11 @@
Histogram.Statistics statistics = histogram.statistics();
- Assertions.assertThatThrownBy(() -> statistics.percentile(-0.1))
+ assertThatThrownBy(() -> statistics.percentile(-0.1))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Percentile point cannot be outside the range of [0.0 - 1.0]: " + -0.1);
- Assertions.assertThatThrownBy(() -> statistics.percentile(1.1))
+ assertThatThrownBy(() -> statistics.percentile(1.1))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Percentile point cannot be outside the range of [0.0 - 1.0]: " + 1.1);
}
@@ -136,17 +137,17 @@
executor.awaitTermination(5, SECONDS);
Histogram.Statistics statistics = histogram.statistics();
- Assertions.assertThat(histogram.count()).isEqualTo(totalSamples);
- Assertions.assertThat(statistics.size()).isEqualTo(totalSamples);
- Assertions.assertThat(statistics.mean()).isEqualTo(500.5);
- Assertions.assertThat(statistics.stdDev()).isCloseTo(288.67499, withinPercentage(0.001));
- Assertions.assertThat(statistics.max()).isEqualTo(1000L);
- Assertions.assertThat(statistics.min()).isEqualTo(1L);
- Assertions.assertThat(statistics.percentile(0.50)).isEqualTo(500);
- Assertions.assertThat(statistics.percentile(0.75)).isEqualTo(750);
- Assertions.assertThat(statistics.percentile(0.90)).isEqualTo(900);
- Assertions.assertThat(statistics.percentile(0.95)).isEqualTo(950);
- Assertions.assertThat(statistics.percentile(0.99)).isEqualTo(990);
- Assertions.assertThat(statistics.percentile(0.999)).isEqualTo(999);
+ assertThat(histogram.count()).isEqualTo(totalSamples);
+ assertThat(statistics.size()).isEqualTo(totalSamples);
+ assertThat(statistics.mean()).isEqualTo(500.5);
+ assertThat(statistics.stdDev()).isCloseTo(288.67499, withinPercentage(0.001));
+ assertThat(statistics.max()).isEqualTo(1000L);
+ assertThat(statistics.min()).isEqualTo(1L);
+ assertThat(statistics.percentile(0.50)).isEqualTo(500);
+ assertThat(statistics.percentile(0.75)).isEqualTo(750);
+ assertThat(statistics.percentile(0.90)).isEqualTo(900);
+ assertThat(statistics.percentile(0.95)).isEqualTo(950);
+ assertThat(statistics.percentile(0.99)).isEqualTo(990);
+ assertThat(statistics.percentile(0.999)).isEqualTo(999);
}
}
diff --git a/api/src/test/java/org/apache/iceberg/transforms/TestBucketing.java b/api/src/test/java/org/apache/iceberg/transforms/TestBucketing.java
index b8a0e40..f397f94 100644
--- a/api/src/test/java/org/apache/iceberg/transforms/TestBucketing.java
+++ b/api/src/test/java/org/apache/iceberg/transforms/TestBucketing.java
@@ -19,6 +19,7 @@
package org.apache.iceberg.transforms;
import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
@@ -34,7 +35,6 @@
import org.apache.iceberg.relocated.com.google.common.hash.Hashing;
import org.apache.iceberg.types.Types;
import org.apache.iceberg.util.BucketUtil;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
@@ -356,7 +356,7 @@
@Test
public void testVerifiedIllegalNumBuckets() {
- Assertions.assertThatThrownBy(() -> Bucket.get(0))
+ assertThatThrownBy(() -> Bucket.get(0))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Invalid number of buckets: 0 (must be > 0)");
}
diff --git a/api/src/test/java/org/apache/iceberg/transforms/TestProjection.java b/api/src/test/java/org/apache/iceberg/transforms/TestProjection.java
index ccfda89..6389e26 100644
--- a/api/src/test/java/org/apache/iceberg/transforms/TestProjection.java
+++ b/api/src/test/java/org/apache/iceberg/transforms/TestProjection.java
@@ -34,6 +34,7 @@
import static org.apache.iceberg.types.Types.NestedField.optional;
import static org.apache.iceberg.types.Types.NestedField.required;
import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
import java.util.List;
import org.apache.iceberg.PartitionSpec;
@@ -47,7 +48,6 @@
import org.apache.iceberg.expressions.UnboundPredicate;
import org.apache.iceberg.relocated.com.google.common.collect.Lists;
import org.apache.iceberg.types.Types;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
public class TestProjection {
@@ -130,8 +130,7 @@
@Test
public void testCaseSensitiveIdentityProjection() {
PartitionSpec spec = PartitionSpec.builderFor(SCHEMA).identity("id").build();
- Assertions.assertThatThrownBy(
- () -> Projections.inclusive(spec, true).project(Expressions.notNull("ID")))
+ assertThatThrownBy(() -> Projections.inclusive(spec, true).project(Expressions.notNull("ID")))
.isInstanceOf(ValidationException.class)
.hasMessageContaining("Cannot find field 'ID' in struct");
}
@@ -213,8 +212,7 @@
@Test
public void testCaseSensitiveStrictIdentityProjection() {
PartitionSpec spec = PartitionSpec.builderFor(SCHEMA).identity("id").build();
- Assertions.assertThatThrownBy(
- () -> Projections.strict(spec, true).project(Expressions.notNull("ID")))
+ assertThatThrownBy(() -> Projections.strict(spec, true).project(Expressions.notNull("ID")))
.isInstanceOf(ValidationException.class)
.hasMessageContaining("Cannot find field 'ID' in struct");
}
@@ -248,12 +246,12 @@
Expression projection = Projections.inclusive(spec).project(filter);
- Assertions.assertThat(projection).isInstanceOf(Or.class);
+ assertThat(projection).isInstanceOf(Or.class);
Or or1 = (Or) projection;
UnboundPredicate<?> dateint1 = assertAndUnwrapUnbound(or1.left());
assertThat(dateint1.ref().name()).as("Should be a dateint predicate").isEqualTo("dateint");
assertThat(dateint1.literal().value()).as("Should be dateint=20180416").isEqualTo(20180416);
- Assertions.assertThat(or1.right()).isInstanceOf(Or.class);
+ assertThat(or1.right()).isInstanceOf(Or.class);
Or or2 = (Or) or1.right();
UnboundPredicate<?> dateint2 = assertAndUnwrapUnbound(or2.left());
assertThat(dateint2.ref().name()).as("Should be a dateint predicate").isEqualTo("dateint");
diff --git a/api/src/test/java/org/apache/iceberg/transforms/TestResiduals.java b/api/src/test/java/org/apache/iceberg/transforms/TestResiduals.java
index fa3436e..12214fc 100644
--- a/api/src/test/java/org/apache/iceberg/transforms/TestResiduals.java
+++ b/api/src/test/java/org/apache/iceberg/transforms/TestResiduals.java
@@ -33,6 +33,7 @@
import static org.apache.iceberg.expressions.Expressions.notNaN;
import static org.apache.iceberg.expressions.Expressions.or;
import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
import java.util.function.Function;
import org.apache.iceberg.PartitionSpec;
@@ -45,7 +46,6 @@
import org.apache.iceberg.expressions.ResidualEvaluator;
import org.apache.iceberg.expressions.UnboundPredicate;
import org.apache.iceberg.types.Types;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
public class TestResiduals {
@@ -144,7 +144,7 @@
ResidualEvaluator resEval = ResidualEvaluator.of(spec, lessThan("DATEINT", 20170815), true);
- Assertions.assertThatThrownBy(() -> resEval.residualFor(Row.of(20170815)))
+ assertThatThrownBy(() -> resEval.residualFor(Row.of(20170815)))
.isInstanceOf(ValidationException.class)
.hasMessageContaining("Cannot find field 'DATEINT' in struct");
}
diff --git a/api/src/test/java/org/apache/iceberg/transforms/TestStartsWith.java b/api/src/test/java/org/apache/iceberg/transforms/TestStartsWith.java
index d8aa59e..a9a992e 100644
--- a/api/src/test/java/org/apache/iceberg/transforms/TestStartsWith.java
+++ b/api/src/test/java/org/apache/iceberg/transforms/TestStartsWith.java
@@ -35,7 +35,6 @@
import org.apache.iceberg.expressions.Projections;
import org.apache.iceberg.expressions.UnboundPredicate;
import org.apache.iceberg.types.Types;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
public class TestStartsWith {
@@ -57,7 +56,7 @@
assertProjectionStrict(spec, startsWith(COLUMN, "abab"), "abab", Expression.Operation.EQ);
Expression projection = Projections.strict(spec).project(startsWith(COLUMN, "ababab"));
- Assertions.assertThat(projection).isInstanceOf(False.class);
+ assertThat(projection).isInstanceOf(False.class);
}
@Test
diff --git a/api/src/test/java/org/apache/iceberg/transforms/TestTruncate.java b/api/src/test/java/org/apache/iceberg/transforms/TestTruncate.java
index e9ee6cf..68527b0 100644
--- a/api/src/test/java/org/apache/iceberg/transforms/TestTruncate.java
+++ b/api/src/test/java/org/apache/iceberg/transforms/TestTruncate.java
@@ -19,13 +19,13 @@
package org.apache.iceberg.transforms;
import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
import java.math.BigDecimal;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import java.util.function.Function;
import org.apache.iceberg.types.Types;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
public class TestTruncate {
@@ -112,7 +112,7 @@
@Test
public void testVerifiedIllegalWidth() {
- Assertions.assertThatThrownBy(() -> Truncate.get(0))
+ assertThatThrownBy(() -> Truncate.get(0))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Invalid truncate width: 0 (must be > 0)");
}
diff --git a/api/src/test/java/org/apache/iceberg/types/TestTypeUtil.java b/api/src/test/java/org/apache/iceberg/types/TestTypeUtil.java
index 81f32f8..e8db093 100644
--- a/api/src/test/java/org/apache/iceberg/types/TestTypeUtil.java
+++ b/api/src/test/java/org/apache/iceberg/types/TestTypeUtil.java
@@ -28,7 +28,6 @@
import org.apache.iceberg.relocated.com.google.common.collect.Lists;
import org.apache.iceberg.relocated.com.google.common.collect.Sets;
import org.apache.iceberg.types.Types.IntegerType;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
public class TestTypeUtil {
@@ -445,15 +444,15 @@
required(17, "x", Types.IntegerType.get()),
required(18, "y", Types.IntegerType.get()))))))));
- Assertions.assertThatThrownBy(() -> TypeUtil.project(schema, Sets.newHashSet(12)))
+ assertThatThrownBy(() -> TypeUtil.project(schema, Sets.newHashSet(12)))
.isInstanceOf(IllegalArgumentException.class)
.hasMessageContaining("Cannot explicitly project List or Map types");
- Assertions.assertThatThrownBy(() -> TypeUtil.project(schema, Sets.newHashSet(13)))
+ assertThatThrownBy(() -> TypeUtil.project(schema, Sets.newHashSet(13)))
.isInstanceOf(IllegalArgumentException.class)
.hasMessageContaining("Cannot explicitly project List or Map types");
- Assertions.assertThatThrownBy(() -> TypeUtil.project(schema, Sets.newHashSet(14)))
+ assertThatThrownBy(() -> TypeUtil.project(schema, Sets.newHashSet(14)))
.isInstanceOf(IllegalArgumentException.class)
.hasMessageContaining("Cannot explicitly project List or Map types");
@@ -496,15 +495,15 @@
required(18, "x", Types.IntegerType.get()),
required(19, "y", Types.IntegerType.get()))))))));
- Assertions.assertThatThrownBy(() -> TypeUtil.project(schema, Sets.newHashSet(12)))
+ assertThatThrownBy(() -> TypeUtil.project(schema, Sets.newHashSet(12)))
.isInstanceOf(IllegalArgumentException.class)
.hasMessageContaining("Cannot explicitly project List or Map types");
- Assertions.assertThatThrownBy(() -> TypeUtil.project(schema, Sets.newHashSet(14)))
+ assertThatThrownBy(() -> TypeUtil.project(schema, Sets.newHashSet(14)))
.isInstanceOf(IllegalArgumentException.class)
.hasMessageContaining("Cannot explicitly project List or Map types");
- Assertions.assertThatThrownBy(() -> TypeUtil.project(schema, Sets.newHashSet(16)))
+ assertThatThrownBy(() -> TypeUtil.project(schema, Sets.newHashSet(16)))
.isInstanceOf(IllegalArgumentException.class)
.hasMessageContaining("Cannot explicitly project List or Map types");
@@ -534,7 +533,7 @@
new Schema(
required(1, "a", Types.IntegerType.get()), required(2, "b", Types.IntegerType.get()));
Schema sourceSchema = new Schema(required(1, "a", Types.IntegerType.get()));
- Assertions.assertThatThrownBy(() -> TypeUtil.reassignIds(schema, sourceSchema))
+ assertThatThrownBy(() -> TypeUtil.reassignIds(schema, sourceSchema))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Field b not found in source schema");
}
@@ -549,7 +548,7 @@
required(2, "b", Types.StructType.of(required(3, "c", Types.BooleanType.get()))),
required(4, "b.c", Types.BooleanType.get())));
- Assertions.assertThatThrownBy(() -> TypeUtil.indexByName(Types.StructType.of(nestedType)))
+ assertThatThrownBy(() -> TypeUtil.indexByName(Types.StructType.of(nestedType)))
.isInstanceOf(RuntimeException.class)
.hasMessageContaining("Invalid schema: multiple fields for name a.b.c");
}
diff --git a/api/src/test/java/org/apache/iceberg/types/TestTypes.java b/api/src/test/java/org/apache/iceberg/types/TestTypes.java
index ca5c6ed..5db7ca7 100644
--- a/api/src/test/java/org/apache/iceberg/types/TestTypes.java
+++ b/api/src/test/java/org/apache/iceberg/types/TestTypes.java
@@ -18,29 +18,27 @@
*/
package org.apache.iceberg.types;
-import org.assertj.core.api.Assertions;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatExceptionOfType;
+
import org.junit.jupiter.api.Test;
public class TestTypes {
@Test
public void fromPrimitiveString() {
- Assertions.assertThat(Types.fromPrimitiveString("boolean")).isSameAs(Types.BooleanType.get());
- Assertions.assertThat(Types.fromPrimitiveString("BooLean")).isSameAs(Types.BooleanType.get());
+ assertThat(Types.fromPrimitiveString("boolean")).isSameAs(Types.BooleanType.get());
+ assertThat(Types.fromPrimitiveString("BooLean")).isSameAs(Types.BooleanType.get());
- Assertions.assertThat(Types.fromPrimitiveString("timestamp"))
- .isSameAs(Types.TimestampType.withoutZone());
+ assertThat(Types.fromPrimitiveString("timestamp")).isSameAs(Types.TimestampType.withoutZone());
- Assertions.assertThat(Types.fromPrimitiveString("Fixed[ 3 ]"))
- .isEqualTo(Types.FixedType.ofLength(3));
+ assertThat(Types.fromPrimitiveString("Fixed[ 3 ]")).isEqualTo(Types.FixedType.ofLength(3));
- Assertions.assertThat(Types.fromPrimitiveString("Decimal( 2 , 3 )"))
- .isEqualTo(Types.DecimalType.of(2, 3));
+ assertThat(Types.fromPrimitiveString("Decimal( 2 , 3 )")).isEqualTo(Types.DecimalType.of(2, 3));
- Assertions.assertThat(Types.fromPrimitiveString("Decimal(2,3)"))
- .isEqualTo(Types.DecimalType.of(2, 3));
+ assertThat(Types.fromPrimitiveString("Decimal(2,3)")).isEqualTo(Types.DecimalType.of(2, 3));
- Assertions.assertThatExceptionOfType(IllegalArgumentException.class)
+ assertThatExceptionOfType(IllegalArgumentException.class)
.isThrownBy(() -> Types.fromPrimitiveString("Unknown"))
.withMessageContaining("Unknown");
}
diff --git a/api/src/test/java/org/apache/iceberg/util/TestCharSequenceMap.java b/api/src/test/java/org/apache/iceberg/util/TestCharSequenceMap.java
index 8ca7889..2154cf3 100644
--- a/api/src/test/java/org/apache/iceberg/util/TestCharSequenceMap.java
+++ b/api/src/test/java/org/apache/iceberg/util/TestCharSequenceMap.java
@@ -25,15 +25,14 @@
import java.util.concurrent.TimeUnit;
import org.apache.iceberg.relocated.com.google.common.collect.ImmutableList;
import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
public class TestCharSequenceMap {
@Test
public void nullString() {
- Assertions.assertThat(CharSequenceMap.create()).doesNotContainKey((String) null);
- Assertions.assertThat(CharSequenceMap.create()).doesNotContainValue((String) null);
+ assertThat(CharSequenceMap.create()).doesNotContainKey((String) null);
+ assertThat(CharSequenceMap.create()).doesNotContainValue((String) null);
}
@Test
diff --git a/api/src/test/java/org/apache/iceberg/util/TestCharSequenceSet.java b/api/src/test/java/org/apache/iceberg/util/TestCharSequenceSet.java
index b0f242c..324742c 100644
--- a/api/src/test/java/org/apache/iceberg/util/TestCharSequenceSet.java
+++ b/api/src/test/java/org/apache/iceberg/util/TestCharSequenceSet.java
@@ -18,12 +18,13 @@
*/
package org.apache.iceberg.util;
+import static org.assertj.core.api.Assertions.assertThat;
+
import java.util.Arrays;
import java.util.Collections;
import java.util.Set;
import org.apache.iceberg.relocated.com.google.common.collect.ImmutableList;
import org.apache.iceberg.relocated.com.google.common.collect.ImmutableSet;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
public class TestCharSequenceSet {
@@ -32,60 +33,60 @@
@Test
public void testSearchingInCharSequenceCollection() {
Set<CharSequence> set = CharSequenceSet.of(Arrays.asList("abc", new StringBuffer("def")));
- Assertions.assertThat(set).contains("abc");
- Assertions.assertThat(set.stream().anyMatch("def"::contains)).isTrue();
+ assertThat(set).contains("abc");
+ assertThat(set.stream().anyMatch("def"::contains)).isTrue();
// this would fail with a normal Set<CharSequence>
- Assertions.assertThat(set.contains("def")).isTrue();
+ assertThat(set.contains("def")).isTrue();
}
@Test
public void nullString() {
- Assertions.assertThat(CharSequenceSet.of(Arrays.asList((String) null))).contains((String) null);
- Assertions.assertThat(CharSequenceSet.empty()).doesNotContain((String) null);
+ assertThat(CharSequenceSet.of(Arrays.asList((String) null))).contains((String) null);
+ assertThat(CharSequenceSet.empty()).doesNotContain((String) null);
}
@Test
public void testRetainAll() {
CharSequenceSet set = CharSequenceSet.of(ImmutableList.of("123", "456"));
- Assertions.assertThat(set.retainAll(ImmutableList.of("456", "789", 123)))
+ assertThat(set.retainAll(ImmutableList.of("456", "789", 123)))
.overridingErrorMessage("Set should be changed")
.isTrue();
- Assertions.assertThat(set).hasSize(1).contains("456");
+ assertThat(set).hasSize(1).contains("456");
set = CharSequenceSet.of(ImmutableList.of("123", "456"));
- Assertions.assertThat(set.retainAll(ImmutableList.of("123", "456")))
+ assertThat(set.retainAll(ImmutableList.of("123", "456")))
.overridingErrorMessage("Set should not be changed")
.isFalse();
- Assertions.assertThat(set.retainAll(ImmutableList.of(123, 456)))
+ assertThat(set.retainAll(ImmutableList.of(123, 456)))
.overridingErrorMessage("Set should be changed")
.isTrue();
- Assertions.assertThat(set).isEmpty();
+ assertThat(set).isEmpty();
}
@Test
public void testRemoveAll() {
CharSequenceSet set = CharSequenceSet.of(ImmutableList.of("123", "456"));
- Assertions.assertThat(set.removeAll(ImmutableList.of("456", "789", 123)))
+ assertThat(set.removeAll(ImmutableList.of("456", "789", 123)))
.overridingErrorMessage("Set should be changed")
.isTrue();
- Assertions.assertThat(set).hasSize(1).contains("123");
+ assertThat(set).hasSize(1).contains("123");
set = CharSequenceSet.of(ImmutableList.of("123", "456"));
- Assertions.assertThat(set.removeAll(ImmutableList.of(123, 456)))
+ assertThat(set.removeAll(ImmutableList.of(123, 456)))
.overridingErrorMessage("Set should not be changed")
.isFalse();
- Assertions.assertThat(set.removeAll(ImmutableList.of("123", "456")))
+ assertThat(set.removeAll(ImmutableList.of("123", "456")))
.overridingErrorMessage("Set should be changed")
.isTrue();
- Assertions.assertThat(set).isEmpty();
+ assertThat(set).isEmpty();
}
@Test
@@ -93,8 +94,8 @@
CharSequenceSet set1 = CharSequenceSet.empty();
CharSequenceSet set2 = CharSequenceSet.empty();
- Assertions.assertThat(set1).isEqualTo(set2);
- Assertions.assertThat(set1.hashCode()).isEqualTo(set2.hashCode());
+ assertThat(set1).isEqualTo(set2);
+ assertThat(set1.hashCode()).isEqualTo(set2.hashCode());
set1.add("v1");
set1.add("v2");
@@ -112,8 +113,8 @@
CharSequenceWrapper.wrap(new StringBuffer("v2")),
CharSequenceWrapper.wrap(new StringBuffer("v3")));
- Assertions.assertThat(set1).isEqualTo(set2).isEqualTo(set3).isEqualTo(set4);
- Assertions.assertThat(set1.hashCode())
+ assertThat(set1).isEqualTo(set2).isEqualTo(set3).isEqualTo(set4);
+ assertThat(set1.hashCode())
.isEqualTo(set2.hashCode())
.isEqualTo(set3.hashCode())
.isEqualTo(set4.hashCode());
diff --git a/api/src/test/java/org/apache/iceberg/util/TestDateTimeUtil.java b/api/src/test/java/org/apache/iceberg/util/TestDateTimeUtil.java
index cf6b9cc..42da3a3 100644
--- a/api/src/test/java/org/apache/iceberg/util/TestDateTimeUtil.java
+++ b/api/src/test/java/org/apache/iceberg/util/TestDateTimeUtil.java
@@ -18,8 +18,9 @@
*/
package org.apache.iceberg.util;
+import static org.assertj.core.api.Assertions.assertThat;
+
import java.time.ZonedDateTime;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
public class TestDateTimeUtil {
@@ -27,12 +28,11 @@
@Test
public void formatTimestampMillis() {
String timestamp = "1970-01-01T00:00:00.001+00:00";
- Assertions.assertThat(DateTimeUtil.formatTimestampMillis(1L)).isEqualTo(timestamp);
- Assertions.assertThat(ZonedDateTime.parse(timestamp).toInstant().toEpochMilli()).isEqualTo(1L);
+ assertThat(DateTimeUtil.formatTimestampMillis(1L)).isEqualTo(timestamp);
+ assertThat(ZonedDateTime.parse(timestamp).toInstant().toEpochMilli()).isEqualTo(1L);
timestamp = "1970-01-01T00:16:40+00:00";
- Assertions.assertThat(DateTimeUtil.formatTimestampMillis(1000000L)).isEqualTo(timestamp);
- Assertions.assertThat(ZonedDateTime.parse(timestamp).toInstant().toEpochMilli())
- .isEqualTo(1000000L);
+ assertThat(DateTimeUtil.formatTimestampMillis(1000000L)).isEqualTo(timestamp);
+ assertThat(ZonedDateTime.parse(timestamp).toInstant().toEpochMilli()).isEqualTo(1000000L);
}
}
diff --git a/api/src/test/java/org/apache/iceberg/util/TestExceptionUtil.java b/api/src/test/java/org/apache/iceberg/util/TestExceptionUtil.java
index f2106cc..e6c3cf5 100644
--- a/api/src/test/java/org/apache/iceberg/util/TestExceptionUtil.java
+++ b/api/src/test/java/org/apache/iceberg/util/TestExceptionUtil.java
@@ -18,9 +18,10 @@
*/
package org.apache.iceberg.util;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
+
import java.io.IOException;
import java.util.Arrays;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
public class TestExceptionUtil {
@@ -36,7 +37,7 @@
CustomCheckedException exc = new CustomCheckedException("test");
Exception suppressedOne = new Exception("test catch suppression");
RuntimeException suppressedTwo = new RuntimeException("test finally suppression");
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() ->
ExceptionUtil.runSafely(
() -> {
@@ -62,7 +63,7 @@
CustomCheckedException exc = new CustomCheckedException("test");
Exception suppressedOne = new Exception("test catch suppression");
RuntimeException suppressedTwo = new RuntimeException("test finally suppression");
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() ->
ExceptionUtil.runSafely(
(ExceptionUtil.Block<
@@ -91,7 +92,7 @@
CustomCheckedException exc = new CustomCheckedException("test");
Exception suppressedOne = new Exception("test catch suppression");
RuntimeException suppressedTwo = new RuntimeException("test finally suppression");
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() ->
ExceptionUtil.runSafely(
(ExceptionUtil.Block<
@@ -121,7 +122,7 @@
RuntimeException exc = new RuntimeException("test");
Exception suppressedOne = new Exception("test catch suppression");
CustomCheckedException suppressedTwo = new CustomCheckedException("test finally suppression");
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() ->
ExceptionUtil.runSafely(
() -> {
diff --git a/arrow/src/test/java/org/apache/iceberg/arrow/vectorized/ArrowReaderTest.java b/arrow/src/test/java/org/apache/iceberg/arrow/vectorized/ArrowReaderTest.java
index 9cd9c8c..9364aff 100644
--- a/arrow/src/test/java/org/apache/iceberg/arrow/vectorized/ArrowReaderTest.java
+++ b/arrow/src/test/java/org/apache/iceberg/arrow/vectorized/ArrowReaderTest.java
@@ -84,7 +84,6 @@
import org.apache.iceberg.relocated.com.google.common.collect.Maps;
import org.apache.iceberg.types.Types;
import org.apache.iceberg.util.UUIDUtil;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.io.TempDir;
@@ -653,7 +652,7 @@
// we need to use assertThat() here because it does a java.util.Objects.deepEquals() and
// that
// is relevant for byte[]
- Assertions.assertThat(actualValue).as("Row#" + i + " mismatches").isEqualTo(expectedValue);
+ assertThat(actualValue).as("Row#" + i + " mismatches").isEqualTo(expectedValue);
}
}
}
@@ -1205,7 +1204,7 @@
// we need to use assertThat() here because it does a java.util.Objects.deepEquals() and
// that
// is relevant for byte[]
- Assertions.assertThat(actualValue).as("Row#" + i + " mismatches").isEqualTo(expectedValue);
+ assertThat(actualValue).as("Row#" + i + " mismatches").isEqualTo(expectedValue);
}
}
}
diff --git a/arrow/src/test/java/org/apache/iceberg/arrow/vectorized/parquet/DecimalVectorUtilTest.java b/arrow/src/test/java/org/apache/iceberg/arrow/vectorized/parquet/DecimalVectorUtilTest.java
index 88e16c1..63e5a5f 100644
--- a/arrow/src/test/java/org/apache/iceberg/arrow/vectorized/parquet/DecimalVectorUtilTest.java
+++ b/arrow/src/test/java/org/apache/iceberg/arrow/vectorized/parquet/DecimalVectorUtilTest.java
@@ -19,9 +19,9 @@
package org.apache.iceberg.arrow.vectorized.parquet;
import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
import java.math.BigInteger;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
public class DecimalVectorUtilTest {
@@ -68,7 +68,7 @@
@Test
public void testPadBigEndianBytesOverflow() {
byte[] bytes = new byte[17];
- Assertions.assertThatThrownBy(() -> DecimalVectorUtil.padBigEndianBytes(bytes, 16))
+ assertThatThrownBy(() -> DecimalVectorUtil.padBigEndianBytes(bytes, 16))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Buffer size of 17 is larger than requested size of 16");
}
diff --git a/aws/src/integration/java/org/apache/iceberg/aws/lakeformation/TestLakeFormationAwsClientFactory.java b/aws/src/integration/java/org/apache/iceberg/aws/lakeformation/TestLakeFormationAwsClientFactory.java
index 9af1e3d..609dbf1 100644
--- a/aws/src/integration/java/org/apache/iceberg/aws/lakeformation/TestLakeFormationAwsClientFactory.java
+++ b/aws/src/integration/java/org/apache/iceberg/aws/lakeformation/TestLakeFormationAwsClientFactory.java
@@ -18,6 +18,7 @@
*/
package org.apache.iceberg.aws.lakeformation;
+import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatNoException;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
@@ -30,7 +31,6 @@
import org.apache.iceberg.aws.glue.GlueCatalog;
import org.apache.iceberg.catalog.Namespace;
import org.apache.iceberg.relocated.com.google.common.collect.Maps;
-import org.assertj.core.api.Assertions;
import org.awaitility.Awaitility;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
@@ -138,7 +138,7 @@
.atMost(Duration.ofSeconds(10))
.untilAsserted(
() ->
- Assertions.assertThat(
+ assertThat(
iam.getRolePolicy(
GetRolePolicyRequest.builder()
.roleName(roleName)
diff --git a/aws/src/test/java/org/apache/iceberg/aws/AwsClientPropertiesTest.java b/aws/src/test/java/org/apache/iceberg/aws/AwsClientPropertiesTest.java
index fd3bea6..c318538 100644
--- a/aws/src/test/java/org/apache/iceberg/aws/AwsClientPropertiesTest.java
+++ b/aws/src/test/java/org/apache/iceberg/aws/AwsClientPropertiesTest.java
@@ -18,9 +18,10 @@
*/
package org.apache.iceberg.aws;
+import static org.assertj.core.api.Assertions.assertThat;
+
import java.util.Map;
import org.apache.iceberg.relocated.com.google.common.collect.Maps;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
import org.mockito.ArgumentCaptor;
import org.mockito.Mockito;
@@ -45,7 +46,7 @@
awsClientProperties.applyClientRegionConfiguration(mockS3ClientBuilder);
Mockito.verify(mockS3ClientBuilder).region(regionArgumentCaptor.capture());
Region region = regionArgumentCaptor.getValue();
- Assertions.assertThat(region.id())
+ assertThat(region.id())
.as("region parameter should match what is set in CLIENT_REGION")
.isEqualTo("us-east-1");
}
@@ -56,7 +57,7 @@
AwsCredentialsProvider credentialsProvider =
awsClientProperties.credentialsProvider(null, null, null);
- Assertions.assertThat(credentialsProvider)
+ assertThat(credentialsProvider)
.as("Should use default credentials if nothing is set")
.isInstanceOf(DefaultCredentialsProvider.class);
}
@@ -69,7 +70,7 @@
AwsCredentialsProvider credentialsProvider2 =
awsClientProperties.credentialsProvider(null, null, null);
- Assertions.assertThat(credentialsProvider)
+ assertThat(credentialsProvider)
.as("Should create a new instance in each call")
.isNotSameAs(credentialsProvider2);
}
@@ -81,14 +82,14 @@
AwsCredentialsProvider credentialsProvider =
awsClientProperties.credentialsProvider("key", "secret", null);
- Assertions.assertThat(credentialsProvider.resolveCredentials())
+ assertThat(credentialsProvider.resolveCredentials())
.as("Should use basic credentials if access key ID and secret access key are set")
.isInstanceOf(AwsBasicCredentials.class);
- Assertions.assertThat(credentialsProvider.resolveCredentials().accessKeyId())
+ assertThat(credentialsProvider.resolveCredentials().accessKeyId())
.as("The access key id should be the same as the one set by tag ACCESS_KEY_ID")
.isEqualTo("key");
- Assertions.assertThat(credentialsProvider.resolveCredentials().secretAccessKey())
+ assertThat(credentialsProvider.resolveCredentials().secretAccessKey())
.as("The secret access key should be the same as the one set by tag SECRET_ACCESS_KEY")
.isEqualTo("secret");
}
@@ -100,13 +101,13 @@
AwsCredentialsProvider credentialsProvider =
awsClientProperties.credentialsProvider("key", "secret", "token");
- Assertions.assertThat(credentialsProvider.resolveCredentials())
+ assertThat(credentialsProvider.resolveCredentials())
.as("Should use session credentials if session token is set")
.isInstanceOf(AwsSessionCredentials.class);
- Assertions.assertThat(credentialsProvider.resolveCredentials().accessKeyId())
+ assertThat(credentialsProvider.resolveCredentials().accessKeyId())
.as("The access key id should be the same as the one set by tag ACCESS_KEY_ID")
.isEqualTo("key");
- Assertions.assertThat(credentialsProvider.resolveCredentials().secretAccessKey())
+ assertThat(credentialsProvider.resolveCredentials().secretAccessKey())
.as("The secret access key should be the same as the one set by tag SECRET_ACCESS_KEY")
.isEqualTo("secret");
}
diff --git a/aws/src/test/java/org/apache/iceberg/aws/HttpClientPropertiesTest.java b/aws/src/test/java/org/apache/iceberg/aws/HttpClientPropertiesTest.java
index 7c6e5c1..5db09d3 100644
--- a/aws/src/test/java/org/apache/iceberg/aws/HttpClientPropertiesTest.java
+++ b/aws/src/test/java/org/apache/iceberg/aws/HttpClientPropertiesTest.java
@@ -18,9 +18,11 @@
*/
package org.apache.iceberg.aws;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
+
import java.util.Map;
import org.apache.iceberg.relocated.com.google.common.collect.Maps;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
import org.mockito.ArgumentCaptor;
import org.mockito.Mockito;
@@ -45,7 +47,7 @@
Mockito.verify(mockS3ClientBuilder).httpClientBuilder(httpClientBuilderCaptor.capture());
SdkHttpClient.Builder capturedHttpClientBuilder = httpClientBuilderCaptor.getValue();
- Assertions.assertThat(capturedHttpClientBuilder)
+ assertThat(capturedHttpClientBuilder)
.as("Should use url connection http client")
.isInstanceOf(UrlConnectionHttpClient.Builder.class);
}
@@ -62,7 +64,7 @@
httpClientProperties.applyHttpClientConfigurations(mockS3ClientBuilder);
Mockito.verify(mockS3ClientBuilder).httpClientBuilder(httpClientBuilderCaptor.capture());
SdkHttpClient.Builder capturedHttpClientBuilder = httpClientBuilderCaptor.getValue();
- Assertions.assertThat(capturedHttpClientBuilder)
+ assertThat(capturedHttpClientBuilder)
.as("Should use apache http client")
.isInstanceOf(ApacheHttpClient.Builder.class);
}
@@ -74,8 +76,7 @@
HttpClientProperties httpClientProperties = new HttpClientProperties(properties);
S3ClientBuilder s3ClientBuilder = S3Client.builder();
- Assertions.assertThatThrownBy(
- () -> httpClientProperties.applyHttpClientConfigurations(s3ClientBuilder))
+ assertThatThrownBy(() -> httpClientProperties.applyHttpClientConfigurations(s3ClientBuilder))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Unrecognized HTTP client type test");
}
diff --git a/aws/src/test/java/org/apache/iceberg/aws/TestAwsClientFactories.java b/aws/src/test/java/org/apache/iceberg/aws/TestAwsClientFactories.java
index 9598860..a22eb15 100644
--- a/aws/src/test/java/org/apache/iceberg/aws/TestAwsClientFactories.java
+++ b/aws/src/test/java/org/apache/iceberg/aws/TestAwsClientFactories.java
@@ -18,6 +18,9 @@
*/
package org.apache.iceberg.aws;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
+
import java.io.IOException;
import java.util.Map;
import org.apache.iceberg.TestHelpers;
@@ -27,7 +30,6 @@
import org.apache.iceberg.relocated.com.google.common.base.Preconditions;
import org.apache.iceberg.relocated.com.google.common.collect.Maps;
import org.apache.iceberg.util.SerializationUtil;
-import org.assertj.core.api.Assertions;
import org.assertj.core.api.ThrowableAssert;
import org.junit.jupiter.api.Test;
import software.amazon.awssdk.auth.credentials.AwsBasicCredentials;
@@ -44,11 +46,11 @@
@Test
public void testLoadDefault() {
- Assertions.assertThat(AwsClientFactories.defaultFactory())
+ assertThat(AwsClientFactories.defaultFactory())
.as("default client should be singleton")
.isSameAs(AwsClientFactories.defaultFactory());
- Assertions.assertThat(AwsClientFactories.from(Maps.newHashMap()))
+ assertThat(AwsClientFactories.from(Maps.newHashMap()))
.as("should load default when not configured")
.isInstanceOf(AwsClientFactories.DefaultAwsClientFactory.class);
}
@@ -57,7 +59,7 @@
public void testLoadCustom() {
Map<String, String> properties = Maps.newHashMap();
properties.put(AwsProperties.CLIENT_FACTORY, CustomFactory.class.getName());
- Assertions.assertThat(AwsClientFactories.from(properties))
+ assertThat(AwsClientFactories.from(properties))
.as("should load custom class")
.isInstanceOf(CustomFactory.class);
}
@@ -67,14 +69,14 @@
Map<String, String> properties = Maps.newHashMap();
properties.put(S3FileIOProperties.ACCESS_KEY_ID, "key");
- Assertions.assertThatThrownBy(() -> AwsClientFactories.from(properties))
+ assertThatThrownBy(() -> AwsClientFactories.from(properties))
.isInstanceOf(ValidationException.class)
.hasMessage("S3 client access key ID and secret access key must be set at the same time");
properties.remove(S3FileIOProperties.ACCESS_KEY_ID);
properties.put(S3FileIOProperties.SECRET_ACCESS_KEY, "secret");
- Assertions.assertThatThrownBy(() -> AwsClientFactories.from(properties))
+ assertThatThrownBy(() -> AwsClientFactories.from(properties))
.isInstanceOf(ValidationException.class)
.hasMessage("S3 client access key ID and secret access key must be set at the same time");
}
@@ -85,13 +87,12 @@
AwsClientFactory defaultAwsClientFactory = AwsClientFactories.from(properties);
AwsClientFactory roundTripResult =
TestHelpers.KryoHelpers.roundTripSerialize(defaultAwsClientFactory);
- Assertions.assertThat(roundTripResult)
- .isInstanceOf(AwsClientFactories.DefaultAwsClientFactory.class);
+ assertThat(roundTripResult).isInstanceOf(AwsClientFactories.DefaultAwsClientFactory.class);
byte[] serializedFactoryBytes = SerializationUtil.serializeToBytes(defaultAwsClientFactory);
AwsClientFactory deserializedClientFactory =
SerializationUtil.deserializeFromBytes(serializedFactoryBytes);
- Assertions.assertThat(deserializedClientFactory)
+ assertThat(deserializedClientFactory)
.isInstanceOf(AwsClientFactories.DefaultAwsClientFactory.class);
}
@@ -104,12 +105,12 @@
AwsClientFactory assumeRoleAwsClientFactory = AwsClientFactories.from(properties);
AwsClientFactory roundTripResult =
TestHelpers.KryoHelpers.roundTripSerialize(assumeRoleAwsClientFactory);
- Assertions.assertThat(roundTripResult).isInstanceOf(AssumeRoleAwsClientFactory.class);
+ assertThat(roundTripResult).isInstanceOf(AssumeRoleAwsClientFactory.class);
byte[] serializedFactoryBytes = SerializationUtil.serializeToBytes(assumeRoleAwsClientFactory);
AwsClientFactory deserializedClientFactory =
SerializationUtil.deserializeFromBytes(serializedFactoryBytes);
- Assertions.assertThat(deserializedClientFactory).isInstanceOf(AssumeRoleAwsClientFactory.class);
+ assertThat(deserializedClientFactory).isInstanceOf(AssumeRoleAwsClientFactory.class);
}
@Test
@@ -125,14 +126,13 @@
AwsClientFactory lakeFormationAwsClientFactory = AwsClientFactories.from(properties);
AwsClientFactory roundTripResult =
TestHelpers.KryoHelpers.roundTripSerialize(lakeFormationAwsClientFactory);
- Assertions.assertThat(roundTripResult).isInstanceOf(LakeFormationAwsClientFactory.class);
+ assertThat(roundTripResult).isInstanceOf(LakeFormationAwsClientFactory.class);
byte[] serializedFactoryBytes =
SerializationUtil.serializeToBytes(lakeFormationAwsClientFactory);
AwsClientFactory deserializedClientFactory =
SerializationUtil.deserializeFromBytes(serializedFactoryBytes);
- Assertions.assertThat(deserializedClientFactory)
- .isInstanceOf(LakeFormationAwsClientFactory.class);
+ assertThat(deserializedClientFactory).isInstanceOf(LakeFormationAwsClientFactory.class);
}
@Test
@@ -143,7 +143,7 @@
assertClientObjectsNotNull(defaultAwsClientFactory);
// Ensuring S3Exception thrown instead exception thrown by resolveCredentials() implemented by
// test credentials provider
- Assertions.assertThatThrownBy(() -> defaultAwsClientFactory.s3().listBuckets())
+ assertThatThrownBy(() -> defaultAwsClientFactory.s3().listBuckets())
.isInstanceOf(software.amazon.awssdk.services.s3.model.S3Exception.class)
.hasMessageContaining("The AWS Access Key Id you provided does not exist in our records");
}
@@ -207,22 +207,21 @@
}
private void assertClientObjectsNotNull(AwsClientFactory defaultAwsClientFactory) {
- Assertions.assertThat(defaultAwsClientFactory.s3()).isNotNull();
- Assertions.assertThat(defaultAwsClientFactory.dynamo()).isNotNull();
- Assertions.assertThat(defaultAwsClientFactory.glue()).isNotNull();
- Assertions.assertThat(defaultAwsClientFactory.kms()).isNotNull();
+ assertThat(defaultAwsClientFactory.s3()).isNotNull();
+ assertThat(defaultAwsClientFactory.dynamo()).isNotNull();
+ assertThat(defaultAwsClientFactory.glue()).isNotNull();
+ assertThat(defaultAwsClientFactory.kms()).isNotNull();
}
private void assertIllegalArgumentException(
ThrowableAssert.ThrowingCallable shouldRaiseThrowable, String containsMessage) {
- Assertions.assertThatThrownBy(shouldRaiseThrowable)
+ assertThatThrownBy(shouldRaiseThrowable)
.isInstanceOf(IllegalArgumentException.class)
.hasMessageContaining(containsMessage);
}
private void assertDefaultAwsClientFactory(AwsClientFactory awsClientFactory) {
- Assertions.assertThat(awsClientFactory)
- .isInstanceOf(AwsClientFactories.DefaultAwsClientFactory.class);
+ assertThat(awsClientFactory).isInstanceOf(AwsClientFactories.DefaultAwsClientFactory.class);
}
private AwsClientFactory getAwsClientFactoryByCredentialsProvider(String providerClass) {
diff --git a/aws/src/test/java/org/apache/iceberg/aws/TestAwsProperties.java b/aws/src/test/java/org/apache/iceberg/aws/TestAwsProperties.java
index 510c842..8a0dabc 100644
--- a/aws/src/test/java/org/apache/iceberg/aws/TestAwsProperties.java
+++ b/aws/src/test/java/org/apache/iceberg/aws/TestAwsProperties.java
@@ -20,11 +20,11 @@
import static org.apache.iceberg.aws.AwsProperties.DYNAMODB_TABLE_NAME;
import static org.apache.iceberg.aws.AwsProperties.GLUE_CATALOG_ID;
+import static org.assertj.core.api.Assertions.assertThat;
import java.io.IOException;
import org.apache.iceberg.TestHelpers;
import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
public class TestAwsProperties {
@@ -35,9 +35,9 @@
new AwsProperties(ImmutableMap.of(GLUE_CATALOG_ID, "foo", DYNAMODB_TABLE_NAME, "ice"));
AwsProperties deSerializedAwsPropertiesWithProps =
TestHelpers.KryoHelpers.roundTripSerialize(awsPropertiesWithProps);
- Assertions.assertThat(deSerializedAwsPropertiesWithProps.glueCatalogId())
+ assertThat(deSerializedAwsPropertiesWithProps.glueCatalogId())
.isEqualTo(awsPropertiesWithProps.glueCatalogId());
- Assertions.assertThat(deSerializedAwsPropertiesWithProps.dynamoDbTableName())
+ assertThat(deSerializedAwsPropertiesWithProps.dynamoDbTableName())
.isEqualTo(awsPropertiesWithProps.dynamoDbTableName());
}
}
diff --git a/aws/src/test/java/org/apache/iceberg/aws/TestHttpClientProperties.java b/aws/src/test/java/org/apache/iceberg/aws/TestHttpClientProperties.java
index df338a5..b0602a0 100644
--- a/aws/src/test/java/org/apache/iceberg/aws/TestHttpClientProperties.java
+++ b/aws/src/test/java/org/apache/iceberg/aws/TestHttpClientProperties.java
@@ -18,9 +18,11 @@
*/
package org.apache.iceberg.aws;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
+
import java.util.Map;
import org.apache.iceberg.relocated.com.google.common.collect.Maps;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
import org.mockito.ArgumentCaptor;
import org.mockito.Mockito;
@@ -45,7 +47,7 @@
Mockito.verify(mockS3ClientBuilder).httpClientBuilder(httpClientBuilderCaptor.capture());
SdkHttpClient.Builder capturedHttpClientBuilder = httpClientBuilderCaptor.getValue();
- Assertions.assertThat(capturedHttpClientBuilder)
+ assertThat(capturedHttpClientBuilder)
.as("Should use url connection http client")
.isInstanceOf(UrlConnectionHttpClient.Builder.class);
}
@@ -62,7 +64,7 @@
httpProperties.applyHttpClientConfigurations(mockS3ClientBuilder);
Mockito.verify(mockS3ClientBuilder).httpClientBuilder(httpClientBuilderCaptor.capture());
SdkHttpClient.Builder capturedHttpClientBuilder = httpClientBuilderCaptor.getValue();
- Assertions.assertThat(capturedHttpClientBuilder)
+ assertThat(capturedHttpClientBuilder)
.as("Should use apache http client")
.isInstanceOf(ApacheHttpClient.Builder.class);
}
@@ -74,8 +76,7 @@
HttpClientProperties httpProperties = new HttpClientProperties(properties);
S3ClientBuilder s3ClientBuilder = S3Client.builder();
- Assertions.assertThatThrownBy(
- () -> httpProperties.applyHttpClientConfigurations(s3ClientBuilder))
+ assertThatThrownBy(() -> httpProperties.applyHttpClientConfigurations(s3ClientBuilder))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Unrecognized HTTP client type test");
}
diff --git a/aws/src/test/java/org/apache/iceberg/aws/TestRESTSigV4Signer.java b/aws/src/test/java/org/apache/iceberg/aws/TestRESTSigV4Signer.java
index b353635..88623ed 100644
--- a/aws/src/test/java/org/apache/iceberg/aws/TestRESTSigV4Signer.java
+++ b/aws/src/test/java/org/apache/iceberg/aws/TestRESTSigV4Signer.java
@@ -18,6 +18,8 @@
*/
package org.apache.iceberg.aws;
+import static org.assertj.core.api.Assertions.assertThat;
+
import java.io.IOException;
import java.util.Map;
import org.apache.hc.core5.http.HttpHeaders;
@@ -28,7 +30,6 @@
import org.apache.iceberg.rest.auth.OAuth2Util;
import org.apache.iceberg.rest.responses.ConfigResponse;
import org.apache.iceberg.rest.responses.OAuthTokenResponse;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.BeforeEach;
@@ -101,7 +102,7 @@
client.get("v1/config", ConfigResponse.class, ImmutableMap.of(), e -> {});
mockServer.verify(request, VerificationTimes.exactly(1));
- Assertions.assertThat(response).isNotNull();
+ assertThat(response).isNotNull();
}
@Test
@@ -141,6 +142,6 @@
"v1/oauth/token", formData, OAuthTokenResponse.class, ImmutableMap.of(), e -> {});
mockServer.verify(request, VerificationTimes.exactly(1));
- Assertions.assertThat(response).isNotNull();
+ assertThat(response).isNotNull();
}
}
diff --git a/aws/src/test/java/org/apache/iceberg/aws/TestS3FileIOAwsClientFactories.java b/aws/src/test/java/org/apache/iceberg/aws/TestS3FileIOAwsClientFactories.java
index f09d4e5..cd1cd43 100644
--- a/aws/src/test/java/org/apache/iceberg/aws/TestS3FileIOAwsClientFactories.java
+++ b/aws/src/test/java/org/apache/iceberg/aws/TestS3FileIOAwsClientFactories.java
@@ -18,11 +18,12 @@
*/
package org.apache.iceberg.aws;
+import static org.assertj.core.api.Assertions.assertThat;
+
import java.util.Map;
import org.apache.iceberg.aws.s3.S3FileIOAwsClientFactory;
import org.apache.iceberg.aws.s3.S3FileIOProperties;
import org.apache.iceberg.relocated.com.google.common.collect.Maps;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
public class TestS3FileIOAwsClientFactories {
@@ -34,7 +35,7 @@
S3FileIOProperties.CLIENT_FACTORY,
"org.apache.iceberg.aws.s3.DefaultS3FileIOAwsClientFactory");
Object factoryImpl = S3FileIOAwsClientFactories.initialize(properties);
- Assertions.assertThat(factoryImpl)
+ assertThat(factoryImpl)
.as(
"should instantiate an object of type S3FileIOAwsClientFactory when s3.client-factory-impl is set")
.isInstanceOf(S3FileIOAwsClientFactory.class);
@@ -45,7 +46,7 @@
// don't set anything
Map<String, String> properties = Maps.newHashMap();
Object factoryImpl = S3FileIOAwsClientFactories.initialize(properties);
- Assertions.assertThat(factoryImpl)
+ assertThat(factoryImpl)
.as(
"should instantiate an object of type AwsClientFactory when s3.client-factory-impl is not set")
.isInstanceOf(AwsClientFactory.class);
diff --git a/aws/src/test/java/org/apache/iceberg/aws/TestS3FileIOProperties.java b/aws/src/test/java/org/apache/iceberg/aws/TestS3FileIOProperties.java
index b7a3f60..92ed364 100644
--- a/aws/src/test/java/org/apache/iceberg/aws/TestS3FileIOProperties.java
+++ b/aws/src/test/java/org/apache/iceberg/aws/TestS3FileIOProperties.java
@@ -18,6 +18,9 @@
*/
package org.apache.iceberg.aws;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
+
import java.util.Map;
import java.util.Optional;
import org.apache.iceberg.CatalogProperties;
@@ -25,7 +28,6 @@
import org.apache.iceberg.aws.s3.signer.S3V4RestSignerClient;
import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap;
import org.apache.iceberg.relocated.com.google.common.collect.Maps;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
import org.mockito.ArgumentCaptor;
import org.mockito.Mockito;
@@ -46,7 +48,7 @@
Map<String, String> map = Maps.newHashMap();
map.put(S3FileIOProperties.SSE_TYPE, S3FileIOProperties.SSE_TYPE_CUSTOM);
- Assertions.assertThatThrownBy(() -> new S3FileIOProperties(map))
+ assertThatThrownBy(() -> new S3FileIOProperties(map))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot initialize SSE-C S3FileIO with null encryption key");
}
@@ -57,7 +59,7 @@
map.put(S3FileIOProperties.SSE_TYPE, S3FileIOProperties.SSE_TYPE_CUSTOM);
map.put(S3FileIOProperties.SSE_KEY, "something");
- Assertions.assertThatThrownBy(() -> new S3FileIOProperties(map))
+ assertThatThrownBy(() -> new S3FileIOProperties(map))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot initialize SSE-C S3FileIO with null encryption key MD5");
}
@@ -67,7 +69,7 @@
Map<String, String> map = Maps.newHashMap();
map.put(S3FileIOProperties.ACL, ObjectCannedACL.AUTHENTICATED_READ.toString());
S3FileIOProperties properties = new S3FileIOProperties(map);
- Assertions.assertThat(properties.acl()).isEqualTo(ObjectCannedACL.AUTHENTICATED_READ);
+ assertThat(properties.acl()).isEqualTo(ObjectCannedACL.AUTHENTICATED_READ);
}
@Test
@@ -75,7 +77,7 @@
Map<String, String> map = Maps.newHashMap();
map.put(S3FileIOProperties.ACL, "bad-input");
- Assertions.assertThatThrownBy(() -> new S3FileIOProperties(map))
+ assertThatThrownBy(() -> new S3FileIOProperties(map))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot support S3 CannedACL bad-input");
}
@@ -85,7 +87,7 @@
Map<String, String> map = Maps.newHashMap();
map.put(S3FileIOProperties.MULTIPART_SIZE, "1");
- Assertions.assertThatThrownBy(() -> new S3FileIOProperties(map))
+ assertThatThrownBy(() -> new S3FileIOProperties(map))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Minimum multipart upload object size must be larger than 5 MB.");
}
@@ -95,7 +97,7 @@
Map<String, String> map = Maps.newHashMap();
map.put(S3FileIOProperties.MULTIPART_SIZE, "5368709120"); // 5GB
- Assertions.assertThatThrownBy(() -> new S3FileIOProperties(map))
+ assertThatThrownBy(() -> new S3FileIOProperties(map))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Input malformed or exceeded maximum multipart upload size 5GB: 5368709120");
}
@@ -105,7 +107,7 @@
Map<String, String> map = Maps.newHashMap();
map.put(S3FileIOProperties.MULTIPART_THRESHOLD_FACTOR, "0.9");
- Assertions.assertThatThrownBy(() -> new S3FileIOProperties(map))
+ assertThatThrownBy(() -> new S3FileIOProperties(map))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Multipart threshold factor must be >= to 1.0");
}
@@ -115,7 +117,7 @@
Map<String, String> map = Maps.newHashMap();
map.put(S3FileIOProperties.DELETE_BATCH_SIZE, "2000");
- Assertions.assertThatThrownBy(() -> new S3FileIOProperties(map))
+ assertThatThrownBy(() -> new S3FileIOProperties(map))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Deletion batch size must be between 1 and 1000");
}
@@ -125,7 +127,7 @@
Map<String, String> map = Maps.newHashMap();
map.put(S3FileIOProperties.DELETE_BATCH_SIZE, "0");
- Assertions.assertThatThrownBy(() -> new S3FileIOProperties(map))
+ assertThatThrownBy(() -> new S3FileIOProperties(map))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Deletion batch size must be between 1 and 1000");
}
@@ -144,7 +146,7 @@
Mockito.verify(mockS3ClientBuilder).credentialsProvider(awsCredentialsProviderCaptor.capture());
AwsCredentialsProvider capturedAwsCredentialsProvider = awsCredentialsProviderCaptor.getValue();
- Assertions.assertThat(capturedAwsCredentialsProvider)
+ assertThat(capturedAwsCredentialsProvider)
.as("Should use default credentials if nothing is set")
.isInstanceOf(DefaultCredentialsProvider.class);
}
@@ -165,13 +167,13 @@
Mockito.verify(mockS3ClientBuilder).credentialsProvider(awsCredentialsProviderCaptor.capture());
AwsCredentialsProvider capturedAwsCredentialsProvider = awsCredentialsProviderCaptor.getValue();
- Assertions.assertThat(capturedAwsCredentialsProvider.resolveCredentials())
+ assertThat(capturedAwsCredentialsProvider.resolveCredentials())
.as("Should use basic credentials if access key ID and secret access key are set")
.isInstanceOf(AwsBasicCredentials.class);
- Assertions.assertThat(capturedAwsCredentialsProvider.resolveCredentials().accessKeyId())
+ assertThat(capturedAwsCredentialsProvider.resolveCredentials().accessKeyId())
.as("The access key id should be the same as the one set by tag S3FILEIO_ACCESS_KEY_ID")
.isEqualTo("key");
- Assertions.assertThat(capturedAwsCredentialsProvider.resolveCredentials().secretAccessKey())
+ assertThat(capturedAwsCredentialsProvider.resolveCredentials().secretAccessKey())
.as(
"The secret access key should be the same as the one set by tag S3FILEIO_SECRET_ACCESS_KEY")
.isEqualTo("secret");
@@ -194,13 +196,13 @@
Mockito.verify(mockS3ClientBuilder).credentialsProvider(awsCredentialsProviderCaptor.capture());
AwsCredentialsProvider capturedAwsCredentialsProvider = awsCredentialsProviderCaptor.getValue();
- Assertions.assertThat(capturedAwsCredentialsProvider.resolveCredentials())
+ assertThat(capturedAwsCredentialsProvider.resolveCredentials())
.as("Should use session credentials if session token is set")
.isInstanceOf(AwsSessionCredentials.class);
- Assertions.assertThat(capturedAwsCredentialsProvider.resolveCredentials().accessKeyId())
+ assertThat(capturedAwsCredentialsProvider.resolveCredentials().accessKeyId())
.as("The access key id should be the same as the one set by tag S3FILEIO_ACCESS_KEY_ID")
.isEqualTo("key");
- Assertions.assertThat(capturedAwsCredentialsProvider.resolveCredentials().secretAccessKey())
+ assertThat(capturedAwsCredentialsProvider.resolveCredentials().secretAccessKey())
.as(
"The secret access key should be the same as the one set by tag S3FILEIO_SECRET_ACCESS_KEY")
.isEqualTo("secret");
@@ -212,7 +214,7 @@
ImmutableMap.of(S3FileIOProperties.REMOTE_SIGNING_ENABLED, "true");
S3FileIOProperties s3Properties = new S3FileIOProperties(properties);
- Assertions.assertThatThrownBy(() -> s3Properties.applySignerConfiguration(S3Client.builder()))
+ assertThatThrownBy(() -> s3Properties.applySignerConfiguration(S3Client.builder()))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("S3 signer service URI is required");
}
@@ -230,10 +232,10 @@
Optional<Signer> signer =
builder.overrideConfiguration().advancedOption(SdkAdvancedClientOption.SIGNER);
- Assertions.assertThat(signer).isPresent().get().isInstanceOf(S3V4RestSignerClient.class);
+ assertThat(signer).isPresent().get().isInstanceOf(S3V4RestSignerClient.class);
S3V4RestSignerClient signerClient = (S3V4RestSignerClient) signer.get();
- Assertions.assertThat(signerClient.baseSignerUri()).isEqualTo(uri);
- Assertions.assertThat(signerClient.properties()).isEqualTo(properties);
+ assertThat(signerClient.baseSignerUri()).isEqualTo(uri);
+ assertThat(signerClient.properties()).isEqualTo(properties);
}
@Test
@@ -250,17 +252,14 @@
Optional<String> userAgent =
builder.overrideConfiguration().advancedOption(SdkAdvancedClientOption.USER_AGENT_PREFIX);
- Assertions.assertThat(userAgent)
- .isPresent()
- .get()
- .satisfies(x -> Assertions.assertThat(x).startsWith("s3fileio"));
+ assertThat(userAgent).isPresent().get().satisfies(x -> assertThat(x).startsWith("s3fileio"));
Optional<Signer> signer =
builder.overrideConfiguration().advancedOption(SdkAdvancedClientOption.SIGNER);
- Assertions.assertThat(signer).isPresent().get().isInstanceOf(S3V4RestSignerClient.class);
+ assertThat(signer).isPresent().get().isInstanceOf(S3V4RestSignerClient.class);
S3V4RestSignerClient signerClient = (S3V4RestSignerClient) signer.get();
- Assertions.assertThat(signerClient.baseSignerUri()).isEqualTo(uri);
- Assertions.assertThat(signerClient.properties()).isEqualTo(properties);
+ assertThat(signerClient.baseSignerUri()).isEqualTo(uri);
+ assertThat(signerClient.properties()).isEqualTo(properties);
}
@Test
@@ -274,7 +273,7 @@
Optional<Signer> signer =
builder.overrideConfiguration().advancedOption(SdkAdvancedClientOption.SIGNER);
- Assertions.assertThat(signer).isNotPresent();
+ assertThat(signer).isNotPresent();
}
@Test
@@ -286,7 +285,7 @@
S3ClientBuilder builder = S3Client.builder();
s3Properties.applyS3AccessGrantsConfigurations(builder);
- Assertions.assertThat(builder.plugins().size()).isEqualTo(1);
+ assertThat(builder.plugins().size()).isEqualTo(1);
}
@Test
@@ -298,7 +297,7 @@
S3ClientBuilder builder = S3Client.builder();
s3Properties.applyS3AccessGrantsConfigurations(builder);
- Assertions.assertThat(builder.plugins().size()).isEqualTo(0);
+ assertThat(builder.plugins().size()).isEqualTo(0);
// Implicitly false
properties = ImmutableMap.of();
@@ -306,6 +305,6 @@
builder = S3Client.builder();
s3Properties.applyS3AccessGrantsConfigurations(builder);
- Assertions.assertThat(builder.plugins().size()).isEqualTo(0);
+ assertThat(builder.plugins().size()).isEqualTo(0);
}
}
diff --git a/aws/src/test/java/org/apache/iceberg/aws/dynamodb/TestDynamoDbCatalog.java b/aws/src/test/java/org/apache/iceberg/aws/dynamodb/TestDynamoDbCatalog.java
index 5b5941c..b602cea 100644
--- a/aws/src/test/java/org/apache/iceberg/aws/dynamodb/TestDynamoDbCatalog.java
+++ b/aws/src/test/java/org/apache/iceberg/aws/dynamodb/TestDynamoDbCatalog.java
@@ -19,6 +19,8 @@
package org.apache.iceberg.aws.dynamodb;
import static org.apache.iceberg.aws.dynamodb.DynamoDbCatalog.toPropertyCol;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
import static org.mockito.ArgumentMatchers.any;
import org.apache.iceberg.aws.AwsProperties;
@@ -26,7 +28,6 @@
import org.apache.iceberg.exceptions.NoSuchNamespaceException;
import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap;
import org.apache.iceberg.relocated.com.google.common.collect.Maps;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.mockito.Mockito;
@@ -60,7 +61,7 @@
.when(dynamo)
.getItem(any(GetItemRequest.class));
String location = catalogWithSlash.defaultWarehouseLocation(TABLE_IDENTIFIER);
- Assertions.assertThat(location).isEqualTo(WAREHOUSE_PATH + "/db.db/table");
+ assertThat(location).isEqualTo(WAREHOUSE_PATH + "/db.db/table");
}
@Test
@@ -71,7 +72,7 @@
String warehousePath = WAREHOUSE_PATH + "/db.db/table";
String defaultWarehouseLocation = dynamoCatalog.defaultWarehouseLocation(TABLE_IDENTIFIER);
- Assertions.assertThat(defaultWarehouseLocation).isEqualTo(warehousePath);
+ assertThat(defaultWarehouseLocation).isEqualTo(warehousePath);
}
@Test
@@ -88,7 +89,7 @@
.getItem(any(GetItemRequest.class));
String defaultWarehouseLocation = dynamoCatalog.defaultWarehouseLocation(TABLE_IDENTIFIER);
- Assertions.assertThat(defaultWarehouseLocation).isEqualTo("s3://bucket2/db/table");
+ assertThat(defaultWarehouseLocation).isEqualTo("s3://bucket2/db/table");
}
@Test
@@ -97,7 +98,7 @@
.when(dynamo)
.getItem(any(GetItemRequest.class));
- Assertions.assertThatThrownBy(() -> dynamoCatalog.defaultWarehouseLocation(TABLE_IDENTIFIER))
+ assertThatThrownBy(() -> dynamoCatalog.defaultWarehouseLocation(TABLE_IDENTIFIER))
.as("default warehouse can't be called on non existent namespace")
.isInstanceOf(NoSuchNamespaceException.class)
.hasMessageContaining("Cannot find default warehouse location:");
diff --git a/aws/src/test/java/org/apache/iceberg/aws/glue/TestGlueCatalog.java b/aws/src/test/java/org/apache/iceberg/aws/glue/TestGlueCatalog.java
index 6d3b521..2042948 100644
--- a/aws/src/test/java/org/apache/iceberg/aws/glue/TestGlueCatalog.java
+++ b/aws/src/test/java/org/apache/iceberg/aws/glue/TestGlueCatalog.java
@@ -18,6 +18,9 @@
*/
package org.apache.iceberg.aws.glue;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
+
import java.util.List;
import java.util.Map;
import java.util.UUID;
@@ -35,7 +38,6 @@
import org.apache.iceberg.relocated.com.google.common.collect.Maps;
import org.apache.iceberg.relocated.com.google.common.collect.Sets;
import org.apache.iceberg.util.LockManagers;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.mockito.Mockito;
@@ -104,8 +106,7 @@
Mockito.doThrow(EntityNotFoundException.builder().build())
.when(glue)
.getTable(Mockito.any(GetTableRequest.class));
- Assertions.assertThatThrownBy(
- () -> catalog.createTable(TableIdentifier.of("db", "table"), new Schema()))
+ assertThatThrownBy(() -> catalog.createTable(TableIdentifier.of("db", "table"), new Schema()))
.hasMessageContaining(
"Cannot derive default warehouse location, warehouse path must not be null or empty")
.isInstanceOf(ValidationException.class);
@@ -127,7 +128,7 @@
.when(glue)
.getDatabase(Mockito.any(GetDatabaseRequest.class));
String location = catalogWithSlash.defaultWarehouseLocation(TableIdentifier.of("db", "table"));
- Assertions.assertThat(location).isEqualTo(WAREHOUSE_PATH + "/db.db/table");
+ assertThat(location).isEqualTo(WAREHOUSE_PATH + "/db.db/table");
}
@Test
@@ -137,7 +138,7 @@
.when(glue)
.getDatabase(Mockito.any(GetDatabaseRequest.class));
String location = glueCatalog.defaultWarehouseLocation(TableIdentifier.of("db", "table"));
- Assertions.assertThat(location).isEqualTo(WAREHOUSE_PATH + "/db.db/table");
+ assertThat(location).isEqualTo(WAREHOUSE_PATH + "/db.db/table");
}
@Test
@@ -149,7 +150,7 @@
.when(glue)
.getDatabase(Mockito.any(GetDatabaseRequest.class));
String location = glueCatalog.defaultWarehouseLocation(TableIdentifier.of("db", "table"));
- Assertions.assertThat(location).isEqualTo("s3://bucket2/db/table");
+ assertThat(location).isEqualTo("s3://bucket2/db/table");
}
@Test
@@ -162,7 +163,7 @@
.getDatabase(Mockito.any(GetDatabaseRequest.class));
String location = glueCatalog.defaultWarehouseLocation(TableIdentifier.of("db", "table"));
- Assertions.assertThat(location).isEqualTo("s3://bucket2/db/table");
+ assertThat(location).isEqualTo("s3://bucket2/db/table");
}
@Test
@@ -239,7 +240,7 @@
.build())
.when(glue)
.getTables(Mockito.any(GetTablesRequest.class));
- Assertions.assertThat(glueCatalog.listTables(Namespace.of("db1")))
+ assertThat(glueCatalog.listTables(Namespace.of("db1")))
.isEqualTo(
Lists.newArrayList(TableIdentifier.of("db1", "t1"), TableIdentifier.of("db1", "t2")));
}
@@ -285,7 +286,7 @@
})
.when(glue)
.getTables(Mockito.any(GetTablesRequest.class));
- Assertions.assertThat(glueCatalog.listTables(Namespace.of("db1"))).hasSize(10);
+ assertThat(glueCatalog.listTables(Namespace.of("db1"))).hasSize(10);
}
@Test
@@ -343,7 +344,7 @@
.when(glue)
.deleteTable(Mockito.any(DeleteTableRequest.class));
glueCatalog.dropTable(TableIdentifier.of("db1", "t1"));
- Assertions.assertThat(counter.get()).isEqualTo(0);
+ assertThat(counter.get()).isEqualTo(0);
}
@Test
@@ -397,7 +398,7 @@
.createTable(Mockito.any(CreateTableRequest.class));
glueCatalog.renameTable(TableIdentifier.of("db", "t"), TableIdentifier.of("db", "x_renamed"));
- Assertions.assertThat(counter.get()).isEqualTo(0);
+ assertThat(counter.get()).isEqualTo(0);
}
@Test
@@ -417,7 +418,7 @@
Lists.newArrayList(Namespace.of("db-1"), Namespace.of("db", "db2"));
for (Namespace namespace : invalidNamespaces) {
- Assertions.assertThatThrownBy(() -> glueCatalog.createNamespace(namespace))
+ assertThatThrownBy(() -> glueCatalog.createNamespace(namespace))
.isInstanceOf(ValidationException.class)
.hasMessageStartingWith("Cannot convert namespace")
.hasMessageEndingWith(
@@ -435,7 +436,7 @@
.build())
.when(glue)
.getDatabases(Mockito.any(GetDatabasesRequest.class));
- Assertions.assertThat(glueCatalog.listNamespaces())
+ assertThat(glueCatalog.listNamespaces())
.isEqualTo(Lists.newArrayList(Namespace.of("db1"), Namespace.of("db2")));
}
@@ -463,7 +464,7 @@
})
.when(glue)
.getDatabases(Mockito.any(GetDatabasesRequest.class));
- Assertions.assertThat(glueCatalog.listNamespaces()).hasSize(10);
+ assertThat(glueCatalog.listNamespaces()).hasSize(10);
}
@Test
@@ -472,7 +473,7 @@
GetDatabaseResponse.builder().database(Database.builder().name("db1").build()).build())
.when(glue)
.getDatabase(Mockito.any(GetDatabaseRequest.class));
- Assertions.assertThat(glueCatalog.listNamespaces(Namespace.of("db1")))
+ assertThat(glueCatalog.listNamespaces(Namespace.of("db1")))
.as("list self should return empty list")
.isEmpty();
}
@@ -480,7 +481,7 @@
@Test
public void testListNamespacesBadName() {
- Assertions.assertThatThrownBy(() -> glueCatalog.listNamespaces(Namespace.of("db-1")))
+ assertThatThrownBy(() -> glueCatalog.listNamespaces(Namespace.of("db-1")))
.isInstanceOf(ValidationException.class)
.hasMessage(
"Cannot convert namespace db-1 to Glue database name, "
@@ -503,8 +504,7 @@
.build())
.when(glue)
.getDatabase(Mockito.any(GetDatabaseRequest.class));
- Assertions.assertThat(glueCatalog.loadNamespaceMetadata(Namespace.of("db1")))
- .isEqualTo(parameters);
+ assertThat(glueCatalog.loadNamespaceMetadata(Namespace.of("db1"))).isEqualTo(parameters);
}
@Test
@@ -546,7 +546,7 @@
.when(glue)
.deleteDatabase(Mockito.any(DeleteDatabaseRequest.class));
- Assertions.assertThatThrownBy(() -> glueCatalog.dropNamespace(Namespace.of("db1")))
+ assertThatThrownBy(() -> glueCatalog.dropNamespace(Namespace.of("db1")))
.isInstanceOf(NamespaceNotEmptyException.class)
.hasMessage("Cannot drop namespace db1 because it still contains Iceberg tables");
}
@@ -567,7 +567,7 @@
.when(glue)
.deleteDatabase(Mockito.any(DeleteDatabaseRequest.class));
- Assertions.assertThatThrownBy(() -> glueCatalog.dropNamespace(Namespace.of("db1")))
+ assertThatThrownBy(() -> glueCatalog.dropNamespace(Namespace.of("db1")))
.isInstanceOf(NamespaceNotEmptyException.class)
.hasMessage("Cannot drop namespace db1 because it still contains non-Iceberg tables");
}
@@ -622,7 +622,7 @@
LockManagers.defaultLockManager(),
catalogProps);
Map<String, String> properties = glueCatalog.properties();
- Assertions.assertThat(properties)
+ assertThat(properties)
.isNotEmpty()
.containsEntry("table-default.key1", "catalog-default-key1")
.containsEntry("table-default.key2", "catalog-default-key2")
@@ -644,8 +644,7 @@
glue,
LockManagers.defaultLockManager(),
ImmutableMap.of());
- Assertions.assertThat(glueCatalog.isValidIdentifier(TableIdentifier.parse("db-1.a-1")))
- .isEqualTo(true);
+ assertThat(glueCatalog.isValidIdentifier(TableIdentifier.parse("db-1.a-1"))).isEqualTo(true);
}
@Test
@@ -671,7 +670,7 @@
glueCatalog.newTableOps(TableIdentifier.of(Namespace.of("db"), "table"));
Map<String, String> tableCatalogProperties = glueTableOperations.tableCatalogProperties();
- Assertions.assertThat(tableCatalogProperties)
+ assertThat(tableCatalogProperties)
.containsEntry(
S3FileIOProperties.WRITE_TAGS_PREFIX.concat(S3FileIOProperties.S3_TAG_ICEBERG_TABLE),
"table")
diff --git a/aws/src/test/java/org/apache/iceberg/aws/glue/TestGlueToIcebergConverter.java b/aws/src/test/java/org/apache/iceberg/aws/glue/TestGlueToIcebergConverter.java
index 0d2c3d8..71e2f72 100644
--- a/aws/src/test/java/org/apache/iceberg/aws/glue/TestGlueToIcebergConverter.java
+++ b/aws/src/test/java/org/apache/iceberg/aws/glue/TestGlueToIcebergConverter.java
@@ -18,13 +18,15 @@
*/
package org.apache.iceberg.aws.glue;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
+
import java.util.Map;
import org.apache.iceberg.BaseMetastoreTableOperations;
import org.apache.iceberg.catalog.Namespace;
import org.apache.iceberg.catalog.TableIdentifier;
import org.apache.iceberg.exceptions.NoSuchIcebergTableException;
import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
import software.amazon.awssdk.services.glue.model.Database;
import software.amazon.awssdk.services.glue.model.Table;
@@ -35,21 +37,21 @@
public void testToNamespace() {
Database database = Database.builder().name("db").build();
Namespace namespace = Namespace.of("db");
- Assertions.assertThat(GlueToIcebergConverter.toNamespace(database)).isEqualTo(namespace);
+ assertThat(GlueToIcebergConverter.toNamespace(database)).isEqualTo(namespace);
}
@Test
public void testToTableId() {
Table table = Table.builder().databaseName("db").name("name").build();
TableIdentifier icebergId = TableIdentifier.of("db", "name");
- Assertions.assertThat(GlueToIcebergConverter.toTableId(table)).isEqualTo(icebergId);
+ assertThat(GlueToIcebergConverter.toTableId(table)).isEqualTo(icebergId);
}
@Test
public void testValidateTableIcebergPropertyNotFound() {
Table table = Table.builder().parameters(ImmutableMap.of()).build();
- Assertions.assertThatThrownBy(() -> GlueTableOperations.checkIfTableIsIceberg(table, "name"))
+ assertThatThrownBy(() -> GlueTableOperations.checkIfTableIsIceberg(table, "name"))
.isInstanceOf(NoSuchIcebergTableException.class)
.hasMessage("Input Glue table is not an iceberg table: name (type=null)");
}
@@ -60,7 +62,7 @@
ImmutableMap.of(BaseMetastoreTableOperations.TABLE_TYPE_PROP, "other");
Table table = Table.builder().parameters(properties).build();
- Assertions.assertThatThrownBy(() -> GlueTableOperations.checkIfTableIsIceberg(table, "name"))
+ assertThatThrownBy(() -> GlueTableOperations.checkIfTableIsIceberg(table, "name"))
.isInstanceOf(NoSuchIcebergTableException.class)
.hasMessage("Input Glue table is not an iceberg table: name (type=other)");
}
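
The converter and catalog hunks above all follow the same mechanical rewrite: the org.assertj.core.api.Assertions class import is dropped, the assertion entry points are statically imported, and call sites lose the Assertions. qualifier. A minimal sketch of the pattern, using a hypothetical TestExample class that is not part of this commit:

    import static org.assertj.core.api.Assertions.assertThat;
    import static org.assertj.core.api.Assertions.assertThatThrownBy;

    import org.junit.jupiter.api.Test;

    public class TestExample {
      @Test
      public void testStaticImportPattern() {
        // before: Assertions.assertThat("db").isEqualTo("db");
        assertThat("db").isEqualTo("db");

        // before: Assertions.assertThatThrownBy(() -> { ... })
        assertThatThrownBy(() -> { throw new IllegalStateException("boom"); })
            .isInstanceOf(IllegalStateException.class)
            .hasMessage("boom");
      }
    }
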
diff --git a/aws/src/test/java/org/apache/iceberg/aws/glue/TestIcebergToGlueConverter.java b/aws/src/test/java/org/apache/iceberg/aws/glue/TestIcebergToGlueConverter.java
index 599e324..7c646f7 100644
--- a/aws/src/test/java/org/apache/iceberg/aws/glue/TestIcebergToGlueConverter.java
+++ b/aws/src/test/java/org/apache/iceberg/aws/glue/TestIcebergToGlueConverter.java
@@ -18,6 +18,9 @@
*/
package org.apache.iceberg.aws.glue;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
+
import java.util.List;
import java.util.Map;
import org.apache.iceberg.PartitionSpec;
@@ -32,7 +35,6 @@
import org.apache.iceberg.relocated.com.google.common.collect.Lists;
import org.apache.iceberg.relocated.com.google.common.collect.Sets;
import org.apache.iceberg.types.Types;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
import software.amazon.awssdk.services.glue.model.Column;
import software.amazon.awssdk.services.glue.model.DatabaseInput;
@@ -49,8 +51,7 @@
@Test
public void testToDatabaseName() {
- Assertions.assertThat(IcebergToGlueConverter.toDatabaseName(Namespace.of("db"), false))
- .isEqualTo("db");
+ assertThat(IcebergToGlueConverter.toDatabaseName(Namespace.of("db"), false)).isEqualTo("db");
}
@Test
@@ -64,7 +65,7 @@
Namespace.of(new String(new char[600]).replace("\0", "a")));
for (Namespace name : badNames) {
- Assertions.assertThatThrownBy(() -> IcebergToGlueConverter.toDatabaseName(name, false))
+ assertThatThrownBy(() -> IcebergToGlueConverter.toDatabaseName(name, false))
.isInstanceOf(ValidationException.class)
.hasMessageStartingWith("Cannot convert namespace")
.hasMessageEndingWith(
@@ -78,8 +79,7 @@
List<Namespace> acceptableNames =
Lists.newArrayList(Namespace.of("db-1"), Namespace.of("db-1-1-1"));
for (Namespace name : acceptableNames) {
- Assertions.assertThat(IcebergToGlueConverter.toDatabaseName(name, true))
- .isEqualTo(name.toString());
+ assertThat(IcebergToGlueConverter.toDatabaseName(name, true)).isEqualTo(name.toString());
}
}
@@ -91,7 +91,7 @@
TableIdentifier.parse("db.a-1-1"),
TableIdentifier.parse("db.a#1"));
for (TableIdentifier identifier : acceptableIdentifiers) {
- Assertions.assertThat(IcebergToGlueConverter.getTableName(identifier, true))
+ assertThat(IcebergToGlueConverter.getTableName(identifier, true))
.isEqualTo(identifier.name());
}
}
@@ -108,24 +108,21 @@
"val");
DatabaseInput databaseInput =
IcebergToGlueConverter.toDatabaseInput(Namespace.of("ns"), properties, false);
- Assertions.assertThat(databaseInput.locationUri())
- .as("Location should be set")
- .isEqualTo("s3://location");
- Assertions.assertThat(databaseInput.description())
+ assertThat(databaseInput.locationUri()).as("Location should be set").isEqualTo("s3://location");
+ assertThat(databaseInput.description())
.as("Description should be set")
.isEqualTo("description");
- Assertions.assertThat(databaseInput.parameters())
+ assertThat(databaseInput.parameters())
.as("Parameters should be set")
.isEqualTo(ImmutableMap.of("key", "val"));
- Assertions.assertThat(databaseInput.name()).as("Database name should be set").isEqualTo("ns");
+ assertThat(databaseInput.name()).as("Database name should be set").isEqualTo("ns");
}
@Test
public void testToDatabaseInputNoParameter() {
DatabaseInput input = DatabaseInput.builder().name("db").parameters(ImmutableMap.of()).build();
Namespace namespace = Namespace.of("db");
- Assertions.assertThat(
- IcebergToGlueConverter.toDatabaseInput(namespace, ImmutableMap.of(), false))
+ assertThat(IcebergToGlueConverter.toDatabaseInput(namespace, ImmutableMap.of(), false))
.isEqualTo(input);
}
@@ -135,14 +132,14 @@
ImmutableMap.of(IcebergToGlueConverter.GLUE_DESCRIPTION_KEY, "description", "key", "val");
DatabaseInput databaseInput =
IcebergToGlueConverter.toDatabaseInput(Namespace.of("ns"), properties, false);
- Assertions.assertThat(databaseInput.locationUri()).as("Location should not be set").isNull();
- Assertions.assertThat(databaseInput.description())
+ assertThat(databaseInput.locationUri()).as("Location should not be set").isNull();
+ assertThat(databaseInput.description())
.as("Description should be set")
.isEqualTo("description");
- Assertions.assertThat(databaseInput.parameters())
+ assertThat(databaseInput.parameters())
.as("Parameters should be set")
.isEqualTo(ImmutableMap.of("key", "val"));
- Assertions.assertThat(databaseInput.name()).as("Database name should be set").isEqualTo("ns");
+ assertThat(databaseInput.name()).as("Database name should be set").isEqualTo("ns");
}
@Test
@@ -151,14 +148,12 @@
ImmutableMap.of(IcebergToGlueConverter.GLUE_DB_LOCATION_KEY, "s3://location", "key", "val");
DatabaseInput databaseInput =
IcebergToGlueConverter.toDatabaseInput(Namespace.of("ns"), properties, false);
- Assertions.assertThat(databaseInput.locationUri())
- .as("Location should be set")
- .isEqualTo("s3://location");
- Assertions.assertThat(databaseInput.description()).as("Description should not be set").isNull();
- Assertions.assertThat(databaseInput.parameters())
+ assertThat(databaseInput.locationUri()).as("Location should be set").isEqualTo("s3://location");
+ assertThat(databaseInput.description()).as("Description should not be set").isNull();
+ assertThat(databaseInput.parameters())
.as("Parameters should be set")
.isEqualTo(ImmutableMap.of("key", "val"));
- Assertions.assertThat(databaseInput.name()).as("Database name should be set").isEqualTo("ns");
+ assertThat(databaseInput.name()).as("Database name should be set").isEqualTo("ns");
}
@Test
@@ -212,13 +207,13 @@
.build())
.build();
- Assertions.assertThat(actualTableInput.storageDescriptor().additionalLocations())
+ assertThat(actualTableInput.storageDescriptor().additionalLocations())
.as("additionalLocations should match")
.isEqualTo(expectedTableInput.storageDescriptor().additionalLocations());
- Assertions.assertThat(actualTableInput.storageDescriptor().location())
+ assertThat(actualTableInput.storageDescriptor().location())
.as("Location should match")
.isEqualTo(expectedTableInput.storageDescriptor().location());
- Assertions.assertThat(actualTableInput.storageDescriptor().columns())
+ assertThat(actualTableInput.storageDescriptor().columns())
.as("Columns should match")
.isEqualTo(expectedTableInput.storageDescriptor().columns());
}
@@ -278,13 +273,13 @@
.build())
.build();
- Assertions.assertThat(actualTableInput.storageDescriptor().additionalLocations())
+ assertThat(actualTableInput.storageDescriptor().additionalLocations())
.as("additionalLocations should match")
.isEqualTo(expectedTableInput.storageDescriptor().additionalLocations());
- Assertions.assertThat(actualTableInput.storageDescriptor().location())
+ assertThat(actualTableInput.storageDescriptor().location())
.as("Location should match")
.isEqualTo(expectedTableInput.storageDescriptor().location());
- Assertions.assertThat(actualTableInput.storageDescriptor().columns())
+ assertThat(actualTableInput.storageDescriptor().columns())
.as("Columns should match")
.isEqualTo(expectedTableInput.storageDescriptor().columns());
}
@@ -307,7 +302,7 @@
IcebergToGlueConverter.setTableInputInformation(actualTableInputBuilder, tableMetadata);
TableInput actualTableInput = actualTableInputBuilder.build();
- Assertions.assertThat(actualTableInput.description())
+ assertThat(actualTableInput.description())
.as("description should match")
.isEqualTo(tableDescription);
}
diff --git a/aws/src/test/java/org/apache/iceberg/aws/s3/TestS3FileIO.java b/aws/src/test/java/org/apache/iceberg/aws/s3/TestS3FileIO.java
index 26c9bc1..135eb76 100644
--- a/aws/src/test/java/org/apache/iceberg/aws/s3/TestS3FileIO.java
+++ b/aws/src/test/java/org/apache/iceberg/aws/s3/TestS3FileIO.java
@@ -18,6 +18,8 @@
*/
package org.apache.iceberg.aws.s3;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
import static org.mockito.AdditionalAnswers.delegatesTo;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.doReturn;
@@ -68,7 +70,6 @@
import org.apache.iceberg.relocated.com.google.common.collect.Streams;
import org.apache.iceberg.types.Types;
import org.apache.iceberg.util.SerializableSupplier;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Disabled;
@@ -131,25 +132,25 @@
random.nextBytes(expected);
InputFile in = s3FileIO.newInputFile(location);
- Assertions.assertThat(in.exists()).isFalse();
+ assertThat(in.exists()).isFalse();
OutputFile out = s3FileIO.newOutputFile(location);
try (OutputStream os = out.createOrOverwrite()) {
IOUtil.writeFully(os, ByteBuffer.wrap(expected));
}
- Assertions.assertThat(in.exists()).isTrue();
+ assertThat(in.exists()).isTrue();
byte[] actual = new byte[1024 * 1024];
try (InputStream is = in.newStream()) {
IOUtil.readFully(is, actual, 0, expected.length);
}
- Assertions.assertThat(actual).isEqualTo(expected);
+ assertThat(actual).isEqualTo(expected);
s3FileIO.deleteFile(in);
- Assertions.assertThat(s3FileIO.newInputFile(location).exists()).isFalse();
+ assertThat(s3FileIO.newInputFile(location).exists()).isFalse();
}
@Test
@@ -171,7 +172,7 @@
public void testDeleteEmptyList() throws IOException {
String location = "s3://bucket/path/to/file.txt";
InputFile in = s3FileIO.newInputFile(location);
- Assertions.assertThat(in.exists()).isFalse();
+ assertThat(in.exists()).isFalse();
OutputFile out = s3FileIO.newOutputFile(location);
try (OutputStream os = out.createOrOverwrite()) {
IOUtil.writeFully(os, ByteBuffer.wrap(new byte[1024 * 1024]));
@@ -179,9 +180,9 @@
s3FileIO.deleteFiles(Lists.newArrayList());
- Assertions.assertThat(s3FileIO.newInputFile(location).exists()).isTrue();
+ assertThat(s3FileIO.newInputFile(location).exists()).isTrue();
s3FileIO.deleteFile(in);
- Assertions.assertThat(s3FileIO.newInputFile(location).exists()).isFalse();
+ assertThat(s3FileIO.newInputFile(location).exists()).isFalse();
}
@Test
@@ -193,7 +194,7 @@
.build();
doReturn(deleteObjectsResponse).when(s3mock).deleteObjects((DeleteObjectsRequest) any());
- Assertions.assertThatThrownBy(() -> s3FileIO.deleteFiles(Lists.newArrayList(location)))
+ assertThatThrownBy(() -> s3FileIO.deleteFiles(Lists.newArrayList(location)))
.isInstanceOf(BulkDeletionFailureException.class)
.hasMessage("Failed to delete 1 files");
}
@@ -216,7 +217,7 @@
int expectedDeleteRequests = expectedNumberOfBatchesPerBucket * numBucketsForBatchDeletion;
verify(s3mock, times(expectedDeleteRequests)).deleteObjects((DeleteObjectsRequest) any());
for (String path : paths) {
- Assertions.assertThat(s3FileIO.newInputFile(path).exists()).isFalse();
+ assertThat(s3FileIO.newInputFile(path).exists()).isFalse();
}
}
@@ -232,7 +233,7 @@
byte[] data = TestHelpers.serialize(pre);
SerializableSupplier<S3Client> post = TestHelpers.deserialize(data);
- Assertions.assertThat(post.get().serviceName()).isEqualTo("s3");
+ assertThat(post.get().serviceName()).isEqualTo("s3");
}
@Test
@@ -248,13 +249,12 @@
String scalePrefix = String.format("%s/%s/", prefix, scale);
createRandomObjects(scalePrefix, scale);
- Assertions.assertThat(Streams.stream(s3FileIO.listPrefix(scalePrefix)).count())
+ assertThat(Streams.stream(s3FileIO.listPrefix(scalePrefix)).count())
.isEqualTo((long) scale);
});
long totalFiles = scaleSizes.stream().mapToLong(Integer::longValue).sum();
- Assertions.assertThat(Streams.stream(s3FileIO.listPrefix(prefix)).count())
- .isEqualTo(totalFiles);
+ assertThat(Streams.stream(s3FileIO.listPrefix(prefix)).count()).isEqualTo(totalFiles);
}
/**
@@ -273,8 +273,7 @@
createRandomObjects(scalePrefix, scale);
s3FileIO.deletePrefix(scalePrefix);
- Assertions.assertThat(Streams.stream(s3FileIO.listPrefix(scalePrefix)).count())
- .isEqualTo(0);
+ assertThat(Streams.stream(s3FileIO.listPrefix(scalePrefix)).count()).isEqualTo(0);
});
}
@@ -283,7 +282,7 @@
String location = "s3://bucket/path/to/data.parquet";
InputFile in = s3FileIO.newInputFile(location);
- Assertions.assertThatThrownBy(() -> in.newStream().read())
+ assertThatThrownBy(() -> in.newStream().read())
.isInstanceOf(NotFoundException.class)
.hasMessage("Location does not exist: " + location);
}
@@ -313,12 +312,12 @@
long start = System.currentTimeMillis();
// to test NotFoundException, load the table again. refreshing the existing table doesn't
// require reading metadata
- Assertions.assertThatThrownBy(() -> catalog.loadTable(ident))
+ assertThatThrownBy(() -> catalog.loadTable(ident))
.isInstanceOf(NotFoundException.class)
.hasMessageStartingWith("Location does not exist");
long duration = System.currentTimeMillis() - start;
- Assertions.assertThat(duration < 10_000).as("Should take less than 10 seconds").isTrue();
+ assertThat(duration < 10_000).as("Should take less than 10 seconds").isTrue();
}
}
@@ -333,8 +332,8 @@
String json = FileIOParser.toJson(s3FileIO);
try (FileIO deserialized = FileIOParser.fromJson(json, conf)) {
- Assertions.assertThat(deserialized).isInstanceOf(S3FileIO.class);
- Assertions.assertThat(deserialized.properties()).isEqualTo(s3FileIO.properties());
+ assertThat(deserialized).isInstanceOf(S3FileIO.class);
+ assertThat(deserialized.properties()).isEqualTo(s3FileIO.properties());
}
}
@@ -346,8 +345,7 @@
testS3FileIO.initialize(ImmutableMap.of("k1", "v1"));
FileIO roundTripSerializedFileIO = TestHelpers.KryoHelpers.roundTripSerialize(testS3FileIO);
- Assertions.assertThat(roundTripSerializedFileIO.properties())
- .isEqualTo(testS3FileIO.properties());
+ assertThat(roundTripSerializedFileIO.properties()).isEqualTo(testS3FileIO.properties());
}
@Test
@@ -358,8 +356,7 @@
testS3FileIO.initialize(ImmutableMap.of());
FileIO roundTripSerializedFileIO = TestHelpers.KryoHelpers.roundTripSerialize(testS3FileIO);
- Assertions.assertThat(roundTripSerializedFileIO.properties())
- .isEqualTo(testS3FileIO.properties());
+ assertThat(roundTripSerializedFileIO.properties()).isEqualTo(testS3FileIO.properties());
}
@Test
@@ -370,8 +367,7 @@
testS3FileIO.initialize(ImmutableMap.of("k1", "v1"));
FileIO roundTripSerializedFileIO = TestHelpers.roundTripSerialize(testS3FileIO);
- Assertions.assertThat(roundTripSerializedFileIO.properties())
- .isEqualTo(testS3FileIO.properties());
+ assertThat(roundTripSerializedFileIO.properties()).isEqualTo(testS3FileIO.properties());
}
@Test
@@ -384,7 +380,7 @@
.hiddenImpl(ResolvingFileIO.class, String.class)
.build(resolvingFileIO)
.invoke("s3://foo/bar");
- Assertions.assertThat(result).isInstanceOf(S3FileIO.class);
+ assertThat(result).isInstanceOf(S3FileIO.class);
}
@Test
@@ -405,7 +401,7 @@
InputFile inputFile = s3FileIO.newInputFile(dataFile);
reset(s3mock);
- Assertions.assertThat(inputFile.getLength())
+ assertThat(inputFile.getLength())
.as("Data file length should be determined from the file size stats")
.isEqualTo(123L);
verify(s3mock, never()).headObject(any(HeadObjectRequest.class));
@@ -431,7 +427,7 @@
InputFile inputFile = s3FileIO.newInputFile(manifest);
reset(s3mock);
- Assertions.assertThat(inputFile.getLength()).isEqualTo(manifest.length());
+ assertThat(inputFile.getLength()).isEqualTo(manifest.length());
verify(s3mock, never()).headObject(any(HeadObjectRequest.class));
}
diff --git a/aws/src/test/java/org/apache/iceberg/aws/s3/TestS3FileIOProperties.java b/aws/src/test/java/org/apache/iceberg/aws/s3/TestS3FileIOProperties.java
index c6d3776..f445a2d 100644
--- a/aws/src/test/java/org/apache/iceberg/aws/s3/TestS3FileIOProperties.java
+++ b/aws/src/test/java/org/apache/iceberg/aws/s3/TestS3FileIOProperties.java
@@ -18,6 +18,9 @@
*/
package org.apache.iceberg.aws.s3;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
+
import java.net.URI;
import java.util.Collections;
import java.util.List;
@@ -29,7 +32,6 @@
import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap;
import org.apache.iceberg.relocated.com.google.common.collect.Maps;
import org.apache.iceberg.relocated.com.google.common.collect.Sets;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
import org.mockito.ArgumentCaptor;
import org.mockito.Mockito;
@@ -51,71 +53,70 @@
public void testS3FileIOPropertiesDefaultValues() {
S3FileIOProperties s3FileIOProperties = new S3FileIOProperties();
- Assertions.assertThat(S3FileIOProperties.SSE_TYPE_NONE).isEqualTo(s3FileIOProperties.sseType());
+ assertThat(S3FileIOProperties.SSE_TYPE_NONE).isEqualTo(s3FileIOProperties.sseType());
- Assertions.assertThat(s3FileIOProperties.sseKey()).isNull();
- Assertions.assertThat(s3FileIOProperties.sseMd5()).isNull();
- Assertions.assertThat(s3FileIOProperties.accessKeyId()).isNull();
- Assertions.assertThat(s3FileIOProperties.secretAccessKey()).isNull();
- Assertions.assertThat(s3FileIOProperties.sessionToken()).isNull();
- Assertions.assertThat(s3FileIOProperties.acl()).isNull();
- Assertions.assertThat(s3FileIOProperties.endpoint()).isNull();
- Assertions.assertThat(s3FileIOProperties.writeStorageClass()).isNull();
+ assertThat(s3FileIOProperties.sseKey()).isNull();
+ assertThat(s3FileIOProperties.sseMd5()).isNull();
+ assertThat(s3FileIOProperties.accessKeyId()).isNull();
+ assertThat(s3FileIOProperties.secretAccessKey()).isNull();
+ assertThat(s3FileIOProperties.sessionToken()).isNull();
+ assertThat(s3FileIOProperties.acl()).isNull();
+ assertThat(s3FileIOProperties.endpoint()).isNull();
+ assertThat(s3FileIOProperties.writeStorageClass()).isNull();
- Assertions.assertThat(S3FileIOProperties.PRELOAD_CLIENT_ENABLED_DEFAULT)
+ assertThat(S3FileIOProperties.PRELOAD_CLIENT_ENABLED_DEFAULT)
.isEqualTo(s3FileIOProperties.isPreloadClientEnabled());
- Assertions.assertThat(S3FileIOProperties.DUALSTACK_ENABLED_DEFAULT)
+ assertThat(S3FileIOProperties.DUALSTACK_ENABLED_DEFAULT)
.isEqualTo(s3FileIOProperties.isDualStackEnabled());
- Assertions.assertThat(S3FileIOProperties.PATH_STYLE_ACCESS_DEFAULT)
+ assertThat(S3FileIOProperties.PATH_STYLE_ACCESS_DEFAULT)
.isEqualTo(s3FileIOProperties.isPathStyleAccess());
- Assertions.assertThat(S3FileIOProperties.USE_ARN_REGION_ENABLED_DEFAULT)
+ assertThat(S3FileIOProperties.USE_ARN_REGION_ENABLED_DEFAULT)
.isEqualTo(s3FileIOProperties.isUseArnRegionEnabled());
- Assertions.assertThat(S3FileIOProperties.ACCELERATION_ENABLED_DEFAULT)
+ assertThat(S3FileIOProperties.ACCELERATION_ENABLED_DEFAULT)
.isEqualTo(s3FileIOProperties.isAccelerationEnabled());
- Assertions.assertThat(S3FileIOProperties.REMOTE_SIGNING_ENABLED_DEFAULT)
+ assertThat(S3FileIOProperties.REMOTE_SIGNING_ENABLED_DEFAULT)
.isEqualTo(s3FileIOProperties.isRemoteSigningEnabled());
- Assertions.assertThat(Runtime.getRuntime().availableProcessors())
+ assertThat(Runtime.getRuntime().availableProcessors())
.isEqualTo(s3FileIOProperties.multipartUploadThreads());
- Assertions.assertThat(S3FileIOProperties.MULTIPART_SIZE_DEFAULT)
+ assertThat(S3FileIOProperties.MULTIPART_SIZE_DEFAULT)
.isEqualTo(s3FileIOProperties.multiPartSize());
- Assertions.assertThat(S3FileIOProperties.MULTIPART_THRESHOLD_FACTOR_DEFAULT)
+ assertThat(S3FileIOProperties.MULTIPART_THRESHOLD_FACTOR_DEFAULT)
.isEqualTo(s3FileIOProperties.multipartThresholdFactor());
- Assertions.assertThat(S3FileIOProperties.DELETE_BATCH_SIZE_DEFAULT)
+ assertThat(S3FileIOProperties.DELETE_BATCH_SIZE_DEFAULT)
.isEqualTo(s3FileIOProperties.deleteBatchSize());
- Assertions.assertThat(System.getProperty("java.io.tmpdir"))
+ assertThat(System.getProperty("java.io.tmpdir"))
.isEqualTo(s3FileIOProperties.stagingDirectory());
- Assertions.assertThat(S3FileIOProperties.CHECKSUM_ENABLED_DEFAULT)
+ assertThat(S3FileIOProperties.CHECKSUM_ENABLED_DEFAULT)
.isEqualTo(s3FileIOProperties.isChecksumEnabled());
- Assertions.assertThat(Sets.newHashSet()).isEqualTo(s3FileIOProperties.writeTags());
+ assertThat(Sets.newHashSet()).isEqualTo(s3FileIOProperties.writeTags());
- Assertions.assertThat(S3FileIOProperties.WRITE_TABLE_TAG_ENABLED_DEFAULT)
+ assertThat(S3FileIOProperties.WRITE_TABLE_TAG_ENABLED_DEFAULT)
.isEqualTo(s3FileIOProperties.writeTableTagEnabled());
- Assertions.assertThat(S3FileIOProperties.WRITE_NAMESPACE_TAG_ENABLED_DEFAULT)
+ assertThat(S3FileIOProperties.WRITE_NAMESPACE_TAG_ENABLED_DEFAULT)
.isEqualTo(s3FileIOProperties.isWriteNamespaceTagEnabled());
- Assertions.assertThat(Sets.newHashSet()).isEqualTo(s3FileIOProperties.deleteTags());
+ assertThat(Sets.newHashSet()).isEqualTo(s3FileIOProperties.deleteTags());
- Assertions.assertThat(Runtime.getRuntime().availableProcessors())
+ assertThat(Runtime.getRuntime().availableProcessors())
.isEqualTo(s3FileIOProperties.deleteThreads());
- Assertions.assertThat(S3FileIOProperties.DELETE_ENABLED_DEFAULT)
+ assertThat(S3FileIOProperties.DELETE_ENABLED_DEFAULT)
.isEqualTo(s3FileIOProperties.isDeleteEnabled());
- Assertions.assertThat(Collections.emptyMap())
- .isEqualTo(s3FileIOProperties.bucketToAccessPointMapping());
+ assertThat(Collections.emptyMap()).isEqualTo(s3FileIOProperties.bucketToAccessPointMapping());
}
@Test
@@ -123,85 +124,80 @@
Map<String, String> map = getTestProperties();
S3FileIOProperties s3FileIOProperties = new S3FileIOProperties(map);
- Assertions.assertThat(map)
- .containsEntry(S3FileIOProperties.SSE_TYPE, s3FileIOProperties.sseType());
+ assertThat(map).containsEntry(S3FileIOProperties.SSE_TYPE, s3FileIOProperties.sseType());
- Assertions.assertThat(map)
- .containsEntry(S3FileIOProperties.SSE_KEY, s3FileIOProperties.sseKey());
+ assertThat(map).containsEntry(S3FileIOProperties.SSE_KEY, s3FileIOProperties.sseKey());
- Assertions.assertThat(map)
- .containsEntry(S3FileIOProperties.SSE_MD5, s3FileIOProperties.sseMd5());
+ assertThat(map).containsEntry(S3FileIOProperties.SSE_MD5, s3FileIOProperties.sseMd5());
- Assertions.assertThat(map)
+ assertThat(map)
.containsEntry(S3FileIOProperties.ACCESS_KEY_ID, s3FileIOProperties.accessKeyId());
- Assertions.assertThat(map)
+ assertThat(map)
.containsEntry(S3FileIOProperties.SECRET_ACCESS_KEY, s3FileIOProperties.secretAccessKey());
- Assertions.assertThat(map)
+ assertThat(map)
.containsEntry(S3FileIOProperties.SESSION_TOKEN, s3FileIOProperties.sessionToken());
- Assertions.assertThat(map)
- .containsEntry(S3FileIOProperties.ACL, s3FileIOProperties.acl().toString());
+ assertThat(map).containsEntry(S3FileIOProperties.ACL, s3FileIOProperties.acl().toString());
- Assertions.assertThat(map)
- .containsEntry(S3FileIOProperties.ENDPOINT, s3FileIOProperties.endpoint());
+ assertThat(map).containsEntry(S3FileIOProperties.ENDPOINT, s3FileIOProperties.endpoint());
- Assertions.assertThat(map)
+ assertThat(map)
.containsEntry(
S3FileIOProperties.PRELOAD_CLIENT_ENABLED,
String.valueOf(s3FileIOProperties.isPreloadClientEnabled()));
- Assertions.assertThat(map)
+ assertThat(map)
.containsEntry(
S3FileIOProperties.DUALSTACK_ENABLED,
String.valueOf(s3FileIOProperties.isDualStackEnabled()));
- Assertions.assertThat(map)
+ assertThat(map)
.containsEntry(
S3FileIOProperties.PATH_STYLE_ACCESS,
String.valueOf(s3FileIOProperties.isPathStyleAccess()));
- Assertions.assertThat(map)
+ assertThat(map)
.containsEntry(
S3FileIOProperties.USE_ARN_REGION_ENABLED,
String.valueOf(s3FileIOProperties.isUseArnRegionEnabled()));
- Assertions.assertThat(map)
+ assertThat(map)
.containsEntry(
S3FileIOProperties.ACCELERATION_ENABLED,
String.valueOf(s3FileIOProperties.isAccelerationEnabled()));
- Assertions.assertThat(map)
+ assertThat(map)
.containsEntry(
S3FileIOProperties.REMOTE_SIGNING_ENABLED,
String.valueOf(s3FileIOProperties.isRemoteSigningEnabled()));
- Assertions.assertThat(map)
+ assertThat(map)
.containsEntry(
S3FileIOProperties.MULTIPART_UPLOAD_THREADS,
String.valueOf(s3FileIOProperties.multipartUploadThreads()));
- Assertions.assertThat(map)
+ assertThat(map)
.containsEntry(
S3FileIOProperties.MULTIPART_SIZE, String.valueOf(s3FileIOProperties.multiPartSize()));
- Assertions.assertThat(map)
+ assertThat(map)
.containsEntry(
S3FileIOProperties.MULTIPART_THRESHOLD_FACTOR,
String.valueOf(s3FileIOProperties.multipartThresholdFactor()));
- Assertions.assertThat(map)
+ assertThat(map)
.containsEntry(
S3FileIOProperties.DELETE_BATCH_SIZE,
String.valueOf(s3FileIOProperties.deleteBatchSize()));
- Assertions.assertThat(map)
+ assertThat(map)
.containsEntry(
S3FileIOProperties.STAGING_DIRECTORY,
String.valueOf(s3FileIOProperties.stagingDirectory()));
- Assertions.assertThat(map)
+ assertThat(map)
.containsEntry(
S3FileIOProperties.CHECKSUM_ENABLED,
String.valueOf(s3FileIOProperties.isChecksumEnabled()));
@@ -210,15 +206,15 @@
s3FileIOProperties.writeTags().stream().map(Tag::value).collect(Collectors.toList());
writeTagValues.forEach(
value ->
- Assertions.assertThat(map)
+ assertThat(map)
.containsEntry(S3FileIOProperties.WRITE_TAGS_PREFIX + S3_WRITE_TAG_KEY, value));
- Assertions.assertThat(map)
+ assertThat(map)
.containsEntry(
S3FileIOProperties.WRITE_TABLE_TAG_ENABLED,
String.valueOf(s3FileIOProperties.writeTableTagEnabled()));
- Assertions.assertThat(map)
+ assertThat(map)
.containsEntry(
S3FileIOProperties.WRITE_NAMESPACE_TAG_ENABLED,
String.valueOf(s3FileIOProperties.isWriteNamespaceTagEnabled()));
@@ -227,14 +223,14 @@
s3FileIOProperties.deleteTags().stream().map(Tag::value).collect(Collectors.toList());
deleteTagValues.forEach(
value ->
- Assertions.assertThat(map)
+ assertThat(map)
.containsEntry(S3FileIOProperties.DELETE_TAGS_PREFIX + S3_DELETE_TAG_KEY, value));
- Assertions.assertThat(map)
+ assertThat(map)
.containsEntry(
S3FileIOProperties.DELETE_THREADS, String.valueOf(s3FileIOProperties.deleteThreads()));
- Assertions.assertThat(map)
+ assertThat(map)
.containsEntry(
S3FileIOProperties.DELETE_ENABLED,
String.valueOf(s3FileIOProperties.isDeleteEnabled()));
@@ -244,22 +240,21 @@
.values()
.forEach(
value ->
- Assertions.assertThat(map)
+ assertThat(map)
.containsEntry(
S3FileIOProperties.ACCESS_POINTS_PREFIX + S3_TEST_BUCKET_NAME, value));
- Assertions.assertThat(map)
+ assertThat(map)
.containsEntry(
S3FileIOProperties.PRELOAD_CLIENT_ENABLED,
String.valueOf(s3FileIOProperties.isPreloadClientEnabled()));
- Assertions.assertThat(map)
+ assertThat(map)
.containsEntry(
S3FileIOProperties.REMOTE_SIGNING_ENABLED,
String.valueOf(s3FileIOProperties.isRemoteSigningEnabled()));
- Assertions.assertThat(map)
- .containsEntry(S3FileIOProperties.WRITE_STORAGE_CLASS, "INTELLIGENT_TIERING");
+ assertThat(map).containsEntry(S3FileIOProperties.WRITE_STORAGE_CLASS, "INTELLIGENT_TIERING");
}
@Test
@@ -267,7 +262,7 @@
Map<String, String> map = Maps.newHashMap();
map.put(S3FileIOProperties.ACCESS_KEY_ID, "s3-access-key");
- Assertions.assertThatThrownBy(() -> new S3FileIOProperties(map))
+ assertThatThrownBy(() -> new S3FileIOProperties(map))
.isInstanceOf(ValidationException.class)
.hasMessage("S3 client access key ID and secret access key must be set at the same time");
}
@@ -277,7 +272,7 @@
Map<String, String> map = Maps.newHashMap();
map.put(S3FileIOProperties.SECRET_ACCESS_KEY, "s3-secret-key");
- Assertions.assertThatThrownBy(() -> new S3FileIOProperties(map))
+ assertThatThrownBy(() -> new S3FileIOProperties(map))
.isInstanceOf(ValidationException.class)
.hasMessage("S3 client access key ID and secret access key must be set at the same time");
}
@@ -287,7 +282,7 @@
Map<String, String> map = Maps.newHashMap();
map.put(S3FileIOProperties.SSE_TYPE, S3FileIOProperties.SSE_TYPE_CUSTOM);
- Assertions.assertThatThrownBy(() -> new S3FileIOProperties(map))
+ assertThatThrownBy(() -> new S3FileIOProperties(map))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot initialize SSE-C S3FileIO with null encryption key");
}
@@ -298,7 +293,7 @@
map.put(S3FileIOProperties.SSE_TYPE, S3FileIOProperties.SSE_TYPE_CUSTOM);
map.put(S3FileIOProperties.SSE_KEY, "something");
- Assertions.assertThatThrownBy(() -> new S3FileIOProperties(map))
+ assertThatThrownBy(() -> new S3FileIOProperties(map))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot initialize SSE-C S3FileIO with null encryption key MD5");
}
@@ -308,7 +303,7 @@
Map<String, String> map = Maps.newHashMap();
map.put(S3FileIOProperties.ACL, ObjectCannedACL.AUTHENTICATED_READ.toString());
S3FileIOProperties properties = new S3FileIOProperties(map);
- Assertions.assertThat(ObjectCannedACL.AUTHENTICATED_READ).isEqualTo(properties.acl());
+ assertThat(ObjectCannedACL.AUTHENTICATED_READ).isEqualTo(properties.acl());
}
@Test
@@ -316,7 +311,7 @@
Map<String, String> map = Maps.newHashMap();
map.put(S3FileIOProperties.ACL, "bad-input");
- Assertions.assertThatThrownBy(() -> new S3FileIOProperties(map))
+ assertThatThrownBy(() -> new S3FileIOProperties(map))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot support S3 CannedACL bad-input");
}
@@ -326,7 +321,7 @@
Map<String, String> map = Maps.newHashMap();
map.put(S3FileIOProperties.MULTIPART_SIZE, "1");
- Assertions.assertThatThrownBy(() -> new S3FileIOProperties(map))
+ assertThatThrownBy(() -> new S3FileIOProperties(map))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Minimum multipart upload object size must be larger than 5 MB.");
}
@@ -336,7 +331,7 @@
Map<String, String> map = Maps.newHashMap();
map.put(S3FileIOProperties.MULTIPART_SIZE, "5368709120"); // 5GB
- Assertions.assertThatThrownBy(() -> new S3FileIOProperties(map))
+ assertThatThrownBy(() -> new S3FileIOProperties(map))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Input malformed or exceeded maximum multipart upload size 5GB: 5368709120");
}
@@ -346,7 +341,7 @@
Map<String, String> map = Maps.newHashMap();
map.put(S3FileIOProperties.MULTIPART_THRESHOLD_FACTOR, "0.9");
- Assertions.assertThatThrownBy(() -> new S3FileIOProperties(map))
+ assertThatThrownBy(() -> new S3FileIOProperties(map))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Multipart threshold factor must be >= to 1.0");
}
@@ -356,7 +351,7 @@
Map<String, String> map = Maps.newHashMap();
map.put(S3FileIOProperties.DELETE_BATCH_SIZE, "2000");
- Assertions.assertThatThrownBy(() -> new S3FileIOProperties(map))
+ assertThatThrownBy(() -> new S3FileIOProperties(map))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Deletion batch size must be between 1 and 1000");
}
@@ -366,7 +361,7 @@
Map<String, String> map = Maps.newHashMap();
map.put(S3FileIOProperties.DELETE_BATCH_SIZE, "0");
- Assertions.assertThatThrownBy(() -> new S3FileIOProperties(map))
+ assertThatThrownBy(() -> new S3FileIOProperties(map))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Deletion batch size must be between 1 and 1000");
}
@@ -448,13 +443,13 @@
Mockito.verify(mockA).serviceConfiguration(s3ConfigurationCaptor.capture());
S3Configuration s3Configuration = s3ConfigurationCaptor.getValue();
- Assertions.assertThat(s3Configuration.pathStyleAccessEnabled())
+ assertThat(s3Configuration.pathStyleAccessEnabled())
.as("s3 path style access enabled parameter should be set to true")
.isTrue();
- Assertions.assertThat(s3Configuration.useArnRegionEnabled())
+ assertThat(s3Configuration.useArnRegionEnabled())
.as("s3 use arn region enabled parameter should be set to true")
.isTrue();
- Assertions.assertThat(s3Configuration.accelerateModeEnabled())
+ assertThat(s3Configuration.accelerateModeEnabled())
.as("s3 acceleration mode enabled parameter should be set to true")
.isFalse();
}
diff --git a/aws/src/test/java/org/apache/iceberg/aws/s3/TestS3InputStream.java b/aws/src/test/java/org/apache/iceberg/aws/s3/TestS3InputStream.java
index feaac4e..95f4d09 100644
--- a/aws/src/test/java/org/apache/iceberg/aws/s3/TestS3InputStream.java
+++ b/aws/src/test/java/org/apache/iceberg/aws/s3/TestS3InputStream.java
@@ -18,6 +18,9 @@
*/
package org.apache.iceberg.aws.s3;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
+
import com.adobe.testing.s3mock.junit5.S3MockExtension;
import java.io.IOException;
import java.util.Arrays;
@@ -25,7 +28,6 @@
import org.apache.iceberg.io.IOUtil;
import org.apache.iceberg.io.RangeReadable;
import org.apache.iceberg.io.SeekableInputStream;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
@@ -88,7 +90,7 @@
SeekableInputStream in, long rangeStart, int size, byte[] original, boolean buffered)
throws IOException {
in.seek(rangeStart);
- Assertions.assertThat(in.getPos()).isEqualTo(rangeStart);
+ assertThat(in.getPos()).isEqualTo(rangeStart);
long rangeEnd = rangeStart + size;
byte[] actual = new byte[size];
@@ -102,9 +104,8 @@
}
}
- Assertions.assertThat(in.getPos()).isEqualTo(rangeEnd);
- Assertions.assertThat(actual)
- .isEqualTo(Arrays.copyOfRange(original, (int) rangeStart, (int) rangeEnd));
+ assertThat(in.getPos()).isEqualTo(rangeEnd);
+ assertThat(actual).isEqualTo(Arrays.copyOfRange(original, (int) rangeStart, (int) rangeEnd));
}
@Test
@@ -145,7 +146,7 @@
throws IOException {
in.readFully(position, buffer, offset, length);
- Assertions.assertThat(Arrays.copyOfRange(buffer, offset, offset + length))
+ assertThat(Arrays.copyOfRange(buffer, offset, offset + length))
.isEqualTo(Arrays.copyOfRange(original, offset, offset + length));
}
@@ -154,7 +155,7 @@
S3URI uri = new S3URI("s3://bucket/path/to/closed.dat");
SeekableInputStream closed = new S3InputStream(s3, uri);
closed.close();
- Assertions.assertThatThrownBy(() -> closed.seek(0))
+ assertThatThrownBy(() -> closed.seek(0))
.isInstanceOf(IllegalStateException.class)
.hasMessage("already closed");
}
@@ -170,7 +171,7 @@
in.seek(expected.length / 2);
byte[] actual = new byte[expected.length / 2];
IOUtil.readFully(in, actual, 0, expected.length / 2);
- Assertions.assertThat(actual)
+ assertThat(actual)
.isEqualTo(Arrays.copyOfRange(expected, expected.length / 2, expected.length));
}
}
diff --git a/aws/src/test/java/org/apache/iceberg/aws/s3/TestS3OutputStream.java b/aws/src/test/java/org/apache/iceberg/aws/s3/TestS3OutputStream.java
index 6f8d1d6..6fbe59e 100644
--- a/aws/src/test/java/org/apache/iceberg/aws/s3/TestS3OutputStream.java
+++ b/aws/src/test/java/org/apache/iceberg/aws/s3/TestS3OutputStream.java
@@ -19,6 +19,10 @@
package org.apache.iceberg.aws.s3;
import static org.apache.iceberg.metrics.MetricsContext.nullMetrics;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatNoException;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
+import static org.assertj.core.api.Assertions.fail;
import static org.mockito.AdditionalAnswers.delegatesTo;
import static org.mockito.Mockito.any;
import static org.mockito.Mockito.doThrow;
@@ -43,7 +47,6 @@
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
@@ -123,7 +126,7 @@
RuntimeException mockException = new RuntimeException("mock uploadPart failure");
doThrow(mockException).when(s3mock).uploadPart((UploadPartRequest) any(), (RequestBody) any());
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() -> {
try (S3OutputStream stream =
new S3OutputStream(s3mock, randomURI(), properties, nullMetrics())) {
@@ -143,7 +146,7 @@
.when(s3mock)
.completeMultipartUpload((CompleteMultipartUploadRequest) any());
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() -> {
try (S3OutputStream stream =
new S3OutputStream(s3mock, randomURI(), properties, nullMetrics())) {
@@ -188,11 +191,11 @@
.putObject(any(PutObjectRequest.class), any(RequestBody.class));
S3OutputStream stream = new S3OutputStream(s3mock, randomURI(), properties, nullMetrics());
- Assertions.assertThatThrownBy(stream::close)
+ assertThatThrownBy(stream::close)
.isInstanceOf(mockException.getClass())
.hasMessageContaining(mockException.getMessage());
- Assertions.assertThatNoException().isThrownBy(stream::close);
+ assertThatNoException().isThrownBy(stream::close);
}
private void writeTest() {
@@ -252,8 +255,7 @@
for (int i = 0; i < uploadPartRequests.size(); ++i) {
int offset = i * FIVE_MBS;
int len = (i + 1) * FIVE_MBS - 1 > data.length ? data.length - offset : FIVE_MBS;
- Assertions.assertThat(uploadPartRequests.get(i).contentMD5())
- .isEqualTo(getDigest(data, offset, len));
+ assertThat(uploadPartRequests.get(i).contentMD5()).isEqualTo(getDigest(data, offset, len));
}
}
}
@@ -262,8 +264,7 @@
byte[] data, ArgumentCaptor<PutObjectRequest> putObjectRequestArgumentCaptor) {
if (properties.isChecksumEnabled()) {
List<PutObjectRequest> putObjectRequests = putObjectRequestArgumentCaptor.getAllValues();
- Assertions.assertThat(putObjectRequests.get(0).contentMD5())
- .isEqualTo(getDigest(data, 0, data.length));
+ assertThat(putObjectRequests.get(0).contentMD5()).isEqualTo(getDigest(data, 0, data.length));
}
}
@@ -271,7 +272,7 @@
if (properties.isChecksumEnabled()) {
List<PutObjectRequest> putObjectRequests = putObjectRequestArgumentCaptor.getAllValues();
String tagging = putObjectRequests.get(0).tagging();
- Assertions.assertThat(getTags(properties.writeTags())).isEqualTo(tagging);
+ assertThat(getTags(properties.writeTags())).isEqualTo(tagging);
}
}
@@ -285,7 +286,7 @@
md5.update(data, offset, length);
return BinaryUtils.toBase64(md5.digest());
} catch (NoSuchAlgorithmException e) {
- Assertions.fail("Failed to get MD5 MessageDigest. %s", e);
+ fail("Failed to get MD5 MessageDigest. %s", e);
}
return null;
}
@@ -294,11 +295,11 @@
try (S3OutputStream stream = new S3OutputStream(client, uri, properties, nullMetrics())) {
if (arrayWrite) {
stream.write(data);
- Assertions.assertThat(stream.getPos()).isEqualTo(data.length);
+ assertThat(stream.getPos()).isEqualTo(data.length);
} else {
for (int i = 0; i < data.length; i++) {
stream.write(data[i]);
- Assertions.assertThat(stream.getPos()).isEqualTo(i + 1);
+ assertThat(stream.getPos()).isEqualTo(i + 1);
}
}
} catch (IOException e) {
@@ -306,11 +307,11 @@
}
byte[] actual = readS3Data(uri);
- Assertions.assertThat(actual).isEqualTo(data);
+ assertThat(actual).isEqualTo(data);
// Verify all staging files are cleaned up
try {
- Assertions.assertThat(Files.list(tmpDir)).isEmpty();
+ assertThat(Files.list(tmpDir)).isEmpty();
} catch (IOException e) {
throw new UncheckedIOException(e);
}
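
Besides assertThat and assertThatThrownBy, the TestS3OutputStream changes above also switch assertThatNoException and fail over to static imports, and the call sites read the same way once the qualifier is gone. A small illustrative sketch, assuming only AssertJ and JUnit 5 on the classpath; the ByteArrayOutputStream and test class here are stand-ins, not the S3 stream from this commit:

    import static org.assertj.core.api.Assertions.assertThatNoException;
    import static org.assertj.core.api.Assertions.fail;

    import java.io.ByteArrayOutputStream;
    import java.security.MessageDigest;
    import java.security.NoSuchAlgorithmException;
    import org.junit.jupiter.api.Test;

    public class TestNoExceptionExample {
      @Test
      public void testCloseDoesNotThrow() {
        ByteArrayOutputStream stream = new ByteArrayOutputStream();
        // before: Assertions.assertThatNoException().isThrownBy(stream::close);
        assertThatNoException().isThrownBy(stream::close);
      }

      @Test
      public void testFailFormatting() {
        try {
          MessageDigest.getInstance("MD5");
        } catch (NoSuchAlgorithmException e) {
          // before: Assertions.fail("Failed to get MD5 MessageDigest. %s", e);
          fail("Failed to get MD5 MessageDigest. %s", e);
        }
      }
    }
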
diff --git a/aws/src/test/java/org/apache/iceberg/aws/s3/TestS3RequestUtil.java b/aws/src/test/java/org/apache/iceberg/aws/s3/TestS3RequestUtil.java
index 379fde9..67c67c1 100644
--- a/aws/src/test/java/org/apache/iceberg/aws/s3/TestS3RequestUtil.java
+++ b/aws/src/test/java/org/apache/iceberg/aws/s3/TestS3RequestUtil.java
@@ -18,7 +18,8 @@
*/
package org.apache.iceberg.aws.s3;
-import org.assertj.core.api.Assertions;
+import static org.assertj.core.api.Assertions.assertThat;
+
import org.junit.jupiter.api.Test;
import software.amazon.awssdk.services.s3.model.S3Request;
import software.amazon.awssdk.services.s3.model.ServerSideEncryption;
@@ -44,11 +45,11 @@
this::setCustomAlgorithm,
this::setCustomKey,
this::setCustomMd5);
- Assertions.assertThat(serverSideEncryption).isNull();
- Assertions.assertThat(kmsKeyId).isNull();
- Assertions.assertThat(customAlgorithm).isEqualTo(ServerSideEncryption.AES256.name());
- Assertions.assertThat(customKey).isEqualTo("key");
- Assertions.assertThat(customMd5).isEqualTo("md5");
+ assertThat(serverSideEncryption).isNull();
+ assertThat(kmsKeyId).isNull();
+ assertThat(customAlgorithm).isEqualTo(ServerSideEncryption.AES256.name());
+ assertThat(customKey).isEqualTo("key");
+ assertThat(customMd5).isEqualTo("md5");
}
@Test
@@ -62,11 +63,11 @@
this::setCustomAlgorithm,
this::setCustomKey,
this::setCustomMd5);
- Assertions.assertThat(serverSideEncryption).isEqualTo(ServerSideEncryption.AES256);
- Assertions.assertThat(kmsKeyId).isNull();
- Assertions.assertThat(customAlgorithm).isNull();
- Assertions.assertThat(customKey).isNull();
- Assertions.assertThat(customMd5).isNull();
+ assertThat(serverSideEncryption).isEqualTo(ServerSideEncryption.AES256);
+ assertThat(kmsKeyId).isNull();
+ assertThat(customAlgorithm).isNull();
+ assertThat(customKey).isNull();
+ assertThat(customMd5).isNull();
}
@Test
@@ -81,11 +82,11 @@
this::setCustomAlgorithm,
this::setCustomKey,
this::setCustomMd5);
- Assertions.assertThat(serverSideEncryption).isEqualTo(ServerSideEncryption.AWS_KMS);
- Assertions.assertThat(kmsKeyId).isEqualTo("key");
- Assertions.assertThat(customAlgorithm).isNull();
- Assertions.assertThat(customKey).isNull();
- Assertions.assertThat(customMd5).isNull();
+ assertThat(serverSideEncryption).isEqualTo(ServerSideEncryption.AWS_KMS);
+ assertThat(kmsKeyId).isEqualTo("key");
+ assertThat(customAlgorithm).isNull();
+ assertThat(customKey).isNull();
+ assertThat(customMd5).isNull();
}
@Test
@@ -100,11 +101,11 @@
this::setCustomAlgorithm,
this::setCustomKey,
this::setCustomMd5);
- Assertions.assertThat(serverSideEncryption).isEqualTo(ServerSideEncryption.AWS_KMS_DSSE);
- Assertions.assertThat(kmsKeyId).isEqualTo("key");
- Assertions.assertThat(customAlgorithm).isNull();
- Assertions.assertThat(customKey).isNull();
- Assertions.assertThat(customMd5).isNull();
+ assertThat(serverSideEncryption).isEqualTo(ServerSideEncryption.AWS_KMS_DSSE);
+ assertThat(kmsKeyId).isEqualTo("key");
+ assertThat(customAlgorithm).isNull();
+ assertThat(customKey).isNull();
+ assertThat(customMd5).isNull();
}
@Test
@@ -119,11 +120,11 @@
this::setCustomAlgorithm,
this::setCustomKey,
this::setCustomMd5);
- Assertions.assertThat(serverSideEncryption).isNull();
- Assertions.assertThat(kmsKeyId).isNull();
- Assertions.assertThat(customAlgorithm).isNull();
- Assertions.assertThat(customKey).isNull();
- Assertions.assertThat(customMd5).isNull();
+ assertThat(serverSideEncryption).isNull();
+ assertThat(kmsKeyId).isNull();
+ assertThat(customAlgorithm).isNull();
+ assertThat(customKey).isNull();
+ assertThat(customMd5).isNull();
}
public S3Request.Builder setCustomAlgorithm(String algorithm) {
diff --git a/aws/src/test/java/org/apache/iceberg/aws/s3/TestS3URI.java b/aws/src/test/java/org/apache/iceberg/aws/s3/TestS3URI.java
index 803cbe0..383ff67 100644
--- a/aws/src/test/java/org/apache/iceberg/aws/s3/TestS3URI.java
+++ b/aws/src/test/java/org/apache/iceberg/aws/s3/TestS3URI.java
@@ -18,11 +18,13 @@
*/
package org.apache.iceberg.aws.s3;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
+
import java.util.Map;
import org.apache.iceberg.exceptions.ValidationException;
import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap;
import org.apache.iceberg.relocated.com.google.common.collect.Lists;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
public class TestS3URI {
@@ -32,9 +34,9 @@
String p1 = "s3://bucket/path/to/file";
S3URI uri1 = new S3URI(p1);
- Assertions.assertThat(uri1.bucket()).isEqualTo("bucket");
- Assertions.assertThat(uri1.key()).isEqualTo("path/to/file");
- Assertions.assertThat(uri1.toString()).isEqualTo(p1);
+ assertThat(uri1.bucket()).isEqualTo("bucket");
+ assertThat(uri1.key()).isEqualTo("path/to/file");
+ assertThat(uri1.toString()).isEqualTo(p1);
}
@Test
@@ -42,15 +44,15 @@
String p1 = "s3://bucket/path%20to%20file";
S3URI uri1 = new S3URI(p1);
- Assertions.assertThat(uri1.bucket()).isEqualTo("bucket");
- Assertions.assertThat(uri1.key()).isEqualTo("path%20to%20file");
- Assertions.assertThat(uri1.toString()).isEqualTo(p1);
+ assertThat(uri1.bucket()).isEqualTo("bucket");
+ assertThat(uri1.key()).isEqualTo("path%20to%20file");
+ assertThat(uri1.toString()).isEqualTo(p1);
}
@Test
public void testMissingScheme() {
- Assertions.assertThatThrownBy(() -> new S3URI("/path/to/file"))
+ assertThatThrownBy(() -> new S3URI("/path/to/file"))
.isInstanceOf(ValidationException.class)
.hasMessage("Invalid S3 URI, cannot determine scheme: /path/to/file");
}
@@ -60,9 +62,9 @@
String p1 = "s3://bucket";
S3URI url1 = new S3URI(p1);
- Assertions.assertThat(url1.bucket()).isEqualTo("bucket");
- Assertions.assertThat(url1.key()).isEqualTo("");
- Assertions.assertThat(url1.toString()).isEqualTo(p1);
+ assertThat(url1.bucket()).isEqualTo("bucket");
+ assertThat(url1.key()).isEqualTo("");
+ assertThat(url1.toString()).isEqualTo(p1);
}
@Test
@@ -70,17 +72,17 @@
String p1 = "s3://bucket/path/to/file?query=foo#bar";
S3URI uri1 = new S3URI(p1);
- Assertions.assertThat(uri1.bucket()).isEqualTo("bucket");
- Assertions.assertThat(uri1.key()).isEqualTo("path/to/file");
- Assertions.assertThat(uri1.toString()).isEqualTo(p1);
+ assertThat(uri1.bucket()).isEqualTo("bucket");
+ assertThat(uri1.key()).isEqualTo("path/to/file");
+ assertThat(uri1.toString()).isEqualTo(p1);
}
@Test
public void testValidSchemes() {
for (String scheme : Lists.newArrayList("https", "s3", "s3a", "s3n", "gs")) {
S3URI uri = new S3URI(scheme + "://bucket/path/to/file");
- Assertions.assertThat(uri.bucket()).isEqualTo("bucket");
- Assertions.assertThat(uri.key()).isEqualTo("path/to/file");
+ assertThat(uri.bucket()).isEqualTo("bucket");
+ assertThat(uri.key()).isEqualTo("path/to/file");
}
}
@@ -90,8 +92,8 @@
Map<String, String> bucketToAccessPointMapping = ImmutableMap.of("bucket", "access-point");
S3URI uri1 = new S3URI(p1, bucketToAccessPointMapping);
- Assertions.assertThat(uri1.bucket()).isEqualTo("access-point");
- Assertions.assertThat(uri1.key()).isEqualTo("path/to/file");
- Assertions.assertThat(uri1.toString()).isEqualTo(p1);
+ assertThat(uri1.bucket()).isEqualTo("access-point");
+ assertThat(uri1.key()).isEqualTo("path/to/file");
+ assertThat(uri1.toString()).isEqualTo(p1);
}
}
diff --git a/aws/src/test/java/org/apache/iceberg/aws/s3/signer/TestS3SignRequestParser.java b/aws/src/test/java/org/apache/iceberg/aws/s3/signer/TestS3SignRequestParser.java
index 2c5f74a..75ae2d8 100644
--- a/aws/src/test/java/org/apache/iceberg/aws/s3/signer/TestS3SignRequestParser.java
+++ b/aws/src/test/java/org/apache/iceberg/aws/s3/signer/TestS3SignRequestParser.java
@@ -18,43 +18,45 @@
*/
package org.apache.iceberg.aws.s3.signer;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
+
import com.fasterxml.jackson.databind.JsonNode;
import java.net.URI;
import java.util.Arrays;
import java.util.Collections;
import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
public class TestS3SignRequestParser {
@Test
public void nullRequest() {
- Assertions.assertThatThrownBy(() -> S3SignRequestParser.fromJson((JsonNode) null))
+ assertThatThrownBy(() -> S3SignRequestParser.fromJson((JsonNode) null))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse s3 sign request from null object");
- Assertions.assertThatThrownBy(() -> S3SignRequestParser.toJson(null))
+ assertThatThrownBy(() -> S3SignRequestParser.toJson(null))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Invalid s3 sign request: null");
}
@Test
public void missingFields() {
- Assertions.assertThatThrownBy(() -> S3SignRequestParser.fromJson("{}"))
+ assertThatThrownBy(() -> S3SignRequestParser.fromJson("{}"))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse missing string: region");
- Assertions.assertThatThrownBy(() -> S3SignRequestParser.fromJson("{\"region\":\"us-west-2\"}"))
+ assertThatThrownBy(() -> S3SignRequestParser.fromJson("{\"region\":\"us-west-2\"}"))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse missing string: method");
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() -> S3SignRequestParser.fromJson("{\"region\":\"us-west-2\", \"method\" : \"PUT\"}"))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse missing string: uri");
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() ->
S3SignRequestParser.fromJson(
"{\n"
@@ -68,7 +70,7 @@
@Test
public void invalidMethod() {
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() ->
S3SignRequestParser.fromJson(
"{\n"
@@ -83,7 +85,7 @@
@Test
public void invalidUri() {
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() ->
S3SignRequestParser.fromJson(
"{\n"
@@ -98,7 +100,7 @@
@Test
public void invalidRegion() {
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() ->
S3SignRequestParser.fromJson(
"{\n"
@@ -131,8 +133,8 @@
.build();
String json = S3SignRequestParser.toJson(s3SignRequest, true);
- Assertions.assertThat(S3SignRequestParser.fromJson(json)).isEqualTo(s3SignRequest);
- Assertions.assertThat(json)
+ assertThat(S3SignRequestParser.fromJson(json)).isEqualTo(s3SignRequest);
+ assertThat(json)
.isEqualTo(
"{\n"
+ " \"region\" : \"us-west-2\",\n"
@@ -168,8 +170,8 @@
.build();
String json = S3SignRequestParser.toJson(s3SignRequest, true);
- Assertions.assertThat(S3SignRequestParser.fromJson(json)).isEqualTo(s3SignRequest);
- Assertions.assertThat(json)
+ assertThat(S3SignRequestParser.fromJson(json)).isEqualTo(s3SignRequest);
+ assertThat(json)
.isEqualTo(
"{\n"
+ " \"region\" : \"us-west-2\",\n"
@@ -209,8 +211,8 @@
.build();
String json = S3SignRequestParser.toJson(s3SignRequest, true);
- Assertions.assertThat(S3SignRequestParser.fromJson(json)).isEqualTo(s3SignRequest);
- Assertions.assertThat(json)
+ assertThat(S3SignRequestParser.fromJson(json)).isEqualTo(s3SignRequest);
+ assertThat(json)
.isEqualTo(
"{\n"
+ " \"region\" : \"us-west-2\",\n"
diff --git a/aws/src/test/java/org/apache/iceberg/aws/s3/signer/TestS3SignResponseParser.java b/aws/src/test/java/org/apache/iceberg/aws/s3/signer/TestS3SignResponseParser.java
index d2cf132..19f2f54 100644
--- a/aws/src/test/java/org/apache/iceberg/aws/s3/signer/TestS3SignResponseParser.java
+++ b/aws/src/test/java/org/apache/iceberg/aws/s3/signer/TestS3SignResponseParser.java
@@ -18,34 +18,36 @@
*/
package org.apache.iceberg.aws.s3.signer;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
+
import com.fasterxml.jackson.databind.JsonNode;
import java.net.URI;
import java.util.Arrays;
import java.util.Collections;
import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
public class TestS3SignResponseParser {
@Test
public void nullResponse() {
- Assertions.assertThatThrownBy(() -> S3SignResponseParser.fromJson((JsonNode) null))
+ assertThatThrownBy(() -> S3SignResponseParser.fromJson((JsonNode) null))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse s3 sign response from null object");
- Assertions.assertThatThrownBy(() -> S3SignResponseParser.toJson(null))
+ assertThatThrownBy(() -> S3SignResponseParser.toJson(null))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Invalid s3 sign response: null");
}
@Test
public void missingFields() {
- Assertions.assertThatThrownBy(() -> S3SignResponseParser.fromJson("{}"))
+ assertThatThrownBy(() -> S3SignResponseParser.fromJson("{}"))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse missing string: uri");
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() ->
S3SignResponseParser.fromJson(
"{\"uri\" : \"http://localhost:49208/iceberg-signer-test\"}"))
@@ -55,8 +57,7 @@
@Test
public void invalidUri() {
- Assertions.assertThatThrownBy(
- () -> S3SignResponseParser.fromJson("{\"uri\" : 45, \"headers\" : {}}}"))
+ assertThatThrownBy(() -> S3SignResponseParser.fromJson("{\"uri\" : 45, \"headers\" : {}}}"))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse to a string value: uri: 45");
}
@@ -79,8 +80,8 @@
.build();
String json = S3SignResponseParser.toJson(s3SignResponse, true);
- Assertions.assertThat(S3SignResponseParser.fromJson(json)).isEqualTo(s3SignResponse);
- Assertions.assertThat(json)
+ assertThat(S3SignResponseParser.fromJson(json)).isEqualTo(s3SignResponse);
+ assertThat(json)
.isEqualTo(
"{\n"
+ " \"uri\" : \"http://localhost:49208/iceberg-signer-test\",\n"
diff --git a/aws/src/test/java/org/apache/iceberg/aws/util/TestRetryDetector.java b/aws/src/test/java/org/apache/iceberg/aws/util/TestRetryDetector.java
index 72cc2f9..ddf476f 100644
--- a/aws/src/test/java/org/apache/iceberg/aws/util/TestRetryDetector.java
+++ b/aws/src/test/java/org/apache/iceberg/aws/util/TestRetryDetector.java
@@ -18,7 +18,8 @@
*/
package org.apache.iceberg.aws.util;
-import org.assertj.core.api.Assertions;
+import static org.assertj.core.api.Assertions.assertThat;
+
import org.junit.jupiter.api.Test;
import org.mockito.Mockito;
import software.amazon.awssdk.core.metrics.CoreMetric;
@@ -31,7 +32,7 @@
@Test
public void testNoMetrics() {
RetryDetector detector = new RetryDetector();
- Assertions.assertThat(detector.retried()).as("Should default to false").isFalse();
+ assertThat(detector.retried()).as("Should default to false").isFalse();
}
@Test
@@ -40,7 +41,7 @@
RetryDetector detector = new RetryDetector();
detector.publish(metrics.collect());
- Assertions.assertThat(detector.retried())
+ assertThat(detector.retried())
.as("Should not detect retries if RETRY_COUNT metric is not reported")
.isFalse();
}
@@ -52,9 +53,7 @@
RetryDetector detector = new RetryDetector();
detector.publish(metrics.collect());
- Assertions.assertThat(detector.retried())
- .as("Should not detect retries if RETRY_COUNT is zero")
- .isFalse();
+ assertThat(detector.retried()).as("Should not detect retries if RETRY_COUNT is zero").isFalse();
}
@Test
@@ -64,9 +63,7 @@
RetryDetector detector = new RetryDetector();
detector.publish(metrics.collect());
- Assertions.assertThat(detector.retried())
- .as("Should detect retries if RETRY_COUNT is non-zero")
- .isTrue();
+ assertThat(detector.retried()).as("Should detect retries if RETRY_COUNT is non-zero").isTrue();
}
@Test
@@ -77,7 +74,7 @@
RetryDetector detector = new RetryDetector();
detector.publish(metrics.collect());
- Assertions.assertThat(detector.retried())
+ assertThat(detector.retried())
.as("Should detect retries if even one RETRY_COUNT is non-zero")
.isTrue();
}
@@ -91,7 +88,7 @@
RetryDetector detector = new RetryDetector();
detector.publish(metrics.collect());
- Assertions.assertThat(detector.retried())
+ assertThat(detector.retried())
.as("Should not detect retries if nested RETRY_COUNT is zero")
.isFalse();
}
@@ -105,7 +102,7 @@
RetryDetector detector = new RetryDetector();
detector.publish(metrics.collect());
- Assertions.assertThat(detector.retried())
+ assertThat(detector.retried())
.as("Should detect retries if nested RETRY_COUNT is non-zero")
.isTrue();
}
@@ -124,7 +121,7 @@
RetryDetector detector = new RetryDetector();
detector.publish(metrics.collect());
- Assertions.assertThat(detector.retried())
+ assertThat(detector.retried())
.as("Should detect retries if even one nested RETRY_COUNT is non-zero")
.isTrue();
}
@@ -138,11 +135,9 @@
RetryDetector detector = new RetryDetector();
detector.publish(metrics1.collect());
- Assertions.assertThat(detector.retried())
- .as("Should not detect retries if RETRY_COUNT is zero")
- .isFalse();
+ assertThat(detector.retried()).as("Should not detect retries if RETRY_COUNT is zero").isFalse();
detector.publish(metrics2.collect());
- Assertions.assertThat(detector.retried())
+ assertThat(detector.retried())
.as("Should continue detecting retries in additional metrics")
.isTrue();
}
@@ -158,13 +153,9 @@
RetryDetector detector = new RetryDetector();
detector.publish(metrics1Spy);
- Assertions.assertThat(detector.retried())
- .as("Should detect retries if RETRY_COUNT is zero")
- .isTrue();
+ assertThat(detector.retried()).as("Should detect retries if RETRY_COUNT is zero").isTrue();
detector.publish(metrics2Spy);
- Assertions.assertThat(detector.retried())
- .as("Should remain true once a retry is detected")
- .isTrue();
+ assertThat(detector.retried()).as("Should remain true once a retry is detected").isTrue();
Mockito.verify(metrics1Spy).metricValues(Mockito.eq(CoreMetric.RETRY_COUNT));
Mockito.verifyNoMoreInteractions(metrics1Spy, metrics2Spy);
diff --git a/azure/src/test/java/org/apache/iceberg/azure/AzurePropertiesTest.java b/azure/src/test/java/org/apache/iceberg/azure/AzurePropertiesTest.java
index 2ebccb6..32f4911 100644
--- a/azure/src/test/java/org/apache/iceberg/azure/AzurePropertiesTest.java
+++ b/azure/src/test/java/org/apache/iceberg/azure/AzurePropertiesTest.java
@@ -20,6 +20,7 @@
import static org.apache.iceberg.azure.AzureProperties.ADLS_SHARED_KEY_ACCOUNT_KEY;
import static org.apache.iceberg.azure.AzureProperties.ADLS_SHARED_KEY_ACCOUNT_NAME;
+import static org.assertj.core.api.Assertions.assertThatIllegalArgumentException;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
@@ -30,7 +31,6 @@
import com.azure.storage.common.StorageSharedKeyCredential;
import com.azure.storage.file.datalake.DataLakeFileSystemClientBuilder;
import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
public class AzurePropertiesTest {
@@ -101,7 +101,7 @@
@Test
public void testSharedKey() {
- Assertions.assertThatIllegalArgumentException()
+ assertThatIllegalArgumentException()
.isThrownBy(
() ->
new AzureProperties(
@@ -111,7 +111,7 @@
String.format(
"Azure authentication: shared-key requires both %s and %s",
ADLS_SHARED_KEY_ACCOUNT_NAME, ADLS_SHARED_KEY_ACCOUNT_KEY));
- Assertions.assertThatIllegalArgumentException()
+ assertThatIllegalArgumentException()
.isThrownBy(
() -> new AzureProperties(ImmutableMap.of(ADLS_SHARED_KEY_ACCOUNT_NAME, "account")))
.withMessage(
diff --git a/core/src/test/java/org/apache/iceberg/TestBaseIncrementalChangelogScan.java b/core/src/test/java/org/apache/iceberg/TestBaseIncrementalChangelogScan.java
index bb64762..3b94886 100644
--- a/core/src/test/java/org/apache/iceberg/TestBaseIncrementalChangelogScan.java
+++ b/core/src/test/java/org/apache/iceberg/TestBaseIncrementalChangelogScan.java
@@ -21,6 +21,7 @@
import static org.apache.iceberg.TableProperties.MANIFEST_MERGE_ENABLED;
import static org.apache.iceberg.TableProperties.MANIFEST_MIN_MERGE_COUNT;
import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
import static org.assertj.core.api.Assumptions.assumeThat;
import java.io.IOException;
@@ -33,7 +34,6 @@
import org.apache.iceberg.relocated.com.google.common.collect.ImmutableSet;
import org.apache.iceberg.relocated.com.google.common.collect.Iterables;
import org.apache.iceberg.relocated.com.google.common.collect.Lists;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.TestTemplate;
import org.junit.jupiter.api.extension.ExtendWith;
@@ -255,7 +255,7 @@
table.newRowDelta().addDeletes(FILE_A2_DELETES).commit();
- Assertions.assertThatThrownBy(() -> plan(newScan()))
+ assertThatThrownBy(() -> plan(newScan()))
.isInstanceOf(UnsupportedOperationException.class)
.hasMessage("Delete files are currently not supported in changelog scans");
}
diff --git a/core/src/test/java/org/apache/iceberg/TestCatalogUtil.java b/core/src/test/java/org/apache/iceberg/TestCatalogUtil.java
index 878ca36..25530d0 100644
--- a/core/src/test/java/org/apache/iceberg/TestCatalogUtil.java
+++ b/core/src/test/java/org/apache/iceberg/TestCatalogUtil.java
@@ -18,6 +18,9 @@
*/
package org.apache.iceberg;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
+
import java.util.List;
import java.util.Map;
import org.apache.hadoop.conf.Configurable;
@@ -33,7 +36,6 @@
import org.apache.iceberg.metrics.MetricsReporter;
import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap;
import org.apache.iceberg.relocated.com.google.common.collect.Maps;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
public class TestCatalogUtil {
@@ -46,9 +48,9 @@
String name = "custom";
Catalog catalog =
CatalogUtil.loadCatalog(TestCatalog.class.getName(), name, options, hadoopConf);
- Assertions.assertThat(catalog).isInstanceOf(TestCatalog.class);
- Assertions.assertThat(((TestCatalog) catalog).catalogName).isEqualTo(name);
- Assertions.assertThat(((TestCatalog) catalog).catalogProperties).isEqualTo(options);
+ assertThat(catalog).isInstanceOf(TestCatalog.class);
+ assertThat(((TestCatalog) catalog).catalogName).isEqualTo(name);
+ assertThat(((TestCatalog) catalog).catalogProperties).isEqualTo(options);
}
@Test
@@ -60,10 +62,10 @@
String name = "custom";
Catalog catalog =
CatalogUtil.loadCatalog(TestCatalogConfigurable.class.getName(), name, options, hadoopConf);
- Assertions.assertThat(catalog).isInstanceOf(TestCatalogConfigurable.class);
- Assertions.assertThat(((TestCatalogConfigurable) catalog).catalogName).isEqualTo(name);
- Assertions.assertThat(((TestCatalogConfigurable) catalog).catalogProperties).isEqualTo(options);
- Assertions.assertThat(((TestCatalogConfigurable) catalog).configuration).isEqualTo(hadoopConf);
+ assertThat(catalog).isInstanceOf(TestCatalogConfigurable.class);
+ assertThat(((TestCatalogConfigurable) catalog).catalogName).isEqualTo(name);
+ assertThat(((TestCatalogConfigurable) catalog).catalogProperties).isEqualTo(options);
+ assertThat(((TestCatalogConfigurable) catalog).configuration).isEqualTo(hadoopConf);
}
@Test
@@ -72,7 +74,7 @@
options.put("key", "val");
Configuration hadoopConf = new Configuration();
String name = "custom";
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() ->
CatalogUtil.loadCatalog(
TestCatalogBadConstructor.class.getName(), name, options, hadoopConf))
@@ -89,7 +91,7 @@
Configuration hadoopConf = new Configuration();
String name = "custom";
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() ->
CatalogUtil.loadCatalog(
TestCatalogNoInterface.class.getName(), name, options, hadoopConf))
@@ -106,7 +108,7 @@
String name = "custom";
String impl = TestCatalogErrorConstructor.class.getName();
- Assertions.assertThatThrownBy(() -> CatalogUtil.loadCatalog(impl, name, options, hadoopConf))
+ assertThatThrownBy(() -> CatalogUtil.loadCatalog(impl, name, options, hadoopConf))
.isInstanceOf(IllegalArgumentException.class)
.hasMessageStartingWith("Cannot initialize Catalog implementation")
.hasMessageContaining("NoClassDefFoundError: Error while initializing class");
@@ -119,7 +121,7 @@
Configuration hadoopConf = new Configuration();
String name = "custom";
String impl = "CatalogDoesNotExist";
- Assertions.assertThatThrownBy(() -> CatalogUtil.loadCatalog(impl, name, options, hadoopConf))
+ assertThatThrownBy(() -> CatalogUtil.loadCatalog(impl, name, options, hadoopConf))
.isInstanceOf(IllegalArgumentException.class)
.hasMessageStartingWith("Cannot initialize Catalog implementation")
.hasMessageContaining("java.lang.ClassNotFoundException: CatalogDoesNotExist");
@@ -130,8 +132,8 @@
Map<String, String> properties = Maps.newHashMap();
properties.put("key", "val");
FileIO fileIO = CatalogUtil.loadFileIO(TestFileIONoArg.class.getName(), properties, null);
- Assertions.assertThat(fileIO).isInstanceOf(TestFileIONoArg.class);
- Assertions.assertThat(((TestFileIONoArg) fileIO).map).isEqualTo(properties);
+ assertThat(fileIO).isInstanceOf(TestFileIONoArg.class);
+ assertThat(((TestFileIONoArg) fileIO).map).isEqualTo(properties);
}
@Test
@@ -140,8 +142,8 @@
configuration.set("key", "val");
FileIO fileIO =
CatalogUtil.loadFileIO(HadoopFileIO.class.getName(), Maps.newHashMap(), configuration);
- Assertions.assertThat(fileIO).isInstanceOf(HadoopFileIO.class);
- Assertions.assertThat(((HadoopFileIO) fileIO).conf().get("key")).isEqualTo("val");
+ assertThat(fileIO).isInstanceOf(HadoopFileIO.class);
+ assertThat(((HadoopFileIO) fileIO).conf().get("key")).isEqualTo("val");
}
@Test
@@ -151,13 +153,13 @@
FileIO fileIO =
CatalogUtil.loadFileIO(
TestFileIOConfigurable.class.getName(), Maps.newHashMap(), configuration);
- Assertions.assertThat(fileIO).isInstanceOf(TestFileIOConfigurable.class);
- Assertions.assertThat(((TestFileIOConfigurable) fileIO).configuration).isEqualTo(configuration);
+ assertThat(fileIO).isInstanceOf(TestFileIOConfigurable.class);
+ assertThat(((TestFileIOConfigurable) fileIO).configuration).isEqualTo(configuration);
}
@Test
public void loadCustomFileIO_badArg() {
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() -> CatalogUtil.loadFileIO(TestFileIOBadArg.class.getName(), Maps.newHashMap(), null))
.isInstanceOf(IllegalArgumentException.class)
.hasMessageStartingWith(
@@ -167,7 +169,7 @@
@Test
public void loadCustomFileIO_badClass() {
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() ->
CatalogUtil.loadFileIO(TestFileIONotImpl.class.getName(), Maps.newHashMap(), null))
.isInstanceOf(IllegalArgumentException.class)
@@ -182,7 +184,7 @@
options.put(CatalogUtil.ICEBERG_CATALOG_TYPE, "hive");
Configuration hadoopConf = new Configuration();
String name = "custom";
- Assertions.assertThatThrownBy(() -> CatalogUtil.buildIcebergCatalog(name, options, hadoopConf))
+ assertThatThrownBy(() -> CatalogUtil.buildIcebergCatalog(name, options, hadoopConf))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage(
"Cannot create catalog custom, both type and catalog-impl are set: type=hive, catalog-impl=CustomCatalog");
@@ -196,12 +198,12 @@
CatalogProperties.METRICS_REPORTER_IMPL, TestMetricsReporterDefault.class.getName());
MetricsReporter metricsReporter = CatalogUtil.loadMetricsReporter(properties);
- Assertions.assertThat(metricsReporter).isInstanceOf(TestMetricsReporterDefault.class);
+ assertThat(metricsReporter).isInstanceOf(TestMetricsReporterDefault.class);
}
@Test
public void loadCustomMetricsReporter_badArg() {
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() ->
CatalogUtil.loadMetricsReporter(
ImmutableMap.of(
@@ -213,7 +215,7 @@
@Test
public void loadCustomMetricsReporter_badClass() {
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() ->
CatalogUtil.loadMetricsReporter(
ImmutableMap.of(
@@ -230,22 +232,22 @@
String nameSpaceWithTwoLevels = "ns.l2";
String tableName = "tbl";
TableIdentifier tableIdentifier = TableIdentifier.of(namespace, tableName);
- Assertions.assertThat(CatalogUtil.fullTableName(uriTypeCatalogName, tableIdentifier))
+ assertThat(CatalogUtil.fullTableName(uriTypeCatalogName, tableIdentifier))
.isEqualTo(uriTypeCatalogName + "/" + namespace + "." + tableName);
tableIdentifier = TableIdentifier.of(nameSpaceWithTwoLevels, tableName);
- Assertions.assertThat(CatalogUtil.fullTableName(uriTypeCatalogName, tableIdentifier))
+ assertThat(CatalogUtil.fullTableName(uriTypeCatalogName, tableIdentifier))
.isEqualTo(uriTypeCatalogName + "/" + nameSpaceWithTwoLevels + "." + tableName);
- Assertions.assertThat(CatalogUtil.fullTableName(uriTypeCatalogName + "/", tableIdentifier))
+ assertThat(CatalogUtil.fullTableName(uriTypeCatalogName + "/", tableIdentifier))
.isEqualTo(uriTypeCatalogName + "/" + nameSpaceWithTwoLevels + "." + tableName);
String nonUriCatalogName = "test.db.catalog";
- Assertions.assertThat(CatalogUtil.fullTableName(nonUriCatalogName, tableIdentifier))
+ assertThat(CatalogUtil.fullTableName(nonUriCatalogName, tableIdentifier))
.isEqualTo(nonUriCatalogName + "." + nameSpaceWithTwoLevels + "." + tableName);
String pathStyleCatalogName = "/test/db";
- Assertions.assertThat(CatalogUtil.fullTableName(pathStyleCatalogName, tableIdentifier))
+ assertThat(CatalogUtil.fullTableName(pathStyleCatalogName, tableIdentifier))
.isEqualTo(pathStyleCatalogName + "/" + nameSpaceWithTwoLevels + "." + tableName);
}
diff --git a/core/src/test/java/org/apache/iceberg/TestContentFileParser.java b/core/src/test/java/org/apache/iceberg/TestContentFileParser.java
index 1e185b6..83f7fc1 100644
--- a/core/src/test/java/org/apache/iceberg/TestContentFileParser.java
+++ b/core/src/test/java/org/apache/iceberg/TestContentFileParser.java
@@ -18,6 +18,9 @@
*/
package org.apache.iceberg;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
+
import com.fasterxml.jackson.databind.JsonNode;
import java.nio.ByteBuffer;
import java.util.Arrays;
@@ -28,7 +31,6 @@
import org.apache.iceberg.types.Conversions;
import org.apache.iceberg.types.Types;
import org.apache.iceberg.util.JsonUtil;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
@@ -37,26 +39,25 @@
public class TestContentFileParser {
@Test
public void testNullArguments() throws Exception {
- Assertions.assertThatThrownBy(() -> ContentFileParser.toJson(null, TestBase.SPEC))
+ assertThatThrownBy(() -> ContentFileParser.toJson(null, TestBase.SPEC))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Invalid content file: null");
- Assertions.assertThatThrownBy(() -> ContentFileParser.toJson(TestBase.FILE_A, null))
+ assertThatThrownBy(() -> ContentFileParser.toJson(TestBase.FILE_A, null))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Invalid partition spec: null");
- Assertions.assertThatThrownBy(
- () -> ContentFileParser.toJson(TestBase.FILE_A, TestBase.SPEC, null))
+ assertThatThrownBy(() -> ContentFileParser.toJson(TestBase.FILE_A, TestBase.SPEC, null))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Invalid JSON generator: null");
- Assertions.assertThatThrownBy(() -> ContentFileParser.fromJson(null, TestBase.SPEC))
+ assertThatThrownBy(() -> ContentFileParser.fromJson(null, TestBase.SPEC))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Invalid JSON node for content file: null");
String jsonStr = ContentFileParser.toJson(TestBase.FILE_A, TestBase.SPEC);
JsonNode jsonNode = JsonUtil.mapper().readTree(jsonStr);
- Assertions.assertThatThrownBy(() -> ContentFileParser.fromJson(jsonNode, null))
+ assertThatThrownBy(() -> ContentFileParser.fromJson(jsonNode, null))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Invalid partition spec: null");
}
@@ -66,10 +67,10 @@
public void testDataFile(PartitionSpec spec, DataFile dataFile, String expectedJson)
throws Exception {
String jsonStr = ContentFileParser.toJson(dataFile, spec);
- Assertions.assertThat(jsonStr).isEqualTo(expectedJson);
+ assertThat(jsonStr).isEqualTo(expectedJson);
JsonNode jsonNode = JsonUtil.mapper().readTree(jsonStr);
ContentFile<?> deserializedContentFile = ContentFileParser.fromJson(jsonNode, spec);
- Assertions.assertThat(deserializedContentFile).isInstanceOf(DataFile.class);
+ assertThat(deserializedContentFile).isInstanceOf(DataFile.class);
assertContentFileEquals(dataFile, deserializedContentFile, spec);
}
@@ -78,10 +79,10 @@
public void testDeleteFile(PartitionSpec spec, DeleteFile deleteFile, String expectedJson)
throws Exception {
String jsonStr = ContentFileParser.toJson(deleteFile, spec);
- Assertions.assertThat(jsonStr).isEqualTo(expectedJson);
+ assertThat(jsonStr).isEqualTo(expectedJson);
JsonNode jsonNode = JsonUtil.mapper().readTree(jsonStr);
ContentFile<?> deserializedContentFile = ContentFileParser.fromJson(jsonNode, spec);
- Assertions.assertThat(deserializedContentFile).isInstanceOf(DeleteFile.class);
+ assertThat(deserializedContentFile).isInstanceOf(DeleteFile.class);
assertContentFileEquals(deleteFile, deserializedContentFile, spec);
}
@@ -313,25 +314,25 @@
static void assertContentFileEquals(
ContentFile<?> expected, ContentFile<?> actual, PartitionSpec spec) {
- Assertions.assertThat(actual.getClass()).isEqualTo(expected.getClass());
- Assertions.assertThat(actual.specId()).isEqualTo(expected.specId());
- Assertions.assertThat(actual.content()).isEqualTo(expected.content());
- Assertions.assertThat(actual.path()).isEqualTo(expected.path());
- Assertions.assertThat(actual.format()).isEqualTo(expected.format());
- Assertions.assertThat(actual.partition())
+ assertThat(actual.getClass()).isEqualTo(expected.getClass());
+ assertThat(actual.specId()).isEqualTo(expected.specId());
+ assertThat(actual.content()).isEqualTo(expected.content());
+ assertThat(actual.path()).isEqualTo(expected.path());
+ assertThat(actual.format()).isEqualTo(expected.format());
+ assertThat(actual.partition())
.usingComparator(Comparators.forType(spec.partitionType()))
.isEqualTo(expected.partition());
- Assertions.assertThat(actual.recordCount()).isEqualTo(expected.recordCount());
- Assertions.assertThat(actual.fileSizeInBytes()).isEqualTo(expected.fileSizeInBytes());
- Assertions.assertThat(actual.columnSizes()).isEqualTo(expected.columnSizes());
- Assertions.assertThat(actual.valueCounts()).isEqualTo(expected.valueCounts());
- Assertions.assertThat(actual.nullValueCounts()).isEqualTo(expected.nullValueCounts());
- Assertions.assertThat(actual.nanValueCounts()).isEqualTo(expected.nanValueCounts());
- Assertions.assertThat(actual.lowerBounds()).isEqualTo(expected.lowerBounds());
- Assertions.assertThat(actual.upperBounds()).isEqualTo(expected.upperBounds());
- Assertions.assertThat(actual.keyMetadata()).isEqualTo(expected.keyMetadata());
- Assertions.assertThat(actual.splitOffsets()).isEqualTo(expected.splitOffsets());
- Assertions.assertThat(actual.equalityFieldIds()).isEqualTo(expected.equalityFieldIds());
- Assertions.assertThat(actual.sortOrderId()).isEqualTo(expected.sortOrderId());
+ assertThat(actual.recordCount()).isEqualTo(expected.recordCount());
+ assertThat(actual.fileSizeInBytes()).isEqualTo(expected.fileSizeInBytes());
+ assertThat(actual.columnSizes()).isEqualTo(expected.columnSizes());
+ assertThat(actual.valueCounts()).isEqualTo(expected.valueCounts());
+ assertThat(actual.nullValueCounts()).isEqualTo(expected.nullValueCounts());
+ assertThat(actual.nanValueCounts()).isEqualTo(expected.nanValueCounts());
+ assertThat(actual.lowerBounds()).isEqualTo(expected.lowerBounds());
+ assertThat(actual.upperBounds()).isEqualTo(expected.upperBounds());
+ assertThat(actual.keyMetadata()).isEqualTo(expected.keyMetadata());
+ assertThat(actual.splitOffsets()).isEqualTo(expected.splitOffsets());
+ assertThat(actual.equalityFieldIds()).isEqualTo(expected.equalityFieldIds());
+ assertThat(actual.sortOrderId()).isEqualTo(expected.sortOrderId());
}
}
diff --git a/core/src/test/java/org/apache/iceberg/TestEnvironmentContext.java b/core/src/test/java/org/apache/iceberg/TestEnvironmentContext.java
index 52d38fa..405604a 100644
--- a/core/src/test/java/org/apache/iceberg/TestEnvironmentContext.java
+++ b/core/src/test/java/org/apache/iceberg/TestEnvironmentContext.java
@@ -18,14 +18,15 @@
*/
package org.apache.iceberg;
-import org.assertj.core.api.Assertions;
+import static org.assertj.core.api.Assertions.assertThat;
+
import org.junit.jupiter.api.Test;
public class TestEnvironmentContext {
@Test
public void testDefaultValue() {
- Assertions.assertThat(EnvironmentContext.get().get("iceberg-version"))
+ assertThat(EnvironmentContext.get().get("iceberg-version"))
.isEqualTo(IcebergBuild.fullVersion());
}
}
diff --git a/core/src/test/java/org/apache/iceberg/TestFileScanTaskParser.java b/core/src/test/java/org/apache/iceberg/TestFileScanTaskParser.java
index 73bfba9..6e274c4 100644
--- a/core/src/test/java/org/apache/iceberg/TestFileScanTaskParser.java
+++ b/core/src/test/java/org/apache/iceberg/TestFileScanTaskParser.java
@@ -18,10 +18,12 @@
*/
package org.apache.iceberg;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
+
import org.apache.iceberg.expressions.ExpressionUtil;
import org.apache.iceberg.expressions.Expressions;
import org.apache.iceberg.expressions.ResidualEvaluator;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.ValueSource;
@@ -29,11 +31,11 @@
public class TestFileScanTaskParser {
@Test
public void testNullArguments() {
- Assertions.assertThatThrownBy(() -> FileScanTaskParser.toJson(null))
+ assertThatThrownBy(() -> FileScanTaskParser.toJson(null))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Invalid file scan task: null");
- Assertions.assertThatThrownBy(() -> FileScanTaskParser.fromJson(null, true))
+ assertThatThrownBy(() -> FileScanTaskParser.fromJson(null, true))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Invalid JSON string for file scan task: null");
}
@@ -44,7 +46,7 @@
PartitionSpec spec = TestBase.SPEC;
FileScanTask fileScanTask = createScanTask(spec, caseSensitive);
String jsonStr = FileScanTaskParser.toJson(fileScanTask);
- Assertions.assertThat(jsonStr).isEqualTo(expectedFileScanTaskJson());
+ assertThat(jsonStr).isEqualTo(expectedFileScanTaskJson());
FileScanTask deserializedTask = FileScanTaskParser.fromJson(jsonStr, caseSensitive);
assertFileScanTaskEquals(fileScanTask, deserializedTask, spec, caseSensitive);
}
@@ -87,17 +89,15 @@
private static void assertFileScanTaskEquals(
FileScanTask expected, FileScanTask actual, PartitionSpec spec, boolean caseSensitive) {
TestContentFileParser.assertContentFileEquals(expected.file(), actual.file(), spec);
- Assertions.assertThat(actual.deletes()).hasSameSizeAs(expected.deletes());
+ assertThat(actual.deletes()).hasSameSizeAs(expected.deletes());
for (int pos = 0; pos < expected.deletes().size(); ++pos) {
TestContentFileParser.assertContentFileEquals(
expected.deletes().get(pos), actual.deletes().get(pos), spec);
}
- Assertions.assertThat(expected.schema().sameSchema(actual.schema()))
- .as("Schema should match")
- .isTrue();
- Assertions.assertThat(actual.spec()).isEqualTo(expected.spec());
- Assertions.assertThat(
+ assertThat(expected.schema().sameSchema(actual.schema())).as("Schema should match").isTrue();
+ assertThat(actual.spec()).isEqualTo(expected.spec());
+ assertThat(
ExpressionUtil.equivalent(
expected.residual(), actual.residual(), TestBase.SCHEMA.asStruct(), caseSensitive))
.as("Residual expression should match")
diff --git a/core/src/test/java/org/apache/iceberg/TestFixedSizeSplitScanTaskIterator.java b/core/src/test/java/org/apache/iceberg/TestFixedSizeSplitScanTaskIterator.java
index 314e805..68e9b6b 100644
--- a/core/src/test/java/org/apache/iceberg/TestFixedSizeSplitScanTaskIterator.java
+++ b/core/src/test/java/org/apache/iceberg/TestFixedSizeSplitScanTaskIterator.java
@@ -18,10 +18,11 @@
*/
package org.apache.iceberg;
+import static org.assertj.core.api.Assertions.assertThat;
+
import java.util.List;
import org.apache.iceberg.BaseFileScanTask.SplitScanTask;
import org.apache.iceberg.relocated.com.google.common.collect.Lists;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
public class TestFixedSizeSplitScanTaskIterator {
@@ -57,8 +58,8 @@
List<Long> split = offsetLenPairs.get(i);
long offset = split.get(0);
long length = split.get(1);
- Assertions.assertThat(task.start()).isEqualTo(offset);
- Assertions.assertThat(task.length()).isEqualTo(length);
+ assertThat(task.start()).isEqualTo(offset);
+ assertThat(task.length()).isEqualTo(length);
}
}
diff --git a/core/src/test/java/org/apache/iceberg/TestManifestReader.java b/core/src/test/java/org/apache/iceberg/TestManifestReader.java
index 44b0908..999fd0d 100644
--- a/core/src/test/java/org/apache/iceberg/TestManifestReader.java
+++ b/core/src/test/java/org/apache/iceberg/TestManifestReader.java
@@ -19,6 +19,7 @@
package org.apache.iceberg;
import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
import static org.assertj.core.api.Assumptions.assumeThat;
import java.io.IOException;
@@ -31,7 +32,6 @@
import org.apache.iceberg.relocated.com.google.common.collect.Lists;
import org.apache.iceberg.relocated.com.google.common.collect.Streams;
import org.apache.iceberg.types.Types;
-import org.assertj.core.api.Assertions;
import org.assertj.core.api.recursive.comparison.RecursiveComparisonConfiguration;
import org.junit.jupiter.api.TestTemplate;
@@ -73,7 +73,7 @@
@TestTemplate
public void testInvalidUsage() throws IOException {
ManifestFile manifest = writeManifest(FILE_A, FILE_B);
- Assertions.assertThatThrownBy(() -> ManifestFiles.read(manifest, FILE_IO))
+ assertThatThrownBy(() -> ManifestFiles.read(manifest, FILE_IO))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot read from ManifestFile with null (unassigned) snapshot ID");
}
diff --git a/core/src/test/java/org/apache/iceberg/TestMetricsTruncation.java b/core/src/test/java/org/apache/iceberg/TestMetricsTruncation.java
index 3fe974f..5bd1fcd 100644
--- a/core/src/test/java/org/apache/iceberg/TestMetricsTruncation.java
+++ b/core/src/test/java/org/apache/iceberg/TestMetricsTruncation.java
@@ -23,11 +23,12 @@
import static org.apache.iceberg.util.BinaryUtil.truncateBinaryMin;
import static org.apache.iceberg.util.UnicodeUtil.truncateStringMax;
import static org.apache.iceberg.util.UnicodeUtil.truncateStringMin;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
import java.nio.ByteBuffer;
import java.util.Comparator;
import org.apache.iceberg.expressions.Literal;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
@SuppressWarnings("checkstyle:LocalVariableName")
@@ -39,32 +40,32 @@
ByteBuffer emptyByteBuffer = ByteBuffer.allocate(0);
Comparator<ByteBuffer> cmp = Literal.of(original).comparator();
- Assertions.assertThat(cmp.compare(truncateBinary(original, 0), emptyByteBuffer))
+ assertThat(cmp.compare(truncateBinary(original, 0), emptyByteBuffer))
.as("Truncating to a length of zero should return an empty ByteBuffer")
.isEqualTo(0);
- Assertions.assertThat(truncateBinary(original, original.remaining()))
+ assertThat(truncateBinary(original, original.remaining()))
.as("Truncating to the original buffer's remaining size should return the original buffer")
.isEqualTo(original);
- Assertions.assertThat(truncateBinary(original, 16))
+ assertThat(truncateBinary(original, 16))
.as(
"Truncating with a length greater than the input's remaining size should return the input")
.isEqualTo(original);
ByteBuffer truncated = truncateBinary(original, 2);
- Assertions.assertThat(truncated.remaining())
+ assertThat(truncated.remaining())
.as(
"Truncating with a length less than the input's remaining size should truncate properly")
.isEqualTo(2);
- Assertions.assertThat(truncated.position())
+ assertThat(truncated.position())
.as(
"Truncating with a length less than the input's remaining size should truncate properly")
.isEqualTo(0);
- Assertions.assertThat(original.remaining())
+ assertThat(original.remaining())
.as("Truncating should not modify the input buffer")
.isEqualTo(4);
- Assertions.assertThat(original.position())
+ assertThat(original.position())
.as("Truncating should not modify the input buffer")
.isEqualTo(0);
- Assertions.assertThatThrownBy(() -> truncateBinary(original, -1))
+ assertThatThrownBy(() -> truncateBinary(original, -1))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Truncate length should be non-negative");
}
@@ -78,20 +79,19 @@
ByteBuffer test2_2 = ByteBuffer.wrap(new byte[] {(byte) 0xFF, (byte) 0xFF});
Comparator<ByteBuffer> cmp = Literal.of(test1).comparator();
- Assertions.assertThat(cmp.compare(truncateBinaryMin(Literal.of(test1), 2).value(), test1))
+ assertThat(cmp.compare(truncateBinaryMin(Literal.of(test1), 2).value(), test1))
.as("Truncated lower bound should be lower than or equal to the actual lower bound")
.isLessThanOrEqualTo(0);
- Assertions.assertThat(
- cmp.compare(truncateBinaryMin(Literal.of(test1), 2).value(), test1_2_expected))
+ assertThat(cmp.compare(truncateBinaryMin(Literal.of(test1), 2).value(), test1_2_expected))
.as("Output must have the first two bytes of the input")
.isEqualTo(0);
- Assertions.assertThat(cmp.compare(truncateBinaryMin(Literal.of(test1), 5).value(), test1))
+ assertThat(cmp.compare(truncateBinaryMin(Literal.of(test1), 5).value(), test1))
.as("No truncation required as truncate length is greater than the input size")
.isEqualTo(0);
- Assertions.assertThat(cmp.compare(truncateBinaryMin(Literal.of(test2), 2).value(), test2))
+ assertThat(cmp.compare(truncateBinaryMin(Literal.of(test2), 2).value(), test2))
.as("Truncated lower bound should be lower than or equal to the actual lower bound")
.isLessThanOrEqualTo(0);
- Assertions.assertThat(cmp.compare(truncateBinaryMin(Literal.of(test2), 2).value(), test2_2))
+ assertThat(cmp.compare(truncateBinaryMin(Literal.of(test2), 2).value(), test2_2))
.as(
"Output must have the first two bytes of the input. A lower bound exists "
+ "even though the first two bytes are the max value")
@@ -107,33 +107,30 @@
ByteBuffer expectedOutput = ByteBuffer.wrap(new byte[] {1, 2});
Comparator<ByteBuffer> cmp = Literal.of(test1).comparator();
- Assertions.assertThat(cmp.compare(truncateBinaryMax(Literal.of(test1), 2).value(), test1))
+ assertThat(cmp.compare(truncateBinaryMax(Literal.of(test1), 2).value(), test1))
.as("Truncated upper bound should be greater than or equal to the actual upper bound")
.isGreaterThanOrEqualTo(0);
- Assertions.assertThat(
- cmp.compare(truncateBinaryMax(Literal.of(test1), 2).value(), expectedOutput))
+ assertThat(cmp.compare(truncateBinaryMax(Literal.of(test1), 2).value(), expectedOutput))
.as("Output must have two bytes and the second byte of the input must be incremented")
.isEqualTo(0);
- Assertions.assertThat(cmp.compare(truncateBinaryMax(Literal.of(test2), 2).value(), test2))
+ assertThat(cmp.compare(truncateBinaryMax(Literal.of(test2), 2).value(), test2))
.as("Truncated upper bound should be greater than or equal to the actual upper bound")
.isGreaterThanOrEqualTo(0);
- Assertions.assertThat(
- cmp.compare(truncateBinaryMax(Literal.of(test2), 3).value(), expectedOutput))
+ assertThat(cmp.compare(truncateBinaryMax(Literal.of(test2), 3).value(), expectedOutput))
.as(
"Since the third byte is already the max value, output must have two bytes "
+ "with the second byte incremented ")
.isEqualTo(0);
- Assertions.assertThat(cmp.compare(truncateBinaryMax(Literal.of(test3), 5).value(), test3))
+ assertThat(cmp.compare(truncateBinaryMax(Literal.of(test3), 5).value(), test3))
.as("No truncation required as truncate length is greater than the input size")
.isEqualTo(0);
- Assertions.assertThat(truncateBinaryMax(Literal.of(test3), 2))
+ assertThat(truncateBinaryMax(Literal.of(test3), 2))
.as("An upper bound doesn't exist since the first two bytes are the max value")
.isNull();
- Assertions.assertThat(cmp.compare(truncateBinaryMax(Literal.of(test4), 2).value(), test4))
+ assertThat(cmp.compare(truncateBinaryMax(Literal.of(test4), 2).value(), test4))
.as("Truncated upper bound should be greater than or equal to the actual upper bound")
.isGreaterThanOrEqualTo(0);
- Assertions.assertThat(
- cmp.compare(truncateBinaryMax(Literal.of(test4), 2).value(), expectedOutput))
+ assertThat(cmp.compare(truncateBinaryMax(Literal.of(test4), 2).value(), expectedOutput))
.as(
"Since a shorter sequence is considered smaller, output must have two bytes "
+ "and the second byte of the input must be incremented")
@@ -155,38 +152,34 @@
String test4 = "\uD800\uDC00\uD800\uDC00";
String test4_1_expected = "\uD800\uDC00";
Comparator<CharSequence> cmp = Literal.of(test1).comparator();
- Assertions.assertThat(cmp.compare(truncateStringMin(Literal.of(test1), 3).value(), test1))
+ assertThat(cmp.compare(truncateStringMin(Literal.of(test1), 3).value(), test1))
.as("Truncated lower bound should be lower than or equal to the actual lower bound")
.isLessThanOrEqualTo(0);
- Assertions.assertThat(cmp.compare(truncateStringMin(Literal.of(test1), 8).value(), test1))
+ assertThat(cmp.compare(truncateStringMin(Literal.of(test1), 8).value(), test1))
.as("No truncation required as truncate length is greater than the input size")
.isEqualTo(0);
- Assertions.assertThat(
- cmp.compare(truncateStringMin(Literal.of(test1), 2).value(), test1_2_expected))
+ assertThat(cmp.compare(truncateStringMin(Literal.of(test1), 2).value(), test1_2_expected))
.as("Output must have the first two characters of the input")
.isEqualTo(0);
- Assertions.assertThat(
- cmp.compare(truncateStringMin(Literal.of(test1), 3).value(), test1_3_expected))
+ assertThat(cmp.compare(truncateStringMin(Literal.of(test1), 3).value(), test1_3_expected))
.as("Output must have the first three characters of the input")
.isEqualTo(0);
- Assertions.assertThat(cmp.compare(truncateStringMin(Literal.of(test2), 16).value(), test2))
+ assertThat(cmp.compare(truncateStringMin(Literal.of(test2), 16).value(), test2))
.as("Truncated lower bound should be lower than or equal to the actual lower bound")
.isLessThanOrEqualTo(0);
- Assertions.assertThat(
- cmp.compare(truncateStringMin(Literal.of(test2), 7).value(), test2_7_expected))
+ assertThat(cmp.compare(truncateStringMin(Literal.of(test2), 7).value(), test2_7_expected))
.as("Output must have the first seven characters of the input")
.isEqualTo(0);
- Assertions.assertThat(cmp.compare(truncateStringMin(Literal.of(test3), 2).value(), test3))
+ assertThat(cmp.compare(truncateStringMin(Literal.of(test3), 2).value(), test3))
.as("Truncated lower bound should be lower than or equal to the actual lower bound")
.isLessThanOrEqualTo(0);
- Assertions.assertThat(cmp.compare(truncateStringMin(Literal.of(test3), 2).value(), test3))
+ assertThat(cmp.compare(truncateStringMin(Literal.of(test3), 2).value(), test3))
.as("No truncation required as truncate length is equal to the input size")
.isEqualTo(0);
- Assertions.assertThat(cmp.compare(truncateStringMin(Literal.of(test4), 1).value(), test4))
+ assertThat(cmp.compare(truncateStringMin(Literal.of(test4), 1).value(), test4))
.as("Truncated lower bound should be lower than or equal to the actual lower bound")
.isLessThanOrEqualTo(0);
- Assertions.assertThat(
- cmp.compare(truncateStringMin(Literal.of(test4), 1).value(), test4_1_expected))
+ assertThat(cmp.compare(truncateStringMin(Literal.of(test4), 1).value(), test4_1_expected))
.as("Output must have the first 4 byte UTF-8 character of the input")
.isEqualTo(0);
}
@@ -215,76 +208,68 @@
String test7_1_expected = "\uD83D\uDE03";
Comparator<CharSequence> cmp = Literal.of(test1).comparator();
- Assertions.assertThat(cmp.compare(truncateStringMax(Literal.of(test1), 4).value(), test1))
+ assertThat(cmp.compare(truncateStringMax(Literal.of(test1), 4).value(), test1))
.as("Truncated upper bound should be greater than or equal to the actual upper bound")
.isGreaterThanOrEqualTo(0);
- Assertions.assertThat(cmp.compare(truncateStringMax(Literal.of(test1), 7).value(), test1))
+ assertThat(cmp.compare(truncateStringMax(Literal.of(test1), 7).value(), test1))
.as("No truncation required as truncate length is equal to the input size")
.isEqualTo(0);
- Assertions.assertThat(
- cmp.compare(truncateStringMax(Literal.of(test1), 2).value(), test1_2_expected))
+ assertThat(cmp.compare(truncateStringMax(Literal.of(test1), 2).value(), test1_2_expected))
.as(
"Output must have two characters and the second character of the input must "
+ "be incremented")
.isEqualTo(0);
- Assertions.assertThat(
- cmp.compare(truncateStringMax(Literal.of(test1), 3).value(), test1_3_expected))
+ assertThat(cmp.compare(truncateStringMax(Literal.of(test1), 3).value(), test1_3_expected))
.as(
"Output must have three characters and the third character of the input must "
+ "be incremented")
.isEqualTo(0);
- Assertions.assertThat(cmp.compare(truncateStringMax(Literal.of(test1), 8).value(), test1))
+ assertThat(cmp.compare(truncateStringMax(Literal.of(test1), 8).value(), test1))
.as("No truncation required as truncate length is greater than the input size")
.isEqualTo(0);
- Assertions.assertThat(cmp.compare(truncateStringMax(Literal.of(test2), 8).value(), test2))
+ assertThat(cmp.compare(truncateStringMax(Literal.of(test2), 8).value(), test2))
.as("Truncated upper bound should be greater than or equal to the actual upper bound")
.isGreaterThanOrEqualTo(0);
- Assertions.assertThat(
- cmp.compare(truncateStringMax(Literal.of(test2), 7).value(), test2_7_expected))
+ assertThat(cmp.compare(truncateStringMax(Literal.of(test2), 7).value(), test2_7_expected))
.as(
"Output must have seven characters and the seventh character of the input must be incremented")
.isEqualTo(0);
- Assertions.assertThat(cmp.compare(truncateStringMax(Literal.of(test3), 3).value(), test3))
+ assertThat(cmp.compare(truncateStringMax(Literal.of(test3), 3).value(), test3))
.as("Truncated upper bound should be greater than or equal to the actual upper bound")
.isGreaterThanOrEqualTo(0);
- Assertions.assertThat(
- cmp.compare(truncateStringMax(Literal.of(test3), 3).value(), test3_3_expected))
+ assertThat(cmp.compare(truncateStringMax(Literal.of(test3), 3).value(), test3_3_expected))
.as(
"Output must have three characters and the third character of the input must "
+ "be incremented. The second perceivable character in this string is actually a glyph. It consists of "
+ "two unicode characters")
.isEqualTo(0);
- Assertions.assertThat(cmp.compare(truncateStringMax(Literal.of(test4), 1).value(), test4))
+ assertThat(cmp.compare(truncateStringMax(Literal.of(test4), 1).value(), test4))
.as("Truncated upper bound should be greater than or equal to the actual upper bound")
.isGreaterThanOrEqualTo(0);
- Assertions.assertThat(
- cmp.compare(truncateStringMax(Literal.of(test4), 1).value(), test4_1_expected))
+ assertThat(cmp.compare(truncateStringMax(Literal.of(test4), 1).value(), test4_1_expected))
.as(
"Output must have one character. Since the first character is the max 3 byte "
+ "UTF-8 character, it should be incremented to the lowest 4 byte UTF-8 character")
.isEqualTo(0);
- Assertions.assertThat(truncateStringMax(Literal.of(test5), 1))
+ assertThat(truncateStringMax(Literal.of(test5), 1))
.as("An upper bound doesn't exist since the first two characters are max UTF-8 characters")
.isNull();
- Assertions.assertThat(cmp.compare(truncateStringMax(Literal.of(test6), 2).value(), test6))
+ assertThat(cmp.compare(truncateStringMax(Literal.of(test6), 2).value(), test6))
.as("Truncated upper bound should be greater than or equal to the actual upper bound")
.isGreaterThanOrEqualTo(0);
- Assertions.assertThat(
- cmp.compare(truncateStringMax(Literal.of(test6), 1).value(), test6_2_expected))
+ assertThat(cmp.compare(truncateStringMax(Literal.of(test6), 1).value(), test6_2_expected))
.as(
"Test 4 byte UTF-8 character increment. Output must have one character with "
+ "the first character incremented")
.isEqualTo(0);
- Assertions.assertThat(cmp.compare(truncateStringMax(Literal.of(test7), 2).value(), test7))
+ assertThat(cmp.compare(truncateStringMax(Literal.of(test7), 2).value(), test7))
.as("Truncated upper bound should be greater than or equal to the actual upper bound")
.isGreaterThanOrEqualTo(0);
- Assertions.assertThat(
- cmp.compare(truncateStringMax(Literal.of(test7), 2).value(), test7_2_expected))
+ assertThat(cmp.compare(truncateStringMax(Literal.of(test7), 2).value(), test7_2_expected))
.as(
"Test input with multiple 4 byte UTF-8 character where the second unicode character should be incremented")
.isEqualTo(0);
- Assertions.assertThat(
- cmp.compare(truncateStringMax(Literal.of(test7), 1).value(), test7_1_expected))
+ assertThat(cmp.compare(truncateStringMax(Literal.of(test7), 1).value(), test7_1_expected))
.as(
"Test input with multiple 4 byte UTF-8 character where the first unicode character should be incremented")
.isEqualTo(0);
diff --git a/core/src/test/java/org/apache/iceberg/TestOffsetsBasedSplitScanTaskIterator.java b/core/src/test/java/org/apache/iceberg/TestOffsetsBasedSplitScanTaskIterator.java
index d9ad4fb..89eaaaf 100644
--- a/core/src/test/java/org/apache/iceberg/TestOffsetsBasedSplitScanTaskIterator.java
+++ b/core/src/test/java/org/apache/iceberg/TestOffsetsBasedSplitScanTaskIterator.java
@@ -18,10 +18,11 @@
*/
package org.apache.iceberg;
+import static org.assertj.core.api.Assertions.assertThat;
+
import java.util.List;
import org.apache.iceberg.BaseFileScanTask.SplitScanTask;
import org.apache.iceberg.relocated.com.google.common.collect.Lists;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
public class TestOffsetsBasedSplitScanTaskIterator {
@@ -62,15 +63,15 @@
offsetRanges,
TestOffsetsBasedSplitScanTaskIterator::createSplitTask);
List<FileScanTask> tasks = Lists.newArrayList(splitTaskIterator);
- Assertions.assertThat(tasks).as("Number of tasks don't match").hasSameSizeAs(offsetLenPairs);
+ assertThat(tasks).as("Number of tasks don't match").hasSameSizeAs(offsetLenPairs);
for (int i = 0; i < tasks.size(); i++) {
FileScanTask task = tasks.get(i);
List<Long> split = offsetLenPairs.get(i);
long offset = split.get(0);
long length = split.get(1);
- Assertions.assertThat(task.start()).isEqualTo(offset);
- Assertions.assertThat(task.length()).isEqualTo(length);
+ assertThat(task.start()).isEqualTo(offset);
+ assertThat(task.length()).isEqualTo(length);
}
}
diff --git a/core/src/test/java/org/apache/iceberg/TestSchemaUnionByFieldName.java b/core/src/test/java/org/apache/iceberg/TestSchemaUnionByFieldName.java
index 5aedde6..bda7646 100644
--- a/core/src/test/java/org/apache/iceberg/TestSchemaUnionByFieldName.java
+++ b/core/src/test/java/org/apache/iceberg/TestSchemaUnionByFieldName.java
@@ -20,6 +20,8 @@
import static org.apache.iceberg.types.Types.NestedField.optional;
import static org.apache.iceberg.types.Types.NestedField.required;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;
@@ -42,7 +44,6 @@
import org.apache.iceberg.types.Types.TimeType;
import org.apache.iceberg.types.Types.TimestampType;
import org.apache.iceberg.types.Types.UUIDType;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
public class TestSchemaUnionByFieldName {
@@ -82,7 +83,7 @@
public void testAddTopLevelPrimitives() {
Schema newSchema = new Schema(primitiveFields(0, primitiveTypes()));
Schema applied = new SchemaUpdate(new Schema(), 0).unionByNameWith(newSchema).apply();
- Assertions.assertThat(applied.asStruct()).isEqualTo(newSchema.asStruct());
+ assertThat(applied.asStruct()).isEqualTo(newSchema.asStruct());
}
@Test
@@ -91,7 +92,7 @@
Schema newSchema =
new Schema(optional(1, "aList", Types.ListType.ofOptional(2, primitiveType)));
Schema applied = new SchemaUpdate(new Schema(), 0).unionByNameWith(newSchema).apply();
- Assertions.assertThat(applied.asStruct()).isEqualTo(newSchema.asStruct());
+ assertThat(applied.asStruct()).isEqualTo(newSchema.asStruct());
}
}
@@ -102,7 +103,7 @@
new Schema(
optional(1, "aMap", Types.MapType.ofOptional(2, 3, primitiveType, primitiveType)));
Schema applied = new SchemaUpdate(new Schema(), 0).unionByNameWith(newSchema).apply();
- Assertions.assertThat(applied.asStruct()).isEqualTo(newSchema.asStruct());
+ assertThat(applied.asStruct()).isEqualTo(newSchema.asStruct());
}
}
@@ -113,7 +114,7 @@
new Schema(
optional(1, "aStruct", Types.StructType.of(optional(2, "primitive", primitiveType))));
Schema applied = new SchemaUpdate(new Schema(), 0).unionByNameWith(currentSchema).apply();
- Assertions.assertThat(applied.asStruct()).isEqualTo(currentSchema.asStruct());
+ assertThat(applied.asStruct()).isEqualTo(currentSchema.asStruct());
}
}
@@ -125,7 +126,7 @@
new Schema(
optional(1, "aStruct", Types.StructType.of(optional(2, "primitive", primitiveType))));
Schema applied = new SchemaUpdate(currentSchema, 1).unionByNameWith(newSchema).apply();
- Assertions.assertThat(applied.asStruct()).isEqualTo(newSchema.asStruct());
+ assertThat(applied.asStruct()).isEqualTo(newSchema.asStruct());
}
}
@@ -136,7 +137,7 @@
new Schema(
optional(1, "aStruct", Types.StructType.of(primitiveFields(1, primitiveTypes()))));
Schema applied = new SchemaUpdate(currentSchema, 1).unionByNameWith(newSchema).apply();
- Assertions.assertThat(applied.asStruct()).isEqualTo(newSchema.asStruct());
+ assertThat(applied.asStruct()).isEqualTo(newSchema.asStruct());
}
@Test
@@ -165,7 +166,7 @@
Types.ListType.ofOptional(
10, DecimalType.of(11, 20))))))))))));
Schema applied = new SchemaUpdate(new Schema(), 0).unionByNameWith(newSchema).apply();
- Assertions.assertThat(applied.asStruct()).isEqualTo(newSchema.asStruct());
+ assertThat(applied.asStruct()).isEqualTo(newSchema.asStruct());
}
@Test
@@ -201,7 +202,7 @@
"aString",
StringType.get()))))))))))))));
Schema applied = new SchemaUpdate(new Schema(), 0).unionByNameWith(newSchema).apply();
- Assertions.assertThat(applied.asStruct()).isEqualTo(newSchema.asStruct());
+ assertThat(applied.asStruct()).isEqualTo(newSchema.asStruct());
}
@Test
@@ -234,7 +235,7 @@
Types.MapType.ofOptional(
12, 13, StringType.get(), StringType.get()))))))));
Schema applied = new SchemaUpdate(new Schema(), 0).unionByNameWith(newSchema).apply();
- Assertions.assertThat(applied.asStruct()).isEqualTo(newSchema.asStruct());
+ assertThat(applied.asStruct()).isEqualTo(newSchema.asStruct());
}
@Test
@@ -243,8 +244,7 @@
new Schema(optional(1, "aList", Types.ListType.ofOptional(2, StringType.get())));
Schema newSchema =
new Schema(optional(1, "aList", Types.ListType.ofOptional(2, LongType.get())));
- Assertions.assertThatThrownBy(
- () -> new SchemaUpdate(currentSchema, 2).unionByNameWith(newSchema).apply())
+ assertThatThrownBy(() -> new SchemaUpdate(currentSchema, 2).unionByNameWith(newSchema).apply())
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot change column type: aList.element: string -> long");
}
@@ -260,8 +260,7 @@
new Schema(
optional(1, "aMap", Types.MapType.ofOptional(2, 3, StringType.get(), LongType.get())));
- Assertions.assertThatThrownBy(
- () -> new SchemaUpdate(currentSchema, 3).unionByNameWith(newSchema).apply())
+ assertThatThrownBy(() -> new SchemaUpdate(currentSchema, 3).unionByNameWith(newSchema).apply())
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot change column type: aMap.value: string -> long");
}
@@ -275,8 +274,7 @@
Schema newSchema =
new Schema(
optional(1, "aMap", Types.MapType.ofOptional(2, 3, UUIDType.get(), StringType.get())));
- Assertions.assertThatThrownBy(
- () -> new SchemaUpdate(currentSchema, 3).unionByNameWith(newSchema).apply())
+ assertThatThrownBy(() -> new SchemaUpdate(currentSchema, 3).unionByNameWith(newSchema).apply())
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot change column type: aMap.key: string -> uuid");
}
@@ -288,8 +286,8 @@
Schema newSchema = new Schema(required(1, "aCol", LongType.get()));
Schema applied = new SchemaUpdate(currentSchema, 1).unionByNameWith(newSchema).apply();
- Assertions.assertThat(applied.asStruct().fields()).hasSize(1);
- Assertions.assertThat(applied.asStruct().fields().get(0).type()).isEqualTo(LongType.get());
+ assertThat(applied.asStruct().fields()).hasSize(1);
+ assertThat(applied.asStruct().fields().get(0).type()).isEqualTo(LongType.get());
}
@Test
@@ -299,9 +297,9 @@
Schema newSchema = new Schema(required(1, "aCol", DoubleType.get()));
Schema applied = new SchemaUpdate(currentSchema, 1).unionByNameWith(newSchema).apply();
- Assertions.assertThat(applied.asStruct()).isEqualTo(newSchema.asStruct());
- Assertions.assertThat(applied.asStruct().fields()).hasSize(1);
- Assertions.assertThat(applied.asStruct().fields().get(0).type()).isEqualTo(DoubleType.get());
+ assertThat(applied.asStruct()).isEqualTo(newSchema.asStruct());
+ assertThat(applied.asStruct().fields()).hasSize(1);
+ assertThat(applied.asStruct().fields().get(0).type()).isEqualTo(DoubleType.get());
}
@Test
@@ -309,8 +307,7 @@
Schema currentSchema = new Schema(required(1, "aCol", DoubleType.get()));
Schema newSchema = new Schema(required(1, "aCol", FloatType.get()));
- Assertions.assertThatThrownBy(
- () -> new SchemaUpdate(currentSchema, 1).unionByNameWith(newSchema).apply())
+ assertThatThrownBy(() -> new SchemaUpdate(currentSchema, 1).unionByNameWith(newSchema).apply())
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot change column type: aCol: double -> float");
}
@@ -323,7 +320,7 @@
Schema newSchema = new Schema(required(1, "aCol", DecimalType.of(22, 1)));
Schema applied = new SchemaUpdate(currentSchema, 1).unionByNameWith(newSchema).apply();
- Assertions.assertThat(applied.asStruct()).isEqualTo(newSchema.asStruct());
+ assertThat(applied.asStruct()).isEqualTo(newSchema.asStruct());
}
@Test
@@ -384,7 +381,7 @@
optional(5, "value", StringType.get()),
optional(6, "time", TimeType.get())))))))));
- Assertions.assertThat(applied.asStruct()).isEqualTo(expected.asStruct());
+ assertThat(applied.asStruct()).isEqualTo(expected.asStruct());
}
@Test
@@ -392,8 +389,7 @@
Schema currentSchema =
new Schema(optional(1, "aColumn", Types.ListType.ofOptional(2, StringType.get())));
Schema newSchema = new Schema(optional(1, "aColumn", StringType.get()));
- Assertions.assertThatThrownBy(
- () -> new SchemaUpdate(currentSchema, 2).unionByNameWith(newSchema).apply())
+ assertThatThrownBy(() -> new SchemaUpdate(currentSchema, 2).unionByNameWith(newSchema).apply())
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot change column type: aColumn: list<string> -> string");
}
@@ -423,7 +419,7 @@
Schema union = new SchemaUpdate(aSchema, 0).unionByNameWith(mirrored).apply();
// We don't expect the original schema to have been altered.
- Assertions.assertThat(union.asStruct()).isEqualTo(aSchema.asStruct());
+ assertThat(union.asStruct()).isEqualTo(aSchema.asStruct());
}
@Test
@@ -459,7 +455,7 @@
7, "d1", Types.StructType.of(optional(8, "d2", Types.StringType.get()))))));
Schema union = new SchemaUpdate(schema, 5).unionByNameWith(observed).apply();
- Assertions.assertThat(union.asStruct()).isEqualTo(observed.asStruct());
+ assertThat(union.asStruct()).isEqualTo(observed.asStruct());
}
@Test
@@ -510,7 +506,7 @@
StringType.get()))))))))))))));
Schema applied = new SchemaUpdate(schema, 4).unionByNameWith(observed).apply();
- Assertions.assertThat(applied.asStruct()).isEqualTo(observed.asStruct());
+ assertThat(applied.asStruct()).isEqualTo(observed.asStruct());
}
@Test
@@ -575,6 +571,6 @@
"list2",
ListType.ofOptional(7, StringType.get())))))))));
- Assertions.assertThat(union.asStruct()).isEqualTo(expected.asStruct());
+ assertThat(union.asStruct()).isEqualTo(expected.asStruct());
}
}
diff --git a/core/src/test/java/org/apache/iceberg/TestUpdateRequirementParser.java b/core/src/test/java/org/apache/iceberg/TestUpdateRequirementParser.java
index cd32b96..277fdf7 100644
--- a/core/src/test/java/org/apache/iceberg/TestUpdateRequirementParser.java
+++ b/core/src/test/java/org/apache/iceberg/TestUpdateRequirementParser.java
@@ -18,9 +18,12 @@
*/
package org.apache.iceberg;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
+import static org.assertj.core.api.Assertions.fail;
+
import java.util.List;
import org.apache.iceberg.relocated.com.google.common.collect.ImmutableList;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
public class TestUpdateRequirementParser {
@@ -33,7 +36,7 @@
"{\"uuid\":\"2cc52516-5e73-41f2-b139-545d41a4e151\"}");
for (String json : invalidJson) {
- Assertions.assertThatThrownBy(() -> UpdateRequirementParser.fromJson(json))
+ assertThatThrownBy(() -> UpdateRequirementParser.fromJson(json))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse update requirement. Missing field: type");
}
@@ -53,7 +56,7 @@
String uuid = "2cc52516-5e73-41f2-b139-545d41a4e151";
String expected = String.format("{\"type\":\"assert-table-uuid\",\"uuid\":\"%s\"}", uuid);
UpdateRequirement actual = new UpdateRequirement.AssertTableUUID(uuid);
- Assertions.assertThat(UpdateRequirementParser.toJson(actual))
+ assertThat(UpdateRequirementParser.toJson(actual))
.as("AssertTableUUID should convert to the correct JSON value")
.isEqualTo(expected);
}
@@ -72,7 +75,7 @@
String uuid = "2cc52516-5e73-41f2-b139-545d41a4e151";
String expected = String.format("{\"type\":\"assert-view-uuid\",\"uuid\":\"%s\"}", uuid);
UpdateRequirement actual = new UpdateRequirement.AssertViewUUID(uuid);
- Assertions.assertThat(UpdateRequirementParser.toJson(actual))
+ assertThat(UpdateRequirementParser.toJson(actual))
.as("AssertViewUUID should convert to the correct JSON value")
.isEqualTo(expected);
}
@@ -89,7 +92,7 @@
public void testAssertTableDoesNotExistToJson() {
String expected = "{\"type\":\"assert-create\"}";
UpdateRequirement actual = new UpdateRequirement.AssertTableDoesNotExist();
- Assertions.assertThat(UpdateRequirementParser.toJson(actual))
+ assertThat(UpdateRequirementParser.toJson(actual))
.as("AssertTableDoesNotExist should convert to the correct JSON value")
.isEqualTo(expected);
}
@@ -130,7 +133,7 @@
"{\"type\":\"%s\",\"ref\":\"%s\",\"snapshot-id\":%d}",
requirementType, refName, snapshotId);
UpdateRequirement actual = new UpdateRequirement.AssertRefSnapshotID(refName, snapshotId);
- Assertions.assertThat(UpdateRequirementParser.toJson(actual))
+ assertThat(UpdateRequirementParser.toJson(actual))
.as("AssertRefSnapshotId should convert to the correct JSON value")
.isEqualTo(expected);
}
@@ -145,7 +148,7 @@
"{\"type\":\"%s\",\"ref\":\"%s\",\"snapshot-id\":%d}",
requirementType, refName, snapshotId);
UpdateRequirement actual = new UpdateRequirement.AssertRefSnapshotID(refName, snapshotId);
- Assertions.assertThat(UpdateRequirementParser.toJson(actual))
+ assertThat(UpdateRequirementParser.toJson(actual))
.as("AssertRefSnapshotId should convert to the correct JSON value")
.isEqualTo(expected);
}
@@ -172,7 +175,7 @@
"{\"type\":\"%s\",\"last-assigned-field-id\":%d}",
requirementType, lastAssignedFieldId);
UpdateRequirement actual = new UpdateRequirement.AssertLastAssignedFieldId(lastAssignedFieldId);
- Assertions.assertThat(UpdateRequirementParser.toJson(actual))
+ assertThat(UpdateRequirementParser.toJson(actual))
.as("AssertLastAssignedFieldId should convert to the correct JSON value")
.isEqualTo(expected);
}
@@ -194,7 +197,7 @@
String expected =
String.format("{\"type\":\"%s\",\"current-schema-id\":%d}", requirementType, schemaId);
UpdateRequirement actual = new UpdateRequirement.AssertCurrentSchemaID(schemaId);
- Assertions.assertThat(UpdateRequirementParser.toJson(actual))
+ assertThat(UpdateRequirementParser.toJson(actual))
.as("AssertCurrentSchemaId should convert to the correct JSON value")
.isEqualTo(expected);
}
@@ -222,7 +225,7 @@
requirementType, lastAssignedPartitionId);
UpdateRequirement actual =
new UpdateRequirement.AssertLastAssignedPartitionId(lastAssignedPartitionId);
- Assertions.assertThat(UpdateRequirementParser.toJson(actual))
+ assertThat(UpdateRequirementParser.toJson(actual))
.as("AssertLastAssignedPartitionId should convert to the correct JSON value")
.isEqualTo(expected);
}
@@ -244,7 +247,7 @@
String expected =
String.format("{\"type\":\"%s\",\"default-spec-id\":%d}", requirementType, specId);
UpdateRequirement actual = new UpdateRequirement.AssertDefaultSpecID(specId);
- Assertions.assertThat(UpdateRequirementParser.toJson(actual))
+ assertThat(UpdateRequirementParser.toJson(actual))
.as("AssertDefaultSpecId should convert to the correct JSON value")
.isEqualTo(expected);
}
@@ -268,7 +271,7 @@
String.format(
"{\"type\":\"%s\",\"default-sort-order-id\":%d}", requirementType, sortOrderId);
UpdateRequirement actual = new UpdateRequirement.AssertDefaultSortOrderID(sortOrderId);
- Assertions.assertThat(UpdateRequirementParser.toJson(actual))
+ assertThat(UpdateRequirementParser.toJson(actual))
.as("AssertDefaultSortOrderId should convert to the correct JSON value")
.isEqualTo(expected);
}
@@ -322,13 +325,13 @@
(UpdateRequirement.AssertDefaultSortOrderID) actual);
break;
default:
- Assertions.fail("Unrecognized update requirement type: " + requirementType);
+ fail("Unrecognized update requirement type: " + requirementType);
}
}
private static void compareAssertTableUUID(
UpdateRequirement.AssertTableUUID expected, UpdateRequirement.AssertTableUUID actual) {
- Assertions.assertThat(actual.uuid())
+ assertThat(actual.uuid())
.as("UUID from JSON should not be null")
.isNotNull()
.as("UUID should parse correctly from JSON")
@@ -337,7 +340,7 @@
private static void compareAssertViewUUID(
UpdateRequirement.AssertViewUUID expected, UpdateRequirement.AssertViewUUID actual) {
- Assertions.assertThat(actual.uuid())
+ assertThat(actual.uuid())
.as("UUID from JSON should not be null")
.isNotNull()
.as("UUID should parse correctly from JSON")
@@ -349,7 +352,7 @@
// are the same and as expected.
private static void compareAssertTableDoesNotExist(
UpdateRequirement expected, UpdateRequirement actual) {
- Assertions.assertThat(actual)
+ assertThat(actual)
.isOfAnyClassIn(UpdateRequirement.AssertTableDoesNotExist.class)
.hasSameClassAs(expected);
}
@@ -357,10 +360,10 @@
private static void compareAssertRefSnapshotId(
UpdateRequirement.AssertRefSnapshotID expected,
UpdateRequirement.AssertRefSnapshotID actual) {
- Assertions.assertThat(actual.refName())
+ assertThat(actual.refName())
.as("Ref name should parse correctly from JSON")
.isEqualTo(expected.refName());
- Assertions.assertThat(actual.snapshotId())
+ assertThat(actual.snapshotId())
.as("Snapshot ID should parse correctly from JSON")
.isEqualTo(expected.snapshotId());
}
@@ -368,7 +371,7 @@
private static void compareAssertLastAssignedFieldId(
UpdateRequirement.AssertLastAssignedFieldId expected,
UpdateRequirement.AssertLastAssignedFieldId actual) {
- Assertions.assertThat(actual.lastAssignedFieldId())
+ assertThat(actual.lastAssignedFieldId())
.as("Last assigned field id should parse correctly from JSON")
.isEqualTo(expected.lastAssignedFieldId());
}
@@ -376,7 +379,7 @@
private static void compareAssertCurrentSchemaId(
UpdateRequirement.AssertCurrentSchemaID expected,
UpdateRequirement.AssertCurrentSchemaID actual) {
- Assertions.assertThat(actual.schemaId())
+ assertThat(actual.schemaId())
.as("Current schema id should parse correctly from JSON")
.isEqualTo(expected.schemaId());
}
@@ -384,7 +387,7 @@
private static void compareAssertLastAssignedPartitionId(
UpdateRequirement.AssertLastAssignedPartitionId expected,
UpdateRequirement.AssertLastAssignedPartitionId actual) {
- Assertions.assertThat(actual.lastAssignedPartitionId())
+ assertThat(actual.lastAssignedPartitionId())
.as("Last assigned partition id should parse correctly from JSON")
.isEqualTo(expected.lastAssignedPartitionId());
}
@@ -392,7 +395,7 @@
private static void compareAssertDefaultSpecId(
UpdateRequirement.AssertDefaultSpecID expected,
UpdateRequirement.AssertDefaultSpecID actual) {
- Assertions.assertThat(actual.specId())
+ assertThat(actual.specId())
.as("Default spec id should parse correctly from JSON")
.isEqualTo(expected.specId());
}
@@ -400,7 +403,7 @@
private static void compareAssertDefaultSortOrderId(
UpdateRequirement.AssertDefaultSortOrderID expected,
UpdateRequirement.AssertDefaultSortOrderID actual) {
- Assertions.assertThat(actual.sortOrderId())
+ assertThat(actual.sortOrderId())
.as("Default sort order id should parse correctly from JSON")
.isEqualTo(expected.sortOrderId());
}
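For reference, the pattern applied in every file below is the same: drop the org.assertj.core.api.Assertions import, statically import the individual entry points, and call them unqualified. A minimal, self-contained sketch of the resulting style (the class name StaticImportStyleExample and its values are illustrative only, not taken from this patch):

    import static org.assertj.core.api.Assertions.assertThat;
    import static org.assertj.core.api.Assertions.fail;

    import org.junit.jupiter.api.Test;

    class StaticImportStyleExample {
      @Test
      void readsWithoutClassQualifier() {
        // stand-in for a value parsed from JSON elsewhere in a real test
        int parsedId = 42;
        assertThat(parsedId).as("Parsed id should round-trip").isEqualTo(42);

        // unqualified fail() comes from the same static import
        String requirementType = "assert-table-uuid";
        if (!"assert-table-uuid".equals(requirementType)) {
          fail("Unrecognized update requirement type: " + requirementType);
        }
      }
    }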
diff --git a/core/src/test/java/org/apache/iceberg/avro/TestAvroNameMapping.java b/core/src/test/java/org/apache/iceberg/avro/TestAvroNameMapping.java
index 2af0984..cabc9f2 100644
--- a/core/src/test/java/org/apache/iceberg/avro/TestAvroNameMapping.java
+++ b/core/src/test/java/org/apache/iceberg/avro/TestAvroNameMapping.java
@@ -19,6 +19,8 @@
package org.apache.iceberg.avro;
import static org.apache.avro.generic.GenericData.Record;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
import java.io.File;
import java.io.IOException;
@@ -40,7 +42,6 @@
import org.apache.iceberg.relocated.com.google.common.collect.Lists;
import org.apache.iceberg.types.Comparators;
import org.apache.iceberg.types.Types;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
@SuppressWarnings("unchecked")
@@ -80,10 +81,8 @@
Record projected = writeAndRead(writeSchema, readSchema, record, nameMapping);
// field id 5 comes from read schema
- Assertions.assertThat(projected.get("location"))
- .as("location field should not be read")
- .isNull();
- Assertions.assertThat(projected.get("id")).isEqualTo(34L);
+ assertThat(projected.get("location")).as("location field should not be read").isNull();
+ assertThat(projected.get("id")).isEqualTo(34L);
// Table mapping partially project `location` map value
nameMapping =
@@ -102,9 +101,7 @@
projected = writeAndRead(writeSchema, readSchema, record, nameMapping);
Record projectedL1 = ((Map<String, Record>) projected.get("location")).get("l1");
- Assertions.assertThat(projectedL1.get("long"))
- .as("location.value.long, should not be read")
- .isNull();
+ assertThat(projectedL1.get("long")).as("location.value.long should not be read").isNull();
}
@Test
@@ -174,14 +171,12 @@
Map<Record, Record> projectedLocation = (Map<Record, Record>) projected.get("location");
Record projectedKey = projectedLocation.keySet().iterator().next();
Record projectedValue = projectedLocation.values().iterator().next();
- Assertions.assertThat(
- Comparators.charSequences().compare("k1", (CharSequence) projectedKey.get("k1")))
+ assertThat(Comparators.charSequences().compare("k1", (CharSequence) projectedKey.get("k1")))
.isEqualTo(0);
- Assertions.assertThat(
- Comparators.charSequences().compare("k2", (CharSequence) projectedKey.get("k2")))
+ assertThat(Comparators.charSequences().compare("k2", (CharSequence) projectedKey.get("k2")))
.isEqualTo(0);
- Assertions.assertThat(projectedValue.get("lat")).isEqualTo(52.995143f);
- Assertions.assertThat(projectedValue.get("long")).isNull();
+ assertThat(projectedValue.get("lat")).isEqualTo(52.995143f);
+ assertThat(projectedValue.get("long")).isNull();
}
@Test
@@ -201,7 +196,7 @@
new Schema(Types.NestedField.optional(18, "y", Types.IntegerType.get())));
Schema readSchema = writeSchema;
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
// In this case, pruneColumns result is an empty record
() -> writeAndRead(writeSchema, readSchema, record, nameMapping))
.isInstanceOf(IllegalArgumentException.class)
@@ -242,8 +237,8 @@
Schema readSchema = writeSchema;
Record projected = writeAndRead(writeSchema, readSchema, record, nameMapping);
- Assertions.assertThat(projected.get("point")).as("point is not projected").isNull();
- Assertions.assertThat(projected.get("id")).isEqualTo(34L);
+ assertThat(projected.get("point")).as("point is not projected").isNull();
+ assertThat(projected.get("id")).isEqualTo(34L);
// point array is partially projected
nameMapping =
MappingUtil.create(
@@ -259,9 +254,9 @@
projected = writeAndRead(writeSchema, readSchema, record, nameMapping);
Record point = ((List<Record>) projected.get("point")).get(0);
- Assertions.assertThat(point.get("x")).as("point.x is projected").isEqualTo(1);
- Assertions.assertThat(point.get("y")).as("point.y is not projected").isNull();
- Assertions.assertThat(projected.get("id")).isEqualTo(34L);
+ assertThat(point.get("x")).as("point.x is projected").isEqualTo(1);
+ assertThat(point.get("y")).as("point.y is not projected").isNull();
+ assertThat(projected.get("id")).isEqualTo(34L);
}
@Test
@@ -309,7 +304,7 @@
Types.NestedField.required(19, "y", Types.IntegerType.get())))));
Record projected = writeAndRead(writeSchema, readSchema, record, nameMapping);
- Assertions.assertThat(((List<Record>) projected.get("points")).get(0).get("y"))
+ assertThat(((List<Record>) projected.get("points")).get(0).get("y"))
.as("x is read as y")
.isEqualTo(1);
@@ -325,7 +320,7 @@
Types.NestedField.required(19, "z", Types.IntegerType.get())))));
projected = writeAndRead(writeSchema, readSchema, record, nameMapping);
- Assertions.assertThat(((List<Record>) projected.get("points")).get(0).get("z"))
+ assertThat(((List<Record>) projected.get("points")).get(0).get("z"))
.as("x is read as z")
.isEqualTo(1);
}
@@ -344,7 +339,7 @@
Schema readSchema = writeSchema;
// Pass null for nameMapping so that it is automatically inferred from read schema
Record projected = writeAndRead(writeSchema, readSchema, record, null);
- Assertions.assertThat(projected).isEqualTo(record);
+ assertThat(projected).isEqualTo(record);
}
@Test
@@ -364,7 +359,7 @@
Record record = super.writeAndRead(desc, writeSchema, readSchema, inputRecord);
Record projectedWithNameMapping =
writeAndRead(writeSchema, readSchema, inputRecord, MappingUtil.create(writeSchema));
- Assertions.assertThat(projectedWithNameMapping).isEqualTo(record);
+ assertThat(projectedWithNameMapping).isEqualTo(record);
return record;
}
diff --git a/core/src/test/java/org/apache/iceberg/avro/TestAvroReadProjection.java b/core/src/test/java/org/apache/iceberg/avro/TestAvroReadProjection.java
index e3aca9b..9364bd0 100644
--- a/core/src/test/java/org/apache/iceberg/avro/TestAvroReadProjection.java
+++ b/core/src/test/java/org/apache/iceberg/avro/TestAvroReadProjection.java
@@ -18,6 +18,8 @@
*/
package org.apache.iceberg.avro;
+import static org.assertj.core.api.Assertions.assertThat;
+
import java.io.File;
import java.io.IOException;
import java.util.List;
@@ -30,7 +32,6 @@
import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap;
import org.apache.iceberg.relocated.com.google.common.collect.Iterables;
import org.apache.iceberg.types.Types;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
public class TestAvroReadProjection extends TestReadProjection {
@@ -72,10 +73,10 @@
GenericData.Record projected =
writeAndRead("full_projection", writeSchema, writeSchema, record);
- Assertions.assertThat(((Map<Long, List<Long>>) projected.get("map")).get(100L))
+ assertThat(((Map<Long, List<Long>>) projected.get("map")).get(100L))
.as("Should contain correct value list")
.isEqualTo(values1);
- Assertions.assertThat(((Map<Long, List<Long>>) projected.get("map")).get(200L))
+ assertThat(((Map<Long, List<Long>>) projected.get("map")).get(200L))
.as("Should contain correct value list")
.isEqualTo(values2);
}
diff --git a/core/src/test/java/org/apache/iceberg/avro/TestBuildAvroProjection.java b/core/src/test/java/org/apache/iceberg/avro/TestBuildAvroProjection.java
index 40c04de..eaea439 100644
--- a/core/src/test/java/org/apache/iceberg/avro/TestBuildAvroProjection.java
+++ b/core/src/test/java/org/apache/iceberg/avro/TestBuildAvroProjection.java
@@ -19,13 +19,13 @@
package org.apache.iceberg.avro;
import static org.apache.iceberg.types.Types.NestedField.optional;
+import static org.assertj.core.api.Assertions.assertThat;
import java.util.Collections;
import java.util.function.Supplier;
import org.apache.avro.SchemaBuilder;
import org.apache.iceberg.types.Type;
import org.apache.iceberg.types.Types;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
public class TestBuildAvroProjection {
@@ -68,11 +68,8 @@
final org.apache.avro.Schema actual = testSubject.array(expected, supplier);
- Assertions.assertThat(actual)
- .as("Array projection produced undesired array schema")
- .isEqualTo(expected);
- Assertions.assertThat(
- Integer.valueOf(actual.getProp(AvroSchemaUtil.ELEMENT_ID_PROP)).intValue())
+ assertThat(actual).as("Array projection produced undesired array schema").isEqualTo(expected);
+ assertThat(Integer.valueOf(actual.getProp(AvroSchemaUtil.ELEMENT_ID_PROP)).intValue())
.as("Unexpected element ID discovered on the projected array schema")
.isEqualTo(0);
}
@@ -143,11 +140,8 @@
final org.apache.avro.Schema actual = testSubject.array(extraField, supplier);
- Assertions.assertThat(actual)
- .as("Array projection produced undesired array schema")
- .isEqualTo(expected);
- Assertions.assertThat(
- Integer.valueOf(actual.getProp(AvroSchemaUtil.ELEMENT_ID_PROP)).intValue())
+ assertThat(actual).as("Array projection produced undesired array schema").isEqualTo(expected);
+ assertThat(Integer.valueOf(actual.getProp(AvroSchemaUtil.ELEMENT_ID_PROP)).intValue())
.as("Unexpected element ID discovered on the projected array schema")
.isEqualTo(0);
}
@@ -206,11 +200,8 @@
final org.apache.avro.Schema actual = testSubject.array(lessField, supplier);
- Assertions.assertThat(actual)
- .as("Array projection produced undesired array schema")
- .isEqualTo(expected);
- Assertions.assertThat(
- Integer.valueOf(actual.getProp(AvroSchemaUtil.ELEMENT_ID_PROP)).intValue())
+ assertThat(actual).as("Array projection produced undesired array schema").isEqualTo(expected);
+ assertThat(Integer.valueOf(actual.getProp(AvroSchemaUtil.ELEMENT_ID_PROP)).intValue())
.as("Unexpected element ID discovered on the projected array schema")
.isEqualTo(0);
}
@@ -256,13 +247,11 @@
final org.apache.avro.Schema actual = testSubject.map(expected, supplier);
- Assertions.assertThat(actual)
- .as("Map projection produced undesired map schema")
- .isEqualTo(expected);
- Assertions.assertThat(Integer.valueOf(actual.getProp(AvroSchemaUtil.KEY_ID_PROP)).intValue())
+ assertThat(actual).as("Map projection produced undesired map schema").isEqualTo(expected);
+ assertThat(Integer.valueOf(actual.getProp(AvroSchemaUtil.KEY_ID_PROP)).intValue())
.as("Unexpected key ID discovered on the projected map schema")
.isEqualTo(0);
- Assertions.assertThat(Integer.valueOf(actual.getProp(AvroSchemaUtil.VALUE_ID_PROP)).intValue())
+ assertThat(Integer.valueOf(actual.getProp(AvroSchemaUtil.VALUE_ID_PROP)).intValue())
.as("Unexpected value ID discovered on the projected map schema")
.isEqualTo(1);
}
@@ -337,13 +326,11 @@
final org.apache.avro.Schema actual = testSubject.map(extraField, supplier);
- Assertions.assertThat(actual)
- .as("Map projection produced undesired map schema")
- .isEqualTo(expected);
- Assertions.assertThat(Integer.valueOf(actual.getProp(AvroSchemaUtil.KEY_ID_PROP)).intValue())
+ assertThat(actual).as("Map projection produced undesired map schema").isEqualTo(expected);
+ assertThat(Integer.valueOf(actual.getProp(AvroSchemaUtil.KEY_ID_PROP)).intValue())
.as("Unexpected key ID discovered on the projected map schema")
.isEqualTo(0);
- Assertions.assertThat(Integer.valueOf(actual.getProp(AvroSchemaUtil.VALUE_ID_PROP)).intValue())
+ assertThat(Integer.valueOf(actual.getProp(AvroSchemaUtil.VALUE_ID_PROP)).intValue())
.as("Unexpected value ID discovered on the projected map schema")
.isEqualTo(1);
}
@@ -406,13 +393,11 @@
final org.apache.avro.Schema actual = testSubject.map(lessField, supplier);
- Assertions.assertThat(actual)
- .as("Map projection produced undesired map schema")
- .isEqualTo(expected);
- Assertions.assertThat(Integer.valueOf(actual.getProp(AvroSchemaUtil.KEY_ID_PROP)).intValue())
+ assertThat(actual).as("Map projection produced undesired map schema").isEqualTo(expected);
+ assertThat(Integer.valueOf(actual.getProp(AvroSchemaUtil.KEY_ID_PROP)).intValue())
.as("Unexpected key ID discovered on the projected map schema")
.isEqualTo(0);
- Assertions.assertThat(Integer.valueOf(actual.getProp(AvroSchemaUtil.VALUE_ID_PROP)).intValue())
+ assertThat(Integer.valueOf(actual.getProp(AvroSchemaUtil.VALUE_ID_PROP)).intValue())
.as("Unexpected value ID discovered on the projected map schema")
.isEqualTo(1);
}
diff --git a/core/src/test/java/org/apache/iceberg/avro/TestReadProjection.java b/core/src/test/java/org/apache/iceberg/avro/TestReadProjection.java
index 331e427..ead17e9 100644
--- a/core/src/test/java/org/apache/iceberg/avro/TestReadProjection.java
+++ b/core/src/test/java/org/apache/iceberg/avro/TestReadProjection.java
@@ -18,6 +18,10 @@
*/
package org.apache.iceberg.avro;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
+import static org.assertj.core.api.Assertions.within;
+
import java.io.IOException;
import java.nio.file.Path;
import java.util.List;
@@ -31,7 +35,6 @@
import org.apache.iceberg.relocated.com.google.common.collect.Maps;
import org.apache.iceberg.types.Comparators;
import org.apache.iceberg.types.Types;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.io.TempDir;
@@ -54,11 +57,9 @@
Record projected = writeAndRead("full_projection", schema, schema, record);
- Assertions.assertThat((Long) projected.get("id"))
- .as("Should contain the correct id value")
- .isEqualTo(34L);
+ assertThat((Long) projected.get("id")).as("Should contain the correct id value").isEqualTo(34L);
int cmp = Comparators.charSequences().compare("test", (CharSequence) projected.get("data"));
- Assertions.assertThat(cmp).as("Should contain the correct data value").isEqualTo(0);
+ assertThat(cmp).as("Should contain the correct data value").isEqualTo(0);
}
@Test
@@ -78,10 +79,10 @@
Types.NestedField.required(0, "id", Types.LongType.get()));
Record projected = writeAndRead("full_projection", schema, reordered, record);
- Assertions.assertThat(projected.get(0).toString())
+ assertThat(projected.get(0).toString())
.as("Should contain the correct 0 value")
.isEqualTo("test");
- Assertions.assertThat(projected.get(1)).as("Should contain the correct 1 value").isEqualTo(34L);
+ assertThat(projected.get(1)).as("Should contain the correct 1 value").isEqualTo(34L);
}
@Test
@@ -102,11 +103,11 @@
Types.NestedField.optional(3, "missing_2", Types.LongType.get()));
Record projected = writeAndRead("full_projection", schema, reordered, record);
- Assertions.assertThat(projected.get(0)).as("Should contain the correct 0 value").isNull();
- Assertions.assertThat(projected.get(1).toString())
+ assertThat(projected.get(0)).as("Should contain the correct 0 value").isNull();
+ assertThat(projected.get(1).toString())
.as("Should contain the correct 1 value")
.isEqualTo("test");
- Assertions.assertThat(projected.get(2)).as("Should contain the correct 2 value").isNull();
+ assertThat(projected.get(2)).as("Should contain the correct 2 value").isNull();
}
@Test
@@ -122,10 +123,9 @@
Record projected = writeAndRead("empty_projection", schema, schema.select(), record);
- Assertions.assertThat(projected).as("Should read a non-null record").isNotNull();
+ assertThat(projected).as("Should read a non-null record").isNotNull();
// this is expected because there are no values
- Assertions.assertThatThrownBy(() -> projected.get(0))
- .isInstanceOf(ArrayIndexOutOfBoundsException.class);
+ assertThatThrownBy(() -> projected.get(0)).isInstanceOf(ArrayIndexOutOfBoundsException.class);
}
@Test
@@ -143,9 +143,7 @@
Record projected = writeAndRead("basic_projection_id", writeSchema, idOnly, record);
assertEmptyAvroField(projected, "data");
- Assertions.assertThat((Long) projected.get("id"))
- .as("Should contain the correct id value")
- .isEqualTo(34L);
+ assertThat((Long) projected.get("id")).as("Should contain the correct id value").isEqualTo(34L);
Schema dataOnly = new Schema(Types.NestedField.optional(1, "data", Types.StringType.get()));
@@ -153,7 +151,7 @@
assertEmptyAvroField(projected, "id");
int cmp = Comparators.charSequences().compare("test", (CharSequence) projected.get("data"));
- Assertions.assertThat(cmp).as("Should contain the correct data value").isEqualTo(0);
+ assertThat(cmp).as("Should contain the correct data value").isEqualTo(0);
}
@Test
@@ -174,11 +172,9 @@
Record projected = writeAndRead("project_and_rename", writeSchema, readSchema, record);
- Assertions.assertThat((Long) projected.get("id"))
- .as("Should contain the correct id value")
- .isEqualTo(34L);
+ assertThat((Long) projected.get("id")).as("Should contain the correct id value").isEqualTo(34L);
int cmp = Comparators.charSequences().compare("test", (CharSequence) projected.get("renamed"));
- Assertions.assertThat(cmp).as("Should contain the correct data/renamed value").isEqualTo(0);
+ assertThat(cmp).as("Should contain the correct data/renamed value").isEqualTo(0);
}
@Test
@@ -205,9 +201,7 @@
Record projected = writeAndRead("id_only", writeSchema, idOnly, record);
assertEmptyAvroField(projected, "location");
- Assertions.assertThat((long) projected.get("id"))
- .as("Should contain the correct id value")
- .isEqualTo(34L);
+ assertThat((long) projected.get("id")).as("Should contain the correct id value").isEqualTo(34L);
Schema latOnly =
new Schema(
@@ -219,11 +213,11 @@
projected = writeAndRead("latitude_only", writeSchema, latOnly, record);
Record projectedLocation = (Record) projected.get("location");
assertEmptyAvroField(projected, "id");
- Assertions.assertThat(projected.get("location")).as("Should project location").isNotNull();
+ assertThat(projected.get("location")).as("Should project location").isNotNull();
assertEmptyAvroField(projectedLocation, "long");
- Assertions.assertThat((Float) projectedLocation.get("lat"))
+ assertThat((Float) projectedLocation.get("lat"))
.as("Should project latitude")
- .isCloseTo(52.995143f, Assertions.within(0.000001f));
+ .isCloseTo(52.995143f, within(0.000001f));
Schema longOnly =
new Schema(
@@ -235,23 +229,23 @@
projected = writeAndRead("longitude_only", writeSchema, longOnly, record);
projectedLocation = (Record) projected.get("location");
assertEmptyAvroField(projected, "id");
- Assertions.assertThat(projected.get("location")).as("Should project location").isNotNull();
+ assertThat(projected.get("location")).as("Should project location").isNotNull();
assertEmptyAvroField(projectedLocation, "lat");
- Assertions.assertThat((Float) projectedLocation.get("long"))
+ assertThat((Float) projectedLocation.get("long"))
.as("Should project longitude")
- .isCloseTo(-1.539054f, Assertions.within(0.000001f));
+ .isCloseTo(-1.539054f, within(0.000001f));
Schema locationOnly = writeSchema.select("location");
projected = writeAndRead("location_only", writeSchema, locationOnly, record);
projectedLocation = (Record) projected.get("location");
assertEmptyAvroField(projected, "id");
- Assertions.assertThat(projected.get("location")).as("Should project location").isNotNull();
- Assertions.assertThat((Float) projectedLocation.get("lat"))
+ assertThat(projected.get("location")).as("Should project location").isNotNull();
+ assertThat((Float) projectedLocation.get("lat"))
.as("Should project latitude")
- .isCloseTo(52.995143f, Assertions.within(0.000001f));
- Assertions.assertThat((Float) projectedLocation.get("long"))
+ .isCloseTo(52.995143f, within(0.000001f));
+ assertThat((Float) projectedLocation.get("long"))
.as("Should project longitude")
- .isCloseTo(-1.539054f, Assertions.within(0.000001f));
+ .isCloseTo(-1.539054f, within(0.000001f));
}
@Test
@@ -273,29 +267,27 @@
Schema idOnly = new Schema(Types.NestedField.required(0, "id", Types.LongType.get()));
Record projected = writeAndRead("id_only", writeSchema, idOnly, record);
- Assertions.assertThat((long) projected.get("id"))
- .as("Should contain the correct id value")
- .isEqualTo(34L);
+ assertThat((long) projected.get("id")).as("Should contain the correct id value").isEqualTo(34L);
assertEmptyAvroField(projected, "properties");
Schema keyOnly = writeSchema.select("properties.key");
projected = writeAndRead("key_only", writeSchema, keyOnly, record);
assertEmptyAvroField(projected, "id");
- Assertions.assertThat(toStringMap((Map) projected.get("properties")))
+ assertThat(toStringMap((Map) projected.get("properties")))
.as("Should project entire map")
.isEqualTo(properties);
Schema valueOnly = writeSchema.select("properties.value");
projected = writeAndRead("value_only", writeSchema, valueOnly, record);
assertEmptyAvroField(projected, "id");
- Assertions.assertThat(toStringMap((Map) projected.get("properties")))
+ assertThat(toStringMap((Map) projected.get("properties")))
.as("Should project entire map")
.isEqualTo(properties);
Schema mapOnly = writeSchema.select("properties");
projected = writeAndRead("map_only", writeSchema, mapOnly, record);
assertEmptyAvroField(projected, "id");
- Assertions.assertThat(toStringMap((Map) projected.get("properties")))
+ assertThat(toStringMap((Map) projected.get("properties")))
.as("Should project entire map")
.isEqualTo(properties);
}
@@ -345,57 +337,51 @@
Schema idOnly = new Schema(Types.NestedField.required(0, "id", Types.LongType.get()));
Record projected = writeAndRead("id_only", writeSchema, idOnly, record);
- Assertions.assertThat((long) projected.get("id"))
- .as("Should contain the correct id value")
- .isEqualTo(34L);
+ assertThat((long) projected.get("id")).as("Should contain the correct id value").isEqualTo(34L);
assertEmptyAvroField(projected, "locations");
projected = writeAndRead("all_locations", writeSchema, writeSchema.select("locations"), record);
assertEmptyAvroField(projected, "id");
- Assertions.assertThat(toStringMap((Map) projected.get("locations")))
+ assertThat(toStringMap((Map) projected.get("locations")))
.as("Should project locations map")
.isEqualTo(record.get("locations"));
projected = writeAndRead("lat_only", writeSchema, writeSchema.select("locations.lat"), record);
assertEmptyAvroField(projected, "id");
Map<String, ?> locations = toStringMap((Map) projected.get("locations"));
- Assertions.assertThat(locations).as("Should project locations map").isNotNull();
- Assertions.assertThat(locations.keySet())
- .as("Should contain L1 and L2")
- .containsExactly("L1", "L2");
+ assertThat(locations).as("Should project locations map").isNotNull();
+ assertThat(locations.keySet()).as("Should contain L1 and L2").containsExactly("L1", "L2");
Record projectedL1 = (Record) locations.get("L1");
- Assertions.assertThat(projectedL1).as("L1 should not be null").isNotNull();
- Assertions.assertThat((float) projectedL1.get("lat"))
+ assertThat(projectedL1).as("L1 should not be null").isNotNull();
+ assertThat((float) projectedL1.get("lat"))
.as("L1 should contain lat")
- .isCloseTo(53.992811f, Assertions.within(0.000001f));
+ .isCloseTo(53.992811f, within(0.000001f));
assertEmptyAvroField(projectedL1, "long");
Record projectedL2 = (Record) locations.get("L2");
- Assertions.assertThat(projectedL2).as("L2 should not be null").isNotNull();
- Assertions.assertThat((float) projectedL2.get("lat"))
+ assertThat(projectedL2).as("L2 should not be null").isNotNull();
+ assertThat((float) projectedL2.get("lat"))
.as("L2 should contain lat")
- .isCloseTo(52.995143f, Assertions.within(0.000001f));
+ .isCloseTo(52.995143f, within(0.000001f));
assertEmptyAvroField(projectedL2, "y");
projected =
writeAndRead("long_only", writeSchema, writeSchema.select("locations.long"), record);
assertEmptyAvroField(projected, "id");
locations = toStringMap((Map) projected.get("locations"));
- Assertions.assertThat(locations).as("Should project locations map").isNotNull();
- Assertions.assertThat(locations.keySet())
- .as("Should contain L1 and L2")
- .containsExactly("L1", "L2");
+ assertThat(locations).as("Should project locations map").isNotNull();
+ assertThat(locations.keySet()).as("Should contain L1 and L2").containsExactly("L1", "L2");
projectedL1 = (Record) locations.get("L1");
- Assertions.assertThat(projectedL1).as("L1 should not be null").isNotNull();
+ assertThat(projectedL1).as("L1 should not be null").isNotNull();
assertEmptyAvroField(projectedL1, "lat");
- Assertions.assertThat((float) projectedL1.get("long"))
+ assertThat((float) projectedL1.get("long"))
.as("L1 should contain long")
- .isCloseTo(-1.542616f, Assertions.within(0.000001f));
+ .isCloseTo(-1.542616f, within(0.000001f));
projectedL2 = (Record) locations.get("L2");
- Assertions.assertThat(projectedL2).as("L2 should not be null").isNotNull();
+ assertThat(projectedL2).as("L2 should not be null").isNotNull();
assertEmptyAvroField(projectedL2, "lat");
- Assertions.assertThat((float) projectedL2.get("long"))
+ assertThat((float) projectedL2.get("long"))
.as("L2 should contain long")
- .isCloseTo(-1.539054f, Assertions.within(0.000001f));
+ .isCloseTo(-1.539054f, within(0.000001f));
Schema latitiudeRenamed =
new Schema(
@@ -412,22 +398,20 @@
projected = writeAndRead("latitude_renamed", writeSchema, latitiudeRenamed, record);
assertEmptyAvroField(projected, "id");
locations = toStringMap((Map) projected.get("locations"));
- Assertions.assertThat(locations).as("Should project locations map").isNotNull();
- Assertions.assertThat(locations.keySet())
- .as("Should contain L1 and L2")
- .containsExactly("L1", "L2");
+ assertThat(locations).as("Should project locations map").isNotNull();
+ assertThat(locations.keySet()).as("Should contain L1 and L2").containsExactly("L1", "L2");
projectedL1 = (Record) locations.get("L1");
- Assertions.assertThat(projectedL1).as("L1 should not be null").isNotNull();
- Assertions.assertThat((float) projectedL1.get("latitude"))
+ assertThat(projectedL1).as("L1 should not be null").isNotNull();
+ assertThat((float) projectedL1.get("latitude"))
.as("L1 should contain latitude")
- .isCloseTo(53.992811f, Assertions.within(0.000001f));
+ .isCloseTo(53.992811f, within(0.000001f));
assertEmptyAvroField(projectedL1, "lat");
assertEmptyAvroField(projectedL1, "long");
projectedL2 = (Record) locations.get("L2");
- Assertions.assertThat(projectedL2).as("L2 should not be null").isNotNull();
- Assertions.assertThat((float) projectedL2.get("latitude"))
+ assertThat(projectedL2).as("L2 should not be null").isNotNull();
+ assertThat((float) projectedL2.get("latitude"))
.as("L2 should contain latitude")
- .isCloseTo(52.995143f, Assertions.within(0.000001f));
+ .isCloseTo(52.995143f, within(0.000001f));
assertEmptyAvroField(projectedL2, "lat");
assertEmptyAvroField(projectedL2, "long");
}
@@ -449,24 +433,18 @@
Schema idOnly = new Schema(Types.NestedField.required(0, "id", Types.LongType.get()));
Record projected = writeAndRead("id_only", writeSchema, idOnly, record);
- Assertions.assertThat((long) projected.get("id"))
- .as("Should contain the correct id value")
- .isEqualTo(34L);
+ assertThat((long) projected.get("id")).as("Should contain the correct id value").isEqualTo(34L);
assertEmptyAvroField(projected, "values");
Schema elementOnly = writeSchema.select("values.element");
projected = writeAndRead("element_only", writeSchema, elementOnly, record);
assertEmptyAvroField(projected, "id");
- Assertions.assertThat(projected.get("values"))
- .as("Should project entire list")
- .isEqualTo(values);
+ assertThat(projected.get("values")).as("Should project entire list").isEqualTo(values);
Schema listOnly = writeSchema.select("values");
projected = writeAndRead("list_only", writeSchema, listOnly, record);
assertEmptyAvroField(projected, "id");
- Assertions.assertThat(projected.get("values"))
- .as("Should project entire list")
- .isEqualTo(values);
+ assertThat(projected.get("values")).as("Should project entire list").isEqualTo(values);
}
@Test
@@ -501,40 +479,38 @@
Schema idOnly = new Schema(Types.NestedField.required(0, "id", Types.LongType.get()));
Record projected = writeAndRead("id_only", writeSchema, idOnly, record);
- Assertions.assertThat((long) projected.get("id"))
- .as("Should contain the correct id value")
- .isEqualTo(34L);
+ assertThat((long) projected.get("id")).as("Should contain the correct id value").isEqualTo(34L);
assertEmptyAvroField(projected, "points");
projected = writeAndRead("all_points", writeSchema, writeSchema.select("points"), record);
assertEmptyAvroField(projected, "id");
- Assertions.assertThat(projected.get("points"))
+ assertThat(projected.get("points"))
.as("Should project points list")
.isEqualTo(record.get("points"));
projected = writeAndRead("x_only", writeSchema, writeSchema.select("points.x"), record);
assertEmptyAvroField(projected, "id");
- Assertions.assertThat(projected.get("points")).as("Should project points list").isNotNull();
+ assertThat(projected.get("points")).as("Should project points list").isNotNull();
List<Record> points = (List<Record>) projected.get("points");
- Assertions.assertThat(points).as("Should read 2 points").hasSize(2);
+ assertThat(points).as("Should read 2 points").hasSize(2);
Record projectedP1 = points.get(0);
- Assertions.assertThat((int) projectedP1.get("x")).as("Should project x").isEqualTo(1);
+ assertThat((int) projectedP1.get("x")).as("Should project x").isEqualTo(1);
assertEmptyAvroField(projectedP1, "y");
Record projectedP2 = points.get(1);
- Assertions.assertThat((int) projectedP2.get("x")).as("Should project x").isEqualTo(3);
+ assertThat((int) projectedP2.get("x")).as("Should project x").isEqualTo(3);
assertEmptyAvroField(projectedP2, "y");
projected = writeAndRead("y_only", writeSchema, writeSchema.select("points.y"), record);
assertEmptyAvroField(projected, "id");
- Assertions.assertThat(projected.get("points")).as("Should project points list").isNotNull();
+ assertThat(projected.get("points")).as("Should project points list").isNotNull();
points = (List<Record>) projected.get("points");
- Assertions.assertThat(points).as("Should read 2 points").hasSize(2);
+ assertThat(points).as("Should read 2 points").hasSize(2);
projectedP1 = points.get(0);
assertEmptyAvroField(projectedP1, "x");
- Assertions.assertThat((int) projectedP1.get("y")).as("Should project y").isEqualTo(2);
+ assertThat((int) projectedP1.get("y")).as("Should project y").isEqualTo(2);
projectedP2 = points.get(1);
assertEmptyAvroField(projectedP2, "x");
- Assertions.assertThat(projectedP2.get("y")).as("Should project null y").isNull();
+ assertThat(projectedP2.get("y")).as("Should project null y").isNull();
Schema yRenamed =
new Schema(
@@ -548,17 +524,17 @@
projected = writeAndRead("y_renamed", writeSchema, yRenamed, record);
assertEmptyAvroField(projected, "id");
- Assertions.assertThat(projected.get("points")).as("Should project points list").isNotNull();
+ assertThat(projected.get("points")).as("Should project points list").isNotNull();
points = (List<Record>) projected.get("points");
- Assertions.assertThat(points).as("Should read 2 points").hasSize(2);
+ assertThat(points).as("Should read 2 points").hasSize(2);
projectedP1 = points.get(0);
assertEmptyAvroField(projectedP1, "x");
assertEmptyAvroField(projectedP1, "y");
- Assertions.assertThat((int) projectedP1.get("z")).as("Should project z").isEqualTo(2);
+ assertThat((int) projectedP1.get("z")).as("Should project z").isEqualTo(2);
projectedP2 = points.get(1);
assertEmptyAvroField(projectedP2, "x");
assertEmptyAvroField(projectedP2, "y");
- Assertions.assertThat(projectedP2.get("z")).as("Should project null z").isNull();
+ assertThat(projectedP2.get("z")).as("Should project null z").isNull();
}
@Test
@@ -588,10 +564,8 @@
assertEmptyAvroField(projected, "id");
Record result = (Record) projected.get("location");
- Assertions.assertThat(projected.get(0))
- .as("location should be in the 0th position")
- .isEqualTo(result);
- Assertions.assertThat(result).as("Should contain an empty record").isNotNull();
+ assertThat(projected.get(0)).as("location should be in the 0th position").isEqualTo(result);
+ assertThat(result).as("Should contain an empty record").isNotNull();
assertEmptyAvroField(result, "lat");
assertEmptyAvroField(result, "long");
}
@@ -621,10 +595,8 @@
Record projected = writeAndRead("empty_req_proj", writeSchema, emptyStruct, record);
assertEmptyAvroField(projected, "id");
Record result = (Record) projected.get("location");
- Assertions.assertThat(projected.get(0))
- .as("location should be in the 0th position")
- .isEqualTo(result);
- Assertions.assertThat(result).as("Should contain an empty record").isNotNull();
+ assertThat(projected.get(0)).as("location should be in the 0th position").isEqualTo(result);
+ assertThat(result).as("Should contain an empty record").isNotNull();
assertEmptyAvroField(result, "lat");
assertEmptyAvroField(result, "long");
}
@@ -659,19 +631,15 @@
Types.NestedField.required(4, "empty", Types.StructType.of()))));
Record projected = writeAndRead("req_empty_req_proj", writeSchema, emptyStruct, record);
- Assertions.assertThat(projected.get("id")).as("Should project id").isEqualTo(34L);
+ assertThat(projected.get("id")).as("Should project id").isEqualTo(34L);
Record result = (Record) projected.get("location");
- Assertions.assertThat(projected.get(1))
- .as("location should be in the 1st position")
- .isEqualTo(result);
- Assertions.assertThat(result).as("Should contain an empty record").isNotNull();
+ assertThat(projected.get(1)).as("location should be in the 1st position").isEqualTo(result);
+ assertThat(result).as("Should contain an empty record").isNotNull();
assertEmptyAvroField(result, "lat");
assertEmptyAvroField(result, "long");
- Assertions.assertThat(result.getSchema().getField("empty"))
- .as("Should project empty")
- .isNotNull();
- Assertions.assertThat(result.get("empty")).as("Empty should not be null").isNotNull();
- Assertions.assertThat(((Record) result.get("empty")).getSchema().getFields())
+ assertThat(result.getSchema().getField("empty")).as("Should project empty").isNotNull();
+ assertThat(result.get("empty")).as("Empty should not be null").isNotNull();
+ assertThat(((Record) result.get("empty")).getSchema().getFields())
.as("Empty should be empty")
.isEmpty();
}
@@ -714,16 +682,12 @@
Record projected = writeAndRead("nested_empty_proj", writeSchema, emptyStruct, record);
assertEmptyAvroField(projected, "id");
Record outerResult = (Record) projected.get("outer");
- Assertions.assertThat(projected.get(0))
- .as("Outer should be in the 0th position")
- .isEqualTo(outerResult);
- Assertions.assertThat(outerResult).as("Should contain the outer record").isNotNull();
+ assertThat(projected.get(0)).as("Outer should be in the 0th position").isEqualTo(outerResult);
+ assertThat(outerResult).as("Should contain the outer record").isNotNull();
assertEmptyAvroField(outerResult, "lat");
Record innerResult = (Record) outerResult.get("inner");
- Assertions.assertThat(outerResult.get(0))
- .as("Inner should be in the 0th position")
- .isEqualTo(innerResult);
- Assertions.assertThat(innerResult).as("Should contain the inner record").isNotNull();
+ assertThat(outerResult.get(0)).as("Inner should be in the 0th position").isEqualTo(innerResult);
+ assertThat(innerResult).as("Should contain the inner record").isNotNull();
assertEmptyAvroField(innerResult, "lon");
}
@@ -763,21 +727,17 @@
Record projected = writeAndRead("nested_empty_req_proj", writeSchema, emptyStruct, record);
assertEmptyAvroField(projected, "id");
Record outerResult = (Record) projected.get("outer");
- Assertions.assertThat(projected.get(0))
- .as("Outer should be in the 0th position")
- .isEqualTo(outerResult);
- Assertions.assertThat(outerResult).as("Should contain the outer record").isNotNull();
+ assertThat(projected.get(0)).as("Outer should be in the 0th position").isEqualTo(outerResult);
+ assertThat(outerResult).as("Should contain the outer record").isNotNull();
assertEmptyAvroField(outerResult, "lat");
Record innerResult = (Record) outerResult.get("inner");
- Assertions.assertThat(outerResult.get(0))
- .as("Inner should be in the 0th position")
- .isEqualTo(innerResult);
- Assertions.assertThat(innerResult).as("Should contain the inner record").isNotNull();
+ assertThat(outerResult.get(0)).as("Inner should be in the 0th position").isEqualTo(innerResult);
+ assertThat(innerResult).as("Should contain the inner record").isNotNull();
assertEmptyAvroField(innerResult, "lon");
}
private void assertEmptyAvroField(GenericRecord record, String field) {
- Assertions.assertThatThrownBy(() -> record.get(field))
+ assertThatThrownBy(() -> record.get(field))
.isInstanceOf(AvroRuntimeException.class)
.hasMessage("Not a valid schema field: " + field);
}
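The float and exception assertions converted above also rely on statically imported within(...) and assertThatThrownBy(...). A small, self-contained sketch under the same assumptions (the class name OffsetAndThrownByExample and the literal values are illustrative, not from this patch):

    import static org.assertj.core.api.Assertions.assertThat;
    import static org.assertj.core.api.Assertions.assertThatThrownBy;
    import static org.assertj.core.api.Assertions.within;

    import java.util.Collections;
    import java.util.List;
    import org.junit.jupiter.api.Test;

    class OffsetAndThrownByExample {
      @Test
      void closeToAndThrownBy() {
        // within(...) builds the Offset used for approximate float comparison
        float latitude = 52.995143f;
        assertThat(latitude).as("Should project latitude").isCloseTo(52.995143f, within(0.000001f));

        // assertThatThrownBy(...) wraps the call that is expected to throw
        List<String> empty = Collections.emptyList();
        assertThatThrownBy(() -> empty.get(0)).isInstanceOf(IndexOutOfBoundsException.class);
      }
    }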
diff --git a/core/src/test/java/org/apache/iceberg/catalog/CatalogTests.java b/core/src/test/java/org/apache/iceberg/catalog/CatalogTests.java
index ff01d80..8c95f27 100644
--- a/core/src/test/java/org/apache/iceberg/catalog/CatalogTests.java
+++ b/core/src/test/java/org/apache/iceberg/catalog/CatalogTests.java
@@ -19,6 +19,9 @@
package org.apache.iceberg.catalog;
import static org.apache.iceberg.types.Types.NestedField.required;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
+import static org.assertj.core.api.Assertions.setMaxStackTraceElementsDisplayed;
import java.io.IOException;
import java.io.UncheckedIOException;
@@ -63,7 +66,6 @@
import org.apache.iceberg.relocated.com.google.common.collect.Streams;
import org.apache.iceberg.types.Types;
import org.apache.iceberg.util.CharSequenceSet;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Assumptions;
import org.junit.jupiter.api.Test;
@@ -172,29 +174,29 @@
public void testCreateNamespace() {
C catalog = catalog();
- Assertions.assertThat(catalog.namespaceExists(NS)).as("Namespace should not exist").isFalse();
+ assertThat(catalog.namespaceExists(NS)).as("Namespace should not exist").isFalse();
catalog.createNamespace(NS);
- Assertions.assertThat(catalog.listNamespaces())
+ assertThat(catalog.listNamespaces())
.as("Catalog should have the created namespace")
.contains(NS);
- Assertions.assertThat(catalog.namespaceExists(NS)).as("Namespace should exist").isTrue();
+ assertThat(catalog.namespaceExists(NS)).as("Namespace should exist").isTrue();
}
@Test
public void testCreateExistingNamespace() {
C catalog = catalog();
- Assertions.assertThat(catalog.namespaceExists(NS)).as("Namespace should not exist").isFalse();
+ assertThat(catalog.namespaceExists(NS)).as("Namespace should not exist").isFalse();
catalog.createNamespace(NS);
- Assertions.assertThat(catalog.namespaceExists(NS)).as("Namespace should exist").isTrue();
+ assertThat(catalog.namespaceExists(NS)).as("Namespace should exist").isTrue();
- Assertions.assertThatThrownBy(() -> catalog.createNamespace(NS))
+ assertThatThrownBy(() -> catalog.createNamespace(NS))
.isInstanceOf(AlreadyExistsException.class)
.hasMessageContaining("Namespace already exists");
- Assertions.assertThat(catalog.namespaceExists(NS)).as("Namespace should still exist").isTrue();
+ assertThat(catalog.namespaceExists(NS)).as("Namespace should still exist").isTrue();
}
@Test
@@ -203,15 +205,15 @@
C catalog = catalog();
- Assertions.assertThat(catalog.namespaceExists(NS)).as("Namespace should not exist").isFalse();
+ assertThat(catalog.namespaceExists(NS)).as("Namespace should not exist").isFalse();
Map<String, String> createProps = ImmutableMap.of("prop", "val");
catalog.createNamespace(NS, createProps);
- Assertions.assertThat(catalog.namespaceExists(NS)).as("Namespace should exist").isTrue();
+ assertThat(catalog.namespaceExists(NS)).as("Namespace should exist").isTrue();
Map<String, String> props = catalog.loadNamespaceMetadata(NS);
- Assertions.assertThat(Sets.intersection(createProps.entrySet(), props.entrySet()))
+ assertThat(Sets.intersection(createProps.entrySet(), props.entrySet()))
.as("Create properties should be a subset of returned properties")
.containsExactlyInAnyOrderElementsOf(createProps.entrySet());
}
@@ -220,16 +222,16 @@
public void testLoadNamespaceMetadata() {
C catalog = catalog();
- Assertions.assertThat(catalog.namespaceExists(NS)).as("Namespace should not exist").isFalse();
+ assertThat(catalog.namespaceExists(NS)).as("Namespace should not exist").isFalse();
- Assertions.assertThatThrownBy(() -> catalog.loadNamespaceMetadata(NS))
+ assertThatThrownBy(() -> catalog.loadNamespaceMetadata(NS))
.isInstanceOf(NoSuchNamespaceException.class)
.hasMessageStartingWith("Namespace does not exist: newdb");
catalog.createNamespace(NS);
- Assertions.assertThat(catalog.namespaceExists(NS)).as("Namespace should exist").isTrue();
+ assertThat(catalog.namespaceExists(NS)).as("Namespace should exist").isTrue();
Map<String, String> props = catalog.loadNamespaceMetadata(NS);
- Assertions.assertThat(props).as("Should return non-null property map").isNotNull();
+ assertThat(props).as("Should return non-null property map").isNotNull();
// note that there are no requirements for the properties returned by the catalog
}
@@ -245,7 +247,7 @@
catalog.setProperties(NS, properties);
Map<String, String> actualProperties = catalog.loadNamespaceMetadata(NS);
- Assertions.assertThat(actualProperties.entrySet())
+ assertThat(actualProperties.entrySet())
.as("Set properties should be a subset of returned properties")
.containsAll(properties.entrySet());
}
@@ -262,7 +264,7 @@
catalog.setProperties(NS, initialProperties);
Map<String, String> actualProperties = catalog.loadNamespaceMetadata(NS);
- Assertions.assertThat(actualProperties.entrySet())
+ assertThat(actualProperties.entrySet())
.as("Set properties should be a subset of returned properties")
.containsAll(initialProperties.entrySet());
@@ -271,7 +273,7 @@
catalog.setProperties(NS, updatedProperties);
Map<String, String> finalProperties = catalog.loadNamespaceMetadata(NS);
- Assertions.assertThat(finalProperties.entrySet())
+ assertThat(finalProperties.entrySet())
.as("Updated properties should be a subset of returned properties")
.containsAll(updatedProperties.entrySet());
}
@@ -288,7 +290,7 @@
catalog.setProperties(NS, initialProperties);
Map<String, String> actualProperties = catalog.loadNamespaceMetadata(NS);
- Assertions.assertThat(actualProperties.entrySet())
+ assertThat(actualProperties.entrySet())
.as("Set properties should be a subset of returned properties")
.containsAll(initialProperties.entrySet());
@@ -298,7 +300,7 @@
catalog.setProperties(NS, updatedProperties);
Map<String, String> finalProperties = catalog.loadNamespaceMetadata(NS);
- Assertions.assertThat(finalProperties.entrySet())
+ assertThat(finalProperties.entrySet())
.as("Updated properties should be a subset of returned properties")
.containsAll(updatedProperties.entrySet());
}
@@ -309,7 +311,7 @@
C catalog = catalog();
- Assertions.assertThatThrownBy(() -> catalog.setProperties(NS, ImmutableMap.of("test", "value")))
+ assertThatThrownBy(() -> catalog.setProperties(NS, ImmutableMap.of("test", "value")))
.isInstanceOf(NoSuchNamespaceException.class)
.hasMessageStartingWith("Namespace does not exist: newdb");
}
@@ -327,10 +329,10 @@
catalog.removeProperties(NS, ImmutableSet.of("created-at"));
Map<String, String> actualProperties = catalog.loadNamespaceMetadata(NS);
- Assertions.assertThat(actualProperties.containsKey("created-at"))
+ assertThat(actualProperties.containsKey("created-at"))
.as("Should not contain deleted property key")
.isFalse();
- Assertions.assertThat(Sets.intersection(properties.entrySet(), actualProperties.entrySet()))
+ assertThat(Sets.intersection(properties.entrySet(), actualProperties.entrySet()))
.as("Expected properties should be a subset of returned properties")
.containsExactlyInAnyOrderElementsOf(ImmutableMap.of("owner", "user").entrySet());
}
@@ -341,7 +343,7 @@
C catalog = catalog();
- Assertions.assertThatThrownBy(() -> catalog.removeProperties(NS, ImmutableSet.of("a", "b")))
+ assertThatThrownBy(() -> catalog.removeProperties(NS, ImmutableSet.of("a", "b")))
.isInstanceOf(NoSuchNamespaceException.class)
.hasMessageStartingWith("Namespace does not exist: newdb");
}
@@ -350,21 +352,21 @@
public void testDropNamespace() {
C catalog = catalog();
- Assertions.assertThat(catalog.namespaceExists(NS)).as("Namespace should not exist").isFalse();
+ assertThat(catalog.namespaceExists(NS)).as("Namespace should not exist").isFalse();
catalog.createNamespace(NS);
- Assertions.assertThat(catalog.namespaceExists(NS)).as("Namespace should exist").isTrue();
- Assertions.assertThat(catalog.dropNamespace(NS))
+ assertThat(catalog.namespaceExists(NS)).as("Namespace should exist").isTrue();
+ assertThat(catalog.dropNamespace(NS))
.as("Dropping an existing namespace should return true")
.isTrue();
- Assertions.assertThat(catalog.namespaceExists(NS)).as("Namespace should not exist").isFalse();
+ assertThat(catalog.namespaceExists(NS)).as("Namespace should not exist").isFalse();
}
@Test
public void testDropNonexistentNamespace() {
C catalog = catalog();
- Assertions.assertThat(catalog.dropNamespace(NS))
+ assertThat(catalog.dropNamespace(NS))
.as("Dropping a nonexistent namespace should return false")
.isFalse();
}
@@ -379,22 +381,22 @@
Namespace ns2 = Namespace.of("newdb_2");
catalog.createNamespace(ns1);
- Assertions.assertThat(catalog.listNamespaces())
+ assertThat(catalog.listNamespaces())
.as("Should include newdb_1")
.hasSameElementsAs(concat(starting, ns1));
catalog.createNamespace(ns2);
- Assertions.assertThat(catalog.listNamespaces())
+ assertThat(catalog.listNamespaces())
.as("Should include newdb_1 and newdb_2")
.hasSameElementsAs(concat(starting, ns1, ns2));
catalog.dropNamespace(ns1);
- Assertions.assertThat(catalog.listNamespaces())
+ assertThat(catalog.listNamespaces())
.as("Should include newdb_2, not newdb_1")
.hasSameElementsAs(concat(starting, ns2));
catalog.dropNamespace(ns2);
- Assertions.assertThat(catalog.listNamespaces().containsAll(starting))
+ assertThat(catalog.listNamespaces().containsAll(starting))
.as("Should include only starting namespaces")
.isTrue();
}
@@ -414,35 +416,35 @@
Namespace child2 = Namespace.of("parent", "child2");
catalog.createNamespace(parent);
- Assertions.assertThat(catalog.listNamespaces())
+ assertThat(catalog.listNamespaces())
.as("Should include parent")
.hasSameElementsAs(concat(starting, parent));
- Assertions.assertThat(catalog.listNamespaces(parent))
+ assertThat(catalog.listNamespaces(parent))
.as("Should have no children in newly created parent namespace")
.isEmpty();
catalog.createNamespace(child1);
- Assertions.assertThat(catalog.listNamespaces(parent))
+ assertThat(catalog.listNamespaces(parent))
.as("Should include child1")
.hasSameElementsAs(ImmutableList.of(child1));
catalog.createNamespace(child2);
- Assertions.assertThat(catalog.listNamespaces(parent))
+ assertThat(catalog.listNamespaces(parent))
.as("Should include child1 and child2")
.hasSameElementsAs(ImmutableList.of(child1, child2));
- Assertions.assertThat(catalog.listNamespaces())
+ assertThat(catalog.listNamespaces())
.as("Should not change listing the root")
.hasSameElementsAs(concat(starting, parent));
catalog.dropNamespace(child1);
- Assertions.assertThat(catalog.listNamespaces(parent))
+ assertThat(catalog.listNamespaces(parent))
.as("Should include only child2")
.hasSameElementsAs(ImmutableList.of(child2));
catalog.dropNamespace(child2);
- Assertions.assertThat(catalog.listNamespaces(parent)).as("Should be empty").isEmpty();
+ assertThat(catalog.listNamespaces(parent)).as("Should be empty").isEmpty();
}
@Test
@@ -453,21 +455,17 @@
Namespace withSlash = Namespace.of("new/db");
- Assertions.assertThat(catalog.namespaceExists(withSlash))
- .as("Namespace should not exist")
- .isFalse();
+ assertThat(catalog.namespaceExists(withSlash)).as("Namespace should not exist").isFalse();
catalog.createNamespace(withSlash);
- Assertions.assertThat(catalog.namespaceExists(withSlash)).as("Namespace should exist").isTrue();
+ assertThat(catalog.namespaceExists(withSlash)).as("Namespace should exist").isTrue();
Map<String, String> properties = catalog.loadNamespaceMetadata(withSlash);
- Assertions.assertThat(properties).as("Properties should be accessible").isNotNull();
- Assertions.assertThat(catalog.dropNamespace(withSlash))
+ assertThat(properties).as("Properties should be accessible").isNotNull();
+ assertThat(catalog.dropNamespace(withSlash))
.as("Dropping the namespace should succeed")
.isTrue();
- Assertions.assertThat(catalog.namespaceExists(withSlash))
- .as("Namespace should not exist")
- .isFalse();
+ assertThat(catalog.namespaceExists(withSlash)).as("Namespace should not exist").isFalse();
}
@Test
@@ -478,21 +476,15 @@
Namespace withDot = Namespace.of("new.db");
- Assertions.assertThat(catalog.namespaceExists(withDot))
- .as("Namespace should not exist")
- .isFalse();
+ assertThat(catalog.namespaceExists(withDot)).as("Namespace should not exist").isFalse();
catalog.createNamespace(withDot);
- Assertions.assertThat(catalog.namespaceExists(withDot)).as("Namespace should exist").isTrue();
+ assertThat(catalog.namespaceExists(withDot)).as("Namespace should exist").isTrue();
Map<String, String> properties = catalog.loadNamespaceMetadata(withDot);
- Assertions.assertThat(properties).as("Properties should be accessible").isNotNull();
- Assertions.assertThat(catalog.dropNamespace(withDot))
- .as("Dropping the namespace should succeed")
- .isTrue();
- Assertions.assertThat(catalog.namespaceExists(withDot))
- .as("Namespace should not exist")
- .isFalse();
+ assertThat(properties).as("Properties should be accessible").isNotNull();
+ assertThat(catalog.dropNamespace(withDot)).as("Dropping the namespace should succeed").isTrue();
+ assertThat(catalog.namespaceExists(withDot)).as("Namespace should not exist").isFalse();
}
@Test
@@ -501,26 +493,26 @@
TableIdentifier ident = TableIdentifier.of("ns", "table");
- Assertions.assertThat(catalog.tableExists(ident)).as("Table should not exist").isFalse();
+ assertThat(catalog.tableExists(ident)).as("Table should not exist").isFalse();
if (requiresNamespaceCreate()) {
catalog.createNamespace(ident.namespace());
}
Table table = catalog.buildTable(ident, SCHEMA).create();
- Assertions.assertThat(catalog.tableExists(ident)).as("Table should exist").isTrue();
+ assertThat(catalog.tableExists(ident)).as("Table should exist").isTrue();
// validate table settings
- Assertions.assertThat(table.name())
+ assertThat(table.name())
.as("Table name should report its full name")
.isEqualTo(catalog.name() + "." + ident);
- Assertions.assertThat(table.schema().asStruct())
+ assertThat(table.schema().asStruct())
.as("Schema should match expected ID assignment")
.isEqualTo(TABLE_SCHEMA.asStruct());
- Assertions.assertThat(table.location()).as("Should have a location").isNotNull();
- Assertions.assertThat(table.spec().isUnpartitioned()).as("Should be unpartitioned").isTrue();
- Assertions.assertThat(table.sortOrder().isUnsorted()).as("Should be unsorted").isTrue();
- Assertions.assertThat(table.properties()).as("Should have table properties").isNotNull();
+ assertThat(table.location()).as("Should have a location").isNotNull();
+ assertThat(table.spec().isUnpartitioned()).as("Should be unpartitioned").isTrue();
+ assertThat(table.sortOrder().isUnsorted()).as("Should be unsorted").isTrue();
+ assertThat(table.properties()).as("Should have table properties").isNotNull();
}
@Test
@@ -534,19 +526,19 @@
catalog.createNamespace(Namespace.of("ns"));
}
- Assertions.assertThat(catalog.tableExists(ident)).as("Table should not exist").isFalse();
+ assertThat(catalog.tableExists(ident)).as("Table should not exist").isFalse();
catalog.buildTable(ident, SCHEMA).create();
- Assertions.assertThat(catalog.tableExists(ident)).as("Table should exist").isTrue();
+ assertThat(catalog.tableExists(ident)).as("Table should exist").isTrue();
Table loaded = catalog.loadTable(ident);
- Assertions.assertThat(loaded.schema().asStruct())
+ assertThat(loaded.schema().asStruct())
.as("Schema should match expected ID assignment")
.isEqualTo(TABLE_SCHEMA.asStruct());
catalog.dropTable(ident);
- Assertions.assertThat(catalog.tableExists(ident)).as("Table should not exist").isFalse();
+ assertThat(catalog.tableExists(ident)).as("Table should not exist").isFalse();
}
@Test
@@ -560,19 +552,19 @@
catalog.createNamespace(Namespace.of("ns"));
}
- Assertions.assertThat(catalog.tableExists(ident)).as("Table should not exist").isFalse();
+ assertThat(catalog.tableExists(ident)).as("Table should not exist").isFalse();
catalog.buildTable(ident, SCHEMA).create();
- Assertions.assertThat(catalog.tableExists(ident)).as("Table should exist").isTrue();
+ assertThat(catalog.tableExists(ident)).as("Table should exist").isTrue();
Table loaded = catalog.loadTable(ident);
- Assertions.assertThat(loaded.schema().asStruct())
+ assertThat(loaded.schema().asStruct())
.as("Schema should match expected ID assignment")
.isEqualTo(TABLE_SCHEMA.asStruct());
catalog.dropTable(ident);
- Assertions.assertThat(catalog.tableExists(ident)).as("Table should not exist").isFalse();
+ assertThat(catalog.tableExists(ident)).as("Table should not exist").isFalse();
}
@Test
@@ -585,17 +577,17 @@
catalog.createNamespace(ident.namespace());
}
- Assertions.assertThat(catalog.tableExists(ident)).as("Table should not exist").isFalse();
+ assertThat(catalog.tableExists(ident)).as("Table should not exist").isFalse();
catalog.buildTable(ident, SCHEMA).create();
- Assertions.assertThat(catalog.tableExists(ident)).as("Table should exist").isTrue();
+ assertThat(catalog.tableExists(ident)).as("Table should exist").isTrue();
- Assertions.assertThatThrownBy(() -> catalog.buildTable(ident, OTHER_SCHEMA).create())
+ assertThatThrownBy(() -> catalog.buildTable(ident, OTHER_SCHEMA).create())
.isInstanceOf(AlreadyExistsException.class)
.hasMessageStartingWith("Table already exists: ns.table");
Table table = catalog.loadTable(ident);
- Assertions.assertThat(table.schema().asStruct())
+ assertThat(table.schema().asStruct())
.as("Schema should match original table schema")
.isEqualTo(TABLE_SCHEMA.asStruct());
}
@@ -610,7 +602,7 @@
catalog.createNamespace(ident.namespace());
}
- Assertions.assertThat(catalog.tableExists(ident)).as("Table should not exist").isFalse();
+ assertThat(catalog.tableExists(ident)).as("Table should not exist").isFalse();
Map<String, String> properties =
ImmutableMap.of("user", "someone", "created-at", "2022-02-25T00:38:19");
@@ -624,24 +616,22 @@
.create();
// validate table settings
- Assertions.assertThat(table.name())
+ assertThat(table.name())
.as("Table name should report its full name")
.isEqualTo(catalog.name() + "." + ident);
- Assertions.assertThat(catalog.tableExists(ident)).as("Table should exist").isTrue();
- Assertions.assertThat(table.schema().asStruct())
+ assertThat(catalog.tableExists(ident)).as("Table should exist").isTrue();
+ assertThat(table.schema().asStruct())
.as("Schema should match expected ID assignment")
.isEqualTo(TABLE_SCHEMA.asStruct());
- Assertions.assertThat(table.location()).as("Should have a location").isNotNull();
- Assertions.assertThat(table.spec())
- .as("Should use requested partition spec")
- .isEqualTo(TABLE_SPEC);
- Assertions.assertThat(table.sortOrder())
+ assertThat(table.location()).as("Should have a location").isNotNull();
+ assertThat(table.spec()).as("Should use requested partition spec").isEqualTo(TABLE_SPEC);
+ assertThat(table.sortOrder())
.as("Should use requested write order")
.isEqualTo(TABLE_WRITE_ORDER);
- Assertions.assertThat(table.properties().entrySet())
+ assertThat(table.properties().entrySet())
.as("Table properties should be a superset of the requested properties")
.containsAll(properties.entrySet());
- Assertions.assertThat(table.uuid())
+ assertThat(table.uuid())
.isEqualTo(UUID.fromString(((BaseTable) table).operations().current().uuid()));
}
@@ -655,7 +645,7 @@
catalog.createNamespace(ident.namespace());
}
- Assertions.assertThat(catalog.tableExists(ident)).as("Table should not exist").isFalse();
+ assertThat(catalog.tableExists(ident)).as("Table should not exist").isFalse();
Map<String, String> properties =
ImmutableMap.of("user", "someone", "created-at", "2022-02-25T00:38:19");
@@ -666,25 +656,23 @@
.withSortOrder(WRITE_ORDER)
.withProperties(properties)
.create();
- Assertions.assertThat(catalog.tableExists(ident)).as("Table should exist").isTrue();
+ assertThat(catalog.tableExists(ident)).as("Table should exist").isTrue();
Table table = catalog.loadTable(ident);
// validate table settings
- Assertions.assertThat(table.name())
+ assertThat(table.name())
.as("Table name should report its full name")
.isEqualTo(catalog.name() + "." + ident);
- Assertions.assertThat(catalog.tableExists(ident)).as("Table should exist").isTrue();
- Assertions.assertThat(table.schema().asStruct())
+ assertThat(catalog.tableExists(ident)).as("Table should exist").isTrue();
+ assertThat(table.schema().asStruct())
.as("Schema should match expected ID assignment")
.isEqualTo(TABLE_SCHEMA.asStruct());
- Assertions.assertThat(table.location()).as("Should have a location").isNotNull();
- Assertions.assertThat(table.spec())
- .as("Should use requested partition spec")
- .isEqualTo(TABLE_SPEC);
- Assertions.assertThat(table.sortOrder())
+ assertThat(table.location()).as("Should have a location").isNotNull();
+ assertThat(table.spec()).as("Should use requested partition spec").isEqualTo(TABLE_SPEC);
+ assertThat(table.sortOrder())
.as("Should use requested write order")
.isEqualTo(TABLE_WRITE_ORDER);
- Assertions.assertThat(table.properties().entrySet())
+ assertThat(table.properties().entrySet())
.as("Table properties should be a superset of the requested properties")
.containsAll(properties.entrySet());
}
@@ -703,13 +691,13 @@
catalog.buildTable(tableIdent, SCHEMA).create();
Table table = catalog.loadTable(metaIdent);
- Assertions.assertThat(table).isNotNull();
- Assertions.assertThat(table).isInstanceOf(FilesTable.class);
+ assertThat(table).isNotNull();
+ assertThat(table).isInstanceOf(FilesTable.class);
// check that the table metadata can be refreshed
table.refresh();
- Assertions.assertThat(table.name()).isEqualTo(catalog.name() + "." + metaIdent);
+ assertThat(table.name()).isEqualTo(catalog.name() + "." + metaIdent);
}
@Test
@@ -718,8 +706,8 @@
TableIdentifier ident = TableIdentifier.of("ns", "table");
- Assertions.assertThat(catalog.tableExists(ident)).as("Table should not exist").isFalse();
- Assertions.assertThatThrownBy(() -> catalog.loadTable(ident))
+ assertThat(catalog.tableExists(ident)).as("Table should not exist").isFalse();
+ assertThatThrownBy(() -> catalog.loadTable(ident))
.isInstanceOf(NoSuchTableException.class)
.hasMessageStartingWith("Table does not exist: ns.table");
}
@@ -732,26 +720,20 @@
catalog.createNamespace(NS);
}
- Assertions.assertThat(catalog.tableExists(TABLE))
+ assertThat(catalog.tableExists(TABLE))
.as("Source table should not exist before create")
.isFalse();
catalog.buildTable(TABLE, SCHEMA).create();
- Assertions.assertThat(catalog.tableExists(TABLE))
- .as("Table should exist after create")
- .isTrue();
+ assertThat(catalog.tableExists(TABLE)).as("Table should exist after create").isTrue();
- Assertions.assertThat(catalog.tableExists(RENAMED_TABLE))
+ assertThat(catalog.tableExists(RENAMED_TABLE))
.as("Destination table should not exist before rename")
.isFalse();
catalog.renameTable(TABLE, RENAMED_TABLE);
- Assertions.assertThat(catalog.tableExists(RENAMED_TABLE))
- .as("Table should exist with new name")
- .isTrue();
- Assertions.assertThat(catalog.tableExists(TABLE))
- .as("Original table should no longer exist")
- .isFalse();
+ assertThat(catalog.tableExists(RENAMED_TABLE)).as("Table should exist with new name").isTrue();
+ assertThat(catalog.tableExists(TABLE)).as("Original table should no longer exist").isFalse();
catalog.dropTable(RENAMED_TABLE);
assertEmpty("Should not contain table after drop", catalog, NS);
@@ -765,18 +747,18 @@
catalog.createNamespace(NS);
}
- Assertions.assertThat(catalog.tableExists(TABLE))
+ assertThat(catalog.tableExists(TABLE))
.as("Source table should not exist before rename")
.isFalse();
- Assertions.assertThat(catalog.tableExists(RENAMED_TABLE))
+ assertThat(catalog.tableExists(RENAMED_TABLE))
.as("Destination table should not exist before rename")
.isFalse();
- Assertions.assertThatThrownBy(() -> catalog.renameTable(TABLE, RENAMED_TABLE))
+ assertThatThrownBy(() -> catalog.renameTable(TABLE, RENAMED_TABLE))
.isInstanceOf(NoSuchTableException.class)
.hasMessageContaining("Table does not exist");
- Assertions.assertThat(catalog.tableExists(RENAMED_TABLE))
+ assertThat(catalog.tableExists(RENAMED_TABLE))
.as("Destination table should not exist after failed rename")
.isFalse();
}
@@ -790,13 +772,13 @@
catalog().createNamespace(from.namespace());
}
- Assertions.assertThat(catalog().tableExists(from)).as("Table should not exist").isFalse();
+ assertThat(catalog().tableExists(from)).as("Table should not exist").isFalse();
catalog().buildTable(from, SCHEMA).create();
- Assertions.assertThat(catalog().tableExists(from)).as("Table should exist").isTrue();
+ assertThat(catalog().tableExists(from)).as("Table should exist").isTrue();
- Assertions.assertThatThrownBy(() -> catalog().renameTable(from, to))
+ assertThatThrownBy(() -> catalog().renameTable(from, to))
.isInstanceOf(NoSuchNamespaceException.class)
.hasMessageContaining("Namespace does not exist: non_existing");
}
@@ -809,30 +791,28 @@
catalog.createNamespace(NS);
}
- Assertions.assertThat(catalog.tableExists(TABLE))
+ assertThat(catalog.tableExists(TABLE))
.as("Source table should not exist before create")
.isFalse();
catalog.buildTable(TABLE, SCHEMA).create();
- Assertions.assertThat(catalog.tableExists(TABLE))
- .as("Source table should exist after create")
- .isTrue();
+ assertThat(catalog.tableExists(TABLE)).as("Source table should exist after create").isTrue();
- Assertions.assertThat(catalog.tableExists(RENAMED_TABLE))
+ assertThat(catalog.tableExists(RENAMED_TABLE))
.as("Destination table should not exist before create")
.isFalse();
catalog.buildTable(RENAMED_TABLE, SCHEMA).create();
- Assertions.assertThat(catalog.tableExists(RENAMED_TABLE))
+ assertThat(catalog.tableExists(RENAMED_TABLE))
.as("Destination table should exist after create")
.isTrue();
- Assertions.assertThatThrownBy(() -> catalog.renameTable(TABLE, RENAMED_TABLE))
+ assertThatThrownBy(() -> catalog.renameTable(TABLE, RENAMED_TABLE))
.isInstanceOf(AlreadyExistsException.class)
.hasMessageContaining("Table already exists");
- Assertions.assertThat(catalog.tableExists(TABLE))
+ assertThat(catalog.tableExists(TABLE))
.as("Source table should still exist after failed rename")
.isTrue();
- Assertions.assertThat(catalog.tableExists(RENAMED_TABLE))
+ assertThat(catalog.tableExists(RENAMED_TABLE))
.as("Destination table should still exist after failed rename")
.isTrue();
@@ -840,7 +820,7 @@
((HasTableOperations) catalog.loadTable(TABLE)).operations().current().uuid();
String destinationTableUUID =
((HasTableOperations) catalog.loadTable(RENAMED_TABLE)).operations().current().uuid();
- Assertions.assertThat(sourceTableUUID)
+ assertThat(sourceTableUUID)
.as("Source and destination table should remain distinct after failed rename")
.isNotEqualTo(destinationTableUUID);
}
@@ -853,20 +833,14 @@
catalog.createNamespace(NS);
}
- Assertions.assertThat(catalog.tableExists(TABLE))
- .as("Table should not exist before create")
- .isFalse();
+ assertThat(catalog.tableExists(TABLE)).as("Table should not exist before create").isFalse();
catalog.buildTable(TABLE, SCHEMA).create();
- Assertions.assertThat(catalog.tableExists(TABLE))
- .as("Table should exist after create")
- .isTrue();
+ assertThat(catalog.tableExists(TABLE)).as("Table should exist after create").isTrue();
boolean dropped = catalog.dropTable(TABLE);
- Assertions.assertThat(dropped).as("Should drop a table that does exist").isTrue();
- Assertions.assertThat(catalog.tableExists(TABLE))
- .as("Table should not exist after drop")
- .isFalse();
+ assertThat(dropped).as("Should drop a table that does exist").isTrue();
+ assertThat(catalog.tableExists(TABLE)).as("Table should not exist after drop").isFalse();
}
@Test
@@ -877,20 +851,14 @@
catalog.createNamespace(NS);
}
- Assertions.assertThat(catalog.tableExists(TABLE))
- .as("Table should not exist before create")
- .isFalse();
+ assertThat(catalog.tableExists(TABLE)).as("Table should not exist before create").isFalse();
catalog.buildTable(TABLE, SCHEMA).create();
- Assertions.assertThat(catalog.tableExists(TABLE))
- .as("Table should exist after create")
- .isTrue();
+ assertThat(catalog.tableExists(TABLE)).as("Table should exist after create").isTrue();
boolean dropped = catalog.dropTable(TABLE, true);
- Assertions.assertThat(dropped).as("Should drop a table that does exist").isTrue();
- Assertions.assertThat(catalog.tableExists(TABLE))
- .as("Table should not exist after drop")
- .isFalse();
+ assertThat(dropped).as("Should drop a table that does exist").isTrue();
+ assertThat(catalog.tableExists(TABLE)).as("Table should not exist after drop").isFalse();
}
@Test
@@ -901,24 +869,18 @@
catalog.createNamespace(NS);
}
- Assertions.assertThat(catalog.tableExists(TABLE))
- .as("Table should not exist before create")
- .isFalse();
+ assertThat(catalog.tableExists(TABLE)).as("Table should not exist before create").isFalse();
Table table = catalog.buildTable(TABLE, SCHEMA).create();
- Assertions.assertThat(catalog.tableExists(TABLE))
- .as("Table should exist after create")
- .isTrue();
+ assertThat(catalog.tableExists(TABLE)).as("Table should exist after create").isTrue();
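+ // capture the metadata file locations before the drop (purge=false) so they can be compared below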
Set<String> actualMetadataFileLocations = ReachableFileUtil.metadataFileLocations(table, false);
boolean dropped = catalog.dropTable(TABLE, false);
- Assertions.assertThat(dropped).as("Should drop a table that does exist").isTrue();
- Assertions.assertThat(catalog.tableExists(TABLE))
- .as("Table should not exist after drop")
- .isFalse();
+ assertThat(dropped).as("Should drop a table that does exist").isTrue();
+ assertThat(catalog.tableExists(TABLE)).as("Table should not exist after drop").isFalse();
Set<String> expectedMetadataFileLocations =
ReachableFileUtil.metadataFileLocations(table, false);
- Assertions.assertThat(actualMetadataFileLocations)
- .hasSameElementsAs(expectedMetadataFileLocations)
- .hasSize(1)
- .as("Should have one metadata file");
+ assertThat(actualMetadataFileLocations)
+ .as("Should have one metadata file")
+ .hasSameElementsAs(expectedMetadataFileLocations)
+ .hasSize(1);
@@ -933,10 +895,8 @@
}
TableIdentifier noSuchTableIdent = TableIdentifier.of(NS, "notable");
- Assertions.assertThat(catalog.tableExists(noSuchTableIdent))
- .as("Table should not exist")
- .isFalse();
- Assertions.assertThat(catalog.dropTable(noSuchTableIdent))
+ assertThat(catalog.tableExists(noSuchTableIdent)).as("Table should not exist").isFalse();
+ assertThat(catalog.dropTable(noSuchTableIdent))
.as("Should not drop a table that does not exist")
.isFalse();
}
@@ -962,40 +922,40 @@
catalog.buildTable(ns1Table1, SCHEMA).create();
- Assertions.assertThat(catalog.listTables(ns1))
+ assertThat(catalog.listTables(ns1))
.as("Should contain ns_1.table_1 after create")
.containsExactlyInAnyOrder(ns1Table1);
catalog.buildTable(ns2Table1, SCHEMA).create();
- Assertions.assertThat(catalog.listTables(ns2))
+ assertThat(catalog.listTables(ns2))
.as("Should contain ns_2.table_1 after create")
.containsExactlyInAnyOrder(ns2Table1);
- Assertions.assertThat(catalog.listTables(ns1))
+ assertThat(catalog.listTables(ns1))
.as("Should not show changes to ns_2 in ns_1")
.containsExactlyInAnyOrder(ns1Table1);
catalog.buildTable(ns1Table2, SCHEMA).create();
- Assertions.assertThat(catalog.listTables(ns2))
+ assertThat(catalog.listTables(ns2))
.as("Should not show changes to ns_1 in ns_2")
.containsExactlyInAnyOrder(ns2Table1);
- Assertions.assertThat(catalog.listTables(ns1))
+ assertThat(catalog.listTables(ns1))
.as("Should contain ns_1.table_2 after create")
.containsExactlyInAnyOrder(ns1Table1, ns1Table2);
catalog.dropTable(ns1Table1);
- Assertions.assertThat(catalog.listTables(ns2))
+ assertThat(catalog.listTables(ns2))
.as("Should not show changes to ns_1 in ns_2")
.containsExactlyInAnyOrder(ns2Table1);
- Assertions.assertThat(catalog.listTables(ns1))
+ assertThat(catalog.listTables(ns1))
.as("Should not contain ns_1.table_1 after drop")
.containsExactlyInAnyOrder(ns1Table2);
catalog.dropTable(ns1Table2);
- Assertions.assertThat(catalog.listTables(ns2))
+ assertThat(catalog.listTables(ns2))
.as("Should not show changes to ns_1 in ns_2")
.containsExactlyInAnyOrder(ns2Table1);
@@ -1022,7 +982,7 @@
Table loaded = catalog.loadTable(TABLE);
- Assertions.assertThat(loaded.schema().asStruct())
+ assertThat(loaded.schema().asStruct())
.as("Loaded table should have expected schema")
.isEqualTo(expected.asStruct());
}
@@ -1038,17 +998,17 @@
Table table = catalog.buildTable(TABLE, SCHEMA).create();
UpdateSchema update = table.updateSchema().addColumn("new_col", Types.LongType.get());
- Assertions.assertThat(catalog.dropTable(TABLE)).as("Should successfully drop table").isTrue();
+ assertThat(catalog.dropTable(TABLE)).as("Should successfully drop table").isTrue();
catalog.buildTable(TABLE, OTHER_SCHEMA).create();
String expectedMessage =
supportsServerSideRetry() ? "Requirement failed: UUID does not match" : "Cannot commit";
- Assertions.assertThatThrownBy(update::commit)
+ assertThatThrownBy(update::commit)
.isInstanceOf(CommitFailedException.class)
.hasMessageContaining(expectedMessage);
Table loaded = catalog.loadTable(TABLE);
- Assertions.assertThat(loaded.schema().asStruct())
+ assertThat(loaded.schema().asStruct())
.as("Loaded table should have expected schema")
.isEqualTo(OTHER_SCHEMA.asStruct());
}
@@ -1076,7 +1036,7 @@
update.commit();
Table loaded = catalog.loadTable(TABLE);
- Assertions.assertThat(loaded.schema().asStruct())
+ assertThat(loaded.schema().asStruct())
.as("Loaded table should have expected schema")
.isEqualTo(expected.asStruct());
}
@@ -1101,12 +1061,12 @@
// attempt to commit the original update
String expectedMessage =
supportsServerSideRetry() ? "Requirement failed: current schema changed" : "Cannot commit";
- Assertions.assertThatThrownBy(update::commit)
+ assertThatThrownBy(update::commit)
.isInstanceOf(CommitFailedException.class)
.hasMessageContaining(expectedMessage);
Table loaded = catalog.loadTable(TABLE);
- Assertions.assertThat(loaded.schema().asStruct())
+ assertThat(loaded.schema().asStruct())
.as("Loaded table should have expected schema")
.isEqualTo(expected.asStruct());
}
@@ -1134,12 +1094,12 @@
supportsServerSideRetry()
? "Requirement failed: last assigned field id changed"
: "Cannot commit";
- Assertions.assertThatThrownBy(update::commit)
+ assertThatThrownBy(update::commit)
.isInstanceOf(CommitFailedException.class)
.hasMessageContaining(expectedMessage);
Table loaded = catalog.loadTable(TABLE);
- Assertions.assertThat(loaded.schema().asStruct())
+ assertThat(loaded.schema().asStruct())
.as("Loaded table should have expected schema")
.isEqualTo(expected.asStruct());
}
@@ -1163,7 +1123,7 @@
table.updateSchema().deleteColumn("col1").deleteColumn("col2").deleteColumn("col3").commit();
- Assertions.assertThat(table.schema().asStruct())
+ assertThat(table.schema().asStruct())
.as("Loaded table should have expected schema")
.isEqualTo(TABLE_SCHEMA.asStruct());
}
@@ -1186,7 +1146,7 @@
Table loaded = catalog.loadTable(TABLE);
// the spec ID may not match, so check equality of the fields
- Assertions.assertThat(loaded.spec().fields())
+ assertThat(loaded.spec().fields())
.as("Loaded table should have expected spec")
.isEqualTo(expected.fields());
}
@@ -1219,7 +1179,7 @@
Table loaded = catalog.loadTable(TABLE);
// the spec ID may not match, so check equality of the fields
- Assertions.assertThat(loaded.spec().fields())
+ assertThat(loaded.spec().fields())
.as("Loaded table should have expected spec")
.isEqualTo(expected.fields());
}
@@ -1248,14 +1208,14 @@
supportsServerSideRetry()
? "Requirement failed: default partition spec changed"
: "Cannot commit";
- Assertions.assertThatThrownBy(update::commit)
+ assertThatThrownBy(update::commit)
.isInstanceOf(CommitFailedException.class)
.hasMessageContaining(expectedMessage);
Table loaded = catalog.loadTable(TABLE);
// the spec ID may not match, so check equality of the fields
- Assertions.assertThat(loaded.spec().fields())
+ assertThat(loaded.spec().fields())
.as("Loaded table should have expected spec")
.isEqualTo(expected.fields());
}
@@ -1283,14 +1243,14 @@
supportsServerSideRetry()
? "Requirement failed: last assigned partition id changed"
: "Cannot commit";
- Assertions.assertThatThrownBy(update::commit)
+ assertThatThrownBy(update::commit)
.isInstanceOf(CommitFailedException.class)
.hasMessageContaining(expectedMessage);
Table loaded = catalog.loadTable(TABLE);
// the spec ID may not match, so check equality of the fields
- Assertions.assertThat(loaded.spec().fields())
+ assertThat(loaded.spec().fields())
.as("Loaded table should have expected spec")
.isEqualTo(expected.fields());
}
@@ -1311,7 +1271,7 @@
.withPartitionSpec(SPEC)
.withProperty("format-version", "2")
.create();
- Assertions.assertThat(((BaseTable) table).operations().current().formatVersion())
+ assertThat(((BaseTable) table).operations().current().formatVersion())
.as("Should be a v2 table")
.isEqualTo(2);
@@ -1319,9 +1279,7 @@
table.updateSpec().removeField("id").commit();
- Assertions.assertThat(table.spec())
- .as("Loaded table should have expected spec")
- .isEqualTo(TABLE_SPEC);
+ assertThat(table.spec()).as("Loaded table should have expected spec").isEqualTo(TABLE_SPEC);
}
@Test
@@ -1342,7 +1300,7 @@
Table loaded = catalog.loadTable(TABLE);
// the sort order ID may not match, so check equality of the fields
- Assertions.assertThat(loaded.sortOrder().fields())
+ assertThat(loaded.sortOrder().fields())
.as("Loaded table should have expected order")
.isEqualTo(expected.fields());
}
@@ -1376,7 +1334,7 @@
Table loaded = catalog.loadTable(TABLE);
// the sort order ID may not match, so check equality of the fields
- Assertions.assertThat(loaded.sortOrder().fields())
+ assertThat(loaded.sortOrder().fields())
.as("Loaded table should have expected order")
.isEqualTo(expected.fields());
}
@@ -1395,7 +1353,7 @@
table.replaceSortOrder().asc(Expressions.bucket("id", 16)).asc("id").commit();
- Assertions.assertThat(table.sortOrder())
+ assertThat(table.sortOrder())
.as("Loaded table should have expected order")
.isEqualTo(TABLE_WRITE_ORDER);
}
@@ -1411,7 +1369,7 @@
Table table = catalog.buildTable(TABLE, SCHEMA).withPartitionSpec(SPEC).create();
try (CloseableIterable<FileScanTask> tasks = table.newScan().planFiles()) {
- Assertions.assertThat(tasks.iterator().hasNext()).as("Should contain no files").isFalse();
+ assertThat(tasks.iterator().hasNext()).as("Should contain no files").isFalse();
}
table.newFastAppend().appendFile(FILE_A).commit();
@@ -1498,10 +1456,10 @@
Table loaded = catalog.loadTable(TABLE);
- Assertions.assertThat(loaded.schema().asStruct())
+ assertThat(loaded.schema().asStruct())
.as("Loaded table should have expected schema")
.isEqualTo(expectedSchema.asStruct());
- Assertions.assertThat(loaded.spec().fields())
+ assertThat(loaded.spec().fields())
.as("Loaded table should have expected spec")
.isEqualTo(expectedSpec.fields());
@@ -1518,21 +1476,19 @@
Transaction create = catalog.buildTable(TABLE, SCHEMA).createTransaction();
- Assertions.assertThat(catalog.tableExists(TABLE))
+ assertThat(catalog.tableExists(TABLE))
.as("Table should not exist after createTransaction")
.isFalse();
create.newFastAppend().appendFile(FILE_A).commit();
- Assertions.assertThat(catalog.tableExists(TABLE))
+ assertThat(catalog.tableExists(TABLE))
.as("Table should not exist after append commit")
.isFalse();
create.commitTransaction();
- Assertions.assertThat(catalog.tableExists(TABLE))
- .as("Table should exist after append commit")
- .isTrue();
+ assertThat(catalog.tableExists(TABLE)).as("Table should exist after append commit").isTrue();
Table table = catalog.loadTable(TABLE);
assertFiles(table, FILE_A);
assertPreviousMetadataFileCount(table, 0);
@@ -1557,37 +1513,35 @@
.withProperties(properties)
.createTransaction();
- Assertions.assertThat(catalog.tableExists(TABLE))
+ assertThat(catalog.tableExists(TABLE))
.as("Table should not exist after createTransaction")
.isFalse();
create.newFastAppend().appendFile(FILE_A).commit();
- Assertions.assertThat(catalog.tableExists(TABLE))
+ assertThat(catalog.tableExists(TABLE))
.as("Table should not exist after append commit")
.isFalse();
create.commitTransaction();
- Assertions.assertThat(catalog.tableExists(TABLE))
- .as("Table should exist after append commit")
- .isTrue();
+ assertThat(catalog.tableExists(TABLE)).as("Table should exist after append commit").isTrue();
Table table = catalog.loadTable(TABLE);
- Assertions.assertThat(table.schema().asStruct())
+ assertThat(table.schema().asStruct())
.as("Table schema should match the new schema")
.isEqualTo(TABLE_SCHEMA.asStruct());
- Assertions.assertThat(table.spec().fields())
+ assertThat(table.spec().fields())
.as("Table should have create partition spec")
.isEqualTo(TABLE_SPEC.fields());
- Assertions.assertThat(table.sortOrder())
+ assertThat(table.sortOrder())
.as("Table should have create sort order")
.isEqualTo(TABLE_WRITE_ORDER);
- Assertions.assertThat(table.properties().entrySet())
+ assertThat(table.properties().entrySet())
.as("Table properties should be a superset of the requested properties")
.containsAll(properties.entrySet());
if (!overridesRequestedLocation()) {
- Assertions.assertThat(table.location())
+ assertThat(table.location())
.as("Table location should match requested")
.isEqualTo("file:/tmp/ns/table");
}
@@ -1615,7 +1569,7 @@
.withProperties(properties)
.createTransaction();
- Assertions.assertThat(catalog.tableExists(TABLE))
+ assertThat(catalog.tableExists(TABLE))
.as("Table should not exist after createTransaction")
.isFalse();
@@ -1643,15 +1597,13 @@
create.newFastAppend().appendFile(anotherFile).commit();
- Assertions.assertThat(catalog.tableExists(TABLE))
+ assertThat(catalog.tableExists(TABLE))
.as("Table should not exist after append commit")
.isFalse();
create.commitTransaction();
- Assertions.assertThat(catalog.tableExists(TABLE))
- .as("Table should exist after append commit")
- .isTrue();
+ assertThat(catalog.tableExists(TABLE)).as("Table should exist after append commit").isTrue();
Table table = catalog.loadTable(TABLE);
@@ -1663,29 +1615,29 @@
final int updateSpecId = initialSpecId + 1;
final int updateOrderId = initialOrderId + 1;
- Assertions.assertThat(table.schema().asStruct())
+ assertThat(table.schema().asStruct())
.as("Table schema should match the new schema")
.isEqualTo(newSchema.asStruct());
- Assertions.assertThat(table.schema().schemaId())
+ assertThat(table.schema().schemaId())
.as("Table schema should match the new schema ID")
.isEqualTo(updateSchemaId);
- Assertions.assertThat(table.spec().fields())
+ assertThat(table.spec().fields())
.as("Table should have updated partition spec")
.isEqualTo(newSpec.fields());
- Assertions.assertThat(table.spec().specId())
+ assertThat(table.spec().specId())
.as("Table should have updated partition spec ID")
.isEqualTo(updateSpecId);
- Assertions.assertThat(table.sortOrder().fields())
+ assertThat(table.sortOrder().fields())
.as("Table should have updated sort order")
.isEqualTo(newSortOrder.fields());
- Assertions.assertThat(table.sortOrder().orderId())
+ assertThat(table.sortOrder().orderId())
.as("Table should have updated sort order ID")
.isEqualTo(updateOrderId);
- Assertions.assertThat(table.properties().entrySet())
+ assertThat(table.properties().entrySet())
.as("Table properties should be a superset of the requested properties")
.containsAll(properties.entrySet());
if (!overridesRequestedLocation()) {
- Assertions.assertThat(table.location())
+ assertThat(table.location())
.as("Table location should match requested")
.isEqualTo("file:/tmp/ns/table");
}
@@ -1716,44 +1668,42 @@
.withProperties(properties)
.createTransaction();
- Assertions.assertThat(catalog.tableExists(TABLE))
+ assertThat(catalog.tableExists(TABLE))
.as("Table should not exist after createTransaction")
.isFalse();
create.newFastAppend().appendFile(FILE_A).commit();
- Assertions.assertThat(catalog.tableExists(TABLE))
+ assertThat(catalog.tableExists(TABLE))
.as("Table should not exist after append commit")
.isFalse();
create.commitTransaction();
- Assertions.assertThat(catalog.tableExists(TABLE))
- .as("Table should exist after append commit")
- .isTrue();
+ assertThat(catalog.tableExists(TABLE)).as("Table should exist after append commit").isTrue();
Table table = catalog.loadTable(TABLE);
Map<String, String> expectedProps = Maps.newHashMap(properties);
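+ // "format-version" is consumed when the table is created and is not stored as a regular table property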
expectedProps.remove("format-version");
- Assertions.assertThat(table.schema().asStruct())
+ assertThat(table.schema().asStruct())
.as("Table schema should match the new schema")
.isEqualTo(TABLE_SCHEMA.asStruct());
- Assertions.assertThat(table.spec().fields())
+ assertThat(table.spec().fields())
.as("Table should have create partition spec")
.isEqualTo(TABLE_SPEC.fields());
- Assertions.assertThat(table.sortOrder())
+ assertThat(table.sortOrder())
.as("Table should have create sort order")
.isEqualTo(TABLE_WRITE_ORDER);
- Assertions.assertThat(Sets.intersection(properties.entrySet(), table.properties().entrySet()))
+ assertThat(Sets.intersection(properties.entrySet(), table.properties().entrySet()))
.as("Table properties should be a superset of the requested properties")
.containsExactlyInAnyOrderElementsOf(expectedProps.entrySet());
- Assertions.assertThat(table.currentSnapshot().sequenceNumber())
+ assertThat(table.currentSnapshot().sequenceNumber())
.as("Sequence number should start at 1 for v2 format")
.isEqualTo(1);
if (!overridesRequestedLocation()) {
- Assertions.assertThat(table.location())
+ assertThat(table.location())
.as("Table location should match requested")
.isEqualTo("file:/tmp/ns/table");
}
@@ -1773,30 +1723,30 @@
Transaction create = catalog.buildTable(TABLE, SCHEMA).createTransaction();
- Assertions.assertThat(catalog.tableExists(TABLE))
+ assertThat(catalog.tableExists(TABLE))
.as("Table should not exist after createTransaction")
.isFalse();
create.newFastAppend().appendFile(FILE_A).commit();
- Assertions.assertThat(catalog.tableExists(TABLE))
+ assertThat(catalog.tableExists(TABLE))
.as("Table should not exist after append commit")
.isFalse();
catalog.buildTable(TABLE, OTHER_SCHEMA).create();
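+ // show full stack traces in AssertJ failure output (only a few frames are displayed by default)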
- Assertions.setMaxStackTraceElementsDisplayed(Integer.MAX_VALUE);
+ setMaxStackTraceElementsDisplayed(Integer.MAX_VALUE);
String expectedMessage =
supportsServerSideRetry()
? "Requirement failed: table already exists"
: "Table already exists";
- Assertions.assertThatThrownBy(create::commitTransaction)
+ assertThatThrownBy(create::commitTransaction)
.isInstanceOf(AlreadyExistsException.class)
.hasMessageStartingWith(expectedMessage);
// validate the concurrently created table is unmodified
Table table = catalog.loadTable(TABLE);
- Assertions.assertThat(table.schema().asStruct())
+ assertThat(table.schema().asStruct())
.as("Table schema should match concurrent create")
.isEqualTo(OTHER_SCHEMA.asStruct());
assertNoFiles(table);
@@ -1812,21 +1762,19 @@
Transaction create = catalog.buildTable(TABLE, SCHEMA).createOrReplaceTransaction();
- Assertions.assertThat(catalog.tableExists(TABLE))
+ assertThat(catalog.tableExists(TABLE))
.as("Table should not exist after createTransaction")
.isFalse();
create.newFastAppend().appendFile(FILE_A).commit();
- Assertions.assertThat(catalog.tableExists(TABLE))
+ assertThat(catalog.tableExists(TABLE))
.as("Table should not exist after append commit")
.isFalse();
create.commitTransaction();
- Assertions.assertThat(catalog.tableExists(TABLE))
- .as("Table should exist after append commit")
- .isTrue();
+ assertThat(catalog.tableExists(TABLE)).as("Table should exist after append commit").isTrue();
Table table = catalog.loadTable(TABLE);
assertFiles(table, FILE_A);
@@ -1852,38 +1800,36 @@
.withProperties(properties)
.createOrReplaceTransaction();
- Assertions.assertThat(catalog.tableExists(TABLE))
+ assertThat(catalog.tableExists(TABLE))
.as("Table should not exist after createTransaction")
.isFalse();
createOrReplace.newFastAppend().appendFile(FILE_A).commit();
- Assertions.assertThat(catalog.tableExists(TABLE))
+ assertThat(catalog.tableExists(TABLE))
.as("Table should not exist after append commit")
.isFalse();
createOrReplace.commitTransaction();
- Assertions.assertThat(catalog.tableExists(TABLE))
- .as("Table should exist after append commit")
- .isTrue();
+ assertThat(catalog.tableExists(TABLE)).as("Table should exist after append commit").isTrue();
Table table = catalog.loadTable(TABLE);
- Assertions.assertThat(table.schema().asStruct())
+ assertThat(table.schema().asStruct())
.as("Table schema should match the new schema")
.isEqualTo(TABLE_SCHEMA.asStruct());
- Assertions.assertThat(table.spec().fields())
+ assertThat(table.spec().fields())
.as("Table should have create partition spec")
.isEqualTo(TABLE_SPEC.fields());
- Assertions.assertThat(table.sortOrder())
+ assertThat(table.sortOrder())
.as("Table should have create sort order")
.isEqualTo(TABLE_WRITE_ORDER);
- Assertions.assertThat(table.properties().entrySet())
+ assertThat(table.properties().entrySet())
.as("Table properties should be a superset of the requested properties")
.containsAll(properties.entrySet());
if (!overridesRequestedLocation()) {
- Assertions.assertThat(table.location())
+ assertThat(table.location())
.as("Table location should match requested")
.isEqualTo("file:/tmp/ns/table");
}
@@ -1903,13 +1849,13 @@
Table original = catalog.buildTable(TABLE, OTHER_SCHEMA).create();
- Assertions.assertThat(catalog.tableExists(TABLE))
+ assertThat(catalog.tableExists(TABLE))
.as("Table should exist before replaceTransaction")
.isTrue();
Transaction createOrReplace = catalog.buildTable(TABLE, SCHEMA).createOrReplaceTransaction();
- Assertions.assertThat(catalog.tableExists(TABLE))
+ assertThat(catalog.tableExists(TABLE))
.as("Table should still exist after replaceTransaction")
.isTrue();
@@ -1918,7 +1864,7 @@
// validate table has not changed
Table table = catalog.loadTable(TABLE);
- Assertions.assertThat(table.schema().asStruct())
+ assertThat(table.schema().asStruct())
.as("Table schema should match concurrent create")
.isEqualTo(OTHER_SCHEMA.asStruct());
@@ -1928,14 +1874,12 @@
createOrReplace.commitTransaction();
// validate the table after replace
- Assertions.assertThat(catalog.tableExists(TABLE))
- .as("Table should exist after append commit")
- .isTrue();
+ assertThat(catalog.tableExists(TABLE)).as("Table should exist after append commit").isTrue();
table.refresh(); // refresh should work with UUID validation
Table loaded = catalog.loadTable(TABLE);
- Assertions.assertThat(loaded.schema().asStruct())
+ assertThat(loaded.schema().asStruct())
.as("Table schema should match the new schema")
.isEqualTo(REPLACE_SCHEMA.asStruct());
assertUUIDsMatch(original, loaded);
@@ -1953,7 +1897,7 @@
Table original = catalog.buildTable(TABLE, OTHER_SCHEMA).create();
- Assertions.assertThat(catalog.tableExists(TABLE))
+ assertThat(catalog.tableExists(TABLE))
.as("Table should exist before replaceTransaction")
.isTrue();
@@ -1968,7 +1912,7 @@
.withProperties(properties)
.createOrReplaceTransaction();
- Assertions.assertThat(catalog.tableExists(TABLE))
+ assertThat(catalog.tableExists(TABLE))
.as("Table should still exist after replaceTransaction")
.isTrue();
@@ -1976,14 +1920,12 @@
// validate table has not changed
Table table = catalog.loadTable(TABLE);
- Assertions.assertThat(table.schema().asStruct())
+ assertThat(table.schema().asStruct())
.as("Table schema should match concurrent create")
.isEqualTo(OTHER_SCHEMA.asStruct());
- Assertions.assertThat(table.spec().isUnpartitioned())
- .as("Table should be unpartitioned")
- .isTrue();
- Assertions.assertThat(table.sortOrder().isUnsorted()).as("Table should be unsorted").isTrue();
- Assertions.assertThat(table.properties().get("created-at"))
+ assertThat(table.spec().isUnpartitioned()).as("Table should be unpartitioned").isTrue();
+ assertThat(table.sortOrder().isUnsorted()).as("Table should be unsorted").isTrue();
+ assertThat(table.properties().get("created-at"))
.as("Created at should not match")
.isNotEqualTo("2022-02-25T00:38:19");
assertUUIDsMatch(original, table);
@@ -1992,27 +1934,25 @@
createOrReplace.commitTransaction();
// validate the table after replace
- Assertions.assertThat(catalog.tableExists(TABLE))
- .as("Table should exist after append commit")
- .isTrue();
+ assertThat(catalog.tableExists(TABLE)).as("Table should exist after append commit").isTrue();
table.refresh(); // refresh should work with UUID validation
Table loaded = catalog.loadTable(TABLE);
- Assertions.assertThat(loaded.schema().asStruct())
+ assertThat(loaded.schema().asStruct())
.as("Table schema should match the new schema")
.isEqualTo(REPLACE_SCHEMA.asStruct());
- Assertions.assertThat(loaded.spec())
+ assertThat(loaded.spec())
.as("Table should have replace partition spec")
.isEqualTo(REPLACE_SPEC);
- Assertions.assertThat(loaded.sortOrder())
+ assertThat(loaded.sortOrder())
.as("Table should have replace sort order")
.isEqualTo(REPLACE_WRITE_ORDER);
- Assertions.assertThat(loaded.properties().entrySet())
+ assertThat(loaded.properties().entrySet())
.as("Table properties should be a superset of the requested properties")
.containsAll(properties.entrySet());
if (!overridesRequestedLocation()) {
- Assertions.assertThat(table.location())
+ assertThat(table.location())
.as("Table location should be replaced")
.isEqualTo("file:/tmp/ns/table");
}
@@ -2036,13 +1976,13 @@
Transaction createOrReplace = catalog.buildTable(TABLE, SCHEMA).createOrReplaceTransaction();
- Assertions.assertThat(catalog.tableExists(TABLE))
+ assertThat(catalog.tableExists(TABLE))
.as("Table should not exist after createTransaction")
.isFalse();
createOrReplace.newFastAppend().appendFile(FILE_A).commit();
- Assertions.assertThat(catalog.tableExists(TABLE))
+ assertThat(catalog.tableExists(TABLE))
.as("Table should not exist after append commit")
.isFalse();
@@ -2052,13 +1992,13 @@
supportsServerSideRetry()
? "Requirement failed: table already exists"
: "Table already exists";
- Assertions.assertThatThrownBy(createOrReplace::commitTransaction)
+ assertThatThrownBy(createOrReplace::commitTransaction)
.isInstanceOf(AlreadyExistsException.class)
.hasMessageStartingWith(expectedMessage);
// validate the concurrently created table is unmodified
Table table = catalog.loadTable(TABLE);
- Assertions.assertThat(table.schema().asStruct())
+ assertThat(table.schema().asStruct())
.as("Table schema should match concurrent create")
.isEqualTo(OTHER_SCHEMA.asStruct());
assertNoFiles(table);
@@ -2074,13 +2014,13 @@
Table original = catalog.buildTable(TABLE, OTHER_SCHEMA).create();
- Assertions.assertThat(catalog.tableExists(TABLE))
+ assertThat(catalog.tableExists(TABLE))
.as("Table should exist before replaceTransaction")
.isTrue();
Transaction replace = catalog.buildTable(TABLE, SCHEMA).replaceTransaction();
- Assertions.assertThat(catalog.tableExists(TABLE))
+ assertThat(catalog.tableExists(TABLE))
.as("Table should still exist after replaceTransaction")
.isTrue();
@@ -2088,7 +2028,7 @@
// validate table has not changed
Table table = catalog.loadTable(TABLE);
- Assertions.assertThat(table.schema().asStruct())
+ assertThat(table.schema().asStruct())
.as("Table schema should match concurrent create")
.isEqualTo(OTHER_SCHEMA.asStruct());
assertUUIDsMatch(original, table);
@@ -2097,14 +2037,12 @@
replace.commitTransaction();
// validate the table after replace
- Assertions.assertThat(catalog.tableExists(TABLE))
- .as("Table should exist after append commit")
- .isTrue();
+ assertThat(catalog.tableExists(TABLE)).as("Table should exist after append commit").isTrue();
table.refresh(); // refresh should work with UUID validation
Table loaded = catalog.loadTable(TABLE);
- Assertions.assertThat(loaded.schema().asStruct())
+ assertThat(loaded.schema().asStruct())
.as("Table schema should match the new schema")
.isEqualTo(REPLACE_SCHEMA.asStruct());
@@ -2123,7 +2061,7 @@
Table original = catalog.buildTable(TABLE, OTHER_SCHEMA).create();
- Assertions.assertThat(catalog.tableExists(TABLE))
+ assertThat(catalog.tableExists(TABLE))
.as("Table should exist before replaceTransaction")
.isTrue();
@@ -2138,7 +2076,7 @@
.withProperties(properties)
.replaceTransaction();
- Assertions.assertThat(catalog.tableExists(TABLE))
+ assertThat(catalog.tableExists(TABLE))
.as("Table should still exist after replaceTransaction")
.isTrue();
@@ -2147,14 +2085,12 @@
// validate table has not changed
Table table = catalog.loadTable(TABLE);
- Assertions.assertThat(table.schema().asStruct())
+ assertThat(table.schema().asStruct())
.as("Table schema should match concurrent create")
.isEqualTo(OTHER_SCHEMA.asStruct());
- Assertions.assertThat(table.spec().isUnpartitioned())
- .as("Table should be unpartitioned")
- .isTrue();
- Assertions.assertThat(table.sortOrder().isUnsorted()).as("Table should be unsorted").isTrue();
- Assertions.assertThat(table.properties().get("created-at"))
+ assertThat(table.spec().isUnpartitioned()).as("Table should be unpartitioned").isTrue();
+ assertThat(table.sortOrder().isUnsorted()).as("Table should be unsorted").isTrue();
+ assertThat(table.properties().get("created-at"))
.as("Created at should not match")
.isNotEqualTo("2022-02-25T00:38:19");
@@ -2164,27 +2100,25 @@
replace.commitTransaction();
// validate the table after replace
- Assertions.assertThat(catalog.tableExists(TABLE))
- .as("Table should exist after append commit")
- .isTrue();
+ assertThat(catalog.tableExists(TABLE)).as("Table should exist after append commit").isTrue();
table.refresh(); // refresh should work with UUID validation
Table loaded = catalog.loadTable(TABLE);
- Assertions.assertThat(loaded.schema().asStruct())
+ assertThat(loaded.schema().asStruct())
.as("Table schema should match the new schema")
.isEqualTo(REPLACE_SCHEMA.asStruct());
- Assertions.assertThat(loaded.spec())
+ assertThat(loaded.spec())
.as("Table should have replace partition spec")
.isEqualTo(REPLACE_SPEC);
- Assertions.assertThat(loaded.sortOrder())
+ assertThat(loaded.sortOrder())
.as("Table should have replace sort order")
.isEqualTo(REPLACE_WRITE_ORDER);
- Assertions.assertThat(loaded.properties().entrySet())
+ assertThat(loaded.properties().entrySet())
.as("Table properties should be a superset of the requested properties")
.containsAll(properties.entrySet());
if (!overridesRequestedLocation()) {
- Assertions.assertThat(table.location())
+ assertThat(table.location())
.as("Table location should be replaced")
.isEqualTo("file:/tmp/ns/table");
}
@@ -2202,7 +2136,7 @@
catalog.createNamespace(NS);
}
- Assertions.assertThatThrownBy(() -> catalog.buildTable(TABLE, SCHEMA).replaceTransaction())
+ assertThatThrownBy(() -> catalog.buildTable(TABLE, SCHEMA).replaceTransaction())
.isInstanceOf(NoSuchTableException.class)
.hasMessageStartingWith("Table does not exist: newdb.table");
}
@@ -2230,30 +2164,26 @@
firstReplace.commitTransaction();
Table afterFirstReplace = catalog.loadTable(TABLE);
- Assertions.assertThat(afterFirstReplace.schema().asStruct())
+ assertThat(afterFirstReplace.schema().asStruct())
.as("Table schema should match the original schema")
.isEqualTo(original.schema().asStruct());
- Assertions.assertThat(afterFirstReplace.spec().isUnpartitioned())
+ assertThat(afterFirstReplace.spec().isUnpartitioned())
.as("Table should be unpartitioned")
.isTrue();
- Assertions.assertThat(afterFirstReplace.sortOrder().isUnsorted())
- .as("Table should be unsorted")
- .isTrue();
+ assertThat(afterFirstReplace.sortOrder().isUnsorted()).as("Table should be unsorted").isTrue();
assertUUIDsMatch(original, afterFirstReplace);
assertFiles(afterFirstReplace, FILE_B);
secondReplace.commitTransaction();
Table afterSecondReplace = catalog.loadTable(TABLE);
- Assertions.assertThat(afterSecondReplace.schema().asStruct())
+ assertThat(afterSecondReplace.schema().asStruct())
.as("Table schema should match the original schema")
.isEqualTo(original.schema().asStruct());
- Assertions.assertThat(afterSecondReplace.spec().isUnpartitioned())
+ assertThat(afterSecondReplace.spec().isUnpartitioned())
.as("Table should be unpartitioned")
.isTrue();
- Assertions.assertThat(afterSecondReplace.sortOrder().isUnsorted())
- .as("Table should be unsorted")
- .isTrue();
+ assertThat(afterSecondReplace.sortOrder().isUnsorted()).as("Table should be unsorted").isTrue();
assertUUIDsMatch(original, afterSecondReplace);
assertFiles(afterSecondReplace, FILE_C);
}
@@ -2281,7 +2211,7 @@
firstReplace.commitTransaction();
Table afterFirstReplace = catalog.loadTable(TABLE);
- Assertions.assertThat(afterFirstReplace.schema().asStruct())
+ assertThat(afterFirstReplace.schema().asStruct())
.as("Table schema should match the new schema")
.isEqualTo(REPLACE_SCHEMA.asStruct());
assertUUIDsMatch(original, afterFirstReplace);
@@ -2290,7 +2220,7 @@
secondReplace.commitTransaction();
Table afterSecondReplace = catalog.loadTable(TABLE);
- Assertions.assertThat(afterSecondReplace.schema().asStruct())
+ assertThat(afterSecondReplace.schema().asStruct())
.as("Table schema should match the original schema")
.isEqualTo(original.schema().asStruct());
assertUUIDsMatch(original, afterSecondReplace);
@@ -2320,7 +2250,7 @@
firstReplace.commitTransaction();
Table afterFirstReplace = catalog.loadTable(TABLE);
- Assertions.assertThat(afterFirstReplace.schema().asStruct())
+ assertThat(afterFirstReplace.schema().asStruct())
.as("Table schema should match the original schema")
.isEqualTo(original.schema().asStruct());
assertUUIDsMatch(original, afterFirstReplace);
@@ -2329,7 +2259,7 @@
secondReplace.commitTransaction();
Table afterSecondReplace = catalog.loadTable(TABLE);
- Assertions.assertThat(afterSecondReplace.schema().asStruct())
+ assertThat(afterSecondReplace.schema().asStruct())
.as("Table schema should match the new schema")
.isEqualTo(REPLACE_SCHEMA.asStruct());
assertUUIDsMatch(original, afterSecondReplace);
@@ -2361,7 +2291,7 @@
firstReplace.commitTransaction();
Table afterFirstReplace = catalog.loadTable(TABLE);
- Assertions.assertThat(afterFirstReplace.schema().asStruct())
+ assertThat(afterFirstReplace.schema().asStruct())
.as("Table schema should match the original schema")
.isEqualTo(REPLACE_SCHEMA.asStruct());
@@ -2370,7 +2300,7 @@
// even though the new schema is identical, the assertion that the last assigned id has not
// changed will fail
- Assertions.assertThatThrownBy(secondReplace::commitTransaction)
+ assertThatThrownBy(secondReplace::commitTransaction)
.isInstanceOf(CommitFailedException.class)
.hasMessageStartingWith(
"Commit failed: Requirement failed: last assigned field id changed");
@@ -2400,7 +2330,7 @@
firstReplace.commitTransaction();
Table afterFirstReplace = catalog.loadTable(TABLE);
- Assertions.assertThat(afterFirstReplace.spec().fields())
+ assertThat(afterFirstReplace.spec().fields())
.as("Table spec should match the new spec")
.isEqualTo(TABLE_SPEC.fields());
assertUUIDsMatch(original, afterFirstReplace);
@@ -2409,7 +2339,7 @@
secondReplace.commitTransaction();
Table afterSecondReplace = catalog.loadTable(TABLE);
- Assertions.assertThat(afterSecondReplace.spec().isUnpartitioned())
+ assertThat(afterSecondReplace.spec().isUnpartitioned())
.as("Table should be unpartitioned")
.isTrue();
assertUUIDsMatch(original, afterSecondReplace);
@@ -2440,7 +2370,7 @@
firstReplace.commitTransaction();
Table afterFirstReplace = catalog.loadTable(TABLE);
- Assertions.assertThat(afterFirstReplace.spec().isUnpartitioned())
+ assertThat(afterFirstReplace.spec().isUnpartitioned())
.as("Table should be unpartitioned")
.isTrue();
assertUUIDsMatch(original, afterFirstReplace);
@@ -2449,7 +2379,7 @@
secondReplace.commitTransaction();
Table afterSecondReplace = catalog.loadTable(TABLE);
- Assertions.assertThat(afterSecondReplace.spec().fields())
+ assertThat(afterSecondReplace.spec().fields())
.as("Table spec should match the new spec")
.isEqualTo(TABLE_SPEC.fields());
assertUUIDsMatch(original, afterSecondReplace);
@@ -2482,7 +2412,7 @@
firstReplace.commitTransaction();
Table afterFirstReplace = catalog.loadTable(TABLE);
- Assertions.assertThat(afterFirstReplace.spec().fields())
+ assertThat(afterFirstReplace.spec().fields())
.as("Table spec should match the new spec")
.isEqualTo(TABLE_SPEC.fields());
assertUUIDsMatch(original, afterFirstReplace);
@@ -2490,7 +2420,7 @@
// even though the new spec is identical, the assertion that the last assigned id has not
// changed will fail
- Assertions.assertThatThrownBy(secondReplace::commitTransaction)
+ assertThatThrownBy(secondReplace::commitTransaction)
.isInstanceOf(CommitFailedException.class)
.hasMessageStartingWith(
"Commit failed: Requirement failed: last assigned partition id changed");
@@ -2520,7 +2450,7 @@
firstReplace.commitTransaction();
Table afterFirstReplace = catalog.loadTable(TABLE);
- Assertions.assertThat(afterFirstReplace.sortOrder())
+ assertThat(afterFirstReplace.sortOrder())
.as("Table order should match the new order")
.isEqualTo(TABLE_WRITE_ORDER);
assertUUIDsMatch(original, afterFirstReplace);
@@ -2529,9 +2459,7 @@
secondReplace.commitTransaction();
Table afterSecondReplace = catalog.loadTable(TABLE);
- Assertions.assertThat(afterSecondReplace.sortOrder().isUnsorted())
- .as("Table should be unsorted")
- .isTrue();
+ assertThat(afterSecondReplace.sortOrder().isUnsorted()).as("Table should be unsorted").isTrue();
assertUUIDsMatch(original, afterSecondReplace);
assertFiles(afterSecondReplace, FILE_C);
}
@@ -2565,16 +2493,14 @@
firstReplace.commitTransaction();
Table afterFirstReplace = catalog.loadTable(TABLE);
- Assertions.assertThat(afterFirstReplace.sortOrder().isSorted())
- .as("Table order should be set")
- .isTrue();
+ assertThat(afterFirstReplace.sortOrder().isSorted()).as("Table order should be set").isTrue();
assertUUIDsMatch(original, afterFirstReplace);
assertFiles(afterFirstReplace, FILE_B);
secondReplace.commitTransaction();
Table afterSecondReplace = catalog.loadTable(TABLE);
- Assertions.assertThat(afterSecondReplace.sortOrder().fields())
+ assertThat(afterSecondReplace.sortOrder().fields())
.as("Table order should match the new order")
.isEqualTo(TABLE_WRITE_ORDER.fields());
assertUUIDsMatch(original, afterSecondReplace);
@@ -2595,7 +2521,7 @@
table.updateSchema().addColumn("c", Types.LongType.get()).commit();
Set<String> metadataFileLocations = ReachableFileUtil.metadataFileLocations(table, false);
- Assertions.assertThat(metadataFileLocations).hasSize(4);
+ assertThat(metadataFileLocations).hasSize(4);
int maxPreviousVersionsToKeep = 2;
table
@@ -2607,14 +2533,14 @@
.commit();
metadataFileLocations = ReachableFileUtil.metadataFileLocations(table, false);
- Assertions.assertThat(metadataFileLocations).hasSize(maxPreviousVersionsToKeep + 1);
+ assertThat(metadataFileLocations).hasSize(maxPreviousVersionsToKeep + 1);
// for each new commit, the amount of metadata files should stay the same and old files should
// be deleted
for (int i = 1; i <= 5; i++) {
table.updateSchema().addColumn("d" + i, Types.LongType.get()).commit();
metadataFileLocations = ReachableFileUtil.metadataFileLocations(table, false);
- Assertions.assertThat(metadataFileLocations).hasSize(maxPreviousVersionsToKeep + 1);
+ assertThat(metadataFileLocations).hasSize(maxPreviousVersionsToKeep + 1);
}
maxPreviousVersionsToKeep = 4;
@@ -2630,7 +2556,7 @@
for (int i = 1; i <= 10; i++) {
table.updateSchema().addColumn("e" + i, Types.LongType.get()).commit();
metadataFileLocations = ReachableFileUtil.metadataFileLocations(table, false);
- Assertions.assertThat(metadataFileLocations).hasSize(maxPreviousVersionsToKeep + 1);
+ assertThat(metadataFileLocations).hasSize(maxPreviousVersionsToKeep + 1);
}
}
@@ -2638,7 +2564,7 @@
public void tableCreationWithoutNamespace() {
Assumptions.assumeTrue(requiresNamespaceCreate());
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() ->
catalog().buildTable(TableIdentifier.of("non-existing", "table"), SCHEMA).create())
.isInstanceOf(NoSuchNamespaceException.class)
@@ -2675,27 +2601,25 @@
Table registeredTable = catalog.registerTable(TABLE, metadataLocation);
- Assertions.assertThat(registeredTable).isNotNull();
- Assertions.assertThat(catalog.tableExists(TABLE)).as("Table must exist").isTrue();
- Assertions.assertThat(registeredTable.properties())
+ assertThat(registeredTable).isNotNull();
+ assertThat(catalog.tableExists(TABLE)).as("Table must exist").isTrue();
+ assertThat(registeredTable.properties())
.as("Props must match")
.containsAllEntriesOf(properties);
- Assertions.assertThat(registeredTable.schema().asStruct())
+ assertThat(registeredTable.schema().asStruct())
.as("Schema must match")
.isEqualTo(originalTable.schema().asStruct());
- Assertions.assertThat(registeredTable.specs())
- .as("Specs must match")
- .isEqualTo(originalTable.specs());
- Assertions.assertThat(registeredTable.sortOrders())
+ assertThat(registeredTable.specs()).as("Specs must match").isEqualTo(originalTable.specs());
+ assertThat(registeredTable.sortOrders())
.as("Sort orders must match")
.isEqualTo(originalTable.sortOrders());
- Assertions.assertThat(registeredTable.currentSnapshot())
+ assertThat(registeredTable.currentSnapshot())
.as("Current snapshot must match")
.isEqualTo(originalTable.currentSnapshot());
- Assertions.assertThat(registeredTable.snapshots())
+ assertThat(registeredTable.snapshots())
.as("Snapshots must match")
.isEqualTo(originalTable.snapshots());
- Assertions.assertThat(registeredTable.history())
+ assertThat(registeredTable.history())
.as("History must match")
.isEqualTo(originalTable.history());
@@ -2705,9 +2629,9 @@
registeredTable.newFastAppend().appendFile(FILE_A).commit();
assertFiles(registeredTable, FILE_B, FILE_C, FILE_A);
- Assertions.assertThat(catalog.loadTable(TABLE)).isNotNull();
- Assertions.assertThat(catalog.dropTable(TABLE)).isTrue();
- Assertions.assertThat(catalog.tableExists(TABLE)).isFalse();
+ assertThat(catalog.loadTable(TABLE)).isNotNull();
+ assertThat(catalog.dropTable(TABLE)).isTrue();
+ assertThat(catalog.tableExists(TABLE)).isFalse();
}
@Test
@@ -2724,36 +2648,36 @@
Table table = catalog.loadTable(identifier);
TableOperations ops = ((BaseTable) table).operations();
String metadataLocation = ops.current().metadataFileLocation();
- Assertions.assertThatThrownBy(() -> catalog.registerTable(identifier, metadataLocation))
+ assertThatThrownBy(() -> catalog.registerTable(identifier, metadataLocation))
.isInstanceOf(AlreadyExistsException.class)
.hasMessageStartingWith("Table already exists: a.t1");
- Assertions.assertThat(catalog.dropTable(identifier)).isTrue();
+ assertThat(catalog.dropTable(identifier)).isTrue();
}
private static void assertEmpty(String context, Catalog catalog, Namespace ns) {
try {
- Assertions.assertThat(catalog.listTables(ns)).as(context).isEmpty();
+ assertThat(catalog.listTables(ns)).as(context).isEmpty();
} catch (NoSuchNamespaceException e) {
// it is okay if the catalog throws NoSuchNamespaceException when it is empty
}
}
public void assertUUIDsMatch(Table expected, Table actual) {
- Assertions.assertThat(((BaseTable) actual).operations().current().uuid())
+ assertThat(((BaseTable) actual).operations().current().uuid())
.as("Table UUID should not change")
.isEqualTo(((BaseTable) expected).operations().current().uuid());
}
public void assertPreviousMetadataFileCount(Table table, int metadataFileCount) {
TableOperations ops = ((BaseTable) table).operations();
- Assertions.assertThat(ops.current().previousFiles())
+ assertThat(ops.current().previousFiles())
.as("Table should have correct number of previous metadata locations")
.hasSize(metadataFileCount);
}
public void assertNoFiles(Table table) {
try (CloseableIterable<FileScanTask> tasks = table.newScan().planFiles()) {
- Assertions.assertThat(tasks).as("Should contain no files").isEmpty();
+ assertThat(tasks).as("Should contain no files").isEmpty();
} catch (IOException e) {
throw new UncheckedIOException(e);
}
@@ -2766,10 +2690,8 @@
.map(FileScanTask::file)
.map(DataFile::path)
.collect(Collectors.toList());
- Assertions.assertThat(paths)
- .as("Should contain expected number of data files")
- .hasSize(files.length);
- Assertions.assertThat(CharSequenceSet.of(paths))
+ assertThat(paths).as("Should contain expected number of data files").hasSize(files.length);
+ assertThat(CharSequenceSet.of(paths))
.as("Should contain correct file paths")
.isEqualTo(CharSequenceSet.of(Iterables.transform(Arrays.asList(files), DataFile::path)));
} catch (IOException e) {
@@ -2782,11 +2704,7 @@
Streams.stream(tasks)
.map(FileScanTask::file)
.filter(file -> file.path().equals(dataFile.path()))
- .forEach(
- file ->
- Assertions.assertThat(file.specId())
- .as("Spec ID should match")
- .isEqualTo(specId));
+ .forEach(file -> assertThat(file.specId()).as("Spec ID should match").isEqualTo(specId));
} catch (IOException e) {
throw new UncheckedIOException(e);
}
@@ -2798,7 +2716,7 @@
.map(FileScanTask::file)
.forEach(
file ->
- Assertions.assertThat(file.specId())
+ assertThat(file.specId())
.as("Spec ID should match")
.isEqualTo(table.spec().specId()));
} catch (IOException e) {
diff --git a/core/src/test/java/org/apache/iceberg/catalog/TestTableIdentifierParser.java b/core/src/test/java/org/apache/iceberg/catalog/TestTableIdentifierParser.java
index 587b939..739a87c 100644
--- a/core/src/test/java/org/apache/iceberg/catalog/TestTableIdentifierParser.java
+++ b/core/src/test/java/org/apache/iceberg/catalog/TestTableIdentifierParser.java
@@ -18,7 +18,9 @@
*/
package org.apache.iceberg.catalog;
-import org.assertj.core.api.Assertions;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
+
import org.junit.jupiter.api.Test;
public class TestTableIdentifierParser {
@@ -27,13 +29,13 @@
public void testTableIdentifierToJson() {
String json = "{\"namespace\":[\"accounting\",\"tax\"],\"name\":\"paid\"}";
TableIdentifier identifier = TableIdentifier.of(Namespace.of("accounting", "tax"), "paid");
- Assertions.assertThat(TableIdentifierParser.toJson(identifier))
+ assertThat(TableIdentifierParser.toJson(identifier))
.as("Should be able to serialize a table identifier with both namespace and name")
.isEqualTo(json);
TableIdentifier identifierWithEmptyNamespace = TableIdentifier.of(Namespace.empty(), "paid");
String jsonWithEmptyNamespace = "{\"namespace\":[],\"name\":\"paid\"}";
- Assertions.assertThat(TableIdentifierParser.toJson(identifierWithEmptyNamespace))
+ assertThat(TableIdentifierParser.toJson(identifierWithEmptyNamespace))
.as("Should be able to serialize a table identifier that uses the empty namespace")
.isEqualTo(jsonWithEmptyNamespace);
}
@@ -42,18 +44,18 @@
public void testTableIdentifierFromJson() {
String json = "{\"namespace\":[\"accounting\",\"tax\"],\"name\":\"paid\"}";
TableIdentifier identifier = TableIdentifier.of(Namespace.of("accounting", "tax"), "paid");
- Assertions.assertThat(TableIdentifierParser.fromJson(json))
+ assertThat(TableIdentifierParser.fromJson(json))
.as("Should be able to deserialize a valid table identifier")
.isEqualTo(identifier);
TableIdentifier identifierWithEmptyNamespace = TableIdentifier.of(Namespace.empty(), "paid");
String jsonWithEmptyNamespace = "{\"namespace\":[],\"name\":\"paid\"}";
- Assertions.assertThat(TableIdentifierParser.fromJson(jsonWithEmptyNamespace))
+ assertThat(TableIdentifierParser.fromJson(jsonWithEmptyNamespace))
.as("Should be able to deserialize a valid multi-level table identifier")
.isEqualTo(identifierWithEmptyNamespace);
String identifierMissingNamespace = "{\"name\":\"paid\"}";
- Assertions.assertThat(TableIdentifierParser.fromJson(identifierMissingNamespace))
+ assertThat(TableIdentifierParser.fromJson(identifierMissingNamespace))
.as(
"Should implicitly convert a missing namespace into the the empty namespace when parsing")
.isEqualTo(identifierWithEmptyNamespace);
@@ -62,22 +64,22 @@
@Test
public void testFailParsingWhenNullOrEmptyJson() {
String nullJson = null;
- Assertions.assertThatThrownBy(() -> TableIdentifierParser.fromJson(nullJson))
+ assertThatThrownBy(() -> TableIdentifierParser.fromJson(nullJson))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse table identifier from invalid JSON: null");
String emptyString = "";
- Assertions.assertThatThrownBy(() -> TableIdentifierParser.fromJson(emptyString))
+ assertThatThrownBy(() -> TableIdentifierParser.fromJson(emptyString))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse table identifier from invalid JSON: ''");
String emptyJson = "{}";
- Assertions.assertThatThrownBy(() -> TableIdentifierParser.fromJson(emptyJson))
+ assertThatThrownBy(() -> TableIdentifierParser.fromJson(emptyJson))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse missing string: name");
String emptyJsonArray = "[]";
- Assertions.assertThatThrownBy(() -> TableIdentifierParser.fromJson(emptyJsonArray))
+ assertThatThrownBy(() -> TableIdentifierParser.fromJson(emptyJsonArray))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse missing or non-object table identifier: []");
}
@@ -85,7 +87,7 @@
@Test
public void testFailParsingWhenMissingRequiredFields() {
String identifierMissingName = "{\"namespace\":[\"accounting\",\"tax\"]}";
- Assertions.assertThatThrownBy(() -> TableIdentifierParser.fromJson(identifierMissingName))
+ assertThatThrownBy(() -> TableIdentifierParser.fromJson(identifierMissingName))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse missing string: name");
}
@@ -93,12 +95,12 @@
@Test
public void testFailWhenFieldsHaveInvalidValues() {
String invalidNamespace = "{\"namespace\":\"accounting.tax\",\"name\":\"paid\"}";
- Assertions.assertThatThrownBy(() -> TableIdentifierParser.fromJson(invalidNamespace))
+ assertThatThrownBy(() -> TableIdentifierParser.fromJson(invalidNamespace))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse JSON array from non-array value: namespace: \"accounting.tax\"");
String invalidName = "{\"namespace\":[\"accounting\",\"tax\"],\"name\":1234}";
- Assertions.assertThatThrownBy(() -> TableIdentifierParser.fromJson(invalidName))
+ assertThatThrownBy(() -> TableIdentifierParser.fromJson(invalidName))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse to a string value: name: 1234");
}
diff --git a/core/src/test/java/org/apache/iceberg/encryption/TestCiphers.java b/core/src/test/java/org/apache/iceberg/encryption/TestCiphers.java
index eedcea4..72706de 100644
--- a/core/src/test/java/org/apache/iceberg/encryption/TestCiphers.java
+++ b/core/src/test/java/org/apache/iceberg/encryption/TestCiphers.java
@@ -19,10 +19,10 @@
package org.apache.iceberg.encryption;
import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
import java.nio.charset.StandardCharsets;
import java.security.SecureRandom;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
public class TestCiphers {
@@ -74,7 +74,7 @@
final byte[] badAad = (aad == null) ? new byte[1] : aad;
badAad[0]++;
- Assertions.assertThatThrownBy(() -> decryptor.decrypt(ciphertext, badAad))
+ assertThatThrownBy(() -> decryptor.decrypt(ciphertext, badAad))
.isInstanceOf(RuntimeException.class)
.hasMessageContaining("GCM tag check failed");
}
@@ -82,7 +82,7 @@
if (testCorruption) {
ciphertext[ciphertext.length / 2]++;
- Assertions.assertThatThrownBy(() -> decryptor.decrypt(ciphertext, aad))
+ assertThatThrownBy(() -> decryptor.decrypt(ciphertext, aad))
.isInstanceOf(RuntimeException.class)
.hasMessageContaining("GCM tag check failed");
}
diff --git a/core/src/test/java/org/apache/iceberg/expressions/TestExpressionParser.java b/core/src/test/java/org/apache/iceberg/expressions/TestExpressionParser.java
index a0ffe0a..43e2f13 100644
--- a/core/src/test/java/org/apache/iceberg/expressions/TestExpressionParser.java
+++ b/core/src/test/java/org/apache/iceberg/expressions/TestExpressionParser.java
@@ -20,6 +20,8 @@
import static org.apache.iceberg.types.Types.NestedField.optional;
import static org.apache.iceberg.types.Types.NestedField.required;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
import com.fasterxml.jackson.databind.JsonNode;
import java.math.BigDecimal;
@@ -27,7 +29,6 @@
import java.util.UUID;
import org.apache.iceberg.Schema;
import org.apache.iceberg.types.Types;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
public class TestExpressionParser {
@@ -101,12 +102,12 @@
String boundJson = ExpressionParser.toJson(bound, true);
String unboundJson = ExpressionParser.toJson(expr, true);
- Assertions.assertThat(boundJson)
+ assertThat(boundJson)
.as("Bound and unbound should produce identical json")
.isEqualTo(unboundJson);
Expression parsed = ExpressionParser.fromJson(boundJson, SCHEMA);
- Assertions.assertThat(ExpressionUtil.equivalent(expr, parsed, SUPPORTED_PRIMITIVES, true))
+ assertThat(ExpressionUtil.equivalent(expr, parsed, SUPPORTED_PRIMITIVES, true))
.as("Round-trip value should be equivalent")
.isTrue();
}
@@ -114,45 +115,42 @@
@Test
public void nullExpression() {
- Assertions.assertThatThrownBy(() -> ExpressionParser.toJson(null))
+ assertThatThrownBy(() -> ExpressionParser.toJson(null))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Invalid expression: null");
- Assertions.assertThatThrownBy(() -> ExpressionParser.fromJson((JsonNode) null))
+ assertThatThrownBy(() -> ExpressionParser.fromJson((JsonNode) null))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse expression from null object");
}
@Test
public void trueExpression() {
- Assertions.assertThat(ExpressionParser.toJson(Expressions.alwaysTrue(), true))
- .isEqualTo("true");
- Assertions.assertThat(ExpressionParser.fromJson("true")).isEqualTo(Expressions.alwaysTrue());
+ assertThat(ExpressionParser.toJson(Expressions.alwaysTrue(), true)).isEqualTo("true");
+ assertThat(ExpressionParser.fromJson("true")).isEqualTo(Expressions.alwaysTrue());
// type=literal is also supported
String longJson = "{\n \"type\" : \"literal\",\n \"value\" : true\n}";
- Assertions.assertThat(ExpressionParser.fromJson(longJson)).isEqualTo(Expressions.alwaysTrue());
+ assertThat(ExpressionParser.fromJson(longJson)).isEqualTo(Expressions.alwaysTrue());
}
@Test
public void falseExpression() {
- Assertions.assertThat(ExpressionParser.toJson(Expressions.alwaysFalse(), true))
- .isEqualTo("false");
- Assertions.assertThat(ExpressionParser.fromJson("false")).isEqualTo(Expressions.alwaysFalse());
+ assertThat(ExpressionParser.toJson(Expressions.alwaysFalse(), true)).isEqualTo("false");
+ assertThat(ExpressionParser.fromJson("false")).isEqualTo(Expressions.alwaysFalse());
// type=literal is also supported
String longJson = "{\n \"type\" : \"literal\",\n \"value\" : false\n}";
- Assertions.assertThat(ExpressionParser.fromJson(longJson)).isEqualTo(Expressions.alwaysFalse());
+ assertThat(ExpressionParser.fromJson(longJson)).isEqualTo(Expressions.alwaysFalse());
}
@Test
public void eqExpression() {
String expected =
"{\n" + " \"type\" : \"eq\",\n" + " \"term\" : \"name\",\n" + " \"value\" : 25\n" + "}";
- Assertions.assertThat(ExpressionParser.toJson(Expressions.equal("name", 25), true))
- .isEqualTo(expected);
+ assertThat(ExpressionParser.toJson(Expressions.equal("name", 25), true)).isEqualTo(expected);
Expression expression = ExpressionParser.fromJson(expected);
- Assertions.assertThat(ExpressionParser.toJson(expression, true)).isEqualTo(expected);
+ assertThat(ExpressionParser.toJson(expression, true)).isEqualTo(expected);
}
@Test
@@ -168,19 +166,18 @@
+ " \"value\" : 50\n"
+ "}";
- Assertions.assertThat(
+ assertThat(
ExpressionParser.toJson(
Expressions.lessThanOrEqual(Expressions.bucket("id", 100), 50), true))
.isEqualTo(expected);
// schema is required to parse transform expressions
- Assertions.assertThat(
- ExpressionParser.toJson(ExpressionParser.fromJson(expected, SCHEMA), true))
+ assertThat(ExpressionParser.toJson(ExpressionParser.fromJson(expected, SCHEMA), true))
.isEqualTo(expected);
}
@Test
public void extraFields() {
- Assertions.assertThat(
+ assertThat(
ExpressionParser.toJson(
ExpressionParser.fromJson(
"{\n"
@@ -201,7 +198,7 @@
@Test
public void invalidTerm() {
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() ->
ExpressionParser.fromJson(
"{\n"
@@ -218,7 +215,7 @@
@Test
public void invalidValues() {
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() ->
ExpressionParser.fromJson(
"{\n"
@@ -229,7 +226,7 @@
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse NOT_NAN predicate: has invalid value field");
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() ->
ExpressionParser.fromJson(
"{\n"
@@ -240,7 +237,7 @@
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse IS_NAN predicate: has invalid values field");
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() ->
ExpressionParser.fromJson(
"{\n"
@@ -251,7 +248,7 @@
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse LT predicate: missing value");
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() ->
ExpressionParser.fromJson(
"{\n"
@@ -263,7 +260,7 @@
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse LT predicate: has invalid values field");
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() ->
ExpressionParser.fromJson(
"{\n"
@@ -274,7 +271,7 @@
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse NOT_IN predicate: missing values");
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() ->
ExpressionParser.fromJson(
"{\n"
@@ -289,7 +286,7 @@
@Test
public void invalidOperationType() {
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() ->
ExpressionParser.fromJson(
"{\n"
@@ -303,7 +300,7 @@
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Invalid operation type: illegal");
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() ->
ExpressionParser.fromJson(
"{\n"
@@ -320,16 +317,16 @@
@Test
public void invalidAnd() {
- Assertions.assertThatThrownBy(() -> ExpressionParser.fromJson("{\n \"type\" : \"and\"\n}"))
+ assertThatThrownBy(() -> ExpressionParser.fromJson("{\n \"type\" : \"and\"\n}"))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse missing field: left");
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() -> ExpressionParser.fromJson("{\n \"type\" : \"and\",\n \"left\": true}"))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse missing field: right");
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() -> ExpressionParser.fromJson("{\n \"type\" : \"and\",\n \"right\": true}"))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse missing field: left");
@@ -344,10 +341,9 @@
+ " \"value\" : 50\n"
+ "}";
- Assertions.assertThat(
- ExpressionParser.toJson(Expressions.lessThanOrEqual("column-name", 50), true))
+ assertThat(ExpressionParser.toJson(Expressions.lessThanOrEqual("column-name", 50), true))
.isEqualTo(expected);
- Assertions.assertThat(ExpressionParser.toJson(ExpressionParser.fromJson(expected), true))
+ assertThat(ExpressionParser.toJson(ExpressionParser.fromJson(expected), true))
.isEqualTo(expected);
}
@@ -370,8 +366,7 @@
+ " }\n"
+ "}";
- Assertions.assertThat(ExpressionParser.toJson(ExpressionParser.fromJson(json), true))
- .isEqualTo(expected);
+ assertThat(ExpressionParser.toJson(ExpressionParser.fromJson(json), true)).isEqualTo(expected);
}
@Test
@@ -393,8 +388,7 @@
+ " \"value\" : 50\n"
+ "}";
- Assertions.assertThat(ExpressionParser.toJson(ExpressionParser.fromJson(json), true))
- .isEqualTo(expected);
+ assertThat(ExpressionParser.toJson(ExpressionParser.fromJson(json), true)).isEqualTo(expected);
}
@Test
@@ -419,8 +413,8 @@
Expressions.greaterThanOrEqual("column-name-1", 50),
Expressions.in("column-name-2", "one", "two"));
- Assertions.assertThat(ExpressionParser.toJson(expression, true)).isEqualTo(expected);
- Assertions.assertThat(ExpressionParser.toJson(ExpressionParser.fromJson(expected), true))
+ assertThat(ExpressionParser.toJson(expression, true)).isEqualTo(expected);
+ assertThat(ExpressionParser.toJson(ExpressionParser.fromJson(expected), true))
.isEqualTo(expected);
}
@@ -443,8 +437,8 @@
Expression expression =
Expressions.or(
Expressions.lessThan("column-name-1", 50), Expressions.notNull("column-name-2"));
- Assertions.assertThat(ExpressionParser.toJson(expression, true)).isEqualTo(expected);
- Assertions.assertThat(ExpressionParser.toJson(ExpressionParser.fromJson(expected), true))
+ assertThat(ExpressionParser.toJson(expression, true)).isEqualTo(expected);
+ assertThat(ExpressionParser.toJson(ExpressionParser.fromJson(expected), true))
.isEqualTo(expected);
}
@@ -462,8 +456,8 @@
Expression expression = Expressions.not(Expressions.greaterThanOrEqual("column-name-1", 50));
- Assertions.assertThat(ExpressionParser.toJson(expression, true)).isEqualTo(expected);
- Assertions.assertThat(ExpressionParser.toJson(ExpressionParser.fromJson(expected), true))
+ assertThat(ExpressionParser.toJson(expression, true)).isEqualTo(expected);
+ assertThat(ExpressionParser.toJson(ExpressionParser.fromJson(expected), true))
.isEqualTo(expected);
}
@@ -497,8 +491,8 @@
Expressions.equal("column-name-2", "test"));
Expression expression = Expressions.or(and, Expressions.isNaN("column-name-3"));
- Assertions.assertThat(ExpressionParser.toJson(expression, true)).isEqualTo(expected);
- Assertions.assertThat(ExpressionParser.toJson(ExpressionParser.fromJson(expected), true))
+ assertThat(ExpressionParser.toJson(expression, true)).isEqualTo(expected);
+ assertThat(ExpressionParser.toJson(ExpressionParser.fromJson(expected), true))
.isEqualTo(expected);
}
@@ -514,8 +508,8 @@
ByteBuffer byteBuffer = ByteBuffer.wrap(new byte[] {1, 2, 3});
Expression expression = Expressions.equal("column-name", byteBuffer);
- Assertions.assertThat(ExpressionParser.toJson(expression, true)).isEqualTo(expected);
- Assertions.assertThat(ExpressionParser.toJson(ExpressionParser.fromJson(expected), true))
+ assertThat(ExpressionParser.toJson(expression, true)).isEqualTo(expected);
+ assertThat(ExpressionParser.toJson(ExpressionParser.fromJson(expected), true))
.isEqualTo(expected);
}
@@ -530,8 +524,8 @@
Expression expression = Expressions.in("column-name", new BigDecimal("3.14"));
- Assertions.assertThat(ExpressionParser.toJson(expression, true)).isEqualTo(expected);
- Assertions.assertThat(ExpressionParser.toJson(ExpressionParser.fromJson(expected), true))
+ assertThat(ExpressionParser.toJson(expression, true)).isEqualTo(expected);
+ assertThat(ExpressionParser.toJson(ExpressionParser.fromJson(expected), true))
.isEqualTo(expected);
}
@@ -546,8 +540,8 @@
Expression expression = Expressions.in("column-name", new BigDecimal("3.14E+4"));
- Assertions.assertThat(ExpressionParser.toJson(expression, true)).isEqualTo(expected);
- Assertions.assertThat(ExpressionParser.toJson(ExpressionParser.fromJson(expected), true))
+ assertThat(ExpressionParser.toJson(expression, true)).isEqualTo(expected);
+ assertThat(ExpressionParser.toJson(ExpressionParser.fromJson(expected), true))
.isEqualTo(expected);
}
}
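Aside (illustration only, not part of the commit diff): every hunk in this patch applies the same mechanical substitution -- qualified Assertions.assertThat(...) / Assertions.assertThatThrownBy(...) calls become statically imported assertThat(...) / assertThatThrownBy(...) calls. A minimal sketch of the resulting style follows; the package, class name, and asserted values are hypothetical and chosen only for illustration.

package org.apache.iceberg.example; // hypothetical package, not in the repository

import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatThrownBy;

import org.junit.jupiter.api.Test;

public class StaticImportStyleTest {

  @Test
  public void valueAssertion() {
    // assertThat(...) resolves through the static import instead of Assertions.assertThat(...)
    assertThat("paid").as("Example value assertion").isEqualTo("paid");
  }

  @Test
  public void exceptionAssertion() {
    // assertThatThrownBy(...) follows the same convention for exception checks
    assertThatThrownBy(() -> Integer.parseInt("not-a-number"))
        .isInstanceOf(NumberFormatException.class)
        .hasMessageContaining("not-a-number");
  }
}

The fluent call chains are untouched; only the import and the call prefix change, which is why most hunks in this diff are single-line substitutions.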
diff --git a/core/src/test/java/org/apache/iceberg/hadoop/HadoopFileIOTest.java b/core/src/test/java/org/apache/iceberg/hadoop/HadoopFileIOTest.java
index 0a19581..95d0b6a 100644
--- a/core/src/test/java/org/apache/iceberg/hadoop/HadoopFileIOTest.java
+++ b/core/src/test/java/org/apache/iceberg/hadoop/HadoopFileIOTest.java
@@ -18,6 +18,9 @@
*/
package org.apache.iceberg.hadoop;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
+
import java.io.File;
import java.io.IOException;
import java.io.UncheckedIOException;
@@ -37,7 +40,6 @@
import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap;
import org.apache.iceberg.relocated.com.google.common.collect.Lists;
import org.apache.iceberg.relocated.com.google.common.collect.Streams;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.io.TempDir;
@@ -71,14 +73,13 @@
Path scalePath = new Path(parent, Integer.toString(scale));
createRandomFiles(scalePath, scale);
- Assertions.assertThat(
+ assertThat(
Streams.stream(hadoopFileIO.listPrefix(scalePath.toUri().toString())).count())
.isEqualTo((long) scale);
});
long totalFiles = scaleSizes.stream().mapToLong(Integer::longValue).sum();
- Assertions.assertThat(
- Streams.stream(hadoopFileIO.listPrefix(parent.toUri().toString())).count())
+ assertThat(Streams.stream(hadoopFileIO.listPrefix(parent.toUri().toString())).count())
.isEqualTo(totalFiles);
}
@@ -89,11 +90,9 @@
fs.createNewFile(randomFilePath);
// check existence of the created file
- Assertions.assertThat(hadoopFileIO.newInputFile(randomFilePath.toUri().toString()).exists())
- .isTrue();
+ assertThat(hadoopFileIO.newInputFile(randomFilePath.toUri().toString()).exists()).isTrue();
fs.delete(randomFilePath, false);
- Assertions.assertThat(hadoopFileIO.newInputFile(randomFilePath.toUri().toString()).exists())
- .isFalse();
+ assertThat(hadoopFileIO.newInputFile(randomFilePath.toUri().toString()).exists()).isFalse();
}
@Test
@@ -112,7 +111,7 @@
hadoopFileIO.deletePrefix(scalePath.toUri().toString());
// Hadoop filesystem will throw if the path does not exist
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() -> hadoopFileIO.listPrefix(scalePath.toUri().toString()).iterator())
.isInstanceOf(UncheckedIOException.class)
.hasMessageContaining("java.io.FileNotFoundException");
@@ -120,8 +119,7 @@
hadoopFileIO.deletePrefix(parent.toUri().toString());
// Hadoop filesystem will throw if the path does not exist
- Assertions.assertThatThrownBy(
- () -> hadoopFileIO.listPrefix(parent.toUri().toString()).iterator())
+ assertThatThrownBy(() -> hadoopFileIO.listPrefix(parent.toUri().toString()).iterator())
.isInstanceOf(UncheckedIOException.class)
.hasMessageContaining("java.io.FileNotFoundException");
}
@@ -133,15 +131,14 @@
hadoopFileIO.deleteFiles(
filesCreated.stream().map(Path::toString).collect(Collectors.toList()));
filesCreated.forEach(
- file ->
- Assertions.assertThat(hadoopFileIO.newInputFile(file.toString()).exists()).isFalse());
+ file -> assertThat(hadoopFileIO.newInputFile(file.toString()).exists()).isFalse());
}
@Test
public void testDeleteFilesErrorHandling() {
List<String> filesCreated =
random.ints(2).mapToObj(x -> "fakefsnotreal://file-" + x).collect(Collectors.toList());
- Assertions.assertThatThrownBy(() -> hadoopFileIO.deleteFiles(filesCreated))
+ assertThatThrownBy(() -> hadoopFileIO.deleteFiles(filesCreated))
.isInstanceOf(BulkDeletionFailureException.class)
.hasMessage("Failed to delete 2 files");
}
@@ -154,8 +151,7 @@
testHadoopFileIO.initialize(ImmutableMap.of("k1", "v1"));
FileIO roundTripSerializedFileIO = TestHelpers.KryoHelpers.roundTripSerialize(testHadoopFileIO);
- Assertions.assertThat(roundTripSerializedFileIO.properties())
- .isEqualTo(testHadoopFileIO.properties());
+ assertThat(roundTripSerializedFileIO.properties()).isEqualTo(testHadoopFileIO.properties());
}
@Test
@@ -166,8 +162,7 @@
testHadoopFileIO.initialize(ImmutableMap.of("k1", "v1"));
FileIO roundTripSerializedFileIO = TestHelpers.roundTripSerialize(testHadoopFileIO);
- Assertions.assertThat(roundTripSerializedFileIO.properties())
- .isEqualTo(testHadoopFileIO.properties());
+ assertThat(roundTripSerializedFileIO.properties()).isEqualTo(testHadoopFileIO.properties());
}
@Test
@@ -180,7 +175,7 @@
.hiddenImpl(ResolvingFileIO.class, String.class)
.build(resolvingFileIO)
.invoke("hdfs://foo/bar");
- Assertions.assertThat(result).isInstanceOf(HadoopFileIO.class);
+ assertThat(result).isInstanceOf(HadoopFileIO.class);
}
private List<Path> createRandomFiles(Path parent, int count) {
diff --git a/core/src/test/java/org/apache/iceberg/hadoop/TestCachingCatalog.java b/core/src/test/java/org/apache/iceberg/hadoop/TestCachingCatalog.java
index 31b4143..cb7ca64 100644
--- a/core/src/test/java/org/apache/iceberg/hadoop/TestCachingCatalog.java
+++ b/core/src/test/java/org/apache/iceberg/hadoop/TestCachingCatalog.java
@@ -18,6 +18,9 @@
*/
package org.apache.iceberg.hadoop;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
+
import com.github.benmanes.caffeine.cache.Cache;
import java.io.IOException;
import java.time.Duration;
@@ -41,7 +44,6 @@
import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap;
import org.apache.iceberg.relocated.com.google.common.collect.Lists;
import org.apache.iceberg.util.FakeTicker;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
@@ -86,15 +88,15 @@
Table manifestsMetaTable2 = catalog.loadTable(manifestsMetaTableIdent);
// metadata tables are cached
- Assertions.assertThat(filesMetaTable2).isEqualTo(filesMetaTable);
- Assertions.assertThat(manifestsMetaTable2).isEqualTo(manifestsMetaTable);
+ assertThat(filesMetaTable2).isEqualTo(filesMetaTable);
+ assertThat(manifestsMetaTable2).isEqualTo(manifestsMetaTable);
// the current snapshot of origin table is updated after committing
- Assertions.assertThat(table.currentSnapshot()).isNotEqualTo(oldSnapshot);
+ assertThat(table.currentSnapshot()).isNotEqualTo(oldSnapshot);
// underlying table operations in metadata tables are shared with the origin table
- Assertions.assertThat(filesMetaTable2.currentSnapshot()).isEqualTo(table.currentSnapshot());
- Assertions.assertThat(manifestsMetaTable2.currentSnapshot()).isEqualTo(table.currentSnapshot());
+ assertThat(filesMetaTable2.currentSnapshot()).isEqualTo(table.currentSnapshot());
+ assertThat(manifestsMetaTable2.currentSnapshot()).isEqualTo(table.currentSnapshot());
}
@Test
@@ -128,20 +130,20 @@
// remember the new snapshot
Snapshot newSnapshot = table.currentSnapshot();
- Assertions.assertThat(newSnapshot).as("Snapshots must be different").isNotEqualTo(oldSnapshot);
+ assertThat(newSnapshot).as("Snapshots must be different").isNotEqualTo(oldSnapshot);
// validate metadata tables were correctly invalidated
for (MetadataTableType type : MetadataTableType.values()) {
TableIdentifier metadataIdent1 = TableIdentifier.parse(tableIdent + "." + type.name());
Table metadataTable1 = catalog.loadTable(metadataIdent1);
- Assertions.assertThat(metadataTable1.currentSnapshot())
+ assertThat(metadataTable1.currentSnapshot())
.as("Snapshot must be new")
.isEqualTo(newSnapshot);
TableIdentifier metadataIdent2 =
TableIdentifier.parse(tableIdent + "." + type.name().toLowerCase(Locale.ROOT));
Table metadataTable2 = catalog.loadTable(metadataIdent2);
- Assertions.assertThat(metadataTable2.currentSnapshot())
+ assertThat(metadataTable2.currentSnapshot())
.as("Snapshot must be new")
.isEqualTo(newSnapshot);
}
@@ -154,12 +156,12 @@
catalog.createTable(tableIdent, SCHEMA, SPEC, ImmutableMap.of("key2", "value2"));
Table table = catalog.loadTable(tableIdent);
- Assertions.assertThat(table.name()).as("Name must match").isEqualTo("hadoop.db.ns1.ns2.tbl");
+ assertThat(table.name()).as("Name must match").isEqualTo("hadoop.db.ns1.ns2.tbl");
TableIdentifier snapshotsTableIdent =
TableIdentifier.of("db", "ns1", "ns2", "tbl", "snapshots");
Table snapshotsTable = catalog.loadTable(snapshotsTableIdent);
- Assertions.assertThat(snapshotsTable.name())
+ assertThat(snapshotsTable.name())
.as("Name must match")
.isEqualTo("hadoop.db.ns1.ns2.tbl.snapshots");
}
@@ -174,22 +176,16 @@
catalog.createTable(tableIdent, SCHEMA, SPEC, ImmutableMap.of("key", "value"));
// Ensure table is cached with full ttl remaining upon creation
- Assertions.assertThat(catalog.cache().asMap()).containsKey(tableIdent);
- Assertions.assertThat(catalog.remainingAgeFor(tableIdent))
- .isPresent()
- .get()
- .isEqualTo(EXPIRATION_TTL);
+ assertThat(catalog.cache().asMap()).containsKey(tableIdent);
+ assertThat(catalog.remainingAgeFor(tableIdent)).isPresent().get().isEqualTo(EXPIRATION_TTL);
ticker.advance(HALF_OF_EXPIRATION);
- Assertions.assertThat(catalog.cache().asMap()).containsKey(tableIdent);
- Assertions.assertThat(catalog.ageOf(tableIdent))
- .isPresent()
- .get()
- .isEqualTo(HALF_OF_EXPIRATION);
+ assertThat(catalog.cache().asMap()).containsKey(tableIdent);
+ assertThat(catalog.ageOf(tableIdent)).isPresent().get().isEqualTo(HALF_OF_EXPIRATION);
ticker.advance(HALF_OF_EXPIRATION.plus(Duration.ofSeconds(10)));
- Assertions.assertThat(catalog.cache().asMap()).doesNotContainKey(tableIdent);
- Assertions.assertThat(catalog.loadTable(tableIdent))
+ assertThat(catalog.cache().asMap()).doesNotContainKey(tableIdent);
+ assertThat(catalog.loadTable(tableIdent))
.as("CachingCatalog should return a new instance after expiration")
.isNotSameAs(table);
}
@@ -202,48 +198,42 @@
TableIdentifier tableIdent = TableIdentifier.of(namespace, "tbl");
catalog.createTable(tableIdent, SCHEMA, SPEC, ImmutableMap.of("key", "value"));
- Assertions.assertThat(catalog.cache().asMap()).containsKey(tableIdent);
- Assertions.assertThat(catalog.ageOf(tableIdent)).isPresent().get().isEqualTo(Duration.ZERO);
+ assertThat(catalog.cache().asMap()).containsKey(tableIdent);
+ assertThat(catalog.ageOf(tableIdent)).isPresent().get().isEqualTo(Duration.ZERO);
ticker.advance(HALF_OF_EXPIRATION);
- Assertions.assertThat(catalog.cache().asMap()).containsKey(tableIdent);
- Assertions.assertThat(catalog.ageOf(tableIdent))
- .isPresent()
- .get()
- .isEqualTo(HALF_OF_EXPIRATION);
- Assertions.assertThat(catalog.remainingAgeFor(tableIdent))
- .isPresent()
- .get()
- .isEqualTo(HALF_OF_EXPIRATION);
+ assertThat(catalog.cache().asMap()).containsKey(tableIdent);
+ assertThat(catalog.ageOf(tableIdent)).isPresent().get().isEqualTo(HALF_OF_EXPIRATION);
+ assertThat(catalog.remainingAgeFor(tableIdent)).isPresent().get().isEqualTo(HALF_OF_EXPIRATION);
Duration oneMinute = Duration.ofMinutes(1L);
ticker.advance(oneMinute);
- Assertions.assertThat(catalog.cache().asMap()).containsKey(tableIdent);
- Assertions.assertThat(catalog.ageOf(tableIdent))
+ assertThat(catalog.cache().asMap()).containsKey(tableIdent);
+ assertThat(catalog.ageOf(tableIdent))
.isPresent()
.get()
.isEqualTo(HALF_OF_EXPIRATION.plus(oneMinute));
- Assertions.assertThat(catalog.remainingAgeFor(tableIdent))
+ assertThat(catalog.remainingAgeFor(tableIdent))
.get()
.isEqualTo(HALF_OF_EXPIRATION.minus(oneMinute));
// Access the table via the catalog, which should refresh the TTL
Table table = catalog.loadTable(tableIdent);
- Assertions.assertThat(catalog.ageOf(tableIdent)).get().isEqualTo(Duration.ZERO);
- Assertions.assertThat(catalog.remainingAgeFor(tableIdent)).get().isEqualTo(EXPIRATION_TTL);
+ assertThat(catalog.ageOf(tableIdent)).get().isEqualTo(Duration.ZERO);
+ assertThat(catalog.remainingAgeFor(tableIdent)).get().isEqualTo(EXPIRATION_TTL);
ticker.advance(HALF_OF_EXPIRATION);
- Assertions.assertThat(catalog.ageOf(tableIdent)).get().isEqualTo(HALF_OF_EXPIRATION);
- Assertions.assertThat(catalog.remainingAgeFor(tableIdent)).get().isEqualTo(HALF_OF_EXPIRATION);
+ assertThat(catalog.ageOf(tableIdent)).get().isEqualTo(HALF_OF_EXPIRATION);
+ assertThat(catalog.remainingAgeFor(tableIdent)).get().isEqualTo(HALF_OF_EXPIRATION);
// Check that accessing the table object directly does not affect the cache TTL
table.refresh();
- Assertions.assertThat(catalog.ageOf(tableIdent)).get().isEqualTo(HALF_OF_EXPIRATION);
- Assertions.assertThat(catalog.remainingAgeFor(tableIdent)).get().isEqualTo(HALF_OF_EXPIRATION);
+ assertThat(catalog.ageOf(tableIdent)).get().isEqualTo(HALF_OF_EXPIRATION);
+ assertThat(catalog.remainingAgeFor(tableIdent)).get().isEqualTo(HALF_OF_EXPIRATION);
table.newAppend().appendFile(FILE_A).commit();
- Assertions.assertThat(catalog.ageOf(tableIdent)).get().isEqualTo(HALF_OF_EXPIRATION);
- Assertions.assertThat(catalog.remainingAgeFor(tableIdent)).get().isEqualTo(HALF_OF_EXPIRATION);
+ assertThat(catalog.ageOf(tableIdent)).get().isEqualTo(HALF_OF_EXPIRATION);
+ assertThat(catalog.remainingAgeFor(tableIdent)).get().isEqualTo(HALF_OF_EXPIRATION);
}
@Test
@@ -253,46 +243,46 @@
Namespace namespace = Namespace.of("db", "ns1", "ns2");
TableIdentifier tableIdent = TableIdentifier.of(namespace, "tbl");
Table table = catalog.createTable(tableIdent, SCHEMA, SPEC, ImmutableMap.of("key2", "value2"));
- Assertions.assertThat(catalog.cache().asMap()).containsKey(tableIdent);
+ assertThat(catalog.cache().asMap()).containsKey(tableIdent);
table.newAppend().appendFile(FILE_A).commit();
- Assertions.assertThat(catalog.cache().asMap()).containsKey(tableIdent);
- Assertions.assertThat(catalog.ageOf(tableIdent)).get().isEqualTo(Duration.ZERO);
+ assertThat(catalog.cache().asMap()).containsKey(tableIdent);
+ assertThat(catalog.ageOf(tableIdent)).get().isEqualTo(Duration.ZERO);
ticker.advance(HALF_OF_EXPIRATION);
- Assertions.assertThat(catalog.cache().asMap()).containsKey(tableIdent);
- Assertions.assertThat(catalog.ageOf(tableIdent)).get().isEqualTo(HALF_OF_EXPIRATION);
+ assertThat(catalog.cache().asMap()).containsKey(tableIdent);
+ assertThat(catalog.ageOf(tableIdent)).get().isEqualTo(HALF_OF_EXPIRATION);
// Load the metadata tables for the first time. Their age should be zero as they're new entries.
Arrays.stream(metadataTables(tableIdent)).forEach(catalog::loadTable);
- Assertions.assertThat(catalog.cache().asMap()).containsKeys(metadataTables(tableIdent));
- Assertions.assertThat(Arrays.stream(metadataTables(tableIdent)).map(catalog::ageOf))
+ assertThat(catalog.cache().asMap()).containsKeys(metadataTables(tableIdent));
+ assertThat(Arrays.stream(metadataTables(tableIdent)).map(catalog::ageOf))
.isNotEmpty()
.allMatch(age -> age.isPresent() && age.get().equals(Duration.ZERO));
- Assertions.assertThat(catalog.remainingAgeFor(tableIdent))
+ assertThat(catalog.remainingAgeFor(tableIdent))
.as("Loading a non-cached metadata table should refresh the main table's age")
.isEqualTo(Optional.of(EXPIRATION_TTL));
// Move time forward and access already cached metadata tables.
ticker.advance(HALF_OF_EXPIRATION);
Arrays.stream(metadataTables(tableIdent)).forEach(catalog::loadTable);
- Assertions.assertThat(Arrays.stream(metadataTables(tableIdent)).map(catalog::ageOf))
+ assertThat(Arrays.stream(metadataTables(tableIdent)).map(catalog::ageOf))
.isNotEmpty()
.allMatch(age -> age.isPresent() && age.get().equals(Duration.ZERO));
- Assertions.assertThat(catalog.remainingAgeFor(tableIdent))
+ assertThat(catalog.remainingAgeFor(tableIdent))
.as("Accessing a cached metadata table should not affect the main table's age")
.isEqualTo(Optional.of(HALF_OF_EXPIRATION));
// Move time forward so the data table drops.
ticker.advance(HALF_OF_EXPIRATION);
- Assertions.assertThat(catalog.cache().asMap()).doesNotContainKey(tableIdent);
+ assertThat(catalog.cache().asMap()).doesNotContainKey(tableIdent);
Arrays.stream(metadataTables(tableIdent))
.forEach(
metadataTable ->
- Assertions.assertThat(catalog.cache().asMap())
+ assertThat(catalog.cache().asMap())
.as(
"When a data table expires, its metadata tables should expire regardless of age")
.doesNotContainKeys(metadataTable));
@@ -335,8 +325,8 @@
}
}
executor.awaitTermination(2, TimeUnit.SECONDS);
- Assertions.assertThat(cacheGetCount).hasValue(numThreads / 2);
- Assertions.assertThat(cacheCleanupCount).hasValue(numThreads / 2);
+ assertThat(cacheGetCount).hasValue(numThreads / 2);
+ assertThat(cacheCleanupCount).hasValue(numThreads / 2);
executor.shutdown();
createdTables.forEach(table -> catalog.dropTable(table, true));
@@ -344,8 +334,7 @@
@Test
public void testCachingCatalogRejectsExpirationIntervalOfZero() {
- Assertions.assertThatThrownBy(
- () -> TestableCachingCatalog.wrap(hadoopCatalog(), Duration.ZERO, ticker))
+ assertThatThrownBy(() -> TestableCachingCatalog.wrap(hadoopCatalog(), Duration.ZERO, ticker))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage(
"When cache.expiration-interval-ms is set to 0, the catalog cache should be disabled. This indicates a bug.");
@@ -359,7 +348,7 @@
Duration.ofMillis(CatalogProperties.CACHE_EXPIRATION_INTERVAL_MS_OFF),
ticker);
- Assertions.assertThat(catalog.isCacheExpirationEnabled())
+ assertThat(catalog.isCacheExpirationEnabled())
.as(
"When a negative value is used as the expiration interval, the cache should not expire entries based on a TTL")
.isFalse();
@@ -374,10 +363,10 @@
Namespace namespace = Namespace.of("db", "ns1", "ns2");
TableIdentifier tableIdent = TableIdentifier.of(namespace, "tbl");
catalog.createTable(tableIdent, SCHEMA, SPEC, ImmutableMap.of("key2", "value2"));
- Assertions.assertThat(catalog.cache().asMap()).containsKey(tableIdent);
+ assertThat(catalog.cache().asMap()).containsKey(tableIdent);
catalog.invalidateTable(tableIdent);
- Assertions.assertThat(catalog.cache().asMap()).doesNotContainKey(tableIdent);
- Assertions.assertThat(wrappedCatalog.cache().asMap()).doesNotContainKey(tableIdent);
+ assertThat(catalog.cache().asMap()).doesNotContainKey(tableIdent);
+ assertThat(wrappedCatalog.cache().asMap()).doesNotContainKey(tableIdent);
}
public static TableIdentifier[] metadataTables(TableIdentifier tableIdent) {
diff --git a/core/src/test/java/org/apache/iceberg/hadoop/TestCatalogUtilDropTable.java b/core/src/test/java/org/apache/iceberg/hadoop/TestCatalogUtilDropTable.java
index e3d2b4d..79f30e1 100644
--- a/core/src/test/java/org/apache/iceberg/hadoop/TestCatalogUtilDropTable.java
+++ b/core/src/test/java/org/apache/iceberg/hadoop/TestCatalogUtilDropTable.java
@@ -18,6 +18,8 @@
*/
package org.apache.iceberg.hadoop;
+import static org.assertj.core.api.Assertions.assertThat;
+
import java.io.IOException;
import java.io.UncheckedIOException;
import java.nio.ByteBuffer;
@@ -46,7 +48,6 @@
import org.apache.iceberg.puffin.PuffinWriter;
import org.apache.iceberg.relocated.com.google.common.collect.ImmutableList;
import org.apache.iceberg.relocated.com.google.common.collect.Sets;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
import org.mockito.ArgumentCaptor;
import org.mockito.ArgumentMatchers;
@@ -83,12 +84,12 @@
Set<String> statsLocations = statsLocations(tableMetadata);
Set<String> partitionStatsLocations = partitionStatsLocations(tableMetadata);
- Assertions.assertThat(manifestListLocations).as("should have 2 manifest lists").hasSize(2);
- Assertions.assertThat(metadataLocations).as("should have 5 metadata locations").hasSize(5);
- Assertions.assertThat(statsLocations)
+ assertThat(manifestListLocations).as("should have 2 manifest lists").hasSize(2);
+ assertThat(metadataLocations).as("should have 5 metadata locations").hasSize(5);
+ assertThat(statsLocations)
.as("should have 1 stats file")
.containsExactly(statisticsFile.path());
- Assertions.assertThat(partitionStatsLocations)
+ assertThat(partitionStatsLocations)
.as("should have 1 partition stats file")
.containsExactly(partitionStatisticsFile.path());
@@ -109,22 +110,20 @@
.deleteFile(argumentCaptor.capture());
List<String> deletedPaths = argumentCaptor.getAllValues();
- Assertions.assertThat(deletedPaths)
+ assertThat(deletedPaths)
.as("should contain all created manifest lists")
.containsAll(manifestListLocations);
- Assertions.assertThat(deletedPaths)
+ assertThat(deletedPaths)
.as("should contain all created manifests")
.containsAll(manifestLocations);
- Assertions.assertThat(deletedPaths)
- .as("should contain all created data")
- .containsAll(dataLocations);
- Assertions.assertThat(deletedPaths)
+ assertThat(deletedPaths).as("should contain all created data").containsAll(dataLocations);
+ assertThat(deletedPaths)
.as("should contain all created metadata locations")
.containsAll(metadataLocations);
- Assertions.assertThat(deletedPaths)
+ assertThat(deletedPaths)
.as("should contain all created statistics")
.containsAll(statsLocations);
- Assertions.assertThat(deletedPaths)
+ assertThat(deletedPaths)
.as("should contain all created partition stats files")
.containsAll(partitionStatsLocations);
}
@@ -162,8 +161,8 @@
Set<String> manifestListLocations = manifestListLocations(snapshotSet);
Set<String> manifestLocations = manifestLocations(snapshotSet, table.io());
Set<String> metadataLocations = metadataLocations(tableMetadata);
- Assertions.assertThat(manifestListLocations).as("should have 2 manifest lists").hasSize(2);
- Assertions.assertThat(metadataLocations).as("should have 4 metadata locations").hasSize(4);
+ assertThat(manifestListLocations).as("should have 2 manifest lists").hasSize(2);
+ assertThat(metadataLocations).as("should have 4 metadata locations").hasSize(4);
FileIO fileIO = createMockFileIO(table.io());
@@ -177,13 +176,13 @@
.deleteFile(argumentCaptor.capture());
List<String> deletedPaths = argumentCaptor.getAllValues();
- Assertions.assertThat(deletedPaths)
+ assertThat(deletedPaths)
.as("should contain all created manifest lists")
.containsAll(manifestListLocations);
- Assertions.assertThat(deletedPaths)
+ assertThat(deletedPaths)
.as("should contain all created manifests")
.containsAll(manifestLocations);
- Assertions.assertThat(deletedPaths)
+ assertThat(deletedPaths)
.as("should contain all created metadata locations")
.containsAll(metadataLocations);
}
diff --git a/core/src/test/java/org/apache/iceberg/hadoop/TestHadoopCatalog.java b/core/src/test/java/org/apache/iceberg/hadoop/TestHadoopCatalog.java
index 8365129..fbd6f83 100644
--- a/core/src/test/java/org/apache/iceberg/hadoop/TestHadoopCatalog.java
+++ b/core/src/test/java/org/apache/iceberg/hadoop/TestHadoopCatalog.java
@@ -20,6 +20,8 @@
import static org.apache.iceberg.NullOrder.NULLS_FIRST;
import static org.apache.iceberg.SortDirection.ASC;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
@@ -51,7 +53,6 @@
import org.apache.iceberg.relocated.com.google.common.collect.Sets;
import org.apache.iceberg.transforms.Transform;
import org.apache.iceberg.transforms.Transforms;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.ValueSource;
@@ -73,11 +74,9 @@
.withProperties(ImmutableMap.of("key2", "value2"))
.create();
- Assertions.assertThat(table.schema().toString()).isEqualTo(TABLE_SCHEMA.toString());
- Assertions.assertThat(table.spec().fields()).hasSize(1);
- Assertions.assertThat(table.properties())
- .containsEntry("key1", "value1")
- .containsEntry("key2", "value2");
+ assertThat(table.schema().toString()).isEqualTo(TABLE_SCHEMA.toString());
+ assertThat(table.spec().fields()).hasSize(1);
+ assertThat(table.properties()).containsEntry("key1", "value1").containsEntry("key2", "value2");
}
@ParameterizedTest
@@ -94,8 +93,8 @@
txn.commitTransaction();
Table table = catalog.loadTable(tableIdent);
- Assertions.assertThat(table.schema().toString()).isEqualTo(TABLE_SCHEMA.toString());
- Assertions.assertThat(table.spec().isUnpartitioned()).isTrue();
+ assertThat(table.schema().toString()).isEqualTo(TABLE_SCHEMA.toString());
+ assertThat(table.spec().isUnpartitioned()).isTrue();
}
@ParameterizedTest
@@ -117,14 +116,14 @@
createTxn.commitTransaction();
Table table = catalog.loadTable(tableIdent);
- Assertions.assertThat(table.currentSnapshot()).isNotNull();
+ assertThat(table.currentSnapshot()).isNotNull();
Transaction replaceTxn =
catalog.buildTable(tableIdent, SCHEMA).withProperty("key2", "value2").replaceTransaction();
replaceTxn.commitTransaction();
table = catalog.loadTable(tableIdent);
- Assertions.assertThat(table.currentSnapshot()).isNull();
+ assertThat(table.currentSnapshot()).isNull();
if (formatVersion == 1) {
PartitionSpec v1Expected =
@@ -132,18 +131,14 @@
.alwaysNull("data", "data_bucket")
.withSpecId(1)
.build();
- Assertions.assertThat(table.spec())
+ assertThat(table.spec())
.as("Table should have a spec with one void field")
.isEqualTo(v1Expected);
} else {
- Assertions.assertThat(table.spec().isUnpartitioned())
- .as("Table spec should be unpartitioned")
- .isTrue();
+ assertThat(table.spec().isUnpartitioned()).as("Table spec should be unpartitioned").isTrue();
}
- Assertions.assertThat(table.properties())
- .containsEntry("key1", "value1")
- .containsEntry("key2", "value2");
+ assertThat(table.properties()).containsEntry("key1", "value1").containsEntry("key2", "value2");
}
@Test
@@ -151,17 +146,16 @@
HadoopCatalog catalog = hadoopCatalog();
TableIdentifier tableIdent = TableIdentifier.of("db", "ns1", "ns2", "tbl");
- Assertions.assertThatThrownBy(
- () -> catalog.buildTable(tableIdent, SCHEMA).withLocation("custom").create())
+ assertThatThrownBy(() -> catalog.buildTable(tableIdent, SCHEMA).withLocation("custom").create())
.isInstanceOf(IllegalArgumentException.class)
.hasMessageStartingWith("Cannot set a custom location for a path-based table");
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() -> catalog.buildTable(tableIdent, SCHEMA).withLocation("custom").createTransaction())
.isInstanceOf(IllegalArgumentException.class)
.hasMessageStartingWith("Cannot set a custom location for a path-based table");
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() ->
catalog
.buildTable(tableIdent, SCHEMA)
@@ -177,8 +171,8 @@
Table table = hadoopCatalog().createTable(tableIdent, SCHEMA, SPEC);
SortOrder sortOrder = table.sortOrder();
- Assertions.assertThat(sortOrder.orderId()).as("Order ID must match").isEqualTo(0);
- Assertions.assertThat(sortOrder.isUnsorted()).as("Order must be unsorted").isTrue();
+ assertThat(sortOrder.orderId()).as("Order ID must match").isEqualTo(0);
+ assertThat(sortOrder.isUnsorted()).as("Order must be unsorted").isTrue();
}
@Test
@@ -193,16 +187,14 @@
.create();
SortOrder sortOrder = table.sortOrder();
- Assertions.assertThat(sortOrder.orderId()).as("Order ID must match").isEqualTo(1);
- Assertions.assertThat(sortOrder.fields().size()).as("Order must have 1 field").isEqualTo(1);
- Assertions.assertThat(sortOrder.fields().get(0).direction())
- .as("Direction must match")
- .isEqualTo(ASC);
- Assertions.assertThat(sortOrder.fields().get(0).nullOrder())
+ assertThat(sortOrder.orderId()).as("Order ID must match").isEqualTo(1);
+ assertThat(sortOrder.fields().size()).as("Order must have 1 field").isEqualTo(1);
+ assertThat(sortOrder.fields().get(0).direction()).as("Direction must match").isEqualTo(ASC);
+ assertThat(sortOrder.fields().get(0).nullOrder())
.as("Null order must match")
.isEqualTo(NULLS_FIRST);
Transform<?, ?> transform = Transforms.identity();
- Assertions.assertThat(sortOrder.fields().get(0).transform())
+ assertThat(sortOrder.fields().get(0).transform())
.as("Transform must match")
.isEqualTo(transform);
}
@@ -215,10 +207,10 @@
String metaLocation = catalog.defaultWarehouseLocation(testTable);
FileSystem fs = Util.getFs(new Path(metaLocation), catalog.getConf());
- Assertions.assertThat(fs.isDirectory(new Path(metaLocation))).isTrue();
+ assertThat(fs.isDirectory(new Path(metaLocation))).isTrue();
catalog.dropTable(testTable);
- Assertions.assertThat(fs.isDirectory(new Path(metaLocation))).isFalse();
+ assertThat(fs.isDirectory(new Path(metaLocation))).isFalse();
}
@Test
@@ -234,8 +226,8 @@
catalog.initialize("hadoop", catalogProps);
FileIO fileIO = catalog.newTableOps(tableIdent).io();
- Assertions.assertThat(fileIO.properties()).containsEntry("warehouse", "/hive/testwarehouse");
- Assertions.assertThat(fileIO.properties()).containsEntry("io.manifest.cache-enabled", "true");
+ assertThat(fileIO.properties()).containsEntry("warehouse", "/hive/testwarehouse");
+ assertThat(fileIO.properties()).containsEntry("io.manifest.cache-enabled", "true");
}
@Test
@@ -245,15 +237,15 @@
TableIdentifier testTable = TableIdentifier.of("tbl");
Table table = catalog.createTable(testTable, SCHEMA, PartitionSpec.unpartitioned());
- Assertions.assertThat(table.schema().toString()).isEqualTo(TABLE_SCHEMA.toString());
- Assertions.assertThat(table.name()).isEqualTo("hadoop.tbl");
+ assertThat(table.schema().toString()).isEqualTo(TABLE_SCHEMA.toString());
+ assertThat(table.name()).isEqualTo("hadoop.tbl");
String metaLocation = catalog.defaultWarehouseLocation(testTable);
FileSystem fs = Util.getFs(new Path(metaLocation), catalog.getConf());
- Assertions.assertThat(fs.isDirectory(new Path(metaLocation))).isTrue();
+ assertThat(fs.isDirectory(new Path(metaLocation))).isTrue();
catalog.dropTable(testTable);
- Assertions.assertThat(fs.isDirectory(new Path(metaLocation))).isFalse();
+ assertThat(fs.isDirectory(new Path(metaLocation))).isFalse();
}
@Test
@@ -264,10 +256,10 @@
String metaLocation = catalog.defaultWarehouseLocation(testTable);
FileSystem fs = Util.getFs(new Path(metaLocation), catalog.getConf());
- Assertions.assertThat(fs.isDirectory(new Path(metaLocation))).isTrue();
+ assertThat(fs.isDirectory(new Path(metaLocation))).isTrue();
catalog.dropTable(testTable);
- Assertions.assertThat(fs.isDirectory(new Path(metaLocation))).isFalse();
+ assertThat(fs.isDirectory(new Path(metaLocation))).isFalse();
}
@Test
@@ -276,14 +268,14 @@
TableIdentifier testTable = TableIdentifier.of("db", "ns1", "ns2", "tbl");
String metaLocation = catalog.defaultWarehouseLocation(testTable);
// testing with non-existent directory
- Assertions.assertThat(catalog.dropTable(testTable)).isFalse();
+ assertThat(catalog.dropTable(testTable)).isFalse();
FileSystem fs = Util.getFs(new Path(metaLocation), catalog.getConf());
fs.mkdirs(new Path(metaLocation));
- Assertions.assertThat(fs.isDirectory(new Path(metaLocation))).isTrue();
+ assertThat(fs.isDirectory(new Path(metaLocation))).isTrue();
- Assertions.assertThat(catalog.dropTable(testTable)).isFalse();
- Assertions.assertThat(fs.isDirectory(new Path(metaLocation))).isTrue();
+ assertThat(catalog.dropTable(testTable)).isFalse();
+ assertThat(fs.isDirectory(new Path(metaLocation))).isTrue();
}
@Test
@@ -291,8 +283,7 @@
HadoopCatalog catalog = hadoopCatalog();
TableIdentifier testTable = TableIdentifier.of("db", "tbl1");
catalog.createTable(testTable, SCHEMA, PartitionSpec.unpartitioned());
- Assertions.assertThatThrownBy(
- () -> catalog.renameTable(testTable, TableIdentifier.of("db", "tbl2")))
+ assertThatThrownBy(() -> catalog.renameTable(testTable, TableIdentifier.of("db", "tbl2")))
.isInstanceOf(UnsupportedOperationException.class)
.hasMessage("Cannot rename Hadoop tables");
}
@@ -311,13 +302,13 @@
List<TableIdentifier> tbls1 = catalog.listTables(Namespace.of("db"));
Set<String> tblSet = Sets.newHashSet(tbls1.stream().map(t -> t.name()).iterator());
- Assertions.assertThat(tblSet).hasSize(2).contains("tbl1").contains("tbl2");
+ assertThat(tblSet).hasSize(2).contains("tbl1").contains("tbl2");
List<TableIdentifier> tbls2 = catalog.listTables(Namespace.of("db", "ns1"));
- Assertions.assertThat(tbls2).hasSize(1);
- Assertions.assertThat(tbls2.get(0).name()).isEqualTo("tbl3");
+ assertThat(tbls2).hasSize(1);
+ assertThat(tbls2.get(0).name()).isEqualTo("tbl3");
- Assertions.assertThatThrownBy(() -> catalog.listTables(Namespace.of("db", "ns1", "ns2")))
+ assertThatThrownBy(() -> catalog.listTables(Namespace.of("db", "ns1", "ns2")))
.isInstanceOf(NoSuchNamespaceException.class)
.hasMessage("Namespace does not exist: db.ns1.ns2");
}
@@ -331,9 +322,7 @@
create.table().locationProvider(); // NPE triggered if not handled appropriately
create.commitTransaction();
- Assertions.assertThat(catalog.listTables(Namespace.of("ns1", "ns2")))
- .as("1 table expected")
- .hasSize(1);
+ assertThat(catalog.listTables(Namespace.of("ns1", "ns2"))).as("1 table expected").hasSize(1);
catalog.dropTable(tableIdent, true);
}
@@ -352,13 +341,13 @@
String metaLocation1 = warehouseLocation + "/" + "db/ns1/ns2";
FileSystem fs1 = Util.getFs(new Path(metaLocation1), catalog.getConf());
- Assertions.assertThat(fs1.isDirectory(new Path(metaLocation1))).isTrue();
+ assertThat(fs1.isDirectory(new Path(metaLocation1))).isTrue();
String metaLocation2 = warehouseLocation + "/" + "db/ns2/ns3";
FileSystem fs2 = Util.getFs(new Path(metaLocation2), catalog.getConf());
- Assertions.assertThat(fs2.isDirectory(new Path(metaLocation2))).isTrue();
+ assertThat(fs2.isDirectory(new Path(metaLocation2))).isTrue();
- Assertions.assertThatThrownBy(() -> catalog.createNamespace(tbl1.namespace()))
+ assertThatThrownBy(() -> catalog.createNamespace(tbl1.namespace()))
.isInstanceOf(AlreadyExistsException.class)
.hasMessage("Namespace already exists: " + tbl1.namespace());
}
@@ -378,25 +367,21 @@
List<Namespace> nsp1 = catalog.listNamespaces(Namespace.of("db"));
Set<String> tblSet = Sets.newHashSet(nsp1.stream().map(t -> t.toString()).iterator());
- Assertions.assertThat(tblSet)
- .hasSize(3)
- .contains("db.ns1")
- .contains("db.ns2")
- .contains("db.ns3");
+ assertThat(tblSet).hasSize(3).contains("db.ns1").contains("db.ns2").contains("db.ns3");
List<Namespace> nsp2 = catalog.listNamespaces(Namespace.of("db", "ns1"));
- Assertions.assertThat(nsp2).hasSize(1);
- Assertions.assertThat(nsp2.get(0).toString()).isEqualTo("db.ns1.ns2");
+ assertThat(nsp2).hasSize(1);
+ assertThat(nsp2.get(0).toString()).isEqualTo("db.ns1.ns2");
List<Namespace> nsp3 = catalog.listNamespaces();
Set<String> tblSet2 = Sets.newHashSet(nsp3.stream().map(t -> t.toString()).iterator());
- Assertions.assertThat(tblSet2).hasSize(2).contains("db").contains("db2");
+ assertThat(tblSet2).hasSize(2).contains("db").contains("db2");
List<Namespace> nsp4 = catalog.listNamespaces();
Set<String> tblSet3 = Sets.newHashSet(nsp4.stream().map(t -> t.toString()).iterator());
- Assertions.assertThat(tblSet3).hasSize(2).contains("db").contains("db2");
+ assertThat(tblSet3).hasSize(2).contains("db").contains("db2");
- Assertions.assertThatThrownBy(() -> catalog.listNamespaces(Namespace.of("db", "db2", "ns2")))
+ assertThatThrownBy(() -> catalog.listNamespaces(Namespace.of("db", "db2", "ns2")))
.isInstanceOf(NoSuchNamespaceException.class)
.hasMessage("Namespace does not exist: db.db2.ns2");
}
@@ -414,8 +399,7 @@
.forEach(t -> catalog.createTable(t, SCHEMA, PartitionSpec.unpartitioned()));
catalog.loadNamespaceMetadata(Namespace.of("db"));
- Assertions.assertThatThrownBy(
- () -> catalog.loadNamespaceMetadata(Namespace.of("db", "db2", "ns2")))
+ assertThatThrownBy(() -> catalog.loadNamespaceMetadata(Namespace.of("db", "db2", "ns2")))
.isInstanceOf(NoSuchNamespaceException.class)
.hasMessage("Namespace does not exist: db.db2.ns2");
}
@@ -431,10 +415,10 @@
Lists.newArrayList(tbl1, tbl2, tbl3, tbl4)
.forEach(t -> catalog.createTable(t, SCHEMA, PartitionSpec.unpartitioned()));
- Assertions.assertThat(catalog.namespaceExists(Namespace.of("db", "ns1", "ns2")))
+ assertThat(catalog.namespaceExists(Namespace.of("db", "ns1", "ns2")))
.as("Should be true as namespace exists")
.isTrue();
- Assertions.assertThat(catalog.namespaceExists(Namespace.of("db", "db2", "ns2")))
+ assertThat(catalog.namespaceExists(Namespace.of("db", "db2", "ns2")))
.as("Should be false as namespace doesn't exist")
.isFalse();
}
@@ -442,7 +426,7 @@
@Test
public void testAlterNamespaceMeta() throws IOException {
HadoopCatalog catalog = hadoopCatalog();
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() ->
catalog.setProperties(
Namespace.of("db", "db2", "ns2"), ImmutableMap.of("property", "test")))
@@ -466,19 +450,19 @@
Lists.newArrayList(tbl1, tbl2)
.forEach(t -> catalog.createTable(t, SCHEMA, PartitionSpec.unpartitioned()));
- Assertions.assertThatThrownBy(() -> catalog.dropNamespace(Namespace.of("db")))
+ assertThatThrownBy(() -> catalog.dropNamespace(Namespace.of("db")))
.isInstanceOf(NamespaceNotEmptyException.class)
.hasMessage("Namespace " + namespace1 + " is not empty.");
- Assertions.assertThat(catalog.dropNamespace(Namespace.of("db2")))
+ assertThat(catalog.dropNamespace(Namespace.of("db2")))
.as("Should fail to drop namespace that doesn't exist")
.isFalse();
- Assertions.assertThat(catalog.dropTable(tbl1)).isTrue();
- Assertions.assertThat(catalog.dropTable(tbl2)).isTrue();
- Assertions.assertThat(catalog.dropNamespace(namespace2)).isTrue();
- Assertions.assertThat(catalog.dropNamespace(namespace1)).isTrue();
+ assertThat(catalog.dropTable(tbl1)).isTrue();
+ assertThat(catalog.dropTable(tbl2)).isTrue();
+ assertThat(catalog.dropNamespace(namespace2)).isTrue();
+ assertThat(catalog.dropNamespace(namespace1)).isTrue();
String metaLocation = warehouseLocation + "/" + "db";
FileSystem fs = Util.getFs(new Path(metaLocation), catalog.getConf());
- Assertions.assertThat(fs.isDirectory(new Path(metaLocation))).isFalse();
+ assertThat(fs.isDirectory(new Path(metaLocation))).isFalse();
}
@Test
@@ -498,8 +482,8 @@
}
// Check the result of the findVersion(), and load the table and check the current snapshotId
- Assertions.assertThat(tableOperations.findVersion()).isEqualTo(1);
- Assertions.assertThat(TABLES.load(tableLocation).currentSnapshot().snapshotId())
+ assertThat(tableOperations.findVersion()).isEqualTo(1);
+ assertThat(TABLES.load(tableLocation).currentSnapshot().snapshotId())
.isEqualTo(secondSnapshotId);
// Write newer data to confirm that we are writing the correct file
@@ -509,8 +493,8 @@
}
// Check the result of the findVersion(), and load the table and check the current snapshotId
- Assertions.assertThat(tableOperations.findVersion()).isEqualTo(3);
- Assertions.assertThat(TABLES.load(tableLocation).currentSnapshot().snapshotId())
+ assertThat(tableOperations.findVersion()).isEqualTo(3);
+ assertThat(TABLES.load(tableLocation).currentSnapshot().snapshotId())
.isEqualTo(secondSnapshotId);
// Write an empty version hint file
@@ -518,16 +502,16 @@
io.newOutputFile(versionHintFile.getPath()).create().close();
// Check the result of the findVersion(), and load the table and check the current snapshotId
- Assertions.assertThat(tableOperations.findVersion()).isEqualTo(3);
- Assertions.assertThat(TABLES.load(tableLocation).currentSnapshot().snapshotId())
+ assertThat(tableOperations.findVersion()).isEqualTo(3);
+ assertThat(TABLES.load(tableLocation).currentSnapshot().snapshotId())
.isEqualTo(secondSnapshotId);
// Just delete the file
io.deleteFile(versionHintFile.getPath());
// Check the result of the versionHint(), and load the table and check the current snapshotId
- Assertions.assertThat(tableOperations.findVersion()).isEqualTo(3);
- Assertions.assertThat(TABLES.load(tableLocation).currentSnapshot().snapshotId())
+ assertThat(tableOperations.findVersion()).isEqualTo(3);
+ assertThat(TABLES.load(tableLocation).currentSnapshot().snapshotId())
.isEqualTo(secondSnapshotId);
}
@@ -548,8 +532,8 @@
io.deleteFile(tableOperations.getMetadataFile(1).toString());
// Check the result of the findVersion(), and load the table and check the current snapshotId
- Assertions.assertThat(tableOperations.findVersion()).isEqualTo(3);
- Assertions.assertThat(TABLES.load(tableLocation).currentSnapshot().snapshotId())
+ assertThat(tableOperations.findVersion()).isEqualTo(3);
+ assertThat(TABLES.load(tableLocation).currentSnapshot().snapshotId())
.isEqualTo(secondSnapshotId);
// Remove all the version files, and see if we can recover. Hint... not :)
@@ -558,8 +542,8 @@
// Check that we got 0 findVersion, and a NoSuchTableException is thrown when trying to load the
// table
- Assertions.assertThat(tableOperations.findVersion()).isEqualTo(0);
- Assertions.assertThatThrownBy(() -> TABLES.load(tableLocation))
+ assertThat(tableOperations.findVersion()).isEqualTo(0);
+ assertThatThrownBy(() -> TABLES.load(tableLocation))
.isInstanceOf(NoSuchTableException.class)
.hasMessageStartingWith("Table does not exist");
}
@@ -571,12 +555,12 @@
catalog.buildTable(tableIdent, SCHEMA).withPartitionSpec(SPEC).create();
Table table = catalog.loadTable(tableIdent);
- Assertions.assertThat(table.name()).isEqualTo("hadoop.db.ns1.ns2.tbl");
+ assertThat(table.name()).isEqualTo("hadoop.db.ns1.ns2.tbl");
TableIdentifier snapshotsTableIdent =
TableIdentifier.of("db", "ns1", "ns2", "tbl", "snapshots");
Table snapshotsTable = catalog.loadTable(snapshotsTableIdent);
- Assertions.assertThat(snapshotsTable.name()).isEqualTo("hadoop.db.ns1.ns2.tbl.snapshots");
+ assertThat(snapshotsTable.name()).isEqualTo("hadoop.db.ns1.ns2.tbl.snapshots");
}
private static void addVersionsToTable(Table table) {
@@ -619,21 +603,21 @@
.withProperty("key5", "table-key5")
.create();
- Assertions.assertThat(table.properties().get("key1"))
+ assertThat(table.properties().get("key1"))
.as("Table defaults set for the catalog must be added to the table properties.")
.isEqualTo("catalog-default-key1");
- Assertions.assertThat(table.properties().get("key2"))
+ assertThat(table.properties().get("key2"))
.as("Table property must override table default properties set at catalog level.")
.isEqualTo("table-key2");
- Assertions.assertThat(table.properties().get("key3"))
+ assertThat(table.properties().get("key3"))
.as(
"Table property override set at catalog level must override table default"
+ " properties set at catalog level and table property specified.")
.isEqualTo("catalog-override-key3");
- Assertions.assertThat(table.properties().get("key4"))
+ assertThat(table.properties().get("key4"))
.as("Table override not in table props or defaults should be added to table properties")
.isEqualTo("catalog-override-key4");
- Assertions.assertThat(table.properties().get("key5"))
+ assertThat(table.properties().get("key5"))
.as(
"Table properties without any catalog level default or override should be added to table"
+ " properties.")
@@ -649,10 +633,10 @@
Table registeringTable = catalog.loadTable(identifier);
TableOperations ops = ((HasTableOperations) registeringTable).operations();
String metadataLocation = ((HadoopTableOperations) ops).current().metadataFileLocation();
- Assertions.assertThat(catalog.registerTable(identifier2, metadataLocation)).isNotNull();
- Assertions.assertThat(catalog.loadTable(identifier2)).isNotNull();
- Assertions.assertThat(catalog.dropTable(identifier)).isTrue();
- Assertions.assertThat(catalog.dropTable(identifier2)).isTrue();
+ assertThat(catalog.registerTable(identifier2, metadataLocation)).isNotNull();
+ assertThat(catalog.loadTable(identifier2)).isNotNull();
+ assertThat(catalog.dropTable(identifier)).isTrue();
+ assertThat(catalog.dropTable(identifier2)).isTrue();
}
@Test
@@ -663,9 +647,9 @@
Table registeringTable = catalog.loadTable(identifier);
TableOperations ops = ((HasTableOperations) registeringTable).operations();
String metadataLocation = ((HadoopTableOperations) ops).current().metadataFileLocation();
- Assertions.assertThatThrownBy(() -> catalog.registerTable(identifier, metadataLocation))
+ assertThatThrownBy(() -> catalog.registerTable(identifier, metadataLocation))
.isInstanceOf(AlreadyExistsException.class)
.hasMessage("Table already exists: a.t1");
- Assertions.assertThat(catalog.dropTable(identifier)).isTrue();
+ assertThat(catalog.dropTable(identifier)).isTrue();
}
}
diff --git a/core/src/test/java/org/apache/iceberg/hadoop/TestHadoopCommits.java b/core/src/test/java/org/apache/iceberg/hadoop/TestHadoopCommits.java
index b3ddc09..60bef7f 100644
--- a/core/src/test/java/org/apache/iceberg/hadoop/TestHadoopCommits.java
+++ b/core/src/test/java/org/apache/iceberg/hadoop/TestHadoopCommits.java
@@ -21,6 +21,8 @@
import static org.apache.iceberg.TableProperties.COMMIT_NUM_RETRIES;
import static org.apache.iceberg.types.Types.NestedField.optional;
import static org.apache.iceberg.types.Types.NestedField.required;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
import static org.mockito.Mockito.any;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.mock;
@@ -58,7 +60,6 @@
import org.apache.iceberg.relocated.com.google.common.collect.Sets;
import org.apache.iceberg.types.Types;
import org.apache.iceberg.util.Tasks;
-import org.assertj.core.api.Assertions;
import org.awaitility.Awaitility;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.io.TempDir;
@@ -70,53 +71,53 @@
public void testCreateTable() throws Exception {
PartitionSpec expectedSpec = PartitionSpec.builderFor(TABLE_SCHEMA).bucket("data", 16).build();
- Assertions.assertThat(table.schema().asStruct())
+ assertThat(table.schema().asStruct())
.as("Table schema should match schema with reassigned ids")
.isEqualTo(TABLE_SCHEMA.asStruct());
- Assertions.assertThat(table.spec())
+ assertThat(table.spec())
.as("Table partition spec should match with reassigned ids")
.isEqualTo(expectedSpec);
List<FileScanTask> tasks = Lists.newArrayList(table.newScan().planFiles());
- Assertions.assertThat(tasks).as("Should not create any scan tasks").isEmpty();
- Assertions.assertThat(tableDir).as("Table location should exist").exists();
- Assertions.assertThat(metadataDir).as("Should create metadata folder").exists().isDirectory();
- Assertions.assertThat(version(1)).as("Should create v1 metadata").exists().isFile();
- Assertions.assertThat(version(2)).as("Should not create v2 or newer versions").doesNotExist();
- Assertions.assertThat(versionHintFile).as("Should create version hint file").exists();
- Assertions.assertThat(readVersionHint())
+ assertThat(tasks).as("Should not create any scan tasks").isEmpty();
+ assertThat(tableDir).as("Table location should exist").exists();
+ assertThat(metadataDir).as("Should create metadata folder").exists().isDirectory();
+ assertThat(version(1)).as("Should create v1 metadata").exists().isFile();
+ assertThat(version(2)).as("Should not create v2 or newer versions").doesNotExist();
+ assertThat(versionHintFile).as("Should create version hint file").exists();
+ assertThat(readVersionHint())
.as("Should write the current version to the hint file")
.isEqualTo(1);
List<File> manifests = listManifestFiles();
- Assertions.assertThat(manifests).as("Should contain 0 Avro manifest files").isEmpty();
+ assertThat(manifests).as("Should contain 0 Avro manifest files").isEmpty();
}
@Test
public void testSchemaUpdate() throws Exception {
- Assertions.assertThat(version(1)).as("Should create v1 metadata").exists().isFile();
- Assertions.assertThat(version(2)).as("Should not create v2 or newer versions").doesNotExist();
+ assertThat(version(1)).as("Should create v1 metadata").exists().isFile();
+ assertThat(version(2)).as("Should not create v2 or newer versions").doesNotExist();
table.updateSchema().addColumn("n", Types.IntegerType.get()).commit();
- Assertions.assertThat(version(2)).as("Should create v2 for the update").exists().isFile();
- Assertions.assertThat(readVersionHint())
+ assertThat(version(2)).as("Should create v2 for the update").exists().isFile();
+ assertThat(readVersionHint())
.as("Should write the current version to the hint file")
.isEqualTo(2);
- Assertions.assertThat(table.schema().asStruct())
+ assertThat(table.schema().asStruct())
.as("Table schema should match schema with reassigned ids")
.isEqualTo(UPDATED_SCHEMA.asStruct());
List<FileScanTask> tasks = Lists.newArrayList(table.newScan().planFiles());
- Assertions.assertThat(tasks).as("Should not create any scan tasks").isEmpty();
+ assertThat(tasks).as("Should not create any scan tasks").isEmpty();
List<File> manifests = listManifestFiles();
- Assertions.assertThat(manifests).as("Should contain 0 Avro manifest files").isEmpty();
+ assertThat(manifests).as("Should contain 0 Avro manifest files").isEmpty();
}
@Test
public void testSchemaUpdateComplexType() throws Exception {
- Assertions.assertThat(version(1)).as("Should create v1 metadata").exists().isFile();
- Assertions.assertThat(version(2)).as("Should not create v2 or newer versions").doesNotExist();
+ assertThat(version(1)).as("Should create v1 metadata").exists().isFile();
+ assertThat(version(2)).as("Should not create v2 or newer versions").doesNotExist();
Types.StructType complexColumn =
Types.StructType.of(
@@ -146,25 +147,25 @@
table.updateSchema().addColumn("complex", complexColumn).commit();
- Assertions.assertThat(version(2)).as("Should create v2 for the update").exists().isFile();
- Assertions.assertThat(readVersionHint())
+ assertThat(version(2)).as("Should create v2 for the update").exists().isFile();
+ assertThat(readVersionHint())
.as("Should write the current version to the hint file")
.isEqualTo(2);
- Assertions.assertThat(table.schema().asStruct())
+ assertThat(table.schema().asStruct())
.as("Table schema should match schema with reassigned ids")
.isEqualTo(updatedSchema.asStruct());
List<FileScanTask> tasks = Lists.newArrayList(table.newScan().planFiles());
- Assertions.assertThat(tasks).as("Should not create any scan tasks").isEmpty();
+ assertThat(tasks).as("Should not create any scan tasks").isEmpty();
List<File> manifests = listManifestFiles();
- Assertions.assertThat(manifests).as("Should contain 0 Avro manifest files").isEmpty();
+ assertThat(manifests).as("Should contain 0 Avro manifest files").isEmpty();
}
@Test
public void testSchemaUpdateIdentifierFields() throws Exception {
- Assertions.assertThat(version(1)).as("Should create v1 metadata").exists().isFile();
- Assertions.assertThat(version(2)).as("Should not create v2 or newer versions").doesNotExist();
+ assertThat(version(1)).as("Should create v1 metadata").exists().isFile();
+ assertThat(version(2)).as("Should not create v2 or newer versions").doesNotExist();
Schema updatedSchema =
new Schema(
@@ -175,14 +176,14 @@
table.updateSchema().setIdentifierFields("id").commit();
- Assertions.assertThat(version(2)).as("Should create v2 for the update").exists().isFile();
- Assertions.assertThat(readVersionHint())
+ assertThat(version(2)).as("Should create v2 for the update").exists().isFile();
+ assertThat(readVersionHint())
.as("Should write the current version to the hint file")
.isEqualTo(2);
- Assertions.assertThat(table.schema().asStruct())
+ assertThat(table.schema().asStruct())
.as("Table schema should match schema with reassigned ids")
.isEqualTo(updatedSchema.asStruct());
- Assertions.assertThat(table.schema().identifierFieldIds())
+ assertThat(table.schema().identifierFieldIds())
.as("Identifier fields should match schema with reassigned ids")
.isEqualTo(updatedSchema.identifierFieldIds());
}
@@ -193,25 +194,25 @@
UpdateSchema update = table.updateSchema().addColumn("n", Types.IntegerType.get());
update.apply();
- Assertions.assertThat(version(1)).as("Should create v1 metadata").exists().isFile();
- Assertions.assertThat(version(2)).as("Should not create v2 or newer versions").doesNotExist();
+ assertThat(version(1)).as("Should create v1 metadata").exists().isFile();
+ assertThat(version(2)).as("Should not create v2 or newer versions").doesNotExist();
version(2).createNewFile();
- Assertions.assertThatThrownBy(update::commit)
+ assertThatThrownBy(update::commit)
.isInstanceOf(CommitFailedException.class)
.hasMessageStartingWith("Version 2 already exists");
List<File> manifests = listManifestFiles();
- Assertions.assertThat(manifests).as("Should contain 0 Avro manifest files").isEmpty();
+ assertThat(manifests).as("Should contain 0 Avro manifest files").isEmpty();
}
@Test
public void testStaleMetadata() throws Exception {
Table tableCopy = TABLES.load(tableLocation);
- Assertions.assertThat(version(1)).as("Should create v1 metadata").exists().isFile();
- Assertions.assertThat(version(2)).as("Should not create v2 or newer versions").doesNotExist();
+ assertThat(version(1)).as("Should create v1 metadata").exists().isFile();
+ assertThat(version(2)).as("Should not create v2 or newer versions").doesNotExist();
// prepare changes on the copy without committing
UpdateSchema updateCopy = tableCopy.updateSchema().addColumn("m", Types.IntegerType.get());
@@ -219,40 +220,40 @@
table.updateSchema().addColumn("n", Types.IntegerType.get()).commit();
- Assertions.assertThat(version(2)).as("Should create v2 for the update").exists().isFile();
- Assertions.assertThat(table.schema().asStruct())
+ assertThat(version(2)).as("Should create v2 for the update").exists().isFile();
+ assertThat(table.schema().asStruct())
.as("Unmodified copy should be out of date after update")
.isNotEqualTo(tableCopy.schema().asStruct());
// update the table
tableCopy.refresh();
- Assertions.assertThat(table.schema().asStruct())
+ assertThat(table.schema().asStruct())
.as("Copy should be back in sync")
.isEqualTo(tableCopy.schema().asStruct());
- Assertions.assertThatThrownBy(updateCopy::commit)
+ assertThatThrownBy(updateCopy::commit)
.isInstanceOf(CommitFailedException.class)
.hasMessage("Cannot commit changes based on stale table metadata");
List<File> manifests = listManifestFiles();
- Assertions.assertThat(manifests).as("Should contain 0 Avro manifest files").isEmpty();
+ assertThat(manifests).as("Should contain 0 Avro manifest files").isEmpty();
}
@Test
public void testStaleVersionHint() throws Exception {
Table stale = TABLES.load(tableLocation);
- Assertions.assertThat(version(1)).as("Should create v1 metadata").exists().isFile();
- Assertions.assertThat(version(2)).as("Should not create v2 or newer versions").doesNotExist();
+ assertThat(version(1)).as("Should create v1 metadata").exists().isFile();
+ assertThat(version(2)).as("Should not create v2 or newer versions").doesNotExist();
table.updateSchema().addColumn("n", Types.IntegerType.get()).commit();
- Assertions.assertThat(version(2)).as("Should create v2 for the update").exists().isFile();
- Assertions.assertThat(readVersionHint())
+ assertThat(version(2)).as("Should create v2 for the update").exists().isFile();
+ assertThat(readVersionHint())
.as("Should write the current version to the hint file")
.isEqualTo(2);
- Assertions.assertThat(stale.schema().asStruct())
+ assertThat(stale.schema().asStruct())
.as("Stable table schema should not match")
.isNotEqualTo(UPDATED_SCHEMA.asStruct());
@@ -260,12 +261,12 @@
replaceVersionHint(1);
Table reloaded = TABLES.load(tableLocation);
- Assertions.assertThat(reloaded.schema().asStruct())
+ assertThat(reloaded.schema().asStruct())
.as("Updated schema for newly loaded table should match")
.isEqualTo(UPDATED_SCHEMA.asStruct());
stale.refresh();
- Assertions.assertThat(reloaded.schema().asStruct())
+ assertThat(reloaded.schema().asStruct())
.as("Refreshed schema for stale table should match")
.isEqualTo(UPDATED_SCHEMA.asStruct());
}
@@ -275,33 +276,31 @@
// first append
table.newFastAppend().appendFile(FILE_A).commit();
- Assertions.assertThat(version(2)).as("Should create v2 for the update").exists().isFile();
- Assertions.assertThat(readVersionHint())
+ assertThat(version(2)).as("Should create v2 for the update").exists().isFile();
+ assertThat(readVersionHint())
.as("Should write the current version to the hint file")
.isEqualTo(2);
List<FileScanTask> tasks = Lists.newArrayList(table.newScan().planFiles());
- Assertions.assertThat(tasks).as("Should scan 1 file").hasSize(1);
+ assertThat(tasks).as("Should scan 1 file").hasSize(1);
List<File> manifests = listManifestFiles();
- Assertions.assertThat(manifests).as("Should contain only one Avro manifest file").hasSize(1);
+ assertThat(manifests).as("Should contain only one Avro manifest file").hasSize(1);
// second append
table.newFastAppend().appendFile(FILE_B).commit();
- Assertions.assertThat(version(3)).as("Should create v3 for the update").exists().isFile();
- Assertions.assertThat(readVersionHint())
+ assertThat(version(3)).as("Should create v3 for the update").exists().isFile();
+ assertThat(readVersionHint())
.as("Should write the current version to the hint file")
.isEqualTo(3);
tasks = Lists.newArrayList(table.newScan().planFiles());
- Assertions.assertThat(tasks).as("Should scan 2 files").hasSize(2);
- Assertions.assertThat(listManifestFiles())
- .as("Should contain 2 Avro manifest files")
- .hasSize(2);
+ assertThat(tasks).as("Should scan 2 files").hasSize(2);
+ assertThat(listManifestFiles()).as("Should contain 2 Avro manifest files").hasSize(2);
TableMetadata metadata = readMetadataVersion(3);
- Assertions.assertThat(metadata.currentSnapshot().allManifests(table.io()))
+ assertThat(metadata.currentSnapshot().allManifests(table.io()))
.as("Current snapshot should contain 2 manifests")
.hasSize(2);
}
@@ -317,14 +316,12 @@
table.newAppend().appendFile(FILE_C).commit();
List<FileScanTask> tasks = Lists.newArrayList(table.newScan().planFiles());
- Assertions.assertThat(tasks).as("Should scan 3 files").hasSize(3);
+ assertThat(tasks).as("Should scan 3 files").hasSize(3);
- Assertions.assertThat(listManifestFiles())
- .as("Should contain 3 Avro manifest files")
- .hasSize(3);
+ assertThat(listManifestFiles()).as("Should contain 3 Avro manifest files").hasSize(3);
TableMetadata metadata = readMetadataVersion(5);
- Assertions.assertThat(metadata.currentSnapshot().allManifests(table.io()))
+ assertThat(metadata.currentSnapshot().allManifests(table.io()))
.as("Current snapshot should contain 1 merged manifest")
.hasSize(1);
}
@@ -350,9 +347,9 @@
* provided {@link FileSystem} object. The provided FileSystem will be injected for commit call.
*/
private void testRenameWithFileSystem(FileSystem mockFs) throws Exception {
- Assertions.assertThat(version(1)).as("Should create v1 metadata").exists().isFile();
- Assertions.assertThat(version(2)).as("Should not create v2 or newer versions").doesNotExist();
- Assertions.assertThat(table).isInstanceOf(BaseTable.class);
+ assertThat(version(1)).as("Should create v1 metadata").exists().isFile();
+ assertThat(version(2)).as("Should not create v2 or newer versions").doesNotExist();
+ assertThat(table).isInstanceOf(BaseTable.class);
BaseTable baseTable = (BaseTable) table;
// use v1 metafile as the test rename destination.
TableMetadata meta1 = baseTable.operations().current();
@@ -361,33 +358,31 @@
// (so that we have 2 valid and different metadata files, which will reach the rename part
// during commit)
table.updateSchema().addColumn("n", Types.IntegerType.get()).commit();
- Assertions.assertThat(version(2)).as("Should create v2 for the update").exists().isFile();
- Assertions.assertThat(readVersionHint())
+ assertThat(version(2)).as("Should create v2 for the update").exists().isFile();
+ assertThat(readVersionHint())
.as("Should write the current version to the hint file")
.isEqualTo(2);
// mock / spy the classes for testing
TableOperations tops = baseTable.operations();
- Assertions.assertThat(tops).isInstanceOf(HadoopTableOperations.class);
+ assertThat(tops).isInstanceOf(HadoopTableOperations.class);
HadoopTableOperations spyOps = Mockito.spy((HadoopTableOperations) tops);
// inject the mockFS into the TableOperations
doReturn(mockFs).when(spyOps).getFileSystem(any(), any());
- Assertions.assertThatThrownBy(() -> spyOps.commit(tops.current(), meta1))
+ assertThatThrownBy(() -> spyOps.commit(tops.current(), meta1))
.isInstanceOf(CommitFailedException.class);
// Verifies that no temporary metadata.json files are left on rename failures.
Set<String> actual =
listMetadataJsonFiles().stream().map(File::getName).collect(Collectors.toSet());
Set<String> expected = Sets.newHashSet("v1.metadata.json", "v2.metadata.json");
- Assertions.assertThat(actual)
- .as("only v1 and v2 metadata.json should exist.")
- .isEqualTo(expected);
+ assertThat(actual).as("only v1 and v2 metadata.json should exist.").isEqualTo(expected);
}
@Test
public void testCanReadOldCompressedManifestFiles() throws Exception {
- Assertions.assertThat(version(1)).as("Should create v1 metadata").exists().isFile();
+ assertThat(version(1)).as("Should create v1 metadata").exists().isFile();
// do a file append
table.newAppend().appendFile(FILE_A).commit();
@@ -399,20 +394,20 @@
List<File> metadataFiles = listMetadataJsonFiles();
- Assertions.assertThat(metadataFiles).as("Should have two versions").hasSize(2);
- Assertions.assertThat(metadataFiles.stream().map(File::getName))
+ assertThat(metadataFiles).as("Should have two versions").hasSize(2);
+ assertThat(metadataFiles.stream().map(File::getName))
.as("Metadata should be compressed with old format.")
.allMatch(f -> f.endsWith(".metadata.json.gz"));
Table reloaded = TABLES.load(tableLocation);
List<FileScanTask> tasks = Lists.newArrayList(reloaded.newScan().planFiles());
- Assertions.assertThat(tasks).as("Should scan 1 files").hasSize(1);
+ assertThat(tasks).as("Should scan 1 files").hasSize(1);
}
@Test
public void testConcurrentFastAppends(@TempDir File dir) throws Exception {
- Assertions.assertThat(version(1)).as("Should create v1 metadata").exists().isFile();
+ assertThat(version(1)).as("Should create v1 metadata").exists().isFile();
int threadsCount = 5;
int numberOfCommitedFilesPerThread = 10;
Table tableWithHighRetries =
@@ -452,7 +447,7 @@
});
tableWithHighRetries.refresh();
- Assertions.assertThat(Lists.newArrayList(tableWithHighRetries.snapshots()))
+ assertThat(Lists.newArrayList(tableWithHighRetries.snapshots()))
.hasSize(threadsCount * numberOfCommitedFilesPerThread);
}
@@ -467,7 +462,7 @@
tableOperations.refresh();
BaseTable baseTable = (BaseTable) table;
TableMetadata meta2 = baseTable.operations().current();
- Assertions.assertThatThrownBy(() -> tableOperations.commit(tableOperations.current(), meta2))
+ assertThatThrownBy(() -> tableOperations.commit(tableOperations.current(), meta2))
.isInstanceOf(CommitFailedException.class)
.hasMessageStartingWith("Failed to acquire lock on file");
}
diff --git a/core/src/test/java/org/apache/iceberg/hadoop/TestHadoopStreams.java b/core/src/test/java/org/apache/iceberg/hadoop/TestHadoopStreams.java
index 09b478e..af3fb56 100644
--- a/core/src/test/java/org/apache/iceberg/hadoop/TestHadoopStreams.java
+++ b/core/src/test/java/org/apache/iceberg/hadoop/TestHadoopStreams.java
@@ -18,12 +18,13 @@
*/
package org.apache.iceberg.hadoop;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
+
import java.io.IOException;
import java.util.concurrent.Executors;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.s3a.S3ABlockOutputStream;
import org.apache.iceberg.io.PositionOutputStream;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
class TestHadoopStreams {
@@ -46,7 +47,7 @@
s3ABlockOutputStream.interruptClose();
});
- Assertions.assertThatThrownBy(wrap::close)
+ assertThatThrownBy(wrap::close)
.isInstanceOf(IOException.class)
.hasMessage("S3ABlockOutputStream failed to upload object after stream was closed");
}
diff --git a/core/src/test/java/org/apache/iceberg/hadoop/TestHadoopTables.java b/core/src/test/java/org/apache/iceberg/hadoop/TestHadoopTables.java
index e3d3244..70a04b9 100644
--- a/core/src/test/java/org/apache/iceberg/hadoop/TestHadoopTables.java
+++ b/core/src/test/java/org/apache/iceberg/hadoop/TestHadoopTables.java
@@ -21,6 +21,8 @@
import static org.apache.iceberg.NullOrder.NULLS_FIRST;
import static org.apache.iceberg.SortDirection.ASC;
import static org.apache.iceberg.types.Types.NestedField.required;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
import java.io.File;
import java.io.IOException;
@@ -40,7 +42,6 @@
import org.apache.iceberg.transforms.Transform;
import org.apache.iceberg.transforms.Transforms;
import org.apache.iceberg.types.Types;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.io.TempDir;
@@ -57,10 +58,10 @@
@Test
public void testTableExists() {
- Assertions.assertThat(TABLES.exists(tableDir.toURI().toString())).isFalse();
+ assertThat(TABLES.exists(tableDir.toURI().toString())).isFalse();
PartitionSpec spec = PartitionSpec.builderFor(SCHEMA).bucket("data", 16).build();
TABLES.create(SCHEMA, spec, tableDir.toURI().toString());
- Assertions.assertThat(TABLES.exists(tableDir.toURI().toString())).isTrue();
+ assertThat(TABLES.exists(tableDir.toURI().toString())).isTrue();
}
@Test
@@ -68,7 +69,7 @@
TABLES.create(SCHEMA, tableDir.toURI().toString());
TABLES.dropTable(tableDir.toURI().toString());
- Assertions.assertThatThrownBy(() -> TABLES.load(tableDir.toURI().toString()))
+ assertThatThrownBy(() -> TABLES.load(tableDir.toURI().toString()))
.isInstanceOf(NoSuchTableException.class)
.hasMessageStartingWith("Table does not exist");
}
@@ -79,13 +80,13 @@
createDummyTable(tableDir, dataDir);
TABLES.dropTable(tableDir.toURI().toString(), true);
- Assertions.assertThatThrownBy(() -> TABLES.load(tableDir.toURI().toString()))
+ assertThatThrownBy(() -> TABLES.load(tableDir.toURI().toString()))
.isInstanceOf(NoSuchTableException.class)
.hasMessageStartingWith("Table does not exist");
- Assertions.assertThat(dataDir.listFiles()).hasSize(0);
- Assertions.assertThat(tableDir).doesNotExist();
- Assertions.assertThat(TABLES.dropTable(tableDir.toURI().toString())).isFalse();
+ assertThat(dataDir.listFiles()).hasSize(0);
+ assertThat(tableDir).doesNotExist();
+ assertThat(TABLES.dropTable(tableDir.toURI().toString())).isFalse();
}
@Test
@@ -93,13 +94,13 @@
createDummyTable(tableDir, dataDir);
TABLES.dropTable(tableDir.toURI().toString(), false);
- Assertions.assertThatThrownBy(() -> TABLES.load(tableDir.toURI().toString()))
+ assertThatThrownBy(() -> TABLES.load(tableDir.toURI().toString()))
.isInstanceOf(NoSuchTableException.class)
.hasMessageStartingWith("Table does not exist");
- Assertions.assertThat(dataDir.listFiles()).hasSize(1);
- Assertions.assertThat(tableDir).doesNotExist();
- Assertions.assertThat(TABLES.dropTable(tableDir.toURI().toString())).isFalse();
+ assertThat(dataDir.listFiles()).hasSize(1);
+ assertThat(tableDir).doesNotExist();
+ assertThat(TABLES.dropTable(tableDir.toURI().toString())).isFalse();
}
@Test
@@ -108,8 +109,8 @@
Table table = TABLES.create(SCHEMA, spec, tableDir.toURI().toString());
SortOrder sortOrder = table.sortOrder();
- Assertions.assertThat(sortOrder.orderId()).as("Order ID must match").isEqualTo(0);
- Assertions.assertThat(sortOrder.isUnsorted()).as("Order must be unsorted").isTrue();
+ assertThat(sortOrder.orderId()).as("Order ID must match").isEqualTo(0);
+ assertThat(sortOrder.isUnsorted()).as("Order must be unsorted").isTrue();
}
@Test
@@ -120,16 +121,14 @@
TABLES.create(SCHEMA, spec, order, Maps.newHashMap(), tableDir.toURI().toString());
SortOrder sortOrder = table.sortOrder();
- Assertions.assertThat(sortOrder.orderId()).as("Order ID must match").isEqualTo(1);
- Assertions.assertThat(sortOrder.fields()).as("Order must have 1 field").hasSize(1);
- Assertions.assertThat(sortOrder.fields().get(0).direction())
- .as("Direction must match")
- .isEqualTo(ASC);
- Assertions.assertThat(sortOrder.fields().get(0).nullOrder())
+ assertThat(sortOrder.orderId()).as("Order ID must match").isEqualTo(1);
+ assertThat(sortOrder.fields()).as("Order must have 1 field").hasSize(1);
+ assertThat(sortOrder.fields().get(0).direction()).as("Direction must match").isEqualTo(ASC);
+ assertThat(sortOrder.fields().get(0).nullOrder())
.as("Null order must match")
.isEqualTo(NULLS_FIRST);
Transform<?, ?> transform = Transforms.identity();
- Assertions.assertThat(sortOrder.fields().get(0).transform())
+ assertThat(sortOrder.fields().get(0).transform())
.as("Transform must match")
.isEqualTo(transform);
}
@@ -141,12 +140,10 @@
TABLES.create(SCHEMA, spec, location);
Table table = TABLES.load(location);
- Assertions.assertThat(table.name()).as("Name must match").isEqualTo(location);
+ assertThat(table.name()).as("Name must match").isEqualTo(location);
Table snapshotsTable = TABLES.load(location + "#snapshots");
- Assertions.assertThat(snapshotsTable.name())
- .as("Name must match")
- .isEqualTo(location + "#snapshots");
+ assertThat(snapshotsTable.name()).as("Name must match").isEqualTo(location + "#snapshots");
}
private static void createDummyTable(File tableDir, File dataDir) throws IOException {
@@ -164,7 +161,7 @@
append.commit();
// Make sure that the data file and the manifest dir are created
- Assertions.assertThat(dataDir.listFiles()).hasSize(1);
- Assertions.assertThat(tableDir.listFiles()).hasSize(1);
+ assertThat(dataDir.listFiles()).hasSize(1);
+ assertThat(tableDir.listFiles()).hasSize(1);
}
}
diff --git a/core/src/test/java/org/apache/iceberg/hadoop/TestStaticTable.java b/core/src/test/java/org/apache/iceberg/hadoop/TestStaticTable.java
index 7cb57d7..377ef42 100644
--- a/core/src/test/java/org/apache/iceberg/hadoop/TestStaticTable.java
+++ b/core/src/test/java/org/apache/iceberg/hadoop/TestStaticTable.java
@@ -18,11 +18,13 @@
*/
package org.apache.iceberg.hadoop;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
+
import org.apache.iceberg.HasTableOperations;
import org.apache.iceberg.MetadataTableType;
import org.apache.iceberg.StaticTableOperations;
import org.apache.iceberg.Table;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
public class TestStaticTable extends HadoopTableTestBase {
@@ -39,7 +41,7 @@
@Test
public void testLoadFromMetadata() {
Table staticTable = getStaticTable();
- Assertions.assertThat(((HasTableOperations) staticTable).operations())
+ assertThat(((HasTableOperations) staticTable).operations())
.as("Loading a metadata file based table should return StaticTableOperations")
.isInstanceOf(StaticTableOperations.class);
}
@@ -47,7 +49,7 @@
@Test
public void testCannotBeAddedTo() {
Table staticTable = getStaticTable();
- Assertions.assertThatThrownBy(() -> staticTable.newOverwrite().addFile(FILE_A).commit())
+ assertThatThrownBy(() -> staticTable.newOverwrite().addFile(FILE_A).commit())
.isInstanceOf(UnsupportedOperationException.class)
.hasMessage("Cannot modify a static table");
}
@@ -56,7 +58,7 @@
public void testCannotBeDeletedFrom() {
table.newAppend().appendFile(FILE_A).commit();
Table staticTable = getStaticTable();
- Assertions.assertThatThrownBy(() -> staticTable.newDelete().deleteFile(FILE_A).commit())
+ assertThatThrownBy(() -> staticTable.newDelete().deleteFile(FILE_A).commit())
.isInstanceOf(UnsupportedOperationException.class)
.hasMessage("Cannot modify a static table");
}
@@ -69,11 +71,11 @@
Table staticTable = getStaticTable(type);
if (type.equals(MetadataTableType.POSITION_DELETES)) {
- Assertions.assertThatThrownBy(staticTable::newScan)
+ assertThatThrownBy(staticTable::newScan)
.isInstanceOf(UnsupportedOperationException.class)
.hasMessage("Cannot create TableScan from table of type POSITION_DELETES");
} else {
- Assertions.assertThatThrownBy(() -> staticTable.newScan().appendsAfter(1))
+ assertThatThrownBy(() -> staticTable.newScan().appendsAfter(1))
.isInstanceOf(UnsupportedOperationException.class)
.hasMessage(String.format("Cannot incrementally scan table of type %s", type));
}
@@ -86,13 +88,11 @@
table.newAppend().appendFile(FILE_B).commit();
table.newOverwrite().deleteFile(FILE_B).addFile(FILE_C).commit();
Table staticTable = getStaticTable();
- Assertions.assertThat(table.history()).as("Same history?").containsAll(staticTable.history());
- Assertions.assertThat(table.currentSnapshot().snapshotId())
+ assertThat(table.history()).as("Same history?").containsAll(staticTable.history());
+ assertThat(table.currentSnapshot().snapshotId())
.as("Same snapshot?")
.isEqualTo(staticTable.currentSnapshot().snapshotId());
- Assertions.assertThat(table.properties())
- .as("Same properties?")
- .isEqualTo(staticTable.properties());
+ assertThat(table.properties()).as("Same properties?").isEqualTo(staticTable.properties());
}
@Test
@@ -105,7 +105,7 @@
table.newOverwrite().deleteFile(FILE_B).addFile(FILE_C).commit();
staticTable.refresh();
- Assertions.assertThat(staticTable.currentSnapshot().snapshotId())
+ assertThat(staticTable.currentSnapshot().snapshotId())
.as("Snapshot unchanged after table modified")
.isEqualTo(originalSnapshot);
}
@@ -114,7 +114,7 @@
public void testMetadataTables() {
for (MetadataTableType type : MetadataTableType.values()) {
String enumName = type.name().replace("_", "").toLowerCase();
- Assertions.assertThat(getStaticTable(type).getClass().getName().toLowerCase())
+ assertThat(getStaticTable(type).getClass().getName().toLowerCase())
.as("Should be able to get MetadataTable of type : " + type)
.contains(enumName);
}
diff --git a/core/src/test/java/org/apache/iceberg/hadoop/TestTableSerialization.java b/core/src/test/java/org/apache/iceberg/hadoop/TestTableSerialization.java
index 78a7242..fa4b227 100644
--- a/core/src/test/java/org/apache/iceberg/hadoop/TestTableSerialization.java
+++ b/core/src/test/java/org/apache/iceberg/hadoop/TestTableSerialization.java
@@ -18,6 +18,8 @@
*/
package org.apache.iceberg.hadoop;
+import static org.assertj.core.api.Assertions.assertThat;
+
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
@@ -43,7 +45,6 @@
import org.apache.iceberg.relocated.com.google.common.collect.Maps;
import org.apache.iceberg.relocated.com.google.common.collect.Sets;
import org.apache.iceberg.types.Types;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.ValueSource;
@@ -62,8 +63,8 @@
Table serializableTable = SerializableTable.copyOf(table);
TestHelpers.assertSerializedAndLoadedMetadata(
serializableTable, TestHelpers.KryoHelpers.roundTripSerialize(serializableTable));
- Assertions.assertThat(serializableTable).isInstanceOf(HasTableOperations.class);
- Assertions.assertThat(((HasTableOperations) serializableTable).operations())
+ assertThat(serializableTable).isInstanceOf(HasTableOperations.class);
+ assertThat(((HasTableOperations) serializableTable).operations())
.isInstanceOf(StaticTableOperations.class);
}
@@ -119,10 +120,10 @@
Set<CharSequence> deserializedFiles = getFiles(deserialized);
// Checks that the deserialized data stays the same
- Assertions.assertThat(deserializedFiles).isEqualTo(expected);
+ assertThat(deserializedFiles).isEqualTo(expected);
// We expect that the files changed in the meantime
- Assertions.assertThat(deserializedFiles).isNotEqualTo(getFiles(table));
+ assertThat(deserializedFiles).isNotEqualTo(getFiles(table));
}
@ParameterizedTest
@@ -153,13 +154,13 @@
Set<CharSequence> deserializedFiles = getFiles(deserializeFromBytes(serialized.get(type)));
// Checks that the deserialized data stays the same
- Assertions.assertThat(deserializedFiles).isEqualTo(expected.get(type));
+ assertThat(deserializedFiles).isEqualTo(expected.get(type));
// Collect the current data
Set<CharSequence> newFiles = getFiles(getMetaDataTable(table, type));
// Expect that the new data is changed in the meantime
- Assertions.assertThat(deserializedFiles).isNotEqualTo(newFiles);
+ assertThat(deserializedFiles).isNotEqualTo(newFiles);
}
}
diff --git a/core/src/test/java/org/apache/iceberg/inmemory/TestInMemoryFileIO.java b/core/src/test/java/org/apache/iceberg/inmemory/TestInMemoryFileIO.java
index 12f5bf8..174d054 100644
--- a/core/src/test/java/org/apache/iceberg/inmemory/TestInMemoryFileIO.java
+++ b/core/src/test/java/org/apache/iceberg/inmemory/TestInMemoryFileIO.java
@@ -18,13 +18,15 @@
*/
package org.apache.iceberg.inmemory;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatExceptionOfType;
+
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.UUID;
import org.apache.iceberg.exceptions.AlreadyExistsException;
import org.apache.iceberg.exceptions.NotFoundException;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
public class TestInMemoryFileIO {
@@ -33,35 +35,35 @@
public void testBasicEndToEnd() throws IOException {
InMemoryFileIO fileIO = new InMemoryFileIO();
String location = randomLocation();
- Assertions.assertThat(fileIO.fileExists(location)).isFalse();
+ assertThat(fileIO.fileExists(location)).isFalse();
OutputStream outputStream = fileIO.newOutputFile(location).create();
byte[] data = "hello world".getBytes();
outputStream.write(data);
outputStream.close();
- Assertions.assertThat(fileIO.fileExists(location)).isTrue();
+ assertThat(fileIO.fileExists(location)).isTrue();
InputStream inputStream = fileIO.newInputFile(location).newStream();
byte[] buf = new byte[data.length];
inputStream.read(buf);
inputStream.close();
- Assertions.assertThat(new String(buf)).isEqualTo("hello world");
+ assertThat(new String(buf)).isEqualTo("hello world");
fileIO.deleteFile(location);
- Assertions.assertThat(fileIO.fileExists(location)).isFalse();
+ assertThat(fileIO.fileExists(location)).isFalse();
}
@Test
public void testNewInputFileNotFound() {
InMemoryFileIO fileIO = new InMemoryFileIO();
- Assertions.assertThatExceptionOfType(NotFoundException.class)
+ assertThatExceptionOfType(NotFoundException.class)
.isThrownBy(() -> fileIO.newInputFile("s3://nonexistent/file"));
}
@Test
public void testDeleteFileNotFound() {
InMemoryFileIO fileIO = new InMemoryFileIO();
- Assertions.assertThatExceptionOfType(NotFoundException.class)
+ assertThatExceptionOfType(NotFoundException.class)
.isThrownBy(() -> fileIO.deleteFile("s3://nonexistent/file"));
}
@@ -70,7 +72,7 @@
String location = randomLocation();
InMemoryFileIO fileIO = new InMemoryFileIO();
fileIO.addFile(location, "hello world".getBytes());
- Assertions.assertThatExceptionOfType(AlreadyExistsException.class)
+ assertThatExceptionOfType(AlreadyExistsException.class)
.isThrownBy(() -> fileIO.newOutputFile(location).create());
}
@@ -86,11 +88,11 @@
// Even though we've called create() and started writing data, this file won't yet exist
// in the parentFileIO before we've closed it.
- Assertions.assertThat(fileIO.fileExists(location)).isFalse();
+ assertThat(fileIO.fileExists(location)).isFalse();
// File appears after closing it.
outputStream.close();
- Assertions.assertThat(fileIO.fileExists(location)).isTrue();
+ assertThat(fileIO.fileExists(location)).isTrue();
// Start a new OutputFile and write new data but don't close() it yet.
outputStream = fileIO.newOutputFile(location).createOrOverwrite();
@@ -101,7 +103,7 @@
byte[] buf = new byte[oldData.length];
inputStream.read(buf);
inputStream.close();
- Assertions.assertThat(new String(buf)).isEqualTo("old data");
+ assertThat(new String(buf)).isEqualTo("old data");
// Finally, close the new output stream; data should be overwritten with new data now.
outputStream.close();
@@ -109,7 +111,7 @@
buf = new byte[newData.length];
inputStream.read(buf);
inputStream.close();
- Assertions.assertThat(new String(buf)).isEqualTo("new data");
+ assertThat(new String(buf)).isEqualTo("new data");
}
@Test
@@ -119,7 +121,7 @@
fileIO.addFile(location, "hello world".getBytes());
InMemoryFileIO fileIO2 = new InMemoryFileIO();
- Assertions.assertThat(fileIO2.fileExists(location))
+ assertThat(fileIO2.fileExists(location))
.isTrue()
.as("Files should be shared across all InMemoryFileIO instances");
}
diff --git a/core/src/test/java/org/apache/iceberg/inmemory/TestInMemoryInputFile.java b/core/src/test/java/org/apache/iceberg/inmemory/TestInMemoryInputFile.java
index 5aa5e42..63168a5 100644
--- a/core/src/test/java/org/apache/iceberg/inmemory/TestInMemoryInputFile.java
+++ b/core/src/test/java/org/apache/iceberg/inmemory/TestInMemoryInputFile.java
@@ -18,10 +18,12 @@
*/
package org.apache.iceberg.inmemory;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
+
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
public class TestInMemoryInputFile {
@@ -30,8 +32,8 @@
InMemoryInputFile inputFile =
new InMemoryInputFile("abc".getBytes(StandardCharsets.ISO_8859_1));
InputStream inputStream = inputFile.newStream();
- Assertions.assertThat(inputStream.read()).isEqualTo('a');
+ assertThat(inputStream.read()).isEqualTo('a');
inputStream.close();
- Assertions.assertThatThrownBy(inputStream::read).hasMessage("Stream is closed");
+ assertThatThrownBy(inputStream::read).hasMessage("Stream is closed");
}
}
diff --git a/core/src/test/java/org/apache/iceberg/inmemory/TestInMemoryOutputFile.java b/core/src/test/java/org/apache/iceberg/inmemory/TestInMemoryOutputFile.java
index 8015c5d..2ae5936 100644
--- a/core/src/test/java/org/apache/iceberg/inmemory/TestInMemoryOutputFile.java
+++ b/core/src/test/java/org/apache/iceberg/inmemory/TestInMemoryOutputFile.java
@@ -18,10 +18,12 @@
*/
package org.apache.iceberg.inmemory;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
+
import java.io.IOException;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
public class TestInMemoryOutputFile {
@@ -32,8 +34,7 @@
outputStream.write('a');
outputStream.write('b');
outputStream.close();
- Assertions.assertThatThrownBy(() -> outputStream.write('c')).hasMessage("Stream is closed");
- Assertions.assertThat(outputFile.toByteArray())
- .isEqualTo("ab".getBytes(StandardCharsets.ISO_8859_1));
+ assertThatThrownBy(() -> outputStream.write('c')).hasMessage("Stream is closed");
+ assertThat(outputFile.toByteArray()).isEqualTo("ab".getBytes(StandardCharsets.ISO_8859_1));
}
}
diff --git a/core/src/test/java/org/apache/iceberg/io/TestByteBufferInputStreams.java b/core/src/test/java/org/apache/iceberg/io/TestByteBufferInputStreams.java
index 408de7c..c0850d8 100644
--- a/core/src/test/java/org/apache/iceberg/io/TestByteBufferInputStreams.java
+++ b/core/src/test/java/org/apache/iceberg/io/TestByteBufferInputStreams.java
@@ -18,13 +18,15 @@
*/
package org.apache.iceberg.io;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
+
import java.io.EOFException;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Collections;
import java.util.List;
import org.apache.iceberg.relocated.com.google.common.collect.Lists;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
public abstract class TestByteBufferInputStreams {
@@ -39,14 +41,12 @@
ByteBufferInputStream stream = newStream();
- Assertions.assertThat(stream.read(bytes)).as("Should read 0 bytes").isEqualTo(0);
+ assertThat(stream.read(bytes)).as("Should read 0 bytes").isEqualTo(0);
int bytesRead = stream.read(new byte[100]);
- Assertions.assertThat(bytesRead).as("Should read to end of stream").isLessThan(100);
+ assertThat(bytesRead).as("Should read to end of stream").isLessThan(100);
- Assertions.assertThat(stream.read(bytes))
- .as("Should read 0 bytes at end of stream")
- .isEqualTo(0);
+ assertThat(stream.read(bytes)).as("Should read 0 bytes at end of stream").isEqualTo(0);
}
@Test
@@ -56,22 +56,18 @@
ByteBufferInputStream stream = newStream();
int bytesRead = stream.read(bytes);
- Assertions.assertThat(bytesRead).as("Should read the entire buffer").isEqualTo(bytes.length);
+ assertThat(bytesRead).as("Should read the entire buffer").isEqualTo(bytes.length);
for (int i = 0; i < bytes.length; i += 1) {
- Assertions.assertThat(bytes[i]).as("Byte i should be i").isEqualTo((byte) i);
- Assertions.assertThat(stream.getPos()).as("Should advance position").isEqualTo(35);
+ assertThat(bytes[i]).as("Byte i should be i").isEqualTo((byte) i);
+ assertThat(stream.getPos()).as("Should advance position").isEqualTo(35);
}
- Assertions.assertThat(stream.available())
- .as("Should have no more remaining content")
- .isEqualTo(0);
+ assertThat(stream.available()).as("Should have no more remaining content").isEqualTo(0);
- Assertions.assertThat(stream.read(bytes)).as("Should return -1 at end of stream").isEqualTo(-1);
+ assertThat(stream.read(bytes)).as("Should return -1 at end of stream").isEqualTo(-1);
- Assertions.assertThat(stream.available())
- .as("Should have no more remaining content")
- .isEqualTo(0);
+ assertThat(stream.available()).as("Should have no more remaining content").isEqualTo(0);
checkOriginalData();
}
@@ -86,37 +82,27 @@
int lastBytesRead = bytes.length;
for (int offset = 0; offset < length; offset += bytes.length) {
- Assertions.assertThat(lastBytesRead)
- .as("Should read requested len")
- .isEqualTo(bytes.length);
+ assertThat(lastBytesRead).as("Should read requested len").isEqualTo(bytes.length);
lastBytesRead = stream.read(bytes, 0, bytes.length);
- Assertions.assertThat(stream.getPos())
- .as("Should advance position")
- .isEqualTo(offset + lastBytesRead);
+ assertThat(stream.getPos()).as("Should advance position").isEqualTo(offset + lastBytesRead);
// validate the bytes that were read
for (int i = 0; i < lastBytesRead; i += 1) {
- Assertions.assertThat(bytes[i]).as("Byte i should be i").isEqualTo((byte) (offset + i));
+ assertThat(bytes[i]).as("Byte i should be i").isEqualTo((byte) (offset + i));
}
}
- Assertions.assertThat(lastBytesRead % bytes.length)
+ assertThat(lastBytesRead % bytes.length)
.as("Should read fewer bytes at end of buffer")
.isEqualTo(length % bytes.length);
- Assertions.assertThat(stream.available())
- .as("Should have no more remaining content")
- .isEqualTo(0);
+ assertThat(stream.available()).as("Should have no more remaining content").isEqualTo(0);
- Assertions.assertThat(stream.read(bytes))
- .as("Should return -1 at end of stream")
- .isEqualTo(-1);
+ assertThat(stream.read(bytes)).as("Should return -1 at end of stream").isEqualTo(-1);
- Assertions.assertThat(stream.available())
- .as("Should have no more remaining content")
- .isEqualTo(0);
+ assertThat(stream.available()).as("Should have no more remaining content").isEqualTo(0);
}
checkOriginalData();
@@ -131,40 +117,32 @@
int lastBytesRead = size;
for (int offset = 0; offset < bytes.length; offset += size) {
- Assertions.assertThat(lastBytesRead).as("Should read requested len").isEqualTo(size);
+ assertThat(lastBytesRead).as("Should read requested len").isEqualTo(size);
lastBytesRead = stream.read(bytes, offset, Math.min(size, bytes.length - offset));
- Assertions.assertThat(stream.getPos())
+ assertThat(stream.getPos())
.as("Should advance position")
.isEqualTo(lastBytesRead > 0 ? offset + lastBytesRead : offset);
}
- Assertions.assertThat(lastBytesRead % size)
+ assertThat(lastBytesRead % size)
.as("Should read fewer bytes at end of buffer")
.isEqualTo(bytes.length % size);
for (int i = 0; i < bytes.length; i += 1) {
- Assertions.assertThat(bytes[i]).as("Byte i should be i").isEqualTo((byte) i);
+ assertThat(bytes[i]).as("Byte i should be i").isEqualTo((byte) i);
}
- Assertions.assertThat(stream.available())
- .as("Should have no more remaining content")
- .isEqualTo(2);
+ assertThat(stream.available()).as("Should have no more remaining content").isEqualTo(2);
- Assertions.assertThat(stream.read(bytes)).as("Should return 2 more bytes").isEqualTo(2);
+ assertThat(stream.read(bytes)).as("Should return 2 more bytes").isEqualTo(2);
- Assertions.assertThat(stream.available())
- .as("Should have no more remaining content")
- .isEqualTo(0);
+ assertThat(stream.available()).as("Should have no more remaining content").isEqualTo(0);
- Assertions.assertThat(stream.read(bytes))
- .as("Should return -1 at end of stream")
- .isEqualTo(-1);
+ assertThat(stream.read(bytes)).as("Should return -1 at end of stream").isEqualTo(-1);
- Assertions.assertThat(stream.available())
- .as("Should have no more remaining content")
- .isEqualTo(0);
+ assertThat(stream.available()).as("Should have no more remaining content").isEqualTo(0);
}
checkOriginalData();
@@ -176,11 +154,11 @@
int length = stream.available();
for (int i = 0; i < length; i += 1) {
- Assertions.assertThat(stream.getPos()).as("Position should increment").isEqualTo(i);
- Assertions.assertThat(stream.read()).isEqualTo(i);
+ assertThat(stream.getPos()).as("Position should increment").isEqualTo(i);
+ assertThat(stream.read()).isEqualTo(i);
}
- Assertions.assertThatThrownBy(stream::read).isInstanceOf(EOFException.class).hasMessage(null);
+ assertThatThrownBy(stream::read).isInstanceOf(EOFException.class).hasMessage(null);
checkOriginalData();
}
@@ -193,11 +171,9 @@
ByteBuffer empty = stream.slice(0);
- Assertions.assertThat(empty).as("slice(0) should produce a non-null buffer").isNotNull();
- Assertions.assertThat(empty.remaining())
- .as("slice(0) should produce an empty buffer")
- .isEqualTo(0);
- Assertions.assertThat(stream.getPos()).as("Position should be at start").isEqualTo(0);
+ assertThat(empty).as("slice(0) should produce a non-null buffer").isNotNull();
+ assertThat(empty.remaining()).as("slice(0) should produce an empty buffer").isEqualTo(0);
+ assertThat(stream.getPos()).as("Position should be at start").isEqualTo(0);
int i = 0;
while (stream.available() > 0) {
@@ -205,13 +181,13 @@
ByteBuffer buffer = stream.slice(bytesToSlice);
for (int j = 0; j < bytesToSlice; j += 1) {
- Assertions.assertThat(buffer.get()).as("Data should be correct").isEqualTo((byte) (i + j));
+ assertThat(buffer.get()).as("Data should be correct").isEqualTo((byte) (i + j));
}
i += bytesToSlice;
}
- Assertions.assertThat(stream.getPos()).as("Position should be at end").isEqualTo(length);
+ assertThat(stream.getPos()).as("Position should be at end").isEqualTo(length);
checkOriginalData();
}
@@ -220,7 +196,7 @@
public void testSliceBuffers0() throws Exception {
ByteBufferInputStream stream = newStream();
- Assertions.assertThat(stream.sliceBuffers(0))
+ assertThat(stream.sliceBuffers(0))
.as("Should return an empty list")
.isEqualTo(Collections.emptyList());
}
@@ -232,15 +208,15 @@
List<ByteBuffer> buffers = stream.sliceBuffers(stream.available());
- Assertions.assertThat(stream.getPos()).as("Should consume all buffers").isEqualTo(length);
+ assertThat(stream.getPos()).as("Should consume all buffers").isEqualTo(length);
- Assertions.assertThatThrownBy(() -> stream.sliceBuffers(length))
+ assertThatThrownBy(() -> stream.sliceBuffers(length))
.isInstanceOf(EOFException.class)
.hasMessage(null);
ByteBufferInputStream copy = ByteBufferInputStream.wrap(buffers);
for (int i = 0; i < length; i += 1) {
- Assertions.assertThat(copy.read()).as("Slice should have identical data").isEqualTo(i);
+ assertThat(copy.read()).as("Slice should have identical data").isEqualTo(i);
}
checkOriginalData();
@@ -257,12 +233,12 @@
buffers.addAll(stream.sliceBuffers(Math.min(size, stream.available())));
}
- Assertions.assertThat(stream.getPos()).as("Should consume all content").isEqualTo(length);
+ assertThat(stream.getPos()).as("Should consume all content").isEqualTo(length);
ByteBufferInputStream newStream = new MultiBufferInputStream(buffers);
for (int i = 0; i < length; i += 1) {
- Assertions.assertThat(newStream.read()).as("Data should be correct").isEqualTo(i);
+ assertThat(newStream.read()).as("Data should be correct").isEqualTo(i);
}
}
@@ -277,36 +253,32 @@
int sliceLength = 5;
List<ByteBuffer> buffers = stream.sliceBuffers(sliceLength);
- Assertions.assertThat(stream.available())
+ assertThat(stream.available())
.as("Should advance the original stream")
.isEqualTo(length - sliceLength);
- Assertions.assertThat(stream.getPos())
+ assertThat(stream.getPos())
.as("Should advance the original stream position")
.isEqualTo(sliceLength);
- Assertions.assertThat(buffers.size())
- .as("Should return a slice of the first buffer")
- .isEqualTo(1);
+ assertThat(buffers.size()).as("Should return a slice of the first buffer").isEqualTo(1);
ByteBuffer buffer = buffers.get(0);
- Assertions.assertThat(buffer.remaining())
- .as("Should have requested bytes")
- .isEqualTo(sliceLength);
+ assertThat(buffer.remaining()).as("Should have requested bytes").isEqualTo(sliceLength);
// read the buffer one past the returned limit. this should not change the
// next value in the original stream
buffer.limit(sliceLength + 1);
for (int i = 0; i < sliceLength + 1; i += 1) {
- Assertions.assertThat(buffer.get()).as("Should have correct data").isEqualTo((byte) i);
+ assertThat(buffer.get()).as("Should have correct data").isEqualTo((byte) i);
}
- Assertions.assertThat(stream.getPos())
+ assertThat(stream.getPos())
.as("Reading a slice shouldn't advance the original stream")
.isEqualTo(sliceLength);
- Assertions.assertThat(stream.read())
+ assertThat(stream.read())
.as("Reading a slice shouldn't change the underlying data")
.isEqualTo(sliceLength);
@@ -318,11 +290,11 @@
try {
buffer.put((byte) 255);
- Assertions.assertThat(stream.getPos())
+ assertThat(stream.getPos())
.as("Writing to a slice shouldn't advance the original stream")
.isEqualTo(sliceLength + 1);
- Assertions.assertThat(stream.read())
+ assertThat(stream.read())
.as("Writing to a slice should change the underlying data")
.isEqualTo(255);
@@ -337,20 +309,20 @@
while (stream.available() > 0) {
int bytesToSkip = Math.min(stream.available(), 10);
- Assertions.assertThat(stream.skip(bytesToSkip))
+ assertThat(stream.skip(bytesToSkip))
.as("Should skip all, regardless of backing buffers")
.isEqualTo(bytesToSkip);
}
stream = newStream();
- Assertions.assertThat(stream.skip(0)).isEqualTo(0);
+ assertThat(stream.skip(0)).isEqualTo(0);
int length = stream.available();
- Assertions.assertThat(stream.skip(length + 10))
+ assertThat(stream.skip(length + 10))
.as("Should stop at end when out of bytes")
.isEqualTo(length);
- Assertions.assertThat(stream.skip(10)).as("Should return -1 when at end").isEqualTo(-1);
+ assertThat(stream.skip(10)).as("Should return -1 when at end").isEqualTo(-1);
}
@Test
@@ -363,7 +335,7 @@
stream.skipFully(bytesToSkip);
- Assertions.assertThat(stream.getPos() - lastPosition)
+ assertThat(stream.getPos() - lastPosition)
.as("Should skip all, regardless of backing buffers")
.isEqualTo(bytesToSkip);
@@ -372,11 +344,11 @@
ByteBufferInputStream stream2 = newStream();
stream2.skipFully(0);
- Assertions.assertThat(stream2.getPos()).as("Check initial position").isEqualTo(0);
+ assertThat(stream2.getPos()).as("Check initial position").isEqualTo(0);
int length = stream2.available();
- Assertions.assertThatThrownBy(() -> stream2.skipFully(length + 10))
+ assertThatThrownBy(() -> stream2.skipFully(length + 10))
.isInstanceOf(EOFException.class)
.hasMessageStartingWith("Not enough bytes to skip");
}
@@ -397,20 +369,18 @@
stream.reset();
- Assertions.assertThat(stream.getPos()).as("Position should return to the mark").isEqualTo(mark);
+ assertThat(stream.getPos()).as("Position should return to the mark").isEqualTo(mark);
byte[] afterReset = new byte[100];
int bytesReadAfterReset = stream.read(afterReset);
- Assertions.assertThat(bytesReadAfterReset)
+ assertThat(bytesReadAfterReset)
.as("Should read the same number of bytes")
.isEqualTo(expectedBytesRead);
- Assertions.assertThat(stream.getPos())
- .as("Read should end at the same position")
- .isEqualTo(end);
+ assertThat(stream.getPos()).as("Read should end at the same position").isEqualTo(end);
- Assertions.assertThat(afterReset).as("Content should be equal").isEqualTo(expected);
+ assertThat(afterReset).as("Content should be equal").isEqualTo(expected);
}
@Test
@@ -430,19 +400,17 @@
stream.reset();
- Assertions.assertThat(stream.getPos()).as("Position should return to the mark").isEqualTo(mark);
+ assertThat(stream.getPos()).as("Position should return to the mark").isEqualTo(mark);
byte[] afterReset = new byte[100];
int bytesReadAfterReset = stream.read(afterReset);
- Assertions.assertThat(bytesReadAfterReset)
+ assertThat(bytesReadAfterReset)
.as("Should read the same number of bytes")
.isEqualTo(expectedBytesRead);
- Assertions.assertThat(stream.getPos())
- .as("Read should end at the same position")
- .isEqualTo(end);
+ assertThat(stream.getPos()).as("Read should end at the same position").isEqualTo(end);
- Assertions.assertThat(afterReset).as("Content should be equal").isEqualTo(expected);
+ assertThat(afterReset).as("Content should be equal").isEqualTo(expected);
}
@Test
@@ -454,22 +422,20 @@
long mark = stream.getPos();
byte[] expected = new byte[10];
- Assertions.assertThat(stream.read(expected)).as("Should read 10 bytes").isEqualTo(10);
+ assertThat(stream.read(expected)).as("Should read 10 bytes").isEqualTo(10);
long end = stream.getPos();
stream.reset();
- Assertions.assertThat(stream.getPos()).as("Position should return to the mark").isEqualTo(mark);
+ assertThat(stream.getPos()).as("Position should return to the mark").isEqualTo(mark);
byte[] afterReset = new byte[10];
- Assertions.assertThat(stream.read(afterReset)).as("Should read 10 bytes").isEqualTo(10);
+ assertThat(stream.read(afterReset)).as("Should read 10 bytes").isEqualTo(10);
- Assertions.assertThat(stream.getPos())
- .as("Read should end at the same position")
- .isEqualTo(end);
+ assertThat(stream.getPos()).as("Read should end at the same position").isEqualTo(end);
- Assertions.assertThat(afterReset).as("Content should be equal").isEqualTo(expected);
+ assertThat(afterReset).as("Content should be equal").isEqualTo(expected);
}
@Test
@@ -477,36 +443,34 @@
ByteBufferInputStream stream = newStream();
int bytesRead = stream.read(new byte[100]);
- Assertions.assertThat(bytesRead < 100).as("Should read to end of stream").isTrue();
+ assertThat(bytesRead < 100).as("Should read to end of stream").isTrue();
stream.mark(100);
long mark = stream.getPos();
byte[] expected = new byte[10];
- Assertions.assertThat(stream.read(expected)).as("Should read 0 bytes").isEqualTo(-1);
+ assertThat(stream.read(expected)).as("Should read 0 bytes").isEqualTo(-1);
long end = stream.getPos();
stream.reset();
- Assertions.assertThat(stream.getPos()).as("Position should return to the mark").isEqualTo(mark);
+ assertThat(stream.getPos()).as("Position should return to the mark").isEqualTo(mark);
byte[] afterReset = new byte[10];
- Assertions.assertThat(stream.read(afterReset)).as("Should read 0 bytes").isEqualTo(-1);
+ assertThat(stream.read(afterReset)).as("Should read 0 bytes").isEqualTo(-1);
- Assertions.assertThat(stream.getPos())
- .as("Read should end at the same position")
- .isEqualTo(end);
+ assertThat(stream.getPos()).as("Read should end at the same position").isEqualTo(end);
- Assertions.assertThat(afterReset).as("Content should be equal").isEqualTo(expected);
+ assertThat(afterReset).as("Content should be equal").isEqualTo(expected);
}
@Test
public void testMarkUnset() {
ByteBufferInputStream stream = newStream();
- Assertions.assertThatThrownBy(stream::reset)
+ assertThatThrownBy(stream::reset)
.isInstanceOf(IOException.class)
.hasMessageStartingWith("No mark defined");
}
@@ -517,28 +481,26 @@
byte[] expected = new byte[6];
stream.mark(10);
- Assertions.assertThat(stream.read(expected))
- .as("Should read expected bytes")
- .isEqualTo(expected.length);
+ assertThat(stream.read(expected)).as("Should read expected bytes").isEqualTo(expected.length);
stream.reset();
stream.mark(10);
byte[] firstRead = new byte[6];
- Assertions.assertThat(stream.read(firstRead))
+ assertThat(stream.read(firstRead))
.as("Should read firstRead bytes")
.isEqualTo(firstRead.length);
stream.reset();
byte[] secondRead = new byte[6];
- Assertions.assertThat(stream.read(secondRead))
+ assertThat(stream.read(secondRead))
.as("Should read secondRead bytes")
.isEqualTo(secondRead.length);
- Assertions.assertThat(firstRead).as("First read should be correct").isEqualTo(expected);
+ assertThat(firstRead).as("First read should be correct").isEqualTo(expected);
- Assertions.assertThat(secondRead).as("Second read should be correct").isEqualTo(expected);
+ assertThat(secondRead).as("Second read should be correct").isEqualTo(expected);
}
@Test
@@ -546,13 +508,13 @@
ByteBufferInputStream stream = newStream();
stream.mark(5);
- Assertions.assertThat(stream.read(new byte[5])).as("Should read 5 bytes").isEqualTo(5);
+ assertThat(stream.read(new byte[5])).as("Should read 5 bytes").isEqualTo(5);
stream.reset();
- Assertions.assertThat(stream.read(new byte[6])).as("Should read 6 bytes").isEqualTo(6);
+ assertThat(stream.read(new byte[6])).as("Should read 6 bytes").isEqualTo(6);
- Assertions.assertThatThrownBy(stream::reset)
+ assertThatThrownBy(stream::reset)
.isInstanceOf(IOException.class)
.hasMessageStartingWith("No mark defined");
}
@@ -562,11 +524,11 @@
ByteBufferInputStream stream = newStream();
stream.mark(5);
- Assertions.assertThat(stream.read(new byte[5])).as("Should read 5 bytes").isEqualTo(5);
+ assertThat(stream.read(new byte[5])).as("Should read 5 bytes").isEqualTo(5);
stream.reset();
- Assertions.assertThatThrownBy(stream::reset)
+ assertThatThrownBy(stream::reset)
.isInstanceOf(IOException.class)
.hasMessageStartingWith("No mark defined");
}
diff --git a/core/src/test/java/org/apache/iceberg/io/TestIOUtil.java b/core/src/test/java/org/apache/iceberg/io/TestIOUtil.java
index e6b4cb9..48c75b5 100644
--- a/core/src/test/java/org/apache/iceberg/io/TestIOUtil.java
+++ b/core/src/test/java/org/apache/iceberg/io/TestIOUtil.java
@@ -18,6 +18,9 @@
*/
package org.apache.iceberg.io;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
+
import java.io.EOFException;
import java.io.IOException;
import java.nio.ByteBuffer;
@@ -25,7 +28,6 @@
import java.util.Arrays;
import org.apache.iceberg.inmemory.InMemoryOutputFile;
import org.apache.iceberg.relocated.com.google.common.base.Strings;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
public class TestIOUtil {
@@ -36,13 +38,11 @@
MockInputStream stream = new MockInputStream();
IOUtil.readFully(stream, buffer, 0, buffer.length);
- Assertions.assertThat(buffer)
+ assertThat(buffer)
.as("Byte array contents should match")
.isEqualTo(Arrays.copyOfRange(MockInputStream.TEST_ARRAY, 0, 5));
- Assertions.assertThat(stream.getPos())
- .as("Stream position should reflect bytes read")
- .isEqualTo(5);
+ assertThat(stream.getPos()).as("Stream position should reflect bytes read").isEqualTo(5);
}
@Test
@@ -52,13 +52,11 @@
MockInputStream stream = new MockInputStream(2, 3, 3);
IOUtil.readFully(stream, buffer, 0, buffer.length);
- Assertions.assertThat(buffer)
+ assertThat(buffer)
.as("Byte array contents should match")
.containsExactly(Arrays.copyOfRange(MockInputStream.TEST_ARRAY, 0, 5));
- Assertions.assertThat(stream.getPos())
- .as("Stream position should reflect bytes read")
- .isEqualTo(5);
+ assertThat(stream.getPos()).as("Stream position should reflect bytes read").isEqualTo(5);
}
@Test
@@ -68,15 +66,11 @@
final MockInputStream stream = new MockInputStream(2, 3, 3);
IOUtil.readFully(stream, buffer, 0, buffer.length);
- Assertions.assertThat(buffer)
- .as("Byte array contents should match")
- .isEqualTo(MockInputStream.TEST_ARRAY);
+ assertThat(buffer).as("Byte array contents should match").isEqualTo(MockInputStream.TEST_ARRAY);
- Assertions.assertThat(stream.getPos())
- .as("Stream position should reflect bytes read")
- .isEqualTo(10);
+ assertThat(stream.getPos()).as("Stream position should reflect bytes read").isEqualTo(10);
- Assertions.assertThatThrownBy(() -> IOUtil.readFully(stream, buffer, 0, 1))
+ assertThatThrownBy(() -> IOUtil.readFully(stream, buffer, 0, 1))
.isInstanceOf(EOFException.class)
.hasMessage("Reached the end of stream with 1 bytes left to read");
}
@@ -87,17 +81,15 @@
final MockInputStream stream = new MockInputStream(2, 3, 3);
- Assertions.assertThatThrownBy(() -> IOUtil.readFully(stream, buffer, 0, buffer.length))
+ assertThatThrownBy(() -> IOUtil.readFully(stream, buffer, 0, buffer.length))
.isInstanceOf(EOFException.class)
.hasMessage("Reached the end of stream with 1 bytes left to read");
- Assertions.assertThat(Arrays.copyOfRange(buffer, 0, 10))
+ assertThat(Arrays.copyOfRange(buffer, 0, 10))
.as("Should have consumed bytes")
.isEqualTo(MockInputStream.TEST_ARRAY);
- Assertions.assertThat(stream.getPos())
- .as("Stream position should reflect bytes read")
- .isEqualTo(10);
+ assertThat(stream.getPos()).as("Stream position should reflect bytes read").isEqualTo(10);
}
@Test
@@ -107,13 +99,11 @@
MockInputStream stream = new MockInputStream();
IOUtil.readFully(stream, buffer, 2, 5);
- Assertions.assertThat(Arrays.copyOfRange(buffer, 2, 7))
+ assertThat(Arrays.copyOfRange(buffer, 2, 7))
.as("Byte array contents should match")
.isEqualTo(Arrays.copyOfRange(MockInputStream.TEST_ARRAY, 0, 5));
- Assertions.assertThat(stream.getPos())
- .as("Stream position should reflect bytes read")
- .isEqualTo(5);
+ assertThat(stream.getPos()).as("Stream position should reflect bytes read").isEqualTo(5);
}
@Test
@@ -123,9 +113,7 @@
MockInputStream stream = new MockInputStream();
IOUtil.readFully(stream, buffer, 0, buffer.length);
- Assertions.assertThat(stream.getPos())
- .as("Stream position should reflect bytes read")
- .isEqualTo(0);
+ assertThat(stream.getPos()).as("Stream position should reflect bytes read").isEqualTo(0);
}
@Test
@@ -135,13 +123,11 @@
MockInputStream stream = new MockInputStream(2, 2, 3);
IOUtil.readFully(stream, buffer, 2, 5);
- Assertions.assertThat(Arrays.copyOfRange(buffer, 2, 7))
+ assertThat(Arrays.copyOfRange(buffer, 2, 7))
.as("Byte array contents should match")
.isEqualTo(Arrays.copyOfRange(MockInputStream.TEST_ARRAY, 0, 5));
- Assertions.assertThat(stream.getPos())
- .as("Stream position should reflect bytes read")
- .isEqualTo(5);
+ assertThat(stream.getPos()).as("Stream position should reflect bytes read").isEqualTo(5);
}
@Test
@@ -151,6 +137,6 @@
try (PositionOutputStream outputStream = outputFile.create()) {
IOUtil.writeFully(outputStream, ByteBuffer.wrap(input.clone()));
}
- Assertions.assertThat(outputFile.toByteArray()).isEqualTo(input);
+ assertThat(outputFile.toByteArray()).isEqualTo(input);
}
}
diff --git a/core/src/test/java/org/apache/iceberg/jdbc/TestJdbcCatalog.java b/core/src/test/java/org/apache/iceberg/jdbc/TestJdbcCatalog.java
index 90492b5..c0c9afa 100644
--- a/core/src/test/java/org/apache/iceberg/jdbc/TestJdbcCatalog.java
+++ b/core/src/test/java/org/apache/iceberg/jdbc/TestJdbcCatalog.java
@@ -79,7 +79,6 @@
import org.apache.iceberg.transforms.Transform;
import org.apache.iceberg.transforms.Transforms;
import org.apache.iceberg.types.Types;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.io.TempDir;
@@ -256,7 +255,7 @@
jdbcCatalog.setConf(conf);
jdbcCatalog.initialize("test_catalog_with_retryable_status_codes", properties);
JdbcClientPool jdbcClientPool = jdbcCatalog.connectionPool();
- Assertions.assertThat(
+ assertThat(
jdbcClientPool.isConnectionException(
new SQLNonTransientConnectionException("Failed to authenticate")))
.as("SQL Non Transient exception is not retryable")
@@ -522,8 +521,7 @@
FileSystem fs = Util.getFs(new Path(metaLocation), conf);
assertThat(fs.isDirectory(new Path(metaLocation))).isTrue();
- Assertions.assertThatThrownBy(
- () -> catalog.createTable(testTable, SCHEMA, PartitionSpec.unpartitioned()))
+ assertThatThrownBy(() -> catalog.createTable(testTable, SCHEMA, PartitionSpec.unpartitioned()))
.isInstanceOf(AlreadyExistsException.class)
.hasMessage("Table already exists: db.ns1.ns2.tbl");
@@ -580,7 +578,7 @@
.withRecordCount(1)
.build();
- Assertions.assertThatThrownBy(() -> table.newAppend().appendFile(dataFile2).commit())
+ assertThatThrownBy(() -> table.newAppend().appendFile(dataFile2).commit())
.isInstanceOf(NoSuchTableException.class)
.hasMessage(
"Failed to load table db.table from catalog test_jdbc_catalog: dropped by another process");
@@ -636,7 +634,7 @@
assertThat(catalog.listTables(testTable.namespace())).doesNotContain(testTable);
catalog.dropTable(testTable2);
- Assertions.assertThatThrownBy(() -> catalog.listTables(testTable2.namespace()))
+ assertThatThrownBy(() -> catalog.listTables(testTable2.namespace()))
.isInstanceOf(NoSuchNamespaceException.class)
.hasMessage("Namespace does not exist: db.ns1.ns2");
@@ -665,15 +663,14 @@
assertThat(catalog.listTables(to.namespace())).contains(to).doesNotContain(from);
assertThat(catalog.loadTable(to).name()).endsWith(to.name());
- Assertions.assertThatThrownBy(
- () -> catalog.renameTable(TableIdentifier.of("db", "tbl-not-exists"), to))
+ assertThatThrownBy(() -> catalog.renameTable(TableIdentifier.of("db", "tbl-not-exists"), to))
.isInstanceOf(NoSuchTableException.class)
.hasMessage("Table does not exist: db.tbl-not-exists");
// rename table to existing table name!
TableIdentifier from2 = TableIdentifier.of("db", "tbl2");
catalog.createTable(from2, SCHEMA, PartitionSpec.unpartitioned());
- Assertions.assertThatThrownBy(() -> catalog.renameTable(from2, to))
+ assertThatThrownBy(() -> catalog.renameTable(from2, to))
.isInstanceOf(AlreadyExistsException.class)
.hasMessage("Table already exists: db.tbl2-newtable");
}
@@ -697,7 +694,7 @@
assertThat(tbls2).hasSize(1);
assertThat(tbls2.get(0).name()).isEqualTo("tbl3");
- Assertions.assertThatThrownBy(() -> catalog.listTables(Namespace.of("db", "ns1", "ns2")))
+ assertThatThrownBy(() -> catalog.listTables(Namespace.of("db", "ns1", "ns2")))
.isInstanceOf(NoSuchNamespaceException.class)
.hasMessage("Namespace does not exist: db.ns1.ns2");
}
@@ -774,7 +771,7 @@
Set<String> tblSet3 = Sets.newHashSet(nsp4.stream().map(Namespace::toString).iterator());
assertThat(tblSet3).hasSize(3).contains("db", "db2", "");
- Assertions.assertThatThrownBy(() -> catalog.listNamespaces(Namespace.of("db", "db2", "ns2")))
+ assertThatThrownBy(() -> catalog.listNamespaces(Namespace.of("db", "db2", "ns2")))
.isInstanceOf(NoSuchNamespaceException.class)
.hasMessage("Namespace does not exist: db.db2.ns2");
}
@@ -791,8 +788,7 @@
assertThat(catalog.loadNamespaceMetadata(Namespace.of("db"))).containsKey("location");
- Assertions.assertThatThrownBy(
- () -> catalog.loadNamespaceMetadata(Namespace.of("db", "db2", "ns2")))
+ assertThatThrownBy(() -> catalog.loadNamespaceMetadata(Namespace.of("db", "db2", "ns2")))
.isInstanceOf(NoSuchNamespaceException.class)
.hasMessage("Namespace does not exist: db.db2.ns2");
}
@@ -829,15 +825,15 @@
Lists.newArrayList(tbl0, tbl1, tbl2, tbl3, tbl4)
.forEach(t -> catalog.createTable(t, SCHEMA, PartitionSpec.unpartitioned()));
- Assertions.assertThatThrownBy(() -> catalog.dropNamespace(tbl1.namespace()))
+ assertThatThrownBy(() -> catalog.dropNamespace(tbl1.namespace()))
.isInstanceOf(NamespaceNotEmptyException.class)
.hasMessage("Namespace db.ns1.ns2 is not empty. 2 tables exist.");
- Assertions.assertThatThrownBy(() -> catalog.dropNamespace(tbl2.namespace()))
+ assertThatThrownBy(() -> catalog.dropNamespace(tbl2.namespace()))
.isInstanceOf(NamespaceNotEmptyException.class)
.hasMessage("Namespace db.ns1 is not empty. 1 tables exist.");
- Assertions.assertThatThrownBy(() -> catalog.dropNamespace(tbl4.namespace()))
+ assertThatThrownBy(() -> catalog.dropNamespace(tbl4.namespace()))
.isInstanceOf(NamespaceNotEmptyException.class)
.hasMessage("Namespace db is not empty. 1 tables exist.");
}
@@ -915,12 +911,12 @@
try (JdbcCatalog jdbcCatalog = initCatalog("non_strict_jdbc_catalog", ImmutableMap.of())) {
Namespace namespace = Namespace.of("test\\D_b%", "ns1", "ns2");
TableIdentifier identifier = TableIdentifier.of(namespace, "someTable");
- Assertions.assertThat(jdbcCatalog.namespaceExists(namespace)).isFalse();
- Assertions.assertThat(jdbcCatalog.tableExists(identifier)).isFalse();
+ assertThat(jdbcCatalog.namespaceExists(namespace)).isFalse();
+ assertThat(jdbcCatalog.tableExists(identifier)).isFalse();
// default=non-strict mode allows creating a table in a non-existing namespace
jdbcCatalog.createTable(identifier, SCHEMA, PARTITION_SPEC);
- Assertions.assertThat(jdbcCatalog.loadTable(identifier)).isNotNull();
+ assertThat(jdbcCatalog.loadTable(identifier)).isNotNull();
}
}
@@ -931,20 +927,19 @@
"strict_jdbc_catalog", ImmutableMap.of(JdbcUtil.STRICT_MODE_PROPERTY, "true"))) {
Namespace namespace = Namespace.of("testDb", "ns1", "ns2");
TableIdentifier identifier = TableIdentifier.of(namespace, "someTable");
- Assertions.assertThat(jdbcCatalog.namespaceExists(namespace)).isFalse();
- Assertions.assertThat(jdbcCatalog.tableExists(identifier)).isFalse();
- Assertions.assertThatThrownBy(
- () -> jdbcCatalog.createTable(identifier, SCHEMA, PARTITION_SPEC))
+ assertThat(jdbcCatalog.namespaceExists(namespace)).isFalse();
+ assertThat(jdbcCatalog.tableExists(identifier)).isFalse();
+ assertThatThrownBy(() -> jdbcCatalog.createTable(identifier, SCHEMA, PARTITION_SPEC))
.isInstanceOf(NoSuchNamespaceException.class)
.hasMessage(
"Cannot create table testDb.ns1.ns2.someTable in catalog strict_jdbc_catalog. Namespace testDb.ns1.ns2 does not exist");
- Assertions.assertThat(jdbcCatalog.tableExists(identifier)).isFalse();
+ assertThat(jdbcCatalog.tableExists(identifier)).isFalse();
jdbcCatalog.createNamespace(namespace);
- Assertions.assertThat(jdbcCatalog.tableExists(identifier)).isFalse();
+ assertThat(jdbcCatalog.tableExists(identifier)).isFalse();
jdbcCatalog.createTable(identifier, SCHEMA, PARTITION_SPEC);
- Assertions.assertThat(jdbcCatalog.loadTable(identifier)).isNotNull();
+ assertThat(jdbcCatalog.loadTable(identifier)).isNotNull();
}
}
@@ -967,7 +962,7 @@
Map<String, String> testMetadata = ImmutableMap.of();
catalog.createNamespace(testNamespace, testMetadata);
- Assertions.assertThat(catalog.loadNamespaceMetadata(testNamespace)).containsKey("location");
+ assertThat(catalog.loadNamespaceMetadata(testNamespace)).containsKey("location");
}
@Test
@@ -979,7 +974,7 @@
Map<String, String> testMetadata = ImmutableMap.of("location", namespaceLocation);
catalog.createNamespace(testNamespace, testMetadata);
- Assertions.assertThat(catalog.loadNamespaceMetadata(testNamespace))
+ assertThat(catalog.loadNamespaceMetadata(testNamespace))
.containsEntry("location", namespaceLocation);
}
@@ -1073,14 +1068,14 @@
.build())
.commit();
try (CloseableIterable<FileScanTask> tasks = table.newScan().planFiles()) {
- Assertions.assertThat(tasks.iterator()).hasNext();
+ assertThat(tasks.iterator()).hasNext();
}
} finally {
catalogWithCustomReporter.dropTable(TABLE);
}
// counter of custom metrics reporter should have been increased
// 1x for commit metrics / 1x for scan metrics
- Assertions.assertThat(CustomMetricsReporter.COUNTER.get()).isEqualTo(2);
+ assertThat(CustomMetricsReporter.COUNTER.get()).isEqualTo(2);
}
@Test
diff --git a/core/src/test/java/org/apache/iceberg/metrics/TestCommitReportParser.java b/core/src/test/java/org/apache/iceberg/metrics/TestCommitReportParser.java
index 8c018e1..dc74503 100644
--- a/core/src/test/java/org/apache/iceberg/metrics/TestCommitReportParser.java
+++ b/core/src/test/java/org/apache/iceberg/metrics/TestCommitReportParser.java
@@ -18,53 +18,54 @@
*/
package org.apache.iceberg.metrics;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
+
import com.fasterxml.jackson.databind.JsonNode;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import org.apache.iceberg.SnapshotSummary;
import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
public class TestCommitReportParser {
@Test
public void nullCommitReport() {
- Assertions.assertThatThrownBy(() -> CommitReportParser.fromJson((JsonNode) null))
+ assertThatThrownBy(() -> CommitReportParser.fromJson((JsonNode) null))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse commit report from null object");
- Assertions.assertThatThrownBy(() -> CommitReportParser.toJson(null))
+ assertThatThrownBy(() -> CommitReportParser.toJson(null))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Invalid commit report: null");
}
@Test
public void missingFields() {
- Assertions.assertThatThrownBy(() -> CommitReportParser.fromJson("{}"))
+ assertThatThrownBy(() -> CommitReportParser.fromJson("{}"))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse missing string: table-name");
- Assertions.assertThatThrownBy(
- () -> CommitReportParser.fromJson("{\"table-name\":\"roundTripTableName\"}"))
+ assertThatThrownBy(() -> CommitReportParser.fromJson("{\"table-name\":\"roundTripTableName\"}"))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse missing long: snapshot-id");
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() ->
CommitReportParser.fromJson(
"{\"table-name\":\"roundTripTableName\",\"snapshot-id\":23}"))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse missing long: sequence-number");
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() ->
CommitReportParser.fromJson(
"{\"table-name\":\"roundTripTableName\",\"snapshot-id\":23,\"sequence-number\":24}"))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse missing string: operation");
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() ->
CommitReportParser.fromJson(
"{\"table-name\":\"roundTripTableName\",\"snapshot-id\":23,\"sequence-number\":24, \"operation\": \"DELETE\"}"))
@@ -74,14 +75,14 @@
@Test
public void invalidTableName() {
- Assertions.assertThatThrownBy(() -> CommitReportParser.fromJson("{\"table-name\":23}"))
+ assertThatThrownBy(() -> CommitReportParser.fromJson("{\"table-name\":23}"))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse to a string value: table-name: 23");
}
@Test
public void invalidSnapshotId() {
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() ->
CommitReportParser.fromJson(
"{\"table-name\":\"roundTripTableName\",\"snapshot-id\":\"invalid\"}"))
@@ -239,8 +240,8 @@
+ "}";
String json = CommitReportParser.toJson(commitReport, true);
- Assertions.assertThat(CommitReportParser.fromJson(json)).isEqualTo(commitReport);
- Assertions.assertThat(json).isEqualTo(expectedJson);
+ assertThat(CommitReportParser.fromJson(json)).isEqualTo(commitReport);
+ assertThat(json).isEqualTo(expectedJson);
}
@Test
@@ -265,8 +266,8 @@
+ "}";
String json = CommitReportParser.toJson(commitReport, true);
- Assertions.assertThat(CommitReportParser.fromJson(json)).isEqualTo(commitReport);
- Assertions.assertThat(json).isEqualTo(expectedJson);
+ assertThat(CommitReportParser.fromJson(json)).isEqualTo(commitReport);
+ assertThat(json).isEqualTo(expectedJson);
}
@Test
@@ -296,7 +297,7 @@
+ "}";
String json = CommitReportParser.toJson(commitReport, true);
- Assertions.assertThat(CommitReportParser.fromJson(json)).isEqualTo(commitReport);
- Assertions.assertThat(json).isEqualTo(expectedJson);
+ assertThat(CommitReportParser.fromJson(json)).isEqualTo(commitReport);
+ assertThat(json).isEqualTo(expectedJson);
}
}
diff --git a/core/src/test/java/org/apache/iceberg/metrics/TestCounterResultParser.java b/core/src/test/java/org/apache/iceberg/metrics/TestCounterResultParser.java
index ea936a0..89808c5 100644
--- a/core/src/test/java/org/apache/iceberg/metrics/TestCounterResultParser.java
+++ b/core/src/test/java/org/apache/iceberg/metrics/TestCounterResultParser.java
@@ -18,53 +18,54 @@
*/
package org.apache.iceberg.metrics;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
+
import com.fasterxml.jackson.databind.JsonNode;
import org.apache.iceberg.metrics.MetricsContext.Unit;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
public class TestCounterResultParser {
@Test
public void nullCounter() {
- Assertions.assertThatThrownBy(() -> CounterResultParser.fromJson((JsonNode) null))
+ assertThatThrownBy(() -> CounterResultParser.fromJson((JsonNode) null))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse counter from null object");
- Assertions.assertThatThrownBy(() -> CounterResultParser.toJson(null))
+ assertThatThrownBy(() -> CounterResultParser.toJson(null))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Invalid counter: null");
}
@Test
public void missingFields() {
- Assertions.assertThatThrownBy(() -> CounterResultParser.fromJson("{}"))
+ assertThatThrownBy(() -> CounterResultParser.fromJson("{}"))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse missing string: unit");
- Assertions.assertThatThrownBy(() -> CounterResultParser.fromJson("{\"unit\":\"bytes\"}"))
+ assertThatThrownBy(() -> CounterResultParser.fromJson("{\"unit\":\"bytes\"}"))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse missing long: value");
}
@Test
public void extraFields() {
- Assertions.assertThat(
+ assertThat(
CounterResultParser.fromJson("{\"unit\":\"bytes\",\"value\":23,\"extra\": \"value\"}"))
.isEqualTo(CounterResult.of(Unit.BYTES, 23L));
}
@Test
public void unsupportedUnit() {
- Assertions.assertThatThrownBy(
- () -> CounterResultParser.fromJson("{\"unit\":\"unknown\",\"value\":23}"))
+ assertThatThrownBy(() -> CounterResultParser.fromJson("{\"unit\":\"unknown\",\"value\":23}"))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Invalid unit: unknown");
}
@Test
public void invalidValue() {
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() -> CounterResultParser.fromJson("{\"unit\":\"count\",\"value\":\"illegal\"}"))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse to a long value: value: \"illegal\"");
@@ -75,7 +76,7 @@
CounterResult counter = CounterResult.of(Unit.BYTES, Long.MAX_VALUE);
String json = CounterResultParser.toJson(counter);
- Assertions.assertThat(CounterResultParser.fromJson(json)).isEqualTo(counter);
- Assertions.assertThat(json).isEqualTo("{\"unit\":\"bytes\",\"value\":9223372036854775807}");
+ assertThat(CounterResultParser.fromJson(json)).isEqualTo(counter);
+ assertThat(json).isEqualTo("{\"unit\":\"bytes\",\"value\":9223372036854775807}");
}
}
diff --git a/core/src/test/java/org/apache/iceberg/metrics/TestScanMetricsResultParser.java b/core/src/test/java/org/apache/iceberg/metrics/TestScanMetricsResultParser.java
index 2daa2b4..44d5803 100644
--- a/core/src/test/java/org/apache/iceberg/metrics/TestScanMetricsResultParser.java
+++ b/core/src/test/java/org/apache/iceberg/metrics/TestScanMetricsResultParser.java
@@ -18,22 +18,24 @@
*/
package org.apache.iceberg.metrics;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
+
import com.fasterxml.jackson.databind.JsonNode;
import java.time.Duration;
import java.util.concurrent.TimeUnit;
import org.apache.iceberg.metrics.MetricsContext.Unit;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
public class TestScanMetricsResultParser {
@Test
public void nullMetrics() {
- Assertions.assertThatThrownBy(() -> ScanMetricsResultParser.fromJson((JsonNode) null))
+ assertThatThrownBy(() -> ScanMetricsResultParser.fromJson((JsonNode) null))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse scan metrics from null object");
- Assertions.assertThatThrownBy(() -> ScanMetricsResultParser.toJson(null))
+ assertThatThrownBy(() -> ScanMetricsResultParser.toJson(null))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Invalid scan metrics: null");
}
@@ -41,27 +43,27 @@
@SuppressWarnings("MethodLength")
@Test
public void missingFields() {
- Assertions.assertThat(ScanMetricsResultParser.fromJson("{}"))
+ assertThat(ScanMetricsResultParser.fromJson("{}"))
.isEqualTo(ImmutableScanMetricsResult.builder().build());
ImmutableScanMetricsResult scanMetricsResult =
ImmutableScanMetricsResult.builder()
.totalPlanningDuration(TimerResult.of(TimeUnit.HOURS, Duration.ofHours(10), 3L))
.build();
- Assertions.assertThat(
+ assertThat(
ScanMetricsResultParser.fromJson(
"{\"total-planning-duration\":{\"count\":3,\"time-unit\":\"hours\",\"total-duration\":10}}"))
.isEqualTo(scanMetricsResult);
scanMetricsResult = scanMetricsResult.withResultDataFiles(CounterResult.of(Unit.COUNT, 5L));
- Assertions.assertThat(
+ assertThat(
ScanMetricsResultParser.fromJson(
"{\"total-planning-duration\":{\"count\":3,\"time-unit\":\"hours\",\"total-duration\":10},"
+ "\"result-data-files\":{\"unit\":\"count\",\"value\":5}}"))
.isEqualTo(scanMetricsResult);
scanMetricsResult = scanMetricsResult.withResultDeleteFiles(CounterResult.of(Unit.COUNT, 5L));
- Assertions.assertThat(
+ assertThat(
ScanMetricsResultParser.fromJson(
"{\"total-planning-duration\":{\"count\":3,\"time-unit\":\"hours\",\"total-duration\":10},"
+ "\"result-data-files\":{\"unit\":\"count\",\"value\":5},"
@@ -69,7 +71,7 @@
.isEqualTo(scanMetricsResult);
scanMetricsResult = scanMetricsResult.withTotalDataManifests(CounterResult.of(Unit.COUNT, 5L));
- Assertions.assertThat(
+ assertThat(
ScanMetricsResultParser.fromJson(
"{\"total-planning-duration\":{\"count\":3,\"time-unit\":\"hours\",\"total-duration\":10},"
+ "\"result-data-files\":{\"unit\":\"count\",\"value\":5},"
@@ -79,7 +81,7 @@
scanMetricsResult =
scanMetricsResult.withTotalDeleteManifests(CounterResult.of(Unit.COUNT, 0L));
- Assertions.assertThat(
+ assertThat(
ScanMetricsResultParser.fromJson(
"{\"total-planning-duration\":{\"count\":3,\"time-unit\":\"hours\",\"total-duration\":10},"
+ "\"result-data-files\":{\"unit\":\"count\",\"value\":5},"
@@ -90,7 +92,7 @@
scanMetricsResult =
scanMetricsResult.withScannedDataManifests(CounterResult.of(Unit.COUNT, 5L));
- Assertions.assertThat(
+ assertThat(
ScanMetricsResultParser.fromJson(
"{\"total-planning-duration\":{\"count\":3,\"time-unit\":\"hours\",\"total-duration\":10},"
+ "\"result-data-files\":{\"unit\":\"count\",\"value\":5},"
@@ -102,7 +104,7 @@
scanMetricsResult =
scanMetricsResult.withSkippedDataManifests(CounterResult.of(Unit.COUNT, 5L));
- Assertions.assertThat(
+ assertThat(
ScanMetricsResultParser.fromJson(
"{\"total-planning-duration\":{\"count\":3,\"time-unit\":\"hours\",\"total-duration\":10},"
+ "\"result-data-files\":{\"unit\":\"count\",\"value\":5},"
@@ -115,7 +117,7 @@
scanMetricsResult =
scanMetricsResult.withTotalFileSizeInBytes(CounterResult.of(Unit.BYTES, 1069L));
- Assertions.assertThat(
+ assertThat(
ScanMetricsResultParser.fromJson(
"{\"total-planning-duration\":{\"count\":3,\"time-unit\":\"hours\",\"total-duration\":10},"
+ "\"result-data-files\":{\"unit\":\"count\",\"value\":5},"
@@ -129,7 +131,7 @@
scanMetricsResult =
scanMetricsResult.withTotalDeleteFileSizeInBytes(CounterResult.of(Unit.BYTES, 1023L));
- Assertions.assertThat(
+ assertThat(
ScanMetricsResultParser.fromJson(
"{\"total-planning-duration\":{\"count\":3,\"time-unit\":\"hours\",\"total-duration\":10},"
+ "\"result-data-files\":{\"unit\":\"count\",\"value\":5},"
@@ -143,7 +145,7 @@
.isEqualTo(scanMetricsResult);
scanMetricsResult = scanMetricsResult.withSkippedDataFiles(CounterResult.of(Unit.COUNT, 23L));
- Assertions.assertThat(
+ assertThat(
ScanMetricsResultParser.fromJson(
"{\"total-planning-duration\":{\"count\":3,\"time-unit\":\"hours\",\"total-duration\":10},"
+ "\"result-data-files\":{\"unit\":\"count\",\"value\":5},"
@@ -179,7 +181,7 @@
scanMetrics.equalityDeleteFiles().increment(4L);
ScanMetricsResult scanMetricsResult = ScanMetricsResult.fromScanMetrics(scanMetrics);
- Assertions.assertThat(
+ assertThat(
ScanMetricsResultParser.fromJson(
"{\"total-planning-duration\":{\"count\":1,\"time-unit\":\"nanoseconds\",\"total-duration\":600000000000},"
+ "\"result-data-files\":{\"unit\":\"count\",\"value\":5},"
@@ -203,7 +205,7 @@
@Test
public void invalidTimer() {
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() ->
ScanMetricsResultParser.fromJson(
"{\"total-planning-duration\":{\"unit\":\"count\",\"value\":5}}"))
@@ -213,7 +215,7 @@
@Test
public void invalidCounter() {
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() ->
ScanMetricsResultParser.fromJson(
"{\"total-planning-duration\":{\"count\":1,\"time-unit\":\"nanoseconds\",\"total-duration\":600000000000},"
@@ -314,8 +316,8 @@
+ "}";
String json = ScanMetricsResultParser.toJson(scanMetricsResult, true);
- Assertions.assertThat(ScanMetricsResultParser.fromJson(json)).isEqualTo(scanMetricsResult);
- Assertions.assertThat(json).isEqualTo(expectedJson);
+ assertThat(ScanMetricsResultParser.fromJson(json)).isEqualTo(scanMetricsResult);
+ assertThat(json).isEqualTo(expectedJson);
}
@Test
@@ -324,8 +326,8 @@
String expectedJson = "{ }";
String json = ScanMetricsResultParser.toJson(scanMetricsResult, true);
- Assertions.assertThat(json).isEqualTo(expectedJson);
- Assertions.assertThat(ScanMetricsResultParser.fromJson(json))
+ assertThat(json).isEqualTo(expectedJson);
+ assertThat(ScanMetricsResultParser.fromJson(json))
.isEqualTo(ImmutableScanMetricsResult.builder().build());
}
}
diff --git a/core/src/test/java/org/apache/iceberg/metrics/TestScanReport.java b/core/src/test/java/org/apache/iceberg/metrics/TestScanReport.java
index caaf1bc..89256cb 100644
--- a/core/src/test/java/org/apache/iceberg/metrics/TestScanReport.java
+++ b/core/src/test/java/org/apache/iceberg/metrics/TestScanReport.java
@@ -18,29 +18,31 @@
*/
package org.apache.iceberg.metrics;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
+
import java.time.Duration;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.TimeUnit;
import org.apache.iceberg.expressions.Expressions;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
public class TestScanReport {
@Test
public void missingFields() {
- Assertions.assertThatThrownBy(() -> ImmutableScanReport.builder().build())
+ assertThatThrownBy(() -> ImmutableScanReport.builder().build())
.isInstanceOf(IllegalStateException.class)
.hasMessage(
"Cannot build ScanReport, some of required attributes are not set [tableName, snapshotId, filter, schemaId, scanMetrics]");
- Assertions.assertThatThrownBy(() -> ImmutableScanReport.builder().tableName("x").build())
+ assertThatThrownBy(() -> ImmutableScanReport.builder().tableName("x").build())
.isInstanceOf(IllegalStateException.class)
.hasMessage(
"Cannot build ScanReport, some of required attributes are not set [snapshotId, filter, schemaId, scanMetrics]");
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() ->
ImmutableScanReport.builder()
.tableName("x")
@@ -50,7 +52,7 @@
.hasMessage(
"Cannot build ScanReport, some of required attributes are not set [snapshotId, schemaId, scanMetrics]");
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() ->
ImmutableScanReport.builder()
.tableName("x")
@@ -61,7 +63,7 @@
.hasMessage(
"Cannot build ScanReport, some of required attributes are not set [schemaId, scanMetrics]");
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() ->
ImmutableScanReport.builder()
.tableName("x")
@@ -93,21 +95,21 @@
.scanMetrics(ScanMetricsResult.fromScanMetrics(ScanMetrics.noop()))
.build();
- Assertions.assertThat(scanReport.tableName()).isEqualTo(tableName);
- Assertions.assertThat(scanReport.schemaId()).isEqualTo(schemaId);
- Assertions.assertThat(scanReport.projectedFieldIds()).isEqualTo(fieldIds);
- Assertions.assertThat(scanReport.projectedFieldNames()).isEqualTo(fieldNames);
- Assertions.assertThat(scanReport.filter()).isEqualTo(Expressions.alwaysTrue());
- Assertions.assertThat(scanReport.snapshotId()).isEqualTo(23L);
- Assertions.assertThat(scanReport.scanMetrics().totalPlanningDuration()).isNull();
- Assertions.assertThat(scanReport.scanMetrics().resultDataFiles()).isNull();
- Assertions.assertThat(scanReport.scanMetrics().resultDeleteFiles()).isNull();
- Assertions.assertThat(scanReport.scanMetrics().totalDataManifests()).isNull();
- Assertions.assertThat(scanReport.scanMetrics().totalDeleteManifests()).isNull();
- Assertions.assertThat(scanReport.scanMetrics().scannedDataManifests()).isNull();
- Assertions.assertThat(scanReport.scanMetrics().skippedDataManifests()).isNull();
- Assertions.assertThat(scanReport.scanMetrics().totalFileSizeInBytes()).isNull();
- Assertions.assertThat(scanReport.scanMetrics().totalDeleteFileSizeInBytes()).isNull();
+ assertThat(scanReport.tableName()).isEqualTo(tableName);
+ assertThat(scanReport.schemaId()).isEqualTo(schemaId);
+ assertThat(scanReport.projectedFieldIds()).isEqualTo(fieldIds);
+ assertThat(scanReport.projectedFieldNames()).isEqualTo(fieldNames);
+ assertThat(scanReport.filter()).isEqualTo(Expressions.alwaysTrue());
+ assertThat(scanReport.snapshotId()).isEqualTo(23L);
+ assertThat(scanReport.scanMetrics().totalPlanningDuration()).isNull();
+ assertThat(scanReport.scanMetrics().resultDataFiles()).isNull();
+ assertThat(scanReport.scanMetrics().resultDeleteFiles()).isNull();
+ assertThat(scanReport.scanMetrics().totalDataManifests()).isNull();
+ assertThat(scanReport.scanMetrics().totalDeleteManifests()).isNull();
+ assertThat(scanReport.scanMetrics().scannedDataManifests()).isNull();
+ assertThat(scanReport.scanMetrics().skippedDataManifests()).isNull();
+ assertThat(scanReport.scanMetrics().totalFileSizeInBytes()).isNull();
+ assertThat(scanReport.scanMetrics().totalDeleteFileSizeInBytes()).isNull();
}
@Test
@@ -136,24 +138,24 @@
.scanMetrics(ScanMetricsResult.fromScanMetrics(scanMetrics))
.build();
- Assertions.assertThat(scanReport.tableName()).isEqualTo(tableName);
- Assertions.assertThat(scanReport.schemaId()).isEqualTo(schemaId);
- Assertions.assertThat(scanReport.projectedFieldIds()).isEqualTo(fieldIds);
- Assertions.assertThat(scanReport.projectedFieldNames()).isEqualTo(fieldNames);
- Assertions.assertThat(scanReport.filter()).isEqualTo(Expressions.alwaysTrue());
- Assertions.assertThat(scanReport.snapshotId()).isEqualTo(23L);
- Assertions.assertThat(scanReport.scanMetrics().totalPlanningDuration().totalDuration())
+ assertThat(scanReport.tableName()).isEqualTo(tableName);
+ assertThat(scanReport.schemaId()).isEqualTo(schemaId);
+ assertThat(scanReport.projectedFieldIds()).isEqualTo(fieldIds);
+ assertThat(scanReport.projectedFieldNames()).isEqualTo(fieldNames);
+ assertThat(scanReport.filter()).isEqualTo(Expressions.alwaysTrue());
+ assertThat(scanReport.snapshotId()).isEqualTo(23L);
+ assertThat(scanReport.scanMetrics().totalPlanningDuration().totalDuration())
.isEqualTo(Duration.ofMinutes(10L));
- Assertions.assertThat(scanReport.scanMetrics().resultDataFiles().value()).isEqualTo(5);
- Assertions.assertThat(scanReport.scanMetrics().resultDeleteFiles().value()).isEqualTo(5);
- Assertions.assertThat(scanReport.scanMetrics().scannedDataManifests().value()).isEqualTo(5);
- Assertions.assertThat(scanReport.scanMetrics().totalDataManifests().value()).isEqualTo(5);
- Assertions.assertThat(scanReport.scanMetrics().totalFileSizeInBytes().value()).isEqualTo(1024L);
+ assertThat(scanReport.scanMetrics().resultDataFiles().value()).isEqualTo(5);
+ assertThat(scanReport.scanMetrics().resultDeleteFiles().value()).isEqualTo(5);
+ assertThat(scanReport.scanMetrics().scannedDataManifests().value()).isEqualTo(5);
+ assertThat(scanReport.scanMetrics().totalDataManifests().value()).isEqualTo(5);
+ assertThat(scanReport.scanMetrics().totalFileSizeInBytes().value()).isEqualTo(1024L);
}
@Test
public void nullScanMetrics() {
- Assertions.assertThatThrownBy(() -> ScanMetrics.of(null))
+ assertThatThrownBy(() -> ScanMetrics.of(null))
.isInstanceOf(NullPointerException.class)
.hasMessage("metricsContext");
}
diff --git a/core/src/test/java/org/apache/iceberg/metrics/TestScanReportParser.java b/core/src/test/java/org/apache/iceberg/metrics/TestScanReportParser.java
index df12660..51e21ad 100644
--- a/core/src/test/java/org/apache/iceberg/metrics/TestScanReportParser.java
+++ b/core/src/test/java/org/apache/iceberg/metrics/TestScanReportParser.java
@@ -18,45 +18,46 @@
*/
package org.apache.iceberg.metrics;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
+
import com.fasterxml.jackson.databind.JsonNode;
import java.util.concurrent.TimeUnit;
import org.apache.iceberg.expressions.Expressions;
import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
public class TestScanReportParser {
@Test
public void nullScanReport() {
- Assertions.assertThatThrownBy(() -> ScanReportParser.fromJson((JsonNode) null))
+ assertThatThrownBy(() -> ScanReportParser.fromJson((JsonNode) null))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse scan report from null object");
- Assertions.assertThatThrownBy(() -> ScanReportParser.toJson(null))
+ assertThatThrownBy(() -> ScanReportParser.toJson(null))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Invalid scan report: null");
}
@Test
public void missingFields() {
- Assertions.assertThatThrownBy(() -> ScanReportParser.fromJson("{}"))
+ assertThatThrownBy(() -> ScanReportParser.fromJson("{}"))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse missing string: table-name");
- Assertions.assertThatThrownBy(
- () -> ScanReportParser.fromJson("{\"table-name\":\"roundTripTableName\"}"))
+ assertThatThrownBy(() -> ScanReportParser.fromJson("{\"table-name\":\"roundTripTableName\"}"))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse missing long: snapshot-id");
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() ->
ScanReportParser.fromJson(
"{\"table-name\":\"roundTripTableName\",\"snapshot-id\":23,\"filter\":true}"))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse missing int: schema-id");
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() ->
ScanReportParser.fromJson(
"{\"table-name\":\"roundTripTableName\",\"snapshot-id\":23,\"filter\":true,"
@@ -97,7 +98,7 @@
.scanMetrics(ScanMetricsResult.fromScanMetrics(scanMetrics))
.build();
- Assertions.assertThat(
+ assertThat(
ScanReportParser.fromJson(
"{\"table-name\":\"roundTripTableName\",\"snapshot-id\":23,"
+ "\"filter\":true,\"schema-id\": 4,\"projected-field-ids\": [ 1, 2, 3 ],\"projected-field-names\": [ \"c1\", \"c2\", \"c3\" ],"
@@ -124,14 +125,14 @@
@Test
public void invalidTableName() {
- Assertions.assertThatThrownBy(() -> ScanReportParser.fromJson("{\"table-name\":23}"))
+ assertThatThrownBy(() -> ScanReportParser.fromJson("{\"table-name\":23}"))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse to a string value: table-name: 23");
}
@Test
public void invalidSnapshotId() {
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() ->
ScanReportParser.fromJson(
"{\"table-name\":\"roundTripTableName\",\"snapshot-id\":\"invalid\"}"))
@@ -141,7 +142,7 @@
@Test
public void invalidExpressionFilter() {
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() ->
ScanReportParser.fromJson(
"{\"table-name\":\"roundTripTableName\",\"snapshot-id\":23,\"filter\":23,\"projection\":23}"))
@@ -151,21 +152,21 @@
@Test
public void invalidSchema() {
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() ->
ScanReportParser.fromJson(
"{\"table-name\":\"roundTripTableName\",\"snapshot-id\":23,\"filter\":true,\"schema-id\":\"23\"}"))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse to an integer value: schema-id: \"23\"");
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() ->
ScanReportParser.fromJson(
"{\"table-name\":\"roundTripTableName\",\"snapshot-id\":23,\"filter\":true,\"schema-id\":23,\"projected-field-ids\": [\"1\"],\"metrics\":{}}"))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse integer from non-int value in projected-field-ids: \"1\"");
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() ->
ScanReportParser.fromJson(
"{\"table-name\":\"roundTripTableName\",\"snapshot-id\":23,\"filter\":true,\"schema-id\":23,\"projected-field-ids\": [1],\"projected-field-names\": [1],\"metrics\":{}}"))
@@ -283,8 +284,8 @@
+ "}";
String json = ScanReportParser.toJson(scanReport, true);
- Assertions.assertThat(ScanReportParser.fromJson(json)).isEqualTo(scanReport);
- Assertions.assertThat(json).isEqualTo(expectedJson);
+ assertThat(ScanReportParser.fromJson(json)).isEqualTo(scanReport);
+ assertThat(json).isEqualTo(expectedJson);
}
@Test
@@ -313,8 +314,8 @@
+ "}";
String json = ScanReportParser.toJson(scanReport, true);
- Assertions.assertThat(ScanReportParser.fromJson(json)).isEqualTo(scanReport);
- Assertions.assertThat(json).isEqualTo(expectedJson);
+ assertThat(ScanReportParser.fromJson(json)).isEqualTo(scanReport);
+ assertThat(json).isEqualTo(expectedJson);
}
@Test
@@ -341,8 +342,8 @@
+ "}";
String json = ScanReportParser.toJson(scanReport, true);
- Assertions.assertThat(ScanReportParser.fromJson(json)).isEqualTo(scanReport);
- Assertions.assertThat(json).isEqualTo(expectedJson);
+ assertThat(ScanReportParser.fromJson(json)).isEqualTo(scanReport);
+ assertThat(json).isEqualTo(expectedJson);
}
@Test
@@ -374,7 +375,7 @@
+ "}";
String json = ScanReportParser.toJson(scanReport, true);
- Assertions.assertThat(ScanReportParser.fromJson(json)).isEqualTo(scanReport);
- Assertions.assertThat(json).isEqualTo(expectedJson);
+ assertThat(ScanReportParser.fromJson(json)).isEqualTo(scanReport);
+ assertThat(json).isEqualTo(expectedJson);
}
}
diff --git a/core/src/test/java/org/apache/iceberg/metrics/TestTimerResultParser.java b/core/src/test/java/org/apache/iceberg/metrics/TestTimerResultParser.java
index 3bbf2b8..1bcb249 100644
--- a/core/src/test/java/org/apache/iceberg/metrics/TestTimerResultParser.java
+++ b/core/src/test/java/org/apache/iceberg/metrics/TestTimerResultParser.java
@@ -18,45 +18,46 @@
*/
package org.apache.iceberg.metrics;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
+
import com.fasterxml.jackson.databind.JsonNode;
import java.time.Duration;
import java.time.temporal.ChronoUnit;
import java.util.concurrent.TimeUnit;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
public class TestTimerResultParser {
@Test
public void nullTimer() {
- Assertions.assertThatThrownBy(() -> TimerResultParser.fromJson((JsonNode) null))
+ assertThatThrownBy(() -> TimerResultParser.fromJson((JsonNode) null))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse timer from null object");
- Assertions.assertThatThrownBy(() -> TimerResultParser.toJson(null))
+ assertThatThrownBy(() -> TimerResultParser.toJson(null))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Invalid timer: null");
}
@Test
public void missingFields() {
- Assertions.assertThatThrownBy(() -> TimerResultParser.fromJson("{}"))
+ assertThatThrownBy(() -> TimerResultParser.fromJson("{}"))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse missing long: count");
- Assertions.assertThatThrownBy(() -> TimerResultParser.fromJson("{\"count\":44}"))
+ assertThatThrownBy(() -> TimerResultParser.fromJson("{\"count\":44}"))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse missing string: time-unit");
- Assertions.assertThatThrownBy(
- () -> TimerResultParser.fromJson("{\"count\":44,\"time-unit\":\"hours\"}"))
+ assertThatThrownBy(() -> TimerResultParser.fromJson("{\"count\":44,\"time-unit\":\"hours\"}"))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse missing long: total-duration");
}
@Test
public void extraFields() {
- Assertions.assertThat(
+ assertThat(
TimerResultParser.fromJson(
"{\"count\":44,\"time-unit\":\"hours\",\"total-duration\":24,\"extra\": \"value\"}"))
.isEqualTo(TimerResult.of(TimeUnit.HOURS, Duration.ofHours(24), 44));
@@ -64,7 +65,7 @@
@Test
public void unsupportedDuration() {
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() ->
TimerResultParser.fromJson(
"{\"count\":44,\"time-unit\":\"hours\",\"total-duration\":\"xx\"}"))
@@ -74,7 +75,7 @@
@Test
public void unsupportedTimeUnit() {
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() ->
TimerResultParser.fromJson(
"{\"count\":44,\"time-unit\":\"unknown\",\"total-duration\":24}"))
@@ -84,7 +85,7 @@
@Test
public void invalidCount() {
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() ->
TimerResultParser.fromJson(
"{\"count\":\"illegal\",\"time-unit\":\"hours\",\"total-duration\":24}"))
@@ -97,48 +98,41 @@
TimerResult timer = TimerResult.of(TimeUnit.HOURS, Duration.ofHours(23), 44);
String json = TimerResultParser.toJson(timer);
- Assertions.assertThat(TimerResultParser.fromJson(json)).isEqualTo(timer);
- Assertions.assertThat(json)
- .isEqualTo("{\"count\":44,\"time-unit\":\"hours\",\"total-duration\":23}");
+ assertThat(TimerResultParser.fromJson(json)).isEqualTo(timer);
+ assertThat(json).isEqualTo("{\"count\":44,\"time-unit\":\"hours\",\"total-duration\":23}");
}
@Test
public void toDuration() {
- Assertions.assertThat(TimerResultParser.toDuration(5L, TimeUnit.NANOSECONDS))
+ assertThat(TimerResultParser.toDuration(5L, TimeUnit.NANOSECONDS))
.isEqualTo(Duration.ofNanos(5L));
- Assertions.assertThat(TimerResultParser.toDuration(5L, TimeUnit.MICROSECONDS))
+ assertThat(TimerResultParser.toDuration(5L, TimeUnit.MICROSECONDS))
.isEqualTo(Duration.of(5L, ChronoUnit.MICROS));
- Assertions.assertThat(TimerResultParser.toDuration(5L, TimeUnit.MILLISECONDS))
+ assertThat(TimerResultParser.toDuration(5L, TimeUnit.MILLISECONDS))
.isEqualTo(Duration.ofMillis(5L));
- Assertions.assertThat(TimerResultParser.toDuration(5L, TimeUnit.SECONDS))
+ assertThat(TimerResultParser.toDuration(5L, TimeUnit.SECONDS))
.isEqualTo(Duration.ofSeconds(5L));
- Assertions.assertThat(TimerResultParser.toDuration(5L, TimeUnit.MINUTES))
+ assertThat(TimerResultParser.toDuration(5L, TimeUnit.MINUTES))
.isEqualTo(Duration.ofMinutes(5L));
- Assertions.assertThat(TimerResultParser.toDuration(5L, TimeUnit.HOURS))
- .isEqualTo(Duration.ofHours(5L));
- Assertions.assertThat(TimerResultParser.toDuration(5L, TimeUnit.DAYS))
- .isEqualTo(Duration.ofDays(5L));
+ assertThat(TimerResultParser.toDuration(5L, TimeUnit.HOURS)).isEqualTo(Duration.ofHours(5L));
+ assertThat(TimerResultParser.toDuration(5L, TimeUnit.DAYS)).isEqualTo(Duration.ofDays(5L));
}
@Test
public void fromDuration() {
- Assertions.assertThat(
- TimerResultParser.fromDuration(Duration.ofNanos(5L), TimeUnit.NANOSECONDS))
+ assertThat(TimerResultParser.fromDuration(Duration.ofNanos(5L), TimeUnit.NANOSECONDS))
.isEqualTo(5L);
- Assertions.assertThat(
+ assertThat(
TimerResultParser.fromDuration(
Duration.of(5L, ChronoUnit.MICROS), TimeUnit.MICROSECONDS))
.isEqualTo(5L);
- Assertions.assertThat(
- TimerResultParser.fromDuration(Duration.ofMillis(5L), TimeUnit.MILLISECONDS))
+ assertThat(TimerResultParser.fromDuration(Duration.ofMillis(5L), TimeUnit.MILLISECONDS))
.isEqualTo(5L);
- Assertions.assertThat(TimerResultParser.fromDuration(Duration.ofSeconds(5L), TimeUnit.SECONDS))
+ assertThat(TimerResultParser.fromDuration(Duration.ofSeconds(5L), TimeUnit.SECONDS))
.isEqualTo(5L);
- Assertions.assertThat(TimerResultParser.fromDuration(Duration.ofMinutes(5L), TimeUnit.MINUTES))
+ assertThat(TimerResultParser.fromDuration(Duration.ofMinutes(5L), TimeUnit.MINUTES))
.isEqualTo(5L);
- Assertions.assertThat(TimerResultParser.fromDuration(Duration.ofHours(5L), TimeUnit.HOURS))
- .isEqualTo(5L);
- Assertions.assertThat(TimerResultParser.fromDuration(Duration.ofDays(5L), TimeUnit.DAYS))
- .isEqualTo(5L);
+ assertThat(TimerResultParser.fromDuration(Duration.ofHours(5L), TimeUnit.HOURS)).isEqualTo(5L);
+ assertThat(TimerResultParser.fromDuration(Duration.ofDays(5L), TimeUnit.DAYS)).isEqualTo(5L);
}
}
diff --git a/core/src/test/java/org/apache/iceberg/rest/RequestResponseTestBase.java b/core/src/test/java/org/apache/iceberg/rest/RequestResponseTestBase.java
index 4855e9a..9c49db6 100644
--- a/core/src/test/java/org/apache/iceberg/rest/RequestResponseTestBase.java
+++ b/core/src/test/java/org/apache/iceberg/rest/RequestResponseTestBase.java
@@ -18,13 +18,14 @@
*/
package org.apache.iceberg.rest;
+import static org.assertj.core.api.Assertions.assertThat;
+
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.util.Collections;
import java.util.Set;
import org.apache.iceberg.relocated.com.google.common.collect.Sets;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
public abstract class RequestResponseTestBase<T extends RESTMessage> {
@@ -75,13 +76,11 @@
try {
JsonNode node = mapper().readValue(serialize(createExampleInstance()), JsonNode.class);
for (String field : fieldsFromSpec) {
- Assertions.assertThat(node.has(field)).as("Should have field: %s", field).isTrue();
+ assertThat(node.has(field)).as("Should have field: %s", field).isTrue();
}
for (String field : ((Iterable<? extends String>) node::fieldNames)) {
- Assertions.assertThat(fieldsFromSpec)
- .as("Should not have field: %s", field)
- .contains(field);
+ assertThat(fieldsFromSpec).as("Should not have field: %s", field).contains(field);
}
} catch (JsonProcessingException e) {
throw new RuntimeException(e);
@@ -99,6 +98,6 @@
assertEquals(actual, expected);
// Check that the deserialized value serializes back into the original JSON
- Assertions.assertThat(serialize(expected)).isEqualTo(json);
+ assertThat(serialize(expected)).isEqualTo(json);
}
}
diff --git a/core/src/test/java/org/apache/iceberg/rest/TestHTTPClient.java b/core/src/test/java/org/apache/iceberg/rest/TestHTTPClient.java
index ffd10df..1229639 100644
--- a/core/src/test/java/org/apache/iceberg/rest/TestHTTPClient.java
+++ b/core/src/test/java/org/apache/iceberg/rest/TestHTTPClient.java
@@ -19,6 +19,7 @@
package org.apache.iceberg.rest;
import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.mock;
@@ -51,7 +52,6 @@
import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap;
import org.apache.iceberg.rest.responses.ErrorResponse;
import org.apache.iceberg.rest.responses.ErrorResponseParser;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
@@ -154,7 +154,7 @@
@Test
public void testProxyCredentialProviderWithoutProxyServer() {
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() ->
HTTPClient.builder(ImmutableMap.of())
.uri(URI)
@@ -166,7 +166,7 @@
@Test
public void testProxyServerWithNullHostname() {
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() -> HTTPClient.builder(ImmutableMap.of()).uri(URI).withProxy(null, 1070).build())
.isInstanceOf(NullPointerException.class)
.hasMessage("Invalid hostname for http client proxy: null");
@@ -212,7 +212,7 @@
}
};
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() -> clientWithProxy.get("v1/config", Item.class, ImmutableMap.of(), onError))
.isInstanceOf(RuntimeException.class)
.hasMessage(
@@ -270,7 +270,7 @@
.withDelay(TimeUnit.MILLISECONDS, 5000);
mockServer.when(mockRequest).respond(mockResponse);
- Assertions.assertThatThrownBy(() -> client.head(path, ImmutableMap.of(), (unused) -> {}))
+ assertThatThrownBy(() -> client.head(path, ImmutableMap.of(), (unused) -> {}))
.cause()
.isInstanceOf(SocketTimeoutException.class)
.hasMessage("Read timed out");
@@ -281,7 +281,7 @@
@ValueSource(strings = {HTTPClient.REST_CONNECTION_TIMEOUT_MS, HTTPClient.REST_SOCKET_TIMEOUT_MS})
public void testInvalidTimeout(String timeoutMsType) {
String invalidTimeoutMs = "invalidMs";
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() ->
HTTPClient.builder(ImmutableMap.of(timeoutMsType, invalidTimeoutMs))
.uri(URI)
@@ -290,7 +290,7 @@
.hasMessage(String.format("For input string: \"%s\"", invalidTimeoutMs));
String invalidNegativeTimeoutMs = "-1";
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() ->
HTTPClient.builder(ImmutableMap.of(timeoutMsType, invalidNegativeTimeoutMs))
.uri(URI)
@@ -312,7 +312,7 @@
doExecuteRequest(method, path, body, onError, h -> assertThat(h).isNotEmpty());
if (method.usesRequestBody()) {
- Assertions.assertThat(body)
+ assertThat(body)
.as("On a successful " + method + ", the correct response body should be returned")
.isEqualTo(successResponse);
}
@@ -335,7 +335,7 @@
String path = addRequestTestCaseAndGetPath(method, body, statusCode);
- Assertions.assertThatThrownBy(() -> doExecuteRequest(method, path, body, onError, h -> {}))
+ assertThatThrownBy(() -> doExecuteRequest(method, path, body, onError, h -> {}))
.isInstanceOf(RuntimeException.class)
.hasMessage(
String.format(
diff --git a/core/src/test/java/org/apache/iceberg/rest/TestRESTCatalog.java b/core/src/test/java/org/apache/iceberg/rest/TestRESTCatalog.java
index 34d088c..6f79733 100644
--- a/core/src/test/java/org/apache/iceberg/rest/TestRESTCatalog.java
+++ b/core/src/test/java/org/apache/iceberg/rest/TestRESTCatalog.java
@@ -84,7 +84,6 @@
import org.apache.iceberg.rest.responses.LoadTableResponse;
import org.apache.iceberg.rest.responses.OAuthTokenResponse;
import org.apache.iceberg.types.Types;
-import org.assertj.core.api.Assertions;
import org.assertj.core.api.InstanceOfAssertFactories;
import org.awaitility.Awaitility;
import org.eclipse.jetty.server.Server;
@@ -292,15 +291,15 @@
restCat.setConf(new Configuration());
restCat.initialize("prod", initialConfig);
- Assertions.assertThat(restCat.properties().get(CatalogProperties.CACHE_ENABLED))
+ assertThat(restCat.properties().get(CatalogProperties.CACHE_ENABLED))
.as("Catalog properties after initialize should use the server's override properties")
.isEqualTo("false");
- Assertions.assertThat(restCat.properties().get(CatalogProperties.CLIENT_POOL_SIZE))
+ assertThat(restCat.properties().get(CatalogProperties.CLIENT_POOL_SIZE))
.as("Catalog after initialize should use the server's default properties if not specified")
.isEqualTo("1");
- Assertions.assertThat(restCat.properties().get(CatalogProperties.WAREHOUSE_LOCATION))
+ assertThat(restCat.properties().get(CatalogProperties.WAREHOUSE_LOCATION))
.as("Catalog should return final warehouse location")
.isEqualTo("s3://bucket/warehouse");
@@ -310,12 +309,11 @@
@Test
public void testInitializeWithBadArguments() throws IOException {
RESTCatalog restCat = new RESTCatalog();
- org.assertj.core.api.Assertions.assertThatThrownBy(() -> restCat.initialize("prod", null))
+ assertThatThrownBy(() -> restCat.initialize("prod", null))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Invalid configuration: null");
- org.assertj.core.api.Assertions.assertThatThrownBy(
- () -> restCat.initialize("prod", ImmutableMap.of()))
+ assertThatThrownBy(() -> restCat.initialize("prod", ImmutableMap.of()))
.isInstanceOf(NullPointerException.class)
.hasMessage("Invalid uri for http client: null");
@@ -333,7 +331,7 @@
catalog.initialize(
"prod", ImmutableMap.of(CatalogProperties.URI, "ignored", "token", "bearer-token"));
- Assertions.assertThat(catalog.tableExists(TableIdentifier.of("ns", "table"))).isFalse();
+ assertThat(catalog.tableExists(TableIdentifier.of("ns", "table"))).isFalse();
// the bearer token should be used for all interactions
Mockito.verify(adapter)
@@ -369,7 +367,7 @@
catalog.initialize(
"prod", ImmutableMap.of(CatalogProperties.URI, "ignored", "credential", "catalog:secret"));
- Assertions.assertThat(catalog.tableExists(TableIdentifier.of("ns", "table"))).isFalse();
+ assertThat(catalog.tableExists(TableIdentifier.of("ns", "table"))).isFalse();
// no token or credential for catalog token exchange
Mockito.verify(adapter)
@@ -424,7 +422,7 @@
OAuth2Properties.OAUTH2_SERVER_URI,
oauth2ServerUri));
- Assertions.assertThat(catalog.tableExists(TableIdentifier.of("ns", "table"))).isFalse();
+ assertThat(catalog.tableExists(TableIdentifier.of("ns", "table"))).isFalse();
// no token or credential for catalog token exchange
Mockito.verify(adapter)
@@ -485,7 +483,7 @@
OAuth2Properties.OAUTH2_SERVER_URI,
oauth2ServerUri));
- Assertions.assertThat(catalog.tableExists(TableIdentifier.of("ns", "table"))).isFalse();
+ assertThat(catalog.tableExists(TableIdentifier.of("ns", "table"))).isFalse();
// use the bearer token for config
Mockito.verify(adapter)
@@ -548,7 +546,7 @@
OAuth2Properties.OAUTH2_SERVER_URI,
oauth2ServerUri));
- Assertions.assertThat(catalog.tableExists(TableIdentifier.of("ns", "table"))).isFalse();
+ assertThat(catalog.tableExists(TableIdentifier.of("ns", "table"))).isFalse();
// call client credentials with no initial auth
Mockito.verify(adapter)
@@ -623,7 +621,7 @@
OAuth2Properties.OAUTH2_SERVER_URI,
oauth2ServerUri));
- Assertions.assertThat(catalog.tableExists(TableIdentifier.of("ns", "table"))).isFalse();
+ assertThat(catalog.tableExists(TableIdentifier.of("ns", "table"))).isFalse();
// use the bearer token for client credentials
Mockito.verify(adapter)
@@ -819,7 +817,7 @@
.build();
catalog.initialize("prod", initializationProperties);
- Assertions.assertThat(catalog.tableExists(TableIdentifier.of("ns", "table"))).isFalse();
+ assertThat(catalog.tableExists(TableIdentifier.of("ns", "table"))).isFalse();
Mockito.verify(adapter)
.execute(
@@ -971,7 +969,7 @@
// don't call snapshots() directly as that would cause to load all snapshots. Instead,
// make sure the snapshots field holds exactly 1 snapshot
- Assertions.assertThat(refsMetadata)
+ assertThat(refsMetadata)
.extracting("snapshots")
.asInstanceOf(InstanceOfAssertFactories.list(Snapshot.class))
.hasSize(1);
@@ -1097,7 +1095,7 @@
// don't call snapshots() directly as that would cause to load all snapshots. Instead,
// make sure the snapshots field holds exactly 2 snapshots (the latest snapshot for main
// and the branch)
- Assertions.assertThat(refsMetadata)
+ assertThat(refsMetadata)
.extracting("snapshots")
.asInstanceOf(InstanceOfAssertFactories.list(Snapshot.class))
.hasSize(2);
@@ -1213,7 +1211,7 @@
// don't call snapshots() directly as that would cause to load all snapshots. Instead,
// make sure the snapshots field holds exactly 1 snapshot
- Assertions.assertThat(refsMetadata)
+ assertThat(refsMetadata)
.extracting("snapshots")
.asInstanceOf(InstanceOfAssertFactories.list(Snapshot.class))
.hasSize(1);
@@ -1312,12 +1310,12 @@
}
Table table = catalog.createTable(ident, expectedSchema);
- Assertions.assertThat(table.schema().asStruct())
+ assertThat(table.schema().asStruct())
.as("Schema should match")
.isEqualTo(expectedSchema.asStruct());
Table loaded = catalog.loadTable(ident); // the first load will send the token
- Assertions.assertThat(loaded.schema().asStruct())
+ assertThat(loaded.schema().asStruct())
.as("Schema should match")
.isEqualTo(expectedSchema.asStruct());
@@ -1570,8 +1568,7 @@
.untilAsserted(
() -> {
// use the exchanged catalog token
- Assertions.assertThat(catalog.tableExists(TableIdentifier.of("ns", "table")))
- .isFalse();
+ assertThat(catalog.tableExists(TableIdentifier.of("ns", "table"))).isFalse();
// call client credentials with no initial auth
Mockito.verify(adapter)
@@ -1732,7 +1729,7 @@
OAuth2Properties.OAUTH2_SERVER_URI,
oauth2ServerUri));
- Assertions.assertThat(catalog.tableExists(TableIdentifier.of("ns", "table"))).isFalse();
+ assertThat(catalog.tableExists(TableIdentifier.of("ns", "table"))).isFalse();
// call client credentials with no initial auth
Mockito.verify(adapter)
@@ -1818,7 +1815,7 @@
RESTCatalog catalog = new RESTCatalog(context, (config) -> adapter);
catalog.initialize("prod", ImmutableMap.of(CatalogProperties.URI, "ignored", "token", token));
- Assertions.assertThat(catalog.tableExists(TableIdentifier.of("ns", "table"))).isFalse();
+ assertThat(catalog.tableExists(TableIdentifier.of("ns", "table"))).isFalse();
Mockito.verify(adapter)
.execute(
@@ -1919,8 +1916,7 @@
.untilAsserted(
() -> {
// use the exchanged catalog token
- Assertions.assertThat(catalog.tableExists(TableIdentifier.of("ns", "table")))
- .isFalse();
+ assertThat(catalog.tableExists(TableIdentifier.of("ns", "table"))).isFalse();
// call client credentials with no initial auth
Mockito.verify(adapter)
@@ -2337,7 +2333,7 @@
RESTCatalogAdapter adapter = Mockito.spy(new RESTCatalogAdapter(backendCatalog));
RESTCatalog catalog =
new RESTCatalog(SessionCatalog.SessionContext.createEmpty(), (config) -> adapter);
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() ->
catalog.initialize(
"test", ImmutableMap.of(RESTSessionCatalog.REST_PAGE_SIZE, "-1")))
@@ -2505,8 +2501,7 @@
// cleaned up
UpdateTableRequest request = captor.getValue();
MetadataUpdate.AddSnapshot addSnapshot = (MetadataUpdate.AddSnapshot) request.updates().get(0);
- Assertions.assertThatThrownBy(
- () -> table.io().newInputFile(addSnapshot.snapshot().manifestListLocation()))
+ assertThatThrownBy(() -> table.io().newInputFile(addSnapshot.snapshot().manifestListLocation()))
.isInstanceOf(NotFoundException.class);
}
@@ -2535,8 +2530,7 @@
// exist even though the commit failed
UpdateTableRequest request = captor.getValue();
MetadataUpdate.AddSnapshot addSnapshot = (MetadataUpdate.AddSnapshot) request.updates().get(0);
- Assertions.assertThat(
- table.io().newInputFile(addSnapshot.snapshot().manifestListLocation()).exists())
+ assertThat(table.io().newInputFile(addSnapshot.snapshot().manifestListLocation()).exists())
.isTrue();
}
@@ -2570,7 +2564,7 @@
assertThat(appendSnapshot).isPresent();
MetadataUpdate.AddSnapshot addSnapshot = (MetadataUpdate.AddSnapshot) appendSnapshot.get();
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() ->
catalog
.loadTable(TABLE)
@@ -2608,7 +2602,7 @@
assertThat(appendSnapshot).isPresent();
MetadataUpdate.AddSnapshot addSnapshot = (MetadataUpdate.AddSnapshot) appendSnapshot.get();
- Assertions.assertThat(
+ assertThat(
catalog
.loadTable(TABLE)
.io()
@@ -2646,7 +2640,7 @@
assertThat(appendSnapshot).isPresent();
MetadataUpdate.AddSnapshot addSnapshot = (MetadataUpdate.AddSnapshot) appendSnapshot.get();
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() ->
catalog
.loadTable(TABLE)
@@ -2683,7 +2677,7 @@
assertThat(appendSnapshot).isPresent();
MetadataUpdate.AddSnapshot addSnapshot = (MetadataUpdate.AddSnapshot) appendSnapshot.get();
- Assertions.assertThat(
+ assertThat(
catalog
.loadTable(TABLE)
.io()
diff --git a/core/src/test/java/org/apache/iceberg/rest/TestRESTUtil.java b/core/src/test/java/org/apache/iceberg/rest/TestRESTUtil.java
index 680a8bc..c7667d9 100644
--- a/core/src/test/java/org/apache/iceberg/rest/TestRESTUtil.java
+++ b/core/src/test/java/org/apache/iceberg/rest/TestRESTUtil.java
@@ -18,10 +18,12 @@
*/
package org.apache.iceberg.rest;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatExceptionOfType;
+
import java.util.Map;
import org.apache.iceberg.catalog.Namespace;
import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
public class TestRESTUtil {
@@ -45,7 +47,7 @@
Map<String, String> actual = RESTUtil.extractPrefixMap(input, "rest.");
- Assertions.assertThat(actual).isEqualTo(expected);
+ assertThat(actual).isEqualTo(expected);
}
@Test
@@ -61,7 +63,7 @@
for (String[] testCase : testCases) {
String input = testCase[0];
String expected = testCase[1];
- Assertions.assertThat(RESTUtil.stripTrailingSlash(input)).isEqualTo(expected);
+ assertThat(RESTUtil.stripTrailingSlash(input)).isEqualTo(expected);
}
}
@@ -87,21 +89,21 @@
Namespace namespace = Namespace.of(levels);
// To be placed into a URL path as query parameter or path parameter
- Assertions.assertThat(RESTUtil.encodeNamespace(namespace)).isEqualTo(encodedNs);
+ assertThat(RESTUtil.encodeNamespace(namespace)).isEqualTo(encodedNs);
// Decoded (after pulling as String) from URL
Namespace asNamespace = RESTUtil.decodeNamespace(encodedNs);
- Assertions.assertThat(asNamespace).isEqualTo(namespace);
+ assertThat(asNamespace).isEqualTo(namespace);
}
}
@Test
public void testNamespaceUrlEncodeDecodeDoesNotAllowNull() {
- Assertions.assertThatExceptionOfType(IllegalArgumentException.class)
+ assertThatExceptionOfType(IllegalArgumentException.class)
.isThrownBy(() -> RESTUtil.encodeNamespace(null))
.withMessage("Invalid namespace: null");
- Assertions.assertThatExceptionOfType(IllegalArgumentException.class)
+ assertThatExceptionOfType(IllegalArgumentException.class)
.isThrownBy(() -> RESTUtil.decodeNamespace(null))
.withMessage("Invalid namespace: null");
}
@@ -113,7 +115,7 @@
String utf8 = "\u0020\u0025\u0026\u002B\u00A3\u20AC";
String expected = "+%25%26%2B%C2%A3%E2%82%AC";
- Assertions.assertThat(RESTUtil.encodeString(utf8)).isEqualTo(expected);
+ assertThat(RESTUtil.encodeString(utf8)).isEqualTo(expected);
}
@Test
@@ -124,7 +126,7 @@
Map<String, String> formData = ImmutableMap.of("client_id", "12345", "client_secret", utf8);
String expected = "client_id=12345&client_secret=" + asString;
- Assertions.assertThat(RESTUtil.encodeFormData(formData)).isEqualTo(expected);
+ assertThat(RESTUtil.encodeFormData(formData)).isEqualTo(expected);
}
@Test
@@ -135,6 +137,6 @@
Map<String, String> expected = ImmutableMap.of("client_id", "12345", "client_secret", utf8);
String formString = "client_id=12345&client_secret=" + asString;
- Assertions.assertThat(RESTUtil.decodeFormData(formString)).isEqualTo(expected);
+ assertThat(RESTUtil.decodeFormData(formString)).isEqualTo(expected);
}
}
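
For reference, a minimal, hypothetical JUnit 5 sketch of the two statically imported AssertJ exception idioms used in the TestRESTUtil hunks above (assertThatThrownBy and assertThatExceptionOfType). ExampleParser, its method, and its message are illustrative stand-ins, not code from the Iceberg repository; the sketch only assumes AssertJ and JUnit 5 on the classpath.

import static org.assertj.core.api.Assertions.assertThatExceptionOfType;
import static org.assertj.core.api.Assertions.assertThatThrownBy;

import org.junit.jupiter.api.Test;

public class TestExceptionIdiomsSketch {

  @Test
  public void exceptionAssertions() {
    // Idiom 1: wrap the throwing call, then chain type and message checks.
    assertThatThrownBy(() -> ExampleParser.parse(null))
        .isInstanceOf(IllegalArgumentException.class)
        .hasMessage("Cannot parse from null input");

    // Idiom 2: name the expected exception type first, then supply the throwing call.
    assertThatExceptionOfType(IllegalArgumentException.class)
        .isThrownBy(() -> ExampleParser.parse(null))
        .withMessage("Cannot parse from null input");
  }

  // Hypothetical helper so the sketch is self-contained.
  private static class ExampleParser {
    private static String parse(String input) {
      if (input == null) {
        throw new IllegalArgumentException("Cannot parse from null input");
      }
      return input.trim();
    }
  }
}

Both idioms read naturally once the methods are statically imported; which one a test uses is a matter of whether the expected type or the throwing call is named first.
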
diff --git a/core/src/test/java/org/apache/iceberg/rest/TestResourcePaths.java b/core/src/test/java/org/apache/iceberg/rest/TestResourcePaths.java
index 4b91fbb..c2d3fe5 100644
--- a/core/src/test/java/org/apache/iceberg/rest/TestResourcePaths.java
+++ b/core/src/test/java/org/apache/iceberg/rest/TestResourcePaths.java
@@ -18,10 +18,11 @@
*/
package org.apache.iceberg.rest;
+import static org.assertj.core.api.Assertions.assertThat;
+
import org.apache.iceberg.catalog.Namespace;
import org.apache.iceberg.catalog.TableIdentifier;
import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
public class TestResourcePaths {
@@ -33,161 +34,149 @@
@Test
public void testConfigPath() {
// prefix does not affect the config route because config is merged into catalog properties
- Assertions.assertThat(ResourcePaths.config()).isEqualTo("v1/config");
+ assertThat(ResourcePaths.config()).isEqualTo("v1/config");
}
@Test
public void testNamespaces() {
- Assertions.assertThat(withPrefix.namespaces()).isEqualTo("v1/ws/catalog/namespaces");
- Assertions.assertThat(withoutPrefix.namespaces()).isEqualTo("v1/namespaces");
+ assertThat(withPrefix.namespaces()).isEqualTo("v1/ws/catalog/namespaces");
+ assertThat(withoutPrefix.namespaces()).isEqualTo("v1/namespaces");
}
@Test
public void testNamespace() {
Namespace ns = Namespace.of("ns");
- Assertions.assertThat(withPrefix.namespace(ns)).isEqualTo("v1/ws/catalog/namespaces/ns");
- Assertions.assertThat(withoutPrefix.namespace(ns)).isEqualTo("v1/namespaces/ns");
+ assertThat(withPrefix.namespace(ns)).isEqualTo("v1/ws/catalog/namespaces/ns");
+ assertThat(withoutPrefix.namespace(ns)).isEqualTo("v1/namespaces/ns");
}
@Test
public void testNamespaceWithSlash() {
Namespace ns = Namespace.of("n/s");
- Assertions.assertThat(withPrefix.namespace(ns)).isEqualTo("v1/ws/catalog/namespaces/n%2Fs");
- Assertions.assertThat(withoutPrefix.namespace(ns)).isEqualTo("v1/namespaces/n%2Fs");
+ assertThat(withPrefix.namespace(ns)).isEqualTo("v1/ws/catalog/namespaces/n%2Fs");
+ assertThat(withoutPrefix.namespace(ns)).isEqualTo("v1/namespaces/n%2Fs");
}
@Test
public void testNamespaceWithMultipartNamespace() {
Namespace ns = Namespace.of("n", "s");
- Assertions.assertThat(withPrefix.namespace(ns)).isEqualTo("v1/ws/catalog/namespaces/n%1Fs");
- Assertions.assertThat(withoutPrefix.namespace(ns)).isEqualTo("v1/namespaces/n%1Fs");
+ assertThat(withPrefix.namespace(ns)).isEqualTo("v1/ws/catalog/namespaces/n%1Fs");
+ assertThat(withoutPrefix.namespace(ns)).isEqualTo("v1/namespaces/n%1Fs");
}
@Test
public void testNamespaceProperties() {
Namespace ns = Namespace.of("ns");
- Assertions.assertThat(withPrefix.namespaceProperties(ns))
+ assertThat(withPrefix.namespaceProperties(ns))
.isEqualTo("v1/ws/catalog/namespaces/ns/properties");
- Assertions.assertThat(withoutPrefix.namespaceProperties(ns))
- .isEqualTo("v1/namespaces/ns/properties");
+ assertThat(withoutPrefix.namespaceProperties(ns)).isEqualTo("v1/namespaces/ns/properties");
}
@Test
public void testNamespacePropertiesWithSlash() {
Namespace ns = Namespace.of("n/s");
- Assertions.assertThat(withPrefix.namespaceProperties(ns))
+ assertThat(withPrefix.namespaceProperties(ns))
.isEqualTo("v1/ws/catalog/namespaces/n%2Fs/properties");
- Assertions.assertThat(withoutPrefix.namespaceProperties(ns))
- .isEqualTo("v1/namespaces/n%2Fs/properties");
+ assertThat(withoutPrefix.namespaceProperties(ns)).isEqualTo("v1/namespaces/n%2Fs/properties");
}
@Test
public void testNamespacePropertiesWithMultipartNamespace() {
Namespace ns = Namespace.of("n", "s");
- Assertions.assertThat(withPrefix.namespaceProperties(ns))
+ assertThat(withPrefix.namespaceProperties(ns))
.isEqualTo("v1/ws/catalog/namespaces/n%1Fs/properties");
- Assertions.assertThat(withoutPrefix.namespaceProperties(ns))
- .isEqualTo("v1/namespaces/n%1Fs/properties");
+ assertThat(withoutPrefix.namespaceProperties(ns)).isEqualTo("v1/namespaces/n%1Fs/properties");
}
@Test
public void testTables() {
Namespace ns = Namespace.of("ns");
- Assertions.assertThat(withPrefix.tables(ns)).isEqualTo("v1/ws/catalog/namespaces/ns/tables");
- Assertions.assertThat(withoutPrefix.tables(ns)).isEqualTo("v1/namespaces/ns/tables");
+ assertThat(withPrefix.tables(ns)).isEqualTo("v1/ws/catalog/namespaces/ns/tables");
+ assertThat(withoutPrefix.tables(ns)).isEqualTo("v1/namespaces/ns/tables");
}
@Test
public void testTablesWithSlash() {
Namespace ns = Namespace.of("n/s");
- Assertions.assertThat(withPrefix.tables(ns)).isEqualTo("v1/ws/catalog/namespaces/n%2Fs/tables");
- Assertions.assertThat(withoutPrefix.tables(ns)).isEqualTo("v1/namespaces/n%2Fs/tables");
+ assertThat(withPrefix.tables(ns)).isEqualTo("v1/ws/catalog/namespaces/n%2Fs/tables");
+ assertThat(withoutPrefix.tables(ns)).isEqualTo("v1/namespaces/n%2Fs/tables");
}
@Test
public void testTablesWithMultipartNamespace() {
Namespace ns = Namespace.of("n", "s");
- Assertions.assertThat(withPrefix.tables(ns)).isEqualTo("v1/ws/catalog/namespaces/n%1Fs/tables");
- Assertions.assertThat(withoutPrefix.tables(ns)).isEqualTo("v1/namespaces/n%1Fs/tables");
+ assertThat(withPrefix.tables(ns)).isEqualTo("v1/ws/catalog/namespaces/n%1Fs/tables");
+ assertThat(withoutPrefix.tables(ns)).isEqualTo("v1/namespaces/n%1Fs/tables");
}
@Test
public void testTable() {
TableIdentifier ident = TableIdentifier.of("ns", "table");
- Assertions.assertThat(withPrefix.table(ident))
- .isEqualTo("v1/ws/catalog/namespaces/ns/tables/table");
- Assertions.assertThat(withoutPrefix.table(ident)).isEqualTo("v1/namespaces/ns/tables/table");
+ assertThat(withPrefix.table(ident)).isEqualTo("v1/ws/catalog/namespaces/ns/tables/table");
+ assertThat(withoutPrefix.table(ident)).isEqualTo("v1/namespaces/ns/tables/table");
}
@Test
public void testTableWithSlash() {
TableIdentifier ident = TableIdentifier.of("n/s", "tab/le");
- Assertions.assertThat(withPrefix.table(ident))
- .isEqualTo("v1/ws/catalog/namespaces/n%2Fs/tables/tab%2Fle");
- Assertions.assertThat(withoutPrefix.table(ident))
- .isEqualTo("v1/namespaces/n%2Fs/tables/tab%2Fle");
+ assertThat(withPrefix.table(ident)).isEqualTo("v1/ws/catalog/namespaces/n%2Fs/tables/tab%2Fle");
+ assertThat(withoutPrefix.table(ident)).isEqualTo("v1/namespaces/n%2Fs/tables/tab%2Fle");
}
@Test
public void testTableWithMultipartNamespace() {
TableIdentifier ident = TableIdentifier.of("n", "s", "table");
- Assertions.assertThat(withPrefix.table(ident))
- .isEqualTo("v1/ws/catalog/namespaces/n%1Fs/tables/table");
- Assertions.assertThat(withoutPrefix.table(ident)).isEqualTo("v1/namespaces/n%1Fs/tables/table");
+ assertThat(withPrefix.table(ident)).isEqualTo("v1/ws/catalog/namespaces/n%1Fs/tables/table");
+ assertThat(withoutPrefix.table(ident)).isEqualTo("v1/namespaces/n%1Fs/tables/table");
}
@Test
public void testRegister() {
Namespace ns = Namespace.of("ns");
- Assertions.assertThat(withPrefix.register(ns))
- .isEqualTo("v1/ws/catalog/namespaces/ns/register");
- Assertions.assertThat(withoutPrefix.register(ns)).isEqualTo("v1/namespaces/ns/register");
+ assertThat(withPrefix.register(ns)).isEqualTo("v1/ws/catalog/namespaces/ns/register");
+ assertThat(withoutPrefix.register(ns)).isEqualTo("v1/namespaces/ns/register");
}
@Test
public void views() {
Namespace ns = Namespace.of("ns");
- Assertions.assertThat(withPrefix.views(ns)).isEqualTo("v1/ws/catalog/namespaces/ns/views");
- Assertions.assertThat(withoutPrefix.views(ns)).isEqualTo("v1/namespaces/ns/views");
+ assertThat(withPrefix.views(ns)).isEqualTo("v1/ws/catalog/namespaces/ns/views");
+ assertThat(withoutPrefix.views(ns)).isEqualTo("v1/namespaces/ns/views");
}
@Test
public void viewsWithSlash() {
Namespace ns = Namespace.of("n/s");
- Assertions.assertThat(withPrefix.views(ns)).isEqualTo("v1/ws/catalog/namespaces/n%2Fs/views");
- Assertions.assertThat(withoutPrefix.views(ns)).isEqualTo("v1/namespaces/n%2Fs/views");
+ assertThat(withPrefix.views(ns)).isEqualTo("v1/ws/catalog/namespaces/n%2Fs/views");
+ assertThat(withoutPrefix.views(ns)).isEqualTo("v1/namespaces/n%2Fs/views");
}
@Test
public void viewsWithMultipartNamespace() {
Namespace ns = Namespace.of("n", "s");
- Assertions.assertThat(withPrefix.views(ns)).isEqualTo("v1/ws/catalog/namespaces/n%1Fs/views");
- Assertions.assertThat(withoutPrefix.views(ns)).isEqualTo("v1/namespaces/n%1Fs/views");
+ assertThat(withPrefix.views(ns)).isEqualTo("v1/ws/catalog/namespaces/n%1Fs/views");
+ assertThat(withoutPrefix.views(ns)).isEqualTo("v1/namespaces/n%1Fs/views");
}
@Test
public void view() {
TableIdentifier ident = TableIdentifier.of("ns", "view-name");
- Assertions.assertThat(withPrefix.view(ident))
- .isEqualTo("v1/ws/catalog/namespaces/ns/views/view-name");
- Assertions.assertThat(withoutPrefix.view(ident)).isEqualTo("v1/namespaces/ns/views/view-name");
+ assertThat(withPrefix.view(ident)).isEqualTo("v1/ws/catalog/namespaces/ns/views/view-name");
+ assertThat(withoutPrefix.view(ident)).isEqualTo("v1/namespaces/ns/views/view-name");
}
@Test
public void viewWithSlash() {
TableIdentifier ident = TableIdentifier.of("n/s", "vi/ew-name");
- Assertions.assertThat(withPrefix.view(ident))
+ assertThat(withPrefix.view(ident))
.isEqualTo("v1/ws/catalog/namespaces/n%2Fs/views/vi%2Few-name");
- Assertions.assertThat(withoutPrefix.view(ident))
- .isEqualTo("v1/namespaces/n%2Fs/views/vi%2Few-name");
+ assertThat(withoutPrefix.view(ident)).isEqualTo("v1/namespaces/n%2Fs/views/vi%2Few-name");
}
@Test
public void viewWithMultipartNamespace() {
TableIdentifier ident = TableIdentifier.of("n", "s", "view-name");
- Assertions.assertThat(withPrefix.view(ident))
- .isEqualTo("v1/ws/catalog/namespaces/n%1Fs/views/view-name");
- Assertions.assertThat(withoutPrefix.view(ident))
- .isEqualTo("v1/namespaces/n%1Fs/views/view-name");
+ assertThat(withPrefix.view(ident)).isEqualTo("v1/ws/catalog/namespaces/n%1Fs/views/view-name");
+ assertThat(withoutPrefix.view(ident)).isEqualTo("v1/namespaces/n%1Fs/views/view-name");
}
}
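
For reference, a minimal sketch of the statically imported extracting(...).asInstanceOf(InstanceOfAssertFactories.list(...)) narrowing used in the TestRESTCatalog hunks earlier in this patch to inspect a field by name without calling its accessor. Holder and its snapshots field are hypothetical stand-ins; the sketch assumes AssertJ and JUnit 5 on the classpath.

import static org.assertj.core.api.Assertions.assertThat;

import java.util.Collections;
import java.util.List;
import org.assertj.core.api.InstanceOfAssertFactories;
import org.junit.jupiter.api.Test;

public class TestFieldExtractionSketch {

  // Hypothetical class standing in for an object whose field is read by name.
  private static class Holder {
    private final List<String> snapshots = Collections.singletonList("snap-1");
  }

  @Test
  public void extractPrivateListField() {
    // extracting("snapshots") reads the property or field by name;
    // asInstanceOf narrows the result to a ListAssert so hasSize applies.
    assertThat(new Holder())
        .extracting("snapshots")
        .asInstanceOf(InstanceOfAssertFactories.list(String.class))
        .hasSize(1);
  }
}
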
diff --git a/core/src/test/java/org/apache/iceberg/rest/requests/TestCreateNamespaceRequest.java b/core/src/test/java/org/apache/iceberg/rest/requests/TestCreateNamespaceRequest.java
index 8b849b2..313f5e1 100644
--- a/core/src/test/java/org/apache/iceberg/rest/requests/TestCreateNamespaceRequest.java
+++ b/core/src/test/java/org/apache/iceberg/rest/requests/TestCreateNamespaceRequest.java
@@ -18,13 +18,15 @@
*/
package org.apache.iceberg.rest.requests;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
+
import com.fasterxml.jackson.core.JsonProcessingException;
import java.util.Map;
import org.apache.iceberg.catalog.Namespace;
import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap;
import org.apache.iceberg.relocated.com.google.common.collect.Maps;
import org.apache.iceberg.rest.RequestResponseTestBase;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
public class TestCreateNamespaceRequest extends RequestResponseTestBase<CreateNamespaceRequest> {
@@ -76,49 +78,46 @@
public void testDeserializeInvalidRequest() {
String jsonIncorrectTypeForNamespace =
"{\"namespace\":\"accounting%1Ftax\",\"properties\":null}";
- Assertions.assertThatThrownBy(() -> deserialize(jsonIncorrectTypeForNamespace))
+ assertThatThrownBy(() -> deserialize(jsonIncorrectTypeForNamespace))
.isInstanceOf(JsonProcessingException.class)
.hasMessageStartingWith("Cannot parse string array from non-array");
String jsonIncorrectTypeForProperties =
"{\"namespace\":[\"accounting\",\"tax\"],\"properties\":[]}";
- Assertions.assertThatThrownBy(() -> deserialize(jsonIncorrectTypeForProperties))
+ assertThatThrownBy(() -> deserialize(jsonIncorrectTypeForProperties))
.isInstanceOf(JsonProcessingException.class)
.hasMessageStartingWith("Cannot deserialize value of type");
String jsonMisspelledKeys =
"{\"namepsace\":[\"accounting\",\"tax\"],\"propertiezzzz\":{\"owner\":\"Hank\"}}";
- Assertions.assertThatThrownBy(() -> deserialize(jsonMisspelledKeys))
+ assertThatThrownBy(() -> deserialize(jsonMisspelledKeys))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Invalid namespace: null");
String emptyJson = "{}";
- Assertions.assertThatThrownBy(() -> deserialize(emptyJson))
+ assertThatThrownBy(() -> deserialize(emptyJson))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Invalid namespace: null");
- Assertions.assertThatThrownBy(() -> deserialize(null))
+ assertThatThrownBy(() -> deserialize(null))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("argument \"content\" is null");
}
@Test
public void testBuilderDoesNotBuildInvalidRequests() {
- Assertions.assertThatThrownBy(
- () -> CreateNamespaceRequest.builder().withNamespace(null).build())
+ assertThatThrownBy(() -> CreateNamespaceRequest.builder().withNamespace(null).build())
.isInstanceOf(NullPointerException.class)
.hasMessage("Invalid namespace: null");
- Assertions.assertThatThrownBy(
- () -> CreateNamespaceRequest.builder().setProperties(null).build())
+ assertThatThrownBy(() -> CreateNamespaceRequest.builder().setProperties(null).build())
.isInstanceOf(NullPointerException.class)
.hasMessage("Invalid collection of properties: null");
Map<String, String> mapWithNullKey = Maps.newHashMap();
mapWithNullKey.put(null, "hello");
- Assertions.assertThatThrownBy(
- () -> CreateNamespaceRequest.builder().setProperties(mapWithNullKey).build())
+ assertThatThrownBy(() -> CreateNamespaceRequest.builder().setProperties(mapWithNullKey).build())
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Invalid property: null");
@@ -126,7 +125,7 @@
mapWithMultipleNullValues.put("a", null);
mapWithMultipleNullValues.put("b", "b");
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() -> CreateNamespaceRequest.builder().setProperties(mapWithMultipleNullValues).build())
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Invalid value for properties [a]: null");
@@ -147,8 +146,8 @@
@Override
public void assertEquals(CreateNamespaceRequest actual, CreateNamespaceRequest expected) {
- Assertions.assertThat(actual.namespace()).isEqualTo(expected.namespace());
- Assertions.assertThat(actual.properties()).isEqualTo(expected.properties());
+ assertThat(actual.namespace()).isEqualTo(expected.namespace());
+ assertThat(actual.properties()).isEqualTo(expected.properties());
}
@Override
diff --git a/core/src/test/java/org/apache/iceberg/rest/requests/TestCreateTableRequest.java b/core/src/test/java/org/apache/iceberg/rest/requests/TestCreateTableRequest.java
index a5895d3..0d4280c 100644
--- a/core/src/test/java/org/apache/iceberg/rest/requests/TestCreateTableRequest.java
+++ b/core/src/test/java/org/apache/iceberg/rest/requests/TestCreateTableRequest.java
@@ -20,6 +20,8 @@
import static org.apache.iceberg.types.Types.NestedField.optional;
import static org.apache.iceberg.types.Types.NestedField.required;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
import com.fasterxml.jackson.core.JsonProcessingException;
import java.util.Map;
@@ -34,7 +36,6 @@
import org.apache.iceberg.relocated.com.google.common.collect.Maps;
import org.apache.iceberg.rest.RequestResponseTestBase;
import org.apache.iceberg.types.Types;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
public class TestCreateTableRequest extends RequestResponseTestBase<CreateTableRequest> {
@@ -167,7 +168,7 @@
String jsonMissingSchema =
"{\"name\":\"foo\",\"location\":null,\"partition-spec\":null,\"write-order\":null,\"properties\":{},"
+ "\"stage-create\":false}";
- Assertions.assertThatThrownBy(() -> deserialize(jsonMissingSchema))
+ assertThatThrownBy(() -> deserialize(jsonMissingSchema))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Invalid schema: null");
@@ -176,7 +177,7 @@
"{\"location\":null,\"schema\":%s,\"spec\":null,\"write-order\":null,\"properties\":{},"
+ "\"stage-create\":false}",
SAMPLE_SCHEMA_JSON);
- Assertions.assertThatThrownBy(() -> deserialize(jsonMissingName))
+ assertThatThrownBy(() -> deserialize(jsonMissingName))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Invalid table name: null");
@@ -185,53 +186,52 @@
"{\"name\":\"foo\",\"location\":null,\"schema\":%s,\"partition-spec\":null,\"write-order\":null,"
+ "\"properties\":[],\"stage-create\":false}",
SAMPLE_SCHEMA_JSON);
- Assertions.assertThatThrownBy(() -> deserialize(jsonIncorrectTypeForProperties))
+ assertThatThrownBy(() -> deserialize(jsonIncorrectTypeForProperties))
.isInstanceOf(JsonProcessingException.class)
.hasMessageStartingWith("Cannot deserialize value of type");
- Assertions.assertThatThrownBy(() -> deserialize("{}"))
+ assertThatThrownBy(() -> deserialize("{}"))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Invalid table name: null");
- Assertions.assertThatThrownBy(() -> deserialize(null))
+ assertThatThrownBy(() -> deserialize(null))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("argument \"content\" is null");
}
@Test
public void testBuilderDoesNotBuildInvalidRequests() {
- Assertions.assertThatThrownBy(() -> CreateTableRequest.builder().withName(null))
+ assertThatThrownBy(() -> CreateTableRequest.builder().withName(null))
.isInstanceOf(NullPointerException.class)
.hasMessage("Invalid name: null");
- Assertions.assertThatThrownBy(() -> CreateTableRequest.builder().withSchema(null))
+ assertThatThrownBy(() -> CreateTableRequest.builder().withSchema(null))
.isInstanceOf(NullPointerException.class)
.hasMessage("Invalid schema: null");
- Assertions.assertThatThrownBy(() -> CreateTableRequest.builder().setProperties(null))
+ assertThatThrownBy(() -> CreateTableRequest.builder().setProperties(null))
.isInstanceOf(NullPointerException.class)
.hasMessage("Invalid collection of properties: null");
Map<String, String> mapWithNullKey = Maps.newHashMap();
mapWithNullKey.put(null, "hello");
- Assertions.assertThatThrownBy(() -> CreateTableRequest.builder().setProperties(mapWithNullKey))
+ assertThatThrownBy(() -> CreateTableRequest.builder().setProperties(mapWithNullKey))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Invalid property: null");
Map<String, String> mapWithNullValue = Maps.newHashMap();
mapWithNullValue.put("a", null);
mapWithNullValue.put("b", "b");
- Assertions.assertThatThrownBy(
- () -> CreateTableRequest.builder().setProperties(mapWithNullValue).build())
+ assertThatThrownBy(() -> CreateTableRequest.builder().setProperties(mapWithNullValue).build())
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Invalid value for properties [a]: null");
- Assertions.assertThatThrownBy(() -> CreateTableRequest.builder().setProperty("foo", null))
+ assertThatThrownBy(() -> CreateTableRequest.builder().setProperty("foo", null))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Invalid value for property foo: null");
- Assertions.assertThatThrownBy(() -> CreateTableRequest.builder().setProperty(null, "foo"))
+ assertThatThrownBy(() -> CreateTableRequest.builder().setProperty(null, "foo"))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Invalid property: null");
}
@@ -258,25 +258,23 @@
@Override
public void assertEquals(CreateTableRequest actual, CreateTableRequest expected) {
- Assertions.assertThat(actual.name()).as("Name should be the same").isEqualTo(expected.name());
- Assertions.assertThat(actual.location())
+ assertThat(actual.name()).as("Name should be the same").isEqualTo(expected.name());
+ assertThat(actual.location())
.as("Location should be the same if provided")
.isEqualTo(expected.location());
- Assertions.assertThat(
+ assertThat(
expected.schema().sameSchema(actual.schema())
&& expected.schema().schemaId() == actual.schema().schemaId())
.as("Schemas should be equivalent and have same schema id")
.isTrue();
- Assertions.assertThat(actual.spec())
- .as("Partition spec should be equal")
- .isEqualTo(expected.spec());
- Assertions.assertThat(actual.writeOrder())
+ assertThat(actual.spec()).as("Partition spec should be equal").isEqualTo(expected.spec());
+ assertThat(actual.writeOrder())
.as("Write [sort] order should be the same")
.isEqualTo(expected.writeOrder());
- Assertions.assertThat(actual.properties())
+ assertThat(actual.properties())
.as("Properties should be the same")
.isEqualTo(expected.properties());
- Assertions.assertThat(actual.stageCreate())
+ assertThat(actual.stageCreate())
.as("Stage create should be equal")
.isEqualTo(expected.stageCreate());
}
diff --git a/core/src/test/java/org/apache/iceberg/rest/requests/TestRegisterTableRequestParser.java b/core/src/test/java/org/apache/iceberg/rest/requests/TestRegisterTableRequestParser.java
index 9b479d8..50a47df 100644
--- a/core/src/test/java/org/apache/iceberg/rest/requests/TestRegisterTableRequestParser.java
+++ b/core/src/test/java/org/apache/iceberg/rest/requests/TestRegisterTableRequestParser.java
@@ -19,36 +19,35 @@
package org.apache.iceberg.rest.requests;
import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
import com.fasterxml.jackson.databind.JsonNode;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
public class TestRegisterTableRequestParser {
@Test
public void nullCheck() {
- Assertions.assertThatThrownBy(() -> RegisterTableRequestParser.toJson(null))
+ assertThatThrownBy(() -> RegisterTableRequestParser.toJson(null))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Invalid register table request: null");
- Assertions.assertThatThrownBy(() -> RegisterTableRequestParser.fromJson((JsonNode) null))
+ assertThatThrownBy(() -> RegisterTableRequestParser.fromJson((JsonNode) null))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse register table request from null object");
}
@Test
public void missingFields() {
- Assertions.assertThatThrownBy(() -> RegisterTableRequestParser.fromJson("{}"))
+ assertThatThrownBy(() -> RegisterTableRequestParser.fromJson("{}"))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse missing string: name");
- Assertions.assertThatThrownBy(
- () -> RegisterTableRequestParser.fromJson("{\"name\" : \"test_tbl\"}"))
+ assertThatThrownBy(() -> RegisterTableRequestParser.fromJson("{\"name\" : \"test_tbl\"}"))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse missing string: metadata-location");
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() ->
RegisterTableRequestParser.fromJson(
"{\"metadata-location\" : \"file://tmp/NS/test_tbl/metadata/00000-d4f60d2f-2ad2-408b-8832-0ed7fbd851ee.metadata.json\"}"))
diff --git a/core/src/test/java/org/apache/iceberg/rest/requests/TestRenameTableRequest.java b/core/src/test/java/org/apache/iceberg/rest/requests/TestRenameTableRequest.java
index aeff875..de236a9 100644
--- a/core/src/test/java/org/apache/iceberg/rest/requests/TestRenameTableRequest.java
+++ b/core/src/test/java/org/apache/iceberg/rest/requests/TestRenameTableRequest.java
@@ -18,12 +18,14 @@
*/
package org.apache.iceberg.rest.requests;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
+
import com.fasterxml.jackson.core.JsonProcessingException;
import org.apache.iceberg.catalog.Namespace;
import org.apache.iceberg.catalog.TableIdentifier;
import org.apache.iceberg.catalog.TableIdentifierParser;
import org.apache.iceberg.rest.RequestResponseTestBase;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
public class TestRenameTableRequest extends RequestResponseTestBase<RenameTableRequest> {
@@ -51,44 +53,44 @@
String jsonSourceNullName =
"{\"source\":{\"namespace\":[\"accounting\",\"tax\"],\"name\":null},"
+ "\"destination\":{\"namespace\":[\"accounting\",\"tax\"],\"name\":\"paid_2022\"}}";
- Assertions.assertThatThrownBy(() -> deserialize(jsonSourceNullName))
+ assertThatThrownBy(() -> deserialize(jsonSourceNullName))
.isInstanceOf(JsonProcessingException.class)
.hasMessageStartingWith("Cannot parse to a string value: name: null");
String jsonDestinationNullName =
"{\"source\":{\"namespace\":[\"accounting\",\"tax\"],\"name\":\"paid\"},"
+ "\"destination\":{\"namespace\":[\"accounting\",\"tax\"],\"name\":null}}";
- Assertions.assertThatThrownBy(() -> deserialize(jsonDestinationNullName))
+ assertThatThrownBy(() -> deserialize(jsonDestinationNullName))
.isInstanceOf(JsonProcessingException.class)
.hasMessageStartingWith("Cannot parse to a string value: name: null");
String jsonSourceMissingName =
"{\"source\":{\"namespace\":[\"accounting\",\"tax\"]},"
+ "\"destination\":{\"name\":\"paid_2022\"}}";
- Assertions.assertThatThrownBy(() -> deserialize(jsonSourceMissingName))
+ assertThatThrownBy(() -> deserialize(jsonSourceMissingName))
.isInstanceOf(JsonProcessingException.class)
.hasMessageStartingWith("Cannot parse missing string: name");
String jsonDestinationMissingName =
"{\"source\":{\"namespace\":[\"accounting\",\"tax\"],\"name\":\"paid\"},"
+ "\"destination\":{\"namespace\":[\"accounting\",\"tax\"]}}";
- Assertions.assertThatThrownBy(() -> deserialize(jsonDestinationMissingName))
+ assertThatThrownBy(() -> deserialize(jsonDestinationMissingName))
.isInstanceOf(JsonProcessingException.class)
.hasMessageStartingWith("Cannot parse missing string: name");
String emptyJson = "{}";
- Assertions.assertThatThrownBy(() -> deserialize(emptyJson))
+ assertThatThrownBy(() -> deserialize(emptyJson))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Invalid source table: null");
- Assertions.assertThatThrownBy(() -> deserialize(null))
+ assertThatThrownBy(() -> deserialize(null))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("argument \"content\" is null");
}
@Test
public void testBuilderDoesNotBuildInvalidRequests() {
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() ->
RenameTableRequest.builder()
.withSource(null)
@@ -97,7 +99,7 @@
.isInstanceOf(NullPointerException.class)
.hasMessage("Invalid source table identifier: null");
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() -> RenameTableRequest.builder().withSource(TAX_PAID).withDestination(null).build())
.isInstanceOf(NullPointerException.class)
.hasMessage("Invalid destination table identifier: null");
@@ -118,10 +120,10 @@
@Override
public void assertEquals(RenameTableRequest actual, RenameTableRequest expected) {
- Assertions.assertThat(actual.source())
+ assertThat(actual.source())
.as("Source table identifier should be equal")
.isEqualTo(expected.source());
- Assertions.assertThat(actual.destination())
+ assertThat(actual.destination())
.as("Destination table identifier should be equal")
.isEqualTo(expected.destination());
}
diff --git a/core/src/test/java/org/apache/iceberg/rest/requests/TestReportMetricsRequestParser.java b/core/src/test/java/org/apache/iceberg/rest/requests/TestReportMetricsRequestParser.java
index f049389..b0f4306 100644
--- a/core/src/test/java/org/apache/iceberg/rest/requests/TestReportMetricsRequestParser.java
+++ b/core/src/test/java/org/apache/iceberg/rest/requests/TestReportMetricsRequestParser.java
@@ -18,6 +18,9 @@
*/
package org.apache.iceberg.rest.requests;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
+
import com.fasterxml.jackson.databind.JsonNode;
import org.apache.iceberg.expressions.Expressions;
import org.apache.iceberg.metrics.CommitMetrics;
@@ -30,34 +33,33 @@
import org.apache.iceberg.metrics.ScanMetricsResult;
import org.apache.iceberg.metrics.ScanReport;
import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
public class TestReportMetricsRequestParser {
@Test
public void nullCheck() {
- Assertions.assertThatThrownBy(() -> ReportMetricsRequestParser.toJson(null))
+ assertThatThrownBy(() -> ReportMetricsRequestParser.toJson(null))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Invalid metrics request: null");
- Assertions.assertThatThrownBy(() -> ReportMetricsRequestParser.fromJson((JsonNode) null))
+ assertThatThrownBy(() -> ReportMetricsRequestParser.fromJson((JsonNode) null))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse metrics request from null object");
}
@Test
public void missingFields() {
- Assertions.assertThatThrownBy(() -> ReportMetricsRequestParser.fromJson("{}"))
+ assertThatThrownBy(() -> ReportMetricsRequestParser.fromJson("{}"))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse missing string: report-type");
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() -> ReportMetricsRequestParser.fromJson("{\"report-type\":\"scan-report\"}"))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse missing string: table-name");
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() ->
ReportMetricsRequestParser.fromJson(
"{\"report-type\":\"scan-report\", \"table-name\" : \"x\"}"))
@@ -67,11 +69,10 @@
@Test
public void invalidReportType() {
- Assertions.assertThat(
- ReportMetricsRequestParser.fromJson("{\"report-type\":\"invalid\"}").reportType())
+ assertThat(ReportMetricsRequestParser.fromJson("{\"report-type\":\"invalid\"}").reportType())
.isEqualTo(ReportMetricsRequest.unknown().reportType());
- Assertions.assertThat(
+ assertThat(
ReportMetricsRequestParser.fromJson(
ReportMetricsRequestParser.toJson(
ReportMetricsRequest.of(new MetricsReport() {})))
@@ -93,8 +94,7 @@
+ "}";
ReportMetricsRequest request = ReportMetricsRequestParser.fromJson(json);
- Assertions.assertThat(request.reportType())
- .isEqualTo(ReportMetricsRequest.unknown().reportType());
+ assertThat(request.reportType()).isEqualTo(ReportMetricsRequest.unknown().reportType());
}
@Test
@@ -126,9 +126,9 @@
ReportMetricsRequest metricsRequest = ReportMetricsRequest.of(scanReport);
String json = ReportMetricsRequestParser.toJson(metricsRequest, true);
- Assertions.assertThat(json).isEqualTo(expectedJson);
+ assertThat(json).isEqualTo(expectedJson);
- Assertions.assertThat(ReportMetricsRequestParser.fromJson(json).report())
+ assertThat(ReportMetricsRequestParser.fromJson(json).report())
.isEqualTo(metricsRequest.report());
}
@@ -157,9 +157,9 @@
ReportMetricsRequest metricsRequest = ReportMetricsRequest.of(commitReport);
String json = ReportMetricsRequestParser.toJson(metricsRequest, true);
- Assertions.assertThat(json).isEqualTo(expectedJson);
+ assertThat(json).isEqualTo(expectedJson);
- Assertions.assertThat(ReportMetricsRequestParser.fromJson(json).report())
+ assertThat(ReportMetricsRequestParser.fromJson(json).report())
.isEqualTo(metricsRequest.report());
}
}
diff --git a/core/src/test/java/org/apache/iceberg/rest/requests/TestUpdateNamespacePropertiesRequest.java b/core/src/test/java/org/apache/iceberg/rest/requests/TestUpdateNamespacePropertiesRequest.java
index daef718..38e04ab 100644
--- a/core/src/test/java/org/apache/iceberg/rest/requests/TestUpdateNamespacePropertiesRequest.java
+++ b/core/src/test/java/org/apache/iceberg/rest/requests/TestUpdateNamespacePropertiesRequest.java
@@ -18,6 +18,9 @@
*/
package org.apache.iceberg.rest.requests;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
+
import com.fasterxml.jackson.core.JsonProcessingException;
import java.util.List;
import java.util.Map;
@@ -27,7 +30,6 @@
import org.apache.iceberg.relocated.com.google.common.collect.Maps;
import org.apache.iceberg.relocated.com.google.common.collect.Sets;
import org.apache.iceberg.rest.RequestResponseTestBase;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
public class TestUpdateNamespacePropertiesRequest
@@ -117,13 +119,13 @@
// Invalid top-level types
String jsonInvalidTypeOnRemovalField =
"{\"removals\":{\"foo\":\"bar\"},\"updates\":{\"owner\":\"Hank\"}}";
- Assertions.assertThatThrownBy(() -> deserialize(jsonInvalidTypeOnRemovalField))
+ assertThatThrownBy(() -> deserialize(jsonInvalidTypeOnRemovalField))
.isInstanceOf(JsonProcessingException.class)
.hasMessageStartingWith("Cannot deserialize value of type");
String jsonInvalidTypeOnUpdatesField =
"{\"removals\":[\"foo\":\"bar\"],\"updates\":[\"owner\"]}";
- Assertions.assertThatThrownBy(() -> deserialize(jsonInvalidTypeOnUpdatesField))
+ assertThatThrownBy(() -> deserialize(jsonInvalidTypeOnUpdatesField))
.isInstanceOf(JsonProcessingException.class)
.hasMessageStartingWith("Unexpected character")
.hasMessageContaining("expecting comma to separate Array entries");
@@ -133,52 +135,48 @@
// e.g. { removals: [ "foo", "bar", 1234 ] } will parse correctly.
String invalidJsonWrongTypeInRemovalsList =
"{\"removals\":[\"foo\",\"bar\", {\"owner\": \"Hank\"}],\"updates\":{\"owner\":\"Hank\"}}";
- Assertions.assertThatThrownBy(() -> deserialize(invalidJsonWrongTypeInRemovalsList))
+ assertThatThrownBy(() -> deserialize(invalidJsonWrongTypeInRemovalsList))
.isInstanceOf(JsonProcessingException.class)
.hasMessageStartingWith("Cannot deserialize value of type");
String nullJson = null;
- Assertions.assertThatThrownBy(() -> deserialize(nullJson))
+ assertThatThrownBy(() -> deserialize(nullJson))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("argument \"content\" is null");
}
@Test
public void testBuilderDoesNotCreateInvalidObjects() {
- Assertions.assertThatThrownBy(
- () -> UpdateNamespacePropertiesRequest.builder().remove(null).build())
+ assertThatThrownBy(() -> UpdateNamespacePropertiesRequest.builder().remove(null).build())
.isInstanceOf(NullPointerException.class)
.hasMessage("Invalid property to remove: null");
- Assertions.assertThatThrownBy(
- () -> UpdateNamespacePropertiesRequest.builder().removeAll(null).build())
+ assertThatThrownBy(() -> UpdateNamespacePropertiesRequest.builder().removeAll(null).build())
.isInstanceOf(NullPointerException.class)
.hasMessage("Invalid list of properties to remove: null");
List<String> listWithNull = Lists.newArrayList("a", null, null);
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() -> UpdateNamespacePropertiesRequest.builder().removeAll(listWithNull).build())
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Invalid property to remove: null");
- Assertions.assertThatThrownBy(
- () -> UpdateNamespacePropertiesRequest.builder().update(null, "100").build())
+ assertThatThrownBy(() -> UpdateNamespacePropertiesRequest.builder().update(null, "100").build())
.isInstanceOf(NullPointerException.class)
.hasMessage("Invalid property to update: null");
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() -> UpdateNamespacePropertiesRequest.builder().update("owner", null).build())
.isInstanceOf(NullPointerException.class)
.hasMessage("Invalid value to update for key [owner]: null. Use remove instead");
- Assertions.assertThatThrownBy(
- () -> UpdateNamespacePropertiesRequest.builder().updateAll(null).build())
+ assertThatThrownBy(() -> UpdateNamespacePropertiesRequest.builder().updateAll(null).build())
.isInstanceOf(NullPointerException.class)
.hasMessage("Invalid collection of properties to update: null");
Map<String, String> mapWithNullKey = Maps.newHashMap();
mapWithNullKey.put(null, "hello");
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() -> UpdateNamespacePropertiesRequest.builder().updateAll(mapWithNullKey).build())
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Invalid property to update: null");
@@ -186,7 +184,7 @@
Map<String, String> mapWithMultipleNullValues = Maps.newHashMap();
mapWithMultipleNullValues.put("a", null);
mapWithMultipleNullValues.put("b", "b");
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() ->
UpdateNamespacePropertiesRequest.builder()
.updateAll(mapWithMultipleNullValues)
@@ -211,10 +209,10 @@
@Override
public void assertEquals(
UpdateNamespacePropertiesRequest actual, UpdateNamespacePropertiesRequest expected) {
- Assertions.assertThat(actual.updates())
+ assertThat(actual.updates())
.as("Properties to update should be equal")
.isEqualTo(expected.updates());
- Assertions.assertThat(Sets.newHashSet(actual.removals()))
+ assertThat(Sets.newHashSet(actual.removals()))
.as("Properties to remove should be equal")
.containsExactlyInAnyOrderElementsOf(Sets.newHashSet(expected.removals()));
}
diff --git a/core/src/test/java/org/apache/iceberg/rest/responses/TestCatalogErrorResponseParser.java b/core/src/test/java/org/apache/iceberg/rest/responses/TestCatalogErrorResponseParser.java
index e4fb413..6144bca 100644
--- a/core/src/test/java/org/apache/iceberg/rest/responses/TestCatalogErrorResponseParser.java
+++ b/core/src/test/java/org/apache/iceberg/rest/responses/TestCatalogErrorResponseParser.java
@@ -18,9 +18,10 @@
*/
package org.apache.iceberg.rest.responses;
+import static org.assertj.core.api.Assertions.assertThat;
+
import java.util.Arrays;
import java.util.List;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
public class TestCatalogErrorResponseParser {
@@ -35,7 +36,7 @@
String json = "{\"error\":" + errorModelJson + "}";
ErrorResponse response =
ErrorResponse.builder().withMessage(message).withType(type).responseCode(code).build();
- Assertions.assertThat(ErrorResponseParser.toJson(response))
+ assertThat(ErrorResponseParser.toJson(response))
.as("Should be able to serialize an error response as json")
.isEqualTo(json);
}
@@ -58,7 +59,7 @@
.responseCode(code)
.withStackTrace(stack)
.build();
- Assertions.assertThat(ErrorResponseParser.toJson(response))
+ assertThat(ErrorResponseParser.toJson(response))
.as("Should be able to serialize an error response as json")
.isEqualTo(json);
}
@@ -121,9 +122,9 @@
}
public void assertEquals(ErrorResponse expected, ErrorResponse actual) {
- Assertions.assertThat(actual.message()).isEqualTo(expected.message());
- Assertions.assertThat(actual.type()).isEqualTo(expected.type());
- Assertions.assertThat(actual.code()).isEqualTo(expected.code());
- Assertions.assertThat(actual.stack()).isEqualTo(expected.stack());
+ assertThat(actual.message()).isEqualTo(expected.message());
+ assertThat(actual.type()).isEqualTo(expected.type());
+ assertThat(actual.code()).isEqualTo(expected.code());
+ assertThat(actual.stack()).isEqualTo(expected.stack());
}
}
diff --git a/core/src/test/java/org/apache/iceberg/rest/responses/TestConfigResponse.java b/core/src/test/java/org/apache/iceberg/rest/responses/TestConfigResponse.java
index 273fe48..0cf30e6 100644
--- a/core/src/test/java/org/apache/iceberg/rest/responses/TestConfigResponse.java
+++ b/core/src/test/java/org/apache/iceberg/rest/responses/TestConfigResponse.java
@@ -18,12 +18,14 @@
*/
package org.apache.iceberg.rest.responses;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
+
import com.fasterxml.jackson.core.JsonProcessingException;
import java.util.Map;
import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap;
import org.apache.iceberg.relocated.com.google.common.collect.Maps;
import org.apache.iceberg.rest.RequestResponseTestBase;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
@@ -144,51 +146,49 @@
public void testDeserializeInvalidResponse() {
String jsonDefaultsHasWrongType =
"{\"defaults\":[\"warehouse\",\"s3://bucket/warehouse\"],\"overrides\":{\"clients\":\"5\"}}";
- Assertions.assertThatThrownBy(() -> deserialize(jsonDefaultsHasWrongType))
+ assertThatThrownBy(() -> deserialize(jsonDefaultsHasWrongType))
.isInstanceOf(IllegalArgumentException.class)
.hasMessageContaining(
"Cannot parse string map from non-object value: defaults: [\"warehouse\",\"s3://bucket/warehouse\"]");
String jsonOverridesHasWrongType =
"{\"defaults\":{\"warehouse\":\"s3://bucket/warehouse\"},\"overrides\":\"clients\"}";
- Assertions.assertThatThrownBy(() -> deserialize(jsonOverridesHasWrongType))
+ assertThatThrownBy(() -> deserialize(jsonOverridesHasWrongType))
.isInstanceOf(IllegalArgumentException.class)
.hasMessageContaining(
"Cannot parse string map from non-object value: overrides: \"clients\"");
- Assertions.assertThatThrownBy(() -> deserialize(null))
+ assertThatThrownBy(() -> deserialize(null))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("argument \"content\" is null");
}
@Test
public void testBuilderDoesNotCreateInvalidObjects() {
- Assertions.assertThatThrownBy(() -> ConfigResponse.builder().withOverride(null, "100").build())
+ assertThatThrownBy(() -> ConfigResponse.builder().withOverride(null, "100").build())
.isInstanceOf(NullPointerException.class)
.hasMessage("Invalid override property: null");
- Assertions.assertThatThrownBy(() -> ConfigResponse.builder().withDefault(null, "100").build())
+ assertThatThrownBy(() -> ConfigResponse.builder().withDefault(null, "100").build())
.isInstanceOf(NullPointerException.class)
.hasMessage("Invalid default property: null");
- Assertions.assertThatThrownBy(() -> ConfigResponse.builder().withOverrides(null).build())
+ assertThatThrownBy(() -> ConfigResponse.builder().withOverrides(null).build())
.isInstanceOf(NullPointerException.class)
.hasMessage("Invalid override properties map: null");
- Assertions.assertThatThrownBy(() -> ConfigResponse.builder().withDefaults(null).build())
+ assertThatThrownBy(() -> ConfigResponse.builder().withDefaults(null).build())
.isInstanceOf(NullPointerException.class)
.hasMessage("Invalid default properties map: null");
Map<String, String> mapWithNullKey = Maps.newHashMap();
mapWithNullKey.put(null, "a");
mapWithNullKey.put("b", "b");
- Assertions.assertThatThrownBy(
- () -> ConfigResponse.builder().withDefaults(mapWithNullKey).build())
+ assertThatThrownBy(() -> ConfigResponse.builder().withDefaults(mapWithNullKey).build())
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Invalid default property: null");
- Assertions.assertThatThrownBy(
- () -> ConfigResponse.builder().withOverrides(mapWithNullKey).build())
+ assertThatThrownBy(() -> ConfigResponse.builder().withOverrides(mapWithNullKey).build())
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Invalid override property: null");
}
@@ -214,11 +214,11 @@
"b", "from_overrides",
"c", "from_client");
- Assertions.assertThat(merged)
+ assertThat(merged)
.as(
"The merged properties map should use values from defaults, then client config, and finally overrides")
.isEqualTo(expected);
- Assertions.assertThat(merged)
+ assertThat(merged)
.as("The merged properties map should omit keys with null values")
.doesNotContainValue(null);
}
@@ -235,10 +235,10 @@
@Override
public void assertEquals(ConfigResponse actual, ConfigResponse expected) {
- Assertions.assertThat(actual.defaults())
+ assertThat(actual.defaults())
.as("Config properties to use as defaults should be equal")
.isEqualTo(expected.defaults());
- Assertions.assertThat(actual.overrides())
+ assertThat(actual.overrides())
.as("Config properties to use as overrides should be equal")
.isEqualTo(expected.overrides());
}
diff --git a/core/src/test/java/org/apache/iceberg/rest/responses/TestCreateNamespaceResponse.java b/core/src/test/java/org/apache/iceberg/rest/responses/TestCreateNamespaceResponse.java
index ece9f1e..dfde5bf 100644
--- a/core/src/test/java/org/apache/iceberg/rest/responses/TestCreateNamespaceResponse.java
+++ b/core/src/test/java/org/apache/iceberg/rest/responses/TestCreateNamespaceResponse.java
@@ -18,13 +18,15 @@
*/
package org.apache.iceberg.rest.responses;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
+
import com.fasterxml.jackson.core.JsonProcessingException;
import java.util.Map;
import org.apache.iceberg.catalog.Namespace;
import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap;
import org.apache.iceberg.relocated.com.google.common.collect.Maps;
import org.apache.iceberg.rest.RequestResponseTestBase;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
public class TestCreateNamespaceResponse extends RequestResponseTestBase<CreateNamespaceResponse> {
@@ -84,47 +86,45 @@
public void testDeserializeInvalidResponse() {
String jsonResponseMalformedNamespaceValue =
"{\"namespace\":\"accounting%1Ftax\",\"properties\":null}";
- Assertions.assertThatThrownBy(() -> deserialize(jsonResponseMalformedNamespaceValue))
+ assertThatThrownBy(() -> deserialize(jsonResponseMalformedNamespaceValue))
.isInstanceOf(JsonProcessingException.class)
.hasMessageContaining("Cannot parse string array from non-array");
String jsonResponsePropertiesHasWrongType =
"{\"namespace\":[\"accounting\",\"tax\"],\"properties\":[]}";
- Assertions.assertThatThrownBy(() -> deserialize(jsonResponsePropertiesHasWrongType))
+ assertThatThrownBy(() -> deserialize(jsonResponsePropertiesHasWrongType))
.isInstanceOf(JsonProcessingException.class)
.hasMessageContaining(
"Cannot deserialize value of type `java.util.LinkedHashMap<java.lang.String,java.lang.String>`");
- Assertions.assertThatThrownBy(() -> deserialize("{}"))
+ assertThatThrownBy(() -> deserialize("{}"))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Invalid namespace: null");
String jsonMisspelledKeys =
"{\"namepsace\":[\"accounting\",\"tax\"],\"propertiezzzz\":{\"owner\":\"Hank\"}}";
- Assertions.assertThatThrownBy(() -> deserialize(jsonMisspelledKeys))
+ assertThatThrownBy(() -> deserialize(jsonMisspelledKeys))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Invalid namespace: null");
- Assertions.assertThatThrownBy(() -> deserialize(null))
+ assertThatThrownBy(() -> deserialize(null))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("argument \"content\" is null");
}
@Test
public void testBuilderDoesNotBuildInvalidRequests() {
- Assertions.assertThatThrownBy(
- () -> CreateNamespaceResponse.builder().withNamespace(null).build())
+ assertThatThrownBy(() -> CreateNamespaceResponse.builder().withNamespace(null).build())
.isInstanceOf(NullPointerException.class)
.hasMessage("Invalid namespace: null");
- Assertions.assertThatThrownBy(
- () -> CreateNamespaceResponse.builder().setProperties(null).build())
+ assertThatThrownBy(() -> CreateNamespaceResponse.builder().setProperties(null).build())
.isInstanceOf(NullPointerException.class)
.hasMessage("Invalid collection of properties: null");
Map<String, String> mapWithNullKey = Maps.newHashMap();
mapWithNullKey.put(null, "hello");
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() -> CreateNamespaceResponse.builder().setProperties(mapWithNullKey).build())
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Invalid property to set: null");
@@ -132,7 +132,7 @@
Map<String, String> mapWithMultipleNullValues = Maps.newHashMap();
mapWithMultipleNullValues.put("a", null);
mapWithMultipleNullValues.put("b", "b");
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() ->
CreateNamespaceResponse.builder().setProperties(mapWithMultipleNullValues).build())
.isInstanceOf(IllegalArgumentException.class)
@@ -154,8 +154,8 @@
@Override
public void assertEquals(CreateNamespaceResponse actual, CreateNamespaceResponse expected) {
- Assertions.assertThat(actual.namespace()).isEqualTo(expected.namespace());
- Assertions.assertThat(actual.properties()).isEqualTo(expected.properties());
+ assertThat(actual.namespace()).isEqualTo(expected.namespace());
+ assertThat(actual.properties()).isEqualTo(expected.properties());
}
@Override
diff --git a/core/src/test/java/org/apache/iceberg/rest/responses/TestGetNamespaceResponse.java b/core/src/test/java/org/apache/iceberg/rest/responses/TestGetNamespaceResponse.java
index d228e9c..680b67d 100644
--- a/core/src/test/java/org/apache/iceberg/rest/responses/TestGetNamespaceResponse.java
+++ b/core/src/test/java/org/apache/iceberg/rest/responses/TestGetNamespaceResponse.java
@@ -18,13 +18,15 @@
*/
package org.apache.iceberg.rest.responses;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
+
import com.fasterxml.jackson.core.JsonProcessingException;
import java.util.Map;
import org.apache.iceberg.catalog.Namespace;
import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap;
import org.apache.iceberg.relocated.com.google.common.collect.Maps;
import org.apache.iceberg.rest.RequestResponseTestBase;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
public class TestGetNamespaceResponse extends RequestResponseTestBase<GetNamespaceResponse> {
@@ -66,56 +68,55 @@
@Test
public void testDeserializeInvalidResponse() {
String jsonNamespaceHasWrongType = "{\"namespace\":\"accounting%1Ftax\",\"properties\":null}";
- Assertions.assertThatThrownBy(() -> deserialize(jsonNamespaceHasWrongType))
+ assertThatThrownBy(() -> deserialize(jsonNamespaceHasWrongType))
.as("A JSON response with the wrong type for a field should fail to deserialize")
.isInstanceOf(JsonProcessingException.class)
.hasMessageContaining("Cannot parse string array from non-array");
String jsonPropertiesHasWrongType =
"{\"namespace\":[\"accounting\",\"tax\"],\"properties\":[]}";
- Assertions.assertThatThrownBy(() -> deserialize(jsonPropertiesHasWrongType))
+ assertThatThrownBy(() -> deserialize(jsonPropertiesHasWrongType))
.isInstanceOf(JsonProcessingException.class)
.hasMessageContaining(
"Cannot deserialize value of type `java.util.LinkedHashMap<java.lang.String,java.lang.String>`");
String emptyJson = "{}";
- Assertions.assertThatThrownBy(() -> deserialize(emptyJson))
+ assertThatThrownBy(() -> deserialize(emptyJson))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Invalid namespace: null");
String jsonWithKeysSpelledIncorrectly =
"{\"namepsace\":[\"accounting\",\"tax\"],\"propertiezzzz\":{\"owner\":\"Hank\"}}";
- Assertions.assertThatThrownBy(() -> deserialize(jsonWithKeysSpelledIncorrectly))
+ assertThatThrownBy(() -> deserialize(jsonWithKeysSpelledIncorrectly))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Invalid namespace: null");
String nullJson = null;
- Assertions.assertThatThrownBy(() -> deserialize(nullJson))
+ assertThatThrownBy(() -> deserialize(nullJson))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("argument \"content\" is null");
}
@Test
public void testBuilderDoesNotBuildInvalidRequests() {
- Assertions.assertThatThrownBy(() -> GetNamespaceResponse.builder().withNamespace(null).build())
+ assertThatThrownBy(() -> GetNamespaceResponse.builder().withNamespace(null).build())
.isInstanceOf(NullPointerException.class)
.hasMessage("Invalid namespace: null");
- Assertions.assertThatThrownBy(() -> GetNamespaceResponse.builder().setProperties(null).build())
+ assertThatThrownBy(() -> GetNamespaceResponse.builder().setProperties(null).build())
.isInstanceOf(NullPointerException.class)
.hasMessage("Invalid properties map: null");
Map<String, String> mapWithNullKey = Maps.newHashMap();
mapWithNullKey.put(null, "hello");
- Assertions.assertThatThrownBy(
- () -> GetNamespaceResponse.builder().setProperties(mapWithNullKey).build())
+ assertThatThrownBy(() -> GetNamespaceResponse.builder().setProperties(mapWithNullKey).build())
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Invalid property: null");
Map<String, String> mapWithMultipleNullValues = Maps.newHashMap();
mapWithMultipleNullValues.put("a", null);
mapWithMultipleNullValues.put("b", "b");
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() -> GetNamespaceResponse.builder().setProperties(mapWithMultipleNullValues).build())
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Invalid value for properties [a]: null");
@@ -136,8 +137,8 @@
@Override
public void assertEquals(GetNamespaceResponse actual, GetNamespaceResponse expected) {
- Assertions.assertThat(actual.namespace()).isEqualTo(expected.namespace());
- Assertions.assertThat(actual.properties()).isEqualTo(expected.properties());
+ assertThat(actual.namespace()).isEqualTo(expected.namespace());
+ assertThat(actual.properties()).isEqualTo(expected.properties());
}
@Override
diff --git a/core/src/test/java/org/apache/iceberg/rest/responses/TestListNamespacesResponse.java b/core/src/test/java/org/apache/iceberg/rest/responses/TestListNamespacesResponse.java
index d9ed801..0db75a1 100644
--- a/core/src/test/java/org/apache/iceberg/rest/responses/TestListNamespacesResponse.java
+++ b/core/src/test/java/org/apache/iceberg/rest/responses/TestListNamespacesResponse.java
@@ -18,13 +18,15 @@
*/
package org.apache.iceberg.rest.responses;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
+
import com.fasterxml.jackson.core.JsonProcessingException;
import java.util.List;
import org.apache.iceberg.catalog.Namespace;
import org.apache.iceberg.relocated.com.google.common.collect.ImmutableList;
import org.apache.iceberg.relocated.com.google.common.collect.Lists;
import org.apache.iceberg.rest.RequestResponseTestBase;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
public class TestListNamespacesResponse extends RequestResponseTestBase<ListNamespacesResponse> {
@@ -45,40 +47,39 @@
@Test
public void testDeserializeInvalidResponseThrows() {
String jsonNamespacesHasWrongType = "{\"namespaces\":\"accounting\"}";
- Assertions.assertThatThrownBy(() -> deserialize(jsonNamespacesHasWrongType))
+ assertThatThrownBy(() -> deserialize(jsonNamespacesHasWrongType))
.isInstanceOf(JsonProcessingException.class)
.hasMessageContaining(
"Cannot deserialize value of type `java.util.ArrayList<org.apache.iceberg.catalog.Namespace>`");
String emptyJson = "{}";
- Assertions.assertThatThrownBy(() -> deserialize(emptyJson))
+ assertThatThrownBy(() -> deserialize(emptyJson))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Invalid namespace: null");
String jsonWithKeysSpelledIncorrectly = "{\"namepsacezz\":[\"accounting\",\"tax\"]}";
- Assertions.assertThatThrownBy(() -> deserialize(jsonWithKeysSpelledIncorrectly))
+ assertThatThrownBy(() -> deserialize(jsonWithKeysSpelledIncorrectly))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Invalid namespace: null");
String nullJson = null;
- Assertions.assertThatThrownBy(() -> deserialize(nullJson))
+ assertThatThrownBy(() -> deserialize(nullJson))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("argument \"content\" is null");
}
@Test
public void testBuilderDoesNotCreateInvalidObjects() {
- Assertions.assertThatThrownBy(() -> ListNamespacesResponse.builder().add(null).build())
+ assertThatThrownBy(() -> ListNamespacesResponse.builder().add(null).build())
.isInstanceOf(NullPointerException.class)
.hasMessage("Invalid namespace: null");
- Assertions.assertThatThrownBy(() -> ListNamespacesResponse.builder().addAll(null).build())
+ assertThatThrownBy(() -> ListNamespacesResponse.builder().addAll(null).build())
.isInstanceOf(NullPointerException.class)
.hasMessage("Invalid namespace list: null");
List<Namespace> listWithNullElement = Lists.newArrayList(Namespace.of("a"), null);
- Assertions.assertThatThrownBy(
- () -> ListNamespacesResponse.builder().addAll(listWithNullElement).build())
+ assertThatThrownBy(() -> ListNamespacesResponse.builder().addAll(listWithNullElement).build())
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Invalid namespace: null");
}
@@ -90,8 +91,8 @@
ListNamespacesResponse response =
ListNamespacesResponse.builder().addAll(NAMESPACES).nextPageToken(null).build();
assertRoundTripSerializesEquallyFrom(jsonWithNullPageToken, response);
- Assertions.assertThat(response.nextPageToken()).isNull();
- Assertions.assertThat(response.namespaces()).isEqualTo(NAMESPACES);
+ assertThat(response.nextPageToken()).isNull();
+ assertThat(response.namespaces()).isEqualTo(NAMESPACES);
}
@Test
@@ -102,8 +103,8 @@
ListNamespacesResponse response =
ListNamespacesResponse.builder().addAll(NAMESPACES).nextPageToken(pageToken).build();
assertRoundTripSerializesEquallyFrom(jsonWithPageToken, response);
- Assertions.assertThat(response.nextPageToken()).isEqualTo("token");
- Assertions.assertThat(response.namespaces()).isEqualTo(NAMESPACES);
+ assertThat(response.nextPageToken()).isEqualTo("token");
+ assertThat(response.namespaces()).isEqualTo(NAMESPACES);
}
@Override
@@ -118,7 +119,7 @@
@Override
public void assertEquals(ListNamespacesResponse actual, ListNamespacesResponse expected) {
- Assertions.assertThat(actual.namespaces())
+ assertThat(actual.namespaces())
.as("Namespaces list should be equal")
.hasSize(expected.namespaces().size())
.containsExactlyInAnyOrderElementsOf(expected.namespaces());
diff --git a/core/src/test/java/org/apache/iceberg/rest/responses/TestListTablesResponse.java b/core/src/test/java/org/apache/iceberg/rest/responses/TestListTablesResponse.java
index d46228f..bc95261 100644
--- a/core/src/test/java/org/apache/iceberg/rest/responses/TestListTablesResponse.java
+++ b/core/src/test/java/org/apache/iceberg/rest/responses/TestListTablesResponse.java
@@ -18,6 +18,9 @@
*/
package org.apache.iceberg.rest.responses;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
+
import com.fasterxml.jackson.core.JsonProcessingException;
import java.util.List;
import org.apache.iceberg.catalog.Namespace;
@@ -25,7 +28,6 @@
import org.apache.iceberg.relocated.com.google.common.collect.ImmutableList;
import org.apache.iceberg.relocated.com.google.common.collect.Lists;
import org.apache.iceberg.rest.RequestResponseTestBase;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
public class TestListTablesResponse extends RequestResponseTestBase<ListTablesResponse> {
@@ -47,60 +49,60 @@
@Test
public void testDeserializeInvalidResponsesThrows() {
String identifiersHasWrongType = "{\"identifiers\":\"accounting%1Ftax\"}";
- Assertions.assertThatThrownBy(() -> deserialize(identifiersHasWrongType))
+ assertThatThrownBy(() -> deserialize(identifiersHasWrongType))
.isInstanceOf(JsonProcessingException.class)
.hasMessageContaining(
"Cannot deserialize value of type `java.util.ArrayList<org.apache.iceberg.catalog.TableIdentifier>`");
String emptyJson = "{}";
- Assertions.assertThatThrownBy(() -> deserialize(emptyJson))
+ assertThatThrownBy(() -> deserialize(emptyJson))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Invalid identifier list: null");
String jsonWithKeysSpelledIncorrectly =
"{\"identifyrezzzz\":[{\"namespace\":[\"accounting\",\"tax\"],\"name\":\"paid\"}]}";
- Assertions.assertThatThrownBy(() -> deserialize(jsonWithKeysSpelledIncorrectly))
+ assertThatThrownBy(() -> deserialize(jsonWithKeysSpelledIncorrectly))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Invalid identifier list: null");
String jsonWithInvalidIdentifiersInList =
"{\"identifiers\":[{\"namespace\":\"accounting.tax\",\"name\":\"paid\"}]}";
- Assertions.assertThatThrownBy(() -> deserialize(jsonWithInvalidIdentifiersInList))
+ assertThatThrownBy(() -> deserialize(jsonWithInvalidIdentifiersInList))
.isInstanceOf(JsonProcessingException.class)
.hasMessageContaining(
"Cannot parse JSON array from non-array value: namespace: \"accounting.tax\"");
String jsonWithInvalidIdentifiersInList2 =
"{\"identifiers\":[{\"namespace\":[\"accounting\",\"tax\"],\"name\":\"paid\"},\"accounting.tax.paid\"]}";
- Assertions.assertThatThrownBy(() -> deserialize(jsonWithInvalidIdentifiersInList2))
+ assertThatThrownBy(() -> deserialize(jsonWithInvalidIdentifiersInList2))
.isInstanceOf(JsonProcessingException.class)
.hasMessageContaining("Cannot parse missing or non-object table identifier");
String jsonWithInvalidTypeForNamePartOfIdentifier =
"{\"identifiers\":[{\"namespace\":[\"accounting\",\"tax\"],\"name\":true}]}";
- Assertions.assertThatThrownBy(() -> deserialize(jsonWithInvalidTypeForNamePartOfIdentifier))
+ assertThatThrownBy(() -> deserialize(jsonWithInvalidTypeForNamePartOfIdentifier))
.isInstanceOf(JsonProcessingException.class)
.hasMessageContaining("Cannot parse to a string value");
String nullJson = null;
- Assertions.assertThatThrownBy(() -> deserialize(nullJson))
+ assertThatThrownBy(() -> deserialize(nullJson))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("argument \"content\" is null");
}
@Test
public void testBuilderDoesNotCreateInvalidObjects() {
- Assertions.assertThatThrownBy(() -> ListTablesResponse.builder().add(null))
+ assertThatThrownBy(() -> ListTablesResponse.builder().add(null))
.isInstanceOf(NullPointerException.class)
.hasMessage("Invalid table identifier: null");
- Assertions.assertThatThrownBy(() -> ListTablesResponse.builder().addAll(null))
+ assertThatThrownBy(() -> ListTablesResponse.builder().addAll(null))
.isInstanceOf(NullPointerException.class)
.hasMessage("Invalid table identifier list: null");
List<TableIdentifier> listWithNullElement =
Lists.newArrayList(TableIdentifier.of(Namespace.of("foo"), "bar"), null);
- Assertions.assertThatThrownBy(() -> ListTablesResponse.builder().addAll(listWithNullElement))
+ assertThatThrownBy(() -> ListTablesResponse.builder().addAll(listWithNullElement))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Invalid table identifier: null");
}
@@ -112,8 +114,8 @@
ListTablesResponse response =
ListTablesResponse.builder().addAll(IDENTIFIERS).nextPageToken(null).build();
assertRoundTripSerializesEquallyFrom(jsonWithNullPageToken, response);
- Assertions.assertThat(response.nextPageToken()).isNull();
- Assertions.assertThat(response.identifiers()).isEqualTo(IDENTIFIERS);
+ assertThat(response.nextPageToken()).isNull();
+ assertThat(response.identifiers()).isEqualTo(IDENTIFIERS);
}
@Test
@@ -124,8 +126,8 @@
ListTablesResponse response =
ListTablesResponse.builder().addAll(IDENTIFIERS).nextPageToken(pageToken).build();
assertRoundTripSerializesEquallyFrom(jsonWithPageToken, response);
- Assertions.assertThat(response.nextPageToken()).isEqualTo("token");
- Assertions.assertThat(response.identifiers()).isEqualTo(IDENTIFIERS);
+ assertThat(response.nextPageToken()).isEqualTo("token");
+ assertThat(response.identifiers()).isEqualTo(IDENTIFIERS);
}
@Override
@@ -140,7 +142,7 @@
@Override
public void assertEquals(ListTablesResponse actual, ListTablesResponse expected) {
- Assertions.assertThat(actual.identifiers())
+ assertThat(actual.identifiers())
.as("Identifiers should be equal")
.hasSameSizeAs(expected.identifiers())
.containsExactlyInAnyOrderElementsOf(expected.identifiers());
diff --git a/core/src/test/java/org/apache/iceberg/rest/responses/TestLoadTableResponse.java b/core/src/test/java/org/apache/iceberg/rest/responses/TestLoadTableResponse.java
index 0b65541..2325260 100644
--- a/core/src/test/java/org/apache/iceberg/rest/responses/TestLoadTableResponse.java
+++ b/core/src/test/java/org/apache/iceberg/rest/responses/TestLoadTableResponse.java
@@ -19,6 +19,8 @@
package org.apache.iceberg.rest.responses;
import static org.apache.iceberg.TestHelpers.assertSameSchemaList;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
import com.fasterxml.jackson.core.JsonProcessingException;
import java.nio.file.Path;
@@ -36,7 +38,6 @@
import org.apache.iceberg.relocated.com.google.common.collect.Lists;
import org.apache.iceberg.rest.RequestResponseTestBase;
import org.apache.iceberg.types.Types;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
public class TestLoadTableResponse extends RequestResponseTestBase<LoadTableResponse> {
@@ -97,7 +98,7 @@
@Test
public void testFailures() {
- Assertions.assertThatThrownBy(() -> LoadTableResponse.builder().build())
+ assertThatThrownBy(() -> LoadTableResponse.builder().build())
.isInstanceOf(NullPointerException.class)
.hasMessage("Invalid metadata: null");
}
@@ -123,7 +124,7 @@
public void testMissingSchemaType() throws Exception {
// When the schema type (struct) is missing
String tableMetadataJson = readTableMetadataInputFile("TableMetadataV1MissingSchemaType.json");
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() -> TableMetadataParser.fromJson(TEST_METADATA_LOCATION, tableMetadataJson))
.isInstanceOf(IllegalArgumentException.class)
.hasMessageContaining("Cannot parse type from json:");
@@ -157,105 +158,93 @@
LoadTableResponse actual = deserialize(json);
LoadTableResponse expected = LoadTableResponse.builder().withTableMetadata(metadata).build();
assertEquals(actual, expected);
- Assertions.assertThat(actual.config())
+ assertThat(actual.config())
.as("Deserialized JSON with missing fields should have the default values")
.isEqualTo(ImmutableMap.of());
}
@Override
public void assertEquals(LoadTableResponse actual, LoadTableResponse expected) {
- Assertions.assertThat(actual.config())
+ assertThat(actual.config())
.as("Should have the same configuration")
.isEqualTo(expected.config());
assertEqualTableMetadata(actual.tableMetadata(), expected.tableMetadata());
- Assertions.assertThat(actual.metadataLocation())
+ assertThat(actual.metadataLocation())
.as("Should have the same metadata location")
.isEqualTo(expected.metadataLocation());
}
private void assertEqualTableMetadata(TableMetadata actual, TableMetadata expected) {
- Assertions.assertThat(actual.formatVersion())
+ assertThat(actual.formatVersion())
.as("Format version should match")
.isEqualTo(expected.formatVersion());
- Assertions.assertThat(actual.uuid()).as("Table UUID should match").isEqualTo(expected.uuid());
- Assertions.assertThat(actual.location())
- .as("Table location should match")
- .isEqualTo(expected.location());
- Assertions.assertThat(actual.lastColumnId())
- .as("Last column id")
- .isEqualTo(expected.lastColumnId());
- Assertions.assertThat(actual.schema().asStruct())
+ assertThat(actual.uuid()).as("Table UUID should match").isEqualTo(expected.uuid());
+ assertThat(actual.location()).as("Table location should match").isEqualTo(expected.location());
+ assertThat(actual.lastColumnId()).as("Last column id").isEqualTo(expected.lastColumnId());
+ assertThat(actual.schema().asStruct())
.as("Schema should match")
.isEqualTo(expected.schema().asStruct());
assertSameSchemaList(expected.schemas(), actual.schemas());
- Assertions.assertThat(actual.currentSchemaId())
+ assertThat(actual.currentSchemaId())
.as("Current schema id should match")
.isEqualTo(expected.currentSchemaId());
- Assertions.assertThat(actual.schema().asStruct())
+ assertThat(actual.schema().asStruct())
.as("Schema should match")
.isEqualTo(expected.schema().asStruct());
- Assertions.assertThat(actual.lastSequenceNumber())
+ assertThat(actual.lastSequenceNumber())
.as("Last sequence number should match")
.isEqualTo(expected.lastSequenceNumber());
- Assertions.assertThat(actual.spec().toString())
+ assertThat(actual.spec().toString())
.as("Partition spec should match")
.isEqualTo(expected.spec().toString());
- Assertions.assertThat(actual.defaultSpecId())
+ assertThat(actual.defaultSpecId())
.as("Default spec ID should match")
.isEqualTo(expected.defaultSpecId());
- Assertions.assertThat(actual.specs())
- .as("PartitionSpec map should match")
- .isEqualTo(expected.specs());
- Assertions.assertThat(actual.defaultSortOrderId())
+ assertThat(actual.specs()).as("PartitionSpec map should match").isEqualTo(expected.specs());
+ assertThat(actual.defaultSortOrderId())
.as("Default Sort ID should match")
.isEqualTo(expected.defaultSortOrderId());
- Assertions.assertThat(actual.sortOrder())
- .as("Sort order should match")
- .isEqualTo(expected.sortOrder());
- Assertions.assertThat(actual.sortOrders())
+ assertThat(actual.sortOrder()).as("Sort order should match").isEqualTo(expected.sortOrder());
+ assertThat(actual.sortOrders())
.as("Sort order map should match")
.isEqualTo(expected.sortOrders());
- Assertions.assertThat(actual.properties())
- .as("Properties should match")
- .isEqualTo(expected.properties());
- Assertions.assertThat(Lists.transform(actual.snapshots(), Snapshot::snapshotId))
+ assertThat(actual.properties()).as("Properties should match").isEqualTo(expected.properties());
+ assertThat(Lists.transform(actual.snapshots(), Snapshot::snapshotId))
.as("Snapshots should match")
.isEqualTo(Lists.transform(expected.snapshots(), Snapshot::snapshotId));
- Assertions.assertThat(actual.snapshotLog())
- .as("History should match")
- .isEqualTo(expected.snapshotLog());
+ assertThat(actual.snapshotLog()).as("History should match").isEqualTo(expected.snapshotLog());
Snapshot expectedCurrentSnapshot = expected.currentSnapshot();
Snapshot actualCurrentSnapshot = actual.currentSnapshot();
- Assertions.assertThat(
+ assertThat(
expectedCurrentSnapshot != null && actualCurrentSnapshot != null
|| expectedCurrentSnapshot == null && actualCurrentSnapshot == null)
.as("Both expected and actual current snapshot should either be null or non-null")
.isTrue();
if (expectedCurrentSnapshot != null) {
- Assertions.assertThat(actual.currentSnapshot().snapshotId())
+ assertThat(actual.currentSnapshot().snapshotId())
.as("Current snapshot ID should match")
.isEqualTo(expected.currentSnapshot().snapshotId());
- Assertions.assertThat(actual.currentSnapshot().parentId())
+ assertThat(actual.currentSnapshot().parentId())
.as("Parent snapshot ID should match")
.isEqualTo(expected.currentSnapshot().parentId());
- Assertions.assertThat(actual.currentSnapshot().schemaId())
+ assertThat(actual.currentSnapshot().schemaId())
.as("Schema ID for current snapshot should match")
.isEqualTo(expected.currentSnapshot().schemaId());
}
- Assertions.assertThat(actual.metadataFileLocation())
+ assertThat(actual.metadataFileLocation())
.as("Metadata file location should match")
.isEqualTo(expected.metadataFileLocation());
- Assertions.assertThat(actual.lastColumnId())
+ assertThat(actual.lastColumnId())
.as("Last column id should match")
.isEqualTo(expected.lastColumnId());
- Assertions.assertThat(actual.schema().asStruct())
+ assertThat(actual.schema().asStruct())
.as("Schema should match")
.isEqualTo(expected.schema().asStruct());
assertSameSchemaList(expected.schemas(), actual.schemas());
- Assertions.assertThat(actual.currentSchemaId())
+ assertThat(actual.currentSchemaId())
.as("Current schema id should match")
.isEqualTo(expected.currentSchemaId());
- Assertions.assertThat(actual.refs()).as("Refs map should match").isEqualTo(expected.refs());
+ assertThat(actual.refs()).as("Refs map should match").isEqualTo(expected.refs());
}
private String readTableMetadataInputFile(String fileName) throws Exception {
diff --git a/core/src/test/java/org/apache/iceberg/rest/responses/TestOAuthErrorResponseParser.java b/core/src/test/java/org/apache/iceberg/rest/responses/TestOAuthErrorResponseParser.java
index 96953d3..b041aac 100644
--- a/core/src/test/java/org/apache/iceberg/rest/responses/TestOAuthErrorResponseParser.java
+++ b/core/src/test/java/org/apache/iceberg/rest/responses/TestOAuthErrorResponseParser.java
@@ -18,8 +18,10 @@
*/
package org.apache.iceberg.rest.responses;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
+
import org.apache.iceberg.rest.auth.OAuth2Properties;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
public class TestOAuthErrorResponseParser {
@@ -56,14 +58,14 @@
String uri = "http://iceberg.apache.org";
String json =
String.format("{\"error_description\":\"%s\",\"error_uri\":\"%s\"}", description, uri);
- Assertions.assertThatThrownBy(() -> OAuthErrorResponseParser.fromJson(400, json))
+ assertThatThrownBy(() -> OAuthErrorResponseParser.fromJson(400, json))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse missing string: error");
}
public void assertEquals(ErrorResponse expected, ErrorResponse actual) {
- Assertions.assertThat(actual.code()).isEqualTo(expected.code());
- Assertions.assertThat(actual.type()).isEqualTo(expected.type());
- Assertions.assertThat(actual.message()).isEqualTo(expected.message());
+ assertThat(actual.code()).isEqualTo(expected.code());
+ assertThat(actual.type()).isEqualTo(expected.type());
+ assertThat(actual.message()).isEqualTo(expected.message());
}
}
diff --git a/core/src/test/java/org/apache/iceberg/rest/responses/TestOAuthTokenResponse.java b/core/src/test/java/org/apache/iceberg/rest/responses/TestOAuthTokenResponse.java
index cf568ac..aec052d 100644
--- a/core/src/test/java/org/apache/iceberg/rest/responses/TestOAuthTokenResponse.java
+++ b/core/src/test/java/org/apache/iceberg/rest/responses/TestOAuthTokenResponse.java
@@ -18,10 +18,12 @@
*/
package org.apache.iceberg.rest.responses;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
+
import com.fasterxml.jackson.core.JsonProcessingException;
import org.apache.iceberg.rest.RequestResponseTestBase;
import org.apache.iceberg.rest.auth.OAuth2Util;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
public class TestOAuthTokenResponse extends RequestResponseTestBase<OAuthTokenResponse> {
@@ -43,17 +45,15 @@
@Override
public void assertEquals(OAuthTokenResponse actual, OAuthTokenResponse expected) {
- Assertions.assertThat(actual.token()).as("Token should match").isEqualTo(expected.token());
- Assertions.assertThat(actual.tokenType())
- .as("Token type should match")
- .isEqualTo(expected.tokenType());
- Assertions.assertThat(actual.issuedTokenType())
+ assertThat(actual.token()).as("Token should match").isEqualTo(expected.token());
+ assertThat(actual.tokenType()).as("Token type should match").isEqualTo(expected.tokenType());
+ assertThat(actual.issuedTokenType())
.as("Issued token type should match")
.isEqualTo(expected.issuedTokenType());
- Assertions.assertThat(actual.expiresInSeconds())
+ assertThat(actual.expiresInSeconds())
.as("Expiration should match")
.isEqualTo(expected.expiresInSeconds());
- Assertions.assertThat(actual.scopes()).as("Scope should match").isEqualTo(expected.scopes());
+ assertThat(actual.scopes()).as("Scope should match").isEqualTo(expected.scopes());
}
@Override
@@ -114,21 +114,19 @@
@Test
public void testFailures() {
- Assertions.assertThatThrownBy(() -> deserialize("{\"token_type\":\"bearer\"}"))
+ assertThatThrownBy(() -> deserialize("{\"token_type\":\"bearer\"}"))
.isInstanceOf(IllegalArgumentException.class)
.hasMessageContaining("missing string: access_token");
- Assertions.assertThatThrownBy(
- () -> deserialize("{\"access_token\":34,\"token_type\":\"bearer\"}"))
+ assertThatThrownBy(() -> deserialize("{\"access_token\":34,\"token_type\":\"bearer\"}"))
.isInstanceOf(IllegalArgumentException.class)
.hasMessageContaining("Cannot parse to a string value: access_token: 34");
- Assertions.assertThatThrownBy(() -> deserialize("{\"access_token\":\"bearer-token\"}"))
+ assertThatThrownBy(() -> deserialize("{\"access_token\":\"bearer-token\"}"))
.isInstanceOf(IllegalArgumentException.class)
.hasMessageContaining("missing string: token_type");
- Assertions.assertThatThrownBy(
- () -> deserialize("{\"access_token\":\"bearer-token\",\"token_type\":34}"))
+ assertThatThrownBy(() -> deserialize("{\"access_token\":\"bearer-token\",\"token_type\":34}"))
.isInstanceOf(IllegalArgumentException.class)
.hasMessageContaining("Cannot parse to a string value: token_type: 34");
}
diff --git a/core/src/test/java/org/apache/iceberg/rest/responses/TestUpdateNamespacePropertiesResponse.java b/core/src/test/java/org/apache/iceberg/rest/responses/TestUpdateNamespacePropertiesResponse.java
index 17176ef..33c6c53 100644
--- a/core/src/test/java/org/apache/iceberg/rest/responses/TestUpdateNamespacePropertiesResponse.java
+++ b/core/src/test/java/org/apache/iceberg/rest/responses/TestUpdateNamespacePropertiesResponse.java
@@ -18,12 +18,14 @@
*/
package org.apache.iceberg.rest.responses;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
+
import com.fasterxml.jackson.core.JsonProcessingException;
import java.util.List;
import org.apache.iceberg.relocated.com.google.common.collect.ImmutableList;
import org.apache.iceberg.relocated.com.google.common.collect.Lists;
import org.apache.iceberg.rest.RequestResponseTestBase;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
public class TestUpdateNamespacePropertiesResponse
@@ -143,25 +145,25 @@
// Invalid top-level types
String jsonInvalidTypeOnRemovedField =
"{\"removed\":{\"foo\":true},\"updated\":[\"owner\"],\"missing\":[\"bar\"]}";
- Assertions.assertThatThrownBy(() -> deserialize(jsonInvalidTypeOnRemovedField))
+ assertThatThrownBy(() -> deserialize(jsonInvalidTypeOnRemovedField))
.isInstanceOf(JsonProcessingException.class)
.hasMessageContaining(
"Cannot deserialize value of type `java.util.ArrayList<java.lang.String>`");
String jsonInvalidTypeOnUpdatedField = "{\"updated\":\"owner\",\"missing\":[\"bar\"]}";
- Assertions.assertThatThrownBy(() -> deserialize(jsonInvalidTypeOnUpdatedField))
+ assertThatThrownBy(() -> deserialize(jsonInvalidTypeOnUpdatedField))
.isInstanceOf(JsonProcessingException.class)
.hasMessageContaining("Cannot construct instance of `java.util.ArrayList`");
// Valid top-level (array) types, but at least one entry in the list is not the expected type
String jsonInvalidValueOfTypeIntNestedInRemovedList =
"{\"removed\":[\"foo\", \"bar\", 123456], ,\"updated\":[\"owner\"],\"missing\":[\"bar\"]}";
- Assertions.assertThatThrownBy(() -> deserialize(jsonInvalidValueOfTypeIntNestedInRemovedList))
+ assertThatThrownBy(() -> deserialize(jsonInvalidValueOfTypeIntNestedInRemovedList))
.isInstanceOf(JsonProcessingException.class)
.hasMessageContaining("Unexpected character (',' (code 44))");
// Exception comes from Jackson
- Assertions.assertThatThrownBy(() -> deserialize(null))
+ assertThatThrownBy(() -> deserialize(null))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("argument \"content\" is null");
}
@@ -171,54 +173,54 @@
List<String> listContainingNull = Lists.newArrayList("a", null, null);
// updated
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() -> UpdateNamespacePropertiesResponse.builder().addUpdated((String) null).build())
.isInstanceOf(NullPointerException.class)
.hasMessage("Invalid updated property: null");
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() ->
UpdateNamespacePropertiesResponse.builder().addUpdated((List<String>) null).build())
.isInstanceOf(NullPointerException.class)
.hasMessage("Invalid updated property list: null");
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() ->
UpdateNamespacePropertiesResponse.builder().addUpdated(listContainingNull).build())
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Invalid updated property: null");
// removed
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() -> UpdateNamespacePropertiesResponse.builder().addRemoved((String) null).build())
.isInstanceOf(NullPointerException.class)
.hasMessage("Invalid removed property: null");
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() ->
UpdateNamespacePropertiesResponse.builder().addRemoved((List<String>) null).build())
.isInstanceOf(NullPointerException.class)
.hasMessage("Invalid removed property list: null");
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() ->
UpdateNamespacePropertiesResponse.builder().addRemoved(listContainingNull).build())
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Invalid removed property: null");
// missing
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() -> UpdateNamespacePropertiesResponse.builder().addMissing((String) null).build())
.isInstanceOf(NullPointerException.class)
.hasMessage("Invalid missing property: null");
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() ->
UpdateNamespacePropertiesResponse.builder().addMissing((List<String>) null).build())
.isInstanceOf(NullPointerException.class)
.hasMessage("Invalid missing property list: null");
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() ->
UpdateNamespacePropertiesResponse.builder().addMissing(listContainingNull).build())
.isInstanceOf(IllegalArgumentException.class)
@@ -242,13 +244,13 @@
@Override
public void assertEquals(
UpdateNamespacePropertiesResponse actual, UpdateNamespacePropertiesResponse expected) {
- Assertions.assertThat(actual.updated())
+ assertThat(actual.updated())
.as("Properties updated should be equal")
.containsExactlyInAnyOrderElementsOf(expected.updated());
- Assertions.assertThat(actual.removed())
+ assertThat(actual.removed())
.as("Properties removed should be equal")
.containsExactlyInAnyOrderElementsOf(expected.removed());
- Assertions.assertThat(actual.missing())
+ assertThat(actual.missing())
.as("Properties missing should be equal")
.containsExactlyInAnyOrderElementsOf(expected.missing());
}
diff --git a/core/src/test/java/org/apache/iceberg/util/TestInMemoryLockManager.java b/core/src/test/java/org/apache/iceberg/util/TestInMemoryLockManager.java
index b4d9296..da7de5e 100644
--- a/core/src/test/java/org/apache/iceberg/util/TestInMemoryLockManager.java
+++ b/core/src/test/java/org/apache/iceberg/util/TestInMemoryLockManager.java
@@ -19,6 +19,7 @@
package org.apache.iceberg.util;
import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
import java.util.List;
import java.util.UUID;
@@ -28,7 +29,6 @@
import org.apache.iceberg.CatalogProperties;
import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap;
import org.apache.iceberg.relocated.com.google.common.collect.Maps;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
@@ -56,7 +56,7 @@
@Test
public void testAcquireOnceSingleProcess() {
lockManager.acquireOnce(lockEntityId, ownerId);
- Assertions.assertThatThrownBy(() -> lockManager.acquireOnce(lockEntityId, ownerId))
+ assertThatThrownBy(() -> lockManager.acquireOnce(lockEntityId, ownerId))
.isInstanceOf(IllegalStateException.class)
.hasMessageStartingWith("Lock for")
.hasMessageContaining("currently held by")
diff --git a/core/src/test/java/org/apache/iceberg/util/TestJsonUtil.java b/core/src/test/java/org/apache/iceberg/util/TestJsonUtil.java
index f5d9212..7702d69 100644
--- a/core/src/test/java/org/apache/iceberg/util/TestJsonUtil.java
+++ b/core/src/test/java/org/apache/iceberg/util/TestJsonUtil.java
@@ -18,6 +18,9 @@
*/
package org.apache.iceberg.util;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
+
import com.fasterxml.jackson.core.JsonProcessingException;
import java.nio.ByteBuffer;
import java.util.Arrays;
@@ -26,65 +29,57 @@
import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap;
import org.apache.iceberg.relocated.com.google.common.collect.Maps;
import org.apache.iceberg.relocated.com.google.common.io.BaseEncoding;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
public class TestJsonUtil {
@Test
public void get() throws JsonProcessingException {
- Assertions.assertThatThrownBy(() -> JsonUtil.get("x", JsonUtil.mapper().readTree("{}")))
+ assertThatThrownBy(() -> JsonUtil.get("x", JsonUtil.mapper().readTree("{}")))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse missing field: x");
- Assertions.assertThatThrownBy(
- () -> JsonUtil.get("x", JsonUtil.mapper().readTree("{\"x\": null}")))
+ assertThatThrownBy(() -> JsonUtil.get("x", JsonUtil.mapper().readTree("{\"x\": null}")))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse missing field: x");
- Assertions.assertThat(JsonUtil.get("x", JsonUtil.mapper().readTree("{\"x\": \"23\"}")).asText())
+ assertThat(JsonUtil.get("x", JsonUtil.mapper().readTree("{\"x\": \"23\"}")).asText())
.isEqualTo("23");
}
@Test
public void getInt() throws JsonProcessingException {
- Assertions.assertThatThrownBy(() -> JsonUtil.getInt("x", JsonUtil.mapper().readTree("{}")))
+ assertThatThrownBy(() -> JsonUtil.getInt("x", JsonUtil.mapper().readTree("{}")))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse missing int: x");
- Assertions.assertThatThrownBy(
- () -> JsonUtil.getInt("x", JsonUtil.mapper().readTree("{\"x\": null}")))
+ assertThatThrownBy(() -> JsonUtil.getInt("x", JsonUtil.mapper().readTree("{\"x\": null}")))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse to an integer value: x: null");
- Assertions.assertThatThrownBy(
- () -> JsonUtil.getInt("x", JsonUtil.mapper().readTree("{\"x\": \"23\"}")))
+ assertThatThrownBy(() -> JsonUtil.getInt("x", JsonUtil.mapper().readTree("{\"x\": \"23\"}")))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse to an integer value: x: \"23\"");
- Assertions.assertThatThrownBy(
- () -> JsonUtil.getInt("x", JsonUtil.mapper().readTree("{\"x\": 23.0}")))
+ assertThatThrownBy(() -> JsonUtil.getInt("x", JsonUtil.mapper().readTree("{\"x\": 23.0}")))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse to an integer value: x: 23.0");
- Assertions.assertThat(JsonUtil.getInt("x", JsonUtil.mapper().readTree("{\"x\": 23}")))
- .isEqualTo(23);
+ assertThat(JsonUtil.getInt("x", JsonUtil.mapper().readTree("{\"x\": 23}"))).isEqualTo(23);
}
@Test
public void getIntOrNull() throws JsonProcessingException {
- Assertions.assertThat(JsonUtil.getIntOrNull("x", JsonUtil.mapper().readTree("{}"))).isNull();
- Assertions.assertThat(JsonUtil.getIntOrNull("x", JsonUtil.mapper().readTree("{\"x\": 23}")))
- .isEqualTo(23);
- Assertions.assertThat(JsonUtil.getIntOrNull("x", JsonUtil.mapper().readTree("{\"x\": null}")))
- .isNull();
+ assertThat(JsonUtil.getIntOrNull("x", JsonUtil.mapper().readTree("{}"))).isNull();
+ assertThat(JsonUtil.getIntOrNull("x", JsonUtil.mapper().readTree("{\"x\": 23}"))).isEqualTo(23);
+ assertThat(JsonUtil.getIntOrNull("x", JsonUtil.mapper().readTree("{\"x\": null}"))).isNull();
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() -> JsonUtil.getIntOrNull("x", JsonUtil.mapper().readTree("{\"x\": \"23\"}")))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse to an integer value: x: \"23\"");
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() -> JsonUtil.getIntOrNull("x", JsonUtil.mapper().readTree("{\"x\": 23.0}")))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse to an integer value: x: 23.0");
@@ -92,43 +87,38 @@
@Test
public void getLong() throws JsonProcessingException {
- Assertions.assertThatThrownBy(() -> JsonUtil.getLong("x", JsonUtil.mapper().readTree("{}")))
+ assertThatThrownBy(() -> JsonUtil.getLong("x", JsonUtil.mapper().readTree("{}")))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse missing long: x");
- Assertions.assertThatThrownBy(
- () -> JsonUtil.getLong("x", JsonUtil.mapper().readTree("{\"x\": null}")))
+ assertThatThrownBy(() -> JsonUtil.getLong("x", JsonUtil.mapper().readTree("{\"x\": null}")))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse to a long value: x: null");
- Assertions.assertThatThrownBy(
- () -> JsonUtil.getLong("x", JsonUtil.mapper().readTree("{\"x\": \"23\"}")))
+ assertThatThrownBy(() -> JsonUtil.getLong("x", JsonUtil.mapper().readTree("{\"x\": \"23\"}")))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse to a long value: x: \"23\"");
- Assertions.assertThatThrownBy(
- () -> JsonUtil.getLong("x", JsonUtil.mapper().readTree("{\"x\": 23.0}")))
+ assertThatThrownBy(() -> JsonUtil.getLong("x", JsonUtil.mapper().readTree("{\"x\": 23.0}")))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse to a long value: x: 23.0");
- Assertions.assertThat(JsonUtil.getLong("x", JsonUtil.mapper().readTree("{\"x\": 23}")))
- .isEqualTo(23);
+ assertThat(JsonUtil.getLong("x", JsonUtil.mapper().readTree("{\"x\": 23}"))).isEqualTo(23);
}
@Test
public void getLongOrNull() throws JsonProcessingException {
- Assertions.assertThat(JsonUtil.getLongOrNull("x", JsonUtil.mapper().readTree("{}"))).isNull();
- Assertions.assertThat(JsonUtil.getLongOrNull("x", JsonUtil.mapper().readTree("{\"x\": 23}")))
+ assertThat(JsonUtil.getLongOrNull("x", JsonUtil.mapper().readTree("{}"))).isNull();
+ assertThat(JsonUtil.getLongOrNull("x", JsonUtil.mapper().readTree("{\"x\": 23}")))
.isEqualTo(23);
- Assertions.assertThat(JsonUtil.getLongOrNull("x", JsonUtil.mapper().readTree("{\"x\": null}")))
- .isNull();
+ assertThat(JsonUtil.getLongOrNull("x", JsonUtil.mapper().readTree("{\"x\": null}"))).isNull();
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() -> JsonUtil.getLongOrNull("x", JsonUtil.mapper().readTree("{\"x\": \"23\"}")))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse to a long value: x: \"23\"");
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() -> JsonUtil.getLongOrNull("x", JsonUtil.mapper().readTree("{\"x\": 23.0}")))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse to a long value: x: 23.0");
@@ -136,35 +126,30 @@
@Test
public void getString() throws JsonProcessingException {
- Assertions.assertThatThrownBy(() -> JsonUtil.getString("x", JsonUtil.mapper().readTree("{}")))
+ assertThatThrownBy(() -> JsonUtil.getString("x", JsonUtil.mapper().readTree("{}")))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse missing string: x");
- Assertions.assertThatThrownBy(
- () -> JsonUtil.getString("x", JsonUtil.mapper().readTree("{\"x\": null}")))
+ assertThatThrownBy(() -> JsonUtil.getString("x", JsonUtil.mapper().readTree("{\"x\": null}")))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse to a string value: x: null");
- Assertions.assertThatThrownBy(
- () -> JsonUtil.getString("x", JsonUtil.mapper().readTree("{\"x\": 23}")))
+ assertThatThrownBy(() -> JsonUtil.getString("x", JsonUtil.mapper().readTree("{\"x\": 23}")))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse to a string value: x: 23");
- Assertions.assertThat(JsonUtil.getString("x", JsonUtil.mapper().readTree("{\"x\": \"23\"}")))
+ assertThat(JsonUtil.getString("x", JsonUtil.mapper().readTree("{\"x\": \"23\"}")))
.isEqualTo("23");
}
@Test
public void getStringOrNull() throws JsonProcessingException {
- Assertions.assertThat(JsonUtil.getStringOrNull("x", JsonUtil.mapper().readTree("{}"))).isNull();
- Assertions.assertThat(
- JsonUtil.getStringOrNull("x", JsonUtil.mapper().readTree("{\"x\": \"23\"}")))
+ assertThat(JsonUtil.getStringOrNull("x", JsonUtil.mapper().readTree("{}"))).isNull();
+ assertThat(JsonUtil.getStringOrNull("x", JsonUtil.mapper().readTree("{\"x\": \"23\"}")))
.isEqualTo("23");
- Assertions.assertThat(
- JsonUtil.getStringOrNull("x", JsonUtil.mapper().readTree("{\"x\": null}")))
- .isNull();
+ assertThat(JsonUtil.getStringOrNull("x", JsonUtil.mapper().readTree("{\"x\": null}"))).isNull();
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() -> JsonUtil.getStringOrNull("x", JsonUtil.mapper().readTree("{\"x\": 23}")))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse to a string value: x: 23");
@@ -172,19 +157,17 @@
@Test
public void getByteBufferOrNull() throws JsonProcessingException {
- Assertions.assertThat(JsonUtil.getByteBufferOrNull("x", JsonUtil.mapper().readTree("{}")))
- .isNull();
- Assertions.assertThat(
- JsonUtil.getByteBufferOrNull("x", JsonUtil.mapper().readTree("{\"x\": null}")))
+ assertThat(JsonUtil.getByteBufferOrNull("x", JsonUtil.mapper().readTree("{}"))).isNull();
+ assertThat(JsonUtil.getByteBufferOrNull("x", JsonUtil.mapper().readTree("{\"x\": null}")))
.isNull();
byte[] bytes = new byte[] {1, 2, 3, 4};
String base16Str = BaseEncoding.base16().encode(bytes);
String json = String.format("{\"x\": \"%s\"}", base16Str);
ByteBuffer byteBuffer = JsonUtil.getByteBufferOrNull("x", JsonUtil.mapper().readTree(json));
- Assertions.assertThat(byteBuffer.array()).isEqualTo(bytes);
+ assertThat(byteBuffer.array()).isEqualTo(bytes);
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() -> JsonUtil.getByteBufferOrNull("x", JsonUtil.mapper().readTree("{\"x\": 23}")))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse byte buffer from non-text value: x: 23");
@@ -192,48 +175,41 @@
@Test
public void getBool() throws JsonProcessingException {
- Assertions.assertThatThrownBy(() -> JsonUtil.getBool("x", JsonUtil.mapper().readTree("{}")))
+ assertThatThrownBy(() -> JsonUtil.getBool("x", JsonUtil.mapper().readTree("{}")))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse missing boolean: x");
- Assertions.assertThatThrownBy(
- () -> JsonUtil.getBool("x", JsonUtil.mapper().readTree("{\"x\": null}")))
+ assertThatThrownBy(() -> JsonUtil.getBool("x", JsonUtil.mapper().readTree("{\"x\": null}")))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse to a boolean value: x: null");
- Assertions.assertThatThrownBy(
- () -> JsonUtil.getBool("x", JsonUtil.mapper().readTree("{\"x\": \"23\"}")))
+ assertThatThrownBy(() -> JsonUtil.getBool("x", JsonUtil.mapper().readTree("{\"x\": \"23\"}")))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse to a boolean value: x: \"23\"");
- Assertions.assertThatThrownBy(
- () -> JsonUtil.getBool("x", JsonUtil.mapper().readTree("{\"x\": \"true\"}")))
+ assertThatThrownBy(() -> JsonUtil.getBool("x", JsonUtil.mapper().readTree("{\"x\": \"true\"}")))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse to a boolean value: x: \"true\"");
- Assertions.assertThat(JsonUtil.getBool("x", JsonUtil.mapper().readTree("{\"x\": true}")))
- .isTrue();
- Assertions.assertThat(JsonUtil.getBool("x", JsonUtil.mapper().readTree("{\"x\": false}")))
- .isFalse();
+ assertThat(JsonUtil.getBool("x", JsonUtil.mapper().readTree("{\"x\": true}"))).isTrue();
+ assertThat(JsonUtil.getBool("x", JsonUtil.mapper().readTree("{\"x\": false}"))).isFalse();
}
@Test
public void getIntArrayOrNull() throws JsonProcessingException {
- Assertions.assertThat(JsonUtil.getIntArrayOrNull("items", JsonUtil.mapper().readTree("{}")))
+ assertThat(JsonUtil.getIntArrayOrNull("items", JsonUtil.mapper().readTree("{}"))).isNull();
+
+ assertThat(JsonUtil.getIntArrayOrNull("items", JsonUtil.mapper().readTree("{\"items\": null}")))
.isNull();
- Assertions.assertThat(
- JsonUtil.getIntArrayOrNull("items", JsonUtil.mapper().readTree("{\"items\": null}")))
- .isNull();
-
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() ->
JsonUtil.getIntArrayOrNull(
"items", JsonUtil.mapper().readTree("{\"items\": [13, \"23\"]}")))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse integer from non-int value in items: \"23\"");
- Assertions.assertThat(
+ assertThat(
JsonUtil.getIntArrayOrNull(
"items", JsonUtil.mapper().readTree("{\"items\": [23, 45]}")))
.isEqualTo(new int[] {23, 45});
@@ -241,17 +217,16 @@
@Test
public void getIntegerList() throws JsonProcessingException {
- Assertions.assertThatThrownBy(
- () -> JsonUtil.getIntegerList("items", JsonUtil.mapper().readTree("{}")))
+ assertThatThrownBy(() -> JsonUtil.getIntegerList("items", JsonUtil.mapper().readTree("{}")))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse missing list: items");
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() -> JsonUtil.getIntegerList("items", JsonUtil.mapper().readTree("{\"items\": null}")))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse JSON array from non-array value: items: null");
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() ->
JsonUtil.getIntegerList(
"items", JsonUtil.mapper().readTree("{\"items\": [13, \"23\"]}")))
@@ -259,7 +234,7 @@
.hasMessage("Cannot parse integer from non-int value in items: \"23\"");
List<Integer> items = Arrays.asList(23, 45);
- Assertions.assertThat(
+ assertThat(
JsonUtil.getIntegerList("items", JsonUtil.mapper().readTree("{\"items\": [23, 45]}")))
.isEqualTo(items);
@@ -271,51 +246,47 @@
gen.writeEndObject();
},
false);
- Assertions.assertThat(JsonUtil.getIntegerList("items", JsonUtil.mapper().readTree(json)))
- .isEqualTo(items);
+ assertThat(JsonUtil.getIntegerList("items", JsonUtil.mapper().readTree(json))).isEqualTo(items);
}
@Test
public void getIntegerSet() throws JsonProcessingException {
- Assertions.assertThatThrownBy(
- () -> JsonUtil.getIntegerSet("items", JsonUtil.mapper().readTree("{}")))
+ assertThatThrownBy(() -> JsonUtil.getIntegerSet("items", JsonUtil.mapper().readTree("{}")))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse missing set: items");
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() -> JsonUtil.getIntegerSet("items", JsonUtil.mapper().readTree("{\"items\": null}")))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse JSON array from non-array value: items: null");
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() ->
JsonUtil.getIntegerSet(
"items", JsonUtil.mapper().readTree("{\"items\": [13, \"23\"]}")))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse integer from non-int value in items: \"23\"");
- Assertions.assertThat(
- JsonUtil.getIntegerSet("items", JsonUtil.mapper().readTree("{\"items\": [23, 45]}")))
+ assertThat(JsonUtil.getIntegerSet("items", JsonUtil.mapper().readTree("{\"items\": [23, 45]}")))
.containsExactlyElementsOf(Arrays.asList(23, 45));
}
@Test
public void getIntegerSetOrNull() throws JsonProcessingException {
- Assertions.assertThat(JsonUtil.getIntegerSetOrNull("items", JsonUtil.mapper().readTree("{}")))
- .isNull();
+ assertThat(JsonUtil.getIntegerSetOrNull("items", JsonUtil.mapper().readTree("{}"))).isNull();
- Assertions.assertThat(
+ assertThat(
JsonUtil.getIntegerSetOrNull("items", JsonUtil.mapper().readTree("{\"items\": null}")))
.isNull();
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() ->
JsonUtil.getIntegerSetOrNull(
"items", JsonUtil.mapper().readTree("{\"items\": [13, \"23\"]}")))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse integer from non-int value in items: \"23\"");
- Assertions.assertThat(
+ assertThat(
JsonUtil.getIntegerSetOrNull(
"items", JsonUtil.mapper().readTree("{\"items\": [23, 45]}")))
.containsExactlyElementsOf(Arrays.asList(23, 45));
@@ -323,17 +294,16 @@
@Test
public void getLongList() throws JsonProcessingException {
- Assertions.assertThatThrownBy(
- () -> JsonUtil.getLongList("items", JsonUtil.mapper().readTree("{}")))
+ assertThatThrownBy(() -> JsonUtil.getLongList("items", JsonUtil.mapper().readTree("{}")))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse missing list: items");
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() -> JsonUtil.getLongList("items", JsonUtil.mapper().readTree("{\"items\": null}")))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse JSON array from non-array value: items: null");
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() ->
JsonUtil.getLongList(
"items", JsonUtil.mapper().readTree("{\"items\": [13, \"23\"]}")))
@@ -341,8 +311,7 @@
.hasMessage("Cannot parse long from non-long value in items: \"23\"");
List<Long> items = Arrays.asList(23L, 45L);
- Assertions.assertThat(
- JsonUtil.getLongList("items", JsonUtil.mapper().readTree("{\"items\": [23, 45]}")))
+ assertThat(JsonUtil.getLongList("items", JsonUtil.mapper().readTree("{\"items\": [23, 45]}")))
.isEqualTo(items);
String json =
@@ -353,27 +322,24 @@
gen.writeEndObject();
},
false);
- Assertions.assertThat(JsonUtil.getLongList("items", JsonUtil.mapper().readTree(json)))
- .isEqualTo(items);
+ assertThat(JsonUtil.getLongList("items", JsonUtil.mapper().readTree(json))).isEqualTo(items);
}
@Test
public void getLongListOrNull() throws JsonProcessingException {
- Assertions.assertThat(JsonUtil.getLongListOrNull("items", JsonUtil.mapper().readTree("{}")))
+ assertThat(JsonUtil.getLongListOrNull("items", JsonUtil.mapper().readTree("{}"))).isNull();
+
+ assertThat(JsonUtil.getLongListOrNull("items", JsonUtil.mapper().readTree("{\"items\": null}")))
.isNull();
- Assertions.assertThat(
- JsonUtil.getLongListOrNull("items", JsonUtil.mapper().readTree("{\"items\": null}")))
- .isNull();
-
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() ->
JsonUtil.getLongListOrNull(
"items", JsonUtil.mapper().readTree("{\"items\": [13, \"23\"]}")))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse long from non-long value in items: \"23\"");
- Assertions.assertThat(
+ assertThat(
JsonUtil.getLongListOrNull(
"items", JsonUtil.mapper().readTree("{\"items\": [23, 45]}")))
.containsExactlyElementsOf(Arrays.asList(23L, 45L));
@@ -381,62 +347,57 @@
@Test
public void getLongSet() throws JsonProcessingException {
- Assertions.assertThatThrownBy(
- () -> JsonUtil.getLongSet("items", JsonUtil.mapper().readTree("{}")))
+ assertThatThrownBy(() -> JsonUtil.getLongSet("items", JsonUtil.mapper().readTree("{}")))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse missing set: items");
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() -> JsonUtil.getLongSet("items", JsonUtil.mapper().readTree("{\"items\": null}")))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse JSON array from non-array value: items: null");
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() ->
JsonUtil.getLongSet(
"items", JsonUtil.mapper().readTree("{\"items\": [13, \"23\"]}")))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse long from non-long value in items: \"23\"");
- Assertions.assertThat(
- JsonUtil.getLongSet("items", JsonUtil.mapper().readTree("{\"items\": [23, 45]}")))
+ assertThat(JsonUtil.getLongSet("items", JsonUtil.mapper().readTree("{\"items\": [23, 45]}")))
.containsExactlyElementsOf(Arrays.asList(23L, 45L));
}
@Test
public void getLongSetOrNull() throws JsonProcessingException {
- Assertions.assertThat(JsonUtil.getLongSetOrNull("items", JsonUtil.mapper().readTree("{}")))
+ assertThat(JsonUtil.getLongSetOrNull("items", JsonUtil.mapper().readTree("{}"))).isNull();
+
+ assertThat(JsonUtil.getLongSetOrNull("items", JsonUtil.mapper().readTree("{\"items\": null}")))
.isNull();
- Assertions.assertThat(
- JsonUtil.getLongSetOrNull("items", JsonUtil.mapper().readTree("{\"items\": null}")))
- .isNull();
-
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() ->
JsonUtil.getLongSetOrNull(
"items", JsonUtil.mapper().readTree("{\"items\": [13, \"23\"]}")))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse long from non-long value in items: \"23\"");
- Assertions.assertThat(
+ assertThat(
JsonUtil.getLongSetOrNull("items", JsonUtil.mapper().readTree("{\"items\": [23, 45]}")))
.containsExactlyElementsOf(Arrays.asList(23L, 45L));
}
@Test
public void getStringList() throws JsonProcessingException {
- Assertions.assertThatThrownBy(
- () -> JsonUtil.getStringList("items", JsonUtil.mapper().readTree("{}")))
+ assertThatThrownBy(() -> JsonUtil.getStringList("items", JsonUtil.mapper().readTree("{}")))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse missing list: items");
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() -> JsonUtil.getStringList("items", JsonUtil.mapper().readTree("{\"items\": null}")))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse JSON array from non-array value: items: null");
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() ->
JsonUtil.getStringList(
"items", JsonUtil.mapper().readTree("{\"items\": [\"23\", 45]}")))
@@ -444,7 +405,7 @@
.hasMessage("Cannot parse string from non-text value in items: 45");
List<String> items = Arrays.asList("23", "45");
- Assertions.assertThat(
+ assertThat(
JsonUtil.getStringList(
"items", JsonUtil.mapper().readTree("{\"items\": [\"23\", \"45\"]}")))
.containsExactlyElementsOf(items);
@@ -457,27 +418,25 @@
gen.writeEndObject();
},
false);
- Assertions.assertThat(JsonUtil.getStringList("items", JsonUtil.mapper().readTree(json)))
- .isEqualTo(items);
+ assertThat(JsonUtil.getStringList("items", JsonUtil.mapper().readTree(json))).isEqualTo(items);
}
@Test
public void getStringListOrNull() throws JsonProcessingException {
- Assertions.assertThat(JsonUtil.getStringListOrNull("items", JsonUtil.mapper().readTree("{}")))
- .isNull();
+ assertThat(JsonUtil.getStringListOrNull("items", JsonUtil.mapper().readTree("{}"))).isNull();
- Assertions.assertThat(
+ assertThat(
JsonUtil.getStringListOrNull("items", JsonUtil.mapper().readTree("{\"items\": null}")))
.isNull();
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() ->
JsonUtil.getStringListOrNull(
"items", JsonUtil.mapper().readTree("{\"items\": [\"23\", 45]}")))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse string from non-text value in items: 45");
- Assertions.assertThat(
+ assertThat(
JsonUtil.getStringListOrNull(
"items", JsonUtil.mapper().readTree("{\"items\": [\"23\", \"45\"]}")))
.containsExactlyElementsOf(Arrays.asList("23", "45"));
@@ -485,24 +444,23 @@
@Test
public void getStringSet() throws JsonProcessingException {
- Assertions.assertThatThrownBy(
- () -> JsonUtil.getStringSet("items", JsonUtil.mapper().readTree("{}")))
+ assertThatThrownBy(() -> JsonUtil.getStringSet("items", JsonUtil.mapper().readTree("{}")))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse missing set: items");
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() -> JsonUtil.getStringSet("items", JsonUtil.mapper().readTree("{\"items\": null}")))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse JSON array from non-array value: items: null");
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() ->
JsonUtil.getStringSet(
"items", JsonUtil.mapper().readTree("{\"items\": [\"23\", 45]}")))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse string from non-text value in items: 45");
- Assertions.assertThat(
+ assertThat(
JsonUtil.getStringSet(
"items", JsonUtil.mapper().readTree("{\"items\": [\"23\", \"45\"]}")))
.containsExactlyElementsOf(Arrays.asList("23", "45"));
@@ -510,17 +468,16 @@
@Test
public void getStringMap() throws JsonProcessingException {
- Assertions.assertThatThrownBy(
- () -> JsonUtil.getStringMap("items", JsonUtil.mapper().readTree("{}")))
+ assertThatThrownBy(() -> JsonUtil.getStringMap("items", JsonUtil.mapper().readTree("{}")))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse missing map: items");
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() -> JsonUtil.getStringMap("items", JsonUtil.mapper().readTree("{\"items\": null}")))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse string map from non-object value: items: null");
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() ->
JsonUtil.getStringMap(
"items", JsonUtil.mapper().readTree("{\"items\": {\"a\":\"23\", \"b\":45}}")))
@@ -528,7 +485,7 @@
.hasMessage("Cannot parse to a string value: b: 45");
Map<String, String> items = ImmutableMap.of("a", "23", "b", "45");
- Assertions.assertThat(
+ assertThat(
JsonUtil.getStringMap(
"items", JsonUtil.mapper().readTree("{\"items\": {\"a\":\"23\", \"b\":\"45\"}}")))
.isEqualTo(items);
@@ -541,25 +498,24 @@
gen.writeEndObject();
},
false);
- Assertions.assertThat(JsonUtil.getStringMap("items", JsonUtil.mapper().readTree(json)))
- .isEqualTo(items);
+ assertThat(JsonUtil.getStringMap("items", JsonUtil.mapper().readTree(json))).isEqualTo(items);
}
@Test
public void getStringMapNullableValues() throws JsonProcessingException {
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() -> JsonUtil.getStringMapNullableValues("items", JsonUtil.mapper().readTree("{}")))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse missing map: items");
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() ->
JsonUtil.getStringMapNullableValues(
"items", JsonUtil.mapper().readTree("{\"items\": null}")))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse string map from non-object value: items: null");
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() ->
JsonUtil.getStringMapNullableValues(
"items", JsonUtil.mapper().readTree("{\"items\": {\"a\":\"23\", \"b\":45}}")))
@@ -570,7 +526,7 @@
itemsWithNullableValues.put("a", null);
itemsWithNullableValues.put("b", null);
itemsWithNullableValues.put("c", "23");
- Assertions.assertThat(
+ assertThat(
JsonUtil.getStringMapNullableValues(
"items",
JsonUtil.mapper()
@@ -586,8 +542,7 @@
},
false);
- Assertions.assertThat(
- JsonUtil.getStringMapNullableValues("items", JsonUtil.mapper().readTree(json)))
+ assertThat(JsonUtil.getStringMapNullableValues("items", JsonUtil.mapper().readTree(json)))
.isEqualTo(itemsWithNullableValues);
}
}
diff --git a/core/src/test/java/org/apache/iceberg/util/TestLocationUtil.java b/core/src/test/java/org/apache/iceberg/util/TestLocationUtil.java
index 2775004..9a7b276 100644
--- a/core/src/test/java/org/apache/iceberg/util/TestLocationUtil.java
+++ b/core/src/test/java/org/apache/iceberg/util/TestLocationUtil.java
@@ -19,8 +19,8 @@
package org.apache.iceberg.util;
import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
public class TestLocationUtil {
@@ -53,7 +53,7 @@
String[] invalidPaths = new String[] {null, ""};
for (String invalidPath : invalidPaths) {
- Assertions.assertThatThrownBy(() -> LocationUtil.stripTrailingSlash(invalidPath))
+ assertThatThrownBy(() -> LocationUtil.stripTrailingSlash(invalidPath))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("path must not be null or empty");
}
diff --git a/core/src/test/java/org/apache/iceberg/util/TestLockManagers.java b/core/src/test/java/org/apache/iceberg/util/TestLockManagers.java
index 5dd23c3..c3207ae 100644
--- a/core/src/test/java/org/apache/iceberg/util/TestLockManagers.java
+++ b/core/src/test/java/org/apache/iceberg/util/TestLockManagers.java
@@ -18,18 +18,19 @@
*/
package org.apache.iceberg.util;
+import static org.assertj.core.api.Assertions.assertThat;
+
import java.util.Map;
import org.apache.iceberg.CatalogProperties;
import org.apache.iceberg.LockManager;
import org.apache.iceberg.relocated.com.google.common.collect.Maps;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
public class TestLockManagers {
@Test
public void testLoadDefaultLockManager() {
- Assertions.assertThat(LockManagers.defaultLockManager())
+ assertThat(LockManagers.defaultLockManager())
.isInstanceOf(LockManagers.InMemoryLockManager.class);
}
@@ -37,7 +38,7 @@
public void testLoadCustomLockManager() {
Map<String, String> properties = Maps.newHashMap();
properties.put(CatalogProperties.LOCK_IMPL, CustomLockManager.class.getName());
- Assertions.assertThat(LockManagers.from(properties)).isInstanceOf(CustomLockManager.class);
+ assertThat(LockManagers.from(properties)).isInstanceOf(CustomLockManager.class);
}
static class CustomLockManager implements LockManager {
diff --git a/core/src/test/java/org/apache/iceberg/util/TestSnapshotUtil.java b/core/src/test/java/org/apache/iceberg/util/TestSnapshotUtil.java
index db6be5b..af39a59 100644
--- a/core/src/test/java/org/apache/iceberg/util/TestSnapshotUtil.java
+++ b/core/src/test/java/org/apache/iceberg/util/TestSnapshotUtil.java
@@ -35,7 +35,6 @@
import org.apache.iceberg.TestHelpers;
import org.apache.iceberg.TestTables;
import org.apache.iceberg.types.Types;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
@@ -178,7 +177,7 @@
}
// Once snapshot iterator has been exhausted, call hasNext again to make sure it is stable.
- Assertions.assertThat(snapshotIter).isExhausted();
+ assertThat(snapshotIter).isExhausted();
}
private void expectedSnapshots(long[] snapshotIdExpected, Iterable<Snapshot> snapshotsActual) {
diff --git a/core/src/test/java/org/apache/iceberg/util/TestTruncateUtil.java b/core/src/test/java/org/apache/iceberg/util/TestTruncateUtil.java
index 6bf6ec1..025f0c9 100644
--- a/core/src/test/java/org/apache/iceberg/util/TestTruncateUtil.java
+++ b/core/src/test/java/org/apache/iceberg/util/TestTruncateUtil.java
@@ -18,17 +18,19 @@
*/
package org.apache.iceberg.util;
-import org.assertj.core.api.Assertions;
+import static org.assertj.core.api.Assertions.assertThatException;
+import static org.assertj.core.api.Assertions.assertThatNoException;
+
import org.junit.jupiter.api.Test;
class TestTruncateUtil {
@Test
public void testInvalidInputWidthBehavior() {
- Assertions.assertThatNoException()
+ assertThatNoException()
.as("Invalid width input shouldn't necessarily throw an exception as it's not validated")
.isThrownBy(() -> TruncateUtil.truncateInt(-1, 100));
- Assertions.assertThatException()
+ assertThatException()
.as("Invalid width input can possibly throw an exception")
.isThrownBy(() -> TruncateUtil.truncateInt(0, 100));
}
diff --git a/core/src/test/java/org/apache/iceberg/view/TestSQLViewRepresentationParser.java b/core/src/test/java/org/apache/iceberg/view/TestSQLViewRepresentationParser.java
index 8eae11a..8c58f13 100644
--- a/core/src/test/java/org/apache/iceberg/view/TestSQLViewRepresentationParser.java
+++ b/core/src/test/java/org/apache/iceberg/view/TestSQLViewRepresentationParser.java
@@ -18,8 +18,10 @@
*/
package org.apache.iceberg.view;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
+
import com.fasterxml.jackson.databind.JsonNode;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
public class TestSQLViewRepresentationParser {
@@ -33,7 +35,7 @@
.dialect("spark-sql")
.build();
- Assertions.assertThat(SQLViewRepresentationParser.fromJson(requiredFields))
+ assertThat(SQLViewRepresentationParser.fromJson(requiredFields))
.as("Should be able to parse valid SQL view representation")
.isEqualTo(viewRepresentation);
@@ -45,7 +47,7 @@
.sql("select * from foo")
.dialect("spark-sql")
.build();
- Assertions.assertThat(SQLViewRepresentationParser.fromJson(requiredAndOptionalFields))
+ assertThat(SQLViewRepresentationParser.fromJson(requiredAndOptionalFields))
.as("Should be able to parse valid SQL view representation")
.isEqualTo(viewWithOptionalFields);
}
@@ -53,12 +55,12 @@
@Test
public void testParseSqlViewRepresentationMissingRequiredFields() {
String missingDialect = "{\"type\":\"sql\", \"sql\": \"select * from foo\"}";
- Assertions.assertThatThrownBy(() -> ViewRepresentationParser.fromJson(missingDialect))
+ assertThatThrownBy(() -> ViewRepresentationParser.fromJson(missingDialect))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse missing string: dialect");
String missingType = "{\"sql\":\"select * from foo\",\"dialect\":\"spark-sql\"}";
- Assertions.assertThatThrownBy(() -> ViewRepresentationParser.fromJson(missingType))
+ assertThatThrownBy(() -> ViewRepresentationParser.fromJson(missingType))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse missing string: type");
}
@@ -71,21 +73,21 @@
.sql("select * from foo")
.dialect("spark-sql")
.build();
- Assertions.assertThat(ViewRepresentationParser.toJson(viewRepresentation))
+ assertThat(ViewRepresentationParser.toJson(viewRepresentation))
.as("Should be able to serialize valid SQL view representation")
.isEqualTo(json);
- Assertions.assertThat(
+ assertThat(
ViewRepresentationParser.fromJson(ViewRepresentationParser.toJson(viewRepresentation)))
.isEqualTo(viewRepresentation);
}
@Test
public void testNullSqlViewRepresentation() {
- Assertions.assertThatThrownBy(() -> SQLViewRepresentationParser.toJson(null))
+ assertThatThrownBy(() -> SQLViewRepresentationParser.toJson(null))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Invalid SQL view representation: null");
- Assertions.assertThatThrownBy(() -> SQLViewRepresentationParser.fromJson((JsonNode) null))
+ assertThatThrownBy(() -> SQLViewRepresentationParser.fromJson((JsonNode) null))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse SQL view representation from null object");
}
diff --git a/core/src/test/java/org/apache/iceberg/view/TestViewHistoryEntryParser.java b/core/src/test/java/org/apache/iceberg/view/TestViewHistoryEntryParser.java
index 99c24fb..387b90c 100644
--- a/core/src/test/java/org/apache/iceberg/view/TestViewHistoryEntryParser.java
+++ b/core/src/test/java/org/apache/iceberg/view/TestViewHistoryEntryParser.java
@@ -18,8 +18,10 @@
*/
package org.apache.iceberg.view;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
+
import com.fasterxml.jackson.databind.JsonNode;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
public class TestViewHistoryEntryParser {
@@ -29,7 +31,7 @@
String json = "{\"timestamp-ms\":123,\"version-id\":1}";
ViewHistoryEntry viewHistoryEntry =
ImmutableViewHistoryEntry.builder().versionId(1).timestampMillis(123).build();
- Assertions.assertThat(ViewHistoryEntryParser.fromJson(json))
+ assertThat(ViewHistoryEntryParser.fromJson(json))
.as("Should be able to deserialize valid view history entry")
.isEqualTo(viewHistoryEntry);
}
@@ -39,34 +41,33 @@
String json = "{\"timestamp-ms\":123,\"version-id\":1}";
ViewHistoryEntry viewHistoryEntry =
ImmutableViewHistoryEntry.builder().versionId(1).timestampMillis(123).build();
- Assertions.assertThat(ViewHistoryEntryParser.toJson(viewHistoryEntry))
+ assertThat(ViewHistoryEntryParser.toJson(viewHistoryEntry))
.as("Should be able to serialize view history entry")
.isEqualTo(json);
}
@Test
public void testNullViewHistoryEntry() {
- Assertions.assertThatThrownBy(() -> ViewHistoryEntryParser.fromJson((JsonNode) null))
+ assertThatThrownBy(() -> ViewHistoryEntryParser.fromJson((JsonNode) null))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse view history entry from null object");
- Assertions.assertThatThrownBy(() -> ViewHistoryEntryParser.toJson(null))
+ assertThatThrownBy(() -> ViewHistoryEntryParser.toJson(null))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Invalid view history entry: null");
}
@Test
public void testViewHistoryEntryMissingFields() {
- Assertions.assertThatThrownBy(() -> ViewHistoryEntryParser.fromJson("{}"))
+ assertThatThrownBy(() -> ViewHistoryEntryParser.fromJson("{}"))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse missing int: version-id");
- Assertions.assertThatThrownBy(
- () -> ViewHistoryEntryParser.fromJson("{\"timestamp-ms\":\"123\"}"))
+ assertThatThrownBy(() -> ViewHistoryEntryParser.fromJson("{\"timestamp-ms\":\"123\"}"))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse missing int: version-id");
- Assertions.assertThatThrownBy(() -> ViewHistoryEntryParser.fromJson("{\"version-id\":1}"))
+ assertThatThrownBy(() -> ViewHistoryEntryParser.fromJson("{\"version-id\":1}"))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse missing long: timestamp-ms");
}
diff --git a/core/src/test/java/org/apache/iceberg/view/TestViewRepresentationParser.java b/core/src/test/java/org/apache/iceberg/view/TestViewRepresentationParser.java
index 37f9ae2..d81dfb7 100644
--- a/core/src/test/java/org/apache/iceberg/view/TestViewRepresentationParser.java
+++ b/core/src/test/java/org/apache/iceberg/view/TestViewRepresentationParser.java
@@ -18,7 +18,9 @@
*/
package org.apache.iceberg.view;
-import org.assertj.core.api.Assertions;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
+
import org.junit.jupiter.api.Test;
public class TestViewRepresentationParser {
@@ -27,26 +29,25 @@
public void testParseUnknownViewRepresentation() {
String json = "{\"type\":\"unknown-sql-representation\"}";
ViewRepresentation unknownRepresentation = ViewRepresentationParser.fromJson(json);
- Assertions.assertThat(
+ assertThat(
ImmutableUnknownViewRepresentation.builder().type("unknown-sql-representation").build())
.isEqualTo(unknownRepresentation);
- Assertions.assertThatThrownBy(() -> ViewRepresentationParser.toJson(unknownRepresentation))
+ assertThatThrownBy(() -> ViewRepresentationParser.toJson(unknownRepresentation))
.isInstanceOf(UnsupportedOperationException.class)
.hasMessage("Cannot serialize unsupported view representation: unknown-sql-representation");
}
@Test
public void testNullViewRepresentation() {
- Assertions.assertThatThrownBy(() -> ViewRepresentationParser.toJson(null))
+ assertThatThrownBy(() -> ViewRepresentationParser.toJson(null))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Invalid view representation: null");
}
@Test
public void testViewRepresentationMissingType() {
- Assertions.assertThatThrownBy(
- () -> ViewRepresentationParser.fromJson("{\"sql\":\"select * from foo\"}"))
+ assertThatThrownBy(() -> ViewRepresentationParser.fromJson("{\"sql\":\"select * from foo\"}"))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse missing string: type");
}
diff --git a/core/src/test/java/org/apache/iceberg/view/TestViewVersionParser.java b/core/src/test/java/org/apache/iceberg/view/TestViewVersionParser.java
index 03db389..1d115c0 100644
--- a/core/src/test/java/org/apache/iceberg/view/TestViewVersionParser.java
+++ b/core/src/test/java/org/apache/iceberg/view/TestViewVersionParser.java
@@ -18,10 +18,12 @@
*/
package org.apache.iceberg.view;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
+
import com.fasterxml.jackson.databind.JsonNode;
import org.apache.iceberg.catalog.Namespace;
import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
public class TestViewVersionParser {
@@ -58,7 +60,7 @@
"{\"version-id\":1, \"timestamp-ms\":12345, \"schema-id\":1, \"summary\":{\"user\":\"some-user\"}, \"representations\":%s, \"default-namespace\":[\"one\",\"two\"]}",
serializedRepresentations);
- Assertions.assertThat(ViewVersionParser.fromJson(serializedViewVersion))
+ assertThat(ViewVersionParser.fromJson(serializedViewVersion))
.as("Should be able to parse valid view version")
.isEqualTo(expectedViewVersion);
}
@@ -97,29 +99,29 @@
+ "\"default-catalog\":\"catalog\",\"default-namespace\":[\"one\",\"two\"],\"representations\":%s}",
expectedRepresentations);
- Assertions.assertThat(ViewVersionParser.toJson(viewVersion))
+ assertThat(ViewVersionParser.toJson(viewVersion))
.as("Should be able to serialize valid view version")
.isEqualTo(expectedViewVersion);
}
@Test
public void testNullViewVersion() {
- Assertions.assertThatThrownBy(() -> ViewVersionParser.toJson(null))
+ assertThatThrownBy(() -> ViewVersionParser.toJson(null))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot serialize null view version");
- Assertions.assertThatThrownBy(() -> ViewVersionParser.fromJson((JsonNode) null))
+ assertThatThrownBy(() -> ViewVersionParser.fromJson((JsonNode) null))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse view version from null object");
- Assertions.assertThatThrownBy(() -> ViewVersionParser.fromJson((String) null))
+ assertThatThrownBy(() -> ViewVersionParser.fromJson((String) null))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot parse view version from null string");
}
@Test
public void missingDefaultCatalog() {
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() ->
ViewVersionParser.fromJson(
"{\"version-id\":1,\"timestamp-ms\":12345,\"schema-id\":1,"
diff --git a/data/src/test/java/org/apache/iceberg/data/DataTestHelpers.java b/data/src/test/java/org/apache/iceberg/data/DataTestHelpers.java
index d4813ca..e02ec8e 100644
--- a/data/src/test/java/org/apache/iceberg/data/DataTestHelpers.java
+++ b/data/src/test/java/org/apache/iceberg/data/DataTestHelpers.java
@@ -18,11 +18,12 @@
*/
package org.apache.iceberg.data;
+import static org.assertj.core.api.Assertions.assertThat;
+
import java.util.List;
import java.util.Map;
import org.apache.iceberg.types.Type;
import org.apache.iceberg.types.Types;
-import org.assertj.core.api.Assertions;
import org.junit.Assert;
public class DataTestHelpers {
@@ -88,28 +89,24 @@
"Primitive value should be equal to expected for type " + type, expected, actual);
break;
case FIXED:
- Assertions.assertThat(expected)
- .as("Expected should be a byte[]")
- .isInstanceOf(byte[].class);
- Assertions.assertThat(expected).as("Actual should be a byte[]").isInstanceOf(byte[].class);
+ assertThat(expected).as("Expected should be a byte[]").isInstanceOf(byte[].class);
+ assertThat(expected).as("Actual should be a byte[]").isInstanceOf(byte[].class);
Assert.assertArrayEquals(
"Array contents should be equal", (byte[]) expected, (byte[]) actual);
break;
case STRUCT:
- Assertions.assertThat(expected)
- .as("Expected should be a Record")
- .isInstanceOf(Record.class);
- Assertions.assertThat(actual).as("Actual should be a Record").isInstanceOf(Record.class);
+ assertThat(expected).as("Expected should be a Record").isInstanceOf(Record.class);
+ assertThat(actual).as("Actual should be a Record").isInstanceOf(Record.class);
assertEquals(type.asStructType(), (Record) expected, (Record) actual);
break;
case LIST:
- Assertions.assertThat(expected).as("Expected should be a List").isInstanceOf(List.class);
- Assertions.assertThat(actual).as("Actual should be a List").isInstanceOf(List.class);
+ assertThat(expected).as("Expected should be a List").isInstanceOf(List.class);
+ assertThat(actual).as("Actual should be a List").isInstanceOf(List.class);
assertEquals(type.asListType(), (List) expected, (List) actual);
break;
case MAP:
- Assertions.assertThat(expected).as("Expected should be a Map").isInstanceOf(Map.class);
- Assertions.assertThat(actual).as("Actual should be a Map").isInstanceOf(Map.class);
+ assertThat(expected).as("Expected should be a Map").isInstanceOf(Map.class);
+ assertThat(actual).as("Actual should be a Map").isInstanceOf(Map.class);
assertEquals(type.asMapType(), (Map<?, ?>) expected, (Map<?, ?>) actual);
break;
default:
diff --git a/data/src/test/java/org/apache/iceberg/data/TestGenericRecord.java b/data/src/test/java/org/apache/iceberg/data/TestGenericRecord.java
index 74f04e1..dee38b9 100644
--- a/data/src/test/java/org/apache/iceberg/data/TestGenericRecord.java
+++ b/data/src/test/java/org/apache/iceberg/data/TestGenericRecord.java
@@ -19,10 +19,10 @@
package org.apache.iceberg.data;
import static org.apache.iceberg.types.Types.NestedField.optional;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
import org.apache.iceberg.Schema;
import org.apache.iceberg.types.Types;
-import org.assertj.core.api.Assertions;
import org.junit.Assert;
import org.junit.Test;
@@ -54,7 +54,7 @@
GenericRecord record = GenericRecord.create(schema);
record.set(0, 10L);
- Assertions.assertThatThrownBy(() -> record.get(0, CharSequence.class))
+ assertThatThrownBy(() -> record.get(0, CharSequence.class))
.isInstanceOf(IllegalStateException.class)
.hasMessage("Not an instance of java.lang.CharSequence: 10");
}
diff --git a/data/src/test/java/org/apache/iceberg/data/TestLocalScan.java b/data/src/test/java/org/apache/iceberg/data/TestLocalScan.java
index 47209aa..5855d24 100644
--- a/data/src/test/java/org/apache/iceberg/data/TestLocalScan.java
+++ b/data/src/test/java/org/apache/iceberg/data/TestLocalScan.java
@@ -27,6 +27,7 @@
import static org.apache.iceberg.relocated.com.google.common.collect.Iterables.transform;
import static org.apache.iceberg.types.Types.NestedField.optional;
import static org.apache.iceberg.types.Types.NestedField.required;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
import java.io.File;
import java.io.IOException;
@@ -59,7 +60,6 @@
import org.apache.iceberg.relocated.com.google.common.collect.Sets;
import org.apache.iceberg.types.Types;
import org.apache.iceberg.util.DateTimeUtil;
-import org.assertj.core.api.Assertions;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Rule;
@@ -505,8 +505,7 @@
IcebergGenerics.ScanBuilder scanBuilder = IcebergGenerics.read(sharedTable);
- Assertions.assertThatThrownBy(
- () -> scanBuilder.useSnapshot(/* unknown snapshot id */ minSnapshotId - 1))
+ assertThatThrownBy(() -> scanBuilder.useSnapshot(/* unknown snapshot id */ minSnapshotId - 1))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Cannot find snapshot with ID " + (minSnapshotId - 1));
}
@@ -516,8 +515,7 @@
IcebergGenerics.ScanBuilder scanBuilder = IcebergGenerics.read(sharedTable);
long timestamp = sharedTable.history().get(0).timestampMillis() - 1;
- Assertions.assertThatThrownBy(
- () -> scanBuilder.asOfTime(/* older than first snapshot */ timestamp))
+ assertThatThrownBy(() -> scanBuilder.asOfTime(/* older than first snapshot */ timestamp))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage(
"Cannot find a snapshot older than " + DateTimeUtil.formatTimestampMillis(timestamp));
diff --git a/data/src/test/java/org/apache/iceberg/data/TestMetricsRowGroupFilter.java b/data/src/test/java/org/apache/iceberg/data/TestMetricsRowGroupFilter.java
index cb6b8d7..9525001 100644
--- a/data/src/test/java/org/apache/iceberg/data/TestMetricsRowGroupFilter.java
+++ b/data/src/test/java/org/apache/iceberg/data/TestMetricsRowGroupFilter.java
@@ -39,6 +39,8 @@
import static org.apache.iceberg.expressions.Expressions.truncate;
import static org.apache.iceberg.types.Types.NestedField.optional;
import static org.apache.iceberg.types.Types.NestedField.required;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
import java.io.File;
import java.io.IOException;
@@ -77,7 +79,6 @@
import org.apache.parquet.hadoop.metadata.BlockMetaData;
import org.apache.parquet.io.DelegatingSeekableInputStream;
import org.apache.parquet.schema.MessageType;
-import org.assertj.core.api.Assertions;
import org.assertj.core.api.Assumptions;
import org.junit.Assert;
import org.junit.Assume;
@@ -397,7 +398,7 @@
@Test
public void testMissingColumn() {
- Assertions.assertThatThrownBy(() -> shouldRead(lessThan("missing", 5)))
+ assertThatThrownBy(() -> shouldRead(lessThan("missing", 5)))
.as("Should complain about missing column in expression")
.isInstanceOf(ValidationException.class)
.hasMessageStartingWith("Cannot find field 'missing'");
@@ -946,7 +947,7 @@
boolean shouldRead =
new ParquetMetricsRowGroupFilter(SCHEMA, equal(truncate("required", 2), "some_value"), true)
.shouldRead(parquetSchema, rowGroupMetadata);
- Assertions.assertThat(shouldRead)
+ assertThat(shouldRead)
.as("Should read: filter contains non-reference evaluate as True")
.isTrue();
}
diff --git a/data/src/test/java/org/apache/iceberg/data/TestReadProjection.java b/data/src/test/java/org/apache/iceberg/data/TestReadProjection.java
index 0b5f9ed..374a97b 100644
--- a/data/src/test/java/org/apache/iceberg/data/TestReadProjection.java
+++ b/data/src/test/java/org/apache/iceberg/data/TestReadProjection.java
@@ -18,6 +18,8 @@
*/
package org.apache.iceberg.data;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
+
import java.io.IOException;
import java.time.OffsetDateTime;
import java.time.ZoneOffset;
@@ -30,7 +32,6 @@
import org.apache.iceberg.relocated.com.google.common.collect.Sets;
import org.apache.iceberg.types.Comparators;
import org.apache.iceberg.types.Types;
-import org.assertj.core.api.Assertions;
import org.junit.Assert;
import org.junit.Rule;
import org.junit.Test;
@@ -188,8 +189,7 @@
Assert.assertNotNull("Should read a non-null record", projected);
// this is expected because there are no values
- Assertions.assertThatThrownBy(() -> projected.get(0))
- .isInstanceOf(ArrayIndexOutOfBoundsException.class);
+ assertThatThrownBy(() -> projected.get(0)).isInstanceOf(ArrayIndexOutOfBoundsException.class);
}
@Test
diff --git a/data/src/test/java/org/apache/iceberg/data/avro/TestSingleMessageEncoding.java b/data/src/test/java/org/apache/iceberg/data/avro/TestSingleMessageEncoding.java
index 1b8da1e..b68c20f 100644
--- a/data/src/test/java/org/apache/iceberg/data/avro/TestSingleMessageEncoding.java
+++ b/data/src/test/java/org/apache/iceberg/data/avro/TestSingleMessageEncoding.java
@@ -20,6 +20,7 @@
import static org.apache.iceberg.types.Types.NestedField.optional;
import static org.apache.iceberg.types.Types.NestedField.required;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
import java.nio.ByteBuffer;
import java.util.Arrays;
@@ -40,7 +41,6 @@
import org.apache.iceberg.relocated.com.google.common.collect.Ordering;
import org.apache.iceberg.relocated.com.google.common.collect.Sets;
import org.apache.iceberg.types.Types;
-import org.assertj.core.api.Assertions;
import org.junit.Assert;
import org.junit.Test;
@@ -132,7 +132,7 @@
ByteBuffer v1Buffer = v1Encoder.encode(V1_RECORDS.get(3));
- Assertions.assertThatThrownBy(() -> v2Decoder.decode(v1Buffer))
+ assertThatThrownBy(() -> v2Decoder.decode(v1Buffer))
.isInstanceOf(MissingSchemaException.class)
.hasMessageContaining("Cannot resolve schema for fingerprint");
}
@@ -206,7 +206,7 @@
buffer.limit(12);
- Assertions.assertThatThrownBy(() -> decoder.decode(buffer))
+ assertThatThrownBy(() -> decoder.decode(buffer))
.isInstanceOf(AvroRuntimeException.class)
.hasMessageContaining("Decoding datum failed");
}
@@ -220,7 +220,7 @@
buffer.limit(8);
- Assertions.assertThatThrownBy(() -> decoder.decode(buffer))
+ assertThatThrownBy(() -> decoder.decode(buffer))
.isInstanceOf(BadHeaderException.class)
.hasMessage("Not enough header bytes");
}
@@ -233,7 +233,7 @@
ByteBuffer buffer = encoder.encode(V2_RECORDS.get(0));
buffer.array()[0] = 0x00;
- Assertions.assertThatThrownBy(() -> decoder.decode(buffer))
+ assertThatThrownBy(() -> decoder.decode(buffer))
.isInstanceOf(BadHeaderException.class)
.hasMessageContaining("Unrecognized header bytes");
}
@@ -246,7 +246,7 @@
ByteBuffer buffer = encoder.encode(V2_RECORDS.get(0));
buffer.array()[1] = 0x00;
- Assertions.assertThatThrownBy(() -> decoder.decode(buffer))
+ assertThatThrownBy(() -> decoder.decode(buffer))
.isInstanceOf(BadHeaderException.class)
.hasMessageContaining("Unrecognized header bytes");
}
@@ -259,7 +259,7 @@
ByteBuffer buffer = encoder.encode(V2_RECORDS.get(0));
buffer.array()[4] = 0x00;
- Assertions.assertThatThrownBy(() -> decoder.decode(buffer))
+ assertThatThrownBy(() -> decoder.decode(buffer))
.isInstanceOf(MissingSchemaException.class)
.hasMessageContaining("Cannot resolve schema for fingerprint");
}
diff --git a/dell/src/test/java/org/apache/iceberg/dell/ecs/TestEcsCatalog.java b/dell/src/test/java/org/apache/iceberg/dell/ecs/TestEcsCatalog.java
index a35ba31..b0e2cfa 100644
--- a/dell/src/test/java/org/apache/iceberg/dell/ecs/TestEcsCatalog.java
+++ b/dell/src/test/java/org/apache/iceberg/dell/ecs/TestEcsCatalog.java
@@ -20,6 +20,7 @@
import static org.apache.iceberg.types.Types.NestedField.required;
import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
import java.io.IOException;
import java.util.Map;
@@ -40,7 +41,6 @@
import org.apache.iceberg.relocated.com.google.common.collect.ImmutableSet;
import org.apache.iceberg.relocated.com.google.common.collect.Maps;
import org.apache.iceberg.types.Types;
-import org.assertj.core.api.Assertions;
import org.junit.Assert;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
@@ -128,11 +128,11 @@
ecsCatalog.createNamespace(Namespace.of("a", "b1"));
ecsCatalog.createTable(TableIdentifier.of("a", "t1"), SCHEMA);
- Assertions.assertThatThrownBy(() -> ecsCatalog.dropNamespace(Namespace.of("unknown")))
+ assertThatThrownBy(() -> ecsCatalog.dropNamespace(Namespace.of("unknown")))
.isInstanceOf(NoSuchNamespaceException.class)
.hasMessage("Namespace unknown does not exist");
- Assertions.assertThatThrownBy(() -> ecsCatalog.dropNamespace(Namespace.of("a")))
+ assertThatThrownBy(() -> ecsCatalog.dropNamespace(Namespace.of("a")))
.isInstanceOf(NamespaceNotEmptyException.class)
.hasMessage("Namespace a is not empty");
@@ -165,14 +165,14 @@
ecsCatalog.createTable(TableIdentifier.of("a", "t1"), SCHEMA);
ecsCatalog.createNamespace(Namespace.of("b"));
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() ->
ecsCatalog.renameTable(
TableIdentifier.of("unknown"), TableIdentifier.of("b", "t2")))
.isInstanceOf(NoSuchTableException.class)
.hasMessage("Cannot rename table because table unknown does not exist");
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() ->
ecsCatalog.renameTable(
TableIdentifier.of("a", "t1"), TableIdentifier.of("unknown", "t2")))
@@ -198,12 +198,12 @@
TableOperations ops = ((HasTableOperations) registeringTable).operations();
String metadataLocation = ((EcsTableOperations) ops).currentMetadataLocation();
Table registeredTable = ecsCatalog.registerTable(identifier, metadataLocation);
- Assertions.assertThat(registeredTable).isNotNull();
+ assertThat(registeredTable).isNotNull();
String expectedMetadataLocation =
((HasTableOperations) registeredTable).operations().current().metadataFileLocation();
- Assertions.assertThat(metadataLocation).isEqualTo(expectedMetadataLocation);
- Assertions.assertThat(ecsCatalog.loadTable(identifier)).isNotNull();
- Assertions.assertThat(ecsCatalog.dropTable(identifier, true)).isTrue();
+ assertThat(metadataLocation).isEqualTo(expectedMetadataLocation);
+ assertThat(ecsCatalog.loadTable(identifier)).isNotNull();
+ assertThat(ecsCatalog.dropTable(identifier, true)).isTrue();
}
@Test
@@ -213,9 +213,9 @@
Table registeringTable = ecsCatalog.loadTable(identifier);
TableOperations ops = ((HasTableOperations) registeringTable).operations();
String metadataLocation = ((EcsTableOperations) ops).currentMetadataLocation();
- Assertions.assertThatThrownBy(() -> ecsCatalog.registerTable(identifier, metadataLocation))
+ assertThatThrownBy(() -> ecsCatalog.registerTable(identifier, metadataLocation))
.isInstanceOf(AlreadyExistsException.class)
.hasMessage("Table already exists: a.t1");
- Assertions.assertThat(ecsCatalog.dropTable(identifier, true)).isTrue();
+ assertThat(ecsCatalog.dropTable(identifier, true)).isTrue();
}
}
diff --git a/dell/src/test/java/org/apache/iceberg/dell/ecs/TestEcsOutputFile.java b/dell/src/test/java/org/apache/iceberg/dell/ecs/TestEcsOutputFile.java
index 9c8ef25..5b346e2 100644
--- a/dell/src/test/java/org/apache/iceberg/dell/ecs/TestEcsOutputFile.java
+++ b/dell/src/test/java/org/apache/iceberg/dell/ecs/TestEcsOutputFile.java
@@ -19,6 +19,7 @@
package org.apache.iceberg.dell.ecs;
import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
import com.emc.object.Range;
import java.io.IOException;
@@ -28,7 +29,6 @@
import org.apache.iceberg.exceptions.AlreadyExistsException;
import org.apache.iceberg.io.PositionOutputStream;
import org.apache.iceberg.relocated.com.google.common.io.ByteStreams;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.RegisterExtension;
@@ -87,7 +87,7 @@
output.write("1234567890".getBytes());
}
- Assertions.assertThatThrownBy(outputFile::create)
+ assertThatThrownBy(outputFile::create)
.isInstanceOf(AlreadyExistsException.class)
.hasMessage("ECS object already exists: " + outputFile.location());
}
diff --git a/dell/src/test/java/org/apache/iceberg/dell/ecs/TestEcsTableOperations.java b/dell/src/test/java/org/apache/iceberg/dell/ecs/TestEcsTableOperations.java
index 809af32..b8167ba 100644
--- a/dell/src/test/java/org/apache/iceberg/dell/ecs/TestEcsTableOperations.java
+++ b/dell/src/test/java/org/apache/iceberg/dell/ecs/TestEcsTableOperations.java
@@ -19,6 +19,7 @@
package org.apache.iceberg.dell.ecs;
import static org.apache.iceberg.types.Types.NestedField.required;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
import java.util.Map;
import org.apache.iceberg.CatalogProperties;
@@ -33,7 +34,6 @@
import org.apache.iceberg.relocated.com.google.common.collect.ImmutableSet;
import org.apache.iceberg.relocated.com.google.common.collect.Maps;
import org.apache.iceberg.types.Types;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.RegisterExtension;
@@ -57,7 +57,7 @@
// Use the TableOperations to test the CommitFailedException
// High level actions, such as Table#updateProperties(), may refresh metadata.
TableOperations operations = ((HasTableOperations) catalog2Table).operations();
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() ->
operations.commit(
operations.current(),
diff --git a/dell/src/test/java/org/apache/iceberg/dell/ecs/TestEcsURI.java b/dell/src/test/java/org/apache/iceberg/dell/ecs/TestEcsURI.java
index 0e80ac0..ec56e76 100644
--- a/dell/src/test/java/org/apache/iceberg/dell/ecs/TestEcsURI.java
+++ b/dell/src/test/java/org/apache/iceberg/dell/ecs/TestEcsURI.java
@@ -19,9 +19,9 @@
package org.apache.iceberg.dell.ecs;
import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
import org.apache.iceberg.exceptions.ValidationException;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
public class TestEcsURI {
@@ -55,7 +55,7 @@
@Test
public void testInvalidLocation() {
- Assertions.assertThatThrownBy(() -> new EcsURI("http://bucket/a"))
+ assertThatThrownBy(() -> new EcsURI("http://bucket/a"))
.isInstanceOf(ValidationException.class)
.hasMessage("Invalid ecs location: http://bucket/a");
}
diff --git a/dell/src/test/java/org/apache/iceberg/dell/mock/ecs/TestExceptionCode.java b/dell/src/test/java/org/apache/iceberg/dell/mock/ecs/TestExceptionCode.java
index 719c346..4e74556 100644
--- a/dell/src/test/java/org/apache/iceberg/dell/mock/ecs/TestExceptionCode.java
+++ b/dell/src/test/java/org/apache/iceberg/dell/mock/ecs/TestExceptionCode.java
@@ -18,10 +18,12 @@
*/
package org.apache.iceberg.dell.mock.ecs;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
+
import com.emc.object.Range;
import com.emc.object.s3.S3Exception;
import com.emc.object.s3.request.PutObjectRequest;
-import org.assertj.core.api.Assertions;
import org.assertj.core.api.InstanceOfAssertFactories;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.RegisterExtension;
@@ -67,17 +69,11 @@
}
public void assertS3Exception(String message, int httpCode, String errorCode, Runnable task) {
- Assertions.assertThatThrownBy(task::run)
+ assertThatThrownBy(task::run)
.isInstanceOf(S3Exception.class)
.asInstanceOf(InstanceOfAssertFactories.type(S3Exception.class))
.satisfies(
- e ->
- Assertions.assertThat(e.getErrorCode())
- .as(message + ", http code")
- .isEqualTo(errorCode),
- e ->
- Assertions.assertThat(e.getHttpCode())
- .as(message + ", error code")
- .isEqualTo(httpCode));
+ e -> assertThat(e.getErrorCode()).as(message + ", error code").isEqualTo(errorCode),
+ e -> assertThat(e.getHttpCode()).as(message + ", http code").isEqualTo(httpCode));
}
}
diff --git a/delta-lake/src/integration/java/org/apache/iceberg/delta/TestSnapshotDeltaLakeTable.java b/delta-lake/src/integration/java/org/apache/iceberg/delta/TestSnapshotDeltaLakeTable.java
index cebbea6..945e896 100644
--- a/delta-lake/src/integration/java/org/apache/iceberg/delta/TestSnapshotDeltaLakeTable.java
+++ b/delta-lake/src/integration/java/org/apache/iceberg/delta/TestSnapshotDeltaLakeTable.java
@@ -23,6 +23,7 @@
import static org.apache.spark.sql.functions.date_add;
import static org.apache.spark.sql.functions.date_format;
import static org.apache.spark.sql.functions.expr;
+import static org.assertj.core.api.Assertions.assertThat;
import io.delta.standalone.DeltaLog;
import io.delta.standalone.Operation;
@@ -59,7 +60,6 @@
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.connector.catalog.CatalogPlugin;
import org.apache.spark.sql.delta.catalog.DeltaCatalog;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
@@ -327,7 +327,7 @@
Paths.get(
URI.create(
vacuumTestTableLocation.concat("/_delta_log/00000000000000000000.json"))));
- Assertions.assertThat(deleteResult).isTrue();
+ assertThat(deleteResult).isTrue();
spark.sql("VACUUM " + vacuumTestIdentifier + " RETAIN 0 HOURS");
String newTableIdentifier = destName(icebergCatalogName, "iceberg_vacuum_table");
@@ -362,7 +362,7 @@
Paths.get(
URI.create(
logCleanTestTableLocation.concat("/_delta_log/00000000000000000000.json"))));
- Assertions.assertThat(deleteResult).isTrue();
+ assertThat(deleteResult).isTrue();
String newTableIdentifier = destName(icebergCatalogName, "iceberg_log_clean_table");
SnapshotDeltaLakeTable.Result result =
@@ -388,11 +388,10 @@
List<Row> icebergTableContents =
spark.sql("SELECT * FROM " + icebergTableIdentifier).collectAsList();
- Assertions.assertThat(deltaTableContents).hasSize(icebergTableContents.size());
- Assertions.assertThat(snapshotReport.snapshotDataFilesCount())
+ assertThat(deltaTableContents).hasSize(icebergTableContents.size());
+ assertThat(snapshotReport.snapshotDataFilesCount())
.isEqualTo(countDataFilesInDeltaLakeTable(deltaLog, firstConstructableVersion));
- Assertions.assertThat(icebergTableContents)
- .containsExactlyInAnyOrderElementsOf(deltaTableContents);
+ assertThat(icebergTableContents).containsExactlyInAnyOrderElementsOf(deltaTableContents);
}
private void checkTagContentAndOrder(
@@ -403,8 +402,7 @@
Map<String, SnapshotRef> icebergSnapshotRefs = icebergTable.refs();
List<Snapshot> icebergSnapshots = Lists.newArrayList(icebergTable.snapshots());
- Assertions.assertThat(icebergSnapshots.size())
- .isEqualTo(currentVersion - firstConstructableVersion + 1);
+ assertThat(icebergSnapshots.size()).isEqualTo(currentVersion - firstConstructableVersion + 1);
for (int i = 0; i < icebergSnapshots.size(); i++) {
long deltaVersion = firstConstructableVersion + i;
@@ -412,25 +410,25 @@
String expectedVersionTag = "delta-version-" + deltaVersion;
icebergSnapshotRefs.get(expectedVersionTag);
- Assertions.assertThat(icebergSnapshotRefs.get(expectedVersionTag)).isNotNull();
- Assertions.assertThat(icebergSnapshotRefs.get(expectedVersionTag).isTag()).isTrue();
- Assertions.assertThat(icebergSnapshotRefs.get(expectedVersionTag).snapshotId())
+ assertThat(icebergSnapshotRefs.get(expectedVersionTag)).isNotNull();
+ assertThat(icebergSnapshotRefs.get(expectedVersionTag).isTag()).isTrue();
+ assertThat(icebergSnapshotRefs.get(expectedVersionTag).snapshotId())
.isEqualTo(currentIcebergSnapshot.snapshotId());
Timestamp deltaVersionTimestamp = deltaLog.getCommitInfoAt(deltaVersion).getTimestamp();
- Assertions.assertThat(deltaVersionTimestamp).isNotNull();
+ assertThat(deltaVersionTimestamp).isNotNull();
String expectedTimestampTag = "delta-ts-" + deltaVersionTimestamp.getTime();
- Assertions.assertThat(icebergSnapshotRefs.get(expectedTimestampTag)).isNotNull();
- Assertions.assertThat(icebergSnapshotRefs.get(expectedTimestampTag).isTag()).isTrue();
- Assertions.assertThat(icebergSnapshotRefs.get(expectedTimestampTag).snapshotId())
+ assertThat(icebergSnapshotRefs.get(expectedTimestampTag)).isNotNull();
+ assertThat(icebergSnapshotRefs.get(expectedTimestampTag).isTag()).isTrue();
+ assertThat(icebergSnapshotRefs.get(expectedTimestampTag).snapshotId())
.isEqualTo(currentIcebergSnapshot.snapshotId());
}
}
private void checkIcebergTableLocation(String icebergTableIdentifier, String expectedLocation) {
Table icebergTable = getIcebergTable(icebergTableIdentifier);
- Assertions.assertThat(icebergTable.location())
+ assertThat(icebergTable.location())
.isEqualTo(LocationUtil.stripTrailingSlash(expectedLocation));
}
@@ -445,10 +443,8 @@
expectedPropertiesBuilder.putAll(expectedAdditionalProperties);
ImmutableMap<String, String> expectedProperties = expectedPropertiesBuilder.build();
- Assertions.assertThat(icebergTable.properties().entrySet())
- .containsAll(expectedProperties.entrySet());
- Assertions.assertThat(icebergTable.properties())
- .containsEntry(ORIGINAL_LOCATION_PROP, deltaTableLocation);
+ assertThat(icebergTable.properties().entrySet()).containsAll(expectedProperties.entrySet());
+ assertThat(icebergTable.properties()).containsEntry(ORIGINAL_LOCATION_PROP, deltaTableLocation);
}
private void checkDataFilePathsIntegrity(
@@ -466,8 +462,8 @@
.addedDataFiles(icebergTable.io())
.forEach(
dataFile -> {
- Assertions.assertThat(URI.create(dataFile.path().toString()).isAbsolute()).isTrue();
- Assertions.assertThat(deltaTableDataFilePaths).contains(dataFile.path().toString());
+ assertThat(URI.create(dataFile.path().toString()).isAbsolute()).isTrue();
+ assertThat(deltaTableDataFilePaths).contains(dataFile.path().toString());
});
}
diff --git a/delta-lake/src/test/java/org/apache/iceberg/delta/TestBaseSnapshotDeltaLakeTableAction.java b/delta-lake/src/test/java/org/apache/iceberg/delta/TestBaseSnapshotDeltaLakeTableAction.java
index 22e27c1..f77ce44 100644
--- a/delta-lake/src/test/java/org/apache/iceberg/delta/TestBaseSnapshotDeltaLakeTableAction.java
+++ b/delta-lake/src/test/java/org/apache/iceberg/delta/TestBaseSnapshotDeltaLakeTableAction.java
@@ -18,6 +18,8 @@
*/
package org.apache.iceberg.delta;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
+
import java.io.File;
import java.io.IOException;
import java.util.List;
@@ -27,7 +29,6 @@
import org.apache.iceberg.catalog.Catalog;
import org.apache.iceberg.catalog.Namespace;
import org.apache.iceberg.catalog.TableIdentifier;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.io.TempDir;
@@ -53,7 +54,7 @@
.icebergCatalog(testCatalog)
.deltaLakeConfiguration(testHadoopConf)
.tableLocation(newTableLocation);
- Assertions.assertThatThrownBy(testAction::execute)
+ assertThatThrownBy(testAction::execute)
.isInstanceOf(IllegalArgumentException.class)
.hasMessage(
"Iceberg catalog and identifier cannot be null. Make sure to configure the action with a valid Iceberg catalog and identifier.");
@@ -66,7 +67,7 @@
.as(TableIdentifier.of("test", "test"))
.deltaLakeConfiguration(testHadoopConf)
.tableLocation(newTableLocation);
- Assertions.assertThatThrownBy(testAction::execute)
+ assertThatThrownBy(testAction::execute)
.isInstanceOf(IllegalArgumentException.class)
.hasMessage(
"Iceberg catalog and identifier cannot be null. Make sure to configure the action with a valid Iceberg catalog and identifier.");
@@ -79,7 +80,7 @@
.as(TableIdentifier.of("test", "test"))
.icebergCatalog(testCatalog)
.tableLocation(newTableLocation);
- Assertions.assertThatThrownBy(testAction::execute)
+ assertThatThrownBy(testAction::execute)
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Make sure to configure the action with a valid deltaLakeConfiguration");
}
@@ -92,7 +93,7 @@
.deltaLakeConfiguration(testHadoopConf)
.icebergCatalog(testCatalog)
.tableLocation(newTableLocation);
- Assertions.assertThatThrownBy(testAction::execute)
+ assertThatThrownBy(testAction::execute)
.isInstanceOf(IllegalArgumentException.class)
.hasMessage(
"Delta Lake table does not exist at the given location: %s", sourceTableLocation);
diff --git a/delta-lake/src/test/java/org/apache/iceberg/delta/TestDeltaLakeTypeToType.java b/delta-lake/src/test/java/org/apache/iceberg/delta/TestDeltaLakeTypeToType.java
index 20989ee..6d99d64 100644
--- a/delta-lake/src/test/java/org/apache/iceberg/delta/TestDeltaLakeTypeToType.java
+++ b/delta-lake/src/test/java/org/apache/iceberg/delta/TestDeltaLakeTypeToType.java
@@ -18,6 +18,9 @@
*/
package org.apache.iceberg.delta;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
+
import io.delta.standalone.types.ArrayType;
import io.delta.standalone.types.BinaryType;
import io.delta.standalone.types.BooleanType;
@@ -31,7 +34,6 @@
import org.apache.iceberg.exceptions.ValidationException;
import org.apache.iceberg.types.Type;
import org.apache.iceberg.types.Types;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
@@ -75,12 +77,10 @@
deltaAtomicSchema, new DeltaLakeTypeToType(deltaAtomicSchema));
Schema convertedSchema = new Schema(converted.asNestedType().asStructType().fields());
- Assertions.assertThat(convertedSchema.findType(optionalBooleanType))
- .isInstanceOf(Types.BooleanType.class);
- Assertions.assertThat(convertedSchema.findField(optionalBooleanType).isOptional()).isTrue();
- Assertions.assertThat(convertedSchema.findType(requiredBinaryType))
- .isInstanceOf(Types.BinaryType.class);
- Assertions.assertThat(convertedSchema.findField(requiredBinaryType).isRequired()).isTrue();
+ assertThat(convertedSchema.findType(optionalBooleanType)).isInstanceOf(Types.BooleanType.class);
+ assertThat(convertedSchema.findField(optionalBooleanType).isOptional()).isTrue();
+ assertThat(convertedSchema.findType(requiredBinaryType)).isInstanceOf(Types.BinaryType.class);
+ assertThat(convertedSchema.findField(requiredBinaryType).isRequired()).isTrue();
}
@Test
@@ -90,49 +90,41 @@
deltaNestedSchema, new DeltaLakeTypeToType(deltaNestedSchema));
Schema convertedSchema = new Schema(converted.asNestedType().asStructType().fields());
- Assertions.assertThat(convertedSchema.findType(innerAtomicSchema))
- .isInstanceOf(Types.StructType.class);
- Assertions.assertThat(convertedSchema.findField(innerAtomicSchema).isOptional()).isTrue();
- Assertions.assertThat(
+ assertThat(convertedSchema.findType(innerAtomicSchema)).isInstanceOf(Types.StructType.class);
+ assertThat(convertedSchema.findField(innerAtomicSchema).isOptional()).isTrue();
+ assertThat(
convertedSchema
.findType(innerAtomicSchema)
.asStructType()
.fieldType(optionalBooleanType))
.isInstanceOf(Types.BooleanType.class);
- Assertions.assertThat(
+ assertThat(
convertedSchema
.findType(innerAtomicSchema)
.asStructType()
.fieldType(requiredBinaryType))
.isInstanceOf(Types.BinaryType.class);
- Assertions.assertThat(
+ assertThat(
convertedSchema
.findType(innerAtomicSchema)
.asStructType()
.field(requiredBinaryType)
.isRequired())
.isTrue();
- Assertions.assertThat(convertedSchema.findType(stringLongMapType))
- .isInstanceOf(Types.MapType.class);
- Assertions.assertThat(convertedSchema.findType(stringLongMapType).asMapType().keyType())
+ assertThat(convertedSchema.findType(stringLongMapType)).isInstanceOf(Types.MapType.class);
+ assertThat(convertedSchema.findType(stringLongMapType).asMapType().keyType())
.isInstanceOf(Types.StringType.class);
- Assertions.assertThat(convertedSchema.findType(stringLongMapType).asMapType().valueType())
+ assertThat(convertedSchema.findType(stringLongMapType).asMapType().valueType())
.isInstanceOf(Types.LongType.class);
- Assertions.assertThat(convertedSchema.findType(doubleArrayType))
- .isInstanceOf(Types.ListType.class);
- Assertions.assertThat(convertedSchema.findField(doubleArrayType).isRequired()).isTrue();
- Assertions.assertThat(
- convertedSchema.findType(doubleArrayType).asListType().isElementOptional())
- .isTrue();
- Assertions.assertThat(convertedSchema.findType(structArrayType))
- .isInstanceOf(Types.ListType.class);
- Assertions.assertThat(convertedSchema.findField(structArrayType).isRequired()).isTrue();
- Assertions.assertThat(
- convertedSchema.findType(structArrayType).asListType().isElementOptional())
- .isTrue();
- Assertions.assertThat(convertedSchema.findType(structArrayType).asListType().elementType())
+ assertThat(convertedSchema.findType(doubleArrayType)).isInstanceOf(Types.ListType.class);
+ assertThat(convertedSchema.findField(doubleArrayType).isRequired()).isTrue();
+ assertThat(convertedSchema.findType(doubleArrayType).asListType().isElementOptional()).isTrue();
+ assertThat(convertedSchema.findType(structArrayType)).isInstanceOf(Types.ListType.class);
+ assertThat(convertedSchema.findField(structArrayType).isRequired()).isTrue();
+ assertThat(convertedSchema.findType(structArrayType).asListType().isElementOptional()).isTrue();
+ assertThat(convertedSchema.findType(structArrayType).asListType().elementType())
.isInstanceOf(Types.StructType.class);
- Assertions.assertThat(
+ assertThat(
convertedSchema
.findType(structArrayType)
.asListType()
@@ -140,7 +132,7 @@
.asStructType()
.fieldType(optionalBooleanType))
.isInstanceOf(Types.BooleanType.class);
- Assertions.assertThat(
+ assertThat(
convertedSchema
.findType(structArrayType)
.asListType()
@@ -149,7 +141,7 @@
.field(optionalBooleanType)
.isOptional())
.isTrue();
- Assertions.assertThat(
+ assertThat(
convertedSchema
.findType(structArrayType)
.asListType()
@@ -157,7 +149,7 @@
.asStructType()
.fieldType(requiredBinaryType))
.isInstanceOf(Types.BinaryType.class);
- Assertions.assertThat(
+ assertThat(
convertedSchema
.findType(structArrayType)
.asListType()
@@ -170,13 +162,13 @@
@Test
public void testNullTypeConversion() {
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() ->
DeltaLakeDataTypeVisitor.visit(
deltaNullTypeSchema, new DeltaLakeTypeToType(deltaNullTypeSchema)))
.isInstanceOf(ValidationException.class)
.hasMessage(String.format("Not a supported type: %s", new NullType().getCatalogString()));
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() ->
DeltaLakeDataTypeVisitor.visit(
deltaShallowNullTypeSchema,
diff --git a/gcp/src/test/java/org/apache/iceberg/gcp/GCPPropertiesTest.java b/gcp/src/test/java/org/apache/iceberg/gcp/GCPPropertiesTest.java
index f7c770d..c71b558 100644
--- a/gcp/src/test/java/org/apache/iceberg/gcp/GCPPropertiesTest.java
+++ b/gcp/src/test/java/org/apache/iceberg/gcp/GCPPropertiesTest.java
@@ -21,16 +21,16 @@
import static org.apache.iceberg.gcp.GCPProperties.GCS_NO_AUTH;
import static org.apache.iceberg.gcp.GCPProperties.GCS_OAUTH2_TOKEN;
import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatIllegalStateException;
import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
public class GCPPropertiesTest {
@Test
public void testOAuthWithNoAuth() {
- Assertions.assertThatIllegalStateException()
+ assertThatIllegalStateException()
.isThrownBy(
() ->
new GCPProperties(ImmutableMap.of(GCS_OAUTH2_TOKEN, "oauth", GCS_NO_AUTH, "true")))
diff --git a/gcp/src/test/java/org/apache/iceberg/gcp/gcs/GCSInputStreamTest.java b/gcp/src/test/java/org/apache/iceberg/gcp/gcs/GCSInputStreamTest.java
index 76a0fa6..db6b5d9 100644
--- a/gcp/src/test/java/org/apache/iceberg/gcp/gcs/GCSInputStreamTest.java
+++ b/gcp/src/test/java/org/apache/iceberg/gcp/gcs/GCSInputStreamTest.java
@@ -34,7 +34,6 @@
import org.apache.iceberg.io.RangeReadable;
import org.apache.iceberg.io.SeekableInputStream;
import org.apache.iceberg.metrics.MetricsContext;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
public class GCSInputStreamTest {
@@ -157,7 +156,7 @@
throws IOException {
in.readFully(position, buffer, offset, length);
- Assertions.assertThat(Arrays.copyOfRange(buffer, offset, offset + length))
+ assertThat(Arrays.copyOfRange(buffer, offset, offset + length))
.isEqualTo(Arrays.copyOfRange(original, offset, offset + length));
}
diff --git a/gcp/src/test/java/org/apache/iceberg/gcp/gcs/GCSLocationTest.java b/gcp/src/test/java/org/apache/iceberg/gcp/gcs/GCSLocationTest.java
index 551eb13..e2ad925 100644
--- a/gcp/src/test/java/org/apache/iceberg/gcp/gcs/GCSLocationTest.java
+++ b/gcp/src/test/java/org/apache/iceberg/gcp/gcs/GCSLocationTest.java
@@ -18,8 +18,10 @@
*/
package org.apache.iceberg.gcp.gcs;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
+
import org.apache.iceberg.exceptions.ValidationException;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
public class GCSLocationTest {
@@ -28,8 +30,8 @@
String p1 = "gs://bucket/path/to/prefix";
GCSLocation location = new GCSLocation(p1);
- Assertions.assertThat(location.bucket()).isEqualTo("bucket");
- Assertions.assertThat(location.prefix()).isEqualTo("path/to/prefix");
+ assertThat(location.bucket()).isEqualTo("bucket");
+ assertThat(location.prefix()).isEqualTo("path/to/prefix");
}
@Test
@@ -37,20 +39,20 @@
String p1 = "gs://bucket/path%20to%20prefix";
GCSLocation location = new GCSLocation(p1);
- Assertions.assertThat(location.bucket()).isEqualTo("bucket");
- Assertions.assertThat(location.prefix()).isEqualTo("path%20to%20prefix");
+ assertThat(location.bucket()).isEqualTo("bucket");
+ assertThat(location.prefix()).isEqualTo("path%20to%20prefix");
}
@Test
public void testMissingScheme() {
- Assertions.assertThatThrownBy(() -> new GCSLocation("/path/to/prefix"))
+ assertThatThrownBy(() -> new GCSLocation("/path/to/prefix"))
.isInstanceOf(ValidationException.class)
.hasMessage("Invalid GCS URI, cannot determine scheme: /path/to/prefix");
}
@Test
public void testInvalidScheme() {
- Assertions.assertThatThrownBy(() -> new GCSLocation("s3://bucket/path/to/prefix"))
+ assertThatThrownBy(() -> new GCSLocation("s3://bucket/path/to/prefix"))
.isInstanceOf(ValidationException.class)
.hasMessage("Invalid GCS URI, invalid scheme: s3");
}
@@ -60,8 +62,8 @@
String p1 = "gs://bucket";
GCSLocation location = new GCSLocation(p1);
- Assertions.assertThat(location.bucket()).isEqualTo("bucket");
- Assertions.assertThat(location.prefix()).isEqualTo("");
+ assertThat(location.bucket()).isEqualTo("bucket");
+ assertThat(location.prefix()).isEqualTo("");
}
@Test
@@ -69,7 +71,7 @@
String p1 = "gs://bucket/path/to/prefix?query=foo#bar";
GCSLocation location = new GCSLocation(p1);
- Assertions.assertThat(location.bucket()).isEqualTo("bucket");
- Assertions.assertThat(location.prefix()).isEqualTo("path/to/prefix");
+ assertThat(location.bucket()).isEqualTo("bucket");
+ assertThat(location.prefix()).isEqualTo("path/to/prefix");
}
}
diff --git a/mr/src/test/java/org/apache/iceberg/mr/hive/TestHiveIcebergFilterFactory.java b/mr/src/test/java/org/apache/iceberg/mr/hive/TestHiveIcebergFilterFactory.java
index 121e2c8..579a4b8 100644
--- a/mr/src/test/java/org/apache/iceberg/mr/hive/TestHiveIcebergFilterFactory.java
+++ b/mr/src/test/java/org/apache/iceberg/mr/hive/TestHiveIcebergFilterFactory.java
@@ -19,6 +19,7 @@
package org.apache.iceberg.mr.hive;
import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
import java.math.BigDecimal;
import java.sql.Date;
@@ -39,7 +40,6 @@
import org.apache.iceberg.expressions.UnboundPredicate;
import org.apache.iceberg.types.Types;
import org.apache.iceberg.util.DateTimeUtil;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
public class TestHiveIcebergFilterFactory {
@@ -159,7 +159,7 @@
.between("salary", PredicateLeaf.Type.LONG, 9000L, 15000L)
.end()
.build());
- Assertions.assertThatThrownBy(() -> HiveIcebergFilterFactory.generateFilterExpression(arg))
+ assertThatThrownBy(() -> HiveIcebergFilterFactory.generateFilterExpression(arg))
.isInstanceOf(UnsupportedOperationException.class)
.hasMessage("Missing leaf literals: Leaf[empty]");
}
diff --git a/mr/src/test/java/org/apache/iceberg/mr/hive/TestHiveIcebergOutputCommitter.java b/mr/src/test/java/org/apache/iceberg/mr/hive/TestHiveIcebergOutputCommitter.java
index 8b8e209..147e0ba 100644
--- a/mr/src/test/java/org/apache/iceberg/mr/hive/TestHiveIcebergOutputCommitter.java
+++ b/mr/src/test/java/org/apache/iceberg/mr/hive/TestHiveIcebergOutputCommitter.java
@@ -21,6 +21,7 @@
import static org.apache.iceberg.mr.hive.HiveIcebergRecordWriter.getWriters;
import static org.apache.iceberg.types.Types.NestedField.required;
import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
import java.io.IOException;
import java.nio.file.Path;
@@ -56,7 +57,6 @@
import org.apache.iceberg.relocated.com.google.common.collect.Maps;
import org.apache.iceberg.types.Types;
import org.apache.iceberg.util.SerializationUtil;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.io.TempDir;
import org.mockito.ArgumentCaptor;
@@ -205,8 +205,7 @@
Table table = table(temp.toFile().getPath(), false);
JobConf conf = jobConf(table, 1);
- Assertions.assertThatThrownBy(
- () -> writeRecords(table.name(), 1, 0, true, false, conf, failingCommitter))
+ assertThatThrownBy(() -> writeRecords(table.name(), 1, 0, true, false, conf, failingCommitter))
.isInstanceOf(RuntimeException.class)
.hasMessage(exceptionMessage);
diff --git a/nessie/src/test/java/org/apache/iceberg/nessie/TestBranchVisibility.java b/nessie/src/test/java/org/apache/iceberg/nessie/TestBranchVisibility.java
index f38be05..00aa845 100644
--- a/nessie/src/test/java/org/apache/iceberg/nessie/TestBranchVisibility.java
+++ b/nessie/src/test/java/org/apache/iceberg/nessie/TestBranchVisibility.java
@@ -19,6 +19,8 @@
package org.apache.iceberg.nessie;
import static org.apache.iceberg.types.Types.NestedField.required;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
import java.util.Arrays;
import java.util.Collections;
@@ -41,7 +43,6 @@
import org.apache.iceberg.view.BaseView;
import org.apache.iceberg.view.View;
import org.assertj.core.api.AbstractStringAssert;
-import org.assertj.core.api.Assertions;
import org.assertj.core.api.InstanceOfAssertFactories;
import org.assertj.core.api.ThrowableAssert.ThrowingCallable;
import org.junit.jupiter.api.AfterEach;
@@ -121,15 +122,14 @@
String mainName = "main";
// asking for table@branch gives the expected result regardless of catalog
- Assertions.assertThat(metadataLocation(catalog, TableIdentifier.of("test-ns", "table1@test")))
+ assertThat(metadataLocation(catalog, TableIdentifier.of("test-ns", "table1@test")))
.isEqualTo(metadataLocation(testCatalog, tableIdentifier1));
// Asking for table@branch gives the expected result regardless of catalog.
// Earlier versions used "table1@" + tree.getReferenceByName("main").getHash(), but since
// Nessie 0.8.2 the branch name became mandatory and specifying a hash within a branch is not
// possible.
- Assertions.assertThat(
- metadataLocation(catalog, TableIdentifier.of("test-ns", "table1@" + mainName)))
+ assertThat(metadataLocation(catalog, TableIdentifier.of("test-ns", "table1@" + mainName)))
.isEqualTo(metadataLocation(testCatalog, tableIdentifier1));
}
@@ -160,7 +160,7 @@
String metadataOnTest2 =
addRow(catalog, tableIdentifier1, "added-data-on-test", ImmutableMap.of("id0", 5L));
- Assertions.assertThat(metadataOnTest2).isNotEqualTo(metadataOnTest);
+ assertThat(metadataOnTest2).isNotEqualTo(metadataOnTest);
long snapshotIdOnTest2 = snapshotIdFromMetadata(catalog, metadataOnTest2);
verifyRefState(catalog, tableIdentifier1, snapshotIdOnTest2, 0);
@@ -173,7 +173,7 @@
tableIdentifier1,
"testSchemaSnapshot-in-1",
ImmutableMap.of("id0", 42L, "id1", "world"));
- Assertions.assertThat(metadataOn1).isNotEqualTo(metadataOnTest).isNotEqualTo(metadataOnTest2);
+ assertThat(metadataOn1).isNotEqualTo(metadataOnTest).isNotEqualTo(metadataOnTest2);
NessieCatalog catalogBranch2 = initCatalog(branch2);
updateSchema(catalogBranch2, tableIdentifier1, Types.IntegerType.get());
@@ -184,7 +184,7 @@
tableIdentifier1,
"testSchemaSnapshot-in-2",
ImmutableMap.of("id0", 43L, "id2", 666));
- Assertions.assertThat(metadataOn2).isNotEqualTo(metadataOnTest).isNotEqualTo(metadataOnTest2);
+ assertThat(metadataOn2).isNotEqualTo(metadataOnTest).isNotEqualTo(metadataOnTest2);
}
@Test
@@ -202,15 +202,13 @@
catalog = initCatalog(branch2);
String metadataLocationOfCommit2 =
addRow(catalog, tableIdentifier1, "some-more-data", ImmutableMap.of("id0", 42L));
- Assertions.assertThat(metadataLocationOfCommit2)
- .isNotNull()
- .isNotEqualTo(metadataLocationOfCommit1);
+ assertThat(metadataLocationOfCommit2).isNotNull().isNotEqualTo(metadataLocationOfCommit1);
catalog = initCatalog(branch1);
// load tableIdentifier1 on branch1
BaseTable table = (BaseTable) catalog.loadTable(tableIdentifier1);
// branch1's tableIdentifier1's metadata location must not have changed
- Assertions.assertThat(table.operations().current().metadataFileLocation())
+ assertThat(table.operations().current().metadataFileLocation())
.isNotNull()
.isNotEqualTo(metadataLocationOfCommit2);
}
@@ -236,7 +234,7 @@
// Add a row and verify that the ref state is updated
String metadataOnTest =
addRow(catalog, tableIdentifier1, "initial-data", Collections.singletonMap("id0", 1L));
- Assertions.assertThat(metadataOnTest).isNotEqualTo(initialLocation);
+ assertThat(metadataOnTest).isNotEqualTo(initialLocation);
long snapshotIdOnTest = snapshotIdFromMetadata(catalog, metadataOnTest);
verifyRefState(catalog, tableIdentifier1, snapshotIdOnTest, 0);
@@ -260,15 +258,15 @@
"branch-a-1",
ImmutableMap.of("id0", 2L, "id1", "hello"));
// addRow() must produce a new metadata location
- Assertions.assertThat(metadataOnA1).isNotEqualTo(metadataOnTest);
+ assertThat(metadataOnA1).isNotEqualTo(metadataOnTest);
long snapshotIdOnA1 = snapshotIdFromMetadata(catalogBranchA, metadataOnA1);
- Assertions.assertThat(snapshotIdOnA1).isNotEqualTo(snapshotIdOnTest);
+ assertThat(snapshotIdOnA1).isNotEqualTo(snapshotIdOnTest);
verifyRefState(catalogBranchA, tableIdentifier1, snapshotIdOnA1, 1);
verifyRefState(catalog, tableIdentifier1, snapshotIdOnTest, 0);
NessieCatalog catalogBranchB = initCatalog(branchB);
long snapshotIdOnB = snapshotIdFromNessie(catalogBranchB, tableIdentifier1);
- Assertions.assertThat(snapshotIdOnB).isEqualTo(snapshotIdOnTest);
+ assertThat(snapshotIdOnB).isEqualTo(snapshotIdOnTest);
// branchB hasn't been modified yet, so it must be "equal" to branch "test"
verifyRefState(catalogBranchB, tableIdentifier1, snapshotIdOnB, 0);
// updateSchema should use schema-id 1, because it's not tracked globally
@@ -282,7 +280,7 @@
catalogBranchB, tableIdentifier1, "branch-b-1", ImmutableMap.of("id0", 3L, "id2", 42L));
long snapshotIdOnB1 = snapshotIdFromMetadata(catalogBranchB, metadataOnB1);
// addRow() must produce a new metadata location
- Assertions.assertThat(metadataOnB1).isNotEqualTo(metadataOnA1).isNotEqualTo(metadataOnTest);
+ assertThat(metadataOnB1).isNotEqualTo(metadataOnA1).isNotEqualTo(metadataOnTest);
verifyRefState(catalogBranchB, tableIdentifier1, snapshotIdOnB1, 1);
verifyRefState(catalog, tableIdentifier1, snapshotIdOnTest, 0);
@@ -296,7 +294,7 @@
"branch-a-2",
ImmutableMap.of("id0", 4L, "id1", "hello"));
long snapshotIdOnA2 = snapshotIdFromMetadata(catalogBranchA, metadataOnA2);
- Assertions.assertThat(metadataOnA2)
+ assertThat(metadataOnA2)
.isNotEqualTo(metadataOnA1)
.isNotEqualTo(metadataOnB1)
.isNotEqualTo(metadataOnTest);
@@ -312,7 +310,7 @@
"branch-b-2",
ImmutableMap.of("id0", 5L, "id2", 666L));
long snapshotIdOnB2 = snapshotIdFromMetadata(catalogBranchA, metadataOnB2);
- Assertions.assertThat(metadataOnB2)
+ assertThat(metadataOnB2)
.isNotEqualTo(metadataOnA1)
.isNotEqualTo(metadataOnA2)
.isNotEqualTo(metadataOnB1)
@@ -327,7 +325,7 @@
NessieCatalog catalog, TableIdentifier identifier, long snapshotId, int schemaId)
throws Exception {
IcebergTable icebergTable = loadIcebergTable(catalog, identifier);
- Assertions.assertThat(icebergTable)
+ assertThat(icebergTable)
.extracting(IcebergTable::getSnapshotId, IcebergTable::getSchemaId)
.containsExactly(snapshotId, schemaId);
}
@@ -378,7 +376,7 @@
}
private void verifySchema(NessieCatalog catalog, TableIdentifier identifier, Type... types) {
- Assertions.assertThat(catalog.loadTable(identifier))
+ assertThat(catalog.loadTable(identifier))
.extracting(t -> t.schema().columns().stream().map(NestedField::type))
.asInstanceOf(InstanceOfAssertFactories.stream(Type.class))
.containsExactly(types);
@@ -416,7 +414,7 @@
String table2 = metadataLocation(catalog, tableIdentifier2);
AbstractStringAssert<?> assertion =
- Assertions.assertThat(table1)
+ assertThat(table1)
.describedAs(
"Table %s on ref %s should%s be equal to table %s on ref %s",
tableIdentifier1.name(),
@@ -431,7 +429,7 @@
}
assertion =
- Assertions.assertThat(table2)
+ assertThat(table2)
.describedAs(
"Table %s on ref %s should%s be equal to table %s on ref %s",
tableIdentifier2.name(),
@@ -457,41 +455,41 @@
String hashBeforeNamespaceCreation = api.getReference().refName(testBranch).get().getHash();
Namespace namespaceA = Namespace.of("a");
Namespace namespaceAB = Namespace.of("a", "b");
- Assertions.assertThat(nessieCatalog.listNamespaces(namespaceAB)).isEmpty();
+ assertThat(nessieCatalog.listNamespaces(namespaceAB)).isEmpty();
createMissingNamespaces(
nessieCatalog, Namespace.of(Arrays.copyOf(namespaceAB.levels(), namespaceAB.length() - 1)));
nessieCatalog.createNamespace(namespaceAB);
- Assertions.assertThat(nessieCatalog.listNamespaces(namespaceAB)).isEmpty();
- Assertions.assertThat(nessieCatalog.listNamespaces(namespaceA)).containsExactly(namespaceAB);
- Assertions.assertThat(nessieCatalog.listTables(namespaceAB)).isEmpty();
+ assertThat(nessieCatalog.listNamespaces(namespaceAB)).isEmpty();
+ assertThat(nessieCatalog.listNamespaces(namespaceA)).containsExactly(namespaceAB);
+ assertThat(nessieCatalog.listTables(namespaceAB)).isEmpty();
NessieCatalog catalogAtHash1 = initCatalog(testBranch, hashBeforeNamespaceCreation);
- Assertions.assertThat(catalogAtHash1.listNamespaces(namespaceAB)).isEmpty();
- Assertions.assertThat(catalogAtHash1.listTables(namespaceAB)).isEmpty();
+ assertThat(catalogAtHash1.listNamespaces(namespaceAB)).isEmpty();
+ assertThat(catalogAtHash1.listTables(namespaceAB)).isEmpty();
TableIdentifier identifier = TableIdentifier.of(namespaceAB, "table");
String hashBeforeTableCreation = nessieCatalog.currentHash();
nessieCatalog.createTable(identifier, schema);
- Assertions.assertThat(nessieCatalog.listTables(namespaceAB)).hasSize(1);
+ assertThat(nessieCatalog.listTables(namespaceAB)).hasSize(1);
NessieCatalog catalogAtHash2 = initCatalog(testBranch, hashBeforeTableCreation);
- Assertions.assertThat(catalogAtHash2.listNamespaces(namespaceAB)).isEmpty();
- Assertions.assertThat(catalogAtHash2.listNamespaces(namespaceA)).containsExactly(namespaceAB);
- Assertions.assertThat(catalogAtHash2.listTables(namespaceAB)).isEmpty();
+ assertThat(catalogAtHash2.listNamespaces(namespaceAB)).isEmpty();
+ assertThat(catalogAtHash2.listNamespaces(namespaceA)).containsExactly(namespaceAB);
+ assertThat(catalogAtHash2.listTables(namespaceAB)).isEmpty();
// updates should not be possible
- Assertions.assertThatThrownBy(() -> catalogAtHash2.createTable(identifier, schema))
+ assertThatThrownBy(() -> catalogAtHash2.createTable(identifier, schema))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage(
"You can only mutate tables/views when using a branch without a hash or timestamp.");
- Assertions.assertThat(catalogAtHash2.listTables(namespaceAB)).isEmpty();
+ assertThat(catalogAtHash2.listTables(namespaceAB)).isEmpty();
// updates should still be possible here
nessieCatalog = initCatalog(testBranch);
TableIdentifier identifier2 = TableIdentifier.of(namespaceAB, "table2");
nessieCatalog.createTable(identifier2, schema);
- Assertions.assertThat(nessieCatalog.listTables(namespaceAB)).hasSize(2);
+ assertThat(nessieCatalog.listTables(namespaceAB)).hasSize(2);
}
@Test
@@ -515,14 +513,14 @@
createMissingNamespaces(nessieCatalog, identifier);
Table table1 = nessieCatalog.createTable(identifier, schema1);
- Assertions.assertThat(table1.schema().asStruct()).isEqualTo(schema1.asStruct());
+ assertThat(table1.schema().asStruct()).isEqualTo(schema1.asStruct());
nessieCatalog = initCatalog(branch2);
createMissingNamespaces(nessieCatalog, identifier);
Table table2 = nessieCatalog.createTable(identifier, schema2);
- Assertions.assertThat(table2.schema().asStruct()).isEqualTo(schema2.asStruct());
+ assertThat(table2.schema().asStruct()).isEqualTo(schema2.asStruct());
- Assertions.assertThat(table1.location()).isNotEqualTo(table2.location());
+ assertThat(table1.location()).isNotEqualTo(table2.location());
}
@Test
@@ -551,15 +549,13 @@
.current()
.metadataFileLocation();
- Assertions.assertThat(metadataLocationOfCommit2)
- .isNotNull()
- .isNotEqualTo(metadataLocationOfCommit1);
+ assertThat(metadataLocationOfCommit2).isNotNull().isNotEqualTo(metadataLocationOfCommit1);
catalog = initCatalog(branch1);
// load viewIdentifier on branch1
BaseView view = (BaseView) catalog.loadView(viewIdentifier);
// branch1's viewIdentifier's metadata location must not have changed
- Assertions.assertThat(view.operations().current().metadataFileLocation())
+ assertThat(view.operations().current().metadataFileLocation())
.isNotNull()
.isNotEqualTo(metadataLocationOfCommit2);
@@ -587,13 +583,13 @@
createMissingNamespaces(nessieCatalog, identifier);
View view1 = createView(nessieCatalog, identifier, schema1);
- Assertions.assertThat(view1.schema().asStruct()).isEqualTo(schema1.asStruct());
+ assertThat(view1.schema().asStruct()).isEqualTo(schema1.asStruct());
nessieCatalog = initCatalog(branch2);
createMissingNamespaces(nessieCatalog, identifier);
View view2 = createView(nessieCatalog, identifier, schema2);
- Assertions.assertThat(view2.schema().asStruct()).isEqualTo(schema2.asStruct());
+ assertThat(view2.schema().asStruct()).isEqualTo(schema2.asStruct());
- Assertions.assertThat(view1.location()).isNotEqualTo(view2.location());
+ assertThat(view1.location()).isNotEqualTo(view2.location());
}
}
diff --git a/nessie/src/test/java/org/apache/iceberg/nessie/TestMultipleClients.java b/nessie/src/test/java/org/apache/iceberg/nessie/TestMultipleClients.java
index b6ae906..d6f4f68 100644
--- a/nessie/src/test/java/org/apache/iceberg/nessie/TestMultipleClients.java
+++ b/nessie/src/test/java/org/apache/iceberg/nessie/TestMultipleClients.java
@@ -19,6 +19,8 @@
package org.apache.iceberg.nessie;
import static org.apache.iceberg.types.Types.NestedField.required;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
import java.io.IOException;
import java.net.URI;
@@ -34,7 +36,6 @@
import org.apache.iceberg.exceptions.NoSuchNamespaceException;
import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap;
import org.apache.iceberg.types.Types;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
@@ -72,34 +73,33 @@
@Test
public void testListNamespaces() throws NessieConflictException, NessieNotFoundException {
- Assertions.assertThat(catalog.listNamespaces()).isEmpty();
- Assertions.assertThat(anotherCatalog.listNamespaces()).isEmpty();
+ assertThat(catalog.listNamespaces()).isEmpty();
+ assertThat(anotherCatalog.listNamespaces()).isEmpty();
// listing a non-existent namespace should return empty
- Assertions.assertThat(catalog.listNamespaces(Namespace.of("db1"))).isEmpty();
- Assertions.assertThat(anotherCatalog.listNamespaces(Namespace.of("db1"))).isEmpty();
+ assertThat(catalog.listNamespaces(Namespace.of("db1"))).isEmpty();
+ assertThat(anotherCatalog.listNamespaces(Namespace.of("db1"))).isEmpty();
catalog.createNamespace(Namespace.of("db1"), Collections.emptyMap());
- Assertions.assertThat(catalog.listNamespaces()).containsExactlyInAnyOrder(Namespace.of("db1"));
- Assertions.assertThat(anotherCatalog.listNamespaces())
- .containsExactlyInAnyOrder(Namespace.of("db1"));
+ assertThat(catalog.listNamespaces()).containsExactlyInAnyOrder(Namespace.of("db1"));
+ assertThat(anotherCatalog.listNamespaces()).containsExactlyInAnyOrder(Namespace.of("db1"));
// another client creates a namespace with the same nessie server
anotherCatalog.createNamespace(Namespace.of("db2"), Collections.emptyMap());
- Assertions.assertThat(catalog.listNamespaces())
+ assertThat(catalog.listNamespaces())
.containsExactlyInAnyOrder(Namespace.of("db1"), Namespace.of("db2"));
- Assertions.assertThat(anotherCatalog.listNamespaces())
+ assertThat(anotherCatalog.listNamespaces())
.containsExactlyInAnyOrder(Namespace.of("db1"), Namespace.of("db2"));
api.deleteBranch().branch((Branch) api.getReference().refName(branch).get()).delete();
- Assertions.assertThatThrownBy(() -> catalog.listNamespaces())
+ assertThatThrownBy(() -> catalog.listNamespaces())
.isInstanceOf(NoSuchNamespaceException.class)
.hasMessageContaining(
"Cannot list top-level namespaces: ref '%s' is no longer valid", branch);
- Assertions.assertThatThrownBy(() -> anotherCatalog.listNamespaces(Namespace.of("db1")))
+ assertThatThrownBy(() -> anotherCatalog.listNamespaces(Namespace.of("db1")))
.isInstanceOf(NoSuchNamespaceException.class)
.hasMessageContaining(
"Cannot list child namespaces from 'db1': ref '%s' is no longer valid", branch);
@@ -107,25 +107,23 @@
@Test
public void testLoadNamespaceMetadata() throws NessieConflictException, NessieNotFoundException {
- Assertions.assertThatThrownBy(() -> catalog.loadNamespaceMetadata(Namespace.of("namespace1")))
+ assertThatThrownBy(() -> catalog.loadNamespaceMetadata(Namespace.of("namespace1")))
.isInstanceOf(NoSuchNamespaceException.class)
.hasMessageContaining("Namespace does not exist: namespace1");
- Assertions.assertThatThrownBy(
- () -> anotherCatalog.loadNamespaceMetadata(Namespace.of("namespace1")))
+ assertThatThrownBy(() -> anotherCatalog.loadNamespaceMetadata(Namespace.of("namespace1")))
.isInstanceOf(NoSuchNamespaceException.class)
.hasMessageContaining("Namespace does not exist: namespace1");
catalog.createNamespace(Namespace.of("namespace1"), Collections.emptyMap());
// both clients should see the namespace because we read the HEAD of the ref
- Assertions.assertThat(catalog.listNamespaces())
- .containsExactlyInAnyOrder(Namespace.of("namespace1"));
- Assertions.assertThat(anotherCatalog.listNamespaces())
+ assertThat(catalog.listNamespaces()).containsExactlyInAnyOrder(Namespace.of("namespace1"));
+ assertThat(anotherCatalog.listNamespaces())
.containsExactlyInAnyOrder(Namespace.of("namespace1"));
// the other client should not be able to update the namespace
// because it is still on the old ref hash
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() ->
anotherCatalog.setProperties(
Namespace.of("namespace1"), Collections.singletonMap("k1", "v1")))
@@ -136,19 +134,18 @@
// loading metadata from the same client and from another client should both work fine
// because we read the HEAD of the ref
- Assertions.assertThat(anotherCatalog.loadNamespaceMetadata(Namespace.of("namespace1")))
+ assertThat(anotherCatalog.loadNamespaceMetadata(Namespace.of("namespace1")))
.containsExactly(Map.entry("k1", "v1"));
- Assertions.assertThat(catalog.loadNamespaceMetadata(Namespace.of("namespace1")))
+ assertThat(catalog.loadNamespaceMetadata(Namespace.of("namespace1")))
.containsExactly(Map.entry("k1", "v1"));
api.deleteBranch().branch((Branch) api.getReference().refName(branch).get()).delete();
- Assertions.assertThatThrownBy(() -> catalog.loadNamespaceMetadata(Namespace.of("namespace1")))
+ assertThatThrownBy(() -> catalog.loadNamespaceMetadata(Namespace.of("namespace1")))
.isInstanceOf(RuntimeException.class)
.hasMessageContaining(
"Cannot load namespace 'namespace1': ref '%s' is no longer valid", branch);
- Assertions.assertThatThrownBy(
- () -> anotherCatalog.loadNamespaceMetadata(Namespace.of("namespace1")))
+ assertThatThrownBy(() -> anotherCatalog.loadNamespaceMetadata(Namespace.of("namespace1")))
.isInstanceOf(RuntimeException.class)
.hasMessageContaining(
"Cannot load namespace 'namespace1': ref '%s' is no longer valid", branch);
@@ -157,16 +154,16 @@
@Test
public void testListTables() {
createTable(TableIdentifier.parse("foo.tbl1"), schema);
- Assertions.assertThat(catalog.listTables(Namespace.of("foo")))
+ assertThat(catalog.listTables(Namespace.of("foo")))
.containsExactlyInAnyOrder(TableIdentifier.parse("foo.tbl1"));
// another client creates a table with the same nessie server
anotherCatalog.createTable(TableIdentifier.parse("foo.tbl2"), schema);
- Assertions.assertThat(anotherCatalog.listTables(Namespace.of("foo")))
+ assertThat(anotherCatalog.listTables(Namespace.of("foo")))
.containsExactlyInAnyOrder(
TableIdentifier.parse("foo.tbl1"), TableIdentifier.parse("foo.tbl2"));
- Assertions.assertThat(catalog.listTables(Namespace.of("foo")))
+ assertThat(catalog.listTables(Namespace.of("foo")))
.containsExactlyInAnyOrder(
TableIdentifier.parse("foo.tbl1"), TableIdentifier.parse("foo.tbl2"));
}
@@ -184,8 +181,8 @@
tableFromCatalog.updateSchema().addColumn("x3", Types.LongType.get()).commit();
tableFromAnotherCatalog.updateSchema().addColumn("x4", Types.LongType.get()).commit();
- Assertions.assertThat(catalog.loadTable(identifier).schema().columns()).hasSize(5);
- Assertions.assertThat(anotherCatalog.loadTable(identifier).schema().columns()).hasSize(5);
+ assertThat(catalog.loadTable(identifier).schema().columns()).hasSize(5);
+ assertThat(anotherCatalog.loadTable(identifier).schema().columns()).hasSize(5);
}
@Test
@@ -207,10 +204,10 @@
// refresh the catalog's client.
String hashAfter = catalog.currentHash();
- Assertions.assertThat(hashBefore).isNotEqualTo(hashAfter);
+ assertThat(hashBefore).isNotEqualTo(hashAfter);
// client refresh should not affect the ongoing commits (commit should still fail due to staleness)
- Assertions.assertThatThrownBy(() -> ops1.commit(ops1.current(), metadata1))
+ assertThatThrownBy(() -> ops1.commit(ops1.current(), metadata1))
.isInstanceOf(CommitFailedException.class)
.hasMessageContaining(
"Cannot commit: Reference hash is out of date. Update the reference 'multiple-clients-test' and try again");
diff --git a/nessie/src/test/java/org/apache/iceberg/nessie/TestNamespace.java b/nessie/src/test/java/org/apache/iceberg/nessie/TestNamespace.java
index 7238df9..29b9f73 100644
--- a/nessie/src/test/java/org/apache/iceberg/nessie/TestNamespace.java
+++ b/nessie/src/test/java/org/apache/iceberg/nessie/TestNamespace.java
@@ -19,6 +19,8 @@
package org.apache.iceberg.nessie;
import static org.apache.iceberg.types.Types.NestedField.required;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
import java.util.Arrays;
import java.util.Collections;
@@ -34,7 +36,6 @@
import org.apache.iceberg.relocated.com.google.common.collect.ImmutableSet;
import org.apache.iceberg.relocated.com.google.common.collect.Sets;
import org.apache.iceberg.types.Types;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
import org.projectnessie.error.NessieNotFoundException;
import org.projectnessie.model.ContentKey;
@@ -63,56 +64,55 @@
createTable(TableIdentifier.of("t6"));
List<TableIdentifier> tables = catalog.listTables(nsABC);
- Assertions.assertThat(tables).isNotNull().hasSize(1);
+ assertThat(tables).isNotNull().hasSize(1);
tables = catalog.listTables(nsAB);
- Assertions.assertThat(tables).isNotNull().hasSize(2);
+ assertThat(tables).isNotNull().hasSize(2);
tables = catalog.listTables(nsA);
- Assertions.assertThat(tables).isNotNull().hasSize(3);
+ assertThat(tables).isNotNull().hasSize(3);
tables = catalog.listTables(null);
- Assertions.assertThat(tables).isNotNull().hasSize(6);
+ assertThat(tables).isNotNull().hasSize(6);
List<Namespace> namespaces = catalog.listNamespaces();
- Assertions.assertThat(namespaces).containsExactly(nsA, nsB);
+ assertThat(namespaces).containsExactly(nsA, nsB);
namespaces = catalog.listNamespaces(nsA);
- Assertions.assertThat(namespaces).containsExactly(nsAB);
+ assertThat(namespaces).containsExactly(nsAB);
namespaces = catalog.listNamespaces(nsAB);
- Assertions.assertThat(namespaces).containsExactly(nsABC);
+ assertThat(namespaces).containsExactly(nsABC);
namespaces = catalog.listNamespaces(nsB);
- Assertions.assertThat(namespaces).containsExactly(nsBC);
+ assertThat(namespaces).containsExactly(nsBC);
}
@Test
public void testCreatingAndDroppingNamespace() {
Namespace namespace = Namespace.of("test");
catalog.createNamespace(namespace, ImmutableMap.of());
- Assertions.assertThat(catalog.namespaceExists(namespace)).isTrue();
+ assertThat(catalog.namespaceExists(namespace)).isTrue();
catalog.dropNamespace(namespace);
- Assertions.assertThat(catalog.namespaceExists(namespace)).isFalse();
+ assertThat(catalog.namespaceExists(namespace)).isFalse();
}
@Test
public void testCreatingAndDroppingNamespaceWithContent() throws NessieNotFoundException {
Namespace namespace = Namespace.of("test");
catalog.createNamespace(namespace, ImmutableMap.of());
- Assertions.assertThat(catalog.namespaceExists(namespace)).isTrue();
+ assertThat(catalog.namespaceExists(namespace)).isTrue();
TableIdentifier identifier = TableIdentifier.of(namespace, "tbl");
Schema schema =
new Schema(Types.StructType.of(required(1, "id", Types.LongType.get())).fields());
- Assertions.assertThat(catalog.createTable(identifier, schema)).isNotNull();
+ assertThat(catalog.createTable(identifier, schema)).isNotNull();
ContentKey key = NessieUtil.toKey(identifier);
- Assertions.assertThat(
- api.getContent().key(key).refName(BRANCH).get().get(key).unwrap(IcebergTable.class))
+ assertThat(api.getContent().key(key).refName(BRANCH).get().get(key).unwrap(IcebergTable.class))
.isPresent();
- Assertions.assertThatThrownBy(() -> catalog.dropNamespace(namespace))
+ assertThatThrownBy(() -> catalog.dropNamespace(namespace))
.isInstanceOf(NamespaceNotEmptyException.class)
.hasMessageContaining("Namespace 'test' is not empty");
catalog.dropTable(identifier, true);
catalog.dropNamespace(namespace);
- Assertions.assertThat(catalog.namespaceExists(namespace)).isFalse();
+ assertThat(catalog.namespaceExists(namespace)).isFalse();
}
@Test
@@ -120,46 +120,43 @@
Map<String, String> properties = ImmutableMap.of("prop", "val");
Namespace namespace = Namespace.of("withProperties");
catalog.createNamespace(namespace, properties);
- Assertions.assertThat(catalog.namespaceExists(namespace)).isTrue();
- Assertions.assertThat(catalog.loadNamespaceMetadata(namespace)).isEqualTo(properties);
+ assertThat(catalog.namespaceExists(namespace)).isTrue();
+ assertThat(catalog.loadNamespaceMetadata(namespace)).isEqualTo(properties);
ImmutableMap<String, String> updatedProperties =
ImmutableMap.of("prop2", "val2", "prop", "new_val");
catalog.setProperties(namespace, updatedProperties);
- Assertions.assertThat(catalog.loadNamespaceMetadata(namespace)).isEqualTo(updatedProperties);
+ assertThat(catalog.loadNamespaceMetadata(namespace)).isEqualTo(updatedProperties);
- Assertions.assertThatThrownBy(
- () -> catalog.setProperties(Namespace.of("unknown"), updatedProperties))
+ assertThatThrownBy(() -> catalog.setProperties(Namespace.of("unknown"), updatedProperties))
.isInstanceOf(NoSuchNamespaceException.class)
.hasMessage("Namespace does not exist: unknown");
}
@Test
public void testEmptyNamespace() {
- Assertions.assertThatThrownBy(
- () -> catalog.createNamespace(Namespace.empty(), Collections.emptyMap()))
+ assertThatThrownBy(() -> catalog.createNamespace(Namespace.empty(), Collections.emptyMap()))
.isInstanceOf(NoSuchNamespaceException.class)
.hasMessage("Invalid namespace: ");
- Assertions.assertThat(catalog.namespaceExists(Namespace.empty())).isFalse();
+ assertThat(catalog.namespaceExists(Namespace.empty())).isFalse();
- Assertions.assertThatThrownBy(() -> catalog.loadNamespaceMetadata(Namespace.empty()))
+ assertThatThrownBy(() -> catalog.loadNamespaceMetadata(Namespace.empty()))
.isInstanceOf(NoSuchNamespaceException.class)
.hasMessage("Invalid namespace: ");
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() ->
catalog.setProperties(
Namespace.empty(), ImmutableMap.of("prop2", "val2", "prop", "val")))
.isInstanceOf(NoSuchNamespaceException.class)
.hasMessage("Invalid namespace: ");
- Assertions.assertThatThrownBy(
- () -> catalog.removeProperties(Namespace.empty(), ImmutableSet.of("prop2")))
+ assertThatThrownBy(() -> catalog.removeProperties(Namespace.empty(), ImmutableSet.of("prop2")))
.isInstanceOf(NoSuchNamespaceException.class)
.hasMessage("Invalid namespace: ");
- Assertions.assertThatThrownBy(() -> catalog.dropNamespace(Namespace.empty()))
+ assertThatThrownBy(() -> catalog.dropNamespace(Namespace.empty()))
.isInstanceOf(NoSuchNamespaceException.class)
.hasMessage("Invalid namespace: ");
}
@@ -169,15 +166,14 @@
Map<String, String> properties = ImmutableMap.of("prop2", "val2", "prop", "val");
Namespace namespace = Namespace.of("withPropertyDeletes");
catalog.createNamespace(namespace, properties);
- Assertions.assertThat(catalog.namespaceExists(namespace)).isTrue();
- Assertions.assertThat(catalog.loadNamespaceMetadata(namespace)).isEqualTo(properties);
+ assertThat(catalog.namespaceExists(namespace)).isTrue();
+ assertThat(catalog.loadNamespaceMetadata(namespace)).isEqualTo(properties);
Set<String> toRemove = Sets.newHashSet(Arrays.asList("prop1", "prop2", "prop3"));
catalog.removeProperties(namespace, toRemove);
- Assertions.assertThat(catalog.loadNamespaceMetadata(namespace))
- .isEqualTo(ImmutableMap.of("prop", "val"));
+ assertThat(catalog.loadNamespaceMetadata(namespace)).isEqualTo(ImmutableMap.of("prop", "val"));
- Assertions.assertThatThrownBy(() -> catalog.removeProperties(Namespace.of("unknown"), toRemove))
+ assertThatThrownBy(() -> catalog.removeProperties(Namespace.of("unknown"), toRemove))
.isInstanceOf(NoSuchNamespaceException.class)
.hasMessage("Namespace does not exist: unknown");
}
@@ -187,18 +183,15 @@
Map<String, String> properties = ImmutableMap.of("location", "/custom/location");
Namespace namespaceWithLocation = Namespace.of("withLocation");
catalog.createNamespace(namespaceWithLocation, properties);
- Assertions.assertThat(catalog.namespaceExists(namespaceWithLocation)).isTrue();
- Assertions.assertThat(
- catalog.defaultWarehouseLocation(TableIdentifier.of("withLocation", "testTable")))
+ assertThat(catalog.namespaceExists(namespaceWithLocation)).isTrue();
+ assertThat(catalog.defaultWarehouseLocation(TableIdentifier.of("withLocation", "testTable")))
.startsWith("/custom/location/testTable");
Namespace namespaceWithoutLocation = Namespace.of("withoutLocation");
catalog.createNamespace(namespaceWithoutLocation, ImmutableMap.of());
- Assertions.assertThat(catalog.namespaceExists(namespaceWithoutLocation)).isTrue();
- Assertions.assertThat(
- catalog.defaultWarehouseLocation(TableIdentifier.of("withoutLocation", "testTable")))
+ assertThat(catalog.namespaceExists(namespaceWithoutLocation)).isTrue();
+ assertThat(catalog.defaultWarehouseLocation(TableIdentifier.of("withoutLocation", "testTable")))
.contains("/withoutLocation/testTable");
- Assertions.assertThat(
- catalog.defaultWarehouseLocation(TableIdentifier.of("badNamespace", "testTable")))
+ assertThat(catalog.defaultWarehouseLocation(TableIdentifier.of("badNamespace", "testTable")))
.contains("/badNamespace/testTable");
}
}
diff --git a/nessie/src/test/java/org/apache/iceberg/nessie/TestNessieCatalog.java b/nessie/src/test/java/org/apache/iceberg/nessie/TestNessieCatalog.java
index dbe8f92..55be034 100644
--- a/nessie/src/test/java/org/apache/iceberg/nessie/TestNessieCatalog.java
+++ b/nessie/src/test/java/org/apache/iceberg/nessie/TestNessieCatalog.java
@@ -18,6 +18,8 @@
*/
package org.apache.iceberg.nessie;
+import static org.assertj.core.api.Assertions.assertThat;
+
import java.io.IOException;
import java.net.URI;
import java.nio.file.Path;
@@ -28,7 +30,6 @@
import org.apache.iceberg.catalog.CatalogTests;
import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap;
import org.apache.iceberg.util.LocationUtil;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Disabled;
@@ -161,7 +162,7 @@
@Test
public void testWarehouseLocationWithTrailingSlash() {
- Assertions.assertThat(catalog.defaultWarehouseLocation(TABLE))
+ assertThat(catalog.defaultWarehouseLocation(TABLE))
.startsWith(
LocationUtil.stripTrailingSlash(temp.toUri().toString())
+ "/"
diff --git a/nessie/src/test/java/org/apache/iceberg/nessie/TestNessieIcebergClient.java b/nessie/src/test/java/org/apache/iceberg/nessie/TestNessieIcebergClient.java
index e49990f..05ee755 100644
--- a/nessie/src/test/java/org/apache/iceberg/nessie/TestNessieIcebergClient.java
+++ b/nessie/src/test/java/org/apache/iceberg/nessie/TestNessieIcebergClient.java
@@ -19,6 +19,10 @@
package org.apache.iceberg.nessie;
import static org.apache.iceberg.types.Types.NestedField.required;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatIllegalArgumentException;
+import static org.assertj.core.api.Assertions.assertThatRuntimeException;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
import java.io.IOException;
import java.util.Arrays;
@@ -37,7 +41,6 @@
import org.apache.iceberg.exceptions.NoSuchNamespaceException;
import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap;
import org.apache.iceberg.types.Types;
-import org.assertj.core.api.Assertions;
import org.assertj.core.data.Index;
import org.junit.jupiter.api.Test;
import org.projectnessie.error.NessieConflictException;
@@ -62,30 +65,26 @@
@Test
public void testWithNullRefLoadsMain() throws NessieNotFoundException {
NessieIcebergClient client = new NessieIcebergClient(api, null, null, ImmutableMap.of());
- Assertions.assertThat(client.getRef().getReference())
- .isEqualTo(api.getReference().refName("main").get());
+ assertThat(client.getRef().getReference()).isEqualTo(api.getReference().refName("main").get());
}
@Test
public void testWithNullHash() throws NessieNotFoundException {
NessieIcebergClient client = new NessieIcebergClient(api, BRANCH, null, ImmutableMap.of());
- Assertions.assertThat(client.getRef().getReference())
- .isEqualTo(api.getReference().refName(BRANCH).get());
+ assertThat(client.getRef().getReference()).isEqualTo(api.getReference().refName(BRANCH).get());
}
@Test
public void testWithReference() throws NessieNotFoundException {
NessieIcebergClient client = new NessieIcebergClient(api, "main", null, ImmutableMap.of());
- Assertions.assertThat(client.withReference(null, null)).isEqualTo(client);
- Assertions.assertThat(client.withReference("main", null)).isNotEqualTo(client);
- Assertions.assertThat(
- client.withReference("main", api.getReference().refName("main").get().getHash()))
+ assertThat(client.withReference(null, null)).isEqualTo(client);
+ assertThat(client.withReference("main", null)).isNotEqualTo(client);
+ assertThat(client.withReference("main", api.getReference().refName("main").get().getHash()))
.isEqualTo(client);
- Assertions.assertThat(client.withReference(BRANCH, null)).isNotEqualTo(client);
- Assertions.assertThat(
- client.withReference(BRANCH, api.getReference().refName(BRANCH).get().getHash()))
+ assertThat(client.withReference(BRANCH, null)).isNotEqualTo(client);
+ assertThat(client.withReference(BRANCH, api.getReference().refName(BRANCH).get().getHash()))
.isNotEqualTo(client);
}
@@ -99,7 +98,7 @@
// just create a new commit on the branch and then delete & re-create it
Namespace namespace = Namespace.of("a");
client.createNamespace(namespace, ImmutableMap.of());
- Assertions.assertThat(client.listNamespaces(namespace)).isNotNull();
+ assertThat(client.listNamespaces(namespace)).isNotNull();
client
.getApi()
.deleteBranch()
@@ -109,9 +108,8 @@
// make sure the client uses the re-created branch
Reference ref = client.getApi().getReference().refName(branch).get();
- Assertions.assertThat(client.withReference(branch, null).getRef().getReference())
- .isEqualTo(ref);
- Assertions.assertThat(client.withReference(branch, null)).isNotEqualTo(client);
+ assertThat(client.withReference(branch, null).getRef().getReference()).isEqualTo(ref);
+ assertThat(client.withReference(branch, null)).isNotEqualTo(client);
}
@Test
@@ -125,10 +123,10 @@
NessieIcebergClient client = new NessieIcebergClient(api, branch, null, catalogOptions);
client.createNamespace(Namespace.of("a"), Map.of());
- Assertions.assertThat(client.listNamespaces(Namespace.of("a"))).isNotNull();
+ assertThat(client.listNamespaces(Namespace.of("a"))).isNotNull();
List<LogResponse.LogEntry> entries = api.getCommitLog().refName(branch).get().getLogEntries();
- Assertions.assertThat(entries)
+ assertThat(entries)
.isNotEmpty()
.first()
.extracting(LogResponse.LogEntry::getCommitMeta)
@@ -147,11 +145,11 @@
createBranch(branch);
NessieIcebergClient client = new NessieIcebergClient(api, branch, null, Map.of());
- Assertions.assertThatThrownBy(() -> client.createNamespace(Namespace.empty(), Map.of()))
+ assertThatThrownBy(() -> client.createNamespace(Namespace.empty(), Map.of()))
.isInstanceOf(NoSuchNamespaceException.class)
.hasMessageContaining("Invalid namespace: ");
- Assertions.assertThatThrownBy(() -> client.createNamespace(Namespace.of("a", "b"), Map.of()))
+ assertThatThrownBy(() -> client.createNamespace(Namespace.of("a", "b"), Map.of()))
.isInstanceOf(NoSuchNamespaceException.class)
.hasMessageContaining("Cannot create namespace 'a.b': parent namespace 'a' does not exist");
}
@@ -165,14 +163,14 @@
client.createNamespace(Namespace.of("a"), Map.of());
- Assertions.assertThatThrownBy(() -> client.createNamespace(Namespace.of("a"), Map.of()))
+ assertThatThrownBy(() -> client.createNamespace(Namespace.of("a"), Map.of()))
.isInstanceOf(AlreadyExistsException.class)
.hasMessageContaining("Namespace already exists: a");
client.commitTable(
null, newTableMetadata(), "file:///tmp/iceberg", (String) null, ContentKey.of("a", "tbl"));
- Assertions.assertThatThrownBy(() -> client.createNamespace(Namespace.of("a", "tbl"), Map.of()))
+ assertThatThrownBy(() -> client.createNamespace(Namespace.of("a", "tbl"), Map.of()))
.isInstanceOf(AlreadyExistsException.class)
.hasMessageContaining("Another content object with name 'a.tbl' already exists");
}
@@ -188,14 +186,14 @@
org.projectnessie.model.Namespace.of(ContentKey.of("a"));
commit(branch, "create namespace a", Operation.Put.of(ContentKey.of("a"), nessieNs));
- Assertions.assertThatThrownBy(() -> client.createNamespace(Namespace.of("a"), Map.of()))
+ assertThatThrownBy(() -> client.createNamespace(Namespace.of("a"), Map.of()))
.isInstanceOf(AlreadyExistsException.class)
.hasMessageContaining("Namespace already exists: a");
IcebergTable table = IcebergTable.of("file:///tmp/iceberg", 1, 1, 1, 1);
commit(branch, "create table a.tbl2", Operation.Put.of(ContentKey.of("a", "tbl"), table));
- Assertions.assertThatThrownBy(() -> client.createNamespace(Namespace.of("a", "tbl"), Map.of()))
+ assertThatThrownBy(() -> client.createNamespace(Namespace.of("a", "tbl"), Map.of()))
.isInstanceOf(AlreadyExistsException.class)
.hasMessageContaining("Another content object with name 'a.tbl' already exists");
}
@@ -211,7 +209,7 @@
api.deleteBranch().branch((Branch) api.getReference().refName(branch).get()).delete();
- Assertions.assertThatThrownBy(() -> client.createNamespace(Namespace.of("b"), Map.of()))
+ assertThatThrownBy(() -> client.createNamespace(Namespace.of("b"), Map.of()))
.isInstanceOf(RuntimeException.class)
.hasMessageContaining(
"Cannot create namespace 'b': ref 'createNamespaceNonExistingRefBranch' is no longer valid");
@@ -230,33 +228,33 @@
Namespace parent = Namespace.of("a");
Namespace child = Namespace.of("a", "b");
- Assertions.assertThat(client.dropNamespace(parent)).isFalse();
- Assertions.assertThat(client.dropNamespace(child)).isFalse();
+ assertThat(client.dropNamespace(parent)).isFalse();
+ assertThat(client.dropNamespace(child)).isFalse();
client.createNamespace(parent, Map.of());
client.createNamespace(child, Map.of());
- Assertions.assertThat(client.dropNamespace(child)).isTrue();
- Assertions.assertThat(client.dropNamespace(parent)).isTrue();
+ assertThat(client.dropNamespace(child)).isTrue();
+ assertThat(client.dropNamespace(parent)).isTrue();
List<LogResponse.LogEntry> entries = api.getCommitLog().refName(branch).get().getLogEntries();
- Assertions.assertThat(entries)
+ assertThat(entries)
.isNotEmpty()
.extracting(LogResponse.LogEntry::getCommitMeta)
.satisfies(
meta -> {
- Assertions.assertThat(meta.getMessage()).contains("drop namespace a");
- Assertions.assertThat(meta.getAuthor()).isEqualTo("iceberg-user");
- Assertions.assertThat(meta.getProperties())
+ assertThat(meta.getMessage()).contains("drop namespace a");
+ assertThat(meta.getAuthor()).isEqualTo("iceberg-user");
+ assertThat(meta.getProperties())
.containsEntry(NessieUtil.APPLICATION_TYPE, "iceberg")
.containsEntry(CatalogProperties.APP_ID, "iceberg-nessie");
},
Index.atIndex(0))
.satisfies(
meta -> {
- Assertions.assertThat(meta.getMessage()).contains("drop namespace a.b");
- Assertions.assertThat(meta.getAuthor()).isEqualTo("iceberg-user");
- Assertions.assertThat(meta.getProperties())
+ assertThat(meta.getMessage()).contains("drop namespace a.b");
+ assertThat(meta.getAuthor()).isEqualTo("iceberg-user");
+ assertThat(meta.getProperties())
.containsEntry(NessieUtil.APPLICATION_TYPE, "iceberg")
.containsEntry(CatalogProperties.APP_ID, "iceberg-nessie");
},
@@ -272,7 +270,7 @@
client.createNamespace(Namespace.of("a"), Map.of());
client.createNamespace(Namespace.of("a", "b"), Map.of());
- Assertions.assertThatThrownBy(() -> client.dropNamespace(Namespace.of("a")))
+ assertThatThrownBy(() -> client.dropNamespace(Namespace.of("a")))
.isInstanceOf(NamespaceNotEmptyException.class)
.hasMessageContaining("Namespace 'a' is not empty.");
}
@@ -288,7 +286,7 @@
client.commitTable(
null, newTableMetadata(), "file:///tmp/iceberg", (String) null, ContentKey.of("a", "tbl"));
- Assertions.assertThatThrownBy(() -> client.dropNamespace(Namespace.of("a", "tbl")))
+ assertThatThrownBy(() -> client.dropNamespace(Namespace.of("a", "tbl")))
.isInstanceOf(NoSuchNamespaceException.class)
.hasMessageContaining("Content object with name 'a.tbl' is not a namespace.");
}
@@ -310,7 +308,7 @@
.build();
commit(branch, "update namespace a", Operation.Put.of(ContentKey.of("a"), updated));
- Assertions.assertThatThrownBy(() -> client.dropNamespace(Namespace.of("a")))
+ assertThatThrownBy(() -> client.dropNamespace(Namespace.of("a")))
.isInstanceOf(RuntimeException.class)
.hasMessageContaining(
"Cannot drop namespace 'a': Values of existing and expected content for key 'a' are different.");
@@ -327,7 +325,7 @@
api.deleteBranch().branch((Branch) api.getReference().refName(branch).get()).delete();
- Assertions.assertThat(client.dropNamespace(Namespace.of("a"))).isFalse();
+ assertThat(client.dropNamespace(Namespace.of("a"))).isFalse();
}
@Test
@@ -343,23 +341,23 @@
Namespace ns = Namespace.of("a");
client.createNamespace(ns, Map.of("k1", "v1a"));
- Assertions.assertThat(client.setProperties(ns, Map.of("k1", "v1b", "k2", "v2"))).isTrue();
+ assertThat(client.setProperties(ns, Map.of("k1", "v1b", "k2", "v2"))).isTrue();
- Assertions.assertThat(client.loadNamespaceMetadata(ns))
+ assertThat(client.loadNamespaceMetadata(ns))
.hasSize(2)
.containsEntry("k1", "v1b")
.containsEntry("k2", "v2");
List<LogResponse.LogEntry> entries = api.getCommitLog().refName(branch).get().getLogEntries();
- Assertions.assertThat(entries)
+ assertThat(entries)
.isNotEmpty()
.first()
.extracting(LogResponse.LogEntry::getCommitMeta)
.satisfies(
meta -> {
- Assertions.assertThat(meta.getMessage()).contains("update namespace a");
- Assertions.assertThat(meta.getAuthor()).isEqualTo("iceberg-user");
- Assertions.assertThat(meta.getProperties())
+ assertThat(meta.getMessage()).contains("update namespace a");
+ assertThat(meta.getAuthor()).isEqualTo("iceberg-user");
+ assertThat(meta.getProperties())
.containsEntry(NessieUtil.APPLICATION_TYPE, "iceberg")
.containsEntry(CatalogProperties.APP_ID, "iceberg-nessie");
});
@@ -385,9 +383,9 @@
commit(branch, "update namespace a", Operation.Put.of(key, updated));
// will generate a conflict and a retry
- Assertions.assertThat(client.setProperties(ns, Map.of("k1", "v1c", "k3", "v3"))).isTrue();
+ assertThat(client.setProperties(ns, Map.of("k1", "v1c", "k3", "v3"))).isTrue();
- Assertions.assertThat(client.loadNamespaceMetadata(ns))
+ assertThat(client.loadNamespaceMetadata(ns))
.hasSize(3)
.containsEntry("k1", "v1c")
.containsEntry("k2", "v2")
@@ -405,8 +403,7 @@
commit(branch, "delete namespace a", Operation.Delete.of(ContentKey.of("a")));
- Assertions.assertThatThrownBy(
- () -> client.setProperties(Namespace.of("a"), Map.of("k1", "v1a")))
+ assertThatThrownBy(() -> client.setProperties(Namespace.of("a"), Map.of("k1", "v1a")))
.isInstanceOf(NoSuchNamespaceException.class)
.hasMessageContaining("Namespace does not exist: a");
}
@@ -422,7 +419,7 @@
api.deleteBranch().branch((Branch) api.getReference().refName(branch).get()).delete();
- Assertions.assertThatThrownBy(() -> client.setProperties(Namespace.of("a"), Map.of("k1", "v1")))
+ assertThatThrownBy(() -> client.setProperties(Namespace.of("a"), Map.of("k1", "v1")))
.isInstanceOf(RuntimeException.class)
.hasMessageContaining(
"Cannot update properties on namespace 'a': ref 'setPropertiesNonExistingRefBranch' is no longer valid");
@@ -442,20 +439,20 @@
client.createNamespace(ns, Map.of("k1", "v1", "k2", "v2"));
- Assertions.assertThat(client.removeProperties(ns, Set.of("k1"))).isTrue();
+ assertThat(client.removeProperties(ns, Set.of("k1"))).isTrue();
- Assertions.assertThat(client.loadNamespaceMetadata(ns)).hasSize(1).containsOnlyKeys("k2");
+ assertThat(client.loadNamespaceMetadata(ns)).hasSize(1).containsOnlyKeys("k2");
List<LogResponse.LogEntry> entries = api.getCommitLog().refName(branch).get().getLogEntries();
- Assertions.assertThat(entries)
+ assertThat(entries)
.isNotEmpty()
.first()
.extracting(LogResponse.LogEntry::getCommitMeta)
.satisfies(
meta -> {
- Assertions.assertThat(meta.getMessage()).contains("update namespace a");
- Assertions.assertThat(meta.getAuthor()).isEqualTo("iceberg-user");
- Assertions.assertThat(meta.getProperties())
+ assertThat(meta.getMessage()).contains("update namespace a");
+ assertThat(meta.getAuthor()).isEqualTo("iceberg-user");
+ assertThat(meta.getProperties())
.containsEntry(NessieUtil.APPLICATION_TYPE, "iceberg")
.containsEntry(CatalogProperties.APP_ID, "iceberg-nessie");
});
@@ -481,9 +478,9 @@
commit(branch, "update namespace a", Operation.Put.of(key, updated));
// will generate a conflict and a retry
- Assertions.assertThat(client.removeProperties(ns, Set.of("k2"))).isTrue();
+ assertThat(client.removeProperties(ns, Set.of("k2"))).isTrue();
- Assertions.assertThat(client.loadNamespaceMetadata(ns)).hasSize(1).containsOnlyKeys("k3");
+ assertThat(client.loadNamespaceMetadata(ns)).hasSize(1).containsOnlyKeys("k3");
}
@Test
@@ -497,7 +494,7 @@
commit(branch, "delete namespace a", Operation.Delete.of(ContentKey.of("a")));
- Assertions.assertThatThrownBy(() -> client.removeProperties(Namespace.of("a"), Set.of("k1")))
+ assertThatThrownBy(() -> client.removeProperties(Namespace.of("a"), Set.of("k1")))
.isInstanceOf(NoSuchNamespaceException.class)
.hasMessageContaining("Namespace does not exist: a");
}
@@ -513,7 +510,7 @@
api.deleteBranch().branch((Branch) api.getReference().refName(branch).get()).delete();
- Assertions.assertThatThrownBy(() -> client.removeProperties(Namespace.of("a"), Set.of("k1")))
+ assertThatThrownBy(() -> client.removeProperties(Namespace.of("a"), Set.of("k1")))
.isInstanceOf(RuntimeException.class)
.hasMessageContaining(
"Cannot update properties on namespace 'a': ref 'removePropertiesNonExistingRefBranch' is no longer valid");
@@ -525,7 +522,7 @@
newCatalog.setConf(hadoopConfig);
ImmutableMap.Builder<String, String> options =
ImmutableMap.<String, String>builder().put("client-api-version", "3");
- Assertions.assertThatIllegalArgumentException()
+ assertThatIllegalArgumentException()
.isThrownBy(() -> newCatalog.initialize("nessie", options.buildOrThrow()))
.withMessage("Unsupported client-api-version: 3. Can only be 1 or 2");
}
@@ -537,14 +534,14 @@
newCatalog.setConf(hadoopConfig);
ImmutableMap.Builder<String, String> options =
ImmutableMap.<String, String>builder().put("uri", "some/uri/");
- Assertions.assertThatIllegalArgumentException()
+ assertThatIllegalArgumentException()
.isThrownBy(() -> newCatalog.initialize("nessie", options.buildOrThrow()))
.withMessage(
"URI doesn't end with the version: some/uri/. Please configure `client-api-version` in the catalog properties explicitly.");
ImmutableMap.Builder<String, String> newOptions =
ImmutableMap.<String, String>builder().put("uri", "some/uri/v3");
- Assertions.assertThatIllegalArgumentException()
+ assertThatIllegalArgumentException()
.isThrownBy(() -> newCatalog.initialize("nessie", newOptions.buildOrThrow()))
.withMessage("Unsupported client-api-version: 3. Can only be 1 or 2");
}
@@ -564,7 +561,7 @@
.put("client-api-version", version);
newCatalog.initialize("nessie", options.buildOrThrow());
// Since client-api-version is configured, API version should not be based on URI.
- Assertions.assertThatRuntimeException()
+ assertThatRuntimeException()
.isThrownBy(() -> newCatalog.loadTable(TableIdentifier.of("foo", "t1")))
.withMessageStartingWith("API version mismatch, check URI prefix");
}
diff --git a/nessie/src/test/java/org/apache/iceberg/nessie/TestNessieTable.java b/nessie/src/test/java/org/apache/iceberg/nessie/TestNessieTable.java
index 2501610..94eb314 100644
--- a/nessie/src/test/java/org/apache/iceberg/nessie/TestNessieTable.java
+++ b/nessie/src/test/java/org/apache/iceberg/nessie/TestNessieTable.java
@@ -20,6 +20,9 @@
import static org.apache.iceberg.types.Types.NestedField.optional;
import static org.apache.iceberg.types.Types.NestedField.required;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatCode;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
import java.io.File;
import java.io.IOException;
@@ -53,7 +56,6 @@
import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap;
import org.apache.iceberg.relocated.com.google.common.collect.Lists;
import org.apache.iceberg.types.Types;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
@@ -145,10 +147,10 @@
Table tableInitialMain = catalog.loadTable(TABLE_IDENTIFIER);
// verify table-metadata-location + snapshot-id
- Assertions.assertThat(contentInitialMain)
+ assertThat(contentInitialMain)
.as("global-contents + snapshot-id equal on both branches in Nessie")
.isEqualTo(contentInitialBranch);
- Assertions.assertThat(tableInitialMain.currentSnapshot()).isNull();
+ assertThat(tableInitialMain.currentSnapshot()).isNull();
// 3. modify table in "main" branch (add some data)
@@ -161,18 +163,18 @@
// --> assert getValue() against both branches returns the updated metadata-location
// verify table-metadata-location
- Assertions.assertThat(contentInitialMain.getMetadataLocation())
+ assertThat(contentInitialMain.getMetadataLocation())
.describedAs("metadata-location must change on %s", BRANCH)
.isNotEqualTo(contentsAfter1Main.getMetadataLocation());
- Assertions.assertThat(contentInitialBranch.getMetadataLocation())
+ assertThat(contentInitialBranch.getMetadataLocation())
.describedAs("metadata-location must not change on %s", testCaseBranch)
.isEqualTo(contentsAfter1Branch.getMetadataLocation());
- Assertions.assertThat(contentsAfter1Main)
+ assertThat(contentsAfter1Main)
.extracting(IcebergTable::getSchemaId)
.describedAs("on-reference-state must not be equal on both branches")
.isEqualTo(contentsAfter1Branch.getSchemaId());
// verify manifests
- Assertions.assertThat(tableAfter1Main.currentSnapshot().allManifests(tableAfter1Main.io()))
+ assertThat(tableAfter1Main.currentSnapshot().allManifests(tableAfter1Main.io()))
.describedAs("verify number of manifests on 'main'")
.hasSize(1);
@@ -187,14 +189,14 @@
// --> assert getValue() against both branches returns the updated metadata-location
// verify table-metadata-location
- Assertions.assertThat(contentsAfter2Main.getMetadataLocation())
+ assertThat(contentsAfter2Main.getMetadataLocation())
.describedAs("metadata-location must change on %s", BRANCH)
.isNotEqualTo(contentsAfter1Main.getMetadataLocation());
- Assertions.assertThat(contentsAfter2Branch.getMetadataLocation())
+ assertThat(contentsAfter2Branch.getMetadataLocation())
.describedAs("on-reference-state must not change on %s", testCaseBranch)
.isEqualTo(contentsAfter1Branch.getMetadataLocation());
// verify manifests
- Assertions.assertThat(tableAfter2Main.currentSnapshot().allManifests(tableAfter2Main.io()))
+ assertThat(tableAfter2Main.currentSnapshot().allManifests(tableAfter2Main.io()))
.describedAs("verify number of manifests on 'main'")
.hasSize(2);
}
@@ -211,11 +213,11 @@
getTable(KEY); // sanity, check table exists
// check parameters are in expected state
String expected = temp.toUri() + DB_NAME + "/" + tableName;
- Assertions.assertThat(getTableBasePath(tableName)).isEqualTo(expected);
+ assertThat(getTableBasePath(tableName)).isEqualTo(expected);
// Only 1 snapshot file should exist and no manifests should exist
- Assertions.assertThat(metadataVersionFiles(tableLocation)).isNotNull().hasSize(2);
- Assertions.assertThat(manifestFiles(tableLocation)).isNotNull().isEmpty();
+ assertThat(metadataVersionFiles(tableLocation)).isNotNull().hasSize(2);
+ assertThat(manifestFiles(tableLocation)).isNotNull().isEmpty();
verifyCommitMetadata();
}
@@ -229,17 +231,17 @@
Table original = catalog.loadTable(TABLE_IDENTIFIER);
catalog.renameTable(TABLE_IDENTIFIER, renameTableIdentifier);
- Assertions.assertThat(catalog.tableExists(TABLE_IDENTIFIER)).isFalse();
- Assertions.assertThat(catalog.tableExists(renameTableIdentifier)).isTrue();
+ assertThat(catalog.tableExists(TABLE_IDENTIFIER)).isFalse();
+ assertThat(catalog.tableExists(renameTableIdentifier)).isTrue();
Table renamed = catalog.loadTable(renameTableIdentifier);
- Assertions.assertThat(original.schema().asStruct()).isEqualTo(renamed.schema().asStruct());
- Assertions.assertThat(original.spec()).isEqualTo(renamed.spec());
- Assertions.assertThat(original.location()).isEqualTo(renamed.location());
- Assertions.assertThat(original.currentSnapshot()).isEqualTo(renamed.currentSnapshot());
+ assertThat(original.schema().asStruct()).isEqualTo(renamed.schema().asStruct());
+ assertThat(original.spec()).isEqualTo(renamed.spec());
+ assertThat(original.location()).isEqualTo(renamed.location());
+ assertThat(original.currentSnapshot()).isEqualTo(renamed.currentSnapshot());
- Assertions.assertThat(catalog.dropTable(renameTableIdentifier)).isTrue();
+ assertThat(catalog.dropTable(renameTableIdentifier)).isTrue();
verifyCommitMetadata();
}
@@ -268,17 +270,17 @@
Table original = catalog.loadTable(fromIdentifier);
catalog.renameTable(fromIdentifier, toIdentifier);
- Assertions.assertThat(catalog.tableExists(fromIdentifier)).isFalse();
- Assertions.assertThat(catalog.tableExists(toIdentifier)).isTrue();
+ assertThat(catalog.tableExists(fromIdentifier)).isFalse();
+ assertThat(catalog.tableExists(toIdentifier)).isTrue();
Table renamed = catalog.loadTable(toIdentifier);
- Assertions.assertThat(original.schema().asStruct()).isEqualTo(renamed.schema().asStruct());
- Assertions.assertThat(original.spec()).isEqualTo(renamed.spec());
- Assertions.assertThat(original.location()).isEqualTo(renamed.location());
- Assertions.assertThat(original.currentSnapshot()).isEqualTo(renamed.currentSnapshot());
+ assertThat(original.schema().asStruct()).isEqualTo(renamed.schema().asStruct());
+ assertThat(original.spec()).isEqualTo(renamed.spec());
+ assertThat(original.location()).isEqualTo(renamed.location());
+ assertThat(original.currentSnapshot()).isEqualTo(renamed.currentSnapshot());
- Assertions.assertThat(catalog.dropTable(toIdentifier)).isTrue();
+ assertThat(catalog.dropTable(toIdentifier)).isTrue();
verifyCommitMetadata();
}
@@ -304,7 +306,7 @@
TableIdentifier toIdentifier =
TableIdentifier.of(TABLE_IDENTIFIER.namespace(), toTableReference.toString());
- Assertions.assertThatThrownBy(() -> catalog.renameTable(fromIdentifier, toIdentifier))
+ assertThatThrownBy(() -> catalog.renameTable(fromIdentifier, toIdentifier))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage(
"Cannot rename table 'tbl' on reference 'Something' to 'rename_table_name' on reference 'iceberg-table-test': source and target references must be the same.");
@@ -324,7 +326,7 @@
TableIdentifier toIdentifierNew =
TableIdentifier.of(TABLE_IDENTIFIER.namespace(), toTableReference.toString());
- Assertions.assertThatThrownBy(() -> catalog.renameTable(fromIdentifierNew, toIdentifierNew))
+ assertThatThrownBy(() -> catalog.renameTable(fromIdentifierNew, toIdentifierNew))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage(
"Cannot rename table 'tbl' on reference 'iceberg-table-test' to 'rename_table_name' on reference 'Something': source and target references must be the same.");
@@ -333,26 +335,26 @@
private void verifyCommitMetadata() throws NessieNotFoundException {
// check that the author is properly set
List<LogEntry> log = api.getCommitLog().refName(BRANCH).get().getLogEntries();
- Assertions.assertThat(log)
+ assertThat(log)
.isNotNull()
.isNotEmpty()
.filteredOn(e -> !e.getCommitMeta().getMessage().startsWith("create namespace "))
.allSatisfy(
logEntry -> {
CommitMeta commit = logEntry.getCommitMeta();
- Assertions.assertThat(commit.getAuthor()).isNotNull().isNotEmpty();
- Assertions.assertThat(commit.getAuthor()).isEqualTo(System.getProperty("user.name"));
- Assertions.assertThat(commit.getProperties().get(NessieUtil.APPLICATION_TYPE))
+ assertThat(commit.getAuthor()).isNotNull().isNotEmpty();
+ assertThat(commit.getAuthor()).isEqualTo(System.getProperty("user.name"));
+ assertThat(commit.getProperties().get(NessieUtil.APPLICATION_TYPE))
.isEqualTo("iceberg");
- Assertions.assertThat(commit.getMessage()).startsWith("Iceberg");
+ assertThat(commit.getMessage()).startsWith("Iceberg");
});
}
@Test
public void testDrop() throws NessieNotFoundException {
- Assertions.assertThat(catalog.tableExists(TABLE_IDENTIFIER)).isTrue();
- Assertions.assertThat(catalog.dropTable(TABLE_IDENTIFIER)).isTrue();
- Assertions.assertThat(catalog.tableExists(TABLE_IDENTIFIER)).isFalse();
+ assertThat(catalog.tableExists(TABLE_IDENTIFIER)).isTrue();
+ assertThat(catalog.dropTable(TABLE_IDENTIFIER)).isTrue();
+ assertThat(catalog.tableExists(TABLE_IDENTIFIER)).isFalse();
verifyCommitMetadata();
}
@@ -365,9 +367,9 @@
.build();
TableIdentifier identifier =
TableIdentifier.of(TABLE_IDENTIFIER.namespace(), tableReference.toString());
- Assertions.assertThat(catalog.tableExists(identifier)).isTrue();
- Assertions.assertThat(catalog.dropTable(identifier)).isTrue();
- Assertions.assertThat(catalog.tableExists(identifier)).isFalse();
+ assertThat(catalog.tableExists(identifier)).isTrue();
+ assertThat(catalog.dropTable(identifier)).isTrue();
+ assertThat(catalog.tableExists(identifier)).isFalse();
verifyCommitMetadata();
}
@@ -384,11 +386,11 @@
String manifestListLocation =
table.currentSnapshot().manifestListLocation().replace("file:", "");
- Assertions.assertThat(catalog.dropTable(TABLE_IDENTIFIER, false)).isTrue();
- Assertions.assertThat(catalog.tableExists(TABLE_IDENTIFIER)).isFalse();
+ assertThat(catalog.dropTable(TABLE_IDENTIFIER, false)).isTrue();
+ assertThat(catalog.tableExists(TABLE_IDENTIFIER)).isFalse();
- Assertions.assertThat(new File(fileLocation)).exists();
- Assertions.assertThat(new File(manifestListLocation)).exists();
+ assertThat(new File(fileLocation)).exists();
+ assertThat(new File(manifestListLocation)).exists();
}
@Test
@@ -412,37 +414,36 @@
List<ManifestFile> manifests = table.currentSnapshot().allManifests(table.io());
- Assertions.assertThat(catalog.dropTable(TABLE_IDENTIFIER)).isTrue();
- Assertions.assertThat(catalog.tableExists(TABLE_IDENTIFIER)).isFalse();
+ assertThat(catalog.dropTable(TABLE_IDENTIFIER)).isTrue();
+ assertThat(catalog.tableExists(TABLE_IDENTIFIER)).isFalse();
- Assertions.assertThat(new File(location1)).exists();
- Assertions.assertThat(new File(location2)).exists();
- Assertions.assertThat(new File(manifestListLocation)).exists();
+ assertThat(new File(location1)).exists();
+ assertThat(new File(location2)).exists();
+ assertThat(new File(manifestListLocation)).exists();
for (ManifestFile manifest : manifests) {
- Assertions.assertThat(new File(manifest.path().replace("file:", ""))).exists();
+ assertThat(new File(manifest.path().replace("file:", ""))).exists();
}
TableOperations ops = ((HasTableOperations) table).operations();
String metadataLocation = ((NessieTableOperations) ops).currentMetadataLocation();
- Assertions.assertThat(new File(metadataLocation.replace("file:", ""))).exists();
+ assertThat(new File(metadataLocation.replace("file:", ""))).exists();
verifyCommitMetadata();
}
private void validateRegister(TableIdentifier identifier, String metadataVersionFiles) {
- Assertions.assertThat(catalog.registerTable(identifier, "file:" + metadataVersionFiles))
- .isNotNull();
+ assertThat(catalog.registerTable(identifier, "file:" + metadataVersionFiles)).isNotNull();
Table newTable = catalog.loadTable(identifier);
- Assertions.assertThat(newTable).isNotNull();
+ assertThat(newTable).isNotNull();
TableOperations ops = ((HasTableOperations) newTable).operations();
String metadataLocation = ((NessieTableOperations) ops).currentMetadataLocation();
- Assertions.assertThat("file:" + metadataVersionFiles).isEqualTo(metadataLocation);
- Assertions.assertThat(catalog.dropTable(identifier, false)).isTrue();
+ assertThat("file:" + metadataVersionFiles).isEqualTo(metadataLocation);
+ assertThat(catalog.dropTable(identifier, false)).isTrue();
}
@Test
public void testRegisterTableWithGivenBranch() throws Exception {
List<String> metadataVersionFiles = metadataVersionFiles(tableLocation);
- Assertions.assertThat(1).isEqualTo(metadataVersionFiles.size());
+ assertThat(metadataVersionFiles).hasSize(1);
ImmutableTableReference tableReference =
ImmutableTableReference.builder().reference("main").name(TABLE_NAME).build();
TableIdentifier identifier = TableIdentifier.of(DB_NAME, tableReference.toString());
@@ -458,18 +459,18 @@
public void testRegisterTableFailureScenarios()
throws NessieConflictException, NessieNotFoundException {
List<String> metadataVersionFiles = metadataVersionFiles(tableLocation);
- Assertions.assertThat(1).isEqualTo(metadataVersionFiles.size());
+ assertThat(metadataVersionFiles).hasSize(1);
// Case 1: Branch does not exist
ImmutableTableReference defaultTableReference =
ImmutableTableReference.builder().reference("default").name(TABLE_NAME).build();
TableIdentifier defaultIdentifier =
TableIdentifier.of(DB_NAME, defaultTableReference.toString());
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() -> catalog.registerTable(defaultIdentifier, "file:" + metadataVersionFiles.get(0)))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Nessie ref 'default' does not exist");
// Case 2: Table Already Exists
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() -> catalog.registerTable(TABLE_IDENTIFIER, "file:" + metadataVersionFiles.get(0)))
.isInstanceOf(AlreadyExistsException.class)
.hasMessage("Table already exists: db.tbl");
@@ -477,25 +478,25 @@
ImmutableTableReference branchTableReference =
ImmutableTableReference.builder().reference(BRANCH).name(TABLE_NAME).build();
TableIdentifier branchIdentifier = TableIdentifier.of(DB_NAME, branchTableReference.toString());
- Assertions.assertThat(catalog.dropTable(branchIdentifier, false)).isTrue();
+ assertThat(catalog.dropTable(branchIdentifier, false)).isTrue();
String hash = api.getReference().refName(BRANCH).get().getHash();
api.createReference().sourceRefName(BRANCH).reference(Tag.of("tag_1", hash)).create();
ImmutableTableReference tagTableReference =
ImmutableTableReference.builder().reference("tag_1").name(TABLE_NAME).build();
TableIdentifier tagIdentifier = TableIdentifier.of(DB_NAME, tagTableReference.toString());
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() -> catalog.registerTable(tagIdentifier, "file:" + metadataVersionFiles.get(0)))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage(
"You can only mutate tables/views when using a branch without a hash or timestamp.");
// Case 4: non-null metadata path with null metadata location
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() ->
catalog.registerTable(
TABLE_IDENTIFIER, "file:" + metadataVersionFiles.get(0) + "invalidName"))
.isInstanceOf(NotFoundException.class);
// Case 5: null identifier
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() ->
catalog.registerTable(null, "file:" + metadataVersionFiles.get(0) + "invalidName"))
.isInstanceOf(IllegalArgumentException.class)
@@ -505,15 +506,15 @@
@Test
public void testRegisterTableWithDefaultBranch() {
List<String> metadataVersionFiles = metadataVersionFiles(tableLocation);
- Assertions.assertThat(1).isEqualTo(metadataVersionFiles.size());
- Assertions.assertThat(catalog.dropTable(TABLE_IDENTIFIER, false)).isTrue();
+ assertThat(metadataVersionFiles).hasSize(1);
+ assertThat(catalog.dropTable(TABLE_IDENTIFIER, false)).isTrue();
validateRegister(TABLE_IDENTIFIER, metadataVersionFiles.get(0));
}
@Test
public void testRegisterTableMoreThanOneBranch() throws Exception {
List<String> metadataVersionFiles = metadataVersionFiles(tableLocation);
- Assertions.assertThat(1).isEqualTo(metadataVersionFiles.size());
+ assertThat(metadataVersionFiles).hasSize(1);
ImmutableTableReference tableReference =
ImmutableTableReference.builder().reference("main").name(TABLE_NAME).build();
TableIdentifier identifier = TableIdentifier.of(DB_NAME, tableReference.toString());
@@ -523,7 +524,7 @@
// ignore
}
validateRegister(identifier, metadataVersionFiles.get(0));
- Assertions.assertThat(catalog.dropTable(TABLE_IDENTIFIER, false)).isTrue();
+ assertThat(catalog.dropTable(TABLE_IDENTIFIER, false)).isTrue();
validateRegister(TABLE_IDENTIFIER, metadataVersionFiles.get(0));
}
@@ -536,9 +537,9 @@
icebergTable = catalog.loadTable(TABLE_IDENTIFIER);
// Only 2 snapshotFile Should exist and no manifests should exist
- Assertions.assertThat(metadataVersionFiles(tableLocation)).isNotNull().hasSize(2);
- Assertions.assertThat(manifestFiles(tableLocation)).isNotNull().isEmpty();
- Assertions.assertThat(altered.asStruct()).isEqualTo(icebergTable.schema().asStruct());
+ assertThat(metadataVersionFiles(tableLocation)).isNotNull().hasSize(2);
+ assertThat(manifestFiles(tableLocation)).isNotNull().isEmpty();
+ assertThat(altered.asStruct()).isEqualTo(icebergTable.schema().asStruct());
}
@Test
@@ -555,7 +556,7 @@
.commitMeta(CommitMeta.fromMessage(""))
.commit();
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() -> icebergTable.updateSchema().addColumn("data", Types.LongType.get()).commit())
.isInstanceOf(CommitFailedException.class)
.hasMessage(
@@ -570,18 +571,17 @@
.filter(t -> t.namespace().level(0).equals(DB_NAME) && t.name().equals(TABLE_NAME))
.collect(Collectors.toList());
- Assertions.assertThat(expectedIdents).hasSize(1);
- Assertions.assertThat(catalog.tableExists(TABLE_IDENTIFIER)).isTrue();
+ assertThat(expectedIdents).hasSize(1);
+ assertThat(catalog.tableExists(TABLE_IDENTIFIER)).isTrue();
}
@Test
public void testGCDisabled() {
Table icebergTable = catalog.loadTable(TABLE_IDENTIFIER);
- Assertions.assertThat(icebergTable.properties())
- .containsEntry(TableProperties.GC_ENABLED, "false");
+ assertThat(icebergTable.properties()).containsEntry(TableProperties.GC_ENABLED, "false");
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() ->
icebergTable.expireSnapshots().expireOlderThan(System.currentTimeMillis()).commit())
.isInstanceOf(ValidationException.class)
@@ -593,10 +593,9 @@
public void testGCEnabled() {
Table icebergTable = catalog.loadTable(TABLE_IDENTIFIER);
icebergTable.updateProperties().set(TableProperties.GC_ENABLED, "true").commit();
- Assertions.assertThat(icebergTable.properties())
- .containsEntry(TableProperties.GC_ENABLED, "true");
+ assertThat(icebergTable.properties()).containsEntry(TableProperties.GC_ENABLED, "true");
- Assertions.assertThatCode(
+ assertThatCode(
() ->
icebergTable.expireSnapshots().expireOlderThan(System.currentTimeMillis()).commit())
.doesNotThrowAnyException();
@@ -618,7 +617,7 @@
tableLocation = createTable(TABLE_IDENTIFIER, schema).location().replaceFirst("file:", "");
Table icebergTable = catalog.loadTable(TABLE_IDENTIFIER);
- Assertions.assertThatCode(
+ assertThatCode(
() ->
icebergTable.expireSnapshots().expireOlderThan(System.currentTimeMillis()).commit())
.doesNotThrowAnyException();
@@ -638,19 +637,19 @@
((BaseTable) icebergTable).operations().current().metadataFileLocation();
Path metadataFileLocationPath = Paths.get(metadataFileLocation.replaceFirst("file:", ""));
- Assertions.assertThat(metadataFileLocationPath).exists();
+ assertThat(metadataFileLocationPath).exists();
icebergTable.updateSchema().addColumn("x1", Types.LongType.get()).commit();
icebergTable.updateSchema().addColumn("x2", Types.LongType.get()).commit();
// old table metadata file should still exist after commits.
- Assertions.assertThat(metadataFileLocationPath).exists();
+ assertThat(metadataFileLocationPath).exists();
// load the table from the specific hash which reads the mapping metadataFileLocation
ImmutableTableReference tableReference =
ImmutableTableReference.builder().reference(BRANCH).hash(hash).name(TABLE_NAME).build();
TableIdentifier identifier = TableIdentifier.of(DB_NAME, tableReference.toString());
- Assertions.assertThat(
+ assertThat(
((BaseTable) catalog.loadTable(identifier))
.operations()
.current()
@@ -663,7 +662,7 @@
.operations().current().previousFiles().stream()
.map(TableMetadata.MetadataLogEntry::file)
.collect(Collectors.toSet());
- Assertions.assertThat(tableMetadataFiles).hasSize(1).doesNotContain(metadataFileLocation);
+ assertThat(tableMetadataFiles).hasSize(1).doesNotContain(metadataFileLocation);
}
private String getTableBasePath(String tableName) {
diff --git a/nessie/src/test/java/org/apache/iceberg/nessie/TestNessieUtil.java b/nessie/src/test/java/org/apache/iceberg/nessie/TestNessieUtil.java
index 624ce99..1e1326d 100644
--- a/nessie/src/test/java/org/apache/iceberg/nessie/TestNessieUtil.java
+++ b/nessie/src/test/java/org/apache/iceberg/nessie/TestNessieUtil.java
@@ -18,9 +18,11 @@
*/
package org.apache.iceberg.nessie;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
+
import org.apache.iceberg.CatalogProperties;
import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
import org.projectnessie.model.CommitMeta;
@@ -28,7 +30,7 @@
@Test
public void testBuildingCommitMetadataWithNullCatalogOptions() {
- Assertions.assertThatThrownBy(() -> NessieUtil.buildCommitMetadata("msg", null))
+ assertThatThrownBy(() -> NessieUtil.buildCommitMetadata("msg", null))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("catalogOptions must not be null");
}
@@ -42,24 +44,21 @@
NessieUtil.buildCommitMetadata(
commitMsg,
ImmutableMap.of(CatalogProperties.APP_ID, appId, CatalogProperties.USER, user));
- Assertions.assertThat(commitMeta.getMessage()).isEqualTo(commitMsg);
- Assertions.assertThat(commitMeta.getAuthor()).isEqualTo(user);
- Assertions.assertThat(commitMeta.getProperties()).hasSize(2);
- Assertions.assertThat(commitMeta.getProperties().get(NessieUtil.APPLICATION_TYPE))
- .isEqualTo("iceberg");
- Assertions.assertThat(commitMeta.getProperties().get(CatalogProperties.APP_ID))
- .isEqualTo(appId);
+ assertThat(commitMeta.getMessage()).isEqualTo(commitMsg);
+ assertThat(commitMeta.getAuthor()).isEqualTo(user);
+ assertThat(commitMeta.getProperties()).hasSize(2);
+ assertThat(commitMeta.getProperties().get(NessieUtil.APPLICATION_TYPE)).isEqualTo("iceberg");
+ assertThat(commitMeta.getProperties().get(CatalogProperties.APP_ID)).isEqualTo(appId);
}
@Test
public void testAuthorIsSetOnCommitMetadata() {
String commitMsg = "commit msg";
CommitMeta commitMeta = NessieUtil.buildCommitMetadata(commitMsg, ImmutableMap.of());
- Assertions.assertThat(commitMeta.getMessage()).isEqualTo(commitMsg);
- Assertions.assertThat(commitMeta.getAuthor()).isEqualTo(System.getProperty("user.name"));
- Assertions.assertThat(commitMeta.getProperties()).hasSize(1);
- Assertions.assertThat(commitMeta.getProperties().get(NessieUtil.APPLICATION_TYPE))
- .isEqualTo("iceberg");
+ assertThat(commitMeta.getMessage()).isEqualTo(commitMsg);
+ assertThat(commitMeta.getAuthor()).isEqualTo(System.getProperty("user.name"));
+ assertThat(commitMeta.getProperties()).hasSize(1);
+ assertThat(commitMeta.getProperties().get(NessieUtil.APPLICATION_TYPE)).isEqualTo("iceberg");
}
@Test
@@ -68,7 +67,7 @@
try {
System.clearProperty("user.name");
CommitMeta commitMeta = NessieUtil.buildCommitMetadata("commit msg", ImmutableMap.of());
- Assertions.assertThat(commitMeta.getAuthor()).isNull();
+ assertThat(commitMeta.getAuthor()).isNull();
} finally {
System.setProperty("user.name", jvmUserName);
}
diff --git a/nessie/src/test/java/org/apache/iceberg/nessie/TestNessieView.java b/nessie/src/test/java/org/apache/iceberg/nessie/TestNessieView.java
index 656363f..9c97a5c 100644
--- a/nessie/src/test/java/org/apache/iceberg/nessie/TestNessieView.java
+++ b/nessie/src/test/java/org/apache/iceberg/nessie/TestNessieView.java
@@ -20,6 +20,8 @@
import static org.apache.iceberg.types.Types.NestedField.optional;
import static org.apache.iceberg.types.Types.NestedField.required;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
import java.io.File;
import java.io.IOException;
@@ -35,7 +37,6 @@
import org.apache.iceberg.types.Types;
import org.apache.iceberg.view.SQLViewRepresentation;
import org.apache.iceberg.view.View;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
@@ -127,10 +128,10 @@
View viewInitialMain = catalog.loadView(VIEW_IDENTIFIER);
// verify view-metadata-location + version-id
- Assertions.assertThat(contentInitialMain)
+ assertThat(contentInitialMain)
.as("global-contents + snapshot-id equal on both branches in Nessie")
.isEqualTo(contentInitialBranch);
- Assertions.assertThat(viewInitialMain.currentVersion().versionId()).isEqualTo(2);
+ assertThat(viewInitialMain.currentVersion().versionId()).isEqualTo(2);
// 3. modify view in "main" branch
icebergView
@@ -146,19 +147,19 @@
// --> assert getValue() against both branches returns the updated metadata-location
// verify view-metadata-location
- Assertions.assertThat(contentInitialMain.getMetadataLocation())
+ assertThat(contentInitialMain.getMetadataLocation())
.describedAs("metadata-location must change on %s", BRANCH)
.isNotEqualTo(contentsAfter1Main.getMetadataLocation());
- Assertions.assertThat(contentInitialBranch.getMetadataLocation())
+ assertThat(contentInitialBranch.getMetadataLocation())
.describedAs("metadata-location must not change on %s", testCaseBranch)
.isEqualTo(contentsAfter1Branch.getMetadataLocation());
- Assertions.assertThat(contentsAfter1Main)
+ assertThat(contentsAfter1Main)
.extracting(IcebergView::getSchemaId)
.describedAs("schema ID must be same across branches")
.isEqualTo(contentsAfter1Branch.getSchemaId());
// verify updates
- Assertions.assertThat(viewAfter1Main.currentVersion().versionId()).isEqualTo(3);
- Assertions.assertThat(
+ assertThat(viewAfter1Main.currentVersion().versionId()).isEqualTo(3);
+ assertThat(
((SQLViewRepresentation) viewAfter1Main.currentVersion().representations().get(0))
.dialect())
.isEqualTo("trino");
@@ -178,16 +179,16 @@
// --> assert getValue() against both branches returns the updated metadata-location
// verify view-metadata-location
- Assertions.assertThat(contentsAfter2Main.getVersionId()).isEqualTo(4);
- Assertions.assertThat(contentsAfter2Main.getMetadataLocation())
+ assertThat(contentsAfter2Main.getVersionId()).isEqualTo(4);
+ assertThat(contentsAfter2Main.getMetadataLocation())
.describedAs("metadata-location must change on %s", BRANCH)
.isNotEqualTo(contentsAfter1Main.getMetadataLocation());
- Assertions.assertThat(contentsAfter1Main.getVersionId()).isEqualTo(3);
- Assertions.assertThat(contentsAfter2Branch.getMetadataLocation())
+ assertThat(contentsAfter1Main.getVersionId()).isEqualTo(3);
+ assertThat(contentsAfter2Branch.getMetadataLocation())
.describedAs("on-reference-state must not change on %s", testCaseBranch)
.isEqualTo(contentsAfter1Branch.getMetadataLocation());
- Assertions.assertThat(viewAfter2Main.currentVersion().versionId()).isEqualTo(4);
- Assertions.assertThat(
+ assertThat(viewAfter2Main.currentVersion().versionId()).isEqualTo(4);
+ assertThat(
((SQLViewRepresentation) viewAfter2Main.currentVersion().representations().get(0))
.dialect())
.isEqualTo("flink");
@@ -208,9 +209,9 @@
getView(KEY); // sanity, check view exists
// check parameters are in expected state
String expected = temp.toUri() + DB_NAME + "/" + viewName;
- Assertions.assertThat(getViewBasePath(viewName)).isEqualTo(expected);
+ assertThat(getViewBasePath(viewName)).isEqualTo(expected);
- Assertions.assertThat(metadataVersionFiles(viewLocation)).isNotNull().hasSize(2);
+ assertThat(metadataVersionFiles(viewLocation)).isNotNull().hasSize(2);
verifyCommitMetadata();
}
@@ -237,10 +238,10 @@
TableIdentifier.of(VIEW_IDENTIFIER.namespace(), toTableReference.toString());
catalog.renameView(fromIdentifier, toIdentifier);
- Assertions.assertThat(catalog.viewExists(fromIdentifier)).isFalse();
- Assertions.assertThat(catalog.viewExists(toIdentifier)).isTrue();
+ assertThat(catalog.viewExists(fromIdentifier)).isFalse();
+ assertThat(catalog.viewExists(toIdentifier)).isTrue();
- Assertions.assertThat(catalog.dropView(toIdentifier)).isTrue();
+ assertThat(catalog.dropView(toIdentifier)).isTrue();
verifyCommitMetadata();
}
@@ -266,7 +267,7 @@
TableIdentifier toIdentifier =
TableIdentifier.of(VIEW_IDENTIFIER.namespace(), toTableReference.toString());
- Assertions.assertThatThrownBy(() -> catalog.renameView(fromIdentifier, toIdentifier))
+ assertThatThrownBy(() -> catalog.renameView(fromIdentifier, toIdentifier))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage(
"Cannot rename view 'view' on reference 'Something' to 'rename_view_name' on reference 'iceberg-view-test': source and target references must be the same.");
@@ -286,7 +287,7 @@
TableIdentifier toIdentifierNew =
TableIdentifier.of(VIEW_IDENTIFIER.namespace(), toTableReference.toString());
- Assertions.assertThatThrownBy(() -> catalog.renameView(fromIdentifierNew, toIdentifierNew))
+ assertThatThrownBy(() -> catalog.renameView(fromIdentifierNew, toIdentifierNew))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage(
"Cannot rename view 'view' on reference 'iceberg-view-test' to 'rename_view_name' on reference 'Something': source and target references must be the same.");
@@ -295,29 +296,29 @@
private void verifyCommitMetadata() throws NessieNotFoundException {
// check that the author is properly set
List<LogEntry> log = api.getCommitLog().refName(BRANCH).get().getLogEntries();
- Assertions.assertThat(log)
+ assertThat(log)
.isNotNull()
.isNotEmpty()
.filteredOn(e -> !e.getCommitMeta().getMessage().startsWith("create namespace "))
.allSatisfy(
logEntry -> {
CommitMeta commit = logEntry.getCommitMeta();
- Assertions.assertThat(commit.getAuthor())
+ assertThat(commit.getAuthor())
.isNotNull()
.isNotEmpty()
.isEqualTo(System.getProperty("user.name"));
- Assertions.assertThat(commit.getProperties())
+ assertThat(commit.getProperties())
.containsEntry(NessieUtil.APPLICATION_TYPE, "iceberg");
- Assertions.assertThat(commit.getMessage()).startsWith("Iceberg");
+ assertThat(commit.getMessage()).startsWith("Iceberg");
});
}
@Test
public void testDrop() throws NessieNotFoundException {
- Assertions.assertThat(catalog.viewExists(VIEW_IDENTIFIER)).isTrue();
- Assertions.assertThat(catalog.dropView(VIEW_IDENTIFIER)).isTrue();
- Assertions.assertThat(catalog.viewExists(VIEW_IDENTIFIER)).isFalse();
- Assertions.assertThat(catalog.dropView(VIEW_IDENTIFIER)).isFalse();
+ assertThat(catalog.viewExists(VIEW_IDENTIFIER)).isTrue();
+ assertThat(catalog.dropView(VIEW_IDENTIFIER)).isTrue();
+ assertThat(catalog.viewExists(VIEW_IDENTIFIER)).isFalse();
+ assertThat(catalog.dropView(VIEW_IDENTIFIER)).isFalse();
verifyCommitMetadata();
}
@@ -327,9 +328,9 @@
createView(catalog, newIdentifier, SCHEMA);
List<TableIdentifier> viewIdents = catalog.listViews(VIEW_IDENTIFIER.namespace());
- Assertions.assertThat(viewIdents).contains(VIEW_IDENTIFIER, newIdentifier);
- Assertions.assertThat(catalog.viewExists(VIEW_IDENTIFIER)).isTrue();
- Assertions.assertThat(catalog.viewExists(newIdentifier)).isTrue();
+ assertThat(viewIdents).contains(VIEW_IDENTIFIER, newIdentifier);
+ assertThat(catalog.viewExists(VIEW_IDENTIFIER)).isTrue();
+ assertThat(catalog.viewExists(newIdentifier)).isTrue();
}
private String getViewBasePath(String viewName) {
diff --git a/orc/src/test/java/org/apache/iceberg/orc/TestBloomFilter.java b/orc/src/test/java/org/apache/iceberg/orc/TestBloomFilter.java
index ca932f2..cfc2962 100644
--- a/orc/src/test/java/org/apache/iceberg/orc/TestBloomFilter.java
+++ b/orc/src/test/java/org/apache/iceberg/orc/TestBloomFilter.java
@@ -19,6 +19,9 @@
package org.apache.iceberg.orc;
import static org.apache.iceberg.types.Types.NestedField.required;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
+import static org.assertj.core.api.Assertions.offset;
import java.io.File;
import java.lang.reflect.Field;
@@ -41,7 +44,6 @@
import org.apache.orc.impl.OrcIndex;
import org.apache.orc.impl.RecordReaderImpl;
import org.apache.orc.impl.WriterImpl;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.io.TempDir;
@@ -56,7 +58,7 @@
@Test
public void testWriteOption() throws Exception {
- Assertions.assertThat(testFile.delete()).as("Delete should succeed").isTrue();
+ assertThat(testFile.delete()).as("Delete should succeed").isTrue();
OutputFile outFile = Files.localOutput(testFile);
try (FileAppender<Record> writer =
@@ -81,9 +83,9 @@
double bloomFilterFpp = (double) bloomFilterFppField.get(orcWriter);
// Validate whether the bloom filters are set in ORC SDK or not
- Assertions.assertThat(bloomFilterColumns[1]).isTrue();
- Assertions.assertThat(bloomFilterColumns[2]).isTrue();
- Assertions.assertThat(bloomFilterFpp).isCloseTo(0.04, Assertions.offset(1e-15));
+ assertThat(bloomFilterColumns[1]).isTrue();
+ assertThat(bloomFilterColumns[2]).isTrue();
+ assertThat(bloomFilterFpp).isCloseTo(0.04, offset(1e-15));
Record recordTemplate = GenericRecord.create(DATA_SCHEMA);
Record record1 = recordTemplate.copy("id", 1L, "name", "foo", "price", 1.0);
@@ -123,15 +125,15 @@
footer.getColumns(1));
// Validate whether the bloom filters are written ORC files or not
- Assertions.assertThat(bloomFilterString).contains("Bloom filters for column");
+ assertThat(bloomFilterString).contains("Bloom filters for column");
}
}
@Test
public void testInvalidFppOption() throws Exception {
- Assertions.assertThat(testFile.delete()).as("Delete should succeed").isTrue();
+ assertThat(testFile.delete()).as("Delete should succeed").isTrue();
- Assertions.assertThatThrownBy(
+ assertThatThrownBy(
() ->
ORC.write(Files.localOutput(testFile))
.createWriterFunc(GenericOrcWriter::buildWriter)
diff --git a/orc/src/test/java/org/apache/iceberg/orc/TestBuildOrcProjection.java b/orc/src/test/java/org/apache/iceberg/orc/TestBuildOrcProjection.java
index b8f17f3..a179cb2 100644
--- a/orc/src/test/java/org/apache/iceberg/orc/TestBuildOrcProjection.java
+++ b/orc/src/test/java/org/apache/iceberg/orc/TestBuildOrcProjection.java
@@ -20,11 +20,12 @@
import static org.apache.iceberg.types.Types.NestedField.optional;
import static org.apache.iceberg.types.Types.NestedField.required;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
import org.apache.iceberg.Schema;
import org.apache.iceberg.types.Types;
import org.apache.orc.TypeDescription;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
/** Test projections on ORC types. */
@@ -38,13 +39,11 @@
// Original mapping (stored in ORC)
TypeDescription orcSchema = ORCSchemaUtil.convert(originalSchema);
- Assertions.assertThat(orcSchema.getChildren()).hasSize(2);
- Assertions.assertThat(orcSchema.findSubtype("a").getId()).isEqualTo(1);
- Assertions.assertThat(orcSchema.findSubtype("a").getCategory())
- .isEqualTo(TypeDescription.Category.INT);
- Assertions.assertThat(orcSchema.findSubtype("b").getId()).isEqualTo(2);
- Assertions.assertThat(orcSchema.findSubtype("b").getCategory())
- .isEqualTo(TypeDescription.Category.STRING);
+ assertThat(orcSchema.getChildren()).hasSize(2);
+ assertThat(orcSchema.findSubtype("a").getId()).isEqualTo(1);
+ assertThat(orcSchema.findSubtype("a").getCategory()).isEqualTo(TypeDescription.Category.INT);
+ assertThat(orcSchema.findSubtype("b").getId()).isEqualTo(2);
+ assertThat(orcSchema.findSubtype("b").getCategory()).isEqualTo(TypeDescription.Category.STRING);
}
@Test
@@ -64,12 +63,12 @@
);
TypeDescription newOrcSchema = ORCSchemaUtil.buildOrcProjection(evolveSchema, orcSchema);
- Assertions.assertThat(newOrcSchema.getChildren()).hasSize(2);
- Assertions.assertThat(newOrcSchema.findSubtype("b").getId()).isEqualTo(1);
- Assertions.assertThat(newOrcSchema.findSubtype("b").getCategory())
+ assertThat(newOrcSchema.getChildren()).hasSize(2);
+ assertThat(newOrcSchema.findSubtype("b").getId()).isEqualTo(1);
+ assertThat(newOrcSchema.findSubtype("b").getCategory())
.isEqualTo(TypeDescription.Category.STRING);
- Assertions.assertThat(newOrcSchema.findSubtype("c_r3").getId()).isEqualTo(2);
- Assertions.assertThat(newOrcSchema.findSubtype("c_r3").getCategory())
+ assertThat(newOrcSchema.findSubtype("c_r3").getId()).isEqualTo(2);
+ assertThat(newOrcSchema.findSubtype("c_r3").getCategory())
.isEqualTo(TypeDescription.Category.DATE);
}
@@ -84,16 +83,14 @@
TypeDescription orcSchema = ORCSchemaUtil.convert(originalSchema);
TypeDescription newOrcSchema = ORCSchemaUtil.buildOrcProjection(originalSchema, orcSchema);
- Assertions.assertThat(newOrcSchema.getChildren()).hasSize(1);
- Assertions.assertThat(newOrcSchema.findSubtype("a").getCategory())
+ assertThat(newOrcSchema.getChildren()).hasSize(1);
+ assertThat(newOrcSchema.findSubtype("a").getCategory())
.isEqualTo(TypeDescription.Category.STRUCT);
TypeDescription nestedCol = newOrcSchema.findSubtype("a");
- Assertions.assertThat(nestedCol.findSubtype("b").getId()).isEqualTo(2);
- Assertions.assertThat(nestedCol.findSubtype("b").getCategory())
- .isEqualTo(TypeDescription.Category.STRING);
- Assertions.assertThat(nestedCol.findSubtype("c").getId()).isEqualTo(3);
- Assertions.assertThat(nestedCol.findSubtype("c").getCategory())
- .isEqualTo(TypeDescription.Category.DATE);
+ assertThat(nestedCol.findSubtype("b").getId()).isEqualTo(2);
+ assertThat(nestedCol.findSubtype("b").getCategory()).isEqualTo(TypeDescription.Category.STRING);
+ assertThat(nestedCol.findSubtype("c").getId()).isEqualTo(3);
+ assertThat(nestedCol.findSubtype("c").getCategory()).isEqualTo(TypeDescription.Category.DATE);
}
@Test
@@ -113,16 +110,14 @@
Schema evolveSchema = new Schema(optional(1, "aa", newNestedStructType));
TypeDescription newOrcSchema = ORCSchemaUtil.buildOrcProjection(evolveSchema, orcSchema);
- Assertions.assertThat(newOrcSchema.getChildren()).hasSize(1);
- Assertions.assertThat(newOrcSchema.findSubtype("a").getCategory())
+ assertThat(newOrcSchema.getChildren()).hasSize(1);
+ assertThat(newOrcSchema.findSubtype("a").getCategory())
.isEqualTo(TypeDescription.Category.STRUCT);
TypeDescription nestedCol = newOrcSchema.findSubtype("a");
- Assertions.assertThat(nestedCol.findSubtype("c").getId()).isEqualTo(2);
- Assertions.assertThat(nestedCol.findSubtype("c").getCategory())
- .isEqualTo(TypeDescription.Category.DATE);
- Assertions.assertThat(nestedCol.findSubtype("b").getId()).isEqualTo(3);
- Assertions.assertThat(nestedCol.findSubtype("b").getCategory())
- .isEqualTo(TypeDescription.Category.STRING);
+ assertThat(nestedCol.findSubtype("c").getId()).isEqualTo(2);
+ assertThat(nestedCol.findSubtype("c").getCategory()).isEqualTo(TypeDescription.Category.DATE);
+ assertThat(nestedCol.findSubtype("b").getId()).isEqualTo(3);
+ assertThat(nestedCol.findSubtype("b").getCategory()).isEqualTo(TypeDescription.Category.STRING);
}
@Test
@@ -136,15 +131,14 @@
optional(2, "b", Types.StructType.of(required(3, "c", Types.LongType.get()))));
TypeDescription newOrcSchema = ORCSchemaUtil.buildOrcProjection(evolvedSchema, baseOrcSchema);
- Assertions.assertThat(newOrcSchema.getChildren()).hasSize(2);
- Assertions.assertThat(newOrcSchema.findSubtype("a").getCategory())
- .isEqualTo(TypeDescription.Category.INT);
- Assertions.assertThat(newOrcSchema.findSubtype("b_r2").getId()).isEqualTo(2);
- Assertions.assertThat(newOrcSchema.findSubtype("b_r2").getCategory())
+ assertThat(newOrcSchema.getChildren()).hasSize(2);
+ assertThat(newOrcSchema.findSubtype("a").getCategory()).isEqualTo(TypeDescription.Category.INT);
+ assertThat(newOrcSchema.findSubtype("b_r2").getId()).isEqualTo(2);
+ assertThat(newOrcSchema.findSubtype("b_r2").getCategory())
.isEqualTo(TypeDescription.Category.STRUCT);
TypeDescription nestedCol = newOrcSchema.findSubtype("b_r2");
- Assertions.assertThat(nestedCol.findSubtype("c_r3").getId()).isEqualTo(3);
- Assertions.assertThat(nestedCol.findSubtype("c_r3").getCategory())
+ assertThat(nestedCol.findSubtype("c_r3").getId()).isEqualTo(3);
+ assertThat(nestedCol.findSubtype("c_r3").getCategory())
.isEqualTo(TypeDescription.Category.LONG);
}
@@ -166,8 +160,7 @@
required(3, "c", Types.LongType.get()),
required(4, "d", Types.LongType.get()))));
- Assertions.assertThatThrownBy(
- () -> ORCSchemaUtil.buildOrcProjection(evolvedSchema, baseOrcSchema))
+ assertThatThrownBy(() -> ORCSchemaUtil.buildOrcProjection(evolvedSchema, baseOrcSchema))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Field 4 of type long is required and was not found.");
}
diff --git a/orc/src/test/java/org/apache/iceberg/orc/TestEstimateOrcAvgWidthVisitor.java b/orc/src/test/java/org/apache/iceberg/orc/TestEstimateOrcAvgWidthVisitor.java
index 1aa7dda..0be1254 100644
--- a/orc/src/test/java/org/apache/iceberg/orc/TestEstimateOrcAvgWidthVisitor.java
+++ b/orc/src/test/java/org/apache/iceberg/orc/TestEstimateOrcAvgWidthVisitor.java
@@ -20,11 +20,11 @@
import static org.apache.iceberg.types.Types.NestedField.optional;
import static org.apache.iceberg.types.Types.NestedField.required;
+import static org.assertj.core.api.Assertions.assertThat;
import org.apache.iceberg.Schema;
import org.apache.iceberg.types.Types;
import org.apache.orc.TypeDescription;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
public class TestEstimateOrcAvgWidthVisitor {
@@ -78,9 +78,7 @@
Schema integerSchema = new Schema(ID_FIELD);
TypeDescription integerOrcSchema = ORCSchemaUtil.convert(integerSchema);
long estimateLength = getEstimateLength(integerOrcSchema);
- Assertions.assertThat(estimateLength)
- .as("Estimated average length of integer must be 8.")
- .isEqualTo(8);
+ assertThat(estimateLength).as("Estimated average length of integer must be 8.").isEqualTo(8);
}
@Test
@@ -88,9 +86,7 @@
Schema stringSchema = new Schema(DATA_FIELD);
TypeDescription stringOrcSchema = ORCSchemaUtil.convert(stringSchema);
long estimateLength = getEstimateLength(stringOrcSchema);
- Assertions.assertThat(estimateLength)
- .as("Estimated average length of string must be 128.")
- .isEqualTo(128);
+ assertThat(estimateLength).as("Estimated average length of string must be 128.").isEqualTo(128);
}
@Test
@@ -98,9 +94,7 @@
Schema floatSchema = new Schema(FLOAT_FIELD);
TypeDescription floatOrcSchema = ORCSchemaUtil.convert(floatSchema);
long estimateLength = getEstimateLength(floatOrcSchema);
- Assertions.assertThat(estimateLength)
- .as("Estimated average length of float must be 8.")
- .isEqualTo(8);
+ assertThat(estimateLength).as("Estimated average length of float must be 8.").isEqualTo(8);
}
@Test
@@ -108,9 +102,7 @@
Schema doubleSchema = new Schema(DOUBLE_FIELD);
TypeDescription doubleOrcSchema = ORCSchemaUtil.convert(doubleSchema);
long estimateLength = getEstimateLength(doubleOrcSchema);
- Assertions.assertThat(estimateLength)
- .as("Estimated average length of double must be 8.")
- .isEqualTo(8);
+ assertThat(estimateLength).as("Estimated average length of double must be 8.").isEqualTo(8);
}
@Test
@@ -118,9 +110,7 @@
Schema decimalSchema = new Schema(DECIMAL_FIELD);
TypeDescription decimalOrcSchema = ORCSchemaUtil.convert(decimalSchema);
long estimateLength = getEstimateLength(decimalOrcSchema);
- Assertions.assertThat(estimateLength)
- .as("Estimated average length of decimal must be 7.")
- .isEqualTo(7);
+ assertThat(estimateLength).as("Estimated average length of decimal must be 7.").isEqualTo(7);
}
@Test
@@ -128,9 +118,7 @@
Schema fixedSchema = new Schema(FIXED_FIELD);
TypeDescription fixedOrcSchema = ORCSchemaUtil.convert(fixedSchema);
long estimateLength = getEstimateLength(fixedOrcSchema);
- Assertions.assertThat(estimateLength)
- .as("Estimated average length of fixed must be 128.")
- .isEqualTo(128);
+ assertThat(estimateLength).as("Estimated average length of fixed must be 128.").isEqualTo(128);
}
@Test
@@ -138,9 +126,7 @@
Schema binarySchema = new Schema(BINARY_FIELD);
TypeDescription binaryOrcSchema = ORCSchemaUtil.convert(binarySchema);
long estimateLength = getEstimateLength(binaryOrcSchema);
- Assertions.assertThat(estimateLength)
- .as("Estimated average length of binary must be 128.")
- .isEqualTo(128);
+ assertThat(estimateLength).as("Estimated average length of binary must be 128.").isEqualTo(128);
}
@Test
@@ -148,9 +134,7 @@
Schema listSchema = new Schema(FLOAT_LIST_FIELD);
TypeDescription listOrcSchema = ORCSchemaUtil.convert(listSchema);
long estimateLength = getEstimateLength(listOrcSchema);
- Assertions.assertThat(estimateLength)
- .as("Estimated average length of list must be 8.")
- .isEqualTo(8);
+ assertThat(estimateLength).as("Estimated average length of list must be 8.").isEqualTo(8);
}
@Test
@@ -158,9 +142,7 @@
Schema longSchema = new Schema(LONG_FIELD);
TypeDescription longOrcSchema = ORCSchemaUtil.convert(longSchema);
long estimateLength = getEstimateLength(longOrcSchema);
- Assertions.assertThat(estimateLength)
- .as("Estimated average length of long must be 8.")
- .isEqualTo(8);
+ assertThat(estimateLength).as("Estimated average length of long must be 8.").isEqualTo(8);
}
@Test
@@ -168,9 +150,7 @@
Schema booleanSchema = new Schema(BOOLEAN_FIELD);
TypeDescription booleanOrcSchema = ORCSchemaUtil.convert(booleanSchema);
long estimateLength = getEstimateLength(booleanOrcSchema);
- Assertions.assertThat(estimateLength)
- .as("Estimated average length of boolean must be 8.")
- .isEqualTo(8);
+ assertThat(estimateLength).as("Estimated average length of boolean must be 8.").isEqualTo(8);
}
@Test
@@ -178,14 +158,14 @@
Schema timestampZoneSchema = new Schema(TIMESTAMP_ZONE_FIELD);
TypeDescription timestampZoneOrcSchema = ORCSchemaUtil.convert(timestampZoneSchema);
long estimateLength = getEstimateLength(timestampZoneOrcSchema);
- Assertions.assertThat(estimateLength)
+ assertThat(estimateLength)
.as("Estimated average length of timestamps with zone must be 12.")
.isEqualTo(12);
Schema timestampSchema = new Schema(TIMESTAMP_FIELD);
TypeDescription timestampOrcSchema = ORCSchemaUtil.convert(timestampSchema);
estimateLength = getEstimateLength(timestampOrcSchema);
- Assertions.assertThat(estimateLength)
+ assertThat(estimateLength)
.as("Estimated average length of timestamp must be 12.")
.isEqualTo(12);
}
@@ -195,9 +175,7 @@
Schema dateSchema = new Schema(DATE_FIELD);
TypeDescription dateOrcSchema = ORCSchemaUtil.convert(dateSchema);
long estimateLength = getEstimateLength(dateOrcSchema);
- Assertions.assertThat(estimateLength)
- .as("Estimated average length of date must be 8.")
- .isEqualTo(8);
+ assertThat(estimateLength).as("Estimated average length of date must be 8.").isEqualTo(8);
}
@Test
@@ -205,9 +183,7 @@
Schema uuidSchema = new Schema(UUID_FIELD);
TypeDescription uuidOrcSchema = ORCSchemaUtil.convert(uuidSchema);
long estimateLength = getEstimateLength(uuidOrcSchema);
- Assertions.assertThat(estimateLength)
- .as("Estimated average length of uuid must be 128.")
- .isEqualTo(128);
+ assertThat(estimateLength).as("Estimated average length of uuid must be 128.").isEqualTo(128);
}
@Test
@@ -215,9 +191,7 @@
Schema mapSchema = new Schema(MAP_FIELD_1);
TypeDescription mapOrcSchema = ORCSchemaUtil.convert(mapSchema);
long estimateLength = getEstimateLength(mapOrcSchema);
- Assertions.assertThat(estimateLength)
- .as("Estimated average length of map must be 136.")
- .isEqualTo(136);
+ assertThat(estimateLength).as("Estimated average length of map must be 136.").isEqualTo(136);
}
@Test
@@ -225,9 +199,7 @@
Schema structSchema = new Schema(STRUCT_FIELD);
TypeDescription structOrcSchema = ORCSchemaUtil.convert(structSchema);
long estimateLength = getEstimateLength(structOrcSchema);
- Assertions.assertThat(estimateLength)
- .as("Estimated average length of struct must be 28.")
- .isEqualTo(28);
+ assertThat(estimateLength).as("Estimated average length of struct must be 28.").isEqualTo(28);
}
@Test
@@ -248,7 +220,7 @@
STRUCT_FIELD);
TypeDescription fullOrcSchema = ORCSchemaUtil.convert(fullSchema);
long estimateLength = getEstimateLength(fullOrcSchema);
- Assertions.assertThat(estimateLength)
+ assertThat(estimateLength)
.as("Estimated average length of the row must be 611.")
.isEqualTo(611);
}
diff --git a/orc/src/test/java/org/apache/iceberg/orc/TestExpressionToSearchArgument.java b/orc/src/test/java/org/apache/iceberg/orc/TestExpressionToSearchArgument.java
index c7c7a8a..32b815f 100644
--- a/orc/src/test/java/org/apache/iceberg/orc/TestExpressionToSearchArgument.java
+++ b/orc/src/test/java/org/apache/iceberg/orc/TestExpressionToSearchArgument.java
@@ -34,6 +34,7 @@
import static org.apache.iceberg.expressions.Expressions.year;
import static org.apache.iceberg.types.Types.NestedField.optional;
import static org.apache.iceberg.types.Types.NestedField.required;
+import static org.assertj.core.api.Assertions.assertThat;
import java.math.BigDecimal;
import java.nio.ByteBuffer;
@@ -59,7 +60,6 @@
import org.apache.orc.storage.ql.io.sarg.SearchArgument.TruthValue;
import org.apache.orc.storage.ql.io.sarg.SearchArgumentFactory;
import org.apache.orc.storage.serde2.io.HiveDecimalWritable;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
public class TestExpressionToSearchArgument {
@@ -135,7 +135,7 @@
SearchArgument actual =
ExpressionToSearchArgument.convert(boundFilter, ORCSchemaUtil.convert(schema));
- Assertions.assertThat(actual.toString()).isEqualTo(expected.toString());
+ assertThat(actual.toString()).isEqualTo(expected.toString());
}
@Test
@@ -175,7 +175,7 @@
SearchArgument actual =
ExpressionToSearchArgument.convert(boundFilter, ORCSchemaUtil.convert(schema));
- Assertions.assertThat(actual.toString()).isEqualTo(expected.toString());
+ assertThat(actual.toString()).isEqualTo(expected.toString());
}
} finally {
TimeZone.setDefault(currentTz);
@@ -213,7 +213,7 @@
SearchArgument actual =
ExpressionToSearchArgument.convert(boundFilter, ORCSchemaUtil.convert(schema));
- Assertions.assertThat(actual.toString()).isEqualTo(expected.toString());
+ assertThat(actual.toString()).isEqualTo(expected.toString());
}
@Test
@@ -262,7 +262,7 @@
SearchArgument actual =
ExpressionToSearchArgument.convert(boundFilter, ORCSchemaUtil.convert(schema));
- Assertions.assertThat(actual.toString()).isEqualTo(expected.toString());
+ assertThat(actual.toString()).isEqualTo(expected.toString());
}
@Test
@@ -292,7 +292,7 @@
SearchArgument actual =
ExpressionToSearchArgument.convert(boundFilter, ORCSchemaUtil.convert(schema));
- Assertions.assertThat(actual.toString()).isEqualTo(expected.toString());
+ assertThat(actual.toString()).isEqualTo(expected.toString());
}
@Test
@@ -316,7 +316,7 @@
SearchArgumentFactory.newBuilder().equals("`int`", Type.LONG, 1L).build();
SearchArgument actual = ExpressionToSearchArgument.convert(boundFilter, readSchema);
- Assertions.assertThat(actual.toString()).isEqualTo(expected.toString());
+ assertThat(actual.toString()).isEqualTo(expected.toString());
// for columns not in the file, buildOrcProjection will append field names with _r<ID>
// this will be passed down to ORC, but ORC will handle such cases and return a TruthValue
@@ -327,7 +327,7 @@
SearchArgumentFactory.newBuilder().equals("`float_added_r3`", Type.FLOAT, 1.0).build();
actual = ExpressionToSearchArgument.convert(boundFilter, readSchema);
- Assertions.assertThat(actual.toString()).isEqualTo(expected.toString());
+ assertThat(actual.toString()).isEqualTo(expected.toString());
}
@Test
@@ -353,7 +353,7 @@
.build();
SearchArgument actual = ExpressionToSearchArgument.convert(boundFilter, readSchema);
- Assertions.assertThat(actual.toString()).isEqualTo(expected.toString());
+ assertThat(actual.toString()).isEqualTo(expected.toString());
}
@Test
@@ -380,7 +380,7 @@
SearchArgumentFactory.newBuilder().equals("`int`", Type.LONG, 1L).build();
SearchArgument actual = ExpressionToSearchArgument.convert(boundFilter, readSchema);
- Assertions.assertThat(actual.toString()).isEqualTo(expected.toString());
+ assertThat(actual.toString()).isEqualTo(expected.toString());
// for columns not in the file, buildOrcProjection will append field names with _r<ID>
// this will be passed down to ORC, but ORC will handle such cases and return a TruthValue
@@ -391,7 +391,7 @@
SearchArgumentFactory.newBuilder().equals("`new_float_field_r3`", Type.FLOAT, 1.0).build();
actual = ExpressionToSearchArgument.convert(boundFilter, readSchema);
- Assertions.assertThat(actual.toString()).isEqualTo(expected.toString());
+ assertThat(actual.toString()).isEqualTo(expected.toString());
}
@Test
@@ -475,7 +475,7 @@
.build();
SearchArgument actual = ExpressionToSearchArgument.convert(boundFilter, readSchema);
- Assertions.assertThat(actual.toString()).isEqualTo(expected.toString());
+ assertThat(actual.toString()).isEqualTo(expected.toString());
}
@Test
@@ -490,6 +490,6 @@
SearchArgument actual =
ExpressionToSearchArgument.convert(boundFilter, ORCSchemaUtil.convert(schema));
- Assertions.assertThat(actual.toString()).isEqualTo(expected.toString());
+ assertThat(actual.toString()).isEqualTo(expected.toString());
}
}
diff --git a/orc/src/test/java/org/apache/iceberg/orc/TestIdToOrcName.java b/orc/src/test/java/org/apache/iceberg/orc/TestIdToOrcName.java
index a323499..34bc215 100644
--- a/orc/src/test/java/org/apache/iceberg/orc/TestIdToOrcName.java
+++ b/orc/src/test/java/org/apache/iceberg/orc/TestIdToOrcName.java
@@ -19,11 +19,11 @@
package org.apache.iceberg.orc;
import static org.apache.iceberg.types.Types.NestedField.required;
+import static org.assertj.core.api.Assertions.assertThat;
import java.util.Map;
import org.apache.iceberg.Schema;
import org.apache.iceberg.types.Types;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
public class TestIdToOrcName {
@@ -70,31 +70,31 @@
required(26, "colWith`Quotes`", Types.LongType.get()));
Map<Integer, String> actual = ORCSchemaUtil.idToOrcName(schema);
- Assertions.assertThat(actual.get(1)).isEqualTo("`long`");
- Assertions.assertThat(actual.get(2)).isEqualTo("`struct`");
- Assertions.assertThat(actual.get(3)).isEqualTo("`struct`.`long`");
- Assertions.assertThat(actual.get(4)).isEqualTo("`listOfLongs`");
- Assertions.assertThat(actual.get(5)).isEqualTo("`listOfLongs`.`_elem`");
- Assertions.assertThat(actual.get(6)).isEqualTo("`listOfStructs`");
- Assertions.assertThat(actual.get(7)).isEqualTo("`listOfStructs`.`_elem`");
- Assertions.assertThat(actual.get(8)).isEqualTo("`listOfStructs`.`_elem`.`long`");
- Assertions.assertThat(actual.get(9)).isEqualTo("`map`");
- Assertions.assertThat(actual.get(10)).isEqualTo("`map`.`_key`");
- Assertions.assertThat(actual.get(11)).isEqualTo("`map`.`_value`");
- Assertions.assertThat(actual.get(12)).isEqualTo("`mapOfStructs`");
- Assertions.assertThat(actual.get(13)).isEqualTo("`mapOfStructs`.`_key`");
- Assertions.assertThat(actual.get(14)).isEqualTo("`mapOfStructs`.`_value`");
- Assertions.assertThat(actual.get(15)).isEqualTo("`mapOfStructs`.`_key`.`long`");
- Assertions.assertThat(actual.get(16)).isEqualTo("`mapOfStructs`.`_value`.`long`");
- Assertions.assertThat(actual.get(17)).isEqualTo("`listOfMapsOfStruct`");
- Assertions.assertThat(actual.get(18)).isEqualTo("`listOfMapsOfStruct`.`_elem`");
- Assertions.assertThat(actual.get(19)).isEqualTo("`listOfMapsOfStruct`.`_elem`.`_key`");
- Assertions.assertThat(actual.get(20)).isEqualTo("`listOfMapsOfStruct`.`_elem`.`_value`");
- Assertions.assertThat(actual.get(21)).isEqualTo("`listOfMapsOfStruct`.`_elem`.`_key`.`long`");
- Assertions.assertThat(actual.get(22)).isEqualTo("`listOfMapsOfStruct`.`_elem`.`_value`.`long`");
- Assertions.assertThat(actual.get(23)).isEqualTo("`col.with.dots`");
- Assertions.assertThat(actual.get(24)).isEqualTo("`col.with.dots`.`inner.col.with.dots`");
- Assertions.assertThat(actual.get(25)).isEqualTo("`colW!th$peci@lCh@rs`");
- Assertions.assertThat(actual.get(26)).isEqualTo("`colWith``Quotes```");
+ assertThat(actual.get(1)).isEqualTo("`long`");
+ assertThat(actual.get(2)).isEqualTo("`struct`");
+ assertThat(actual.get(3)).isEqualTo("`struct`.`long`");
+ assertThat(actual.get(4)).isEqualTo("`listOfLongs`");
+ assertThat(actual.get(5)).isEqualTo("`listOfLongs`.`_elem`");
+ assertThat(actual.get(6)).isEqualTo("`listOfStructs`");
+ assertThat(actual.get(7)).isEqualTo("`listOfStructs`.`_elem`");
+ assertThat(actual.get(8)).isEqualTo("`listOfStructs`.`_elem`.`long`");
+ assertThat(actual.get(9)).isEqualTo("`map`");
+ assertThat(actual.get(10)).isEqualTo("`map`.`_key`");
+ assertThat(actual.get(11)).isEqualTo("`map`.`_value`");
+ assertThat(actual.get(12)).isEqualTo("`mapOfStructs`");
+ assertThat(actual.get(13)).isEqualTo("`mapOfStructs`.`_key`");
+ assertThat(actual.get(14)).isEqualTo("`mapOfStructs`.`_value`");
+ assertThat(actual.get(15)).isEqualTo("`mapOfStructs`.`_key`.`long`");
+ assertThat(actual.get(16)).isEqualTo("`mapOfStructs`.`_value`.`long`");
+ assertThat(actual.get(17)).isEqualTo("`listOfMapsOfStruct`");
+ assertThat(actual.get(18)).isEqualTo("`listOfMapsOfStruct`.`_elem`");
+ assertThat(actual.get(19)).isEqualTo("`listOfMapsOfStruct`.`_elem`.`_key`");
+ assertThat(actual.get(20)).isEqualTo("`listOfMapsOfStruct`.`_elem`.`_value`");
+ assertThat(actual.get(21)).isEqualTo("`listOfMapsOfStruct`.`_elem`.`_key`.`long`");
+ assertThat(actual.get(22)).isEqualTo("`listOfMapsOfStruct`.`_elem`.`_value`.`long`");
+ assertThat(actual.get(23)).isEqualTo("`col.with.dots`");
+ assertThat(actual.get(24)).isEqualTo("`col.with.dots`.`inner.col.with.dots`");
+ assertThat(actual.get(25)).isEqualTo("`colW!th$peci@lCh@rs`");
+ assertThat(actual.get(26)).isEqualTo("`colWith``Quotes```");
}
}
diff --git a/orc/src/test/java/org/apache/iceberg/orc/TestORCFileIOProxies.java b/orc/src/test/java/org/apache/iceberg/orc/TestORCFileIOProxies.java
index 9b9eb68..9338c27 100644
--- a/orc/src/test/java/org/apache/iceberg/orc/TestORCFileIOProxies.java
+++ b/orc/src/test/java/org/apache/iceberg/orc/TestORCFileIOProxies.java
@@ -18,6 +18,9 @@
*/
package org.apache.iceberg.orc;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
+
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
@@ -27,7 +30,6 @@
import org.apache.iceberg.Files;
import org.apache.iceberg.io.InputFile;
import org.apache.iceberg.io.OutputFile;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
public class TestORCFileIOProxies {
@@ -39,14 +41,14 @@
InputFile localFile = Files.localInput(inputFile);
FileIOFSUtil.InputFileSystem ifs = new FileIOFSUtil.InputFileSystem(localFile);
InputStream is = ifs.open(new Path(localFile.location()));
- Assertions.assertThat(is).isNotNull();
+ assertThat(is).isNotNull();
// Cannot use the filesystem for any other operation
- Assertions.assertThatThrownBy(() -> ifs.getFileStatus(new Path(localFile.location())))
+ assertThatThrownBy(() -> ifs.getFileStatus(new Path(localFile.location())))
.isInstanceOf(UnsupportedOperationException.class);
// Cannot use the filesystem for any other path
- Assertions.assertThatThrownBy(() -> ifs.open(new Path("/tmp/dummy")))
+ assertThatThrownBy(() -> ifs.open(new Path("/tmp/dummy")))
.isInstanceOf(IllegalArgumentException.class)
.hasMessageStartingWith("Input /tmp/dummy does not equal expected");
}
@@ -64,19 +66,19 @@
os.write('C');
}
// No other operation is supported
- Assertions.assertThatThrownBy(() -> ofs.open(new Path(outputFile.location())))
+ assertThatThrownBy(() -> ofs.open(new Path(outputFile.location())))
.isInstanceOf(UnsupportedOperationException.class);
// No other path is supported
- Assertions.assertThatThrownBy(() -> ofs.create(new Path("/tmp/dummy")))
+ assertThatThrownBy(() -> ofs.create(new Path("/tmp/dummy")))
.isInstanceOf(IllegalArgumentException.class)
.hasMessageStartingWith("Input /tmp/dummy does not equal expected");
FileSystem ifs = new FileIOFSUtil.InputFileSystem(outputFile.toInputFile());
try (InputStream is = ifs.open(new Path(outputFile.location()))) {
- Assertions.assertThat(is.read()).isEqualTo('O');
- Assertions.assertThat(is.read()).isEqualTo('R');
- Assertions.assertThat(is.read()).isEqualTo('C');
- Assertions.assertThat(is.read()).isEqualTo(-1);
+ assertThat(is.read()).isEqualTo('O');
+ assertThat(is.read()).isEqualTo('R');
+ assertThat(is.read()).isEqualTo('C');
+ assertThat(is.read()).isEqualTo(-1);
}
}
}
diff --git a/orc/src/test/java/org/apache/iceberg/orc/TestORCSchemaUtil.java b/orc/src/test/java/org/apache/iceberg/orc/TestORCSchemaUtil.java
index 24a376c..f0f02da 100644
--- a/orc/src/test/java/org/apache/iceberg/orc/TestORCSchemaUtil.java
+++ b/orc/src/test/java/org/apache/iceberg/orc/TestORCSchemaUtil.java
@@ -22,6 +22,8 @@
import static org.apache.iceberg.orc.ORCSchemaUtil.ICEBERG_REQUIRED_ATTRIBUTE;
import static org.apache.iceberg.types.Types.NestedField.optional;
import static org.apache.iceberg.types.Types.NestedField.required;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
import java.util.Collections;
import java.util.List;
@@ -33,7 +35,6 @@
import org.apache.iceberg.types.TypeUtil;
import org.apache.iceberg.types.Types;
import org.apache.orc.TypeDescription;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
public class TestORCSchemaUtil {
@@ -62,8 +63,7 @@
@Test
public void testRoundtripConversionPrimitive() {
TypeDescription orcSchema = ORCSchemaUtil.convert(new Schema(SUPPORTED_PRIMITIVES.fields()));
- Assertions.assertThat(ORCSchemaUtil.convert(orcSchema).asStruct())
- .isEqualTo(SUPPORTED_PRIMITIVES);
+ assertThat(ORCSchemaUtil.convert(orcSchema).asStruct()).isEqualTo(SUPPORTED_PRIMITIVES);
}
@Test
@@ -189,8 +189,7 @@
Types.ListType.ofRequired(
1250, nestedStructTypeForStruct))))))));
TypeDescription orcSchema = ORCSchemaUtil.convert(expectedSchema);
- Assertions.assertThat(ORCSchemaUtil.convert(orcSchema).asStruct())
- .isEqualTo(expectedSchema.asStruct());
+ assertThat(ORCSchemaUtil.convert(orcSchema).asStruct()).isEqualTo(expectedSchema.asStruct());
}
@Test
@@ -212,18 +211,18 @@
optional(3, "c", Types.DecimalType.of(15, 2)));
TypeDescription newOrcSchema = ORCSchemaUtil.buildOrcProjection(evolveSchema, orcSchema);
- Assertions.assertThat(newOrcSchema.getChildren()).hasSize(3);
- Assertions.assertThat(newOrcSchema.findSubtype("a").getId()).isEqualTo(1);
- Assertions.assertThat(newOrcSchema.findSubtype("a").getCategory())
+ assertThat(newOrcSchema.getChildren()).hasSize(3);
+ assertThat(newOrcSchema.findSubtype("a").getId()).isEqualTo(1);
+ assertThat(newOrcSchema.findSubtype("a").getCategory())
.isEqualTo(TypeDescription.Category.LONG);
- Assertions.assertThat(newOrcSchema.findSubtype("b").getId()).isEqualTo(2);
- Assertions.assertThat(newOrcSchema.findSubtype("b").getCategory())
+ assertThat(newOrcSchema.findSubtype("b").getId()).isEqualTo(2);
+ assertThat(newOrcSchema.findSubtype("b").getCategory())
.isEqualTo(TypeDescription.Category.DOUBLE);
TypeDescription decimalC = newOrcSchema.findSubtype("c");
- Assertions.assertThat(decimalC.getId()).isEqualTo(3);
- Assertions.assertThat(decimalC.getCategory()).isEqualTo(TypeDescription.Category.DECIMAL);
- Assertions.assertThat(decimalC.getPrecision()).isEqualTo(15);
- Assertions.assertThat(decimalC.getScale()).isEqualTo(2);
+ assertThat(decimalC.getId()).isEqualTo(3);
+ assertThat(decimalC.getCategory()).isEqualTo(TypeDescription.Category.DECIMAL);
+ assertThat(decimalC.getPrecision()).isEqualTo(15);
+ assertThat(decimalC.getScale()).isEqualTo(2);
}
@Test
@@ -233,7 +232,7 @@
TypeDescription orcSchema = ORCSchemaUtil.convert(originalSchema);
Schema evolveSchema = new Schema(optional(1, "a", Types.IntegerType.get()));
- Assertions.assertThatThrownBy(() -> ORCSchemaUtil.buildOrcProjection(evolveSchema, orcSchema))
+ assertThatThrownBy(() -> ORCSchemaUtil.buildOrcProjection(evolveSchema, orcSchema))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Can not promote LONG type to INTEGER");
}
@@ -268,7 +267,7 @@
5,
"mapCol",
Types.MapType.ofOptional(3, 4, Types.StringType.get(), Types.BooleanType.get())));
- Assertions.assertThat(icebergSchema.asStruct())
+ assertThat(icebergSchema.asStruct())
.as("Schemas must match.")
.isEqualTo(expectedSchema.asStruct());
@@ -304,7 +303,7 @@
required(6, "doubleCol", Types.DoubleType.get())
// Skipped mapCol2 since value has no Iceberg ID
)));
- Assertions.assertThat(icebergSchema2.asStruct())
+ assertThat(icebergSchema2.asStruct())
.as("Schemas must match.")
.isEqualTo(expectedSchema2.asStruct());
}
@@ -327,13 +326,13 @@
optional(4, "listCol", Types.ListType.ofOptional(40, Types.DoubleType.get())));
TypeDescription orcSchema = ORCSchemaUtil.removeIds(ORCSchemaUtil.convert(schema));
- Assertions.assertThat(ORCSchemaUtil.hasIds(orcSchema)).as("Should not have Ids").isFalse();
+ assertThat(ORCSchemaUtil.hasIds(orcSchema)).as("Should not have Ids").isFalse();
TypeDescription map2Col =
TypeDescription.createMap(TypeDescription.createString(), TypeDescription.createBinary());
map2Col.setAttribute(ICEBERG_ID_ATTRIBUTE, "4");
orcSchema.addField("map2Col", map2Col);
- Assertions.assertThat(ORCSchemaUtil.hasIds(orcSchema))
+ assertThat(ORCSchemaUtil.hasIds(orcSchema))
.as("Should have Ids after adding one type with Id")
.isTrue();
}
@@ -403,8 +402,7 @@
ORCSchemaUtil.applyNameMapping(
ORCSchemaUtil.removeIds(typeDescriptionWithIds), nameMapping);
- Assertions.assertThat(
- equalsWithIds(typeDescriptionWithIds, typeDescriptionWithIdsFromNameMapping))
+ assertThat(equalsWithIds(typeDescriptionWithIds, typeDescriptionWithIdsFromNameMapping))
.as("TypeDescription schemas should be equal, including IDs")
.isTrue();
}
@@ -512,14 +510,14 @@
longField.setAttribute(ICEBERG_ID_ATTRIBUTE, "40");
expected.addField("long_r40", longField);
- Assertions.assertThat(typeDescriptionWithIdsFromNameMapping.equals(fileSchema, false))
+ assertThat(typeDescriptionWithIdsFromNameMapping.equals(fileSchema, false))
.as("ORC Schema must have the same structure, but one has Iceberg IDs")
.isTrue();
TypeDescription projectedOrcSchema =
ORCSchemaUtil.buildOrcProjection(mappingSchema, typeDescriptionWithIdsFromNameMapping);
- Assertions.assertThat(equalsWithIds(expected, projectedOrcSchema))
+ assertThat(equalsWithIds(expected, projectedOrcSchema))
.as("Schema should be the prunned by projection")
.isTrue();
}
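A side effect visible throughout TestORCSchemaUtil above: dropping the Assertions. prefix shortens several calls enough that a previously wrapped chain now fits on one line, assuming the project's standard line-length limit. The first hunk in this file shows the shape of that change:

// Before: the qualified call pushed the chain past the line limit, so it was wrapped.
Assertions.assertThat(ORCSchemaUtil.convert(orcSchema).asStruct())
    .isEqualTo(SUPPORTED_PRIMITIVES);

// After: the statically imported call fits on a single line.
assertThat(ORCSchemaUtil.convert(orcSchema).asStruct()).isEqualTo(SUPPORTED_PRIMITIVES);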
diff --git a/orc/src/test/java/org/apache/iceberg/orc/TestOrcDataWriter.java b/orc/src/test/java/org/apache/iceberg/orc/TestOrcDataWriter.java
index f95884d..e026c0e 100644
--- a/orc/src/test/java/org/apache/iceberg/orc/TestOrcDataWriter.java
+++ b/orc/src/test/java/org/apache/iceberg/orc/TestOrcDataWriter.java
@@ -18,6 +18,9 @@
*/
package org.apache.iceberg.orc;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
+
import java.io.File;
import java.io.IOException;
import java.util.List;
@@ -47,7 +50,6 @@
import org.apache.iceberg.types.Types;
import org.apache.orc.OrcFile;
import org.apache.orc.StripeInformation;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.io.TempDir;
@@ -112,13 +114,13 @@
}
DataFile dataFile = dataWriter.toDataFile();
- Assertions.assertThat(dataFile.splitOffsets()).isEqualTo(stripeOffsetsFromReader(dataFile));
- Assertions.assertThat(dataFile.format()).isEqualTo(FileFormat.ORC);
- Assertions.assertThat(dataFile.content()).isEqualTo(FileContent.DATA);
- Assertions.assertThat(dataFile.recordCount()).isEqualTo(records.size());
- Assertions.assertThat(dataFile.partition().size()).isEqualTo(0);
- Assertions.assertThat(dataFile.sortOrderId()).isEqualTo(sortOrder.orderId());
- Assertions.assertThat(dataFile.keyMetadata()).isNull();
+ assertThat(dataFile.splitOffsets()).isEqualTo(stripeOffsetsFromReader(dataFile));
+ assertThat(dataFile.format()).isEqualTo(FileFormat.ORC);
+ assertThat(dataFile.content()).isEqualTo(FileContent.DATA);
+ assertThat(dataFile.recordCount()).isEqualTo(records.size());
+ assertThat(dataFile.partition().size()).isEqualTo(0);
+ assertThat(dataFile.sortOrderId()).isEqualTo(sortOrder.orderId());
+ assertThat(dataFile.keyMetadata()).isNull();
List<Record> writtenRecords;
try (CloseableIterable<Record> reader =
ORC.read(file.toInputFile())
@@ -127,7 +129,7 @@
.build()) {
writtenRecords = Lists.newArrayList(reader);
}
- Assertions.assertThat(writtenRecords).as("Written records should match").isEqualTo(records);
+ assertThat(writtenRecords).as("Written records should match").isEqualTo(records);
}
@Test
@@ -138,8 +140,7 @@
// use a scheme `dummy` that is not handled. Note that Hadoop 2.7.3 throws IOException
// while latest Hadoop versions throw UnsupportedFileSystemException (extends IOException)
ProxyOutputFile outFile = new ProxyOutputFile(Files.localOutput(temp));
- Assertions.assertThatThrownBy(
- () -> new Path(outFile.location()).getFileSystem(new Configuration()))
+ assertThatThrownBy(() -> new Path(outFile.location()).getFileSystem(new Configuration()))
.isInstanceOf(IOException.class)
.hasMessageStartingWith("No FileSystem for scheme");
@@ -167,13 +168,12 @@
OrcFile.readerOptions(new Configuration())
.filesystem(new FileIOFSUtil.InputFileSystem(outFile.toInputFile()))
.maxLength(outFile.toInputFile().getLength());
- Assertions.assertThat(dataFile.splitOffsets())
- .isEqualTo(stripeOffsetsFromReader(dataFile, options));
- Assertions.assertThat(dataFile.format()).isEqualTo(FileFormat.ORC);
- Assertions.assertThat(dataFile.content()).isEqualTo(FileContent.DATA);
- Assertions.assertThat(dataFile.recordCount()).isEqualTo(records.size());
- Assertions.assertThat(dataFile.partition().size()).isEqualTo(0);
- Assertions.assertThat(dataFile.keyMetadata()).isNull();
+ assertThat(dataFile.splitOffsets()).isEqualTo(stripeOffsetsFromReader(dataFile, options));
+ assertThat(dataFile.format()).isEqualTo(FileFormat.ORC);
+ assertThat(dataFile.content()).isEqualTo(FileContent.DATA);
+ assertThat(dataFile.recordCount()).isEqualTo(records.size());
+ assertThat(dataFile.partition().size()).isEqualTo(0);
+ assertThat(dataFile.keyMetadata()).isNull();
List<Record> writtenRecords;
try (CloseableIterable<Record> reader =
ORC.read(outFile.toInputFile())
@@ -182,7 +182,7 @@
.build()) {
writtenRecords = Lists.newArrayList(reader);
}
- Assertions.assertThat(writtenRecords).as("Written records should match").isEqualTo(records);
+ assertThat(writtenRecords).as("Written records should match").isEqualTo(records);
}
private static class ProxyInputFile implements InputFile {
diff --git a/orc/src/test/java/org/apache/iceberg/orc/TestOrcDeleteWriters.java b/orc/src/test/java/org/apache/iceberg/orc/TestOrcDeleteWriters.java
index 3e0c133..112885b 100644
--- a/orc/src/test/java/org/apache/iceberg/orc/TestOrcDeleteWriters.java
+++ b/orc/src/test/java/org/apache/iceberg/orc/TestOrcDeleteWriters.java
@@ -18,6 +18,8 @@
*/
package org.apache.iceberg.orc;
+import static org.assertj.core.api.Assertions.assertThat;
+
import java.io.File;
import java.io.IOException;
import java.util.List;
@@ -41,7 +43,6 @@
import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap;
import org.apache.iceberg.relocated.com.google.common.collect.Lists;
import org.apache.iceberg.types.Types;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.io.TempDir;
@@ -87,11 +88,11 @@
}
DeleteFile metadata = deleteWriter.toDeleteFile();
- Assertions.assertThat(metadata.format()).isEqualTo(FileFormat.ORC);
- Assertions.assertThat(metadata.content()).isEqualTo(FileContent.EQUALITY_DELETES);
- Assertions.assertThat(metadata.recordCount()).isEqualTo(records.size());
- Assertions.assertThat(metadata.partition().size()).isEqualTo(0);
- Assertions.assertThat(metadata.keyMetadata()).isNull();
+ assertThat(metadata.format()).isEqualTo(FileFormat.ORC);
+ assertThat(metadata.content()).isEqualTo(FileContent.EQUALITY_DELETES);
+ assertThat(metadata.recordCount()).isEqualTo(records.size());
+ assertThat(metadata.partition().size()).isEqualTo(0);
+ assertThat(metadata.keyMetadata()).isNull();
List<Record> deletedRecords;
try (CloseableIterable<Record> reader =
@@ -102,9 +103,7 @@
deletedRecords = Lists.newArrayList(reader);
}
- Assertions.assertThat(deletedRecords)
- .as("Deleted records should match expected")
- .isEqualTo(records);
+ assertThat(deletedRecords).as("Deleted records should match expected").isEqualTo(records);
}
@Test
@@ -143,11 +142,11 @@
}
DeleteFile metadata = deleteWriter.toDeleteFile();
- Assertions.assertThat(metadata.format()).isEqualTo(FileFormat.ORC);
- Assertions.assertThat(metadata.content()).isEqualTo(FileContent.POSITION_DELETES);
- Assertions.assertThat(metadata.recordCount()).isEqualTo(records.size());
- Assertions.assertThat(metadata.partition().size()).isEqualTo(0);
- Assertions.assertThat(metadata.keyMetadata()).isNull();
+ assertThat(metadata.format()).isEqualTo(FileFormat.ORC);
+ assertThat(metadata.content()).isEqualTo(FileContent.POSITION_DELETES);
+ assertThat(metadata.recordCount()).isEqualTo(records.size());
+ assertThat(metadata.partition().size()).isEqualTo(0);
+ assertThat(metadata.keyMetadata()).isNull();
List<Record> deletedRecords;
try (CloseableIterable<Record> reader =
@@ -158,7 +157,7 @@
deletedRecords = Lists.newArrayList(reader);
}
- Assertions.assertThat(deletedRecords)
+ assertThat(deletedRecords)
.as("Deleted records should match expected")
.isEqualTo(expectedDeleteRecords);
}
@@ -196,11 +195,11 @@
}
DeleteFile metadata = deleteWriter.toDeleteFile();
- Assertions.assertThat(metadata.format()).isEqualTo(FileFormat.ORC);
- Assertions.assertThat(metadata.content()).isEqualTo(FileContent.POSITION_DELETES);
- Assertions.assertThat(metadata.recordCount()).isEqualTo(records.size());
- Assertions.assertThat(metadata.partition().size()).isEqualTo(0);
- Assertions.assertThat(metadata.keyMetadata()).isNull();
+ assertThat(metadata.format()).isEqualTo(FileFormat.ORC);
+ assertThat(metadata.content()).isEqualTo(FileContent.POSITION_DELETES);
+ assertThat(metadata.recordCount()).isEqualTo(records.size());
+ assertThat(metadata.partition().size()).isEqualTo(0);
+ assertThat(metadata.keyMetadata()).isNull();
List<Record> deletedRecords;
try (CloseableIterable<Record> reader =
@@ -210,7 +209,7 @@
.build()) {
deletedRecords = Lists.newArrayList(reader);
}
- Assertions.assertThat(deletedRecords)
+ assertThat(deletedRecords)
.as("Deleted records should match expected")
.isEqualTo(expectedDeleteRecords);
}
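The .as(...) descriptions kept in the delete-writer assertions above only take effect because they precede the terminal assertion call; AssertJ evaluates the assertion immediately, so a description attached afterwards is ignored. A small illustrative sketch (test name and values are assumptions, not from the PR):

import static org.assertj.core.api.Assertions.assertThat;

import java.util.Arrays;
import java.util.List;
import org.junit.jupiter.api.Test;

public class DescriptionPlacementExample {
  @Test
  public void descriptionMustComeBeforeTheAssertion() {
    List<String> deletedRecords = Arrays.asList("a", "b");
    List<String> expected = Arrays.asList("a", "b");

    // Correct: the description is attached before isEqualTo(...) runs,
    // so a failure message would start with it.
    assertThat(deletedRecords).as("Deleted records should match expected").isEqualTo(expected);
  }
}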
diff --git a/orc/src/test/java/org/apache/iceberg/orc/TestTableProperties.java b/orc/src/test/java/org/apache/iceberg/orc/TestTableProperties.java
index 339e885..ce39855 100644
--- a/orc/src/test/java/org/apache/iceberg/orc/TestTableProperties.java
+++ b/orc/src/test/java/org/apache/iceberg/orc/TestTableProperties.java
@@ -18,6 +18,8 @@
*/
package org.apache.iceberg.orc;
+import static org.assertj.core.api.Assertions.assertThat;
+
import java.io.File;
import java.util.Random;
import org.apache.hadoop.conf.Configuration;
@@ -38,7 +40,6 @@
import org.apache.orc.CompressionKind;
import org.apache.orc.OrcConf;
import org.apache.orc.OrcFile.CompressionStrategy;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.io.TempDir;
@@ -73,14 +74,12 @@
String warehouse = folder.getAbsolutePath();
String tablePath = warehouse.concat("/test");
- Assertions.assertThat(new File(tablePath).mkdir())
- .as("Should create the table path correctly.")
- .isTrue();
+ assertThat(new File(tablePath).mkdir()).as("Should create the table path correctly.").isTrue();
PartitionSpec spec = PartitionSpec.unpartitioned();
Table table = new HadoopTables().create(SCHEMA, spec, properties, tablePath);
- Assertions.assertThat(testFile.delete()).isTrue();
+ assertThat(testFile.delete()).isTrue();
FileAppender<Record> writer =
ORC.write(Files.localOutput(testFile))
@@ -92,12 +91,11 @@
DynFields.builder().hiddenImpl(writer.getClass(), "conf").build(writer);
Configuration configuration = confField.get();
- Assertions.assertThat(OrcConf.BLOCK_SIZE.getLong(configuration)).isEqualTo(blockSizeBytes);
- Assertions.assertThat(OrcConf.STRIPE_SIZE.getLong(configuration)).isEqualTo(stripeSizeBytes);
- Assertions.assertThat(OrcConf.COMPRESS.getString(configuration)).isEqualTo(codecAsString);
- Assertions.assertThat(OrcConf.COMPRESSION_STRATEGY.getString(configuration))
- .isEqualTo(strategyAsString);
- Assertions.assertThat(configuration.get(TableProperties.DEFAULT_FILE_FORMAT))
+ assertThat(OrcConf.BLOCK_SIZE.getLong(configuration)).isEqualTo(blockSizeBytes);
+ assertThat(OrcConf.STRIPE_SIZE.getLong(configuration)).isEqualTo(stripeSizeBytes);
+ assertThat(OrcConf.COMPRESS.getString(configuration)).isEqualTo(codecAsString);
+ assertThat(OrcConf.COMPRESSION_STRATEGY.getString(configuration)).isEqualTo(strategyAsString);
+ assertThat(configuration.get(TableProperties.DEFAULT_FILE_FORMAT))
.isEqualTo(FileFormat.ORC.name());
}
@@ -122,14 +120,12 @@
String warehouse = folder.getAbsolutePath();
String tablePath = warehouse.concat("/test");
- Assertions.assertThat(new File(tablePath).mkdir())
- .as("Should create the table path correctly.")
- .isTrue();
+ assertThat(new File(tablePath).mkdir()).as("Should create the table path correctly.").isTrue();
PartitionSpec spec = PartitionSpec.unpartitioned();
Table table = new HadoopTables().create(SCHEMA, spec, properties, tablePath);
- Assertions.assertThat(testFile.delete()).isTrue();
+ assertThat(testFile.delete()).isTrue();
EqualityDeleteWriter<Object> deleteWriter =
ORC.writeDeletes(Files.localOutput(testFile))
@@ -146,12 +142,11 @@
DynFields.builder().hiddenImpl(orcFileAppender.getClass(), "conf").build(orcFileAppender);
Configuration configuration = confField.get();
- Assertions.assertThat(OrcConf.BLOCK_SIZE.getLong(configuration)).isEqualTo(blockSizeBytes);
- Assertions.assertThat(OrcConf.STRIPE_SIZE.getLong(configuration)).isEqualTo(stripeSizeBytes);
- Assertions.assertThat(OrcConf.COMPRESS.getString(configuration)).isEqualTo(codecAsString);
- Assertions.assertThat(OrcConf.COMPRESSION_STRATEGY.getString(configuration))
- .isEqualTo(strategyAsString);
- Assertions.assertThat(configuration.get(TableProperties.DEFAULT_FILE_FORMAT))
+ assertThat(OrcConf.BLOCK_SIZE.getLong(configuration)).isEqualTo(blockSizeBytes);
+ assertThat(OrcConf.STRIPE_SIZE.getLong(configuration)).isEqualTo(stripeSizeBytes);
+ assertThat(OrcConf.COMPRESS.getString(configuration)).isEqualTo(codecAsString);
+ assertThat(OrcConf.COMPRESSION_STRATEGY.getString(configuration)).isEqualTo(strategyAsString);
+ assertThat(configuration.get(TableProperties.DEFAULT_FILE_FORMAT))
.isEqualTo(FileFormat.ORC.name());
}
}
diff --git a/snowflake/src/test/java/org/apache/iceberg/snowflake/JdbcSnowflakeClientTest.java b/snowflake/src/test/java/org/apache/iceberg/snowflake/JdbcSnowflakeClientTest.java
index c2ea335..f0c5f2a 100644
--- a/snowflake/src/test/java/org/apache/iceberg/snowflake/JdbcSnowflakeClientTest.java
+++ b/snowflake/src/test/java/org/apache/iceberg/snowflake/JdbcSnowflakeClientTest.java
@@ -21,6 +21,8 @@
import static org.apache.iceberg.snowflake.JdbcSnowflakeClient.DATABASE_NOT_FOUND_ERROR_CODES;
import static org.apache.iceberg.snowflake.JdbcSnowflakeClient.SCHEMA_NOT_FOUND_ERROR_CODES;
import static org.apache.iceberg.snowflake.JdbcSnowflakeClient.TABLE_NOT_FOUND_ERROR_CODES;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatExceptionOfType;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.doAnswer;
@@ -37,7 +39,6 @@
import org.apache.iceberg.jdbc.JdbcClientPool;
import org.apache.iceberg.jdbc.UncheckedInterruptedException;
import org.apache.iceberg.jdbc.UncheckedSQLException;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
@@ -79,7 +80,7 @@
@Test
public void testNullClientPoolInConstructor() {
- Assertions.assertThatExceptionOfType(IllegalArgumentException.class)
+ assertThatExceptionOfType(IllegalArgumentException.class)
.isThrownBy(() -> new JdbcSnowflakeClient(null))
.withMessageContaining("JdbcClientPool must be non-null");
}
@@ -91,8 +92,7 @@
when(mockResultSet.getString("database_name")).thenReturn("DB_1");
when(mockResultSet.getString("name")).thenReturn("SCHEMA_1");
- Assertions.assertThat(snowflakeClient.databaseExists(SnowflakeIdentifier.ofDatabase("DB_1")))
- .isTrue();
+ assertThat(snowflakeClient.databaseExists(SnowflakeIdentifier.ofDatabase("DB_1"))).isTrue();
verify(mockQueryHarness)
.query(
@@ -113,16 +113,13 @@
new SQLException("Database does not exist or not authorized", "2000", 2001, null));
// Error code 2003
- Assertions.assertThat(snowflakeClient.databaseExists(SnowflakeIdentifier.ofDatabase("DB_1")))
- .isFalse();
+ assertThat(snowflakeClient.databaseExists(SnowflakeIdentifier.ofDatabase("DB_1"))).isFalse();
// Error code 2043
- Assertions.assertThat(snowflakeClient.databaseExists(SnowflakeIdentifier.ofDatabase("DB_1")))
- .isFalse();
+ assertThat(snowflakeClient.databaseExists(SnowflakeIdentifier.ofDatabase("DB_1"))).isFalse();
// Error code 2001
- Assertions.assertThat(snowflakeClient.databaseExists(SnowflakeIdentifier.ofDatabase("DB_1")))
- .isFalse();
+ assertThat(snowflakeClient.databaseExists(SnowflakeIdentifier.ofDatabase("DB_1"))).isFalse();
}
@Test
@@ -130,7 +127,7 @@
Exception injectedException = new SQLException("Some other exception", "2000", 2, null);
when(mockResultSet.next()).thenThrow(injectedException);
- Assertions.assertThatExceptionOfType(UncheckedSQLException.class)
+ assertThatExceptionOfType(UncheckedSQLException.class)
.isThrownBy(() -> snowflakeClient.databaseExists(SnowflakeIdentifier.ofDatabase("DB_1")))
.withMessageContaining("Failed to check if database 'DATABASE: 'DB_1'' exists")
.withCause(injectedException);
@@ -143,7 +140,7 @@
Exception injectedException = new InterruptedException("Fake interrupted exception");
when(mockClientPool.run(any(ClientPool.Action.class))).thenThrow(injectedException);
- Assertions.assertThatExceptionOfType(UncheckedInterruptedException.class)
+ assertThatExceptionOfType(UncheckedInterruptedException.class)
.isThrownBy(() -> snowflakeClient.databaseExists(SnowflakeIdentifier.ofDatabase("DB_1")))
.withMessageContaining("Interrupted while checking if database 'DATABASE: 'DB_1'' exists")
.withCause(injectedException);
@@ -161,8 +158,7 @@
when(mockResultSet.getString("database_name")).thenReturn("DB1");
when(mockResultSet.getString("schema_name")).thenReturn("SCHEMA1");
- Assertions.assertThat(
- snowflakeClient.schemaExists(SnowflakeIdentifier.ofSchema("DB1", "SCHEMA1")))
+ assertThat(snowflakeClient.schemaExists(SnowflakeIdentifier.ofSchema("DB1", "SCHEMA1")))
.isTrue();
verify(mockQueryHarness)
@@ -201,18 +197,15 @@
when(mockResultSet.getString("database_name")).thenReturn("DB1");
// Error code 2003
- Assertions.assertThat(
- snowflakeClient.schemaExists(SnowflakeIdentifier.ofSchema("DB_1", "SCHEMA_2")))
+ assertThat(snowflakeClient.schemaExists(SnowflakeIdentifier.ofSchema("DB_1", "SCHEMA_2")))
.isFalse();
// Error code 2043
- Assertions.assertThat(
- snowflakeClient.schemaExists(SnowflakeIdentifier.ofSchema("DB_1", "SCHEMA_2")))
+ assertThat(snowflakeClient.schemaExists(SnowflakeIdentifier.ofSchema("DB_1", "SCHEMA_2")))
.isFalse();
// Error code 2001
- Assertions.assertThat(
- snowflakeClient.schemaExists(SnowflakeIdentifier.ofSchema("DB_1", "SCHEMA_2")))
+ assertThat(snowflakeClient.schemaExists(SnowflakeIdentifier.ofSchema("DB_1", "SCHEMA_2")))
.isFalse();
}
@@ -228,7 +221,7 @@
when(mockResultSet.getString("name")).thenReturn("DB1").thenReturn("SCHEMA1");
when(mockResultSet.getString("database_name")).thenReturn("DB1");
- Assertions.assertThatExceptionOfType(UncheckedSQLException.class)
+ assertThatExceptionOfType(UncheckedSQLException.class)
.isThrownBy(
() -> snowflakeClient.schemaExists(SnowflakeIdentifier.ofSchema("DB_1", "SCHEMA_2")))
.withMessageContaining("Failed to check if schema 'SCHEMA: 'DB_1.SCHEMA_2'' exists")
@@ -242,7 +235,7 @@
Exception injectedException = new InterruptedException("Fake Interrupted exception");
when(mockClientPool.run(any(ClientPool.Action.class))).thenThrow(injectedException);
- Assertions.assertThatExceptionOfType(UncheckedInterruptedException.class)
+ assertThatExceptionOfType(UncheckedInterruptedException.class)
.isThrownBy(
() -> snowflakeClient.schemaExists(SnowflakeIdentifier.ofSchema("DB_1", "SCHEMA_2")))
.withMessageContaining("Interrupted while checking if database 'DATABASE: 'DB_1'' exists")
@@ -263,7 +256,7 @@
eq("SHOW DATABASES IN ACCOUNT"),
any(JdbcSnowflakeClient.ResultSetParser.class));
- Assertions.assertThat(actualList)
+ assertThat(actualList)
.containsExactly(
SnowflakeIdentifier.ofDatabase("DB_1"),
SnowflakeIdentifier.ofDatabase("DB_2"),
@@ -281,7 +274,7 @@
new SQLException(String.format("SQL exception with Error Code %d", 0), "2000", 0, null);
when(mockClientPool.run(any(ClientPool.Action.class))).thenThrow(injectedException);
- Assertions.assertThatExceptionOfType(UncheckedSQLException.class)
+ assertThatExceptionOfType(UncheckedSQLException.class)
.isThrownBy(() -> snowflakeClient.listDatabases())
.withMessageContaining("Failed to list databases")
.withCause(injectedException);
@@ -298,7 +291,7 @@
Exception injectedException = new SQLException("Fake SQL exception");
when(mockClientPool.run(any(ClientPool.Action.class))).thenThrow(injectedException);
- Assertions.assertThatExceptionOfType(UncheckedSQLException.class)
+ assertThatExceptionOfType(UncheckedSQLException.class)
.isThrownBy(() -> snowflakeClient.listDatabases())
.withMessageContaining("Failed to list databases")
.withCause(injectedException);
@@ -314,7 +307,7 @@
Exception injectedException = new InterruptedException("Fake interrupted exception");
when(mockClientPool.run(any(ClientPool.Action.class))).thenThrow(injectedException);
- Assertions.assertThatExceptionOfType(UncheckedInterruptedException.class)
+ assertThatExceptionOfType(UncheckedInterruptedException.class)
.isThrownBy(() -> snowflakeClient.listDatabases())
.withMessageContaining("Interrupted while listing databases")
.withCause(injectedException);
@@ -347,7 +340,7 @@
any(JdbcSnowflakeClient.ResultSetParser.class),
eq(null));
- Assertions.assertThat(actualList)
+ assertThat(actualList)
.containsExactly(
SnowflakeIdentifier.ofSchema("DB_1", "SCHEMA_1"),
SnowflakeIdentifier.ofSchema("DB_1", "SCHEMA_2"),
@@ -375,7 +368,7 @@
any(JdbcSnowflakeClient.ResultSetParser.class),
eq("DB_1"));
- Assertions.assertThat(actualList)
+ assertThat(actualList)
.containsExactly(
SnowflakeIdentifier.ofSchema("DB_1", "SCHEMA_1"),
SnowflakeIdentifier.ofSchema("DB_1", "SCHEMA_2"));
@@ -392,7 +385,7 @@
new SQLException(String.format("SQL exception with Error Code %d", 0), "2000", 0, null);
when(mockClientPool.run(any(ClientPool.Action.class))).thenThrow(injectedException);
- Assertions.assertThatExceptionOfType(UncheckedSQLException.class)
+ assertThatExceptionOfType(UncheckedSQLException.class)
.isThrownBy(() -> snowflakeClient.listSchemas(SnowflakeIdentifier.ofRoot()))
.withMessageContaining("Failed to list schemas for scope 'ROOT: '''")
.withCause(injectedException);
@@ -415,7 +408,7 @@
null);
when(mockClientPool.run(any(ClientPool.Action.class))).thenThrow(injectedException);
- Assertions.assertThatExceptionOfType(NoSuchNamespaceException.class)
+ assertThatExceptionOfType(NoSuchNamespaceException.class)
.isThrownBy(() -> snowflakeClient.listSchemas(SnowflakeIdentifier.ofDatabase("DB_1")))
.withMessageContaining(
String.format(
@@ -428,7 +421,7 @@
/** List schemas is not supported at Schema level */
@Test
public void testListSchemasAtSchemaLevel() {
- Assertions.assertThatExceptionOfType(IllegalArgumentException.class)
+ assertThatExceptionOfType(IllegalArgumentException.class)
.isThrownBy(
() -> snowflakeClient.listSchemas(SnowflakeIdentifier.ofSchema("DB_1", "SCHEMA_2")))
.withMessageContaining("Unsupported scope type for listSchemas: SCHEMA: 'DB_1.SCHEMA_2'");
@@ -445,7 +438,7 @@
Exception injectedException = new SQLException("Fake SQL exception");
when(mockClientPool.run(any(ClientPool.Action.class))).thenThrow(injectedException);
- Assertions.assertThatExceptionOfType(UncheckedSQLException.class)
+ assertThatExceptionOfType(UncheckedSQLException.class)
.isThrownBy(() -> snowflakeClient.listSchemas(SnowflakeIdentifier.ofDatabase("DB_1")))
.withMessageContaining("Failed to list schemas for scope 'DATABASE: 'DB_1''")
.withCause(injectedException);
@@ -461,7 +454,7 @@
Exception injectedException = new InterruptedException("Fake interrupted exception");
when(mockClientPool.run(any(ClientPool.Action.class))).thenThrow(injectedException);
- Assertions.assertThatExceptionOfType(UncheckedInterruptedException.class)
+ assertThatExceptionOfType(UncheckedInterruptedException.class)
.isThrownBy(() -> snowflakeClient.listSchemas(SnowflakeIdentifier.ofDatabase("DB_1")))
.withMessageContaining("Interrupted while listing schemas for scope 'DATABASE: 'DB_1''")
.withCause(injectedException);
@@ -506,7 +499,7 @@
any(JdbcSnowflakeClient.ResultSetParser.class),
eq(null));
- Assertions.assertThat(actualList)
+ assertThat(actualList)
.containsExactly(
SnowflakeIdentifier.ofTable("DB_1", "SCHEMA_1", "TABLE_1"),
SnowflakeIdentifier.ofTable("DB_1", "SCHEMA_1", "TABLE_2"),
@@ -545,7 +538,7 @@
any(JdbcSnowflakeClient.ResultSetParser.class),
eq("DB_1"));
- Assertions.assertThat(actualList)
+ assertThat(actualList)
.containsExactly(
SnowflakeIdentifier.ofTable("DB_1", "SCHEMA_1", "TABLE_1"),
SnowflakeIdentifier.ofTable("DB_1", "SCHEMA_1", "TABLE_2"),
@@ -574,7 +567,7 @@
any(JdbcSnowflakeClient.ResultSetParser.class),
eq("DB_1.SCHEMA_1"));
- Assertions.assertThat(actualList)
+ assertThat(actualList)
.containsExactly(
SnowflakeIdentifier.ofTable("DB_1", "SCHEMA_1", "TABLE_1"),
SnowflakeIdentifier.ofTable("DB_1", "SCHEMA_1", "TABLE_2"));
@@ -592,7 +585,7 @@
new SQLException(String.format("SQL exception with Error Code %d", 0), "2000", 0, null);
when(mockClientPool.run(any(ClientPool.Action.class))).thenThrow(injectedException);
- Assertions.assertThatExceptionOfType(UncheckedSQLException.class)
+ assertThatExceptionOfType(UncheckedSQLException.class)
.isThrownBy(() -> snowflakeClient.listIcebergTables(SnowflakeIdentifier.ofRoot()))
.withMessageContaining("Failed to list tables for scope 'ROOT: '''")
.withCause(injectedException);
@@ -615,7 +608,7 @@
null);
when(mockClientPool.run(any(ClientPool.Action.class))).thenThrow(injectedException);
- Assertions.assertThatExceptionOfType(NoSuchNamespaceException.class)
+ assertThatExceptionOfType(NoSuchNamespaceException.class)
.isThrownBy(
() -> snowflakeClient.listIcebergTables(SnowflakeIdentifier.ofDatabase("DB_1")))
.withMessageContaining(
@@ -643,7 +636,7 @@
null);
when(mockClientPool.run(any(ClientPool.Action.class))).thenThrow(injectedException);
- Assertions.assertThatExceptionOfType(NoSuchNamespaceException.class)
+ assertThatExceptionOfType(NoSuchNamespaceException.class)
.isThrownBy(
() ->
snowflakeClient.listIcebergTables(
@@ -667,7 +660,7 @@
Exception injectedException = new SQLException("Fake SQL exception");
when(mockClientPool.run(any(ClientPool.Action.class))).thenThrow(injectedException);
- Assertions.assertThatExceptionOfType(UncheckedSQLException.class)
+ assertThatExceptionOfType(UncheckedSQLException.class)
.isThrownBy(() -> snowflakeClient.listIcebergTables(SnowflakeIdentifier.ofDatabase("DB_1")))
.withMessageContaining("Failed to list tables for scope 'DATABASE: 'DB_1''")
.withCause(injectedException);
@@ -684,7 +677,7 @@
Exception injectedException = new InterruptedException("Fake interrupted exception");
when(mockClientPool.run(any(ClientPool.Action.class))).thenThrow(injectedException);
- Assertions.assertThatExceptionOfType(UncheckedInterruptedException.class)
+ assertThatExceptionOfType(UncheckedInterruptedException.class)
.isThrownBy(() -> snowflakeClient.listIcebergTables(SnowflakeIdentifier.ofDatabase("DB_1")))
.withMessageContaining("Interrupted while listing tables for scope 'DATABASE: 'DB_1''")
.withCause(injectedException);
@@ -719,7 +712,7 @@
"s3://tab1/metadata/v3.metadata.json",
"success",
null);
- Assertions.assertThat(actualMetadata).isEqualTo(expectedMetadata);
+ assertThat(actualMetadata).isEqualTo(expectedMetadata);
}
/**
@@ -751,7 +744,7 @@
"wasbs://mycontainer@myaccount.blob.core.windows.net/tab3/metadata/v334.metadata.json",
"success",
null);
- Assertions.assertThat(actualMetadata).isEqualTo(expectedMetadata);
+ assertThat(actualMetadata).isEqualTo(expectedMetadata);
}
/**
@@ -783,7 +776,7 @@
"gs://tab5/metadata/v793.metadata.json",
"success",
null);
- Assertions.assertThat(actualMetadata).isEqualTo(expectedMetadata);
+ assertThat(actualMetadata).isEqualTo(expectedMetadata);
}
/** Malformed JSON from a ResultSet should propagate as an IllegalArgumentException. */
@@ -791,7 +784,7 @@
public void testGetTableMetadataMalformedJson() throws SQLException {
when(mockResultSet.next()).thenReturn(true);
when(mockResultSet.getString("METADATA")).thenReturn("{\"malformed_no_closing_bracket");
- Assertions.assertThatExceptionOfType(IllegalArgumentException.class)
+ assertThatExceptionOfType(IllegalArgumentException.class)
.isThrownBy(
() ->
snowflakeClient.loadTableMetadata(
@@ -815,7 +808,7 @@
null);
when(mockClientPool.run(any(ClientPool.Action.class))).thenThrow(injectedException);
- Assertions.assertThatExceptionOfType(NoSuchTableException.class)
+ assertThatExceptionOfType(NoSuchTableException.class)
.isThrownBy(
() ->
snowflakeClient.loadTableMetadata(
@@ -839,7 +832,7 @@
Exception injectedException = new SQLException("Fake SQL exception");
when(mockClientPool.run(any(ClientPool.Action.class))).thenThrow(injectedException);
- Assertions.assertThatExceptionOfType(UncheckedSQLException.class)
+ assertThatExceptionOfType(UncheckedSQLException.class)
.isThrownBy(
() ->
snowflakeClient.loadTableMetadata(
@@ -858,7 +851,7 @@
Exception injectedException = new InterruptedException("Fake interrupted exception");
when(mockClientPool.run(any(ClientPool.Action.class))).thenThrow(injectedException);
- Assertions.assertThatExceptionOfType(UncheckedInterruptedException.class)
+ assertThatExceptionOfType(UncheckedInterruptedException.class)
.isThrownBy(
() ->
snowflakeClient.loadTableMetadata(
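The Snowflake client tests use the assertThatExceptionOfType(...) entry point rather than assertThatThrownBy(...): the expected type is declared up front, then isThrownBy, withMessageContaining, and withCause are chained. A minimal, hypothetical sketch of that shape (exception and messages invented for illustration):

import static org.assertj.core.api.Assertions.assertThatExceptionOfType;

import org.junit.jupiter.api.Test;

public class ExceptionOfTypeExample {
  @Test
  public void expectedTypeIsDeclaredFirst() {
    Exception injectedCause = new IllegalStateException("fake root cause");

    assertThatExceptionOfType(RuntimeException.class)
        .isThrownBy(
            () -> {
              throw new RuntimeException("Failed to list databases", injectedCause);
            })
        .withMessageContaining("Failed to list databases")
        .withCause(injectedCause);
  }
}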
diff --git a/snowflake/src/test/java/org/apache/iceberg/snowflake/NamespaceHelpersTest.java b/snowflake/src/test/java/org/apache/iceberg/snowflake/NamespaceHelpersTest.java
index ef47f5b..4de368a 100644
--- a/snowflake/src/test/java/org/apache/iceberg/snowflake/NamespaceHelpersTest.java
+++ b/snowflake/src/test/java/org/apache/iceberg/snowflake/NamespaceHelpersTest.java
@@ -18,9 +18,11 @@
*/
package org.apache.iceberg.snowflake;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatExceptionOfType;
+
import org.apache.iceberg.catalog.Namespace;
import org.apache.iceberg.catalog.TableIdentifier;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
public class NamespaceHelpersTest {
@@ -29,8 +31,8 @@
Namespace icebergNamespace = Namespace.empty();
SnowflakeIdentifier snowflakeIdentifier =
NamespaceHelpers.toSnowflakeIdentifier(icebergNamespace);
- Assertions.assertThat(snowflakeIdentifier).isEqualTo(SnowflakeIdentifier.ofRoot());
- Assertions.assertThat(NamespaceHelpers.toIcebergNamespace(snowflakeIdentifier))
+ assertThat(snowflakeIdentifier).isEqualTo(SnowflakeIdentifier.ofRoot());
+ assertThat(NamespaceHelpers.toIcebergNamespace(snowflakeIdentifier))
.isEqualTo(icebergNamespace);
}
@@ -39,8 +41,8 @@
Namespace icebergNamespace = Namespace.of("DB1");
SnowflakeIdentifier snowflakeIdentifier =
NamespaceHelpers.toSnowflakeIdentifier(icebergNamespace);
- Assertions.assertThat(snowflakeIdentifier).isEqualTo(SnowflakeIdentifier.ofDatabase("DB1"));
- Assertions.assertThat(NamespaceHelpers.toIcebergNamespace(snowflakeIdentifier))
+ assertThat(snowflakeIdentifier).isEqualTo(SnowflakeIdentifier.ofDatabase("DB1"));
+ assertThat(NamespaceHelpers.toIcebergNamespace(snowflakeIdentifier))
.isEqualTo(icebergNamespace);
}
@@ -49,9 +51,8 @@
Namespace icebergNamespace = Namespace.of("DB1", "SCHEMA1");
SnowflakeIdentifier snowflakeIdentifier =
NamespaceHelpers.toSnowflakeIdentifier(icebergNamespace);
- Assertions.assertThat(snowflakeIdentifier)
- .isEqualTo(SnowflakeIdentifier.ofSchema("DB1", "SCHEMA1"));
- Assertions.assertThat(NamespaceHelpers.toIcebergNamespace(snowflakeIdentifier))
+ assertThat(snowflakeIdentifier).isEqualTo(SnowflakeIdentifier.ofSchema("DB1", "SCHEMA1"));
+ assertThat(NamespaceHelpers.toIcebergNamespace(snowflakeIdentifier))
.isEqualTo(icebergNamespace);
}
@@ -59,15 +60,15 @@
public void testRoundTripTable() {
TableIdentifier icebergTable = TableIdentifier.of("DB1", "SCHEMA1", "TABLE1");
SnowflakeIdentifier snowflakeIdentifier = NamespaceHelpers.toSnowflakeIdentifier(icebergTable);
- Assertions.assertThat(snowflakeIdentifier)
+ assertThat(snowflakeIdentifier)
.isEqualTo(SnowflakeIdentifier.ofTable("DB1", "SCHEMA1", "TABLE1"));
- Assertions.assertThat(NamespaceHelpers.toIcebergTableIdentifier(snowflakeIdentifier))
+ assertThat(NamespaceHelpers.toIcebergTableIdentifier(snowflakeIdentifier))
.isEqualTo(icebergTable);
}
@Test
public void testToSnowflakeIdentifierMaxNamespaceLevel() {
- Assertions.assertThatExceptionOfType(IllegalArgumentException.class)
+ assertThatExceptionOfType(IllegalArgumentException.class)
.isThrownBy(
() ->
NamespaceHelpers.toSnowflakeIdentifier(
@@ -77,7 +78,7 @@
@Test
public void testToSnowflakeIdentifierTableBadNamespace() {
- Assertions.assertThatExceptionOfType(IllegalArgumentException.class)
+ assertThatExceptionOfType(IllegalArgumentException.class)
.isThrownBy(
() ->
NamespaceHelpers.toSnowflakeIdentifier(
@@ -87,7 +88,7 @@
@Test
public void testToIcebergNamespaceTableFails() {
- Assertions.assertThatExceptionOfType(IllegalArgumentException.class)
+ assertThatExceptionOfType(IllegalArgumentException.class)
.isThrownBy(
() ->
NamespaceHelpers.toIcebergNamespace(
@@ -97,7 +98,7 @@
@Test
public void testToIcebergTableIdentifier() {
- Assertions.assertThat(
+ assertThat(
NamespaceHelpers.toIcebergTableIdentifier(
SnowflakeIdentifier.ofTable("DB1", "SCHEMA1", "TABLE1")))
.isEqualTo(TableIdentifier.of("DB1", "SCHEMA1", "TABLE1"));
@@ -105,7 +106,7 @@
@Test
public void testToIcebergTableIdentifierWrongType() {
- Assertions.assertThatExceptionOfType(IllegalArgumentException.class)
+ assertThatExceptionOfType(IllegalArgumentException.class)
.isThrownBy(
() ->
NamespaceHelpers.toIcebergTableIdentifier(
diff --git a/snowflake/src/test/java/org/apache/iceberg/snowflake/SnowflakeCatalogTest.java b/snowflake/src/test/java/org/apache/iceberg/snowflake/SnowflakeCatalogTest.java
index adda8bc..b3d5cef 100644
--- a/snowflake/src/test/java/org/apache/iceberg/snowflake/SnowflakeCatalogTest.java
+++ b/snowflake/src/test/java/org/apache/iceberg/snowflake/SnowflakeCatalogTest.java
@@ -18,6 +18,9 @@
*/
package org.apache.iceberg.snowflake;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatExceptionOfType;
+
import java.io.IOException;
import java.util.Map;
import org.apache.iceberg.PartitionSpec;
@@ -32,7 +35,6 @@
import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap;
import org.apache.iceberg.relocated.com.google.common.collect.Maps;
import org.apache.iceberg.types.Types;
-import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
@@ -119,7 +121,7 @@
@Test
public void testInitializeNullClient() {
- Assertions.assertThatExceptionOfType(IllegalArgumentException.class)
+ assertThatExceptionOfType(IllegalArgumentException.class)
.isThrownBy(
() -> catalog.initialize(TEST_CATALOG_NAME, null, fakeFileIOFactory, properties))
.withMessageContaining("snowflakeClient must be non-null");
@@ -127,21 +129,21 @@
@Test
public void testInitializeNullFileIO() {
- Assertions.assertThatExceptionOfType(IllegalArgumentException.class)
+ assertThatExceptionOfType(IllegalArgumentException.class)
.isThrownBy(() -> catalog.initialize(TEST_CATALOG_NAME, fakeClient, null, properties))
.withMessageContaining("fileIOFactory must be non-null");
}
@Test
public void testListNamespaceInRoot() {
- Assertions.assertThat(catalog.listNamespaces())
+ assertThat(catalog.listNamespaces())
.containsExactly(Namespace.of("DB_1"), Namespace.of("DB_2"), Namespace.of("DB_3"));
}
@Test
public void testListNamespaceWithinDB() {
String dbName = "DB_1";
- Assertions.assertThat(catalog.listNamespaces(Namespace.of(dbName)))
+ assertThat(catalog.listNamespaces(Namespace.of(dbName)))
.containsExactly(Namespace.of(dbName, "SCHEMA_1"));
}
@@ -150,7 +152,7 @@
// Existence check for nonexistent parent namespaces is optional in the SupportsNamespaces
// interface.
String dbName = "NONEXISTENT_DB";
- Assertions.assertThatExceptionOfType(RuntimeException.class)
+ assertThatExceptionOfType(RuntimeException.class)
.isThrownBy(() -> catalog.listNamespaces(Namespace.of(dbName)))
.withMessageContaining("does not exist")
.withMessageContaining(dbName);
@@ -162,7 +164,7 @@
// a database.schema.
String dbName = "DB_3";
String schemaName = "SCHEMA_4";
- Assertions.assertThatExceptionOfType(IllegalArgumentException.class)
+ assertThatExceptionOfType(IllegalArgumentException.class)
.isThrownBy(() -> catalog.listNamespaces(Namespace.of(dbName, schemaName)))
.withMessageContaining("level")
.withMessageContaining("DB_3.SCHEMA_4");
@@ -170,7 +172,7 @@
@Test
public void testListTables() {
- Assertions.assertThatExceptionOfType(IllegalArgumentException.class)
+ assertThatExceptionOfType(IllegalArgumentException.class)
.isThrownBy(() -> catalog.listTables(Namespace.empty()))
.withMessageContaining("listTables must be at SCHEMA level");
}
@@ -178,7 +180,7 @@
@Test
public void testListTablesWithinDB() {
String dbName = "DB_1";
- Assertions.assertThatExceptionOfType(IllegalArgumentException.class)
+ assertThatExceptionOfType(IllegalArgumentException.class)
.isThrownBy(() -> catalog.listTables(Namespace.of(dbName)))
.withMessageContaining("listTables must be at SCHEMA level");
}
@@ -187,7 +189,7 @@
public void testListTablesWithinNonexistentDB() {
String dbName = "NONEXISTENT_DB";
String schemaName = "NONEXISTENT_SCHEMA";
- Assertions.assertThatExceptionOfType(RuntimeException.class)
+ assertThatExceptionOfType(RuntimeException.class)
.isThrownBy(() -> catalog.listTables(Namespace.of(dbName, schemaName)))
.withMessageContaining("does not exist")
.withMessageContaining(dbName);
@@ -197,7 +199,7 @@
public void testListTablesWithinSchema() {
String dbName = "DB_2";
String schemaName = "SCHEMA_2";
- Assertions.assertThat(catalog.listTables(Namespace.of(dbName, schemaName)))
+ assertThat(catalog.listTables(Namespace.of(dbName, schemaName)))
.containsExactly(
TableIdentifier.of("DB_2", "SCHEMA_2", "TAB_3"),
TableIdentifier.of("DB_2", "SCHEMA_2", "TAB_4"));
@@ -207,7 +209,7 @@
public void testListTablesWithinNonexistentSchema() {
String dbName = "DB_2";
String schemaName = "NONEXISTENT_SCHEMA";
- Assertions.assertThatExceptionOfType(RuntimeException.class)
+ assertThatExceptionOfType(RuntimeException.class)
.isThrownBy(() -> catalog.listTables(Namespace.of(dbName, schemaName)))
.withMessageContaining("does not exist")
.withMessageContaining("DB_2.NONEXISTENT_SCHEMA");
@@ -216,32 +218,32 @@
@Test
public void testLoadS3Table() {
Table table = catalog.loadTable(TableIdentifier.of(Namespace.of("DB_1", "SCHEMA_1"), "TAB_1"));
- Assertions.assertThat(table.location()).isEqualTo("s3://tab1");
+ assertThat(table.location()).isEqualTo("s3://tab1");
}
@Test
public void testLoadAzureTable() {
Table table = catalog.loadTable(TableIdentifier.of(Namespace.of("DB_2", "SCHEMA_2"), "TAB_3"));
- Assertions.assertThat(table.location())
+ assertThat(table.location())
.isEqualTo("wasbs://mycontainer@myaccount.blob.core.windows.net/tab1");
}
@Test
public void testLoadGcsTable() {
Table table = catalog.loadTable(TableIdentifier.of(Namespace.of("DB_3", "SCHEMA_3"), "TAB_5"));
- Assertions.assertThat(table.location()).isEqualTo("gs://tab5");
+ assertThat(table.location()).isEqualTo("gs://tab5");
}
@Test
public void testLoadTableWithMalformedTableIdentifier() {
- Assertions.assertThatExceptionOfType(IllegalArgumentException.class)
+ assertThatExceptionOfType(IllegalArgumentException.class)
.isThrownBy(
() ->
catalog.loadTable(
TableIdentifier.of(Namespace.of("DB_1", "SCHEMA_1", "BAD_NS_LEVEL"), "TAB_1")))
.withMessageContaining("level")
.withMessageContaining("DB_1.SCHEMA_1.BAD_NS_LEVEL");
- Assertions.assertThatExceptionOfType(IllegalArgumentException.class)
+ assertThatExceptionOfType(IllegalArgumentException.class)
.isThrownBy(
() -> catalog.loadTable(TableIdentifier.of(Namespace.of("DB_WITHOUT_SCHEMA"), "TAB_1")))
.withMessageContaining("level")
@@ -256,7 +258,7 @@
// add a catalog to auto-close() helpers but end up never using/initializing a catalog.
catalog.close();
- Assertions.assertThat(fakeClient.isClosed())
+ assertThat(fakeClient.isClosed())
.overridingErrorMessage("expected not to have called close() on snowflakeClient")
.isFalse();
}
@@ -265,10 +267,10 @@
public void testClose() throws IOException {
catalog.newTableOps(TableIdentifier.of("DB_1", "SCHEMA_1", "TAB_1"));
catalog.close();
- Assertions.assertThat(fakeClient.isClosed())
+ assertThat(fakeClient.isClosed())
.overridingErrorMessage("expected close() to propagate to snowflakeClient")
.isTrue();
- Assertions.assertThat(fakeFileIO.isClosed())
+ assertThat(fakeFileIO.isClosed())
.overridingErrorMessage("expected close() to propagate to fileIO")
.isTrue();
}
@@ -278,21 +280,19 @@
SnowflakeTableOperations castedTableOps =
(SnowflakeTableOperations)
catalog.newTableOps(TableIdentifier.of("DB_1", "SCHEMA_1", "TAB_1"));
- Assertions.assertThat(castedTableOps.fullTableName()).isEqualTo("slushLog.DB_1.SCHEMA_1.TAB_1");
+ assertThat(castedTableOps.fullTableName()).isEqualTo("slushLog.DB_1.SCHEMA_1.TAB_1");
}
@Test
public void testDatabaseExists() {
- Assertions.assertThat(catalog.namespaceExists(Namespace.of("DB_1"))).isTrue();
- Assertions.assertThat(catalog.namespaceExists(Namespace.of("NONEXISTENT_DB"))).isFalse();
+ assertThat(catalog.namespaceExists(Namespace.of("DB_1"))).isTrue();
+ assertThat(catalog.namespaceExists(Namespace.of("NONEXISTENT_DB"))).isFalse();
}
@Test
public void testSchemaExists() {
- Assertions.assertThat(catalog.namespaceExists(Namespace.of("DB_1", "SCHEMA_1"))).isTrue();
- Assertions.assertThat(catalog.namespaceExists(Namespace.of("DB_1", "NONEXISTENT_SCHEMA")))
- .isFalse();
- Assertions.assertThat(catalog.namespaceExists(Namespace.of("NONEXISTENT_DB", "SCHEMA_1")))
- .isFalse();
+ assertThat(catalog.namespaceExists(Namespace.of("DB_1", "SCHEMA_1"))).isTrue();
+ assertThat(catalog.namespaceExists(Namespace.of("DB_1", "NONEXISTENT_SCHEMA"))).isFalse();
+ assertThat(catalog.namespaceExists(Namespace.of("NONEXISTENT_DB", "SCHEMA_1"))).isFalse();
}
}
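SnowflakeCatalogTest also relies on overridingErrorMessage(...), which replaces AssertJ's default failure message entirely rather than prefixing it the way .as(...) does. A small illustrative sketch (test name and values are assumptions, not from the PR):

import static org.assertj.core.api.Assertions.assertThat;

import org.junit.jupiter.api.Test;

public class OverridingErrorMessageExample {
  @Test
  public void failureMessageIsReplacedEntirely() {
    boolean clientClosed = false;

    // If this assertion failed, only the overriding message would be printed,
    // without the usual "expected ... but was ..." detail.
    assertThat(clientClosed)
        .overridingErrorMessage("expected not to have called close() on snowflakeClient")
        .isFalse();
  }
}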