
[HUDI-4224] Fix CI issues (#5842)

- Upgrade JUnit to 5.7.2 (see the version-pinning sketch below)
- Downgrade Surefire and Failsafe to 2.22.2
- Fix test failures that previously went unreported
- Improve the Azure pipeline configs

Co-authored-by: liujinhui1994 <965147871@qq.com>
Co-authored-by: Y Ethan Guo <ethan.guoyihua@gmail.com>
Author: Shiyan Xu
Date: 2022-06-12 11:44:18 -07:00
Committed by: GitHub
Parent: fd8f7c5f6c
Commit: 5aaac21d1d
61 changed files with 811 additions and 157 deletions
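
The root pom.xml hunk that pins the versions mentioned above (JUnit 5.7.2, Surefire/Failsafe 2.22.2) is not included in the diffs below. A minimal sketch, assuming conventional Maven coordinates and a parent-pom property name not copied from this commit, of what such pinning looks like:

    <properties>
      <!-- assumed property name; pins the JUnit 5 line used by Jupiter/Vintage/Platform artifacts -->
      <junit.jupiter.version>5.7.2</junit.jupiter.version>
    </properties>
    <build>
      <pluginManagement>
        <plugins>
          <!-- Surefire runs unit tests, Failsafe runs integration tests; both pinned to 2.22.2 -->
          <plugin>
            <groupId>org.apache.maven.plugins</groupId>
            <artifactId>maven-surefire-plugin</artifactId>
            <version>2.22.2</version>
          </plugin>
          <plugin>
            <groupId>org.apache.maven.plugins</groupId>
            <artifactId>maven-failsafe-plugin</artifactId>
            <version>2.22.2</version>
          </plugin>
        </plugins>
      </pluginManagement>
    </build>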


@@ -21,12 +21,68 @@ trigger:
pool:
vmImage: 'ubuntu-18.04'
parameters:
- name: job1Modules
type: object
default:
- 'hudi-common'
- 'hudi-flink-datasource/hudi-flink'
- 'hudi-flink-datasource/hudi-flink1.13.x'
- 'hudi-flink-datasource/hudi-flink1.14.x'
- name: job2Modules
type: object
default:
- 'hudi-client/hudi-spark-client'
- name: job3Modules
type: object
default:
- 'hudi-cli'
- 'hudi-client/hudi-client-common'
- 'hudi-client/hudi-flink-client'
- 'hudi-client/hudi-java-client'
- 'hudi-sync/hudi-adb-sync'
- 'hudi-sync/hudi-datahub-sync'
- 'hudi-sync/hudi-hive-sync'
- 'hudi-sync/hudi-sync-common'
- 'hudi-utilities'
- name: job4Modules
type: object
default:
- '!hudi-cli'
- '!hudi-client'
- '!hudi-client/hudi-client-common'
- '!hudi-client/hudi-flink-client'
- '!hudi-client/hudi-java-client'
- '!hudi-client/hudi-spark-client'
- '!hudi-common'
- '!hudi-examples'
- '!hudi-examples/hudi-examples-common'
- '!hudi-examples/hudi-examples-flink'
- '!hudi-examples/hudi-examples-java'
- '!hudi-examples/hudi-examples-spark'
- '!hudi-flink-datasource'
- '!hudi-flink-datasource/hudi-flink'
- '!hudi-flink-datasource/hudi-flink1.13.x'
- '!hudi-flink-datasource/hudi-flink1.14.x'
- '!hudi-sync'
- '!hudi-sync/hudi-adb-sync'
- '!hudi-sync/hudi-datahub-sync'
- '!hudi-sync/hudi-hive-sync'
- '!hudi-sync/hudi-sync-common'
- '!hudi-utilities'
variables:
MAVEN_OPTS: '-Dcheckstyle.skip=true -Drat.skip=true -Djacoco.skip=true'
BUILD_PROFILES: '-Dscala-2.11 -Dspark2 -Dflink1.14'
PLUGIN_OPTS: '-Dcheckstyle.skip=true -Drat.skip=true -Djacoco.skip=true'
MVN_OPTS_INSTALL: '-T 2.5C -DskipTests $(BUILD_PROFILES) $(PLUGIN_OPTS)'
MVN_OPTS_TEST: '-fae $(BUILD_PROFILES) $(PLUGIN_OPTS)'
SPARK_VERSION: '2.4.4'
HADOOP_VERSION: '2.7'
SPARK_ARCHIVE: spark-$(SPARK_VERSION)-bin-hadoop$(HADOOP_VERSION)
EXCLUDE_TESTED_MODULES: '!hudi-examples/hudi-examples-common,!hudi-examples/hudi-examples-flink,!hudi-examples/hudi-examples-java,!hudi-examples/hudi-examples-spark,!hudi-common,!hudi-flink-datasource/hudi-flink,!hudi-client/hudi-spark-client,!hudi-client/hudi-client-common,!hudi-client/hudi-flink-client,!hudi-client/hudi-java-client,!hudi-cli,!hudi-utilities,!hudi-sync/hudi-hive-sync'
JOB1_MODULES: ${{ join(',',parameters.job1Modules) }}
JOB2_MODULES: ${{ join(',',parameters.job2Modules) }}
JOB3_MODULES: ${{ join(',',parameters.job3Modules) }}
JOB4_MODULES: ${{ join(',',parameters.job4Modules) }}
stages:
- stage: test
@@ -40,28 +96,27 @@ stages:
inputs:
mavenPomFile: 'pom.xml'
goals: 'clean install'
options: -T 2.5C -DskipTests
options: $(MVN_OPTS_INSTALL)
publishJUnitResults: false
jdkVersionOption: '1.8'
mavenOptions: '-Xmx4g $(MAVEN_OPTS)'
- task: Maven@3
displayName: UT common flink client/spark-client
inputs:
mavenPomFile: 'pom.xml'
goals: 'test'
options: -Punit-tests -pl hudi-common,hudi-flink-datasource/hudi-flink,hudi-client/hudi-spark-client
options: $(MVN_OPTS_TEST) -Punit-tests -pl $(JOB1_MODULES),hudi-client/hudi-spark-client
publishJUnitResults: false
jdkVersionOption: '1.8'
mavenOptions: '-Xmx4g $(MAVEN_OPTS)'
mavenOptions: '-Xmx4g'
- task: Maven@3
displayName: FT common flink
inputs:
mavenPomFile: 'pom.xml'
goals: 'test'
options: -Pfunctional-tests -pl hudi-common,hudi-flink-datasource/hudi-flink
options: $(MVN_OPTS_TEST) -Pfunctional-tests -pl $(JOB1_MODULES)
publishJUnitResults: false
jdkVersionOption: '1.8'
mavenOptions: '-Xmx4g $(MAVEN_OPTS)'
mavenOptions: '-Xmx4g'
- job: UT_FT_2
displayName: FT client/spark-client
timeoutInMinutes: '120'
@@ -71,21 +126,20 @@ stages:
inputs:
mavenPomFile: 'pom.xml'
goals: 'clean install'
options: -T 2.5C -DskipTests
options: $(MVN_OPTS_INSTALL)
publishJUnitResults: false
jdkVersionOption: '1.8'
mavenOptions: '-Xmx4g $(MAVEN_OPTS)'
- task: Maven@3
displayName: FT client/spark-client
inputs:
mavenPomFile: 'pom.xml'
goals: 'test'
options: -Pfunctional-tests -pl hudi-client/hudi-spark-client
options: $(MVN_OPTS_TEST) -Pfunctional-tests -pl $(JOB2_MODULES)
publishJUnitResults: false
jdkVersionOption: '1.8'
mavenOptions: '-Xmx4g $(MAVEN_OPTS)'
mavenOptions: '-Xmx4g'
- job: UT_FT_3
displayName: UT FT clients & cli & utilities & sync/hive-sync
displayName: UT FT clients & cli & utilities & sync
timeoutInMinutes: '120'
steps:
- task: Maven@3
@@ -93,28 +147,27 @@ stages:
inputs:
mavenPomFile: 'pom.xml'
goals: 'clean install'
options: -T 2.5C -DskipTests
options: $(MVN_OPTS_INSTALL)
publishJUnitResults: false
jdkVersionOption: '1.8'
mavenOptions: '-Xmx4g $(MAVEN_OPTS)'
- task: Maven@3
displayName: UT clients & cli & utilities & sync/hive-sync
displayName: UT clients & cli & utilities & sync
inputs:
mavenPomFile: 'pom.xml'
goals: 'test'
options: -Punit-tests -pl hudi-client/hudi-client-common,hudi-client/hudi-flink-client,hudi-client/hudi-java-client,hudi-cli,hudi-utilities,hudi-sync/hudi-hive-sync
options: $(MVN_OPTS_TEST) -Punit-tests -pl $(JOB3_MODULES)
publishJUnitResults: false
jdkVersionOption: '1.8'
mavenOptions: '-Xmx4g $(MAVEN_OPTS)'
mavenOptions: '-Xmx4g'
- task: Maven@3
displayName: FT clients & cli & utilities & sync/hive-sync
displayName: FT clients & cli & utilities & sync
inputs:
mavenPomFile: 'pom.xml'
goals: 'test'
options: -Pfunctional-tests -pl hudi-client/hudi-client-common,hudi-client/hudi-flink-client,hudi-client/hudi-java-client,hudi-cli,hudi-utilities,hudi-sync/hudi-hive-sync
options: $(MVN_OPTS_TEST) -Pfunctional-tests -pl $(JOB3_MODULES)
publishJUnitResults: false
jdkVersionOption: '1.8'
mavenOptions: '-Xmx4g $(MAVEN_OPTS)'
mavenOptions: '-Xmx4g'
- job: UT_FT_4
displayName: UT FT other modules
timeoutInMinutes: '120'
@@ -124,28 +177,27 @@ stages:
inputs:
mavenPomFile: 'pom.xml'
goals: 'clean install'
options: -T 2.5C -DskipTests
options: $(MVN_OPTS_INSTALL)
publishJUnitResults: false
jdkVersionOption: '1.8'
mavenOptions: '-Xmx4g $(MAVEN_OPTS)'
- task: Maven@3
displayName: UT other modules
inputs:
mavenPomFile: 'pom.xml'
goals: 'test'
options: -Punit-tests -pl $(EXCLUDE_TESTED_MODULES)
options: $(MVN_OPTS_TEST) -Punit-tests -pl $(JOB4_MODULES)
publishJUnitResults: false
jdkVersionOption: '1.8'
mavenOptions: '-Xmx4g $(MAVEN_OPTS)'
mavenOptions: '-Xmx4g'
- task: Maven@3
displayName: FT other modules
inputs:
mavenPomFile: 'pom.xml'
goals: 'test'
options: -Pfunctional-tests -pl $(EXCLUDE_TESTED_MODULES)
options: $(MVN_OPTS_TEST) -Pfunctional-tests -pl $(JOB4_MODULES)
publishJUnitResults: false
jdkVersionOption: '1.8'
mavenOptions: '-Xmx4g $(MAVEN_OPTS)'
mavenOptions: '-Xmx4g'
- job: IT
displayName: IT modules
timeoutInMinutes: '120'
@@ -155,19 +207,18 @@ stages:
inputs:
mavenPomFile: 'pom.xml'
goals: 'clean install'
options: -T 2.5C -Pintegration-tests -DskipTests
options: $(MVN_OPTS_INSTALL) -Pintegration-tests
publishJUnitResults: false
jdkVersionOption: '1.8'
mavenOptions: '-Xmx4g $(MAVEN_OPTS)'
- task: Maven@3
displayName: UT integ-test
inputs:
mavenPomFile: 'pom.xml'
goals: 'test'
options: -Pintegration-tests -DskipUTs=false -DskipITs=true -pl hudi-integ-test test
options: $(MVN_OPTS_TEST) -Pintegration-tests -DskipUTs=false -DskipITs=true -pl hudi-integ-test
publishJUnitResults: false
jdkVersionOption: '1.8'
mavenOptions: '-Xmx4g $(MAVEN_OPTS)'
mavenOptions: '-Xmx4g'
- task: AzureCLI@2
displayName: Prepare for IT
inputs:
@@ -180,5 +231,6 @@ stages:
tar -xvf $(Pipeline.Workspace)/$(SPARK_ARCHIVE).tgz -C $(Pipeline.Workspace)/
mkdir /tmp/spark-events/
- script: |
mvn $(MAVEN_OPTS) -Pintegration-tests verify
export SPARK_HOME=$(Pipeline.Workspace)/$(SPARK_ARCHIVE)
mvn $(MVN_OPTS_TEST) -Pintegration-tests verify
displayName: IT


@@ -144,11 +144,41 @@
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-engine</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.junit.vintage</groupId>
<artifactId>junit-vintage-engine</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-params</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.mockito</groupId>
<artifactId>mockito-junit-jupiter</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.junit.platform</groupId>
<artifactId>junit-platform-runner</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.junit.platform</groupId>
<artifactId>junit-platform-suite-api</artifactId>
<scope>test</scope>
</dependency>
</dependencies>
<build>
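
This dependency block is the pattern repeated across most module poms in this commit: on the JUnit Platform, tests only run when an engine is on the test classpath, so each module gets junit-jupiter-engine for JUnit 5 tests plus junit-vintage-engine so the remaining JUnit 4 tests stay discoverable. A sketch of how the versions are presumably kept aligned (assuming a BOM import in the parent pom, which this diff does not show):

    <dependencyManagement>
      <dependencies>
        <!-- importing the BOM aligns junit-jupiter-*, junit-vintage-engine and junit-platform-* versions -->
        <dependency>
          <groupId>org.junit</groupId>
          <artifactId>junit-bom</artifactId>
          <version>5.7.2</version>
          <type>pom</type>
          <scope>import</scope>
        </dependency>
      </dependencies>
    </dependencyManagement>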


@@ -205,6 +205,24 @@
<artifactId>log4j</artifactId>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-core</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-api</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-slf4j-impl</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.parquet</groupId>
<artifactId>parquet-avro</artifactId>
@@ -259,31 +277,71 @@
<artifactId>hadoop-hdfs</artifactId>
</dependency>
<!-- HDFS test dependencies -->
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<classifier>tests</classifier>
<scope>test</scope>
<exclusions>
<exclusion>
<groupId>org.mortbay.jetty</groupId>
<artifactId>*</artifactId>
</exclusion>
<exclusion>
<groupId>javax.servlet.jsp</groupId>
<artifactId>*</artifactId>
</exclusion>
<exclusion>
<groupId>javax.servlet</groupId>
<artifactId>*</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-hdfs</artifactId>
<classifier>tests</classifier>
<scope>test</scope>
<exclusions>
<exclusion>
<groupId>javax.servlet</groupId>
<artifactId>*</artifactId>
</exclusion>
<exclusion>
<artifactId>netty</artifactId>
<groupId>io.netty</groupId>
</exclusion>
<exclusion>
<artifactId>netty-all</artifactId>
<groupId>io.netty</groupId>
</exclusion>
</exclusions>
</dependency>
<!-- Test -->
<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-api</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-engine</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.junit.vintage</groupId>
<artifactId>junit-vintage-engine</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-params</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.mockito</groupId>
<artifactId>mockito-junit-jupiter</artifactId>


@@ -148,7 +148,7 @@ public class SparkMain {
}
configs = new ArrayList<>();
if (args.length > 9) {
configs.addAll(Arrays.asList(args).subList(8, args.length));
configs.addAll(Arrays.asList(args).subList(9, args.length));
}
returnCode = compact(jsc, args[3], args[4], null, Integer.parseInt(args[5]), args[6],


@@ -16,7 +16,7 @@
* limitations under the License.
*/
package org.apache.hudi.cli.testutils;
package org.apache.hudi.cli;
import org.apache.hudi.common.util.Option;
import org.apache.hudi.cli.utils.SparkUtil;
@@ -30,7 +30,7 @@ import java.net.URISyntaxException;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
public class SparkUtilTest {
public class TestSparkUtil {
@Test
public void testInitSparkLauncher() throws URISyntaxException {


@@ -22,7 +22,7 @@ import org.apache.hadoop.fs.Path;
import org.apache.hudi.cli.HoodieCLI;
import org.apache.hudi.cli.HoodiePrintHelper;
import org.apache.hudi.cli.commands.TableCommand;
import org.apache.hudi.cli.testutils.AbstractShellIntegrationTest;
import org.apache.hudi.cli.testutils.HoodieCLIIntegrationTestBase;
import org.apache.hudi.functional.TestBootstrap;
import org.apache.hudi.common.model.HoodieTableType;
import org.apache.hudi.common.table.timeline.versioning.TimelineLayoutVersion;
@@ -44,7 +44,7 @@ import static org.junit.jupiter.api.Assertions.assertTrue;
/**
* Test class of {@link org.apache.hudi.cli.commands.BootstrapCommand}.
*/
public class ITTestBootstrapCommand extends AbstractShellIntegrationTest {
public class ITTestBootstrapCommand extends HoodieCLIIntegrationTestBase {
private static final int NUM_OF_RECORDS = 100;
private static final String PARTITION_FIELD = "datestr";


@@ -20,7 +20,7 @@ package org.apache.hudi.cli.integ;
import org.apache.hudi.cli.HoodieCLI;
import org.apache.hudi.cli.commands.TableCommand;
import org.apache.hudi.cli.testutils.AbstractShellIntegrationTest;
import org.apache.hudi.cli.testutils.HoodieCLIIntegrationTestBase;
import org.apache.hudi.client.SparkRDDWriteClient;
import org.apache.hudi.client.WriteStatus;
import org.apache.hudi.client.common.HoodieSparkEngineContext;
@@ -57,7 +57,7 @@ import static org.junit.jupiter.api.Assertions.assertTrue;
A command using SparkLauncher needs to load the jars under lib, which are generated during mvn package.
Use an integration test instead of a unit test.
*/
public class ITTestClusteringCommand extends AbstractShellIntegrationTest {
public class ITTestClusteringCommand extends HoodieCLIIntegrationTestBase {
@BeforeEach
public void init() throws IOException {
@@ -105,9 +105,10 @@ public class ITTestClusteringCommand extends AbstractShellIntegrationTest {
// get clustering instance
HoodieActiveTimeline timeline = HoodieCLI.getTableMetaClient().getActiveTimeline();
Option<String> instance =
Option<String> instanceOpt =
timeline.filterPendingReplaceTimeline().firstInstant().map(HoodieInstant::getTimestamp);
assertTrue(instance.isPresent(), "Must have pending clustering.");
assertTrue(instanceOpt.isPresent(), "Must have pending clustering.");
final String instance = instanceOpt.get();
CommandResult cr2 = getShell().executeCommand(
String.format("clustering run --parallelism %s --clusteringInstant %s --sparkMaster %s",


@@ -21,7 +21,7 @@ package org.apache.hudi.cli.integ;
import org.apache.hudi.cli.HoodieCLI;
import org.apache.hudi.cli.commands.RollbacksCommand;
import org.apache.hudi.cli.commands.TableCommand;
import org.apache.hudi.cli.testutils.AbstractShellIntegrationTest;
import org.apache.hudi.cli.testutils.HoodieCLIIntegrationTestBase;
import org.apache.hudi.common.model.HoodieTableType;
import org.apache.hudi.common.table.HoodieTableMetaClient;
import org.apache.hudi.common.table.timeline.HoodieActiveTimeline;
@@ -29,6 +29,7 @@ import org.apache.hudi.common.table.timeline.versioning.TimelineLayoutVersion;
import org.apache.hudi.common.testutils.HoodieTestTable;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;
import org.springframework.shell.core.CommandResult;
@@ -41,6 +42,7 @@ import static org.apache.hudi.common.testutils.HoodieTestDataGenerator.DEFAULT_F
import static org.apache.hudi.common.testutils.HoodieTestDataGenerator.DEFAULT_PARTITION_PATHS;
import static org.apache.hudi.common.testutils.HoodieTestDataGenerator.DEFAULT_SECOND_PARTITION_PATH;
import static org.apache.hudi.common.testutils.HoodieTestDataGenerator.DEFAULT_THIRD_PARTITION_PATH;
import static org.junit.jupiter.api.Assertions.assertAll;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;
@@ -50,7 +52,13 @@ import static org.junit.jupiter.api.Assertions.assertTrue;
A command using SparkLauncher needs to load the jars under lib, which are generated during mvn package.
Use an integration test instead of a unit test.
*/
public class ITTestCommitsCommand extends AbstractShellIntegrationTest {
@Disabled("HUDI-4226")
public class ITTestCommitsCommand extends HoodieCLIIntegrationTestBase {
@Override
protected HoodieTableType getTableType() {
return HoodieTableType.COPY_ON_WRITE;
}
@BeforeEach
public void init() throws IOException {
@@ -79,19 +87,21 @@ public class ITTestCommitsCommand extends AbstractShellIntegrationTest {
put(DEFAULT_THIRD_PARTITION_PATH, "file-3");
}
};
final String rollbackCommit = "102";
HoodieTestTable.of(metaClient)
.withPartitionMetaFiles(DEFAULT_PARTITION_PATHS)
.addCommit("100")
.withBaseFilesInPartitions(partitionAndFileId)
.addCommit("101")
.withBaseFilesInPartitions(partitionAndFileId)
.addCommit(rollbackCommit)
.addCommit("102")
.withBaseFilesInPartitions(partitionAndFileId);
CommandResult cr = getShell().executeCommand(String.format("commit rollback --commit %s --sparkMaster %s --sparkMemory %s",
rollbackCommit, "local", "4G"));
assertTrue(cr.isSuccess());
"102", "local", "4G"));
assertAll("Command run failed",
() -> assertTrue(cr.isSuccess()),
() -> assertEquals("Commit 102 rolled back", cr.getResult().toString()));
metaClient = HoodieTableMetaClient.reload(HoodieCLI.getTableMetaClient());
@@ -103,15 +113,17 @@ public class ITTestCommitsCommand extends AbstractShellIntegrationTest {
// rollback complete commit
CommandResult cr2 = getShell().executeCommand(String.format("commit rollback --commit %s --sparkMaster %s --sparkMemory %s",
"101", "local", "4G"));
assertTrue(cr2.isSuccess());
"101", "local", "4G"));
assertAll("Command run failed",
() -> assertTrue(cr2.isSuccess()),
() -> assertEquals("Commit 101 rolled back", cr2.getResult().toString()));
metaClient = HoodieTableMetaClient.reload(HoodieCLI.getTableMetaClient());
HoodieActiveTimeline rollbackTimeline2 = new RollbacksCommand.RollbackTimeline(metaClient);
assertEquals(1, rollbackTimeline2.getRollbackTimeline().countInstants(), "There should have 2 rollback instant.");
assertEquals(2, rollbackTimeline2.getRollbackTimeline().countInstants(), "There should have 2 rollback instant.");
HoodieActiveTimeline timeline2 = metaClient.reloadActiveTimeline();
assertEquals(2, timeline2.getCommitsTimeline().countInstants(), "There should have 1 instants.");
assertEquals(1, timeline2.getCommitsTimeline().countInstants(), "There should have 1 instants.");
}
}


@@ -20,7 +20,7 @@ package org.apache.hudi.cli.integ;
import org.apache.hudi.cli.HoodieCLI;
import org.apache.hudi.cli.commands.TableCommand;
import org.apache.hudi.cli.testutils.AbstractShellIntegrationTest;
import org.apache.hudi.cli.testutils.HoodieCLIIntegrationTestBase;
import org.apache.hudi.client.CompactionAdminClient;
import org.apache.hudi.client.SparkRDDWriteClient;
import org.apache.hudi.client.TestCompactionAdminClient;
@@ -69,7 +69,7 @@ import static org.junit.jupiter.api.Assertions.assertTrue;
A command using SparkLauncher needs to load the jars under lib, which are generated during mvn package.
Use an integration test instead of a unit test.
*/
public class ITTestCompactionCommand extends AbstractShellIntegrationTest {
public class ITTestCompactionCommand extends HoodieCLIIntegrationTestBase {
@BeforeEach
public void init() throws IOException {
@@ -147,7 +147,8 @@ public class ITTestCompactionCommand extends AbstractShellIntegrationTest {
writeSchemaToTmpFile(schemaPath);
CommandResult cr2 = getShell().executeCommand(
String.format("compaction scheduleAndExecute --parallelism %s --schemaFilePath %s --sparkMaster %s",
String.format("compaction scheduleAndExecute --parallelism %s --schemaFilePath %s --sparkMaster %s "
+ "--hoodieConfigs hoodie.compact.inline.max.delta.commits=1",
2, schemaPath, "local"));
assertAll("Command run failed",


@@ -20,7 +20,7 @@ package org.apache.hudi.cli.integ;
import org.apache.hudi.cli.HoodieCLI;
import org.apache.hudi.cli.commands.TableCommand;
import org.apache.hudi.cli.testutils.AbstractShellIntegrationTest;
import org.apache.hudi.cli.testutils.HoodieCLIIntegrationTestBase;
import org.apache.hudi.common.model.HoodieTableType;
import org.apache.hudi.common.table.HoodieTableMetaClient;
import org.apache.hudi.common.table.timeline.versioning.TimelineLayoutVersion;
@@ -55,7 +55,7 @@ import static org.junit.jupiter.api.Assertions.assertTrue;
* Test class for {@link org.apache.hudi.cli.commands.HDFSParquetImportCommand}.
*/
@Disabled("Disable due to flakiness and feature deprecation.")
public class ITTestHDFSParquetImportCommand extends AbstractShellIntegrationTest {
public class ITTestHDFSParquetImportCommand extends HoodieCLIIntegrationTestBase {
private Path sourcePath;
private Path targetPath;


@@ -20,7 +20,7 @@ package org.apache.hudi.cli.integ;
import org.apache.hadoop.fs.Path;
import org.apache.hudi.cli.commands.TableCommand;
import org.apache.hudi.cli.testutils.AbstractShellIntegrationTest;
import org.apache.hudi.cli.testutils.HoodieCLIIntegrationTestBase;
import org.apache.hudi.common.model.HoodieTableType;
import org.apache.hudi.common.model.IOType;
import org.apache.hudi.common.table.timeline.versioning.TimelineLayoutVersion;
@@ -40,7 +40,7 @@ import static org.junit.jupiter.api.Assertions.assertTrue;
A command using SparkLauncher needs to load the jars under lib, which are generated during mvn package.
Use an integration test instead of a unit test.
*/
public class ITTestMarkersCommand extends AbstractShellIntegrationTest {
public class ITTestMarkersCommand extends HoodieCLIIntegrationTestBase {
private String tablePath;


@@ -22,7 +22,7 @@ import org.apache.hudi.avro.HoodieAvroUtils;
import org.apache.hudi.cli.HoodieCLI;
import org.apache.hudi.cli.commands.RepairsCommand;
import org.apache.hudi.cli.commands.TableCommand;
import org.apache.hudi.cli.testutils.AbstractShellIntegrationTest;
import org.apache.hudi.cli.testutils.HoodieCLIIntegrationTestBase;
import org.apache.hudi.common.model.HoodieBaseFile;
import org.apache.hudi.common.model.HoodieFileFormat;
import org.apache.hudi.common.model.HoodieRecord;
@@ -57,7 +57,7 @@ import static org.junit.jupiter.api.Assertions.assertTrue;
A command using SparkLauncher needs to load the jars under lib, which are generated during mvn package.
Use an integration test instead of a unit test.
*/
public class ITTestRepairsCommand extends AbstractShellIntegrationTest {
public class ITTestRepairsCommand extends HoodieCLIIntegrationTestBase {
private String duplicatedPartitionPath;
private String duplicatedPartitionPathWithUpdates;


@@ -21,7 +21,7 @@ package org.apache.hudi.cli.integ;
import org.apache.hadoop.fs.Path;
import org.apache.hudi.cli.HoodieCLI;
import org.apache.hudi.cli.commands.TableCommand;
import org.apache.hudi.cli.testutils.AbstractShellIntegrationTest;
import org.apache.hudi.cli.testutils.HoodieCLIIntegrationTestBase;
import org.apache.hudi.client.common.HoodieSparkEngineContext;
import org.apache.hudi.common.config.HoodieMetadataConfig;
import org.apache.hudi.common.model.HoodieTableType;
@@ -51,7 +51,7 @@ import static org.junit.jupiter.api.Assertions.assertTrue;
A command using SparkLauncher needs to load the jars under lib, which are generated during mvn package.
Use an integration test instead of a unit test.
*/
public class ITTestSavepointsCommand extends AbstractShellIntegrationTest {
public class ITTestSavepointsCommand extends HoodieCLIIntegrationTestBase {
private String tablePath;
@@ -139,11 +139,7 @@ public class ITTestSavepointsCommand extends AbstractShellIntegrationTest {
HoodieTestDataGenerator.createSavepointFile(tablePath, savepoint, jsc.hadoopConfiguration());
// re-bootstrap metadata table
// delete first
String basePath = metaClient.getBasePath();
Path metadataTableBasePath = new Path(HoodieTableMetadata.getMetadataTableBasePath(basePath));
metaClient.getFs().delete(metadataTableBasePath, true);
Path metadataTableBasePath = new Path(HoodieTableMetadata.getMetadataTableBasePath(HoodieCLI.basePath));
// then bootstrap metadata table at instant 104
HoodieWriteConfig writeConfig = HoodieWriteConfig.newBuilder().withPath(HoodieCLI.basePath)
.withMetadataConfig(HoodieMetadataConfig.newBuilder().enable(true).build()).build();


@@ -25,7 +25,7 @@ import org.junit.jupiter.api.BeforeEach;
/**
* Class to initialize resources for the shell.
*/
public abstract class AbstractShellIntegrationTest extends AbstractShellBaseIntegrationTest {
public class HoodieCLIIntegrationTestBase extends HoodieCLIIntegrationTestHarness {
@Override
@BeforeEach


@@ -30,7 +30,7 @@ import org.springframework.shell.core.JLineShellComponent;
/**
* Class to start Bootstrap and JLineShellComponent.
*/
public class AbstractShellBaseIntegrationTest extends HoodieClientTestHarness {
public class HoodieCLIIntegrationTestHarness extends HoodieClientTestHarness {
private static JLineShellComponent shell;


@@ -105,5 +105,33 @@
<groupId>org.apache.parquet</groupId>
<artifactId>parquet-avro</artifactId>
</dependency>
<!-- Junit dependencies -->
<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-api</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-engine</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.junit.vintage</groupId>
<artifactId>junit-vintage-engine</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-params</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.mockito</groupId>
<artifactId>mockito-junit-jupiter</artifactId>
<scope>test</scope>
</dependency>
</dependencies>
</project>


@@ -125,5 +125,33 @@
<artifactId>hudi-java-client</artifactId>
<version>${project.version}</version>
</dependency>
<!-- Junit dependencies -->
<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-api</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-engine</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.junit.vintage</groupId>
<artifactId>junit-vintage-engine</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-params</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.mockito</groupId>
<artifactId>mockito-junit-jupiter</artifactId>
<scope>test</scope>
</dependency>
</dependencies>
</project>


@@ -292,6 +292,26 @@
<artifactId>junit-jupiter-params</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.mockito</groupId>
<artifactId>mockito-junit-jupiter</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.junit.platform</groupId>
<artifactId>junit-platform-runner</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.junit.platform</groupId>
<artifactId>junit-platform-suite-api</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.junit.platform</groupId>
<artifactId>junit-platform-commons</artifactId>
<scope>test</scope>
</dependency>
<!-- Hoodie dependencies -->
<dependency>
<groupId>org.apache.hudi</groupId>


@@ -58,6 +58,50 @@
<scope>test</scope>
<type>test-jar</type>
</dependency>
<!-- Test dependencies -->
<!-- Junit 5 dependencies -->
<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-api</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-engine</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.junit.vintage</groupId>
<artifactId>junit-vintage-engine</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-params</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.mockito</groupId>
<artifactId>mockito-junit-jupiter</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.junit.platform</groupId>
<artifactId>junit-platform-runner</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.junit.platform</groupId>
<artifactId>junit-platform-suite-api</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.junit.platform</groupId>
<artifactId>junit-platform-commons</artifactId>
<scope>test</scope>
</dependency>
</dependencies>
<build>
@@ -87,4 +131,4 @@
</plugin>
</plugins>
</build>
</project>
</project>


@@ -70,6 +70,50 @@
<scope>test</scope>
<type>test-jar</type>
</dependency>
<!-- Test dependencies -->
<!-- Junit 5 dependencies -->
<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-api</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-engine</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.junit.vintage</groupId>
<artifactId>junit-vintage-engine</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-params</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.mockito</groupId>
<artifactId>mockito-junit-jupiter</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.junit.platform</groupId>
<artifactId>junit-platform-runner</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.junit.platform</groupId>
<artifactId>junit-platform-suite-api</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.junit.platform</groupId>
<artifactId>junit-platform-commons</artifactId>
<scope>test</scope>
</dependency>
</dependencies>
<build>
@@ -99,4 +143,4 @@
</plugin>
</plugins>
</build>
</project>
</project>


@@ -83,6 +83,42 @@ See https://github.com/GoogleCloudPlatform/cloud-opensource-java/wiki/The-Google
<artifactId>junit-jupiter-api</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-engine</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.junit.vintage</groupId>
<artifactId>junit-vintage-engine</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-params</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.mockito</groupId>
<artifactId>mockito-junit-jupiter</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.junit.platform</groupId>
<artifactId>junit-platform-runner</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.junit.platform</groupId>
<artifactId>junit-platform-suite-api</artifactId>
<scope>test</scope>
</dependency>
</dependencies>
<build>


@@ -88,6 +88,13 @@
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-avro_${scala.binary.version}</artifactId>
<version>${spark.version}</version>
<scope>test</scope>
</dependency>
<!-- Hoodie -->
<dependency>
<groupId>org.apache.hudi</groupId>
@@ -162,6 +169,24 @@
<artifactId>log4j</artifactId>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-core</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-api</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-slf4j-impl</artifactId>
<scope>test</scope>
</dependency>
<!-- Hoodie - Import -->
<dependency>
<groupId>org.apache.hudi</groupId>
@@ -284,6 +309,7 @@
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<classifier>tests</classifier>
<scope>test</scope>
<exclusions>
<exclusion>
<groupId>org.mortbay.jetty</groupId>
@@ -366,36 +392,47 @@
<scope>test</scope>
</dependency>
<!-- Test -->
<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-api</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-engine</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.junit.vintage</groupId>
<artifactId>junit-vintage-engine</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-params</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.mockito</groupId>
<artifactId>mockito-junit-jupiter</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.junit.platform</groupId>
<artifactId>junit-platform-runner</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.junit.platform</groupId>
<artifactId>junit-platform-suite-api</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.junit.platform</groupId>
<artifactId>junit-platform-commons</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.scalatest</groupId>
<artifactId>scalatest_${scala.binary.version}</artifactId>


@@ -65,7 +65,7 @@ public class TestDFSHoodieTestSuiteWriterAdapter extends UtilitiesTestBase {
@BeforeAll
public static void initClass() throws Exception {
UtilitiesTestBase.initClass();
UtilitiesTestBase.initTestServices(false, false);
}
@AfterAll


@@ -58,7 +58,7 @@ public class TestFileDeltaInputWriter extends UtilitiesTestBase {
@BeforeAll
public static void initClass() throws Exception {
UtilitiesTestBase.initClass();
UtilitiesTestBase.initTestServices(false, false);
}
@AfterAll


@@ -90,7 +90,7 @@ public class TestHoodieTestSuiteJob extends UtilitiesTestBase {
@BeforeAll
public static void initClass() throws Exception {
UtilitiesTestBase.initClass();
UtilitiesTestBase.initTestServices(true, true);
// prepare the configs.
UtilitiesTestBase.Helpers.copyToDFSFromAbsolutePath(System.getProperty("user.dir") + "/.."
+ BASE_PROPERTIES_DOCKER_DEMO_RELATIVE_PATH, dfs, dfsBasePath + "/base.properties");


@@ -43,7 +43,7 @@ public class TestDFSAvroDeltaInputReader extends UtilitiesTestBase {
@BeforeAll
public static void initClass() throws Exception {
UtilitiesTestBase.initClass();
UtilitiesTestBase.initTestServices(false, false);
}
@AfterAll


@@ -51,7 +51,7 @@ public class TestDFSHoodieDatasetInputReader extends UtilitiesTestBase {
@BeforeAll
public static void initClass() throws Exception {
UtilitiesTestBase.initClass();
UtilitiesTestBase.initTestServices(false, false);
}
@AfterAll


@@ -242,6 +242,43 @@
<artifactId>junit-jupiter-api</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-engine</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.junit.vintage</groupId>
<artifactId>junit-vintage-engine</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-params</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.mockito</groupId>
<artifactId>mockito-junit-jupiter</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.junit.platform</groupId>
<artifactId>junit-platform-runner</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.junit.platform</groupId>
<artifactId>junit-platform-suite-api</artifactId>
<scope>test</scope>
</dependency>
</dependencies>
</project>


@@ -29,7 +29,6 @@ import org.apache.hudi.config.HoodieWriteConfig;
import org.apache.hudi.keygen.SimpleKeyGenerator;
import org.apache.hudi.testutils.HoodieClientTestHarness;
import org.apache.hudi.testutils.SparkDatasetTestUtils;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.junit.jupiter.api.AfterEach;
@@ -62,6 +61,7 @@ public class HoodieBulkInsertInternalWriterTestBase extends HoodieClientTestHarn
initFileSystem();
initTestDataGenerator();
initMetaClient();
initTimelineService();
}
@AfterEach
@@ -87,11 +87,11 @@ public class HoodieBulkInsertInternalWriterTestBase extends HoodieClientTestHarn
protected void assertWriteStatuses(List<HoodieInternalWriteStatus> writeStatuses, int batches, int size,
Option<List<String>> fileAbsPaths, Option<List<String>> fileNames) {
assertWriteStatuses(writeStatuses, batches, size, false, fileAbsPaths, fileNames);
assertWriteStatuses(writeStatuses, batches, size, false, fileAbsPaths, fileNames, false);
}
protected void assertWriteStatuses(List<HoodieInternalWriteStatus> writeStatuses, int batches, int size, boolean areRecordsSorted,
Option<List<String>> fileAbsPaths, Option<List<String>> fileNames) {
Option<List<String>> fileAbsPaths, Option<List<String>> fileNames, boolean isHiveStylePartitioning) {
if (areRecordsSorted) {
assertEquals(batches, writeStatuses.size());
} else {
@@ -114,7 +114,8 @@ public class HoodieBulkInsertInternalWriterTestBase extends HoodieClientTestHarn
int counter = 0;
for (HoodieInternalWriteStatus writeStatus : writeStatuses) {
// verify write status
assertEquals(HoodieTestDataGenerator.DEFAULT_PARTITION_PATHS[counter % 3], writeStatus.getPartitionPath());
String actualPartitionPathFormat = isHiveStylePartitioning ? SparkDatasetTestUtils.PARTITION_PATH_FIELD_NAME + "=%s" : "%s";
assertEquals(String.format(actualPartitionPathFormat, HoodieTestDataGenerator.DEFAULT_PARTITION_PATHS[counter % 3]), writeStatus.getPartitionPath());
if (areRecordsSorted) {
assertEquals(writeStatus.getTotalRecords(), size);
} else {
@@ -142,7 +143,7 @@ public class HoodieBulkInsertInternalWriterTestBase extends HoodieClientTestHarn
assertEquals(sizeMap.get(HoodieTestDataGenerator.DEFAULT_PARTITION_PATHS[counter % 3]), writeStat.getNumWrites());
}
assertEquals(fileId, writeStat.getFileId());
assertEquals(HoodieTestDataGenerator.DEFAULT_PARTITION_PATHS[counter++ % 3], writeStat.getPartitionPath());
assertEquals(String.format(actualPartitionPathFormat, HoodieTestDataGenerator.DEFAULT_PARTITION_PATHS[counter++ % 3]), writeStat.getPartitionPath());
assertEquals(0, writeStat.getNumDeletes());
assertEquals(0, writeStat.getNumUpdateWrites());
assertEquals(0, writeStat.getTotalWriteErrors());


@@ -0,0 +1,30 @@
###
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###
log4j.rootLogger=WARN, CONSOLE
log4j.logger.org.apache.hudi=DEBUG
log4j.logger.org.apache.hadoop.hbase=ERROR
# CONSOLE is set to be a ConsoleAppender.
log4j.appender.CONSOLE=org.apache.log4j.ConsoleAppender
# CONSOLE uses PatternLayout.
log4j.appender.CONSOLE.layout=org.apache.log4j.PatternLayout
log4j.appender.CONSOLE.layout.ConversionPattern=[%-5p] %d %c %x - %m%n
log4j.appender.CONSOLE.filter.a=org.apache.log4j.varia.LevelRangeFilter
log4j.appender.CONSOLE.filter.a.AcceptOnMatch=true
log4j.appender.CONSOLE.filter.a.LevelMin=WARN
log4j.appender.CONSOLE.filter.a.LevelMax=FATAL


@@ -0,0 +1,31 @@
###
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###
log4j.rootLogger=WARN, CONSOLE
log4j.logger.org.apache=INFO
log4j.logger.org.apache.hudi=DEBUG
log4j.logger.org.apache.hadoop.hbase=ERROR
# CONSOLE is set to be a ConsoleAppender.
log4j.appender.CONSOLE=org.apache.log4j.ConsoleAppender
# CONSOLE uses PatternLayout.
log4j.appender.CONSOLE.layout=org.apache.log4j.PatternLayout
log4j.appender.CONSOLE.layout.ConversionPattern=%-4r [%t] %-5p %c %x - %m%n
log4j.appender.CONSOLE.filter.a=org.apache.log4j.varia.LevelRangeFilter
log4j.appender.CONSOLE.filter.a.AcceptOnMatch=true
log4j.appender.CONSOLE.filter.a.LevelMin=WARN
log4j.appender.CONSOLE.filter.a.LevelMax=FATAL


@@ -32,4 +32,47 @@
<maven.compiler.target>8</maven.compiler.target>
</properties>
<dependencies>
<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-api</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-engine</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.junit.vintage</groupId>
<artifactId>junit-vintage-engine</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-params</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.mockito</groupId>
<artifactId>mockito-junit-jupiter</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.junit.platform</groupId>
<artifactId>junit-platform-runner</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.junit.platform</groupId>
<artifactId>junit-platform-suite-api</artifactId>
<scope>test</scope>
</dependency>
</dependencies>
</project>


@@ -242,11 +242,71 @@
<artifactId>junit-jupiter-api</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-engine</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.junit.vintage</groupId>
<artifactId>junit-vintage-engine</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-params</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.mockito</groupId>
<artifactId>mockito-junit-jupiter</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.junit.platform</groupId>
<artifactId>junit-platform-runner</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.junit.platform</groupId>
<artifactId>junit-platform-suite-api</artifactId>
<scope>test</scope>
</dependency>
<!-- Parquet -->
<dependency>
<groupId>org.apache.parquet</groupId>
<artifactId>parquet-avro</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-hdfs</artifactId>
<classifier>tests</classifier>
<scope>test</scope>
<!-- Need these exclusions to make sure JavaSparkContext can be setup. https://issues.apache.org/jira/browse/SPARK-1693 -->
<exclusions>
<exclusion>
<groupId>org.mortbay.jetty</groupId>
<artifactId>*</artifactId>
</exclusion>
<exclusion>
<groupId>javax.servlet.jsp</groupId>
<artifactId>*</artifactId>
</exclusion>
<exclusion>
<groupId>javax.servlet</groupId>
<artifactId>*</artifactId>
</exclusion>
</exclusions>
</dependency>
</dependencies>
</project>


@@ -18,18 +18,14 @@
package org.apache.hudi.internal;
import org.apache.hudi.common.model.HoodieRecord;
import org.apache.hudi.common.testutils.HoodieTestDataGenerator;
import org.apache.hudi.common.util.Option;
import org.apache.hudi.config.HoodieWriteConfig;
import org.apache.hudi.table.HoodieSparkTable;
import org.apache.hudi.table.HoodieTable;
import org.apache.hudi.testutils.SparkDatasetTestUtils;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.catalyst.InternalRow;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
@@ -104,7 +100,7 @@ public class TestHoodieBulkInsertDataInternalWriter extends
Option<List<String>> fileNames = Option.of(new ArrayList<>());
// verify write statuses
assertWriteStatuses(commitMetadata.getWriteStatuses(), batches, size, sorted, fileAbsPaths, fileNames);
assertWriteStatuses(commitMetadata.getWriteStatuses(), batches, size, sorted, fileAbsPaths, fileNames, false);
// verify rows
Dataset<Row> result = sqlContext.read().parquet(fileAbsPaths.get().toArray(new String[0]));
@@ -146,14 +142,11 @@ public class TestHoodieBulkInsertDataInternalWriter extends
Option<List<String>> fileNames = Option.of(new ArrayList<>());
// verify write statuses
assertWriteStatuses(commitMetadata.getWriteStatuses(), batches, size, sorted, fileAbsPaths, fileNames);
assertWriteStatuses(commitMetadata.getWriteStatuses(), batches, size, sorted, fileAbsPaths, fileNames, true);
// verify rows
Dataset<Row> result = sqlContext.read().parquet(fileAbsPaths.get().toArray(new String[0]));
assertOutput(totalInputRows, result, instantTime, fileNames, populateMetaFields);
result.collectAsList().forEach(entry -> Assertions.assertTrue(entry.getAs(HoodieRecord.PARTITION_PATH_METADATA_FIELD).toString()
.contains(SparkDatasetTestUtils.PARTITION_PATH_FIELD_NAME + "=")));
}
}
@@ -202,7 +195,7 @@ public class TestHoodieBulkInsertDataInternalWriter extends
Option<List<String>> fileAbsPaths = Option.of(new ArrayList<>());
Option<List<String>> fileNames = Option.of(new ArrayList<>());
// verify write statuses
assertWriteStatuses(commitMetadata.getWriteStatuses(), 1, size / 2, false, fileAbsPaths, fileNames);
assertWriteStatuses(commitMetadata.getWriteStatuses(), 1, size / 2, false, fileAbsPaths, fileNames, false);
// verify rows
Dataset<Row> result = sqlContext.read().parquet(fileAbsPaths.get().toArray(new String[0]));


@@ -24,7 +24,6 @@ import org.apache.hudi.common.testutils.HoodieTestDataGenerator;
import org.apache.hudi.common.util.Option;
import org.apache.hudi.config.HoodieWriteConfig;
import org.apache.hudi.testutils.HoodieClientTestUtils;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.catalyst.InternalRow;


@@ -102,7 +102,7 @@ public class TestHoodieBulkInsertDataInternalWriter extends
Option<List<String>> fileNames = Option.of(new ArrayList<>());
// verify write statuses
assertWriteStatuses(commitMetadata.getWriteStatuses(), batches, size, sorted, fileAbsPaths, fileNames);
assertWriteStatuses(commitMetadata.getWriteStatuses(), batches, size, sorted, fileAbsPaths, fileNames, false);
// verify rows
Dataset<Row> result = sqlContext.read().parquet(fileAbsPaths.get().toArray(new String[0]));


@@ -123,6 +123,42 @@
<artifactId>junit-jupiter-api</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-engine</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.junit.vintage</groupId>
<artifactId>junit-vintage-engine</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-params</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.mockito</groupId>
<artifactId>mockito-junit-jupiter</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.junit.platform</groupId>
<artifactId>junit-platform-runner</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.junit.platform</groupId>
<artifactId>junit-platform-suite-api</artifactId>
<scope>test</scope>
</dependency>
</dependencies>
<build>


@@ -115,6 +115,24 @@
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.mockito</groupId>
<artifactId>mockito-junit-jupiter</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.junit.platform</groupId>
<artifactId>junit-platform-runner</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.junit.platform</groupId>
<artifactId>junit-platform-suite-api</artifactId>
<scope>test</scope>
</dependency>
</dependencies>
<build>


@@ -97,7 +97,7 @@ public class HoodieDeltaStreamerTestBase extends UtilitiesTestBase {
@BeforeAll
public static void initClass() throws Exception {
UtilitiesTestBase.initClass(true);
UtilitiesTestBase.initTestServices(true, true);
PARQUET_SOURCE_ROOT = dfsBasePath + "/parquetFiles";
ORC_SOURCE_ROOT = dfsBasePath + "/orcFiles";
JSON_KAFKA_SOURCE_ROOT = dfsBasePath + "/jsonKafkaFiles";


@@ -23,13 +23,14 @@ import org.apache.hudi.client.WriteStatus;
import org.apache.hudi.common.config.HoodieMetadataConfig;
import org.apache.hudi.common.config.TypedProperties;
import org.apache.hudi.common.model.HoodieRecord;
import org.apache.hudi.common.table.HoodieTableMetaClient;
import org.apache.hudi.common.table.timeline.versioning.TimelineLayoutVersion;
import org.apache.hudi.common.testutils.HoodieTestDataGenerator;
import org.apache.hudi.common.util.Option;
import org.apache.hudi.common.util.collection.Pair;
import org.apache.hudi.config.HoodieCompactionConfig;
import org.apache.hudi.config.HoodieWriteConfig;
import org.apache.hudi.testutils.HoodieClientTestHarness;
import org.apache.hudi.testutils.SparkClientFunctionalTestHarness;
import org.apache.hudi.utilities.schema.SchemaProvider;
import org.apache.hudi.utilities.sources.helpers.IncrSourceHelper;
@@ -37,7 +38,6 @@ import org.apache.avro.Schema;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
@@ -50,28 +50,27 @@ import static org.apache.hudi.testutils.Assertions.assertNoWriteErrors;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
public class TestHoodieIncrSource extends HoodieClientTestHarness {
public class TestHoodieIncrSource extends SparkClientFunctionalTestHarness {
private HoodieTestDataGenerator dataGen;
private HoodieTableMetaClient metaClient;
@BeforeEach
public void setUp() throws IOException {
initResources();
}
@AfterEach
public void tearDown() throws IOException {
cleanupResources();
dataGen = new HoodieTestDataGenerator();
metaClient = getHoodieMetaClient(hadoopConf(), basePath());
}
@Test
public void testHoodieIncrSource() throws IOException {
HoodieWriteConfig writeConfig = getConfigBuilder(basePath)
HoodieWriteConfig writeConfig = getConfigBuilder(basePath(), metaClient)
.withCompactionConfig(HoodieCompactionConfig.newBuilder()
.archiveCommitsWith(2, 3).retainCommits(1).build())
.withMetadataConfig(HoodieMetadataConfig.newBuilder()
.withMaxNumDeltaCommitsBeforeCompaction(1).build())
.build();
SparkRDDWriteClient writeClient = new SparkRDDWriteClient(context, writeConfig);
SparkRDDWriteClient writeClient = getHoodieWriteClient(writeConfig);
Pair<String, List<HoodieRecord>> inserts = writeRecords(writeClient, true, null, "100");
Pair<String, List<HoodieRecord>> inserts2 = writeRecords(writeClient, true, null, "200");
Pair<String, List<HoodieRecord>> inserts3 = writeRecords(writeClient, true, null, "300");
@@ -97,15 +96,16 @@ public class TestHoodieIncrSource extends HoodieClientTestHarness {
// insert new batch and ensure the checkpoint moves
readAndAssert(IncrSourceHelper.MissingCheckpointStrategy.READ_LATEST, Option.of(inserts5.getKey()), 100, inserts6.getKey());
writeClient.close();
}
private void readAndAssert(IncrSourceHelper.MissingCheckpointStrategy missingCheckpointStrategy, Option<String> checkpointToPull, int expectedCount, String expectedCheckpoint) {
Properties properties = new Properties();
properties.setProperty("hoodie.deltastreamer.source.hoodieincr.path", basePath);
properties.setProperty("hoodie.deltastreamer.source.hoodieincr.path", basePath());
properties.setProperty("hoodie.deltastreamer.source.hoodieincr.missing.checkpoint.strategy", missingCheckpointStrategy.name());
TypedProperties typedProperties = new TypedProperties(properties);
HoodieIncrSource incrSource = new HoodieIncrSource(typedProperties, jsc, sparkSession, new TestSchemaProvider(HoodieTestDataGenerator.AVRO_SCHEMA));
HoodieIncrSource incrSource = new HoodieIncrSource(typedProperties, jsc(), spark(), new DummySchemaProvider(HoodieTestDataGenerator.AVRO_SCHEMA));
// read everything until latest
Pair<Option<Dataset<Row>>, String> batchCheckPoint = incrSource.fetchNextBatch(checkpointToPull, 500);
@@ -118,27 +118,27 @@ public class TestHoodieIncrSource extends HoodieClientTestHarness {
Assertions.assertEquals(batchCheckPoint.getRight(), expectedCheckpoint);
}
public Pair<String, List<HoodieRecord>> writeRecords(SparkRDDWriteClient writeClient, boolean insert, List<HoodieRecord> insertRecords, String commit) throws IOException {
private Pair<String, List<HoodieRecord>> writeRecords(SparkRDDWriteClient writeClient, boolean insert, List<HoodieRecord> insertRecords, String commit) throws IOException {
writeClient.startCommitWithTime(commit);
List<HoodieRecord> records = insert ? dataGen.generateInserts(commit, 100) : dataGen.generateUpdates(commit, insertRecords);
JavaRDD<WriteStatus> result = writeClient.upsert(jsc.parallelize(records, 1), commit);
JavaRDD<WriteStatus> result = writeClient.upsert(jsc().parallelize(records, 1), commit);
List<WriteStatus> statuses = result.collect();
assertNoWriteErrors(statuses);
return Pair.of(commit, records);
}
public HoodieWriteConfig.Builder getConfigBuilder(String basePath) {
private HoodieWriteConfig.Builder getConfigBuilder(String basePath, HoodieTableMetaClient metaClient) {
return HoodieWriteConfig.newBuilder().withPath(basePath).withSchema(HoodieTestDataGenerator.TRIP_EXAMPLE_SCHEMA)
.withParallelism(2, 2).withBulkInsertParallelism(2).withFinalizeWriteParallelism(2).withDeleteParallelism(2)
.withTimelineLayoutVersion(TimelineLayoutVersion.CURR_VERSION)
.forTable("test-hoodie-incr-source");
.forTable(metaClient.getTableConfig().getTableName());
}
class TestSchemaProvider extends SchemaProvider {
private static class DummySchemaProvider extends SchemaProvider {
private final Schema schema;
public TestSchemaProvider(Schema schema) {
public DummySchemaProvider(Schema schema) {
super(new TypedProperties());
this.schema = schema;
}


@@ -59,7 +59,7 @@ public class TestSqlSource extends UtilitiesTestBase {
@BeforeAll
public static void initClass() throws Exception {
UtilitiesTestBase.initClass();
UtilitiesTestBase.initTestServices(false, false);
}
@AfterAll


@@ -64,7 +64,7 @@ public abstract class TestAbstractDebeziumSource extends UtilitiesTestBase {
@BeforeAll
public static void initClass() throws Exception {
UtilitiesTestBase.initClass(false);
UtilitiesTestBase.initTestServices(false, false);
}
@AfterAll


@@ -104,30 +104,30 @@ public class UtilitiesTestBase {
protected static HiveServer2 hiveServer;
protected static HiveTestService hiveTestService;
protected static ZookeeperTestService zookeeperTestService;
private static ObjectMapper mapper = new ObjectMapper();
private static final ObjectMapper MAPPER = new ObjectMapper();
@BeforeAll
public static void initClass() throws Exception {
// Set log level to WARN for spark logs to avoid exceeding log limit in travis
public static void setLogLevel() {
Logger rootLogger = Logger.getRootLogger();
rootLogger.setLevel(Level.ERROR);
Logger.getLogger("org.apache.spark").setLevel(Level.WARN);
initClass(true);
}
public static void initClass(boolean startHiveService) throws Exception {
public static void initTestServices(boolean needsHive, boolean needsZookeeper) throws Exception {
hdfsTestService = new HdfsTestService();
zookeeperTestService = new ZookeeperTestService(hdfsTestService.getHadoopConf());
dfsCluster = hdfsTestService.start(true);
dfs = dfsCluster.getFileSystem();
dfsBasePath = dfs.getWorkingDirectory().toString();
dfs.mkdirs(new Path(dfsBasePath));
if (startHiveService) {
if (needsHive) {
hiveTestService = new HiveTestService(hdfsTestService.getHadoopConf());
hiveServer = hiveTestService.start();
clearHiveDb();
}
zookeeperTestService.start();
if (needsZookeeper) {
zookeeperTestService = new ZookeeperTestService(hdfsTestService.getHadoopConf());
zookeeperTestService.start();
}
}
@AfterAll
@@ -288,11 +288,11 @@ public class UtilitiesTestBase {
String[] lines, FileSystem fs, String targetPath) throws IOException {
Builder csvSchemaBuilder = CsvSchema.builder();
ArrayNode arrayNode = mapper.createArrayNode();
ArrayNode arrayNode = MAPPER.createArrayNode();
Arrays.stream(lines).forEachOrdered(
line -> {
try {
arrayNode.add(mapper.readValue(line, ObjectNode.class));
arrayNode.add(MAPPER.readValue(line, ObjectNode.class));
} catch (IOException e) {
throw new HoodieIOException(
"Error converting json records into CSV format: " + e.getMessage());

View File

@@ -54,7 +54,7 @@ public abstract class AbstractCloudObjectsSourceTestBase extends UtilitiesTestBase {
@BeforeAll
public static void initClass() throws Exception {
UtilitiesTestBase.initClass();
UtilitiesTestBase.initTestServices(false, false);
}
@AfterAll

View File

@@ -62,7 +62,7 @@ public abstract class AbstractDFSSourceTestBase extends UtilitiesTestBase {
@BeforeAll
public static void initClass() throws Exception {
UtilitiesTestBase.initClass();
UtilitiesTestBase.initTestServices(false, false);
}
@AfterAll

View File

@@ -49,7 +49,7 @@ public class TestSqlFileBasedTransformer extends UtilitiesTestBase {
@BeforeAll
public static void initClass() throws Exception {
UtilitiesTestBase.initClass();
UtilitiesTestBase.initTestServices(false, false);
UtilitiesTestBase.Helpers.copyToDFS(
"delta-streamer-config/sql-file-transformer.sql",
UtilitiesTestBase.dfs,

View File

@@ -35,6 +35,7 @@
<properties>
<checkstyle.skip>true</checkstyle.skip>
<main.basedir>${project.parent.basedir}</main.basedir>
<skipTests>true</skipTests>
</properties>
<build>

View File

@@ -31,6 +31,7 @@
<properties>
<checkstyle.skip>true</checkstyle.skip>
<main.basedir>${project.parent.basedir}</main.basedir>
<skipTests>true</skipTests>
</properties>
<build>

View File

@@ -31,6 +31,7 @@
<properties>
<checkstyle.skip>true</checkstyle.skip>
<main.basedir>${project.parent.basedir}</main.basedir>
<skipTests>true</skipTests>
<flink.bundle.hive.scope>provided</flink.bundle.hive.scope>
<flink.bundle.shade.prefix>org.apache.hudi.</flink.bundle.shade.prefix>
<javax.servlet.version>3.1.0</javax.servlet.version>

View File

@@ -32,6 +32,7 @@
<properties>
<checkstyle.skip>true</checkstyle.skip>
<main.basedir>${project.parent.basedir}</main.basedir>
<skipTests>true</skipTests>
</properties>
<dependencyManagement>
<dependencies>

View File

@@ -29,6 +29,7 @@
<properties>
<checkstyle.skip>true</checkstyle.skip>
<main.basedir>${project.parent.basedir}</main.basedir>
<skipTests>true</skipTests>
</properties>
<build>

View File

@@ -29,6 +29,7 @@
<properties>
<checkstyle.skip>true</checkstyle.skip>
<main.basedir>${project.parent.basedir}</main.basedir>
<skipTests>true</skipTests>
</properties>
<build>

View File

@@ -27,6 +27,7 @@
<properties>
<checkstyle.skip>true</checkstyle.skip>
<main.basedir>${project.parent.basedir}</main.basedir>
<skipTests>true</skipTests>
</properties>
<build>

View File

@@ -30,6 +30,7 @@
<properties>
<checkstyle.skip>true</checkstyle.skip>
<main.basedir>${project.parent.basedir}</main.basedir>
<skipTests>true</skipTests>
<kafka.connect.bundle.shade.prefix>org.apache.hudi.</kafka.connect.bundle.shade.prefix>
</properties>

View File

@@ -29,6 +29,7 @@
<properties>
<checkstyle.skip>true</checkstyle.skip>
<main.basedir>${project.parent.basedir}</main.basedir>
<skipTests>true</skipTests>
</properties>
<build>

View File

@@ -29,6 +29,7 @@
<properties>
<checkstyle.skip>true</checkstyle.skip>
<main.basedir>${project.parent.basedir}</main.basedir>
<skipTests>true</skipTests>
<javax.servlet.version>3.1.0</javax.servlet.version>
</properties>

View File

@@ -334,6 +334,7 @@
<properties>
<checkstyle.skip>true</checkstyle.skip>
<main.basedir>${project.parent.basedir}</main.basedir>
<skipTests>true</skipTests>
</properties>

View File

@@ -30,6 +30,7 @@
<properties>
<checkstyle.skip>true</checkstyle.skip>
<main.basedir>${project.parent.basedir}</main.basedir>
<skipTests>true</skipTests>
</properties>
<build>

View File

@@ -29,6 +29,7 @@
<properties>
<checkstyle.skip>true</checkstyle.skip>
<main.basedir>${project.parent.basedir}</main.basedir>
<skipTests>true</skipTests>
</properties>
<build>

View File

@@ -29,6 +29,7 @@
<properties>
<checkstyle.skip>true</checkstyle.skip>
<main.basedir>${project.parent.basedir}</main.basedir>
<skipTests>true</skipTests>
</properties>
<build>

pom.xml
View File

@@ -78,8 +78,8 @@
<properties>
<maven-jar-plugin.version>3.2.0</maven-jar-plugin.version>
<maven-surefire-plugin.version>3.0.0-M4</maven-surefire-plugin.version>
<maven-failsafe-plugin.version>3.0.0-M4</maven-failsafe-plugin.version>
<maven-surefire-plugin.version>2.22.2</maven-surefire-plugin.version>
<maven-failsafe-plugin.version>2.22.2</maven-failsafe-plugin.version>
<maven-shade-plugin.version>3.2.4</maven-shade-plugin.version>
<maven-javadoc-plugin.version>3.1.1</maven-javadoc-plugin.version>
<maven-compiler-plugin.version>3.8.0</maven-compiler-plugin.version>
@@ -102,12 +102,12 @@
<glassfish.version>2.17</glassfish.version>
<glassfish.el.version>3.0.1-b12</glassfish.el.version>
<parquet.version>1.10.1</parquet.version>
<junit.jupiter.version>5.7.0-M1</junit.jupiter.version>
<junit.vintage.version>5.7.0-M1</junit.vintage.version>
<junit.platform.version>1.7.0-M1</junit.platform.version>
<junit.jupiter.version>5.7.2</junit.jupiter.version>
<junit.vintage.version>5.7.2</junit.vintage.version>
<junit.platform.version>1.7.2</junit.platform.version>
<mockito.jupiter.version>3.3.3</mockito.jupiter.version>
<log4j.version>1.2.17</log4j.version>
<log4j.test.version>2.17.0</log4j.test.version>
<log4j.test.version>2.17.2</log4j.test.version>
<slf4j.version>1.7.30</slf4j.version>
<joda.version>2.9.9</joda.version>
<hadoop.version>2.10.1</hadoop.version>
@@ -339,6 +339,7 @@
<configuration>
<skip>${skipITs}</skip>
<argLine>@{argLine}</argLine>
<useSystemClassLoader>false</useSystemClassLoader>
</configuration>
</plugin>
<plugin>
@@ -371,6 +372,7 @@
${surefire-log4j.file}
</log4j.configuration>
</systemPropertyVariables>
<useSystemClassLoader>false</useSystemClassLoader>
</configuration>
</plugin>
<plugin>
@@ -1067,55 +1069,49 @@
<version>${zk-curator.version}</version>
</dependency>
<!-- Junit 5 -->
<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-api</artifactId>
<version>${junit.jupiter.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-engine</artifactId>
<version>${junit.jupiter.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.junit.vintage</groupId>
<artifactId>junit-vintage-engine</artifactId>
<version>${junit.vintage.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-params</artifactId>
<version>${junit.jupiter.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.mockito</groupId>
<artifactId>mockito-junit-jupiter</artifactId>
<scope>test</scope>
<version>${mockito.jupiter.version}</version>
</dependency>
<dependency>
<groupId>org.junit.platform</groupId>
<artifactId>junit-platform-runner</artifactId>
<version>${junit.platform.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.junit.platform</groupId>
<artifactId>junit-platform-suite-api</artifactId>
<version>${junit.platform.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.junit.platform</groupId>
<artifactId>junit-platform-commons</artifactId>
@@ -1165,6 +1161,20 @@
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-api</artifactId>
<version>${log4j.test.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-slf4j-impl</artifactId>
<version>${log4j.test.version}</version>
<scope>test</scope>
</dependency>
</dependencies>
</dependencyManagement>
<repositories>
@@ -1277,13 +1287,6 @@
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<version>${maven-surefire-plugin.version}</version>
<dependencies>
<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-engine</artifactId>
<version>${junit.jupiter.version}</version>
</dependency>
</dependencies>
<configuration combine.self="append">
<skip>${skipUTs}</skip>
<forkedProcessExitTimeoutInSeconds>120</forkedProcessExitTimeoutInSeconds>
@@ -1398,6 +1401,7 @@
<version>${maven-surefire-plugin.version}</version>
<configuration combine.self="override">
<skip>${skipUTs}</skip>
<useSystemClassLoader>false</useSystemClassLoader>
</configuration>
</plugin>
<plugin>
@@ -1411,6 +1415,7 @@
<systemPropertyVariables>
<dynamodb-local.endpoint>${dynamodb-local.endpoint}</dynamodb-local.endpoint>
</systemPropertyVariables>
<useSystemClassLoader>false</useSystemClassLoader>
</configuration>
<executions>
<execution>