1
0

[HUDI-2079] Make CLI command tests functional (#3601)

Make all tests in org.apache.hudi.cli.commands extend org.apache.hudi.cli.functional.CLIFunctionalTestHarness and tag them as "functional".

This also resolves a blocker where DFS initialization consistently failed when moving to Ubuntu 18.04.
This commit is contained in:
Raymond Xu
2021-09-06 15:53:53 -07:00
committed by GitHub
parent f218693f5d
commit cf002b6918
20 changed files with 459 additions and 247 deletions

View File

@@ -287,5 +287,20 @@
<artifactId>mockito-junit-jupiter</artifactId> <artifactId>mockito-junit-jupiter</artifactId>
<scope>test</scope> <scope>test</scope>
</dependency> </dependency>
<dependency>
<groupId>org.junit.platform</groupId>
<artifactId>junit-platform-runner</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.junit.platform</groupId>
<artifactId>junit-platform-suite-api</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.junit.platform</groupId>
<artifactId>junit-platform-commons</artifactId>
<scope>test</scope>
</dependency>
</dependencies> </dependencies>
</project> </project>

View File

@@ -44,7 +44,7 @@ public class HoodieCLI {
protected static HoodieTableMetaClient tableMetadata; protected static HoodieTableMetaClient tableMetadata;
public static HoodieTableMetaClient syncTableMetadata; public static HoodieTableMetaClient syncTableMetadata;
public static TimelineLayoutVersion layoutVersion; public static TimelineLayoutVersion layoutVersion;
private static TempViewProvider tempViewProvider; public static TempViewProvider tempViewProvider;
/** /**
* Enum for CLI state. * Enum for CLI state.
@@ -114,17 +114,4 @@ public class HoodieCLI {
return tempViewProvider; return tempViewProvider;
} }
/**
* Close tempViewProvider.
* <p/>
* For test, avoid multiple SparkContexts.
*/
public static synchronized void closeTempViewProvider() {
if (tempViewProvider != null) {
tempViewProvider.close();
tempViewProvider = null;
}
}
} }

View File

@@ -56,6 +56,11 @@ public class SparkTempViewProvider implements TempViewProvider {
} }
} }
public SparkTempViewProvider(JavaSparkContext jsc, SQLContext sqlContext) {
this.jsc = jsc;
this.sqlContext = sqlContext;
}
@Override @Override
public void createOrReplace(String tableName, List<String> headers, List<List<Comparable>> rows) { public void createOrReplace(String tableName, List<String> headers, List<List<Comparable>> rows) {
try { try {

View File

@@ -21,7 +21,7 @@ package org.apache.hudi.cli.commands;
import org.apache.hudi.cli.HoodieCLI; import org.apache.hudi.cli.HoodieCLI;
import org.apache.hudi.cli.HoodiePrintHelper; import org.apache.hudi.cli.HoodiePrintHelper;
import org.apache.hudi.cli.TableHeader; import org.apache.hudi.cli.TableHeader;
import org.apache.hudi.cli.testutils.AbstractShellIntegrationTest; import org.apache.hudi.cli.functional.CLIFunctionalTestHarness;
import org.apache.hudi.cli.testutils.HoodieTestCommitMetadataGenerator; import org.apache.hudi.cli.testutils.HoodieTestCommitMetadataGenerator;
import org.apache.hudi.cli.testutils.HoodieTestCommitUtilities; import org.apache.hudi.cli.testutils.HoodieTestCommitUtilities;
import org.apache.hudi.common.model.HoodieCommitMetadata; import org.apache.hudi.common.model.HoodieCommitMetadata;
@@ -33,13 +33,11 @@ import org.apache.hudi.config.HoodieWriteConfig;
import org.apache.hudi.table.HoodieSparkTable; import org.apache.hudi.table.HoodieSparkTable;
import org.apache.hudi.table.HoodieTimelineArchiveLog; import org.apache.hudi.table.HoodieTimelineArchiveLog;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Tag;
import org.junit.jupiter.api.Test; import org.junit.jupiter.api.Test;
import org.springframework.shell.core.CommandResult; import org.springframework.shell.core.CommandResult;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.HashMap; import java.util.HashMap;
import java.util.List; import java.util.List;
@@ -50,25 +48,24 @@ import static org.junit.jupiter.api.Assertions.assertTrue;
/** /**
* Test Cases for {@link ArchivedCommitsCommand}. * Test Cases for {@link ArchivedCommitsCommand}.
*/ */
public class TestArchivedCommitsCommand extends AbstractShellIntegrationTest { @Tag("functional")
public class TestArchivedCommitsCommand extends CLIFunctionalTestHarness {
private String tablePath; private String tablePath;
@BeforeEach @BeforeEach
public void init() throws Exception { public void init() throws Exception {
initDFS(); HoodieCLI.conf = hadoopConf();
jsc.hadoopConfiguration().addResource(dfs.getConf());
HoodieCLI.conf = dfs.getConf();
// Create table and connect // Create table and connect
String tableName = "test_table"; String tableName = tableName();
tablePath = basePath + File.separator + tableName; tablePath = tablePath(tableName);
new TableCommand().createTable( new TableCommand().createTable(
tablePath, tableName, tablePath, tableName,
"COPY_ON_WRITE", "", 1, "org.apache.hudi.common.model.HoodieAvroPayload"); "COPY_ON_WRITE", "", 1, "org.apache.hudi.common.model.HoodieAvroPayload");
metaClient = HoodieCLI.getTableMetaClient(); HoodieTableMetaClient metaClient = HoodieCLI.getTableMetaClient();
// Generate archive // Generate archive
HoodieWriteConfig cfg = HoodieWriteConfig.newBuilder().withPath(tablePath) HoodieWriteConfig cfg = HoodieWriteConfig.newBuilder().withPath(tablePath)
@@ -81,11 +78,11 @@ public class TestArchivedCommitsCommand extends AbstractShellIntegrationTest {
String timestamp = String.valueOf(i); String timestamp = String.valueOf(i);
// Requested Compaction // Requested Compaction
HoodieTestCommitMetadataGenerator.createCompactionAuxiliaryMetadata(tablePath, HoodieTestCommitMetadataGenerator.createCompactionAuxiliaryMetadata(tablePath,
new HoodieInstant(HoodieInstant.State.REQUESTED, HoodieTimeline.COMPACTION_ACTION, timestamp), dfs.getConf()); new HoodieInstant(HoodieInstant.State.REQUESTED, HoodieTimeline.COMPACTION_ACTION, timestamp), hadoopConf());
// Inflight Compaction // Inflight Compaction
HoodieTestCommitMetadataGenerator.createCompactionAuxiliaryMetadata(tablePath, HoodieTestCommitMetadataGenerator.createCompactionAuxiliaryMetadata(tablePath,
new HoodieInstant(HoodieInstant.State.INFLIGHT, HoodieTimeline.COMPACTION_ACTION, timestamp), dfs.getConf()); new HoodieInstant(HoodieInstant.State.INFLIGHT, HoodieTimeline.COMPACTION_ACTION, timestamp), hadoopConf());
HoodieTestCommitMetadataGenerator.createCommitFileWithMetadata(tablePath, timestamp, dfs.getConf()); HoodieTestCommitMetadataGenerator.createCommitFileWithMetadata(tablePath, timestamp, hadoopConf());
} }
metaClient = HoodieTableMetaClient.reload(metaClient); metaClient = HoodieTableMetaClient.reload(metaClient);
@@ -93,14 +90,9 @@ public class TestArchivedCommitsCommand extends AbstractShellIntegrationTest {
metaClient.getActiveTimeline().reload().getAllCommitsTimeline().filterCompletedInstants(); metaClient.getActiveTimeline().reload().getAllCommitsTimeline().filterCompletedInstants();
// archive // archive
HoodieSparkTable table = HoodieSparkTable.create(cfg, context, metaClient); HoodieSparkTable table = HoodieSparkTable.create(cfg, context(), metaClient);
HoodieTimelineArchiveLog archiveLog = new HoodieTimelineArchiveLog(cfg, table); HoodieTimelineArchiveLog archiveLog = new HoodieTimelineArchiveLog(cfg, table);
archiveLog.archiveIfRequired(context); archiveLog.archiveIfRequired(context());
}
@AfterEach
public void clean() throws IOException {
cleanupDFS();
} }
/** /**
@@ -108,7 +100,7 @@ public class TestArchivedCommitsCommand extends AbstractShellIntegrationTest {
*/ */
@Test @Test
public void testShowArchivedCommits() { public void testShowArchivedCommits() {
CommandResult cr = getShell().executeCommand("show archived commit stats"); CommandResult cr = shell().executeCommand("show archived commit stats");
assertTrue(cr.isSuccess()); assertTrue(cr.isSuccess());
TableHeader header = new TableHeader().addTableHeaderField("action").addTableHeaderField("instant") TableHeader header = new TableHeader().addTableHeaderField("action").addTableHeaderField("instant")
@@ -159,7 +151,7 @@ public class TestArchivedCommitsCommand extends AbstractShellIntegrationTest {
*/ */
@Test @Test
public void testShowCommits() throws Exception { public void testShowCommits() throws Exception {
CommandResult cr = getShell().executeCommand("show archived commits"); CommandResult cr = shell().executeCommand("show archived commits");
assertTrue(cr.isSuccess()); assertTrue(cr.isSuccess());
final List<Comparable[]> rows = new ArrayList<>(); final List<Comparable[]> rows = new ArrayList<>();
@@ -179,7 +171,7 @@ public class TestArchivedCommitsCommand extends AbstractShellIntegrationTest {
assertEquals(expected, got); assertEquals(expected, got);
// Test with Metadata and no limit // Test with Metadata and no limit
cr = getShell().executeCommand("show archived commits --skipMetadata false --limit -1"); cr = shell().executeCommand("show archived commits --skipMetadata false --limit -1");
assertTrue(cr.isSuccess()); assertTrue(cr.isSuccess());
rows.clear(); rows.clear();

View File

@@ -23,8 +23,9 @@ import org.apache.hudi.cli.HoodieCLI;
import org.apache.hudi.cli.HoodiePrintHelper; import org.apache.hudi.cli.HoodiePrintHelper;
import org.apache.hudi.cli.HoodieTableHeaderFields; import org.apache.hudi.cli.HoodieTableHeaderFields;
import org.apache.hudi.cli.TableHeader; import org.apache.hudi.cli.TableHeader;
import org.apache.hudi.cli.testutils.AbstractShellIntegrationTest; import org.apache.hudi.cli.functional.CLIFunctionalTestHarness;
import org.apache.hudi.cli.testutils.HoodieTestCommitMetadataGenerator; import org.apache.hudi.cli.testutils.HoodieTestCommitMetadataGenerator;
import org.apache.hudi.common.fs.FSUtils;
import org.apache.hudi.common.model.HoodieCleaningPolicy; import org.apache.hudi.common.model.HoodieCleaningPolicy;
import org.apache.hudi.common.model.HoodieTableType; import org.apache.hudi.common.model.HoodieTableType;
import org.apache.hudi.common.table.HoodieTableMetaClient; import org.apache.hudi.common.table.HoodieTableMetaClient;
@@ -37,11 +38,12 @@ import org.apache.hudi.common.testutils.HoodieTestDataGenerator;
import org.apache.hudi.common.util.Option; import org.apache.hudi.common.util.Option;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Tag;
import org.junit.jupiter.api.Test; import org.junit.jupiter.api.Test;
import org.springframework.shell.core.CommandResult; import org.springframework.shell.core.CommandResult;
import java.io.File;
import java.io.IOException; import java.io.IOException;
import java.net.URL; import java.net.URL;
import java.util.ArrayList; import java.util.ArrayList;
@@ -56,17 +58,18 @@ import static org.junit.jupiter.api.Assertions.assertTrue;
/** /**
* Test Cases for {@link CleansCommand}. * Test Cases for {@link CleansCommand}.
*/ */
public class TestCleansCommand extends AbstractShellIntegrationTest { @Tag("functional")
public class TestCleansCommand extends CLIFunctionalTestHarness {
private String tablePath;
private URL propsFilePath; private URL propsFilePath;
private HoodieTableMetaClient metaClient;
@BeforeEach @BeforeEach
public void init() throws Exception { public void init() throws Exception {
HoodieCLI.conf = jsc.hadoopConfiguration(); HoodieCLI.conf = hadoopConf();
String tableName = "test_table"; String tableName = tableName();
tablePath = basePath + File.separator + tableName; String tablePath = tablePath(tableName);
propsFilePath = TestCleansCommand.class.getClassLoader().getResource("clean.properties"); propsFilePath = TestCleansCommand.class.getClassLoader().getResource("clean.properties");
// Create table and connect // Create table and connect
@@ -79,6 +82,7 @@ public class TestCleansCommand extends AbstractShellIntegrationTest {
metaClient = HoodieCLI.getTableMetaClient(); metaClient = HoodieCLI.getTableMetaClient();
String fileId1 = UUID.randomUUID().toString(); String fileId1 = UUID.randomUUID().toString();
String fileId2 = UUID.randomUUID().toString(); String fileId2 = UUID.randomUUID().toString();
FileSystem fs = FSUtils.getFs(basePath(), hadoopConf());
HoodieTestDataGenerator.writePartitionMetadata(fs, HoodieTestDataGenerator.DEFAULT_PARTITION_PATHS, tablePath); HoodieTestDataGenerator.writePartitionMetadata(fs, HoodieTestDataGenerator.DEFAULT_PARTITION_PATHS, tablePath);
// Create four commits // Create four commits
@@ -108,11 +112,11 @@ public class TestCleansCommand extends AbstractShellIntegrationTest {
assertNotNull(propsFilePath, "Not found properties file"); assertNotNull(propsFilePath, "Not found properties file");
// First, run clean // First, run clean
SparkMain.clean(jsc, HoodieCLI.basePath, propsFilePath.getPath(), new ArrayList<>()); SparkMain.clean(jsc(), HoodieCLI.basePath, propsFilePath.getPath(), new ArrayList<>());
assertEquals(1, metaClient.getActiveTimeline().reload().getCleanerTimeline().getInstants().count(), assertEquals(1, metaClient.getActiveTimeline().reload().getCleanerTimeline().getInstants().count(),
"Loaded 1 clean and the count should match"); "Loaded 1 clean and the count should match");
CommandResult cr = getShell().executeCommand("cleans show"); CommandResult cr = shell().executeCommand("cleans show");
assertTrue(cr.isSuccess()); assertTrue(cr.isSuccess());
HoodieInstant clean = metaClient.getActiveTimeline().reload().getCleanerTimeline().getInstants().findFirst().orElse(null); HoodieInstant clean = metaClient.getActiveTimeline().reload().getCleanerTimeline().getInstants().findFirst().orElse(null);
@@ -139,18 +143,18 @@ public class TestCleansCommand extends AbstractShellIntegrationTest {
* Test case for show partitions of a clean instant. * Test case for show partitions of a clean instant.
*/ */
@Test @Test
public void testShowCleanPartitions() throws IOException { public void testShowCleanPartitions() {
// Check properties file exists. // Check properties file exists.
assertNotNull(propsFilePath, "Not found properties file"); assertNotNull(propsFilePath, "Not found properties file");
// First, run clean with two partition // First, run clean with two partition
SparkMain.clean(jsc, HoodieCLI.basePath, propsFilePath.toString(), new ArrayList<>()); SparkMain.clean(jsc(), HoodieCLI.basePath, propsFilePath.toString(), new ArrayList<>());
assertEquals(1, metaClient.getActiveTimeline().reload().getCleanerTimeline().getInstants().count(), assertEquals(1, metaClient.getActiveTimeline().reload().getCleanerTimeline().getInstants().count(),
"Loaded 1 clean and the count should match"); "Loaded 1 clean and the count should match");
HoodieInstant clean = metaClient.getActiveTimeline().reload().getCleanerTimeline().getInstants().findFirst().get(); HoodieInstant clean = metaClient.getActiveTimeline().reload().getCleanerTimeline().getInstants().findFirst().get();
CommandResult cr = getShell().executeCommand("clean showpartitions --clean " + clean.getTimestamp()); CommandResult cr = shell().executeCommand("clean showpartitions --clean " + clean.getTimestamp());
assertTrue(cr.isSuccess()); assertTrue(cr.isSuccess());
TableHeader header = new TableHeader().addTableHeaderField(HoodieTableHeaderFields.HEADER_PARTITION_PATH) TableHeader header = new TableHeader().addTableHeaderField(HoodieTableHeaderFields.HEADER_PARTITION_PATH)

View File

@@ -22,7 +22,7 @@ import org.apache.hudi.cli.HoodieCLI;
import org.apache.hudi.cli.HoodiePrintHelper; import org.apache.hudi.cli.HoodiePrintHelper;
import org.apache.hudi.cli.HoodieTableHeaderFields; import org.apache.hudi.cli.HoodieTableHeaderFields;
import org.apache.hudi.cli.TableHeader; import org.apache.hudi.cli.TableHeader;
import org.apache.hudi.cli.testutils.AbstractShellIntegrationTest; import org.apache.hudi.cli.functional.CLIFunctionalTestHarness;
import org.apache.hudi.cli.testutils.HoodieTestCommitMetadataGenerator; import org.apache.hudi.cli.testutils.HoodieTestCommitMetadataGenerator;
import org.apache.hudi.cli.testutils.HoodieTestReplaceCommitMetadataGenerator; import org.apache.hudi.cli.testutils.HoodieTestReplaceCommitMetadataGenerator;
import org.apache.hudi.common.fs.FSUtils; import org.apache.hudi.common.fs.FSUtils;
@@ -42,10 +42,12 @@ import org.apache.hudi.table.HoodieTimelineArchiveLog;
import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FileSystem;
import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Tag;
import org.junit.jupiter.api.Test; import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.EnumSource;
import org.springframework.shell.core.CommandResult; import org.springframework.shell.core.CommandResult;
import java.io.File;
import java.io.IOException; import java.io.IOException;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Arrays; import java.util.Arrays;
@@ -62,20 +64,25 @@ import static org.junit.jupiter.api.Assertions.assertTrue;
/** /**
* Test class for {@link org.apache.hudi.cli.commands.CommitsCommand}. * Test class for {@link org.apache.hudi.cli.commands.CommitsCommand}.
*/ */
public class TestCommitsCommand extends AbstractShellIntegrationTest { @Tag("functional")
public class TestCommitsCommand extends CLIFunctionalTestHarness {
private String tableName; private String tableName1;
private String tablePath; private String tableName2;
private String tablePath1;
private String tablePath2;
private HoodieTableMetaClient metaClient;
@BeforeEach @BeforeEach
public void init() throws IOException { public void init() throws IOException {
tableName = "test_table"; tableName1 = tableName("_1");
tablePath = basePath + File.separator + tableName; tableName2 = tableName("_2");
tablePath1 = tablePath(tableName1);
HoodieCLI.conf = jsc.hadoopConfiguration(); tablePath2 = tablePath(tableName2);
HoodieCLI.conf = hadoopConf();
// Create table and connect // Create table and connect
new TableCommand().createTable( new TableCommand().createTable(
tablePath, tableName, HoodieTableType.COPY_ON_WRITE.name(), tablePath1, tableName1, HoodieTableType.COPY_ON_WRITE.name(),
"", TimelineLayoutVersion.VERSION_1, "org.apache.hudi.common.model.HoodieAvroPayload"); "", TimelineLayoutVersion.VERSION_1, "org.apache.hudi.common.model.HoodieAvroPayload");
metaClient = HoodieCLI.getTableMetaClient(); metaClient = HoodieCLI.getTableMetaClient();
} }
@@ -90,7 +97,7 @@ public class TestCommitsCommand extends AbstractShellIntegrationTest {
for (Map.Entry<String, Integer[]> entry : data.entrySet()) { for (Map.Entry<String, Integer[]> entry : data.entrySet()) {
String key = entry.getKey(); String key = entry.getKey();
Integer[] value = entry.getValue(); Integer[] value = entry.getValue();
HoodieTestCommitMetadataGenerator.createCommitFileWithMetadata(tablePath, key, jsc.hadoopConfiguration(), HoodieTestCommitMetadataGenerator.createCommitFileWithMetadata(tablePath1, key, hadoopConf(),
Option.of(value[0]), Option.of(value[1])); Option.of(value[0]), Option.of(value[1]));
} }
@@ -115,14 +122,14 @@ public class TestCommitsCommand extends AbstractShellIntegrationTest {
for (Map.Entry<HoodieInstant, Integer[]> entry : commitData.entrySet()) { for (Map.Entry<HoodieInstant, Integer[]> entry : commitData.entrySet()) {
String key = entry.getKey().getTimestamp(); String key = entry.getKey().getTimestamp();
Integer[] value = entry.getValue(); Integer[] value = entry.getValue();
HoodieTestCommitMetadataGenerator.createCommitFileWithMetadata(tablePath, key, jsc.hadoopConfiguration(), HoodieTestCommitMetadataGenerator.createCommitFileWithMetadata(tablePath1, key, hadoopConf(),
Option.of(value[0]), Option.of(value[1])); Option.of(value[0]), Option.of(value[1]));
} }
for (Map.Entry<HoodieInstant, Integer[]> entry : replaceCommitData.entrySet()) { for (Map.Entry<HoodieInstant, Integer[]> entry : replaceCommitData.entrySet()) {
String key = entry.getKey().getTimestamp(); String key = entry.getKey().getTimestamp();
Integer[] value = entry.getValue(); Integer[] value = entry.getValue();
HoodieTestReplaceCommitMetadataGenerator.createReplaceCommitFileWithMetadata(tablePath, key, HoodieTestReplaceCommitMetadataGenerator.createReplaceCommitFileWithMetadata(tablePath1, key,
Option.of(value[0]), Option.of(value[1]), metaClient); Option.of(value[0]), Option.of(value[1]), metaClient);
} }
@@ -137,9 +144,9 @@ public class TestCommitsCommand extends AbstractShellIntegrationTest {
} }
private String generateExpectData(int records, Map<String, Integer[]> data) throws IOException { private String generateExpectData(int records, Map<String, Integer[]> data) throws IOException {
FileSystem fs = FileSystem.get(jsc.hadoopConfiguration()); FileSystem fs = FileSystem.get(hadoopConf());
List<String> partitionPaths = List<String> partitionPaths =
FSUtils.getAllPartitionFoldersThreeLevelsDown(fs, tablePath); FSUtils.getAllPartitionFoldersThreeLevelsDown(fs, tablePath1);
int partitions = partitionPaths.size(); int partitions = partitionPaths.size();
// default pre-commit is not null, file add always be 0 and update always be partition nums // default pre-commit is not null, file add always be 0 and update always be partition nums
@@ -183,7 +190,7 @@ public class TestCommitsCommand extends AbstractShellIntegrationTest {
public void testShowCommits() throws Exception { public void testShowCommits() throws Exception {
Map<String, Integer[]> data = generateData(); Map<String, Integer[]> data = generateData();
CommandResult cr = getShell().executeCommand("commits show"); CommandResult cr = shell().executeCommand("commits show");
assertTrue(cr.isSuccess()); assertTrue(cr.isSuccess());
String expected = generateExpectData(1, data); String expected = generateExpectData(1, data);
@@ -198,7 +205,7 @@ public class TestCommitsCommand extends AbstractShellIntegrationTest {
@Test @Test
public void testShowArchivedCommits() throws Exception { public void testShowArchivedCommits() throws Exception {
// Generate archive // Generate archive
HoodieWriteConfig cfg = HoodieWriteConfig.newBuilder().withPath(tablePath) HoodieWriteConfig cfg = HoodieWriteConfig.newBuilder().withPath(tablePath1)
.withSchema(HoodieTestCommitMetadataGenerator.TRIP_EXAMPLE_SCHEMA).withParallelism(2, 2) .withSchema(HoodieTestCommitMetadataGenerator.TRIP_EXAMPLE_SCHEMA).withParallelism(2, 2)
.withCompactionConfig(HoodieCompactionConfig.newBuilder().retainCommits(1).archiveCommitsWith(2, 3).build()) .withCompactionConfig(HoodieCompactionConfig.newBuilder().retainCommits(1).archiveCommitsWith(2, 3).build())
.forTable("test-trip-table").build(); .forTable("test-trip-table").build();
@@ -213,17 +220,17 @@ public class TestCommitsCommand extends AbstractShellIntegrationTest {
for (Map.Entry<String, Integer[]> entry : data.entrySet()) { for (Map.Entry<String, Integer[]> entry : data.entrySet()) {
String key = entry.getKey(); String key = entry.getKey();
Integer[] value = entry.getValue(); Integer[] value = entry.getValue();
HoodieTestCommitMetadataGenerator.createCommitFileWithMetadata(tablePath, key, jsc.hadoopConfiguration(), HoodieTestCommitMetadataGenerator.createCommitFileWithMetadata(tablePath1, key, hadoopConf(),
Option.of(value[0]), Option.of(value[1])); Option.of(value[0]), Option.of(value[1]));
} }
// archive // archive
metaClient = HoodieTableMetaClient.reload(HoodieCLI.getTableMetaClient()); metaClient = HoodieTableMetaClient.reload(HoodieCLI.getTableMetaClient());
HoodieSparkTable table = HoodieSparkTable.create(cfg, context, metaClient); HoodieSparkTable table = HoodieSparkTable.create(cfg, context(), metaClient);
HoodieTimelineArchiveLog archiveLog = new HoodieTimelineArchiveLog(cfg, table); HoodieTimelineArchiveLog archiveLog = new HoodieTimelineArchiveLog(cfg, table);
archiveLog.archiveIfRequired(context); archiveLog.archiveIfRequired(context());
CommandResult cr = getShell().executeCommand(String.format("commits showarchived --startTs %s --endTs %s", "100", "104")); CommandResult cr = shell().executeCommand(String.format("commits showarchived --startTs %s --endTs %s", "100", "104"));
assertTrue(cr.isSuccess()); assertTrue(cr.isSuccess());
// archived 101 and 102 instant, generate expect data // archived 101 and 102 instant, generate expect data
@@ -247,7 +254,7 @@ public class TestCommitsCommand extends AbstractShellIntegrationTest {
Map<String, Integer[]> data = generateData(); Map<String, Integer[]> data = generateData();
String commitInstant = "101"; String commitInstant = "101";
CommandResult cr = getShell().executeCommand(String.format("commit showpartitions --commit %s", commitInstant)); CommandResult cr = shell().executeCommand(String.format("commit showpartitions --commit %s", commitInstant));
assertTrue(cr.isSuccess()); assertTrue(cr.isSuccess());
Integer[] value = data.get(commitInstant); Integer[] value = data.get(commitInstant);
@@ -282,7 +289,7 @@ public class TestCommitsCommand extends AbstractShellIntegrationTest {
Map<HoodieInstant, Integer[]> data = generateMixedData(); Map<HoodieInstant, Integer[]> data = generateMixedData();
for (HoodieInstant commitInstant : data.keySet()) { for (HoodieInstant commitInstant : data.keySet()) {
CommandResult cr = getShell().executeCommand(String.format("commit showpartitions --commit %s", commitInstant.getTimestamp())); CommandResult cr = shell().executeCommand(String.format("commit showpartitions --commit %s", commitInstant.getTimestamp()));
assertTrue(cr.isSuccess()); assertTrue(cr.isSuccess());
@@ -322,7 +329,7 @@ public class TestCommitsCommand extends AbstractShellIntegrationTest {
Map<String, Integer[]> data = generateData(); Map<String, Integer[]> data = generateData();
String commitInstant = "101"; String commitInstant = "101";
CommandResult cr = getShell().executeCommand(String.format("commit showfiles --commit %s", commitInstant)); CommandResult cr = shell().executeCommand(String.format("commit showfiles --commit %s", commitInstant));
assertTrue(cr.isSuccess()); assertTrue(cr.isSuccess());
Integer[] value = data.get(commitInstant); Integer[] value = data.get(commitInstant);
@@ -355,7 +362,7 @@ public class TestCommitsCommand extends AbstractShellIntegrationTest {
Map<HoodieInstant, Integer[]> data = generateMixedData(); Map<HoodieInstant, Integer[]> data = generateMixedData();
for (HoodieInstant commitInstant : data.keySet()) { for (HoodieInstant commitInstant : data.keySet()) {
CommandResult cr = getShell().executeCommand(String.format("commit showfiles --commit %s", commitInstant.getTimestamp())); CommandResult cr = shell().executeCommand(String.format("commit showfiles --commit %s", commitInstant.getTimestamp()));
assertTrue(cr.isSuccess()); assertTrue(cr.isSuccess());
Integer[] value = data.get(commitInstant); Integer[] value = data.get(commitInstant);
@@ -387,56 +394,53 @@ public class TestCommitsCommand extends AbstractShellIntegrationTest {
/** /**
* Test case of 'commits compare' command. * Test case of 'commits compare' command.
*/ */
@Test @ParameterizedTest
public void testCompareCommits() throws Exception { @EnumSource(HoodieTableType.class)
public void testCompareCommits(HoodieTableType tableType) throws Exception {
Map<String, Integer[]> data = generateData(); Map<String, Integer[]> data = generateData();
HoodieTestUtils.init(hadoopConf(), tablePath2, tableType);
String tableName2 = "test_table2";
String tablePath2 = basePath + File.separator + tableName2;
HoodieTestUtils.init(jsc.hadoopConfiguration(), tablePath2, getTableType());
data.remove("102"); data.remove("102");
for (Map.Entry<String, Integer[]> entry : data.entrySet()) { for (Map.Entry<String, Integer[]> entry : data.entrySet()) {
String key = entry.getKey(); String key = entry.getKey();
Integer[] value = entry.getValue(); Integer[] value = entry.getValue();
HoodieTestCommitMetadataGenerator.createCommitFileWithMetadata(tablePath2, key, jsc.hadoopConfiguration(), HoodieTestCommitMetadataGenerator.createCommitFileWithMetadata(tablePath2, key, hadoopConf(),
Option.of(value[0]), Option.of(value[1])); Option.of(value[0]), Option.of(value[1]));
} }
CommandResult cr = getShell().executeCommand(String.format("commits compare --path %s", tablePath2)); CommandResult cr = shell().executeCommand(String.format("commits compare --path %s", tablePath2));
assertTrue(cr.isSuccess()); assertTrue(cr.isSuccess());
// the latest instant of test_table2 is 101 // the latest instant of test_table2 is 101
List<String> commitsToCatchup = metaClient.getActiveTimeline().findInstantsAfter("101", Integer.MAX_VALUE) List<String> commitsToCatchup = metaClient.getActiveTimeline().findInstantsAfter("101", Integer.MAX_VALUE)
.getInstants().map(HoodieInstant::getTimestamp).collect(Collectors.toList()); .getInstants().map(HoodieInstant::getTimestamp).collect(Collectors.toList());
String expected = String.format("Source %s is ahead by %d commits. Commits to catch up - %s", String expected = String.format("Source %s is ahead by %d commits. Commits to catch up - %s",
tableName, commitsToCatchup.size(), commitsToCatchup); tableName1, commitsToCatchup.size(), commitsToCatchup);
assertEquals(expected, cr.getResult().toString()); assertEquals(expected, cr.getResult().toString());
} }
/** /**
* Test case of 'commits sync' command. * Test case of 'commits sync' command.
*/ */
@Test @ParameterizedTest
public void testSyncCommits() throws Exception { @EnumSource(HoodieTableType.class)
public void testSyncCommits(HoodieTableType tableType) throws Exception {
Map<String, Integer[]> data = generateData(); Map<String, Integer[]> data = generateData();
String tableName2 = "test_table2"; HoodieTestUtils.init(hadoopConf(), tablePath2, tableType, tableName2);
String tablePath2 = basePath + File.separator + tableName2;
HoodieTestUtils.init(jsc.hadoopConfiguration(), tablePath2, getTableType(), tableName2);
data.remove("102"); data.remove("102");
for (Map.Entry<String, Integer[]> entry : data.entrySet()) { for (Map.Entry<String, Integer[]> entry : data.entrySet()) {
String key = entry.getKey(); String key = entry.getKey();
Integer[] value = entry.getValue(); Integer[] value = entry.getValue();
HoodieTestCommitMetadataGenerator.createCommitFileWithMetadata(tablePath2, key, jsc.hadoopConfiguration(), HoodieTestCommitMetadataGenerator.createCommitFileWithMetadata(tablePath2, key, hadoopConf(),
Option.of(value[0]), Option.of(value[1])); Option.of(value[0]), Option.of(value[1]));
} }
CommandResult cr = getShell().executeCommand(String.format("commits sync --path %s", tablePath2)); CommandResult cr = shell().executeCommand(String.format("commits sync --path %s", tablePath2));
assertTrue(cr.isSuccess()); assertTrue(cr.isSuccess());
String expected = String.format("Load sync state between %s and %s", tableName, tableName2); String expected = String.format("Load sync state between %s and %s", tableName1, tableName2);
assertEquals(expected, cr.getResult().toString()); assertEquals(expected, cr.getResult().toString());
} }
} }

View File

@@ -22,7 +22,7 @@ import org.apache.hudi.avro.model.HoodieCompactionPlan;
import org.apache.hudi.cli.HoodieCLI; import org.apache.hudi.cli.HoodieCLI;
import org.apache.hudi.cli.HoodiePrintHelper; import org.apache.hudi.cli.HoodiePrintHelper;
import org.apache.hudi.cli.TableHeader; import org.apache.hudi.cli.TableHeader;
import org.apache.hudi.cli.testutils.AbstractShellIntegrationTest; import org.apache.hudi.cli.functional.CLIFunctionalTestHarness;
import org.apache.hudi.cli.testutils.HoodieTestCommitMetadataGenerator; import org.apache.hudi.cli.testutils.HoodieTestCommitMetadataGenerator;
import org.apache.hudi.common.model.HoodieAvroPayload; import org.apache.hudi.common.model.HoodieAvroPayload;
import org.apache.hudi.common.model.HoodieTableType; import org.apache.hudi.common.model.HoodieTableType;
@@ -39,7 +39,9 @@ import org.apache.hudi.config.HoodieWriteConfig;
import org.apache.hudi.exception.HoodieException; import org.apache.hudi.exception.HoodieException;
import org.apache.hudi.table.HoodieSparkTable; import org.apache.hudi.table.HoodieSparkTable;
import org.apache.hudi.table.HoodieTimelineArchiveLog; import org.apache.hudi.table.HoodieTimelineArchiveLog;
import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Tag;
import org.junit.jupiter.api.Test; import org.junit.jupiter.api.Test;
import org.springframework.shell.core.CommandResult; import org.springframework.shell.core.CommandResult;
@@ -60,15 +62,16 @@ import static org.junit.jupiter.api.Assertions.assertThrows;
/** /**
* Test Cases for {@link CompactionCommand}. * Test Cases for {@link CompactionCommand}.
*/ */
public class TestCompactionCommand extends AbstractShellIntegrationTest { @Tag("functional")
public class TestCompactionCommand extends CLIFunctionalTestHarness {
private String tableName; private String tableName;
private String tablePath; private String tablePath;
@BeforeEach @BeforeEach
public void init() { public void init() {
tableName = "test_table"; tableName = tableName();
tablePath = basePath + tableName; tablePath = tablePath(tableName);
} }
@Test @Test
@@ -97,7 +100,7 @@ public class TestCompactionCommand extends AbstractShellIntegrationTest {
HoodieCLI.getTableMetaClient().reloadActiveTimeline(); HoodieCLI.getTableMetaClient().reloadActiveTimeline();
CommandResult cr = getShell().executeCommand("compactions show all"); CommandResult cr = shell().executeCommand("compactions show all");
System.out.println(cr.getResult().toString()); System.out.println(cr.getResult().toString());
TableHeader header = new TableHeader().addTableHeaderField("Compaction Instant Time").addTableHeaderField("State") TableHeader header = new TableHeader().addTableHeaderField("Compaction Instant Time").addTableHeaderField("State")
@@ -129,7 +132,7 @@ public class TestCompactionCommand extends AbstractShellIntegrationTest {
HoodieCLI.getTableMetaClient().reloadActiveTimeline(); HoodieCLI.getTableMetaClient().reloadActiveTimeline();
CommandResult cr = getShell().executeCommand("compaction show --instant 001"); CommandResult cr = shell().executeCommand("compaction show --instant 001");
System.out.println(cr.getResult().toString()); System.out.println(cr.getResult().toString());
} }
@@ -148,7 +151,6 @@ public class TestCompactionCommand extends AbstractShellIntegrationTest {
new HoodieInstant(HoodieInstant.State.INFLIGHT, COMPACTION_ACTION, timestamp), Option.empty()); new HoodieInstant(HoodieInstant.State.INFLIGHT, COMPACTION_ACTION, timestamp), Option.empty());
}); });
metaClient = HoodieTableMetaClient.reload(HoodieCLI.getTableMetaClient());
} }
private void generateArchive() throws IOException { private void generateArchive() throws IOException {
@@ -158,10 +160,10 @@ public class TestCompactionCommand extends AbstractShellIntegrationTest {
.withCompactionConfig(HoodieCompactionConfig.newBuilder().retainCommits(1).archiveCommitsWith(2, 3).build()) .withCompactionConfig(HoodieCompactionConfig.newBuilder().retainCommits(1).archiveCommitsWith(2, 3).build())
.forTable("test-trip-table").build(); .forTable("test-trip-table").build();
// archive // archive
metaClient = HoodieTableMetaClient.reload(HoodieCLI.getTableMetaClient()); HoodieTableMetaClient metaClient = HoodieTableMetaClient.reload(HoodieCLI.getTableMetaClient());
HoodieSparkTable table = HoodieSparkTable.create(cfg, context, metaClient); HoodieSparkTable table = HoodieSparkTable.create(cfg, context(), metaClient);
HoodieTimelineArchiveLog archiveLog = new HoodieTimelineArchiveLog(cfg, table); HoodieTimelineArchiveLog archiveLog = new HoodieTimelineArchiveLog(cfg, table);
archiveLog.archiveIfRequired(context); archiveLog.archiveIfRequired(context());
} }
/** /**
@@ -173,7 +175,7 @@ public class TestCompactionCommand extends AbstractShellIntegrationTest {
generateArchive(); generateArchive();
CommandResult cr = getShell().executeCommand("compactions showarchived --startTs 001 --endTs 005"); CommandResult cr = shell().executeCommand("compactions showarchived --startTs 001 --endTs 005");
// generate result // generate result
Map<String, Integer> fileMap = new HashMap<>(); Map<String, Integer> fileMap = new HashMap<>();
@@ -207,7 +209,7 @@ public class TestCompactionCommand extends AbstractShellIntegrationTest {
generateArchive(); generateArchive();
CommandResult cr = getShell().executeCommand("compaction showarchived --instant " + instance); CommandResult cr = shell().executeCommand("compaction showarchived --instant " + instance);
// generate expected // generate expected
String expected = new CompactionCommand().printCompaction(plan, "", false, -1, false); String expected = new CompactionCommand().printCompaction(plan, "", false, -1, false);

View File

@@ -22,7 +22,7 @@ import org.apache.hudi.cli.HoodieCLI;
import org.apache.hudi.cli.HoodiePrintHelper; import org.apache.hudi.cli.HoodiePrintHelper;
import org.apache.hudi.cli.HoodieTableHeaderFields; import org.apache.hudi.cli.HoodieTableHeaderFields;
import org.apache.hudi.cli.TableHeader; import org.apache.hudi.cli.TableHeader;
import org.apache.hudi.cli.testutils.AbstractShellIntegrationTest; import org.apache.hudi.cli.functional.CLIFunctionalTestHarness;
import org.apache.hudi.cli.testutils.HoodieTestCommitMetadataGenerator; import org.apache.hudi.cli.testutils.HoodieTestCommitMetadataGenerator;
import org.apache.hudi.common.fs.FSUtils; import org.apache.hudi.common.fs.FSUtils;
import org.apache.hudi.common.model.FileSlice; import org.apache.hudi.common.model.FileSlice;
@@ -34,6 +34,7 @@ import org.apache.hudi.common.table.view.SyncableFileSystemView;
import org.apache.hudi.common.util.NumericUtils; import org.apache.hudi.common.util.NumericUtils;
import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Tag;
import org.junit.jupiter.api.Test; import org.junit.jupiter.api.Test;
import org.springframework.shell.core.CommandResult; import org.springframework.shell.core.CommandResult;
@@ -55,23 +56,24 @@ import static org.junit.jupiter.api.Assertions.assertTrue;
/** /**
* Test class for {@link FileSystemViewCommand}. * Test class for {@link FileSystemViewCommand}.
*/ */
public class TestFileSystemViewCommand extends AbstractShellIntegrationTest { @Tag("functional")
public class TestFileSystemViewCommand extends CLIFunctionalTestHarness {
private String partitionPath; private String partitionPath;
private SyncableFileSystemView fsView; private SyncableFileSystemView fsView;
@BeforeEach @BeforeEach
public void init() throws IOException { public void init() throws IOException {
HoodieCLI.conf = jsc.hadoopConfiguration(); HoodieCLI.conf = hadoopConf();
// Create table and connect // Create table and connect
String tableName = "test_table"; String tableName = tableName();
String tablePath = Paths.get(basePath, tableName).toString(); String tablePath = tablePath(tableName);
new TableCommand().createTable( new TableCommand().createTable(
tablePath, tableName, tablePath, tableName,
"COPY_ON_WRITE", "", 1, "org.apache.hudi.common.model.HoodieAvroPayload"); "COPY_ON_WRITE", "", 1, "org.apache.hudi.common.model.HoodieAvroPayload");
metaClient = HoodieCLI.getTableMetaClient(); HoodieTableMetaClient metaClient = HoodieCLI.getTableMetaClient();
partitionPath = HoodieTestCommitMetadataGenerator.DEFAULT_FIRST_PARTITION_PATH; partitionPath = HoodieTestCommitMetadataGenerator.DEFAULT_FIRST_PARTITION_PATH;
String fullPartitionPath = Paths.get(tablePath, partitionPath).toString(); String fullPartitionPath = Paths.get(tablePath, partitionPath).toString();
@@ -110,7 +112,7 @@ public class TestFileSystemViewCommand extends AbstractShellIntegrationTest {
@Test @Test
public void testShowCommits() { public void testShowCommits() {
// Test default show fsview all // Test default show fsview all
CommandResult cr = getShell().executeCommand("show fsview all"); CommandResult cr = shell().executeCommand("show fsview all");
assertTrue(cr.isSuccess()); assertTrue(cr.isSuccess());
// Get all file groups // Get all file groups
@@ -158,7 +160,7 @@ public class TestFileSystemViewCommand extends AbstractShellIntegrationTest {
@Test @Test
public void testShowCommitsWithSpecifiedValues() { public void testShowCommitsWithSpecifiedValues() {
// Test command with options, baseFileOnly and maxInstant is 2 // Test command with options, baseFileOnly and maxInstant is 2
CommandResult cr = getShell().executeCommand("show fsview all --baseFileOnly true --maxInstant 2"); CommandResult cr = shell().executeCommand("show fsview all --baseFileOnly true --maxInstant 2");
assertTrue(cr.isSuccess()); assertTrue(cr.isSuccess());
List<Comparable[]> rows = new ArrayList<>(); List<Comparable[]> rows = new ArrayList<>();
@@ -201,7 +203,7 @@ public class TestFileSystemViewCommand extends AbstractShellIntegrationTest {
@Test @Test
public void testShowLatestFileSlices() { public void testShowLatestFileSlices() {
// Test show with partition path '2016/03/15' // Test show with partition path '2016/03/15'
CommandResult cr = getShell().executeCommand("show fsview latest --partitionPath " + partitionPath); CommandResult cr = shell().executeCommand("show fsview latest --partitionPath " + partitionPath);
assertTrue(cr.isSuccess()); assertTrue(cr.isSuccess());
Stream<FileSlice> fileSlice = fsView.getLatestFileSlices(partitionPath); Stream<FileSlice> fileSlice = fsView.getLatestFileSlices(partitionPath);

View File

@@ -23,9 +23,10 @@ import org.apache.hudi.cli.HoodieCLI;
import org.apache.hudi.cli.HoodiePrintHelper; import org.apache.hudi.cli.HoodiePrintHelper;
import org.apache.hudi.cli.HoodieTableHeaderFields; import org.apache.hudi.cli.HoodieTableHeaderFields;
import org.apache.hudi.cli.TableHeader; import org.apache.hudi.cli.TableHeader;
import org.apache.hudi.cli.testutils.AbstractShellIntegrationTest; import org.apache.hudi.cli.functional.CLIFunctionalTestHarness;
import org.apache.hudi.cli.testutils.HoodieTestCommitMetadataGenerator; import org.apache.hudi.cli.testutils.HoodieTestCommitMetadataGenerator;
import org.apache.hudi.common.config.HoodieCommonConfig; import org.apache.hudi.common.config.HoodieCommonConfig;
import org.apache.hudi.common.fs.FSUtils;
import org.apache.hudi.common.model.HoodieLogFile; import org.apache.hudi.common.model.HoodieLogFile;
import org.apache.hudi.common.model.HoodieRecord; import org.apache.hudi.common.model.HoodieRecord;
import org.apache.hudi.common.model.HoodieRecordPayload; import org.apache.hudi.common.model.HoodieRecordPayload;
@@ -44,8 +45,11 @@ import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.avro.Schema; import org.apache.avro.Schema;
import org.apache.avro.generic.IndexedRecord; import org.apache.avro.generic.IndexedRecord;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.Path;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Tag;
import org.junit.jupiter.api.Test; import org.junit.jupiter.api.Test;
import org.springframework.shell.core.CommandResult; import org.springframework.shell.core.CommandResult;
@@ -70,34 +74,35 @@ import static org.junit.jupiter.api.Assertions.assertTrue;
/** /**
* Test Cases for {@link HoodieLogFileCommand}. * Test Cases for {@link HoodieLogFileCommand}.
*/ */
public class TestHoodieLogFileCommand extends AbstractShellIntegrationTest { @Tag("functional")
public class TestHoodieLogFileCommand extends CLIFunctionalTestHarness {
private String partitionPath; private String partitionPath;
private HoodieAvroDataBlock dataBlock; private HoodieAvroDataBlock dataBlock;
private String tablePath; private String tablePath;
private FileSystem fs;
private static final String INSTANT_TIME = "100"; private static final String INSTANT_TIME = "100";
@BeforeEach @BeforeEach
public void init() throws IOException, InterruptedException, URISyntaxException { public void init() throws IOException, InterruptedException, URISyntaxException {
HoodieCLI.conf = jsc.hadoopConfiguration(); HoodieCLI.conf = hadoopConf();
// Create table and connect // Create table and connect
String tableName = "test_table"; String tableName = tableName();
tablePath = basePath + File.separator + tableName; tablePath = tablePath(tableName);
partitionPath = tablePath + File.separator + HoodieTestCommitMetadataGenerator.DEFAULT_FIRST_PARTITION_PATH; partitionPath = Paths.get(tablePath, HoodieTestCommitMetadataGenerator.DEFAULT_FIRST_PARTITION_PATH).toString();
new TableCommand().createTable( new TableCommand().createTable(
tablePath, tableName, HoodieTableType.MERGE_ON_READ.name(), tablePath, tableName, HoodieTableType.MERGE_ON_READ.name(),
"", TimelineLayoutVersion.VERSION_1, "org.apache.hudi.common.model.HoodieAvroPayload"); "", TimelineLayoutVersion.VERSION_1, "org.apache.hudi.common.model.HoodieAvroPayload");
Files.createDirectories(Paths.get(partitionPath)); Files.createDirectories(Paths.get(partitionPath));
fs = FSUtils.getFs(tablePath, hadoopConf());
HoodieLogFormat.Writer writer = null; try (HoodieLogFormat.Writer writer = HoodieLogFormat.newWriterBuilder()
try { .onParentPath(new Path(partitionPath))
writer =
HoodieLogFormat.newWriterBuilder().onParentPath(new Path(partitionPath))
.withFileExtension(HoodieLogFile.DELTA_EXTENSION) .withFileExtension(HoodieLogFile.DELTA_EXTENSION)
.withFileId("test-log-fileid1").overBaseCommit("100").withFs(fs).build(); .withFileId("test-log-fileid1").overBaseCommit("100").withFs(fs).build()) {
// write data to file // write data to file
List<IndexedRecord> records = SchemaTestUtil.generateTestRecords(0, 100); List<IndexedRecord> records = SchemaTestUtil.generateTestRecords(0, 100);
@@ -106,11 +111,12 @@ public class TestHoodieLogFileCommand extends AbstractShellIntegrationTest {
header.put(HoodieLogBlock.HeaderMetadataType.SCHEMA, getSimpleSchema().toString()); header.put(HoodieLogBlock.HeaderMetadataType.SCHEMA, getSimpleSchema().toString());
dataBlock = new HoodieAvroDataBlock(records, header); dataBlock = new HoodieAvroDataBlock(records, header);
writer.appendBlock(dataBlock); writer.appendBlock(dataBlock);
} finally {
if (writer != null) {
writer.close();
} }
} }
@AfterEach
public void cleanUp() throws IOException {
fs.close();
} }
/** /**
@@ -118,7 +124,7 @@ public class TestHoodieLogFileCommand extends AbstractShellIntegrationTest {
*/ */
@Test @Test
public void testShowLogFileCommits() throws JsonProcessingException { public void testShowLogFileCommits() throws JsonProcessingException {
CommandResult cr = getShell().executeCommand("show logfile metadata --logFilePathPattern " + partitionPath + "/*"); CommandResult cr = shell().executeCommand("show logfile metadata --logFilePathPattern " + partitionPath + "/*");
assertTrue(cr.isSuccess()); assertTrue(cr.isSuccess());
TableHeader header = new TableHeader().addTableHeaderField(HoodieTableHeaderFields.HEADER_INSTANT_TIME) TableHeader header = new TableHeader().addTableHeaderField(HoodieTableHeaderFields.HEADER_INSTANT_TIME)
@@ -146,7 +152,7 @@ public class TestHoodieLogFileCommand extends AbstractShellIntegrationTest {
*/ */
@Test @Test
public void testShowLogFileRecords() throws IOException, URISyntaxException { public void testShowLogFileRecords() throws IOException, URISyntaxException {
CommandResult cr = getShell().executeCommand("show logfile records --logFilePathPattern " + partitionPath + "/*"); CommandResult cr = shell().executeCommand("show logfile records --logFilePathPattern " + partitionPath + "/*");
assertTrue(cr.isSuccess()); assertTrue(cr.isSuccess());
// construct expect result, get 10 records. // construct expect result, get 10 records.
@@ -191,7 +197,7 @@ public class TestHoodieLogFileCommand extends AbstractShellIntegrationTest {
} }
} }
CommandResult cr = getShell().executeCommand("show logfile records --logFilePathPattern " CommandResult cr = shell().executeCommand("show logfile records --logFilePathPattern "
+ partitionPath + "/* --mergeRecords true"); + partitionPath + "/* --mergeRecords true");
assertTrue(cr.isSuccess()); assertTrue(cr.isSuccess());

View File

@@ -21,7 +21,7 @@ package org.apache.hudi.cli.commands;
import org.apache.hudi.cli.HoodieCLI; import org.apache.hudi.cli.HoodieCLI;
import org.apache.hudi.cli.HoodiePrintHelper; import org.apache.hudi.cli.HoodiePrintHelper;
import org.apache.hudi.cli.HoodieTableHeaderFields; import org.apache.hudi.cli.HoodieTableHeaderFields;
import org.apache.hudi.cli.testutils.AbstractShellIntegrationTest; import org.apache.hudi.cli.functional.CLIFunctionalTestHarness;
import org.apache.hudi.cli.testutils.HoodieTestCommitMetadataGenerator; import org.apache.hudi.cli.testutils.HoodieTestCommitMetadataGenerator;
import org.apache.hudi.common.fs.FSUtils; import org.apache.hudi.common.fs.FSUtils;
import org.apache.hudi.common.model.HoodieTableType; import org.apache.hudi.common.model.HoodieTableType;
@@ -31,8 +31,11 @@ import org.apache.hudi.common.table.timeline.versioning.TimelineLayoutVersion;
import org.apache.hudi.common.testutils.HoodieTestDataGenerator; import org.apache.hudi.common.testutils.HoodieTestDataGenerator;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.Path;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Tag;
import org.junit.jupiter.api.Test; import org.junit.jupiter.api.Test;
import org.springframework.shell.core.CommandResult; import org.springframework.shell.core.CommandResult;
@@ -55,14 +58,17 @@ import static org.junit.jupiter.api.Assertions.assertTrue;
/** /**
* Test class for {@link RepairsCommand}. * Test class for {@link RepairsCommand}.
*/ */
public class TestRepairsCommand extends AbstractShellIntegrationTest { @Tag("functional")
public class TestRepairsCommand extends CLIFunctionalTestHarness {
private String tablePath; private String tablePath;
private FileSystem fs;
@BeforeEach @BeforeEach
public void init() throws IOException { public void init() throws IOException {
String tableName = "test_table"; String tableName = tableName();
tablePath = basePath + File.separator + tableName; tablePath = tablePath(tableName);
fs = FSUtils.getFs(tablePath, hadoopConf());
// Create table and connect // Create table and connect
new TableCommand().createTable( new TableCommand().createTable(
@@ -70,24 +76,29 @@ public class TestRepairsCommand extends AbstractShellIntegrationTest {
HoodieTableConfig.ARCHIVELOG_FOLDER.defaultValue(), TimelineLayoutVersion.VERSION_1, "org.apache.hudi.common.model.HoodieAvroPayload"); HoodieTableConfig.ARCHIVELOG_FOLDER.defaultValue(), TimelineLayoutVersion.VERSION_1, "org.apache.hudi.common.model.HoodieAvroPayload");
} }
@AfterEach
public void cleanUp() throws IOException {
fs.close();
}
/** /**
* Test case for dry run 'repair addpartitionmeta'. * Test case for dry run 'repair addpartitionmeta'.
*/ */
@Test @Test
public void testAddPartitionMetaWithDryRun() throws IOException { public void testAddPartitionMetaWithDryRun() throws IOException {
// create commit instant // create commit instant
Files.createFile(Paths.get(tablePath + "/.hoodie/100.commit")); Files.createFile(Paths.get(tablePath, ".hoodie", "100.commit"));
// create partition path // create partition path
String partition1 = tablePath + File.separator + HoodieTestDataGenerator.DEFAULT_FIRST_PARTITION_PATH; String partition1 = Paths.get(tablePath, HoodieTestDataGenerator.DEFAULT_FIRST_PARTITION_PATH).toString();
String partition2 = tablePath + File.separator + HoodieTestDataGenerator.DEFAULT_SECOND_PARTITION_PATH; String partition2 = Paths.get(tablePath, HoodieTestDataGenerator.DEFAULT_SECOND_PARTITION_PATH).toString();
String partition3 = tablePath + File.separator + HoodieTestDataGenerator.DEFAULT_THIRD_PARTITION_PATH; String partition3 = Paths.get(tablePath, HoodieTestDataGenerator.DEFAULT_THIRD_PARTITION_PATH).toString();
assertTrue(fs.mkdirs(new Path(partition1))); assertTrue(fs.mkdirs(new Path(partition1)));
assertTrue(fs.mkdirs(new Path(partition2))); assertTrue(fs.mkdirs(new Path(partition2)));
assertTrue(fs.mkdirs(new Path(partition3))); assertTrue(fs.mkdirs(new Path(partition3)));
// default is dry run. // default is dry run.
CommandResult cr = getShell().executeCommand("repair addpartitionmeta"); CommandResult cr = shell().executeCommand("repair addpartitionmeta");
assertTrue(cr.isSuccess()); assertTrue(cr.isSuccess());
// expected all 'No'. // expected all 'No'.
@@ -108,17 +119,17 @@ public class TestRepairsCommand extends AbstractShellIntegrationTest {
@Test @Test
public void testAddPartitionMetaWithRealRun() throws IOException { public void testAddPartitionMetaWithRealRun() throws IOException {
// create commit instant // create commit instant
Files.createFile(Paths.get(tablePath + "/.hoodie/100.commit")); Files.createFile(Paths.get(tablePath, ".hoodie", "100.commit"));
// create partition path // create partition path
String partition1 = tablePath + File.separator + HoodieTestDataGenerator.DEFAULT_FIRST_PARTITION_PATH; String partition1 = Paths.get(tablePath, HoodieTestDataGenerator.DEFAULT_FIRST_PARTITION_PATH).toString();
String partition2 = tablePath + File.separator + HoodieTestDataGenerator.DEFAULT_SECOND_PARTITION_PATH; String partition2 = Paths.get(tablePath, HoodieTestDataGenerator.DEFAULT_SECOND_PARTITION_PATH).toString();
String partition3 = tablePath + File.separator + HoodieTestDataGenerator.DEFAULT_THIRD_PARTITION_PATH; String partition3 = Paths.get(tablePath, HoodieTestDataGenerator.DEFAULT_THIRD_PARTITION_PATH).toString();
assertTrue(fs.mkdirs(new Path(partition1))); assertTrue(fs.mkdirs(new Path(partition1)));
assertTrue(fs.mkdirs(new Path(partition2))); assertTrue(fs.mkdirs(new Path(partition2)));
assertTrue(fs.mkdirs(new Path(partition3))); assertTrue(fs.mkdirs(new Path(partition3)));
CommandResult cr = getShell().executeCommand("repair addpartitionmeta --dryrun false"); CommandResult cr = shell().executeCommand("repair addpartitionmeta --dryrun false");
assertTrue(cr.isSuccess()); assertTrue(cr.isSuccess());
List<String> paths = FSUtils.getAllPartitionFoldersThreeLevelsDown(fs, tablePath); List<String> paths = FSUtils.getAllPartitionFoldersThreeLevelsDown(fs, tablePath);
@@ -132,7 +143,7 @@ public class TestRepairsCommand extends AbstractShellIntegrationTest {
String got = removeNonWordAndStripSpace(cr.getResult().toString()); String got = removeNonWordAndStripSpace(cr.getResult().toString());
assertEquals(expected, got); assertEquals(expected, got);
cr = getShell().executeCommand("repair addpartitionmeta"); cr = shell().executeCommand("repair addpartitionmeta");
// after real run, Metadata is present now. // after real run, Metadata is present now.
rows = paths.stream() rows = paths.stream()
@@ -153,7 +164,7 @@ public class TestRepairsCommand extends AbstractShellIntegrationTest {
URL newProps = this.getClass().getClassLoader().getResource("table-config.properties"); URL newProps = this.getClass().getClassLoader().getResource("table-config.properties");
assertNotNull(newProps, "New property file must exist"); assertNotNull(newProps, "New property file must exist");
CommandResult cr = getShell().executeCommand("repair overwrite-hoodie-props --new-props-file " + newProps.getPath()); CommandResult cr = shell().executeCommand("repair overwrite-hoodie-props --new-props-file " + newProps.getPath());
assertTrue(cr.isSuccess()); assertTrue(cr.isSuccess());
Map<String, String> oldProps = HoodieCLI.getTableMetaClient().getTableConfig().propsMap(); Map<String, String> oldProps = HoodieCLI.getTableMetaClient().getTableConfig().propsMap();
@@ -185,11 +196,11 @@ public class TestRepairsCommand extends AbstractShellIntegrationTest {
*/ */
@Test @Test
public void testRemoveCorruptedPendingCleanAction() throws IOException { public void testRemoveCorruptedPendingCleanAction() throws IOException {
HoodieCLI.conf = jsc.hadoopConfiguration(); HoodieCLI.conf = hadoopConf();
Configuration conf = HoodieCLI.conf; Configuration conf = HoodieCLI.conf;
metaClient = HoodieCLI.getTableMetaClient(); HoodieTableMetaClient metaClient = HoodieCLI.getTableMetaClient();
// Create four requested files // Create four requested files
for (int i = 100; i < 104; i++) { for (int i = 100; i < 104; i++) {
@@ -203,7 +214,7 @@ public class TestRepairsCommand extends AbstractShellIntegrationTest {
// first, there are four instants // first, there are four instants
assertEquals(4, metaClient.getActiveTimeline().filterInflightsAndRequested().getInstants().count()); assertEquals(4, metaClient.getActiveTimeline().filterInflightsAndRequested().getInstants().count());
CommandResult cr = getShell().executeCommand("repair corrupted clean files"); CommandResult cr = shell().executeCommand("repair corrupted clean files");
assertTrue(cr.isSuccess()); assertTrue(cr.isSuccess());
// reload meta client // reload meta client

View File

@@ -23,8 +23,9 @@ import org.apache.hudi.cli.HoodieCLI;
import org.apache.hudi.cli.HoodiePrintHelper; import org.apache.hudi.cli.HoodiePrintHelper;
import org.apache.hudi.cli.HoodieTableHeaderFields; import org.apache.hudi.cli.HoodieTableHeaderFields;
import org.apache.hudi.cli.TableHeader; import org.apache.hudi.cli.TableHeader;
import org.apache.hudi.cli.testutils.AbstractShellIntegrationTest; import org.apache.hudi.cli.functional.CLIFunctionalTestHarness;
import org.apache.hudi.client.AbstractHoodieWriteClient; import org.apache.hudi.client.AbstractHoodieWriteClient;
import org.apache.hudi.client.SparkRDDWriteClient;
import org.apache.hudi.common.model.HoodieTableType; import org.apache.hudi.common.model.HoodieTableType;
import org.apache.hudi.common.table.HoodieTableMetaClient; import org.apache.hudi.common.table.HoodieTableMetaClient;
import org.apache.hudi.common.table.timeline.HoodieActiveTimeline; import org.apache.hudi.common.table.timeline.HoodieActiveTimeline;
@@ -38,11 +39,11 @@ import org.apache.hudi.config.HoodieWriteConfig;
import org.apache.hudi.index.HoodieIndex; import org.apache.hudi.index.HoodieIndex;
import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Tag;
import org.junit.jupiter.api.Test; import org.junit.jupiter.api.Test;
import org.springframework.shell.core.CommandResult; import org.springframework.shell.core.CommandResult;
import java.io.IOException; import java.io.IOException;
import java.nio.file.Paths;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.HashMap; import java.util.HashMap;
import java.util.List; import java.util.List;
@@ -60,16 +61,17 @@ import static org.junit.jupiter.api.Assertions.assertTrue;
/** /**
* Test class for {@link org.apache.hudi.cli.commands.RollbacksCommand}. * Test class for {@link org.apache.hudi.cli.commands.RollbacksCommand}.
*/ */
public class TestRollbacksCommand extends AbstractShellIntegrationTest { @Tag("functional")
public class TestRollbacksCommand extends CLIFunctionalTestHarness {
@BeforeEach @BeforeEach
public void init() throws Exception { public void init() throws Exception {
String tableName = "test_table"; String tableName = tableName();
String tablePath = Paths.get(basePath, tableName).toString(); String tablePath = tablePath(tableName);
new TableCommand().createTable( new TableCommand().createTable(
tablePath, tableName, HoodieTableType.MERGE_ON_READ.name(), tablePath, tableName, HoodieTableType.MERGE_ON_READ.name(),
"", TimelineLayoutVersion.VERSION_1, "org.apache.hudi.common.model.HoodieAvroPayload"); "", TimelineLayoutVersion.VERSION_1, "org.apache.hudi.common.model.HoodieAvroPayload");
metaClient = HoodieTableMetaClient.reload(HoodieCLI.getTableMetaClient()); HoodieTableMetaClient metaClient = HoodieTableMetaClient.reload(HoodieCLI.getTableMetaClient());
//Create some commits files and base files //Create some commits files and base files
Map<String, String> partitionAndFileId = new HashMap<String, String>() { Map<String, String> partitionAndFileId = new HashMap<String, String>() {
{ {
@@ -90,7 +92,7 @@ public class TestRollbacksCommand extends AbstractShellIntegrationTest {
HoodieWriteConfig config = HoodieWriteConfig.newBuilder().withPath(tablePath) HoodieWriteConfig config = HoodieWriteConfig.newBuilder().withPath(tablePath)
.withIndexConfig(HoodieIndexConfig.newBuilder().withIndexType(HoodieIndex.IndexType.INMEMORY).build()).build(); .withIndexConfig(HoodieIndexConfig.newBuilder().withIndexType(HoodieIndex.IndexType.INMEMORY).build()).build();
try (AbstractHoodieWriteClient client = getHoodieWriteClient(config)) { try (AbstractHoodieWriteClient client = new SparkRDDWriteClient(context(), config)) {
// Rollback inflight commit3 and commit2 // Rollback inflight commit3 and commit2
client.rollback("102"); client.rollback("102");
client.rollback("101"); client.rollback("101");
@@ -102,7 +104,7 @@ public class TestRollbacksCommand extends AbstractShellIntegrationTest {
*/ */
@Test @Test
public void testShowRollbacks() { public void testShowRollbacks() {
CommandResult cr = getShell().executeCommand("show rollbacks"); CommandResult cr = shell().executeCommand("show rollbacks");
assertTrue(cr.isSuccess()); assertTrue(cr.isSuccess());
// get rollback instants // get rollback instants
@@ -152,7 +154,7 @@ public class TestRollbacksCommand extends AbstractShellIntegrationTest {
HoodieInstant instant = rollback.findFirst().orElse(null); HoodieInstant instant = rollback.findFirst().orElse(null);
assertNotNull(instant, "The instant can not be null."); assertNotNull(instant, "The instant can not be null.");
CommandResult cr = getShell().executeCommand("show rollback --instant " + instant.getTimestamp()); CommandResult cr = shell().executeCommand("show rollback --instant " + instant.getTimestamp());
assertTrue(cr.isSuccess()); assertTrue(cr.isSuccess());
List<Comparable[]> rows = new ArrayList<>(); List<Comparable[]> rows = new ArrayList<>();

View File

@@ -21,20 +21,20 @@ package org.apache.hudi.cli.commands;
import org.apache.hudi.cli.HoodieCLI; import org.apache.hudi.cli.HoodieCLI;
import org.apache.hudi.cli.HoodiePrintHelper; import org.apache.hudi.cli.HoodiePrintHelper;
import org.apache.hudi.cli.HoodieTableHeaderFields; import org.apache.hudi.cli.HoodieTableHeaderFields;
import org.apache.hudi.cli.testutils.AbstractShellIntegrationTest; import org.apache.hudi.cli.functional.CLIFunctionalTestHarness;
import org.apache.hudi.common.model.HoodieTableType; import org.apache.hudi.common.model.HoodieTableType;
import org.apache.hudi.common.table.timeline.HoodieTimeline; import org.apache.hudi.common.table.timeline.HoodieTimeline;
import org.apache.hudi.common.table.timeline.versioning.TimelineLayoutVersion; import org.apache.hudi.common.table.timeline.versioning.TimelineLayoutVersion;
import org.apache.hudi.common.testutils.HoodieTestDataGenerator; import org.apache.hudi.common.testutils.HoodieTestDataGenerator;
import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Tag;
import org.junit.jupiter.api.Test; import org.junit.jupiter.api.Test;
import org.springframework.shell.core.CommandResult; import org.springframework.shell.core.CommandResult;
import java.io.File;
import java.io.IOException; import java.io.IOException;
import java.util.Arrays;
import java.util.Comparator; import java.util.Comparator;
import java.util.stream.Stream;
import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue; import static org.junit.jupiter.api.Assertions.assertTrue;
@@ -42,14 +42,15 @@ import static org.junit.jupiter.api.Assertions.assertTrue;
/** /**
* Test class for {@link org.apache.hudi.cli.commands.SavepointsCommand}. * Test class for {@link org.apache.hudi.cli.commands.SavepointsCommand}.
*/ */
public class TestSavepointsCommand extends AbstractShellIntegrationTest { @Tag("functional")
public class TestSavepointsCommand extends CLIFunctionalTestHarness {
private String tablePath; private String tablePath;
@BeforeEach @BeforeEach
public void init() throws IOException { public void init() throws IOException {
String tableName = "test_table"; String tableName = tableName();
tablePath = basePath + File.separator + tableName; tablePath = tablePath(tableName);
// Create table and connect // Create table and connect
new TableCommand().createTable( new TableCommand().createTable(
@@ -65,14 +66,14 @@ public class TestSavepointsCommand extends AbstractShellIntegrationTest {
// generate four savepoints // generate four savepoints
for (int i = 100; i < 104; i++) { for (int i = 100; i < 104; i++) {
String instantTime = String.valueOf(i); String instantTime = String.valueOf(i);
HoodieTestDataGenerator.createSavepointFile(tablePath, instantTime, jsc.hadoopConfiguration()); HoodieTestDataGenerator.createSavepointFile(tablePath, instantTime, hadoopConf());
} }
CommandResult cr = getShell().executeCommand("savepoints show"); CommandResult cr = shell().executeCommand("savepoints show");
assertTrue(cr.isSuccess()); assertTrue(cr.isSuccess());
// generate expect result // generate expect result
String[][] rows = Arrays.asList("100", "101", "102", "103").stream().sorted(Comparator.reverseOrder()) String[][] rows = Stream.of("100", "101", "102", "103").sorted(Comparator.reverseOrder())
.map(instant -> new String[]{instant}).toArray(String[][]::new); .map(instant -> new String[]{instant}).toArray(String[][]::new);
String expected = HoodiePrintHelper.print(new String[] {HoodieTableHeaderFields.HEADER_SAVEPOINT_TIME}, rows); String expected = HoodiePrintHelper.print(new String[] {HoodieTableHeaderFields.HEADER_SAVEPOINT_TIME}, rows);
expected = removeNonWordAndStripSpace(expected); expected = removeNonWordAndStripSpace(expected);
@@ -92,7 +93,7 @@ public class TestSavepointsCommand extends AbstractShellIntegrationTest {
// generate four savepoints // generate four savepoints
for (int i = 100; i < 104; i++) { for (int i = 100; i < 104; i++) {
String instantTime = String.valueOf(i); String instantTime = String.valueOf(i);
HoodieTestDataGenerator.createSavepointFile(tablePath, instantTime, jsc.hadoopConfiguration()); HoodieTestDataGenerator.createSavepointFile(tablePath, instantTime, hadoopConf());
} }
// Before refresh, no instant // Before refresh, no instant
@@ -100,7 +101,7 @@ public class TestSavepointsCommand extends AbstractShellIntegrationTest {
HoodieCLI.getTableMetaClient().getActiveTimeline().getSavePointTimeline().filterCompletedInstants(); HoodieCLI.getTableMetaClient().getActiveTimeline().getSavePointTimeline().filterCompletedInstants();
assertEquals(0, timeline.countInstants(), "there should have no instant"); assertEquals(0, timeline.countInstants(), "there should have no instant");
CommandResult cr = getShell().executeCommand("savepoints refresh"); CommandResult cr = shell().executeCommand("savepoints refresh");
assertTrue(cr.isSuccess()); assertTrue(cr.isSuccess());
timeline = timeline =

View File

@@ -19,8 +19,9 @@
package org.apache.hudi.cli.commands; package org.apache.hudi.cli.commands;
import org.apache.hudi.cli.HoodiePrintHelper; import org.apache.hudi.cli.HoodiePrintHelper;
import org.apache.hudi.cli.testutils.AbstractShellIntegrationTest; import org.apache.hudi.cli.functional.CLIFunctionalTestHarness;
import org.junit.jupiter.api.Tag;
import org.junit.jupiter.api.Test; import org.junit.jupiter.api.Test;
import org.springframework.shell.core.CommandResult; import org.springframework.shell.core.CommandResult;
@@ -30,7 +31,8 @@ import static org.junit.jupiter.api.Assertions.assertTrue;
/** /**
* Test Cases for {@link SparkEnvCommand}. * Test Cases for {@link SparkEnvCommand}.
*/ */
public class TestSparkEnvCommand extends AbstractShellIntegrationTest { @Tag("functional")
public class TestSparkEnvCommand extends CLIFunctionalTestHarness {
/** /**
* Test Cases for set and get spark env. * Test Cases for set and get spark env.
@@ -38,18 +40,18 @@ public class TestSparkEnvCommand extends AbstractShellIntegrationTest {
@Test @Test
public void testSetAndGetSparkEnv() { public void testSetAndGetSparkEnv() {
// First, be empty // First, be empty
CommandResult cr = getShell().executeCommand("show envs all"); CommandResult cr = shell().executeCommand("show envs all");
String nullResult = HoodiePrintHelper.print(new String[] {"key", "value"}, new String[0][2]); String nullResult = HoodiePrintHelper.print(new String[] {"key", "value"}, new String[0][2]);
nullResult = removeNonWordAndStripSpace(nullResult); nullResult = removeNonWordAndStripSpace(nullResult);
String got = removeNonWordAndStripSpace(cr.getResult().toString()); String got = removeNonWordAndStripSpace(cr.getResult().toString());
assertEquals(nullResult, got); assertEquals(nullResult, got);
// Set SPARK_HOME // Set SPARK_HOME
cr = getShell().executeCommand("set --conf SPARK_HOME=/usr/etc/spark"); cr = shell().executeCommand("set --conf SPARK_HOME=/usr/etc/spark");
assertTrue(cr.isSuccess()); assertTrue(cr.isSuccess());
//Get //Get
cr = getShell().executeCommand("show env --key SPARK_HOME"); cr = shell().executeCommand("show env --key SPARK_HOME");
String result = HoodiePrintHelper.print(new String[] {"key", "value"}, new String[][] {new String[] {"SPARK_HOME", "/usr/etc/spark"}}); String result = HoodiePrintHelper.print(new String[] {"key", "value"}, new String[][] {new String[] {"SPARK_HOME", "/usr/etc/spark"}});
result = removeNonWordAndStripSpace(result); result = removeNonWordAndStripSpace(result);
got = removeNonWordAndStripSpace(cr.getResult().toString()); got = removeNonWordAndStripSpace(cr.getResult().toString());

View File

@@ -22,7 +22,7 @@ import org.apache.hudi.cli.HoodieCLI;
import org.apache.hudi.cli.HoodiePrintHelper; import org.apache.hudi.cli.HoodiePrintHelper;
import org.apache.hudi.cli.HoodieTableHeaderFields; import org.apache.hudi.cli.HoodieTableHeaderFields;
import org.apache.hudi.cli.TableHeader; import org.apache.hudi.cli.TableHeader;
import org.apache.hudi.cli.testutils.AbstractShellIntegrationTest; import org.apache.hudi.cli.functional.CLIFunctionalTestHarness;
import org.apache.hudi.cli.testutils.HoodieTestCommitMetadataGenerator; import org.apache.hudi.cli.testutils.HoodieTestCommitMetadataGenerator;
import org.apache.hudi.common.model.HoodieTableType; import org.apache.hudi.common.model.HoodieTableType;
import org.apache.hudi.common.table.timeline.versioning.TimelineLayoutVersion; import org.apache.hudi.common.table.timeline.versioning.TimelineLayoutVersion;
@@ -34,10 +34,10 @@ import com.codahale.metrics.Histogram;
import com.codahale.metrics.Snapshot; import com.codahale.metrics.Snapshot;
import com.codahale.metrics.UniformReservoir; import com.codahale.metrics.UniformReservoir;
import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Tag;
import org.junit.jupiter.api.Test; import org.junit.jupiter.api.Test;
import org.springframework.shell.core.CommandResult; import org.springframework.shell.core.CommandResult;
import java.io.File;
import java.io.IOException; import java.io.IOException;
import java.text.DecimalFormat; import java.text.DecimalFormat;
import java.util.ArrayList; import java.util.ArrayList;
@@ -52,21 +52,21 @@ import static org.junit.jupiter.api.Assertions.assertTrue;
/** /**
* Test class of {@link org.apache.hudi.cli.commands.StatsCommand}. * Test class of {@link org.apache.hudi.cli.commands.StatsCommand}.
*/ */
public class TestStatsCommand extends AbstractShellIntegrationTest { @Tag("functional")
public class TestStatsCommand extends CLIFunctionalTestHarness {
private String tablePath; private String tablePath;
@BeforeEach @BeforeEach
public void init() throws IOException { public void init() throws IOException {
String tableName = "test_table"; String tableName = tableName();
tablePath = basePath + File.separator + tableName; tablePath = tablePath(tableName);
HoodieCLI.conf = jsc.hadoopConfiguration(); HoodieCLI.conf = hadoopConf();
// Create table and connect // Create table and connect
new TableCommand().createTable( new TableCommand().createTable(
tablePath, tableName, HoodieTableType.COPY_ON_WRITE.name(), tablePath, tableName, HoodieTableType.COPY_ON_WRITE.name(),
"", TimelineLayoutVersion.VERSION_1, "org.apache.hudi.common.model.HoodieAvroPayload"); "", TimelineLayoutVersion.VERSION_1, "org.apache.hudi.common.model.HoodieAvroPayload");
metaClient = HoodieCLI.getTableMetaClient();
} }
/** /**
@@ -83,11 +83,11 @@ public class TestStatsCommand extends AbstractShellIntegrationTest {
for (Map.Entry<String, Integer[]> entry : data.entrySet()) { for (Map.Entry<String, Integer[]> entry : data.entrySet()) {
String k = entry.getKey(); String k = entry.getKey();
Integer[] v = entry.getValue(); Integer[] v = entry.getValue();
HoodieTestCommitMetadataGenerator.createCommitFileWithMetadata(tablePath, k, jsc.hadoopConfiguration(), HoodieTestCommitMetadataGenerator.createCommitFileWithMetadata(tablePath, k, hadoopConf(),
Option.of(v[0]), Option.of(v[1])); Option.of(v[0]), Option.of(v[1]));
} }
CommandResult cr = getShell().executeCommand("stats wa"); CommandResult cr = shell().executeCommand("stats wa");
assertTrue(cr.isSuccess()); assertTrue(cr.isSuccess());
// generate expect // generate expect
@@ -127,7 +127,7 @@ public class TestStatsCommand extends AbstractShellIntegrationTest {
String partition2 = HoodieTestDataGenerator.DEFAULT_SECOND_PARTITION_PATH; String partition2 = HoodieTestDataGenerator.DEFAULT_SECOND_PARTITION_PATH;
String partition3 = HoodieTestDataGenerator.DEFAULT_THIRD_PARTITION_PATH; String partition3 = HoodieTestDataGenerator.DEFAULT_THIRD_PARTITION_PATH;
HoodieTestTable testTable = HoodieTestTable.of(metaClient); HoodieTestTable testTable = HoodieTestTable.of(HoodieCLI.getTableMetaClient());
Integer[] data1 = data.get(commit1); Integer[] data1 = data.get(commit1);
assertTrue(3 <= data1.length); assertTrue(3 <= data1.length);
testTable.addCommit(commit1) testTable.addCommit(commit1)
@@ -142,7 +142,7 @@ public class TestStatsCommand extends AbstractShellIntegrationTest {
.withBaseFilesInPartition(partition2, data2[1], data2[2]) .withBaseFilesInPartition(partition2, data2[1], data2[2])
.withBaseFilesInPartition(partition3, data2[3]); .withBaseFilesInPartition(partition3, data2[3]);
CommandResult cr = getShell().executeCommand("stats filesizes"); CommandResult cr = shell().executeCommand("stats filesizes");
assertTrue(cr.isSuccess()); assertTrue(cr.isSuccess());
Histogram globalHistogram = new Histogram(new UniformReservoir(StatsCommand.MAX_FILES)); Histogram globalHistogram = new Histogram(new UniformReservoir(StatsCommand.MAX_FILES));

View File

@@ -20,7 +20,7 @@ package org.apache.hudi.cli.commands;
import org.apache.hudi.avro.HoodieAvroUtils; import org.apache.hudi.avro.HoodieAvroUtils;
import org.apache.hudi.cli.HoodieCLI; import org.apache.hudi.cli.HoodieCLI;
import org.apache.hudi.cli.testutils.AbstractShellIntegrationTest; import org.apache.hudi.cli.functional.CLIFunctionalTestHarness;
import org.apache.hudi.cli.testutils.HoodieTestCommitMetadataGenerator; import org.apache.hudi.cli.testutils.HoodieTestCommitMetadataGenerator;
import org.apache.hudi.common.fs.ConsistencyGuardConfig; import org.apache.hudi.common.fs.ConsistencyGuardConfig;
import org.apache.hudi.common.model.HoodieCommitMetadata; import org.apache.hudi.common.model.HoodieCommitMetadata;
@@ -36,12 +36,15 @@ import org.apache.avro.Schema;
import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.Path;
import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Tag;
import org.junit.jupiter.api.Test; import org.junit.jupiter.api.Test;
import org.springframework.shell.core.CommandResult; import org.springframework.shell.core.CommandResult;
import java.io.File; import java.io.File;
import java.io.FileInputStream; import java.io.FileInputStream;
import java.io.IOException; import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Paths;
import java.util.Arrays; import java.util.Arrays;
import java.util.Collections; import java.util.Collections;
import java.util.LinkedHashMap; import java.util.LinkedHashMap;
@@ -56,9 +59,10 @@ import static org.junit.jupiter.api.Assertions.assertTrue;
/** /**
* Test Cases for {@link TableCommand}. * Test Cases for {@link TableCommand}.
*/ */
public class TestTableCommand extends AbstractShellIntegrationTest { @Tag("functional")
public class TestTableCommand extends CLIFunctionalTestHarness {
private final String tableName = "test_table"; private String tableName;
private String tablePath; private String tablePath;
private String metaPath; private String metaPath;
private String archivePath; private String archivePath;
@@ -68,17 +72,18 @@ public class TestTableCommand extends AbstractShellIntegrationTest {
*/ */
@BeforeEach @BeforeEach
public void init() { public void init() {
HoodieCLI.conf = jsc.hadoopConfiguration(); HoodieCLI.conf = hadoopConf();
tablePath = basePath + File.separator + tableName; tableName = tableName();
metaPath = tablePath + File.separator + METAFOLDER_NAME; tablePath = tablePath(tableName);
archivePath = metaPath + File.separator + HoodieTableConfig.ARCHIVELOG_FOLDER.defaultValue(); metaPath = Paths.get(tablePath, METAFOLDER_NAME).toString();
archivePath = Paths.get(metaPath, HoodieTableConfig.ARCHIVELOG_FOLDER.defaultValue()).toString();
} }
/** /**
* Method to create a table for connect or desc. * Method to create a table for connect or desc.
*/ */
private boolean prepareTable() { private boolean prepareTable() {
CommandResult cr = getShell().executeCommand( CommandResult cr = shell().executeCommand(
"create --path " + tablePath + " --tableName " + tableName); "create --path " + tablePath + " --tableName " + tableName);
return cr.isSuccess(); return cr.isSuccess();
} }
@@ -92,7 +97,7 @@ public class TestTableCommand extends AbstractShellIntegrationTest {
assertTrue(prepareTable()); assertTrue(prepareTable());
// Test connect with specified values // Test connect with specified values
CommandResult cr = getShell().executeCommand( CommandResult cr = shell().executeCommand(
"connect --path " + tablePath + " --initialCheckIntervalMs 3000 " "connect --path " + tablePath + " --initialCheckIntervalMs 3000 "
+ "--maxWaitIntervalMs 40000 --maxCheckIntervalMs 8"); + "--maxWaitIntervalMs 40000 --maxCheckIntervalMs 8");
assertTrue(cr.isSuccess()); assertTrue(cr.isSuccess());
@@ -131,7 +136,7 @@ public class TestTableCommand extends AbstractShellIntegrationTest {
@Test @Test
public void testCreateWithSpecifiedValues() { public void testCreateWithSpecifiedValues() {
// Test create with specified values // Test create with specified values
CommandResult cr = getShell().executeCommand( CommandResult cr = shell().executeCommand(
"create --path " + tablePath + " --tableName " + tableName "create --path " + tablePath + " --tableName " + tableName
+ " --tableType MERGE_ON_READ --archiveLogFolder archive"); + " --tableType MERGE_ON_READ --archiveLogFolder archive");
assertTrue(cr.isSuccess()); assertTrue(cr.isSuccess());
@@ -152,7 +157,7 @@ public class TestTableCommand extends AbstractShellIntegrationTest {
assertTrue(prepareTable()); assertTrue(prepareTable());
// Test desc table // Test desc table
CommandResult cr = getShell().executeCommand("desc"); CommandResult cr = shell().executeCommand("desc");
assertTrue(cr.isSuccess()); assertTrue(cr.isSuccess());
// check table's basePath metaPath and type // check table's basePath metaPath and type
@@ -175,7 +180,7 @@ public class TestTableCommand extends AbstractShellIntegrationTest {
private void testRefreshCommand(String command) throws IOException { private void testRefreshCommand(String command) throws IOException {
// clean table matedata // clean table matedata
FileSystem fs = FileSystem.get(jsc.hadoopConfiguration()); FileSystem fs = FileSystem.get(hadoopConf());
fs.delete(new Path(tablePath + File.separator + HoodieTableMetaClient.METAFOLDER_NAME), true); fs.delete(new Path(tablePath + File.separator + HoodieTableMetaClient.METAFOLDER_NAME), true);
// Create table // Create table
@@ -188,7 +193,7 @@ public class TestTableCommand extends AbstractShellIntegrationTest {
// generate four savepoints // generate four savepoints
for (int i = 100; i < 104; i++) { for (int i = 100; i < 104; i++) {
String instantTime = String.valueOf(i); String instantTime = String.valueOf(i);
HoodieTestDataGenerator.createCommitFile(tablePath, instantTime, jsc.hadoopConfiguration()); HoodieTestDataGenerator.createCommitFile(tablePath, instantTime, hadoopConf());
} }
// Before refresh, no instant // Before refresh, no instant
@@ -196,7 +201,7 @@ public class TestTableCommand extends AbstractShellIntegrationTest {
HoodieCLI.getTableMetaClient().getActiveTimeline().getCommitTimeline().filterCompletedInstants(); HoodieCLI.getTableMetaClient().getActiveTimeline().getCommitTimeline().filterCompletedInstants();
assertEquals(0, timeline.countInstants(), "there should have no instant"); assertEquals(0, timeline.countInstants(), "there should have no instant");
CommandResult cr = getShell().executeCommand(command); CommandResult cr = shell().executeCommand(command);
assertTrue(cr.isSuccess()); assertTrue(cr.isSuccess());
timeline = timeline =
@@ -209,11 +214,10 @@ public class TestTableCommand extends AbstractShellIntegrationTest {
@Test @Test
public void testFetchTableSchema() throws Exception { public void testFetchTableSchema() throws Exception {
// Create table and connect // Create table and connect
HoodieCLI.conf = jsc.hadoopConfiguration(); HoodieCLI.conf = hadoopConf();
new TableCommand().createTable( new TableCommand().createTable(
tablePath, tableName, HoodieTableType.COPY_ON_WRITE.name(), tablePath, tableName, HoodieTableType.COPY_ON_WRITE.name(),
"", TimelineLayoutVersion.VERSION_1, "org.apache.hudi.common.model.HoodieAvroPayload"); "", TimelineLayoutVersion.VERSION_1, "org.apache.hudi.common.model.HoodieAvroPayload");
metaClient = HoodieCLI.getTableMetaClient();
String schemaStr = "{\n" String schemaStr = "{\n"
+ " \"type\" : \"record\",\n" + " \"type\" : \"record\",\n"
@@ -230,7 +234,7 @@ public class TestTableCommand extends AbstractShellIntegrationTest {
generateData(schemaStr); generateData(schemaStr);
CommandResult cr = getShell().executeCommand("fetch table schema"); CommandResult cr = shell().executeCommand("fetch table schema");
assertTrue(cr.isSuccess()); assertTrue(cr.isSuccess());
String actualSchemaStr = cr.getResult().toString().substring(cr.getResult().toString().indexOf("{")); String actualSchemaStr = cr.getResult().toString().substring(cr.getResult().toString().indexOf("{"));
@@ -241,7 +245,7 @@ public class TestTableCommand extends AbstractShellIntegrationTest {
assertEquals(actualSchema, expectedSchema); assertEquals(actualSchema, expectedSchema);
File file = File.createTempFile("temp", null); File file = File.createTempFile("temp", null);
cr = getShell().executeCommand("fetch table schema --outputFilePath " + file.getAbsolutePath()); cr = shell().executeCommand("fetch table schema --outputFilePath " + file.getAbsolutePath());
assertTrue(cr.isSuccess()); assertTrue(cr.isSuccess());
actualSchemaStr = getFileContent(file.getAbsolutePath()); actualSchemaStr = getFileContent(file.getAbsolutePath());
@@ -262,7 +266,7 @@ public class TestTableCommand extends AbstractShellIntegrationTest {
Option.of(value[0]), Option.of(value[1]), Collections.singletonMap(HoodieCommitMetadata.SCHEMA_KEY, schemaStr)); Option.of(value[0]), Option.of(value[1]), Collections.singletonMap(HoodieCommitMetadata.SCHEMA_KEY, schemaStr));
} }
metaClient = HoodieTableMetaClient.reload(HoodieCLI.getTableMetaClient()); HoodieTableMetaClient metaClient = HoodieTableMetaClient.reload(HoodieCLI.getTableMetaClient());
assertEquals(3, metaClient.reloadActiveTimeline().getCommitsTimeline().countInstants(), assertEquals(3, metaClient.reloadActiveTimeline().getCommitsTimeline().countInstants(),
"There should have 3 commits"); "There should have 3 commits");
return data; return data;
@@ -277,6 +281,6 @@ public class TestTableCommand extends AbstractShellIntegrationTest {
byte[] data = new byte[(int) fileToRead.length()]; byte[] data = new byte[(int) fileToRead.length()];
fis.read(data); fis.read(data);
fis.close(); fis.close();
return new String(data, "UTF-8"); return new String(data, StandardCharsets.UTF_8);
} }
} }

View File

@@ -19,11 +19,14 @@
package org.apache.hudi.cli.commands; package org.apache.hudi.cli.commands;
import org.apache.hudi.cli.HoodieCLI; import org.apache.hudi.cli.HoodieCLI;
import org.apache.hudi.cli.testutils.AbstractShellBaseIntegrationTest; import org.apache.hudi.cli.functional.CLIFunctionalTestHarness;
import org.apache.hudi.cli.utils.SparkTempViewProvider;
import org.apache.hudi.cli.utils.TempViewProvider;
import org.apache.hudi.exception.HoodieException; import org.apache.hudi.exception.HoodieException;
import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Tag;
import org.junit.jupiter.api.Test; import org.junit.jupiter.api.Test;
import org.springframework.shell.core.CommandResult; import org.springframework.shell.core.CommandResult;
@@ -35,9 +38,11 @@ import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertThrows; import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue; import static org.junit.jupiter.api.Assertions.assertTrue;
public class TestTempViewCommand extends AbstractShellBaseIntegrationTest { @Tag("functional")
public class TestTempViewCommand extends CLIFunctionalTestHarness {
private String tableName = "test_table"; private TempViewProvider tempViewProvider;
private final String tableName = tableName();
@BeforeEach @BeforeEach
public void init() { public void init() {
@@ -45,37 +50,38 @@ public class TestTempViewCommand extends AbstractShellBaseIntegrationTest {
for (int i = 0; i < 3; i++) { for (int i = 0; i < 3; i++) {
rows.add(Arrays.asList(new Comparable[] {"c1", "c2", "c3"})); rows.add(Arrays.asList(new Comparable[] {"c1", "c2", "c3"}));
} }
HoodieCLI.getTempViewProvider().createOrReplace(tableName, Arrays.asList("t1", "t2", "t3"), rows); tempViewProvider = new SparkTempViewProvider(jsc(), sqlContext());
tempViewProvider.createOrReplace(tableName, Arrays.asList("t1", "t2", "t3"), rows);
HoodieCLI.tempViewProvider = tempViewProvider;
} }
@AfterAll @AfterEach
public static void shutdown() { public void cleanUpTempView() {
if (HoodieCLI.getTempViewProvider() != null) { tempViewProvider.close();
HoodieCLI.closeTempViewProvider(); HoodieCLI.tempViewProvider = null;
}
} }
@Test @Test
public void testQueryWithException() { public void testQueryWithException() {
CommandResult cr = getShell().executeCommand(String.format("temp query --sql 'select * from %s'", "table_1")); CommandResult cr = shell().executeCommand(String.format("temp query --sql 'select * from %s'", "table_non_exist"));
assertEquals(TempViewCommand.QUERY_FAIL, cr.getResult().toString()); assertEquals(TempViewCommand.QUERY_FAIL, cr.getResult().toString());
} }
@Test @Test
public void testQuery() { public void testQuery() {
CommandResult cr = getShell().executeCommand(String.format("temp query --sql 'select * from %s'", tableName)); CommandResult cr = shell().executeCommand(String.format("temp query --sql 'select * from %s'", tableName));
assertEquals(TempViewCommand.QUERY_SUCCESS, cr.getResult().toString()); assertEquals(TempViewCommand.QUERY_SUCCESS, cr.getResult().toString());
} }
@Test @Test
public void testShowAll() { public void testShowAll() {
CommandResult cr = getShell().executeCommand("temps show"); CommandResult cr = shell().executeCommand("temps show");
assertEquals(TempViewCommand.SHOW_SUCCESS, cr.getResult().toString()); assertEquals(TempViewCommand.SHOW_SUCCESS, cr.getResult().toString());
} }
@Test @Test
public void testDelete() { public void testDelete() {
CommandResult cr = getShell().executeCommand(String.format("temp delete --view %s", tableName)); CommandResult cr = shell().executeCommand(String.format("temp delete --view %s", tableName));
assertTrue(cr.getResult().toString().endsWith("successfully!")); assertTrue(cr.getResult().toString().endsWith("successfully!"));
// after delete, we can not access table yet. // after delete, we can not access table yet.

View File

@@ -9,17 +9,18 @@
* *
* http://www.apache.org/licenses/LICENSE-2.0 * http://www.apache.org/licenses/LICENSE-2.0
* *
* Unless required by applicable law or agreed to in writing, software * Unless required by applicable law or agreed to in writing,
* distributed under the License is distributed on an "AS IS" BASIS, * software distributed under the License is distributed on an
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* See the License for the specific language governing permissions and * KIND, either express or implied. See the License for the
* limitations under the License. * specific language governing permissions and limitations
* under the License.
*/ */
package org.apache.hudi.cli.commands; package org.apache.hudi.cli.commands;
import org.apache.hudi.cli.HoodieCLI; import org.apache.hudi.cli.HoodieCLI;
import org.apache.hudi.cli.testutils.AbstractShellIntegrationTest; import org.apache.hudi.cli.functional.CLIFunctionalTestHarness;
import org.apache.hudi.common.config.HoodieConfig; import org.apache.hudi.common.config.HoodieConfig;
import org.apache.hudi.common.model.HoodieTableType; import org.apache.hudi.common.model.HoodieTableType;
import org.apache.hudi.common.model.IOType; import org.apache.hudi.common.model.IOType;
@@ -34,10 +35,10 @@ import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.Path;
import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Tag;
import org.junit.jupiter.api.Test; import org.junit.jupiter.api.Test;
import java.io.IOException; import java.io.IOException;
import java.nio.file.Paths;
import static org.apache.hudi.common.testutils.HoodieTestDataGenerator.DEFAULT_FIRST_PARTITION_PATH; import static org.apache.hudi.common.testutils.HoodieTestDataGenerator.DEFAULT_FIRST_PARTITION_PATH;
import static org.apache.hudi.common.testutils.HoodieTestDataGenerator.DEFAULT_PARTITION_PATHS; import static org.apache.hudi.common.testutils.HoodieTestDataGenerator.DEFAULT_PARTITION_PATHS;
@@ -48,14 +49,16 @@ import static org.junit.jupiter.api.Assertions.assertEquals;
/** /**
* Tests {@link UpgradeOrDowngradeCommand}. * Tests {@link UpgradeOrDowngradeCommand}.
*/ */
public class TestUpgradeDowngradeCommand extends AbstractShellIntegrationTest { @Tag("functional")
public class TestUpgradeDowngradeCommand extends CLIFunctionalTestHarness {
private String tablePath; private String tablePath;
private HoodieTableMetaClient metaClient;
@BeforeEach @BeforeEach
public void init() throws Exception { public void init() throws Exception {
String tableName = "test_table"; String tableName = tableName();
tablePath = Paths.get(basePath, tableName).toString(); tablePath = tablePath(tableName);
new TableCommand().createTable( new TableCommand().createTable(
tablePath, tableName, HoodieTableType.COPY_ON_WRITE.name(), tablePath, tableName, HoodieTableType.COPY_ON_WRITE.name(),
"", TimelineLayoutVersion.VERSION_1, "org.apache.hudi.common.model.HoodieAvroPayload"); "", TimelineLayoutVersion.VERSION_1, "org.apache.hudi.common.model.HoodieAvroPayload");
@@ -90,7 +93,7 @@ public class TestUpgradeDowngradeCommand extends AbstractShellIntegrationTest {
assertEquals(1, FileCreateUtils.getTotalMarkerFileCount(tablePath, partitionPath, "101", IOType.MERGE)); assertEquals(1, FileCreateUtils.getTotalMarkerFileCount(tablePath, partitionPath, "101", IOType.MERGE));
} }
SparkMain.upgradeOrDowngradeTable(jsc, tablePath, HoodieTableVersion.ZERO.name()); SparkMain.upgradeOrDowngradeTable(jsc(), tablePath, HoodieTableVersion.ZERO.name());
// verify hoodie.table.version got downgraded // verify hoodie.table.version got downgraded
metaClient = HoodieTableMetaClient.reload(HoodieCLI.getTableMetaClient()); metaClient = HoodieTableMetaClient.reload(HoodieCLI.getTableMetaClient());

View File

@@ -18,9 +18,10 @@
package org.apache.hudi.cli.commands; package org.apache.hudi.cli.commands;
import org.apache.hudi.cli.testutils.AbstractShellIntegrationTest; import org.apache.hudi.cli.functional.CLIFunctionalTestHarness;
import org.apache.hudi.table.HoodieTable; import org.apache.hudi.table.HoodieTable;
import org.junit.jupiter.api.Tag;
import org.junit.jupiter.api.Test; import org.junit.jupiter.api.Test;
import org.springframework.shell.core.CommandResult; import org.springframework.shell.core.CommandResult;
@@ -32,7 +33,8 @@ import static org.junit.jupiter.api.Assertions.assertTrue;
/** /**
* Test class for {@link org.apache.hudi.cli.commands.UtilsCommand}. * Test class for {@link org.apache.hudi.cli.commands.UtilsCommand}.
*/ */
public class TestUtilsCommand extends AbstractShellIntegrationTest { @Tag("functional")
public class TestUtilsCommand extends CLIFunctionalTestHarness {
/** /**
* Test case for success load class. * Test case for success load class.
@@ -40,7 +42,7 @@ public class TestUtilsCommand extends AbstractShellIntegrationTest {
@Test @Test
public void testLoadClass() { public void testLoadClass() {
String name = HoodieTable.class.getName(); String name = HoodieTable.class.getName();
CommandResult cr = getShell().executeCommand(String.format("utils loadClass --class %s", name)); CommandResult cr = shell().executeCommand(String.format("utils loadClass --class %s", name));
assertAll("Command runs success", assertAll("Command runs success",
() -> assertTrue(cr.isSuccess()), () -> assertTrue(cr.isSuccess()),
() -> assertNotNull(cr.getResult().toString()), () -> assertNotNull(cr.getResult().toString()),
@@ -53,7 +55,7 @@ public class TestUtilsCommand extends AbstractShellIntegrationTest {
@Test @Test
public void testLoadClassNotFound() { public void testLoadClassNotFound() {
String name = "test.class.NotFound"; String name = "test.class.NotFound";
CommandResult cr = getShell().executeCommand(String.format("utils loadClass --class %s", name)); CommandResult cr = shell().executeCommand(String.format("utils loadClass --class %s", name));
assertAll("Command runs success", assertAll("Command runs success",
() -> assertTrue(cr.isSuccess()), () -> assertTrue(cr.isSuccess()),
@@ -67,7 +69,7 @@ public class TestUtilsCommand extends AbstractShellIntegrationTest {
@Test @Test
public void testLoadClassNull() { public void testLoadClassNull() {
String name = ""; String name = "";
CommandResult cr = getShell().executeCommand(String.format("utils loadClass --class %s", name)); CommandResult cr = shell().executeCommand(String.format("utils loadClass --class %s", name));
assertAll("Command runs success", assertAll("Command runs success",
() -> assertTrue(cr.isSuccess()), () -> assertTrue(cr.isSuccess()),

View File

@@ -0,0 +1,133 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.hudi.cli.functional;
import org.apache.hudi.client.HoodieReadClient;
import org.apache.hudi.client.SparkRDDWriteClient;
import org.apache.hudi.client.common.HoodieSparkEngineContext;
import org.apache.hudi.testutils.providers.SparkProvider;
import org.apache.hadoop.conf.Configuration;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.SQLContext;
import org.apache.spark.sql.SparkSession;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.io.TempDir;
import org.springframework.shell.Bootstrap;
import org.springframework.shell.core.JLineShellComponent;
import java.nio.file.Paths;
/**
 * Base harness for Hudi CLI functional tests.
 *
 * <p>Lazily boots a local {@link SparkSession} (with Hudi kryo registration and
 * read-client support) plus a Spring Shell {@link JLineShellComponent} before the
 * first test of a class, shares them across all tests via static fields, and tears
 * both down in {@link #cleanUpAfterAll()}.
 *
 * <p>NOTE(review): because the Spark/shell state is static and shared, subclasses
 * are assumed to run sequentially within a JVM — confirm before enabling parallel
 * test execution.
 */
public class CLIFunctionalTestHarness implements SparkProvider {

  // Shared, lazily-initialized test fixtures; transient to keep them out of any
  // accidental serialization of the harness.
  private static transient SparkSession spark;
  private static transient SQLContext sqlContext;
  private static transient JavaSparkContext jsc;
  private static transient HoodieSparkEngineContext context;
  private static transient JLineShellComponent shell;

  /**
   * An indicator of the initialization status.
   */
  protected boolean initialized = false;

  @TempDir
  protected java.nio.file.Path tempDir;

  /**
   * @return absolute path of the per-class temporary directory used as the base path.
   */
  public String basePath() {
    return tempDir.toAbsolutePath().toString();
  }

  @Override
  public SparkSession spark() {
    return spark;
  }

  @Override
  public SQLContext sqlContext() {
    return sqlContext;
  }

  @Override
  public JavaSparkContext jsc() {
    return jsc;
  }

  @Override
  public HoodieSparkEngineContext context() {
    return context;
  }

  /**
   * @return the shared Spring Shell component used to execute CLI commands in tests.
   */
  public JLineShellComponent shell() {
    return shell;
  }

  /**
   * @return a default table name derived from the concrete test class name.
   */
  public String tableName() {
    return tableName("_test_table");
  }

  /**
   * @param suffix appended to the concrete test class's simple name.
   * @return a table name unique to the test class.
   */
  public String tableName(String suffix) {
    return getClass().getSimpleName() + suffix;
  }

  /**
   * @param tableName name of the table.
   * @return filesystem path of the table under {@link #basePath()}.
   */
  public String tablePath(String tableName) {
    return Paths.get(basePath(), tableName).toString();
  }

  /**
   * @return the Hadoop configuration of the shared Spark context.
   */
  public Configuration hadoopConf() {
    return jsc().hadoopConfiguration();
  }

  @BeforeEach
  public void runBeforeEach() {
    // Guard the lazily-initialized static fixtures with the CLASS monitor so the
    // lock matches the one taken by the static cleanUpAfterAll(). The previous
    // instance-level `synchronized` locked a per-instance monitor and therefore
    // did not actually serialize access to these static fields.
    synchronized (CLIFunctionalTestHarness.class) {
      initialized = spark != null && shell != null;
      if (!initialized) {
        SparkConf sparkConf = conf();
        SparkRDDWriteClient.registerClasses(sparkConf);
        HoodieReadClient.addHoodieSupport(sparkConf);
        spark = SparkSession.builder().config(sparkConf).getOrCreate();
        sqlContext = spark.sqlContext();
        jsc = new JavaSparkContext(spark.sparkContext());
        context = new HoodieSparkEngineContext(jsc);
        shell = new Bootstrap().getJLineShellComponent();
      }
    }
  }

  /**
   * Stops the shared Spark session and shell after all tests of the class, so the
   * next test class starts from a clean slate (avoids multiple SparkContexts).
   */
  @AfterAll
  public static synchronized void cleanUpAfterAll() {
    if (spark != null) {
      spark.close();
      spark = null;
    }
    if (shell != null) {
      shell.stop();
      shell = null;
    }
  }

  /**
   * Helper to prepare string for matching.
   *
   * @param str Input string.
   * @return pruned string with non word characters removed.
   */
  protected static String removeNonWordAndStripSpace(String str) {
    // Single pass is equivalent to the former two-step replacement: \s is a
    // subset of \W, and commas produced by a whitespace pass would have been
    // re-collapsed by the \W pass anyway.
    return str.replaceAll("[\\W]+", ",");
  }
}

View File

@@ -0,0 +1,31 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.hudi.cli.functional;
import org.junit.platform.runner.JUnitPlatform;
import org.junit.platform.suite.api.IncludeTags;
import org.junit.platform.suite.api.SelectPackages;
import org.junit.runner.RunWith;
/**
 * Suite that discovers and runs, on the JUnit Platform via the JUnit 4
 * {@link JUnitPlatform} runner, all tests in the
 * {@code org.apache.hudi.cli.commands} package that are tagged "functional".
 * The class body is intentionally empty: the annotations fully describe the suite.
 */
@RunWith(JUnitPlatform.class)
@SelectPackages("org.apache.hudi.cli.commands")
@IncludeTags("functional")
public class CLIFunctionalTestSuite {
}