1
0

[HUDI-1019] Clean refresh command in CLI (#1725)

This commit is contained in:
hongdd
2020-06-14 14:30:28 +08:00
committed by GitHub
parent a7fd331624
commit fcabc8fbca
5 changed files with 66 additions and 22 deletions

View File

@@ -84,12 +84,6 @@ public class CleansCommand implements CommandMarker {
return HoodiePrintHelper.print(header, new HashMap<>(), sortByField, descending, limit, headerOnly, rows);
}
@CliCommand(value = "cleans refresh", help = "Refresh the commits")
public String refreshCleans() {
HoodieCLI.refreshTableMetadata();
return "Metadata for table " + HoodieCLI.getTableMetaClient().getTableConfig().getTableName() + " refreshed.";
}
@CliCommand(value = "clean showpartitions", help = "Show partition level details of a clean")
public String showCleanPartitions(@CliOption(key = {"clean"}, help = "clean to show") final String instantTime,
@CliOption(key = {"limit"}, help = "Limit commits", unspecifiedDefaultValue = "-1") final Integer limit,

View File

@@ -214,12 +214,6 @@ public class CommitsCommand implements CommandMarker {
}
}
@CliCommand(value = "commits refresh", help = "Refresh the commits")
public String refreshCommits() throws IOException {
HoodieCLI.refreshTableMetadata();
return "Metadata for table " + HoodieCLI.getTableMetaClient().getTableConfig().getTableName() + " refreshed.";
}
@CliCommand(value = "commit rollback", help = "Rollback a commit")
public String rollbackCommit(@CliOption(key = {"commit"}, help = "Commit to rollback") final String instantTime,
@CliOption(key = {"sparkProperties"}, help = "Spark Properties File Path") final String sparkPropertiesPath)
@@ -238,7 +232,7 @@ public class CommitsCommand implements CommandMarker {
InputStreamConsumer.captureOutput(process);
int exitCode = process.waitFor();
// Refresh the current
refreshCommits();
HoodieCLI.refreshTableMetadata();
if (exitCode != 0) {
return "Commit " + instantTime + " failed to roll back";
}

View File

@@ -89,7 +89,7 @@ public class SavepointsCommand implements CommandMarker {
InputStreamConsumer.captureOutput(process);
int exitCode = process.waitFor();
// Refresh the current
refreshMetaClient();
HoodieCLI.refreshTableMetadata();
if (exitCode != 0) {
return String.format("Failed: Could not create savepoint \"%s\".", commitTime);
}
@@ -123,19 +123,13 @@ public class SavepointsCommand implements CommandMarker {
InputStreamConsumer.captureOutput(process);
int exitCode = process.waitFor();
// Refresh the current
refreshMetaClient();
HoodieCLI.refreshTableMetadata();
if (exitCode != 0) {
return String.format("Savepoint \"%s\" failed to roll back", instantTime);
}
return String.format("Savepoint \"%s\" rolled back", instantTime);
}
@CliCommand(value = "savepoints refresh", help = "Refresh the savepoints")
public String refreshMetaClient() {
HoodieCLI.refreshTableMetadata();
return "Metadata for table " + HoodieCLI.getTableMetaClient().getTableConfig().getTableName() + " refreshed.";
}
@CliCommand(value = "savepoint delete", help = "Delete the savepoint")
public String deleteSavepoint(@CliOption(key = {"commit"}, help = "Delete a savepoint") final String instantTime,
@CliOption(key = {"sparkProperties"}, help = "Spark Properties File Path") final String sparkPropertiesPath,
@@ -161,7 +155,7 @@ public class SavepointsCommand implements CommandMarker {
InputStreamConsumer.captureOutput(process);
int exitCode = process.waitFor();
// Refresh the current
refreshMetaClient();
HoodieCLI.refreshTableMetadata();
if (exitCode != 0) {
return String.format("Failed: Could not delete savepoint \"%s\".", instantTime);
}

View File

@@ -131,4 +131,14 @@ public class TableCommand implements CommandMarker {
});
return HoodiePrintHelper.print(header, new HashMap<>(), "", false, -1, false, rows);
}
/**
* Refresh table metadata.
*/
@CliCommand(value = {"refresh", "metadata refresh", "commits refresh", "cleans refresh", "savepoints refresh"},
help = "Refresh table metadata")
public String refreshMetadata() {
HoodieCLI.refreshTableMetadata();
return "Metadata for table " + HoodieCLI.getTableMetaClient().getTableConfig().getTableName() + " refreshed.";
}
}

View File

@@ -18,17 +18,24 @@
package org.apache.hudi.cli.commands;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hudi.cli.HoodieCLI;
import org.apache.hudi.cli.testutils.AbstractShellIntegrationTest;
import org.apache.hudi.common.fs.ConsistencyGuardConfig;
import org.apache.hudi.common.model.HoodieTableType;
import org.apache.hudi.common.table.HoodieTableMetaClient;
import org.apache.hudi.common.table.timeline.HoodieTimeline;
import org.apache.hudi.testutils.HoodieTestDataGenerator;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.springframework.shell.core.CommandResult;
import java.io.File;
import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import static org.apache.hudi.common.table.HoodieTableMetaClient.METAFOLDER_NAME;
import static org.junit.jupiter.api.Assertions.assertEquals;
@@ -140,4 +147,49 @@ public class TestTableCommand extends AbstractShellIntegrationTest {
assertTrue(cr.getResult().toString().contains(metaPath));
assertTrue(cr.getResult().toString().contains("COPY_ON_WRITE"));
}
/**
* Test case of command 'refresh'.
*/
@Test
public void testRefresh() throws IOException {
List<String> refreshCommands = Arrays.asList("refresh", "metadata refresh",
"commits refresh", "cleans refresh", "savepoints refresh");
for (String command: refreshCommands) {
testRefreshCommand(command);
}
}
private void testRefreshCommand(String command) throws IOException {
// clean table metadata
FileSystem fs = FileSystem.get(jsc.hadoopConfiguration());
fs.delete(new Path(tablePath + File.separator + HoodieTableMetaClient.METAFOLDER_NAME), true);
// Create table
assertTrue(prepareTable());
HoodieTimeline timeline =
HoodieCLI.getTableMetaClient().getActiveTimeline().getCommitTimeline().filterCompletedInstants();
assertEquals(0, timeline.countInstants(), "There should have no instant at first");
// generate four savepoints
for (int i = 100; i < 104; i++) {
String instantTime = String.valueOf(i);
HoodieTestDataGenerator.createCommitFile(tablePath, instantTime, jsc.hadoopConfiguration());
}
// Before refresh, no instant
timeline =
HoodieCLI.getTableMetaClient().getActiveTimeline().getCommitTimeline().filterCompletedInstants();
assertEquals(0, timeline.countInstants(), "there should have no instant");
CommandResult cr = getShell().executeCommand(command);
assertTrue(cr.isSuccess());
timeline =
HoodieCLI.getTableMetaClient().getActiveTimeline().getCommitTimeline().filterCompletedInstants();
// After refresh, there are 4 instants
assertEquals(4, timeline.countInstants(), "there should have 4 instants");
}
}