[HUDI-1315] Adding builder for HoodieTableMetaClient initialization (#2534)
Commit c9fcf964b2 (parent 0d91c451b0), committed via GitHub.
@@ -85,8 +85,8 @@ public class HoodieCLI {
   }

   public static void refreshTableMetadata() {
-    setTableMetaClient(new HoodieTableMetaClient(HoodieCLI.conf, basePath, false, HoodieCLI.consistencyGuardConfig,
-        Option.of(layoutVersion)));
+    setTableMetaClient(HoodieTableMetaClient.builder().setConf(HoodieCLI.conf).setBasePath(basePath).setLoadActiveTimelineOnLoad(false).setConsistencyGuardConfig(HoodieCLI.consistencyGuardConfig)
+        .setLayoutVersion(Option.of(layoutVersion)).build());
   }

   public static void connectTo(String basePath, Integer layoutVersion) {
@@ -401,7 +401,7 @@ public class CommitsCommand implements CommandMarker {
   public String compareCommits(@CliOption(key = {"path"}, help = "Path of the table to compare to") final String path) {

     HoodieTableMetaClient source = HoodieCLI.getTableMetaClient();
-    HoodieTableMetaClient target = new HoodieTableMetaClient(HoodieCLI.conf, path);
+    HoodieTableMetaClient target = HoodieTableMetaClient.builder().setConf(HoodieCLI.conf).setBasePath(path).build();
     HoodieTimeline targetTimeline = target.getActiveTimeline().getCommitsTimeline().filterCompletedInstants();
     HoodieTimeline sourceTimeline = source.getActiveTimeline().getCommitsTimeline().filterCompletedInstants();
     String targetLatestCommit =
@@ -426,7 +426,7 @@ public class CommitsCommand implements CommandMarker {

   @CliCommand(value = "commits sync", help = "Compare commits with another Hoodie table")
   public String syncCommits(@CliOption(key = {"path"}, help = "Path of the table to compare to") final String path) {
-    HoodieCLI.syncTableMetadata = new HoodieTableMetaClient(HoodieCLI.conf, path);
+    HoodieCLI.syncTableMetadata = HoodieTableMetaClient.builder().setConf(HoodieCLI.conf).setBasePath(path).build();
     HoodieCLI.state = HoodieCLI.CLIState.SYNC;
     return "Load sync state between " + HoodieCLI.getTableMetaClient().getTableConfig().getTableName() + " and "
         + HoodieCLI.syncTableMetadata.getTableConfig().getTableName();
@@ -237,7 +237,7 @@ public class FileSystemViewCommand implements CommandMarker {
       boolean includeMaxInstant, boolean includeInflight, boolean excludeCompaction) throws IOException {
     HoodieTableMetaClient client = HoodieCLI.getTableMetaClient();
     HoodieTableMetaClient metaClient =
-        new HoodieTableMetaClient(client.getHadoopConf(), client.getBasePath(), true);
+        HoodieTableMetaClient.builder().setConf(client.getHadoopConf()).setBasePath(client.getBasePath()).setLoadActiveTimelineOnLoad(true).build();
     FileSystem fs = HoodieCLI.fs;
     String globPath = String.format("%s/%s/*", client.getBasePath(), globRegex);
     List<FileStatus> statuses = FSUtils.getGlobStatusExcludingMetaFolder(fs, new Path(globPath));
@@ -402,8 +402,10 @@ public class SparkMain {
    */
   protected static int upgradeOrDowngradeTable(JavaSparkContext jsc, String basePath, String toVersion) {
     HoodieWriteConfig config = getWriteConfig(basePath);
-    HoodieTableMetaClient metaClient = new HoodieTableMetaClient(jsc.hadoopConfiguration(), config.getBasePath(), false,
-        config.getConsistencyGuardConfig(), Option.of(new TimelineLayoutVersion(config.getTimelineLayoutVersion())));
+    HoodieTableMetaClient metaClient =
+        HoodieTableMetaClient.builder().setConf(jsc.hadoopConfiguration()).setBasePath(config.getBasePath())
+            .setLoadActiveTimelineOnLoad(false).setConsistencyGuardConfig(config.getConsistencyGuardConfig())
+            .setLayoutVersion(Option.of(new TimelineLayoutVersion(config.getTimelineLayoutVersion()))).build();
     try {
       new SparkUpgradeDowngrade(metaClient, config, new HoodieSparkEngineContext(jsc)).run(metaClient, HoodieTableVersion.valueOf(toVersion), config, new HoodieSparkEngineContext(jsc), null);
       LOG.info(String.format("Table at \"%s\" upgraded / downgraded to version \"%s\".", basePath, toVersion));
@@ -95,7 +95,7 @@ public class TableCommand implements CommandMarker {

     boolean existing = false;
     try {
-      new HoodieTableMetaClient(HoodieCLI.conf, path);
+      HoodieTableMetaClient.builder().setConf(HoodieCLI.conf).setBasePath(path).build();
       existing = true;
     } catch (TableNotFoundException dfe) {
       // expected
@@ -75,7 +75,7 @@ class DedupeSparkJob(basePath: String,
     val tmpTableName = s"htbl_${System.currentTimeMillis()}"
     val dedupeTblName = s"${tmpTableName}_dupeKeys"

-    val metadata = new HoodieTableMetaClient(fs.getConf, basePath)
+    val metadata = HoodieTableMetaClient.builder().setConf(fs.getConf).setBasePath(basePath).build()

     val allFiles = fs.listStatus(new org.apache.hadoop.fs.Path(s"$basePath/$duplicatedPartitionPath"))
     val fsView = new HoodieTableFileSystemView(metadata, metadata.getActiveTimeline.getCommitTimeline.filterCompletedInstants(), allFiles)
@@ -184,7 +184,7 @@ class DedupeSparkJob(basePath: String,
   }

   def fixDuplicates(dryRun: Boolean = true) = {
-    val metadata = new HoodieTableMetaClient(fs.getConf, basePath)
+    val metadata = HoodieTableMetaClient.builder().setConf(fs.getConf).setBasePath(basePath).build()

     val allFiles = fs.listStatus(new Path(s"$basePath/$duplicatedPartitionPath"))
     val fsView = new HoodieTableFileSystemView(metadata, metadata.getActiveTimeline.getCommitTimeline.filterCompletedInstants(), allFiles)
Reference in New Issue
Block a user