[HUDI-2596] Make class names consistent in hudi-client (#4680)

Raymond Xu
2022-01-27 17:05:08 -08:00
committed by GitHub
parent 4a9f826382
commit 0bd38f26ca
68 changed files with 216 additions and 175 deletions
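All of the renames in this diff follow one pattern: the Abstract*-prefixed base classes in hudi-client become Base* (AbstractHoodieWriteClient to BaseHoodieWriteClient, AbstractClusteringClient to BaseClusterer, AbstractCompactor to BaseCompactor, the Abstract*Helper classes to Base*Helper, AbstractSparkDeltaCommitActionExecutor to BaseSparkDeltaCommitActionExecutor), and BaseUpgradeDowngradeHelper becomes SupportsUpgradeDowngrade. As a minimal sketch of what the rename means for code that extends these classes, the hypothetical service below mirrors the SparkAsyncCompactService hunk further down; MyAsyncCompactService itself is illustrative and not part of this commit, while the imports and signatures are taken verbatim from that hunk.

// Hypothetical downstream extension, shown only to illustrate the rename; it mirrors
// SparkAsyncCompactService from this commit and is not itself part of the change set.
package org.apache.hudi.async;

import org.apache.hudi.client.BaseCompactor;          // was org.apache.hudi.client.AbstractCompactor
import org.apache.hudi.client.BaseHoodieWriteClient;  // was org.apache.hudi.client.AbstractHoodieWriteClient
import org.apache.hudi.client.HoodieSparkCompactor;
import org.apache.hudi.common.engine.HoodieEngineContext;

public class MyAsyncCompactService extends AsyncCompactService {

  public MyAsyncCompactService(HoodieEngineContext context, BaseHoodieWriteClient client) {
    super(context, client);
  }

  @Override
  protected BaseCompactor createCompactor(BaseHoodieWriteClient client) {
    // Delegate to the Spark compactor, exactly as SparkAsyncCompactService does;
    // "context" is the engine context field inherited from the async service base class.
    return new HoodieSparkCompactor(client, this.context);
  }
}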

View File

@@ -19,8 +19,8 @@
 package org.apache.hudi.async;
-import org.apache.hudi.client.AbstractClusteringClient;
-import org.apache.hudi.client.AbstractHoodieWriteClient;
+import org.apache.hudi.client.BaseClusterer;
+import org.apache.hudi.client.BaseHoodieWriteClient;
 import org.apache.hudi.client.HoodieSparkClusteringClient;
 /**
@@ -28,12 +28,12 @@ import org.apache.hudi.client.HoodieSparkClusteringClient;
  */
 public class SparkAsyncClusteringService extends AsyncClusteringService {
-  public SparkAsyncClusteringService(AbstractHoodieWriteClient writeClient) {
+  public SparkAsyncClusteringService(BaseHoodieWriteClient writeClient) {
     super(writeClient);
   }
   @Override
-  protected AbstractClusteringClient createClusteringClient(AbstractHoodieWriteClient client) {
+  protected BaseClusterer createClusteringClient(BaseHoodieWriteClient client) {
     return new HoodieSparkClusteringClient(client);
   }
 }

View File

@@ -18,19 +18,19 @@
 package org.apache.hudi.async;
-import org.apache.hudi.client.AbstractCompactor;
-import org.apache.hudi.client.AbstractHoodieWriteClient;
+import org.apache.hudi.client.BaseCompactor;
+import org.apache.hudi.client.BaseHoodieWriteClient;
 import org.apache.hudi.client.HoodieSparkCompactor;
 import org.apache.hudi.common.engine.HoodieEngineContext;
 public class SparkAsyncCompactService extends AsyncCompactService {
-  public SparkAsyncCompactService(HoodieEngineContext context, AbstractHoodieWriteClient client) {
+  public SparkAsyncCompactService(HoodieEngineContext context, BaseHoodieWriteClient client) {
     super(context, client);
   }
   @Override
-  protected AbstractCompactor createCompactor(AbstractHoodieWriteClient client) {
+  protected BaseCompactor createCompactor(BaseHoodieWriteClient client) {
     return new HoodieSparkCompactor(client, this.context);
   }
 }

View File

@@ -38,12 +38,12 @@ import java.util.stream.Stream;
  * Async clustering client for Spark datasource.
  */
 public class HoodieSparkClusteringClient<T extends HoodieRecordPayload> extends
-    AbstractClusteringClient<T, JavaRDD<HoodieRecord<T>>, JavaRDD<HoodieKey>, JavaRDD<WriteStatus>> {
+    BaseClusterer<T, JavaRDD<HoodieRecord<T>>, JavaRDD<HoodieKey>, JavaRDD<WriteStatus>> {
   private static final Logger LOG = LogManager.getLogger(HoodieSparkClusteringClient.class);
   public HoodieSparkClusteringClient(
-      AbstractHoodieWriteClient<T, JavaRDD<HoodieRecord<T>>, JavaRDD<HoodieKey>, JavaRDD<WriteStatus>> clusteringClient) {
+      BaseHoodieWriteClient<T, JavaRDD<HoodieRecord<T>>, JavaRDD<HoodieKey>, JavaRDD<WriteStatus>> clusteringClient) {
     super(clusteringClient);
   }

View File

@@ -31,12 +31,12 @@ import org.apache.spark.api.java.JavaRDD;
 import java.io.IOException;
-public class HoodieSparkCompactor<T extends HoodieRecordPayload> extends AbstractCompactor<T,
+public class HoodieSparkCompactor<T extends HoodieRecordPayload> extends BaseCompactor<T,
     JavaRDD<HoodieRecord<T>>, JavaRDD<HoodieKey>, JavaRDD<WriteStatus>> {
   private static final Logger LOG = LogManager.getLogger(HoodieSparkCompactor.class);
   private transient HoodieEngineContext context;
-  public HoodieSparkCompactor(AbstractHoodieWriteClient<T, JavaRDD<HoodieRecord<T>>, JavaRDD<HoodieKey>, JavaRDD<WriteStatus>> compactionClient,
+  public HoodieSparkCompactor(BaseHoodieWriteClient<T, JavaRDD<HoodieRecord<T>>, JavaRDD<HoodieKey>, JavaRDD<WriteStatus>> compactionClient,
                               HoodieEngineContext context) {
     super(compactionClient);
     this.context = context;

View File

@@ -74,7 +74,7 @@ import java.util.stream.Collectors;
 @SuppressWarnings("checkstyle:LineLength")
 public class SparkRDDWriteClient<T extends HoodieRecordPayload> extends
-    AbstractHoodieWriteClient<T, JavaRDD<HoodieRecord<T>>, JavaRDD<HoodieKey>, JavaRDD<WriteStatus>> {
+    BaseHoodieWriteClient<T, JavaRDD<HoodieRecord<T>>, JavaRDD<HoodieKey>, JavaRDD<WriteStatus>> {
   private static final Logger LOG = LogManager.getLogger(SparkRDDWriteClient.class);

View File

@@ -41,12 +41,12 @@ import java.util.stream.Collectors;
 import java.util.stream.IntStream;
 /**
- * A spark implementation of {@link AbstractBulkInsertHelper}.
+ * A spark implementation of {@link BaseBulkInsertHelper}.
  *
  * @param <T>
  */
 @SuppressWarnings("checkstyle:LineLength")
-public class SparkBulkInsertHelper<T extends HoodieRecordPayload, R> extends AbstractBulkInsertHelper<T, JavaRDD<HoodieRecord<T>>,
+public class SparkBulkInsertHelper<T extends HoodieRecordPayload, R> extends BaseBulkInsertHelper<T, JavaRDD<HoodieRecord<T>>,
     JavaRDD<HoodieKey>, JavaRDD<WriteStatus>, R> {
   private SparkBulkInsertHelper() {

View File

@@ -42,13 +42,13 @@ import java.time.Instant;
 import java.util.HashMap;
 /**
- * A spark implementation of {@link AbstractDeleteHelper}.
+ * A spark implementation of {@link BaseDeleteHelper}.
  *
  * @param <T>
 */
 @SuppressWarnings("checkstyle:LineLength")
 public class SparkDeleteHelper<T extends HoodieRecordPayload,R> extends
-    AbstractDeleteHelper<T, JavaRDD<HoodieRecord<T>>, JavaRDD<HoodieKey>, JavaRDD<WriteStatus>, R> {
+    BaseDeleteHelper<T, JavaRDD<HoodieRecord<T>>, JavaRDD<HoodieKey>, JavaRDD<WriteStatus>, R> {
   private SparkDeleteHelper() {
   }

View File

@@ -43,7 +43,7 @@ import org.apache.spark.api.java.JavaRDD;
 import java.io.IOException;
 import java.util.Iterator;
-public class SparkMergeHelper<T extends HoodieRecordPayload> extends AbstractMergeHelper<T, JavaRDD<HoodieRecord<T>>,
+public class SparkMergeHelper<T extends HoodieRecordPayload> extends BaseMergeHelper<T, JavaRDD<HoodieRecord<T>>,
     JavaRDD<HoodieKey>, JavaRDD<WriteStatus>> {
   private SparkMergeHelper() {

View File

@@ -32,11 +32,11 @@ import org.apache.spark.api.java.JavaRDD;
 import scala.Tuple2;
 /**
- * A spark implementation of {@link AbstractWriteHelper}.
+ * A spark implementation of {@link BaseWriteHelper}.
  *
  * @param <T>
 */
-public class SparkWriteHelper<T extends HoodieRecordPayload,R> extends AbstractWriteHelper<T, JavaRDD<HoodieRecord<T>>,
+public class SparkWriteHelper<T extends HoodieRecordPayload,R> extends BaseWriteHelper<T, JavaRDD<HoodieRecord<T>>,
     JavaRDD<HoodieKey>, JavaRDD<WriteStatus>, R> {
   private SparkWriteHelper() {
   }

View File

@@ -43,19 +43,19 @@ import java.util.Collections;
 import java.util.Iterator;
 import java.util.List;
-public abstract class AbstractSparkDeltaCommitActionExecutor<T extends HoodieRecordPayload<T>>
+public abstract class BaseSparkDeltaCommitActionExecutor<T extends HoodieRecordPayload<T>>
     extends BaseSparkCommitActionExecutor<T> {
-  private static final Logger LOG = LogManager.getLogger(AbstractSparkDeltaCommitActionExecutor.class);
+  private static final Logger LOG = LogManager.getLogger(BaseSparkDeltaCommitActionExecutor.class);
   // UpsertPartitioner for MergeOnRead table type
   private SparkUpsertDeltaCommitPartitioner mergeOnReadUpsertPartitioner;
-  public AbstractSparkDeltaCommitActionExecutor(HoodieSparkEngineContext context, HoodieWriteConfig config, HoodieTable table,
+  public BaseSparkDeltaCommitActionExecutor(HoodieSparkEngineContext context, HoodieWriteConfig config, HoodieTable table,
                                             String instantTime, WriteOperationType operationType) {
     this(context, config, table, instantTime, operationType, Option.empty());
   }
-  public AbstractSparkDeltaCommitActionExecutor(HoodieSparkEngineContext context, HoodieWriteConfig config, HoodieTable table,
+  public BaseSparkDeltaCommitActionExecutor(HoodieSparkEngineContext context, HoodieWriteConfig config, HoodieTable table,
                                             String instantTime, WriteOperationType operationType,
                                             Option<Map<String, String>> extraMetadata) {
     super(context, config, table, instantTime, operationType, extraMetadata);
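
The executor rename above ripples through every delta-commit executor in the hunks that follow. As a minimal sketch of the same update for code outside the Hudi tree, the hypothetical abstract executor below chains its constructor to the renamed BaseSparkDeltaCommitActionExecutor; MyDeltaCommitActionExecutor is illustrative only, and every type it references appears elsewhere in this diff.

// Hypothetical abstract specialization, not part of this commit; it only shows a subclass
// constructor chaining to the renamed BaseSparkDeltaCommitActionExecutor.
package org.apache.hudi.table.action.deltacommit;

import org.apache.hudi.client.common.HoodieSparkEngineContext;
import org.apache.hudi.common.model.HoodieRecordPayload;
import org.apache.hudi.common.model.WriteOperationType;
import org.apache.hudi.config.HoodieWriteConfig;
import org.apache.hudi.table.HoodieTable;

public abstract class MyDeltaCommitActionExecutor<T extends HoodieRecordPayload<T>>
    extends BaseSparkDeltaCommitActionExecutor<T> {

  public MyDeltaCommitActionExecutor(HoodieSparkEngineContext context, HoodieWriteConfig config,
                                     HoodieTable table, String instantTime, WriteOperationType operationType) {
    // Same argument list as the shorter constructor shown in the hunk above; the class is kept
    // abstract so the sketch does not have to restate the base class's abstract methods.
    super(context, config, table, instantTime, operationType);
  }
}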

View File

@@ -18,8 +18,6 @@
 package org.apache.hudi.table.action.deltacommit;
-import java.util.Map;
 import org.apache.hudi.client.WriteStatus;
 import org.apache.hudi.client.common.HoodieSparkEngineContext;
 import org.apache.hudi.common.model.HoodieRecord;
@@ -28,15 +26,17 @@ import org.apache.hudi.common.model.WriteOperationType;
 import org.apache.hudi.common.util.Option;
 import org.apache.hudi.config.HoodieWriteConfig;
 import org.apache.hudi.exception.HoodieInsertException;
-import org.apache.hudi.table.HoodieTable;
 import org.apache.hudi.table.BulkInsertPartitioner;
+import org.apache.hudi.table.HoodieTable;
 import org.apache.hudi.table.action.HoodieWriteMetadata;
 import org.apache.hudi.table.action.commit.SparkBulkInsertHelper;
 import org.apache.spark.api.java.JavaRDD;
+import java.util.Map;
 public class SparkBulkInsertDeltaCommitActionExecutor<T extends HoodieRecordPayload<T>>
-    extends AbstractSparkDeltaCommitActionExecutor<T> {
+    extends BaseSparkDeltaCommitActionExecutor<T> {
   private final JavaRDD<HoodieRecord<T>> inputRecordsRDD;
   private final Option<BulkInsertPartitioner<T>> bulkInsertPartitioner;

View File

@@ -26,15 +26,15 @@ import org.apache.hudi.common.model.WriteOperationType;
 import org.apache.hudi.common.util.Option;
 import org.apache.hudi.config.HoodieWriteConfig;
 import org.apache.hudi.exception.HoodieInsertException;
-import org.apache.hudi.table.HoodieTable;
 import org.apache.hudi.table.BulkInsertPartitioner;
+import org.apache.hudi.table.HoodieTable;
 import org.apache.hudi.table.action.HoodieWriteMetadata;
 import org.apache.hudi.table.action.commit.SparkBulkInsertHelper;
 import org.apache.spark.api.java.JavaRDD;
 public class SparkBulkInsertPreppedDeltaCommitActionExecutor<T extends HoodieRecordPayload<T>>
-    extends AbstractSparkDeltaCommitActionExecutor<T> {
+    extends BaseSparkDeltaCommitActionExecutor<T> {
   private final JavaRDD<HoodieRecord<T>> preppedInputRecordRdd;
   private final Option<BulkInsertPartitioner<T>> bulkInsertPartitioner;
@@ -61,4 +61,4 @@ public class SparkBulkInsertPreppedDeltaCommitActionExecutor<T extends HoodieRec
     }
   }
-}
+}

View File

@@ -25,13 +25,13 @@ import org.apache.hudi.common.model.HoodieRecordPayload;
 import org.apache.hudi.common.model.WriteOperationType;
 import org.apache.hudi.config.HoodieWriteConfig;
 import org.apache.hudi.table.HoodieTable;
 import org.apache.hudi.table.action.HoodieWriteMetadata;
 import org.apache.hudi.table.action.commit.SparkDeleteHelper;
 import org.apache.spark.api.java.JavaRDD;
 public class SparkDeleteDeltaCommitActionExecutor<T extends HoodieRecordPayload<T>>
-    extends AbstractSparkDeltaCommitActionExecutor<T> {
+    extends BaseSparkDeltaCommitActionExecutor<T> {
   private final JavaRDD<HoodieKey> keys;

View File

@@ -25,13 +25,13 @@ import org.apache.hudi.common.model.HoodieRecordPayload;
 import org.apache.hudi.common.model.WriteOperationType;
 import org.apache.hudi.config.HoodieWriteConfig;
 import org.apache.hudi.table.HoodieTable;
 import org.apache.hudi.table.action.HoodieWriteMetadata;
 import org.apache.hudi.table.action.commit.SparkWriteHelper;
 import org.apache.spark.api.java.JavaRDD;
 public class SparkInsertDeltaCommitActionExecutor<T extends HoodieRecordPayload<T>>
-    extends AbstractSparkDeltaCommitActionExecutor<T> {
+    extends BaseSparkDeltaCommitActionExecutor<T> {
   private final JavaRDD<HoodieRecord<T>> inputRecordsRDD;

View File

@@ -26,10 +26,11 @@ import org.apache.hudi.common.model.WriteOperationType;
 import org.apache.hudi.config.HoodieWriteConfig;
 import org.apache.hudi.table.HoodieTable;
 import org.apache.hudi.table.action.HoodieWriteMetadata;
 import org.apache.spark.api.java.JavaRDD;
 public class SparkInsertPreppedDeltaCommitActionExecutor<T extends HoodieRecordPayload<T>>
-    extends AbstractSparkDeltaCommitActionExecutor<T> {
+    extends BaseSparkDeltaCommitActionExecutor<T> {
   private final JavaRDD<HoodieRecord<T>> preppedRecords;

View File

@@ -24,13 +24,13 @@ import org.apache.hudi.common.model.HoodieRecordPayload;
 import org.apache.hudi.common.model.WriteOperationType;
 import org.apache.hudi.config.HoodieWriteConfig;
 import org.apache.hudi.table.HoodieTable;
 import org.apache.hudi.table.action.HoodieWriteMetadata;
 import org.apache.hudi.table.action.commit.SparkWriteHelper;
 import org.apache.spark.api.java.JavaRDD;
 public class SparkUpsertDeltaCommitActionExecutor<T extends HoodieRecordPayload<T>>
-    extends AbstractSparkDeltaCommitActionExecutor<T> {
+    extends BaseSparkDeltaCommitActionExecutor<T> {
   private JavaRDD<HoodieRecord<T>> inputRecordsRDD;

View File

@@ -26,10 +26,11 @@ import org.apache.hudi.common.model.WriteOperationType;
 import org.apache.hudi.config.HoodieWriteConfig;
 import org.apache.hudi.table.HoodieTable;
 import org.apache.hudi.table.action.HoodieWriteMetadata;
 import org.apache.spark.api.java.JavaRDD;
 public class SparkUpsertPreppedDeltaCommitActionExecutor<T extends HoodieRecordPayload<T>>
-    extends AbstractSparkDeltaCommitActionExecutor<T> {
+    extends BaseSparkDeltaCommitActionExecutor<T> {
   private final JavaRDD<HoodieRecord<T>> preppedRecords;

View File

@@ -28,7 +28,7 @@ import org.apache.hudi.table.HoodieTable;
 /**
  * Spark upgrade and downgrade helper.
  */
-public class SparkUpgradeDowngradeHelper implements BaseUpgradeDowngradeHelper {
+public class SparkUpgradeDowngradeHelper implements SupportsUpgradeDowngrade {
   private static final SparkUpgradeDowngradeHelper SINGLETON_INSTANCE =
       new SparkUpgradeDowngradeHelper();