1
0

[MINOR] Fix typos: 'Kakfa' corrected to 'Kafka' & 'parquest' corrected to 'parquet' (#3717)

This commit is contained in:
董可伦
2021-09-26 21:53:39 +08:00
committed by GitHub
parent 7e887b54d7
commit 36be287121
3 changed files with 4 additions and 4 deletions

View File

@@ -74,7 +74,7 @@ public class ConfigGroups {
+ "Hudi stats and metrics."; + "Hudi stats and metrics.";
break; break;
case KAFKA_CONNECT: case KAFKA_CONNECT:
description = "These set of configs are used for Kakfa Connect Sink Connector for writing Hudi Tables"; description = "These set of configs are used for Kafka Connect Sink Connector for writing Hudi Tables";
break; break;
default: default:
description = "Please fill in the description for Config Group Name: " + names.name; description = "Please fill in the description for Config Group Name: " + names.name;

View File

@@ -36,7 +36,7 @@ import java.util.Properties;
@Immutable @Immutable
@ConfigClassProperty(name = "Kafka Sink Connect Configurations", @ConfigClassProperty(name = "Kafka Sink Connect Configurations",
groupName = ConfigGroups.Names.KAFKA_CONNECT, groupName = ConfigGroups.Names.KAFKA_CONNECT,
description = "Configurations for Kakfa Connect Sink Connector for Hudi.") description = "Configurations for Kafka Connect Sink Connector for Hudi.")
public class KafkaConnectConfigs extends HoodieConfig { public class KafkaConnectConfigs extends HoodieConfig {
public static final String KAFKA_VALUE_CONVERTER = "value.converter"; public static final String KAFKA_VALUE_CONVERTER = "value.converter";

View File

@@ -1514,8 +1514,8 @@ public class TestHoodieDeltaStreamer extends TestHoodieDeltaStreamerBase {
prepareParquetDFSSource(true, false, "source_uber.avsc", "target_uber.avsc", PROPS_FILENAME_TEST_PARQUET, prepareParquetDFSSource(true, false, "source_uber.avsc", "target_uber.avsc", PROPS_FILENAME_TEST_PARQUET,
PARQUET_SOURCE_ROOT, false); PARQUET_SOURCE_ROOT, false);
// delta streamer w/ parquest source // delta streamer w/ parquet source
String tableBasePath = dfsBasePath + "/test_dfs_to_kakfa" + testNum; String tableBasePath = dfsBasePath + "/test_dfs_to_kafka" + testNum;
HoodieDeltaStreamer deltaStreamer = new HoodieDeltaStreamer( HoodieDeltaStreamer deltaStreamer = new HoodieDeltaStreamer(
TestHelpers.makeConfig(tableBasePath, WriteOperationType.INSERT, ParquetDFSSource.class.getName(), TestHelpers.makeConfig(tableBasePath, WriteOperationType.INSERT, ParquetDFSSource.class.getName(),
Collections.EMPTY_LIST, PROPS_FILENAME_TEST_PARQUET, false, Collections.EMPTY_LIST, PROPS_FILENAME_TEST_PARQUET, false,