[HUDI-2394] Implement Kafka Sink Protocol for Hudi for Ingesting Immutable Data (#3592)
- Fixing packaging, naming of classes
- Use of log4j over slf4j for uniformity
- More follow-on fixes
- Added a version to control/coordinator events
- Eliminated the config added to write config
- Fixed fetching of checkpoints based on table type
- Clean up of naming, code placement

Co-authored-by: Rajesh Mahindra <rmahindra@Rajeshs-MacBook-Pro.local>
Co-authored-by: Vinoth Chandar <vinoth@apache.org>
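The commit message above mentions adding a version to the control/coordinator events exchanged by the sink protocol. As an illustration of the idea only (the class and field names below are hypothetical, not the actual Hudi event classes), a versioned event might look like this:

// Hypothetical sketch: a coordinator/control event carrying an explicit
// protocol version so receivers can reject events they do not understand.
public class VersionedControlEventSketch {

  // Assumed convention: bump this constant whenever the event schema changes.
  private static final int CURRENT_VERSION = 1;

  private final int version;
  private final String commitTime;

  public VersionedControlEventSketch(String commitTime) {
    this.version = CURRENT_VERSION;
    this.commitTime = commitTime;
  }

  // A receiver checks the version before acting on the event.
  public boolean isSupported() {
    return version <= CURRENT_VERSION;
  }

  public String getCommitTime() {
    return commitTime;
  }
}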
@@ -30,7 +30,8 @@ public class ConfigGroups {
     FLINK_SQL("Flink Sql Configs"),
     WRITE_CLIENT("Write Client Configs"),
     METRICS("Metrics Configs"),
-    RECORD_PAYLOAD("Record Payload Config");
+    RECORD_PAYLOAD("Record Payload Config"),
+    KAFKA_CONNECT("Kafka Connect Configs");
 
     public final String name;
 
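The hunk above adds a KAFKA_CONNECT constant to the Names enum inside ConfigGroups. The constructor that stores the display-name strings lies outside the hunk; a minimal self-contained sketch of the pattern, assuming the constructor simply assigns the visible name field:

// Sketch of the enum pattern in the hunk above; the constructor is not
// visible in the diff and is assumed to just store the display name.
public enum NamesSketch {
  RECORD_PAYLOAD("Record Payload Config"),
  KAFKA_CONNECT("Kafka Connect Configs");

  public final String name;

  NamesSketch(String name) {
    this.name = name;
  }
}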
@@ -72,6 +73,9 @@ public class ConfigGroups {
         description = "These set of configs are used to enable monitoring and reporting of key"
             + "Hudi stats and metrics.";
         break;
+      case KAFKA_CONNECT:
+        description = "These set of configs are used for Kafka Connect Sink Connector for writing Hudi Tables";
+        break;
       default:
         description = "Please fill in the description for Config Group Name: " + names.name;
         break;
 
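To see how the switch above resolves descriptions for the new group versus one that falls through to the default, here is a simplified, self-contained stand-in (the names below are placeholders for the real ConfigGroups API):

// Simplified stand-in for the description lookup: maps a config group
// to its documentation string, falling back to a fill-me-in placeholder.
public class ConfigGroupDescriptions {

  enum Names {
    METRICS("Metrics Configs"),
    KAFKA_CONNECT("Kafka Connect Configs");

    final String name;

    Names(String name) {
      this.name = name;
    }
  }

  static String getDescription(Names names) {
    switch (names) {
      case KAFKA_CONNECT:
        return "These set of configs are used for Kafka Connect Sink Connector for writing Hudi Tables";
      default:
        return "Please fill in the description for Config Group Name: " + names.name;
    }
  }

  public static void main(String[] args) {
    // Prints the Kafka Connect description, then the placeholder for METRICS.
    System.out.println(getDescription(Names.KAFKA_CONNECT));
    System.out.println(getDescription(Names.METRICS));
  }
}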
@@ -26,6 +26,7 @@ import org.apache.hudi.common.model.WriteOperationType;
 import org.apache.hudi.common.table.timeline.HoodieActiveTimeline;
 import org.apache.hudi.common.table.timeline.HoodieTimeline;
 import org.apache.hudi.exception.HoodieException;
 
+import org.apache.log4j.LogManager;
 import org.apache.log4j.Logger;
 
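The commit message notes standardizing on log4j over slf4j, which is what the added LogManager import supports. A minimal sketch of the resulting logger declaration, with a placeholder class name:

import org.apache.log4j.LogManager;
import org.apache.log4j.Logger;

// Placeholder class showing the log4j 1.x logger idiom the import enables.
public class SinkTaskSketch {

  private static final Logger LOG = LogManager.getLogger(SinkTaskSketch.class);

  public void start() {
    LOG.info("Starting Hudi Kafka Connect sink task");
  }
}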