[HUDI-2394] Implement Kafka Sink Protocol for Hudi for Ingesting Immutable Data (#3592)
- Fixing packaging, naming of classes - Use of log4j over slf4j for uniformity - More follow-on fixes - Added a version to control/coordinator events. - Eliminated the config added to write config - Fixed fetching of checkpoints based on table type - Clean up of naming, code placement Co-authored-by: Rajesh Mahindra <rmahindra@Rajeshs-MacBook-Pro.local> Co-authored-by: Vinoth Chandar <vinoth@apache.org>
This commit is contained in:
19
hudi-kafka-connect/configs/config-sink.json
Normal file
19
hudi-kafka-connect/configs/config-sink.json
Normal file
@@ -0,0 +1,19 @@
{
    "name": "hudi-sink",
    "config": {
        "bootstrap.servers": "localhost:9092",
        "connector.class": "org.apache.hudi.connect.HoodieSinkConnector",
        "tasks.max": "4",
        "key.converter": "org.apache.kafka.connect.storage.StringConverter",
        "value.converter": "org.apache.kafka.connect.storage.StringConverter",
        "value.converter.schemas.enable": "false",
        "topics": "hudi-test-topic",
        "hoodie.table.name": "hudi-test-topic",
        "hoodie.base.path": "file:///tmp/hoodie/sample-table",
        "hoodie.datasource.write.recordkey.field": "volume",
        "hoodie.datasource.write.partitionpath.field": "year",
        "hoodie.schemaprovider.class": "org.apache.hudi.schema.FilebasedSchemaProvider",
        "hoodie.deltastreamer.schemaprovider.source.schema.file": "file:///tmp/hoodie/schema.avsc",
        "hoodie.deltastreamer.schemaprovider.target.schema.file": "file:///tmp/hoodie/schema.avsc"
    }
}
Reference in New Issue
Block a user