* trigger rebuild * [HUDI-1156] Remove unused dependencies from HoodieDeltaStreamerWrapper Class (#1927) * Adding support for validating records and long running tests in test suite framework * Adding partial validate node * Fixing spark session initiation in Validate nodes * Fixing validation * Adding hive table validation to ValidateDatasetNode * Rebasing with latest commits from master * Addressing feedback * Addressing comments Co-authored-by: lamber-ken <lamberken@163.com> Co-authored-by: linshan-ma <mabin194046@163.com>
36 lines
1.7 KiB
INI
36 lines
1.7 KiB
INI
|
|
hoodie.insert.shuffle.parallelism=100
|
|
hoodie.upsert.shuffle.parallelism=100
|
|
hoodie.bulkinsert.shuffle.parallelism=100
|
|
|
|
hoodie.deltastreamer.source.test.num_partitions=100
|
|
hoodie.deltastreamer.source.test.datagen.use_rocksdb_for_storing_existing_keys=false
|
|
hoodie.deltastreamer.source.test.max_unique_records=100000000
|
|
hoodie.embed.timeline.server=false
|
|
hoodie.deltastreamer.source.input.selector=org.apache.hudi.integ.testsuite.helpers.DFSTestSuitePathSelector
|
|
|
|
hoodie.insert.shuffle.parallelism=100
|
|
hoodie.upsert.shuffle.parallelism=100
|
|
hoodie.bulkinsert.shuffle.parallelism=100
|
|
|
|
hoodie.deltastreamer.source.input.selector=org.apache.hudi.integ.testsuite.helpers.DFSTestSuitePathSelector
|
|
hoodie.datasource.hive_sync.skip_ro_suffix=true
|
|
|
|
hoodie.datasource.write.recordkey.field=_row_key
|
|
hoodie.datasource.write.keygenerator.class=org.apache.hudi.keygen.TimestampBasedKeyGenerator
|
|
hoodie.datasource.write.partitionpath.field=timestamp
|
|
|
|
hoodie.deltastreamer.source.dfs.root=/user/hive/warehouse/hudi-integ-test-suite/input
|
|
hoodie.deltastreamer.schemaprovider.target.schema.file=file:/var/hoodie/ws/docker/demo/config/test-suite/source.avsc
|
|
hoodie.deltastreamer.schemaprovider.source.schema.file=file:/var/hoodie/ws/docker/demo/config/test-suite/source.avsc
|
|
hoodie.deltastreamer.keygen.timebased.timestamp.type=UNIX_TIMESTAMP
|
|
hoodie.deltastreamer.keygen.timebased.output.dateformat=yyyy/MM/dd
|
|
|
|
hoodie.datasource.hive_sync.jdbcurl=jdbc:hive2://hiveserver:10000/
|
|
hoodie.datasource.hive_sync.database=testdb
|
|
hoodie.datasource.hive_sync.table=table1
|
|
hoodie.datasource.hive_sync.assume_date_partitioning=false
|
|
hoodie.datasource.hive_sync.partition_fields=_hoodie_partition_path
|
|
hoodie.datasource.hive_sync.partition_extractor_class=org.apache.hudi.hive.SlashEncodedDayPartitionValueExtractor
|
|
|