[HUDI-3659] Reducing the validation frequency with integ tests (#5067)
commit 316e38c71e
parent 2551c26183
@@ -25,11 +25,6 @@ dag_content:
       num_records_insert: 10000
     type: SparkInsertNode
     deps: none
-  first_validate:
-    config:
-      validate_hive: false
-    type: ValidateDatasetNode
-    deps: first_insert
   first_upsert:
     config:
       record_size: 200
@@ -39,7 +34,7 @@ dag_content:
       num_records_upsert: 3000
       num_partitions_upsert: 50
     type: SparkUpsertNode
-    deps: first_validate
+    deps: first_insert
   first_delete:
     config:
       num_partitions_delete: 50
@@ -48,6 +43,7 @@ dag_content:
     deps: first_upsert
   second_validate:
     config:
+      validate_once_every_itr : 5
       validate_hive: false
       delete_input_data: true
     type: ValidateDatasetNode
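Across these long-running dag files the change is the same: the standalone first_validate node is deleted, the upsert that used to wait on it now depends directly on the preceding write (or hive-sync) node, and the surviving second_validate node gains validate_once_every_itr : 5, so the full-dataset validation runs on every fifth iteration instead of on every pass. A minimal sketch of the resulting node, assuming the usual dag_content layout; the deps target (first_delete here) falls outside the hunks shown and is an assumption:

  second_validate:
    config:
      validate_once_every_itr : 5    # skip validation unless the iteration count is a multiple of 5
      validate_hive: false
      delete_input_data: true
    type: ValidateDatasetNode
    deps: first_delete               # assumed; the actual deps line is not part of this hunk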
@@ -47,11 +47,6 @@ dag_content:
       engine: "mr"
     type: HiveSyncNode
     deps: third_insert
-  first_validate:
-    config:
-      validate_hive: false
-    type: ValidateDatasetNode
-    deps: first_hive_sync
   first_upsert:
     config:
       record_size: 1000
@@ -61,7 +56,7 @@ dag_content:
       num_records_upsert: 100
       num_partitions_upsert: 1
     type: UpsertNode
-    deps: first_validate
+    deps: first_hive_sync
   first_delete:
     config:
       num_partitions_delete: 50
@@ -76,6 +71,7 @@ dag_content:
     deps: first_delete
   second_validate:
    config:
+      validate_once_every_itr : 5
       validate_hive: true
       delete_input_data: true
     type: ValidateDatasetNode
@@ -59,6 +59,7 @@ dag_content:
     deps: first_upsert
   second_validate:
     config:
+      validate_once_every_itr : 5
       validate_hive: false
       delete_input_data: true
     type: ValidateDatasetNode
@@ -59,6 +59,7 @@ dag_content:
     deps: first_upsert
   second_validate:
     config:
+      validate_once_every_itr : 5
       validate_hive: false
       delete_input_data: true
     type: ValidateDatasetNode
@@ -62,6 +62,7 @@ dag_content:
     deps: first_upsert
   second_validate:
     config:
+      validate_once_every_itr : 5
       validate_hive: false
       delete_input_data: false
     type: ValidateDatasetNode
@@ -41,11 +41,6 @@ dag_content:
       num_records_insert: 300
     deps: second_insert
     type: InsertNode
-  first_validate:
-    config:
-      validate_hive: false
-    type: ValidateDatasetNode
-    deps: third_insert
   first_upsert:
     config:
       record_size: 1000
@@ -55,7 +50,7 @@ dag_content:
       num_records_upsert: 100
       num_partitions_upsert: 1
     type: UpsertNode
-    deps: first_validate
+    deps: third_insert
   first_delete:
     config:
       num_partitions_delete: 1
@@ -64,6 +59,7 @@ dag_content:
     deps: first_upsert
   second_validate:
     config:
+      validate_once_every_itr : 5
       validate_hive: false
       delete_input_data: true
     type: ValidateDatasetNode
@@ -89,6 +89,7 @@ public class DeltaConfig implements Serializable {
     private static String START_PARTITION = "start_partition";
     private static String DELETE_INPUT_DATA = "delete_input_data";
     private static String VALIDATE_HIVE = "validate_hive";
+    private static String VALIDATE_ONCE_EVERY_ITR = "validate_once_every_itr";
     private static String EXECUTE_ITR_COUNT = "execute_itr_count";
     private static String VALIDATE_ARCHIVAL = "validate_archival";
     private static String VALIDATE_CLEAN = "validate_clean";
@@ -216,6 +217,10 @@ public class DeltaConfig implements Serializable {
       return Boolean.valueOf(configsMap.getOrDefault(VALIDATE_HIVE, false).toString());
     }
 
+    public int validateOnceEveryIteration() {
+      return Integer.valueOf(configsMap.getOrDefault(VALIDATE_ONCE_EVERY_ITR, 1).toString());
+    }
+
     public boolean isValidateFullData() {
       return Boolean.valueOf(configsMap.getOrDefault(VALIDATE_FULL_DATA, false).toString());
     }
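The new getter follows the existing DeltaConfig pattern: each knob lives in configsMap, is read with getOrDefault, and is parsed from its string form. The default of 1 keeps the old behavior (validate on every iteration) for dags that do not set the key. A self-contained sketch of that lookup; the class name is invented for illustration, and configsMap mirrors the field used above:

import java.util.HashMap;
import java.util.Map;

public class ValidateConfigSketch {
  private static final String VALIDATE_ONCE_EVERY_ITR = "validate_once_every_itr";
  private final Map<String, Object> configsMap = new HashMap<>();

  // Same shape as DeltaConfig.validateOnceEveryIteration(): default 1 = validate every iteration.
  public int validateOnceEveryIteration() {
    return Integer.valueOf(configsMap.getOrDefault(VALIDATE_ONCE_EVERY_ITR, 1).toString());
  }

  public static void main(String[] args) {
    ValidateConfigSketch cfg = new ValidateConfigSketch();
    System.out.println(cfg.validateOnceEveryIteration()); // 1 (default)
    cfg.configsMap.put(VALIDATE_ONCE_EVERY_ITR, 5);       // as set by the yaml dags above
    System.out.println(cfg.validateOnceEveryIteration()); // 5
  }
}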
@@ -74,9 +74,11 @@ public abstract class BaseValidateDatasetNode extends DagNode<Boolean> {
 
   @Override
   public void execute(ExecutionContext context, int curItrCount) throws Exception {
 
+    int validateOnceEveryItr = config.validateOnceEveryIteration();
+    int itrCountToExecute = config.getIterationCountToExecute();
+    if ((itrCountToExecute != -1 && itrCountToExecute == curItrCount) ||
+        (itrCountToExecute == -1 && ((curItrCount % validateOnceEveryItr) == 0))) {
       SparkSession session = SparkSession.builder().sparkContext(context.getJsc().sc()).getOrCreate();
 
       // todo: Fix partitioning schemes. For now, assumes data based partitioning.
       String inputPath = context.getHoodieTestSuiteWriter().getCfg().inputBasePath + "/*/*";
       log.warn("Validation using data from input path " + inputPath);
@@ -153,6 +155,7 @@ public abstract class BaseValidateDatasetNode extends DagNode<Boolean> {
         }
       }
+    }
   }
 
   private Dataset<Row> getInputDf(ExecutionContext context, SparkSession session, String inputPath) {
     String recordKeyField = context.getWriterContext().getProps().getString(DataSourceWriteOptions.RECORDKEY_FIELD().key());
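The gate added to execute() fires in exactly two cases: a specific iteration was requested (execute_itr_count != -1) and this is that iteration, or no specific iteration was requested and the current count is a multiple of validate_once_every_itr. A standalone sketch of just that predicate; the class name is invented for illustration, and the loop stands in for the dag runner that supplies curItrCount:

public class ValidationGateSketch {

  // Mirrors the condition in BaseValidateDatasetNode.execute().
  static boolean shouldValidate(int curItrCount, int itrCountToExecute, int validateOnceEveryItr) {
    return (itrCountToExecute != -1 && itrCountToExecute == curItrCount)
        || (itrCountToExecute == -1 && ((curItrCount % validateOnceEveryItr) == 0));
  }

  public static void main(String[] args) {
    // validate_once_every_itr = 5, no fixed execute_itr_count (-1):
    // validation runs only at iterations 5 and 10.
    for (int itr = 1; itr <= 10; itr++) {
      if (shouldValidate(itr, -1, 5)) {
        System.out.println("validate at iteration " + itr);
      }
    }
  }
}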