[HUDI-3659] Reducing the validation frequency with integ tests (#5067)

Author: Sivabalan Narayanan
Date: 2022-03-18 09:45:33 -07:00
Committed by: GitHub
Parent: 2551c26183
Commit: 316e38c71e

8 changed files with 88 additions and 89 deletions
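
Summary of the change: the standalone first_validate nodes are dropped from the integ test dags (the downstream upserts now depend directly on the preceding insert or hive-sync node), the remaining second_validate nodes gain a validate_once_every_itr config (set to 5 in these yamls), DeltaConfig exposes the new key with a default of 1, and BaseValidateDatasetNode#execute only runs validation when the iteration gate passes. Minimal sketches of the new getter and the gate follow the two Java hunks below.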

File 1 of 8 (path not captured)

@@ -25,11 +25,6 @@ dag_content:
       num_records_insert: 10000
     type: SparkInsertNode
     deps: none
-  first_validate:
-    config:
-      validate_hive: false
-    type: ValidateDatasetNode
-    deps: first_insert
   first_upsert:
     config:
       record_size: 200
@@ -39,7 +34,7 @@ dag_content:
       num_records_upsert: 3000
       num_partitions_upsert: 50
     type: SparkUpsertNode
-    deps: first_validate
+    deps: first_insert
   first_delete:
     config:
       num_partitions_delete: 50
@@ -48,6 +43,7 @@ dag_content:
     deps: first_upsert
   second_validate:
     config:
+      validate_once_every_itr : 5
       validate_hive: false
       delete_input_data: true
     type: ValidateDatasetNode

File 2 of 8 (path not captured)

@@ -47,11 +47,6 @@ dag_content:
       engine: "mr"
     type: HiveSyncNode
     deps: third_insert
-  first_validate:
-    config:
-      validate_hive: false
-    type: ValidateDatasetNode
-    deps: first_hive_sync
   first_upsert:
     config:
       record_size: 1000
@@ -61,7 +56,7 @@ dag_content:
       num_records_upsert: 100
       num_partitions_upsert: 1
     type: UpsertNode
-    deps: first_validate
+    deps: first_hive_sync
   first_delete:
     config:
       num_partitions_delete: 50
@@ -76,6 +71,7 @@ dag_content:
     deps: first_delete
   second_validate:
     config:
+      validate_once_every_itr : 5
       validate_hive: true
       delete_input_data: true
     type: ValidateDatasetNode

File 3 of 8 (path not captured)

@@ -59,6 +59,7 @@ dag_content:
     deps: first_upsert
   second_validate:
     config:
+      validate_once_every_itr : 5
       validate_hive: false
       delete_input_data: true
     type: ValidateDatasetNode

File 4 of 8 (path not captured)

@@ -59,6 +59,7 @@ dag_content:
     deps: first_upsert
   second_validate:
     config:
+      validate_once_every_itr : 5
       validate_hive: false
       delete_input_data: true
     type: ValidateDatasetNode

File 5 of 8 (path not captured)

@@ -62,6 +62,7 @@ dag_content:
     deps: first_upsert
   second_validate:
     config:
+      validate_once_every_itr : 5
       validate_hive: false
       delete_input_data: false
     type: ValidateDatasetNode

File 6 of 8 (path not captured)

@@ -41,11 +41,6 @@ dag_content:
       num_records_insert: 300
     deps: second_insert
     type: InsertNode
-  first_validate:
-    config:
-      validate_hive: false
-    type: ValidateDatasetNode
-    deps: third_insert
   first_upsert:
     config:
       record_size: 1000
@@ -55,7 +50,7 @@ dag_content:
       num_records_upsert: 100
       num_partitions_upsert: 1
     type: UpsertNode
-    deps: first_validate
+    deps: third_insert
   first_delete:
     config:
       num_partitions_delete: 1
@@ -64,6 +59,7 @@ dag_content:
     deps: first_upsert
   second_validate:
     config:
+      validate_once_every_itr : 5
       validate_hive: false
       delete_input_data: true
     type: ValidateDatasetNode

File 7 of 8 (path not captured; DeltaConfig.java)

@@ -89,6 +89,7 @@ public class DeltaConfig implements Serializable {
   private static String START_PARTITION = "start_partition";
   private static String DELETE_INPUT_DATA = "delete_input_data";
   private static String VALIDATE_HIVE = "validate_hive";
+  private static String VALIDATE_ONCE_EVERY_ITR = "validate_once_every_itr";
   private static String EXECUTE_ITR_COUNT = "execute_itr_count";
   private static String VALIDATE_ARCHIVAL = "validate_archival";
   private static String VALIDATE_CLEAN = "validate_clean";
@@ -216,6 +217,10 @@ public class DeltaConfig implements Serializable {
     return Boolean.valueOf(configsMap.getOrDefault(VALIDATE_HIVE, false).toString());
   }

+  public int validateOnceEveryIteration() {
+    return Integer.valueOf(configsMap.getOrDefault(VALIDATE_ONCE_EVERY_ITR, 1).toString());
+  }
+
   public boolean isValidateFullData() {
     return Boolean.valueOf(configsMap.getOrDefault(VALIDATE_FULL_DATA, false).toString());
   }
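
A minimal standalone sketch of how the new key resolves (the class name ValidateConfigDemo and the bare HashMap standing in for DeltaConfig's configsMap are illustrative assumptions, not part of the patch):

import java.util.HashMap;
import java.util.Map;

public class ValidateConfigDemo {

  private static final String VALIDATE_ONCE_EVERY_ITR = "validate_once_every_itr";

  // Same pattern as the getter above: the default of 1 means
  // "validate on every iteration" when the dag sets nothing.
  static int validateOnceEveryIteration(Map<String, Object> configsMap) {
    return Integer.valueOf(configsMap.getOrDefault(VALIDATE_ONCE_EVERY_ITR, 1).toString());
  }

  public static void main(String[] args) {
    Map<String, Object> configs = new HashMap<>();
    System.out.println(validateOnceEveryIteration(configs)); // 1 (default)
    configs.put(VALIDATE_ONCE_EVERY_ITR, 5);                 // as set in the test yamls
    System.out.println(validateOnceEveryIteration(configs)); // 5
  }
}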

File 8 of 8 (path not captured; BaseValidateDatasetNode.java)

@@ -74,9 +74,11 @@ public abstract class BaseValidateDatasetNode extends DagNode<Boolean> {
   @Override
   public void execute(ExecutionContext context, int curItrCount) throws Exception {
+    int validateOnceEveryItr = config.validateOnceEveryIteration();
+    int itrCountToExecute = config.getIterationCountToExecute();
+    if ((itrCountToExecute != -1 && itrCountToExecute == curItrCount) ||
+        (itrCountToExecute == -1 && ((curItrCount % validateOnceEveryItr) == 0))) {
     SparkSession session = SparkSession.builder().sparkContext(context.getJsc().sc()).getOrCreate();
     // todo: Fix partitioning schemes. For now, assumes data based partitioning.
     String inputPath = context.getHoodieTestSuiteWriter().getCfg().inputBasePath + "/*/*";
     log.warn("Validation using data from input path " + inputPath);
@@ -153,6 +155,7 @@ public abstract class BaseValidateDatasetNode extends DagNode<Boolean> {
       }
     }
   }
+  }

   private Dataset<Row> getInputDf(ExecutionContext context, SparkSession session, String inputPath) {
     String recordKeyField = context.getWriterContext().getProps().getString(DataSourceWriteOptions.RECORDKEY_FIELD().key());
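
A minimal, self-contained sketch of the gating condition added above (the class name ValidationGate and the shouldValidate helper are illustrative assumptions; in the patch the check lives inline in BaseValidateDatasetNode#execute):

public class ValidationGate {

  // Mirrors the condition in execute(): a pinned iteration
  // (execute_itr_count != -1) wins; otherwise validate once
  // every N iterations (validate_once_every_itr, default 1).
  static boolean shouldValidate(int curItrCount, int itrCountToExecute, int validateOnceEveryItr) {
    return (itrCountToExecute != -1 && itrCountToExecute == curItrCount)
        || (itrCountToExecute == -1 && (curItrCount % validateOnceEveryItr) == 0);
  }

  public static void main(String[] args) {
    // With validate_once_every_itr: 5 (as in the yamls above) and no
    // pinned iteration, validation fires on iterations 5, 10, 15, ...
    for (int itr = 1; itr <= 10; itr++) {
      System.out.println("itr " + itr + " validate=" + shouldValidate(itr, -1, 5));
    }
  }
}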