1
0

[HUDI-1153] Spark DataSource and Streaming Write must fail when operation type is misconfigured (#2014)

This commit is contained in:
Sreeram Ramji
2020-09-04 09:08:30 -07:00
committed by GitHub
parent 8d19ebfd0f
commit 6537af2676
5 changed files with 47 additions and 36 deletions

View File

@@ -22,6 +22,7 @@ import org.apache.hudi.avro.HoodieAvroUtils;
 import org.apache.hudi.client.HoodieWriteClient;
 import org.apache.hudi.common.model.HoodieRecord;
 import org.apache.hudi.common.model.HoodieRecordPayload;
+import org.apache.hudi.common.model.WriteOperationType;
 import org.apache.hudi.common.util.Option;
 import org.apache.hudi.config.HoodieWriteConfig;
 import org.apache.hudi.exception.HoodieException;
@@ -108,7 +109,7 @@ public class TestDataSourceUtils {
     when(hoodieWriteClient.getConfig()).thenReturn(config);
     DataSourceUtils.doWriteOperation(hoodieWriteClient, hoodieRecords, "test-time",
-        DataSourceWriteOptions.BULK_INSERT_OPERATION_OPT_VAL());
+        WriteOperationType.BULK_INSERT);
     verify(hoodieWriteClient, times(1)).bulkInsert(any(hoodieRecords.getClass()), anyString(),
         optionCaptor.capture());
@@ -121,7 +122,7 @@ public class TestDataSourceUtils {
     Exception exception = assertThrows(HoodieException.class, () -> {
       DataSourceUtils.doWriteOperation(hoodieWriteClient, hoodieRecords, "test-time",
-          DataSourceWriteOptions.BULK_INSERT_OPERATION_OPT_VAL());
+          WriteOperationType.BULK_INSERT);
     });
     assertThat(exception.getMessage(), containsString("Could not create UserDefinedBulkInsertPartitioner"));
@@ -132,7 +133,7 @@ public class TestDataSourceUtils {
     setAndVerifyHoodieWriteClientWith(NoOpBulkInsertPartitioner.class.getName());
     DataSourceUtils.doWriteOperation(hoodieWriteClient, hoodieRecords, "test-time",
-        DataSourceWriteOptions.BULK_INSERT_OPERATION_OPT_VAL());
+        WriteOperationType.BULK_INSERT);
     verify(hoodieWriteClient, times(1)).bulkInsert(any(hoodieRecords.getClass()), anyString(),
         optionCaptor.capture());