[HUDI-4140] Fixing hive style partitioning and default partition with bulk insert row writer with SimpleKeyGen and virtual keys (#5664)
The bulk insert row writer code path had a gap with respect to hive-style partitioning and the default partition path when virtual keys are enabled with SimpleKeyGenerator. This patch fixes the issue.
This commit is contained in: commit 4f6fc726d0 (parent 4f7ea8c79a), committed via GitHub.
@@ -23,6 +23,7 @@ import org.apache.hudi.client.HoodieInternalWriteStatus;
|
||||
import org.apache.hudi.common.config.TypedProperties;
|
||||
import org.apache.hudi.common.model.HoodieRecord;
|
||||
import org.apache.hudi.common.util.Option;
|
||||
import org.apache.hudi.common.util.PartitionPathEncodeUtils;
|
||||
import org.apache.hudi.config.HoodieWriteConfig;
|
||||
import org.apache.hudi.exception.HoodieIOException;
|
||||
import org.apache.hudi.io.storage.row.HoodieRowCreateHandle;
|
||||
@@ -128,7 +129,11 @@ public class BulkInsertDataInternalWriterHelper {
|
||||
if (!keyGeneratorOpt.isPresent()) { // NoPartitionerKeyGen
|
||||
partitionPath = "";
|
||||
} else if (simpleKeyGen) { // SimpleKeyGen
|
||||
partitionPath = (record.get(simplePartitionFieldIndex, simplePartitionFieldDataType)).toString();
|
||||
Object partitionPathValue = record.get(simplePartitionFieldIndex, simplePartitionFieldDataType);
partitionPath = partitionPathValue != null ? partitionPathValue.toString() : PartitionPathEncodeUtils.DEFAULT_PARTITION_PATH;
|
||||
if (writeConfig.isHiveStylePartitioningEnabled()) {
|
||||
partitionPath = (keyGeneratorOpt.get()).getPartitionPathFields().get(0) + "=" + partitionPath;
|
||||
}
|
||||
} else {
|
||||
// only BuiltIn key generators are supported if meta fields are disabled.
|
||||
partitionPath = keyGeneratorOpt.get().getPartitionPath(record, structType);
|
||||
|
||||
@@ -70,6 +70,10 @@ public class HoodieBulkInsertInternalWriterTestBase extends HoodieClientTestHarn
|
||||
}
|
||||
|
||||
protected HoodieWriteConfig getWriteConfig(boolean populateMetaFields) {
|
||||
return getWriteConfig(populateMetaFields, DataSourceWriteOptions.HIVE_STYLE_PARTITIONING().defaultValue());
|
||||
}
|
||||
|
||||
protected HoodieWriteConfig getWriteConfig(boolean populateMetaFields, String hiveStylePartitioningValue) {
|
||||
Properties properties = new Properties();
|
||||
if (!populateMetaFields) {
|
||||
properties.setProperty(DataSourceWriteOptions.KEYGENERATOR_CLASS_NAME().key(), SimpleKeyGenerator.class.getName());
|
||||
@@ -77,6 +81,7 @@ public class HoodieBulkInsertInternalWriterTestBase extends HoodieClientTestHarn
|
||||
properties.setProperty(DataSourceWriteOptions.PARTITIONPATH_FIELD().key(), SparkDatasetTestUtils.PARTITION_PATH_FIELD_NAME);
|
||||
properties.setProperty(HoodieTableConfig.POPULATE_META_FIELDS.key(), "false");
|
||||
}
|
||||
properties.setProperty(DataSourceWriteOptions.HIVE_STYLE_PARTITIONING().key(), hiveStylePartitioningValue);
|
||||
return getConfigBuilder(basePath, timelineServicePort).withProperties(properties).build();
|
||||
}
|
||||
|
||||
|
||||
Reference in New Issue
Block a user