[HUDI-1415] Read Hoodie Table As Spark DataSource Table (#2283)
@@ -88,6 +88,12 @@ public class HiveSyncConfig implements Serializable {
  @Parameter(names = {"--verify-metadata-file-listing"}, description = "Verify file listing from Hudi's metadata against file system")
  public Boolean verifyMetadataFileListing = HoodieMetadataConfig.DEFAULT_METADATA_VALIDATE;

  @Parameter(names = {"--table-properties"}, description = "Table properties to hive table")
  public String tableProperties;

  @Parameter(names = {"--serde-properties"}, description = "Serde properties to hive table")
  public String serdeProperties;

  @Parameter(names = {"--help", "-h"}, help = true)
  public Boolean help = false;
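The two new flags take the same newline-separated key=value format that the ConfigUtils helper (added below) parses. A minimal sketch of a tool invocation using them; the database, table, and path values are made up, and the pre-existing flags appear only for context:

```java
// Hypothetical args for HiveSyncTool's main(); only --table-properties and
// --serde-properties are new in this commit.
String[] args = {
    "--database", "default",
    "--table", "hudi_trips",
    "--base-path", "/tmp/hudi_trips",
    "--table-properties", "tp_0=p0\ntp_1=p1",     // newline-separated key=value pairs
    "--serde-properties", "path=/tmp/hudi_trips"
};
```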
@@ -114,32 +120,36 @@ public class HiveSyncConfig implements Serializable {
    newConfig.verifyMetadataFileListing = cfg.verifyMetadataFileListing;
    newConfig.supportTimestamp = cfg.supportTimestamp;
    newConfig.decodePartition = cfg.decodePartition;
    newConfig.tableProperties = cfg.tableProperties;
    newConfig.serdeProperties = cfg.serdeProperties;
    return newConfig;
  }

  @Override
  public String toString() {
    return "HiveSyncConfig{"
        + "databaseName='" + databaseName + '\''
        + ", tableName='" + tableName + '\''
        + ", baseFileFormat='" + baseFileFormat + '\''
        + ", hiveUser='" + hiveUser + '\''
        + ", hivePass='" + hivePass + '\''
        + ", jdbcUrl='" + jdbcUrl + '\''
        + ", basePath='" + basePath + '\''
        + ", partitionFields=" + partitionFields
        + ", partitionValueExtractorClass='" + partitionValueExtractorClass + '\''
        + ", assumeDatePartitioning=" + assumeDatePartitioning
        + ", usePreApacheInputFormat=" + usePreApacheInputFormat
        + ", useJdbc=" + useJdbc
        + ", autoCreateDatabase=" + autoCreateDatabase
        + ", ignoreExceptions=" + ignoreExceptions
        + ", skipROSuffix=" + skipROSuffix
        + ", help=" + help
        + ", supportTimestamp=" + supportTimestamp
        + ", decodePartition=" + decodePartition
        + ", useFileListingFromMetadata=" + useFileListingFromMetadata
        + ", verifyMetadataFileListing=" + verifyMetadataFileListing
        + '}';
        + "databaseName='" + databaseName + '\''
        + ", tableName='" + tableName + '\''
        + ", baseFileFormat='" + baseFileFormat + '\''
        + ", hiveUser='" + hiveUser + '\''
        + ", hivePass='" + hivePass + '\''
        + ", jdbcUrl='" + jdbcUrl + '\''
        + ", basePath='" + basePath + '\''
        + ", partitionFields=" + partitionFields
        + ", partitionValueExtractorClass='" + partitionValueExtractorClass + '\''
        + ", assumeDatePartitioning=" + assumeDatePartitioning
        + ", usePreApacheInputFormat=" + usePreApacheInputFormat
        + ", useJdbc=" + useJdbc
        + ", autoCreateDatabase=" + autoCreateDatabase
        + ", ignoreExceptions=" + ignoreExceptions
        + ", skipROSuffix=" + skipROSuffix
        + ", useFileListingFromMetadata=" + useFileListingFromMetadata
        + ", verifyMetadataFileListing=" + verifyMetadataFileListing
        + ", tableProperties='" + tableProperties + '\''
        + ", serdeProperties='" + serdeProperties + '\''
        + ", help=" + help
        + ", supportTimestamp=" + supportTimestamp
        + ", decodePartition=" + decodePartition
        + '}';
  }
}
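Programmatic use is a sketch along these lines, assuming the existing static copy(HiveSyncConfig) helper whose body is extended above; the field values are illustrative:

```java
HiveSyncConfig cfg = new HiveSyncConfig();
cfg.databaseName = "default";
cfg.tableName = "hudi_trips";                    // made-up table name
cfg.tableProperties = "tp_0=p0\ntp_1=p1";        // same format as the --table-properties flag
cfg.serdeProperties = "path=/tmp/hudi_trips";

// copy() now carries both new fields over to the duplicate.
HiveSyncConfig copied = HiveSyncConfig.copy(cfg);
```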
@@ -24,6 +24,7 @@ import org.apache.hudi.common.util.Option;
import org.apache.hudi.exception.HoodieException;
import org.apache.hudi.exception.InvalidTableException;
import org.apache.hudi.hadoop.utils.HoodieInputFormatUtils;
import org.apache.hudi.hive.util.ConfigUtils;
import org.apache.hudi.sync.common.AbstractSyncHoodieClient.PartitionEvent;
import org.apache.hudi.sync.common.AbstractSyncHoodieClient.PartitionEvent.PartitionEventType;
import org.apache.hudi.hive.util.HiveSchemaUtil;
@@ -162,9 +163,9 @@ public class HiveSyncTool extends AbstractSyncTool {
    LOG.info("Last commit time synced was found to be " + lastCommitTimeSynced.orElse("null"));
    List<String> writtenPartitionsSince = hoodieHiveClient.getPartitionsWrittenToSince(lastCommitTimeSynced);
    LOG.info("Storage partitions scan complete. Found " + writtenPartitionsSince.size());

    // Sync the partitions if needed
    syncPartitions(tableName, writtenPartitionsSince);

    hoodieHiveClient.updateLastCommitTimeSynced(tableName);
    LOG.info("Sync complete for " + tableName);
  }
@@ -196,7 +197,8 @@ public class HiveSyncTool extends AbstractSyncTool {
      // Custom serde will not work with ALTER TABLE REPLACE COLUMNS
      // https://github.com/apache/hive/blob/release-1.1.0/ql/src/java/org/apache/hadoop/hive
      // /ql/exec/DDLTask.java#L3488
      hoodieHiveClient.createTable(tableName, schema, inputFormatClassName, outputFormatClassName, serDeFormatClassName);
      hoodieHiveClient.createTable(tableName, schema, inputFormatClassName,
          outputFormatClassName, serDeFormatClassName, ConfigUtils.toMap(cfg.serdeProperties), ConfigUtils.toMap(cfg.tableProperties));
    } else {
      // Check if the table schema has evolved
      Map<String, String> tableSchema = hoodieHiveClient.getTableSchema(tableName);
@@ -204,6 +206,12 @@ public class HiveSyncTool extends AbstractSyncTool {
      if (!schemaDiff.isEmpty()) {
        LOG.info("Schema difference found for " + tableName);
        hoodieHiveClient.updateTableDefinition(tableName, schema);
        // Sync the table properties if the schema has changed
        if (cfg.tableProperties != null) {
          Map<String, String> tableProperties = ConfigUtils.toMap(cfg.tableProperties);
          hoodieHiveClient.updateTableProperties(tableName, tableProperties);
          LOG.info("Sync table properties for " + tableName + ", table properties is: " + cfg.tableProperties);
        }
      } else {
        LOG.info("No Schema difference for " + tableName);
      }
@@ -138,6 +138,26 @@ public class HoodieHiveClient extends AbstractSyncHoodieClient {
    }
  }

  /**
   * Update the table properties to the table.
   */
  @Override
  public void updateTableProperties(String tableName, Map<String, String> tableProperties) {
    if (tableProperties == null || tableProperties.isEmpty()) {
      return;
    }
    try {
      Table table = client.getTable(syncConfig.databaseName, tableName);
      for (Map.Entry<String, String> entry: tableProperties.entrySet()) {
        table.putToParameters(entry.getKey(), entry.getValue());
      }
      client.alter_table(syncConfig.databaseName, tableName, table);
    } catch (Exception e) {
      throw new HoodieHiveSyncException("Failed to update table properties for table: "
          + tableName, e);
    }
  }

  private String constructAddPartitions(String tableName, List<String> partitions) {
    StringBuilder alterSQL = new StringBuilder("ALTER TABLE ");
    alterSQL.append(HIVE_ESCAPE_CHARACTER).append(syncConfig.databaseName)
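Calling the new method directly mirrors what HiveSyncTool does above after a schema change; a sketch with a made-up table name and property:

```java
Map<String, String> props = new HashMap<>();
props.put("tp_0", "p0");                          // hypothetical property
hoodieHiveClient.updateTableProperties("hudi_trips", props);
// A null or empty map is a no-op; metastore failures surface as HoodieHiveSyncException.
```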
@@ -255,10 +275,13 @@ public class HoodieHiveClient extends AbstractSyncHoodieClient {
  }

  @Override
  public void createTable(String tableName, MessageType storageSchema, String inputFormatClass, String outputFormatClass, String serdeClass) {
  public void createTable(String tableName, MessageType storageSchema, String inputFormatClass,
                          String outputFormatClass, String serdeClass,
                          Map<String, String> serdeProperties, Map<String, String> tableProperties) {
    try {
      String createSQLQuery =
          HiveSchemaUtil.generateCreateDDL(tableName, storageSchema, syncConfig, inputFormatClass, outputFormatClass, serdeClass);
          HiveSchemaUtil.generateCreateDDL(tableName, storageSchema, syncConfig, inputFormatClass,
              outputFormatClass, serdeClass, serdeProperties, tableProperties);
      LOG.info("Creating table with " + createSQLQuery);
      updateHiveSQL(createSQLQuery);
    } catch (IOException e) {
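A hypothetical call site for the widened overload. The input format, output format, and serde class names are the stock parquet ones Hudi tables typically register with; the table name and property strings are made up:

```java
hoodieHiveClient.createTable(
    "hudi_trips",
    storageSchema,                                 // parquet MessageType of the dataset
    "org.apache.hudi.hadoop.HoodieParquetInputFormat",
    "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat",
    "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe",
    ConfigUtils.toMap("path=/tmp/hudi_trips"),     // serde properties
    ConfigUtils.toMap("tp_0=p0\ntp_1=p1"));        // table properties
```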
@@ -0,0 +1,73 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hudi.hive.util;

import java.util.HashMap;
import java.util.Map;
import org.apache.hudi.common.util.StringUtils;

public class ConfigUtils {

  /**
   * Convert the key-value config to a map. The format of the config
   * is a key-value pair just like "k1=v1\nk2=v2\nk3=v3".
   *
   * @param keyValueConfig the newline-separated "key=value" string
   * @return the parsed key-value map
   */
  public static Map<String, String> toMap(String keyValueConfig) {
    if (StringUtils.isNullOrEmpty(keyValueConfig)) {
      return new HashMap<>();
    }
    String[] keyvalues = keyValueConfig.split("\n");
    Map<String, String> tableProperties = new HashMap<>();
    for (String keyValue : keyvalues) {
      String[] keyValueArray = keyValue.split("=");
      if (keyValueArray.length == 1 || keyValueArray.length == 2) {
        String key = keyValueArray[0].trim();
        String value = keyValueArray.length == 2 ? keyValueArray[1].trim() : "";
        tableProperties.put(key, value);
      } else {
        throw new IllegalArgumentException("Bad key-value config: " + keyValue + ", must be the"
            + " format 'key = value'");
      }
    }
    return tableProperties;
  }

  /**
   * Convert map config to key-value string. The format of the config
   * is a key-value pair just like "k1=v1\nk2=v2\nk3=v3".
   *
   * @param config the key-value map to encode
   * @return the newline-separated "key=value" string, or null if the map is null
   */
  public static String configToString(Map<String, String> config) {
    if (config == null) {
      return null;
    }
    StringBuilder sb = new StringBuilder();
    for (Map.Entry<String, String> entry : config.entrySet()) {
      if (sb.length() > 0) {
        sb.append("\n");
      }
      sb.append(entry.getKey()).append("=").append(entry.getValue());
    }
    return sb.toString();
  }

}
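A self-contained round-trip sketch of the two helpers (the wrapper class name is made up; it assumes the ConfigUtils class above is on the classpath):

```java
import java.util.LinkedHashMap;
import java.util.Map;
import org.apache.hudi.hive.util.ConfigUtils;

public class ConfigUtilsRoundTrip {
  public static void main(String[] args) {
    // LinkedHashMap keeps insertion order, so the encoded string is predictable.
    Map<String, String> props = new LinkedHashMap<>();
    props.put("tp_0", "p0");
    props.put("tp_1", "p1");

    String encoded = ConfigUtils.configToString(props); // "tp_0=p0\ntp_1=p1"
    Map<String, String> decoded = ConfigUtils.toMap(encoded);
    System.out.println(decoded.equals(props));          // true
  }
}
```

Note the round trip only holds for keys and values free of '=' and newlines: toMap rejects any entry that splits into more than two '=' parts, and it treats a bare key as having an empty value.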
@@ -395,7 +395,8 @@ public class HiveSchemaUtil {
  }

  public static String generateCreateDDL(String tableName, MessageType storageSchema, HiveSyncConfig config, String inputFormatClass,
                                         String outputFormatClass, String serdeClass) throws IOException {
                                         String outputFormatClass, String serdeClass, Map<String, String> serdeProperties,
                                         Map<String, String> tableProperties) throws IOException {
    Map<String, String> hiveSchema = convertParquetSchemaToHiveSchema(storageSchema, config.supportTimestamp);
    String columns = generateSchemaString(storageSchema, config.partitionFields, config.supportTimestamp);
@@ -415,8 +416,31 @@ public class HiveSchemaUtil {
      sb.append(" PARTITIONED BY (").append(partitionsStr).append(")");
    }
    sb.append(" ROW FORMAT SERDE '").append(serdeClass).append("'");
    if (serdeProperties != null && !serdeProperties.isEmpty()) {
      sb.append(" WITH SERDEPROPERTIES (").append(propertyToString(serdeProperties)).append(")");
    }
    sb.append(" STORED AS INPUTFORMAT '").append(inputFormatClass).append("'");
    sb.append(" OUTPUTFORMAT '").append(outputFormatClass).append("' LOCATION '").append(config.basePath).append("'");

    if (tableProperties != null && !tableProperties.isEmpty()) {
      sb.append(" TBLPROPERTIES(").append(propertyToString(tableProperties)).append(")");
    }
    return sb.toString();
  }

  private static String propertyToString(Map<String, String> properties) {
    if (properties == null || properties.isEmpty()) {
      return "";
    }
    StringBuilder sb = new StringBuilder();
    boolean first = true;
    for (Map.Entry<String, String> entry: properties.entrySet()) {
      if (!first) {
        sb.append(",");
      }
      sb.append("'").append(entry.getKey()).append("'='").append(entry.getValue()).append("'");
      first = false;
    }
    return sb.toString();
  }
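With both maps populated, the generated DDL gains the two new clauses. A sketch of its shape; the real output is a single line, and the schema, formats, and location below are made up:

```java
// CREATE EXTERNAL TABLE IF NOT EXISTS `default`.`hudi_trips`( ... )
//   PARTITIONED BY (`datestr` String)
//   ROW FORMAT SERDE 'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'
//   WITH SERDEPROPERTIES ('path'='/tmp/hudi_trips')
//   STORED AS INPUTFORMAT 'org.apache.hudi.hadoop.HoodieParquetInputFormat'
//   OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'
//   LOCATION '/tmp/hudi_trips'
//   TBLPROPERTIES('tp_0'='p0','tp_1'='p1')
```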
@@ -18,13 +18,19 @@

package org.apache.hudi.hive;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
import org.apache.hadoop.hive.ql.Driver;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hudi.common.model.HoodieRecord;
import org.apache.hudi.common.testutils.SchemaTestUtil;
import org.apache.hudi.common.util.Option;
import org.apache.hudi.hive.testutils.HiveTestUtil;
import org.apache.hudi.hive.util.HiveSchemaUtil;
import org.apache.hudi.hive.util.ConfigUtils;
import org.apache.hudi.sync.common.AbstractSyncHoodieClient.PartitionEvent;
import org.apache.hudi.sync.common.AbstractSyncHoodieClient.PartitionEvent.PartitionEventType;

import org.apache.hadoop.hive.metastore.api.Partition;
import org.apache.parquet.schema.MessageType;
@@ -250,6 +256,54 @@ public class TestHiveSyncTool {
        "The last commit that was synced should be 100");
  }

  @ParameterizedTest
  @MethodSource({"useJdbcAndSchemaFromCommitMetadata"})
  public void testSyncWithProperties(boolean useJdbc, boolean useSchemaFromCommitMetadata) throws Exception {
    HiveSyncConfig hiveSyncConfig = HiveTestUtil.hiveSyncConfig;
    Map<String, String> serdeProperties = new HashMap<String, String>() {
      {
        put("path", hiveSyncConfig.basePath);
      }
    };

    Map<String, String> tableProperties = new HashMap<String, String>() {
      {
        put("tp_0", "p0");
        put("tp_1", "p1");
      }
    };
    hiveSyncConfig.useJdbc = useJdbc;
    hiveSyncConfig.serdeProperties = ConfigUtils.configToString(serdeProperties);
    hiveSyncConfig.tableProperties = ConfigUtils.configToString(tableProperties);
    String instantTime = "100";
    HiveTestUtil.createCOWTable(instantTime, 5, useSchemaFromCommitMetadata);

    HiveSyncTool tool = new HiveSyncTool(hiveSyncConfig, HiveTestUtil.getHiveConf(), HiveTestUtil.fileSystem);
    tool.syncHoodieTable();

    SessionState.start(HiveTestUtil.getHiveConf());
    Driver hiveDriver = new org.apache.hadoop.hive.ql.Driver(HiveTestUtil.getHiveConf());
    String dbTableName = hiveSyncConfig.databaseName + "." + hiveSyncConfig.tableName;
    hiveDriver.run("SHOW TBLPROPERTIES " + dbTableName);
    List<String> results = new ArrayList<>();
    hiveDriver.getResults(results);

    String tblPropertiesWithoutDdlTime = String.join("\n",
        results.subList(0, results.size() - 1));
    assertEquals(
        "EXTERNAL\tTRUE\n"
            + "last_commit_time_sync\t100\n"
            + "tp_0\tp0\n"
            + "tp_1\tp1", tblPropertiesWithoutDdlTime);
    assertTrue(results.get(results.size() - 1).startsWith("transient_lastDdlTime"));

    results.clear();
    hiveDriver.run("SHOW CREATE TABLE " + dbTableName);
    hiveDriver.getResults(results);
    String ddl = String.join("\n", results);
    assertTrue(ddl.contains("'path'='" + hiveSyncConfig.basePath + "'"));
  }

  @ParameterizedTest
  @MethodSource("useJdbc")
  public void testSyncIncremental(boolean useJdbc) throws Exception {