[HUDI-2362] Add external config file support (#3416)
Co-authored-by: Wenning Ding <wenningd@amazon.com>
@@ -22,7 +22,7 @@ import java.util.Properties
 import org.apache.hudi.DataSourceOptionsHelper.allAlternatives
 import org.apache.hudi.DataSourceWriteOptions._
 import org.apache.hudi.common.config.HoodieMetadataConfig.ENABLE
-import org.apache.hudi.common.config.{HoodieConfig, TypedProperties}
+import org.apache.hudi.common.config.{DFSPropertiesConfiguration, HoodieConfig, TypedProperties}
 import org.apache.hudi.common.table.HoodieTableConfig
 import org.apache.hudi.exception.HoodieException
 import org.apache.spark.sql.SparkSession
@@ -47,6 +47,7 @@ object HoodieWriterUtils {
    * @return
    */
   def parametersWithWriteDefaults(parameters: Map[String, String]): Map[String, String] = {
+    val globalProps = DFSPropertiesConfiguration.getGlobalProps.asScala
     Map(OPERATION.key -> OPERATION.defaultValue,
       TABLE_TYPE.key -> TABLE_TYPE.defaultValue,
       PRECOMBINE_FIELD.key -> PRECOMBINE_FIELD.defaultValue,
@@ -81,7 +82,7 @@ object HoodieWriterUtils {
       ENABLE_ROW_WRITER.key -> ENABLE_ROW_WRITER.defaultValue,
       RECONCILE_SCHEMA.key -> RECONCILE_SCHEMA.defaultValue.toString,
       DROP_PARTITION_COLUMNS.key -> DROP_PARTITION_COLUMNS.defaultValue
-    ) ++ DataSourceOptionsHelper.translateConfigurations(parameters)
+    ) ++ globalProps ++ DataSourceOptionsHelper.translateConfigurations(parameters)
   }
 
   def toProperties(params: Map[String, String]): TypedProperties = {
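The merge order here determines precedence: with Scala's `++`, entries from the right-hand operand win, so values loaded from the external config file (`globalProps`) override the hard-coded defaults, and the caller's own options (via `translateConfigurations(parameters)`) override both. A minimal sketch with plain maps (the key is `OPERATION.key`, shown literally for illustration):

```scala
val defaults    = Map("hoodie.datasource.write.operation" -> "upsert")
val globalProps = Map("hoodie.datasource.write.operation" -> "insert")      // from the external config file
val userParams  = Map("hoodie.datasource.write.operation" -> "bulk_insert") // passed by the caller

// Rightmost ++ operand wins, mirroring defaults ++ globalProps ++ translated params.
val merged = defaults ++ globalProps ++ userParams
assert(merged("hoodie.datasource.write.operation") == "bulk_insert")
```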
@@ -170,4 +171,23 @@ object HoodieWriterUtils {
       }
     }
   }
+
+  val sparkDatasourceConfigsToTableConfigsMap = Map(
+    TABLE_NAME -> HoodieTableConfig.NAME,
+    TABLE_TYPE -> HoodieTableConfig.TYPE,
+    PRECOMBINE_FIELD -> HoodieTableConfig.PRECOMBINE_FIELD,
+    PARTITIONPATH_FIELD -> HoodieTableConfig.PARTITION_FIELDS,
+    RECORDKEY_FIELD -> HoodieTableConfig.RECORDKEY_FIELDS,
+    PAYLOAD_CLASS_NAME -> HoodieTableConfig.PAYLOAD_CLASS_NAME
+  )
+  def mappingSparkDatasourceConfigsToTableConfigs(options: Map[String, String]): Map[String, String] = {
+    val includingTableConfigs = scala.collection.mutable.Map() ++ options
+    sparkDatasourceConfigsToTableConfigsMap.foreach(kv => {
+      if (options.contains(kv._1.key)) {
+        includingTableConfigs(kv._2.key) = options(kv._1.key)
+        includingTableConfigs.remove(kv._1.key)
+      }
+    })
+    includingTableConfigs.toMap
+  }
 }
 
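A sketch of what the new helper does, using plain string maps so it runs standalone (the key strings below spell out two of the datasource/table-config pairs from the map above; treat them as illustrative): a datasource-style key found in the options is rewritten under its `hoodie.properties` counterpart and the original key is dropped, while unrelated keys pass through untouched.

```scala
import scala.collection.mutable

// Illustrative subset of the datasource-key -> table-config-key mapping.
val keyMapping = Map(
  "hoodie.datasource.write.recordkey.field" -> "hoodie.table.recordkey.fields",
  "hoodie.datasource.write.table.type"      -> "hoodie.table.type")

def toTableConfigs(options: Map[String, String]): Map[String, String] = {
  val result = mutable.Map[String, String]() ++ options
  keyMapping.foreach { case (dsKey, tblKey) =>
    if (options.contains(dsKey)) {
      result(tblKey) = options(dsKey) // rewrite under the table-config key
      result.remove(dsKey)            // drop the datasource-style key
    }
  }
  result.toMap
}

assert(toTableConfigs(Map("hoodie.datasource.write.table.type" -> "MERGE_ON_READ"))
  == Map("hoodie.table.type" -> "MERGE_ON_READ"))
```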
@@ -25,6 +25,7 @@ import org.apache.hadoop.conf.Configuration
 import org.apache.hadoop.fs.Path
 import org.apache.hudi.SparkAdapterSupport
 import org.apache.hudi.client.common.HoodieSparkEngineContext
+import org.apache.hudi.common.config.DFSPropertiesConfiguration
 import org.apache.hudi.common.config.HoodieMetadataConfig
 import org.apache.hudi.common.fs.FSUtils
 import org.apache.hudi.common.model.HoodieRecord
@@ -273,7 +274,7 @@ object HoodieSqlUtils extends SparkAdapterSupport {
    */
   def withSparkConf(spark: SparkSession, options: Map[String, String])
     (baseConfig: Map[String, String] = Map.empty): Map[String, String] = {
-    baseConfig ++ // Table options has the highest priority
+    baseConfig ++ DFSPropertiesConfiguration.getGlobalProps.asScala ++ // Table options has the highest priority
       (spark.sessionState.conf.getAllConfs ++ HoodieOptionConfig.mappingSqlOptionToHoodieParam(options))
         .filterKeys(_.startsWith("hoodie."))
   }
 
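With this change `withSparkConf` layers four sources, and since the rightmost `++` operand wins, the effective order from lowest to highest priority is: `baseConfig`, the global properties from the external config file, the Spark session conf, and the statement's own SQL options. A toy illustration (the key is a placeholder):

```scala
// Lowest to highest priority, mirroring the ++ chain in withSparkConf.
val baseConfig  = Map("hoodie.example.key" -> "from-base")
val globalProps = Map("hoodie.example.key" -> "from-global-file")
val sessionConf = Map("hoodie.example.key" -> "from-spark-session")
val sqlOptions  = Map("hoodie.example.key" -> "from-sql-options")

val effective = baseConfig ++ globalProps ++
  (sessionConf ++ sqlOptions).filter { case (k, _) => k.startsWith("hoodie.") }
assert(effective("hoodie.example.key") == "from-sql-options")
```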
@@ -43,7 +43,7 @@ class AlterHoodieTableRenameCommand(
       .setConf(hadoopConf).build()
     // Init table with new name.
     HoodieTableMetaClient.withPropertyBuilder()
-      .fromProperties(metaClient.getTableConfig.getProps)
+      .fromProperties(metaClient.getTableConfig.getProps(true))
       .setTableName(newName.table)
       .initTable(hadoopConf, path)
     // Call AlterTableRenameCommand#run to rename table in meta.
 
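The `getProps(true)` overload (used here and in `TruncateHoodieTableCommand` below) appears to fold the global defaults from the external config file into the returned properties, where the old `getProps` returned only what is stored in `hoodie.properties`. A hedged model of that semantics with plain `java.util.Properties` (the helper name is illustrative, not Hudi's API):

```scala
import java.util.Properties

// Illustrative model: global defaults are applied first so that values
// persisted in hoodie.properties still take precedence over them.
def propsWithGlobalDefaults(local: Properties, global: Properties): Properties = {
  val merged = new Properties()
  merged.putAll(global) // defaults from the external config file
  merged.putAll(local)  // table-local values override the defaults
  merged
}
```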
@@ -19,9 +19,9 @@ package org.apache.spark.sql.hudi.command
 
 import org.apache.hadoop.conf.Configuration
 import org.apache.hadoop.fs.Path
 
-import org.apache.hudi.{DataSourceWriteOptions, SparkAdapterSupport}
+import org.apache.hudi.{DataSourceWriteOptions, HoodieWriterUtils, SparkAdapterSupport}
-import org.apache.hudi.HoodieWriterUtils._
+import org.apache.hudi.common.config.DFSPropertiesConfiguration
 import org.apache.hudi.common.model.HoodieFileFormat
 import org.apache.hudi.common.table.{HoodieTableConfig, HoodieTableMetaClient}
 import org.apache.hudi.hadoop.HoodieParquetInputFormat
 
@@ -47,7 +47,7 @@ import org.apache.spark.sql.{AnalysisException, Row, SparkSession}
 import org.apache.spark.{SPARK_VERSION, SparkConf}
 
 import java.util.{Locale, Properties}
 
-import scala.collection.JavaConversions.mapAsJavaMap
+import scala.collection.JavaConverters._
 import scala.collection.mutable
 import scala.util.control.NonFatal
 
@@ -100,7 +100,10 @@ case class CreateHoodieTableCommand(table: CatalogTable, ignoreIfExists: Boolean
     // if CTAS, we treat the table we just created as nonexistent
     val isTableExists = if (ctas) false else tableExistsInPath(path, conf)
     var existingTableConfig = Map.empty[String, String]
-    val sqlOptions = HoodieOptionConfig.withDefaultSqlOptions(tblProperties)
+    val globalProps = DFSPropertiesConfiguration.getGlobalProps.asScala.toMap
+    val globalSqlProps = HoodieOptionConfig.mappingTableConfigToSqlOption(
+      HoodieWriterUtils.mappingSparkDatasourceConfigsToTableConfigs(globalProps))
+    val sqlOptions = HoodieOptionConfig.withDefaultSqlOptions(globalSqlProps ++ tblProperties)
     val catalogTableProps = HoodieOptionConfig.mappingSqlOptionToTableConfig(tblProperties)
 
     // get final schema and parameters
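The three added lines route the external file's properties through two renaming steps before they reach the SQL layer: datasource-style keys become table-config keys (`mappingSparkDatasourceConfigsToTableConfigs`), those become SQL option names (`mappingTableConfigToSqlOption`), and the result seeds `withDefaultSqlOptions` so that explicit `tblproperties` still win. A toy version of the final merge (the option name and values are assumptions for illustration):

```scala
// Suppose the external file sets a precombine field and the CREATE TABLE
// statement sets its own; the statement's tblproperties take precedence.
val globalSqlProps = Map("preCombineField" -> "ts")         // derived from the global file
val tblProperties  = Map("preCombineField" -> "event_time") // from the SQL statement

val sqlOptions = globalSqlProps ++ tblProperties
assert(sqlOptions("preCombineField") == "event_time")
```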
@@ -341,7 +344,7 @@ case class CreateHoodieTableCommand(table: CatalogTable, ignoreIfExists: Boolean
       }
     } else {
       extraConfig(HoodieTableConfig.HIVE_STYLE_PARTITIONING_ENABLE.key) = "true"
-      extraConfig(HoodieTableConfig.URL_ENCODE_PARTITIONING.key) = HoodieTableConfig.URL_ENCODE_PARTITIONING.defaultValue()
+      extraConfig(HoodieTableConfig.URL_ENCODE_PARTITIONING.key) = HoodieTableConfig.URL_ENCODE_PARTITIONING.defaultValue
     }
 
     if (originTableConfig.contains(HoodieTableConfig.KEY_GENERATOR_CLASS_NAME.key)) {
 
@@ -374,7 +377,7 @@ case class CreateHoodieTableCommand(table: CatalogTable, ignoreIfExists: Boolean
     checkTableConfigEqual(originTableConfig, tableOptions, HoodieTableConfig.URL_ENCODE_PARTITIONING.key)
     checkTableConfigEqual(originTableConfig, tableOptions, HoodieTableConfig.HIVE_STYLE_PARTITIONING_ENABLE.key)
     // Save all the table config to the hoodie.properties.
-    val parameters = originTableConfig ++ tableOptions
+    val parameters = HoodieWriterUtils.mappingSparkDatasourceConfigsToTableConfigs(originTableConfig ++ tableOptions)
     val properties = new Properties()
     properties.putAll(parameters.asJava)
     HoodieTableMetaClient.withPropertyBuilder()
 
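Passing `originTableConfig ++ tableOptions` through the mapping before persisting ensures that a datasource-style key (for instance one inherited from the global config file) lands in `hoodie.properties` under its table-config spelling instead of verbatim. Reusing the illustrative `toTableConfigs` from the earlier sketch:

```scala
// A datasource-style key surviving in the merged options is rewritten to
// its table-config spelling before being written to hoodie.properties.
val merged = Map("hoodie.datasource.write.recordkey.field" -> "uuid")
assert(toTableConfigs(merged) == Map("hoodie.table.recordkey.fields" -> "uuid"))
```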
@@ -17,7 +17,7 @@
 
 package org.apache.spark.sql.hudi.command
 
-import org.apache.hudi.DataSourceWriteOptions.{OPERATION, _}
+import org.apache.hudi.DataSourceWriteOptions._
 import org.apache.hudi.common.table.HoodieTableMetaClient
 import org.apache.hudi.config.HoodieWriteConfig
 import org.apache.hudi.config.HoodieWriteConfig.TBL_NAME
 
@@ -45,7 +45,7 @@ class TruncateHoodieTableCommand(
     // Create MetaClient
     val metaClient = HoodieTableMetaClient.builder().setBasePath(path)
       .setConf(hadoopConf).build()
-    Some(metaClient.getTableConfig.getProps)
+    Some(metaClient.getTableConfig.getProps(true))
   } else {
     None
   }