1
0

[HUDI-2520] Fix CTAS statement issue when sync to hive (#5145)

This commit is contained in:
ForwardXu
2022-03-30 03:25:31 +08:00
committed by GitHub
parent e5a2baeed0
commit 941c254c33

View File

@@ -18,19 +18,18 @@
package org.apache.spark.sql.hudi.command
import org.apache.hadoop.fs.Path
import org.apache.hudi.{DataSourceWriteOptions, SparkAdapterSupport}
import org.apache.hudi.common.model.HoodieFileFormat
import org.apache.hudi.common.table.HoodieTableConfig
import org.apache.hudi.hadoop.HoodieParquetInputFormat
import org.apache.hudi.hadoop.realtime.HoodieParquetRealtimeInputFormat
import org.apache.hudi.hadoop.utils.HoodieInputFormatUtils
import org.apache.spark.sql.catalyst.analysis.{NoSuchDatabaseException, TableAlreadyExistsException}
import org.apache.hudi.{DataSourceWriteOptions, SparkAdapterSupport}
import org.apache.spark.sql.catalyst.analysis.NoSuchDatabaseException
import org.apache.spark.sql.catalyst.catalog._
import org.apache.spark.sql.hive.HiveClientUtils
import org.apache.spark.sql.hive.HiveExternalCatalog._
import org.apache.spark.sql.hudi.{HoodieOptionConfig, HoodieSqlCommonUtils}
import org.apache.spark.sql.hudi.HoodieSqlCommonUtils.isEnableHive
import org.apache.spark.sql.hudi.{HoodieOptionConfig, HoodieSqlCommonUtils}
import org.apache.spark.sql.internal.StaticSQLConf.SCHEMA_STRING_LENGTH_THRESHOLD
import org.apache.spark.sql.types.StructType
import org.apache.spark.sql.{Row, SparkSession}
@@ -167,10 +166,6 @@ object CreateHoodieTableCommand {
if (!dbExists) {
throw new NoSuchDatabaseException(dbName)
}
// check table exists
if (sparkSession.sessionState.catalog.tableExists(table.identifier)) {
throw new TableAlreadyExistsException(dbName, table.identifier.table)
}
// append some table properties need for spark data source table.
val dataSourceProps = tableMetaToTableProps(sparkSession.sparkContext.conf,
table, table.schema)
@@ -179,7 +174,7 @@ object CreateHoodieTableCommand {
val client = HiveClientUtils.newClientForMetadata(sparkSession.sparkContext.conf,
sparkSession.sessionState.newHadoopConf())
// create hive table.
client.createTable(tableWithDataSourceProps, ignoreIfExists)
client.createTable(tableWithDataSourceProps, ignoreIfExists = true)
}
// This code is forked from org.apache.spark.sql.hive.HiveExternalCatalog#tableMetaToTableProps