From 941c254c33a83909c58dea6b7eb2b1ffa7b5aaec Mon Sep 17 00:00:00 2001 From: ForwardXu Date: Wed, 30 Mar 2022 03:25:31 +0800 Subject: [PATCH] [HUDI-2520] Fix CTAS statement issue when sync to hive (#5145) --- .../sql/hudi/command/CreateHoodieTableCommand.scala | 13 ++++--------- 1 file changed, 4 insertions(+), 9 deletions(-) diff --git a/hudi-spark-datasource/hudi-spark-common/src/main/scala/org/apache/spark/sql/hudi/command/CreateHoodieTableCommand.scala b/hudi-spark-datasource/hudi-spark-common/src/main/scala/org/apache/spark/sql/hudi/command/CreateHoodieTableCommand.scala index da9fcb8d4..195bf4153 100644 --- a/hudi-spark-datasource/hudi-spark-common/src/main/scala/org/apache/spark/sql/hudi/command/CreateHoodieTableCommand.scala +++ b/hudi-spark-datasource/hudi-spark-common/src/main/scala/org/apache/spark/sql/hudi/command/CreateHoodieTableCommand.scala @@ -18,19 +18,18 @@ package org.apache.spark.sql.hudi.command import org.apache.hadoop.fs.Path - -import org.apache.hudi.{DataSourceWriteOptions, SparkAdapterSupport} import org.apache.hudi.common.model.HoodieFileFormat import org.apache.hudi.common.table.HoodieTableConfig import org.apache.hudi.hadoop.HoodieParquetInputFormat import org.apache.hudi.hadoop.realtime.HoodieParquetRealtimeInputFormat import org.apache.hudi.hadoop.utils.HoodieInputFormatUtils -import org.apache.spark.sql.catalyst.analysis.{NoSuchDatabaseException, TableAlreadyExistsException} +import org.apache.hudi.{DataSourceWriteOptions, SparkAdapterSupport} +import org.apache.spark.sql.catalyst.analysis.NoSuchDatabaseException import org.apache.spark.sql.catalyst.catalog._ import org.apache.spark.sql.hive.HiveClientUtils import org.apache.spark.sql.hive.HiveExternalCatalog._ -import org.apache.spark.sql.hudi.{HoodieOptionConfig, HoodieSqlCommonUtils} import org.apache.spark.sql.hudi.HoodieSqlCommonUtils.isEnableHive +import org.apache.spark.sql.hudi.{HoodieOptionConfig, HoodieSqlCommonUtils} import 
org.apache.spark.sql.internal.StaticSQLConf.SCHEMA_STRING_LENGTH_THRESHOLD import org.apache.spark.sql.types.StructType import org.apache.spark.sql.{Row, SparkSession} @@ -167,10 +166,6 @@ object CreateHoodieTableCommand { if (!dbExists) { throw new NoSuchDatabaseException(dbName) } - // check table exists - if (sparkSession.sessionState.catalog.tableExists(table.identifier)) { - throw new TableAlreadyExistsException(dbName, table.identifier.table) - } // append some table properties need for spark data source table. val dataSourceProps = tableMetaToTableProps(sparkSession.sparkContext.conf, table, table.schema) @@ -179,7 +174,7 @@ object CreateHoodieTableCommand { val client = HiveClientUtils.newClientForMetadata(sparkSession.sparkContext.conf, sparkSession.sessionState.newHadoopConf()) // create hive table. - client.createTable(tableWithDataSourceProps, ignoreIfExists) + client.createTable(tableWithDataSourceProps, ignoreIfExists = true) } // This code is forked from org.apache.spark.sql.hive.HiveExternalCatalog#tableMetaToTableProps