1
0

[HUDI-3493] Not able to get execution plan (#4894)

This commit is contained in:
ForwardXu
2022-02-25 09:04:44 +08:00
committed by GitHub
parent 85e8a5c4de
commit aa1810d737
2 changed files with 5 additions and 5 deletions

View File

@@ -19,17 +19,14 @@ package org.apache.spark.sql.hudi.command
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.Path
import org.apache.hudi.DataSourceWriteOptions
import org.apache.hudi.hive.util.ConfigUtils
import org.apache.hudi.sql.InsertMode
import org.apache.spark.sql.{Row, SaveMode, SparkSession}
import org.apache.spark.sql.catalyst.catalog.{CatalogTable, CatalogTableType, HoodieCatalogTable}
import org.apache.spark.sql.catalyst.plans.QueryPlan
import org.apache.spark.sql.catalyst.plans.logical.{LogicalPlan, Project}
import org.apache.spark.sql.execution.SparkPlan
import org.apache.spark.sql.execution.command.DataWritingCommand
import org.apache.spark.sql.hudi.HoodieSqlCommonUtils
import org.apache.spark.sql.{Row, SaveMode, SparkSession}
import scala.collection.JavaConverters._
@@ -40,6 +37,7 @@ case class CreateHoodieTableAsSelectCommand(
table: CatalogTable,
mode: SaveMode,
query: LogicalPlan) extends HoodieLeafRunnableCommand {
// Expose the CTAS query plan as an inner child so plan visitors (e.g. EXPLAIN)
// can traverse and render it — per the commit title, execution plans were not
// retrievable without this override.
override def innerChildren: Seq[QueryPlan[_]] = Seq(query)
override def run(sparkSession: SparkSession): Seq[Row] = {
assert(table.tableType != CatalogTableType.VIEW)

View File

@@ -21,6 +21,7 @@ import org.apache.hudi.HoodieSparkSqlWriter
import org.apache.spark.internal.Logging
import org.apache.spark.sql.catalyst.catalog.{CatalogTable, HoodieCatalogTable}
import org.apache.spark.sql.catalyst.expressions.{Alias, Literal}
import org.apache.spark.sql.catalyst.plans.QueryPlan
import org.apache.spark.sql.catalyst.plans.logical.{LogicalPlan, Project}
import org.apache.spark.sql.execution.datasources.LogicalRelation
import org.apache.spark.sql.hudi.HoodieSqlCommonUtils._
@@ -37,6 +38,7 @@ case class InsertIntoHoodieTableCommand(
partition: Map[String, Option[String]],
overwrite: Boolean)
extends HoodieLeafRunnableCommand {
// Expose the INSERT source query plan as an inner child so plan visitors
// (e.g. EXPLAIN) can traverse and render it — per the commit title, execution
// plans were not retrievable without this override.
override def innerChildren: Seq[QueryPlan[_]] = Seq(query)
override def run(sparkSession: SparkSession): Seq[Row] = {
assert(logicalRelation.catalogTable.isDefined, "Missing catalog table")