1
0

[HUDI-4309] Spark3.2 custom parser should not throw exception (#5947)

This commit is contained in:
cxzl25
2022-06-27 09:37:23 +08:00
committed by GitHub
parent 0a9e568ff5
commit 7a6eb0f6e1
2 changed files with 22 additions and 3 deletions

View File

@@ -238,4 +238,16 @@ class TestTimeTravelTable extends HoodieSparkSqlTestBase {
}
}
}
test("Test Unsupported syntax can be parsed") {
  if (HoodieSparkUtils.gteqSpark3_2) {
    // "DISTRIBUTE BY" is not recognized by the Hudi extended parser; it must
    // fall back to Spark's own parser instead of throwing.
    checkAnswer("select 1 distribute by 1")(Seq(1))
    withTempDir { tempDir =>
      val outputPath = tempDir.toURI.getPath
      // "INSERT OVERWRITE LOCAL DIRECTORY ... USING" is likewise delegated to Spark.
      spark.sql(s"insert overwrite local directory '$outputPath' using parquet select 1")
      // The "STORED AS" variant requires Hive support to be enabled, so it is
      // not exercised here:
      // spark.sql(s"insert overwrite local directory '$outputPath' stored as orc select 1")
    }
  }
}
}

View File

@@ -32,6 +32,8 @@ import org.apache.spark.sql.catalyst.{FunctionIdentifier, TableIdentifier}
import org.apache.spark.sql.types._
import org.apache.spark.sql.{AnalysisException, SparkSession}
import scala.util.control.NonFatal
class HoodieSpark3_2ExtendedSqlParser(session: SparkSession, delegate: ParserInterface)
extends ParserInterface with Logging {
@@ -39,9 +41,14 @@ class HoodieSpark3_2ExtendedSqlParser(session: SparkSession, delegate: ParserInt
private lazy val builder = new HoodieSpark3_2ExtendedSqlAstBuilder(conf, delegate)
/**
 * Parses the given SQL text into a [[LogicalPlan]].
 *
 * First attempts to parse with the Hudi extended AST builder. If the builder
 * does not produce a LogicalPlan, or parsing fails with a non-fatal error
 * (e.g. syntax the extended grammar does not support, such as
 * "select 1 distribute by 1"), fall back to the delegate Spark parser
 * instead of surfacing the exception to the caller.
 *
 * NOTE(review): the surrounding diff render interleaved the pre-change body
 * (the bare match without try/catch) with the post-change one; this is the
 * consolidated post-commit version.
 */
override def parsePlan(sqlText: String): LogicalPlan = parse(sqlText) { parser =>
  try {
    builder.visit(parser.singleStatement()) match {
      case plan: LogicalPlan => plan
      // The builder returns something other than a plan for statements the
      // extended grammar does not model — delegate to the default parser.
      case _ => delegate.parsePlan(sqlText)
    }
  } catch {
    // Only catch non-fatal errors so VM errors, interrupts, etc. still propagate.
    case NonFatal(_) =>
      delegate.parsePlan(sqlText)
  }
}