1
0

[HUDI-2811] Support Spark 3.2 (#4270)

This commit is contained in:
Yann Byron
2021-12-28 16:12:44 +08:00
committed by GitHub
parent 32505d5adb
commit 05942e018c
36 changed files with 596 additions and 167 deletions

View File

@@ -47,8 +47,14 @@ import scala.collection.JavaConverters.asScalaBufferConverter
object HoodieSparkUtils extends SparkAdapterSupport {
/** True when the active runtime is any Spark 2.x release (version-string prefix check). */
def isSpark2: Boolean = SPARK_VERSION.take(2) == "2."
/** True when the active runtime is any Spark 3.x release (version-string prefix check). */
def isSpark3: Boolean = SPARK_VERSION.take(2) == "3."
/** True when the active runtime is a Spark 3.0.x release (version-string prefix check). */
def isSpark3_0: Boolean = SPARK_VERSION.take(3) == "3.0"
/** True when the active runtime is a Spark 3.2.x release (version-string prefix check). */
def isSpark3_2: Boolean = SPARK_VERSION.take(3) == "3.2"
def getMetaSchema: StructType = {
StructType(HoodieRecord.HOODIE_META_COLUMNS.asScala.map(col => {
StructField(col, StringType, nullable = true)

View File

@@ -20,7 +20,8 @@ package org.apache.spark.sql.execution.datasources
import java.util.TimeZone
import org.apache.hadoop.fs.Path
import org.apache.spark.sql.execution.datasources.PartitioningUtils.PartitionValues
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.types.DataType
trait SparkParsePartitionUtil extends Serializable {
@@ -30,5 +31,5 @@ trait SparkParsePartitionUtil extends Serializable {
typeInference: Boolean,
basePaths: Set[Path],
userSpecifiedDataTypes: Map[String, DataType],
timeZone: TimeZone): Option[PartitionValues]
timeZone: TimeZone): InternalRow
}