[HUDI-3726] Switching from non-partitioned to partitioned key gen does not throw any exception (#5205)
This commit is contained in:
@@ -17,11 +17,13 @@
|
||||
|
||||
package org.apache.hudi
|
||||
|
||||
import java.io.IOException
|
||||
import java.time.Instant
|
||||
import java.util.{Collections, Date, UUID}
|
||||
|
||||
import org.apache.commons.io.FileUtils
|
||||
import org.apache.hadoop.fs.Path
|
||||
import org.apache.hudi.DataSourceWriteOptions._
|
||||
import org.apache.hudi.client.SparkRDDWriteClient
|
||||
import org.apache.hudi.common.config.HoodieConfig
|
||||
import org.apache.hudi.common.model._
|
||||
import org.apache.hudi.common.table.{HoodieTableConfig, HoodieTableMetaClient, TableSchemaResolver}
|
||||
import org.apache.hudi.common.testutils.HoodieTestDataGenerator
|
||||
@@ -29,7 +31,6 @@ import org.apache.hudi.config.{HoodieBootstrapConfig, HoodieWriteConfig}
|
||||
import org.apache.hudi.exception.HoodieException
|
||||
import org.apache.hudi.execution.bulkinsert.BulkInsertSortMode
|
||||
import org.apache.hudi.functional.TestBootstrap
|
||||
import org.apache.hudi.hive.HiveSyncConfig
|
||||
import org.apache.hudi.keygen.{ComplexKeyGenerator, NonpartitionedKeyGenerator, SimpleKeyGenerator}
|
||||
import org.apache.hudi.testutils.DataSourceTestUtils
|
||||
import org.apache.spark.api.java.JavaSparkContext
|
||||
@@ -37,7 +38,6 @@ import org.apache.spark.sql._
|
||||
import org.apache.spark.sql.functions.{expr, lit}
|
||||
import org.apache.spark.sql.hudi.HoodieSparkSessionExtension
|
||||
import org.apache.spark.sql.hudi.command.SqlKeyGenerator
|
||||
import org.apache.spark.sql.internal.{SQLConf, StaticSQLConf}
|
||||
import org.apache.spark.{SparkConf, SparkContext}
|
||||
import org.junit.jupiter.api.Assertions.{assertEquals, assertFalse, assertTrue, fail}
|
||||
import org.junit.jupiter.api.{AfterEach, BeforeEach, Test}
|
||||
@@ -46,11 +46,8 @@ import org.junit.jupiter.params.provider.{CsvSource, EnumSource, ValueSource}
|
||||
import org.mockito.ArgumentMatchers.any
|
||||
import org.mockito.Mockito.{spy, times, verify}
|
||||
import org.scalatest.Assertions.assertThrows
|
||||
import org.scalatest.Matchers.{assertResult, be, convertToAnyShouldWrapper, intercept}
|
||||
import org.scalatest.Matchers.{be, convertToAnyShouldWrapper, intercept}
|
||||
|
||||
import java.io.IOException
|
||||
import java.time.Instant
|
||||
import java.util.{Collections, Date, UUID}
|
||||
import scala.collection.JavaConversions._
|
||||
import scala.collection.JavaConverters
|
||||
|
||||
@@ -887,6 +884,139 @@ class TestHoodieSparkSqlWriter {
|
||||
assert(data.select("_hoodie_partition_path").map(_.getString(0)).distinct.collect.head == "2021-10-16")
|
||||
}
|
||||
|
||||
@Test
def testNonpartitonedToDefaultKeyGen(): Unit = {
  val _spark = spark
  import _spark.implicits._

  // Common write options shared by both commits; note that no key generator is set here.
  val writeOpts = Map(
    DataSourceWriteOptions.RECORDKEY_FIELD.key -> "id",
    DataSourceWriteOptions.PRECOMBINE_FIELD.key -> "ts",
    DataSourceWriteOptions.PARTITIONPATH_FIELD.key -> "dt"
  )
  val tableName1 = "hoodie_test_params_1"
  val tablePath1 = s"$tempBasePath" + "_1"

  // Case: commit C1 explicitly configures NonpartitionedKeyGenerator; commit C2 omits the
  // key generator config, so it falls back to the default (SimpleKeyGenerator).
  val firstCommitDF = Seq((1, "a1", 10, 1000, "2021-10-16")).toDF("id", "name", "value", "ts", "dt")
  firstCommitDF.write.format("hudi")
    .options(writeOpts)
    .option(HoodieWriteConfig.TBL_NAME.key, tableName1)
    .option(HoodieWriteConfig.KEYGENERATOR_CLASS_NAME.key, classOf[NonpartitionedKeyGenerator].getName)
    .mode(SaveMode.Overwrite).save(tablePath1)

  // The append without an explicit key generator must be rejected: the implicit default
  // (SimpleKeyGenerator) conflicts with the table's persisted NonpartitionedKeyGenerator.
  val secondCommitDF = Seq((2, "a2", 20, 1000, "2021-10-16")).toDF("id", "name", "value", "ts", "dt")
  val configConflictException = intercept[HoodieException] {
    secondCommitDF.write.format("hudi")
      .options(writeOpts)
      .option(HoodieWriteConfig.TBL_NAME.key, tableName1)
      .mode(SaveMode.Append).save(tablePath1)
  }
  assert(configConflictException.getMessage.contains("Config conflict"))
  assert(configConflictException.getMessage.contains(s"KeyGenerator:\t${classOf[SimpleKeyGenerator].getName}\t${classOf[NonpartitionedKeyGenerator].getName}"))
}
|
||||
|
||||
@Test
def testDefaultKeyGenToNonpartitoned(): Unit = {
  val _spark = spark
  import _spark.implicits._

  // Common write options shared by both commits; note that no key generator is set here.
  val writeOpts = Map(
    DataSourceWriteOptions.RECORDKEY_FIELD.key -> "id",
    DataSourceWriteOptions.PRECOMBINE_FIELD.key -> "ts",
    DataSourceWriteOptions.PARTITIONPATH_FIELD.key -> "dt"
  )
  val tableName1 = "hoodie_test_params_1"
  val tablePath1 = s"$tempBasePath" + "_1"

  // Case: commit C1 omits the key generator (so the default SimpleKeyGenerator applies);
  // commit C2 explicitly configures NonpartitionedKeyGenerator.
  val firstCommitDF = Seq((1, "a1", 10, 1000, "2021-10-16")).toDF("id", "name", "value", "ts", "dt")
  firstCommitDF.write.format("hudi")
    .options(writeOpts)
    .option(HoodieWriteConfig.TBL_NAME.key, tableName1)
    .mode(SaveMode.Overwrite).save(tablePath1)

  // The append with NonpartitionedKeyGenerator must be rejected: it conflicts with the
  // table's persisted default key generator (SimpleKeyGenerator).
  val secondCommitDF = Seq((2, "a2", 20, 1000, "2021-10-16")).toDF("id", "name", "value", "ts", "dt")
  val configConflictException = intercept[HoodieException] {
    secondCommitDF.write.format("hudi")
      .options(writeOpts)
      .option(HoodieWriteConfig.TBL_NAME.key, tableName1)
      .option(HoodieWriteConfig.KEYGENERATOR_CLASS_NAME.key, classOf[NonpartitionedKeyGenerator].getName)
      .mode(SaveMode.Append).save(tablePath1)
  }
  assert(configConflictException.getMessage.contains("Config conflict"))
  assert(configConflictException.getMessage.contains(s"KeyGenerator:\t${classOf[NonpartitionedKeyGenerator].getName}\t${classOf[SimpleKeyGenerator].getName}"))
}
|
||||
|
||||
|
||||
@Test
def testNoKeyGenToSimpleKeyGen(): Unit = {
  val _spark = spark
  import _spark.implicits._
  val df = Seq((1, "a1", 10, 1000, "2021-10-16")).toDF("id", "name", "value", "ts", "dt")
  // Common write options; no key generator is set, so the default (SimpleKeyGenerator) applies.
  val options = Map(
    DataSourceWriteOptions.RECORDKEY_FIELD.key -> "id",
    DataSourceWriteOptions.PRECOMBINE_FIELD.key -> "ts",
    DataSourceWriteOptions.PARTITIONPATH_FIELD.key -> "dt"
  )

  // Case: commit C1 does not specify a key generator (implicitly SimpleKeyGenerator) and
  // commit C2 explicitly specifies SimpleKeyGenerator — they are equivalent, so no conflict.
  val (tableName1, tablePath1) = ("hoodie_test_params_1", s"$tempBasePath" + "_1")

  // The first write relies on the default key generator.
  df.write.format("hudi")
    .options(options)
    .option(HoodieWriteConfig.TBL_NAME.key, tableName1)
    .mode(SaveMode.Overwrite).save(tablePath1)

  val df2 = Seq((2, "a2", 20, 1000, "2021-10-16")).toDF("id", "name", "value", "ts", "dt")
  // No exception should be raised: explicit SimpleKeyGenerator matches the implicit default.
  try {
    df2.write.format("hudi")
      .options(options)
      .option(HoodieWriteConfig.TBL_NAME.key, tableName1)
      .option(HoodieWriteConfig.KEYGENERATOR_CLASS_NAME.key, classOf[SimpleKeyGenerator].getName)
      .mode(SaveMode.Append).save(tablePath1)
  } catch {
    // Catch only Exception (a bare `case _` would also swallow fatal JVM Errors and
    // assertion failures); pass the cause to fail() so the report shows the real error.
    case e: Exception => fail("Switching from no keygen to explicit SimpleKeyGenerator should not fail", e)
  }
}
|
||||
|
||||
@Test
def testSimpleKeyGenToNoKeyGen(): Unit = {
  val _spark = spark
  import _spark.implicits._
  val df = Seq((1, "a1", 10, 1000, "2021-10-16")).toDF("id", "name", "value", "ts", "dt")
  // Common write options; the key generator is configured per-commit below.
  val options = Map(
    DataSourceWriteOptions.RECORDKEY_FIELD.key -> "id",
    DataSourceWriteOptions.PRECOMBINE_FIELD.key -> "ts",
    DataSourceWriteOptions.PARTITIONPATH_FIELD.key -> "dt"
  )

  // Case: commit C1 explicitly specifies SimpleKeyGenerator and commit C2 does not
  // specify one — the default is SimpleKeyGenerator anyway, so no conflict is expected.
  val (tableName1, tablePath1) = ("hoodie_test_params_1", s"$tempBasePath" + "_1")

  // The first write explicitly sets KEYGENERATOR_CLASS_NAME.
  df.write.format("hudi")
    .options(options)
    .option(HoodieWriteConfig.TBL_NAME.key, tableName1)
    .option(HoodieWriteConfig.KEYGENERATOR_CLASS_NAME.key, classOf[SimpleKeyGenerator].getName)
    .mode(SaveMode.Overwrite).save(tablePath1)

  val df2 = Seq((2, "a2", 20, 1000, "2021-10-16")).toDF("id", "name", "value", "ts", "dt")
  // No exception should be raised: the implicit default matches the persisted SimpleKeyGenerator.
  try {
    df2.write.format("hudi")
      .options(options)
      .option(HoodieWriteConfig.TBL_NAME.key, tableName1)
      .mode(SaveMode.Append).save(tablePath1)
  } catch {
    // Catch only Exception (a bare `case _` would also swallow fatal JVM Errors and
    // assertion failures); pass the cause to fail() so the report shows the real error.
    case e: Exception => fail("Switching from explicit SimpleKeyGenerator to default keygen should not fail", e)
  }
}
|
||||
|
||||
@Test
|
||||
def testGetOriginKeyGenerator(): Unit = {
|
||||
// for dataframe write
|
||||
|
||||
Reference in New Issue
Block a user