From f109c6cb0d187aedfbbcfab48ef5c5bf717e9cca Mon Sep 17 00:00:00 2001
From: pengzhiwei
Date: Thu, 29 Jul 2021 17:29:10 +0800
Subject: [PATCH] [MINOR] Fix checkstyle error (#3365)

---
 .../hudi/functional/TestCOWDataSource.scala   | 13 ----
 .../hudi/functional/TestEmptyCommit.scala     | 68 +++++++++++++++++++
 2 files changed, 68 insertions(+), 13 deletions(-)
 create mode 100644 hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/hudi/functional/TestEmptyCommit.scala

diff --git a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/hudi/functional/TestCOWDataSource.scala b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/hudi/functional/TestCOWDataSource.scala
index 3a1165a62..0cdb16a9e 100644
--- a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/hudi/functional/TestCOWDataSource.scala
+++ b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/hudi/functional/TestCOWDataSource.scala
@@ -790,17 +790,4 @@ class TestCOWDataSource extends HoodieClientTestBase {
     val resultSchema = new StructType(recordsReadDF.schema.filter(p=> !p.name.startsWith("_hoodie")).toArray)
     assertEquals(resultSchema, schema1)
   }
-
-  @ParameterizedTest
-  @ValueSource(booleans = Array(true, false))
-  def testWithEmptyInput(allowEmptyCommit: Boolean): Unit = {
-    val inputDF1 = spark.read.json(spark.sparkContext.parallelize(Seq.empty[String], 1))
-    inputDF1.write.format("org.apache.hudi")
-      .options(commonOpts)
-      .option(DataSourceWriteOptions.OPERATION_OPT_KEY.key(), DataSourceWriteOptions.INSERT_OPERATION_OPT_VAL)
-      .option(HoodieWriteConfig.ALLOW_EMPTY_COMMIT.key(), allowEmptyCommit.toString)
-      .mode(SaveMode.Overwrite)
-      .save(basePath)
-    assertEquals(allowEmptyCommit, HoodieDataSourceHelpers.hasNewCommits(fs, basePath, "000"))
-  }
 }
diff --git a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/hudi/functional/TestEmptyCommit.scala b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/hudi/functional/TestEmptyCommit.scala
new file mode 100644
index 000000000..2f9dfc62c
--- /dev/null
+++ b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/hudi/functional/TestEmptyCommit.scala
@@ -0,0 +1,68 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hudi.functional
+
+import org.apache.hudi.{DataSourceWriteOptions, HoodieDataSourceHelpers}
+import org.apache.hudi.config.HoodieWriteConfig
+import org.apache.hudi.testutils.HoodieClientTestBase
+import org.apache.spark.sql.{SaveMode, SparkSession}
+import org.junit.jupiter.api.{AfterEach, BeforeEach}
+import org.junit.jupiter.api.Assertions.assertEquals
+import org.junit.jupiter.params.ParameterizedTest
+import org.junit.jupiter.params.provider.ValueSource
+
+class TestEmptyCommit extends HoodieClientTestBase {
+  var spark: SparkSession = _
+  val commonOpts = Map(
+    "hoodie.insert.shuffle.parallelism" -> "4",
+    "hoodie.upsert.shuffle.parallelism" -> "4",
+    DataSourceWriteOptions.RECORDKEY_FIELD_OPT_KEY.key -> "_row_key",
+    DataSourceWriteOptions.PARTITIONPATH_FIELD_OPT_KEY.key -> "partition",
+    DataSourceWriteOptions.PRECOMBINE_FIELD_OPT_KEY.key -> "timestamp",
+    HoodieWriteConfig.TABLE_NAME.key -> "hoodie_test"
+  )
+
+  @BeforeEach override def setUp(): Unit = {
+    initPath()
+    initSparkContexts()
+    spark = sqlContext.sparkSession
+    initTestDataGenerator()
+    initFileSystem()
+  }
+
+  @AfterEach override def tearDown(): Unit = {
+    cleanupSparkContexts()
+    cleanupTestDataGenerator()
+    cleanupFileSystem()
+  }
+
+  @ParameterizedTest
+  @ValueSource(booleans = Array(true, false))
+  def testWithEmptyInput(allowEmptyCommit: Boolean): Unit = {
+    // An empty RDD[String] parsed as JSON yields a DataFrame with zero rows.
+    val inputDF1 = spark.read.json(spark.sparkContext.parallelize(Seq.empty[String], 1))
+    inputDF1.write.format("org.apache.hudi")
+      .options(commonOpts)
+      .option(DataSourceWriteOptions.OPERATION_OPT_KEY.key(), DataSourceWriteOptions.INSERT_OPERATION_OPT_VAL)
+      .option(HoodieWriteConfig.ALLOW_EMPTY_COMMIT.key(), allowEmptyCommit.toString)
+      .mode(SaveMode.Overwrite)
+      .save(basePath)
+    // A new instant should appear on the timeline iff empty commits are allowed.
+    assertEquals(allowEmptyCommit, HoodieDataSourceHelpers.hasNewCommits(fs, basePath, "000"))
+  }
+}
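
Note for reviewers: the new test parameterizes HoodieWriteConfig.ALLOW_EMPTY_COMMIT,
which controls whether a write that produces no records still creates a commit
instant on the timeline. A minimal sketch of the positive case, assuming the
HoodieClientTestBase fixtures used above (spark, commonOpts, fs, basePath) are
in scope:

    // An empty RDD[String] read as JSON gives a DataFrame with zero rows.
    val emptyDF = spark.read.json(spark.sparkContext.parallelize(Seq.empty[String], 1))
    emptyDF.write.format("org.apache.hudi")
      .options(commonOpts)
      .option(DataSourceWriteOptions.OPERATION_OPT_KEY.key(), DataSourceWriteOptions.INSERT_OPERATION_OPT_VAL)
      // Allow the commit even though no records were written.
      .option(HoodieWriteConfig.ALLOW_EMPTY_COMMIT.key(), "true")
      .mode(SaveMode.Overwrite)
      .save(basePath)
    // "000" sorts before any real instant time, so this checks whether the
    // timeline now contains any commit at all.
    assert(HoodieDataSourceHelpers.hasNewCommits(fs, basePath, "000"))

With the flag set to "false", the same write leaves the timeline untouched and
hasNewCommits returns false, which is exactly what the parameterized assertion
in testWithEmptyInput verifies.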