[MINOR] Fix Call Procedure code style (#6186)

* Fix Call Procedure code style.
Co-authored-by: superche <superche@tencent.com>
commit 859157ec01
parent a5348cc685
Author: superche <superche@tencent.com>
Date: 2022-07-23 17:18:38 +08:00
Committed by: GitHub
33 changed files with 167 additions and 162 deletions
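
For context, every rename below converges on one convention: snake_case procedure names and snake_case keyword arguments in CALL statements. The following is a minimal, hypothetical Scala sketch of the resulting call syntax — it assumes a local Spark session with the Hudi Spark bundle on the classpath and an already-created Hudi table named hudi_demo; neither the object name nor the table is part of this commit.

import org.apache.spark.sql.SparkSession

object CallProcedureStyleSketch {
  def main(args: Array[String]): Unit = {
    // Assumes the Hudi Spark bundle is on the classpath; the extension below
    // is Hudi's documented hook for enabling CALL procedure syntax in Spark SQL.
    val spark = SparkSession.builder()
      .appName("call-procedure-style")
      .master("local[1]")
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .config("spark.sql.extensions", "org.apache.spark.sql.hudi.HoodieSparkSessionExtension")
      .getOrCreate()

    val tableName = "hudi_demo" // hypothetical table, created elsewhere

    // Before this commit the argument was camelCase:
    //   call run_clean(table => 'hudi_demo', retainCommits => 1)
    // After it, arguments are snake_case, matching the rest of the procedure API:
    spark.sql(s"call run_clean(table => '$tableName', retain_commits => 1)").show()

    spark.stop()
  }
}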

@@ -55,12 +55,12 @@ class TestBootstrapProcedure extends HoodieSparkSqlTestBase {
checkAnswer(
s"""call run_bootstrap(
|table => '$tableName',
-|basePath => '$tablePath',
-|tableType => '${HoodieTableType.COPY_ON_WRITE.name}',
-|bootstrapPath => '$sourcePath',
-|rowKeyField => '$RECORD_KEY_FIELD',
-|partitionPathField => '$PARTITION_FIELD',
-|bootstrapOverwrite => true)""".stripMargin) {
+|base_path => '$tablePath',
+|table_type => '${HoodieTableType.COPY_ON_WRITE.name}',
+|bootstrap_path => '$sourcePath',
+|rowKey_field => '$RECORD_KEY_FIELD',
+|partition_path_field => '$PARTITION_FIELD',
+|bootstrap_overwrite => true)""".stripMargin) {
Seq(0)
}

@@ -156,7 +156,7 @@ class TestCallProcedure extends HoodieSparkSqlTestBase {
// Check required fields
checkExceptionContain(s"""call delete_marker(table => '$tableName')""")(
-s"Argument: instant_Time is required")
+s"Argument: instant_time is required")
val instantTime = "101"
FileCreateUtils.createMarkerFile(tablePath, "", instantTime, "f0", IOType.APPEND)
@@ -164,7 +164,7 @@ class TestCallProcedure extends HoodieSparkSqlTestBase {
FileCreateUtils.getTotalMarkerFileCount(tablePath, "", instantTime, IOType.APPEND)
}
-checkAnswer(s"""call delete_marker(table => '$tableName', instant_Time => '$instantTime')""")(Seq(true))
+checkAnswer(s"""call delete_marker(table => '$tableName', instant_time => '$instantTime')""")(Seq(true))
assertResult(0) {
FileCreateUtils.getTotalMarkerFileCount(tablePath, "", instantTime, IOType.APPEND)

@@ -48,7 +48,7 @@ class TestCleanProcedure extends HoodieSparkSqlTestBase {
spark.sql(s"update $tableName set price = 12 where id = 1")
spark.sql(s"update $tableName set price = 13 where id = 1")
-val result1 = spark.sql(s"call run_clean(table => '$tableName', retainCommits => 1)")
+val result1 = spark.sql(s"call run_clean(table => '$tableName', retain_commits => 1)")
.collect()
.map(row => Seq(row.getString(0), row.getLong(1), row.getInt(2), row.getString(3), row.getString(4), row.getInt(5)))

@@ -62,7 +62,7 @@ class TestCommitsProcedure extends HoodieSparkSqlTestBase {
// collect archived commits for table
val endTs = commits(0).get(0).toString
-val archivedCommits = spark.sql(s"""call show_archived_commits(table => '$tableName', endTs => '$endTs')""").collect()
+val archivedCommits = spark.sql(s"""call show_archived_commits(table => '$tableName', end_ts => '$endTs')""").collect()
assertResult(4) {
archivedCommits.length
}
@@ -110,7 +110,7 @@ class TestCommitsProcedure extends HoodieSparkSqlTestBase {
// collect archived commits for table
val endTs = commits(0).get(0).toString
-val archivedCommits = spark.sql(s"""call show_archived_commits_metadata(table => '$tableName', endTs => '$endTs')""").collect()
+val archivedCommits = spark.sql(s"""call show_archived_commits_metadata(table => '$tableName', end_ts => '$endTs')""").collect()
assertResult(4) {
archivedCommits.length
}

@@ -43,7 +43,7 @@ class TestExportInstantsProcedure extends HoodieSparkSqlTestBase {
// insert data to table
spark.sql(s"insert into $tableName select 1, 'a1', 10, 1000")
-val result = spark.sql(s"""call export_instants(table => '$tableName', localFolder => '${tmp.getCanonicalPath}/$tableName')""").limit(1).collect()
+val result = spark.sql(s"""call export_instants(table => '$tableName', local_folder => '${tmp.getCanonicalPath}/$tableName')""").limit(1).collect()
assertResult(1) {
result.length
}

@@ -56,15 +56,15 @@ class TestHdfsParquetImportProcedure extends HoodieSparkSqlTestBase {
val insertData: util.List[GenericRecord] = createInsertRecords(sourcePath)
// Check required fields
-checkExceptionContain(s"""call hdfs_parquet_import(tableType => 'mor')""")(
+checkExceptionContain(s"""call hdfs_parquet_import(table_type => 'mor')""")(
s"Argument: table is required")
checkAnswer(
s"""call hdfs_parquet_import(
-|table => '$tableName', tableType => '${HoodieTableType.COPY_ON_WRITE.name}',
-|srcPath => '$sourcePath', targetPath => '$targetPath',
-|rowKey => '_row_key', partitionKey => 'timestamp',
-|schemaFilePath => '$schemaFile')""".stripMargin) {
+|table => '$tableName', table_type => '${HoodieTableType.COPY_ON_WRITE.name}',
+|src_path => '$sourcePath', target_path => '$targetPath',
+|row_key => '_row_key', partition_key => 'timestamp',
+|schema_file_path => '$schemaFile')""".stripMargin) {
Seq(0)
}
@@ -89,15 +89,15 @@ class TestHdfsParquetImportProcedure extends HoodieSparkSqlTestBase {
val insertData: util.List[GenericRecord] = createUpsertRecords(sourcePath)
// Check required fields
-checkExceptionContain(s"""call hdfs_parquet_import(tableType => 'mor')""")(
+checkExceptionContain(s"""call hdfs_parquet_import(table_type => 'mor')""")(
s"Argument: table is required")
checkAnswer(
s"""call hdfs_parquet_import(
-|table => '$tableName', tableType => '${HoodieTableType.COPY_ON_WRITE.name}',
-|srcPath => '$sourcePath', targetPath => '$targetPath',
-|rowKey => '_row_key', partitionKey => 'timestamp',
-|schemaFilePath => '$schemaFile', command => 'upsert')""".stripMargin) {
+|table => '$tableName', table_type => '${HoodieTableType.COPY_ON_WRITE.name}',
+|src_path => '$sourcePath', target_path => '$targetPath',
+|row_key => '_row_key', partition_key => 'timestamp',
+|schema_file_path => '$schemaFile', command => 'upsert')""".stripMargin) {
Seq(0)
}

@@ -21,7 +21,7 @@ import org.apache.spark.sql.hudi.HoodieSparkSqlTestBase
class TestMetadataProcedure extends HoodieSparkSqlTestBase {
-test("Test Call metadata_delete Procedure") {
+test("Test Call delete_metadata_table Procedure") {
withTempDir { tmp =>
val tableName = generateTableName
// create table
@@ -44,14 +44,14 @@ class TestMetadataProcedure extends HoodieSparkSqlTestBase {
spark.sql(s"insert into $tableName select 2, 'a2', 20, 1500")
// delete the metadata
-val deleteResult = spark.sql(s"""call metadata_delete(table => '$tableName')""").collect()
+val deleteResult = spark.sql(s"""call delete_metadata_table(table => '$tableName')""").collect()
assertResult(1) {
deleteResult.length
}
}
}
-test("Test Call metadata_create Procedure") {
+test("Test Call create_metadata_table Procedure") {
withTempDir { tmp =>
val tableName = generateTableName
// create table
@@ -74,20 +74,20 @@ class TestMetadataProcedure extends HoodieSparkSqlTestBase {
spark.sql(s"insert into $tableName select 2, 'a2', 20, 1500")
// The first step is delete the metadata
-val deleteResult = spark.sql(s"""call metadata_delete(table => '$tableName')""").collect()
+val deleteResult = spark.sql(s"""call delete_metadata_table(table => '$tableName')""").collect()
assertResult(1) {
deleteResult.length
}
// The second step is create the metadata
-val createResult = spark.sql(s"""call metadata_create(table => '$tableName')""").collect()
+val createResult = spark.sql(s"""call create_metadata_table(table => '$tableName')""").collect()
assertResult(1) {
createResult.length
}
}
}
-test("Test Call metadata_init Procedure") {
+test("Test Call init_metadata_table Procedure") {
withTempDir { tmp =>
val tableName = generateTableName
// create table
@@ -110,20 +110,20 @@ class TestMetadataProcedure extends HoodieSparkSqlTestBase {
spark.sql(s"insert into $tableName select 2, 'a2', 20, 1500")
// read only, no initialize
-val readResult = spark.sql(s"""call metadata_init(table => '$tableName', readOnly => true)""").collect()
+val readResult = spark.sql(s"""call init_metadata_table(table => '$tableName', read_only => true)""").collect()
assertResult(1) {
readResult.length
}
// initialize metadata
-val initResult = spark.sql(s"""call metadata_init(table => '$tableName')""").collect()
+val initResult = spark.sql(s"""call init_metadata_table(table => '$tableName')""").collect()
assertResult(1) {
initResult.length
}
}
}
-test("Test Call show_metadata_stats Procedure") {
+test("Test Call show_metadata_table_stats Procedure") {
withTempDir { tmp =>
val tableName = generateTableName
// create table
@@ -147,14 +147,14 @@ class TestMetadataProcedure extends HoodieSparkSqlTestBase {
spark.sql(s"insert into $tableName select 2, 'a2', 20, 1500")
// collect metadata stats for table
-val metadataStats = spark.sql(s"""call show_metadata_stats(table => '$tableName')""").collect()
+val metadataStats = spark.sql(s"""call show_metadata_table_stats(table => '$tableName')""").collect()
assertResult(0) {
metadataStats.length
}
}
}
-test("Test Call list_metadata_partitions Procedure") {
+test("Test Call show_metadata_table_partitions Procedure") {
withTempDir { tmp =>
val tableName = generateTableName
// create table
@@ -178,14 +178,14 @@ class TestMetadataProcedure extends HoodieSparkSqlTestBase {
spark.sql(s"insert into $tableName select 2, 'a2', 20, 1500")
// collect metadata partitions for table
-val partitions = spark.sql(s"""call list_metadata_partitions(table => '$tableName')""").collect()
+val partitions = spark.sql(s"""call show_metadata_table_partitions(table => '$tableName')""").collect()
assertResult(2) {
partitions.length
}
}
}
-test("Test Call list_metadata_files Procedure") {
+test("Test Call show_metadata_table_files Procedure") {
withTempDir { tmp =>
val tableName = generateTableName
// create table
@@ -209,21 +209,21 @@ class TestMetadataProcedure extends HoodieSparkSqlTestBase {
spark.sql(s"insert into $tableName select 2, 'a2', 20, 1500")
// collect metadata partitions for table
-val partitions = spark.sql(s"""call list_metadata_partitions(table => '$tableName')""").collect()
+val partitions = spark.sql(s"""call show_metadata_table_partitions(table => '$tableName')""").collect()
assertResult(2) {
partitions.length
}
// collect metadata files for a partition of a table
val partition = partitions(0).get(0).toString
-val filesResult = spark.sql(s"""call list_metadata_files(table => '$tableName', partition => '$partition')""").collect()
+val filesResult = spark.sql(s"""call show_metadata_table_files(table => '$tableName', partition => '$partition')""").collect()
assertResult(1) {
filesResult.length
}
}
}
-test("Test Call validate_metadata_files Procedure") {
+test("Test Call validate_metadata_table_files Procedure") {
withTempDir { tmp =>
val tableName = generateTableName
// create table
@@ -247,13 +247,13 @@ class TestMetadataProcedure extends HoodieSparkSqlTestBase {
spark.sql(s"insert into $tableName select 2, 'a2', 20, 1500")
// collect validate metadata files result
-val validateFilesResult = spark.sql(s"""call validate_metadata_files(table => '$tableName')""").collect()
+val validateFilesResult = spark.sql(s"""call validate_metadata_table_files(table => '$tableName')""").collect()
assertResult(0) {
validateFilesResult.length
}
// collect validate metadata files result with verbose
-val validateFilesVerboseResult = spark.sql(s"""call validate_metadata_files(table => '$tableName', verbose => true)""").collect()
+val validateFilesVerboseResult = spark.sql(s"""call validate_metadata_table_files(table => '$tableName', verbose => true)""").collect()
assertResult(2) {
validateFilesVerboseResult.length
}

@@ -21,7 +21,7 @@ import org.apache.spark.sql.hudi.HoodieSparkSqlTestBase
class TestSavepointsProcedure extends HoodieSparkSqlTestBase {
-test("Test Call create_savepoints Procedure") {
+test("Test Call create_savepoint Procedure") {
withTempDir { tmp =>
val tableName = generateTableName
// create table
@@ -49,7 +49,7 @@ class TestSavepointsProcedure extends HoodieSparkSqlTestBase {
}
val commitTime = commits.apply(0).getString(0)
-checkAnswer(s"""call create_savepoints('$tableName', '$commitTime', 'admin', '1')""")(Seq(true))
+checkAnswer(s"""call create_savepoint('$tableName', '$commitTime', 'admin', '1')""")(Seq(true))
}
}
@@ -83,7 +83,7 @@ class TestSavepointsProcedure extends HoodieSparkSqlTestBase {
}
val commitTime = commits.apply(1).getString(0)
-checkAnswer(s"""call create_savepoints('$tableName', '$commitTime')""")(Seq(true))
+checkAnswer(s"""call create_savepoint('$tableName', '$commitTime')""")(Seq(true))
// show savepoints
val savepoints = spark.sql(s"""call show_savepoints(table => '$tableName')""").collect()
@@ -93,7 +93,7 @@ class TestSavepointsProcedure extends HoodieSparkSqlTestBase {
}
}
-test("Test Call delete_savepoints Procedure") {
+test("Test Call delete_savepoint Procedure") {
withTempDir { tmp =>
val tableName = generateTableName
// create table
@@ -124,11 +124,11 @@ class TestSavepointsProcedure extends HoodieSparkSqlTestBase {
// create 3 savepoints
commits.foreach(r => {
-checkAnswer(s"""call create_savepoints('$tableName', '${r.getString(0)}')""")(Seq(true))
+checkAnswer(s"""call create_savepoint('$tableName', '${r.getString(0)}')""")(Seq(true))
})
// delete savepoints
-checkAnswer(s"""call delete_savepoints('$tableName', '${commits.apply(1).getString(0)}')""")(Seq(true))
+checkAnswer(s"""call delete_savepoint('$tableName', '${commits.apply(1).getString(0)}')""")(Seq(true))
// show savepoints with only 2
val savepoints = spark.sql(s"""call show_savepoints(table => '$tableName')""").collect()
@@ -138,7 +138,7 @@ class TestSavepointsProcedure extends HoodieSparkSqlTestBase {
}
}
-test("Test Call rollback_savepoints Procedure") {
+test("Test Call rollback_to_savepoint Procedure") {
withTempDir { tmp =>
val tableName = generateTableName
// create table
@@ -168,11 +168,11 @@ class TestSavepointsProcedure extends HoodieSparkSqlTestBase {
// create 2 savepoints
commits.foreach(r => {
-checkAnswer(s"""call create_savepoints('$tableName', '${r.getString(0)}')""")(Seq(true))
+checkAnswer(s"""call create_savepoint('$tableName', '${r.getString(0)}')""")(Seq(true))
})
// rollback savepoints
-checkAnswer(s"""call rollback_savepoints('$tableName', '${commits.apply(0).getString(0)}')""")(Seq(true))
+checkAnswer(s"""call rollback_to_savepoint('$tableName', '${commits.apply(0).getString(0)}')""")(Seq(true))
}
}
}

@@ -60,7 +60,7 @@ class TestStatsProcedure extends HoodieSparkSqlTestBase {
}
}
-test("Test Call stats_filesizes Procedure") {
+test("Test Call stats_file_sizes Procedure") {
withTempDir { tmp =>
val tableName = generateTableName
val tablePath = s"${tmp.getCanonicalPath}/$tableName"
@@ -85,12 +85,12 @@ class TestStatsProcedure extends HoodieSparkSqlTestBase {
spark.sql(s"insert into $tableName select 2, 'a2', 20, 1500")
// Check required fields
-checkExceptionContain(s"""call stats_filesizes(limit => 10)""")(
+checkExceptionContain(s"""call stats_file_sizes(limit => 10)""")(
s"Argument: table is required")
// collect result for table
val result = spark.sql(
-s"""call stats_filesizes(table => '$tableName', partition_path => '/*')""".stripMargin).collect()
+s"""call stats_file_sizes(table => '$tableName', partition_path => '/*')""".stripMargin).collect()
assertResult(3) {
result.length
}

@@ -48,7 +48,7 @@ class TestUpgradeOrDowngradeProcedure extends HoodieSparkSqlTestBase {
""".stripMargin)
// Check required fields
checkExceptionContain(s"""call downgrade_table(table => '$tableName')""")(
-s"Argument: toVersion is required")
+s"Argument: to_version is required")
var metaClient = HoodieTableMetaClient.builder
.setConf(new JavaSparkContext(spark.sparkContext).hadoopConfiguration())
@@ -62,7 +62,7 @@ class TestUpgradeOrDowngradeProcedure extends HoodieSparkSqlTestBase {
assertTableVersionFromPropertyFile(metaClient, HoodieTableVersion.FOUR.versionCode)
// downgrade table to ZERO
-checkAnswer(s"""call downgrade_table(table => '$tableName', toVersion => 'ZERO')""")(Seq(true))
+checkAnswer(s"""call downgrade_table(table => '$tableName', to_version => 'ZERO')""")(Seq(true))
// verify the downgraded hoodie.table.version
metaClient = HoodieTableMetaClient.reload(metaClient)
@@ -72,7 +72,7 @@ class TestUpgradeOrDowngradeProcedure extends HoodieSparkSqlTestBase {
assertTableVersionFromPropertyFile(metaClient, HoodieTableVersion.ZERO.versionCode)
// upgrade table to ONE
-checkAnswer(s"""call upgrade_table(table => '$tableName', toVersion => 'ONE')""")(Seq(true))
+checkAnswer(s"""call upgrade_table(table => '$tableName', to_version => 'ONE')""")(Seq(true))
// verify the upgraded hoodie.table.version
metaClient = HoodieTableMetaClient.reload(metaClient)