1
0

[HUDI-3237] gracefully fail to change column data type (#4677)

This commit is contained in:
Yann Byron
2022-01-25 08:33:36 +08:00
committed by GitHub
parent bc7882cbe9
commit 26c3f797b0
4 changed files, with 17 additions and 7 deletions

View File

@@ -312,4 +312,10 @@ object HoodieSqlCommonUtils extends SparkAdapterSupport {
case field if resolver(field.name, name) => field
}
}
// Returns true when the two [[StructField]]s refer to the same column:
// their names match under the given [[Resolver]] (which decides case
// sensitivity) and their dataTypes are identical.
def columnEqual(field: StructField, other: StructField, resolver: Resolver): Boolean = {
  val sameName = resolver(field.name, other.name)
  val sameType = field.dataType == other.dataType
  sameName && sameType
}
}

View File

@@ -49,6 +49,13 @@ case class AlterHoodieTableChangeColumnCommand(
throw new AnalysisException(s"Can't find column `$columnName` given table data columns " +
s"${hoodieCatalogTable.dataSchema.fieldNames.mkString("[`", "`, `", "`]")}")
)
// Throw an AnalysisException if the column name/dataType is changed.
if (!columnEqual(originColumn, newColumn, resolver)) {
throw new AnalysisException(
"ALTER TABLE CHANGE COLUMN is not supported for changing column " +
s"'${originColumn.name}' with type '${originColumn.dataType}' to " +
s"'${newColumn.name}' with type '${newColumn.dataType}'")
}
// Get the new schema
val newTableSchema = StructType(

View File

@@ -239,10 +239,6 @@ alter table h2_p add columns(ext0 int);
+----------+
| ok |
+----------+
alter table h2_p change column ext0 ext0 bigint;
+----------+
| ok |
+----------+
# DROP TABLE
drop table h0;

View File

@@ -91,9 +91,10 @@ class TestAlterTable extends TestHoodieSqlBase {
)
// change column's data type
spark.sql(s"alter table $newTableName change column id id bigint")
assertResult(StructType(Seq(StructField("id", LongType, nullable = true))))(
spark.sql(s"select id from $newTableName").schema)
checkExceptionContain(s"alter table $newTableName change column id id bigint") (
"ALTER TABLE CHANGE COLUMN is not supported for changing column 'id'" +
" with type 'IntegerType' to 'id' with type 'LongType'"
)
// Insert data to the new table.
spark.sql(s"insert into $newTableName values(2, 'a2', 12, 1000, 'e0')")