
[HUDI-2551] Support DefaultHoodieRecordPayload for flink (#3792)

Danny Chan
2021-10-14 13:46:53 +08:00
committed by GitHub
parent abf3e3fe71
commit f897e6d73e
6 changed files with 48 additions and 2 deletions
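
For background on what the new test exercises: unlike OverwriteWithLatestAvroPayload (the previous Flink default), DefaultHoodieRecordPayload compares the ordering (precombine) field not only when deduplicating incoming records, but also when merging an incoming record against the one already stored in the table. A late-arriving update with a smaller ordering value therefore does not overwrite newer data. Below is a minimal sketch of that merge rule; the names and types are illustrative only, not the actual Hudi implementation (which operates on Avro records inside the write path):

/**
 * Illustrative sketch of the ordering-value merge rule that
 * DefaultHoodieRecordPayload applies. Names here are hypothetical.
 */
final class OrderingMergeSketch {
  record Rec(int id, String name, double price, long ts) {}

  /** Keep whichever record has the larger ordering value (ts);
   *  the incoming record wins ties. */
  static Rec merge(Rec stored, Rec incoming) {
    return incoming.ts() >= stored.ts() ? incoming : stored;
  }

  public static void main(String[] args) {
    Rec first = new Rec(1, "a1", 20.0, 20); // initial write, ts = 20
    Rec late  = new Rec(1, "a1", 20.0, 1);  // late update, ts = 1
    // Prints the ts=20 record: the late update is dropped,
    // which is exactly the behavior the new test asserts.
    System.out.println(merge(first, late));
  }
}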


@@ -18,6 +18,7 @@
 package org.apache.hudi.table;
+import org.apache.hudi.common.model.DefaultHoodieRecordPayload;
 import org.apache.hudi.common.model.HoodieTableType;
 import org.apache.hudi.common.table.timeline.HoodieTimeline;
 import org.apache.hudi.configuration.FlinkOptions;
@@ -584,6 +585,34 @@ public class HoodieDataSourceITCase extends AbstractTestBase {
     assertRowsEquals(result, "[+I[id1, Sophia, 18, 1970-01-01T00:00:05, par1]]");
   }
 
+  @Test
+  void testUpdateWithDefaultHoodieRecordPayload() {
+    TableEnvironment tableEnv = batchTableEnv;
+    String hoodieTableDDL = sql("t1")
+        .field("id int")
+        .field("name string")
+        .field("price double")
+        .field("ts bigint")
+        .pkField("id")
+        .noPartition()
+        .option(FlinkOptions.PATH, tempFile.getAbsolutePath())
+        .option(FlinkOptions.PAYLOAD_CLASS_NAME, DefaultHoodieRecordPayload.class.getName())
+        .end();
+    tableEnv.executeSql(hoodieTableDDL);
+
+    // First write: ordering value (ts) is 20.
+    final String insertInto1 = "insert into t1 values\n"
+        + "(1,'a1',20,20)";
+    execInsertSql(tableEnv, insertInto1);
+
+    // Late update with a smaller ordering value (ts = 1):
+    // DefaultHoodieRecordPayload should keep the newer stored record.
+    final String insertInto2 = "insert into t1 values\n"
+        + "(1,'a1',20,1)";
+    execInsertSql(tableEnv, insertInto2);
+
+    List<Row> result = CollectionUtil.iterableToList(
+        () -> tableEnv.sqlQuery("select * from t1").execute().collect());
+    assertRowsEquals(result, "[+I[1, a1, 20.0, 20]]");
+  }
+
   @ParameterizedTest
   @MethodSource("executionModeAndTableTypeParams")
   void testWriteNonPartitionedTable(ExecMode execMode, HoodieTableType tableType) {
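
Outside the test harness (sql(), execInsertSql(), and batchTableEnv are test-only helpers), the same behavior can be sketched with the plain Flink Table API. This is a hedged example, not part of the commit: the 'payload.class' option key is an assumption for what FlinkOptions.PAYLOAD_CLASS_NAME resolves to, and it assumes the hudi-flink bundle is on the classpath:

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;

public class DefaultPayloadExample {
  public static void main(String[] args) throws Exception {
    TableEnvironment tableEnv = TableEnvironment.create(
        EnvironmentSettings.newInstance().inBatchMode().build());

    // Same shape as the table the test's sql("t1") builder produces.
    // 'payload.class' is an assumed key for FlinkOptions.PAYLOAD_CLASS_NAME.
    tableEnv.executeSql(
        "create table t1 (\n"
            + "  id int,\n"
            + "  name string,\n"
            + "  price double,\n"
            + "  ts bigint,\n"
            + "  primary key (id) not enforced\n"
            + ") with (\n"
            + "  'connector' = 'hudi',\n"
            + "  'path' = 'file:///tmp/hudi/t1',\n"
            + "  'payload.class' = 'org.apache.hudi.common.model.DefaultHoodieRecordPayload'\n"
            + ")");

    // Newer write first, then a late update with a smaller ordering value.
    tableEnv.executeSql("insert into t1 values (1, 'a1', 20, 20)").await();
    tableEnv.executeSql("insert into t1 values (1, 'a1', 20, 1)").await();

    // Expect the ts = 20 row to survive, as in the test's assertion.
    tableEnv.executeSql("select * from t1").print();
  }
}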