[HUDI-2191] Bump flink version to 1.13.1 (#3291)

Danny Chan
2021-08-16 18:14:05 +08:00
committed by GitHub
parent 4d508ef673
commit 66f951322a
19 changed files with 304 additions and 232 deletions

View File

@@ -57,8 +57,8 @@ import org.apache.flink.api.java.io.TextInputFormat;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.core.execution.JobClient;
import org.apache.flink.core.fs.Path;
import org.apache.flink.formats.common.TimestampFormat;
import org.apache.flink.formats.json.JsonRowDataDeserializationSchema;
import org.apache.flink.formats.json.TimestampFormat;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
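
Note: Flink 1.13 moved the TimestampFormat enum out of the JSON format package into the shared org.apache.flink.formats.common package; the enum itself is unchanged, so only the import moves. A minimal sketch of the updated construction, assuming Flink 1.13.x with flink-json and the table runtime on the classpath (the two-field row type is illustrative):

import org.apache.flink.formats.common.TimestampFormat;
import org.apache.flink.formats.json.JsonRowDataDeserializationSchema;
import org.apache.flink.table.runtime.typeutils.InternalTypeInfo;
import org.apache.flink.table.types.logical.IntType;
import org.apache.flink.table.types.logical.RowType;
import org.apache.flink.table.types.logical.VarCharType;

public class JsonSchemaSketch {
  public static void main(String[] args) {
    RowType rowType = RowType.of(new VarCharType(20), new IntType());
    JsonRowDataDeserializationSchema schema = new JsonRowDataDeserializationSchema(
        rowType,
        InternalTypeInfo.of(rowType), // TypeInformation<RowData> for the produced rows
        false,                        // failOnMissingField
        true,                         // ignoreParseErrors
        TimestampFormat.ISO_8601);    // same enum, new package
    System.out.println(schema);
  }
}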

View File

@@ -110,7 +110,7 @@ public class StreamWriteFunctionWrapper<I> {
this.gateway = new MockOperatorEventGateway();
this.conf = conf;
// one function
this.coordinatorContext = new MockOperatorCoordinatorContext(new OperatorID(), 1, false);
this.coordinatorContext = new MockOperatorCoordinatorContext(new OperatorID(), 1);
this.coordinator = new StreamWriteOperatorCoordinator(conf, this.coordinatorContext);
this.compactFunctionWrapper = new CompactFunctionWrapper(this.conf);
this.bucketAssignOperatorContext = new MockBucketAssignOperatorContext();
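
Note: Flink 1.13's coordinator test utilities dropped the trailing boolean from this MockOperatorCoordinatorContext overload; the mock is now built from just the operator ID and the subtask count. A minimal sketch, assuming the flink-runtime test jar is on the classpath:

import org.apache.flink.runtime.jobgraph.OperatorID;
import org.apache.flink.runtime.operators.coordination.MockOperatorCoordinatorContext;

public class MockCoordinatorContextSketch {
  public static void main(String[] args) {
    // One subtask, matching the "one function" setup above.
    MockOperatorCoordinatorContext context =
        new MockOperatorCoordinatorContext(new OperatorID(), 1);
    System.out.println(context.getOperatorId());
  }
}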

View File

@@ -69,10 +69,10 @@ import static org.junit.jupiter.api.Assertions.assertTrue;
public class TestStreamReadOperator {
private static final Map<String, String> EXPECTED = new HashMap<>();
static {
EXPECTED.put("par1", "id1,Danny,23,1970-01-01T00:00:00.001,par1, id2,Stephen,33,1970-01-01T00:00:00.002,par1");
EXPECTED.put("par2", "id3,Julian,53,1970-01-01T00:00:00.003,par2, id4,Fabian,31,1970-01-01T00:00:00.004,par2");
EXPECTED.put("par3", "id5,Sophia,18,1970-01-01T00:00:00.005,par3, id6,Emma,20,1970-01-01T00:00:00.006,par3");
EXPECTED.put("par4", "id7,Bob,44,1970-01-01T00:00:00.007,par4, id8,Han,56,1970-01-01T00:00:00.008,par4");
EXPECTED.put("par1", "+I[id1, Danny, 23, 1970-01-01T00:00:00.001, par1], +I[id2, Stephen, 33, 1970-01-01T00:00:00.002, par1]");
EXPECTED.put("par2", "+I[id3, Julian, 53, 1970-01-01T00:00:00.003, par2], +I[id4, Fabian, 31, 1970-01-01T00:00:00.004, par2]");
EXPECTED.put("par3", "+I[id5, Sophia, 18, 1970-01-01T00:00:00.005, par3], +I[id6, Emma, 20, 1970-01-01T00:00:00.006, par3]");
EXPECTED.put("par4", "+I[id7, Bob, 44, 1970-01-01T00:00:00.007, par4], +I[id8, Han, 56, 1970-01-01T00:00:00.008, par4]");
}
private Configuration conf;
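
Note: most of the expectation churn in this commit traces back to a single Flink 1.13 behavior change: Row.toString() now prints the row kind's short string plus bracketed, comma-space-separated fields instead of the bare comma-joined form. A minimal sketch of the new output, assuming Flink 1.13.x:

import org.apache.flink.types.Row;
import org.apache.flink.types.RowKind;

public class RowToStringSketch {
  public static void main(String[] args) {
    Row row = Row.ofKind(RowKind.INSERT, "id1", "Danny", 23);
    // Flink 1.12 printed "id1,Danny,23"; 1.13 prints the kind and brackets:
    System.out.println(row); // +I[id1, Danny, 23]
  }
}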

View File

@@ -68,8 +68,6 @@ import static org.junit.jupiter.api.Assertions.assertTrue;
/**
* IT cases for Hoodie table source and sink.
* <p>
* Note: should add more SQL cases when batch write is supported.
*/
public class HoodieDataSourceITCase extends AbstractTestBase {
private TableEnvironment streamTableEnv;
@@ -289,7 +287,7 @@ public class HoodieDataSourceITCase extends AbstractTestBase {
+ ")";
List<Row> result = execSelectSql(streamTableEnv,
"select name, sum(age) from t1 group by name", sinkDDL, 10);
final String expected = "[+I(Danny,24), +I(Stephen,34)]";
final String expected = "[+I(+I[Danny, 24]), +I(+I[Stephen, 34])]";
assertRowsEquals(result, expected, true);
}
@@ -314,9 +312,9 @@ public class HoodieDataSourceITCase extends AbstractTestBase {
List<Row> result2 = CollectionUtil.iterableToList(
() -> tableEnv.sqlQuery("select * from t1 where uuid > 'id5'").execute().collect());
assertRowsEquals(result2, "["
+ "id6,Emma,20,1970-01-01T00:00:06,par3, "
+ "id7,Bob,44,1970-01-01T00:00:07,par4, "
+ "id8,Han,56,1970-01-01T00:00:08,par4]");
+ "+I[id6, Emma, 20, 1970-01-01T00:00:06, par3], "
+ "+I[id7, Bob, 44, 1970-01-01T00:00:07, par4], "
+ "+I[id8, Han, 56, 1970-01-01T00:00:08, par4]]");
}
@ParameterizedTest
@@ -350,14 +348,14 @@ public class HoodieDataSourceITCase extends AbstractTestBase {
execInsertSql(streamTableEnv, insertInto);
final String expected = "["
+ "id1,Danny,23,par1,1970-01-01T00:00:01, "
+ "id2,Stephen,33,par1,1970-01-01T00:00:02, "
+ "id3,Julian,53,par2,1970-01-01T00:00:03, "
+ "id4,Fabian,31,par2,1970-01-01T00:00:04, "
+ "id5,Sophia,18,par3,1970-01-01T00:00:05, "
+ "id6,Emma,20,par3,1970-01-01T00:00:06, "
+ "id7,Bob,44,par4,1970-01-01T00:00:07, "
+ "id8,Han,56,par4,1970-01-01T00:00:08]";
+ "+I[id1, Danny, 23, par1, 1970-01-01T00:00:01], "
+ "+I[id2, Stephen, 33, par1, 1970-01-01T00:00:02], "
+ "+I[id3, Julian, 53, par2, 1970-01-01T00:00:03], "
+ "+I[id4, Fabian, 31, par2, 1970-01-01T00:00:04], "
+ "+I[id5, Sophia, 18, par3, 1970-01-01T00:00:05], "
+ "+I[id6, Emma, 20, par3, 1970-01-01T00:00:06], "
+ "+I[id7, Bob, 44, par4, 1970-01-01T00:00:07], "
+ "+I[id8, Han, 56, par4, 1970-01-01T00:00:08]]";
List<Row> result = execSelectSql(streamTableEnv, "select * from t1", execMode);
@@ -401,8 +399,8 @@ public class HoodieDataSourceITCase extends AbstractTestBase {
List<Row> result2 = CollectionUtil.iterableToList(
() -> tableEnv.sqlQuery("select * from t1").execute().collect());
final String expected = "["
+ "id1,Danny,24,1970-01-01T00:00:01,par1, "
+ "id2,Stephen,34,1970-01-01T00:00:02,par2]";
+ "+I[id1, Danny, 24, 1970-01-01T00:00:01, par1], "
+ "+I[id2, Stephen, 34, 1970-01-01T00:00:02, par2]]";
assertRowsEquals(result2, expected);
}
@@ -431,7 +429,7 @@ public class HoodieDataSourceITCase extends AbstractTestBase {
List<Row> result = CollectionUtil.iterableToList(
() -> tableEnv.sqlQuery("select * from t1").execute().collect());
assertRowsEquals(result, "[id1,Sophia,18,1970-01-01T00:00:05,par1]");
assertRowsEquals(result, "[+I[id1, Sophia, 18, 1970-01-01T00:00:05, par1]]");
}
@ParameterizedTest
@@ -467,7 +465,7 @@ public class HoodieDataSourceITCase extends AbstractTestBase {
List<Row> result = CollectionUtil.iterableToList(
() -> tableEnv.sqlQuery("select * from t1").execute().collect());
assertRowsEquals(result, "[id1,Sophia,18,1970-01-01T00:00:05,par5]");
assertRowsEquals(result, "[+I[id1, Sophia, 18, 1970-01-01T00:00:05, par5]]");
}
@Test
@@ -490,7 +488,7 @@ public class HoodieDataSourceITCase extends AbstractTestBase {
List<Row> result = CollectionUtil.iterableToList(
() -> streamTableEnv.sqlQuery("select * from t1").execute().collect());
assertRowsEquals(result, "[id1,Phoebe,52,1970-01-01T00:00:08,par4]");
assertRowsEquals(result, "[+I[id1, Phoebe, 52, 1970-01-01T00:00:08, par4]]");
}
@Test
@@ -514,10 +512,10 @@ public class HoodieDataSourceITCase extends AbstractTestBase {
List<Row> result = CollectionUtil.iterableToList(
() -> streamTableEnv.sqlQuery("select * from t1").execute().collect());
final String expected = "["
+ "id1,Stephen,34,1970-01-01T00:00:02,par1, "
+ "id1,Fabian,32,1970-01-01T00:00:04,par2, "
+ "id1,Jane,19,1970-01-01T00:00:06,par3, "
+ "id1,Phoebe,52,1970-01-01T00:00:08,par4]";
+ "+I[id1, Stephen, 34, 1970-01-01T00:00:02, par1], "
+ "+I[id1, Fabian, 32, 1970-01-01T00:00:04, par2], "
+ "+I[id1, Jane, 19, 1970-01-01T00:00:06, par3], "
+ "+I[id1, Phoebe, 52, 1970-01-01T00:00:08, par4]]";
assertRowsEquals(result, expected, 3);
}
@@ -577,16 +575,16 @@ public class HoodieDataSourceITCase extends AbstractTestBase {
execInsertSql(streamTableEnv, insertInto);
final String expected = "["
+ "101,1000,scooter,3.140000104904175, "
+ "102,2000,car battery,8.100000381469727, "
+ "103,3000,12-pack drill bits,0.800000011920929, "
+ "104,4000,hammer,0.75, "
+ "105,5000,hammer,0.875, "
+ "106,10000,hammer,1.0, "
+ "107,11000,rocks,5.099999904632568, "
+ "108,8000,jacket,0.10000000149011612, "
+ "109,9000,spare tire,22.200000762939453, "
+ "110,14000,jacket,0.5]";
+ "+I[101, 1000, scooter, 3.140000104904175], "
+ "+I[102, 2000, car battery, 8.100000381469727], "
+ "+I[103, 3000, 12-pack drill bits, 0.800000011920929], "
+ "+I[104, 4000, hammer, 0.75], "
+ "+I[105, 5000, hammer, 0.875], "
+ "+I[106, 10000, hammer, 1.0], "
+ "+I[107, 11000, rocks, 5.099999904632568], "
+ "+I[108, 8000, jacket, 0.10000000149011612], "
+ "+I[109, 9000, spare tire, 22.200000762939453], "
+ "+I[110, 14000, jacket, 0.5]]";
List<Row> result = execSelectSql(streamTableEnv, "select * from hoodie_sink", execMode);
@@ -621,9 +619,9 @@ public class HoodieDataSourceITCase extends AbstractTestBase {
List<Row> result2 = CollectionUtil.iterableToList(
() -> tableEnv.sqlQuery("select * from hoodie_sink where uuid > 'id5'").execute().collect());
assertRowsEquals(result2, "["
+ "id6,Emma,20,1970-01-01T00:00:06,par3, "
+ "id7,Bob,44,1970-01-01T00:00:07,par4, "
+ "id8,Han,56,1970-01-01T00:00:08,par4]");
+ "+I[id6, Emma, 20, 1970-01-01T00:00:06, par3], "
+ "+I[id7, Bob, 44, 1970-01-01T00:00:07, par4], "
+ "+I[id8, Han, 56, 1970-01-01T00:00:08, par4]]");
}
@Test
@@ -660,11 +658,11 @@ public class HoodieDataSourceITCase extends AbstractTestBase {
List<Row> result = CollectionUtil.iterableToList(
() -> tableEnv.sqlQuery("select * from t1").execute().collect());
assertRowsEquals(result, "["
+ "id1,Danny,23,1970-01-01T00:00:01,par1, "
+ "id1,Stephen,33,1970-01-01T00:00:02,par2, "
+ "id1,Julian,53,1970-01-01T00:00:03,par3, "
+ "id1,Fabian,31,1970-01-01T00:00:04,par4, "
+ "id1,Sophia,18,1970-01-01T00:00:05,par5]", 3);
+ "+I[id1, Danny, 23, 1970-01-01T00:00:01, par1], "
+ "+I[id1, Stephen, 33, 1970-01-01T00:00:02, par2], "
+ "+I[id1, Julian, 53, 1970-01-01T00:00:03, par3], "
+ "+I[id1, Fabian, 31, 1970-01-01T00:00:04, par4], "
+ "+I[id1, Sophia, 18, 1970-01-01T00:00:05, par5]]", 3);
}
// -------------------------------------------------------------------------

View File

@@ -24,16 +24,18 @@ import org.apache.hudi.hive.SlashEncodedDayPartitionValueExtractor;
import org.apache.hudi.keygen.ComplexAvroKeyGenerator;
import org.apache.hudi.keygen.NonpartitionedAvroKeyGenerator;
import org.apache.hudi.util.StreamerUtil;
import org.apache.hudi.utils.SchemaBuilder;
import org.apache.hudi.utils.TestConfigurations;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.configuration.ReadableConfig;
import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.TableSchema;
import org.apache.flink.table.api.Schema;
import org.apache.flink.table.api.ValidationException;
import org.apache.flink.table.catalog.CatalogTable;
import org.apache.flink.table.catalog.CatalogTableImpl;
import org.apache.flink.table.catalog.ObjectIdentifier;
import org.apache.flink.table.catalog.ResolvedCatalogTable;
import org.apache.flink.table.catalog.ResolvedSchema;
import org.apache.flink.table.factories.DynamicTableFactory;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
@@ -82,7 +84,7 @@ public class TestHoodieTableFactory {
@Test
void testRequiredOptionsForSource() {
// miss pk and pre combine key will throw exception
TableSchema schema1 = TableSchema.builder()
ResolvedSchema schema1 = SchemaBuilder.instance()
.field("f0", DataTypes.INT().notNull())
.field("f1", DataTypes.VARCHAR(20))
.field("f2", DataTypes.TIMESTAMP(3))
@@ -92,7 +94,7 @@ public class TestHoodieTableFactory {
assertThrows(ValidationException.class, () -> new HoodieTableFactory().createDynamicTableSink(sourceContext1));
// given the pk and miss the pre combine key will throw exception
TableSchema schema2 = TableSchema.builder()
ResolvedSchema schema2 = SchemaBuilder.instance()
.field("f0", DataTypes.INT().notNull())
.field("f1", DataTypes.VARCHAR(20))
.field("f2", DataTypes.TIMESTAMP(3))
@@ -103,7 +105,7 @@ public class TestHoodieTableFactory {
assertThrows(ValidationException.class, () -> new HoodieTableFactory().createDynamicTableSink(sourceContext2));
// given pk and pre combine key will be ok
TableSchema schema3 = TableSchema.builder()
ResolvedSchema schema3 = SchemaBuilder.instance()
.field("f0", DataTypes.INT().notNull())
.field("f1", DataTypes.VARCHAR(20))
.field("f2", DataTypes.TIMESTAMP(3))
@@ -137,7 +139,7 @@ public class TestHoodieTableFactory {
this.conf.setString(FlinkOptions.RECORD_KEY_FIELD, "dummyField");
this.conf.setString(FlinkOptions.KEYGEN_CLASS, "dummyKeyGenClass");
// definition with simple primary key and partition path
TableSchema schema1 = TableSchema.builder()
ResolvedSchema schema1 = SchemaBuilder.instance()
.field("f0", DataTypes.INT().notNull())
.field("f1", DataTypes.VARCHAR(20))
.field("f2", DataTypes.TIMESTAMP(3))
@@ -152,7 +154,7 @@ public class TestHoodieTableFactory {
// definition with complex primary keys and partition paths
this.conf.setString(FlinkOptions.KEYGEN_CLASS, FlinkOptions.KEYGEN_CLASS.defaultValue());
TableSchema schema2 = TableSchema.builder()
ResolvedSchema schema2 = SchemaBuilder.instance()
.field("f0", DataTypes.INT().notNull())
.field("f1", DataTypes.VARCHAR(20).notNull())
.field("f2", DataTypes.TIMESTAMP(3))
@@ -177,7 +179,7 @@ public class TestHoodieTableFactory {
@Test
void testSetupHiveOptionsForSource() {
// definition with simple primary key and partition path
TableSchema schema1 = TableSchema.builder()
ResolvedSchema schema1 = SchemaBuilder.instance()
.field("f0", DataTypes.INT().notNull())
.field("f1", DataTypes.VARCHAR(20))
.field("f2", DataTypes.TIMESTAMP(3))
@@ -202,7 +204,7 @@ public class TestHoodieTableFactory {
@Test
void testSetupCleaningOptionsForSource() {
// definition with simple primary key and partition path
TableSchema schema1 = TableSchema.builder()
ResolvedSchema schema1 = SchemaBuilder.instance()
.field("f0", DataTypes.INT().notNull())
.field("f1", DataTypes.VARCHAR(20))
.field("f2", DataTypes.TIMESTAMP(3))
@@ -249,7 +251,7 @@ public class TestHoodieTableFactory {
this.conf.setString(FlinkOptions.RECORD_KEY_FIELD, "dummyField");
this.conf.setString(FlinkOptions.KEYGEN_CLASS, "dummyKeyGenClass");
// definition with simple primary key and partition path
TableSchema schema1 = TableSchema.builder()
ResolvedSchema schema1 = SchemaBuilder.instance()
.field("f0", DataTypes.INT().notNull())
.field("f1", DataTypes.VARCHAR(20))
.field("f2", DataTypes.TIMESTAMP(3))
@@ -264,7 +266,7 @@ public class TestHoodieTableFactory {
// definition with complex primary keys and partition paths
this.conf.setString(FlinkOptions.KEYGEN_CLASS, FlinkOptions.KEYGEN_CLASS.defaultValue());
TableSchema schema2 = TableSchema.builder()
ResolvedSchema schema2 = SchemaBuilder.instance()
.field("f0", DataTypes.INT().notNull())
.field("f1", DataTypes.VARCHAR(20).notNull())
.field("f2", DataTypes.TIMESTAMP(3))
@@ -289,7 +291,7 @@ public class TestHoodieTableFactory {
@Test
void testSetupHiveOptionsForSink() {
// definition with simple primary key and partition path
TableSchema schema1 = TableSchema.builder()
ResolvedSchema schema1 = SchemaBuilder.instance()
.field("f0", DataTypes.INT().notNull())
.field("f1", DataTypes.VARCHAR(20))
.field("f2", DataTypes.TIMESTAMP(3))
@@ -314,7 +316,7 @@ public class TestHoodieTableFactory {
@Test
void testSetupCleaningOptionsForSink() {
// definition with simple primary key and partition path
TableSchema schema1 = TableSchema.builder()
ResolvedSchema schema1 = SchemaBuilder.instance()
.field("f0", DataTypes.INT().notNull())
.field("f1", DataTypes.VARCHAR(20))
.field("f2", DataTypes.TIMESTAMP(3))
@@ -349,10 +351,10 @@ public class TestHoodieTableFactory {
*/
private static class MockContext implements DynamicTableFactory.Context {
private final Configuration conf;
private final TableSchema schema;
private final ResolvedSchema schema;
private final List<String> partitions;
private MockContext(Configuration conf, TableSchema schema, List<String> partitions) {
private MockContext(Configuration conf, ResolvedSchema schema, List<String> partitions) {
this.conf = conf;
this.schema = schema;
this.partitions = partitions;
@@ -362,11 +364,11 @@ public class TestHoodieTableFactory {
return getInstance(conf, TestConfigurations.TABLE_SCHEMA, Collections.singletonList("partition"));
}
static MockContext getInstance(Configuration conf, TableSchema schema, String partition) {
static MockContext getInstance(Configuration conf, ResolvedSchema schema, String partition) {
return getInstance(conf, schema, Collections.singletonList(partition));
}
static MockContext getInstance(Configuration conf, TableSchema schema, List<String> partitions) {
static MockContext getInstance(Configuration conf, ResolvedSchema schema, List<String> partitions) {
return new MockContext(conf, schema, partitions);
}
@@ -376,8 +378,10 @@ public class TestHoodieTableFactory {
}
@Override
public CatalogTable getCatalogTable() {
return new CatalogTableImpl(schema, partitions, conf.toMap(), "mock source table");
public ResolvedCatalogTable getCatalogTable() {
CatalogTable catalogTable = CatalogTable.of(Schema.newBuilder().fromResolvedSchema(schema).build(),
"mock source table", partitions, conf.toMap());
return new ResolvedCatalogTable(catalogTable, schema);
}
@Override
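
Note: the driver for this change is that in Flink 1.13, DynamicTableFactory.Context#getCatalogTable() returns a ResolvedCatalogTable, which pairs an unresolved CatalogTable with its ResolvedSchema. A standalone sketch of the same construction, assuming Flink 1.13.x (field names are illustrative):

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.Schema;
import org.apache.flink.table.catalog.CatalogTable;
import org.apache.flink.table.catalog.Column;
import org.apache.flink.table.catalog.ResolvedCatalogTable;
import org.apache.flink.table.catalog.ResolvedSchema;

import java.util.Collections;

public class ResolvedCatalogTableSketch {
  public static void main(String[] args) {
    ResolvedSchema resolved = ResolvedSchema.of(
        Column.physical("f0", DataTypes.INT().notNull()),
        Column.physical("partition", DataTypes.VARCHAR(10)));
    CatalogTable table = CatalogTable.of(
        Schema.newBuilder().fromResolvedSchema(resolved).build(),
        "mock source table",                    // comment
        Collections.singletonList("partition"), // partition keys
        Collections.emptyMap());                // options
    ResolvedCatalogTable resolvedTable = new ResolvedCatalogTable(table, resolved);
    System.out.println(resolvedTable.getResolvedSchema());
  }
}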

View File

@@ -104,17 +104,18 @@ public class TestInputFormat {
result = readData(inputFormat);
actual = TestData.rowDataToString(result);
expected = "[id1,Danny,24,1970-01-01T00:00:00.001,par1, "
+ "id10,Ella,38,1970-01-01T00:00:00.007,par4, "
+ "id11,Phoebe,52,1970-01-01T00:00:00.008,par4, "
+ "id2,Stephen,34,1970-01-01T00:00:00.002,par1, "
+ "id3,Julian,54,1970-01-01T00:00:00.003,par2, "
+ "id4,Fabian,32,1970-01-01T00:00:00.004,par2, "
+ "id5,Sophia,18,1970-01-01T00:00:00.005,par3, "
+ "id6,Emma,20,1970-01-01T00:00:00.006,par3, "
+ "id7,Bob,44,1970-01-01T00:00:00.007,par4, "
+ "id8,Han,56,1970-01-01T00:00:00.008,par4, "
+ "id9,Jane,19,1970-01-01T00:00:00.006,par3]";
expected = "["
+ "+I[id1, Danny, 24, 1970-01-01T00:00:00.001, par1], "
+ "+I[id2, Stephen, 34, 1970-01-01T00:00:00.002, par1], "
+ "+I[id3, Julian, 54, 1970-01-01T00:00:00.003, par2], "
+ "+I[id4, Fabian, 32, 1970-01-01T00:00:00.004, par2], "
+ "+I[id5, Sophia, 18, 1970-01-01T00:00:00.005, par3], "
+ "+I[id6, Emma, 20, 1970-01-01T00:00:00.006, par3], "
+ "+I[id7, Bob, 44, 1970-01-01T00:00:00.007, par4], "
+ "+I[id8, Han, 56, 1970-01-01T00:00:00.008, par4], "
+ "+I[id9, Jane, 19, 1970-01-01T00:00:00.006, par3], "
+ "+I[id10, Ella, 38, 1970-01-01T00:00:00.007, par4], "
+ "+I[id11, Phoebe, 52, 1970-01-01T00:00:00.008, par4]]";
assertThat(actual, is(expected));
}
@@ -150,21 +151,22 @@ public class TestInputFormat {
result = readData(inputFormat);
actual = TestData.rowDataToString(result);
expected = "[id1,Danny,24,1970-01-01T00:00:00.001,par1, "
+ "id10,Ella,38,1970-01-01T00:00:00.007,par4, "
+ "id11,Phoebe,52,1970-01-01T00:00:00.008,par4, "
+ "id12,Monica,27,1970-01-01T00:00:00.009,par5, "
+ "id13,Phoebe,31,1970-01-01T00:00:00.010,par5, "
+ "id14,Rachel,52,1970-01-01T00:00:00.011,par6, "
+ "id15,Ross,29,1970-01-01T00:00:00.012,par6, "
+ "id2,Stephen,34,1970-01-01T00:00:00.002,par1, "
+ "id3,Julian,54,1970-01-01T00:00:00.003,par2, "
+ "id4,Fabian,32,1970-01-01T00:00:00.004,par2, "
+ "id5,Sophia,18,1970-01-01T00:00:00.005,par3, "
+ "id6,Emma,20,1970-01-01T00:00:00.006,par3, "
+ "id7,Bob,44,1970-01-01T00:00:00.007,par4, "
+ "id8,Han,56,1970-01-01T00:00:00.008,par4, "
+ "id9,Jane,19,1970-01-01T00:00:00.006,par3]";
expected = "["
+ "+I[id1, Danny, 24, 1970-01-01T00:00:00.001, par1], "
+ "+I[id2, Stephen, 34, 1970-01-01T00:00:00.002, par1], "
+ "+I[id3, Julian, 54, 1970-01-01T00:00:00.003, par2], "
+ "+I[id4, Fabian, 32, 1970-01-01T00:00:00.004, par2], "
+ "+I[id5, Sophia, 18, 1970-01-01T00:00:00.005, par3], "
+ "+I[id6, Emma, 20, 1970-01-01T00:00:00.006, par3], "
+ "+I[id7, Bob, 44, 1970-01-01T00:00:00.007, par4], "
+ "+I[id8, Han, 56, 1970-01-01T00:00:00.008, par4], "
+ "+I[id9, Jane, 19, 1970-01-01T00:00:00.006, par3], "
+ "+I[id10, Ella, 38, 1970-01-01T00:00:00.007, par4], "
+ "+I[id11, Phoebe, 52, 1970-01-01T00:00:00.008, par4], "
+ "+I[id12, Monica, 27, 1970-01-01T00:00:00.009, par5], "
+ "+I[id13, Phoebe, 31, 1970-01-01T00:00:00.010, par5], "
+ "+I[id14, Rachel, 52, 1970-01-01T00:00:00.011, par6], "
+ "+I[id15, Ross, 29, 1970-01-01T00:00:00.012, par6]]";
assertThat(actual, is(expected));
}
@@ -189,14 +191,14 @@ public class TestInputFormat {
// when isEmitDelete is false.
List<RowData> result1 = readData(inputFormat);
final String actual1 = TestData.rowDataToString(result1, true);
final String actual1 = TestData.rowDataToString(result1);
final String expected1 = "["
+ "+I(id1,Danny,24,1970-01-01T00:00:00.001,par1), "
+ "+I(id2,Stephen,34,1970-01-01T00:00:00.002,par1), "
+ "+I(id4,Fabian,31,1970-01-01T00:00:00.004,par2), "
+ "+I(id6,Emma,20,1970-01-01T00:00:00.006,par3), "
+ "+I(id7,Bob,44,1970-01-01T00:00:00.007,par4), "
+ "+I(id8,Han,56,1970-01-01T00:00:00.008,par4)]";
+ "+I[id1, Danny, 24, 1970-01-01T00:00:00.001, par1], "
+ "+I[id2, Stephen, 34, 1970-01-01T00:00:00.002, par1], "
+ "+I[id4, Fabian, 31, 1970-01-01T00:00:00.004, par2], "
+ "+I[id6, Emma, 20, 1970-01-01T00:00:00.006, par3], "
+ "+I[id7, Bob, 44, 1970-01-01T00:00:00.007, par4], "
+ "+I[id8, Han, 56, 1970-01-01T00:00:00.008, par4]]";
assertThat(actual1, is(expected1));
// refresh the input format and set isEmitDelete to true.
@@ -206,17 +208,17 @@ public class TestInputFormat {
List<RowData> result2 = readData(inputFormat);
final String actual2 = TestData.rowDataToString(result2, true);
final String actual2 = TestData.rowDataToString(result2);
final String expected2 = "["
+ "+I(id1,Danny,24,1970-01-01T00:00:00.001,par1), "
+ "+I(id2,Stephen,34,1970-01-01T00:00:00.002,par1), "
+ "-D(id3,Julian,53,1970-01-01T00:00:00.003,par2), "
+ "+I(id4,Fabian,31,1970-01-01T00:00:00.004,par2), "
+ "-D(id5,Sophia,18,1970-01-01T00:00:00.005,par3), "
+ "+I(id6,Emma,20,1970-01-01T00:00:00.006,par3), "
+ "+I(id7,Bob,44,1970-01-01T00:00:00.007,par4), "
+ "+I(id8,Han,56,1970-01-01T00:00:00.008,par4), "
+ "-D(id9,Jane,19,1970-01-01T00:00:00.006,par3)]";
+ "+I[id1, Danny, 24, 1970-01-01T00:00:00.001, par1], "
+ "+I[id2, Stephen, 34, 1970-01-01T00:00:00.002, par1], "
+ "-D[id3, Julian, 53, 1970-01-01T00:00:00.003, par2], "
+ "+I[id4, Fabian, 31, 1970-01-01T00:00:00.004, par2], "
+ "-D[id5, Sophia, 18, 1970-01-01T00:00:00.005, par3], "
+ "+I[id6, Emma, 20, 1970-01-01T00:00:00.006, par3], "
+ "+I[id7, Bob, 44, 1970-01-01T00:00:00.007, par4], "
+ "+I[id8, Han, 56, 1970-01-01T00:00:00.008, par4], "
+ "-D[id9, Jane, 19, 1970-01-01T00:00:00.006, par3]]";
assertThat(actual2, is(expected2));
}
@@ -241,8 +243,8 @@ public class TestInputFormat {
// when isEmitDelete is false.
List<RowData> result1 = readData(inputFormat);
final String actual1 = TestData.rowDataToString(result1, true);
final String expected1 = "[+U(id1,Danny,22,1970-01-01T00:00:00.004,par1)]";
final String actual1 = TestData.rowDataToString(result1);
final String expected1 = "[+U[id1, Danny, 22, 1970-01-01T00:00:00.004, par1]]";
assertThat(actual1, is(expected1));
// refresh the input format and set isEmitDelete to true.
@@ -252,8 +254,8 @@ public class TestInputFormat {
List<RowData> result2 = readData(inputFormat);
final String actual2 = TestData.rowDataToString(result2, true);
final String expected2 = "[+U(id1,Danny,22,1970-01-01T00:00:00.004,par1)]";
final String actual2 = TestData.rowDataToString(result2);
final String expected2 = "[+U[id1, Danny, 22, 1970-01-01T00:00:00.004, par1]]";
assertThat(actual2, is(expected2));
}
@@ -272,13 +274,13 @@ public class TestInputFormat {
List<RowData> result = readData(inputFormat);
final String actual = TestData.rowDataToString(result, true);
final String actual = TestData.rowDataToString(result);
final String expected = "["
+ "+I(id1,Danny,24,1970-01-01T00:00:00.001,par1), "
+ "+I(id2,Stephen,34,1970-01-01T00:00:00.002,par1), "
+ "-D(id3,Julian,53,1970-01-01T00:00:00.003,par2), "
+ "-D(id5,Sophia,18,1970-01-01T00:00:00.005,par3), "
+ "-D(id9,Jane,19,1970-01-01T00:00:00.006,par3)]";
+ "+I[id1, Danny, 24, 1970-01-01T00:00:00.001, par1], "
+ "+I[id2, Stephen, 34, 1970-01-01T00:00:00.002, par1], "
+ "-D[id3, Julian, 53, 1970-01-01T00:00:00.003, par2], "
+ "-D[id5, Sophia, 18, 1970-01-01T00:00:00.005, par3], "
+ "-D[id9, Jane, 19, 1970-01-01T00:00:00.006, par3]]";
assertThat(actual, is(expected));
}
@@ -294,10 +296,10 @@ public class TestInputFormat {
List<RowData> result = readData(inputFormat);
final String actual = TestData.rowDataToString(result, true);
final String actual = TestData.rowDataToString(result);
final String expected = "["
+ "+I(id1,Danny,24,1970-01-01T00:00:00.001,par1), "
+ "+I(id2,Stephen,34,1970-01-01T00:00:00.002,par1)]";
+ "+I[id1, Danny, 24, 1970-01-01T00:00:00.001, par1], "
+ "+I[id2, Stephen, 34, 1970-01-01T00:00:00.002, par1]]";
assertThat(actual, is(expected));
}
@@ -317,7 +319,9 @@ public class TestInputFormat {
List<RowData> result = readData(inputFormat);
String actual = TestData.rowDataToString(result);
String expected = "[id1,Danny,23,1970-01-01T00:00:00.001,par1, id2,Stephen,33,1970-01-01T00:00:00.002,par1]";
String expected = "["
+ "+I[id1, Danny, 23, 1970-01-01T00:00:00.001, par1], "
+ "+I[id2, Stephen, 33, 1970-01-01T00:00:00.002, par1]]";
assertThat(actual, is(expected));
}
@@ -335,16 +339,16 @@ public class TestInputFormat {
List<RowData> result = readData(inputFormat);
final String actual = TestData.rowDataToString(result, true);
final String actual = TestData.rowDataToString(result);
final String expected = "["
+ "+I(id1,Danny,19,1970-01-01T00:00:00.001,par1), "
+ "-U(id1,Danny,19,1970-01-01T00:00:00.001,par1), "
+ "+U(id1,Danny,20,1970-01-01T00:00:00.002,par1), "
+ "-U(id1,Danny,20,1970-01-01T00:00:00.002,par1), "
+ "+U(id1,Danny,21,1970-01-01T00:00:00.003,par1), "
+ "-U(id1,Danny,21,1970-01-01T00:00:00.003,par1), "
+ "+U(id1,Danny,22,1970-01-01T00:00:00.004,par1), "
+ "-D(id1,Danny,22,1970-01-01T00:00:00.005,par1)]";
+ "+I[id1, Danny, 19, 1970-01-01T00:00:00.001, par1], "
+ "-U[id1, Danny, 19, 1970-01-01T00:00:00.001, par1], "
+ "+U[id1, Danny, 20, 1970-01-01T00:00:00.002, par1], "
+ "-U[id1, Danny, 20, 1970-01-01T00:00:00.002, par1], "
+ "+U[id1, Danny, 21, 1970-01-01T00:00:00.003, par1], "
+ "-U[id1, Danny, 21, 1970-01-01T00:00:00.003, par1], "
+ "+U[id1, Danny, 22, 1970-01-01T00:00:00.004, par1], "
+ "-D[id1, Danny, 22, 1970-01-01T00:00:00.005, par1]]";
assertThat(actual, is(expected));
}

View File

@@ -0,0 +1,71 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hudi.utils;

import org.apache.flink.table.catalog.Column;
import org.apache.flink.table.catalog.ResolvedSchema;
import org.apache.flink.table.catalog.UniqueConstraint;
import org.apache.flink.table.catalog.WatermarkSpec;
import org.apache.flink.table.types.DataType;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.IntStream;

/**
 * Builder for {@link ResolvedSchema}.
 */
public class SchemaBuilder {
  private List<Column> columns;
  private List<WatermarkSpec> watermarkSpecs;
  private UniqueConstraint constraint;

  public static SchemaBuilder instance() {
    return new SchemaBuilder();
  }

  private SchemaBuilder() {
    this.columns = new ArrayList<>();
    this.watermarkSpecs = new ArrayList<>();
  }

  public SchemaBuilder field(String name, DataType type) {
    this.columns.add(Column.physical(name, type));
    return this;
  }

  public SchemaBuilder fields(List<String> names, List<DataType> types) {
    List<Column> columns = IntStream.range(0, names.size())
        .mapToObj(idx -> Column.physical(names.get(idx), types.get(idx)))
        .collect(Collectors.toList());
    this.columns.addAll(columns);
    return this;
  }

  public SchemaBuilder primaryKey(String... columns) {
    this.constraint = UniqueConstraint.primaryKey("pk", Arrays.asList(columns));
    return this;
  }

  public ResolvedSchema build() {
    return new ResolvedSchema(columns, watermarkSpecs, constraint);
  }
}
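
Usage mirrors the TableSchema.builder() calls it replaces in TestHoodieTableFactory above, for example:

ResolvedSchema schema = SchemaBuilder.instance()
    .field("f0", DataTypes.INT().notNull())
    .field("f1", DataTypes.VARCHAR(20))
    .field("f2", DataTypes.TIMESTAMP(3))
    .primaryKey("f0")
    .build();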

View File

@@ -26,6 +26,7 @@ import org.apache.hudi.utils.factory.ContinuousFileSourceFactory;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.TableSchema;
import org.apache.flink.table.catalog.ResolvedSchema;
import org.apache.flink.table.runtime.typeutils.RowDataSerializer;
import org.apache.flink.table.types.DataType;
import org.apache.flink.table.types.logical.RowType;
@@ -50,10 +51,8 @@ public class TestConfigurations {
public static final RowType ROW_TYPE = (RowType) ROW_DATA_TYPE.getLogicalType();
public static final TableSchema TABLE_SCHEMA = TableSchema.builder()
.fields(
ROW_TYPE.getFieldNames().toArray(new String[0]),
ROW_DATA_TYPE.getChildren().toArray(new DataType[0]))
public static final ResolvedSchema TABLE_SCHEMA = SchemaBuilder.instance()
.fields(ROW_TYPE.getFieldNames(), ROW_DATA_TYPE.getChildren())
.build();
public static String getCreateHoodieTableDDL(String tableName, Map<String, String> options) {

View File

@@ -73,7 +73,9 @@ import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
/** Data set for testing, also some utilities to check the results. */
/**
* Data set for testing, also some utilities to check the results.
*/
public class TestData {
public static List<RowData> DATA_SET_INSERT = Arrays.asList(
insertRow(StringData.fromString("id1"), StringData.fromString("Danny"), 23,
@@ -128,6 +130,7 @@ public class TestData {
);
public static List<RowData> DATA_SET_INSERT_DUPLICATES = new ArrayList<>();
static {
IntStream.range(0, 5).forEach(i -> DATA_SET_INSERT_DUPLICATES.add(
insertRow(StringData.fromString("id1"), StringData.fromString("Danny"), 23,
@@ -135,6 +138,7 @@ public class TestData {
}
public static List<RowData> DATA_SET_INSERT_SAME_KEY = new ArrayList<>();
static {
IntStream.range(0, 5).forEach(i -> DATA_SET_INSERT_SAME_KEY.add(
insertRow(StringData.fromString("id1"), StringData.fromString("Danny"), 23,
@@ -280,39 +284,34 @@ public class TestData {
TimestampData.fromEpochMillis(2), StringData.fromString("par1"))
);
/**
* Returns string format of a list of RowData.
*/
public static String rowDataToString(List<RowData> rows) {
return rowDataToString(rows, false);
private static Integer toIdSafely(Object id) {
if (id == null) {
return -1;
}
final String idStr = id.toString();
if (idStr.startsWith("id")) {
return Integer.parseInt(idStr.substring(2));
}
return -1;
}
/**
* Returns string format of a list of RowData.
*
* @param withChangeFlag whether to print the change flag
*/
public static String rowDataToString(List<RowData> rows, boolean withChangeFlag) {
public static String rowDataToString(List<RowData> rows) {
DataStructureConverter<Object, Object> converter =
DataStructureConverters.getConverter(TestConfigurations.ROW_DATA_TYPE);
return rows.stream()
.sorted(Comparator.comparing(o -> toStringSafely(o.getString(0))))
.map(row -> {
final String rowStr = converter.toExternal(row).toString();
if (withChangeFlag) {
return row.getRowKind().shortString() + "(" + rowStr + ")";
} else {
return rowStr;
}
})
.sorted(Comparator.comparing(o -> toIdSafely(o.getString(0))))
.map(row -> converter.toExternal(row).toString())
.collect(Collectors.toList()).toString();
}
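
Note: the withChangeFlag parameter became redundant because, under Flink 1.13, the Row produced by the data-structure converter already carries the change flag in its own toString(). A minimal sketch with a hypothetical two-field row type:

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.data.GenericRowData;
import org.apache.flink.table.data.StringData;
import org.apache.flink.table.data.conversion.DataStructureConverter;
import org.apache.flink.table.data.conversion.DataStructureConverters;
import org.apache.flink.table.types.DataType;
import org.apache.flink.types.RowKind;

public class RowDataToStringSketch {
  public static void main(String[] args) {
    DataType dataType = DataTypes.ROW(
        DataTypes.FIELD("uuid", DataTypes.STRING()),
        DataTypes.FIELD("age", DataTypes.INT()));
    DataStructureConverter<Object, Object> converter =
        DataStructureConverters.getConverter(dataType);
    converter.open(Thread.currentThread().getContextClassLoader());
    GenericRowData rowData = GenericRowData.ofKind(
        RowKind.DELETE, StringData.fromString("id3"), 53);
    // The external Row's toString() already includes the -D flag,
    // so no manual rowKind.shortString() prefix is needed:
    System.out.println(converter.toExternal(rowData)); // -D[id3, 53]
  }
}
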
/**
* Write a list of row data with Hoodie format base on the given configuration.
*
* @param dataBuffer The data buffer to write
* @param conf The flink configuration
* @param dataBuffer The data buffer to write
* @param conf The flink configuration
* @throws Exception if error occurs
*/
public static void writeData(
@@ -379,8 +378,8 @@ public class TestData {
* Sort the {@code rows} using field at index {@code orderingPos} and asserts
* it equals with the expected string {@code expected}.
*
* @param rows Actual result rows
* @param expected Expected string of the sorted rows
* @param rows Actual result rows
* @param expected Expected string of the sorted rows
* @param orderingPos Field position for ordering
*/
public static void assertRowsEquals(List<Row> rows, String expected, int orderingPos) {
@@ -399,9 +398,9 @@ public class TestData {
*/
public static void assertRowsEquals(List<Row> rows, List<RowData> expected) {
String rowsString = rows.stream()
.sorted(Comparator.comparing(o -> toStringSafely(o.getField(0))))
.sorted(Comparator.comparing(o -> toIdSafely(o.getField(0))))
.collect(Collectors.toList()).toString();
assertThat(rowsString, is(rowDataToString(expected)));
assertThat(rowDataToString(expected), is(rowsString));
}
/**
@@ -425,7 +424,7 @@ public class TestData {
*/
public static void assertRowDataEquals(List<RowData> rows, List<RowData> expected) {
String rowsString = rowDataToString(rows);
assertThat(rowsString, is(rowDataToString(expected)));
assertThat(rowDataToString(expected), is(rowsString));
}
/**
@@ -526,8 +525,8 @@ public class TestData {
*
* <p>Note: Replace it with the Flink reader when it is supported.
*
* @param basePath The file base to check, should be a directory
* @param expected The expected results mapping, the key should be the partition path
* @param basePath The file base to check, should be a directory
* @param expected The expected results mapping, the key should be the partition path
*/
public static void checkWrittenFullData(
File basePath,
@@ -571,12 +570,12 @@ public class TestData {
*
* <p>Note: Replace it with the Flink reader when it is supported.
*
* @param fs The file system
* @param fs The file system
* @param latestInstant The latest committed instant of current table
* @param baseFile The file base to check, should be a directory
* @param expected The expected results mapping, the key should be the partition path
* @param partitions The expected partition number
* @param schema The read schema
* @param baseFile The file base to check, should be a directory
* @param expected The expected results mapping, the key should be the partition path
* @param partitions The expected partition number
* @param schema The read schema
*/
public static void checkWrittenDataMOR(
FileSystem fs,

View File

@@ -20,8 +20,8 @@ package org.apache.hudi.utils.source;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.core.fs.Path;
import org.apache.flink.formats.common.TimestampFormat;
import org.apache.flink.formats.json.JsonRowDataDeserializationSchema;
import org.apache.flink.formats.json.TimestampFormat;
import org.apache.flink.runtime.state.CheckpointListener;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;