[MINOR] Removing un-used files and references (#3150)
@@ -943,137 +943,4 @@ public class TestHiveSyncTool {
         .containsValue("BIGINT"), errorMsg);
     hiveClient.updateHiveSQL(dropTableSql);
   }
-  /*
-  private void verifyOldParquetFileTest(HoodieHiveClient hiveClient, String emptyCommitTime) throws Exception {
-    assertTrue(hiveClient.doesTableExist(HiveTestUtil.hiveSyncConfig.tableName),
-        "Table " + HiveTestUtil.hiveSyncConfig.tableName + " should exist after sync completes");
-    assertEquals(hiveClient.getTableSchema(HiveTestUtil.hiveSyncConfig.tableName).size(),
-        hiveClient.getDataSchema().getColumns().size() + 1,
-        "Hive Schema should match the table schema + partition field");
-    assertEquals(1,
-        hiveClient.scanTablePartitions(HiveTestUtil.hiveSyncConfig.tableName).size(),
-        "Table partitions should match the number of partitions we wrote");
-    assertEquals("The last commit that was synced should be updated in the TBLPROPERTIES", emptyCommitTime,
-        hiveClient.getLastCommitTimeSynced(HiveTestUtil.hiveSyncConfig.tableName).get());
-
-    // make sure correct schema is picked
-    Schema schema = SchemaTestUtil.getSimpleSchema();
-    for (Field field : schema.getFields()) {
-      assertEquals(String.format("Hive Schema Field %s was added", field), field.schema().getType().getName(),
-          hiveClient.getTableSchema(HiveTestUtil.hiveSyncConfig.tableName).get(field.name()).toLowerCase());
-    }
-    assertEquals("Hive Schema Field datestr was added", "string",
-        hiveClient.getTableSchema(HiveTestUtil.hiveSyncConfig.tableName).get("datestr").toLowerCase());
-    assertEquals(schema.getFields().size() + 1,
-        hiveClient.getTableSchema(HiveTestUtil.hiveSyncConfig.tableName).size(),
-        "Hive Schema fields size");
-  }
-
-  @ParameterizedTest
-  @MethodSource("useJdbc")
-  public void testPickingOlderParquetFileIfLatestIsEmptyCommit(boolean useJdbc) throws Exception {
-    HiveTestUtil.hiveSyncConfig.useJdbc = useJdbc;
-    final String commitTime = "100";
-    HiveTestUtil.createCOWTable(commitTime, 1, false);
-    HoodieCommitMetadata commitMetadata = new HoodieCommitMetadata();
-    // create empty commit
-    final String emptyCommitTime = "200";
-    HiveTestUtil.createCommitFile(commitMetadata, emptyCommitTime);
-    HoodieHiveClient hiveClient =
-        new HoodieHiveClient(HiveTestUtil.hiveSyncConfig, HiveTestUtil.getHiveConf(), HiveTestUtil.fileSystem);
-    assertFalse(hiveClient.doesTableExist(HiveTestUtil.hiveSyncConfig.tableName),
-        "Table " + HiveTestUtil.hiveSyncConfig.tableName + " should not exist initially");
-
-    HiveSyncTool tool = new HiveSyncTool(HiveTestUtil.hiveSyncConfig, HiveTestUtil.getHiveConf(), HiveTestUtil.fileSystem);
-    tool.syncHoodieTable();
-
-    verifyOldParquetFileTest(hiveClient, emptyCommitTime);
-  }
-
-  @ParameterizedTest
-  @MethodSource("useJdbc")
-  public void testNotPickingOlderParquetFileWhenLatestCommitReadFails(boolean useJdbc) throws Exception {
-    HiveTestUtil.hiveSyncConfig.useJdbc = useJdbc;
-    final String commitTime = "100";
-    HiveTestUtil.createCOWTable(commitTime, 1, false);
-    HoodieCommitMetadata commitMetadata = new HoodieCommitMetadata();
-
-    // evolve the schema
-    DateTime dateTime = DateTime.now().plusDays(6);
-    String commitTime2 = "101";
-    HiveTestUtil.addCOWPartitions(1, false, false, dateTime, commitTime2);
-
-    // create empty commit
-    final String emptyCommitTime = "200";
-    HiveTestUtil.createCommitFile(commitMetadata, emptyCommitTime);
-
-    HoodieHiveClient hiveClient =
-        new HoodieHiveClient(HiveTestUtil.hiveSyncConfig, HiveTestUtil.getHiveConf(), HiveTestUtil.fileSystem);
-    assertFalse(hiveClient.doesTableExist(HiveTestUtil.hiveSyncConfig.tableName),
-        "Table " + HiveTestUtil.hiveSyncConfig.tableName + " should not exist initially");
-
-    HiveSyncTool tool = new HiveSyncTool(HiveTestUtil.hiveSyncConfig, HiveTestUtil.getHiveConf(), HiveTestUtil.fileSystem);
-
-    // now delete the evolved commit instant
-    Path fullPath = new Path(HiveTestUtil.hiveSyncConfig.basePath + "/" + HoodieTableMetaClient.METAFOLDER_NAME + "/"
-        + hiveClient.getActiveTimeline().getInstants()
-            .filter(inst -> inst.getTimestamp().equals(commitTime2))
-            .findFirst().get().getFileName());
-    assertTrue(HiveTestUtil.fileSystem.delete(fullPath, false));
-
-    try {
-      tool.syncHoodieTable();
-    } catch (RuntimeException e) {
-      // we expect the table sync to fail
-    }
-
-    // table should not be synced yet
-    assertFalse(hiveClient.doesTableExist(HiveTestUtil.hiveSyncConfig.tableName),
-        "Table " + HiveTestUtil.hiveSyncConfig.tableName + " should not exist at all");
-  }
-
-  @ParameterizedTest
-  @MethodSource("useJdbc")
-  public void testNotPickingOlderParquetFileWhenLatestCommitReadFailsForExistingTable(boolean useJdbc) throws Exception {
-    HiveTestUtil.hiveSyncConfig.useJdbc = useJdbc;
-    final String commitTime = "100";
-    HiveTestUtil.createCOWTable(commitTime, 1, false);
-    HoodieCommitMetadata commitMetadata = new HoodieCommitMetadata();
-    // create empty commit
-    final String emptyCommitTime = "200";
-    HiveTestUtil.createCommitFile(commitMetadata, emptyCommitTime);
-    HoodieHiveClient hiveClient =
-        new HoodieHiveClient(HiveTestUtil.hiveSyncConfig, HiveTestUtil.getHiveConf(), HiveTestUtil.fileSystem);
-    assertFalse(hiveClient.doesTableExist(HiveTestUtil.hiveSyncConfig.tableName),
-        "Table " + HiveTestUtil.hiveSyncConfig.tableName + " should not exist initially");
-
-    HiveSyncTool tool = new HiveSyncTool(HiveTestUtil.hiveSyncConfig, HiveTestUtil.getHiveConf(), HiveTestUtil.fileSystem);
-    tool.syncHoodieTable();
-
-    verifyOldParquetFileTest(hiveClient, emptyCommitTime);
-
-    // evolve the schema
-    DateTime dateTime = DateTime.now().plusDays(6);
-    String commitTime2 = "301";
-    HiveTestUtil.addCOWPartitions(1, false, false, dateTime, commitTime2);
-    HiveTestUtil.createCommitFile(commitMetadata, "400"); // create another empty commit
-
-    tool = new HiveSyncTool(HiveTestUtil.hiveSyncConfig, HiveTestUtil.getHiveConf(), HiveTestUtil.fileSystem);
-    HoodieHiveClient hiveClientLatest = new HoodieHiveClient(HiveTestUtil.hiveSyncConfig, HiveTestUtil.getHiveConf(), HiveTestUtil.fileSystem);
-    // now delete the evolved commit instant
-    Path fullPath = new Path(HiveTestUtil.hiveSyncConfig.basePath + "/" + HoodieTableMetaClient.METAFOLDER_NAME + "/"
-        + hiveClientLatest.getActiveTimeline().getInstants()
-            .filter(inst -> inst.getTimestamp().equals(commitTime2))
-            .findFirst().get().getFileName());
-    assertTrue(HiveTestUtil.fileSystem.delete(fullPath, false));
-
-    try {
-      tool.syncHoodieTable();
-    } catch (RuntimeException e) {
-      // we expect the table sync to fail
-    }
-
-    // old sync values should be left intact
-    verifyOldParquetFileTest(hiveClient, emptyCommitTime);
-  }*/
 }