[MINOR] Removing un-used files and references (#3150)
@@ -943,137 +943,4 @@ public class TestHiveSyncTool {
         .containsValue("BIGINT"), errorMsg);
     hiveClient.updateHiveSQL(dropTableSql);
   }
-  /*
-  private void verifyOldParquetFileTest(HoodieHiveClient hiveClient, String emptyCommitTime) throws Exception {
-    assertTrue(hiveClient.doesTableExist(HiveTestUtil.hiveSyncConfig.tableName),
-        "Table " + HiveTestUtil.hiveSyncConfig.tableName + " should exist after sync completes");
-    assertEquals(hiveClient.getTableSchema(HiveTestUtil.hiveSyncConfig.tableName).size(),
-        hiveClient.getDataSchema().getColumns().size() + 1,
-        "Hive Schema should match the table schema + partition field");
-    assertEquals(1,
-        hiveClient.scanTablePartitions(HiveTestUtil.hiveSyncConfig.tableName).size(),
-        "Table partitions should match the number of partitions we wrote");
-    assertEquals("The last commit that was sycned should be updated in the TBLPROPERTIES", emptyCommitTime,
-        hiveClient.getLastCommitTimeSynced(HiveTestUtil.hiveSyncConfig.tableName).get());
-
-    // make sure correct schema is picked
-    Schema schema = SchemaTestUtil.getSimpleSchema();
-    for (Field field : schema.getFields()) {
-      assertEquals(String.format("Hive Schema Field %s was added", field), field.schema().getType().getName(),
-          hiveClient.getTableSchema(HiveTestUtil.hiveSyncConfig.tableName).get(field.name()).toLowerCase());
-    }
-    assertEquals("Hive Schema Field datestr was added", "string",
-        hiveClient.getTableSchema(HiveTestUtil.hiveSyncConfig.tableName).get("datestr").toLowerCase());
-    assertEquals(schema.getFields().size() + 1,
-        hiveClient.getTableSchema(HiveTestUtil.hiveSyncConfig.tableName).size(),
-        "Hive Schema fields size");
-  }
-
-  @ParameterizedTest
-  @MethodSource("useJdbc")
-  public void testPickingOlderParquetFileIfLatestIsEmptyCommit(boolean useJdbc) throws Exception {
-    HiveTestUtil.hiveSyncConfig.useJdbc = useJdbc;
-    final String commitTime = "100";
-    HiveTestUtil.createCOWTable(commitTime, 1, false);
-    HoodieCommitMetadata commitMetadata = new HoodieCommitMetadata();
-    // create empty commit
-    final String emptyCommitTime = "200";
-    HiveTestUtil.createCommitFile(commitMetadata, emptyCommitTime);
-    HoodieHiveClient hiveClient =
-        new HoodieHiveClient(HiveTestUtil.hiveSyncConfig, HiveTestUtil.getHiveConf(), HiveTestUtil.fileSystem);
-    assertFalse(hiveClient.doesTableExist(HiveTestUtil.hiveSyncConfig.tableName),
-        "Table " + HiveTestUtil.hiveSyncConfig.tableName + " should not exist initially");
-
-    HiveSyncTool tool = new HiveSyncTool(HiveTestUtil.hiveSyncConfig, HiveTestUtil.getHiveConf(), HiveTestUtil.fileSystem);
-    tool.syncHoodieTable();
-
-    verifyOldParquetFileTest(hiveClient, emptyCommitTime);
-  }
-
-  @ParameterizedTest
-  @MethodSource("useJdbc")
-  public void testNotPickingOlderParquetFileWhenLatestCommitReadFails(boolean useJdbc) throws Exception {
-    HiveTestUtil.hiveSyncConfig.useJdbc = useJdbc;
-    final String commitTime = "100";
-    HiveTestUtil.createCOWTable(commitTime, 1, false);
-    HoodieCommitMetadata commitMetadata = new HoodieCommitMetadata();
-
-    // evolve the schema
-    DateTime dateTime = DateTime.now().plusDays(6);
-    String commitTime2 = "101";
-    HiveTestUtil.addCOWPartitions(1, false, false, dateTime, commitTime2);
-
-    // create empty commit
-    final String emptyCommitTime = "200";
-    HiveTestUtil.createCommitFile(commitMetadata, emptyCommitTime);
-
-    HoodieHiveClient hiveClient =
-        new HoodieHiveClient(HiveTestUtil.hiveSyncConfig, HiveTestUtil.getHiveConf(), HiveTestUtil.fileSystem);
-    assertFalse(hiveClient.doesTableExist(HiveTestUtil.hiveSyncConfig.tableName),
-        "Table " + HiveTestUtil.hiveSyncConfig.tableName + " should not exist initially");
-
-    HiveSyncTool tool = new HiveSyncTool(HiveTestUtil.hiveSyncConfig, HiveTestUtil.getHiveConf(), HiveTestUtil.fileSystem);
-
-    // now delete the evolved commit instant
-    Path fullPath = new Path(HiveTestUtil.hiveSyncConfig.basePath + "/" + HoodieTableMetaClient.METAFOLDER_NAME + "/"
-        + hiveClient.getActiveTimeline().getInstants()
-            .filter(inst -> inst.getTimestamp().equals(commitTime2))
-            .findFirst().get().getFileName());
-    assertTrue(HiveTestUtil.fileSystem.delete(fullPath, false));
-
-    try {
-      tool.syncHoodieTable();
-    } catch (RuntimeException e) {
-      // we expect the table sync to fail
-    }
-
-    // table should not be synced yet
-    assertFalse(hiveClient.doesTableExist(HiveTestUtil.hiveSyncConfig.tableName),
-        "Table " + HiveTestUtil.hiveSyncConfig.tableName + " should not exist at all");
-  }
-
-  @ParameterizedTest
-  @MethodSource("useJdbc")
-  public void testNotPickingOlderParquetFileWhenLatestCommitReadFailsForExistingTable(boolean useJdbc) throws Exception {
-    HiveTestUtil.hiveSyncConfig.useJdbc = useJdbc;
-    final String commitTime = "100";
-    HiveTestUtil.createCOWTable(commitTime, 1, false);
-    HoodieCommitMetadata commitMetadata = new HoodieCommitMetadata();
-    // create empty commit
-    final String emptyCommitTime = "200";
-    HiveTestUtil.createCommitFile(commitMetadata, emptyCommitTime);
-    HoodieHiveClient hiveClient =
-        new HoodieHiveClient(HiveTestUtil.hiveSyncConfig, HiveTestUtil.getHiveConf(), HiveTestUtil.fileSystem);
-    assertFalse(hiveClient.doesTableExist(HiveTestUtil.hiveSyncConfig.tableName),
-        "Table " + HiveTestUtil.hiveSyncConfig.tableName + " should not exist initially");
-
-    HiveSyncTool tool = new HiveSyncTool(HiveTestUtil.hiveSyncConfig, HiveTestUtil.getHiveConf(), HiveTestUtil.fileSystem);
-    tool.syncHoodieTable();
-
-    verifyOldParquetFileTest(hiveClient, emptyCommitTime);
-
-    // evolve the schema
-    DateTime dateTime = DateTime.now().plusDays(6);
-    String commitTime2 = "301";
-    HiveTestUtil.addCOWPartitions(1, false, false, dateTime, commitTime2);
-    HiveTestUtil.createCommitFile(commitMetadata, "400"); // create another empty commit
-
-    tool = new HiveSyncTool(HiveTestUtil.hiveSyncConfig, HiveTestUtil.getHiveConf(), HiveTestUtil.fileSystem);
-    HoodieHiveClient hiveClientLatest = new HoodieHiveClient(HiveTestUtil.hiveSyncConfig, HiveTestUtil.getHiveConf(), HiveTestUtil.fileSystem);
-    // now delete the evolved commit instant
-    Path fullPath = new Path(HiveTestUtil.hiveSyncConfig.basePath + "/" + HoodieTableMetaClient.METAFOLDER_NAME + "/"
-        + hiveClientLatest.getActiveTimeline().getInstants()
-            .filter(inst -> inst.getTimestamp().equals(commitTime2))
-            .findFirst().get().getFileName());
-    assertTrue(HiveTestUtil.fileSystem.delete(fullPath, false));
-
-    try {
-      tool.syncHoodieTable();
-    } catch (RuntimeException e) {
-      // we expect the table sync to fail
-    }
-
-    // old sync values should be left intact
-    verifyOldParquetFileTest(hiveClient, emptyCommitTime);
-  }*/
 }
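The block removed above is dead code: a commented-out helper (verifyOldParquetFileTest) and three commented-out JUnit 5 tests, each parameterized over JDBC and non-JDBC Hive sync via @MethodSource("useJdbc"). The provider method itself is not part of this hunk; the sketch below only illustrates how such JUnit 5 wiring conventionally works, and its class name and body are assumptions for illustration, not code from this commit.

import java.util.stream.Stream;

import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.MethodSource;

// Illustrative sketch of the @MethodSource wiring the removed tests relied on.
class UseJdbcProviderSketch {

  // A @MethodSource factory is looked up by name ("useJdbc") and must be static.
  static Stream<Boolean> useJdbc() {
    return Stream.of(false, true);
  }

  @ParameterizedTest
  @MethodSource("useJdbc")
  void runsOncePerSyncMode(boolean useJdbc) {
    // The removed tests set HiveTestUtil.hiveSyncConfig.useJdbc = useJdbc here
    // before invoking HiveSyncTool.syncHoodieTable().
  }
}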
@@ -1,27 +0,0 @@
-<?xml version="1.0" encoding="UTF-8" standalone="no"?>
-<!--
-  Licensed to the Apache Software Foundation (ASF) under one
-  or more contributor license agreements.  See the NOTICE file
-  distributed with this work for additional information
-  regarding copyright ownership.  The ASF licenses this file
-  to you under the Apache License, Version 2.0 (the
-  "License"); you may not use this file except in compliance
-  with the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing, software
-  distributed under the License is distributed on an "AS IS" BASIS,
-  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  See the License for the specific language governing permissions and
-  limitations under the License.
--->
-<!DOCTYPE properties SYSTEM "http://java.sun.com/dtd/properties.dtd">
-<properties>
-  <comment>hi</comment>
-  <entry key="hivesyncglobal.retry_attempts">1</entry>
-  <entry key="hivesyncglobal.remote_hive_site_uri">/home/hive-remote-site.xml</entry>
-  <entry key="hivesyncglobal.remote_base_path">hdfs://hadoop-cluster2:9000/tmp/hudi_trips_cow</entry>
-  <entry key="hivesyncglobal.local_hive_site_uri">/home/hive/packaging/target/apache-hive-2.3.4-uber-51-SNAPSHOT-bin/apache-hive-2.3.4-uber-51-SNAPSHOT-bin/conf/hive-site.xml</entry>
-  <entry key="hivesyncglobal.remote_hs2_jdbc_urls">jdbc:hive2://hadoop-cluster2:10000/default;transportMode=http;httpPath=hs2</entry>
-</properties>
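The file deleted above is a standard java.util.Properties XML document (it declares the java.sun.com properties DTD) holding hivesyncglobal.* settings. Purely for context, and not code from this commit, a minimal sketch of reading that format back with the standard Properties.loadFromXML API could look like this; the class name and file path are hypothetical, since the file's original location is not shown in this diff.

import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;

public class HiveSyncGlobalPropsReader {
  public static void main(String[] args) throws IOException {
    Properties props = new Properties();
    // Hypothetical local path to a copy of the removed file.
    try (InputStream in = new FileInputStream("hive-sync-global.properties.xml")) {
      // Parses the <properties>/<entry key="..."> XML format used by the removed file.
      props.loadFromXML(in);
    }
    // Keys exactly as they appeared in the removed file.
    System.out.println(props.getProperty("hivesyncglobal.retry_attempts"));
    System.out.println(props.getProperty("hivesyncglobal.remote_hs2_jdbc_urls"));
  }
}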