1
0

HUDI-267 Refactor bad method name HoodieTestUtils#initTableType and HoodieTableMetaClient#initializePathAsHoodieDataset (#916)

This commit is contained in:
vinoyang
2019-09-22 00:05:02 +08:00
committed by vinoth chandar
parent 1104f9526f
commit f020d029c4
11 changed files with 24 additions and 24 deletions

View File

@@ -200,7 +200,7 @@ public abstract class HoodieClientTestHarness implements Serializable {
throw new IllegalStateException("The Spark context has not been initialized.");
}
-        HoodieTestUtils.initTableType(jsc.hadoopConfiguration(), basePath, getTableType());
+        HoodieTestUtils.init(jsc.hadoopConfiguration(), basePath, getTableType());
}
/**

View File

@@ -515,7 +515,7 @@ public class TestCleaner extends TestHoodieClientBase {
HoodieCleaningPolicy.KEEP_LATEST_FILE_VERSIONS).retainFileVersions(1).build())
.build();
-    HoodieTableMetaClient metaClient = HoodieTestUtils.initTableType(jsc.hadoopConfiguration(), basePath,
+    HoodieTableMetaClient metaClient = HoodieTestUtils.init(jsc.hadoopConfiguration(), basePath,
HoodieTableType.MERGE_ON_READ);
// Make 3 files, one base file and 2 log files associated with base file
@@ -858,7 +858,7 @@ public class TestCleaner extends TestHoodieClientBase {
*/
public void testPendingCompactions(HoodieWriteConfig config, int expNumFilesDeleted,
int expNumFilesUnderCompactionDeleted) throws IOException {
-    HoodieTableMetaClient metaClient = HoodieTestUtils.initTableType(jsc.hadoopConfiguration(), basePath,
+    HoodieTableMetaClient metaClient = HoodieTestUtils.init(jsc.hadoopConfiguration(), basePath,
HoodieTableType.MERGE_ON_READ);
String[] instants = new String[]{"000", "001", "003", "005", "007", "009", "011", "013"};
String[] compactionInstants = new String[]{"002", "004", "006", "008", "010"};

View File

@@ -53,7 +53,7 @@ public class TestCompactionAdminClient extends TestHoodieClientBase {
public void setUp() throws Exception {
initTempFolderAndPath();
initSparkContexts();
-    metaClient = HoodieTestUtils.initTableType(HoodieTestUtils.getDefaultHadoopConf(), basePath, MERGE_ON_READ);
+    metaClient = HoodieTestUtils.init(HoodieTestUtils.getDefaultHadoopConf(), basePath, MERGE_ON_READ);
client = new CompactionAdminClient(jsc, basePath);
}

View File

@@ -62,7 +62,7 @@ public class TestHoodieCompactor extends HoodieClientTestHarness {
initTempFolderAndPath();
hadoopConf = HoodieTestUtils.getDefaultHadoopConf();
fs = FSUtils.getFs(basePath, hadoopConf);
-    HoodieTestUtils.initTableType(hadoopConf, basePath, HoodieTableType.MERGE_ON_READ);
+    HoodieTestUtils.init(hadoopConf, basePath, HoodieTableType.MERGE_ON_READ);
initTestDataGenerator();
}
@@ -96,7 +96,7 @@ public class TestHoodieCompactor extends HoodieClientTestHarness {
@Test(expected = HoodieNotSupportedException.class)
public void testCompactionOnCopyOnWriteFail() throws Exception {
-    HoodieTestUtils.initTableType(hadoopConf, basePath, HoodieTableType.COPY_ON_WRITE);
+    HoodieTestUtils.init(hadoopConf, basePath, HoodieTableType.COPY_ON_WRITE);
HoodieTableMetaClient metaClient = new HoodieTableMetaClient(jsc.hadoopConfiguration(), basePath);
HoodieTable table = HoodieTable.getHoodieTable(metaClient, getConfig(), jsc);

View File

@@ -85,7 +85,7 @@ public class TestMergeOnReadTable extends HoodieClientTestHarness {
jsc.hadoopConfiguration().addResource(dfs.getConf());
initTempFolderAndPath();
dfs.mkdirs(new Path(basePath));
-    HoodieTestUtils.initTableType(jsc.hadoopConfiguration(), basePath, HoodieTableType.MERGE_ON_READ);
+    HoodieTestUtils.init(jsc.hadoopConfiguration(), basePath, HoodieTableType.MERGE_ON_READ);
initTestDataGenerator();
}
@@ -294,7 +294,7 @@ public class TestMergeOnReadTable extends HoodieClientTestHarness {
public void testCOWToMORConvertedDatasetRollback() throws Exception {
//Set TableType to COW
-    HoodieTestUtils.initTableType(jsc.hadoopConfiguration(), basePath, HoodieTableType.COPY_ON_WRITE);
+    HoodieTestUtils.init(jsc.hadoopConfiguration(), basePath, HoodieTableType.COPY_ON_WRITE);
HoodieWriteConfig cfg = getConfig(true);
try (HoodieWriteClient client = getWriteClient(cfg);) {
@@ -330,7 +330,7 @@ public class TestMergeOnReadTable extends HoodieClientTestHarness {
assertNoWriteErrors(statuses);
//Set TableType to MOR
-      HoodieTestUtils.initTableType(jsc.hadoopConfiguration(), basePath, HoodieTableType.MERGE_ON_READ);
+      HoodieTestUtils.init(jsc.hadoopConfiguration(), basePath, HoodieTableType.MERGE_ON_READ);
//rollback a COW commit when TableType is MOR
client.rollback(newCommitTime);