HUDI-267 Refactor bad method name HoodieTestUtils#initTableType and HoodieTableMetaClient#initializePathAsHoodieDataset (#916)

vinoyang
2019-09-22 00:05:02 +08:00
committed by vinoth chandar
parent 1104f9526f
commit f020d029c4
11 changed files with 24 additions and 24 deletions
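
The rename is purely mechanical: every call site that used HoodieTestUtils#initTableType now goes through a HoodieTestUtils#init overload with the same arguments, and the static factory HoodieTableMetaClient#initializePathAsHoodieDataset becomes initDatasetAndGetMetaClient with an unchanged parameter list. A minimal before/after sketch of a typical call site (the base path literal is illustrative and imports are omitted):

    // Before this commit:
    // HoodieTableMetaClient metaClient = HoodieTestUtils.initTableType(
    //     HoodieTestUtils.getDefaultHadoopConf(), "/tmp/hoodie-test", HoodieTableType.MERGE_ON_READ);

    // After this commit, the same initialization uses the renamed overload:
    HoodieTableMetaClient metaClient = HoodieTestUtils.init(
        HoodieTestUtils.getDefaultHadoopConf(), "/tmp/hoodie-test", HoodieTableType.MERGE_ON_READ);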


@@ -200,7 +200,7 @@ public abstract class HoodieClientTestHarness implements Serializable {
       throw new IllegalStateException("The Spark context has not been initialized.");
     }
-    HoodieTestUtils.initTableType(jsc.hadoopConfiguration(), basePath, getTableType());
+    HoodieTestUtils.init(jsc.hadoopConfiguration(), basePath, getTableType());
   }
 
   /**


@@ -515,7 +515,7 @@ public class TestCleaner extends TestHoodieClientBase {
             HoodieCleaningPolicy.KEEP_LATEST_FILE_VERSIONS).retainFileVersions(1).build())
         .build();
-    HoodieTableMetaClient metaClient = HoodieTestUtils.initTableType(jsc.hadoopConfiguration(), basePath,
+    HoodieTableMetaClient metaClient = HoodieTestUtils.init(jsc.hadoopConfiguration(), basePath,
         HoodieTableType.MERGE_ON_READ);
 
     // Make 3 files, one base file and 2 log files associated with base file
@@ -858,7 +858,7 @@ public class TestCleaner extends TestHoodieClientBase {
    */
   public void testPendingCompactions(HoodieWriteConfig config, int expNumFilesDeleted,
       int expNumFilesUnderCompactionDeleted) throws IOException {
-    HoodieTableMetaClient metaClient = HoodieTestUtils.initTableType(jsc.hadoopConfiguration(), basePath,
+    HoodieTableMetaClient metaClient = HoodieTestUtils.init(jsc.hadoopConfiguration(), basePath,
         HoodieTableType.MERGE_ON_READ);
     String[] instants = new String[]{"000", "001", "003", "005", "007", "009", "011", "013"};
     String[] compactionInstants = new String[]{"002", "004", "006", "008", "010"};


@@ -53,7 +53,7 @@ public class TestCompactionAdminClient extends TestHoodieClientBase {
   public void setUp() throws Exception {
     initTempFolderAndPath();
     initSparkContexts();
-    metaClient = HoodieTestUtils.initTableType(HoodieTestUtils.getDefaultHadoopConf(), basePath, MERGE_ON_READ);
+    metaClient = HoodieTestUtils.init(HoodieTestUtils.getDefaultHadoopConf(), basePath, MERGE_ON_READ);
     client = new CompactionAdminClient(jsc, basePath);
   }


@@ -62,7 +62,7 @@ public class TestHoodieCompactor extends HoodieClientTestHarness {
     initTempFolderAndPath();
     hadoopConf = HoodieTestUtils.getDefaultHadoopConf();
     fs = FSUtils.getFs(basePath, hadoopConf);
-    HoodieTestUtils.initTableType(hadoopConf, basePath, HoodieTableType.MERGE_ON_READ);
+    HoodieTestUtils.init(hadoopConf, basePath, HoodieTableType.MERGE_ON_READ);
     initTestDataGenerator();
   }
@@ -96,7 +96,7 @@ public class TestHoodieCompactor extends HoodieClientTestHarness {
   @Test(expected = HoodieNotSupportedException.class)
   public void testCompactionOnCopyOnWriteFail() throws Exception {
-    HoodieTestUtils.initTableType(hadoopConf, basePath, HoodieTableType.COPY_ON_WRITE);
+    HoodieTestUtils.init(hadoopConf, basePath, HoodieTableType.COPY_ON_WRITE);
     HoodieTableMetaClient metaClient = new HoodieTableMetaClient(jsc.hadoopConfiguration(), basePath);
     HoodieTable table = HoodieTable.getHoodieTable(metaClient, getConfig(), jsc);


@@ -85,7 +85,7 @@ public class TestMergeOnReadTable extends HoodieClientTestHarness {
     jsc.hadoopConfiguration().addResource(dfs.getConf());
     initTempFolderAndPath();
     dfs.mkdirs(new Path(basePath));
-    HoodieTestUtils.initTableType(jsc.hadoopConfiguration(), basePath, HoodieTableType.MERGE_ON_READ);
+    HoodieTestUtils.init(jsc.hadoopConfiguration(), basePath, HoodieTableType.MERGE_ON_READ);
     initTestDataGenerator();
   }
@@ -294,7 +294,7 @@ public class TestMergeOnReadTable extends HoodieClientTestHarness {
   public void testCOWToMORConvertedDatasetRollback() throws Exception {
     //Set TableType to COW
-    HoodieTestUtils.initTableType(jsc.hadoopConfiguration(), basePath, HoodieTableType.COPY_ON_WRITE);
+    HoodieTestUtils.init(jsc.hadoopConfiguration(), basePath, HoodieTableType.COPY_ON_WRITE);
 
     HoodieWriteConfig cfg = getConfig(true);
     try (HoodieWriteClient client = getWriteClient(cfg);) {
@@ -330,7 +330,7 @@ public class TestMergeOnReadTable extends HoodieClientTestHarness {
       assertNoWriteErrors(statuses);
 
       //Set TableType to MOR
-      HoodieTestUtils.initTableType(jsc.hadoopConfiguration(), basePath, HoodieTableType.MERGE_ON_READ);
+      HoodieTestUtils.init(jsc.hadoopConfiguration(), basePath, HoodieTableType.MERGE_ON_READ);
 
       //rollback a COW commit when TableType is MOR
       client.rollback(newCommitTime);


@@ -273,7 +273,7 @@ public class HoodieTableMetaClient implements Serializable {
     properties.put(HoodieTableConfig.HOODIE_TABLE_NAME_PROP_NAME, tableName);
     properties.put(HoodieTableConfig.HOODIE_TABLE_TYPE_PROP_NAME, type.name());
     properties.put(HoodieTableConfig.HOODIE_ARCHIVELOG_FOLDER_PROP_NAME, archiveLogFolder);
-    return HoodieTableMetaClient.initializePathAsHoodieDataset(hadoopConf, basePath, properties);
+    return HoodieTableMetaClient.initDatasetAndGetMetaClient(hadoopConf, basePath, properties);
   }
 
   /**
@@ -287,7 +287,7 @@ public class HoodieTableMetaClient implements Serializable {
     if (tableType == HoodieTableType.MERGE_ON_READ) {
       properties.setProperty(HoodieTableConfig.HOODIE_PAYLOAD_CLASS_PROP_NAME, payloadClassName);
     }
-    return HoodieTableMetaClient.initializePathAsHoodieDataset(hadoopConf, basePath, properties);
+    return HoodieTableMetaClient.initDatasetAndGetMetaClient(hadoopConf, basePath, properties);
   }
 
   /**
@@ -296,7 +296,7 @@ public class HoodieTableMetaClient implements Serializable {
    *
    * @return Instance of HoodieTableMetaClient
    */
-  public static HoodieTableMetaClient initializePathAsHoodieDataset(Configuration hadoopConf,
+  public static HoodieTableMetaClient initDatasetAndGetMetaClient(Configuration hadoopConf,
       String basePath, Properties props) throws IOException {
     log.info("Initializing " + basePath + " as hoodie dataset " + basePath);
     Path basePathDir = new Path(basePath);


@@ -96,21 +96,21 @@ public class HoodieTestUtils {
   }
 
   public static HoodieTableMetaClient init(String basePath, HoodieTableType tableType) throws IOException {
-    return initTableType(getDefaultHadoopConf(), basePath, tableType);
+    return init(getDefaultHadoopConf(), basePath, tableType);
   }
 
   public static HoodieTableMetaClient init(Configuration hadoopConf, String basePath)
       throws IOException {
-    return initTableType(hadoopConf, basePath, HoodieTableType.COPY_ON_WRITE);
+    return init(hadoopConf, basePath, HoodieTableType.COPY_ON_WRITE);
   }
 
-  public static HoodieTableMetaClient initTableType(Configuration hadoopConf, String basePath,
-      HoodieTableType tableType) throws IOException {
+  public static HoodieTableMetaClient init(Configuration hadoopConf, String basePath, HoodieTableType tableType)
+      throws IOException {
     Properties properties = new Properties();
     properties.setProperty(HoodieTableConfig.HOODIE_TABLE_NAME_PROP_NAME, RAW_TRIPS_TEST_NAME);
     properties.setProperty(HoodieTableConfig.HOODIE_TABLE_TYPE_PROP_NAME, tableType.name());
     properties.setProperty(HoodieTableConfig.HOODIE_PAYLOAD_CLASS_PROP_NAME, HoodieAvroPayload.class.getName());
-    return HoodieTableMetaClient.initializePathAsHoodieDataset(hadoopConf, basePath, properties);
+    return HoodieTableMetaClient.initDatasetAndGetMetaClient(hadoopConf, basePath, properties);
   }
 
   public static String makeNewCommitTime() {
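
After this hunk, HoodieTestUtils exposes three init overloads in place of the old initTableType, with the shorter variants delegating to the full (Configuration, basePath, tableType) form. A rough usage sketch under that reading (the hadoopConf and basePath variables are assumed to exist in the surrounding test; imports omitted):

    // Default table type (COPY_ON_WRITE) on an explicit Hadoop configuration:
    HoodieTableMetaClient cowMeta = HoodieTestUtils.init(hadoopConf, basePath);

    // Explicit table type, as most MERGE_ON_READ tests in this commit now do:
    HoodieTableMetaClient morMeta =
        HoodieTestUtils.init(HoodieTestUtils.getDefaultHadoopConf(), basePath, HoodieTableType.MERGE_ON_READ);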


@@ -113,7 +113,7 @@ public class HoodieLogFormatTest {
     assertTrue(fs.mkdirs(new Path(folder.getRoot().getPath())));
     this.partitionPath = new Path(folder.getRoot().getPath());
     this.basePath = folder.getRoot().getParent();
-    HoodieTestUtils.initTableType(MiniClusterUtil.configuration, basePath, HoodieTableType.MERGE_ON_READ);
+    HoodieTestUtils.init(MiniClusterUtil.configuration, basePath, HoodieTableType.MERGE_ON_READ);
   }
 
   @After


@@ -65,7 +65,7 @@ public class TestCompactionUtils {
   @Before
   public void init() throws IOException {
-    metaClient = HoodieTestUtils.initTableType(getDefaultHadoopConf(),
+    metaClient = HoodieTestUtils.init(getDefaultHadoopConf(),
         tmpFolder.getRoot().getAbsolutePath(), HoodieTableType.MERGE_ON_READ);
     basePath = metaClient.getBasePath();
   }


@@ -172,7 +172,7 @@ public class HoodieRealtimeRecordReaderTest {
   public void testReader(boolean partitioned) throws Exception {
     // initial commit
     Schema schema = HoodieAvroUtils.addMetadataFields(SchemaTestUtil.getEvolvedSchema());
-    HoodieTestUtils.initTableType(hadoopConf, basePath.getRoot().getAbsolutePath(),
+    HoodieTestUtils.init(hadoopConf, basePath.getRoot().getAbsolutePath(),
         HoodieTableType.MERGE_ON_READ);
     String baseInstant = "100";
     File partitionDir =
@@ -263,7 +263,7 @@ public class HoodieRealtimeRecordReaderTest {
   public void testUnMergedReader() throws Exception {
     // initial commit
     Schema schema = HoodieAvroUtils.addMetadataFields(SchemaTestUtil.getEvolvedSchema());
-    HoodieTestUtils.initTableType(hadoopConf, basePath.getRoot().getAbsolutePath(),
+    HoodieTestUtils.init(hadoopConf, basePath.getRoot().getAbsolutePath(),
         HoodieTableType.MERGE_ON_READ);
     String commitTime = "100";
     final int numRecords = 1000;
@@ -347,7 +347,7 @@ public class HoodieRealtimeRecordReaderTest {
   public void testReaderWithNestedAndComplexSchema() throws Exception {
     // initial commit
     Schema schema = HoodieAvroUtils.addMetadataFields(SchemaTestUtil.getComplexEvolvedSchema());
-    HoodieTestUtils.initTableType(hadoopConf, basePath.getRoot().getAbsolutePath(),
+    HoodieTestUtils.init(hadoopConf, basePath.getRoot().getAbsolutePath(),
         HoodieTableType.MERGE_ON_READ);
     String commitTime = "100";
     int numberOfRecords = 100;
@@ -489,7 +489,7 @@ public class HoodieRealtimeRecordReaderTest {
     // initial commit
     List<String> logFilePaths = new ArrayList<>();
     Schema schema = HoodieAvroUtils.addMetadataFields(SchemaTestUtil.getSimpleSchema());
-    HoodieTestUtils.initTableType(hadoopConf, basePath.getRoot().getAbsolutePath(),
+    HoodieTestUtils.init(hadoopConf, basePath.getRoot().getAbsolutePath(),
         HoodieTableType.MERGE_ON_READ);
     String commitTime = "100";
     int numberOfRecords = 100;


@@ -131,7 +131,7 @@ public class HDFSParquetImporter implements Serializable {
       properties.put(HoodieTableConfig.HOODIE_TABLE_NAME_PROP_NAME, cfg.tableName);
       properties.put(HoodieTableConfig.HOODIE_TABLE_TYPE_PROP_NAME, cfg.tableType);
       HoodieTableMetaClient
-          .initializePathAsHoodieDataset(jsc.hadoopConfiguration(), cfg.targetPath, properties);
+          .initDatasetAndGetMetaClient(jsc.hadoopConfiguration(), cfg.targetPath, properties);
       HoodieWriteClient client = UtilHelpers.createHoodieClient(jsc, cfg.targetPath, schemaStr,
           cfg.parallelism, Option.empty(), props);