[HUDI-2472] Enabling Metadata table for some of TestCleaner unit tests (#3803)
- Making use of HoodieTableMetadataWriter when constructing the HoodieMetadataTestTable instance for the test to enable metadata table usage.
This commit is contained in: (branch list not captured in export)
Committed by: GitHub
Parent commit: 35111131c3
This commit: 2eaf0fd939
@@ -269,7 +269,7 @@ public class TestCleaner extends HoodieClientTestBase {
|
|||||||
.withCleanerPolicy(HoodieCleaningPolicy.KEEP_LATEST_FILE_VERSIONS).retainFileVersions(maxVersions).build())
|
.withCleanerPolicy(HoodieCleaningPolicy.KEEP_LATEST_FILE_VERSIONS).retainFileVersions(maxVersions).build())
|
||||||
.withParallelism(1, 1).withBulkInsertParallelism(1).withFinalizeWriteParallelism(1).withDeleteParallelism(1)
|
.withParallelism(1, 1).withBulkInsertParallelism(1).withFinalizeWriteParallelism(1).withDeleteParallelism(1)
|
||||||
.withConsistencyGuardConfig(ConsistencyGuardConfig.newBuilder().withConsistencyCheckEnabled(true).build())
|
.withConsistencyGuardConfig(ConsistencyGuardConfig.newBuilder().withConsistencyCheckEnabled(true).build())
|
||||||
.withMetadataConfig(HoodieMetadataConfig.newBuilder().enable(false).build())
|
.withMetadataConfig(HoodieMetadataConfig.newBuilder().enable(true).build())
|
||||||
.build();
|
.build();
|
||||||
try (SparkRDDWriteClient client = getHoodieWriteClient(cfg);) {
|
try (SparkRDDWriteClient client = getHoodieWriteClient(cfg);) {
|
||||||
|
|
||||||
@@ -439,7 +439,7 @@ public class TestCleaner extends HoodieClientTestBase {
|
|||||||
.withCleanerPolicy(HoodieCleaningPolicy.KEEP_LATEST_COMMITS).retainCommits(maxCommits).build())
|
.withCleanerPolicy(HoodieCleaningPolicy.KEEP_LATEST_COMMITS).retainCommits(maxCommits).build())
|
||||||
.withParallelism(1, 1).withBulkInsertParallelism(1).withFinalizeWriteParallelism(1).withDeleteParallelism(1)
|
.withParallelism(1, 1).withBulkInsertParallelism(1).withFinalizeWriteParallelism(1).withDeleteParallelism(1)
|
||||||
.withConsistencyGuardConfig(ConsistencyGuardConfig.newBuilder().withConsistencyCheckEnabled(true).build())
|
.withConsistencyGuardConfig(ConsistencyGuardConfig.newBuilder().withConsistencyCheckEnabled(true).build())
|
||||||
.withMetadataConfig(HoodieMetadataConfig.newBuilder().enable(false).build())
|
.withMetadataConfig(HoodieMetadataConfig.newBuilder().enable(true).build())
|
||||||
.build();
|
.build();
|
||||||
SparkRDDWriteClient client = getHoodieWriteClient(cfg);
|
SparkRDDWriteClient client = getHoodieWriteClient(cfg);
|
||||||
|
|
||||||
@@ -516,7 +516,7 @@ public class TestCleaner extends HoodieClientTestBase {
|
|||||||
.withCleanerPolicy(HoodieCleaningPolicy.KEEP_LATEST_COMMITS).retainCommits(maxCommits).build())
|
.withCleanerPolicy(HoodieCleaningPolicy.KEEP_LATEST_COMMITS).retainCommits(maxCommits).build())
|
||||||
.withParallelism(1, 1).withBulkInsertParallelism(1).withFinalizeWriteParallelism(1).withDeleteParallelism(1)
|
.withParallelism(1, 1).withBulkInsertParallelism(1).withFinalizeWriteParallelism(1).withDeleteParallelism(1)
|
||||||
.withConsistencyGuardConfig(ConsistencyGuardConfig.newBuilder().withConsistencyCheckEnabled(true).build())
|
.withConsistencyGuardConfig(ConsistencyGuardConfig.newBuilder().withConsistencyCheckEnabled(true).build())
|
||||||
.withMetadataConfig(HoodieMetadataConfig.newBuilder().enable(false).build())
|
.withMetadataConfig(HoodieMetadataConfig.newBuilder().enable(true).build())
|
||||||
.build();
|
.build();
|
||||||
SparkRDDWriteClient client = getHoodieWriteClient(cfg);
|
SparkRDDWriteClient client = getHoodieWriteClient(cfg);
|
||||||
|
|
||||||
@@ -1319,7 +1319,7 @@ public class TestCleaner extends HoodieClientTestBase {
|
|||||||
@Test
|
@Test
|
||||||
public void testCleaningWithZeroPartitionPaths() throws Exception {
|
public void testCleaningWithZeroPartitionPaths() throws Exception {
|
||||||
HoodieWriteConfig config = HoodieWriteConfig.newBuilder().withPath(basePath)
|
HoodieWriteConfig config = HoodieWriteConfig.newBuilder().withPath(basePath)
|
||||||
.withMetadataConfig(HoodieMetadataConfig.newBuilder().withAssumeDatePartitioning(true).enable(false).build())
|
.withMetadataConfig(HoodieMetadataConfig.newBuilder().withAssumeDatePartitioning(true).enable(true).build())
|
||||||
.withCompactionConfig(HoodieCompactionConfig.newBuilder()
|
.withCompactionConfig(HoodieCompactionConfig.newBuilder()
|
||||||
.withCleanerPolicy(HoodieCleaningPolicy.KEEP_LATEST_COMMITS).retainCommits(2).build())
|
.withCleanerPolicy(HoodieCleaningPolicy.KEEP_LATEST_COMMITS).retainCommits(2).build())
|
||||||
.build();
|
.build();
|
||||||
@@ -1327,7 +1327,9 @@ public class TestCleaner extends HoodieClientTestBase {
|
|||||||
// Make a commit, although there are no partitionPaths.
|
// Make a commit, although there are no partitionPaths.
|
||||||
// Example use-case of this is when a client wants to create a table
|
// Example use-case of this is when a client wants to create a table
|
||||||
// with just some commit metadata, but no data/partitionPaths.
|
// with just some commit metadata, but no data/partitionPaths.
|
||||||
HoodieTestTable.of(metaClient).addCommit("000");
|
HoodieTableMetadataWriter metadataWriter = SparkHoodieBackedTableMetadataWriter.create(hadoopConf, config, context);
|
||||||
|
HoodieTestTable testTable = HoodieMetadataTestTable.of(metaClient, metadataWriter);
|
||||||
|
testTable.doWriteOperation("001", WriteOperationType.INSERT, Collections.emptyList(), 1);
|
||||||
|
|
||||||
metaClient = HoodieTableMetaClient.reload(metaClient);
|
metaClient = HoodieTableMetaClient.reload(metaClient);
|
||||||
|
|
||||||
@@ -1341,7 +1343,7 @@ public class TestCleaner extends HoodieClientTestBase {
|
|||||||
@Test
|
@Test
|
||||||
public void testKeepLatestCommitsWithPendingCompactions() throws Exception {
|
public void testKeepLatestCommitsWithPendingCompactions() throws Exception {
|
||||||
HoodieWriteConfig config = HoodieWriteConfig.newBuilder().withPath(basePath)
|
HoodieWriteConfig config = HoodieWriteConfig.newBuilder().withPath(basePath)
|
||||||
.withMetadataConfig(HoodieMetadataConfig.newBuilder().withAssumeDatePartitioning(true).enable(false).build())
|
.withMetadataConfig(HoodieMetadataConfig.newBuilder().withAssumeDatePartitioning(true).enable(true).build())
|
||||||
.withCompactionConfig(HoodieCompactionConfig.newBuilder()
|
.withCompactionConfig(HoodieCompactionConfig.newBuilder()
|
||||||
.withCleanerPolicy(HoodieCleaningPolicy.KEEP_LATEST_COMMITS).retainCommits(2).build())
|
.withCleanerPolicy(HoodieCleaningPolicy.KEEP_LATEST_COMMITS).retainCommits(2).build())
|
||||||
.build();
|
.build();
|
||||||
@@ -1365,7 +1367,7 @@ public class TestCleaner extends HoodieClientTestBase {
|
|||||||
public void testKeepLatestVersionsWithPendingCompactions(boolean retryFailure) throws Exception {
|
public void testKeepLatestVersionsWithPendingCompactions(boolean retryFailure) throws Exception {
|
||||||
HoodieWriteConfig config =
|
HoodieWriteConfig config =
|
||||||
HoodieWriteConfig.newBuilder().withPath(basePath)
|
HoodieWriteConfig.newBuilder().withPath(basePath)
|
||||||
.withMetadataConfig(HoodieMetadataConfig.newBuilder().withAssumeDatePartitioning(true).build())
|
.withMetadataConfig(HoodieMetadataConfig.newBuilder().withAssumeDatePartitioning(true).enable(true).build())
|
||||||
.withCompactionConfig(HoodieCompactionConfig.newBuilder()
|
.withCompactionConfig(HoodieCompactionConfig.newBuilder()
|
||||||
.withCleanerPolicy(HoodieCleaningPolicy.KEEP_LATEST_FILE_VERSIONS).retainFileVersions(2).build())
|
.withCleanerPolicy(HoodieCleaningPolicy.KEEP_LATEST_FILE_VERSIONS).retainFileVersions(2).build())
|
||||||
.build();
|
.build();
|
||||||
|
|||||||
Reference in New Issue
Block a user