1
0

[HUDI-744] Restructure hudi-common and clean up files under util packages (#1462)

- Brings more order and cohesion to the classes in hudi-common
 - Utils classes related to a particular concept (avro, timeline,...) are placed near to the package
 - common.fs package now contains all the filesystem level classes including wrapper filesystem
 - bloom.filter package renamed to just bloom
 - config package contains classes that help store properties
 - common.fs.inline package contains all the inline filesystem classes/impl
 - common.table.timeline now consolidates all timeline related classes
 - common.table.view consolidates all the classes related to filesystem view metadata
 - common.table.timeline.versioning contains all classes related to versioning of timeline
 - Fix a few unit tests as a result
 - Moved the test packages around to match the source file move
 - Rename AvroUtils to TimelineMetadataUtils & minor fixes/typos
This commit is contained in:
vinoth chandar
2020-03-29 10:58:49 -07:00
committed by GitHub
parent 07c3c5d797
commit e057c27603
269 changed files with 541 additions and 538 deletions

View File

@@ -16,7 +16,7 @@
* limitations under the License.
*/
package org.apache.hudi.common.util;
package org.apache.hudi.avro;
import org.apache.avro.Schema;
import org.codehaus.jackson.JsonNode;

View File

@@ -21,8 +21,8 @@ package org.apache.hudi.common;
import org.apache.hudi.common.model.HoodieTableType;
import org.apache.hudi.common.model.HoodieTestUtils;
import org.apache.hudi.common.table.HoodieTableMetaClient;
import org.apache.hudi.common.table.HoodieTimeline;
import org.apache.hudi.common.table.SyncableFileSystemView;
import org.apache.hudi.common.table.timeline.HoodieTimeline;
import org.apache.hudi.common.table.view.SyncableFileSystemView;
import org.apache.hudi.common.table.view.HoodieTableFileSystemView;
import org.junit.Rule;

View File

@@ -16,7 +16,7 @@
* limitations under the License.
*/
package org.apache.hudi.common.bloom.filter;
package org.apache.hudi.common.bloom;
import org.junit.Assert;
import org.junit.Test;

View File

@@ -16,7 +16,7 @@
* limitations under the License.
*/
package org.apache.hudi.common.bloom.filter;
package org.apache.hudi.common.bloom;
import org.apache.hadoop.util.hash.Hash;
import org.junit.Assert;

View File

@@ -16,13 +16,13 @@
* limitations under the License.
*/
package org.apache.hudi.common.util;
package org.apache.hudi.common.fs;
import org.apache.hudi.common.HoodieCommonTestHarness;
import org.apache.hudi.common.model.HoodieLogFile;
import org.apache.hudi.common.model.HoodieTestUtils;
import org.apache.hudi.common.table.HoodieTableMetaClient;
import org.apache.hudi.common.table.HoodieTimeline;
import org.apache.hudi.common.table.timeline.HoodieTimeline;
import org.apache.hudi.common.table.timeline.HoodieInstant;
import org.apache.hudi.exception.HoodieException;

View File

@@ -16,7 +16,7 @@
* limitations under the License.
*/
package org.apache.hudi.common.inline.fs;
package org.apache.hudi.common.fs.inline;
import org.apache.hadoop.fs.Path;

View File

@@ -16,7 +16,7 @@
* limitations under the License.
*/
package org.apache.hudi.common.inline.fs;
package org.apache.hudi.common.fs.inline;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
@@ -42,10 +42,10 @@ import java.util.HashSet;
import java.util.Set;
import java.util.UUID;
import static org.apache.hudi.common.inline.fs.FileSystemTestUtils.FILE_SCHEME;
import static org.apache.hudi.common.inline.fs.FileSystemTestUtils.RANDOM;
import static org.apache.hudi.common.inline.fs.FileSystemTestUtils.getPhantomFile;
import static org.apache.hudi.common.inline.fs.FileSystemTestUtils.getRandomOuterInMemPath;
import static org.apache.hudi.common.fs.inline.FileSystemTestUtils.FILE_SCHEME;
import static org.apache.hudi.common.fs.inline.FileSystemTestUtils.RANDOM;
import static org.apache.hudi.common.fs.inline.FileSystemTestUtils.getPhantomFile;
import static org.apache.hudi.common.fs.inline.FileSystemTestUtils.getRandomOuterInMemPath;
/**
* Tests {@link InLineFileSystem} to inline HFile.

View File

@@ -16,7 +16,7 @@
* limitations under the License.
*/
package org.apache.hudi.common.inline.fs;
package org.apache.hudi.common.fs.inline;
import org.apache.hudi.common.util.collection.Pair;
@@ -38,8 +38,8 @@ import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import static org.apache.hudi.common.inline.fs.FileSystemTestUtils.RANDOM;
import static org.apache.hudi.common.inline.fs.FileSystemTestUtils.getRandomOuterFSPath;
import static org.apache.hudi.common.fs.inline.FileSystemTestUtils.RANDOM;
import static org.apache.hudi.common.fs.inline.FileSystemTestUtils.getRandomOuterFSPath;
/**
* Tests {@link InLineFileSystem}.

View File

@@ -16,7 +16,7 @@
* limitations under the License.
*/
package org.apache.hudi.common.inline.fs;
package org.apache.hudi.common.fs.inline;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
@@ -27,8 +27,8 @@ import org.junit.Test;
import java.io.IOException;
import java.net.URI;
import static org.apache.hudi.common.inline.fs.FileSystemTestUtils.RANDOM;
import static org.apache.hudi.common.inline.fs.FileSystemTestUtils.getRandomOuterInMemPath;
import static org.apache.hudi.common.fs.inline.FileSystemTestUtils.RANDOM;
import static org.apache.hudi.common.fs.inline.FileSystemTestUtils.getRandomOuterInMemPath;
/**
* Unit tests {@link InMemoryFileSystem}.

View File

@@ -54,7 +54,7 @@ public class HdfsTestService {
private MiniDFSCluster miniDfsCluster;
public HdfsTestService() throws IOException {
workDir = Files.createTempDirectory("temp").getName(0).toString();
workDir = Files.createTempDirectory("temp").toFile().getAbsolutePath();
}
public Configuration getHadoopConf() {

View File

@@ -18,7 +18,7 @@
package org.apache.hudi.common.model;
import org.apache.hudi.common.util.HoodieAvroUtils;
import org.apache.hudi.avro.HoodieAvroUtils;
import org.apache.hudi.common.util.Option;
import org.apache.hudi.exception.HoodieIOException;

View File

@@ -26,7 +26,7 @@ import org.apache.hudi.common.HoodieCleanStat;
import org.apache.hudi.common.model.HoodieWriteStat.RuntimeStats;
import org.apache.hudi.common.table.HoodieTableConfig;
import org.apache.hudi.common.table.HoodieTableMetaClient;
import org.apache.hudi.common.table.HoodieTimeline;
import org.apache.hudi.common.table.timeline.HoodieTimeline;
import org.apache.hudi.common.table.log.HoodieLogFormat;
import org.apache.hudi.common.table.log.HoodieLogFormat.Writer;
import org.apache.hudi.common.table.log.block.HoodieAvroDataBlock;
@@ -34,11 +34,11 @@ import org.apache.hudi.common.table.log.block.HoodieLogBlock;
import org.apache.hudi.common.table.timeline.HoodieActiveTimeline;
import org.apache.hudi.common.table.timeline.HoodieInstant;
import org.apache.hudi.common.table.timeline.HoodieInstant.State;
import org.apache.hudi.common.util.AvroUtils;
import org.apache.hudi.common.table.timeline.TimelineMetadataUtils;
import org.apache.hudi.common.util.CleanerUtils;
import org.apache.hudi.common.util.CompactionUtils;
import org.apache.hudi.common.util.FSUtils;
import org.apache.hudi.common.util.HoodieAvroUtils;
import org.apache.hudi.common.fs.FSUtils;
import org.apache.hudi.avro.HoodieAvroUtils;
import org.apache.hudi.common.util.Option;
import org.apache.hudi.common.util.collection.Pair;
import org.apache.hudi.exception.HoodieIOException;
@@ -162,7 +162,7 @@ public class HoodieTestUtils {
metaClient.getBasePath() + "/" + HoodieTableMetaClient.METAFOLDER_NAME + "/" + f);
os = metaClient.getFs().create(commitFile, true);
// Write empty clean metadata
os.write(AvroUtils.serializeCleanerPlan(
os.write(TimelineMetadataUtils.serializeCleanerPlan(
new HoodieCleanerPlan(new HoodieActionInstant("", "", ""), "", new HashMap<>(), 1)).get());
} catch (IOException ioe) {
throw new HoodieIOException(ioe.getMessage(), ioe);
@@ -241,7 +241,7 @@ public class HoodieTestUtils {
HoodieCompactionPlan plan = CompactionUtils.buildFromFileSlices(fileSliceList, Option.empty(), Option.empty());
HoodieInstant compactionInstant = new HoodieInstant(State.REQUESTED, HoodieTimeline.COMPACTION_ACTION, instant);
metaClient.getActiveTimeline().saveToCompactionRequested(compactionInstant,
AvroUtils.serializeCompactionPlan(plan));
TimelineMetadataUtils.serializeCompactionPlan(plan));
}
public static String getDataFilePath(String basePath, String partitionPath, String instantTime, String fileID) {
@@ -306,7 +306,7 @@ public class HoodieTestUtils {
HoodieCleanMetadata cleanMetadata =
CleanerUtils.convertCleanMetadata(metaClient, instantTime, Option.of(0L), Collections.singletonList(cleanStats));
// Write empty clean metadata
os.write(AvroUtils.serializeCleanMetadata(cleanMetadata).get());
os.write(TimelineMetadataUtils.serializeCleanMetadata(cleanMetadata).get());
}
}

View File

@@ -18,7 +18,7 @@
package org.apache.hudi.common.model;
import org.apache.hudi.common.util.FSUtils;
import org.apache.hudi.common.fs.FSUtils;
import org.apache.hadoop.fs.Path;
import org.junit.Test;

View File

@@ -18,6 +18,7 @@
package org.apache.hudi.common.storage;
import org.apache.hudi.common.fs.StorageSchemes;
import org.junit.Test;
import static org.junit.Assert.assertFalse;

View File

@@ -22,6 +22,7 @@ import org.apache.hudi.common.HoodieCommonTestHarness;
import org.apache.hudi.common.model.HoodieTestUtils;
import org.apache.hudi.common.table.timeline.HoodieActiveTimeline;
import org.apache.hudi.common.table.timeline.HoodieInstant;
import org.apache.hudi.common.table.timeline.HoodieTimeline;
import org.apache.hudi.common.util.Option;
import org.junit.Before;

View File

@@ -18,10 +18,12 @@
package org.apache.hudi.common.table;
import org.apache.hudi.common.model.TimelineLayoutVersion;
import org.apache.hudi.common.table.timeline.versioning.TimelineLayoutVersion;
import org.apache.hudi.common.table.timeline.HoodieInstant;
import org.apache.hudi.common.table.timeline.HoodieInstant.State;
import org.apache.hudi.common.table.timeline.HoodieTimeline;
import org.apache.hudi.common.table.timeline.TimelineLayout;
import org.junit.Assert;
import org.junit.Test;

View File

@@ -34,8 +34,8 @@ import org.apache.hudi.common.table.log.block.HoodieDeleteBlock;
import org.apache.hudi.common.table.log.block.HoodieLogBlock;
import org.apache.hudi.common.table.log.block.HoodieLogBlock.HeaderMetadataType;
import org.apache.hudi.common.table.log.block.HoodieLogBlock.HoodieLogBlockType;
import org.apache.hudi.common.util.FSUtils;
import org.apache.hudi.common.util.HoodieAvroUtils;
import org.apache.hudi.common.fs.FSUtils;
import org.apache.hudi.avro.HoodieAvroUtils;
import org.apache.hudi.common.util.SchemaTestUtil;
import org.apache.hudi.exception.CorruptedLogFileException;

View File

@@ -16,11 +16,7 @@
* limitations under the License.
*/
package org.apache.hudi.common.table.string;
import org.apache.hudi.common.table.HoodieTimeline;
import org.apache.hudi.common.table.timeline.HoodieActiveTimeline;
import org.apache.hudi.common.table.timeline.HoodieInstant;
package org.apache.hudi.common.table.timeline;
import java.util.Comparator;
import java.util.stream.Collectors;

View File

@@ -16,15 +16,12 @@
* limitations under the License.
*/
package org.apache.hudi.common.table.string;
package org.apache.hudi.common.table.timeline;
import org.apache.hudi.common.HoodieCommonTestHarness;
import org.apache.hudi.common.model.HoodieTestUtils;
import org.apache.hudi.common.model.TimelineLayoutVersion;
import org.apache.hudi.common.table.timeline.versioning.TimelineLayoutVersion;
import org.apache.hudi.common.table.HoodieTableMetaClient;
import org.apache.hudi.common.table.HoodieTimeline;
import org.apache.hudi.common.table.timeline.HoodieActiveTimeline;
import org.apache.hudi.common.table.timeline.HoodieInstant;
import org.apache.hudi.common.table.timeline.HoodieInstant.State;
import org.apache.hudi.common.util.CollectionUtils;
import org.apache.hudi.common.util.Option;
@@ -47,7 +44,7 @@ import java.util.function.Supplier;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import static org.apache.hudi.common.model.TimelineLayoutVersion.VERSION_0;
import static org.apache.hudi.common.table.timeline.versioning.TimelineLayoutVersion.VERSION_0;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;

View File

@@ -27,16 +27,15 @@ import org.apache.hudi.common.model.HoodieFileGroup;
import org.apache.hudi.common.model.HoodieFileGroupId;
import org.apache.hudi.common.model.HoodieLogFile;
import org.apache.hudi.common.model.HoodieTableType;
import org.apache.hudi.common.table.HoodieTimeline;
import org.apache.hudi.common.table.SyncableFileSystemView;
import org.apache.hudi.common.table.TableFileSystemView.BaseFileOnlyView;
import org.apache.hudi.common.table.TableFileSystemView.SliceView;
import org.apache.hudi.common.table.timeline.HoodieTimeline;
import org.apache.hudi.common.table.timeline.TimelineMetadataUtils;
import org.apache.hudi.common.table.view.TableFileSystemView.BaseFileOnlyView;
import org.apache.hudi.common.table.view.TableFileSystemView.SliceView;
import org.apache.hudi.common.table.timeline.HoodieActiveTimeline;
import org.apache.hudi.common.table.timeline.HoodieInstant;
import org.apache.hudi.common.table.timeline.HoodieInstant.State;
import org.apache.hudi.common.util.AvroUtils;
import org.apache.hudi.common.util.CompactionUtils;
import org.apache.hudi.common.util.FSUtils;
import org.apache.hudi.common.fs.FSUtils;
import org.apache.hudi.common.util.Option;
import org.apache.hudi.common.util.collection.Pair;
@@ -279,11 +278,11 @@ public class TestHoodieTableFileSystemView extends HoodieCommonTestHarness {
new File(basePath + "/" + partitionPath + "/" + compactDataFileName).createNewFile();
compactionInstant = new HoodieInstant(State.INFLIGHT, HoodieTimeline.COMPACTION_ACTION, compactionRequestedTime);
HoodieInstant requested = HoodieTimeline.getCompactionRequestedInstant(compactionInstant.getTimestamp());
commitTimeline.saveToCompactionRequested(requested, AvroUtils.serializeCompactionPlan(compactionPlan));
commitTimeline.saveToCompactionRequested(requested, TimelineMetadataUtils.serializeCompactionPlan(compactionPlan));
commitTimeline.transitionCompactionRequestedToInflight(requested);
} else {
compactionInstant = new HoodieInstant(State.REQUESTED, HoodieTimeline.COMPACTION_ACTION, compactionRequestedTime);
commitTimeline.saveToCompactionRequested(compactionInstant, AvroUtils.serializeCompactionPlan(compactionPlan));
commitTimeline.saveToCompactionRequested(compactionInstant, TimelineMetadataUtils.serializeCompactionPlan(compactionPlan));
}
// View immediately after scheduling compaction
@@ -1067,7 +1066,7 @@ public class TestHoodieTableFileSystemView extends HoodieCommonTestHarness {
new HoodieInstant(State.INFLIGHT, HoodieTimeline.COMPACTION_ACTION, compactionRequestedTime);
HoodieInstant requested = HoodieTimeline.getCompactionRequestedInstant(compactionInstant.getTimestamp());
metaClient.getActiveTimeline().saveToCompactionRequested(requested,
AvroUtils.serializeCompactionPlan(compactionPlan));
TimelineMetadataUtils.serializeCompactionPlan(compactionPlan));
metaClient.getActiveTimeline().transitionCompactionRequestedToInflight(requested);
// Fake delta-ingestion after compaction-requested

View File

@@ -35,15 +35,14 @@ import org.apache.hudi.common.model.HoodieFileGroupId;
import org.apache.hudi.common.model.HoodieTableType;
import org.apache.hudi.common.model.HoodieWriteStat;
import org.apache.hudi.common.table.HoodieTableMetaClient;
import org.apache.hudi.common.table.HoodieTimeline;
import org.apache.hudi.common.table.SyncableFileSystemView;
import org.apache.hudi.common.table.timeline.HoodieTimeline;
import org.apache.hudi.common.table.timeline.HoodieInstant;
import org.apache.hudi.common.table.timeline.HoodieInstant.State;
import org.apache.hudi.common.util.AvroUtils;
import org.apache.hudi.common.table.timeline.TimelineMetadataUtils;
import org.apache.hudi.common.util.CleanerUtils;
import org.apache.hudi.common.util.CollectionUtils;
import org.apache.hudi.common.util.CompactionUtils;
import org.apache.hudi.common.util.FSUtils;
import org.apache.hudi.common.fs.FSUtils;
import org.apache.hudi.common.util.Option;
import org.apache.hudi.common.util.ValidationUtils;
import org.apache.hudi.common.util.collection.Pair;
@@ -70,7 +69,7 @@ import java.util.UUID;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import static org.apache.hudi.common.table.HoodieTimeline.COMPACTION_ACTION;
import static org.apache.hudi.common.table.timeline.HoodieTimeline.COMPACTION_ACTION;
/**
* Tests incremental file system view sync.
@@ -418,7 +417,7 @@ public class TestIncrementalFSViewSync extends HoodieCommonTestHarness {
HoodieCleanMetadata cleanMetadata = CleanerUtils
.convertCleanMetadata(metaClient, cleanInstant, Option.empty(), cleanStats);
metaClient.getActiveTimeline().saveAsComplete(cleanInflightInstant,
AvroUtils.serializeCleanMetadata(cleanMetadata));
TimelineMetadataUtils.serializeCleanMetadata(cleanMetadata));
}
/**
@@ -439,7 +438,7 @@ public class TestIncrementalFSViewSync extends HoodieCommonTestHarness {
rollbacks.add(instant);
HoodieRollbackMetadata rollbackMetadata =
AvroUtils.convertRollbackMetadata(rollbackInstant, Option.empty(), rollbacks, rollbackStats);
TimelineMetadataUtils.convertRollbackMetadata(rollbackInstant, Option.empty(), rollbacks, rollbackStats);
if (isRestore) {
HoodieRestoreMetadata metadata = new HoodieRestoreMetadata();
@@ -453,13 +452,13 @@ public class TestIncrementalFSViewSync extends HoodieCommonTestHarness {
HoodieInstant restoreInstant = new HoodieInstant(true, HoodieTimeline.RESTORE_ACTION, rollbackInstant);
metaClient.getActiveTimeline().createNewInstant(restoreInstant);
metaClient.getActiveTimeline().saveAsComplete(restoreInstant, AvroUtils.serializeRestoreMetadata(metadata));
metaClient.getActiveTimeline().saveAsComplete(restoreInstant, TimelineMetadataUtils.serializeRestoreMetadata(metadata));
} else {
metaClient.getActiveTimeline().createNewInstant(
new HoodieInstant(true, HoodieTimeline.ROLLBACK_ACTION, rollbackInstant));
metaClient.getActiveTimeline().saveAsComplete(
new HoodieInstant(true, HoodieTimeline.ROLLBACK_ACTION, rollbackInstant),
AvroUtils.serializeRollbackMetadata(rollbackMetadata));
TimelineMetadataUtils.serializeRollbackMetadata(rollbackMetadata));
}
}
@@ -501,7 +500,7 @@ public class TestIncrementalFSViewSync extends HoodieCommonTestHarness {
HoodieCompactionPlan plan = CompactionUtils.buildFromFileSlices(slices, Option.empty(), Option.empty());
HoodieInstant compactionInstant = new HoodieInstant(State.REQUESTED, HoodieTimeline.COMPACTION_ACTION, instantTime);
metaClient.getActiveTimeline().saveToCompactionRequested(compactionInstant,
AvroUtils.serializeCompactionPlan(plan));
TimelineMetadataUtils.serializeCompactionPlan(plan));
view.sync();
partitions.forEach(p -> {

View File

@@ -22,9 +22,8 @@ import org.apache.hudi.common.model.CompactionOperation;
import org.apache.hudi.common.model.FileSlice;
import org.apache.hudi.common.model.HoodieBaseFile;
import org.apache.hudi.common.model.HoodieFileGroup;
import org.apache.hudi.common.table.HoodieTimeline;
import org.apache.hudi.common.table.SyncableFileSystemView;
import org.apache.hudi.common.table.string.MockHoodieTimeline;
import org.apache.hudi.common.table.timeline.HoodieTimeline;
import org.apache.hudi.common.table.timeline.MockHoodieTimeline;
import org.apache.hudi.common.table.timeline.HoodieInstant;
import org.apache.hudi.common.util.Option;
import org.apache.hudi.common.util.collection.ImmutablePair;

View File

@@ -19,8 +19,7 @@
package org.apache.hudi.common.table.view;
import org.apache.hudi.common.table.HoodieTableMetaClient;
import org.apache.hudi.common.table.HoodieTimeline;
import org.apache.hudi.common.table.SyncableFileSystemView;
import org.apache.hudi.common.table.timeline.HoodieTimeline;
import java.io.IOException;

View File

@@ -18,8 +18,7 @@
package org.apache.hudi.common.table.view;
import org.apache.hudi.common.table.HoodieTimeline;
import org.apache.hudi.common.table.SyncableFileSystemView;
import org.apache.hudi.common.table.timeline.HoodieTimeline;
import java.io.IOException;

View File

@@ -18,8 +18,7 @@
package org.apache.hudi.common.table.view;
import org.apache.hudi.common.table.HoodieTimeline;
import org.apache.hudi.common.table.SyncableFileSystemView;
import org.apache.hudi.common.table.timeline.HoodieTimeline;
/**
* Tests spillable map based file system view {@link SyncableFileSystemView}.

View File

@@ -19,8 +19,7 @@
package org.apache.hudi.common.table.view;
import org.apache.hudi.common.table.HoodieTableMetaClient;
import org.apache.hudi.common.table.HoodieTimeline;
import org.apache.hudi.common.table.SyncableFileSystemView;
import org.apache.hudi.common.table.timeline.HoodieTimeline;
/**
* Tests spillable map based incremental fs view sync.

View File

@@ -18,8 +18,10 @@
package org.apache.hudi.common.util;
import org.apache.hudi.common.table.timeline.TimelineMetadataUtils;
import org.apache.hudi.avro.model.HoodieCompactionOperation;
import org.apache.hudi.avro.model.HoodieCompactionPlan;
import org.apache.hudi.common.fs.FSUtils;
import org.apache.hudi.common.model.FileSlice;
import org.apache.hudi.common.model.HoodieBaseFile;
import org.apache.hudi.common.model.HoodieFileGroupId;
@@ -45,8 +47,8 @@ import java.util.stream.IntStream;
import java.util.stream.Stream;
import static org.apache.hudi.common.model.HoodieTestUtils.DEFAULT_PARTITION_PATHS;
import static org.apache.hudi.common.table.HoodieTimeline.COMPACTION_ACTION;
import static org.apache.hudi.common.table.HoodieTimeline.DELTA_COMMIT_ACTION;
import static org.apache.hudi.common.table.timeline.HoodieTimeline.COMPACTION_ACTION;
import static org.apache.hudi.common.table.timeline.HoodieTimeline.DELTA_COMMIT_ACTION;
/**
* The utility class to support testing compaction.
@@ -128,7 +130,7 @@ public class CompactionTestUtils {
HoodieCompactionPlan compactionPlan) throws IOException {
metaClient.getActiveTimeline().saveToCompactionRequested(
new HoodieInstant(State.REQUESTED, COMPACTION_ACTION, instantTime),
AvroUtils.serializeCompactionPlan(compactionPlan));
TimelineMetadataUtils.serializeCompactionPlan(compactionPlan));
}
public static void createDeltaCommit(HoodieTableMetaClient metaClient, String instantTime) {

View File

@@ -18,6 +18,7 @@
package org.apache.hudi.common.util;
import org.apache.hudi.avro.HoodieAvroUtils;
import org.apache.hudi.avro.MercifulJsonConverter;
import org.apache.hudi.common.model.HoodieAvroPayload;
import org.apache.hudi.common.model.HoodieKey;

View File

@@ -21,6 +21,7 @@ package org.apache.hudi.common.util;
import org.apache.hudi.avro.model.HoodieCompactionOperation;
import org.apache.hudi.avro.model.HoodieCompactionPlan;
import org.apache.hudi.common.HoodieCommonTestHarness;
import org.apache.hudi.common.fs.FSUtils;
import org.apache.hudi.common.model.FileSlice;
import org.apache.hudi.common.model.HoodieBaseFile;
import org.apache.hudi.common.model.HoodieFileGroupId;
@@ -29,7 +30,7 @@ import org.apache.hudi.common.model.HoodieTableType;
import org.apache.hudi.common.table.HoodieTableMetaClient;
import org.apache.hudi.common.util.CompactionTestUtils.TestHoodieBaseFile;
import org.apache.hudi.common.util.collection.Pair;
import org.apache.hudi.common.versioning.compaction.CompactionPlanMigrator;
import org.apache.hudi.common.table.timeline.versioning.compaction.CompactionPlanMigrator;
import org.apache.hadoop.fs.Path;
import org.junit.Assert;

View File

@@ -23,6 +23,8 @@ import org.apache.hudi.common.minicluster.HdfsTestService;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hudi.common.config.DFSPropertiesConfiguration;
import org.apache.hudi.common.config.TypedProperties;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;

View File

@@ -18,11 +18,12 @@
package org.apache.hudi.common.util;
import org.apache.hudi.avro.HoodieAvroUtils;
import org.apache.hudi.avro.HoodieAvroWriteSupport;
import org.apache.hudi.common.HoodieCommonTestHarness;
import org.apache.hudi.common.bloom.filter.BloomFilter;
import org.apache.hudi.common.bloom.filter.BloomFilterFactory;
import org.apache.hudi.common.bloom.filter.BloomFilterTypeCode;
import org.apache.hudi.common.bloom.BloomFilter;
import org.apache.hudi.common.bloom.BloomFilterFactory;
import org.apache.hudi.common.bloom.BloomFilterTypeCode;
import org.apache.hudi.common.model.HoodieRecord;
import org.apache.hudi.common.model.HoodieTestUtils;

View File

@@ -25,7 +25,7 @@ import org.apache.hudi.common.model.HoodieKey;
import org.apache.hudi.common.model.HoodieRecord;
import org.apache.hudi.common.model.HoodieRecordPayload;
import org.apache.hudi.common.table.timeline.HoodieActiveTimeline;
import org.apache.hudi.common.util.HoodieAvroUtils;
import org.apache.hudi.avro.HoodieAvroUtils;
import org.apache.hudi.common.util.HoodieRecordSizeEstimator;
import org.apache.hudi.common.util.Option;
import org.apache.hudi.common.util.SchemaTestUtil;

View File

@@ -25,7 +25,7 @@ import org.apache.hudi.common.model.HoodieRecord;
import org.apache.hudi.common.model.HoodieRecordPayload;
import org.apache.hudi.common.table.timeline.HoodieActiveTimeline;
import org.apache.hudi.common.util.DefaultSizeEstimator;
import org.apache.hudi.common.util.HoodieAvroUtils;
import org.apache.hudi.avro.HoodieAvroUtils;
import org.apache.hudi.common.util.HoodieRecordSizeEstimator;
import org.apache.hudi.common.util.Option;
import org.apache.hudi.common.util.SchemaTestUtil;

View File

@@ -16,10 +16,9 @@
* limitations under the License.
*/
package org.apache.hudi.common.util;
package org.apache.hudi.common.util.collection;
import org.apache.hudi.common.table.view.FileSystemViewStorageConfig;
import org.apache.hudi.common.util.collection.Pair;
import org.junit.AfterClass;
import org.junit.Assert;