1
0

[HUDI-744] Restructure hudi-common and clean up files under util packages (#1462)

- Brings more order and cohesion to the classes in hudi-common
 - Utils classes related to a particular concept (avro, timeline, ...) are placed near the relevant package
 - common.fs package now contains all the filesystem level classes including wrapper filesystem
 - bloom.filter package renamed to just bloom
 - config package contains classes that help store properties
 - common.fs.inline package contains all the inline filesystem classes/impl
 - common.table.timeline now consolidates all timeline related classes
 - common.table.view consolidates all the classes related to filesystem view metadata
 - common.table.timeline.versioning contains all classes related to versioning of timeline
 - Fix a few unit tests as a result
 - Moved the test packages around to match the source file move
 - Rename AvroUtils to TimelineMetadataUtils & minor fixes/typos
This commit is contained in:
vinoth chandar
2020-03-29 10:58:49 -07:00
committed by GitHub
parent 07c3c5d797
commit e057c27603
269 changed files with 541 additions and 538 deletions

View File

@@ -16,7 +16,7 @@
* limitations under the License.
*/
package org.apache.hudi.common.util;
package org.apache.hudi.avro;
import org.apache.hudi.common.model.HoodieRecord;
import org.apache.hudi.exception.HoodieIOException;

View File

@@ -18,8 +18,8 @@
package org.apache.hudi.avro;
import org.apache.hudi.common.bloom.filter.BloomFilter;
import org.apache.hudi.common.bloom.filter.HoodieDynamicBoundedBloomFilter;
import org.apache.hudi.common.bloom.BloomFilter;
import org.apache.hudi.common.bloom.HoodieDynamicBoundedBloomFilter;
import org.apache.avro.Schema;
import org.apache.parquet.avro.AvroWriteSupport;

View File

@@ -16,7 +16,7 @@
* limitations under the License.
*/
package org.apache.hudi.common.bloom.filter;
package org.apache.hudi.common.bloom;
/**
* A Bloom filter interface.

View File

@@ -16,7 +16,7 @@
* limitations under the License.
*/
package org.apache.hudi.common.bloom.filter;
package org.apache.hudi.common.bloom;
import org.apache.hadoop.util.hash.Hash;

View File

@@ -16,7 +16,7 @@
* limitations under the License.
*/
package org.apache.hudi.common.bloom.filter;
package org.apache.hudi.common.bloom;
/**
* Bloom filter type codes.

View File

@@ -16,7 +16,7 @@
* limitations under the License.
*/
package org.apache.hudi.common.bloom.filter;
package org.apache.hudi.common.bloom;
/**
* Bloom filter utils.

View File

@@ -16,7 +16,7 @@
* limitations under the License.
*/
package org.apache.hudi.common.bloom.filter;
package org.apache.hudi.common.bloom;
import org.apache.hudi.common.util.Base64CodecUtil;
import org.apache.hudi.exception.HoodieIndexException;

View File

@@ -16,7 +16,7 @@
* limitations under the License.
*/
package org.apache.hudi.common.bloom.filter;
package org.apache.hudi.common.bloom;
import org.apache.hadoop.util.bloom.BloomFilter;
import org.apache.hadoop.util.bloom.Key;

View File

@@ -16,7 +16,7 @@
* limitations under the License.
*/
package org.apache.hudi.common.bloom.filter;
package org.apache.hudi.common.bloom;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.util.bloom.HashFunction;

View File

@@ -16,7 +16,7 @@
* limitations under the License.
*/
package org.apache.hudi.common.bloom.filter;
package org.apache.hudi.common.bloom;
import org.apache.hudi.common.util.Base64CodecUtil;
import org.apache.hudi.exception.HoodieIndexException;

View File

@@ -16,7 +16,7 @@
* limitations under the License.
*/
package org.apache.hudi.common.util;
package org.apache.hudi.common.config;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

View File

@@ -16,7 +16,7 @@
* limitations under the License.
*/
package org.apache.hudi.config;
package org.apache.hudi.common.config;
import java.io.Serializable;
import java.util.Properties;

View File

@@ -16,7 +16,7 @@
* limitations under the License.
*/
package org.apache.hudi.common;
package org.apache.hudi.common.config;
import org.apache.hadoop.conf.Configuration;

View File

@@ -16,7 +16,7 @@
* limitations under the License.
*/
package org.apache.hudi.common.util;
package org.apache.hudi.common.config;
import java.io.Serializable;
import java.util.Arrays;

View File

@@ -16,7 +16,7 @@
* limitations under the License.
*/
package org.apache.hudi.common.util;
package org.apache.hudi.common.fs;
import org.apache.hadoop.fs.Path;

View File

@@ -16,9 +16,9 @@
* limitations under the License.
*/
package org.apache.hudi.common.util;
package org.apache.hudi.common.fs;
import org.apache.hudi.config.DefaultHoodieConfig;
import org.apache.hudi.common.config.DefaultHoodieConfig;
import java.io.File;
import java.io.FileReader;

View File

@@ -16,13 +16,15 @@
* limitations under the License.
*/
package org.apache.hudi.common.util;
package org.apache.hudi.common.fs;
import org.apache.hudi.common.model.HoodieFileFormat;
import org.apache.hudi.common.model.HoodieLogFile;
import org.apache.hudi.common.model.HoodiePartitionMetadata;
import org.apache.hudi.common.table.HoodieTableMetaClient;
import org.apache.hudi.common.table.timeline.HoodieInstant;
import org.apache.hudi.common.util.Option;
import org.apache.hudi.common.util.ValidationUtils;
import org.apache.hudi.common.util.collection.Pair;
import org.apache.hudi.exception.HoodieException;
import org.apache.hudi.exception.HoodieIOException;

View File

@@ -16,11 +16,12 @@
* limitations under the License.
*/
package org.apache.hudi.common.util;
package org.apache.hudi.common.fs;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hudi.common.util.ValidationUtils;
import org.apache.log4j.LogManager;
import org.apache.log4j.Logger;

View File

@@ -16,12 +16,8 @@
* limitations under the License.
*/
package org.apache.hudi.common.io.storage;
package org.apache.hudi.common.fs;
import org.apache.hudi.common.storage.StorageSchemes;
import org.apache.hudi.common.util.ConsistencyGuard;
import org.apache.hudi.common.util.FSUtils;
import org.apache.hudi.common.util.NoOpConsistencyGuard;
import org.apache.hudi.exception.HoodieException;
import org.apache.hudi.exception.HoodieIOException;

View File

@@ -16,7 +16,7 @@
* limitations under the License.
*/
package org.apache.hudi.common.util;
package org.apache.hudi.common.fs;
import org.apache.hadoop.fs.Path;

View File

@@ -16,7 +16,7 @@
* limitations under the License.
*/
package org.apache.hudi.common.storage;
package org.apache.hudi.common.fs;
import java.io.DataInputStream;
import java.io.IOException;

View File

@@ -16,7 +16,7 @@
* limitations under the License.
*/
package org.apache.hudi.common.util.collection.io.storage;
package org.apache.hudi.common.fs;
import java.io.BufferedOutputStream;
import java.io.DataOutputStream;

View File

@@ -16,9 +16,8 @@
* limitations under the License.
*/
package org.apache.hudi.common.io.storage;
package org.apache.hudi.common.fs;
import org.apache.hudi.common.util.ConsistencyGuard;
import org.apache.hudi.exception.HoodieException;
import org.apache.hadoop.fs.FSDataOutputStream;

View File

@@ -16,7 +16,7 @@
* limitations under the License.
*/
package org.apache.hudi.common.storage;
package org.apache.hudi.common.fs;
import java.util.Arrays;

View File

@@ -16,7 +16,7 @@
* limitations under the License.
*/
package org.apache.hudi.common.inline.fs;
package org.apache.hudi.common.fs.inline;
import org.apache.hadoop.fs.Path;

View File

@@ -16,7 +16,7 @@
* limitations under the License.
*/
package org.apache.hudi.common.inline.fs;
package org.apache.hudi.common.fs.inline;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;

View File

@@ -16,7 +16,7 @@
* limitations under the License.
*/
package org.apache.hudi.common.inline.fs;
package org.apache.hudi.common.fs.inline;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.ReadOption;

View File

@@ -16,7 +16,7 @@
* limitations under the License.
*/
package org.apache.hudi.common.inline.fs;
package org.apache.hudi.common.fs.inline;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;

View File

@@ -18,7 +18,7 @@
package org.apache.hudi.common.model;
import org.apache.hudi.common.util.HoodieAvroUtils;
import org.apache.hudi.avro.HoodieAvroUtils;
import org.apache.hudi.exception.HoodieException;
import org.apache.hudi.exception.HoodieIOException;

View File

@@ -19,7 +19,7 @@
package org.apache.hudi.common.model;
import org.apache.hudi.avro.model.HoodieCompactionOperation;
import org.apache.hudi.common.util.FSUtils;
import org.apache.hudi.common.fs.FSUtils;
import org.apache.hudi.common.util.Option;
import org.apache.hadoop.fs.Path;

View File

@@ -18,7 +18,7 @@
package org.apache.hudi.common.model;
import org.apache.hudi.common.util.HoodieAvroUtils;
import org.apache.hudi.avro.HoodieAvroUtils;
import org.apache.hudi.common.util.Option;
import org.apache.hudi.exception.HoodieIOException;

View File

@@ -18,7 +18,7 @@
package org.apache.hudi.common.model;
import org.apache.hudi.common.util.FSUtils;
import org.apache.hudi.common.fs.FSUtils;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.Path;

View File

@@ -18,7 +18,7 @@
package org.apache.hudi.common.model;
import org.apache.hudi.common.util.FSUtils;
import org.apache.hudi.common.fs.FSUtils;
import com.fasterxml.jackson.annotation.JsonAutoDetect;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;

View File

@@ -18,7 +18,7 @@
package org.apache.hudi.common.model;
import org.apache.hudi.common.table.HoodieTimeline;
import org.apache.hudi.common.table.timeline.HoodieTimeline;
import org.apache.hudi.common.table.timeline.HoodieInstant;
import org.apache.hudi.common.util.Option;

View File

@@ -18,7 +18,7 @@
package org.apache.hudi.common.model;
import org.apache.hudi.common.util.FSUtils;
import org.apache.hudi.common.fs.FSUtils;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;

View File

@@ -18,7 +18,7 @@
package org.apache.hudi.common.model;
import org.apache.hudi.common.util.HoodieAvroUtils;
import org.apache.hudi.avro.HoodieAvroUtils;
import org.apache.hudi.common.util.Option;
import org.apache.avro.Schema;

View File

@@ -21,7 +21,7 @@ package org.apache.hudi.common.table;
import org.apache.hudi.common.model.HoodieFileFormat;
import org.apache.hudi.common.model.HoodieTableType;
import org.apache.hudi.common.model.OverwriteWithLatestAvroPayload;
import org.apache.hudi.common.model.TimelineLayoutVersion;
import org.apache.hudi.common.table.timeline.versioning.TimelineLayoutVersion;
import org.apache.hudi.common.util.Option;
import org.apache.hudi.exception.HoodieIOException;

View File

@@ -18,17 +18,19 @@
package org.apache.hudi.common.table;
import org.apache.hudi.common.SerializableConfiguration;
import org.apache.hudi.common.io.storage.HoodieWrapperFileSystem;
import org.apache.hudi.common.table.timeline.HoodieTimeline;
import org.apache.hudi.common.table.timeline.TimelineLayout;
import org.apache.hudi.common.config.SerializableConfiguration;
import org.apache.hudi.common.fs.HoodieWrapperFileSystem;
import org.apache.hudi.common.model.HoodieTableType;
import org.apache.hudi.common.model.TimelineLayoutVersion;
import org.apache.hudi.common.table.timeline.versioning.TimelineLayoutVersion;
import org.apache.hudi.common.table.timeline.HoodieActiveTimeline;
import org.apache.hudi.common.table.timeline.HoodieArchivedTimeline;
import org.apache.hudi.common.table.timeline.HoodieInstant;
import org.apache.hudi.common.util.ConsistencyGuardConfig;
import org.apache.hudi.common.util.FSUtils;
import org.apache.hudi.common.util.FailSafeConsistencyGuard;
import org.apache.hudi.common.util.NoOpConsistencyGuard;
import org.apache.hudi.common.fs.ConsistencyGuardConfig;
import org.apache.hudi.common.fs.FSUtils;
import org.apache.hudi.common.fs.FailSafeConsistencyGuard;
import org.apache.hudi.common.fs.NoOpConsistencyGuard;
import org.apache.hudi.common.util.Option;
import org.apache.hudi.common.util.ValidationUtils;
import org.apache.hudi.exception.HoodieException;

View File

@@ -23,7 +23,7 @@ import org.apache.hudi.common.model.HoodieLogFile;
import org.apache.hudi.common.model.HoodieRecord;
import org.apache.hudi.common.model.HoodieRecordPayload;
import org.apache.hudi.common.table.HoodieTableMetaClient;
import org.apache.hudi.common.table.HoodieTimeline;
import org.apache.hudi.common.table.timeline.HoodieTimeline;
import org.apache.hudi.common.table.log.block.HoodieAvroDataBlock;
import org.apache.hudi.common.table.log.block.HoodieCommandBlock;
import org.apache.hudi.common.table.log.block.HoodieDeleteBlock;

View File

@@ -26,7 +26,7 @@ import org.apache.hudi.common.table.log.block.HoodieDeleteBlock;
import org.apache.hudi.common.table.log.block.HoodieLogBlock;
import org.apache.hudi.common.table.log.block.HoodieLogBlock.HeaderMetadataType;
import org.apache.hudi.common.table.log.block.HoodieLogBlock.HoodieLogBlockType;
import org.apache.hudi.common.util.FSUtils;
import org.apache.hudi.common.fs.FSUtils;
import org.apache.hudi.common.util.Option;
import org.apache.hudi.common.util.ValidationUtils;
import org.apache.hudi.exception.CorruptedLogFileException;

View File

@@ -20,7 +20,7 @@ package org.apache.hudi.common.table.log;
import org.apache.hudi.common.model.HoodieLogFile;
import org.apache.hudi.common.table.log.block.HoodieLogBlock;
import org.apache.hudi.common.util.FSUtils;
import org.apache.hudi.common.fs.FSUtils;
import org.apache.hudi.common.util.Option;
import org.apache.hudi.common.util.collection.Pair;

View File

@@ -19,11 +19,11 @@
package org.apache.hudi.common.table.log;
import org.apache.hudi.common.model.HoodieLogFile;
import org.apache.hudi.common.storage.StorageSchemes;
import org.apache.hudi.common.fs.StorageSchemes;
import org.apache.hudi.common.table.log.HoodieLogFormat.Writer;
import org.apache.hudi.common.table.log.HoodieLogFormat.WriterBuilder;
import org.apache.hudi.common.table.log.block.HoodieLogBlock;
import org.apache.hudi.common.util.FSUtils;
import org.apache.hudi.common.fs.FSUtils;
import org.apache.hudi.exception.HoodieException;
import org.apache.hudi.exception.HoodieIOException;

View File

@@ -16,12 +16,12 @@
* limitations under the License.
*/
package org.apache.hudi.common.util;
package org.apache.hudi.common.table.log;
import org.apache.hudi.common.fs.FSUtils;
import org.apache.hudi.common.model.HoodieLogFile;
import org.apache.hudi.common.table.HoodieTableMetaClient;
import org.apache.hudi.common.table.HoodieTimeline;
import org.apache.hudi.common.table.log.HoodieLogFormat;
import org.apache.hudi.common.table.timeline.HoodieTimeline;
import org.apache.hudi.common.table.log.HoodieLogFormat.Reader;
import org.apache.hudi.common.table.log.block.HoodieAvroDataBlock;
import org.apache.hudi.common.table.log.block.HoodieLogBlock;

View File

@@ -19,8 +19,8 @@
package org.apache.hudi.common.table.log.block;
import org.apache.hudi.common.model.HoodieLogFile;
import org.apache.hudi.common.storage.SizeAwareDataInputStream;
import org.apache.hudi.common.util.HoodieAvroUtils;
import org.apache.hudi.common.fs.SizeAwareDataInputStream;
import org.apache.hudi.avro.HoodieAvroUtils;
import org.apache.hudi.common.util.Option;
import org.apache.hudi.exception.HoodieIOException;

View File

@@ -20,7 +20,7 @@ package org.apache.hudi.common.table.log.block;
import org.apache.hudi.common.model.HoodieKey;
import org.apache.hudi.common.model.HoodieLogFile;
import org.apache.hudi.common.storage.SizeAwareDataInputStream;
import org.apache.hudi.common.fs.SizeAwareDataInputStream;
import org.apache.hudi.common.util.Option;
import org.apache.hudi.common.util.SerializationUtils;
import org.apache.hudi.exception.HoodieIOException;

View File

@@ -20,7 +20,7 @@ package org.apache.hudi.common.table.log.block;
import org.apache.hudi.common.model.HoodieLogFile;
import org.apache.hudi.common.table.log.HoodieMergedLogRecordScanner;
import org.apache.hudi.common.util.FSUtils;
import org.apache.hudi.common.fs.FSUtils;
import org.apache.hudi.common.util.Option;
import org.apache.hudi.exception.HoodieException;
import org.apache.hudi.exception.HoodieIOException;

View File

@@ -19,7 +19,6 @@
package org.apache.hudi.common.table.timeline;
import org.apache.hudi.common.table.HoodieTableMetaClient;
import org.apache.hudi.common.table.HoodieTimeline;
import org.apache.hudi.common.table.timeline.HoodieInstant.State;
import org.apache.hudi.common.util.FileIOUtils;
import org.apache.hudi.common.util.Option;

View File

@@ -26,7 +26,6 @@ import org.apache.hudi.avro.model.HoodieArchivedMetaEntry;
import org.apache.hudi.common.model.HoodieLogFile;
import org.apache.hudi.common.model.HoodiePartitionMetadata;
import org.apache.hudi.common.table.HoodieTableMetaClient;
import org.apache.hudi.common.table.HoodieTimeline;
import org.apache.hudi.common.table.log.HoodieLogFormat;
import org.apache.hudi.common.table.log.block.HoodieAvroDataBlock;
import org.apache.hudi.common.util.Option;

View File

@@ -18,7 +18,6 @@
package org.apache.hudi.common.table.timeline;
import org.apache.hudi.common.table.HoodieTimeline;
import org.apache.hudi.common.table.timeline.HoodieInstant.State;
import org.apache.hudi.common.util.CollectionUtils;
import org.apache.hudi.common.util.Option;

View File

@@ -18,9 +18,8 @@
package org.apache.hudi.common.table.timeline;
import org.apache.hudi.common.table.HoodieTimeline;
import org.apache.hudi.common.util.CollectionUtils;
import org.apache.hudi.common.util.FSUtils;
import org.apache.hudi.common.fs.FSUtils;
import org.apache.hadoop.fs.FileStatus;

View File

@@ -16,11 +16,10 @@
* limitations under the License.
*/
package org.apache.hudi.common.table;
package org.apache.hudi.common.table.timeline;
import org.apache.hudi.common.model.HoodieTableType;
import org.apache.hudi.common.table.timeline.HoodieDefaultTimeline;
import org.apache.hudi.common.table.timeline.HoodieInstant;
import org.apache.hudi.common.table.HoodieTableMetaClient;
import org.apache.hudi.common.table.timeline.HoodieInstant.State;
import org.apache.hudi.common.util.Option;
import org.apache.hudi.common.util.StringUtils;

View File

@@ -16,11 +16,10 @@
* limitations under the License.
*/
package org.apache.hudi.common.util;
package org.apache.hudi.common.table.timeline;
import org.apache.hudi.common.table.HoodieTimeline;
import org.apache.hudi.common.table.timeline.HoodieInstant;
import org.apache.hudi.common.table.timeline.HoodieInstant.State;
import org.apache.hudi.common.util.Option;
import org.apache.hudi.common.util.collection.Pair;
import org.apache.log4j.LogManager;

View File

@@ -16,10 +16,9 @@
* limitations under the License.
*/
package org.apache.hudi.common.table;
package org.apache.hudi.common.table.timeline;
import org.apache.hudi.common.model.TimelineLayoutVersion;
import org.apache.hudi.common.table.timeline.HoodieInstant;
import org.apache.hudi.common.table.timeline.versioning.TimelineLayoutVersion;
import org.apache.hudi.common.util.collection.Pair;
import java.io.Serializable;

View File

@@ -16,7 +16,7 @@
* limitations under the License.
*/
package org.apache.hudi.common.util;
package org.apache.hudi.common.table.timeline;
import org.apache.hudi.avro.model.HoodieCleanMetadata;
import org.apache.hudi.avro.model.HoodieCleanerPlan;
@@ -37,6 +37,8 @@ import org.apache.avro.io.DatumWriter;
import org.apache.avro.specific.SpecificDatumReader;
import org.apache.avro.specific.SpecificDatumWriter;
import org.apache.avro.specific.SpecificRecordBase;
import org.apache.hudi.common.util.Option;
import org.apache.hudi.common.util.ValidationUtils;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
@@ -45,10 +47,7 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* A utility class for avro.
*/
public class AvroUtils {
public class TimelineMetadataUtils {
private static final Integer DEFAULT_VERSION = 1;

View File

@@ -19,7 +19,7 @@
package org.apache.hudi.common.table.timeline.dto;
import org.apache.hudi.common.table.HoodieTableMetaClient;
import org.apache.hudi.common.table.HoodieTimeline;
import org.apache.hudi.common.table.timeline.HoodieTimeline;
import org.apache.hudi.common.table.timeline.HoodieDefaultTimeline;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;

View File

@@ -16,7 +16,7 @@
* limitations under the License.
*/
package org.apache.hudi.common.versioning;
package org.apache.hudi.common.table.timeline.versioning;
import org.apache.hudi.common.table.HoodieTableMetaClient;

View File

@@ -16,7 +16,7 @@
* limitations under the License.
*/
package org.apache.hudi.common.versioning;
package org.apache.hudi.common.table.timeline.versioning;
import org.apache.hudi.common.table.HoodieTableMetaClient;
import org.apache.hudi.common.util.ValidationUtils;

View File

@@ -16,7 +16,7 @@
* limitations under the License.
*/
package org.apache.hudi.common.model;
package org.apache.hudi.common.table.timeline.versioning;
import org.apache.hudi.common.util.ValidationUtils;

View File

@@ -16,7 +16,7 @@
* limitations under the License.
*/
package org.apache.hudi.common.versioning;
package org.apache.hudi.common.table.timeline.versioning;
import java.io.Serializable;

View File

@@ -16,11 +16,11 @@
* limitations under the License.
*/
package org.apache.hudi.common.versioning.clean;
package org.apache.hudi.common.table.timeline.versioning.clean;
import org.apache.hudi.avro.model.HoodieCleanMetadata;
import org.apache.hudi.common.table.HoodieTableMetaClient;
import org.apache.hudi.common.versioning.MetadataMigrator;
import org.apache.hudi.common.table.timeline.versioning.MetadataMigrator;
import java.util.Arrays;

View File

@@ -16,15 +16,15 @@
* limitations under the License.
*/
package org.apache.hudi.common.versioning.clean;
package org.apache.hudi.common.table.timeline.versioning.clean;
import org.apache.hudi.avro.model.HoodieCleanMetadata;
import org.apache.hudi.avro.model.HoodieCleanPartitionMetadata;
import org.apache.hudi.common.table.HoodieTableMetaClient;
import org.apache.hudi.common.util.FSUtils;
import org.apache.hudi.common.fs.FSUtils;
import org.apache.hudi.common.util.ValidationUtils;
import org.apache.hudi.common.util.collection.Pair;
import org.apache.hudi.common.versioning.AbstractMigratorBase;
import org.apache.hudi.common.table.timeline.versioning.AbstractMigratorBase;
import org.apache.hadoop.fs.Path;

View File

@@ -16,14 +16,14 @@
* limitations under the License.
*/
package org.apache.hudi.common.versioning.clean;
package org.apache.hudi.common.table.timeline.versioning.clean;
import org.apache.hudi.avro.model.HoodieCleanMetadata;
import org.apache.hudi.avro.model.HoodieCleanPartitionMetadata;
import org.apache.hudi.common.table.HoodieTableMetaClient;
import org.apache.hudi.common.util.ValidationUtils;
import org.apache.hudi.common.util.collection.Pair;
import org.apache.hudi.common.versioning.AbstractMigratorBase;
import org.apache.hudi.common.table.timeline.versioning.AbstractMigratorBase;
import org.apache.hadoop.fs.Path;

View File

@@ -16,11 +16,11 @@
* limitations under the License.
*/
package org.apache.hudi.common.versioning.compaction;
package org.apache.hudi.common.table.timeline.versioning.compaction;
import org.apache.hudi.avro.model.HoodieCompactionPlan;
import org.apache.hudi.common.table.HoodieTableMetaClient;
import org.apache.hudi.common.versioning.MetadataMigrator;
import org.apache.hudi.common.table.timeline.versioning.MetadataMigrator;
import java.util.Arrays;

View File

@@ -16,14 +16,14 @@
* limitations under the License.
*/
package org.apache.hudi.common.versioning.compaction;
package org.apache.hudi.common.table.timeline.versioning.compaction;
import org.apache.hudi.avro.model.HoodieCompactionOperation;
import org.apache.hudi.avro.model.HoodieCompactionPlan;
import org.apache.hudi.common.table.HoodieTableMetaClient;
import org.apache.hudi.common.util.FSUtils;
import org.apache.hudi.common.fs.FSUtils;
import org.apache.hudi.common.util.ValidationUtils;
import org.apache.hudi.common.versioning.AbstractMigratorBase;
import org.apache.hudi.common.table.timeline.versioning.AbstractMigratorBase;
import org.apache.hadoop.fs.Path;

View File

@@ -16,13 +16,13 @@
* limitations under the License.
*/
package org.apache.hudi.common.versioning.compaction;
package org.apache.hudi.common.table.timeline.versioning.compaction;
import org.apache.hudi.avro.model.HoodieCompactionOperation;
import org.apache.hudi.avro.model.HoodieCompactionPlan;
import org.apache.hudi.common.table.HoodieTableMetaClient;
import org.apache.hudi.common.util.ValidationUtils;
import org.apache.hudi.common.versioning.AbstractMigratorBase;
import org.apache.hudi.common.table.timeline.versioning.AbstractMigratorBase;
import org.apache.hadoop.fs.Path;

View File

@@ -25,11 +25,10 @@ import org.apache.hudi.common.model.HoodieFileGroup;
import org.apache.hudi.common.model.HoodieFileGroupId;
import org.apache.hudi.common.model.HoodieLogFile;
import org.apache.hudi.common.table.HoodieTableMetaClient;
import org.apache.hudi.common.table.HoodieTimeline;
import org.apache.hudi.common.table.SyncableFileSystemView;
import org.apache.hudi.common.table.timeline.HoodieTimeline;
import org.apache.hudi.common.table.timeline.HoodieInstant;
import org.apache.hudi.common.util.CompactionUtils;
import org.apache.hudi.common.util.FSUtils;
import org.apache.hudi.common.fs.FSUtils;
import org.apache.hudi.common.util.HoodieTimer;
import org.apache.hudi.common.util.Option;
import org.apache.hudi.common.util.ValidationUtils;

View File

@@ -18,10 +18,9 @@
package org.apache.hudi.common.table.view;
import org.apache.hudi.common.SerializableConfiguration;
import org.apache.hudi.common.config.SerializableConfiguration;
import org.apache.hudi.common.table.HoodieTableMetaClient;
import org.apache.hudi.common.table.HoodieTimeline;
import org.apache.hudi.common.table.SyncableFileSystemView;
import org.apache.hudi.common.table.timeline.HoodieTimeline;
import org.apache.hudi.common.util.Functions.Function2;
import org.apache.log4j.LogManager;

View File

@@ -19,7 +19,7 @@
package org.apache.hudi.common.table.view;
import org.apache.hudi.common.util.ValidationUtils;
import org.apache.hudi.config.DefaultHoodieConfig;
import org.apache.hudi.common.config.DefaultHoodieConfig;
import java.io.File;
import java.io.FileReader;

View File

@@ -22,8 +22,7 @@ import org.apache.hudi.common.model.CompactionOperation;
import org.apache.hudi.common.model.HoodieFileGroup;
import org.apache.hudi.common.model.HoodieFileGroupId;
import org.apache.hudi.common.table.HoodieTableMetaClient;
import org.apache.hudi.common.table.HoodieTimeline;
import org.apache.hudi.common.table.TableFileSystemView;
import org.apache.hudi.common.table.timeline.HoodieTimeline;
import org.apache.hudi.common.util.Option;
import org.apache.hudi.common.util.ValidationUtils;
import org.apache.hudi.common.util.collection.Pair;

View File

@@ -28,15 +28,15 @@ import org.apache.hudi.common.model.HoodieBaseFile;
import org.apache.hudi.common.model.HoodieCommitMetadata;
import org.apache.hudi.common.model.HoodieFileGroup;
import org.apache.hudi.common.model.HoodieLogFile;
import org.apache.hudi.common.table.HoodieTimeline;
import org.apache.hudi.common.table.timeline.HoodieTimeline;
import org.apache.hudi.common.table.timeline.HoodieInstant;
import org.apache.hudi.common.util.AvroUtils;
import org.apache.hudi.common.table.timeline.TimelineMetadataUtils;
import org.apache.hudi.common.util.CleanerUtils;
import org.apache.hudi.common.util.CompactionUtils;
import org.apache.hudi.common.util.FSUtils;
import org.apache.hudi.common.fs.FSUtils;
import org.apache.hudi.common.util.Option;
import org.apache.hudi.common.util.TimelineDiffHelper;
import org.apache.hudi.common.util.TimelineDiffHelper.TimelineDiffResult;
import org.apache.hudi.common.table.timeline.TimelineDiffHelper;
import org.apache.hudi.common.table.timeline.TimelineDiffHelper.TimelineDiffResult;
import org.apache.hudi.common.util.collection.Pair;
import org.apache.hudi.exception.HoodieException;
@@ -220,7 +220,7 @@ public abstract class IncrementalTimelineSyncFileSystemView extends AbstractTabl
private void addRestoreInstant(HoodieTimeline timeline, HoodieInstant instant) throws IOException {
LOG.info("Syncing restore instant (" + instant + ")");
HoodieRestoreMetadata metadata =
AvroUtils.deserializeAvroMetadata(timeline.getInstantDetails(instant).get(), HoodieRestoreMetadata.class);
TimelineMetadataUtils.deserializeAvroMetadata(timeline.getInstantDetails(instant).get(), HoodieRestoreMetadata.class);
Map<String, List<Pair<String, String>>> partitionFiles =
metadata.getHoodieRestoreMetadata().entrySet().stream().flatMap(entry -> {
@@ -244,7 +244,7 @@ public abstract class IncrementalTimelineSyncFileSystemView extends AbstractTabl
private void addRollbackInstant(HoodieTimeline timeline, HoodieInstant instant) throws IOException {
LOG.info("Syncing rollback instant (" + instant + ")");
HoodieRollbackMetadata metadata =
AvroUtils.deserializeAvroMetadata(timeline.getInstantDetails(instant).get(), HoodieRollbackMetadata.class);
TimelineMetadataUtils.deserializeAvroMetadata(timeline.getInstantDetails(instant).get(), HoodieRollbackMetadata.class);
metadata.getPartitionMetadata().entrySet().stream().forEach(e -> {
removeFileSlicesForPartition(timeline, instant, e.getKey(), e.getValue().getSuccessDeleteFiles());

View File

@@ -22,8 +22,7 @@ import org.apache.hudi.common.model.CompactionOperation;
import org.apache.hudi.common.model.FileSlice;
import org.apache.hudi.common.model.HoodieBaseFile;
import org.apache.hudi.common.model.HoodieFileGroup;
import org.apache.hudi.common.table.HoodieTimeline;
import org.apache.hudi.common.table.SyncableFileSystemView;
import org.apache.hudi.common.table.timeline.HoodieTimeline;
import org.apache.hudi.common.table.timeline.HoodieInstant;
import org.apache.hudi.common.util.Functions.Function0;
import org.apache.hudi.common.util.Functions.Function1;

View File

@@ -23,8 +23,7 @@ import org.apache.hudi.common.model.FileSlice;
import org.apache.hudi.common.model.HoodieBaseFile;
import org.apache.hudi.common.model.HoodieFileGroup;
import org.apache.hudi.common.table.HoodieTableMetaClient;
import org.apache.hudi.common.table.HoodieTimeline;
import org.apache.hudi.common.table.SyncableFileSystemView;
import org.apache.hudi.common.table.timeline.HoodieTimeline;
import org.apache.hudi.common.table.timeline.HoodieInstant;
import org.apache.hudi.common.table.timeline.dto.BaseFileDTO;
import org.apache.hudi.common.table.timeline.dto.CompactionOpDTO;

View File

@@ -25,9 +25,9 @@ import org.apache.hudi.common.model.HoodieFileGroup;
import org.apache.hudi.common.model.HoodieFileGroupId;
import org.apache.hudi.common.model.HoodieLogFile;
import org.apache.hudi.common.table.HoodieTableMetaClient;
import org.apache.hudi.common.table.HoodieTimeline;
import org.apache.hudi.common.table.timeline.HoodieTimeline;
import org.apache.hudi.common.util.Option;
import org.apache.hudi.common.util.RocksDBDAO;
import org.apache.hudi.common.util.collection.RocksDBDAO;
import org.apache.hudi.common.util.RocksDBSchemaHelper;
import org.apache.hudi.common.util.ValidationUtils;
import org.apache.hudi.common.util.collection.Pair;

View File

@@ -22,7 +22,7 @@ import org.apache.hudi.common.model.CompactionOperation;
import org.apache.hudi.common.model.HoodieFileGroup;
import org.apache.hudi.common.model.HoodieFileGroupId;
import org.apache.hudi.common.table.HoodieTableMetaClient;
import org.apache.hudi.common.table.HoodieTimeline;
import org.apache.hudi.common.table.timeline.HoodieTimeline;
import org.apache.hudi.common.util.DefaultSizeEstimator;
import org.apache.hudi.common.util.collection.ExternalSpillableMap;
import org.apache.hudi.common.util.collection.Pair;

View File

@@ -16,10 +16,10 @@
* limitations under the License.
*/
package org.apache.hudi.common.table;
package org.apache.hudi.common.table.view;
import org.apache.hudi.common.table.TableFileSystemView.BaseFileOnlyView;
import org.apache.hudi.common.table.TableFileSystemView.SliceView;
import org.apache.hudi.common.table.view.TableFileSystemView.BaseFileOnlyView;
import org.apache.hudi.common.table.view.TableFileSystemView.SliceView;
/**
* A consolidated file-system view interface exposing both complete slice and basefile only views along with

View File

@@ -16,13 +16,14 @@
* limitations under the License.
*/
package org.apache.hudi.common.table;
package org.apache.hudi.common.table.view;
import org.apache.hudi.common.model.CompactionOperation;
import org.apache.hudi.common.model.FileSlice;
import org.apache.hudi.common.model.HoodieBaseFile;
import org.apache.hudi.common.model.HoodieFileGroup;
import org.apache.hudi.common.table.timeline.HoodieInstant;
import org.apache.hudi.common.table.timeline.HoodieTimeline;
import org.apache.hudi.common.util.Option;
import org.apache.hudi.common.util.collection.Pair;

View File

@@ -18,15 +18,16 @@
package org.apache.hudi.common.util;
import org.apache.hudi.common.table.timeline.TimelineMetadataUtils;
import org.apache.hudi.avro.model.HoodieCleanMetadata;
import org.apache.hudi.avro.model.HoodieCleanPartitionMetadata;
import org.apache.hudi.avro.model.HoodieCleanerPlan;
import org.apache.hudi.common.HoodieCleanStat;
import org.apache.hudi.common.table.HoodieTableMetaClient;
import org.apache.hudi.common.table.timeline.HoodieInstant;
import org.apache.hudi.common.versioning.clean.CleanMetadataMigrator;
import org.apache.hudi.common.versioning.clean.CleanV1MigrationHandler;
import org.apache.hudi.common.versioning.clean.CleanV2MigrationHandler;
import org.apache.hudi.common.table.timeline.versioning.clean.CleanMetadataMigrator;
import org.apache.hudi.common.table.timeline.versioning.clean.CleanV1MigrationHandler;
import org.apache.hudi.common.table.timeline.versioning.clean.CleanV2MigrationHandler;
import java.io.IOException;
import java.util.HashMap;
@@ -69,7 +70,7 @@ public class CleanerUtils {
public static HoodieCleanMetadata getCleanerMetadata(HoodieTableMetaClient metaClient, HoodieInstant cleanInstant)
throws IOException {
CleanMetadataMigrator metadataMigrator = new CleanMetadataMigrator(metaClient);
HoodieCleanMetadata cleanMetadata = AvroUtils.deserializeHoodieCleanMetadata(
HoodieCleanMetadata cleanMetadata = TimelineMetadataUtils.deserializeHoodieCleanMetadata(
metaClient.getActiveTimeline().readCleanerInfoAsBytes(cleanInstant).get());
return metadataMigrator.upgradeToLatest(cleanMetadata, cleanMetadata.getVersion());
}
@@ -83,7 +84,7 @@ public class CleanerUtils {
*/
public static HoodieCleanerPlan getCleanerPlan(HoodieTableMetaClient metaClient, HoodieInstant cleanInstant)
throws IOException {
return AvroUtils.deserializeAvroMetadata(metaClient.getActiveTimeline().readCleanerInfoAsBytes(cleanInstant).get(),
return TimelineMetadataUtils.deserializeAvroMetadata(metaClient.getActiveTimeline().readCleanerInfoAsBytes(cleanInstant).get(),
HoodieCleanerPlan.class);
}
}

View File

@@ -18,18 +18,19 @@
package org.apache.hudi.common.util;
import org.apache.hudi.common.table.timeline.TimelineMetadataUtils;
import org.apache.hudi.avro.model.HoodieCompactionOperation;
import org.apache.hudi.avro.model.HoodieCompactionPlan;
import org.apache.hudi.common.model.CompactionOperation;
import org.apache.hudi.common.model.FileSlice;
import org.apache.hudi.common.model.HoodieFileGroupId;
import org.apache.hudi.common.table.HoodieTableMetaClient;
import org.apache.hudi.common.table.HoodieTimeline;
import org.apache.hudi.common.table.timeline.HoodieTimeline;
import org.apache.hudi.common.table.timeline.HoodieInstant;
import org.apache.hudi.common.util.collection.Pair;
import org.apache.hudi.common.versioning.compaction.CompactionPlanMigrator;
import org.apache.hudi.common.versioning.compaction.CompactionV1MigrationHandler;
import org.apache.hudi.common.versioning.compaction.CompactionV2MigrationHandler;
import org.apache.hudi.common.table.timeline.versioning.compaction.CompactionPlanMigrator;
import org.apache.hudi.common.table.timeline.versioning.compaction.CompactionV1MigrationHandler;
import org.apache.hudi.common.table.timeline.versioning.compaction.CompactionV2MigrationHandler;
import org.apache.hudi.exception.HoodieException;
import org.apache.log4j.LogManager;
@@ -139,7 +140,7 @@ public class CompactionUtils {
public static HoodieCompactionPlan getCompactionPlan(HoodieTableMetaClient metaClient, String compactionInstant)
throws IOException {
CompactionPlanMigrator migrator = new CompactionPlanMigrator(metaClient);
HoodieCompactionPlan compactionPlan = AvroUtils.deserializeCompactionPlan(
HoodieCompactionPlan compactionPlan = TimelineMetadataUtils.deserializeCompactionPlan(
metaClient.getActiveTimeline().readCompactionPlanAsBytes(
HoodieTimeline.getCompactionRequestedInstant(compactionInstant)).get());
return migrator.upgradeToLatest(compactionPlan, compactionPlan.getVersion());

View File

@@ -18,10 +18,12 @@
package org.apache.hudi.common.util;
import org.apache.hudi.avro.HoodieAvroUtils;
import org.apache.hudi.avro.HoodieAvroWriteSupport;
import org.apache.hudi.common.bloom.filter.BloomFilter;
import org.apache.hudi.common.bloom.filter.BloomFilterFactory;
import org.apache.hudi.common.bloom.filter.BloomFilterTypeCode;
import org.apache.hudi.common.bloom.BloomFilter;
import org.apache.hudi.common.bloom.BloomFilterFactory;
import org.apache.hudi.common.bloom.BloomFilterTypeCode;
import org.apache.hudi.common.fs.FSUtils;
import org.apache.hudi.common.model.HoodieRecord;
import org.apache.hudi.exception.HoodieException;
import org.apache.hudi.exception.HoodieIOException;

View File

@@ -22,7 +22,7 @@ import org.apache.hudi.common.model.HoodieKey;
import org.apache.hudi.common.model.HoodieRecord;
import org.apache.hudi.common.model.HoodieRecordPayload;
import org.apache.hudi.common.util.collection.DiskBasedMap.FileEntry;
import org.apache.hudi.common.util.collection.io.storage.SizeAwareDataOutputStream;
import org.apache.hudi.common.fs.SizeAwareDataOutputStream;
import org.apache.hudi.exception.HoodieCorruptedDataException;
import org.apache.avro.generic.GenericRecord;

View File

@@ -21,7 +21,7 @@ package org.apache.hudi.common.util.collection;
import org.apache.hudi.common.util.BufferedRandomAccessFile;
import org.apache.hudi.common.util.SerializationUtils;
import org.apache.hudi.common.util.SpillableMapUtils;
import org.apache.hudi.common.util.collection.io.storage.SizeAwareDataOutputStream;
import org.apache.hudi.common.fs.SizeAwareDataOutputStream;
import org.apache.hudi.exception.HoodieException;
import org.apache.hudi.exception.HoodieIOException;
import org.apache.hudi.exception.HoodieNotSupportedException;

View File

@@ -18,7 +18,6 @@
package org.apache.hudi.common.util.collection;
import org.apache.hudi.common.util.RocksDBDAO;
import org.apache.hudi.exception.HoodieNotSupportedException;
import java.io.Serializable;

View File

@@ -16,9 +16,12 @@
* limitations under the License.
*/
package org.apache.hudi.common.util;
package org.apache.hudi.common.util.collection;
import org.apache.hudi.common.util.collection.Pair;
import org.apache.hudi.common.util.FileIOUtils;
import org.apache.hudi.common.util.HoodieTimer;
import org.apache.hudi.common.util.SerializationUtils;
import org.apache.hudi.common.util.ValidationUtils;
import org.apache.hudi.exception.HoodieException;
import org.apache.hudi.exception.HoodieIOException;

View File

@@ -16,7 +16,7 @@
* limitations under the License.
*/
package org.apache.hudi.common.util;
package org.apache.hudi.avro;
import org.apache.avro.Schema;
import org.codehaus.jackson.JsonNode;

View File

@@ -21,8 +21,8 @@ package org.apache.hudi.common;
import org.apache.hudi.common.model.HoodieTableType;
import org.apache.hudi.common.model.HoodieTestUtils;
import org.apache.hudi.common.table.HoodieTableMetaClient;
import org.apache.hudi.common.table.HoodieTimeline;
import org.apache.hudi.common.table.SyncableFileSystemView;
import org.apache.hudi.common.table.timeline.HoodieTimeline;
import org.apache.hudi.common.table.view.SyncableFileSystemView;
import org.apache.hudi.common.table.view.HoodieTableFileSystemView;
import org.junit.Rule;

View File

@@ -16,7 +16,7 @@
* limitations under the License.
*/
package org.apache.hudi.common.bloom.filter;
package org.apache.hudi.common.bloom;
import org.junit.Assert;
import org.junit.Test;

View File

@@ -16,7 +16,7 @@
* limitations under the License.
*/
package org.apache.hudi.common.bloom.filter;
package org.apache.hudi.common.bloom;
import org.apache.hadoop.util.hash.Hash;
import org.junit.Assert;

View File

@@ -16,13 +16,13 @@
* limitations under the License.
*/
package org.apache.hudi.common.util;
package org.apache.hudi.common.fs;
import org.apache.hudi.common.HoodieCommonTestHarness;
import org.apache.hudi.common.model.HoodieLogFile;
import org.apache.hudi.common.model.HoodieTestUtils;
import org.apache.hudi.common.table.HoodieTableMetaClient;
import org.apache.hudi.common.table.HoodieTimeline;
import org.apache.hudi.common.table.timeline.HoodieTimeline;
import org.apache.hudi.common.table.timeline.HoodieInstant;
import org.apache.hudi.exception.HoodieException;

View File

@@ -16,7 +16,7 @@
* limitations under the License.
*/
package org.apache.hudi.common.inline.fs;
package org.apache.hudi.common.fs.inline;
import org.apache.hadoop.fs.Path;

View File

@@ -16,7 +16,7 @@
* limitations under the License.
*/
package org.apache.hudi.common.inline.fs;
package org.apache.hudi.common.fs.inline;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
@@ -42,10 +42,10 @@ import java.util.HashSet;
import java.util.Set;
import java.util.UUID;
import static org.apache.hudi.common.inline.fs.FileSystemTestUtils.FILE_SCHEME;
import static org.apache.hudi.common.inline.fs.FileSystemTestUtils.RANDOM;
import static org.apache.hudi.common.inline.fs.FileSystemTestUtils.getPhantomFile;
import static org.apache.hudi.common.inline.fs.FileSystemTestUtils.getRandomOuterInMemPath;
import static org.apache.hudi.common.fs.inline.FileSystemTestUtils.FILE_SCHEME;
import static org.apache.hudi.common.fs.inline.FileSystemTestUtils.RANDOM;
import static org.apache.hudi.common.fs.inline.FileSystemTestUtils.getPhantomFile;
import static org.apache.hudi.common.fs.inline.FileSystemTestUtils.getRandomOuterInMemPath;
/**
* Tests {@link InLineFileSystem} to inline HFile.

View File

@@ -16,7 +16,7 @@
* limitations under the License.
*/
package org.apache.hudi.common.inline.fs;
package org.apache.hudi.common.fs.inline;
import org.apache.hudi.common.util.collection.Pair;
@@ -38,8 +38,8 @@ import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import static org.apache.hudi.common.inline.fs.FileSystemTestUtils.RANDOM;
import static org.apache.hudi.common.inline.fs.FileSystemTestUtils.getRandomOuterFSPath;
import static org.apache.hudi.common.fs.inline.FileSystemTestUtils.RANDOM;
import static org.apache.hudi.common.fs.inline.FileSystemTestUtils.getRandomOuterFSPath;
/**
* Tests {@link InLineFileSystem}.

View File

@@ -16,7 +16,7 @@
* limitations under the License.
*/
package org.apache.hudi.common.inline.fs;
package org.apache.hudi.common.fs.inline;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
@@ -27,8 +27,8 @@ import org.junit.Test;
import java.io.IOException;
import java.net.URI;
import static org.apache.hudi.common.inline.fs.FileSystemTestUtils.RANDOM;
import static org.apache.hudi.common.inline.fs.FileSystemTestUtils.getRandomOuterInMemPath;
import static org.apache.hudi.common.fs.inline.FileSystemTestUtils.RANDOM;
import static org.apache.hudi.common.fs.inline.FileSystemTestUtils.getRandomOuterInMemPath;
/**
* Unit tests {@link InMemoryFileSystem}.

View File

@@ -54,7 +54,7 @@ public class HdfsTestService {
private MiniDFSCluster miniDfsCluster;
public HdfsTestService() throws IOException {
workDir = Files.createTempDirectory("temp").getName(0).toString();
workDir = Files.createTempDirectory("temp").toFile().getAbsolutePath();
}
public Configuration getHadoopConf() {

View File

@@ -18,7 +18,7 @@
package org.apache.hudi.common.model;
import org.apache.hudi.common.util.HoodieAvroUtils;
import org.apache.hudi.avro.HoodieAvroUtils;
import org.apache.hudi.common.util.Option;
import org.apache.hudi.exception.HoodieIOException;

View File

@@ -26,7 +26,7 @@ import org.apache.hudi.common.HoodieCleanStat;
import org.apache.hudi.common.model.HoodieWriteStat.RuntimeStats;
import org.apache.hudi.common.table.HoodieTableConfig;
import org.apache.hudi.common.table.HoodieTableMetaClient;
import org.apache.hudi.common.table.HoodieTimeline;
import org.apache.hudi.common.table.timeline.HoodieTimeline;
import org.apache.hudi.common.table.log.HoodieLogFormat;
import org.apache.hudi.common.table.log.HoodieLogFormat.Writer;
import org.apache.hudi.common.table.log.block.HoodieAvroDataBlock;
@@ -34,11 +34,11 @@ import org.apache.hudi.common.table.log.block.HoodieLogBlock;
import org.apache.hudi.common.table.timeline.HoodieActiveTimeline;
import org.apache.hudi.common.table.timeline.HoodieInstant;
import org.apache.hudi.common.table.timeline.HoodieInstant.State;
import org.apache.hudi.common.util.AvroUtils;
import org.apache.hudi.common.table.timeline.TimelineMetadataUtils;
import org.apache.hudi.common.util.CleanerUtils;
import org.apache.hudi.common.util.CompactionUtils;
import org.apache.hudi.common.util.FSUtils;
import org.apache.hudi.common.util.HoodieAvroUtils;
import org.apache.hudi.common.fs.FSUtils;
import org.apache.hudi.avro.HoodieAvroUtils;
import org.apache.hudi.common.util.Option;
import org.apache.hudi.common.util.collection.Pair;
import org.apache.hudi.exception.HoodieIOException;
@@ -162,7 +162,7 @@ public class HoodieTestUtils {
metaClient.getBasePath() + "/" + HoodieTableMetaClient.METAFOLDER_NAME + "/" + f);
os = metaClient.getFs().create(commitFile, true);
// Write empty clean metadata
os.write(AvroUtils.serializeCleanerPlan(
os.write(TimelineMetadataUtils.serializeCleanerPlan(
new HoodieCleanerPlan(new HoodieActionInstant("", "", ""), "", new HashMap<>(), 1)).get());
} catch (IOException ioe) {
throw new HoodieIOException(ioe.getMessage(), ioe);
@@ -241,7 +241,7 @@ public class HoodieTestUtils {
HoodieCompactionPlan plan = CompactionUtils.buildFromFileSlices(fileSliceList, Option.empty(), Option.empty());
HoodieInstant compactionInstant = new HoodieInstant(State.REQUESTED, HoodieTimeline.COMPACTION_ACTION, instant);
metaClient.getActiveTimeline().saveToCompactionRequested(compactionInstant,
AvroUtils.serializeCompactionPlan(plan));
TimelineMetadataUtils.serializeCompactionPlan(plan));
}
public static String getDataFilePath(String basePath, String partitionPath, String instantTime, String fileID) {
@@ -306,7 +306,7 @@ public class HoodieTestUtils {
HoodieCleanMetadata cleanMetadata =
CleanerUtils.convertCleanMetadata(metaClient, instantTime, Option.of(0L), Collections.singletonList(cleanStats));
// Write empty clean metadata
os.write(AvroUtils.serializeCleanMetadata(cleanMetadata).get());
os.write(TimelineMetadataUtils.serializeCleanMetadata(cleanMetadata).get());
}
}

View File

@@ -18,7 +18,7 @@
package org.apache.hudi.common.model;
import org.apache.hudi.common.util.FSUtils;
import org.apache.hudi.common.fs.FSUtils;
import org.apache.hadoop.fs.Path;
import org.junit.Test;

View File

@@ -18,6 +18,7 @@
package org.apache.hudi.common.storage;
import org.apache.hudi.common.fs.StorageSchemes;
import org.junit.Test;
import static org.junit.Assert.assertFalse;

View File

@@ -22,6 +22,7 @@ import org.apache.hudi.common.HoodieCommonTestHarness;
import org.apache.hudi.common.model.HoodieTestUtils;
import org.apache.hudi.common.table.timeline.HoodieActiveTimeline;
import org.apache.hudi.common.table.timeline.HoodieInstant;
import org.apache.hudi.common.table.timeline.HoodieTimeline;
import org.apache.hudi.common.util.Option;
import org.junit.Before;

View File

@@ -18,10 +18,12 @@
package org.apache.hudi.common.table;
import org.apache.hudi.common.model.TimelineLayoutVersion;
import org.apache.hudi.common.table.timeline.versioning.TimelineLayoutVersion;
import org.apache.hudi.common.table.timeline.HoodieInstant;
import org.apache.hudi.common.table.timeline.HoodieInstant.State;
import org.apache.hudi.common.table.timeline.HoodieTimeline;
import org.apache.hudi.common.table.timeline.TimelineLayout;
import org.junit.Assert;
import org.junit.Test;

View File

@@ -34,8 +34,8 @@ import org.apache.hudi.common.table.log.block.HoodieDeleteBlock;
import org.apache.hudi.common.table.log.block.HoodieLogBlock;
import org.apache.hudi.common.table.log.block.HoodieLogBlock.HeaderMetadataType;
import org.apache.hudi.common.table.log.block.HoodieLogBlock.HoodieLogBlockType;
import org.apache.hudi.common.util.FSUtils;
import org.apache.hudi.common.util.HoodieAvroUtils;
import org.apache.hudi.common.fs.FSUtils;
import org.apache.hudi.avro.HoodieAvroUtils;
import org.apache.hudi.common.util.SchemaTestUtil;
import org.apache.hudi.exception.CorruptedLogFileException;

Some files were not shown because too many files have changed in this diff Show More