1
0

[HUDI-1684] Tweak hudi-flink-bundle module pom and reorganize the packages for hudi-flink module (#2669)

* Add required dependencies for hudi-flink-bundle module
* Some packages reorganization of hudi-flink module
This commit is contained in:
Danny Chan
2021-03-15 16:02:05 +08:00
committed by GitHub
parent e93c6a5693
commit fc6c5f4285
72 changed files with 357 additions and 203 deletions

View File

@@ -16,14 +16,14 @@
* limitations under the License.
*/
package org.apache.hudi.operator;
package org.apache.hudi.configuration;
import org.apache.hudi.common.model.HoodieTableType;
import org.apache.hudi.config.HoodieWriteConfig;
import org.apache.hudi.streamer.FlinkStreamerConfig;
import org.apache.hudi.common.model.OverwriteWithLatestAvroPayload;
import org.apache.hudi.config.HoodieWriteConfig;
import org.apache.hudi.keygen.SimpleAvroKeyGenerator;
import org.apache.hudi.keygen.constant.KeyGeneratorOptions;
import org.apache.hudi.streamer.FlinkStreamerConfig;
import org.apache.hudi.util.StreamerUtil;
import org.apache.flink.configuration.ConfigOption;

View File

@@ -20,8 +20,8 @@ package org.apache.hudi.schema;
import org.apache.hudi.common.config.TypedProperties;
import org.apache.hudi.common.fs.FSUtils;
import org.apache.hudi.configuration.FlinkOptions;
import org.apache.hudi.exception.HoodieIOException;
import org.apache.hudi.operator.FlinkOptions;
import org.apache.hudi.util.StreamerUtil;
import org.apache.avro.Schema;

View File

@@ -31,7 +31,6 @@ import org.apache.hudi.util.StreamerUtil;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.functions.sink.RichSinkFunction;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

View File

@@ -16,9 +16,8 @@
* limitations under the License.
*/
package org.apache.hudi.operator;
package org.apache.hudi.sink;
import org.apache.hudi.streamer.FlinkStreamerConfig;
import org.apache.hudi.client.FlinkTaskContextSupplier;
import org.apache.hudi.client.HoodieFlinkWriteClient;
import org.apache.hudi.client.common.HoodieFlinkEngineContext;
@@ -30,6 +29,8 @@ import org.apache.hudi.common.model.HoodieTableType;
import org.apache.hudi.common.table.HoodieTableMetaClient;
import org.apache.hudi.common.table.timeline.HoodieTimeline;
import org.apache.hudi.common.util.StringUtils;
import org.apache.hudi.configuration.FlinkOptions;
import org.apache.hudi.streamer.FlinkStreamerConfig;
import org.apache.hudi.util.StreamerUtil;
import org.apache.flink.api.common.state.ListState;
@@ -40,11 +41,10 @@ import org.apache.flink.streaming.api.operators.AbstractStreamOperator;
import org.apache.flink.streaming.api.operators.OneInputStreamOperator;
import org.apache.flink.streaming.api.operators.StreamingRuntimeContext;
import org.apache.flink.streaming.runtime.streamrecord.StreamRecord;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.PathFilter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

View File

@@ -16,10 +16,8 @@
* limitations under the License.
*/
package org.apache.hudi.operator;
package org.apache.hudi.sink;
import org.apache.hudi.index.HoodieIndex;
import org.apache.hudi.streamer.FlinkStreamerConfig;
import org.apache.hudi.client.FlinkTaskContextSupplier;
import org.apache.hudi.client.HoodieFlinkWriteClient;
import org.apache.hudi.client.WriteStatus;
@@ -29,6 +27,8 @@ import org.apache.hudi.common.model.HoodieRecord;
import org.apache.hudi.common.model.HoodieTableType;
import org.apache.hudi.common.table.timeline.HoodieTimeline;
import org.apache.hudi.exception.HoodieFlinkStreamerException;
import org.apache.hudi.index.HoodieIndex;
import org.apache.hudi.streamer.FlinkStreamerConfig;
import org.apache.hudi.table.action.commit.FlinkWriteHelper;
import org.apache.hudi.util.StreamerUtil;

View File

@@ -16,7 +16,7 @@
* limitations under the License.
*/
package org.apache.hudi.operator;
package org.apache.hudi.sink;
import org.apache.hudi.client.WriteStatus;
import org.apache.hudi.common.model.HoodieRecord;
@@ -26,7 +26,6 @@ import org.apache.flink.runtime.state.StateSnapshotContext;
import org.apache.flink.streaming.api.functions.KeyedProcessFunction;
import org.apache.flink.streaming.api.operators.KeyedProcessOperator;
import org.apache.flink.streaming.runtime.streamrecord.StreamRecord;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

View File

@@ -16,7 +16,7 @@
* limitations under the License.
*/
package org.apache.hudi.operator;
package org.apache.hudi.sink;
import org.apache.hudi.client.FlinkTaskContextSupplier;
import org.apache.hudi.client.HoodieFlinkWriteClient;
@@ -26,8 +26,9 @@ import org.apache.hudi.common.config.SerializableConfiguration;
import org.apache.hudi.common.model.HoodieRecord;
import org.apache.hudi.common.model.WriteOperationType;
import org.apache.hudi.common.util.ObjectSizeCalculator;
import org.apache.hudi.configuration.FlinkOptions;
import org.apache.hudi.index.HoodieIndex;
import org.apache.hudi.operator.event.BatchWriteSuccessEvent;
import org.apache.hudi.sink.event.BatchWriteSuccessEvent;
import org.apache.hudi.table.action.commit.FlinkWriteHelper;
import org.apache.hudi.util.StreamerUtil;

View File

@@ -16,7 +16,7 @@
* limitations under the License.
*/
package org.apache.hudi.operator;
package org.apache.hudi.sink;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.runtime.operators.coordination.OperatorEvent;

View File

@@ -16,15 +16,16 @@
* limitations under the License.
*/
package org.apache.hudi.operator;
package org.apache.hudi.sink;
import org.apache.hudi.client.FlinkTaskContextSupplier;
import org.apache.hudi.client.HoodieFlinkWriteClient;
import org.apache.hudi.client.WriteStatus;
import org.apache.hudi.client.common.HoodieFlinkEngineContext;
import org.apache.hudi.common.util.Option;
import org.apache.hudi.configuration.FlinkOptions;
import org.apache.hudi.exception.HoodieException;
import org.apache.hudi.operator.event.BatchWriteSuccessEvent;
import org.apache.hudi.sink.event.BatchWriteSuccessEvent;
import org.apache.hudi.util.StreamerUtil;
import org.apache.flink.annotation.VisibleForTesting;

View File

@@ -16,7 +16,7 @@
* limitations under the License.
*/
package org.apache.hudi.operator;
package org.apache.hudi.sink;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.runtime.jobgraph.OperatorID;

View File

@@ -16,7 +16,7 @@
* limitations under the License.
*/
package org.apache.hudi.operator.compact;
package org.apache.hudi.sink.compact;
import org.apache.hudi.client.FlinkTaskContextSupplier;
import org.apache.hudi.client.HoodieFlinkWriteClient;

View File

@@ -16,7 +16,7 @@
* limitations under the License.
*/
package org.apache.hudi.operator.compact;
package org.apache.hudi.sink.compact;
import org.apache.hudi.client.WriteStatus;

View File

@@ -16,7 +16,7 @@
* limitations under the License.
*/
package org.apache.hudi.operator.compact;
package org.apache.hudi.sink.compact;
import org.apache.hudi.avro.model.HoodieCompactionPlan;
import org.apache.hudi.client.FlinkTaskContextSupplier;

View File

@@ -16,7 +16,7 @@
* limitations under the License.
*/
package org.apache.hudi.operator.compact;
package org.apache.hudi.sink.compact;
import org.apache.hudi.common.model.CompactionOperation;

View File

@@ -16,7 +16,7 @@
* limitations under the License.
*/
package org.apache.hudi.operator.compact;
package org.apache.hudi.sink.compact;
import org.apache.hudi.avro.model.HoodieCompactionPlan;
import org.apache.hudi.client.FlinkTaskContextSupplier;

View File

@@ -16,13 +16,13 @@
* limitations under the License.
*/
package org.apache.hudi.operator.event;
import org.apache.flink.runtime.operators.coordination.OperatorEvent;
package org.apache.hudi.sink.event;
import org.apache.hudi.client.WriteStatus;
import org.apache.hudi.common.util.ValidationUtils;
import org.apache.flink.runtime.operators.coordination.OperatorEvent;
import java.util.ArrayList;
import java.util.List;

View File

@@ -16,7 +16,7 @@
* limitations under the License.
*/
package org.apache.hudi.operator.partitioner;
package org.apache.hudi.sink.partitioner;
import org.apache.hudi.client.FlinkTaskContextSupplier;
import org.apache.hudi.client.common.HoodieFlinkEngineContext;
@@ -30,10 +30,10 @@ import org.apache.hudi.common.model.HoodieTableType;
import org.apache.hudi.common.model.WriteOperationType;
import org.apache.hudi.common.util.ParquetUtils;
import org.apache.hudi.config.HoodieWriteConfig;
import org.apache.hudi.configuration.FlinkOptions;
import org.apache.hudi.exception.HoodieException;
import org.apache.hudi.exception.HoodieIOException;
import org.apache.hudi.index.HoodieIndexUtils;
import org.apache.hudi.operator.FlinkOptions;
import org.apache.hudi.table.HoodieTable;
import org.apache.hudi.table.action.commit.BucketInfo;
import org.apache.hudi.util.StreamerUtil;

View File

@@ -16,7 +16,7 @@
* limitations under the License.
*/
package org.apache.hudi.operator.partitioner;
package org.apache.hudi.sink.partitioner;
import org.apache.hudi.client.common.HoodieFlinkEngineContext;
import org.apache.hudi.common.fs.FSUtils;

View File

@@ -16,12 +16,12 @@
* limitations under the License.
*/
package org.apache.hudi.operator.partitioner;
package org.apache.hudi.sink.partitioner;
import org.apache.hudi.client.common.HoodieFlinkEngineContext;
import org.apache.hudi.common.model.HoodieTableType;
import org.apache.hudi.config.HoodieWriteConfig;
import org.apache.hudi.operator.partitioner.delta.DeltaBucketAssigner;
import org.apache.hudi.sink.partitioner.delta.DeltaBucketAssigner;
/**
* Utilities for {@code BucketAssigner}.

View File

@@ -16,7 +16,7 @@
* limitations under the License.
*/
package org.apache.hudi.operator.partitioner.delta;
package org.apache.hudi.sink.partitioner.delta;
import org.apache.hudi.client.common.HoodieFlinkEngineContext;
import org.apache.hudi.common.fs.FSUtils;
@@ -26,7 +26,7 @@ import org.apache.hudi.common.model.HoodieRecordLocation;
import org.apache.hudi.common.table.timeline.HoodieInstant;
import org.apache.hudi.common.table.timeline.HoodieTimeline;
import org.apache.hudi.config.HoodieWriteConfig;
import org.apache.hudi.operator.partitioner.BucketAssigner;
import org.apache.hudi.sink.partitioner.BucketAssigner;
import org.apache.hudi.table.action.commit.SmallFile;
import java.util.ArrayList;

View File

@@ -16,7 +16,7 @@
* limitations under the License.
*/
package org.apache.hudi.source;
package org.apache.hudi.sink.transform;
import org.apache.hudi.avro.HoodieAvroUtils;
import org.apache.hudi.common.config.TypedProperties;

View File

@@ -16,15 +16,15 @@
* limitations under the License.
*/
package org.apache.hudi.operator.transform;
package org.apache.hudi.sink.transform;
import org.apache.hudi.avro.HoodieAvroUtils;
import org.apache.hudi.common.model.HoodieKey;
import org.apache.hudi.common.model.HoodieRecord;
import org.apache.hudi.common.model.HoodieRecordPayload;
import org.apache.hudi.common.model.WriteOperationType;
import org.apache.hudi.configuration.FlinkOptions;
import org.apache.hudi.keygen.KeyGenerator;
import org.apache.hudi.operator.FlinkOptions;
import org.apache.hudi.util.RowDataToAvroConverters;
import org.apache.hudi.util.StreamerUtil;

View File

@@ -16,7 +16,7 @@
* limitations under the License.
*/
package org.apache.hudi.operator;
package org.apache.hudi.source;
import org.apache.hudi.common.fs.FSUtils;
import org.apache.hudi.common.model.HoodieCommitMetadata;
@@ -27,9 +27,10 @@ import org.apache.hudi.common.table.timeline.HoodieTimeline;
import org.apache.hudi.common.table.view.HoodieTableFileSystemView;
import org.apache.hudi.common.util.Option;
import org.apache.hudi.common.util.ValidationUtils;
import org.apache.hudi.configuration.FlinkOptions;
import org.apache.hudi.exception.HoodieException;
import org.apache.hudi.source.format.mor.InstantRange;
import org.apache.hudi.source.format.mor.MergeOnReadInputSplit;
import org.apache.hudi.table.format.mor.InstantRange;
import org.apache.hudi.table.format.mor.MergeOnReadInputSplit;
import org.apache.hudi.util.StreamerUtil;
import org.apache.flink.annotation.VisibleForTesting;
@@ -341,13 +342,13 @@ public class StreamReadMonitoringFunction
* @return the file statuses array
*/
private FileStatus[] getWritePathsOfInstants(List<HoodieCommitMetadata> metadataList) {
FileSystem fs = FSUtils.getFs(path.getPath(), hadoopConf);
FileSystem fs = FSUtils.getFs(path.toString(), hadoopConf);
return metadataList.stream().map(metadata -> getWritePathsOfInstant(metadata, fs))
.flatMap(Collection::stream).toArray(FileStatus[]::new);
}
private List<FileStatus> getWritePathsOfInstant(HoodieCommitMetadata metadata, FileSystem fs) {
return metadata.getFileIdAndFullPaths(path.getPath()).values().stream()
return metadata.getFileIdAndFullPaths(path.toString()).values().stream()
.map(path -> {
try {
return fs.getFileStatus(new org.apache.hadoop.fs.Path(path));

View File

@@ -16,10 +16,10 @@
* limitations under the License.
*/
package org.apache.hudi.operator;
package org.apache.hudi.source;
import org.apache.hudi.source.format.mor.MergeOnReadInputFormat;
import org.apache.hudi.source.format.mor.MergeOnReadInputSplit;
import org.apache.hudi.table.format.mor.MergeOnReadInputFormat;
import org.apache.hudi.table.format.mor.MergeOnReadInputSplit;
import org.apache.flink.api.common.state.ListState;
import org.apache.flink.api.common.state.ListStateDescriptor;

View File

@@ -22,13 +22,13 @@ import org.apache.hudi.client.WriteStatus;
import org.apache.hudi.common.config.TypedProperties;
import org.apache.hudi.common.model.HoodieRecord;
import org.apache.hudi.config.HoodieWriteConfig;
import org.apache.hudi.operator.FlinkOptions;
import org.apache.hudi.operator.InstantGenerateOperator;
import org.apache.hudi.operator.KeyedWriteProcessFunction;
import org.apache.hudi.operator.KeyedWriteProcessOperator;
import org.apache.hudi.operator.partitioner.BucketAssignFunction;
import org.apache.hudi.configuration.FlinkOptions;
import org.apache.hudi.sink.CommitSink;
import org.apache.hudi.source.JsonStringToHoodieRecordMapFunction;
import org.apache.hudi.sink.InstantGenerateOperator;
import org.apache.hudi.sink.KeyedWriteProcessFunction;
import org.apache.hudi.sink.KeyedWriteProcessOperator;
import org.apache.hudi.sink.partitioner.BucketAssignFunction;
import org.apache.hudi.sink.transform.JsonStringToHoodieRecordMapFunction;
import org.apache.hudi.util.StreamerUtil;
import com.beust.jcommander.JCommander;

View File

@@ -19,10 +19,10 @@
package org.apache.hudi.streamer;
import org.apache.hudi.common.model.HoodieRecord;
import org.apache.hudi.operator.FlinkOptions;
import org.apache.hudi.operator.StreamWriteOperatorFactory;
import org.apache.hudi.operator.partitioner.BucketAssignFunction;
import org.apache.hudi.operator.transform.RowDataToHoodieFunction;
import org.apache.hudi.configuration.FlinkOptions;
import org.apache.hudi.sink.StreamWriteOperatorFactory;
import org.apache.hudi.sink.partitioner.BucketAssignFunction;
import org.apache.hudi.sink.transform.RowDataToHoodieFunction;
import org.apache.hudi.util.AvroSchemaConverter;
import org.apache.hudi.util.StreamerUtil;

View File

@@ -16,11 +16,9 @@
* limitations under the License.
*/
package org.apache.hudi.factory;
package org.apache.hudi.table;
import org.apache.hudi.operator.FlinkOptions;
import org.apache.hudi.sink.HoodieTableSink;
import org.apache.hudi.source.HoodieTableSource;
import org.apache.hudi.configuration.FlinkOptions;
import org.apache.hudi.util.AvroSchemaConverter;
import org.apache.flink.configuration.Configuration;

View File

@@ -16,18 +16,18 @@
* limitations under the License.
*/
package org.apache.hudi.sink;
package org.apache.hudi.table;
import org.apache.hudi.common.model.HoodieRecord;
import org.apache.hudi.operator.FlinkOptions;
import org.apache.hudi.operator.StreamWriteOperatorFactory;
import org.apache.hudi.operator.compact.CompactFunction;
import org.apache.hudi.operator.compact.CompactionCommitEvent;
import org.apache.hudi.operator.compact.CompactionCommitSink;
import org.apache.hudi.operator.compact.CompactionPlanEvent;
import org.apache.hudi.operator.compact.CompactionPlanOperator;
import org.apache.hudi.operator.partitioner.BucketAssignFunction;
import org.apache.hudi.operator.transform.RowDataToHoodieFunction;
import org.apache.hudi.configuration.FlinkOptions;
import org.apache.hudi.sink.StreamWriteOperatorFactory;
import org.apache.hudi.sink.compact.CompactFunction;
import org.apache.hudi.sink.compact.CompactionCommitEvent;
import org.apache.hudi.sink.compact.CompactionCommitSink;
import org.apache.hudi.sink.compact.CompactionPlanEvent;
import org.apache.hudi.sink.compact.CompactionPlanOperator;
import org.apache.hudi.sink.partitioner.BucketAssignFunction;
import org.apache.hudi.sink.transform.RowDataToHoodieFunction;
import org.apache.hudi.util.StreamerUtil;
import org.apache.flink.annotation.VisibleForTesting;

View File

@@ -16,7 +16,7 @@
* limitations under the License.
*/
package org.apache.hudi.source;
package org.apache.hudi.table;
import org.apache.hudi.common.fs.FSUtils;
import org.apache.hudi.common.model.HoodieBaseFile;
@@ -27,17 +27,17 @@ import org.apache.hudi.common.table.TableSchemaResolver;
import org.apache.hudi.common.table.view.HoodieTableFileSystemView;
import org.apache.hudi.common.util.Option;
import org.apache.hudi.common.util.ValidationUtils;
import org.apache.hudi.configuration.FlinkOptions;
import org.apache.hudi.exception.HoodieException;
import org.apache.hudi.hadoop.HoodieROTablePathFilter;
import org.apache.hudi.hadoop.utils.HoodieRealtimeInputFormatUtils;
import org.apache.hudi.operator.FlinkOptions;
import org.apache.hudi.operator.StreamReadMonitoringFunction;
import org.apache.hudi.operator.StreamReadOperator;
import org.apache.hudi.source.format.FilePathUtils;
import org.apache.hudi.source.format.cow.CopyOnWriteInputFormat;
import org.apache.hudi.source.format.mor.MergeOnReadInputFormat;
import org.apache.hudi.source.format.mor.MergeOnReadInputSplit;
import org.apache.hudi.source.format.mor.MergeOnReadTableState;
import org.apache.hudi.source.StreamReadMonitoringFunction;
import org.apache.hudi.source.StreamReadOperator;
import org.apache.hudi.table.format.FilePathUtils;
import org.apache.hudi.table.format.cow.CopyOnWriteInputFormat;
import org.apache.hudi.table.format.mor.MergeOnReadInputFormat;
import org.apache.hudi.table.format.mor.MergeOnReadInputSplit;
import org.apache.hudi.table.format.mor.MergeOnReadTableState;
import org.apache.hudi.util.AvroSchemaConverter;
import org.apache.hudi.util.StreamerUtil;
@@ -87,7 +87,7 @@ import java.util.stream.Collectors;
import java.util.stream.IntStream;
import static org.apache.hudi.hadoop.utils.HoodieRealtimeRecordReaderUtils.getMaxCompactionMemoryInBytes;
import static org.apache.hudi.source.format.FormatUtils.getParquetConf;
import static org.apache.hudi.table.format.FormatUtils.getParquetConf;
/**
* Hoodie batch table source that always read the latest snapshot of the underneath table.

View File

@@ -16,10 +16,10 @@
* limitations under the License.
*/
package org.apache.hudi.source.format;
package org.apache.hudi.table.format;
import org.apache.hudi.common.fs.FSUtils;
import org.apache.hudi.operator.FlinkOptions;
import org.apache.hudi.configuration.FlinkOptions;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.table.api.TableException;

View File

@@ -16,13 +16,13 @@
* limitations under the License.
*/
package org.apache.hudi.source.format;
package org.apache.hudi.table.format;
import org.apache.hudi.common.fs.FSUtils;
import org.apache.hudi.common.table.log.HoodieMergedLogRecordScanner;
import org.apache.hudi.configuration.FlinkOptions;
import org.apache.hudi.hadoop.config.HoodieRealtimeConfig;
import org.apache.hudi.operator.FlinkOptions;
import org.apache.hudi.source.format.mor.MergeOnReadInputSplit;
import org.apache.hudi.table.format.mor.MergeOnReadInputSplit;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericRecord;

View File

@@ -16,7 +16,7 @@
* limitations under the License.
*/
package org.apache.hudi.source.format.cow;
package org.apache.hudi.table.format.cow;
import org.apache.flink.formats.parquet.vector.ParquetDictionary;
import org.apache.flink.formats.parquet.vector.reader.ColumnReader;

View File

@@ -16,7 +16,7 @@
* limitations under the License.
*/
package org.apache.hudi.source.format.cow;
package org.apache.hudi.table.format.cow;
import org.apache.hudi.common.fs.FSUtils;

View File

@@ -15,7 +15,7 @@
* limitations under the License.
*/
package org.apache.hudi.source.format.cow;
package org.apache.hudi.table.format.cow;
import org.apache.flink.table.data.TimestampData;
import org.apache.flink.table.data.vector.writable.WritableIntVector;

View File

@@ -16,7 +16,7 @@
* limitations under the License.
*/
package org.apache.hudi.source.format.cow;
package org.apache.hudi.table.format.cow;
import org.apache.flink.formats.parquet.vector.reader.ColumnReader;
import org.apache.flink.table.data.ColumnarRowData;
@@ -47,8 +47,8 @@ import java.util.List;
import java.util.Locale;
import java.util.Map;
import static org.apache.hudi.source.format.cow.ParquetSplitReaderUtil.createColumnReader;
import static org.apache.hudi.source.format.cow.ParquetSplitReaderUtil.createWritableColumnVector;
import static org.apache.hudi.table.format.cow.ParquetSplitReaderUtil.createColumnReader;
import static org.apache.hudi.table.format.cow.ParquetSplitReaderUtil.createWritableColumnVector;
import static org.apache.parquet.filter2.compat.RowGroupFilter.filterRowGroups;
import static org.apache.parquet.format.converter.ParquetMetadataConverter.range;
import static org.apache.parquet.hadoop.ParquetFileReader.readFooter;

View File

@@ -16,7 +16,7 @@
* limitations under the License.
*/
package org.apache.hudi.source.format.cow;
package org.apache.hudi.table.format.cow;
import org.apache.flink.table.data.DecimalData;
import org.apache.flink.table.data.DecimalDataUtils;

View File

@@ -16,7 +16,7 @@
* limitations under the License.
*/
package org.apache.hudi.source.format.cow;
package org.apache.hudi.table.format.cow;
import org.apache.flink.core.fs.Path;
import org.apache.flink.formats.parquet.vector.reader.BooleanColumnReader;

View File

@@ -16,7 +16,7 @@
* limitations under the License.
*/
package org.apache.hudi.source.format.cow;
package org.apache.hudi.table.format.cow;
import org.apache.flink.table.data.vector.writable.WritableColumnVector;
import org.apache.flink.table.data.vector.writable.WritableIntVector;

View File

@@ -16,7 +16,7 @@
* limitations under the License.
*/
package org.apache.hudi.source.format.mor;
package org.apache.hudi.table.format.mor;
import org.apache.hudi.common.table.timeline.HoodieTimeline;

View File

@@ -16,17 +16,17 @@
* limitations under the License.
*/
package org.apache.hudi.source.format.mor;
package org.apache.hudi.table.format.mor;
import org.apache.hudi.common.model.HoodieRecord;
import org.apache.hudi.common.model.HoodieRecordPayload;
import org.apache.hudi.common.util.Option;
import org.apache.hudi.configuration.FlinkOptions;
import org.apache.hudi.exception.HoodieException;
import org.apache.hudi.operator.FlinkOptions;
import org.apache.hudi.source.format.FilePathUtils;
import org.apache.hudi.source.format.FormatUtils;
import org.apache.hudi.source.format.cow.ParquetColumnarRowSplitReader;
import org.apache.hudi.source.format.cow.ParquetSplitReaderUtil;
import org.apache.hudi.table.format.FilePathUtils;
import org.apache.hudi.table.format.FormatUtils;
import org.apache.hudi.table.format.cow.ParquetColumnarRowSplitReader;
import org.apache.hudi.table.format.cow.ParquetSplitReaderUtil;
import org.apache.hudi.util.AvroToRowDataConverters;
import org.apache.hudi.util.RowDataToAvroConverters;
import org.apache.hudi.util.StreamerUtil;
@@ -57,7 +57,7 @@ import java.util.stream.IntStream;
import static org.apache.flink.table.data.vector.VectorizedColumnBatch.DEFAULT_SIZE;
import static org.apache.flink.table.filesystem.RowPartitionComputer.restorePartValueFromType;
import static org.apache.hudi.hadoop.utils.HoodieInputFormatUtils.HOODIE_RECORD_KEY_COL_POS;
import static org.apache.hudi.source.format.FormatUtils.buildAvroRecordBySchema;
import static org.apache.hudi.table.format.FormatUtils.buildAvroRecordBySchema;
/**
* The base InputFormat class to read from Hoodie data + log files.

View File

@@ -16,7 +16,7 @@
* limitations under the License.
*/
package org.apache.hudi.source.format.mor;
package org.apache.hudi.table.format.mor;
import org.apache.hudi.common.util.Option;

View File

@@ -16,7 +16,7 @@
* limitations under the License.
*/
package org.apache.hudi.source.format.mor;
package org.apache.hudi.table.format.mor;
import org.apache.flink.table.types.logical.RowType;

View File

@@ -18,6 +18,10 @@
package org.apache.hudi.util;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.util.Utf8;
import org.apache.flink.annotation.Internal;
import org.apache.flink.table.data.ArrayData;
import org.apache.flink.table.data.DecimalData;
@@ -28,11 +32,6 @@ import org.apache.flink.table.types.logical.ArrayType;
import org.apache.flink.table.types.logical.LogicalType;
import org.apache.flink.table.types.logical.RowType;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.util.Utf8;
import java.io.Serializable;
import java.nio.ByteBuffer;
import java.util.ArrayList;

View File

@@ -30,12 +30,12 @@ import org.apache.hudi.common.util.ReflectionUtils;
import org.apache.hudi.common.util.TablePathUtils;
import org.apache.hudi.config.HoodieCompactionConfig;
import org.apache.hudi.config.HoodieWriteConfig;
import org.apache.hudi.configuration.FlinkOptions;
import org.apache.hudi.exception.HoodieException;
import org.apache.hudi.exception.HoodieIOException;
import org.apache.hudi.exception.TableNotFoundException;
import org.apache.hudi.keygen.KeyGenerator;
import org.apache.hudi.keygen.SimpleAvroKeyGenerator;
import org.apache.hudi.operator.FlinkOptions;
import org.apache.hudi.schema.FilebasedSchemaProvider;
import org.apache.hudi.streamer.FlinkStreamerConfig;
import org.apache.hudi.table.action.compact.CompactionTriggerStrategy;

View File

@@ -14,4 +14,4 @@
# See the License for the specific language governing permissions and
# limitations under the License.
org.apache.hudi.factory.HoodieTableFactory
org.apache.hudi.table.HoodieTableFactory