1
0

[HUDI-1684] Tweak hudi-flink-bundle module pom and reorganize the packages for hudi-flink module (#2669)

* Add required dependencies for hudi-flink-bundle module
* Some package reorganization of the hudi-flink module
This commit is contained in:
Danny Chan
2021-03-15 16:02:05 +08:00
committed by GitHub
parent e93c6a5693
commit fc6c5f4285
72 changed files with 357 additions and 203 deletions

View File

@@ -16,24 +16,24 @@
* limitations under the License.
*/
package org.apache.hudi.operator;
package org.apache.hudi.sink;
import org.apache.hudi.client.WriteStatus;
import org.apache.hudi.common.model.HoodieRecord;
import org.apache.hudi.common.model.HoodieTableType;
import org.apache.hudi.operator.compact.CompactFunction;
import org.apache.hudi.operator.compact.CompactionCommitEvent;
import org.apache.hudi.operator.compact.CompactionCommitSink;
import org.apache.hudi.operator.compact.CompactionPlanEvent;
import org.apache.hudi.operator.compact.CompactionPlanOperator;
import org.apache.hudi.operator.partitioner.BucketAssignFunction;
import org.apache.hudi.operator.transform.RowDataToHoodieFunction;
import org.apache.hudi.operator.utils.TestConfigurations;
import org.apache.hudi.operator.utils.TestData;
import org.apache.hudi.sink.CommitSink;
import org.apache.hudi.configuration.FlinkOptions;
import org.apache.hudi.sink.compact.CompactFunction;
import org.apache.hudi.sink.compact.CompactionCommitEvent;
import org.apache.hudi.sink.compact.CompactionCommitSink;
import org.apache.hudi.sink.compact.CompactionPlanEvent;
import org.apache.hudi.sink.compact.CompactionPlanOperator;
import org.apache.hudi.sink.partitioner.BucketAssignFunction;
import org.apache.hudi.sink.transform.RowDataToHoodieFunction;
import org.apache.hudi.streamer.FlinkStreamerConfig;
import org.apache.hudi.util.AvroSchemaConverter;
import org.apache.hudi.util.StreamerUtil;
import org.apache.hudi.utils.TestConfigurations;
import org.apache.hudi.utils.TestData;
import org.apache.hudi.utils.source.ContinuousFileSource;
import org.apache.flink.api.common.JobStatus;

View File

@@ -16,16 +16,16 @@
* limitations under the License.
*/
package org.apache.hudi.operator;
package org.apache.hudi.sink;
import org.apache.hudi.client.WriteStatus;
import org.apache.hudi.common.fs.FSUtils;
import org.apache.hudi.common.model.HoodieWriteStat;
import org.apache.hudi.common.table.HoodieTableMetaClient;
import org.apache.hudi.exception.HoodieException;
import org.apache.hudi.operator.event.BatchWriteSuccessEvent;
import org.apache.hudi.operator.utils.TestConfigurations;
import org.apache.hudi.sink.event.BatchWriteSuccessEvent;
import org.apache.hudi.util.StreamerUtil;
import org.apache.hudi.utils.TestConfigurations;
import org.apache.flink.runtime.operators.coordination.OperatorEvent;
import org.apache.hadoop.fs.FileSystem;
@@ -49,7 +49,7 @@ import static org.junit.jupiter.api.Assertions.assertTrue;
/**
* Test cases for StreamingSinkOperatorCoordinator.
*/
public class StreamWriteOperatorCoordinatorTest {
public class TestStreamWriteOperatorCoordinator {
private StreamWriteOperatorCoordinator coordinator;
@TempDir

View File

@@ -16,7 +16,7 @@
* limitations under the License.
*/
package org.apache.hudi.operator;
package org.apache.hudi.sink;
import org.apache.hudi.client.HoodieFlinkWriteClient;
import org.apache.hudi.client.WriteStatus;
@@ -24,11 +24,12 @@ import org.apache.hudi.common.model.HoodieKey;
import org.apache.hudi.common.model.HoodieRecord;
import org.apache.hudi.common.model.HoodieTableType;
import org.apache.hudi.common.table.timeline.HoodieInstant;
import org.apache.hudi.configuration.FlinkOptions;
import org.apache.hudi.exception.HoodieException;
import org.apache.hudi.operator.event.BatchWriteSuccessEvent;
import org.apache.hudi.operator.utils.StreamWriteFunctionWrapper;
import org.apache.hudi.operator.utils.TestConfigurations;
import org.apache.hudi.operator.utils.TestData;
import org.apache.hudi.sink.event.BatchWriteSuccessEvent;
import org.apache.hudi.sink.utils.StreamWriteFunctionWrapper;
import org.apache.hudi.utils.TestConfigurations;
import org.apache.hudi.utils.TestData;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.runtime.operators.coordination.OperatorEvent;

View File

@@ -16,7 +16,7 @@
* limitations under the License.
*/
package org.apache.hudi.operator;
package org.apache.hudi.sink;
import org.apache.hudi.client.FlinkTaskContextSupplier;
import org.apache.hudi.client.common.HoodieFlinkEngineContext;
@@ -28,9 +28,9 @@ import org.apache.hudi.common.table.TableSchemaResolver;
import org.apache.hudi.common.table.timeline.HoodieActiveTimeline;
import org.apache.hudi.common.table.timeline.HoodieInstant;
import org.apache.hudi.config.HoodieWriteConfig;
import org.apache.hudi.operator.utils.TestData;
import org.apache.hudi.table.HoodieFlinkTable;
import org.apache.hudi.util.StreamerUtil;
import org.apache.hudi.utils.TestData;
import org.apache.avro.Schema;
import org.apache.hadoop.fs.FileSystem;

View File

@@ -16,9 +16,10 @@
* limitations under the License.
*/
package org.apache.hudi.operator;
package org.apache.hudi.sink;
import org.apache.hudi.common.model.HoodieTableType;
import org.apache.hudi.configuration.FlinkOptions;
import org.apache.flink.configuration.Configuration;
import org.junit.jupiter.api.Disabled;

View File

@@ -16,18 +16,18 @@
* limitations under the License.
*/
package org.apache.hudi.operator.partitioner;
package org.apache.hudi.sink.partitioner;
import org.apache.hudi.client.FlinkTaskContextSupplier;
import org.apache.hudi.client.common.HoodieFlinkEngineContext;
import org.apache.hudi.common.config.SerializableConfiguration;
import org.apache.hudi.common.model.HoodieRecordLocation;
import org.apache.hudi.config.HoodieWriteConfig;
import org.apache.hudi.operator.utils.TestConfigurations;
import org.apache.hudi.table.action.commit.BucketInfo;
import org.apache.hudi.table.action.commit.BucketType;
import org.apache.hudi.table.action.commit.SmallFile;
import org.apache.hudi.util.StreamerUtil;
import org.apache.hudi.utils.TestConfigurations;
import org.apache.flink.configuration.Configuration;
import org.junit.jupiter.api.BeforeEach;

View File

@@ -16,7 +16,7 @@
* limitations under the License.
*/
package org.apache.hudi.source;
package org.apache.hudi.sink.transform;
import org.apache.hudi.common.config.TypedProperties;
import org.apache.hudi.common.model.HoodieRecord;

View File

@@ -16,14 +16,14 @@
* limitations under the License.
*/
package org.apache.hudi.operator.utils;
package org.apache.hudi.sink.utils;
import org.apache.hudi.avro.model.HoodieCompactionPlan;
import org.apache.hudi.operator.compact.CompactFunction;
import org.apache.hudi.operator.compact.CompactionCommitEvent;
import org.apache.hudi.operator.compact.CompactionCommitSink;
import org.apache.hudi.operator.compact.CompactionPlanEvent;
import org.apache.hudi.operator.compact.CompactionPlanOperator;
import org.apache.hudi.sink.compact.CompactFunction;
import org.apache.hudi.sink.compact.CompactionCommitEvent;
import org.apache.hudi.sink.compact.CompactionCommitSink;
import org.apache.hudi.sink.compact.CompactionPlanEvent;
import org.apache.hudi.sink.compact.CompactionPlanOperator;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.runtime.io.disk.iomanager.IOManager;
@@ -43,7 +43,7 @@ import java.util.ArrayList;
import java.util.List;
/**
* A wrapper class to manipulate the {@link org.apache.hudi.operator.compact.CompactFunction} instance for testing.
* A wrapper class to manipulate the {@link org.apache.hudi.sink.compact.CompactFunction} instance for testing.
*/
public class CompactFunctionWrapper {
private final Configuration conf;

View File

@@ -15,7 +15,7 @@
* limitations under the License.
*/
package org.apache.hudi.operator.utils;
package org.apache.hudi.sink.utils;
import org.apache.flink.api.common.state.KeyedStateStore;
import org.apache.flink.runtime.state.FunctionInitializationContext;

View File

@@ -16,7 +16,7 @@
* limitations under the License.
*/
package org.apache.hudi.operator.utils;
package org.apache.hudi.sink.utils;
import org.apache.flink.api.common.state.MapState;

View File

@@ -15,7 +15,7 @@
* limitations under the License.
*/
package org.apache.hudi.operator.utils;
package org.apache.hudi.sink.utils;
import org.apache.flink.api.common.state.AggregatingState;
import org.apache.flink.api.common.state.AggregatingStateDescriptor;

View File

@@ -15,7 +15,7 @@
* limitations under the License.
*/
package org.apache.hudi.operator.utils;
package org.apache.hudi.sink.utils;
import org.apache.flink.api.common.ExecutionConfig;
import org.apache.flink.api.common.state.KeyedStateStore;

View File

@@ -16,18 +16,19 @@
* limitations under the License.
*/
package org.apache.hudi.operator.utils;
package org.apache.hudi.sink.utils;
import org.apache.hudi.client.HoodieFlinkWriteClient;
import org.apache.hudi.common.model.HoodieKey;
import org.apache.hudi.common.model.HoodieRecord;
import org.apache.hudi.configuration.FlinkOptions;
import org.apache.hudi.exception.HoodieException;
import org.apache.hudi.operator.FlinkOptions;
import org.apache.hudi.operator.StreamWriteFunction;
import org.apache.hudi.operator.StreamWriteOperatorCoordinator;
import org.apache.hudi.operator.event.BatchWriteSuccessEvent;
import org.apache.hudi.operator.partitioner.BucketAssignFunction;
import org.apache.hudi.operator.transform.RowDataToHoodieFunction;
import org.apache.hudi.sink.StreamWriteFunction;
import org.apache.hudi.sink.StreamWriteOperatorCoordinator;
import org.apache.hudi.sink.event.BatchWriteSuccessEvent;
import org.apache.hudi.sink.partitioner.BucketAssignFunction;
import org.apache.hudi.sink.transform.RowDataToHoodieFunction;
import org.apache.hudi.utils.TestConfigurations;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.runtime.io.disk.iomanager.IOManager;

View File

@@ -18,12 +18,11 @@
package org.apache.hudi.source;
import org.apache.hudi.operator.FlinkOptions;
import org.apache.hudi.operator.StreamReadMonitoringFunction;
import org.apache.hudi.operator.utils.TestConfigurations;
import org.apache.hudi.operator.utils.TestData;
import org.apache.hudi.source.format.mor.MergeOnReadInputSplit;
import org.apache.hudi.configuration.FlinkOptions;
import org.apache.hudi.table.format.mor.MergeOnReadInputSplit;
import org.apache.hudi.util.StreamerUtil;
import org.apache.hudi.utils.TestConfigurations;
import org.apache.hudi.utils.TestData;
import org.apache.hudi.utils.TestUtils;
import org.apache.flink.configuration.Configuration;

View File

@@ -20,18 +20,16 @@ package org.apache.hudi.source;
import org.apache.hudi.common.table.HoodieTableMetaClient;
import org.apache.hudi.common.table.TableSchemaResolver;
import org.apache.hudi.configuration.FlinkOptions;
import org.apache.hudi.exception.HoodieException;
import org.apache.hudi.operator.FlinkOptions;
import org.apache.hudi.operator.StreamReadMonitoringFunction;
import org.apache.hudi.operator.StreamReadOperator;
import org.apache.hudi.operator.utils.TestConfigurations;
import org.apache.hudi.operator.utils.TestData;
import org.apache.hudi.source.format.FilePathUtils;
import org.apache.hudi.source.format.mor.MergeOnReadInputFormat;
import org.apache.hudi.source.format.mor.MergeOnReadInputSplit;
import org.apache.hudi.source.format.mor.MergeOnReadTableState;
import org.apache.hudi.table.format.FilePathUtils;
import org.apache.hudi.table.format.mor.MergeOnReadInputFormat;
import org.apache.hudi.table.format.mor.MergeOnReadInputSplit;
import org.apache.hudi.table.format.mor.MergeOnReadTableState;
import org.apache.hudi.util.AvroSchemaConverter;
import org.apache.hudi.util.StreamerUtil;
import org.apache.hudi.utils.TestConfigurations;
import org.apache.hudi.utils.TestData;
import org.apache.hudi.utils.TestUtils;
import org.apache.avro.Schema;

View File

@@ -16,11 +16,11 @@
* limitations under the License.
*/
package org.apache.hudi.source;
package org.apache.hudi.table;
import org.apache.hudi.operator.FlinkOptions;
import org.apache.hudi.operator.utils.TestConfigurations;
import org.apache.hudi.operator.utils.TestData;
import org.apache.hudi.configuration.FlinkOptions;
import org.apache.hudi.utils.TestConfigurations;
import org.apache.hudi.utils.TestData;
import org.apache.hudi.utils.TestUtils;
import org.apache.hudi.utils.factory.CollectSinkTableFactory;
@@ -46,7 +46,7 @@ import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import static org.apache.hudi.operator.utils.TestData.assertRowsEquals;
import static org.apache.hudi.utils.TestData.assertRowsEquals;
/**
* IT cases for Hoodie table source and sink.

View File

@@ -16,13 +16,11 @@
* limitations under the License.
*/
package org.apache.hudi.factory;
package org.apache.hudi.table;
import org.apache.hudi.operator.FlinkOptions;
import org.apache.hudi.operator.utils.TestConfigurations;
import org.apache.hudi.sink.HoodieTableSink;
import org.apache.hudi.source.HoodieTableSource;
import org.apache.hudi.configuration.FlinkOptions;
import org.apache.hudi.util.StreamerUtil;
import org.apache.hudi.utils.TestConfigurations;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.configuration.ReadableConfig;

View File

@@ -16,14 +16,14 @@
* limitations under the License.
*/
package org.apache.hudi.source;
package org.apache.hudi.table;
import org.apache.hudi.configuration.FlinkOptions;
import org.apache.hudi.exception.HoodieException;
import org.apache.hudi.operator.FlinkOptions;
import org.apache.hudi.operator.utils.TestConfigurations;
import org.apache.hudi.operator.utils.TestData;
import org.apache.hudi.source.format.mor.MergeOnReadInputFormat;
import org.apache.hudi.table.format.mor.MergeOnReadInputFormat;
import org.apache.hudi.util.StreamerUtil;
import org.apache.hudi.utils.TestConfigurations;
import org.apache.hudi.utils.TestData;
import org.apache.flink.api.common.io.FileInputFormat;
import org.apache.flink.api.common.io.InputFormat;

View File

@@ -16,14 +16,14 @@
* limitations under the License.
*/
package org.apache.hudi.source.format;
package org.apache.hudi.table.format;
import org.apache.hudi.common.model.HoodieTableType;
import org.apache.hudi.operator.FlinkOptions;
import org.apache.hudi.operator.utils.TestConfigurations;
import org.apache.hudi.operator.utils.TestData;
import org.apache.hudi.source.HoodieTableSource;
import org.apache.hudi.configuration.FlinkOptions;
import org.apache.hudi.table.HoodieTableSource;
import org.apache.hudi.util.StreamerUtil;
import org.apache.hudi.utils.TestConfigurations;
import org.apache.hudi.utils.TestData;
import org.apache.flink.api.common.io.InputFormat;
import org.apache.flink.configuration.Configuration;

View File

@@ -16,9 +16,9 @@
* limitations under the License.
*/
package org.apache.hudi.operator.utils;
package org.apache.hudi.utils;
import org.apache.hudi.operator.FlinkOptions;
import org.apache.hudi.configuration.FlinkOptions;
import org.apache.hudi.streamer.FlinkStreamerConfig;
import org.apache.hudi.utils.factory.CollectSinkTableFactory;
import org.apache.hudi.utils.factory.ContinuousFileSourceFactory;

View File

@@ -16,7 +16,7 @@
* limitations under the License.
*/
package org.apache.hudi.operator.utils;
package org.apache.hudi.utils;
import org.apache.hudi.client.FlinkTaskContextSupplier;
import org.apache.hudi.client.common.HoodieFlinkEngineContext;
@@ -25,7 +25,8 @@ import org.apache.hudi.common.table.HoodieTableMetaClient;
import org.apache.hudi.common.table.log.HoodieMergedLogRecordScanner;
import org.apache.hudi.common.testutils.HoodieTestUtils;
import org.apache.hudi.config.HoodieWriteConfig;
import org.apache.hudi.operator.FlinkOptions;
import org.apache.hudi.configuration.FlinkOptions;
import org.apache.hudi.sink.utils.StreamWriteFunctionWrapper;
import org.apache.hudi.table.HoodieFlinkTable;
import org.apache.avro.Schema;

View File

@@ -19,9 +19,9 @@
package org.apache.hudi.utils;
import org.apache.hudi.common.table.HoodieTableMetaClient;
import org.apache.hudi.operator.FlinkOptions;
import org.apache.hudi.operator.StreamReadMonitoringFunction;
import org.apache.hudi.source.format.mor.MergeOnReadInputSplit;
import org.apache.hudi.configuration.FlinkOptions;
import org.apache.hudi.source.StreamReadMonitoringFunction;
import org.apache.hudi.table.format.mor.MergeOnReadInputSplit;
import org.apache.hudi.util.StreamerUtil;
import org.apache.flink.configuration.Configuration;

View File

@@ -18,7 +18,7 @@
package org.apache.hudi.utils.factory;
import org.apache.hudi.operator.utils.TestConfigurations;
import org.apache.hudi.utils.TestConfigurations;
import org.apache.flink.api.common.state.ListState;
import org.apache.flink.api.common.state.ListStateDescriptor;

View File

@@ -18,7 +18,7 @@
package org.apache.hudi.utils.factory;
import org.apache.hudi.operator.FlinkOptions;
import org.apache.hudi.configuration.FlinkOptions;
import org.apache.hudi.utils.source.ContinuousFileSource;
import org.apache.flink.configuration.Configuration;

View File

@@ -153,7 +153,7 @@ public class ContinuousFileSource implements StreamTableSource<RowData> {
private void loadDataBuffer() {
this.dataBuffer = new ArrayList<>();
try (BufferedReader reader =
new BufferedReader(new FileReader(this.path.getPath()))) {
new BufferedReader(new FileReader(this.path.toString()))) {
String line = reader.readLine();
while (line != null) {
this.dataBuffer.add(line);

View File

@@ -14,5 +14,5 @@
# See the License for the specific language governing permissions and
# limitations under the License.
org.apache.hudi.factory.HoodieTableFactory
org.apache.hudi.table.HoodieTableFactory
org.apache.hudi.utils.factory.ContinuousFileSourceFactory