[HUDI-378] Refactor the rest codes based on new ImportOrder code style rule (#1078)

lamber-ken authored on 2019-12-05 17:25:03 +08:00; committed by vinoyang
parent b3e0ebbc4a
commit c06d89b648
10 changed files with 59 additions and 49 deletions
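
The new ImportOrder rule (see the checkstyle change at the end of this diff) arranges imports into the groups org.apache.hudi, then all other third-party packages (*), then javax, then java, then scala, with a blank line between groups. Below is a minimal, hypothetical sketch of a file that conforms to that layout, reusing classes that appear in the hunks further down; the package org.apache.hudi.example, the class name, and the method are made up for illustration, and the Hudi, Guava and log4j 1.x dependencies are assumed to be on the classpath.

package org.apache.hudi.example;                    // hypothetical package, illustration only

import org.apache.hudi.config.HoodieWriteConfig;    // group 1: org.apache.hudi

import com.google.common.base.Preconditions;        // group 2: all other third-party packages (*)
import org.apache.log4j.LogManager;
import org.apache.log4j.Logger;

import javax.annotation.concurrent.Immutable;       // group 3: javax

import java.util.Properties;                        // group 4: java
// a scala group (e.g. scala.Tuple3) would come last, when present

@Immutable
public class ImportOrderExample {

  private static final Logger LOG = LogManager.getLogger(ImportOrderExample.class);

  // Illustrative only: copies the configured base path into a Properties object.
  static Properties summarize(HoodieWriteConfig config) {
    Preconditions.checkNotNull(config, "config must not be null");
    Properties summary = new Properties();
    summary.setProperty("hoodie.base.path", config.getBasePath());
    LOG.info("Summarized write config for table at " + config.getBasePath());
    return summary;
  }
}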


@@ -18,11 +18,12 @@
package org.apache.hudi.config;
import javax.annotation.concurrent.Immutable;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.util.Properties;
import javax.annotation.concurrent.Immutable;
/**
* Storage related config


@@ -18,18 +18,20 @@
package org.apache.hudi.metrics;
import com.google.common.base.Preconditions;
import java.io.Closeable;
import org.apache.hudi.config.HoodieWriteConfig;
import org.apache.hudi.exception.HoodieException;
import com.google.common.base.Preconditions;
import org.apache.log4j.LogManager;
import org.apache.log4j.Logger;
import java.lang.management.ManagementFactory;
import java.rmi.registry.LocateRegistry;
import javax.management.remote.JMXConnectorServer;
import javax.management.remote.JMXConnectorServerFactory;
import javax.management.remote.JMXServiceURL;
import org.apache.hudi.config.HoodieWriteConfig;
import org.apache.hudi.exception.HoodieException;
import org.apache.log4j.LogManager;
import org.apache.log4j.Logger;
import java.io.Closeable;
import java.lang.management.ManagementFactory;
import java.rmi.registry.LocateRegistry;
/**
* Implementation of Jmx reporter, which used to report jmx metric.


@@ -78,6 +78,7 @@ import java.util.TreeSet;
import java.util.function.Predicate;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import scala.Tuple3;
import static org.apache.hudi.common.model.HoodieTestUtils.DEFAULT_PARTITION_PATHS;


@@ -18,16 +18,16 @@
package org.apache.hudi.metrics;
import static org.apache.hudi.metrics.Metrics.registerGauge;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import org.apache.hudi.config.HoodieMetricsConfig;
import org.apache.hudi.config.HoodieWriteConfig;
import org.junit.Test;
import static org.apache.hudi.metrics.Metrics.registerGauge;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
/**
* Test for the Jmx metrics report.
*/
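
This test hunk also shows how static imports are handled: they end up after all regular imports and are sorted alphabetically (compare the sortStaticImportsAlphabetically property in the checkstyle change at the bottom of this diff, and the static HoodieTestUtils import that follows scala.Tuple3 in the earlier hunk). The following is a sketch of the presumed resulting import block for this test, using only the imports listed above; it is just the import section, not a complete class, and the exact placement is an inference rather than a copy of the final file.

import org.apache.hudi.config.HoodieMetricsConfig;   // org.apache.hudi group first
import org.apache.hudi.config.HoodieWriteConfig;

import org.junit.Test;                                // other third-party packages (*)

import static org.apache.hudi.metrics.Metrics.registerGauge;   // static imports last, alphabetical
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;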


@@ -18,8 +18,6 @@
package org.apache.hudi.common.util;
import com.google.common.collect.ImmutableMap;
import java.util.List;
import org.apache.hudi.avro.model.HoodieCleanMetadata;
import org.apache.hudi.avro.model.HoodieCleanPartitionMetadata;
import org.apache.hudi.common.HoodieCleanStat;
@@ -28,6 +26,10 @@ import org.apache.hudi.common.versioning.clean.CleanMetadataMigrator;
import org.apache.hudi.common.versioning.clean.CleanV1MigrationHandler;
import org.apache.hudi.common.versioning.clean.CleanV2MigrationHandler;
import com.google.common.collect.ImmutableMap;
import java.util.List;
public class CleanerUtils {
public static final Integer CLEAN_METADATA_VERSION_1 = CleanV1MigrationHandler.VERSION;
public static final Integer CLEAN_METADATA_VERSION_2 = CleanV2MigrationHandler.VERSION;


@@ -18,11 +18,12 @@
package org.apache.hudi.common.versioning.clean;
import java.util.Arrays;
import org.apache.hudi.avro.model.HoodieCleanMetadata;
import org.apache.hudi.common.table.HoodieTableMetaClient;
import org.apache.hudi.common.versioning.MetadataMigrator;
import java.util.Arrays;
public class CleanMetadataMigrator extends MetadataMigrator<HoodieCleanMetadata> {
public CleanMetadataMigrator(HoodieTableMetaClient metaClient) {


@@ -26,9 +26,10 @@ import org.apache.hudi.common.util.collection.Pair;
import org.apache.hudi.common.versioning.AbstractMigratorBase;
import com.google.common.base.Preconditions;
import org.apache.hadoop.fs.Path;
import java.util.Map;
import java.util.stream.Collectors;
import org.apache.hadoop.fs.Path;
public class CleanV1MigrationHandler extends AbstractMigratorBase<HoodieCleanMetadata> {


@@ -25,10 +25,11 @@ import org.apache.hudi.common.util.collection.Pair;
import org.apache.hudi.common.versioning.AbstractMigratorBase;
import com.google.common.base.Preconditions;
import org.apache.hadoop.fs.Path;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import org.apache.hadoop.fs.Path;
public class CleanV2MigrationHandler extends AbstractMigratorBase<HoodieCleanMetadata> {


@@ -18,35 +18,6 @@
package org.apache.hudi.hive;
import com.google.common.base.Preconditions;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import java.io.IOException;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.IMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.api.Partition;
import org.apache.hadoop.hive.metastore.api.Table;
import org.apache.hadoop.hive.ql.metadata.Hive;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hive.jdbc.HiveDriver;
import org.apache.hudi.common.model.HoodieCommitMetadata;
import org.apache.hudi.common.model.HoodieFileFormat;
import org.apache.hudi.common.model.HoodieLogFile;
@@ -61,6 +32,22 @@ import org.apache.hudi.common.util.collection.Pair;
import org.apache.hudi.exception.HoodieIOException;
import org.apache.hudi.exception.InvalidDatasetException;
import org.apache.hudi.hive.util.SchemaUtil;
import com.google.common.base.Preconditions;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.IMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.api.Partition;
import org.apache.hadoop.hive.metastore.api.Table;
import org.apache.hadoop.hive.ql.metadata.Hive;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hive.jdbc.HiveDriver;
import org.apache.log4j.LogManager;
import org.apache.log4j.Logger;
import org.apache.parquet.format.converter.ParquetMetadataConverter;
@@ -69,6 +56,21 @@ import org.apache.parquet.hadoop.metadata.ParquetMetadata;
import org.apache.parquet.schema.MessageType;
import org.apache.thrift.TException;
import java.io.IOException;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
@SuppressWarnings("ConstantConditions")
public class HoodieHiveClient {


@@ -282,7 +282,6 @@
<!-- Checks for out of order import statements. -->
<module name="ImportOrder">
<property name="severity" value="info"/>
<property name="groups" value="org.apache.hudi,*,javax,java,scala"/>
<property name="separated" value="true"/>
<property name="sortStaticImportsAlphabetically" value="true"/>
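
Taken together, the properties in this module encode the layout used throughout the refactor above: groups fixes the order of import groups (org.apache.hudi first, then every other third-party package matched by the * wildcard, then javax, java and scala), separated requires a blank line between consecutive groups, and sortStaticImportsAlphabetically requires static imports to be sorted alphabetically. Violations are reported at the configured severity. The import sketch after the commit header follows exactly this layout.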