diff --git a/hudi-cli/pom.xml b/hudi-cli/pom.xml
index 9eb8876f3..819f902a1 100644
--- a/hudi-cli/pom.xml
+++ b/hudi-cli/pom.xml
@@ -176,12 +176,6 @@
spark-sql_2.11
-
-
- commons-dbcp
- commons-dbcp
-
-
org.springframework.shell
spring-shell
diff --git a/hudi-cli/src/main/java/org/apache/hudi/cli/commands/RollbacksCommand.java b/hudi-cli/src/main/java/org/apache/hudi/cli/commands/RollbacksCommand.java
index 95fc730c3..bd568de17 100644
--- a/hudi-cli/src/main/java/org/apache/hudi/cli/commands/RollbacksCommand.java
+++ b/hudi-cli/src/main/java/org/apache/hudi/cli/commands/RollbacksCommand.java
@@ -26,7 +26,6 @@ import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.stream.Stream;
-import org.apache.commons.lang3.tuple.Pair;
import org.apache.hudi.avro.model.HoodieRollbackMetadata;
import org.apache.hudi.cli.HoodieCLI;
import org.apache.hudi.cli.HoodiePrintHelper;
@@ -37,6 +36,7 @@ import org.apache.hudi.common.table.timeline.HoodieActiveTimeline;
import org.apache.hudi.common.table.timeline.HoodieInstant;
import org.apache.hudi.common.table.timeline.HoodieInstant.State;
import org.apache.hudi.common.util.AvroUtils;
+import org.apache.hudi.common.util.collection.Pair;
import org.springframework.shell.core.CommandMarker;
import org.springframework.shell.core.annotation.CliCommand;
import org.springframework.shell.core.annotation.CliOption;
diff --git a/hudi-cli/src/main/java/org/apache/hudi/cli/utils/HiveUtil.java b/hudi-cli/src/main/java/org/apache/hudi/cli/utils/HiveUtil.java
index 97fd85e9d..d0eaff806 100644
--- a/hudi-cli/src/main/java/org/apache/hudi/cli/utils/HiveUtil.java
+++ b/hudi-cli/src/main/java/org/apache/hudi/cli/utils/HiveUtil.java
@@ -19,11 +19,10 @@
package org.apache.hudi.cli.utils;
import java.sql.Connection;
+import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
-import javax.sql.DataSource;
-import org.apache.commons.dbcp.BasicDataSource;
import org.apache.hudi.common.table.HoodieTableMetaClient;
import org.joda.time.DateTime;
@@ -42,17 +41,8 @@ public class HiveUtil {
private static Connection connection;
private static Connection getConnection(String jdbcUrl, String user, String pass) throws SQLException {
- DataSource ds = getDatasource(jdbcUrl, user, pass);
- return ds.getConnection();
- }
-
- private static DataSource getDatasource(String jdbcUrl, String user, String pass) {
- BasicDataSource ds = new BasicDataSource();
- ds.setDriverClassName(driverName);
- ds.setUrl(jdbcUrl);
- ds.setUsername(user);
- ds.setPassword(pass);
- return ds;
+ connection = DriverManager.getConnection(jdbcUrl, user, pass);
+ return connection;
}
public static long countRecords(String jdbcUrl, HoodieTableMetaClient source, String dbName, String user, String pass)
diff --git a/hudi-cli/src/main/java/org/apache/hudi/cli/utils/SparkUtil.java b/hudi-cli/src/main/java/org/apache/hudi/cli/utils/SparkUtil.java
index 69ac79620..8dc221648 100644
--- a/hudi-cli/src/main/java/org/apache/hudi/cli/utils/SparkUtil.java
+++ b/hudi-cli/src/main/java/org/apache/hudi/cli/utils/SparkUtil.java
@@ -20,10 +20,10 @@ package org.apache.hudi.cli.utils;
import java.io.File;
import java.net.URISyntaxException;
-import org.apache.commons.lang.StringUtils;
import org.apache.hudi.HoodieWriteClient;
import org.apache.hudi.cli.commands.SparkMain;
import org.apache.hudi.common.util.FSUtils;
+import org.apache.hudi.common.util.StringUtils;
import org.apache.log4j.Logger;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaSparkContext;
@@ -43,7 +43,7 @@ public class SparkUtil {
SparkLauncher sparkLauncher = new SparkLauncher().setAppResource(currentJar)
.setMainClass(SparkMain.class.getName());
- if (StringUtils.isNotEmpty(propertiesFile)) {
+ if (!StringUtils.isEmpty(propertiesFile)) {
sparkLauncher.setPropertiesFile(propertiesFile);
}
diff --git a/hudi-client/src/main/java/org/apache/hudi/io/compact/strategy/BoundedPartitionAwareCompactionStrategy.java b/hudi-client/src/main/java/org/apache/hudi/io/compact/strategy/BoundedPartitionAwareCompactionStrategy.java
index 6f0f793a9..7528730f0 100644
--- a/hudi-client/src/main/java/org/apache/hudi/io/compact/strategy/BoundedPartitionAwareCompactionStrategy.java
+++ b/hudi-client/src/main/java/org/apache/hudi/io/compact/strategy/BoundedPartitionAwareCompactionStrategy.java
@@ -18,13 +18,14 @@
package org.apache.hudi.io.compact.strategy;
+import com.google.common.annotations.VisibleForTesting;
import java.text.SimpleDateFormat;
+import java.util.Calendar;
import java.util.Comparator;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
-import org.apache.commons.lang3.time.DateUtils;
import org.apache.hudi.avro.model.HoodieCompactionOperation;
import org.apache.hudi.avro.model.HoodieCompactionPlan;
import org.apache.hudi.config.HoodieWriteConfig;
@@ -45,8 +46,8 @@ public class BoundedPartitionAwareCompactionStrategy extends DayBasedCompactionS
public List orderAndFilter(HoodieWriteConfig writeConfig,
List operations, List pendingCompactionPlans) {
// The earliest partition to compact - current day minus the target partitions limit
- String earliestPartitionPathToCompact = dateFormat.format(DateUtils.addDays(new Date(), -1 * writeConfig
- .getTargetPartitionsPerDayBasedCompaction()));
+ String earliestPartitionPathToCompact = dateFormat.format(
+ getDateAtOffsetFromToday(-1 * writeConfig.getTargetPartitionsPerDayBasedCompaction()));
// Filter out all partitions greater than earliestPartitionPathToCompact
List eligibleCompactionOperations = operations.stream()
.collect(Collectors.groupingBy(HoodieCompactionOperation::getPartitionPath)).entrySet().stream()
@@ -61,8 +62,8 @@ public class BoundedPartitionAwareCompactionStrategy extends DayBasedCompactionS
@Override
public List filterPartitionPaths(HoodieWriteConfig writeConfig, List partitionPaths) {
// The earliest partition to compact - current day minus the target partitions limit
- String earliestPartitionPathToCompact = dateFormat.format(DateUtils.addDays(new Date(), -1 * writeConfig
- .getTargetPartitionsPerDayBasedCompaction()));
+ String earliestPartitionPathToCompact = dateFormat.format(
+ getDateAtOffsetFromToday(-1 * writeConfig.getTargetPartitionsPerDayBasedCompaction()));
// Get all partitions and sort them
List filteredPartitionPaths = partitionPaths.stream().map(partition -> partition.replace("/", "-"))
.sorted(Comparator.reverseOrder()).map(partitionPath -> partitionPath.replace("-", "/"))
@@ -70,4 +71,11 @@ public class BoundedPartitionAwareCompactionStrategy extends DayBasedCompactionS
.collect(Collectors.toList());
return filteredPartitionPaths;
}
+
+ @VisibleForTesting
+ public static Date getDateAtOffsetFromToday(int offset) {
+ Calendar calendar = Calendar.getInstance();
+ calendar.add(Calendar.DATE, offset);
+ return calendar.getTime();
+ }
}
diff --git a/hudi-client/src/main/java/org/apache/hudi/metrics/Metrics.java b/hudi-client/src/main/java/org/apache/hudi/metrics/Metrics.java
index 82219bb19..fb970e1d9 100644
--- a/hudi-client/src/main/java/org/apache/hudi/metrics/Metrics.java
+++ b/hudi-client/src/main/java/org/apache/hudi/metrics/Metrics.java
@@ -22,7 +22,6 @@ import com.codahale.metrics.Gauge;
import com.codahale.metrics.MetricRegistry;
import com.google.common.io.Closeables;
import java.io.Closeable;
-import org.apache.commons.configuration.ConfigurationException;
import org.apache.hudi.config.HoodieWriteConfig;
import org.apache.hudi.exception.HoodieException;
import org.apache.log4j.LogManager;
@@ -37,9 +36,9 @@ public class Metrics {
private static volatile boolean initialized = false;
private static Metrics metrics = null;
private final MetricRegistry registry;
- private MetricsReporter reporter = null;
+ private MetricsReporter reporter;
- private Metrics(HoodieWriteConfig metricConfig) throws ConfigurationException {
+ private Metrics(HoodieWriteConfig metricConfig) {
registry = new MetricRegistry();
reporter = MetricsReporterFactory.createReporter(metricConfig, registry);
@@ -72,7 +71,7 @@ public class Metrics {
}
try {
metrics = new Metrics(metricConfig);
- } catch (ConfigurationException e) {
+ } catch (Exception e) {
throw new HoodieException(e);
}
initialized = true;
diff --git a/hudi-client/src/test/java/org/apache/hudi/TestHoodieClientOnCopyOnWriteStorage.java b/hudi-client/src/test/java/org/apache/hudi/TestHoodieClientOnCopyOnWriteStorage.java
index ed1458c4e..938ecae61 100644
--- a/hudi-client/src/test/java/org/apache/hudi/TestHoodieClientOnCopyOnWriteStorage.java
+++ b/hudi-client/src/test/java/org/apache/hudi/TestHoodieClientOnCopyOnWriteStorage.java
@@ -38,7 +38,6 @@ import java.util.Set;
import java.util.UUID;
import java.util.stream.Collectors;
import org.apache.avro.generic.GenericRecord;
-import org.apache.commons.io.IOUtils;
import org.apache.hadoop.fs.Path;
import org.apache.hudi.common.HoodieClientTestUtils;
import org.apache.hudi.common.HoodieTestDataGenerator;
@@ -55,6 +54,7 @@ import org.apache.hudi.common.table.TableFileSystemView.ReadOptimizedView;
import org.apache.hudi.common.table.timeline.HoodieInstant;
import org.apache.hudi.common.util.ConsistencyGuardConfig;
import org.apache.hudi.common.util.FSUtils;
+import org.apache.hudi.common.util.FileIOUtils;
import org.apache.hudi.common.util.Option;
import org.apache.hudi.common.util.ParquetUtils;
import org.apache.hudi.common.util.collection.Pair;
@@ -562,9 +562,8 @@ public class TestHoodieClientOnCopyOnWriteStorage extends TestHoodieClientBase {
// Read from commit file
String filename = HoodieTestUtils.getCommitFilePath(basePath, commitTime);
FileInputStream inputStream = new FileInputStream(filename);
- String everything = IOUtils.toString(inputStream, "UTF-8");
- HoodieCommitMetadata metadata = HoodieCommitMetadata.fromJsonString(everything.toString(),
- HoodieCommitMetadata.class);
+ String everything = FileIOUtils.readAsUTFString(inputStream);
+ HoodieCommitMetadata metadata = HoodieCommitMetadata.fromJsonString(everything, HoodieCommitMetadata.class);
HashMap paths = metadata.getFileIdAndFullPaths(basePath);
inputStream.close();
@@ -600,7 +599,7 @@ public class TestHoodieClientOnCopyOnWriteStorage extends TestHoodieClientBase {
// Read from commit file
String filename = HoodieTestUtils.getCommitFilePath(basePath, commitTime);
FileInputStream inputStream = new FileInputStream(filename);
- String everything = IOUtils.toString(inputStream, "UTF-8");
+ String everything = FileIOUtils.readAsUTFString(inputStream);
HoodieCommitMetadata metadata = HoodieCommitMetadata.fromJsonString(everything.toString(),
HoodieCommitMetadata.class);
HoodieRollingStatMetadata rollingStatMetadata = HoodieCommitMetadata.fromJsonString(metadata.getExtraMetadata()
@@ -629,7 +628,7 @@ public class TestHoodieClientOnCopyOnWriteStorage extends TestHoodieClientBase {
// Read from commit file
filename = HoodieTestUtils.getCommitFilePath(basePath, commitTime);
inputStream = new FileInputStream(filename);
- everything = IOUtils.toString(inputStream, "UTF-8");
+ everything = FileIOUtils.readAsUTFString(inputStream);
metadata = HoodieCommitMetadata.fromJsonString(everything.toString(), HoodieCommitMetadata.class);
rollingStatMetadata = HoodieCommitMetadata.fromJsonString(metadata.getExtraMetadata()
.get(HoodieRollingStatMetadata.ROLLING_STAT_METADATA_KEY), HoodieRollingStatMetadata.class);
diff --git a/hudi-client/src/test/java/org/apache/hudi/common/TestRawTripPayload.java b/hudi-client/src/test/java/org/apache/hudi/common/TestRawTripPayload.java
index 32297cb37..074aad449 100644
--- a/hudi-client/src/test/java/org/apache/hudi/common/TestRawTripPayload.java
+++ b/hudi-client/src/test/java/org/apache/hudi/common/TestRawTripPayload.java
@@ -22,7 +22,6 @@ import com.fasterxml.jackson.databind.ObjectMapper;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
-import java.io.StringWriter;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@@ -32,11 +31,11 @@ import java.util.zip.DeflaterOutputStream;
import java.util.zip.InflaterInputStream;
import org.apache.avro.Schema;
import org.apache.avro.generic.IndexedRecord;
-import org.apache.commons.io.IOUtils;
import org.apache.hudi.WriteStatus;
import org.apache.hudi.avro.MercifulJsonConverter;
import org.apache.hudi.common.model.HoodieRecord;
import org.apache.hudi.common.model.HoodieRecordPayload;
+import org.apache.hudi.common.util.FileIOUtils;
import org.apache.hudi.common.util.Option;
/**
@@ -132,10 +131,9 @@ public class TestRawTripPayload implements HoodieRecordPayload hoodieRecords = hoodieTestDataGenerator.generateInserts(commitTime, numRecords);
final BoundedInMemoryQueue> queue =
- new BoundedInMemoryQueue(FileUtils.ONE_KB, getTransformFunction(HoodieTestDataGenerator.avroSchema));
+ new BoundedInMemoryQueue(FileIOUtils.KB, getTransformFunction(HoodieTestDataGenerator.avroSchema));
// Produce
Future resFuture =
executorService.submit(() -> {
@@ -122,7 +122,7 @@ public class TestBoundedInMemoryQueue {
final List> recs = new ArrayList<>();
final BoundedInMemoryQueue> queue =
- new BoundedInMemoryQueue(FileUtils.ONE_KB, getTransformFunction(HoodieTestDataGenerator.avroSchema));
+ new BoundedInMemoryQueue(FileIOUtils.KB, getTransformFunction(HoodieTestDataGenerator.avroSchema));
// Record Key to
Map> keyToProducerAndIndexMap = new HashMap<>();
diff --git a/hudi-client/src/test/java/org/apache/hudi/func/TestUpdateMapFunction.java b/hudi-client/src/test/java/org/apache/hudi/func/TestUpdateMapFunction.java
index bfa1a49e7..a614c1c5b 100644
--- a/hudi-client/src/test/java/org/apache/hudi/func/TestUpdateMapFunction.java
+++ b/hudi-client/src/test/java/org/apache/hudi/func/TestUpdateMapFunction.java
@@ -27,7 +27,6 @@ import java.util.Arrays;
import java.util.List;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericRecord;
-import org.apache.commons.io.IOUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hudi.WriteStatus;
@@ -39,6 +38,7 @@ import org.apache.hudi.common.model.HoodieRecordLocation;
import org.apache.hudi.common.model.HoodieTestUtils;
import org.apache.hudi.common.table.HoodieTimeline;
import org.apache.hudi.common.util.FSUtils;
+import org.apache.hudi.common.util.FileIOUtils;
import org.apache.hudi.common.util.ParquetUtils;
import org.apache.hudi.config.HoodieWriteConfig;
import org.apache.hudi.io.HoodieCreateHandle;
@@ -151,7 +151,7 @@ public class TestUpdateMapFunction implements Serializable {
private HoodieWriteConfig makeHoodieClientConfig(String schema) throws Exception {
// Prepare the AvroParquetIO
- String schemaStr = IOUtils.toString(getClass().getResourceAsStream(schema), "UTF-8");
+ String schemaStr = FileIOUtils.readAsUTFString(getClass().getResourceAsStream(schema));
return HoodieWriteConfig.newBuilder().withPath(basePath).withSchema(schemaStr).build();
}
}
diff --git a/hudi-client/src/test/java/org/apache/hudi/index/bloom/TestHoodieBloomIndex.java b/hudi-client/src/test/java/org/apache/hudi/index/bloom/TestHoodieBloomIndex.java
index dffe3da41..fad815faa 100644
--- a/hudi-client/src/test/java/org/apache/hudi/index/bloom/TestHoodieBloomIndex.java
+++ b/hudi-client/src/test/java/org/apache/hudi/index/bloom/TestHoodieBloomIndex.java
@@ -37,7 +37,6 @@ import java.util.Map;
import java.util.UUID;
import java.util.stream.Collectors;
import org.apache.avro.Schema;
-import org.apache.commons.io.IOUtils;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hudi.common.BloomFilter;
@@ -48,6 +47,7 @@ import org.apache.hudi.common.model.HoodieRecord;
import org.apache.hudi.common.model.HoodieTestUtils;
import org.apache.hudi.common.table.HoodieTableMetaClient;
import org.apache.hudi.common.util.FSUtils;
+import org.apache.hudi.common.util.FileIOUtils;
import org.apache.hudi.common.util.HoodieAvroUtils;
import org.apache.hudi.common.util.Option;
import org.apache.hudi.common.util.collection.Pair;
@@ -103,7 +103,7 @@ public class TestHoodieBloomIndex {
fs = FSUtils.getFs(basePath, jsc.hadoopConfiguration());
HoodieTestUtils.init(jsc.hadoopConfiguration(), basePath);
// We have some records to be tagged (two different partitions)
- schemaStr = IOUtils.toString(getClass().getResourceAsStream("/exampleSchema.txt"), "UTF-8");
+ schemaStr = FileIOUtils.readAsUTFString(getClass().getResourceAsStream("/exampleSchema.txt"));
schema = HoodieAvroUtils.addMetadataFields(new Schema.Parser().parse(schemaStr));
}
diff --git a/hudi-client/src/test/java/org/apache/hudi/index/bloom/TestHoodieGlobalBloomIndex.java b/hudi-client/src/test/java/org/apache/hudi/index/bloom/TestHoodieGlobalBloomIndex.java
index 90c2e0cf7..6d7df0df8 100644
--- a/hudi-client/src/test/java/org/apache/hudi/index/bloom/TestHoodieGlobalBloomIndex.java
+++ b/hudi-client/src/test/java/org/apache/hudi/index/bloom/TestHoodieGlobalBloomIndex.java
@@ -34,7 +34,6 @@ import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import org.apache.avro.Schema;
-import org.apache.commons.io.IOUtils;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hudi.common.HoodieClientTestUtils;
import org.apache.hudi.common.TestRawTripPayload;
@@ -44,6 +43,7 @@ import org.apache.hudi.common.model.HoodieRecord;
import org.apache.hudi.common.model.HoodieTestUtils;
import org.apache.hudi.common.table.HoodieTableMetaClient;
import org.apache.hudi.common.util.FSUtils;
+import org.apache.hudi.common.util.FileIOUtils;
import org.apache.hudi.common.util.HoodieAvroUtils;
import org.apache.hudi.config.HoodieWriteConfig;
import org.apache.hudi.table.HoodieTable;
@@ -78,7 +78,7 @@ public class TestHoodieGlobalBloomIndex {
fs = FSUtils.getFs(basePath, jsc.hadoopConfiguration());
HoodieTestUtils.init(jsc.hadoopConfiguration(), basePath);
// We have some records to be tagged (two different partitions)
- schemaStr = IOUtils.toString(getClass().getResourceAsStream("/exampleSchema.txt"), "UTF-8");
+ schemaStr = FileIOUtils.readAsUTFString(getClass().getResourceAsStream("/exampleSchema.txt"));
schema = HoodieAvroUtils.addMetadataFields(new Schema.Parser().parse(schemaStr));
}
diff --git a/hudi-client/src/test/java/org/apache/hudi/io/strategy/TestHoodieCompactionStrategy.java b/hudi-client/src/test/java/org/apache/hudi/io/strategy/TestHoodieCompactionStrategy.java
index f075edffe..9a2f0c5ef 100644
--- a/hudi-client/src/test/java/org/apache/hudi/io/strategy/TestHoodieCompactionStrategy.java
+++ b/hudi-client/src/test/java/org/apache/hudi/io/strategy/TestHoodieCompactionStrategy.java
@@ -31,7 +31,6 @@ import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.stream.Collectors;
-import org.apache.commons.lang3.time.DateUtils;
import org.apache.hudi.avro.model.HoodieCompactionOperation;
import org.apache.hudi.common.model.HoodieDataFile;
import org.apache.hudi.common.model.HoodieLogFile;
@@ -161,11 +160,12 @@ public class TestHoodieCompactionStrategy {
SimpleDateFormat format = new SimpleDateFormat("yyyy/MM/dd");
Date today = new Date();
String currentDay = format.format(today);
- String currentDayMinus1 = format.format(DateUtils.addDays(today, -1));
- String currentDayMinus2 = format.format(DateUtils.addDays(today, -2));
- String currentDayMinus3 = format.format(DateUtils.addDays(today, -3));
- String currentDayPlus1 = format.format(DateUtils.addDays(today, 1));
- String currentDayPlus5 = format.format(DateUtils.addDays(today, 5));
+
+ String currentDayMinus1 = format.format(BoundedPartitionAwareCompactionStrategy.getDateAtOffsetFromToday(-1));
+ String currentDayMinus2 = format.format(BoundedPartitionAwareCompactionStrategy.getDateAtOffsetFromToday(-2));
+ String currentDayMinus3 = format.format(BoundedPartitionAwareCompactionStrategy.getDateAtOffsetFromToday(-3));
+ String currentDayPlus1 = format.format(BoundedPartitionAwareCompactionStrategy.getDateAtOffsetFromToday(1));
+ String currentDayPlus5 = format.format(BoundedPartitionAwareCompactionStrategy.getDateAtOffsetFromToday(5));
Map keyToPartitionMap = new ImmutableMap.Builder()
.put(120 * MB, currentDay)
@@ -208,11 +208,12 @@ public class TestHoodieCompactionStrategy {
SimpleDateFormat format = new SimpleDateFormat("yyyy/MM/dd");
Date today = new Date();
String currentDay = format.format(today);
- String currentDayMinus1 = format.format(DateUtils.addDays(today, -1));
- String currentDayMinus2 = format.format(DateUtils.addDays(today, -2));
- String currentDayMinus3 = format.format(DateUtils.addDays(today, -3));
- String currentDayPlus1 = format.format(DateUtils.addDays(today, 1));
- String currentDayPlus5 = format.format(DateUtils.addDays(today, 5));
+
+ String currentDayMinus1 = format.format(BoundedPartitionAwareCompactionStrategy.getDateAtOffsetFromToday(-1));
+ String currentDayMinus2 = format.format(BoundedPartitionAwareCompactionStrategy.getDateAtOffsetFromToday(-2));
+ String currentDayMinus3 = format.format(BoundedPartitionAwareCompactionStrategy.getDateAtOffsetFromToday(-3));
+ String currentDayPlus1 = format.format(BoundedPartitionAwareCompactionStrategy.getDateAtOffsetFromToday(1));
+ String currentDayPlus5 = format.format(BoundedPartitionAwareCompactionStrategy.getDateAtOffsetFromToday(5));
Map keyToPartitionMap = new ImmutableMap.Builder()
.put(120 * MB, currentDay)
diff --git a/hudi-client/src/test/java/org/apache/hudi/metrics/TestHoodieMetrics.java b/hudi-client/src/test/java/org/apache/hudi/metrics/TestHoodieMetrics.java
index 1a1b40758..a99c6146f 100644
--- a/hudi-client/src/test/java/org/apache/hudi/metrics/TestHoodieMetrics.java
+++ b/hudi-client/src/test/java/org/apache/hudi/metrics/TestHoodieMetrics.java
@@ -23,7 +23,6 @@ import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
-import org.apache.commons.configuration.ConfigurationException;
import org.apache.hudi.config.HoodieWriteConfig;
import org.junit.Before;
import org.junit.Test;
@@ -33,7 +32,7 @@ public class TestHoodieMetrics {
private HoodieMetrics metrics = null;
@Before
- public void start() throws ConfigurationException {
+ public void start() {
HoodieWriteConfig config = mock(HoodieWriteConfig.class);
when(config.isMetricsOn()).thenReturn(true);
when(config.getMetricsReporterType()).thenReturn(MetricsReporterType.INMEMORY);
diff --git a/hudi-client/src/test/java/org/apache/hudi/table/TestCopyOnWriteTable.java b/hudi-client/src/test/java/org/apache/hudi/table/TestCopyOnWriteTable.java
index 980c15848..bdbf094ee 100644
--- a/hudi-client/src/test/java/org/apache/hudi/table/TestCopyOnWriteTable.java
+++ b/hudi-client/src/test/java/org/apache/hudi/table/TestCopyOnWriteTable.java
@@ -30,7 +30,6 @@ import java.util.List;
import java.util.Map;
import java.util.UUID;
import org.apache.avro.generic.GenericRecord;
-import org.apache.commons.io.IOUtils;
import org.apache.hadoop.fs.Path;
import org.apache.hudi.WriteStatus;
import org.apache.hudi.common.BloomFilter;
@@ -45,6 +44,7 @@ import org.apache.hudi.common.model.HoodieTestUtils;
import org.apache.hudi.common.table.HoodieTableMetaClient;
import org.apache.hudi.common.table.HoodieTimeline;
import org.apache.hudi.common.util.FSUtils;
+import org.apache.hudi.common.util.FileIOUtils;
import org.apache.hudi.common.util.Option;
import org.apache.hudi.common.util.ParquetUtils;
import org.apache.hudi.common.util.collection.Pair;
@@ -117,7 +117,7 @@ public class TestCopyOnWriteTable {
private HoodieWriteConfig.Builder makeHoodieClientConfigBuilder() throws Exception {
// Prepare the AvroParquetIO
- String schemaStr = IOUtils.toString(getClass().getResourceAsStream("/exampleSchema.txt"), "UTF-8");
+ String schemaStr = FileIOUtils.readAsUTFString(getClass().getResourceAsStream("/exampleSchema.txt"));
return HoodieWriteConfig.newBuilder().withPath(basePath).withSchema(schemaStr);
}
diff --git a/hudi-common/pom.xml b/hudi-common/pom.xml
index db3ffe98d..2b83af7ff 100644
--- a/hudi-common/pom.xml
+++ b/hudi-common/pom.xml
@@ -94,16 +94,6 @@
org.apache.avro
avro
-
- org.apache.avro
- avro-mapred
-
-
- org.mortbay.jetty
- *
-
-
-
@@ -111,12 +101,6 @@
parquet-avro
-
-
- commons-codec
- commons-codec
-
-
org.apache.httpcomponents
diff --git a/hudi-common/src/main/java/org/apache/hudi/common/BloomFilter.java b/hudi-common/src/main/java/org/apache/hudi/common/BloomFilter.java
index 5b18ef7c7..57ab2d8b9 100644
--- a/hudi-common/src/main/java/org/apache/hudi/common/BloomFilter.java
+++ b/hudi-common/src/main/java/org/apache/hudi/common/BloomFilter.java
@@ -19,12 +19,12 @@
package org.apache.hudi.common;
import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import javax.xml.bind.DatatypeConverter;
-import org.apache.commons.io.output.ByteArrayOutputStream;
import org.apache.hadoop.util.bloom.Key;
import org.apache.hadoop.util.hash.Hash;
import org.apache.hudi.exception.HoodieIndexException;
diff --git a/hudi-common/src/main/java/org/apache/hudi/common/HoodieJsonPayload.java b/hudi-common/src/main/java/org/apache/hudi/common/HoodieJsonPayload.java
index 9f3eb93f8..067565367 100644
--- a/hudi-common/src/main/java/org/apache/hudi/common/HoodieJsonPayload.java
+++ b/hudi-common/src/main/java/org/apache/hudi/common/HoodieJsonPayload.java
@@ -23,15 +23,14 @@ import com.fasterxml.jackson.databind.ObjectMapper;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
-import java.io.StringWriter;
import java.util.zip.Deflater;
import java.util.zip.DeflaterOutputStream;
import java.util.zip.InflaterInputStream;
import org.apache.avro.Schema;
import org.apache.avro.generic.IndexedRecord;
-import org.apache.commons.io.IOUtils;
import org.apache.hudi.avro.MercifulJsonConverter;
import org.apache.hudi.common.model.HoodieRecordPayload;
+import org.apache.hudi.common.util.FileIOUtils;
import org.apache.hudi.common.util.Option;
import org.apache.hudi.exception.HoodieException;
@@ -87,9 +86,7 @@ public class HoodieJsonPayload implements HoodieRecordPayload
private String unCompressData(byte[] data) throws IOException {
InflaterInputStream iis = new InflaterInputStream(new ByteArrayInputStream(data));
try {
- StringWriter sw = new StringWriter(dataSize);
- IOUtils.copy(iis, sw);
- return sw.toString();
+ return FileIOUtils.readAsUTFString(iis, dataSize);
} finally {
iis.close();
}
diff --git a/hudi-common/src/main/java/org/apache/hudi/common/table/timeline/HoodieActiveTimeline.java b/hudi-common/src/main/java/org/apache/hudi/common/table/timeline/HoodieActiveTimeline.java
index c86c3a111..fd62eef80 100644
--- a/hudi-common/src/main/java/org/apache/hudi/common/table/timeline/HoodieActiveTimeline.java
+++ b/hudi-common/src/main/java/org/apache/hudi/common/table/timeline/HoodieActiveTimeline.java
@@ -30,13 +30,13 @@ import java.util.HashSet;
import java.util.Set;
import java.util.function.Function;
import java.util.stream.Stream;
-import org.apache.commons.io.IOUtils;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.Path;
import org.apache.hudi.common.table.HoodieTableMetaClient;
import org.apache.hudi.common.table.HoodieTimeline;
import org.apache.hudi.common.table.timeline.HoodieInstant.State;
+import org.apache.hudi.common.util.FileIOUtils;
import org.apache.hudi.common.util.Option;
import org.apache.hudi.exception.HoodieIOException;
import org.apache.log4j.LogManager;
@@ -394,7 +394,7 @@ public class HoodieActiveTimeline extends HoodieDefaultTimeline {
private Option readDataFromPath(Path detailPath) {
try (FSDataInputStream is = metaClient.getFs().open(detailPath)) {
- return Option.of(IOUtils.toByteArray(is));
+ return Option.of(FileIOUtils.readAsByteArray(is));
} catch (IOException e) {
throw new HoodieIOException("Could not read commit details from " + detailPath, e);
}
diff --git a/hudi-common/src/main/java/org/apache/hudi/common/table/timeline/HoodieDefaultTimeline.java b/hudi-common/src/main/java/org/apache/hudi/common/table/timeline/HoodieDefaultTimeline.java
index 9f8154f87..acf9bfdee 100644
--- a/hudi-common/src/main/java/org/apache/hudi/common/table/timeline/HoodieDefaultTimeline.java
+++ b/hudi-common/src/main/java/org/apache/hudi/common/table/timeline/HoodieDefaultTimeline.java
@@ -27,7 +27,6 @@ import java.util.function.Function;
import java.util.function.Predicate;
import java.util.stream.Collectors;
import java.util.stream.Stream;
-import org.apache.commons.codec.binary.Hex;
import org.apache.hudi.common.table.HoodieTimeline;
import org.apache.hudi.common.util.Option;
import org.apache.hudi.common.util.StringUtils;
@@ -68,7 +67,8 @@ public class HoodieDefaultTimeline implements HoodieTimeline {
} catch (NoSuchAlgorithmException nse) {
throw new HoodieException(nse);
}
- this.timelineHash = new String(Hex.encodeHex(md.digest()));
+
+ this.timelineHash = StringUtils.toHexString(md.digest());
}
/**
diff --git a/hudi-common/src/main/java/org/apache/hudi/common/util/AvroUtils.java b/hudi-common/src/main/java/org/apache/hudi/common/util/AvroUtils.java
index 01baa4719..b0a01b138 100644
--- a/hudi-common/src/main/java/org/apache/hudi/common/util/AvroUtils.java
+++ b/hudi-common/src/main/java/org/apache/hudi/common/util/AvroUtils.java
@@ -20,28 +20,20 @@ package org.apache.hudi.common.util;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableMap;
-import com.google.common.collect.Lists;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
-import org.apache.avro.Schema;
import org.apache.avro.file.DataFileReader;
import org.apache.avro.file.DataFileWriter;
import org.apache.avro.file.FileReader;
import org.apache.avro.file.SeekableByteArrayInput;
-import org.apache.avro.file.SeekableInput;
-import org.apache.avro.generic.GenericDatumReader;
-import org.apache.avro.generic.GenericRecord;
import org.apache.avro.io.DatumReader;
import org.apache.avro.io.DatumWriter;
-import org.apache.avro.mapred.FsInput;
import org.apache.avro.specific.SpecificDatumReader;
import org.apache.avro.specific.SpecificDatumWriter;
import org.apache.avro.specific.SpecificRecordBase;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
import org.apache.hudi.avro.model.HoodieCleanMetadata;
import org.apache.hudi.avro.model.HoodieCleanPartitionMetadata;
import org.apache.hudi.avro.model.HoodieCompactionPlan;
@@ -52,49 +44,9 @@ import org.apache.hudi.avro.model.HoodieSavepointMetadata;
import org.apache.hudi.avro.model.HoodieSavepointPartitionMetadata;
import org.apache.hudi.common.HoodieCleanStat;
import org.apache.hudi.common.HoodieRollbackStat;
-import org.apache.hudi.common.model.HoodieAvroPayload;
-import org.apache.hudi.common.model.HoodieKey;
-import org.apache.hudi.common.model.HoodieRecord;
-import org.apache.hudi.exception.HoodieIOException;
public class AvroUtils {
- public static List> loadFromFiles(FileSystem fs,
- List deltaFilePaths, Schema expectedSchema) {
- List> loadedRecords = Lists.newArrayList();
- deltaFilePaths.forEach(s -> {
- List> records = loadFromFile(fs, s, expectedSchema);
- loadedRecords.addAll(records);
- });
- return loadedRecords;
- }
-
- public static List> loadFromFile(FileSystem fs,
- String deltaFilePath, Schema expectedSchema) {
- List> loadedRecords = Lists.newArrayList();
- Path path = new Path(deltaFilePath);
- try {
- SeekableInput input = new FsInput(path, fs.getConf());
- GenericDatumReader reader = new GenericDatumReader<>();
- // Set the expected schema to be the current schema to account for schema evolution
- reader.setExpected(expectedSchema);
-
- FileReader fileReader = DataFileReader.openReader(input, reader);
- for (GenericRecord deltaRecord : fileReader) {
- String key = deltaRecord.get(HoodieRecord.RECORD_KEY_METADATA_FIELD).toString();
- String partitionPath =
- deltaRecord.get(HoodieRecord.PARTITION_PATH_METADATA_FIELD).toString();
- loadedRecords.add(new HoodieRecord<>(new HoodieKey(key, partitionPath),
- new HoodieAvroPayload(Option.of(deltaRecord))));
- }
- fileReader.close(); // also closes underlying FsInput
- } catch (IOException e) {
- throw new HoodieIOException("Could not read avro records from path " + deltaFilePath,
- e);
- }
- return loadedRecords;
- }
-
public static HoodieCleanMetadata convertCleanMetadata(String startCleanTime,
Option durationInMs, List cleanStats) {
ImmutableMap.Builder partitionMetadataBuilder =
diff --git a/hudi-common/src/main/java/org/apache/hudi/common/util/FailSafeConsistencyGuard.java b/hudi-common/src/main/java/org/apache/hudi/common/util/FailSafeConsistencyGuard.java
index e1b816ec0..bec07a8f5 100644
--- a/hudi-common/src/main/java/org/apache/hudi/common/util/FailSafeConsistencyGuard.java
+++ b/hudi-common/src/main/java/org/apache/hudi/common/util/FailSafeConsistencyGuard.java
@@ -107,7 +107,7 @@ public class FailSafeConsistencyGuard implements ConsistencyGuard {
log.warn("Got IOException waiting for file event. Have tried " + retryNum + " time(s)", ioe);
}
return false;
- }, "Timed out waiting for filles to become visible");
+ }, "Timed out waiting for files to become visible");
}
/**
diff --git a/hudi-common/src/main/java/org/apache/hudi/common/util/FileIOUtils.java b/hudi-common/src/main/java/org/apache/hudi/common/util/FileIOUtils.java
new file mode 100644
index 000000000..908c780e2
--- /dev/null
+++ b/hudi-common/src/main/java/org/apache/hudi/common/util/FileIOUtils.java
@@ -0,0 +1,120 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hudi.common.util;
+
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.io.PrintStream;
+import java.nio.charset.StandardCharsets;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.Comparator;
+import java.util.stream.Stream;
+
+/**
+ * Bunch of utility methods for working with files and byte streams.
+ */
+public class FileIOUtils {
+
+  public static final long KB = 1024;
+
+  /**
+   * Recursively deletes a directory and everything under it. No-op if the directory is absent.
+   */
+  public static void deleteDirectory(File directory) throws IOException {
+    if (directory.exists()) {
+      // Files.walk holds open directory handles; close the stream to avoid leaking them
+      try (Stream<Path> files = Files.walk(directory.toPath())) {
+        files.sorted(Comparator.reverseOrder())
+            .map(Path::toFile)
+            .forEach(File::delete);
+      }
+      directory.delete();
+      if (directory.exists()) {
+        throw new IOException("Unable to delete directory " + directory);
+      }
+    }
+  }
+
+  /**
+   * Creates the directory (including missing parents). No-op if it already exists.
+   */
+  public static void mkdir(File directory) throws IOException {
+    if (!directory.exists()) {
+      directory.mkdirs();
+    }
+
+    if (!directory.isDirectory()) {
+      throw new IOException("Unable to create: " + directory);
+    }
+  }
+
+  /**
+   * Reads the entire input stream as a UTF-8 string. The stream is not closed.
+   */
+  public static String readAsUTFString(InputStream input) throws IOException {
+    return readAsUTFString(input, 128);
+  }
+
+  public static String readAsUTFString(InputStream input, int length) throws IOException {
+    ByteArrayOutputStream bos = new ByteArrayOutputStream(length);
+    copy(input, bos);
+    return new String(bos.toByteArray(), StandardCharsets.UTF_8);
+  }
+
+  /**
+   * Copies all remaining bytes from inputStream to outputStream. Neither stream is closed.
+   */
+  public static void copy(InputStream inputStream, OutputStream outputStream) throws IOException {
+    byte[] buffer = new byte[1024];
+    int len;
+    while ((len = inputStream.read(buffer)) != -1) {
+      outputStream.write(buffer, 0, len);
+    }
+  }
+
+  public static byte[] readAsByteArray(InputStream input) throws IOException {
+    return readAsByteArray(input, 128);
+  }
+
+  public static byte[] readAsByteArray(InputStream input, int outputSize) throws IOException {
+    ByteArrayOutputStream bos = new ByteArrayOutputStream(outputSize);
+    copy(input, bos);
+    return bos.toByteArray();
+  }
+
+  /**
+   * Writes str (plus a trailing newline) to filePath, surfacing any write failure.
+   */
+  public static void writeStringToFile(String str, String filePath) throws IOException {
+    // try-with-resources guarantees the stream is closed even if writing fails
+    try (PrintStream out = new PrintStream(new FileOutputStream(filePath))) {
+      out.println(str);
+      out.flush();
+      // PrintStream swallows IOExceptions internally; surface them explicitly
+      if (out.checkError()) {
+        throw new IOException("Failed to write string to file " + filePath);
+      }
+    }
+  }
+}
diff --git a/hudi-common/src/main/java/org/apache/hudi/common/util/RocksDBDAO.java b/hudi-common/src/main/java/org/apache/hudi/common/util/RocksDBDAO.java
index 52992a2c6..2ae53f975 100644
--- a/hudi-common/src/main/java/org/apache/hudi/common/util/RocksDBDAO.java
+++ b/hudi-common/src/main/java/org/apache/hudi/common/util/RocksDBDAO.java
@@ -30,7 +30,6 @@ import java.util.UUID;
import java.util.concurrent.ConcurrentHashMap;
import java.util.stream.Collectors;
import java.util.stream.Stream;
-import org.apache.commons.io.FileUtils;
import org.apache.hudi.common.util.collection.Pair;
import org.apache.hudi.exception.HoodieException;
import org.apache.hudi.exception.HoodieIOException;
@@ -86,7 +85,7 @@ public class RocksDBDAO {
private void init() throws HoodieException {
try {
log.info("DELETING RocksDB persisted at " + rocksDBBasePath);
- FileUtils.deleteDirectory(new File(rocksDBBasePath));
+ FileIOUtils.deleteDirectory(new File(rocksDBBasePath));
managedHandlesMap = new ConcurrentHashMap<>();
managedDescriptorMap = new ConcurrentHashMap<>();
@@ -103,7 +102,7 @@ public class RocksDBDAO {
});
final List managedColumnFamilies = loadManagedColumnFamilies(dbOptions);
final List managedHandles = new ArrayList<>();
- FileUtils.forceMkdir(new File(rocksDBBasePath));
+ FileIOUtils.mkdir(new File(rocksDBBasePath));
rocksDB = RocksDB.open(dbOptions, rocksDBBasePath, managedColumnFamilies, managedHandles);
Preconditions.checkArgument(managedHandles.size() == managedColumnFamilies.size(),
@@ -450,7 +449,7 @@ public class RocksDBDAO {
managedDescriptorMap.clear();
getRocksDB().close();
try {
- FileUtils.deleteDirectory(new File(rocksDBBasePath));
+ FileIOUtils.deleteDirectory(new File(rocksDBBasePath));
} catch (IOException e) {
throw new HoodieIOException(e.getMessage(), e);
}
diff --git a/hudi-common/src/main/java/org/apache/hudi/common/util/StringUtils.java b/hudi-common/src/main/java/org/apache/hudi/common/util/StringUtils.java
index d3d162d14..5fd45b8c3 100644
--- a/hudi-common/src/main/java/org/apache/hudi/common/util/StringUtils.java
+++ b/hudi-common/src/main/java/org/apache/hudi/common/util/StringUtils.java
@@ -54,4 +54,15 @@ public class StringUtils {
return org.apache.hadoop.util.StringUtils.join(separator, array);
}
+ public static String toHexString(byte[] bytes) {
+ StringBuilder sb = new StringBuilder(bytes.length * 2);
+ for (byte b: bytes) {
+ sb.append(String.format("%02x", b));
+ }
+ return sb.toString();
+ }
+
+ public static boolean isEmpty(String str) {
+ return str == null || str.length() == 0;
+ }
}
diff --git a/hudi-common/src/main/java/org/apache/hudi/common/util/collection/Pair.java b/hudi-common/src/main/java/org/apache/hudi/common/util/collection/Pair.java
index 28fcc982d..bcd800cd8 100644
--- a/hudi-common/src/main/java/org/apache/hudi/common/util/collection/Pair.java
+++ b/hudi-common/src/main/java/org/apache/hudi/common/util/collection/Pair.java
@@ -20,7 +20,6 @@ package org.apache.hudi.common.util.collection;
import java.io.Serializable;
import java.util.Map;
-import org.apache.commons.lang.builder.CompareToBuilder;
/**
* (NOTE: Adapted from Apache commons-lang3)
@@ -57,7 +56,7 @@ public abstract class Pair implements Map.Entry, Comparable Pair of(final L left, final R right) {
- return new ImmutablePair(left, right);
+ return new ImmutablePair<>(left, right);
}
//-----------------------------------------------------------------------
@@ -117,8 +116,20 @@ public abstract class Pair implements Map.Entry, Comparable other) {
- return new CompareToBuilder().append(getLeft(), other.getLeft())
- .append(getRight(), other.getRight()).toComparison();
+
+ checkComparable(this);
+ checkComparable(other);
+
+ Comparable thisLeft = (Comparable) getLeft();
+ Comparable thisRight = (Comparable) getRight();
+ Comparable otherLeft = (Comparable) other.getLeft();
+ Comparable otherRight = (Comparable) other.getRight();
+
+ if (thisLeft.compareTo(otherLeft) == 0) {
+ return thisRight.compareTo(otherRight);
+ } else {
+ return thisLeft.compareTo(otherLeft);
+ }
}
/**
@@ -178,4 +189,9 @@ public abstract class Pair implements Map.Entry, Comparable pair) {
+ if (!(pair.getLeft() instanceof Comparable) || !(pair.getRight() instanceof Comparable)) {
+ throw new IllegalArgumentException("Elements of Pair must implement Comparable :" + pair);
+ }
+ }
}
diff --git a/hudi-common/src/main/java/org/apache/hudi/common/util/collection/Triple.java b/hudi-common/src/main/java/org/apache/hudi/common/util/collection/Triple.java
index 8a030e193..55bb63b00 100644
--- a/hudi-common/src/main/java/org/apache/hudi/common/util/collection/Triple.java
+++ b/hudi-common/src/main/java/org/apache/hudi/common/util/collection/Triple.java
@@ -19,7 +19,6 @@
package org.apache.hudi.common.util.collection;
import java.io.Serializable;
-import org.apache.commons.lang.builder.CompareToBuilder;
/**
* (NOTE: Adapted from Apache commons-lang3)
@@ -96,9 +95,17 @@ public abstract class Triple implements Comparable>, Se
*/
@Override
public int compareTo(final Triple other) {
- return new CompareToBuilder().append(getLeft(), other.getLeft())
- .append(getMiddle(), other.getMiddle())
- .append(getRight(), other.getRight()).toComparison();
+ checkComparable(this);
+ checkComparable(other);
+
+ Comparable thisLeft = (Comparable) getLeft();
+ Comparable otherLeft = (Comparable) other.getLeft();
+
+ if (thisLeft.compareTo(otherLeft) == 0) {
+ return Pair.of(getMiddle(), getRight()).compareTo(Pair.of(other.getMiddle(), other.getRight()));
+ } else {
+ return thisLeft.compareTo(otherLeft);
+ }
}
/**
@@ -160,5 +167,11 @@ public abstract class Triple implements Comparable>, Se
return String.format(format, getLeft(), getMiddle(), getRight());
}
+ private void checkComparable(Triple triplet) {
+ if (!(triplet.getLeft() instanceof Comparable) || !(triplet.getMiddle() instanceof Comparable)
+ || !(triplet.getRight() instanceof Comparable)) {
+ throw new IllegalArgumentException("Elements of Triple must implement Comparable :" + triplet);
+ }
+ }
}
diff --git a/hudi-common/src/test/java/org/apache/hudi/common/minicluster/HdfsTestService.java b/hudi-common/src/test/java/org/apache/hudi/common/minicluster/HdfsTestService.java
index 5408189fe..09eaa733d 100644
--- a/hudi-common/src/test/java/org/apache/hudi/common/minicluster/HdfsTestService.java
+++ b/hudi-common/src/test/java/org/apache/hudi/common/minicluster/HdfsTestService.java
@@ -22,21 +22,21 @@ import com.google.common.base.Preconditions;
import com.google.common.io.Files;
import java.io.File;
import java.io.IOException;
-import org.apache.commons.io.FileUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hudi.common.model.HoodieTestUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import org.apache.hudi.common.util.FileIOUtils;
+import org.apache.log4j.LogManager;
+import org.apache.log4j.Logger;
/**
* An HDFS minicluster service implementation.
*/
public class HdfsTestService {
- private static final Logger logger = LoggerFactory.getLogger(HdfsTestService.class);
+ private static final Logger logger = LogManager.getLogger(HdfsTestService.class);
/**
* Configuration settings
@@ -72,7 +72,7 @@ public class HdfsTestService {
if (format) {
logger.info("Cleaning HDFS cluster data at: " + localDFSLocation + " and starting fresh.");
File file = new File(localDFSLocation);
- FileUtils.deleteDirectory(file);
+ FileIOUtils.deleteDirectory(file);
}
// Configure and start the HDFS cluster
diff --git a/hudi-common/src/test/java/org/apache/hudi/common/minicluster/ZookeeperTestService.java b/hudi-common/src/test/java/org/apache/hudi/common/minicluster/ZookeeperTestService.java
index 1a01012a2..89992eecb 100644
--- a/hudi-common/src/test/java/org/apache/hudi/common/minicluster/ZookeeperTestService.java
+++ b/hudi-common/src/test/java/org/apache/hudi/common/minicluster/ZookeeperTestService.java
@@ -30,11 +30,11 @@ import java.net.InetSocketAddress;
import java.net.Socket;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileUtil;
+import org.apache.log4j.LogManager;
+import org.apache.log4j.Logger;
import org.apache.zookeeper.server.NIOServerCnxnFactory;
import org.apache.zookeeper.server.ZooKeeperServer;
import org.apache.zookeeper.server.persistence.FileTxnLog;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* A Zookeeper minicluster service implementation.
@@ -52,7 +52,7 @@ import org.slf4j.LoggerFactory;
*/
public class ZookeeperTestService {
- private static final Logger logger = LoggerFactory.getLogger(ZookeeperTestService.class);
+ private static final Logger logger = LogManager.getLogger(ZookeeperTestService.class);
private static final int TICK_TIME = 2000;
private static final int CONNECTION_TIMEOUT = 30000;
diff --git a/hudi-common/src/test/java/org/apache/hudi/common/util/TestFileIOUtils.java b/hudi-common/src/test/java/org/apache/hudi/common/util/TestFileIOUtils.java
new file mode 100644
index 000000000..222c7aa6a
--- /dev/null
+++ b/hudi-common/src/test/java/org/apache/hudi/common/util/TestFileIOUtils.java
@@ -0,0 +1,71 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hudi.common.util;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.io.ByteArrayInputStream;
+import java.io.File;
+import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+import org.junit.Test;
+import org.junit.rules.TemporaryFolder;
+
+/**
+ * Tests for {@link FileIOUtils}.
+ */
+public class TestFileIOUtils {
+
+  @Test
+  public void testMkdirAndDelete() throws IOException {
+    TemporaryFolder folder = new TemporaryFolder();
+    folder.create();
+    try {
+      try {
+        FileIOUtils.mkdir(folder.getRoot());
+      } catch (IOException e) {
+        fail("Should not error out if dir exists already");
+      }
+      File dir = new File(folder.getRoot().getAbsolutePath() + "/dir");
+      FileIOUtils.mkdir(dir);
+      assertTrue(dir.exists());
+
+      new File(dir, "t.txt").createNewFile();
+      new File(dir, "subdir").mkdirs();
+      new File(dir, "subdir/z.txt").createNewFile();
+      FileIOUtils.deleteDirectory(dir);
+      assertFalse(dir.exists());
+    } finally {
+      // TemporaryFolder is not used as a @Rule here, so clean up explicitly
+      folder.delete();
+    }
+  }
+
+  @Test
+  public void testInputStreamReads() throws IOException {
+    String msg = "hudi rocks!";
+    ByteArrayInputStream inputStream = new ByteArrayInputStream(msg.getBytes(StandardCharsets.UTF_8));
+    assertEquals(msg, FileIOUtils.readAsUTFString(inputStream));
+    inputStream = new ByteArrayInputStream(msg.getBytes(StandardCharsets.UTF_8));
+    assertEquals(msg.length(), FileIOUtils.readAsByteArray(inputStream).length);
+  }
+}
diff --git a/hudi-hadoop-mr/pom.xml b/hudi-hadoop-mr/pom.xml
index 25574e7af..07487dff9 100644
--- a/hudi-hadoop-mr/pom.xml
+++ b/hudi-hadoop-mr/pom.xml
@@ -49,12 +49,6 @@
parquet-avro
-
-
- commons-logging
- commons-logging
-
-
org.apache.hadoop
diff --git a/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/HoodieInputFormat.java b/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/HoodieInputFormat.java
index 9f8e4d550..a12579c18 100644
--- a/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/HoodieInputFormat.java
+++ b/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/HoodieInputFormat.java
@@ -24,8 +24,6 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
@@ -49,6 +47,8 @@ import org.apache.hudi.common.table.view.HoodieTableFileSystemView;
import org.apache.hudi.exception.DatasetNotFoundException;
import org.apache.hudi.exception.HoodieIOException;
import org.apache.hudi.exception.InvalidDatasetException;
+import org.apache.log4j.LogManager;
+import org.apache.log4j.Logger;
/**
* HoodieInputFormat which understands the Hoodie File Structure and filters files based on the
@@ -59,7 +59,7 @@ import org.apache.hudi.exception.InvalidDatasetException;
@UseFileSplitsFromInputFormat
public class HoodieInputFormat extends MapredParquetInputFormat implements Configurable {
- public static final Log LOG = LogFactory.getLog(HoodieInputFormat.class);
+ private static final transient Logger LOG = LogManager.getLogger(HoodieInputFormat.class);
protected Configuration conf;
diff --git a/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/HoodieROTablePathFilter.java b/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/HoodieROTablePathFilter.java
index f268810ca..aaac77989 100644
--- a/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/HoodieROTablePathFilter.java
+++ b/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/HoodieROTablePathFilter.java
@@ -23,8 +23,6 @@ import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.stream.Collectors;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -35,6 +33,8 @@ import org.apache.hudi.common.table.HoodieTableMetaClient;
import org.apache.hudi.common.table.view.HoodieTableFileSystemView;
import org.apache.hudi.exception.DatasetNotFoundException;
import org.apache.hudi.exception.HoodieException;
+import org.apache.log4j.LogManager;
+import org.apache.log4j.Logger;
/**
* Given a path is a part of - Hoodie dataset = accepts ONLY the latest version of each path -
@@ -48,7 +48,7 @@ import org.apache.hudi.exception.HoodieException;
*/
public class HoodieROTablePathFilter implements PathFilter, Serializable {
- public static final Log LOG = LogFactory.getLog(HoodieROTablePathFilter.class);
+ private static final transient Logger LOG = LogManager.getLogger(HoodieROTablePathFilter.class);
/**
* Its quite common, to have all files from a given partition path be passed into accept(), cache
diff --git a/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/RecordReaderValueIterator.java b/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/RecordReaderValueIterator.java
index f8c231a7e..d68afb4bc 100644
--- a/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/RecordReaderValueIterator.java
+++ b/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/RecordReaderValueIterator.java
@@ -21,10 +21,10 @@ package org.apache.hudi.hadoop;
import java.io.IOException;
import java.util.Iterator;
import java.util.NoSuchElementException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.mapred.RecordReader;
import org.apache.hudi.exception.HoodieException;
+import org.apache.log4j.LogManager;
+import org.apache.log4j.Logger;
/**
* Provides Iterator Interface to iterate value entries read from record reader
@@ -34,7 +34,7 @@ import org.apache.hudi.exception.HoodieException;
*/
public class RecordReaderValueIterator implements Iterator {
- public static final Log LOG = LogFactory.getLog(RecordReaderValueIterator.class);
+ private static final transient Logger LOG = LogManager.getLogger(RecordReaderValueIterator.class);
private final RecordReader reader;
private V nextVal = null;
diff --git a/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/hive/HoodieCombineHiveInputFormat.java b/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/hive/HoodieCombineHiveInputFormat.java
index 06ae8d6aa..501b0c475 100644
--- a/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/hive/HoodieCombineHiveInputFormat.java
+++ b/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/hive/HoodieCombineHiveInputFormat.java
@@ -72,8 +72,8 @@ import org.apache.hadoop.mapred.lib.CombineFileSplit;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hudi.hadoop.HoodieInputFormat;
import org.apache.hudi.hadoop.realtime.HoodieRealtimeInputFormat;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import org.apache.log4j.LogManager;
+import org.apache.log4j.Logger;
/**
* This is just a copy of the org.apache.hadoop.hive.ql.io.CombineHiveInputFormat from Hive 2.x
@@ -92,7 +92,7 @@ public class HoodieCombineHiveInputFormat {
private static final String CLASS_NAME = HoodieCombineHiveInputFormat.class.getName();
- public static final Logger LOG = LoggerFactory.getLogger(CLASS_NAME);
+ public static final Logger LOG = LogManager.getLogger(CLASS_NAME);
// max number of threads we can use to check non-combinable paths
private static final int MAX_CHECK_NONCOMBINABLE_THREAD_NUM = 50;
diff --git a/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/realtime/AbstractRealtimeRecordReader.java b/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/realtime/AbstractRealtimeRecordReader.java
index a82b0bc3d..93a608ca6 100644
--- a/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/realtime/AbstractRealtimeRecordReader.java
+++ b/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/realtime/AbstractRealtimeRecordReader.java
@@ -31,8 +31,6 @@ import org.apache.avro.Schema.Field;
import org.apache.avro.generic.GenericArray;
import org.apache.avro.generic.GenericFixed;
import org.apache.avro.generic.GenericRecord;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.serde2.ColumnProjectionUtils;
@@ -53,6 +51,8 @@ import org.apache.hudi.common.util.LogReaderUtils;
import org.apache.hudi.common.util.collection.Pair;
import org.apache.hudi.exception.HoodieException;
import org.apache.hudi.exception.HoodieIOException;
+import org.apache.log4j.LogManager;
+import org.apache.log4j.Logger;
import org.apache.parquet.avro.AvroSchemaConverter;
import org.apache.parquet.hadoop.ParquetFileReader;
import org.apache.parquet.schema.MessageType;
@@ -82,7 +82,8 @@ public abstract class AbstractRealtimeRecordReader {
// Default file path prefix for spillable file
public static final String DEFAULT_SPILLABLE_MAP_BASE_PATH = "/tmp/";
- public static final Log LOG = LogFactory.getLog(AbstractRealtimeRecordReader.class);
+ private static final Logger LOG = LogManager.getLogger(AbstractRealtimeRecordReader.class);
+
protected final HoodieRealtimeFileSplit split;
protected final JobConf jobConf;
private final MessageType baseFileSchema;
diff --git a/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/realtime/HoodieRealtimeInputFormat.java b/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/realtime/HoodieRealtimeInputFormat.java
index 67f3e46f2..bd354e05a 100644
--- a/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/realtime/HoodieRealtimeInputFormat.java
+++ b/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/realtime/HoodieRealtimeInputFormat.java
@@ -29,8 +29,6 @@ import java.util.Map;
import java.util.function.Function;
import java.util.stream.Collectors;
import java.util.stream.Stream;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
@@ -56,6 +54,8 @@ import org.apache.hudi.exception.HoodieException;
import org.apache.hudi.exception.HoodieIOException;
import org.apache.hudi.hadoop.HoodieInputFormat;
import org.apache.hudi.hadoop.UseFileSplitsFromInputFormat;
+import org.apache.log4j.LogManager;
+import org.apache.log4j.Logger;
/**
* Input Format, that provides a real-time view of data in a Hoodie dataset
@@ -63,7 +63,7 @@ import org.apache.hudi.hadoop.UseFileSplitsFromInputFormat;
@UseFileSplitsFromInputFormat
public class HoodieRealtimeInputFormat extends HoodieInputFormat implements Configurable {
- public static final Log LOG = LogFactory.getLog(HoodieRealtimeInputFormat.class);
+ private static final transient Logger LOG = LogManager.getLogger(HoodieRealtimeInputFormat.class);
// These positions have to be deterministic across all tables
public static final int HOODIE_COMMIT_TIME_COL_POS = 0;
diff --git a/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/realtime/HoodieRealtimeRecordReader.java b/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/realtime/HoodieRealtimeRecordReader.java
index b1fab446e..5e095d868 100644
--- a/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/realtime/HoodieRealtimeRecordReader.java
+++ b/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/realtime/HoodieRealtimeRecordReader.java
@@ -19,13 +19,13 @@
package org.apache.hudi.hadoop.realtime;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.io.ArrayWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.RecordReader;
import org.apache.hudi.exception.HoodieException;
+import org.apache.log4j.LogManager;
+import org.apache.log4j.Logger;
/**
* Realtime Record Reader which can do compacted (merge-on-read) record reading or
@@ -37,7 +37,7 @@ public class HoodieRealtimeRecordReader implements RecordReader reader;
public HoodieRealtimeRecordReader(HoodieRealtimeFileSplit split, JobConf job,
diff --git a/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/realtime/RealtimeCompactedRecordReader.java b/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/realtime/RealtimeCompactedRecordReader.java
index 8f223524f..9c54b56d4 100644
--- a/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/realtime/RealtimeCompactedRecordReader.java
+++ b/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/realtime/RealtimeCompactedRecordReader.java
@@ -32,10 +32,14 @@ import org.apache.hudi.common.table.log.HoodieMergedLogRecordScanner;
import org.apache.hudi.common.util.FSUtils;
import org.apache.hudi.common.util.HoodieAvroUtils;
import org.apache.hudi.common.util.Option;
+import org.apache.log4j.LogManager;
+import org.apache.log4j.Logger;
class RealtimeCompactedRecordReader extends AbstractRealtimeRecordReader implements
RecordReader {
+ private static final Logger LOG = LogManager.getLogger(AbstractRealtimeRecordReader.class);
+
protected final RecordReader parquetReader;
private final Map> deltaRecordMap;
diff --git a/hudi-hive/pom.xml b/hudi-hive/pom.xml
index 8fdcf5869..af6c827a3 100644
--- a/hudi-hive/pom.xml
+++ b/hudi-hive/pom.xml
@@ -65,20 +65,6 @@
joda-time
-
-
- commons-dbcp
- commons-dbcp
-
-
- commons-pool
- commons-pool
-
-
- commons-io
- commons-io
-
-
com.beust
jcommander
diff --git a/hudi-hive/run_sync_tool.sh b/hudi-hive/run_sync_tool.sh
index cd58db35e..60cc661ff 100755
--- a/hudi-hive/run_sync_tool.sh
+++ b/hudi-hive/run_sync_tool.sh
@@ -44,12 +44,12 @@ fi
HIVE_EXEC=`ls ${HIVE_HOME}/lib/hive-exec-*.jar | tr '\n' ':'`
HIVE_SERVICE=`ls ${HIVE_HOME}/lib/hive-service-*.jar | grep -v rpc | tr '\n' ':'`
HIVE_METASTORE=`ls ${HIVE_HOME}/lib/hive-metastore-*.jar | tr '\n' ':'`
-# Hive 1.x/CDH has standalone jdbc jar which is no longer available in 2.x
-HIVE_JDBC=`ls ${HIVE_HOME}/lib/hive-jdbc-*standalone*.jar | tr '\n' ':'`
+HIVE_JDBC=`ls ${HIVE_HOME}/lib/hive-jdbc-*.jar | tr '\n' ':'`
if [ -z "${HIVE_JDBC}" ]; then
HIVE_JDBC=`ls ${HIVE_HOME}/lib/hive-jdbc-*.jar | grep -v handler | tr '\n' ':'`
fi
-HIVE_JARS=$HIVE_METASTORE:$HIVE_SERVICE:$HIVE_EXEC:$HIVE_SERVICE:$HIVE_JDBC
+HIVE_JACKSON=`ls ${HIVE_HOME}/lib/jackson-*.jar | tr '\n' ':'`
+HIVE_JARS=$HIVE_METASTORE:$HIVE_SERVICE:$HIVE_EXEC:$HIVE_SERVICE:$HIVE_JDBC:$HIVE_JACKSON
HADOOP_HIVE_JARS=${HIVE_JARS}:${HADOOP_HOME}/share/hadoop/common/*:${HADOOP_HOME}/share/hadoop/mapreduce/*:${HADOOP_HOME}/share/hadoop/hdfs/*:${HADOOP_HOME}/share/hadoop/common/lib/*:${HADOOP_HOME}/share/hadoop/hdfs/lib/*
diff --git a/hudi-hive/src/main/java/org/apache/hudi/hive/HoodieHiveClient.java b/hudi-hive/src/main/java/org/apache/hudi/hive/HoodieHiveClient.java
index 4a478016e..92667d8ce 100644
--- a/hudi-hive/src/main/java/org/apache/hudi/hive/HoodieHiveClient.java
+++ b/hudi-hive/src/main/java/org/apache/hudi/hive/HoodieHiveClient.java
@@ -24,7 +24,7 @@ import com.google.common.collect.Maps;
import java.io.IOException;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
-import java.sql.Driver;
+import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
@@ -33,9 +33,7 @@ import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
-import org.apache.commons.dbcp.BasicDataSource;
-import org.apache.commons.dbcp.ConnectionFactory;
-import org.apache.commons.dbcp.DriverConnectionFactory;
+import jline.internal.Log;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.conf.HiveConf;
@@ -57,13 +55,13 @@ import org.apache.hudi.common.util.collection.Pair;
import org.apache.hudi.exception.HoodieIOException;
import org.apache.hudi.exception.InvalidDatasetException;
import org.apache.hudi.hive.util.SchemaUtil;
+import org.apache.log4j.LogManager;
+import org.apache.log4j.Logger;
import org.apache.parquet.format.converter.ParquetMetadataConverter;
import org.apache.parquet.hadoop.ParquetFileReader;
import org.apache.parquet.hadoop.metadata.ParquetMetadata;
import org.apache.parquet.schema.MessageType;
import org.apache.thrift.TException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
@SuppressWarnings("ConstantConditions")
public class HoodieHiveClient {
@@ -80,7 +78,7 @@ public class HoodieHiveClient {
}
}
- private static Logger LOG = LoggerFactory.getLogger(HoodieHiveClient.class);
+ private static Logger LOG = LogManager.getLogger(HoodieHiveClient.class);
private final HoodieTableMetaClient metaClient;
private final HoodieTableType tableType;
private final PartitionValueExtractor partitionValueExtractor;
@@ -473,20 +471,18 @@ public class HoodieHiveClient {
private void createHiveConnection() {
if (connection == null) {
- BasicDataSource ds = new HiveDataSource();
- ds.setDriverClassName(HiveDriver.class.getCanonicalName());
- ds.setUrl(getHiveJdbcUrlWithDefaultDBName());
- if (syncConfig.hiveUser != null) {
- ds.setUsername(syncConfig.hiveUser);
- ds.setPassword(syncConfig.hivePass);
- }
- LOG.info("Getting Hive Connection from Datasource " + ds);
try {
- this.connection = ds.getConnection();
- LOG.info("Successfully got Hive Connection from Datasource " + ds);
+ Class.forName(HiveDriver.class.getCanonicalName());
+ } catch (ClassNotFoundException e) {
+ Log.error("Unable to load Hive driver class", e);
+ return;
+ }
+
+ try {
+ this.connection = DriverManager.getConnection(syncConfig.jdbcUrl, syncConfig.hiveUser, syncConfig.hivePass);
+ LOG.info("Successfully established Hive connection to " + syncConfig.jdbcUrl);
} catch (SQLException e) {
- throw new HoodieHiveSyncException(
- "Cannot create hive connection " + getHiveJdbcUrlWithDefaultDBName(), e);
+ throw new HoodieHiveSyncException("Cannot create hive connection " + getHiveJdbcUrlWithDefaultDBName(), e);
}
}
}
@@ -627,54 +623,4 @@ public class HoodieHiveClient {
return new PartitionEvent(PartitionEventType.UPDATE, storagePartition);
}
}
-
- /**
- * There is a bug in BasicDataSource implementation (dbcp-1.4) which does not allow custom version of Driver (needed
- * to talk to older version of HiveServer2 including CDH-5x). This is fixed in dbcp-2x but we are using dbcp1.4.
- * Adding a workaround here. TODO: varadarb We need to investigate moving to dbcp-2x
- */
- protected class HiveDataSource extends BasicDataSource {
-
- protected ConnectionFactory createConnectionFactory() throws SQLException {
- try {
- Driver driver = HiveDriver.class.newInstance();
- // Can't test without a validationQuery
- if (validationQuery == null) {
- setTestOnBorrow(false);
- setTestOnReturn(false);
- setTestWhileIdle(false);
- }
-
- // Set up the driver connection factory we will use
- String user = username;
- if (user != null) {
- connectionProperties.put("user", user);
- } else {
- log("DBCP DataSource configured without a 'username'");
- }
-
- String pwd = password;
- if (pwd != null) {
- connectionProperties.put("password", pwd);
- } else {
- log("DBCP DataSource configured without a 'password'");
- }
-
- ConnectionFactory driverConnectionFactory = new DriverConnectionFactory(driver, url, connectionProperties);
- return driverConnectionFactory;
- } catch (Throwable x) {
- LOG.warn("Got exception trying to instantiate connection factory. Trying default instantiation", x);
- return super.createConnectionFactory();
- }
- }
-
- @Override
- public String toString() {
- return "HiveDataSource{"
- + "driverClassName='" + driverClassName + '\''
- + ", driverClassLoader=" + driverClassLoader
- + ", url='" + url + '\''
- + '}';
- }
- }
}
diff --git a/hudi-hive/src/main/java/org/apache/hudi/hive/util/SchemaUtil.java b/hudi-hive/src/main/java/org/apache/hudi/hive/util/SchemaUtil.java
index 9e2784ee3..6cd7747a6 100644
--- a/hudi-hive/src/main/java/org/apache/hudi/hive/util/SchemaUtil.java
+++ b/hudi-hive/src/main/java/org/apache/hudi/hive/util/SchemaUtil.java
@@ -36,6 +36,8 @@ import org.apache.hudi.common.table.log.block.HoodieLogBlock;
import org.apache.hudi.hive.HiveSyncConfig;
import org.apache.hudi.hive.HoodieHiveSyncException;
import org.apache.hudi.hive.SchemaDifference;
+import org.apache.log4j.LogManager;
+import org.apache.log4j.Logger;
import org.apache.parquet.avro.AvroSchemaConverter;
import org.apache.parquet.schema.DecimalMetadata;
import org.apache.parquet.schema.GroupType;
@@ -43,15 +45,13 @@ import org.apache.parquet.schema.MessageType;
import org.apache.parquet.schema.OriginalType;
import org.apache.parquet.schema.PrimitiveType;
import org.apache.parquet.schema.Type;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* Schema Utilities
*/
public class SchemaUtil {
- private static final Logger LOG = LoggerFactory.getLogger(SchemaUtil.class);
+ private static final Logger LOG = LogManager.getLogger(SchemaUtil.class);
/**
* Get the schema difference between the storage schema and hive table schema
diff --git a/hudi-hive/src/test/java/org/apache/hudi/hive/TestUtil.java b/hudi-hive/src/test/java/org/apache/hudi/hive/TestUtil.java
index 6e9e06bdf..f49b7df90 100644
--- a/hudi-hive/src/test/java/org/apache/hudi/hive/TestUtil.java
+++ b/hudi-hive/src/test/java/org/apache/hudi/hive/TestUtil.java
@@ -34,7 +34,6 @@ import java.util.Set;
import java.util.UUID;
import org.apache.avro.Schema;
import org.apache.avro.generic.IndexedRecord;
-import org.apache.commons.io.FileUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
@@ -61,6 +60,7 @@ import org.apache.hudi.common.table.log.block.HoodieAvroDataBlock;
import org.apache.hudi.common.table.log.block.HoodieLogBlock;
import org.apache.hudi.common.table.log.block.HoodieLogBlock.HeaderMetadataType;
import org.apache.hudi.common.util.FSUtils;
+import org.apache.hudi.common.util.FileIOUtils;
import org.apache.hudi.common.util.SchemaTestUtil;
import org.apache.hudi.hive.util.HiveTestService;
import org.apache.parquet.avro.AvroSchemaConverter;
@@ -153,7 +153,7 @@ public class TestUtil {
static void createCOWDataset(String commitTime, int numberOfPartitions)
throws IOException, InitializationError, URISyntaxException, InterruptedException {
Path path = new Path(hiveSyncConfig.basePath);
- FileUtils.deleteDirectory(new File(hiveSyncConfig.basePath));
+ FileIOUtils.deleteDirectory(new File(hiveSyncConfig.basePath));
HoodieTableMetaClient
.initTableType(configuration, hiveSyncConfig.basePath, HoodieTableType.COPY_ON_WRITE,
hiveSyncConfig.tableName, HoodieAvroPayload.class.getName());
@@ -169,7 +169,7 @@ public class TestUtil {
static void createMORDataset(String commitTime, String deltaCommitTime, int numberOfPartitions)
throws IOException, InitializationError, URISyntaxException, InterruptedException {
Path path = new Path(hiveSyncConfig.basePath);
- FileUtils.deleteDirectory(new File(hiveSyncConfig.basePath));
+ FileIOUtils.deleteDirectory(new File(hiveSyncConfig.basePath));
HoodieTableMetaClient
.initTableType(configuration, hiveSyncConfig.basePath, HoodieTableType.MERGE_ON_READ,
hiveSyncConfig.tableName, HoodieAvroPayload.class.getName());
diff --git a/hudi-hive/src/test/java/org/apache/hudi/hive/util/HiveTestService.java b/hudi-hive/src/test/java/org/apache/hudi/hive/util/HiveTestService.java
index 487200e49..8961558bb 100644
--- a/hudi-hive/src/test/java/org/apache/hudi/hive/util/HiveTestService.java
+++ b/hudi-hive/src/test/java/org/apache/hudi/hive/util/HiveTestService.java
@@ -28,7 +28,6 @@ import java.net.SocketException;
import java.util.Map;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
-import org.apache.commons.io.FileUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.conf.HiveConf;
@@ -41,6 +40,7 @@ import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.thrift.TUGIContainingTransport;
import org.apache.hive.service.server.HiveServer2;
import org.apache.hudi.common.model.HoodieTestUtils;
+import org.apache.hudi.common.util.FileIOUtils;
import org.apache.log4j.LogManager;
import org.apache.log4j.Logger;
import org.apache.thrift.TProcessor;
@@ -95,7 +95,7 @@ public class HiveTestService {
if (clean) {
LOG.info("Cleaning Hive cluster data at: " + localHiveLocation + " and starting fresh.");
File file = new File(localHiveLocation);
- FileUtils.deleteDirectory(file);
+ FileIOUtils.deleteDirectory(file);
}
HiveConf serverConf = configureHive(hadoopConf, localHiveLocation);
diff --git a/hudi-integ-test/pom.xml b/hudi-integ-test/pom.xml
index ffe16e1c8..e56236911 100644
--- a/hudi-integ-test/pom.xml
+++ b/hudi-integ-test/pom.xml
@@ -42,12 +42,8 @@
- org.slf4j
- slf4j-api
-
-
- org.slf4j
- slf4j-log4j12
+ log4j
+ log4j
diff --git a/hudi-integ-test/src/test/java/org/apache/hudi/integ/ITTestBase.java b/hudi-integ-test/src/test/java/org/apache/hudi/integ/ITTestBase.java
index c12cf6d05..9b33ba3a8 100644
--- a/hudi-integ-test/src/test/java/org/apache/hudi/integ/ITTestBase.java
+++ b/hudi-integ-test/src/test/java/org/apache/hudi/integ/ITTestBase.java
@@ -38,6 +38,7 @@ import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
+import org.apache.hudi.common.util.FileIOUtils;
import org.apache.hudi.common.util.collection.Pair;
import org.apache.log4j.LogManager;
import org.apache.log4j.Logger;
@@ -215,6 +216,19 @@ public abstract class ITTestBase {
return Pair.of(callback.getStdout().toString(), callback.getStderr().toString());
}
+ private void saveUpLogs() {
+ try {
+ // save up the Hive log files for introspection
+ String hiveLogStr = executeCommandStringInDocker(HIVESERVER, "cat /tmp/root/hive.log", true)
+ .getStdout().toString();
+ String filePath = System.getProperty("java.io.tmpdir") + "/" + System.currentTimeMillis() + "-hive.log";
+ FileIOUtils.writeStringToFile(hiveLogStr, filePath);
+ LOG.info("Hive log saved up at : " + filePath);
+ } catch (Exception e) {
+ LOG.error("Unable to save up logs..", e);
+ }
+ }
+
void assertStdOutContains(Pair stdOutErr, String expectedOutput) {
assertStdOutContains(stdOutErr, expectedOutput, 1);
}
@@ -233,7 +247,12 @@ public abstract class ITTestBase {
lastIndex += expectedOutput.length();
}
}
- Assert.assertEquals("Did not find output the expected number of times", times, count);
+
+ if (times != count) {
+ saveUpLogs();
+ }
+
+ Assert.assertEquals("Did not find output the expected number of times", times, count);
}
public class TestExecStartResultCallback extends ExecStartResultCallback {
diff --git a/hudi-integ-test/src/test/java/org/apache/hudi/integ/ITTestHoodieDemo.java b/hudi-integ-test/src/test/java/org/apache/hudi/integ/ITTestHoodieDemo.java
index ee3855a83..177962521 100644
--- a/hudi-integ-test/src/test/java/org/apache/hudi/integ/ITTestHoodieDemo.java
+++ b/hudi-integ-test/src/test/java/org/apache/hudi/integ/ITTestHoodieDemo.java
@@ -19,8 +19,8 @@
package org.apache.hudi.integ;
import com.google.common.collect.ImmutableList;
-import org.apache.hudi.common.util.collection.Pair;
import java.util.List;
+import org.apache.hudi.common.util.collection.Pair;
import org.junit.Test;
/**
diff --git a/hudi-utilities/pom.xml b/hudi-utilities/pom.xml
index ce8327687..bf5d104d5 100644
--- a/hudi-utilities/pom.xml
+++ b/hudi-utilities/pom.xml
@@ -138,12 +138,6 @@
jackson-module-scala_2.11
-
-
- org.apache.avro
- avro-mapred
-
-
org.apache.parquet
diff --git a/hudi-utilities/src/main/java/org/apache/hudi/utilities/HiveIncrementalPuller.java b/hudi-utilities/src/main/java/org/apache/hudi/utilities/HiveIncrementalPuller.java
index 465d23583..3f443c3fe 100644
--- a/hudi-utilities/src/main/java/org/apache/hudi/utilities/HiveIncrementalPuller.java
+++ b/hudi-utilities/src/main/java/org/apache/hudi/utilities/HiveIncrementalPuller.java
@@ -25,15 +25,13 @@ import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.Serializable;
import java.sql.Connection;
+import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.List;
import java.util.Scanner;
import java.util.stream.Collectors;
-import javax.sql.DataSource;
-import org.apache.commons.dbcp.BasicDataSource;
-import org.apache.commons.io.IOUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -41,6 +39,7 @@ import org.apache.hadoop.fs.permission.FsAction;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hudi.common.table.HoodieTableMetaClient;
import org.apache.hudi.common.table.timeline.HoodieInstant;
+import org.apache.hudi.common.util.FileIOUtils;
import org.apache.hudi.common.util.Option;
import org.apache.hudi.exception.HoodieException;
import org.apache.hudi.utilities.exception.HoodieIncrementalPullException;
@@ -110,8 +109,8 @@ public class HiveIncrementalPuller {
public HiveIncrementalPuller(Config config) throws IOException {
this.config = config;
validateConfig(config);
- String templateContent = IOUtils.toString(
- this.getClass().getResourceAsStream("IncrementalPull.sqltemplate"), "UTF-8");
+ String templateContent = FileIOUtils.readAsUTFString(
+ this.getClass().getResourceAsStream("IncrementalPull.sqltemplate"));
incrementalPullSQLtemplate = new ST(templateContent);
}
@@ -344,22 +343,13 @@ public class HiveIncrementalPuller {
private Connection getConnection() throws SQLException {
if (connection == null) {
- DataSource ds = getDatasource();
- log.info("Getting Hive Connection from Datasource " + ds);
- this.connection = ds.getConnection();
+ log.info("Getting Hive Connection to " + config.hiveJDBCUrl);
+ this.connection = DriverManager.getConnection(config.hiveJDBCUrl, config.hiveUsername, config.hivePassword);
+
}
return connection;
}
- private DataSource getDatasource() {
- BasicDataSource ds = new BasicDataSource();
- ds.setDriverClassName(driverName);
- ds.setUrl(config.hiveJDBCUrl);
- ds.setUsername(config.hiveUsername);
- ds.setPassword(config.hivePassword);
- return ds;
- }
-
public static void main(String[] args) throws IOException {
final Config cfg = new Config();
JCommander cmd = new JCommander(cfg, args);
diff --git a/hudi-utilities/src/main/java/org/apache/hudi/utilities/deltastreamer/SchedulerConfGenerator.java b/hudi-utilities/src/main/java/org/apache/hudi/utilities/deltastreamer/SchedulerConfGenerator.java
index 418df2763..8f0a6b9be 100644
--- a/hudi-utilities/src/main/java/org/apache/hudi/utilities/deltastreamer/SchedulerConfGenerator.java
+++ b/hudi-utilities/src/main/java/org/apache/hudi/utilities/deltastreamer/SchedulerConfGenerator.java
@@ -24,7 +24,6 @@ import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.UUID;
-import org.apache.commons.lang.text.StrSubstitutor;
import org.apache.hudi.common.model.HoodieTableType;
import org.apache.hudi.common.util.Option;
import org.apache.log4j.LogManager;
@@ -44,47 +43,26 @@ public class SchedulerConfGenerator {
public static final String SPARK_SCHEDULER_MODE_KEY = "spark.scheduler.mode";
public static final String SPARK_SCHEDULER_ALLOCATION_FILE_KEY = "spark.scheduler.allocation.file";
-
- private static final String DELTASYNC_POOL_KEY = "deltasync_pool";
- private static final String COMPACT_POOL_KEY = "compact_pool";
- private static final String DELTASYNC_POLICY_KEY = "deltasync_policy";
- private static final String COMPACT_POLICY_KEY = "compact_policy";
- private static final String DELTASYNC_WEIGHT_KEY = "deltasync_weight";
- private static final String DELTASYNC_MINSHARE_KEY = "deltasync_minshare";
- private static final String COMPACT_WEIGHT_KEY = "compact_weight";
- private static final String COMPACT_MINSHARE_KEY = "compact_minshare";
-
private static String SPARK_SCHEDULING_PATTERN =
"\n"
+ "\n"
- + " \n"
- + " %(deltasync_policy)\n"
- + " %(deltasync_weight)\n"
- + " %(deltasync_minshare)\n"
+ + " \n"
+ + " %s\n"
+ + " %s\n"
+ + " %s\n"
+ " \n"
- + " \n"
- + " %(compact_policy)\n"
- + " %(compact_weight)\n"
- + " %(compact_minshare)\n"
+ + " \n"
+ + " %s\n"
+ + " %s\n"
+ + " %s\n"
+ " \n"
+ "";
private static String generateConfig(Integer deltaSyncWeight, Integer compactionWeight, Integer deltaSyncMinShare,
Integer compactionMinShare) {
- Map schedulingProps = new HashMap<>();
- schedulingProps.put(DELTASYNC_POOL_KEY, DELTASYNC_POOL_NAME);
- schedulingProps.put(COMPACT_POOL_KEY, COMPACT_POOL_NAME);
- schedulingProps.put(DELTASYNC_POLICY_KEY, "FAIR");
- schedulingProps.put(COMPACT_POLICY_KEY, "FAIR");
- schedulingProps.put(DELTASYNC_WEIGHT_KEY, deltaSyncWeight.toString());
- schedulingProps.put(DELTASYNC_MINSHARE_KEY, deltaSyncMinShare.toString());
- schedulingProps.put(COMPACT_WEIGHT_KEY, compactionWeight.toString());
- schedulingProps.put(COMPACT_MINSHARE_KEY, compactionMinShare.toString());
-
- StrSubstitutor sub = new StrSubstitutor(schedulingProps, "%(", ")");
- String xmlString = sub.replace(SPARK_SCHEDULING_PATTERN);
- log.info("Scheduling Configurations generated. Config=\n" + xmlString);
- return xmlString;
+ return String.format(SPARK_SCHEDULING_PATTERN,
+ DELTASYNC_POOL_NAME, "FAIR", deltaSyncWeight.toString(), deltaSyncMinShare.toString(),
+ COMPACT_POOL_NAME, "FAIR", compactionWeight.toString(), compactionMinShare.toString());
}
diff --git a/hudi-utilities/src/test/java/org/apache/hudi/utilities/SchedulerConfGeneratorTest.java b/hudi-utilities/src/test/java/org/apache/hudi/utilities/SchedulerConfGeneratorTest.java
index 091c43651..7a249c91b 100644
--- a/hudi-utilities/src/test/java/org/apache/hudi/utilities/SchedulerConfGeneratorTest.java
+++ b/hudi-utilities/src/test/java/org/apache/hudi/utilities/SchedulerConfGeneratorTest.java
@@ -1,3 +1,21 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
package org.apache.hudi.utilities;
import static org.junit.Assert.assertNotNull;
diff --git a/packaging/README.md b/packaging/README.md
new file mode 100644
index 000000000..d2b112d07
--- /dev/null
+++ b/packaging/README.md
@@ -0,0 +1,18 @@
+# Overview
+
+This folder contains several modules that build out bundles (i.e. fat/uber jars) that enable Hudi integration into various systems.
+
+Here are the key principles applied in designing these bundles
+
+ - As much as possible, try to make the bundle work with the target system's jars and classes. (e.g: better to make Hudi work with Hive's parquet version than bundling parquet with Hudi). This lets us evolve Hudi as a lighter weight component and also provides flexibility for changing these jar versions in target systems
+ - Bundle's pom only needs to depend on the required hudi modules & any other modules that are declared "provided" in parent poms (e.g: parquet-avro).
+ - Such other modules should be declared as "compile" dependency in the bundle pom to actually get the shade plugin to pull them into the bundle. By default, provided-scoped dependencies are not included.
+ - Any other runtime dependencies needed by the bundle should be specified in the `` whitelist. New bundles should also follow the same style of explicitly whitelisting modules and shading as needed.
+ - Leave abundant comments on why something is being included, shaded, or even left out.
+
+Please follow these when adding new ones or making changes.
+
+# Resources
+
+ 1. Classes needed for Hive2 JDBC documented [here](https://cwiki.apache.org/confluence/display/Hive/HiveServer2+Clients#HiveServer2Clients-RunningtheJDBCSampleCode)
+
\ No newline at end of file
diff --git a/packaging/hudi-hadoop-mr-bundle/pom.xml b/packaging/hudi-hadoop-mr-bundle/pom.xml
index 81b5e3d5a..be37d043a 100644
--- a/packaging/hudi-hadoop-mr-bundle/pom.xml
+++ b/packaging/hudi-hadoop-mr-bundle/pom.xml
@@ -23,141 +23,16 @@
../../pom.xml
4.0.0
-
hudi-hadoop-mr-bundle
jar
-
-
-
- org.apache.hudi
- hudi-common
- ${project.version}
-
-
- org.apache.hudi
- hudi-hadoop-mr
- ${project.version}
-
-
-
- org.apache.hudi
- *
-
-
-
-
-
-
- org.apache.avro
- avro
-
-
-
-
- org.apache.parquet
- parquet-avro
-
-
-
-
- commons-logging
- commons-logging
-
-
- commons-io
- commons-io
-
-
- commons-codec
- commons-codec
-
-
-
-
- org.apache.hadoop
- hadoop-common
-
-
- org.apache.hadoop
- hadoop-mapreduce-client-core
-
-
- org.apache.hadoop
- hadoop-mapreduce-client-common
-
-
- org.apache.hadoop
- hadoop-auth
-
-
- org.apache.hadoop
- hadoop-hdfs
-
-
-
-
- ${hive.groupid}
- hive-jdbc
- ${hive.version}
-
-
- commons-logging
- commons-logging
-
-
-
-
- ${hive.groupid}
- hive-exec
- ${hive.version}
-
-
- ${hive.groupid}
- hive-service
- ${hive.version}
-
-
- ${hive.groupid}
- hive-shims
- ${hive.version}
-
-
- ${hive.groupid}
- hive-serde
- ${hive.version}
-
-
- ${hive.groupid}
- hive-metastore
- ${hive.version}
-
-
- ${hive.groupid}
- hive-common
- ${hive.version}
-
-
-
- com.esotericsoftware
- kryo
- test
-
-
-
- junit
- junit
- test
-
-
-
+
+ true
+ ${project.basedir}/src/main/resources/META-INF
+ HUDI_NOTICE.txt
+
-
-
- src/main/resources
-
-
org.apache.rat
@@ -166,7 +41,7 @@
org.apache.maven.plugins
maven-shade-plugin
- 2.4
+ ${maven-shade-plugin.version}
package
@@ -177,65 +52,33 @@
true
${project.build.directory}/dependency-reduced-pom.xml
-
-
- parquet.avro
- org.apache.hudi.parquet.avro
-
-
- parquet.column
- org.apache.hudi.parquet.column
-
-
- parquet.format.
- org.apache.hudi.parquet.format.
-
-
- parquet.hadoop.
- org.apache.hudi.parquet.hadoop.
-
-
- parquet.schema
- org.apache.hudi.parquet.schema
-
-
- org.apache.commons.
- org.apache.hudi.org.apache.commons.
-
-
- org.apache.commons.codec.
- org.apache.hudi.org.apache.commons.codec.
-
-
- com.esotericsoftware.kryo.
- org.apache.hudi.com.esotericsoftware.kryo.
-
-
- org.objenesis.
- org.apache.hudi.org.objenesis.
-
-
- com.esotericsoftware.minlog.
- org.apache.hudi.com.esotericsoftware.minlog.
-
-
- false
org.apache.hudi:hudi-common
org.apache.hudi:hudi-hadoop-mr
- commons-logging:commons-logging
- commons-io:commons-io
- commons-lang:commons-lang
- commons-pool:commons-pool
- commons-codec:commons-codec
+
+ org.apache.parquet:parquet-avro
com.esotericsoftware:kryo-shaded
org.objenesis:objenesis
com.esotericsoftware:minlog
- commons-codec:commons-codec
- org.apache.parquet:parquet-avro
+
+
+
+ com.esotericsoftware.kryo.
+ org.apache.hudi.com.esotericsoftware.kryo.
+
+
+ org.objenesis.
+ org.apache.hudi.org.objenesis.
+
+
+ com.esotericsoftware.minlog.
+ org.apache.hudi.com.esotericsoftware.minlog.
+
+
+ false
*:*
@@ -255,11 +98,34 @@
+
+
+ src/main/resources
+
+
+ src/test/resources
+
+
-
- true
- ${project.basedir}/src/main/resources/META-INF
- HUDI_NOTICE.txt
-
+
+
+
+ org.apache.hudi
+ hudi-common
+ ${project.version}
+
+
+ org.apache.hudi
+ hudi-hadoop-mr
+ ${project.version}
+
+
+
+
+ org.apache.parquet
+ parquet-avro
+ compile
+
+
diff --git a/packaging/hudi-hive-bundle/pom.xml b/packaging/hudi-hive-bundle/pom.xml
index 72f00dba7..7ad1047e9 100644
--- a/packaging/hudi-hive-bundle/pom.xml
+++ b/packaging/hudi-hive-bundle/pom.xml
@@ -23,135 +23,16 @@
../../pom.xml
4.0.0
-
hudi-hive-bundle
jar
-
-
-
- org.apache.hudi
- hudi-common
- ${project.version}
-
-
- org.apache.hudi
- hudi-hadoop-mr-bundle
- ${project.version}
-
-
- org.apache.hudi
- hudi-hive
- ${project.version}
-
-
-
- org.apache.hudi
- *
-
-
-
-
-
-
- org.slf4j
- slf4j-api
-
-
- org.slf4j
- slf4j-log4j12
-
-
-
-
- org.apache.thrift
- libthrift
- ${thrift.version}
-
-
- org.apache.thrift
- libfb303
- 0.9.3
-
-
-
- com.google.guava
- guava
-
-
-
- joda-time
- joda-time
-
-
-
- com.beust
- jcommander
-
-
-
-
- commons-dbcp
- commons-dbcp
-
-
- commons-io
- commons-io
-
-
-
-
- org.apache.httpcomponents
- httpcore
-
-
- org.apache.httpcomponents
- httpclient
-
-
-
-
- org.apache.hadoop
- hadoop-client
-
-
- org.apache.hadoop
- hadoop-common
-
-
- org.apache.hadoop
- hadoop-hdfs
-
-
- org.apache.hadoop
- hadoop-auth
-
-
-
-
- ${hive.groupid}
- hive-service
-
-
- ${hive.groupid}
- hive-jdbc
-
-
- ${hive.groupid}
- hive-metastore
-
-
- ${hive.groupid}
- hive-common
-
-
+
+ true
+ ${project.basedir}/src/main/resources/META-INF
+ HUDI_NOTICE.txt
+
-
-
- src/main/resources
-
-
org.apache.rat
@@ -160,7 +41,7 @@
org.apache.maven.plugins
maven-shade-plugin
- 2.4
+ ${maven-shade-plugin.version}
package
@@ -169,53 +50,22 @@
true
-
-
- com.beust.
- org.apache.hudi.com.beust.
-
-
- org.joda.
- org.apache.hudi.org.joda.
-
-
- com.google.
- org.apache.hudi.com.google.
-
-
- org.slf4j.
- org.apache.hudi.org.slf4j.
-
-
- org.apache.commons.
- org.apache.hudi.org.apache.commons.
-
-
- parquet.column
- org.apache.hudi.parquet.column
-
-
- parquet.format.
- org.apache.hudi.parquet.format.
-
-
- parquet.hadoop.
- org.apache.hudi.parquet.hadoop.
-
-
- parquet.schema.
- org.apache.hudi.parquet.schema.
-
-
- false
+ ${project.build.directory}/dependency-reduced-pom.xml
+
-
- log4j:log4j
- org.apache.hadoop:*
- org.apache.hive:*
- org.apache.derby:derby
-
+
+ org.apache.hudi:hudi-common
+ org.apache.hudi:hudi-hadoop-mr
+ org.apache.hudi:hudi-hive
+
+ com.beust:jcommander
+ org.apache.parquet:parquet-avro
+ com.esotericsoftware:kryo-shaded
+ org.objenesis:objenesis
+ com.esotericsoftware:minlog
+
+ false
*:*
@@ -235,11 +85,32 @@
+
+
+ src/main/resources
+
+
+ src/test/resources
+
+
-
- true
- ${project.basedir}/src/main/resources/META-INF
- HUDI_NOTICE.txt
-
+
+
+
+ org.apache.hudi
+ hudi-common
+ ${project.version}
+
+
+ org.apache.hudi
+ hudi-hadoop-mr-bundle
+ ${project.version}
+
+
+ org.apache.hudi
+ hudi-hive
+ ${project.version}
+
+
diff --git a/packaging/hudi-presto-bundle/pom.xml b/packaging/hudi-presto-bundle/pom.xml
index fbaba749c..17c3d2e0d 100644
--- a/packaging/hudi-presto-bundle/pom.xml
+++ b/packaging/hudi-presto-bundle/pom.xml
@@ -23,100 +23,16 @@
../../pom.xml
4.0.0
-
hudi-presto-bundle
jar
-
-
-
- org.apache.hudi
- hudi-common
- ${project.version}
-
-
- org.apache.hudi
- hudi-hadoop-mr-bundle
- ${project.version}
-
-
-
-
- org.slf4j
- slf4j-api
-
-
- org.slf4j
- slf4j-log4j12
-
-
-
-
- org.apache.thrift
- libthrift
- ${thrift.version}
-
-
-
- joda-time
- joda-time
-
-
-
- com.google.guava
- guava
-
-
-
-
- commons-dbcp
- commons-dbcp
-
-
- commons-io
- commons-io
-
-
-
- com.beust
- jcommander
-
-
-
-
- org.apache.httpcomponents
- httpcore
-
-
- org.apache.httpcomponents
- httpclient
-
-
-
-
- org.apache.hadoop
- hadoop-client
-
-
- org.apache.hadoop
- hadoop-common
-
-
- org.apache.hadoop
- hadoop-hdfs
-
-
- org.apache.hadoop
- hadoop-auth
-
-
+
+ true
+ ${project.basedir}/src/main/resources/META-INF
+ HUDI_NOTICE.txt
+
-
-
- src/main/resources
-
-
org.apache.rat
@@ -125,7 +41,7 @@
org.apache.maven.plugins
maven-shade-plugin
- 2.4
+ ${maven-shade-plugin.version}
package
@@ -134,73 +50,35 @@
true
+ ${project.build.directory}/dependency-reduced-pom.xml
+
+
+
+ org.apache.hudi:hudi-common
+ org.apache.hudi:hudi-hadoop-mr
+
+ org.apache.parquet:parquet-avro
+ com.esotericsoftware:kryo-shaded
+ org.objenesis:objenesis
+ com.esotericsoftware:minlog
+
+
-
- com.beust.
- org.apache.hudi.com.beust.
-
-
- org.joda.
- org.apache.hudi.org.joda.
-
-
- com.google.
- org.apache.hudi.com.google.
-
-
- org.slf4j.
- org.apache.hudi.org.slf4j.
-
-
- org.apache.commons.
- org.apache.hudi.org.apache.commons.
-
-
- parquet.column
- org.apache.hudi.parquet.column
-
-
- parquet.format.
- org.apache.hudi.parquet.format.
-
-
- parquet.hadoop.
- org.apache.hudi.parquet.hadoop.
-
-
- parquet.schema.
- org.apache.hudi.parquet.schema.
-
-
- com.esotericsoftware.kryo.
- org.apache.hudi.com.esotericsoftware.kryo.
-
-
- org.objenesis.
- org.apache.hudi.org.objenesis.
-
-
- com.esotericsoftware.minlog.
- org.apache.hudi.com.esotericsoftware.minlog.
-
+
+
+ com.esotericsoftware.kryo.
+ org.apache.hudi.com.esotericsoftware.kryo.
+
+
+ org.objenesis.
+ org.apache.hudi.org.objenesis.
+
+
+ com.esotericsoftware.minlog.
+ org.apache.hudi.com.esotericsoftware.minlog.
+
false
-
-
- log4j:log4j
- org.apache.hadoop:*
- org.apache.hive:*
- org.apache.derby:derby
-
- org.apache.thrift:*
-
- org.apache.httpcomponents:*
-
- com.fasterxml.jackson.core:*
- com.fasterxml.jackson.datatype:jackson-datatype-guava
- org.apache.parquet:*
-
-
*:*
@@ -220,11 +98,27 @@
+
+
+ src/main/resources
+
+
+ src/test/resources
+
+
-
- true
- ${project.basedir}/src/main/resources/META-INF
- HUDI_NOTICE.txt
-
+
+
+
+ org.apache.hudi
+ hudi-common
+ ${project.version}
+
+
+ org.apache.hudi
+ hudi-hadoop-mr-bundle
+ ${project.version}
+
+
diff --git a/packaging/hudi-spark-bundle/pom.xml b/packaging/hudi-spark-bundle/pom.xml
index bcd5beaff..d36d3a63c 100644
--- a/packaging/hudi-spark-bundle/pom.xml
+++ b/packaging/hudi-spark-bundle/pom.xml
@@ -23,45 +23,17 @@
../../pom.xml
4.0.0
-
- org.apache.hudi
hudi-spark-bundle
jar
- 1.2.17
- 4.10
true
${project.basedir}/src/main/resources/META-INF
HUDI_NOTICE.txt
-
-
- src/main/resources
-
-
-
- org.apache.maven.plugins
- maven-dependency-plugin
-
-
- copy-dependencies
- prepare-package
-
- copy-dependencies
-
-
- ${project.build.directory}/lib
- true
- true
- true
-
-
-
-
org.apache.rat
apache-rat-plugin
@@ -69,7 +41,7 @@
org.apache.maven.plugins
maven-shade-plugin
- 2.4
+ ${maven-shade-plugin.version}
package
@@ -78,54 +50,82 @@
true
+ ${project.build.directory}/dependency-reduced-pom.xml
+
+
+
+ org.apache.hudi:hudi-common
+ org.apache.hudi:hudi-client
+ org.apache.hudi:hudi-spark
+ org.apache.hudi:hudi-hive
+ org.apache.hudi:hudi-hadoop-mr
+ org.apache.hudi:hudi-timeline-service
+
+ com.beust:jcommander
+ io.javalin:javalin
+
+ org.eclipse.jetty:*
+ org.eclipse.jetty.websocket:*
+ org.jetbrains.kotlin:*
+ org.rocksdb:rocksdbjni
+ org.apache.httpcomponents:httpclient
+ org.apache.httpcomponents:fluent-hc
+ org.antlr:stringtemplate
+ org.apache.parquet:parquet-avro
+
+ com.twitter:bijection-avro_2.11
+ com.twitter:bijection-core_2.11
+ io.dropwizard.metrics:metrics-core
+ io.dropwizard.metrics:metrics-graphite
+ com.yammer.metrics:metrics-core
+
+ org.apache.hive:hive-common
+ org.apache.hive:hive-service
+ org.apache.hive:hive-service-rpc
+ org.apache.hive:hive-metastore
+ org.apache.hive:hive-jdbc
+
+
- com.beust.
- org.apache.hudi.com.beust.
+ com.beust.jcommander.
+ org.apache.hudi.com.beust.jcommander.
- org.joda.
- org.apache.hudi.org.joda.
+ org.apache.hive.jdbc.
+ org.apache.hudi.org.apache.hive.jdbc.
- com.google.
- org.apache.hudi.com.google.
+ org.apache.hadoop.hive.metastore.
+ org.apache.hudi.org.apache.hadoop_hive.metastore.
- org.slf4j.
- org.apache.hudi.org.slf4j.
+ org.apache.hive.common.
+ org.apache.hudi.org.apache.hive.common.
- org.apache.
- org.apache.hudi.org.apache.
-
- com.databricks.spark.**
- org.apache.avro.**
- org.apache.derby.**
- org.apache.hadoop.**
- org.apache.hive.**
- org.apache.hudi.**
- org.apache.logging.log4j.**
- org.apache.log4j.**
- org.apache.spark.**
- org.apache.thrift.**
-
+ org.apache.hadoop.hive.common.
+ org.apache.hudi.org.apache.hadoop_hive.common.
- parquet.column
- org.apache.hudi.parquet.column
+ org.apache.hadoop.hive.conf.
+ org.apache.hudi.org.apache.hadoop_hive.conf.
- parquet.format.
- org.apache.hudi.parquet.format.
+ org.apache.hive.service.
+ org.apache.hudi.org.apache.hive.service.
- parquet.hadoop.
- org.apache.hudi.parquet.hadoop.
+ org.apache.hadoop.hive.service.
+ org.apache.hudi.org.apache.hadoop_hive.service.
- parquet.schema
- org.apache.hudi.parquet.schema
+ com.codahale.metrics.
+ org.apache.hudi.com.codahale.metrics.
+
+
+ org.apache.commons.codec.
+ org.apache.hudi.org.apache.commons.codec.
-
- com.esotericsoftware.kryo.
- org.apache.hudi.com.esotericsoftware.kryo.
-
-
- org.objenesis.
- org.apache.hudi.org.objenesis.
-
-
- com.esotericsoftware.minlog.
- org.apache.hudi.com.esotericsoftware.minlog.
-
- false
-
-
- com.databricks:spark-avro_2.11
- log4j:*
- org.apache.avro:*
- org.apache.derby:derby
- org.apache.hadoop:*
- org.apache.hbase:*
-
- org.apache.hive:hive-exec
- org.apache.hive:hive-shims
- org.apache.spark:*
-
-
-
+
*:*
@@ -209,25 +182,26 @@
+
+
+ src/main/resources
+
+
+ src/test/resources
+
+
-
-
- org.scala-lang
- scala-library
- ${scala.version}
-
-
org.apache.hudi
- hudi-client
+ hudi-common
${project.version}
org.apache.hudi
- hudi-common
+ hudi-client
${project.version}
@@ -246,117 +220,42 @@
${project.version}
-
+
- log4j
- log4j
- ${log4j.version}
+ org.apache.parquet
+ parquet-avro
+ compile
-
-
- com.fasterxml.jackson.core
- jackson-annotations
-
-
-
-
- org.apache.avro
- avro
-
-
-
-
- org.apache.spark
- spark-core_2.11
-
-
- org.apache.spark
- spark-sql_2.11
-
-
-
-
- com.databricks
- spark-avro_2.11
- 4.0.0
-
-
-
- com.beust
- jcommander
-
-
-
-
- commons-codec
- commons-codec
-
-
- commons-dbcp
- commons-dbcp
-
-
-
-
- org.apache.hadoop
- hadoop-client
-
-
- javax.servlet
- *
-
-
- provided
-
-
- org.apache.hadoop
- hadoop-common
- provided
-
-
-
-
- ${hive.groupid}
- hive-jdbc
- ${hive.version}
- standalone
-
-
- org.slf4j
- slf4j-api
-
-
- javax.servlet
- servlet-api
-
-
-
-
+
${hive.groupid}
hive-service
${hive.version}
compile
+
+
+ ${hive.groupid}
+ hive-service-rpc
+ ${hive.version}
+ compile
+
+
${hive.groupid}
hive-jdbc
${hive.version}
compile
-
- ${hive.groupid}
- hive-serde
- ${hive.version}
- compile
-
+
${hive.groupid}
hive-metastore
${hive.version}
compile
+
${hive.groupid}
hive-common
@@ -364,13 +263,6 @@
compile
-
-
- org.scalatest
- scalatest_2.11
- ${scalatest.version}
- test
-
diff --git a/packaging/hudi-utilities-bundle/pom.xml b/packaging/hudi-utilities-bundle/pom.xml
index e002c11ab..bf992da14 100644
--- a/packaging/hudi-utilities-bundle/pom.xml
+++ b/packaging/hudi-utilities-bundle/pom.xml
@@ -36,21 +36,13 @@
- org.jacoco
- jacoco-maven-plugin
-
-
- org.apache.maven.plugins
- maven-compiler-plugin
-
- 1.8
- 1.8
-
+ org.apache.rat
+ apache-rat-plugin
org.apache.maven.plugins
maven-shade-plugin
- 3.1.1
+ ${maven-shade-plugin.version}
package
@@ -63,10 +55,6 @@
- commons-codec:commons-codec
- commons-dbcp:commons-dbcp
- commons-lang:commons-lang
- commons-pool:commons-pool
org.apache.hudi:hudi-common
org.apache.hudi:hudi-client
org.apache.hudi:hudi-utilities
@@ -74,41 +62,38 @@
org.apache.hudi:hudi-hive
org.apache.hudi:hudi-hadoop-mr
org.apache.hudi:hudi-timeline-service
- com.beust:jcommander
+ com.beust:jcommander
io.javalin:javalin
- org.jetbrains.kotlin:*
+
org.eclipse.jetty:*
org.eclipse.jetty.websocket:*
+ org.jetbrains.kotlin:*
org.rocksdb:rocksdbjni
org.apache.httpcomponents:httpclient
org.apache.httpcomponents:fluent-hc
org.antlr:stringtemplate
- commons-io:commons-io
- commons-logging:commons-logging
org.apache.parquet:parquet-avro
com.twitter:bijection-avro_2.11
com.twitter:bijection-core_2.11
- org.apache.parquet:parquet-avro
io.confluent:kafka-avro-serializer
io.confluent:common-config
io.confluent:common-utils
io.confluent:kafka-schema-registry-client
io.dropwizard.metrics:metrics-core
io.dropwizard.metrics:metrics-graphite
+ com.yammer.metrics:metrics-core
org.apache.spark:spark-streaming-kafka-0-8_2.11
org.apache.kafka:kafka_2.11
com.101tec:zkclient
org.apache.kafka:kafka-clients
+
org.apache.hive:hive-common
org.apache.hive:hive-service
+ org.apache.hive:hive-service-rpc
org.apache.hive:hive-metastore
org.apache.hive:hive-jdbc
- com.esotericsoftware:kryo-shaded
- org.objenesis:objenesis
- com.esotericsoftware:minlog
- com.yammer.metrics:metrics-core
@@ -116,18 +101,6 @@
com.beust.jcommander.
org.apache.hudi.com.beust.jcommander.
-
- org.apache.commons.dbcp.
- org.apache.hudi.org.apache.commons.dbcp.
-
-
- org.apache.commons.lang.
- org.apache.hudi.org.apache.commons.lang.
-
-
- org.apache.commons.pool.
- org.apache.hudi.org.apache.commons.pool.
-
org.apache.hive.jdbc.
org.apache.hudi.org.apache.hive.jdbc.
@@ -156,18 +129,6 @@
org.apache.hadoop.hive.service.
org.apache.hudi.org.apache.hadoop_hive.service.
-
- com.esotericsoftware.kryo.
- org.apache.hudi.com.esotericsoftware.kryo.
-
-
- org.objenesis.
- org.apache.hudi.org.objenesis.
-
-
- com.esotericsoftware.minlog.
- org.apache.hudi.com.esotericsoftware.minlog.
-
com.codahale.metrics.
org.apache.hudi.com.codahale.metrics.
@@ -217,12 +178,12 @@
org.apache.hudi
- hudi-client
+ hudi-common
${project.version}
org.apache.hudi
- hudi-common
+ hudi-client
${project.version}
@@ -247,231 +208,47 @@
${project.version}
-
-
- log4j
- log4j
-
-
- org.slf4j
- slf4j-api
-
-
-
-
- com.fasterxml.jackson.module
- jackson-module-scala_2.11
-
-
-
-
- org.apache.avro
- avro-mapred
-
-
org.apache.parquet
parquet-avro
-
-
-
-
- org.apache.spark
- spark-core_2.11
-
-
- org.apache.spark
- spark-sql_2.11
-
-
- org.apache.spark
- spark-streaming_2.11
- ${spark.version}
- provided
-
-
- org.apache.spark
- spark-streaming-kafka-0-8_2.11
- ${spark.version}
-
-
-
-
- io.dropwizard.metrics
- metrics-core
-
-
-
- io.javalin
- javalin
- 2.4.0
-
-
-
- com.yammer.metrics
- metrics-core
- 2.2.0
-
-
-
-
- org.antlr
- stringtemplate
- 4.0.2
-
-
-
- com.beust
- jcommander
-
-
-
- com.twitter
- bijection-avro_2.11
- 0.9.2
-
-
-
-
- io.confluent
- kafka-avro-serializer
- 3.0.0
-
-
- io.confluent
- common-config
- 3.0.0
-
-
- io.confluent
- common-utils
- 3.0.0
-
-
- io.confluent
- kafka-schema-registry-client
- 3.0.0
-
-
-
-
- commons-codec
- commons-codec
-
-
- commons-dbcp
- commons-dbcp
-
-
- commons-pool
- commons-pool
-
-
-
-
- org.apache.httpcomponents
- httpcore
-
-
-
-
- org.apache.hadoop
- hadoop-client
-
-
- org.apache.hadoop
- hadoop-mapreduce-client-common
-
-
- javax.servlet
- servlet-api
-
-
+ compile
${hive.groupid}
- hive-jdbc
+ hive-service
${hive.version}
- standalone
-
-
- org.slf4j
- slf4j-api
-
-
- javax.servlet
- servlet-api
-
-
+ compile
-
-
- org.apache.hudi
- hudi-client
- ${project.version}
- tests
- test-jar
- test
-
-
- org.apache.hudi
- hudi-common
- ${project.version}
- tests
- test-jar
- test
-
-
- org.apache.hudi
- hudi-hive
- ${project.version}
- tests
- test-jar
- test
-
-
-
-
- org.apache.hadoop
- hadoop-common
- tests
-
-
- org.mortbay.jetty
- *
-
-
- javax.servlet.jsp
- *
-
-
- javax.servlet
- *
-
-
-
-
- org.apache.hadoop
- hadoop-hdfs
- tests
-
-
-
${hive.groupid}
- hive-exec
+ hive-service-rpc
${hive.version}
- test
+ compile
- org.mockito
- mockito-all
- test
+ ${hive.groupid}
+ hive-jdbc
+ ${hive.version}
+ compile
+
+
+
+ ${hive.groupid}
+ hive-metastore
+ ${hive.version}
+ compile
+
+
+
+ ${hive.groupid}
+ hive-common
+ ${hive.version}
+ compile
diff --git a/pom.xml b/pom.xml
index a4ef40f65..894667a52 100644
--- a/pom.xml
+++ b/pom.xml
@@ -131,6 +131,8 @@
2.6
2.19.1
+ 3.1.1
+
2.6.7
2.17
1.8.1
@@ -164,25 +166,12 @@
- scm:git:git@github.com:uber/hudi.git
- scm:git:git@github.com:uber/hudi.git
- git@github.com:uber/hudi.git
+ scm:git:git@github.com:apache/incubator-hudi.git
+ scm:git:git@github.com:apache/incubator-hudi.git
+ git@github.com:apache/incubator-hudi.git
HEAD
-
-
- User List
- hudi-user@googlegroups.com
- https://groups.google.com/d/forum/hudi-user/
-
-
- Developer List
- hudi-dev@googlegroups.com
- https://groups.google.com/d/forum/hudi-dev/
-
-
-
@@ -459,16 +448,6 @@
log4j
${log4j.version}
-
- org.slf4j
- slf4j-api
- ${slf4j.version}
-
-
- org.slf4j
- slf4j-log4j12
- ${slf4j.version}
-
@@ -526,11 +505,7 @@
org.apache.avro
avro
${avro.version}
-
-
- org.apache.avro
- avro-mapred
- ${avro.version}
+ provided
@@ -538,6 +513,7 @@
org.apache.parquet
parquet-avro
${parquet.version}
+ provided
@@ -610,38 +586,6 @@
5.17.2
-
-
- commons-codec
- commons-codec
- 1.4
-
-
- commons-io
- commons-io
- 2.6
-
-
- commons-lang
- commons-lang
- 2.6
-
-
- commons-logging
- commons-logging
- 1.2
-
-
- commons-dbcp
- commons-dbcp
- 1.4
-
-
- commons-pool
- commons-pool
- 1.4
-
-
org.apache.httpcomponents
diff --git a/style/checkstyle.xml b/style/checkstyle.xml
index 268fefc0b..947d14104 100644
--- a/style/checkstyle.xml
+++ b/style/checkstyle.xml
@@ -259,6 +259,7 @@
+