diff --git a/docs/configurations.md b/docs/configurations.md
index cf5c2d7b8..7042d8237 100644
--- a/docs/configurations.md
+++ b/docs/configurations.md
@@ -76,4 +76,7 @@ summary: "Here we list all possible configurations and what they mean"
- [usePrefix](#usePrefix) ()
Standard prefix for all metrics
+ - [S3Configs](s3_hoodie.html) (Hoodie S3 Configs)
+ Configurations required for S3 and Hoodie interoperability.
+
{% include callout.html content="Hoodie is a young project. A lot of pluggable interfaces and configurations to support diverse workloads need to be created. Get involved [here](https://github.com/uber/hoodie)" type="info" %}
diff --git a/docs/s3_filesystem.md b/docs/s3_filesystem.md
index c1bdd2fb1..faa424033 100644
--- a/docs/s3_filesystem.md
+++ b/docs/s3_filesystem.md
@@ -10,6 +10,10 @@ Hoodie works with HDFS by default. There is an experimental work going on Hoodie
## S3 configs
+There are two configurations required for Hoodie-S3 compatibility:
+- Adding AWS credentials for Hoodie
+- Adding the required JARs to the classpath
+
Add the required configs in your core-site.xml from where Hoodie can fetch them. Replace the `fs.defaultFS` with your S3 bucket name and Hoodie should be able to read/write from the bucket.
```
@@ -44,4 +48,8 @@ Add the required configs in your core-site.xml from where Hoodie can fetch them.
```
+AWS Hadoop libraries to add to your classpath:
+ - com.amazonaws:aws-java-sdk:1.10.34
+ - org.apache.hadoop:hadoop-aws:2.7.3
+
diff --git a/hoodie-client/pom.xml b/hoodie-client/pom.xml
index 617dc9c4b..11e6b85a3 100644
--- a/hoodie-client/pom.xml
+++ b/hoodie-client/pom.xml
@@ -118,6 +118,11 @@
io.dropwizard.metrics
metrics-core
+
+ com.beust
+ jcommander
+ 1.48
+
diff --git a/hoodie-client/src/test/java/HoodieClientExample.java b/hoodie-client/src/test/java/HoodieClientExample.java
index 711b4bb04..39724f67e 100644
--- a/hoodie-client/src/test/java/HoodieClientExample.java
+++ b/hoodie-client/src/test/java/HoodieClientExample.java
@@ -14,6 +14,9 @@
* limitations under the License.
*/
+
+import com.beust.jcommander.JCommander;
+import com.beust.jcommander.Parameter;
import com.uber.hoodie.HoodieWriteClient;
import com.uber.hoodie.common.HoodieTestDataGenerator;
import com.uber.hoodie.common.model.HoodieRecord;
@@ -22,13 +25,6 @@ import com.uber.hoodie.common.util.FSUtils;
import com.uber.hoodie.config.HoodieIndexConfig;
import com.uber.hoodie.config.HoodieWriteConfig;
import com.uber.hoodie.index.HoodieIndex;
-import org.apache.commons.cli.CommandLine;
-import org.apache.commons.cli.CommandLineParser;
-import org.apache.commons.cli.BasicParser;
-import org.apache.commons.cli.HelpFormatter;
-import org.apache.commons.cli.Option;
-import org.apache.commons.cli.Options;
-import org.apache.commons.cli.ParseException;
import org.apache.log4j.LogManager;
import org.apache.log4j.Logger;
import org.apache.spark.SparkConf;
@@ -44,38 +40,23 @@ import java.util.Properties;
*/
public class HoodieClientExample {
+ @Parameter(names={"--table-path", "-p"}, description = "path for Hoodie sample table")
+ private String inputTablePath = "file:///tmp/hoodie/sample-table";
+
+ @Parameter(names={"--table-name", "-n"}, description = "table name for Hoodie sample table")
+ private String inputTableName = "sample-table";
private static Logger logger = LogManager.getLogger(HoodieClientExample.class);
- private static final String DEFAULT_TABLE_PATH = "file:///tmp/hoodie/sample-table";
- private static final String DEFAULT_TABLE_NAME = "sample-table";
public static void main(String[] args) throws Exception {
- Options options = new Options();
- Option path = new Option("p", "table-path", true, "input table path");
- path.setRequired(false);
- options.addOption(path);
+ HoodieClientExample cli = new HoodieClientExample();
+ new JCommander(cli, args);
+ cli.run();
+ }
- Option name = new Option("n", "table-name", true, "input table name");
- name.setRequired(false);
- options.addOption(name);
-
- CommandLineParser parser = new BasicParser();
- HelpFormatter formatter = new HelpFormatter();
- CommandLine cmd;
-
- try {
- cmd = parser.parse(options, args);
- } catch (ParseException e) {
- System.out.println(e.getMessage());
- formatter.printHelp("HoodieClientExample", options);
- System.exit(1);
- return;
- }
-
- String inputTablePath = cmd.getOptionValue("table-path", DEFAULT_TABLE_PATH);
- String inputTableName = cmd.getOptionValue("table-name", DEFAULT_TABLE_NAME);
+ public void run() throws Exception {
HoodieTestDataGenerator dataGen = new HoodieTestDataGenerator();
SparkConf sparkConf = new SparkConf().setAppName("hoodie-client-example");
diff --git a/hoodie-common/src/main/java/com/uber/hoodie/common/util/FSUtils.java b/hoodie-common/src/main/java/com/uber/hoodie/common/util/FSUtils.java
index 77448d058..2f554aecb 100644
--- a/hoodie-common/src/main/java/com/uber/hoodie/common/util/FSUtils.java
+++ b/hoodie-common/src/main/java/com/uber/hoodie/common/util/FSUtils.java
@@ -18,7 +18,6 @@ package com.uber.hoodie.common.util;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
-import com.uber.hoodie.common.table.HoodieTimeline;
import com.uber.hoodie.common.table.log.HoodieLogFile;
import com.uber.hoodie.common.table.timeline.HoodieInstant;
import com.uber.hoodie.exception.HoodieIOException;