improve documentation
commit bca7e7dae4 (parent d6f94b998d), committed by vinoth chandar
@@ -76,4 +76,7 @@ summary: "Here we list all possible configurations and what they mean"

- [usePrefix](#usePrefix) () <br/>
  <span style="color:grey">Standard prefix for all metrics</span>
- [S3Configs](s3_hoodie.html) (Hoodie S3 Configs) <br/>
  <span style="color:grey">Configurations required for S3 and Hoodie co-operability.</span>

{% include callout.html content="Hoodie is a young project. A lot of pluggable interfaces and configurations to support diverse workloads need to be created. Get involved [here](https://github.com/uber/hoodie)" type="info" %}
@@ -10,6 +10,10 @@ Hoodie works with HDFS by default. There is an experimental work going on Hoodie

## S3 configs

There are two configurations required for Hoodie-S3 compatibility:

- Adding AWS credentials for Hoodie
- Adding the required jars to the classpath

Add the required configs to your core-site.xml, from where Hoodie can fetch them. Replace the `fs.defaultFS` value with your S3 bucket name, and Hoodie should be able to read/write from the bucket.

```
@@ -44,4 +48,8 @@ Add the required configs in your core-site.xml from where Hoodie can fetch them.
</property>
```
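The XML between the two fences is mostly elided by the diff (only the trailing `</property>` is visible). As a sketch of what such a block typically contains, assuming the standard Hadoop 2.7-era S3 credential properties; the bucket name and keys below are placeholders, not text from this commit:

```
<!-- sketch only: placeholder values, standard Hadoop 2.7 S3 property names -->
<property>
  <name>fs.defaultFS</name>
  <value>s3://your-s3-bucket</value>
</property>
<property>
  <name>fs.s3.awsAccessKeyId</name>
  <value>YOUR_AWS_ACCESS_KEY</value>
</property>
<property>
  <name>fs.s3.awsSecretAccessKey</name>
  <value>YOUR_AWS_SECRET_KEY</value>
</property>
```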

AWS hadoop libraries to add to your classpath (one way to pull these in is sketched after the list):

- com.amazonaws:aws-java-sdk:1.10.34
- org.apache.hadoop:hadoop-aws:2.7.3
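Assuming jobs are launched through `spark-submit`, its `--packages` flag resolves these Maven coordinates at submit time; this is one option, not the only one, and the class and jar names below are placeholders:

```
spark-submit \
  --packages com.amazonaws:aws-java-sdk:1.10.34,org.apache.hadoop:hadoop-aws:2.7.3 \
  --class <your-main-class> <your-application-jar>
```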

@@ -118,6 +118,11 @@
       <groupId>io.dropwizard.metrics</groupId>
       <artifactId>metrics-core</artifactId>
     </dependency>
+    <dependency>
+      <groupId>com.beust</groupId>
+      <artifactId>jcommander</artifactId>
+      <version>1.48</version>
+    </dependency>
 
     <!-- Parent dependencies -->
     <dependency>

@@ -14,6 +14,9 @@
  * limitations under the License.
  */
 
+import com.beust.jcommander.JCommander;
+import com.beust.jcommander.Parameter;
+
 import com.uber.hoodie.HoodieWriteClient;
 import com.uber.hoodie.common.HoodieTestDataGenerator;
 import com.uber.hoodie.common.model.HoodieRecord;
@@ -22,13 +25,6 @@ import com.uber.hoodie.common.util.FSUtils;
 import com.uber.hoodie.config.HoodieIndexConfig;
 import com.uber.hoodie.config.HoodieWriteConfig;
 import com.uber.hoodie.index.HoodieIndex;
-import org.apache.commons.cli.CommandLine;
-import org.apache.commons.cli.CommandLineParser;
-import org.apache.commons.cli.BasicParser;
-import org.apache.commons.cli.HelpFormatter;
-import org.apache.commons.cli.Option;
-import org.apache.commons.cli.Options;
-import org.apache.commons.cli.ParseException;
 import org.apache.log4j.LogManager;
 import org.apache.log4j.Logger;
 import org.apache.spark.SparkConf;
@@ -44,38 +40,23 @@ import java.util.Properties;
  */
 public class HoodieClientExample {
 
+  @Parameter(names={"--table-path", "-p"}, description = "path for Hoodie sample table")
+  private String inputTablePath = "file:///tmp/hoodie/sample-table";
+
+  @Parameter(names={"--table-name", "-n"}, description = "table name for Hoodie sample table")
+  private String inputTableName = "sample-table";
+
   private static Logger logger = LogManager.getLogger(HoodieClientExample.class);
 
-  private static final String DEFAULT_TABLE_PATH = "file:///tmp/hoodie/sample-table";
-  private static final String DEFAULT_TABLE_NAME = "sample-table";
-
   public static void main(String[] args) throws Exception {
-    Options options = new Options();
-    Option path = new Option("p", "table-path", true, "input table path");
-    path.setRequired(false);
-    options.addOption(path);
+    HoodieClientExample cli = new HoodieClientExample();
+    new JCommander(cli, args);
+    cli.run();
+  }
 
-    Option name = new Option("n", "table-name", true, "input table name");
-    name.setRequired(false);
-    options.addOption(name);
-
-    CommandLineParser parser = new BasicParser();
-    HelpFormatter formatter = new HelpFormatter();
-    CommandLine cmd;
-
-    try {
-      cmd = parser.parse(options, args);
-    } catch (ParseException e) {
-      System.out.println(e.getMessage());
-      formatter.printHelp("HoodieClientExample", options);
-      System.exit(1);
-      return;
-    }
-
-    String inputTablePath = cmd.getOptionValue("table-path", DEFAULT_TABLE_PATH);
-    String inputTableName = cmd.getOptionValue("table-name", DEFAULT_TABLE_NAME);
-
+  public void run() throws Exception {
     HoodieTestDataGenerator dataGen = new HoodieTestDataGenerator();
 
     SparkConf sparkConf = new SparkConf().setAppName("hoodie-client-example");
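The pattern this hunk introduces (fields annotated with `@Parameter`, populated by `new JCommander(object, args)`) is stock JCommander usage, matching the `com.beust:jcommander:1.48` dependency added to the pom above. Field initializers double as defaults when a flag is absent, which is what lets the commit drop the `DEFAULT_TABLE_*` constants. A minimal self-contained sketch of the same idea (the class name here is illustrative, not part of the commit):

```
import com.beust.jcommander.JCommander;
import com.beust.jcommander.Parameter;

public class ArgsSketch {
  // The initializer is the fallback value used when the flag is not given.
  @Parameter(names = {"--table-path", "-p"}, description = "path for the sample table")
  private String tablePath = "file:///tmp/hoodie/sample-table";

  @Parameter(names = {"--table-name", "-n"}, description = "name for the sample table")
  private String tableName = "sample-table";

  public static void main(String[] args) {
    ArgsSketch cli = new ArgsSketch();
    new JCommander(cli, args); // parse args and populate the annotated fields
    System.out.println("table " + cli.tableName + " at " + cli.tablePath);
  }
}
```

Compared with the commons-cli version it replaces, there is no hand-built `Options`/`HelpFormatter` plumbing; JCommander derives both the parsing and the usage text from the annotations.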

@@ -18,7 +18,6 @@ package com.uber.hoodie.common.util;
 
-import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Preconditions;
 import com.uber.hoodie.common.table.HoodieTimeline;
 import com.uber.hoodie.common.table.log.HoodieLogFile;
 import com.uber.hoodie.common.table.timeline.HoodieInstant;
 import com.uber.hoodie.exception.HoodieIOException;