diff --git a/hoodie-client/src/main/java/com/uber/hoodie/HoodieReadClient.java b/hoodie-client/src/main/java/com/uber/hoodie/HoodieReadClient.java index 42f5bf6c3..157aa75c1 100644 --- a/hoodie-client/src/main/java/com/uber/hoodie/HoodieReadClient.java +++ b/hoodie-client/src/main/java/com/uber/hoodie/HoodieReadClient.java @@ -70,7 +70,7 @@ public class HoodieReadClient implements Serializable { private transient final FileSystem fs; /** - * TODO: We need to persist the index type into hoodie.properties & be able to access the index + * TODO: We need to persist the index type into hoodie.properties and be able to access the index * just with a simple basepath pointing to the dataset. Until, then just always assume a * BloomIndex */ diff --git a/hoodie-client/src/main/java/com/uber/hoodie/index/HoodieBloomIndex.java b/hoodie-client/src/main/java/com/uber/hoodie/index/HoodieBloomIndex.java index 71fef152a..a20325da9 100644 --- a/hoodie-client/src/main/java/com/uber/hoodie/index/HoodieBloomIndex.java +++ b/hoodie-client/src/main/java/com/uber/hoodie/index/HoodieBloomIndex.java @@ -323,7 +323,7 @@ public class HoodieBloomIndex extends HoodieIndex /** * Find out pair. All workload grouped by file-level. 
* - * // Join PairRDD(PartitionPath, RecordKey) and PairRDD(PartitionPath, File) & then repartition such that + * // Join PairRDD(PartitionPath, RecordKey) and PairRDD(PartitionPath, File) and then repartition such that // each RDD partition is a file, then for each file, we do (1) load bloom filter, (2) load rowKeys, (3) Tag rowKey // Make sure the parallelism is atleast the groupby parallelism for tagging location */ diff --git a/hoodie-client/src/test/java/com/uber/hoodie/common/HoodieTestDataGenerator.java b/hoodie-client/src/test/java/com/uber/hoodie/common/HoodieTestDataGenerator.java index ecd87a4ec..530d798d9 100644 --- a/hoodie-client/src/test/java/com/uber/hoodie/common/HoodieTestDataGenerator.java +++ b/hoodie-client/src/test/java/com/uber/hoodie/common/HoodieTestDataGenerator.java @@ -40,7 +40,7 @@ import java.util.Random; import java.util.UUID; /** - * Class to be used in tests to keep generating test inserts & updates against a corpus. + * Class to be used in tests to keep generating test inserts and updates against a corpus. * * Test data uses a toy Uber trips, data model. 
*/ diff --git a/hoodie-hadoop-mr/src/main/java/com/uber/hoodie/hadoop/HoodieROTablePathFilter.java b/hoodie-hadoop-mr/src/main/java/com/uber/hoodie/hadoop/HoodieROTablePathFilter.java index c8f991587..5bf482580 100644 --- a/hoodie-hadoop-mr/src/main/java/com/uber/hoodie/hadoop/HoodieROTablePathFilter.java +++ b/hoodie-hadoop-mr/src/main/java/com/uber/hoodie/hadoop/HoodieROTablePathFilter.java @@ -33,11 +33,11 @@ import java.util.HashSet; /** * Given a path is a part of - * - Hoodie dataset => accepts ONLY the latest version of each path - * - Non-Hoodie dataset => then always accept + * - Hoodie dataset -> accepts ONLY the latest version of each path + * - Non-Hoodie dataset -> then always accept * - * We can set this filter, on a query engine's Hadoop Config & if it respects path filters, then - * you should be able to query both hoodie & non-hoodie datasets as you would normally do. + * We can set this filter, on a query engine's Hadoop Config and if it respects path filters, then + * you should be able to query both hoodie and non-hoodie datasets as you would normally do. * * hadoopConf.setClass("mapreduce.input.pathFilter.class", * com.uber.hoodie.hadoop.HoodieROTablePathFilter.class, @@ -50,7 +50,7 @@ public class HoodieROTablePathFilter implements PathFilter, Serializable { /** * Its quite common, to have all files from a given partition path be passed into accept(), - * cache the check for hoodie metadata for known partition paths & the latest versions of files + * cache the check for hoodie metadata for known partition paths and the latest versions of files */ private HashMap> hoodiePathCache; diff --git a/pom.xml b/pom.xml index 752a04828..9d0356f86 100644 --- a/pom.xml +++ b/pom.xml @@ -503,6 +503,9 @@ + + -Xdoclint:none + org.apache.maven.plugins