1
0

Code-style formatting to conform to basic Checkstyle rules.

The code-style rules follow google style with some changes:

1. Increase line length from 100 to 120
2. Disable JavaDoc-related Checkstyle checks, as these need more manual work.

Both source and test code are checked for code style.
This commit is contained in:
Balaji Varadarajan
2018-03-20 16:29:20 -07:00
committed by vinoth chandar
parent 987f5d6b96
commit 788e4f2d2e
200 changed files with 6209 additions and 5975 deletions

View File

@@ -87,8 +87,8 @@ public class DataSourceUtils {
// Reflectively instantiates the configured HoodieRecordPayload implementation,
// invoking its (GenericRecord, Comparable) constructor via Commons Lang ConstructorUtils.
// Any failure (missing class, no matching constructor, constructor throw) is wrapped
// in an IOException that names the offending payload class.
// NOTE(review): this span is a rendered diff — the two consecutive return statements
// below appear to be the pre-reformat ("before") and Checkstyle-wrapped ("after")
// versions of the same call; only one existed in each revision. TODO confirm against
// the original commit.
public static HoodieRecordPayload createPayload(String payloadClass, GenericRecord record,
Comparable orderingVal) throws IOException {
try {
// "Before" side of the diff: call split after ConstructorUtils.
return (HoodieRecordPayload) ConstructorUtils
.invokeConstructor(Class.forName(payloadClass), (Object) record, (Object) orderingVal);
// "After" side of the diff: same call, wrapped to the new 120-column style.
return (HoodieRecordPayload) ConstructorUtils.invokeConstructor(Class.forName(payloadClass),
(Object) record, (Object) orderingVal);
// Throwable (not just Exception) is caught so reflective LinkageErrors are also
// surfaced as an IOException with the payload class named, preserving the cause.
} catch (Throwable e) {
throw new IOException("Could not create payload for class: " + payloadClass, e);
}
@@ -103,36 +103,26 @@ public class DataSourceUtils {
});
}
// Builds a HoodieWriteConfig (combine-on-insert/upsert, auto-commit off, BLOOM index,
// payload class taken from the DataSourceWriteOptions PAYLOAD_CLASS option, then
// overlaid with all caller-supplied parameters) and returns a HoodieWriteClient for it.
// NOTE(review): rendered diff — the method appears twice below: first the
// pre-reformat one-argument-per-line version, then the Checkstyle-conformant
// compact version. Only one existed in each revision. TODO confirm.
// "Before" side of the diff:
public static HoodieWriteClient createHoodieClient(JavaSparkContext jssc,
String schemaStr,
String basePath,
String tblName,
Map<String, String> parameters) throws Exception {
HoodieWriteConfig writeConfig = HoodieWriteConfig.newBuilder()
.combineInput(true, true)
.withPath(basePath)
.withAutoCommit(false)
.withSchema(schemaStr)
.forTable(tblName)
.withIndexConfig(
HoodieIndexConfig.newBuilder()
.withIndexType(HoodieIndex.IndexType.BLOOM)
.build())
// "After" side of the diff (reformatted signature and builder chain):
public static HoodieWriteClient createHoodieClient(JavaSparkContext jssc, String schemaStr,
String basePath, String tblName, Map<String, String> parameters) throws Exception {
HoodieWriteConfig writeConfig = HoodieWriteConfig.newBuilder().combineInput(true, true)
.withPath(basePath).withAutoCommit(false)
.withSchema(schemaStr).forTable(tblName).withIndexConfig(
HoodieIndexConfig.newBuilder().withIndexType(HoodieIndex.IndexType.BLOOM).build())
.withCompactionConfig(HoodieCompactionConfig.newBuilder()
// Both diff sides of the withPayloadClass(...) call are rendered back to back here.
.withPayloadClass(parameters.get(DataSourceWriteOptions.PAYLOAD_CLASS_OPT_KEY()))
.withPayloadClass(parameters.get(
DataSourceWriteOptions
.PAYLOAD_CLASS_OPT_KEY()))
.build())
// override above with Hoodie configs specified as options.
.withProps(parameters)
.build();
.withProps(parameters).build();
// Raw-typed client parameterized at the call site via the diamond operator.
return new HoodieWriteClient<>(jssc, writeConfig);
}
public static JavaRDD<WriteStatus> doWriteOperation(HoodieWriteClient client,
JavaRDD<HoodieRecord> hoodieRecords,
String commitTime,
String operation) {
JavaRDD<HoodieRecord> hoodieRecords, String commitTime, String operation) {
if (operation.equals(DataSourceWriteOptions.BULK_INSERT_OPERATION_OPT_VAL())) {
return client.bulkInsert(hoodieRecords, commitTime);
} else if (operation.equals(DataSourceWriteOptions.INSERT_OPERATION_OPT_VAL())) {
@@ -143,14 +133,9 @@ public class DataSourceUtils {
}
}
// Wraps an Avro GenericRecord into a HoodieRecord keyed by hKey, using a payload
// instance created reflectively from payloadClass with the given ordering value.
// Propagates the IOException thrown by DataSourceUtils.createPayload on failure.
// NOTE(review): rendered diff — the "before" multi-line signature and payload call
// are followed by the reformatted "after" versions; only one existed in each
// revision. TODO confirm against the original commit.
// "Before" side of the diff:
public static HoodieRecord createHoodieRecord(GenericRecord gr,
Comparable orderingVal,
HoodieKey hKey,
String payloadClass) throws IOException {
HoodieRecordPayload payload = DataSourceUtils.createPayload(
payloadClass,
gr,
orderingVal);
// "After" side of the diff (Checkstyle-conformant wrapping):
public static HoodieRecord createHoodieRecord(GenericRecord gr, Comparable orderingVal,
HoodieKey hKey, String payloadClass) throws IOException {
HoodieRecordPayload payload = DataSourceUtils.createPayload(payloadClass, gr, orderingVal);
return new HoodieRecord<>(hKey, payload);
}
}