1
0

MINOR_CHECKSTYLE (#3616)

Fix checkstyle
This commit is contained in:
liujinhui
2021-09-07 18:19:39 +08:00
committed by GitHub
parent cf002b6918
commit eb5e7eec0a
11 changed files with 19 additions and 19 deletions

View File

@@ -171,7 +171,7 @@ public class HoodieTestSuiteWriter implements Serializable {
}
public JavaRDD<WriteStatus> insertOverwrite(Option<String> instantTime) throws Exception {
-if(cfg.useDeltaStreamer){
+if (cfg.useDeltaStreamer) {
return deltaStreamerWrapper.insertOverwrite();
} else {
Pair<SchemaProvider, Pair<String, JavaRDD<HoodieRecord>>> nextBatch = fetchSource();
@@ -181,7 +181,7 @@ public class HoodieTestSuiteWriter implements Serializable {
}
public JavaRDD<WriteStatus> insertOverwriteTable(Option<String> instantTime) throws Exception {
-if(cfg.useDeltaStreamer){
+if (cfg.useDeltaStreamer) {
return deltaStreamerWrapper.insertOverwriteTable();
} else {
Pair<SchemaProvider, Pair<String, JavaRDD<HoodieRecord>>> nextBatch = fetchSource();

View File

@@ -102,7 +102,7 @@ public class DagUtils {
case DAG_CONTENT:
JsonNode dagContent = dagNode.getValue();
Iterator<Entry<String, JsonNode>> contentItr = dagContent.fields();
-while(contentItr.hasNext()) {
+while (contentItr.hasNext()) {
Entry<String, JsonNode> dagContentNode = contentItr.next();
allNodes.put(dagContentNode.getKey(), convertJsonToDagNode(allNodes, dagContentNode.getKey(), dagContentNode.getValue()));
}

View File

@@ -43,7 +43,7 @@ public abstract class DagNode<O> implements Comparable<DagNode<O>> {
public DagNode clone() {
List<DagNode<O>> tempChildNodes = new ArrayList<>();
-for(DagNode dagNode: childNodes) {
+for (DagNode dagNode: childNodes) {
tempChildNodes.add(dagNode.clone());
}
this.childNodes = tempChildNodes;

View File

@@ -37,7 +37,7 @@ public class DelayNode extends DagNode<Boolean> {
@Override
public void execute(ExecutionContext context, int curItrCount) throws Exception {
-log.warn("Waiting for "+ delayMins+" mins before going for next test run");
+log.warn("Waiting for " + delayMins + " mins before going for next test run");
Thread.sleep(delayMins * 60 * 1000);
}
}

View File

@@ -77,9 +77,9 @@ public class ValidateAsyncOperations extends DagNode<Option<String>> {
}
if (config.validateArchival() || config.validateClean()) {
-Pattern ARCHIVE_FILE_PATTERN =
+final Pattern ARCHIVE_FILE_PATTERN =
 Pattern.compile("\\.commits_\\.archive\\..*");
-Pattern CLEAN_FILE_PATTERN =
+final Pattern CLEAN_FILE_PATTERN =
 Pattern.compile(".*\\.clean\\..*");
String metadataPath = executionContext.getHoodieTestSuiteWriter().getCfg().targetBasePath + "/.hoodie";

View File

@@ -72,8 +72,8 @@ public class HoodieTestHiveBase extends ITTestBase {
}
// Run Hoodie Java App
-String cmd = String.format("%s --hive-sync --table-path %s --hive-url %s --table-type %s --hive-table %s" +
-" --commit-type %s --table-name %s", HOODIE_GENERATE_APP, hdfsUrl, HIVE_SERVER_JDBC_URL,
+String cmd = String.format("%s --hive-sync --table-path %s --hive-url %s --table-type %s --hive-table %s"
++ " --commit-type %s --table-name %s", HOODIE_GENERATE_APP, hdfsUrl, HIVE_SERVER_JDBC_URL,
tableType, hiveTableName, commitType, hoodieTableName);
if (partitionType == PartitionType.MULTI_KEYS_PARTITIONED) {
cmd = cmd + " --use-multi-partition-keys";

View File

@@ -90,7 +90,6 @@ public class ITTestHoodieDemo extends ITTestBase {
+ " --hoodie-conf hoodie.datasource.hive_sync.database=default "
+ " --hoodie-conf hoodie.datasource.hive_sync.table=%s";
@AfterEach
public void clean() throws Exception {
String hdfsCmd = "hdfs dfs -rm -R ";

View File

@@ -57,8 +57,8 @@ public class TestGenericRecordPayloadGenerator {
@Test
public void testComplexPayload() throws IOException {
Schema schema = new Schema.Parser().parse(UtilitiesTestBase.Helpers
-.readFileFromAbsolutePath(System.getProperty("user.dir") + "/.." +
-COMPLEX_SOURCE_SCHEMA_DOCKER_DEMO_RELATIVE_PATH));
+.readFileFromAbsolutePath(System.getProperty("user.dir") + "/.."
++ COMPLEX_SOURCE_SCHEMA_DOCKER_DEMO_RELATIVE_PATH));
GenericRecordFullPayloadGenerator payloadGenerator = new GenericRecordFullPayloadGenerator(schema);
GenericRecord record = payloadGenerator.getNewPayload();
// The generated payload should validate with the provided schema
@@ -68,8 +68,8 @@ public class TestGenericRecordPayloadGenerator {
@Test
public void testComplexPartialPayload() throws IOException {
Schema schema = new Schema.Parser().parse(UtilitiesTestBase.Helpers
-.readFileFromAbsolutePath(System.getProperty("user.dir") + "/.." +
-COMPLEX_SOURCE_SCHEMA_DOCKER_DEMO_RELATIVE_PATH));
+.readFileFromAbsolutePath(System.getProperty("user.dir") + "/.."
++ COMPLEX_SOURCE_SCHEMA_DOCKER_DEMO_RELATIVE_PATH));
GenericRecordPartialPayloadGenerator payloadGenerator = new GenericRecordPartialPayloadGenerator(schema);
IntStream.range(0, 10).forEach(a -> {
GenericRecord record = payloadGenerator.getNewPayload();
@@ -124,8 +124,8 @@ public class TestGenericRecordPayloadGenerator {
@Test
public void testComplexPayloadWithLargeMinSize() throws Exception {
Schema schema = new Schema.Parser().parse(UtilitiesTestBase.Helpers
-.readFileFromAbsolutePath(System.getProperty("user.dir") + "/.." +
-COMPLEX_SOURCE_SCHEMA_DOCKER_DEMO_RELATIVE_PATH));
+.readFileFromAbsolutePath(System.getProperty("user.dir") + "/.."
++ COMPLEX_SOURCE_SCHEMA_DOCKER_DEMO_RELATIVE_PATH));
int minPayloadSize = 10000;
GenericRecordFullPayloadGenerator payloadGenerator = new GenericRecordFullPayloadGenerator(
schema, minPayloadSize);