1
0

[HUDI-1707] Reduces log level for too verbose messages from info to debug level. (#2714)

* Reduces the log level of overly verbose messages from INFO to DEBUG.
* Sort config output.
* Code Review : Small restructuring + rebasing to master
 - Fixing flaky multi delta streamer test
 - Using isDebugEnabled() checks
 - Some changes to shorten log message without moving to DEBUG

Co-authored-by: volodymyr.burenin <volodymyr.burenin@cloudkitchens.com>
Co-authored-by: Vinoth Chandar <vinoth@apache.org>
This commit is contained in:
Volodymyr Burenin
2021-05-10 09:16:02 -05:00
committed by GitHub
parent 511ac4881d
commit 8a48d16e41
9 changed files with 44 additions and 13 deletions

View File

@@ -694,7 +694,9 @@ public class DeltaSync implements Serializable {
schemas.add(targetSchema);
}
LOG.info("Registering Schema :" + schemas);
if (LOG.isDebugEnabled()) {
LOG.debug("Registering Schema: " + schemas);
}
jssc.sc().getConf().registerAvroSchemas(JavaConversions.asScalaBuffer(schemas).toList());
}
}

View File

@@ -63,6 +63,7 @@ import org.apache.spark.sql.SparkSession;
import java.io.IOException;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Objects;
@@ -437,6 +438,24 @@ public class HoodieDeltaStreamer implements Serializable {
}
}
/**
 * Renders the given properties as a "key: value" listing, one entry per line,
 * sorted alphabetically by key and prefixed with a short header. Values longer
 * than 255 characters are shortened to their first 128 characters followed by
 * "[...]" unless debug logging is enabled, in which case they appear in full.
 *
 * @param props the delta streamer configuration properties to render
 * @return the sorted, possibly truncated, multi-line configuration listing
 */
private static String toSortedTruncatedString(TypedProperties props) {
  List<String> sortedKeys = new ArrayList<>();
  for (Object key : props.keySet()) {
    sortedKeys.add(key.toString());
  }
  Collections.sort(sortedKeys);
  StringBuilder rendered = new StringBuilder("Creating delta streamer with configs:\n");
  for (String key : sortedKeys) {
    String value = Option.ofNullable(props.get(key)).orElse("").toString();
    // Keep log output compact: shorten overly long values unless DEBUG is on.
    if (value.length() > 255 && !LOG.isDebugEnabled()) {
      value = value.substring(0, 128) + "[...]";
    }
    rendered.append(key).append(": ").append(value).append("\n");
  }
  return rendered.toString();
}
public static final Config getConfig(String[] args) {
Config cfg = new Config();
JCommander cmd = new JCommander(cfg, null, args);
@@ -544,7 +563,8 @@ public class HoodieDeltaStreamer implements Serializable {
"'--filter-dupes' needs to be disabled when '--op' is 'UPSERT' to ensure updates are not missed.");
this.props = properties.get();
LOG.info("Creating delta streamer with configs : " + props.toString());
LOG.info(toSortedTruncatedString(props));
this.schemaProvider = UtilHelpers.wrapSchemaProviderWithPostProcessor(
UtilHelpers.createSchemaProvider(cfg.schemaProviderClassName, props, jssc), props, jssc, cfg.transformerClassNames);

View File

@@ -62,7 +62,7 @@ public class SqlQueryBasedTransformer implements Transformer {
LOG.info("Registering tmp table : " + tmpTable);
rowDataset.registerTempTable(tmpTable);
String sqlStr = transformerSQL.replaceAll(SRC_PATTERN, tmpTable);
LOG.info("SQL Query for transformation : (" + sqlStr + ")");
LOG.debug("SQL Query for transformation : (" + sqlStr + ")");
return sparkSession.sql(sqlStr);
}
}

View File

@@ -163,7 +163,12 @@ public class TestHoodieMultiTableDeltaStreamer extends TestHoodieDeltaStreamer {
//insert updates for already existing records in kafka topics
testUtils.sendMessages(topicName1, Helpers.jsonifyRecords(dataGenerator.generateUpdatesAsPerSchema("001", 5, HoodieTestDataGenerator.TRIP_SCHEMA)));
testUtils.sendMessages(topicName2, Helpers.jsonifyRecords(dataGenerator.generateUpdatesAsPerSchema("001", 10, HoodieTestDataGenerator.SHORT_TRIP_SCHEMA)));
streamer = new HoodieMultiTableDeltaStreamer(cfg, jsc);
streamer.getTableExecutionContexts().get(1).setProperties(properties);
streamer.getTableExecutionContexts().get(0).setProperties(properties1);
streamer.sync();
assertEquals(2, streamer.getSuccessTables().size());
assertTrue(streamer.getFailedTables().isEmpty());