Reduce logging in unit-test runs

Balaji Varadarajan
2019-05-24 22:20:10 -07:00
committed by Balaji Varadarajan
parent f2d91a455e
commit d0d2fa0337
8 changed files with 25 additions and 16 deletions
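
Every change in this commit follows the same pattern: test code that printed directly to stdout via System.out.println is rerouted through a per-class log4j Logger, so unit-test verbosity is governed by the log4j configuration rather than always hitting the console. A minimal sketch of the pattern, assuming a hypothetical example class (the field declaration mirrors the one added to HoodieClientTestUtils below; the method and message are illustrative):

    import org.apache.log4j.LogManager;
    import org.apache.log4j.Logger;

    public class LoggingPatternExample {

      // One shared logger per class. The "transient" modifier mirrors the
      // declaration added in HoodieClientTestUtils; it has no effect on a
      // static field but matches the codebase's style.
      private static final transient Logger log = LogManager.getLogger(LoggingPatternExample.class);

      public void report(String path) {
        // Before: System.out.println("Path :" + path);
        // After: the message goes through log4j and can be filtered or
        // silenced by the test run's log4j.properties.
        log.info("Path :" + path);
      }
    }

With loggers in place, a test-scoped log4j.properties can raise the root level (e.g. to WARN) to keep unit-test runs quiet without touching the test code again.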

@@ -158,7 +158,7 @@ public class TestCompactionAdminClient extends TestHoodieClientBase {
Assert.assertTrue("Rename Files must be empty", renameFiles.isEmpty());
}
expRenameFiles.entrySet().stream().forEach(r -> {
System.out.println("Key :" + r.getKey() + " renamed to " + r.getValue() + " rolled back to "
logger.info("Key :" + r.getKey() + " renamed to " + r.getValue() + " rolled back to "
+ renameFilesFromUndo.get(r.getKey()));
});

@@ -50,6 +50,8 @@ import org.apache.avro.Schema;
import org.apache.avro.generic.GenericRecord;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
+import org.apache.log4j.LogManager;
+import org.apache.log4j.Logger;
import org.apache.parquet.avro.AvroSchemaConverter;
import org.apache.parquet.hadoop.ParquetWriter;
import org.apache.parquet.hadoop.metadata.CompressionCodecName;
@@ -64,6 +66,7 @@ import org.apache.spark.sql.SQLContext;
*/
public class HoodieClientTestUtils {
+private static final transient Logger log = LogManager.getLogger(HoodieClientTestUtils.class);
public static List<WriteStatus> collectStatuses(Iterator<List<WriteStatus>> statusListItr) {
List<WriteStatus> statuses = new ArrayList<>();
@@ -137,7 +140,7 @@ public class HoodieClientTestUtils {
try {
HashMap<String, String> paths = getLatestFileIDsToFullPath(basePath, commitTimeline,
Arrays.asList(commitInstant));
System.out.println("Path :" + paths.values());
log.info("Path :" + paths.values());
return sqlContext.read().parquet(paths.values().toArray(new String[paths.size()]))
.filter(String.format("%s ='%s'", HoodieRecord.COMMIT_TIME_METADATA_FIELD, commitTime));
} catch (Exception e) {

@@ -80,7 +80,6 @@ public class TestUpdateMapFunction implements Serializable {
public void testSchemaEvolutionOnUpdate() throws Exception {
// Create a bunch of records with a old version of schema
final HoodieWriteConfig config = makeHoodieClientConfig("/exampleSchema.txt");
System.out.println("JSC =" + jsc);
final HoodieCopyOnWriteTable table = new HoodieCopyOnWriteTable(config, jsc);
final List<WriteStatus> statuses = jsc.parallelize(Arrays.asList(1)).map(x -> {

@@ -50,6 +50,8 @@ import java.util.UUID;
import org.apache.avro.generic.GenericRecord;
import org.apache.commons.io.IOUtils;
import org.apache.hadoop.fs.Path;
+import org.apache.log4j.LogManager;
+import org.apache.log4j.Logger;
import org.apache.parquet.avro.AvroReadSupport;
import org.apache.parquet.hadoop.ParquetReader;
import org.apache.spark.TaskContext;
@@ -64,6 +66,8 @@ import scala.Tuple2;
public class TestCopyOnWriteTable {
+protected static Logger log = LogManager.getLogger(TestCopyOnWriteTable.class);
private String basePath = null;
private transient JavaSparkContext jsc = null;
@@ -378,7 +382,7 @@ public class TestCopyOnWriteTable {
int counts = 0;
for (File file : new File(basePath + "/2016/01/31").listFiles()) {
if (file.getName().endsWith(".parquet") && FSUtils.getCommitTime(file.getName()).equals(commitTime)) {
-System.out.println(file.getName() + "-" + file.length());
+log.info(file.getName() + "-" + file.length());
counts++;
}
}