[HUDI-4097] add table info to jobStatus (#5529)
Co-authored-by: wqwl611 <wqwl611@gmail.com>
This commit is contained in:
@@ -174,7 +174,7 @@ public class HDFSParquetImporter implements Serializable {
     ParquetInputFormat.setReadSupportClass(job, (AvroReadSupport.class));

     HoodieEngineContext context = new HoodieSparkEngineContext(jsc);
-    context.setJobStatus(this.getClass().getSimpleName(), "Build records for import");
+    context.setJobStatus(this.getClass().getSimpleName(), "Build records for import: " + cfg.tableName);
     return jsc.newAPIHadoopFile(cfg.srcPath, ParquetInputFormat.class, Void.class, GenericRecord.class,
         job.getConfiguration())
         // To reduce large number of tasks.
@@ -107,7 +107,7 @@ public class HoodieSnapshotCopier implements Serializable {
       fs.delete(new Path(outputDir), true);
     }

-    context.setJobStatus(this.getClass().getSimpleName(), "Creating a snapshot");
+    context.setJobStatus(this.getClass().getSimpleName(), "Creating a snapshot: " + baseDir);

     List<Tuple2<String, String>> filesToCopy = context.flatMap(partitions, partition -> {
       // Only take latest version files <= latestCommit.
@@ -177,7 +177,7 @@ public class HoodieSnapshotExporter {
         : ReflectionUtils.loadClass(cfg.outputPartitioner);

     HoodieEngineContext context = new HoodieSparkEngineContext(jsc);
-    context.setJobStatus(this.getClass().getSimpleName(), "Exporting as non-HUDI dataset");
+    context.setJobStatus(this.getClass().getSimpleName(), "Exporting as non-HUDI dataset: " + cfg.targetOutputPath);
     final BaseFileOnlyView fsView = getBaseFileOnlyView(jsc, cfg);
     Iterator<String> exportingFilePaths = jsc
         .parallelize(partitions, partitions.size())
Reference in New Issue
Block a user