1
0

[MINOR] Fix typos in Spark client related classes (#6204)

This commit is contained in:
Vander
2022-07-25 12:41:42 +08:00
committed by GitHub
parent 1a910fd473
commit 2a08a65f71
4 changed files with 7 additions and 7 deletions

View File

@@ -136,7 +136,7 @@ public abstract class SingleSparkJobExecutionStrategy<T extends HoodieRecordPayl
/**
* Execute clustering to write inputRecords into new files as defined by rules in strategy parameters.
* The number of new file groups created is bounded by numOutputGroups.
* Note that commit is not done as part of strategy. commit is callers responsibility.
* Note that commit is not done as part of strategy. Commit is the caller's responsibility.
*/
public abstract Iterator<List<WriteStatus>> performClusteringWithRecordsIterator(final Iterator<HoodieRecord<T>> records, final int numOutputGroups,
final String instantTime,

View File

@@ -81,7 +81,7 @@ public class SparkInternalSchemaConverter {
public static final String HOODIE_VALID_COMMITS_LIST = "hoodie.valid.commits.list";
/**
* Converts a spark schema to an hudi internal schema. Fields without IDs are kept and assigned fallback IDs.
* Convert a spark schema to a hudi internal schema. Fields without IDs are kept and assigned fallback IDs.
*
* @param sparkSchema a spark schema
* @return a matching internal schema for the provided spark schema
@@ -157,7 +157,7 @@ public class SparkInternalSchemaConverter {
}
/**
* Converts Spark schema to Hudi internal schema, and prune fields.
* Convert Spark schema to Hudi internal schema, and prune fields.
* Fields without IDs are kept and assigned fallback IDs.
*
* @param sparkSchema a pruned spark schema

View File

@@ -50,7 +50,7 @@ import java.util.stream.Stream;
import scala.collection.JavaConverters;
/**
* Spark validator utils to verify and run any precommit validators configured.
* Spark validator utils to verify and run any pre-commit validators configured.
*/
public class SparkValidatorUtils {
private static final Logger LOG = LogManager.getLogger(BaseSparkCommitActionExecutor.class);

View File

@@ -308,7 +308,7 @@ public class HoodieAvroDataBlock extends HoodieDataBlock {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
DataOutputStream output = new DataOutputStream(baos);
// 2. Compress and Write schema out
// 1. Compress and Write schema out
byte[] schemaContent = compress(schema.toString());
output.writeInt(schemaContent.length);
output.write(schemaContent);
@@ -318,10 +318,10 @@ public class HoodieAvroDataBlock extends HoodieDataBlock {
recordItr.forEachRemaining(records::add);
}
// 3. Write total number of records
// 2. Write total number of records
output.writeInt(records.size());
// 4. Write the records
// 3. Write the records
Iterator<IndexedRecord> itr = records.iterator();
while (itr.hasNext()) {
IndexedRecord s = itr.next();