1
0

[MINOR] Fix typos in Spark client related classes (#4781)

This commit is contained in:
Y Ethan Guo
2022-02-13 06:41:58 -08:00
committed by GitHub
parent ce9762d588
commit 6aba00e84f
45 changed files with 131 additions and 118 deletions

View File

@@ -164,7 +164,7 @@ public class HoodieJavaStreamingApp {
ExecutorService executor = Executors.newFixedThreadPool(2);
int numInitialCommits = 0;
-// thread for spark strucutured streaming
+// thread for spark structured streaming
try {
Future<Void> streamFuture = executor.submit(() -> {
LOG.info("===== Streaming Starting =====");
@@ -211,7 +211,7 @@ public class HoodieJavaStreamingApp {
Dataset<Row> inputDF3 = newSpark.read().json(jssc.parallelize(deletes, 2));
executor = Executors.newFixedThreadPool(2);
-// thread for spark strucutured streaming
+// thread for spark structured streaming
try {
Future<Void> streamFuture = executor.submit(() -> {
LOG.info("===== Streaming Starting =====");

View File

@@ -191,7 +191,7 @@ public class TestDataSourceUtils {
@Test
public void testCreateUserDefinedBulkInsertPartitionerRowsWithInValidPartitioner() throws HoodieException {
-config = HoodieWriteConfig.newBuilder().withPath("/").withUserDefinedBulkInsertPartitionerClass("NonExistantUserDefinedClass").build();
+config = HoodieWriteConfig.newBuilder().withPath("/").withUserDefinedBulkInsertPartitionerClass("NonExistentUserDefinedClass").build();
Exception exception = assertThrows(HoodieException.class, () -> {
DataSourceUtils.createUserDefinedBulkInsertPartitionerWithRows(config);

View File

@@ -179,7 +179,7 @@ public class TestBootstrap extends HoodieClientTestBase {
}
@Test
-public void testMetadataBootstrapUnpartitionedCOW() throws Exception {
+public void testMetadataBootstrapNonpartitionedCOW() throws Exception {
testBootstrapCommon(false, false, EffectiveMode.METADATA_BOOTSTRAP_MODE);
}
@@ -229,7 +229,7 @@ public class TestBootstrap extends HoodieClientTestBase {
bootstrapInstants = Arrays.asList(bootstrapCommitInstantTs);
break;
default:
-bootstrapModeSelectorClass = TestRandomBootstapModeSelector.class.getName();
+bootstrapModeSelectorClass = TestRandomBootstrapModeSelector.class.getName();
bootstrapCommitInstantTs = HoodieTimeline.FULL_BOOTSTRAP_INSTANT_TS;
checkNumRawFiles = false;
isBootstrapIndexCreated = true;
@@ -523,11 +523,11 @@ public class TestBootstrap extends HoodieClientTestBase {
}).collect(Collectors.toList()));
}
-public static class TestRandomBootstapModeSelector extends BootstrapModeSelector {
+public static class TestRandomBootstrapModeSelector extends BootstrapModeSelector {
private int currIdx = new Random().nextInt(2);
-public TestRandomBootstapModeSelector(HoodieWriteConfig writeConfig) {
+public TestRandomBootstrapModeSelector(HoodieWriteConfig writeConfig) {
super(writeConfig);
}

View File

@@ -172,7 +172,7 @@ public class TestOrcBootstrap extends HoodieClientTestBase {
}
@Test
-public void testMetadataBootstrapUnpartitionedCOW() throws Exception {
+public void testMetadataBootstrapNonpartitionedCOW() throws Exception {
testBootstrapCommon(false, false, EffectiveMode.METADATA_BOOTSTRAP_MODE);
}
@@ -222,7 +222,7 @@ public class TestOrcBootstrap extends HoodieClientTestBase {
bootstrapInstants = Arrays.asList(bootstrapCommitInstantTs);
break;
default:
-bootstrapModeSelectorClass = TestRandomBootstapModeSelector.class.getName();
+bootstrapModeSelectorClass = TestRandomBootstrapModeSelector.class.getName();
bootstrapCommitInstantTs = HoodieTimeline.FULL_BOOTSTRAP_INSTANT_TS;
checkNumRawFiles = false;
isBootstrapIndexCreated = true;
@@ -438,10 +438,10 @@ public class TestOrcBootstrap extends HoodieClientTestBase {
}).collect(Collectors.toList()));
}
-public static class TestRandomBootstapModeSelector extends BootstrapModeSelector {
+public static class TestRandomBootstrapModeSelector extends BootstrapModeSelector {
private int currIdx = new Random().nextInt(2);
-public TestRandomBootstapModeSelector(HoodieWriteConfig writeConfig) {
+public TestRandomBootstrapModeSelector(HoodieWriteConfig writeConfig) {
super(writeConfig);
}

View File

@@ -25,7 +25,6 @@ import org.apache.avro.Schema;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.generic.IndexedRecord;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.assertFalse;
@@ -98,7 +97,7 @@ public class TestAWSDmsAvroPayload {
try {
Option<IndexedRecord> outputPayload = payload.combineAndGetUpdateValue(oldRecord, avroSchema);
-// expect nothing to be comitted to table
+// expect nothing to be committed to table
assertFalse(outputPayload.isPresent());
} catch (Exception e) {
fail("Unexpected exception");
@@ -123,7 +122,7 @@ public class TestAWSDmsAvroPayload {
try {
OverwriteWithLatestAvroPayload output = payload.preCombine(insertPayload);
Option<IndexedRecord> outputPayload = output.getInsertValue(avroSchema);
-// expect nothing to be comitted to table
+// expect nothing to be committed to table
assertFalse(outputPayload.isPresent());
} catch (Exception e) {
fail("Unexpected exception");

View File

@@ -99,9 +99,9 @@ class TestCOWDataSourceStorage extends SparkClientFunctionalTestHarness {
var updateDf: DataFrame = null
if (classOf[TimestampBasedKeyGenerator].getName.equals(keyGenClass)) {
// update current_ts to be same as original record so that partition path does not change with timestamp based key gen
-val orignalRow = inputDF1.filter(col("_row_key") === verificationRowKey).collectAsList().get(0)
+val originalRow = inputDF1.filter(col("_row_key") === verificationRowKey).collectAsList().get(0)
updateDf = snapshotDF1.filter(col("_row_key") === verificationRowKey).withColumn(verificationCol, lit(updatedVerificationVal))
-.withColumn("current_ts", lit(orignalRow.getAs("current_ts")))
+.withColumn("current_ts", lit(originalRow.getAs("current_ts")))
} else {
updateDf = snapshotDF1.filter(col("_row_key") === verificationRowKey).withColumn(verificationCol, lit(updatedVerificationVal))
}