1
0

[HUDI-1615] Fixing usage of NULL schema for delete operation in HoodieSparkSqlWriter (#2777)

This commit is contained in:
Sivabalan Narayanan
2021-04-14 03:35:39 -04:00
committed by GitHub
parent ab4a7b0b4a
commit 8d29863c86
3 changed files with 41 additions and 1 deletions

View File

@@ -20,6 +20,7 @@
package org.apache.hudi.common.testutils;
import org.apache.hudi.avro.MercifulJsonConverter;
import org.apache.hudi.common.model.HoodieKey;
import org.apache.hudi.common.model.HoodieRecord;
import org.apache.hudi.common.model.HoodieRecordPayload;
import org.apache.hudi.common.util.FileIOUtils;
@@ -94,6 +95,11 @@ public class RawTripTestPayload implements HoodieRecordPayload<RawTripTestPayloa
}
}
/**
 * Serializes each {@link HoodieKey} into a minimal JSON string containing only the
 * record key and partition path, in the format expected by delete-record test payloads.
 *
 * @param records keys identifying the records to delete
 * @return one JSON string per input key, in input order
 */
public static List<String> deleteRecordsToStrings(List<HoodieKey> records) {
  return records.stream()
      .map(key -> String.format("{\"_row_key\": \"%s\",\"partition\": \"%s\"}",
          key.getRecordKey(), key.getPartitionPath()))
      .collect(Collectors.toList());
}
/**
 * Returns the partition path stored on this payload.
 *
 * @return the partition path value
 */
public String getPartitionPath() {
  return this.partitionPath;
}