1
0

[HUDI-153] Use com.uber.hoodie.common.util.Option instead of Java and Guava Optional

This commit is contained in:
yanghua
2019-08-06 14:20:42 +08:00
committed by Balaji Varadarajan
parent d288e32833
commit 722b6be04a
128 changed files with 769 additions and 769 deletions

View File

@@ -28,6 +28,7 @@ import com.uber.hoodie.common.table.HoodieTimeline;
import com.uber.hoodie.common.table.timeline.HoodieInstant;
import com.uber.hoodie.common.table.view.HoodieTableFileSystemView;
import com.uber.hoodie.common.util.FSUtils;
+import com.uber.hoodie.common.util.Option;
import com.uber.hoodie.exception.HoodieException;
import com.uber.hoodie.exception.HoodieIOException;
import com.uber.hoodie.hadoop.HoodieInputFormat;
@@ -38,7 +39,6 @@ import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
-import java.util.Optional;
import java.util.function.Function;
import java.util.stream.Collectors;
import java.util.stream.Stream;
@@ -88,8 +88,8 @@ public class HoodieRealtimeInputFormat extends HoodieInputFormat implements Conf
Map<Path, HoodieTableMetaClient> partitionsToMetaClient = partitionsToParquetSplits.keySet()
.stream().collect(Collectors.toMap(Function.identity(), p -> {
// find if we have a metaclient already for this partition.
-Optional<String> matchingBasePath = metaClientMap.keySet().stream()
-.filter(basePath -> p.toString().startsWith(basePath)).findFirst();
+Option<String> matchingBasePath = Option.fromJavaOptional(metaClientMap.keySet().stream()
+.filter(basePath -> p.toString().startsWith(basePath)).findFirst());
if (matchingBasePath.isPresent()) {
return metaClientMap.get(matchingBasePath.get());
}
@@ -116,7 +116,7 @@ public class HoodieRealtimeInputFormat extends HoodieInputFormat implements Conf
try {
// Both commit and delta-commits are included - pick the latest completed one
-Optional<HoodieInstant> latestCompletedInstant =
+Option<HoodieInstant> latestCompletedInstant =
metaClient.getActiveTimeline().getCommitsTimeline().filterCompletedInstants().lastInstant();
Stream<FileSlice> latestFileSlices = latestCompletedInstant.map(instant ->

View File

@@ -23,9 +23,9 @@ import com.uber.hoodie.common.model.HoodieRecordPayload;
import com.uber.hoodie.common.table.log.HoodieMergedLogRecordScanner;
import com.uber.hoodie.common.util.FSUtils;
import com.uber.hoodie.common.util.HoodieAvroUtils;
+import com.uber.hoodie.common.util.Option;
import java.io.IOException;
import java.util.Map;
-import java.util.Optional;
import org.apache.avro.generic.GenericRecord;
import org.apache.hadoop.io.ArrayWritable;
import org.apache.hadoop.io.NullWritable;
@@ -81,7 +81,7 @@ class RealtimeCompactedRecordReader extends AbstractRealtimeRecordReader impleme
if (deltaRecordMap.containsKey(key)) {
// TODO(NA): Invoke preCombine here by converting arrayWritable to Avro. This is required since the
// deltaRecord may not be a full record and needs values of columns from the parquet
-Optional<GenericRecord> rec;
+Option<GenericRecord> rec;
if (usesCustomPayload) {
rec = deltaRecordMap.get(key).getData().getInsertValue(getWriterSchema());
} else {

View File

@@ -21,6 +21,7 @@ package com.uber.hoodie.hadoop.realtime;
import com.uber.hoodie.common.table.log.HoodieUnMergedLogRecordScanner;
import com.uber.hoodie.common.util.DefaultSizeEstimator;
import com.uber.hoodie.common.util.FSUtils;
+import com.uber.hoodie.common.util.Option;
import com.uber.hoodie.common.util.queue.BoundedInMemoryExecutor;
import com.uber.hoodie.common.util.queue.BoundedInMemoryQueueProducer;
import com.uber.hoodie.common.util.queue.FunctionBasedQueueProducer;
@@ -31,7 +32,6 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
-import java.util.Optional;
import org.apache.avro.generic.GenericRecord;
import org.apache.hadoop.io.ArrayWritable;
import org.apache.hadoop.io.NullWritable;
@@ -71,7 +71,7 @@ class RealtimeUnmergedRecordReader extends AbstractRealtimeRecordReader implemen
// Iterator for consuming records from parquet file
this.parquetRecordsIterator = new RecordReaderValueIterator<>(this.parquetReader);
this.executor = new BoundedInMemoryExecutor<>(getMaxCompactionMemoryInBytes(), getParallelProducers(),
-Optional.empty(), x -> x, new DefaultSizeEstimator<>());
+Option.empty(), x -> x, new DefaultSizeEstimator<>());
// Consumer of this record reader
this.iterator = this.executor.getQueue().iterator();
this.logRecordScanner = new HoodieUnMergedLogRecordScanner(