HUDI-479: Eliminate or Minimize use of Guava if possible (#1159)
@@ -23,7 +23,6 @@ import org.apache.hudi.exception.HoodieException;
 import org.apache.hudi.exception.HoodieIOException;
 
 import com.fasterxml.jackson.databind.ObjectMapper;
-import com.google.common.collect.ImmutableMap;
 import org.apache.avro.Schema;
 import org.apache.avro.Schema.Type;
 import org.apache.avro.generic.GenericData;
@@ -32,6 +31,7 @@ import org.apache.avro.generic.GenericRecord;
 import java.io.IOException;
 import java.io.Serializable;
 import java.util.ArrayList;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -49,13 +49,22 @@ public class MercifulJsonConverter {
    * Build type processor map for each avro type.
    */
   private static Map<Schema.Type, JsonToAvroFieldProcessor> getFieldTypeProcessors() {
-    return new ImmutableMap.Builder<Type, JsonToAvroFieldProcessor>().put(Type.STRING, generateStringTypeHandler())
-        .put(Type.BOOLEAN, generateBooleanTypeHandler()).put(Type.DOUBLE, generateDoubleTypeHandler())
-        .put(Type.FLOAT, generateFloatTypeHandler()).put(Type.INT, generateIntTypeHandler())
-        .put(Type.LONG, generateLongTypeHandler()).put(Type.ARRAY, generateArrayTypeHandler())
-        .put(Type.RECORD, generateRecordTypeHandler()).put(Type.ENUM, generateEnumTypeHandler())
-        .put(Type.MAP, generateMapTypeHandler()).put(Type.BYTES, generateBytesTypeHandler())
-        .put(Type.FIXED, generateFixedTypeHandler()).build();
+    return Collections.unmodifiableMap(new HashMap<Schema.Type, JsonToAvroFieldProcessor>() {
+      {
+        put(Type.STRING, generateStringTypeHandler());
+        put(Type.BOOLEAN, generateBooleanTypeHandler());
+        put(Type.DOUBLE, generateDoubleTypeHandler());
+        put(Type.FLOAT, generateFloatTypeHandler());
+        put(Type.INT, generateIntTypeHandler());
+        put(Type.LONG, generateLongTypeHandler());
+        put(Type.ARRAY, generateArrayTypeHandler());
+        put(Type.RECORD, generateRecordTypeHandler());
+        put(Type.ENUM, generateEnumTypeHandler());
+        put(Type.MAP, generateMapTypeHandler());
+        put(Type.BYTES, generateBytesTypeHandler());
+        put(Type.FIXED, generateFixedTypeHandler());
+      }
+    });
   }
 
   /**
@@ -18,10 +18,9 @@
 
 package org.apache.hudi.common.model;
 
+import org.apache.hudi.common.util.CollectionUtils;
 import org.apache.hudi.common.util.Option;
 
-import com.google.common.collect.ImmutableList;
-
 import java.io.Serializable;
 import java.util.List;
 import java.util.Objects;
@@ -38,8 +37,8 @@ public class HoodieRecord<T extends HoodieRecordPayload> implements Serializable
   public static String FILENAME_METADATA_FIELD = "_hoodie_file_name";
 
   public static final List<String> HOODIE_META_COLUMNS =
-      new ImmutableList.Builder<String>().add(COMMIT_TIME_METADATA_FIELD).add(COMMIT_SEQNO_METADATA_FIELD)
-          .add(RECORD_KEY_METADATA_FIELD).add(PARTITION_PATH_METADATA_FIELD).add(FILENAME_METADATA_FIELD).build();
+      CollectionUtils.createImmutableList(COMMIT_TIME_METADATA_FIELD, COMMIT_SEQNO_METADATA_FIELD,
+          RECORD_KEY_METADATA_FIELD, PARTITION_PATH_METADATA_FIELD, FILENAME_METADATA_FIELD);
 
   /**
    * Identifies the record across the table.
@@ -26,13 +26,13 @@ import org.apache.hudi.common.table.timeline.HoodieActiveTimeline;
 import org.apache.hudi.common.table.timeline.HoodieArchivedTimeline;
 import org.apache.hudi.common.table.timeline.HoodieInstant;
 import org.apache.hudi.common.util.ConsistencyGuardConfig;
-import org.apache.hudi.common.util.FailSafeConsistencyGuard;
 import org.apache.hudi.common.util.FSUtils;
+import org.apache.hudi.common.util.FailSafeConsistencyGuard;
 import org.apache.hudi.common.util.NoOpConsistencyGuard;
 import org.apache.hudi.common.util.Option;
 import org.apache.hudi.common.util.ValidationUtils;
-import org.apache.hudi.exception.TableNotFoundException;
 import org.apache.hudi.exception.HoodieException;
+import org.apache.hudi.exception.TableNotFoundException;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
@@ -246,7 +246,7 @@ public class HoodieTableMetaClient implements Serializable {
   /**
    * Return raw file-system.
    *
-   * @return
+   * @return fs
    */
   public FileSystem getRawFs() {
     return getFs().getFileSystem();
@@ -26,7 +26,6 @@ import org.apache.hudi.common.util.Option;
 import org.apache.hudi.common.util.ValidationUtils;
 import org.apache.hudi.exception.HoodieIOException;
 
-import com.google.common.collect.ImmutableSet;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.Path;
@@ -38,8 +37,10 @@ import java.io.IOException;
 import java.io.Serializable;
 import java.text.SimpleDateFormat;
 import java.util.Arrays;
+import java.util.Collections;
 import java.util.Date;
 import java.util.HashSet;
+import java.util.Objects;
 import java.util.Set;
 import java.util.concurrent.atomic.AtomicReference;
 import java.util.function.Function;
@@ -106,13 +107,11 @@ public class HoodieActiveTimeline extends HoodieDefaultTimeline {
   }
 
   public HoodieActiveTimeline(HoodieTableMetaClient metaClient) {
-    this(metaClient, new ImmutableSet.Builder<String>().addAll(VALID_EXTENSIONS_IN_ACTIVE_TIMELINE).build());
+    this(metaClient, Collections.unmodifiableSet(VALID_EXTENSIONS_IN_ACTIVE_TIMELINE));
   }
 
   public HoodieActiveTimeline(HoodieTableMetaClient metaClient, boolean applyLayoutFilter) {
-    this(metaClient,
-        new ImmutableSet.Builder<String>()
-            .addAll(VALID_EXTENSIONS_IN_ACTIVE_TIMELINE).build(), applyLayoutFilter);
+    this(metaClient, Collections.unmodifiableSet(VALID_EXTENSIONS_IN_ACTIVE_TIMELINE), applyLayoutFilter);
   }
 
   /**
@@ -166,7 +165,7 @@ public class HoodieActiveTimeline extends HoodieDefaultTimeline {
 
   public void deleteCompactionRequested(HoodieInstant instant) {
     ValidationUtils.checkArgument(instant.isRequested());
-    ValidationUtils.checkArgument(instant.getAction().equals(HoodieTimeline.COMPACTION_ACTION));
+    ValidationUtils.checkArgument(Objects.equals(instant.getAction(), HoodieTimeline.COMPACTION_ACTION));
     deleteInstantFile(instant);
   }
 
@@ -31,6 +31,7 @@ import org.apache.hudi.common.table.log.HoodieLogFormat;
 import org.apache.hudi.common.table.log.block.HoodieAvroDataBlock;
 import org.apache.hudi.common.util.Option;
 import org.apache.hudi.exception.HoodieIOException;
+
 import org.apache.log4j.LogManager;
 import org.apache.log4j.Logger;
 
@@ -20,11 +20,11 @@ package org.apache.hudi.common.table.timeline;
 
 import org.apache.hudi.common.table.HoodieTimeline;
 import org.apache.hudi.common.table.timeline.HoodieInstant.State;
+import org.apache.hudi.common.util.CollectionUtils;
 import org.apache.hudi.common.util.Option;
 import org.apache.hudi.common.util.StringUtils;
 import org.apache.hudi.exception.HoodieException;
 
-import com.google.common.collect.Sets;
 import org.apache.log4j.LogManager;
 import org.apache.log4j.Logger;
 
@@ -113,7 +113,7 @@ public class HoodieDefaultTimeline implements HoodieTimeline {
 
   @Override
   public HoodieTimeline getCommitsAndCompactionTimeline() {
-    Set<String> validActions = Sets.newHashSet(COMMIT_ACTION, DELTA_COMMIT_ACTION, COMPACTION_ACTION);
+    Set<String> validActions = CollectionUtils.createSet(COMMIT_ACTION, DELTA_COMMIT_ACTION, COMPACTION_ACTION);
     return new HoodieDefaultTimeline(instants.stream().filter(s -> validActions.contains(s.getAction())), details);
   }
 
@@ -145,7 +145,7 @@ public class HoodieDefaultTimeline implements HoodieTimeline {
    * Get all instants (commits, delta commits) that produce new data, in the active timeline.
    */
  public HoodieTimeline getCommitsTimeline() {
-    return getTimelineOfActions(Sets.newHashSet(COMMIT_ACTION, DELTA_COMMIT_ACTION));
+    return getTimelineOfActions(CollectionUtils.createSet(COMMIT_ACTION, DELTA_COMMIT_ACTION));
  }
 
  /**
@@ -153,8 +153,8 @@ public class HoodieDefaultTimeline implements HoodieTimeline {
    * timeline.
    */
  public HoodieTimeline getAllCommitsTimeline() {
-    return getTimelineOfActions(Sets.newHashSet(COMMIT_ACTION, DELTA_COMMIT_ACTION, CLEAN_ACTION, COMPACTION_ACTION,
-        SAVEPOINT_ACTION, ROLLBACK_ACTION));
+    return getTimelineOfActions(CollectionUtils.createSet(COMMIT_ACTION, DELTA_COMMIT_ACTION,
+        CLEAN_ACTION, COMPACTION_ACTION, SAVEPOINT_ACTION, ROLLBACK_ACTION));
  }
 
  /**
@@ -19,9 +19,9 @@
 package org.apache.hudi.common.table.timeline;
 
 import org.apache.hudi.common.table.HoodieTimeline;
+import org.apache.hudi.common.util.CollectionUtils;
 import org.apache.hudi.common.util.FSUtils;
 
-import com.google.common.collect.ImmutableMap;
 import org.apache.hadoop.fs.FileStatus;
 
 import java.io.Serializable;
@@ -41,8 +41,8 @@ public class HoodieInstant implements Serializable, Comparable<HoodieInstant> {
   * A COMPACTION action eventually becomes COMMIT when completed. So, when grouping instants
   * for state transitions, this needs to be taken into account
   */
-  private static final Map<String, String> COMPARABLE_ACTIONS = new ImmutableMap.Builder<String, String>()
-      .put(HoodieTimeline.COMPACTION_ACTION, HoodieTimeline.COMMIT_ACTION).build();
+  private static final Map<String, String> COMPARABLE_ACTIONS =
+      CollectionUtils.createImmutableMap(HoodieTimeline.COMPACTION_ACTION, HoodieTimeline.COMMIT_ACTION);
 
  public static final Comparator<HoodieInstant> ACTION_COMPARATOR =
      Comparator.comparing(instant -> getComparableAction(instant.getAction()));
@@ -24,8 +24,8 @@ import org.apache.hudi.avro.model.HoodieRestoreMetadata;
 import org.apache.hudi.avro.model.HoodieRollbackMetadata;
 import org.apache.hudi.common.model.CompactionOperation;
 import org.apache.hudi.common.model.FileSlice;
-import org.apache.hudi.common.model.HoodieCommitMetadata;
 import org.apache.hudi.common.model.HoodieBaseFile;
+import org.apache.hudi.common.model.HoodieCommitMetadata;
 import org.apache.hudi.common.model.HoodieFileGroup;
 import org.apache.hudi.common.model.HoodieLogFile;
 import org.apache.hudi.common.table.HoodieTimeline;
@@ -26,8 +26,8 @@ import org.apache.hudi.common.table.HoodieTableMetaClient;
 import org.apache.hudi.common.table.HoodieTimeline;
 import org.apache.hudi.common.table.SyncableFileSystemView;
 import org.apache.hudi.common.table.timeline.HoodieInstant;
-import org.apache.hudi.common.table.timeline.dto.CompactionOpDTO;
 import org.apache.hudi.common.table.timeline.dto.BaseFileDTO;
+import org.apache.hudi.common.table.timeline.dto.CompactionOpDTO;
 import org.apache.hudi.common.table.timeline.dto.FileGroupDTO;
 import org.apache.hudi.common.table.timeline.dto.FileSliceDTO;
 import org.apache.hudi.common.table.timeline.dto.InstantDTO;
@@ -28,7 +28,6 @@ import org.apache.hudi.avro.model.HoodieSavepointMetadata;
 import org.apache.hudi.avro.model.HoodieSavepointPartitionMetadata;
 import org.apache.hudi.common.HoodieRollbackStat;
 
-import com.google.common.collect.ImmutableMap;
 import org.apache.avro.file.DataFileReader;
 import org.apache.avro.file.DataFileWriter;
 import org.apache.avro.file.FileReader;
@@ -42,6 +41,7 @@ import org.apache.avro.specific.SpecificRecordBase;
 import java.io.ByteArrayOutputStream;
 import java.io.IOException;
 import java.util.Collections;
+import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
@@ -54,18 +54,18 @@ public class AvroUtils {
 
   public static HoodieRestoreMetadata convertRestoreMetadata(String startRestoreTime, Option<Long> durationInMs,
       List<String> commits, Map<String, List<HoodieRollbackStat>> commitToStats) {
-    ImmutableMap.Builder<String, List<HoodieRollbackMetadata>> commitToStatBuilder = ImmutableMap.builder();
+    Map<String, List<HoodieRollbackMetadata>> commitToStatsMap = new HashMap<>();
     for (Map.Entry<String, List<HoodieRollbackStat>> commitToStat : commitToStats.entrySet()) {
-      commitToStatBuilder.put(commitToStat.getKey(),
+      commitToStatsMap.put(commitToStat.getKey(),
          Collections.singletonList(convertRollbackMetadata(startRestoreTime, durationInMs, commits, commitToStat.getValue())));
     }
     return new HoodieRestoreMetadata(startRestoreTime, durationInMs.orElseGet(() -> -1L), commits,
-        commitToStatBuilder.build(), DEFAULT_VERSION);
+        Collections.unmodifiableMap(commitToStatsMap), DEFAULT_VERSION);
   }
 
   public static HoodieRollbackMetadata convertRollbackMetadata(String startRollbackTime, Option<Long> durationInMs,
       List<String> commits, List<HoodieRollbackStat> rollbackStats) {
-    ImmutableMap.Builder<String, HoodieRollbackPartitionMetadata> partitionMetadataBuilder = ImmutableMap.builder();
+    Map<String, HoodieRollbackPartitionMetadata> partitionMetadataBuilder = new HashMap<>();
     int totalDeleted = 0;
     for (HoodieRollbackStat stat : rollbackStats) {
       HoodieRollbackPartitionMetadata metadata = new HoodieRollbackPartitionMetadata(stat.getPartitionPath(),
@@ -75,18 +75,18 @@ public class AvroUtils {
     }
 
     return new HoodieRollbackMetadata(startRollbackTime, durationInMs.orElseGet(() -> -1L), totalDeleted, commits,
-        partitionMetadataBuilder.build(), DEFAULT_VERSION);
+        Collections.unmodifiableMap(partitionMetadataBuilder), DEFAULT_VERSION);
   }
 
   public static HoodieSavepointMetadata convertSavepointMetadata(String user, String comment,
       Map<String, List<String>> latestFiles) {
-    ImmutableMap.Builder<String, HoodieSavepointPartitionMetadata> partitionMetadataBuilder = ImmutableMap.builder();
+    Map<String, HoodieSavepointPartitionMetadata> partitionMetadataBuilder = new HashMap<>();
     for (Map.Entry<String, List<String>> stat : latestFiles.entrySet()) {
       HoodieSavepointPartitionMetadata metadata = new HoodieSavepointPartitionMetadata(stat.getKey(), stat.getValue());
       partitionMetadataBuilder.put(stat.getKey(), metadata);
     }
-    return new HoodieSavepointMetadata(user, System.currentTimeMillis(), comment, partitionMetadataBuilder.build(),
-        DEFAULT_VERSION);
+    return new HoodieSavepointMetadata(user, System.currentTimeMillis(), comment,
+        Collections.unmodifiableMap(partitionMetadataBuilder), DEFAULT_VERSION);
   }
 
   public static Option<byte[]> serializeCompactionPlan(HoodieCompactionPlan compactionWorkload) throws IOException {
@@ -28,10 +28,10 @@ import org.apache.hudi.common.versioning.clean.CleanMetadataMigrator;
 import org.apache.hudi.common.versioning.clean.CleanV1MigrationHandler;
 import org.apache.hudi.common.versioning.clean.CleanV2MigrationHandler;
 
-import com.google.common.collect.ImmutableMap;
-
 import java.io.IOException;
+import java.util.HashMap;
 import java.util.List;
+import java.util.Map;
 
 public class CleanerUtils {
   public static final Integer CLEAN_METADATA_VERSION_1 = CleanV1MigrationHandler.VERSION;
@@ -40,14 +40,14 @@ public class CleanerUtils {
 
   public static HoodieCleanMetadata convertCleanMetadata(HoodieTableMetaClient metaClient,
       String startCleanTime, Option<Long> durationInMs, List<HoodieCleanStat> cleanStats) {
-    ImmutableMap.Builder<String, HoodieCleanPartitionMetadata> partitionMetadataBuilder = ImmutableMap.builder();
+    Map<String, HoodieCleanPartitionMetadata> partitionMetadataMap = new HashMap<>();
     int totalDeleted = 0;
     String earliestCommitToRetain = null;
     for (HoodieCleanStat stat : cleanStats) {
       HoodieCleanPartitionMetadata metadata =
          new HoodieCleanPartitionMetadata(stat.getPartitionPath(), stat.getPolicy().name(),
              stat.getDeletePathPatterns(), stat.getSuccessDeleteFiles(), stat.getFailedDeleteFiles());
-      partitionMetadataBuilder.put(stat.getPartitionPath(), metadata);
+      partitionMetadataMap.put(stat.getPartitionPath(), metadata);
       totalDeleted += stat.getSuccessDeleteFiles().size();
       if (earliestCommitToRetain == null) {
         // This will be the same for all partitions
@@ -56,8 +56,7 @@ public class CleanerUtils {
     }
 
     return new HoodieCleanMetadata(startCleanTime,
-        durationInMs.orElseGet(() -> -1L), totalDeleted, earliestCommitToRetain,
-        partitionMetadataBuilder.build(), CLEAN_METADATA_VERSION_2);
+        durationInMs.orElseGet(() -> -1L), totalDeleted, earliestCommitToRetain, partitionMetadataMap, CLEAN_METADATA_VERSION_2);
   }
 
   /**
@@ -0,0 +1,111 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hudi.common.util;
+
+import org.apache.hudi.common.util.collection.Pair;
+
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+import java.util.Set;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
+
+public class CollectionUtils {
+  /**
+   * Determines whether two iterators contain equal elements in the same order. More specifically,
+   * this method returns {@code true} if {@code iterator1} and {@code iterator2} contain the same
+   * number of elements and every element of {@code iterator1} is equal to the corresponding element
+   * of {@code iterator2}.
+   *
+   * <p>Note that this will modify the supplied iterators, since they will have been advanced some
+   * number of elements forward.
+   */
+  public static boolean elementsEqual(Iterator<?> iterator1, Iterator<?> iterator2) {
+    while (iterator1.hasNext()) {
+      if (!iterator2.hasNext()) {
+        return false;
+      }
+      Object o1 = iterator1.next();
+      Object o2 = iterator2.next();
+      if (!Objects.equals(o1, o2)) {
+        return false;
+      }
+    }
+    return !iterator2.hasNext();
+  }
+
+  @SafeVarargs
+  public static <T> Set<T> createSet(final T... elements) {
+    return Stream.of(elements).collect(Collectors.toSet());
+  }
+
+  public static <K,V> Map<K, V> createImmutableMap(final K key, final V value) {
+    return Collections.unmodifiableMap(Collections.singletonMap(key, value));
+  }
+
+  @SafeVarargs
+  public static <T> List<T> createImmutableList(final T... elements) {
+    return Collections.unmodifiableList(Stream.of(elements).collect(Collectors.toList()));
+  }
+
+  public static <K,V> Map<K,V> createImmutableMap(final Map<K,V> map) {
+    return Collections.unmodifiableMap(map);
+  }
+
+  @SafeVarargs
+  public static <K,V> Map<K,V> createImmutableMap(final Pair<K,V>... elements) {
+    Map<K,V> map = new HashMap<>();
+    for (Pair<K,V> pair: elements) {
+      map.put(pair.getLeft(), pair.getRight());
+    }
+    return Collections.unmodifiableMap(map);
+  }
+
+  @SafeVarargs
+  public static <T> Set<T> createImmutableSet(final T... elements) {
+    return Collections.unmodifiableSet(createSet(elements));
+  }
+
+  public static <T> Set<T> createImmutableSet(final Set<T> set) {
+    return Collections.unmodifiableSet(set);
+  }
+
+  public static <T> List<T> createImmutableList(final List<T> list) {
+    return Collections.unmodifiableList(list);
+  }
+
+  private static Object[] checkElementsNotNull(Object... array) {
+    return checkElementsNotNull(array, array.length);
+  }
+
+  private static Object[] checkElementsNotNull(Object[] array, int length) {
+    for (int i = 0; i < length; i++) {
+      checkElementNotNull(array[i], i);
+    }
+    return array;
+  }
+
+  private static Object checkElementNotNull(Object element, int index) {
+    return Objects.requireNonNull(element, "Element is null at index " + index);
+  }
+}
@@ -46,8 +46,8 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.LinkedList;
 import java.util.List;
-import java.util.Objects;
 import java.util.Map.Entry;
+import java.util.Objects;
 import java.util.UUID;
 import java.util.function.Function;
 import java.util.regex.Matcher;
@@ -213,7 +213,7 @@ public class FSUtils {
   * @param basePathStr Base-Path
   * @param consumer Callback for processing
   * @param excludeMetaFolder Exclude .hoodie folder
-   * @throws IOException
+   * @throws IOException -
   */
  static void processFiles(FileSystem fs, String basePathStr, Function<FileStatus, Boolean> consumer,
      boolean excludeMetaFolder) throws IOException {
@@ -18,7 +18,10 @@
 
 package org.apache.hudi.common.util;
 
+import javax.annotation.Nullable;
+
 import java.io.ByteArrayOutputStream;
+import java.io.Closeable;
 import java.io.File;
 import java.io.FileOutputStream;
 import java.io.IOException;
@@ -91,4 +94,26 @@ public class FileIOUtils {
     out.flush();
     out.close();
   }
+
+  /**
+   * Closes a {@link Closeable}, with control over whether an {@code IOException} may be thrown.
+   * @param closeable the {@code Closeable} object to be closed, or null,
+   *        in which case this method does nothing.
+   * @param swallowIOException if true, don't propagate IO exceptions thrown by the {@code close} methods.
+   *
+   * @throws IOException if {@code swallowIOException} is false and {@code close} throws an {@code IOException}.
+   */
+  public static void close(@Nullable Closeable closeable, boolean swallowIOException)
+      throws IOException {
+    if (closeable == null) {
+      return;
+    }
+    try {
+      closeable.close();
+    } catch (IOException e) {
+      if (!swallowIOException) {
+        throw e;
+      }
+    }
+  }
 }
@@ -21,14 +21,21 @@ package org.apache.hudi.common.util;
 import org.apache.hudi.common.model.HoodieRecordPayload;
 import org.apache.hudi.exception.HoodieException;
 
-import com.google.common.reflect.ClassPath;
-import com.google.common.reflect.ClassPath.ClassInfo;
+import org.apache.log4j.LogManager;
+import org.apache.log4j.Logger;
 
+import java.io.File;
 import java.io.IOException;
 import java.lang.reflect.InvocationTargetException;
+import java.net.URISyntaxException;
+import java.net.URL;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Enumeration;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.Objects;
 import java.util.stream.Stream;
 
 /**
@@ -36,6 +43,8 @@ import java.util.stream.Stream;
  */
 public class ReflectionUtils {
 
+  private static final Logger LOG = LogManager.getLogger(ReflectionUtils.class);
+
   private static Map<String, Class<?>> clazzCache = new HashMap<>();
 
   private static Class<?> getClass(String clazzName) {
@@ -90,16 +99,58 @@ public class ReflectionUtils {
   }
 
   /**
-   * Return stream of top level class names in the same class path as passed-in class.
-   *
-   * @param clazz
+   * Scans all classes accessible from the context class loader
+   * which belong to the given package and subpackages.
+   *
+   * @param clazz class
    * @return Stream of Class names in package
   */
-  public static Stream<String> getTopLevelClassesInClasspath(Class clazz) {
+  public static Stream<String> getTopLevelClassesInClasspath(Class<?> clazz) {
+    ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
+    String packageName = clazz.getPackage().getName();
+    String path = packageName.replace('.', '/');
+    Enumeration<URL> resources = null;
     try {
-      ClassPath classPath = ClassPath.from(clazz.getClassLoader());
-      return classPath.getTopLevelClasses().stream().map(ClassInfo::getName);
+      resources = classLoader.getResources(path);
     } catch (IOException e) {
-      throw new RuntimeException("Got exception while dumping top level classes", e);
+      LOG.error("Unable to fetch Resources in package " + e.getMessage());
     }
+    List<File> directories = new ArrayList<>();
+    while (Objects.requireNonNull(resources).hasMoreElements()) {
+      URL resource = resources.nextElement();
+      try {
+        directories.add(new File(resource.toURI()));
+      } catch (URISyntaxException e) {
+        LOG.error("Unable to get " + e.getMessage());
+      }
+    }
+    List<String> classes = new ArrayList<>();
+    for (File directory : directories) {
+      classes.addAll(findClasses(directory, packageName));
+    }
+    return classes.stream();
   }
+
+  /**
+   * Recursive method used to find all classes in a given directory and subdirs.
+   *
+   * @param directory The base directory
+   * @param packageName The package name for classes found inside the base directory
+   * @return classes in the package
+   */
+  private static List<String> findClasses(File directory, String packageName) {
+    List<String> classes = new ArrayList<>();
+    if (!directory.exists()) {
+      return classes;
+    }
+    File[] files = directory.listFiles();
+    for (File file : Objects.requireNonNull(files)) {
+      if (file.isDirectory()) {
+        classes.addAll(findClasses(file, packageName + "." + file.getName()));
+      } else if (file.getName().endsWith(".class")) {
+        classes.add(packageName + '.' + file.getName().substring(0, file.getName().length() - 6));
+      }
+    }
+    return classes;
+  }
 }
@@ -20,20 +20,20 @@ package org.apache.hudi.common.minicluster;
 
 import org.apache.hudi.common.model.HoodieTestUtils;
 import org.apache.hudi.common.util.FileIOUtils;
+import org.apache.hudi.exception.HoodieIOException;
 
-import com.google.common.io.Files;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
-import org.apache.hudi.exception.HoodieIOException;
 import org.apache.log4j.LogManager;
 import org.apache.log4j.Logger;
 
 import java.io.File;
 import java.io.IOException;
-import java.util.Objects;
 import java.net.ServerSocket;
+import java.nio.file.Files;
+import java.util.Objects;
 
 /**
  * An HDFS minicluster service implementation.
@@ -53,8 +53,8 @@ public class HdfsTestService {
   */
  private MiniDFSCluster miniDfsCluster;
 
-  public HdfsTestService() {
-    workDir = Files.createTempDir().getAbsolutePath();
+  public HdfsTestService() throws IOException {
+    workDir = Files.createTempDirectory("temp").getName(0).toString();
  }
 
  public Configuration getHadoopConf() {
@@ -18,7 +18,6 @@
 
 package org.apache.hudi.common.minicluster;
 
-import com.google.common.io.Files;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.log4j.LogManager;
@@ -35,6 +34,7 @@ import java.io.OutputStream;
 import java.io.Reader;
 import java.net.InetSocketAddress;
 import java.net.Socket;
+import java.nio.file.Files;
 import java.util.Objects;
 
 /**
@@ -75,8 +75,8 @@ public class ZookeeperTestService {
  private ZooKeeperServer zooKeeperServer;
  private boolean started = false;
 
-  public ZookeeperTestService(Configuration config) {
-    this.workDir = Files.createTempDir().getAbsolutePath();
+  public ZookeeperTestService(Configuration config) throws IOException {
+    this.workDir = Files.createTempDirectory(System.currentTimeMillis() + "-").toFile().getAbsolutePath();
    this.hadoopConf = config;
  }
 
@@ -19,6 +19,7 @@
 package org.apache.hudi.common.model;
 
 import org.apache.hudi.common.util.FileIOUtils;
+
 import org.junit.Assert;
 import org.junit.Test;
 
@@ -26,6 +26,7 @@ import org.apache.hudi.common.table.HoodieTimeline;
 import org.apache.hudi.common.table.timeline.HoodieActiveTimeline;
 import org.apache.hudi.common.table.timeline.HoodieInstant;
 import org.apache.hudi.common.table.timeline.HoodieInstant.State;
+import org.apache.hudi.common.util.CollectionUtils;
 import org.apache.hudi.common.util.Option;
 
 import org.apache.hadoop.fs.Path;
@@ -34,7 +35,6 @@ import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.ExpectedException;
 
-import com.google.common.collect.Sets;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Collections;
@@ -195,9 +195,9 @@ public class TestHoodieActiveTimeline extends HoodieCommonTestHarness {
     // Test that various types of getXXX operations from HoodieActiveTimeline
     // return the correct set of Instant
     checkTimeline.accept(timeline.getCommitsTimeline(),
-        Sets.newHashSet(HoodieTimeline.COMMIT_ACTION, HoodieTimeline.DELTA_COMMIT_ACTION));
+        CollectionUtils.createSet(HoodieTimeline.COMMIT_ACTION, HoodieTimeline.DELTA_COMMIT_ACTION));
     checkTimeline.accept(timeline.getCommitsAndCompactionTimeline(),
-        Sets.newHashSet(HoodieTimeline.COMMIT_ACTION, HoodieTimeline.DELTA_COMMIT_ACTION, HoodieTimeline.COMPACTION_ACTION));
+        CollectionUtils.createSet(HoodieTimeline.COMMIT_ACTION, HoodieTimeline.DELTA_COMMIT_ACTION, HoodieTimeline.COMPACTION_ACTION));
     checkTimeline.accept(timeline.getCommitTimeline(), Collections.singleton(HoodieTimeline.COMMIT_ACTION));
     checkTimeline.accept(timeline.getDeltaCommitTimeline(), Collections.singleton(HoodieTimeline.DELTA_COMMIT_ACTION));
     checkTimeline.accept(timeline.getCleanerTimeline(), Collections.singleton(HoodieTimeline.CLEAN_ACTION));
@@ -205,7 +205,7 @@
     checkTimeline.accept(timeline.getRestoreTimeline(), Collections.singleton(HoodieTimeline.RESTORE_ACTION));
     checkTimeline.accept(timeline.getSavePointTimeline(), Collections.singleton(HoodieTimeline.SAVEPOINT_ACTION));
     checkTimeline.accept(timeline.getAllCommitsTimeline(),
-        Sets.newHashSet(HoodieTimeline.COMMIT_ACTION, HoodieTimeline.DELTA_COMMIT_ACTION,
+        CollectionUtils.createSet(HoodieTimeline.COMMIT_ACTION, HoodieTimeline.DELTA_COMMIT_ACTION,
            HoodieTimeline.CLEAN_ACTION, HoodieTimeline.COMPACTION_ACTION,
            HoodieTimeline.SAVEPOINT_ACTION, HoodieTimeline.ROLLBACK_ACTION));
@@ -380,12 +380,12 @@
     checkFilter.accept(timeline.filter(i -> false), new HashSet<>());
     checkFilter.accept(timeline.filterInflights(), Collections.singleton(State.INFLIGHT));
     checkFilter.accept(timeline.filterInflightsAndRequested(),
-        Sets.newHashSet(State.INFLIGHT, State.REQUESTED));
+        CollectionUtils.createSet(State.INFLIGHT, State.REQUESTED));
 
     // filterCompletedAndCompactionInstants
     // This cannot be done using checkFilter as it involves both states and actions
     final HoodieTimeline t1 = timeline.filterCompletedAndCompactionInstants();
-    final Set<State> states = Sets.newHashSet(State.REQUESTED, State.COMPLETED);
+    final Set<State> states = CollectionUtils.createSet(State.REQUESTED, State.COMPLETED);
     final Set<String> actions = Collections.singleton(HoodieTimeline.COMPACTION_ACTION);
     sup.get().filter(i -> states.contains(i.getState()) || actions.contains(i.getAction()))
        .forEach(i -> assertTrue(t1.containsInstant(i)));
@@ -27,9 +27,9 @@ import org.apache.hudi.common.HoodieCommonTestHarness;
 import org.apache.hudi.common.HoodieRollbackStat;
 import org.apache.hudi.common.model.CompactionOperation;
 import org.apache.hudi.common.model.FileSlice;
+import org.apache.hudi.common.model.HoodieBaseFile;
 import org.apache.hudi.common.model.HoodieCleaningPolicy;
 import org.apache.hudi.common.model.HoodieCommitMetadata;
-import org.apache.hudi.common.model.HoodieBaseFile;
 import org.apache.hudi.common.model.HoodieFileGroup;
 import org.apache.hudi.common.model.HoodieFileGroupId;
 import org.apache.hudi.common.model.HoodieTableType;
@@ -41,6 +41,7 @@ import org.apache.hudi.common.table.timeline.HoodieInstant;
 import org.apache.hudi.common.table.timeline.HoodieInstant.State;
 import org.apache.hudi.common.util.AvroUtils;
 import org.apache.hudi.common.util.CleanerUtils;
+import org.apache.hudi.common.util.CollectionUtils;
 import org.apache.hudi.common.util.CompactionUtils;
 import org.apache.hudi.common.util.FSUtils;
 import org.apache.hudi.common.util.Option;
@@ -48,8 +49,6 @@ import org.apache.hudi.common.util.ValidationUtils;
 import org.apache.hudi.common.util.collection.Pair;
 import org.apache.hudi.exception.HoodieException;
 
-import com.google.common.collect.ImmutableMap;
-import com.google.common.collect.Iterators;
 import org.apache.hadoop.fs.Path;
 import org.apache.log4j.LogManager;
 import org.apache.log4j.Logger;
@@ -135,8 +134,11 @@ public class TestIncrementalFSViewSync extends HoodieCommonTestHarness {
 
     // Clean first slice
     testCleans(view, Collections.singletonList("21"),
-        new ImmutableMap.Builder<String, List<String>>().put("11", Arrays.asList("12", "13", "15")).build(),
-        instantsToFiles, Collections.singletonList("11"));
+        new HashMap<String, List<String>>() {
+          {
+            put("11", Arrays.asList("12", "13", "15"));
+          }
+        }, instantsToFiles, Collections.singletonList("11"));
 
     // Add one more ingestion instant. This should be 2nd slice now
     instantsToFiles.putAll(testMultipleWriteSteps(view, Collections.singletonList("22"), true, "19", 2));
@@ -251,7 +253,11 @@
      * Case where a clean happened and then rounds of ingestion and compaction happened
      */
     testCleans(view2, Collections.singletonList("19"),
-        new ImmutableMap.Builder<String, List<String>>().put("11", Arrays.asList("12", "13", "14")).build(),
+        new HashMap<String, List<String>>() {
+          {
+            put("11", Arrays.asList("12", "13", "14"));
+          }
+        },
        instantsToFiles, Collections.singletonList("11"));
     scheduleCompaction(view2, "20");
     instantsToFiles.putAll(testMultipleWriteSteps(view2, Arrays.asList("21", "22"), true, "20", 2));
@@ -439,7 +445,7 @@
 
     List<HoodieRollbackMetadata> rollbackM = new ArrayList<>();
     rollbackM.add(rollbackMetadata);
-    metadata.setHoodieRestoreMetadata(new ImmutableMap.Builder().put(rollbackInstant, rollbackM).build());
+    metadata.setHoodieRestoreMetadata(CollectionUtils.createImmutableMap(rollbackInstant, rollbackM));
     List<String> rollbackInstants = new ArrayList<>();
     rollbackInstants.add(rollbackInstant);
     metadata.setInstantsToRollback(rollbackInstants);
@@ -646,7 +652,7 @@
     HoodieTimeline timeline1 = view1.getTimeline();
     HoodieTimeline timeline2 = view2.getTimeline();
     Assert.assertEquals(view1.getLastInstant(), view2.getLastInstant());
-    Iterators.elementsEqual(timeline1.getInstants().iterator(), timeline2.getInstants().iterator());
+    CollectionUtils.elementsEqual(timeline1.getInstants().iterator(), timeline2.getInstants().iterator());
 
     // View Checks
     Map<HoodieFileGroupId, HoodieFileGroup> fileGroupsMap1 = partitions.stream().flatMap(view1::getAllFileGroups)
@@ -18,7 +18,6 @@
 
 package org.apache.hudi.common.table.view;
 
-import junit.framework.TestCase;
 import org.apache.hudi.common.model.CompactionOperation;
 import org.apache.hudi.common.model.FileSlice;
 import org.apache.hudi.common.model.HoodieBaseFile;
@@ -30,6 +29,8 @@ import org.apache.hudi.common.table.timeline.HoodieInstant;
 import org.apache.hudi.common.util.Option;
 import org.apache.hudi.common.util.collection.ImmutablePair;
 import org.apache.hudi.common.util.collection.Pair;
+
+import junit.framework.TestCase;
 import org.junit.Before;
 import org.junit.Rule;
 import org.junit.Test;
@@ -45,10 +46,10 @@ import java.util.Collections;
 import java.util.List;
 import java.util.stream.Stream;
 
-import static org.mockito.Mockito.when;
 import static org.mockito.Mockito.reset;
 import static org.mockito.Mockito.times;
 import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
 
 @RunWith(MockitoJUnitRunner.class)
 public class TestPriorityBasedFileSystemView extends TestCase {
@@ -31,8 +31,6 @@ import org.apache.hudi.common.table.timeline.HoodieInstant.State;
 import org.apache.hudi.common.util.collection.Pair;
 import org.apache.hudi.exception.HoodieIOException;
 
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableMap;
 import org.apache.hadoop.fs.Path;
 import org.junit.Assert;
 
@@ -82,12 +80,17 @@ public class CompactionTestUtils {
     createDeltaCommit(metaClient, "004");
     createDeltaCommit(metaClient, "006");
 
-    Map<String, String> baseInstantsToCompaction = new ImmutableMap.Builder<String, String>().put("000", "001")
-        .put("002", "003").put("004", "005").put("006", "007").build();
+    Map<String, String> baseInstantsToCompaction = new HashMap<String, String>() {
+      {
+        put("000", "001");
+        put("002", "003");
+        put("004", "005");
+        put("006", "007");
+      }
+    };
     List<Integer> expectedNumEntries =
        Arrays.asList(numEntriesInPlan1, numEntriesInPlan2, numEntriesInPlan3, numEntriesInPlan4);
-    List<HoodieCompactionPlan> plans =
-        new ImmutableList.Builder<HoodieCompactionPlan>().add(plan1, plan2, plan3, plan4).build();
+    List<HoodieCompactionPlan> plans = CollectionUtils.createImmutableList(plan1, plan2, plan3, plan4);
     IntStream.range(0, 4).boxed().forEach(idx -> {
       if (expectedNumEntries.get(idx) > 0) {
         Assert.assertEquals("check if plan " + idx + " has exp entries", expectedNumEntries.get(idx).longValue(),
@@ -31,7 +31,6 @@ import org.apache.hudi.common.util.CompactionTestUtils.TestHoodieBaseFile;
 import org.apache.hudi.common.util.collection.Pair;
 import org.apache.hudi.common.versioning.compaction.CompactionPlanMigrator;
 
-import com.google.common.collect.ImmutableMap;
 import org.apache.hadoop.fs.Path;
 import org.junit.Assert;
 import org.junit.Before;
@@ -39,6 +38,7 @@ import org.junit.Test;
 
 import java.io.IOException;
 import java.util.Arrays;
+import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.function.Function;
@@ -59,8 +59,12 @@ public class TestCompactionUtils extends HoodieCommonTestHarness {
 
  private static String TEST_WRITE_TOKEN = "1-0-1";
 
-  private static final Map<String, Double> METRICS =
-      new ImmutableMap.Builder<String, Double>().put("key1", 1.0).put("key2", 3.0).build();
+  private static final Map<String, Double> METRICS = new HashMap<String, Double>() {
+    {
+      put("key1", 1.0);
+      put("key2", 3.0);
+    }
+  };
  private Function<Pair<String, FileSlice>, Map<String, Double>> metricsCaptureFn = (partitionFileSlice) -> METRICS;
 
  @Before
@@ -23,8 +23,8 @@ import org.apache.hudi.common.model.HoodieLogFile;
 import org.apache.hudi.common.model.HoodieTestUtils;
 import org.apache.hudi.common.table.HoodieTableMetaClient;
 import org.apache.hudi.common.table.HoodieTimeline;
-import org.apache.hudi.exception.HoodieException;
 import org.apache.hudi.common.table.timeline.HoodieInstant;
+import org.apache.hudi.exception.HoodieException;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;