HUDI-479: Eliminate or Minimize use of Guava if possible (#1159)

commit 8c3001363d
parent 1713f686f8
Author: Suneel Marthi
Date:   2020-03-28 03:11:32 -04:00
Committed by: GitHub
46 changed files with 429 additions and 217 deletions


@@ -20,20 +20,20 @@ package org.apache.hudi.common.minicluster;
 import org.apache.hudi.common.model.HoodieTestUtils;
 import org.apache.hudi.common.util.FileIOUtils;
+import org.apache.hudi.exception.HoodieIOException;
-import com.google.common.io.Files;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
-import org.apache.hudi.exception.HoodieIOException;
 import org.apache.log4j.LogManager;
 import org.apache.log4j.Logger;
 import java.io.File;
 import java.io.IOException;
-import java.util.Objects;
 import java.net.ServerSocket;
+import java.nio.file.Files;
 import java.util.Objects;

 /**
  * An HDFS minicluster service implementation.
@@ -53,8 +53,8 @@ public class HdfsTestService {
    */
   private MiniDFSCluster miniDfsCluster;

-  public HdfsTestService() {
-    workDir = Files.createTempDir().getAbsolutePath();
+  public HdfsTestService() throws IOException {
+    workDir = Files.createTempDirectory("temp").getName(0).toString();
   }

   public Configuration getHadoopConf() {
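
Note: both minicluster services make the same swap. Guava's Files.createTempDir() returns a File and throws only unchecked errors, while java.nio.file.Files.createTempDirectory(...) returns a Path and declares IOException, which is why the constructors now declare throws IOException. A minimal self-contained sketch of the pattern; the class name and prefix are illustrative, not part of the commit:

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;

public class TempDirSketch {
  public static void main(String[] args) throws IOException {
    // Guava equivalent: File dir = com.google.common.io.Files.createTempDir();
    // The NIO replacement returns a Path and declares IOException instead.
    Path dir = Files.createTempDirectory("hudi-test-");
    String workDir = dir.toFile().getAbsolutePath();
    System.out.println(workDir);
  }
}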


@@ -18,7 +18,6 @@
 package org.apache.hudi.common.minicluster;

-import com.google.common.io.Files;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.log4j.LogManager;
@@ -35,6 +34,7 @@ import java.io.OutputStream;
 import java.io.Reader;
 import java.net.InetSocketAddress;
 import java.net.Socket;
+import java.nio.file.Files;
 import java.util.Objects;

 /**
@@ -75,8 +75,8 @@ public class ZookeeperTestService {
   private ZooKeeperServer zooKeeperServer;
   private boolean started = false;

-  public ZookeeperTestService(Configuration config) {
-    this.workDir = Files.createTempDir().getAbsolutePath();
+  public ZookeeperTestService(Configuration config) throws IOException {
+    this.workDir = Files.createTempDirectory(System.currentTimeMillis() + "-").toFile().getAbsolutePath();
     this.hadoopConf = config;
   }


@@ -19,6 +19,7 @@
 package org.apache.hudi.common.model;

+import org.apache.hudi.common.util.FileIOUtils;
 import org.junit.Assert;
 import org.junit.Test;


@@ -26,6 +26,7 @@ import org.apache.hudi.common.table.HoodieTimeline;
 import org.apache.hudi.common.table.timeline.HoodieActiveTimeline;
 import org.apache.hudi.common.table.timeline.HoodieInstant;
 import org.apache.hudi.common.table.timeline.HoodieInstant.State;
+import org.apache.hudi.common.util.CollectionUtils;
 import org.apache.hudi.common.util.Option;
 import org.apache.hadoop.fs.Path;
@@ -34,7 +35,6 @@ import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.ExpectedException;
-import com.google.common.collect.Sets;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Collections;
@@ -195,9 +195,9 @@ public class TestHoodieActiveTimeline extends HoodieCommonTestHarness {
     // Test that various types of getXXX operations from HoodieActiveTimeline
     // return the correct set of Instant
     checkTimeline.accept(timeline.getCommitsTimeline(),
-        Sets.newHashSet(HoodieTimeline.COMMIT_ACTION, HoodieTimeline.DELTA_COMMIT_ACTION));
+        CollectionUtils.createSet(HoodieTimeline.COMMIT_ACTION, HoodieTimeline.DELTA_COMMIT_ACTION));
     checkTimeline.accept(timeline.getCommitsAndCompactionTimeline(),
-        Sets.newHashSet(HoodieTimeline.COMMIT_ACTION, HoodieTimeline.DELTA_COMMIT_ACTION, HoodieTimeline.COMPACTION_ACTION));
+        CollectionUtils.createSet(HoodieTimeline.COMMIT_ACTION, HoodieTimeline.DELTA_COMMIT_ACTION, HoodieTimeline.COMPACTION_ACTION));
     checkTimeline.accept(timeline.getCommitTimeline(), Collections.singleton(HoodieTimeline.COMMIT_ACTION));
     checkTimeline.accept(timeline.getDeltaCommitTimeline(), Collections.singleton(HoodieTimeline.DELTA_COMMIT_ACTION));
     checkTimeline.accept(timeline.getCleanerTimeline(), Collections.singleton(HoodieTimeline.CLEAN_ACTION));
@@ -205,7 +205,7 @@ public class TestHoodieActiveTimeline extends HoodieCommonTestHarness {
     checkTimeline.accept(timeline.getRestoreTimeline(), Collections.singleton(HoodieTimeline.RESTORE_ACTION));
     checkTimeline.accept(timeline.getSavePointTimeline(), Collections.singleton(HoodieTimeline.SAVEPOINT_ACTION));
     checkTimeline.accept(timeline.getAllCommitsTimeline(),
-        Sets.newHashSet(HoodieTimeline.COMMIT_ACTION, HoodieTimeline.DELTA_COMMIT_ACTION,
+        CollectionUtils.createSet(HoodieTimeline.COMMIT_ACTION, HoodieTimeline.DELTA_COMMIT_ACTION,
             HoodieTimeline.CLEAN_ACTION, HoodieTimeline.COMPACTION_ACTION,
             HoodieTimeline.SAVEPOINT_ACTION, HoodieTimeline.ROLLBACK_ACTION));
@@ -380,12 +380,12 @@ public class TestHoodieActiveTimeline extends HoodieCommonTestHarness {
     checkFilter.accept(timeline.filter(i -> false), new HashSet<>());
     checkFilter.accept(timeline.filterInflights(), Collections.singleton(State.INFLIGHT));
     checkFilter.accept(timeline.filterInflightsAndRequested(),
-        Sets.newHashSet(State.INFLIGHT, State.REQUESTED));
+        CollectionUtils.createSet(State.INFLIGHT, State.REQUESTED));

     // filterCompletedAndCompactionInstants
     // This cannot be done using checkFilter as it involves both states and actions
     final HoodieTimeline t1 = timeline.filterCompletedAndCompactionInstants();
-    final Set<State> states = Sets.newHashSet(State.REQUESTED, State.COMPLETED);
+    final Set<State> states = CollectionUtils.createSet(State.REQUESTED, State.COMPLETED);
     final Set<String> actions = Collections.singleton(HoodieTimeline.COMPACTION_ACTION);
     sup.get().filter(i -> states.contains(i.getState()) || actions.contains(i.getAction()))
         .forEach(i -> assertTrue(t1.containsInstant(i)));
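
Note: CollectionUtils.createSet is Hudi's varargs stand-in for Guava's Sets.newHashSet. A simplified sketch of such a helper; the real implementation lives in org.apache.hudi.common.util.CollectionUtils and may differ in detail:

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

public final class CreateSetSketch {
  // Varargs set factory in the spirit of CollectionUtils.createSet.
  @SafeVarargs
  public static <T> Set<T> createSet(T... elements) {
    return new HashSet<>(Arrays.asList(elements));
  }

  public static void main(String[] args) {
    Set<String> actions = createSet("commit", "deltacommit");
    System.out.println(actions.size()); // 2
  }
}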


@@ -27,9 +27,9 @@ import org.apache.hudi.common.HoodieCommonTestHarness;
 import org.apache.hudi.common.HoodieRollbackStat;
 import org.apache.hudi.common.model.CompactionOperation;
 import org.apache.hudi.common.model.FileSlice;
+import org.apache.hudi.common.model.HoodieBaseFile;
 import org.apache.hudi.common.model.HoodieCleaningPolicy;
 import org.apache.hudi.common.model.HoodieCommitMetadata;
-import org.apache.hudi.common.model.HoodieBaseFile;
 import org.apache.hudi.common.model.HoodieFileGroup;
 import org.apache.hudi.common.model.HoodieFileGroupId;
 import org.apache.hudi.common.model.HoodieTableType;
@@ -41,6 +41,7 @@ import org.apache.hudi.common.table.timeline.HoodieInstant;
 import org.apache.hudi.common.table.timeline.HoodieInstant.State;
 import org.apache.hudi.common.util.AvroUtils;
 import org.apache.hudi.common.util.CleanerUtils;
+import org.apache.hudi.common.util.CollectionUtils;
 import org.apache.hudi.common.util.CompactionUtils;
 import org.apache.hudi.common.util.FSUtils;
 import org.apache.hudi.common.util.Option;
@@ -48,8 +49,6 @@ import org.apache.hudi.common.util.ValidationUtils;
 import org.apache.hudi.common.util.collection.Pair;
 import org.apache.hudi.exception.HoodieException;
-import com.google.common.collect.ImmutableMap;
-import com.google.common.collect.Iterators;
 import org.apache.hadoop.fs.Path;
 import org.apache.log4j.LogManager;
 import org.apache.log4j.Logger;
@@ -135,8 +134,11 @@ public class TestIncrementalFSViewSync extends HoodieCommonTestHarness {
     // Clean first slice
     testCleans(view, Collections.singletonList("21"),
-        new ImmutableMap.Builder<String, List<String>>().put("11", Arrays.asList("12", "13", "15")).build(),
-        instantsToFiles, Collections.singletonList("11"));
+        new HashMap<String, List<String>>() {
+          {
+            put("11", Arrays.asList("12", "13", "15"));
+          }
+        }, instantsToFiles, Collections.singletonList("11"));

     // Add one more ingestion instant. This should be 2nd slice now
     instantsToFiles.putAll(testMultipleWriteSteps(view, Collections.singletonList("22"), true, "19", 2));
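
Note: the double-brace initializer that replaces ImmutableMap.Builder above creates an anonymous HashMap subclass whose instance initializer runs the put calls. A sketch of what it expands to, using hypothetical variable names:

import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class DoubleBraceSketch {
  public static void main(String[] args) {
    // Double-brace form, as in the diff above:
    Map<String, List<String>> doubleBrace = new HashMap<String, List<String>>() {
      {
        put("11", Arrays.asList("12", "13", "15"));
      }
    };
    // Equivalent plain form, without the extra anonymous class:
    Map<String, List<String>> plain = new HashMap<>();
    plain.put("11", Arrays.asList("12", "13", "15"));
    System.out.println(doubleBrace.equals(plain)); // true
  }
}

One trade-off of the double-brace form is that each use site compiles to its own anonymous subclass of HashMap.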
@@ -251,7 +253,11 @@ public class TestIncrementalFSViewSync extends HoodieCommonTestHarness {
      * Case where a clean happened and then rounds of ingestion and compaction happened
      */
     testCleans(view2, Collections.singletonList("19"),
-        new ImmutableMap.Builder<String, List<String>>().put("11", Arrays.asList("12", "13", "14")).build(),
+        new HashMap<String, List<String>>() {
+          {
+            put("11", Arrays.asList("12", "13", "14"));
+          }
+        },
         instantsToFiles, Collections.singletonList("11"));
     scheduleCompaction(view2, "20");
     instantsToFiles.putAll(testMultipleWriteSteps(view2, Arrays.asList("21", "22"), true, "20", 2));
@@ -439,7 +445,7 @@ public class TestIncrementalFSViewSync extends HoodieCommonTestHarness {
     List<HoodieRollbackMetadata> rollbackM = new ArrayList<>();
     rollbackM.add(rollbackMetadata);
-    metadata.setHoodieRestoreMetadata(new ImmutableMap.Builder().put(rollbackInstant, rollbackM).build());
+    metadata.setHoodieRestoreMetadata(CollectionUtils.createImmutableMap(rollbackInstant, rollbackM));
     List<String> rollbackInstants = new ArrayList<>();
     rollbackInstants.add(rollbackInstant);
     metadata.setInstantsToRollback(rollbackInstants);
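
Note: CollectionUtils.createImmutableMap replaces the Guava ImmutableMap.Builder one-liner here. A simplified single-entry sketch; the method name matches the Hudi utility, but the body is an illustrative guess:

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

public final class CreateImmutableMapSketch {
  // Single-entry immutable map factory in the spirit of CollectionUtils.createImmutableMap.
  public static <K, V> Map<K, V> createImmutableMap(K key, V value) {
    Map<K, V> map = new HashMap<>();
    map.put(key, value);
    return Collections.unmodifiableMap(map);
  }

  public static void main(String[] args) {
    Map<String, String> m = createImmutableMap("rollbackInstant", "001");
    System.out.println(m);
  }
}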
@@ -646,7 +652,7 @@ public class TestIncrementalFSViewSync extends HoodieCommonTestHarness {
     HoodieTimeline timeline1 = view1.getTimeline();
     HoodieTimeline timeline2 = view2.getTimeline();
     Assert.assertEquals(view1.getLastInstant(), view2.getLastInstant());
-    Iterators.elementsEqual(timeline1.getInstants().iterator(), timeline2.getInstants().iterator());
+    CollectionUtils.elementsEqual(timeline1.getInstants().iterator(), timeline2.getInstants().iterator());

     // View Checks
     Map<HoodieFileGroupId, HoodieFileGroup> fileGroupsMap1 = partitions.stream().flatMap(view1::getAllFileGroups)
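
Note: CollectionUtils.elementsEqual stands in for Guava's Iterators.elementsEqual, comparing two iterators element by element. A sketch of the expected semantics; simplified, not the actual Hudi source:

import java.util.Arrays;
import java.util.Iterator;
import java.util.Objects;

public final class ElementsEqualSketch {
  public static boolean elementsEqual(Iterator<?> a, Iterator<?> b) {
    while (a.hasNext() && b.hasNext()) {
      if (!Objects.equals(a.next(), b.next())) {
        return false;
      }
    }
    // Equal only if both iterators are exhausted at the same time.
    return !a.hasNext() && !b.hasNext();
  }

  public static void main(String[] args) {
    System.out.println(elementsEqual(
        Arrays.asList("001", "002").iterator(),
        Arrays.asList("001", "002").iterator())); // true
  }
}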


@@ -18,7 +18,6 @@
 package org.apache.hudi.common.table.view;

-import junit.framework.TestCase;
 import org.apache.hudi.common.model.CompactionOperation;
 import org.apache.hudi.common.model.FileSlice;
 import org.apache.hudi.common.model.HoodieBaseFile;
@@ -30,6 +29,8 @@ import org.apache.hudi.common.table.timeline.HoodieInstant;
 import org.apache.hudi.common.util.Option;
 import org.apache.hudi.common.util.collection.ImmutablePair;
 import org.apache.hudi.common.util.collection.Pair;
+import junit.framework.TestCase;
 import org.junit.Before;
 import org.junit.Rule;
 import org.junit.Test;
@@ -45,10 +46,10 @@ import java.util.Collections;
 import java.util.List;
 import java.util.stream.Stream;

-import static org.mockito.Mockito.when;
 import static org.mockito.Mockito.reset;
 import static org.mockito.Mockito.times;
 import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;

 @RunWith(MockitoJUnitRunner.class)
 public class TestPriorityBasedFileSystemView extends TestCase {


@@ -31,8 +31,6 @@ import org.apache.hudi.common.table.timeline.HoodieInstant.State;
 import org.apache.hudi.common.util.collection.Pair;
 import org.apache.hudi.exception.HoodieIOException;
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableMap;
 import org.apache.hadoop.fs.Path;
 import org.junit.Assert;
@@ -82,12 +80,17 @@ public class CompactionTestUtils {
     createDeltaCommit(metaClient, "004");
     createDeltaCommit(metaClient, "006");
-    Map<String, String> baseInstantsToCompaction = new ImmutableMap.Builder<String, String>().put("000", "001")
-        .put("002", "003").put("004", "005").put("006", "007").build();
+    Map<String, String> baseInstantsToCompaction = new HashMap<String, String>() {
+      {
+        put("000", "001");
+        put("002", "003");
+        put("004", "005");
+        put("006", "007");
+      }
+    };
     List<Integer> expectedNumEntries =
         Arrays.asList(numEntriesInPlan1, numEntriesInPlan2, numEntriesInPlan3, numEntriesInPlan4);
-    List<HoodieCompactionPlan> plans =
-        new ImmutableList.Builder<HoodieCompactionPlan>().add(plan1, plan2, plan3, plan4).build();
+    List<HoodieCompactionPlan> plans = CollectionUtils.createImmutableList(plan1, plan2, plan3, plan4);
     IntStream.range(0, 4).boxed().forEach(idx -> {
       if (expectedNumEntries.get(idx) > 0) {
         Assert.assertEquals("check if plan " + idx + " has exp entries", expectedNumEntries.get(idx).longValue(),


@@ -31,7 +31,6 @@ import org.apache.hudi.common.util.CompactionTestUtils.TestHoodieBaseFile;
 import org.apache.hudi.common.util.collection.Pair;
 import org.apache.hudi.common.versioning.compaction.CompactionPlanMigrator;
-import com.google.common.collect.ImmutableMap;
 import org.apache.hadoop.fs.Path;
 import org.junit.Assert;
 import org.junit.Before;
@@ -39,6 +38,7 @@ import org.junit.Test;
 import java.io.IOException;
 import java.util.Arrays;
+import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.function.Function;
@@ -59,8 +59,12 @@ public class TestCompactionUtils extends HoodieCommonTestHarness {
   private static String TEST_WRITE_TOKEN = "1-0-1";
-  private static final Map<String, Double> METRICS =
-      new ImmutableMap.Builder<String, Double>().put("key1", 1.0).put("key2", 3.0).build();
+  private static final Map<String, Double> METRICS = new HashMap<String, Double>() {
+    {
+      put("key1", 1.0);
+      put("key2", 3.0);
+    }
+  };
   private Function<Pair<String, FileSlice>, Map<String, Double>> metricsCaptureFn = (partitionFileSlice) -> METRICS;

   @Before


@@ -23,8 +23,8 @@ import org.apache.hudi.common.model.HoodieLogFile;
 import org.apache.hudi.common.model.HoodieTestUtils;
 import org.apache.hudi.common.table.HoodieTableMetaClient;
 import org.apache.hudi.common.table.HoodieTimeline;
-import org.apache.hudi.exception.HoodieException;
 import org.apache.hudi.common.table.timeline.HoodieInstant;
+import org.apache.hudi.exception.HoodieException;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;