1
0

[HUDI-554] Cleanup package structure in hudi-client (#1346)

- Just package, class moves and renames with the following intent
 - `client` now has all the various client classes that do the transaction management
 - `func` renamed to `execution` and some helpers moved to `client/utils`
 - All compaction code under `io` now under `table/compact`
 - Rollback code under `table/rollback` and in general all code for individual operations under `table`
 - `exception`, `config`, `metrics` left untouched
 - Moved the tests also accordingly
 - Fixed some flaky tests
This commit is contained in:
vinoth chandar
2020-02-27 08:05:58 -08:00
committed by GitHub
parent cacd9a3322
commit 71170fafe7
90 changed files with 290 additions and 301 deletions

View File

@@ -27,11 +27,13 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hudi.exception.HoodieIOException;
import org.apache.log4j.LogManager;
import org.apache.log4j.Logger;
import java.io.File;
import java.io.IOException;
import java.net.ServerSocket;
/**
* An HDFS minicluster service implementation.
@@ -45,12 +47,6 @@ public class HdfsTestService {
*/
private Configuration hadoopConf;
private String workDir;
private String bindIP = "127.0.0.1";
private int namenodeRpcPort = 8020;
private int namenodeHttpPort = 50070;
private int datanodePort = 50010;
private int datanodeIpcPort = 50020;
private int datanodeHttpPort = 50075;
/**
* Embedded HDFS cluster.
@@ -65,6 +61,14 @@ public class HdfsTestService {
return hadoopConf;
}
private static int nextFreePort() {
try (ServerSocket socket = new ServerSocket(0)) {
return socket.getLocalPort();
} catch (IOException e) {
throw new HoodieIOException("Unable to find next free port", e);
}
}
public MiniDFSCluster start(boolean format) throws IOException {
Preconditions.checkState(workDir != null, "The work dir must be set before starting cluster.");
hadoopConf = HoodieTestUtils.getDefaultHadoopConf();
@@ -77,8 +81,14 @@ public class HdfsTestService {
FileIOUtils.deleteDirectory(file);
}
int namenodeRpcPort = nextFreePort();
int datanodePort = nextFreePort();
int datanodeIpcPort = nextFreePort();
int datanodeHttpPort = nextFreePort();
// Configure and start the HDFS cluster
// boolean format = shouldFormatDFSCluster(localDFSLocation, clean);
String bindIP = "127.0.0.1";
configureDFSCluster(hadoopConf, localDFSLocation, bindIP, namenodeRpcPort,
datanodePort, datanodeIpcPort, datanodeHttpPort);
miniDfsCluster = new MiniDFSCluster.Builder(hadoopConf).numDataNodes(1).format(format).checkDataNodeAddrConfig(true)