[MINOR] Remove the declaration of thrown RuntimeException (#1305)
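Every `throws` clause deleted in this commit names an unchecked exception: `RuntimeException` itself or one of its subclasses (`IllegalArgumentException`, `HoodieException` and its children, JCommander's `ParameterException`). Java neither requires such declarations nor forces callers to handle them, so removing them tightens the signatures without changing behavior. A minimal standalone sketch of the rule (hypothetical demo class, not Hudi code):

```java
public class UncheckedThrowsDemo {

  // Legal but redundant: the compiler ignores throws clauses naming
  // RuntimeException or any of its subclasses.
  static void withDeclaration() throws IllegalArgumentException {
    throw new IllegalArgumentException("boom");
  }

  // Identical runtime behavior with a cleaner signature.
  static void withoutDeclaration() {
    throw new IllegalArgumentException("boom");
  }

  public static void main(String[] args) {
    try {
      withoutDeclaration(); // still catchable despite the missing declaration
    } catch (IllegalArgumentException e) {
      System.out.println("caught: " + e.getMessage());
    }
  }
}
```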
@@ -37,8 +37,7 @@ public class SparkEnvCommand implements CommandMarker {
   public static Map<String, String> env = new HashMap<String, String>();

   @CliCommand(value = "set", help = "Set spark launcher env to cli")
-  public void setEnv(@CliOption(key = {"conf"}, help = "Env config to be set") final String confMap)
-      throws IllegalArgumentException {
+  public void setEnv(@CliOption(key = {"conf"}, help = "Env config to be set") final String confMap) {
     String[] map = confMap.split("=");
     if (map.length != 2) {
       throw new IllegalArgumentException("Illegal set parameter, please use like [set --conf SPARK_HOME=/usr/etc/spark]");
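The runtime contract of `setEnv` is unchanged: malformed input still raises `IllegalArgumentException`; only the redundant declaration is gone. A self-contained sketch of the behavior, using the `env` map shown in the hunk (the `env.put` line is an assumption about the method body elided from the diff, and the Spring Shell wiring is dropped):

```java
import java.util.HashMap;
import java.util.Map;

public class SetEnvSketch {

  public static Map<String, String> env = new HashMap<String, String>();

  // Same validation as the diff; no throws clause needed for an unchecked exception.
  public static void setEnv(String confMap) {
    String[] map = confMap.split("=");
    if (map.length != 2) {
      throw new IllegalArgumentException(
          "Illegal set parameter, please use like [set --conf SPARK_HOME=/usr/etc/spark]");
    }
    env.put(map[0].trim(), map[1].trim()); // assumed body, not shown in the diff
  }

  public static void main(String[] args) {
    setEnv("SPARK_HOME=/usr/etc/spark");   // populates env
    System.out.println(env);               // {SPARK_HOME=/usr/etc/spark}
    try {
      setEnv("SPARK_HOME");                // malformed: no '=' present
    } catch (IllegalArgumentException e) {
      System.out.println("rejected: " + e.getMessage());
    }
  }
}
```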
@@ -143,15 +143,13 @@ public class MercifulJsonConverter {
       return res.getRight();
     }

-    protected abstract Pair<Boolean, Object> convert(Object value, String name, Schema schema)
-        throws HoodieJsonToAvroConversionException;
+    protected abstract Pair<Boolean, Object> convert(Object value, String name, Schema schema);
   }

   private static JsonToAvroFieldProcessor generateBooleanTypeHandler() {
     return new JsonToAvroFieldProcessor() {
       @Override
-      public Pair<Boolean, Object> convert(Object value, String name, Schema schema)
-          throws HoodieJsonToAvroConversionException {
+      public Pair<Boolean, Object> convert(Object value, String name, Schema schema) {
         if (value instanceof Boolean) {
           return Pair.of(true, value);
         }
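Dropping the clause from the abstract `convert` is safe for every anonymous handler in this file: an override may always declare fewer exceptions than the method it overrides, and unchecked exceptions may be thrown regardless of either signature. A compact illustration of that rule (names simplified from the diff, not the real Hudi types):

```java
abstract class FieldProcessor {
  // No throws clause: unchecked exceptions need no declaration.
  protected abstract Object convert(Object value, String name);
}

class BooleanProcessor extends FieldProcessor {
  @Override
  protected Object convert(Object value, String name) {
    if (value instanceof Boolean) {
      return value;
    }
    // Still allowed: overrides may throw unchecked exceptions the
    // parent signature does not mention.
    throw new IllegalArgumentException(name + " is not a boolean");
  }
}
```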
@@ -163,8 +161,7 @@ public class MercifulJsonConverter {
   private static JsonToAvroFieldProcessor generateIntTypeHandler() {
     return new JsonToAvroFieldProcessor() {
       @Override
-      public Pair<Boolean, Object> convert(Object value, String name, Schema schema)
-          throws HoodieJsonToAvroConversionException {
+      public Pair<Boolean, Object> convert(Object value, String name, Schema schema) {
         if (value instanceof Number) {
           return Pair.of(true, ((Number) value).intValue());
         } else if (value instanceof String) {
@@ -178,8 +175,7 @@ public class MercifulJsonConverter {
   private static JsonToAvroFieldProcessor generateDoubleTypeHandler() {
     return new JsonToAvroFieldProcessor() {
       @Override
-      public Pair<Boolean, Object> convert(Object value, String name, Schema schema)
-          throws HoodieJsonToAvroConversionException {
+      public Pair<Boolean, Object> convert(Object value, String name, Schema schema) {
         if (value instanceof Number) {
           return Pair.of(true, ((Number) value).doubleValue());
         } else if (value instanceof String) {
@@ -193,8 +189,7 @@ public class MercifulJsonConverter {
   private static JsonToAvroFieldProcessor generateFloatTypeHandler() {
     return new JsonToAvroFieldProcessor() {
       @Override
-      public Pair<Boolean, Object> convert(Object value, String name, Schema schema)
-          throws HoodieJsonToAvroConversionException {
+      public Pair<Boolean, Object> convert(Object value, String name, Schema schema) {
         if (value instanceof Number) {
           return Pair.of(true, ((Number) value).floatValue());
         } else if (value instanceof String) {
@@ -208,8 +203,7 @@ public class MercifulJsonConverter {
   private static JsonToAvroFieldProcessor generateLongTypeHandler() {
     return new JsonToAvroFieldProcessor() {
       @Override
-      public Pair<Boolean, Object> convert(Object value, String name, Schema schema)
-          throws HoodieJsonToAvroConversionException {
+      public Pair<Boolean, Object> convert(Object value, String name, Schema schema) {
         if (value instanceof Number) {
           return Pair.of(true, ((Number) value).longValue());
         } else if (value instanceof String) {
@@ -223,8 +217,7 @@ public class MercifulJsonConverter {
   private static JsonToAvroFieldProcessor generateStringTypeHandler() {
     return new JsonToAvroFieldProcessor() {
       @Override
-      public Pair<Boolean, Object> convert(Object value, String name, Schema schema)
-          throws HoodieJsonToAvroConversionException {
+      public Pair<Boolean, Object> convert(Object value, String name, Schema schema) {
         return Pair.of(true, value.toString());
       }
     };
@@ -233,8 +226,7 @@ public class MercifulJsonConverter {
   private static JsonToAvroFieldProcessor generateBytesTypeHandler() {
     return new JsonToAvroFieldProcessor() {
       @Override
-      public Pair<Boolean, Object> convert(Object value, String name, Schema schema)
-          throws HoodieJsonToAvroConversionException {
+      public Pair<Boolean, Object> convert(Object value, String name, Schema schema) {
         return Pair.of(true, value.toString().getBytes());
       }
     };
@@ -243,8 +235,7 @@ public class MercifulJsonConverter {
   private static JsonToAvroFieldProcessor generateFixedTypeHandler() {
     return new JsonToAvroFieldProcessor() {
       @Override
-      public Pair<Boolean, Object> convert(Object value, String name, Schema schema)
-          throws HoodieJsonToAvroConversionException {
+      public Pair<Boolean, Object> convert(Object value, String name, Schema schema) {
         byte[] src = value.toString().getBytes();
         byte[] dst = new byte[schema.getFixedSize()];
         System.arraycopy(src, 0, dst, 0, Math.min(schema.getFixedSize(), src.length));
@@ -256,8 +247,7 @@ public class MercifulJsonConverter {
   private static JsonToAvroFieldProcessor generateEnumTypeHandler() {
     return new JsonToAvroFieldProcessor() {
       @Override
-      public Pair<Boolean, Object> convert(Object value, String name, Schema schema)
-          throws HoodieJsonToAvroConversionException {
+      public Pair<Boolean, Object> convert(Object value, String name, Schema schema) {
         if (schema.getEnumSymbols().contains(value.toString())) {
           return Pair.of(true, new GenericData.EnumSymbol(schema, value.toString()));
         }
@@ -270,8 +260,7 @@ public class MercifulJsonConverter {
   private static JsonToAvroFieldProcessor generateRecordTypeHandler() {
     return new JsonToAvroFieldProcessor() {
       @Override
-      public Pair<Boolean, Object> convert(Object value, String name, Schema schema)
-          throws HoodieJsonToAvroConversionException {
+      public Pair<Boolean, Object> convert(Object value, String name, Schema schema) {
         GenericRecord result = new GenericData.Record(schema);
         return Pair.of(true, convertJsonToAvro((Map<String, Object>) value, schema));
       }
@@ -281,8 +270,7 @@ public class MercifulJsonConverter {
   private static JsonToAvroFieldProcessor generateArrayTypeHandler() {
     return new JsonToAvroFieldProcessor() {
       @Override
-      public Pair<Boolean, Object> convert(Object value, String name, Schema schema)
-          throws HoodieJsonToAvroConversionException {
+      public Pair<Boolean, Object> convert(Object value, String name, Schema schema) {
         Schema elementSchema = schema.getElementType();
         List<Object> listRes = new ArrayList<>();
         for (Object v : (List) value) {
@@ -296,8 +284,7 @@ public class MercifulJsonConverter {
   private static JsonToAvroFieldProcessor generateMapTypeHandler() {
     return new JsonToAvroFieldProcessor() {
       @Override
-      public Pair<Boolean, Object> convert(Object value, String name, Schema schema)
-          throws HoodieJsonToAvroConversionException {
+      public Pair<Boolean, Object> convert(Object value, String name, Schema schema) {
         Schema valueSchema = schema.getValueType();
         Map<String, Object> mapRes = new HashMap<>();
         for (Map.Entry<String, Object> v : ((Map<String, Object>) value).entrySet()) {
@@ -84,7 +84,7 @@ public class HoodieTableMetaClient implements Serializable {
   private HoodieArchivedTimeline archivedTimeline;
   private ConsistencyGuardConfig consistencyGuardConfig = ConsistencyGuardConfig.newBuilder().build();

-  public HoodieTableMetaClient(Configuration conf, String basePath) throws TableNotFoundException {
+  public HoodieTableMetaClient(Configuration conf, String basePath) {
     // Do not load any timeline by default
     this(conf, basePath, false);
   }
@@ -104,8 +104,7 @@ public class HoodieTableMetaClient implements Serializable {
   }

   public HoodieTableMetaClient(Configuration conf, String basePath, boolean loadActiveTimelineOnLoad,
-      ConsistencyGuardConfig consistencyGuardConfig, Option<TimelineLayoutVersion> layoutVersion, String payloadClassName)
-      throws TableNotFoundException {
+      ConsistencyGuardConfig consistencyGuardConfig, Option<TimelineLayoutVersion> layoutVersion, String payloadClassName) {
     LOG.info("Loading HoodieTableMetaClient from " + basePath);
     this.basePath = basePath;
     this.consistencyGuardConfig = consistencyGuardConfig;
@@ -83,7 +83,7 @@ public class RocksDBDAO {
   /**
    * Initialized Rocks DB instance.
    */
-  private void init() throws HoodieException {
+  private void init() {
     try {
       LOG.info("DELETING RocksDB persisted at " + rocksDBBasePath);
       FileIOUtils.deleteDirectory(new File(rocksDBBasePath));
@@ -38,8 +38,7 @@ public class TableNotFoundException extends HoodieException {
     return "Hoodie table not found in path " + basePath;
   }

-  public static void checkTableValidity(FileSystem fs, Path basePathDir, Path metaPathDir)
-      throws TableNotFoundException {
+  public static void checkTableValidity(FileSystem fs, Path basePathDir, Path metaPathDir) {
     // Check if the base path is found
     try {
       if (!fs.exists(basePathDir) || !fs.isDirectory(basePathDir)) {
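The hunk header itself shows why this declaration was removable: `TableNotFoundException extends HoodieException`, and Hudi's `HoodieException` is an unchecked exception, so the `throws` clause carried no compiler obligation. A hedged sketch of the hierarchy this change relies on (simplified; the real classes live in `org.apache.hudi.exception`):

```java
// Because the root of the hierarchy is a RuntimeException, nothing below
// it ever needs a throws declaration.
class HoodieException extends RuntimeException {
  HoodieException(String msg) {
    super(msg);
  }
}

class TableNotFoundException extends HoodieException {
  TableNotFoundException(String basePath) {
    super("Hoodie table not found in path " + basePath);
  }
}

class ValidityCheckSketch {
  static void checkTableValidity(boolean basePathExists, String basePath) {
    if (!basePathExists) {
      throw new TableNotFoundException(basePath); // compiles without a throws clause
    }
  }
}
```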
@@ -498,12 +498,12 @@ public class HoodieHiveClient {
    *
    * @param sql SQL statement to execute
    */
-  public CommandProcessorResponse updateHiveSQLUsingHiveDriver(String sql) throws HoodieHiveSyncException {
+  public CommandProcessorResponse updateHiveSQLUsingHiveDriver(String sql) {
     List<CommandProcessorResponse> responses = updateHiveSQLs(Collections.singletonList(sql));
     return responses.get(responses.size() - 1);
   }

-  private List<CommandProcessorResponse> updateHiveSQLs(List<String> sqls) throws HoodieHiveSyncException {
+  private List<CommandProcessorResponse> updateHiveSQLs(List<String> sqls) {
     SessionState ss = null;
     org.apache.hadoop.hive.ql.Driver hiveDriver = null;
     List<CommandProcessorResponse> responses = new ArrayList<>();
@@ -217,7 +217,7 @@ public class HDFSParquetImporter implements Serializable {
     List<String> validCommands = Arrays.asList("insert", "upsert", "bulkinsert");

     @Override
-    public void validate(String name, String value) throws ParameterException {
+    public void validate(String name, String value) {
       if (value == null || !validCommands.contains(value.toLowerCase())) {
         throw new ParameterException(
             String.format("Invalid command: value:%s: supported commands:%s", value, validCommands));
@@ -230,7 +230,7 @@ public class HDFSParquetImporter implements Serializable {
     List<String> validFormats = Collections.singletonList("parquet");

     @Override
-    public void validate(String name, String value) throws ParameterException {
+    public void validate(String name, String value) {
       if (value == null || !validFormats.contains(value)) {
         throw new ParameterException(
             String.format("Invalid format type: value:%s: supported formats:%s", value, validFormats));
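These two validators override JCommander's `IParameterValidator.validate`, whose interface declaration does include `throws ParameterException`. Since `ParameterException` is a `RuntimeException`, the override may drop the clause entirely. A standalone sketch of that shape (hypothetical validator class, simplified from the diff):

```java
import com.beust.jcommander.IParameterValidator;
import com.beust.jcommander.ParameterException;

public class FormatValidator implements IParameterValidator {

  // The interface declares `throws ParameterException`; the override can
  // omit it because the exception is unchecked.
  @Override
  public void validate(String name, String value) {
    if (value == null || !"parquet".equals(value)) {
      throw new ParameterException(
          String.format("Invalid format type: value:%s: supported formats:%s", value, "[parquet]"));
    }
  }
}
```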
@@ -90,7 +90,7 @@ public class HoodieWithTimelineServer implements Serializable {
     Preconditions.checkArgument(gotMessages.equals(messages), "Got expected reply from Server");
   }

-  public String sendRequest(String driverHost, int port) throws RuntimeException {
+  public String sendRequest(String driverHost, int port) {
     String url = String.format("http://%s:%d/", driverHost, port);
     try (CloseableHttpClient client = HttpClientBuilder.create().build()) {

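This last hunk is the commit title taken literally: `throws RuntimeException` on `sendRequest` promised the compiler nothing. The method does HTTP I/O, so it must handle the checked `IOException` internally anyway; a plausible shape (an assumption, since the catch block is not shown in this diff) wraps it in an unchecked exception, keeping the signature clean:

```java
import java.io.IOException;

import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClientBuilder;
import org.apache.http.util.EntityUtils;

public class RequestSketch {

  public String sendRequest(String driverHost, int port) {
    String url = String.format("http://%s:%d/", driverHost, port);
    try (CloseableHttpClient client = HttpClientBuilder.create().build()) {
      return EntityUtils.toString(client.execute(new HttpGet(url)).getEntity());
    } catch (IOException e) {
      // Wrap the checked exception; callers see an unchecked failure,
      // with or without `throws RuntimeException` in the signature.
      throw new RuntimeException("Request to " + url + " failed", e);
    }
  }
}
```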