1
0

[HUDI-3525] Introduce JsonKafkaSourcePostProcessor to support data preprocessing before it is transformed to a DataSet (#4930)

This commit is contained in:
wangxianghu
2022-03-07 00:41:01 +04:00
committed by GitHub
parent 4b471772aa
commit c9ffdc493e
7 changed files with 236 additions and 7 deletions

View File

@@ -52,6 +52,7 @@ import org.apache.hudi.utilities.schema.SchemaProvider;
import org.apache.hudi.utilities.schema.SchemaProviderWithPostProcessor;
import org.apache.hudi.utilities.schema.SparkAvroPostProcessor;
import org.apache.hudi.utilities.sources.Source;
import org.apache.hudi.utilities.sources.processor.JsonKafkaSourcePostProcessor;
import org.apache.hudi.utilities.transform.ChainedTransformer;
import org.apache.hudi.utilities.transform.Transformer;
@@ -122,6 +123,15 @@ public class UtilHelpers {
}
}
/**
 * Instantiates a {@link JsonKafkaSourcePostProcessor} by reflection.
 *
 * @param postProcessorClassName fully qualified class name of the post processor; may be null or empty
 * @param props                  properties handed to the post processor's constructor
 * @return the loaded post processor, or {@code null} when no class name was supplied
 * @throws IOException if the post processor class could not be loaded or instantiated
 */
public static JsonKafkaSourcePostProcessor createJsonKafkaSourcePostProcessor(String postProcessorClassName, TypedProperties props) throws IOException {
  // nothing configured -> no post processor
  if (StringUtils.isNullOrEmpty(postProcessorClassName)) {
    return null;
  }
  try {
    return (JsonKafkaSourcePostProcessor) ReflectionUtils.loadClass(postProcessorClassName, props);
  } catch (Throwable t) {
    // wrap any load/instantiation failure (incl. linkage errors from reflection) as IOException,
    // matching the error-handling style of the sibling create* helpers in this class
    throw new IOException("Could not load json kafka source post processor class " + postProcessorClassName, t);
  }
}
public static SchemaProvider createSchemaProvider(String schemaProviderClass, TypedProperties cfg,
JavaSparkContext jssc) throws IOException {
try {

View File

@@ -0,0 +1,35 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hudi.utilities.exception;
import org.apache.hudi.exception.HoodieException;
/**
 * Exception thrown during Kafka source post processing.
 */
public class HoodieSourcePostProcessException extends HoodieException {

  /**
   * @param message description of the post-processing failure
   */
  public HoodieSourcePostProcessException(String message) {
    super(message);
  }

  /**
   * @param message description of the post-processing failure
   * @param t       underlying cause
   */
  public HoodieSourcePostProcessException(String message, Throwable t) {
    super(message, t);
  }
}

View File

@@ -21,11 +21,14 @@ package org.apache.hudi.utilities.sources;
import org.apache.hudi.common.config.TypedProperties;
import org.apache.hudi.common.util.Option;
import org.apache.hudi.common.util.StringUtils;
import org.apache.hudi.utilities.UtilHelpers;
import org.apache.hudi.utilities.deltastreamer.HoodieDeltaStreamerMetrics;
import org.apache.hudi.utilities.exception.HoodieSourcePostProcessException;
import org.apache.hudi.utilities.exception.HoodieSourceTimeoutException;
import org.apache.hudi.utilities.schema.SchemaProvider;
import org.apache.hudi.utilities.sources.helpers.KafkaOffsetGen;
import org.apache.hudi.utilities.sources.helpers.KafkaOffsetGen.CheckpointUtils;
import org.apache.hudi.utilities.sources.processor.JsonKafkaSourcePostProcessor;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.log4j.LogManager;
@@ -37,6 +40,8 @@ import org.apache.spark.streaming.kafka010.KafkaUtils;
import org.apache.spark.streaming.kafka010.LocationStrategies;
import org.apache.spark.streaming.kafka010.OffsetRange;
import java.io.IOException;
/**
* Read json kafka data.
*/
@@ -74,12 +79,30 @@ public class JsonKafkaSource extends JsonSource {
}
private JavaRDD<String> toRDD(OffsetRange[] offsetRanges) {
return KafkaUtils.createRDD(sparkContext,
JavaRDD<String> jsonStringRDD = KafkaUtils.createRDD(sparkContext,
offsetGen.getKafkaParams(),
offsetRanges,
LocationStrategies.PreferConsistent())
.filter(x -> !StringUtils.isNullOrEmpty((String)x.value()))
.filter(x -> !StringUtils.isNullOrEmpty((String) x.value()))
.map(x -> x.value().toString());
return postProcess(jsonStringRDD);
}
/**
 * Applies the user-configured {@link JsonKafkaSourcePostProcessor} to the raw json records.
 *
 * @param jsonStringRDD raw json records consumed from Kafka
 * @return the processed records, or the input unchanged when no processor is configured
 * @throws HoodieSourcePostProcessException if the configured processor cannot be instantiated
 */
private JavaRDD<String> postProcess(JavaRDD<String> jsonStringRDD) {
  String processorClass = this.props.getString(KafkaOffsetGen.Config.JSON_KAFKA_PROCESSOR_CLASS_OPT.key(), null);
  // no processor configured -> pass records through untouched
  if (StringUtils.isNullOrEmpty(processorClass)) {
    return jsonStringRDD;
  }
  final JsonKafkaSourcePostProcessor postProcessor;
  try {
    postProcessor = UtilHelpers.createJsonKafkaSourcePostProcessor(processorClass, this.props);
  } catch (IOException e) {
    // surface instantiation failures as the dedicated post-process exception
    throw new HoodieSourcePostProcessException("Could not init " + processorClass, e);
  }
  return postProcessor.process(jsonStringRDD);
}
@Override

View File

@@ -189,6 +189,12 @@ public class KafkaOffsetGen {
.defaultValue(KafkaResetOffsetStrategies.LATEST)
.withDocumentation("Kafka consumer strategy for reading data.");
// Optional post processor applied to json records between Kafka consumption and deltastreamer ingestion.
public static final ConfigProperty<String> JSON_KAFKA_PROCESSOR_CLASS_OPT = ConfigProperty
    .key("hoodie.deltastreamer.source.json.kafka.processor.class")
    .noDefaultValue()
    // fixed: the two concatenated fragments previously joined as "...consuming fromsource..."
    .withDocumentation("Json kafka source post processor class name, post process data after consuming from "
        + "source and before giving it to deltastreamer.");

// Checkpoint type value indicating the checkpoint should be interpreted as a timestamp rather than offsets.
public static final String KAFKA_CHECKPOINT_TYPE_TIMESTAMP = "timestamp";
}

View File

@@ -0,0 +1,40 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hudi.utilities.sources.processor;
import org.apache.hudi.common.config.TypedProperties;
import org.apache.spark.api.java.JavaRDD;
import scala.Serializable;
/**
 * Base class for Json kafka source post processors. Users can define their own processor that extends this class to do
 * some post processing on the incoming json string records before the records are converted to DataSet<T>.
 */
public abstract class JsonKafkaSourcePostProcessor implements Serializable {

  // configuration supplied at construction time; available to subclass implementations
  protected TypedProperties props;

  public JsonKafkaSourcePostProcessor(TypedProperties props) {
    this.props = props;
  }

  /**
   * Transforms the incoming json records. The class implements Serializable, so implementations are
   * presumably shipped to Spark executors as part of RDD operations — keep them serializable.
   *
   * @param inputJsonRecords the raw json string records
   * @return the post-processed json string records
   */
  public abstract JavaRDD<String> process(JavaRDD<String> inputJsonRecords);
}

View File

@@ -62,12 +62,12 @@ import static org.mockito.Mockito.mock;
*/
public class TestJsonKafkaSource extends SparkClientFunctionalTestHarness {
private static final String TEST_TOPIC_PREFIX = "hoodie_test_";
protected static final String TEST_TOPIC_PREFIX = "hoodie_test_";
private static final URL SCHEMA_FILE_URL = TestJsonKafkaSource.class.getClassLoader().getResource("delta-streamer-config/source.avsc");
private static KafkaTestUtils testUtils;
protected static KafkaTestUtils testUtils;
private final HoodieDeltaStreamerMetrics metrics = mock(HoodieDeltaStreamerMetrics.class);
private FilebasedSchemaProvider schemaProvider;
protected final HoodieDeltaStreamerMetrics metrics = mock(HoodieDeltaStreamerMetrics.class);
protected FilebasedSchemaProvider schemaProvider;
@BeforeAll
public static void initClass() throws Exception {
@@ -88,7 +88,7 @@ public class TestJsonKafkaSource extends SparkClientFunctionalTestHarness {
schemaProvider = new FilebasedSchemaProvider(props, jsc());
}
private TypedProperties createPropsForJsonSource(String topic, Long maxEventsToReadFromKafkaSource, String resetStrategy) {
protected TypedProperties createPropsForJsonSource(String topic, Long maxEventsToReadFromKafkaSource, String resetStrategy) {
TypedProperties props = new TypedProperties();
props.setProperty("hoodie.deltastreamer.source.kafka.topic", topic);
props.setProperty("bootstrap.servers", testUtils.brokerAddress());

View File

@@ -0,0 +1,115 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hudi.utilities.sources;
import org.apache.hudi.common.config.TypedProperties;
import org.apache.hudi.common.testutils.HoodieTestDataGenerator;
import org.apache.hudi.common.util.Option;
import org.apache.hudi.utilities.deltastreamer.SourceFormatAdapter;
import org.apache.hudi.utilities.exception.HoodieSourcePostProcessException;
import org.apache.hudi.utilities.sources.processor.JsonKafkaSourcePostProcessor;
import org.apache.avro.generic.GenericRecord;
import org.apache.spark.api.java.JavaRDD;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import static org.apache.hudi.utilities.sources.helpers.KafkaOffsetGen.Config.JSON_KAFKA_PROCESSOR_CLASS_OPT;
import static org.apache.hudi.utilities.testutils.UtilitiesTestBase.Helpers.jsonifyRecords;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotEquals;
/**
 * Tests the {@link JsonKafkaSourcePostProcessor} wiring in {@link JsonKafkaSource}.
 * Extends TestJsonKafkaSource to reuse its Kafka test harness, metrics mock and props helper.
 */
public class TestJsonKafkaSourcePostProcessor extends TestJsonKafkaSource {

  @Test
  public void testNoPostProcessor() {
    // topic setup.
    final String topic = TEST_TOPIC_PREFIX + "testNoPostProcessor";
    testUtils.createTopic(topic, 2);
    HoodieTestDataGenerator dataGenerator = new HoodieTestDataGenerator();
    TypedProperties props = createPropsForJsonSource(topic, null, "earliest");
    Source jsonSource = new JsonKafkaSource(props, jsc(), spark(), schemaProvider, metrics);
    SourceFormatAdapter kafkaSource = new SourceFormatAdapter(jsonSource);
    testUtils.sendMessages(topic, jsonifyRecords(dataGenerator.generateInserts("000", 1000)));
    InputBatch<JavaRDD<GenericRecord>> fetch1 = kafkaSource.fetchNewDataInAvroFormat(Option.empty(), 900);
    // no processor configured: all 900 requested records arrive untouched
    assertEquals(900, fetch1.getBatch().get().count());
  }

  @Test
  public void testSampleJsonKafkaSourcePostProcessor() {
    // topic setup.
    final String topic = TEST_TOPIC_PREFIX + "testSampleJsonKafkaSourcePostProcessor";
    testUtils.createTopic(topic, 2);
    HoodieTestDataGenerator dataGenerator = new HoodieTestDataGenerator();
    TypedProperties props = createPropsForJsonSource(topic, null, "earliest");
    // processor class name setup
    props.setProperty(JSON_KAFKA_PROCESSOR_CLASS_OPT.key(), SampleJsonKafkaSourcePostProcessor.class.getName());
    Source jsonSource = new JsonKafkaSource(props, jsc(), spark(), schemaProvider, metrics);
    SourceFormatAdapter kafkaSource = new SourceFormatAdapter(jsonSource);
    testUtils.sendMessages(topic, jsonifyRecords(dataGenerator.generateInserts("000", 1000)));
    InputBatch<JavaRDD<GenericRecord>> fetch1 = kafkaSource.fetchNewDataInAvroFormat(Option.empty(), 900);
    // the sampling processor drops roughly half the records, so the count should differ from 900
    // NOTE(review): sample(false, 0.5) is probabilistic; assumes it never yields exactly 900 — potentially flaky
    assertNotEquals(900, fetch1.getBatch().get().count());
  }

  @Test
  public void testInvalidJsonKafkaSourcePostProcessor() {
    // topic setup.
    final String topic = TEST_TOPIC_PREFIX + "testInvalidJsonKafkaSourcePostProcessor";
    testUtils.createTopic(topic, 2);
    HoodieTestDataGenerator dataGenerator = new HoodieTestDataGenerator();
    TypedProperties props = createPropsForJsonSource(topic, null, "earliest");
    // processor class name setup — deliberately not a loadable class
    props.setProperty(JSON_KAFKA_PROCESSOR_CLASS_OPT.key(), "InvalidJsonKafkaSourcePostProcessor");
    Source jsonSource = new JsonKafkaSource(props, jsc(), spark(), schemaProvider, metrics);
    SourceFormatAdapter kafkaSource = new SourceFormatAdapter(jsonSource);
    testUtils.sendMessages(topic, jsonifyRecords(dataGenerator.generateInserts("000", 1000)));
    // the unloadable processor class should surface as HoodieSourcePostProcessException on fetch
    Assertions.assertThrows(HoodieSourcePostProcessException.class,
        () -> kafkaSource.fetchNewDataInAvroFormat(Option.empty(), 900));
  }

  /**
   * JsonKafkaSourcePostProcessor that returns a sub RDD of the incoming data, sampled via
   * {@link org.apache.spark.api.java.JavaRDD#sample(boolean, double)} with a 0.5 fraction.
   */
  public static class SampleJsonKafkaSourcePostProcessor extends JsonKafkaSourcePostProcessor {

    public SampleJsonKafkaSourcePostProcessor(TypedProperties props) {
      super(props);
    }

    @Override
    public JavaRDD<String> process(JavaRDD<String> inputJsonRecords) {
      // withoutReplacement sampling at fraction 0.5; result size is random, not exactly half
      return inputJsonRecords.sample(false, 0.5);
    }
  }
}