[HUDI-3520] Introduce DeleteSupportSchemaPostProcessor to support adding _hoodie_is_deleted column to schema (#4921)
@@ -0,0 +1,66 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hudi.utilities.schema;

import org.apache.hudi.common.config.TypedProperties;
import org.apache.hudi.common.model.HoodieRecord;

import org.apache.avro.Schema;
import org.apache.log4j.LogManager;
import org.apache.log4j.Logger;
import org.apache.spark.api.java.JavaSparkContext;

import java.util.ArrayList;
import java.util.List;

/**
 * An implementation of {@link SchemaPostProcessor} which will add a column named "_hoodie_is_deleted" to the end of
 * a given schema.
 */
public class DeleteSupportSchemaPostProcessor extends SchemaPostProcessor {

  private static final Logger LOG = LogManager.getLogger(DeleteSupportSchemaPostProcessor.class);

  public DeleteSupportSchemaPostProcessor(TypedProperties props, JavaSparkContext jssc) {
    super(props, jssc);
  }

  @Override
  public Schema processSchema(Schema schema) {

    if (schema.getField(HoodieRecord.HOODIE_IS_DELETED) != null) {
      LOG.warn(String.format("column %s already exists!", HoodieRecord.HOODIE_IS_DELETED));
      return schema;
    }

    List<Schema.Field> sourceFields = schema.getFields();
    List<Schema.Field> targetFields = new ArrayList<>(sourceFields.size() + 1);
    // copy existing columns
    for (Schema.Field sourceField : sourceFields) {
      targetFields.add(new Schema.Field(sourceField.name(), sourceField.schema(), sourceField.doc(), sourceField.defaultVal()));
    }
    // add _hoodie_is_deleted column
    targetFields.add(new Schema.Field(HoodieRecord.HOODIE_IS_DELETED, Schema.create(Schema.Type.BOOLEAN), null, false));

    return Schema.createRecord(schema.getName(), schema.getDoc(), schema.getNamespace(), false, targetFields);
  }

}
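For reference, a minimal sketch (not part of this commit) of applying the new processor directly to a parsed Avro schema. The schema literal and the class name ExampleUsage are illustrative assumptions; the JavaSparkContext argument is passed as null because the processor does not use it, mirroring the test added below.

import org.apache.avro.Schema;
import org.apache.hudi.common.config.TypedProperties;
import org.apache.hudi.utilities.schema.DeleteSupportSchemaPostProcessor;

public class ExampleUsage {
  public static void main(String[] args) {
    // Illustrative source schema with two plain fields.
    String avsc = "{\"type\":\"record\",\"name\":\"trip\",\"fields\":["
        + "{\"name\":\"id\",\"type\":\"string\"},{\"name\":\"amount\",\"type\":\"double\"}]}";
    Schema source = new Schema.Parser().parse(avsc);

    // Only the properties are needed; the Spark context is unused by this processor.
    DeleteSupportSchemaPostProcessor processor =
        new DeleteSupportSchemaPostProcessor(new TypedProperties(), null);

    Schema target = processor.processSchema(source);
    // Prints the appended boolean field "_hoodie_is_deleted" (default value false).
    System.out.println(target.getField("_hoodie_is_deleted"));
  }
}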
@@ -41,7 +41,7 @@ public class SparkAvroPostProcessor extends SchemaPostProcessor {

  @Override
  public Schema processSchema(Schema schema) {
    return schema != null ? AvroConversionUtils.convertStructTypeToAvroSchema(
        AvroConversionUtils.convertAvroSchemaToStructType(schema), RowBasedSchemaProvider.HOODIE_RECORD_STRUCT_NAME,
        RowBasedSchemaProvider.HOODIE_RECORD_NAMESPACE) : null;
  }
}
@@ -18,9 +18,10 @@

package org.apache.hudi.utilities;

import org.apache.avro.Schema;
import org.apache.hudi.common.config.TypedProperties;
import org.apache.hudi.utilities.schema.SchemaProvider;

import org.apache.avro.Schema;
import org.apache.spark.api.java.JavaSparkContext;

public class DummySchemaProvider extends SchemaProvider {
@@ -18,9 +18,10 @@

package org.apache.hudi.utilities;

import org.apache.avro.Schema;
import org.apache.hudi.common.config.TypedProperties;
import org.apache.hudi.utilities.schema.SchemaProvider;

import org.apache.avro.Schema;
import org.apache.spark.api.java.JavaSparkContext;

public class SparkAvroSchemaProvider extends SchemaProvider {
@@ -19,17 +19,17 @@
package org.apache.hudi.utilities;

import org.apache.hudi.common.config.TypedProperties;
import org.apache.hudi.utilities.schema.DeleteSupportSchemaPostProcessor;
import org.apache.hudi.utilities.schema.SchemaPostProcessor;
import org.apache.hudi.utilities.schema.SchemaPostProcessor.Config;
import org.apache.hudi.utilities.schema.SchemaProvider;
import org.apache.hudi.utilities.schema.SparkAvroPostProcessor;
import org.apache.hudi.utilities.testutils.UtilitiesTestBase;
import org.apache.hudi.utilities.transform.FlatteningTransformer;

import org.apache.avro.Schema;
import org.apache.avro.Schema.Type;
import org.apache.avro.SchemaBuilder;

import org.apache.hudi.utilities.transform.FlatteningTransformer;
import org.apache.spark.api.java.JavaSparkContext;
import org.junit.jupiter.api.Test;
@@ -82,6 +82,14 @@ public class TestSchemaPostProcessor extends UtilitiesTestBase {
    assertNotNull(schema.getField("day"));
  }

  @Test
  public void testDeleteSupport() {
    DeleteSupportSchemaPostProcessor processor = new DeleteSupportSchemaPostProcessor(properties, null);
    Schema schema = new Schema.Parser().parse(ORIGINAL_SCHEMA);
    Schema targetSchema = processor.processSchema(schema);
    assertNotNull(targetSchema.getField("_hoodie_is_deleted"));
  }

  @Test
  public void testSparkAvroSchema() throws IOException {
    SparkAvroPostProcessor processor = new SparkAvroPostProcessor(properties, null);
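The new test constructs the processor by hand; in a DeltaStreamer job it would instead be selected through the schema post processor property handled by SchemaPostProcessor.Config. A hedged sketch of that wiring follows; SCHEMA_POST_PROCESSOR_PROP is assumed to be the property constant for this purpose, so verify the exact constant and key in SchemaPostProcessor.Config before relying on it.

import org.apache.hudi.common.config.TypedProperties;
import org.apache.hudi.utilities.schema.DeleteSupportSchemaPostProcessor;
import org.apache.hudi.utilities.schema.SchemaPostProcessor;

public class RegisterPostProcessorExample {
  public static void main(String[] args) {
    TypedProperties props = new TypedProperties();
    // Assumption: Config.SCHEMA_POST_PROCESSOR_PROP is the key the schema provider reads to
    // pick up post processors; check SchemaPostProcessor.Config for the actual name and value.
    props.setProperty(SchemaPostProcessor.Config.SCHEMA_POST_PROCESSOR_PROP,
        DeleteSupportSchemaPostProcessor.class.getName());
    // Passing these properties to the DeltaStreamer schema provider would cause every provided
    // schema to be post processed to include the trailing _hoodie_is_deleted boolean column.
  }
}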