1
0

[HUDI-1087] Handle decimal type for realtime record reader with SparkSQL (#1831)

Co-authored-by: Wenning Ding <wenningd@amazon.com>
This commit is contained in:
wenningd
2020-07-15 07:30:58 -07:00
committed by GitHub
parent b399b4ad43
commit bf1d36fa63

View File

@@ -33,6 +33,8 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
+import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.HiveDecimalUtils;
 import org.apache.hadoop.io.ArrayWritable;
 import org.apache.hadoop.io.BooleanWritable;
 import org.apache.hadoop.io.BytesWritable;
@@ -210,9 +212,8 @@ public class HoodieRealtimeRecordReaderUtils {
         LogicalTypes.Decimal decimal = (LogicalTypes.Decimal) LogicalTypes.fromSchema(schema);
         HiveDecimalWritable writable = new HiveDecimalWritable(((GenericFixed) value).bytes(),
             decimal.getScale());
-        return HiveDecimalWritable.enforcePrecisionScale(writable,
-            decimal.getPrecision(),
-            decimal.getScale());
+        return HiveDecimalUtils.enforcePrecisionScale(writable,
+            new DecimalTypeInfo(decimal.getPrecision(), decimal.getScale()));
       }
       return new BytesWritable(((GenericFixed) value).bytes());
     default: