From a3879fda1d7945cc8f541c2ad638e828002d527d Mon Sep 17 00:00:00 2001
From: stream2000 <18889897088@163.com>
Date: Mon, 26 Feb 2024 17:34:54 +0800
Subject: [PATCH] [MINOR] Fix code style for HiveAvroSerializer

---
 .../hudi/hadoop/utils/HiveAvroSerializer.java | 22 +++++++++----------
 1 file changed, 11 insertions(+), 11 deletions(-)

diff --git a/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/utils/HiveAvroSerializer.java b/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/utils/HiveAvroSerializer.java
index a0d1b086e035..5f33844d60c8 100644
--- a/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/utils/HiveAvroSerializer.java
+++ b/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/utils/HiveAvroSerializer.java
@@ -99,7 +99,7 @@ public GenericRecord serialize(Object o, Schema schema) {
     List allStructFieldRefs = soi.getAllStructFieldRefs();
     List structFieldsDataAsList = soi.getStructFieldsDataAsList(o);
 
-    for (int i = 0; i < size; i++) {
+    for (int i = 0; i < size; i++) {
       Schema.Field field = schema.getFields().get(i);
       if (i >= columnTypes.size()) {
         break;
@@ -134,7 +134,7 @@ private void setUpRecordFieldFromWritable(TypeInfo typeInfo, Object structFieldD
    * Determine if an Avro schema is of type Union[T, NULL]. Avro supports nullable
    * types via a union of type T and null. This is a very common use case.
    * As such, we want to silently convert it to just T and allow the value to be null.
-   *
+   *
    * When a Hive union type is used with AVRO, the schema type becomes
    * Union[NULL, T1, T2, ...]. The NULL in the union should be silently removed
    *
@@ -266,7 +266,7 @@ private Object serializeStruct(StructTypeInfo typeInfo, StructObjectInspector ss
     GenericData.Record record = new GenericData.Record(schema);
     ArrayList allStructFieldTypeInfos = typeInfo.getAllStructFieldTypeInfos();
 
-    for (int i = 0; i < size; i++) {
+    for (int i = 0; i < size; i++) {
       Schema.Field field = schema.getFields().get(i);
       setUpRecordFieldFromWritable(allStructFieldTypeInfos.get(i), structFieldsDataAsList.get(i),
           allStructFieldRefs.get(i).getFieldObjectInspector(), record, field);
@@ -278,26 +278,26 @@ private Object serializePrimitive(PrimitiveObjectInspector fieldOI, Object struc
     switch (fieldOI.getPrimitiveCategory()) {
       case BINARY:
         if (schema.getType() == Schema.Type.BYTES) {
-          return AvroSerdeUtils.getBufferFromBytes((byte[])fieldOI.getPrimitiveJavaObject(structFieldData));
+          return AvroSerdeUtils.getBufferFromBytes((byte[]) fieldOI.getPrimitiveJavaObject(structFieldData));
         } else if (schema.getType() == Schema.Type.FIXED) {
-          GenericData.Fixed fixed = new GenericData.Fixed(schema, (byte[])fieldOI.getPrimitiveJavaObject(structFieldData));
+          GenericData.Fixed fixed = new GenericData.Fixed(schema, (byte[]) fieldOI.getPrimitiveJavaObject(structFieldData));
           return fixed;
         } else {
           throw new HoodieException("Unexpected Avro schema for Binary TypeInfo: " + schema.getType());
         }
       case DECIMAL:
-        HiveDecimal dec = (HiveDecimal)fieldOI.getPrimitiveJavaObject(structFieldData);
-        LogicalTypes.Decimal decimal = (LogicalTypes.Decimal)schema.getLogicalType();
+        HiveDecimal dec = (HiveDecimal) fieldOI.getPrimitiveJavaObject(structFieldData);
+        LogicalTypes.Decimal decimal = (LogicalTypes.Decimal) schema.getLogicalType();
         BigDecimal bd = new BigDecimal(dec.toString()).setScale(decimal.getScale());
         return HoodieAvroUtils.DECIMAL_CONVERSION.toFixed(bd, schema, decimal);
       case CHAR:
-        HiveChar ch = (HiveChar)fieldOI.getPrimitiveJavaObject(structFieldData);
+        HiveChar ch = (HiveChar) fieldOI.getPrimitiveJavaObject(structFieldData);
         return new Utf8(ch.getStrippedValue());
       case VARCHAR:
-        HiveVarchar vc = (HiveVarchar)fieldOI.getPrimitiveJavaObject(structFieldData);
+        HiveVarchar vc = (HiveVarchar) fieldOI.getPrimitiveJavaObject(structFieldData);
         return new Utf8(vc.getValue());
       case STRING:
-        String string = (String)fieldOI.getPrimitiveJavaObject(structFieldData);
+        String string = (String) fieldOI.getPrimitiveJavaObject(structFieldData);
         return new Utf8(string);
       case DATE:
         return HoodieHiveUtils.getDays(structFieldData);
@@ -364,7 +364,7 @@ private Object serializeMap(MapTypeInfo typeInfo, MapObjectInspector fieldOI, Ob
     ObjectInspector mapValueObjectInspector = fieldOI.getMapValueObjectInspector();
     TypeInfo mapKeyTypeInfo = typeInfo.getMapKeyTypeInfo();
    TypeInfo mapValueTypeInfo = typeInfo.getMapValueTypeInfo();
-    Map map = fieldOI.getMap(structFieldData);
+    Map map = fieldOI.getMap(structFieldData);
     Schema valueType = schema.getValueType();
     Map deserialized = new LinkedHashMap(fieldOI.getMapSize(structFieldData));
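Note (illustrative, not part of the patch): the Javadoc touched by the second hunk describes Avro's nullable-type convention, where a nullable T is written as Union[T, NULL] and a Hive union becomes Union[NULL, T1, T2, ...] whose NULL branch is silently dropped. The sketch below shows that unwrapping with the standard Avro Schema API only; the class and method names are hypothetical and do not appear in HiveAvroSerializer.

import org.apache.avro.Schema;

import java.util.List;
import java.util.stream.Collectors;

// Hypothetical helper, for illustration only.
public class NullableUnionSketch {

  // True when the schema is a union containing a NULL branch, i.e. the
  // Union[T, NULL] / Union[NULL, T1, T2, ...] patterns described above.
  static boolean isNullableUnion(Schema schema) {
    return schema.getType() == Schema.Type.UNION
        && schema.getTypes().stream().anyMatch(s -> s.getType() == Schema.Type.NULL);
  }

  // Silently drops the NULL branch: Union[T, NULL] becomes just T, while
  // Union[NULL, T1, T2, ...] becomes Union[T1, T2, ...].
  static Schema stripNull(Schema schema) {
    List<Schema> nonNull = schema.getTypes().stream()
        .filter(s -> s.getType() != Schema.Type.NULL)
        .collect(Collectors.toList());
    return nonNull.size() == 1 ? nonNull.get(0) : Schema.createUnion(nonNull);
  }
}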