[MINOR] Fix code style for HiveAvroSerializer (#10755)
stream2000 authored Feb 26, 2024
1 parent ce039c9 · commit 43bdc94
Showing 1 changed file with 11 additions and 11 deletions.
@@ -99,7 +99,7 @@ public GenericRecord serialize(Object o, Schema schema) {
     List<? extends StructField> allStructFieldRefs = soi.getAllStructFieldRefs();
     List<Object> structFieldsDataAsList = soi.getStructFieldsDataAsList(o);
 
-    for (int i = 0; i < size; i++) {
+    for (int i = 0; i < size; i++) {
       Schema.Field field = schema.getFields().get(i);
       if (i >= columnTypes.size()) {
         break;
@@ -134,7 +134,7 @@ private void setUpRecordFieldFromWritable(TypeInfo typeInfo, Object structFieldD
    * Determine if an Avro schema is of type Union[T, NULL]. Avro supports nullable
    * types via a union of type T and null. This is a very common use case.
    * As such, we want to silently convert it to just T and allow the value to be null.
-   *
+   * <p>
    * When a Hive union type is used with AVRO, the schema type becomes
    * Union[NULL, T1, T2, ...]. The NULL in the union should be silently removed
    *
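Aside: the Javadoc in the hunk above describes collapsing a nullable union to its non-null branch. A minimal standalone sketch of that idea, using only the public Avro `Schema` API (the class and helper names here are hypothetical and are not part of this commit or of `HiveAvroSerializer`):

```java
import java.util.List;
import java.util.stream.Collectors;

import org.apache.avro.Schema;

// Hypothetical illustration only: strip the NULL branch from a union schema,
// as described in the Javadoc above.
public class NullableUnionExample {

  // Union[T, NULL] collapses to just T; Union[NULL, T1, T2, ...] keeps the non-null branches.
  static Schema stripNull(Schema schema) {
    if (schema.getType() != Schema.Type.UNION) {
      return schema;
    }
    List<Schema> nonNull = schema.getTypes().stream()
        .filter(s -> s.getType() != Schema.Type.NULL)
        .collect(Collectors.toList());
    return nonNull.size() == 1 ? nonNull.get(0) : Schema.createUnion(nonNull);
  }

  public static void main(String[] args) {
    Schema nullableString = Schema.createUnion(
        Schema.create(Schema.Type.NULL), Schema.create(Schema.Type.STRING));
    System.out.println(stripNull(nullableString)); // prints "string"
  }
}
```

This mirrors the behavior the Javadoc describes: Union[T, NULL] is treated as plain T, and the NULL branch of a Hive union is silently dropped.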
@@ -266,7 +266,7 @@ private Object serializeStruct(StructTypeInfo typeInfo, StructObjectInspector ss
     GenericData.Record record = new GenericData.Record(schema);
     ArrayList<TypeInfo> allStructFieldTypeInfos = typeInfo.getAllStructFieldTypeInfos();
 
-    for (int i = 0; i < size; i++) {
+    for (int i = 0; i < size; i++) {
       Schema.Field field = schema.getFields().get(i);
       setUpRecordFieldFromWritable(allStructFieldTypeInfos.get(i), structFieldsDataAsList.get(i),
           allStructFieldRefs.get(i).getFieldObjectInspector(), record, field);
@@ -278,26 +278,26 @@ private Object serializePrimitive(PrimitiveObjectInspector fieldOI, Object struc
     switch (fieldOI.getPrimitiveCategory()) {
       case BINARY:
         if (schema.getType() == Schema.Type.BYTES) {
-          return AvroSerdeUtils.getBufferFromBytes((byte[])fieldOI.getPrimitiveJavaObject(structFieldData));
+          return AvroSerdeUtils.getBufferFromBytes((byte[]) fieldOI.getPrimitiveJavaObject(structFieldData));
         } else if (schema.getType() == Schema.Type.FIXED) {
-          GenericData.Fixed fixed = new GenericData.Fixed(schema, (byte[])fieldOI.getPrimitiveJavaObject(structFieldData));
+          GenericData.Fixed fixed = new GenericData.Fixed(schema, (byte[]) fieldOI.getPrimitiveJavaObject(structFieldData));
           return fixed;
         } else {
           throw new HoodieException("Unexpected Avro schema for Binary TypeInfo: " + schema.getType());
         }
       case DECIMAL:
-        HiveDecimal dec = (HiveDecimal)fieldOI.getPrimitiveJavaObject(structFieldData);
-        LogicalTypes.Decimal decimal = (LogicalTypes.Decimal)schema.getLogicalType();
+        HiveDecimal dec = (HiveDecimal) fieldOI.getPrimitiveJavaObject(structFieldData);
+        LogicalTypes.Decimal decimal = (LogicalTypes.Decimal) schema.getLogicalType();
         BigDecimal bd = new BigDecimal(dec.toString()).setScale(decimal.getScale());
         return HoodieAvroUtils.DECIMAL_CONVERSION.toFixed(bd, schema, decimal);
       case CHAR:
-        HiveChar ch = (HiveChar)fieldOI.getPrimitiveJavaObject(structFieldData);
+        HiveChar ch = (HiveChar) fieldOI.getPrimitiveJavaObject(structFieldData);
         return new Utf8(ch.getStrippedValue());
       case VARCHAR:
-        HiveVarchar vc = (HiveVarchar)fieldOI.getPrimitiveJavaObject(structFieldData);
+        HiveVarchar vc = (HiveVarchar) fieldOI.getPrimitiveJavaObject(structFieldData);
         return new Utf8(vc.getValue());
       case STRING:
-        String string = (String)fieldOI.getPrimitiveJavaObject(structFieldData);
+        String string = (String) fieldOI.getPrimitiveJavaObject(structFieldData);
         return new Utf8(string);
       case DATE:
         return HoodieHiveUtils.getDays(structFieldData);
@@ -364,7 +364,7 @@ private Object serializeMap(MapTypeInfo typeInfo, MapObjectInspector fieldOI, Ob
     ObjectInspector mapValueObjectInspector = fieldOI.getMapValueObjectInspector();
     TypeInfo mapKeyTypeInfo = typeInfo.getMapKeyTypeInfo();
     TypeInfo mapValueTypeInfo = typeInfo.getMapValueTypeInfo();
-    Map<?,?> map = fieldOI.getMap(structFieldData);
+    Map<?, ?> map = fieldOI.getMap(structFieldData);
     Schema valueType = schema.getValueType();
 
     Map<Object, Object> deserialized = new LinkedHashMap<Object, Object>(fieldOI.getMapSize(structFieldData));
