Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[MINOR] Fix code style for HiveAvroSerializer #10755

Merged
merged 1 commit on Feb 26, 2024
Merged
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -99,7 +99,7 @@ public GenericRecord serialize(Object o, Schema schema) {
List<? extends StructField> allStructFieldRefs = soi.getAllStructFieldRefs();
List<Object> structFieldsDataAsList = soi.getStructFieldsDataAsList(o);

for (int i = 0; i < size; i++) {
for (int i = 0; i < size; i++) {
Schema.Field field = schema.getFields().get(i);
if (i >= columnTypes.size()) {
break;
Expand Down Expand Up @@ -134,7 +134,7 @@ private void setUpRecordFieldFromWritable(TypeInfo typeInfo, Object structFieldD
* Determine if an Avro schema is of type Union[T, NULL]. Avro supports nullable
* types via a union of type T and null. This is a very common use case.
* As such, we want to silently convert it to just T and allow the value to be null.
*
* <p>
* When a Hive union type is used with AVRO, the schema type becomes
* Union[NULL, T1, T2, ...]. The NULL in the union should be silently removed
*
Expand Down Expand Up @@ -266,7 +266,7 @@ private Object serializeStruct(StructTypeInfo typeInfo, StructObjectInspector ss
GenericData.Record record = new GenericData.Record(schema);
ArrayList<TypeInfo> allStructFieldTypeInfos = typeInfo.getAllStructFieldTypeInfos();

for (int i = 0; i < size; i++) {
for (int i = 0; i < size; i++) {
Schema.Field field = schema.getFields().get(i);
setUpRecordFieldFromWritable(allStructFieldTypeInfos.get(i), structFieldsDataAsList.get(i),
allStructFieldRefs.get(i).getFieldObjectInspector(), record, field);
Expand All @@ -278,26 +278,26 @@ private Object serializePrimitive(PrimitiveObjectInspector fieldOI, Object struc
switch (fieldOI.getPrimitiveCategory()) {
case BINARY:
if (schema.getType() == Schema.Type.BYTES) {
return AvroSerdeUtils.getBufferFromBytes((byte[])fieldOI.getPrimitiveJavaObject(structFieldData));
return AvroSerdeUtils.getBufferFromBytes((byte[]) fieldOI.getPrimitiveJavaObject(structFieldData));
} else if (schema.getType() == Schema.Type.FIXED) {
GenericData.Fixed fixed = new GenericData.Fixed(schema, (byte[])fieldOI.getPrimitiveJavaObject(structFieldData));
GenericData.Fixed fixed = new GenericData.Fixed(schema, (byte[]) fieldOI.getPrimitiveJavaObject(structFieldData));
return fixed;
} else {
throw new HoodieException("Unexpected Avro schema for Binary TypeInfo: " + schema.getType());
}
case DECIMAL:
HiveDecimal dec = (HiveDecimal)fieldOI.getPrimitiveJavaObject(structFieldData);
LogicalTypes.Decimal decimal = (LogicalTypes.Decimal)schema.getLogicalType();
HiveDecimal dec = (HiveDecimal) fieldOI.getPrimitiveJavaObject(structFieldData);
LogicalTypes.Decimal decimal = (LogicalTypes.Decimal) schema.getLogicalType();
BigDecimal bd = new BigDecimal(dec.toString()).setScale(decimal.getScale());
return HoodieAvroUtils.DECIMAL_CONVERSION.toFixed(bd, schema, decimal);
case CHAR:
HiveChar ch = (HiveChar)fieldOI.getPrimitiveJavaObject(structFieldData);
HiveChar ch = (HiveChar) fieldOI.getPrimitiveJavaObject(structFieldData);
return new Utf8(ch.getStrippedValue());
case VARCHAR:
HiveVarchar vc = (HiveVarchar)fieldOI.getPrimitiveJavaObject(structFieldData);
HiveVarchar vc = (HiveVarchar) fieldOI.getPrimitiveJavaObject(structFieldData);
return new Utf8(vc.getValue());
case STRING:
String string = (String)fieldOI.getPrimitiveJavaObject(structFieldData);
String string = (String) fieldOI.getPrimitiveJavaObject(structFieldData);
return new Utf8(string);
case DATE:
return HoodieHiveUtils.getDays(structFieldData);
Expand Down Expand Up @@ -364,7 +364,7 @@ private Object serializeMap(MapTypeInfo typeInfo, MapObjectInspector fieldOI, Ob
ObjectInspector mapValueObjectInspector = fieldOI.getMapValueObjectInspector();
TypeInfo mapKeyTypeInfo = typeInfo.getMapKeyTypeInfo();
TypeInfo mapValueTypeInfo = typeInfo.getMapValueTypeInfo();
Map<?,?> map = fieldOI.getMap(structFieldData);
Map<?, ?> map = fieldOI.getMap(structFieldData);
Schema valueType = schema.getValueType();

Map<Object, Object> deserialized = new LinkedHashMap<Object, Object>(fieldOI.getMapSize(structFieldData));
Expand Down
Loading