Skip to content
Open
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,6 @@
package com.google.cloud.spark.bigquery;

import com.google.cloud.bigquery.Field;
import com.google.cloud.bigquery.Field.Mode;
import com.google.cloud.bigquery.FieldList;
import com.google.cloud.bigquery.LegacySQLTypeName;
import com.google.cloud.bigquery.Schema;
Expand Down Expand Up @@ -516,14 +515,12 @@ protected Field createBigQueryColumn(StructField sparkField, int depth) {
fieldType = LegacySQLTypeName.RECORD;
subFields =
FieldList.of(
buildMapTypeField(
"key", mapType.keyType(), sparkField.metadata(), Mode.REQUIRED, depth),
buildMapTypeField(
"value",
mapType.valueType(),
sparkField.metadata(),
mapType.valueContainsNull() ? Mode.NULLABLE : Mode.REQUIRED,
depth));
this.createBigQueryColumn(
new StructField("key", mapType.keyType(), false, Metadata.empty()), depth + 1),
Comment on lines +518 to +519

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

medium

Creating the StructField for the map key with nullable = false ensures that map keys are never null, matching BigQuery's requirement that repeated key/value record keys be non-null. However, it might be beneficial to add a comment explaining why nullable = false is explicitly set here, enhancing code readability.

this.createBigQueryColumn(
new StructField(
"value", mapType.valueType(), mapType.valueContainsNull(), Metadata.empty()),
Comment on lines +521 to +522

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

medium

The nullable property of the StructField for the map value is directly derived from mapType.valueContainsNull(). This correctly propagates the nullability of the map value type. Consider adding a comment to explicitly state this propagation for better code understanding.

depth + 1));
} else if (sparkType instanceof DecimalType) {
DecimalType decimalType = (DecimalType) sparkType;
int leftOfDotDigits = decimalType.precision() - decimalType.scale();
Expand Down Expand Up @@ -580,17 +577,6 @@ public static Optional<String> getDescriptionOrCommentOfField(
return marker;
}

/**
 * Builds a BigQuery {@link Field} for one component (key or value) of a Spark map type.
 *
 * <p>The Spark type is first mapped to a {@link LegacySQLTypeName}; when that mapping yields
 * RECORD, the nested struct is converted recursively so the resulting field carries its
 * sub-fields, otherwise the field is created with no sub-fields.
 *
 * @param fieldName name of the generated BigQuery column ("key" or "value")
 * @param sparkType Spark SQL type of the map component
 * @param metadata Spark field metadata consulted during type mapping
 * @param fieldMode mode of the generated column (REQUIRED for keys; NULLABLE or REQUIRED for
 *     values depending on {@code valueContainsNull})
 * @param depth current nesting depth, incremented for the recursive struct conversion
 * @return the constructed BigQuery field
 */
private Field buildMapTypeField(
    String fieldName, DataType sparkType, Metadata metadata, Mode fieldMode, int depth) {
  LegacySQLTypeName sqlType = toBigQueryType(sparkType, metadata);
  // Only RECORD columns carry nested fields; all other types take a null sub-field list.
  FieldList subFields =
      sqlType == LegacySQLTypeName.RECORD
          ? sparkToBigQueryFields((StructType) sparkType, depth + 1)
          : null;
  return createBigQueryFieldBuilder(fieldName, sqlType, fieldMode, subFields).build();
}

@VisibleForTesting
protected LegacySQLTypeName toBigQueryType(DataType elementType, Metadata metadata) {
Optional<LegacySQLTypeName> bigQueryType =
Expand Down