Add BigQuery Error Details Provider
psainics committed Nov 13, 2024
1 parent 601f62b commit 9d501d3
Showing 10 changed files with 202 additions and 49 deletions.
@@ -0,0 +1,30 @@
/*
* Copyright © 2024 Cask Data, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/

package io.cdap.plugin.gcp.bigquery.common;

import io.cdap.plugin.gcp.common.GCPErrorDetailsProvider;

/**
* A custom ErrorDetailsProvider for BigQuery plugins.
*/
public class BigQueryErrorDetailsProvider extends GCPErrorDetailsProvider {

@Override
protected String getExternalDocumentationLink() {
return "https://cloud.google.com/bigquery/docs/error-messages";
}
}
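The provider inherits all error-detail extraction from GCPErrorDetailsProvider and only overrides the documentation link. A minimal sketch of how it is registered from a plugin's prepareRun, mirroring the sink and source changes later in this diff (the sketch itself is not part of the commit):

// Sketch: registering the provider so the framework can translate runtime
// failures into detailed error messages for BigQuery plugins.
context.setErrorDetailsProvider(
    new ErrorDetailsProviderSpec(BigQueryErrorDetailsProvider.class.getName()));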
@@ -34,7 +34,9 @@
import io.cdap.cdap.etl.api.FailureCollector;
import io.cdap.cdap.etl.api.batch.BatchSink;
import io.cdap.cdap.etl.api.batch.BatchSinkContext;
import io.cdap.cdap.etl.api.exception.ErrorDetailsProviderSpec;
import io.cdap.plugin.common.Asset;
import io.cdap.plugin.gcp.bigquery.common.BigQueryErrorDetailsProvider;
import io.cdap.plugin.gcp.bigquery.sink.lib.BigQueryTableFieldSchema;
import io.cdap.plugin.gcp.bigquery.util.BigQueryConstants;
import io.cdap.plugin.gcp.bigquery.util.BigQueryTypeSize;
@@ -116,6 +118,8 @@ public final void prepareRun(BatchSinkContext context) throws Exception {
storage, bucket, bucketName,
config.getLocation(), cmekKeyName);
}
// set error details provider
context.setErrorDetailsProvider(new ErrorDetailsProviderSpec(BigQueryErrorDetailsProvider.class.getName()));
prepareRunInternal(context, bigQuery, bucketName);
}

@@ -124,9 +128,9 @@ public void onRunFinish(boolean succeeded, BatchSinkContext context) {
String gcsPath;
String bucket = getConfig().getBucket();
if (bucket == null) {
gcsPath = String.format("gs://%s", runUUID.toString());
gcsPath = String.format("gs://%s", runUUID);
} else {
gcsPath = String.format(gcsPathFormat, bucket, runUUID.toString());
gcsPath = String.format(gcsPathFormat, bucket, runUUID);
}
try {
BigQueryUtil.deleteTemporaryDirectory(baseConfiguration, gcsPath);
@@ -327,9 +331,8 @@ private void validateRecordDepth(@Nullable Schema schema, FailureCollector colle
*
* @return Hadoop configuration
*/
protected Configuration getOutputConfiguration() throws IOException {
Configuration configuration = new Configuration(baseConfiguration);
return configuration;
protected Configuration getOutputConfiguration() {
return new Configuration(baseConfiguration);
}

/**
@@ -61,6 +61,10 @@
import com.google.common.base.Strings;
import com.google.common.collect.Lists;
import io.cdap.cdap.api.data.format.StructuredRecord;
import io.cdap.cdap.api.exception.ErrorCategory;
import io.cdap.cdap.api.exception.ErrorType;
import io.cdap.cdap.api.exception.ErrorUtils;
import io.cdap.cdap.etl.api.exception.ErrorPhase;
import io.cdap.plugin.gcp.bigquery.sink.lib.BigQueryStrings;
import io.cdap.plugin.gcp.bigquery.source.BigQueryFactoryWithScopes;
import io.cdap.plugin.gcp.bigquery.util.BigQueryConstants;
@@ -103,6 +107,7 @@
*/
public class BigQueryOutputFormat extends ForwardingBigQueryFileOutputFormat<StructuredRecord, NullWritable> {
private static final Logger LOG = LoggerFactory.getLogger(BigQueryOutputFormat.class);
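// Format shared by the throw sites below: the first placeholder carries the ErrorPhase
// (e.g. VALIDATING_OUTPUT_SPECS or COMMITTING), the second the error message itself.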
private static final String errorMessageFormat = "Error occurred in the phase: '%s'. Error message: %s";

@Override
public RecordWriter<StructuredRecord, NullWritable> getRecordWriter(TaskAttemptContext taskAttemptContext)
@@ -165,19 +170,31 @@ public void checkOutputSpecs(JobContext job) throws FileAlreadyExistsException,
// Error if the output path already exists.
FileSystem outputFileSystem = outputPath.getFileSystem(conf);
if (outputFileSystem.exists(outputPath)) {
throw new IOException("The output path '" + outputPath + "' already exists.");
String errorMessage = String.format("The output path '%s' already exists.", outputPath);
throw ErrorUtils.getProgramFailureException(
new ErrorCategory(ErrorCategory.ErrorCategoryEnum.PLUGIN), errorMessage,
String.format(errorMessageFormat, ErrorPhase.VALIDATING_OUTPUT_SPECS, errorMessage), ErrorType.SYSTEM, true,
new IOException(errorMessage));
}

// Error if compression is set as there's mixed support in BigQuery.
if (FileOutputFormat.getCompressOutput(job)) {
throw new IOException("Compression isn't supported for this OutputFormat.");
String errorMessage = "Compression isn't supported for this OutputFormat.";
throw ErrorUtils.getProgramFailureException(
new ErrorCategory(ErrorCategory.ErrorCategoryEnum.PLUGIN), errorMessage,
String.format(errorMessageFormat, ErrorPhase.VALIDATING_OUTPUT_SPECS, errorMessage), ErrorType.SYSTEM, true,
new IOException(errorMessage));
}

// Error if unable to create a BigQuery helper.
try {
new BigQueryFactoryWithScopes(GCPUtils.BIGQUERY_SCOPES).getBigQueryHelper(conf);
} catch (GeneralSecurityException gse) {
throw new IOException("Failed to create BigQuery client", gse);
String errorMessage = "Failed to create BigQuery client";
throw ErrorUtils.getProgramFailureException(
new ErrorCategory(ErrorCategory.ErrorCategoryEnum.PLUGIN), errorMessage,
String.format(errorMessageFormat, ErrorPhase.VALIDATING_OUTPUT_SPECS, errorMessage), ErrorType.SYSTEM, true,
new IOException(errorMessage, gse));
}

// Let delegate process its checks.
@@ -208,7 +225,11 @@ public static class BigQueryOutputCommitter extends ForwardingBigQueryFileOutput
BigQueryFactory bigQueryFactory = new BigQueryFactoryWithScopes(GCPUtils.BIGQUERY_SCOPES);
this.bigQueryHelper = bigQueryFactory.getBigQueryHelper(context.getConfiguration());
} catch (GeneralSecurityException e) {
throw new IOException("Failed to create Bigquery client.", e);
String errorMessage = "Failed to create BigQuery client";
throw ErrorUtils.getProgramFailureException(
new ErrorCategory(ErrorCategory.ErrorCategoryEnum.PLUGIN), errorMessage,
String.format(errorMessageFormat, ErrorPhase.COMMITTING, errorMessage), ErrorType.SYSTEM, true,
new IOException(errorMessage, e));
}
}

@@ -266,7 +287,11 @@ public void commitJob(JobContext jobContext) throws IOException {
writeDisposition, sourceUris, partitionType, timePartitioningType, range, partitionByField,
requirePartitionFilter, clusteringOrderList, tableExists, jobLabelKeyValue, conf);
} catch (Exception e) {
throw new IOException("Failed to import GCS into BigQuery. ", e);
String errorMessage = "Failed to import GCS into BigQuery.";
throw ErrorUtils.getProgramFailureException(
new ErrorCategory(ErrorCategory.ErrorCategoryEnum.PLUGIN), errorMessage,
String.format(errorMessageFormat, ErrorPhase.COMMITTING, errorMessage), ErrorType.SYSTEM, true,
new IOException(errorMessage, e));
}

cleanup(jobContext);
@@ -573,19 +598,25 @@ private static void waitForJobCompletion(BigQueryHelper bigQueryHelper, String p
numOfErrors = errors.size();
}
// Only add first error message in the exception. For other errors user should look at BigQuery job logs.
throw new IOException(String.format("Error occurred while importing data to BigQuery '%s'." +
" There are total %s error(s) for BigQuery job %s. Please look at " +
"BigQuery job logs for more information.",
errorMessage, numOfErrors, jobReference.getJobId()));
String errorMessageException = String.format("Error occurred while importing data to BigQuery '%s'." +
" There are total %s error(s) for BigQuery job %s. Please look at " +
"BigQuery job logs for more information.",
errorMessage, numOfErrors, jobReference.getJobId());
throw ErrorUtils.getProgramFailureException(
new ErrorCategory(ErrorCategory.ErrorCategoryEnum.PLUGIN), errorMessageException,
String.format(errorMessageFormat, ErrorPhase.COMMITTING, errorMessageException), ErrorType.SYSTEM, true,
new IOException(errorMessageException));

}
} else {
long millisToWait = pollBackOff.nextBackOffMillis();
if (millisToWait == BackOff.STOP) {
throw new IOException(
String.format(
"Job %s failed to complete after %s millis.",
jobReference.getJobId(),
elapsedTime));
String errorMessage = String.format("Job %s failed to complete after %s millis.", jobReference.getJobId()
, elapsedTime);
throw ErrorUtils.getProgramFailureException(
new ErrorCategory(ErrorCategory.ErrorCategoryEnum.PLUGIN), errorMessage,
String.format(errorMessageFormat, ErrorPhase.COMMITTING, errorMessage), ErrorType.SYSTEM, true,
new IOException(errorMessage));
}
// Pause execution for the configured duration before polling job status again.
Thread.sleep(millisToWait);
@@ -621,8 +652,12 @@ private static Optional<TableSchema> getTableSchema(Configuration conf) throws I
TableSchema tableSchema = createTableSchemaFromFields(fieldsJson);
return Optional.of(tableSchema);
} catch (IOException e) {
throw new IOException(
"Unable to parse key '" + BigQueryConfiguration.OUTPUT_TABLE_SCHEMA.getKey() + "'.", e);
String errorMessage = String.format("Unable to parse key '%s'.",
BigQueryConfiguration.OUTPUT_TABLE_SCHEMA.getKey());
throw ErrorUtils.getProgramFailureException(
new ErrorCategory(ErrorCategory.ErrorCategoryEnum.PLUGIN), errorMessage,
String.format(errorMessageFormat, ErrorPhase.COMMITTING, errorMessage), ErrorType.SYSTEM, true,
new IOException(errorMessage, e));
}
}
return Optional.empty();
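All of the rewritten throw sites in this output format follow the same shape. The sketch below restates it once with a placeholder message; the per-argument comments describe how the call sites here use each parameter, not the ErrorUtils contract itself:

// Illustrative only — the message, phase, and cause vary per call site.
String errorMessage = "<short description of what failed>";
throw ErrorUtils.getProgramFailureException(
    new ErrorCategory(ErrorCategory.ErrorCategoryEnum.PLUGIN),              // plugin-owned failure
    errorMessage,                                                            // short message
    String.format(errorMessageFormat, ErrorPhase.COMMITTING, errorMessage),  // detailed message tagged with the phase (VALIDATING_OUTPUT_SPECS or COMMITTING here)
    ErrorType.SYSTEM,                                                        // classified as a system error
    true,                                                                    // same boolean passed at every call site in this commit
    new IOException(errorMessage));                                          // wrapped cause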
@@ -189,7 +189,7 @@ private static void writeSimpleTypes(JsonWriter writer, String name, boolean isA
} else if (jsonString.startsWith("[") && jsonString.endsWith("]")) {
writeJsonArrayToWriter(gson.fromJson(jsonString, JsonArray.class), writer);
} else {
throw new IllegalStateException(String.format("Expected value of Field '%s' to be a valid JSON " +
throw new IllegalArgumentException(String.format("Expected value of Field '%s' to be a valid JSON " +
"object or array.", name));
}
break;
@@ -20,6 +20,9 @@
import io.cdap.cdap.api.data.format.StructuredRecord;
import io.cdap.cdap.api.data.format.UnexpectedFormatException;
import io.cdap.cdap.api.data.schema.Schema;
import io.cdap.cdap.api.exception.ErrorCategory;
import io.cdap.cdap.api.exception.ErrorType;
import io.cdap.cdap.api.exception.ErrorUtils;
import io.cdap.plugin.common.RecordConverter;
import org.apache.avro.generic.GenericRecord;

@@ -90,11 +93,11 @@ protected Object convertField(Object field, Schema fieldSchema) throws IOExcepti
try {
LocalDateTime.parse(field.toString());
} catch (DateTimeParseException exception) {
throw new UnexpectedFormatException(
String.format("Datetime field with value '%s' is not in ISO-8601 format.",
fieldSchema.getDisplayName(),
field.toString()),
exception);
String errorMessage = String.format("Datetime field %s with value '%s' is not in ISO-8601 format.",
fieldSchema.getDisplayName(), field);
throw ErrorUtils.getProgramFailureException(
new ErrorCategory(ErrorCategory.ErrorCategoryEnum.PLUGIN, "DataError"),
errorMessage, exception.getMessage(), ErrorType.USER, true, exception);
}
//If properly formatted return the string
return field.toString();
@@ -110,7 +113,9 @@ protected Object convertField(Object field, Schema fieldSchema) throws IOExcepti
}
}
} catch (ArithmeticException e) {
throw new IOException("Field type %s has value that is too large." + fieldType);
throw ErrorUtils.getProgramFailureException(
new ErrorCategory(ErrorCategory.ErrorCategoryEnum.PLUGIN, "DataError"),
String.format("Field type %s has value that is too large.", fieldType),
e.getMessage(), ErrorType.USER, true, e);
}

// Complex types like maps and unions are not supported in BigQuery plugins.
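Unlike the output format, problems caused by the record data itself (an unparsable datetime, a numeric overflow) are tagged with a "DataError" subcategory and ErrorType.USER. A condensed sketch of that variant, with a placeholder message and the original parse or arithmetic exception as the cause:

// Data-shape problems are reported as USER errors under a "DataError" subcategory,
// keeping the original exception as the cause and its message as the detailed text.
throw ErrorUtils.getProgramFailureException(
    new ErrorCategory(ErrorCategory.ErrorCategoryEnum.PLUGIN, "DataError"),
    errorMessage,            // user-facing description of the offending value
    exception.getMessage(),  // underlying exception message as the detailed text
    ErrorType.USER, true, exception);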
@@ -48,9 +48,11 @@
import io.cdap.cdap.etl.api.batch.BatchSourceContext;
import io.cdap.cdap.etl.api.connector.Connector;
import io.cdap.cdap.etl.api.engine.sql.SQLEngineInput;
import io.cdap.cdap.etl.api.exception.ErrorDetailsProviderSpec;
import io.cdap.cdap.etl.api.validation.ValidationFailure;
import io.cdap.plugin.common.Asset;
import io.cdap.plugin.common.LineageRecorder;
import io.cdap.plugin.gcp.bigquery.common.BigQueryErrorDetailsProvider;
import io.cdap.plugin.gcp.bigquery.connector.BigQueryConnector;
import io.cdap.plugin.gcp.bigquery.sqlengine.BigQueryReadDataset;
import io.cdap.plugin.gcp.bigquery.sqlengine.BigQuerySQLEngine;
@@ -135,7 +137,17 @@ public void prepareRun(BatchSourceContext context) throws Exception {

// Create BigQuery client
String serviceAccount = config.getServiceAccount();
Credentials credentials = BigQuerySourceUtils.getCredentials(config.getConnection());
Credentials credentials = null;
try {
credentials = BigQuerySourceUtils.getCredentials(config.getConnection());
} catch (Exception e) {
String errorReason = "Unable to load service account credentials.";
collector.addFailure(String.format("%s %s", errorReason, e.getMessage()), null)
.withStacktrace(e.getStackTrace());
collector.getOrThrowException();
}


BigQuery bigQuery = GCPUtils.getBigQuery(config.getProject(), credentials, null);
Dataset dataset = bigQuery.getDataset(DatasetId.of(config.getDatasetProject(), config.getDataset()));
Storage storage = GCPUtils.getStorage(config.getProject(), credentials);
@@ -144,19 +156,30 @@ public void prepareRun(BatchSourceContext context) throws Exception {
bucketPath = UUID.randomUUID().toString();
CryptoKeyName cmekKeyName = CmekUtils.getCmekKey(config.cmekKey, context.getArguments().asMap(), collector);
collector.getOrThrowException();
configuration = BigQueryUtil.getBigQueryConfig(serviceAccount, config.getProject(), cmekKeyName,
config.getServiceAccountType());
try {
configuration = BigQueryUtil.getBigQueryConfig(serviceAccount, config.getProject(), cmekKeyName,
config.getServiceAccountType());
} catch (Exception e) {
String errorReason = "Failed to create BigQuery configuration.";
collector.addFailure(String.format("%s %s", errorReason, e.getMessage()), null)
.withStacktrace(e.getStackTrace());
collector.getOrThrowException();
}

String bucketName = BigQueryUtil.getStagingBucketName(context.getArguments().asMap(), null,
dataset, config.getBucket());

// Configure GCS Bucket to use
String bucket = BigQuerySourceUtils.getOrCreateBucket(configuration,
storage,
bucketName,
dataset,
bucketPath,
cmekKeyName);
String bucket = null;
try {
bucket = BigQuerySourceUtils.getOrCreateBucket(configuration, storage, bucketName, dataset, bucketPath,
cmekKeyName);
} catch (Exception e) {
String errorReason = "Failed to create bucket.";
collector.addFailure(String.format("%s %s", errorReason, e.getMessage()), null)
.withStacktrace(e.getStackTrace());
collector.getOrThrowException();
}

// Configure Service account credentials
BigQuerySourceUtils.configureServiceAccount(configuration, config.getConnection());
@@ -166,10 +189,17 @@ public void prepareRun(BatchSourceContext context) throws Exception {

// Configure BigQuery input format.
String temporaryGcsPath = BigQuerySourceUtils.getTemporaryGcsPath(bucket, bucketPath, bucketPath);
BigQuerySourceUtils.configureBigQueryInput(configuration,
DatasetId.of(config.getDatasetProject(), config.getDataset()),
config.getTable(),
temporaryGcsPath);
try {
BigQuerySourceUtils.configureBigQueryInput(configuration,
DatasetId.of(config.getDatasetProject(), config.getDataset()),
config.getTable(),
temporaryGcsPath);
} catch (Exception e) {
String errorReason = "Failed to configure BigQuery input.";
collector.addFailure(String.format("%s %s", errorReason, e.getMessage()), null)
.withStacktrace(e.getStackTrace());
collector.getOrThrowException();
}

// Both emitLineage and setOutputFormat internally try to create an external dataset if it does not already exist.
// We call emitLineage first since it creates the dataset with the schema.
@@ -178,6 +208,10 @@ public void prepareRun(BatchSourceContext context) throws Exception {
.setFqn(BigQueryUtil.getFQN(config.getDatasetProject(), config.getDataset(), config.getTable()))
.setLocation(dataset.getLocation())
.build();

// set error details provider
context.setErrorDetailsProvider(new ErrorDetailsProviderSpec(BigQueryErrorDetailsProvider.class.getName()));

emitLineage(context, configuredSchema, sourceTableType, config.getTable(), asset);
setInputFormat(context, configuredSchema);
}
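On the source side, each setup call in prepareRun is wrapped so that failures flow through the FailureCollector instead of being thrown directly. A condensed sketch of the repeated pattern; doSetupStep() is a placeholder for any of the wrapped calls above (credential loading, configuration creation, bucket creation, input configuration):

try {
  doSetupStep(); // placeholder for the wrapped setup call
} catch (Exception e) {
  String errorReason = "<short reason for this step>";
  // Record the failure with its stack trace, then convert collected failures into one exception.
  collector.addFailure(String.format("%s %s", errorReason, e.getMessage()), null)
      .withStacktrace(e.getStackTrace());
  collector.getOrThrowException();
}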