
Commit 60c479d

Add BigQuery Error Details Provider
1 parent 601f62b commit 60c479d


9 files changed: +200 -47 lines

src/main/java/io/cdap/plugin/gcp/bigquery/common/BigQueryErrorDetailsProvider.java

Lines changed: 30 additions & 0 deletions
@@ -0,0 +1,30 @@
+/*
+ * Copyright © 2024 Cask Data, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package io.cdap.plugin.gcp.bigquery.common;
+
+import io.cdap.plugin.gcp.common.GCPErrorDetailsProvider;
+
+/**
+ * A custom ErrorDetailsProvider for BigQuery plugins.
+ */
+public class BigQueryErrorDetailsProvider extends GCPErrorDetailsProvider {
+
+  @Override
+  protected String getExternalDocumentationLink() {
+    return "https://cloud.google.com/bigquery/docs/error-messages";
+  }
+}
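
The provider is consumed by name rather than by instance: the sink and source hunks below register it at prepare time, presumably so the framework can instantiate it wherever the error is handled. The registration call, copied from those hunks (context being the BatchSinkContext or BatchSourceContext passed to prepareRun):

// As wired into AbstractBigQuerySink.prepareRun and BigQuerySource.prepareRun below.
context.setErrorDetailsProvider(
    new ErrorDetailsProviderSpec(BigQueryErrorDetailsProvider.class.getName()));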

src/main/java/io/cdap/plugin/gcp/bigquery/sink/AbstractBigQuerySink.java

Lines changed: 8 additions & 5 deletions
@@ -34,7 +34,9 @@
 import io.cdap.cdap.etl.api.FailureCollector;
 import io.cdap.cdap.etl.api.batch.BatchSink;
 import io.cdap.cdap.etl.api.batch.BatchSinkContext;
+import io.cdap.cdap.etl.api.exception.ErrorDetailsProviderSpec;
 import io.cdap.plugin.common.Asset;
+import io.cdap.plugin.gcp.bigquery.common.BigQueryErrorDetailsProvider;
 import io.cdap.plugin.gcp.bigquery.sink.lib.BigQueryTableFieldSchema;
 import io.cdap.plugin.gcp.bigquery.util.BigQueryConstants;
 import io.cdap.plugin.gcp.bigquery.util.BigQueryTypeSize;
@@ -116,6 +118,8 @@ public final void prepareRun(BatchSinkContext context) throws Exception {
                       storage, bucket, bucketName,
                       config.getLocation(), cmekKeyName);
     }
+    // set error details provider
+    context.setErrorDetailsProvider(new ErrorDetailsProviderSpec(BigQueryErrorDetailsProvider.class.getName()));
     prepareRunInternal(context, bigQuery, bucketName);
   }

@@ -124,9 +128,9 @@ public void onRunFinish(boolean succeeded, BatchSinkContext context) {
     String gcsPath;
     String bucket = getConfig().getBucket();
     if (bucket == null) {
-      gcsPath = String.format("gs://%s", runUUID.toString());
+      gcsPath = String.format("gs://%s", runUUID);
     } else {
-      gcsPath = String.format(gcsPathFormat, bucket, runUUID.toString());
+      gcsPath = String.format(gcsPathFormat, bucket, runUUID);
     }
     try {
       BigQueryUtil.deleteTemporaryDirectory(baseConfiguration, gcsPath);
@@ -327,9 +331,8 @@ private void validateRecordDepth(@Nullable Schema schema, FailureCollector colle
    *
    * @return Hadoop configuration
    */
-  protected Configuration getOutputConfiguration() throws IOException {
-    Configuration configuration = new Configuration(baseConfiguration);
-    return configuration;
+  protected Configuration getOutputConfiguration() {
+    return new Configuration(baseConfiguration);
   }

   /**

src/main/java/io/cdap/plugin/gcp/bigquery/sink/BigQueryOutputFormat.java

Lines changed: 51 additions & 16 deletions
@@ -61,6 +61,10 @@
 import com.google.common.base.Strings;
 import com.google.common.collect.Lists;
 import io.cdap.cdap.api.data.format.StructuredRecord;
+import io.cdap.cdap.api.exception.ErrorCategory;
+import io.cdap.cdap.api.exception.ErrorType;
+import io.cdap.cdap.api.exception.ErrorUtils;
+import io.cdap.cdap.etl.api.exception.ErrorPhase;
 import io.cdap.plugin.gcp.bigquery.sink.lib.BigQueryStrings;
 import io.cdap.plugin.gcp.bigquery.source.BigQueryFactoryWithScopes;
 import io.cdap.plugin.gcp.bigquery.util.BigQueryConstants;
@@ -103,6 +107,7 @@
  */
 public class BigQueryOutputFormat extends ForwardingBigQueryFileOutputFormat<StructuredRecord, NullWritable> {
   private static final Logger LOG = LoggerFactory.getLogger(BigQueryOutputFormat.class);
+  private static final String errorMessageFormat = "Error occurred in the phase: '%s'. Error message: %s";

   @Override
   public RecordWriter<StructuredRecord, NullWritable> getRecordWriter(TaskAttemptContext taskAttemptContext)
@@ -165,19 +170,31 @@ public void checkOutputSpecs(JobContext job) throws FileAlreadyExistsException,
     // Error if the output path already exists.
     FileSystem outputFileSystem = outputPath.getFileSystem(conf);
     if (outputFileSystem.exists(outputPath)) {
-      throw new IOException("The output path '" + outputPath + "' already exists.");
+      String errorMessage = String.format("The output path '%s' already exists.", outputPath);
+      throw ErrorUtils.getProgramFailureException(
+        new ErrorCategory(ErrorCategory.ErrorCategoryEnum.PLUGIN), errorMessage,
+        String.format(errorMessageFormat, ErrorPhase.VALIDATING_OUTPUT_SPECS, errorMessage), ErrorType.SYSTEM, true,
+        new IOException(errorMessage));
     }

     // Error if compression is set as there's mixed support in BigQuery.
     if (FileOutputFormat.getCompressOutput(job)) {
-      throw new IOException("Compression isn't supported for this OutputFormat.");
+      String errorMessage = "Compression isn't supported for this OutputFormat.";
+      throw ErrorUtils.getProgramFailureException(
+        new ErrorCategory(ErrorCategory.ErrorCategoryEnum.PLUGIN), errorMessage,
+        String.format(errorMessageFormat, ErrorPhase.VALIDATING_OUTPUT_SPECS, errorMessage), ErrorType.SYSTEM, true,
+        new IOException(errorMessage));
     }

     // Error if unable to create a BigQuery helper.
     try {
       new BigQueryFactoryWithScopes(GCPUtils.BIGQUERY_SCOPES).getBigQueryHelper(conf);
     } catch (GeneralSecurityException gse) {
-      throw new IOException("Failed to create BigQuery client", gse);
+      String errorMessage = "Failed to create BigQuery client";
+      throw ErrorUtils.getProgramFailureException(
+        new ErrorCategory(ErrorCategory.ErrorCategoryEnum.PLUGIN), errorMessage,
+        String.format(errorMessageFormat, ErrorPhase.VALIDATING_OUTPUT_SPECS, errorMessage), ErrorType.SYSTEM, true,
+        new IOException(errorMessage, gse));
     }

     // Let delegate process its checks.
@@ -208,7 +225,11 @@ public static class BigQueryOutputCommitter extends ForwardingBigQueryFileOutput
       BigQueryFactory bigQueryFactory = new BigQueryFactoryWithScopes(GCPUtils.BIGQUERY_SCOPES);
       this.bigQueryHelper = bigQueryFactory.getBigQueryHelper(context.getConfiguration());
     } catch (GeneralSecurityException e) {
-      throw new IOException("Failed to create Bigquery client.", e);
+      String errorMessage = "Failed to create BigQuery client";
+      throw ErrorUtils.getProgramFailureException(
+        new ErrorCategory(ErrorCategory.ErrorCategoryEnum.PLUGIN), errorMessage,
+        String.format(errorMessageFormat, ErrorPhase.COMMITTING, errorMessage), ErrorType.SYSTEM, true,
+        new IOException(errorMessage, e));
     }
   }

@@ -266,7 +287,11 @@ public void commitJob(JobContext jobContext) throws IOException {
         writeDisposition, sourceUris, partitionType, timePartitioningType, range, partitionByField,
         requirePartitionFilter, clusteringOrderList, tableExists, jobLabelKeyValue, conf);
     } catch (Exception e) {
-      throw new IOException("Failed to import GCS into BigQuery. ", e);
+      String errorMessage = "Failed to import GCS into BigQuery.";
+      throw ErrorUtils.getProgramFailureException(
+        new ErrorCategory(ErrorCategory.ErrorCategoryEnum.PLUGIN), errorMessage,
+        String.format(errorMessageFormat, ErrorPhase.COMMITTING, errorMessage), ErrorType.SYSTEM, true,
+        new IOException(errorMessage, e));
     }

     cleanup(jobContext);
@@ -573,19 +598,25 @@ private static void waitForJobCompletion(BigQueryHelper bigQueryHelper, String p
           numOfErrors = errors.size();
         }
         // Only add first error message in the exception. For other errors user should look at BigQuery job logs.
-        throw new IOException(String.format("Error occurred while importing data to BigQuery '%s'." +
-                                              " There are total %s error(s) for BigQuery job %s. Please look at " +
-                                              "BigQuery job logs for more information.",
-                                            errorMessage, numOfErrors, jobReference.getJobId()));
+        String errorMessageException = String.format("Error occurred while importing data to BigQuery '%s'." +
                                                        " There are total %s error(s) for BigQuery job %s. Please look at " +
+                                                       "BigQuery job logs for more information.",
+                                                     errorMessage, numOfErrors, jobReference.getJobId());
+        throw ErrorUtils.getProgramFailureException(
+          new ErrorCategory(ErrorCategory.ErrorCategoryEnum.PLUGIN), errorMessageException,
+          String.format(errorMessageFormat, ErrorPhase.COMMITTING, errorMessageException), ErrorType.SYSTEM, true,
+          new IOException(errorMessageException));
+
       }
     } else {
       long millisToWait = pollBackOff.nextBackOffMillis();
       if (millisToWait == BackOff.STOP) {
-        throw new IOException(
-          String.format(
-            "Job %s failed to complete after %s millis.",
-            jobReference.getJobId(),
-            elapsedTime));
+        String errorMessage = String.format("Job %s failed to complete after %s millis.", jobReference.getJobId(),
+                                            elapsedTime);
+        throw ErrorUtils.getProgramFailureException(
+          new ErrorCategory(ErrorCategory.ErrorCategoryEnum.PLUGIN), errorMessage,
+          String.format(errorMessageFormat, ErrorPhase.COMMITTING, errorMessage), ErrorType.SYSTEM, true,
+          new IOException(errorMessage));
       }
       // Pause execution for the configured duration before polling job status again.
       Thread.sleep(millisToWait);
@@ -621,8 +652,12 @@ private static Optional<TableSchema> getTableSchema(Configuration conf) throws I
       TableSchema tableSchema = createTableSchemaFromFields(fieldsJson);
       return Optional.of(tableSchema);
     } catch (IOException e) {
-      throw new IOException(
-        "Unable to parse key '" + BigQueryConfiguration.OUTPUT_TABLE_SCHEMA.getKey() + "'.", e);
+      String errorMessage = String.format("Unable to parse key '%s'.",
                                           BigQueryConfiguration.OUTPUT_TABLE_SCHEMA.getKey());
+      throw ErrorUtils.getProgramFailureException(
+        new ErrorCategory(ErrorCategory.ErrorCategoryEnum.PLUGIN), errorMessage,
+        String.format(errorMessageFormat, ErrorPhase.COMMITTING, errorMessage), ErrorType.SYSTEM, true,
+        new IOException(errorMessage, e));
     }
   }
   return Optional.empty();
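
Every new throw site in this file repeats the same five-argument ErrorUtils call. A hypothetical helper, not part of this commit, sketches how the pattern could be factored out; the ErrorCategory, ErrorType, and ErrorPhase usages are copied from the hunks above, while the ProgramFailureException return type is an assumption inferred from the throw sites:

import io.cdap.cdap.api.exception.ErrorCategory;
import io.cdap.cdap.api.exception.ErrorType;
import io.cdap.cdap.api.exception.ErrorUtils;
import io.cdap.cdap.api.exception.ProgramFailureException;
import io.cdap.cdap.etl.api.exception.ErrorPhase;

import java.io.IOException;

// Hypothetical utility; name and location are illustrative only.
final class BigQueryFailures {
  private static final String ERROR_MESSAGE_FORMAT =
      "Error occurred in the phase: '%s'. Error message: %s";

  private BigQueryFailures() { }

  // Builds the same ProgramFailureException thrown at each site in the hunks above.
  static ProgramFailureException of(ErrorPhase phase, String errorMessage, Throwable cause) {
    return ErrorUtils.getProgramFailureException(
        new ErrorCategory(ErrorCategory.ErrorCategoryEnum.PLUGIN), errorMessage,
        String.format(ERROR_MESSAGE_FORMAT, phase, errorMessage), ErrorType.SYSTEM, true,
        new IOException(errorMessage, cause));
  }
}

Each site would then reduce to, for example: throw BigQueryFailures.of(ErrorPhase.COMMITTING, "Failed to import GCS into BigQuery.", e);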

src/main/java/io/cdap/plugin/gcp/bigquery/sink/BigQueryRecordToJson.java

Lines changed: 1 addition & 1 deletion
@@ -189,7 +189,7 @@ private static void writeSimpleTypes(JsonWriter writer, String name, boolean isA
       } else if (jsonString.startsWith("[") && jsonString.endsWith("]")) {
         writeJsonArrayToWriter(gson.fromJson(jsonString, JsonArray.class), writer);
       } else {
-        throw new IllegalStateException(String.format("Expected value of Field '%s' to be a valid JSON " +
+        throw new IllegalArgumentException(String.format("Expected value of Field '%s' to be a valid JSON " +
                                                          "object or array.", name));
       }
       break;
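
A minimal standalone sketch of the case this branch rejects, with a hypothetical field name and value; the object-shaped check is inferred from code outside the hunk, since only the array branch is visible above:

// Hypothetical values; mirrors the validation shown in the hunk above.
String name = "payload";
String jsonString = "not-json";  // neither a JSON object nor a JSON array
boolean looksLikeObject = jsonString.startsWith("{") && jsonString.endsWith("}");
boolean looksLikeArray = jsonString.startsWith("[") && jsonString.endsWith("]");
if (!looksLikeObject && !looksLikeArray) {
  // The commit narrows this from IllegalStateException to IllegalArgumentException,
  // signalling that the fault lies in the supplied value.
  throw new IllegalArgumentException(String.format(
      "Expected value of Field '%s' to be a valid JSON object or array.", name));
}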

src/main/java/io/cdap/plugin/gcp/bigquery/source/BigQueryAvroToStructuredTransformer.java

Lines changed: 11 additions & 6 deletions
@@ -20,6 +20,9 @@
 import io.cdap.cdap.api.data.format.StructuredRecord;
 import io.cdap.cdap.api.data.format.UnexpectedFormatException;
 import io.cdap.cdap.api.data.schema.Schema;
+import io.cdap.cdap.api.exception.ErrorCategory;
+import io.cdap.cdap.api.exception.ErrorType;
+import io.cdap.cdap.api.exception.ErrorUtils;
 import io.cdap.plugin.common.RecordConverter;
 import org.apache.avro.generic.GenericRecord;

@@ -90,11 +93,11 @@ protected Object convertField(Object field, Schema fieldSchema) throws IOExcepti
       try {
         LocalDateTime.parse(field.toString());
       } catch (DateTimeParseException exception) {
-        throw new UnexpectedFormatException(
-          String.format("Datetime field with value '%s' is not in ISO-8601 format.",
-                        fieldSchema.getDisplayName(),
-                        field.toString()),
-          exception);
+        String errorMessage = String.format("Datetime field %s with value '%s' is not in ISO-8601 format.",
+                                            fieldSchema.getDisplayName(), field);
+        throw ErrorUtils.getProgramFailureException(
+          new ErrorCategory(ErrorCategory.ErrorCategoryEnum.PLUGIN, "DataError"),
+          errorMessage, exception.getMessage(), ErrorType.USER, true, exception);
       }
       //If properly formatted return the string
       return field.toString();
@@ -110,7 +113,9 @@ protected Object convertField(Object field, Schema fieldSchema) throws IOExcepti
         }
       }
     } catch (ArithmeticException e) {
-      throw new IOException("Field type %s has value that is too large." + fieldType);
+      throw ErrorUtils.getProgramFailureException(new ErrorCategory(ErrorCategory.ErrorCategoryEnum.PLUGIN,
                                                                     "DataError"),
+        "Field type %s has value that is too large.", e.getMessage(), ErrorType.USER, true, e);
     }

     // Complex types like maps and unions are not supported in BigQuery plugins.
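
For the datetime branch, a runnable sketch of the underlying check being wrapped, using a hypothetical input value; the plugin applies the same LocalDateTime.parse validation shown in the hunk above:

import java.time.LocalDateTime;
import java.time.format.DateTimeParseException;

public class DatetimeCheckSketch {
  public static void main(String[] args) {
    String value = "2024-02-30T25:61:00";  // hypothetical; not valid ISO-8601
    try {
      LocalDateTime.parse(value);
    } catch (DateTimeParseException exception) {
      // The transformer wraps this as a USER-typed "DataError" via
      // ErrorUtils.getProgramFailureException, as in the hunk above.
      System.out.println("Rejected: " + exception.getMessage());
    }
  }
}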

src/main/java/io/cdap/plugin/gcp/bigquery/source/BigQuerySource.java

Lines changed: 47 additions & 13 deletions
@@ -48,9 +48,11 @@
 import io.cdap.cdap.etl.api.batch.BatchSourceContext;
 import io.cdap.cdap.etl.api.connector.Connector;
 import io.cdap.cdap.etl.api.engine.sql.SQLEngineInput;
+import io.cdap.cdap.etl.api.exception.ErrorDetailsProviderSpec;
 import io.cdap.cdap.etl.api.validation.ValidationFailure;
 import io.cdap.plugin.common.Asset;
 import io.cdap.plugin.common.LineageRecorder;
+import io.cdap.plugin.gcp.bigquery.common.BigQueryErrorDetailsProvider;
 import io.cdap.plugin.gcp.bigquery.connector.BigQueryConnector;
 import io.cdap.plugin.gcp.bigquery.sqlengine.BigQueryReadDataset;
 import io.cdap.plugin.gcp.bigquery.sqlengine.BigQuerySQLEngine;
@@ -135,7 +137,17 @@ public void prepareRun(BatchSourceContext context) throws Exception {

     // Create BigQuery client
     String serviceAccount = config.getServiceAccount();
-    Credentials credentials = BigQuerySourceUtils.getCredentials(config.getConnection());
+    Credentials credentials = null;
+    try {
+      credentials = BigQuerySourceUtils.getCredentials(config.getConnection());
+    } catch (Exception e) {
+      String errorReason = "Unable to load service account credentials.";
+      collector.addFailure(String.format("%s %s", errorReason, e.getMessage()), null)
+        .withStacktrace(e.getStackTrace());
+      collector.getOrThrowException();
+    }
+
+
     BigQuery bigQuery = GCPUtils.getBigQuery(config.getProject(), credentials, null);
     Dataset dataset = bigQuery.getDataset(DatasetId.of(config.getDatasetProject(), config.getDataset()));
     Storage storage = GCPUtils.getStorage(config.getProject(), credentials);
@@ -144,19 +156,30 @@ public void prepareRun(BatchSourceContext context) throws Exception {
     bucketPath = UUID.randomUUID().toString();
     CryptoKeyName cmekKeyName = CmekUtils.getCmekKey(config.cmekKey, context.getArguments().asMap(), collector);
     collector.getOrThrowException();
-    configuration = BigQueryUtil.getBigQueryConfig(serviceAccount, config.getProject(), cmekKeyName,
-                                                   config.getServiceAccountType());
+    try {
+      configuration = BigQueryUtil.getBigQueryConfig(serviceAccount, config.getProject(), cmekKeyName,
+                                                     config.getServiceAccountType());
+    } catch (Exception e) {
+      String errorReason = "Failed to create BigQuery configuration.";
+      collector.addFailure(String.format("%s %s", errorReason, e.getMessage()), null)
+        .withStacktrace(e.getStackTrace());
+      collector.getOrThrowException();
+    }

     String bucketName = BigQueryUtil.getStagingBucketName(context.getArguments().asMap(), null,
                                                           dataset, config.getBucket());

     // Configure GCS Bucket to use
-    String bucket = BigQuerySourceUtils.getOrCreateBucket(configuration,
-                                                          storage,
-                                                          bucketName,
-                                                          dataset,
-                                                          bucketPath,
-                                                          cmekKeyName);
+    String bucket = null;
+    try {
+      bucket = BigQuerySourceUtils.getOrCreateBucket(configuration, storage, bucketName, dataset, bucketPath,
+                                                     cmekKeyName);
+    } catch (Exception e) {
+      String errorReason = "Failed to create bucket.";
+      collector.addFailure(String.format("%s %s", errorReason, e.getMessage()), null)
+        .withStacktrace(e.getStackTrace());
+      collector.getOrThrowException();
+    }

     // Configure Service account credentials
     BigQuerySourceUtils.configureServiceAccount(configuration, config.getConnection());
@@ -166,10 +189,17 @@ public void prepareRun(BatchSourceContext context) throws Exception {

     // Configure BigQuery input format.
     String temporaryGcsPath = BigQuerySourceUtils.getTemporaryGcsPath(bucket, bucketPath, bucketPath);
-    BigQuerySourceUtils.configureBigQueryInput(configuration,
-                                               DatasetId.of(config.getDatasetProject(), config.getDataset()),
-                                               config.getTable(),
-                                               temporaryGcsPath);
+    try {
+      BigQuerySourceUtils.configureBigQueryInput(configuration,
+                                                 DatasetId.of(config.getDatasetProject(), config.getDataset()),
+                                                 config.getTable(),
+                                                 temporaryGcsPath);
+    } catch (Exception e) {
+      String errorReason = "Failed to configure BigQuery input.";
+      collector.addFailure(String.format("%s %s", errorReason, e.getMessage()), null)
+        .withStacktrace(e.getStackTrace());
+      collector.getOrThrowException();
+    }

     // Both emitLineage and setOutputFormat internally try to create an external dataset if it does not already exists.
     // We call emitLineage before since it creates the dataset with schema.
@@ -178,6 +208,10 @@ public void prepareRun(BatchSourceContext context) throws Exception {
       .setFqn(BigQueryUtil.getFQN(config.getDatasetProject(), config.getDataset(), config.getTable()))
       .setLocation(dataset.getLocation())
       .build();
+
+    // set error details provider
+    context.setErrorDetailsProvider(new ErrorDetailsProviderSpec(BigQueryErrorDetailsProvider.class.getName()));
+
     emitLineage(context, configuredSchema, sourceTableType, config.getTable(), asset);
     setInputFormat(context, configuredSchema);
   }
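
The four try/catch blocks added to prepareRun all follow one shape: attempt a step, record any exception on the FailureCollector, then abort via getOrThrowException(). A hypothetical generic wrapper, not part of this commit, captures the pattern; the addFailure, withStacktrace, and getOrThrowException calls are exactly those used in the hunks above:

import io.cdap.cdap.etl.api.FailureCollector;

import java.util.concurrent.Callable;

// Hypothetical utility; name is illustrative only.
final class PrepareRunSteps {
  private PrepareRunSteps() { }

  static <T> T runOrCollect(FailureCollector collector, String errorReason, Callable<T> step) {
    try {
      return step.call();
    } catch (Exception e) {
      collector.addFailure(String.format("%s %s", errorReason, e.getMessage()), null)
          .withStacktrace(e.getStackTrace());
      // Throws a ValidationException carrying the failure recorded above.
      collector.getOrThrowException();
      return null;  // unreachable once a failure has been recorded
    }
  }
}

The credentials lookup above would then read: Credentials credentials = PrepareRunSteps.runOrCollect(collector, "Unable to load service account credentials.", () -> BigQuerySourceUtils.getCredentials(config.getConnection()));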
