diff --git a/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml b/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml
index 9144f7be65d50..507ddbb3b2abb 100644
--- a/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml
+++ b/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml
@@ -40,7 +40,7 @@
- name: BigQuery
destinationDefinitionId: 22f6c74f-5699-40ff-833c-4a879ea40133
dockerRepository: airbyte/destination-bigquery
- dockerImageTag: 1.2.9
+ dockerImageTag: 1.2.11
documentationUrl: https://docs.airbyte.com/integrations/destinations/bigquery
icon: bigquery.svg
normalizationConfig:
@@ -58,7 +58,7 @@
- name: BigQuery (denormalized typed struct)
destinationDefinitionId: 079d5540-f236-4294-ba7c-ade8fd918496
dockerRepository: airbyte/destination-bigquery-denormalized
- dockerImageTag: 1.2.10
+ dockerImageTag: 1.2.11
documentationUrl: https://docs.airbyte.com/integrations/destinations/bigquery
icon: bigquery.svg
resourceRequirements:
diff --git a/airbyte-config/init/src/main/resources/seed/destination_specs.yaml b/airbyte-config/init/src/main/resources/seed/destination_specs.yaml
index 246ca8661f1be..7473ae8005e43 100644
--- a/airbyte-config/init/src/main/resources/seed/destination_specs.yaml
+++ b/airbyte-config/init/src/main/resources/seed/destination_specs.yaml
@@ -621,7 +621,7 @@
supported_destination_sync_modes:
- "overwrite"
- "append"
-- dockerImage: "airbyte/destination-bigquery:1.2.9"
+- dockerImage: "airbyte/destination-bigquery:1.2.11"
spec:
documentationUrl: "https://docs.airbyte.com/integrations/destinations/bigquery"
connectionSpecification:
@@ -831,7 +831,7 @@
- "overwrite"
- "append"
- "append_dedup"
-- dockerImage: "airbyte/destination-bigquery-denormalized:1.2.10"
+- dockerImage: "airbyte/destination-bigquery-denormalized:1.2.11"
spec:
documentationUrl: "https://docs.airbyte.com/integrations/destinations/bigquery"
connectionSpecification:
diff --git a/airbyte-integrations/connectors/destination-bigquery-denormalized/Dockerfile b/airbyte-integrations/connectors/destination-bigquery-denormalized/Dockerfile
index 1bdb65e603efc..1cfd9ee045d5d 100644
--- a/airbyte-integrations/connectors/destination-bigquery-denormalized/Dockerfile
+++ b/airbyte-integrations/connectors/destination-bigquery-denormalized/Dockerfile
@@ -17,5 +17,5 @@ ENV ENABLE_SENTRY true
COPY --from=build /airbyte /airbyte
-LABEL io.airbyte.version=1.2.10
+LABEL io.airbyte.version=1.2.11
LABEL io.airbyte.name=airbyte/destination-bigquery-denormalized
diff --git a/airbyte-integrations/connectors/destination-bigquery/Dockerfile b/airbyte-integrations/connectors/destination-bigquery/Dockerfile
index d06f311b4e60f..f79d950d1d7f0 100644
--- a/airbyte-integrations/connectors/destination-bigquery/Dockerfile
+++ b/airbyte-integrations/connectors/destination-bigquery/Dockerfile
@@ -17,5 +17,5 @@ ENV ENABLE_SENTRY true
COPY --from=build /airbyte /airbyte
-LABEL io.airbyte.version=1.2.9
+LABEL io.airbyte.version=1.2.11
LABEL io.airbyte.name=airbyte/destination-bigquery
diff --git a/airbyte-integrations/connectors/destination-bigquery/src/main/java/io/airbyte/integrations/destination/bigquery/BigQueryRecordConsumer.java b/airbyte-integrations/connectors/destination-bigquery/src/main/java/io/airbyte/integrations/destination/bigquery/BigQueryRecordConsumer.java
index babda88d3b8de..72c62ad9765b4 100644
--- a/airbyte-integrations/connectors/destination-bigquery/src/main/java/io/airbyte/integrations/destination/bigquery/BigQueryRecordConsumer.java
+++ b/airbyte-integrations/connectors/destination-bigquery/src/main/java/io/airbyte/integrations/destination/bigquery/BigQueryRecordConsumer.java
@@ -45,8 +45,8 @@ protected void startTracked() {
* Processes STATE and RECORD {@link AirbyteMessage} with all else logged as unexpected
*
*
* For STATE messages emit messages back to the platform
- * For RECORD messages upload message to associated Airbyte Stream. This means that RECORDS will be associated with their respective streams when
- * more than one record exists
+ * For RECORD messages upload message to associated Airbyte Stream. This means that RECORDS will
+ * be associated with their respective streams when more than one record exists
*
* @param message {@link AirbyteMessage} to be processed
*/
@@ -66,7 +66,8 @@ public void acceptTracked(final AirbyteMessage message) {
}
/**
- * Processes {@link io.airbyte.protocol.models.AirbyteRecordMessage} by writing Airbyte stream data to Big Query Writer
+ * Processes {@link io.airbyte.protocol.models.AirbyteRecordMessage} by writing Airbyte stream data
+ * to Big Query Writer
*
* @param message record to be written
*/
diff --git a/airbyte-integrations/connectors/destination-bigquery/src/main/java/io/airbyte/integrations/destination/bigquery/BigQueryUtils.java b/airbyte-integrations/connectors/destination-bigquery/src/main/java/io/airbyte/integrations/destination/bigquery/BigQueryUtils.java
index d0478fb6856ac..d5b60af7a5483 100644
--- a/airbyte-integrations/connectors/destination-bigquery/src/main/java/io/airbyte/integrations/destination/bigquery/BigQueryUtils.java
+++ b/airbyte-integrations/connectors/destination-bigquery/src/main/java/io/airbyte/integrations/destination/bigquery/BigQueryUtils.java
@@ -125,7 +125,7 @@ public static Dataset getOrCreateDataset(final BigQuery bigquery, final String d
}
public static void checkHasCreateAndDeleteDatasetRole(final BigQuery bigquery, final String datasetId, final String datasetLocation) {
- final String tmpTestDatasetId = datasetId + CHECK_TEST_DATASET_SUFFIX + System.currentTimeMillis();
+ final String tmpTestDatasetId = datasetId + CHECK_TEST_DATASET_SUFFIX + System.currentTimeMillis();
final DatasetInfo datasetInfo = DatasetInfo.newBuilder(tmpTestDatasetId).setLocation(datasetLocation).build();
bigquery.create(datasetInfo);
diff --git a/airbyte-integrations/connectors/destination-bigquery/src/main/java/io/airbyte/integrations/destination/bigquery/uploader/BigQueryUploaderFactory.java b/airbyte-integrations/connectors/destination-bigquery/src/main/java/io/airbyte/integrations/destination/bigquery/uploader/BigQueryUploaderFactory.java
index 7925dcdb586b3..b9991140893f8 100644
--- a/airbyte-integrations/connectors/destination-bigquery/src/main/java/io/airbyte/integrations/destination/bigquery/uploader/BigQueryUploaderFactory.java
+++ b/airbyte-integrations/connectors/destination-bigquery/src/main/java/io/airbyte/integrations/destination/bigquery/uploader/BigQueryUploaderFactory.java
@@ -5,10 +5,13 @@
package io.airbyte.integrations.destination.bigquery.uploader;
import static io.airbyte.integrations.destination.s3.avro.AvroConstants.JSON_CONVERTER;
+import static software.amazon.awssdk.http.HttpStatusCode.FORBIDDEN;
+import static software.amazon.awssdk.http.HttpStatusCode.NOT_FOUND;
import com.amazonaws.services.s3.AmazonS3;
import com.fasterxml.jackson.databind.JsonNode;
import com.google.cloud.bigquery.BigQuery;
+import com.google.cloud.bigquery.BigQueryException;
import com.google.cloud.bigquery.FormatOptions;
import com.google.cloud.bigquery.JobId;
import com.google.cloud.bigquery.JobInfo;
@@ -16,6 +19,7 @@
import com.google.cloud.bigquery.TableDataWriteChannel;
import com.google.cloud.bigquery.TableId;
import com.google.cloud.bigquery.WriteChannelConfiguration;
+import io.airbyte.commons.exceptions.ConfigErrorException;
import io.airbyte.integrations.destination.bigquery.BigQueryUtils;
import io.airbyte.integrations.destination.bigquery.formatter.BigQueryRecordFormatter;
import io.airbyte.integrations.destination.bigquery.uploader.config.UploaderConfig;
@@ -30,6 +34,20 @@
public class BigQueryUploaderFactory {
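+  // User-facing hint thrown as a ConfigErrorException when BigQuery rejects the write with a
+  // permissions or location problem; the underlying exception is appended after "More details:".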
+ private static final String CONFIG_ERROR_MSG = """
+ Failed to write to destination schema.
+
+ 1. Make sure you have all required permissions for writing to the schema.
+
+ 2. Make sure that the actual destination schema's location corresponds to the location provided
+ in the connector's config.
+
+ 3. Try to change the "Destination schema" from "Mirror Source Structure" (if it's set) to the
+ "Destination Default" option.
+
+ More details:
+ """;
+
public static AbstractBigQueryUploader<?> getUploader(final UploaderConfig uploaderConfig)
throws IOException {
final String schemaName = BigQueryUtils.getSchema(uploaderConfig.getConfig(), uploaderConfig.getConfigStream());
@@ -141,7 +159,17 @@ private static BigQueryDirectUploader getBigQueryDirectUploader(
.setProject(bigQuery.getOptions().getProjectId())
.build();
- final TableDataWriteChannel writer = bigQuery.writer(job, writeChannelConfiguration);
+ final TableDataWriteChannel writer;
+
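+  // A 403 (FORBIDDEN) or 404 (NOT_FOUND) from BigQuery at this point typically indicates missing
+  // permissions or a dataset/location mismatch, so it is surfaced as a config error rather than
+  // rethrown as a generic BigQueryException.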
+ try {
+ writer = bigQuery.writer(job, writeChannelConfiguration);
+ } catch (final BigQueryException e) {
+ if (e.getCode() == FORBIDDEN || e.getCode() == NOT_FOUND) {
+ throw new ConfigErrorException(CONFIG_ERROR_MSG + e);
+ } else {
+ throw new BigQueryException(e.getCode(), e.getMessage());
+ }
+ }
// this is an optional value. If not set - use default client's value (15MiB)
final Integer bigQueryClientChunkSizeFomConfig =
diff --git a/airbyte-integrations/connectors/destination-bigquery/src/test-integration/java/io/airbyte/integrations/destination/bigquery/AbstractBigQueryDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-bigquery/src/test-integration/java/io/airbyte/integrations/destination/bigquery/AbstractBigQueryDestinationAcceptanceTest.java
index 9154f57827dd3..f8819fd6e8cdf 100644
--- a/airbyte-integrations/connectors/destination-bigquery/src/test-integration/java/io/airbyte/integrations/destination/bigquery/AbstractBigQueryDestinationAcceptanceTest.java
+++ b/airbyte-integrations/connectors/destination-bigquery/src/test-integration/java/io/airbyte/integrations/destination/bigquery/AbstractBigQueryDestinationAcceptanceTest.java
@@ -170,7 +170,7 @@ protected List<JsonNode> retrieveRecordsFromTable(final String tableName, final
}
protected void setUpBigQuery() throws IOException {
- //secrets file should be set by the inhereting class
+ // secrets file should be set by the inheriting class
Assertions.assertNotNull(secretsFile);
final String datasetId = Strings.addRandomSuffix("airbyte_tests", "_", 8);
config = BigQueryDestinationTestUtils.createConfig(secretsFile, datasetId);
diff --git a/airbyte-integrations/connectors/destination-bigquery/src/test-integration/java/io/airbyte/integrations/destination/bigquery/BigQueryDestinationTest.java b/airbyte-integrations/connectors/destination-bigquery/src/test-integration/java/io/airbyte/integrations/destination/bigquery/BigQueryDestinationTest.java
index 8d93d74543d92..065ff49844cec 100644
--- a/airbyte-integrations/connectors/destination-bigquery/src/test-integration/java/io/airbyte/integrations/destination/bigquery/BigQueryDestinationTest.java
+++ b/airbyte-integrations/connectors/destination-bigquery/src/test-integration/java/io/airbyte/integrations/destination/bigquery/BigQueryDestinationTest.java
@@ -72,17 +72,20 @@
@TestInstance(PER_CLASS)
class BigQueryDestinationTest {
+
protected static final Path CREDENTIALS_STANDARD_INSERT_PATH = Path.of("secrets/credentials-standard.json");
protected static final Path CREDENTIALS_BAD_PROJECT_PATH = Path.of("secrets/credentials-badproject.json");
protected static final Path CREDENTIALS_NO_DATASET_CREATION_PATH =
Path.of("secrets/credentials-standard-no-dataset-creation.json");
protected static final Path CREDENTIALS_NON_BILLABLE_PROJECT_PATH =
Path.of("secrets/credentials-standard-non-billable-project.json");
+ protected static final Path CREDENTIALS_NO_EDIT_PUBLIC_SCHEMA_ROLE_PATH =
+ Path.of("secrets/credentials-no-edit-public-schema-role.json");
protected static final Path CREDENTIALS_WITH_GCS_STAGING_PATH =
Path.of("secrets/credentials-gcs-staging.json");
protected static final Path[] ALL_PATHS = {CREDENTIALS_WITH_GCS_STAGING_PATH, CREDENTIALS_BAD_PROJECT_PATH, CREDENTIALS_NO_DATASET_CREATION_PATH,
- CREDENTIALS_NON_BILLABLE_PROJECT_PATH, CREDENTIALS_WITH_GCS_STAGING_PATH};
+ CREDENTIALS_NO_EDIT_PUBLIC_SCHEMA_ROLE_PATH, CREDENTIALS_NON_BILLABLE_PROJECT_PATH, CREDENTIALS_WITH_GCS_STAGING_PATH};
private static final Logger LOGGER = LoggerFactory.getLogger(BigQueryDestinationTest.class);
private static final String DATASET_NAME_PREFIX = "bq_dest_integration_test";
@@ -116,8 +119,9 @@ class BigQueryDestinationTest {
protected static JsonNode configWithProjectId;
protected static JsonNode configWithBadProjectId;
protected static JsonNode insufficientRoleConfig;
+ protected static JsonNode noEditPublicSchemaRoleConfig;
protected static JsonNode nonBillableConfig;
- protected static JsonNode gcsStagingConfig; //default BigQuery config. Also used for setup/teardown
+ protected static JsonNode gcsStagingConfig; // default BigQuery config. Also used for setup/teardown
protected BigQuery bigquery;
protected Dataset dataset;
protected static Map<String, JsonNode> configs;
@@ -129,28 +133,26 @@ private Stream<Arguments> successTestConfigProvider() {
return Stream.of(
Arguments.of("config"),
Arguments.of("configWithProjectId"),
- Arguments.of("gcsStagingConfig")
- );
+ Arguments.of("gcsStagingConfig"));
}
private Stream<Arguments> failCheckTestConfigProvider() {
return Stream.of(
Arguments.of("configWithBadProjectId", "User does not have bigquery.datasets.create permission in project"),
Arguments.of("insufficientRoleConfig", "User does not have bigquery.datasets.create permission"),
- Arguments.of("nonBillableConfig", "Access Denied: BigQuery BigQuery: Streaming insert is not allowed in the free tier")
- );
+ Arguments.of("nonBillableConfig", "Access Denied: BigQuery BigQuery: Streaming insert is not allowed in the free tier"));
}
private Stream<Arguments> failWriteTestConfigProvider() {
return Stream.of(
Arguments.of("configWithBadProjectId", "User does not have bigquery.datasets.create permission in project"),
- Arguments.of("insufficientRoleConfig", "Permission bigquery.tables.create denied")
- );
+ Arguments.of("noEditPublicSchemaRoleConfig", "Failed to write to destination schema."), // (or it may not exist)
+ Arguments.of("insufficientRoleConfig", "Permission bigquery.tables.create denied"));
}
@BeforeAll
public static void beforeAll() throws IOException {
- for(Path path : ALL_PATHS) {
+ for (Path path : ALL_PATHS) {
if (!Files.exists(path)) {
throw new IllegalStateException(
String.format("Must provide path to a big query credentials file. Please add file with credentials to %s", path.toAbsolutePath()));
@@ -158,27 +160,29 @@ public static void beforeAll() throws IOException {
}
datasetId = Strings.addRandomSuffix(DATASET_NAME_PREFIX, "_", 8);
- //Set up config objects for test scenarios
- //config - basic config for standard inserts that should succeed check and write tests
- //this config is also used for housekeeping (checking records, and cleaning up)
+ // Set up config objects for test scenarios
+ // config - basic config for standard inserts that should succeed check and write tests
+ // this config is also used for housekeeping (checking records, and cleaning up)
config = BigQueryDestinationTestUtils.createConfig(CREDENTIALS_STANDARD_INSERT_PATH, datasetId);
- //all successful configs use the same project ID
+ // all successful configs use the same project ID
projectId = config.get(BigQueryConsts.CONFIG_PROJECT_ID).asText();
- //configWithProjectId - config that uses project:dataset notation for datasetId
+ // configWithProjectId - config that uses project:dataset notation for datasetId
final String dataSetWithProjectId = String.format("%s:%s", projectId, datasetId);
configWithProjectId = BigQueryDestinationTestUtils.createConfig(CREDENTIALS_STANDARD_INSERT_PATH, dataSetWithProjectId);
- //configWithBadProjectId - config that uses "fake" project ID and should fail
+ // configWithBadProjectId - config that uses "fake" project ID and should fail
final String dataSetWithBadProjectId = String.format("%s:%s", "fake", datasetId);
configWithBadProjectId = BigQueryDestinationTestUtils.createConfig(CREDENTIALS_BAD_PROJECT_PATH, dataSetWithBadProjectId);
- //config that has insufficient privileges
+ // config that has insufficient privileges
insufficientRoleConfig = BigQueryDestinationTestUtils.createConfig(CREDENTIALS_NO_DATASET_CREATION_PATH, datasetId);
- //config that tries to write to a project with disabled billing (free tier)
+ // config that tries to write to a project with disabled billing (free tier)
nonBillableConfig = BigQueryDestinationTestUtils.createConfig(CREDENTIALS_NON_BILLABLE_PROJECT_PATH, "testnobilling");
- //config with GCS staging
+ // config that has no privileges to edit anything in Public schema
+ noEditPublicSchemaRoleConfig = BigQueryDestinationTestUtils.createConfig(CREDENTIALS_NO_EDIT_PUBLIC_SCHEMA_ROLE_PATH, "public");
+ // config with GCS staging
gcsStagingConfig = BigQueryDestinationTestUtils.createConfig(CREDENTIALS_WITH_GCS_STAGING_PATH, datasetId);
MESSAGE_USERS1.getRecord().setNamespace(datasetId);
@@ -188,28 +192,33 @@ public static void beforeAll() throws IOException {
catalog = new ConfiguredAirbyteCatalog().withStreams(Lists.newArrayList(
CatalogHelpers.createConfiguredAirbyteStream(USERS_STREAM_NAME, datasetId,
- io.airbyte.protocol.models.Field.of("name", JsonSchemaType.STRING),
- io.airbyte.protocol.models.Field
- .of("id", JsonSchemaType.STRING))
+ io.airbyte.protocol.models.Field.of("name", JsonSchemaType.STRING),
+ io.airbyte.protocol.models.Field
+ .of("id", JsonSchemaType.STRING))
.withDestinationSyncMode(DestinationSyncMode.APPEND),
CatalogHelpers.createConfiguredAirbyteStream(TASKS_STREAM_NAME, datasetId, Field.of("goal", JsonSchemaType.STRING))));
- configs = new HashMap<String, JsonNode>() {{
- put("config", config);
- put("configWithProjectId", configWithProjectId);
- put("configWithBadProjectId", configWithBadProjectId);
- put("insufficientRoleConfig", insufficientRoleConfig);
- put("nonBillableConfig", nonBillableConfig);
- put("gcsStagingConfig", gcsStagingConfig);
- }};
+ configs = new HashMap<String, JsonNode>() {
+
+ {
+ put("config", config);
+ put("configWithProjectId", configWithProjectId);
+ put("configWithBadProjectId", configWithBadProjectId);
+ put("insufficientRoleConfig", insufficientRoleConfig);
+ put("noEditPublicSchemaRoleConfig", noEditPublicSchemaRoleConfig);
+ put("nonBillableConfig", nonBillableConfig);
+ put("gcsStagingConfig", gcsStagingConfig);
+ }
+
+ };
}
protected void initBigQuery(JsonNode config) throws IOException {
bigquery = BigQueryDestinationTestUtils.initBigQuery(config, projectId);
try {
dataset = BigQueryDestinationTestUtils.initDataSet(config, bigquery, datasetId);
- } catch(Exception ex) {
- //ignore
+ } catch (Exception ex) {
+ // ignore
}
}
@@ -255,7 +264,7 @@ void testCheckSuccess(String configName) throws IOException {
@ParameterizedTest
@MethodSource("failCheckTestConfigProvider")
void testCheckFailures(String configName, String error) {
- //TODO: this should always throw ConfigErrorException
+ // TODO: this should always throw ConfigErrorException
JsonNode testConfig = configs.get(configName);
final Exception ex = assertThrows(Exception.class, () -> {
new BigQueryDestination().check(testConfig);
@@ -322,7 +331,7 @@ void testWriteFailure(String configName, String error) throws Exception {
}
private Set<String> fetchNamesOfTablesInDb() throws InterruptedException {
- if(dataset == null || bigquery == null) {
+ if (dataset == null || bigquery == null) {
return Collections.emptySet();
}
final QueryJobConfiguration queryConfig = QueryJobConfiguration
@@ -330,7 +339,7 @@ private Set<String> fetchNamesOfTablesInDb() {
.setUseLegacySql(false)
.build();
- if(!dataset.exists()) {
+ if (!dataset.exists()) {
return Collections.emptySet();
}
return StreamSupport
@@ -429,4 +438,5 @@ private boolean isTablePartitioned(final BigQuery bigquery, final Dataset datase
}
return false;
}
+
}
diff --git a/airbyte-integrations/connectors/destination-bigquery/src/test-integration/java/io/airbyte/integrations/destination/bigquery/BigQueryDestinationTestUtils.java b/airbyte-integrations/connectors/destination-bigquery/src/test-integration/java/io/airbyte/integrations/destination/bigquery/BigQueryDestinationTestUtils.java
index 3f2b7829550d5..9da544d41444b 100644
--- a/airbyte-integrations/connectors/destination-bigquery/src/test-integration/java/io/airbyte/integrations/destination/bigquery/BigQueryDestinationTestUtils.java
+++ b/airbyte-integrations/connectors/destination-bigquery/src/test-integration/java/io/airbyte/integrations/destination/bigquery/BigQueryDestinationTestUtils.java
@@ -1,3 +1,7 @@
+/*
+ * Copyright (c) 2022 Airbyte, Inc., all rights reserved.
+ */
+
package io.airbyte.integrations.destination.bigquery;
import com.amazonaws.services.s3.AmazonS3;
@@ -22,6 +26,7 @@ public class BigQueryDestinationTestUtils {
/**
* Parse the config file and replace dataset with datasetId randomly generated by the test
+ *
* @param configFile
* @param datasetId
* @return
@@ -30,13 +35,13 @@ public class BigQueryDestinationTestUtils {
public static JsonNode createConfig(Path configFile, String datasetId) throws IOException {
final String tmpConfigAsString = Files.readString(configFile);
final JsonNode tmpConfigJson = Jsons.deserialize(tmpConfigAsString);
- return Jsons.jsonNode(((ObjectNode)tmpConfigJson).put(BigQueryConsts.CONFIG_DATASET_ID, datasetId));
+ return Jsons.jsonNode(((ObjectNode) tmpConfigJson).put(BigQueryConsts.CONFIG_DATASET_ID, datasetId));
}
/**
- * Get a handle for the BigQuery dataset instance used by the test.
- * This dataset instance will be used to verify results of test operations
- * and for cleaning up after the test runs
+ * Get a handle for the BigQuery dataset instance used by the test. This dataset instance will be
+ * used to verify results of test operations and to clean up after the test runs.
+ *
* @param config
* @param bigquery
* @param datasetId
@@ -47,8 +52,8 @@ public static Dataset initDataSet(JsonNode config, BigQuery bigquery, String dat
.setLocation(config.get(BigQueryConsts.CONFIG_DATASET_LOCATION).asText()).build();
try {
return bigquery.create(datasetInfo);
- } catch(Exception ex) {
- if(ex.getMessage().indexOf("Already Exists") > -1) {
+ } catch (Exception ex) {
+ if (ex.getMessage().indexOf("Already Exists") > -1) {
return bigquery.getDataset(datasetId);
}
}
@@ -56,8 +61,9 @@ public static Dataset initDataSet(JsonNode config, BigQuery bigquery, String dat
}
/**
- * Initialized bigQuery instance that will be used for verifying results of test operations
- * and for cleaning up BigQuery dataset after the test
+ * Initializes the BigQuery instance that will be used for verifying results of test operations
+ * and for cleaning up the BigQuery dataset after the test.
+ *
* @param config
* @param projectId
* @return
@@ -74,6 +80,7 @@ public static BigQuery initBigQuery(JsonNode config, String projectId) throws IO
/**
* Deletes bigquery data set created during the test
+ *
* @param bigquery
* @param dataset
* @param LOGGER
@@ -81,7 +88,7 @@ public static BigQuery initBigQuery(JsonNode config, String projectId) throws IO
public static void tearDownBigQuery(BigQuery bigquery, Dataset dataset, Logger LOGGER) {
// allows deletion of a dataset that has contents
final BigQuery.DatasetDeleteOption option = BigQuery.DatasetDeleteOption.deleteContents();
- if(bigquery == null || dataset == null) {
+ if (bigquery == null || dataset == null) {
return;
}
try {
@@ -100,10 +107,10 @@ public static void tearDownBigQuery(BigQuery bigquery, Dataset dataset, Logger L
* Remove all the GCS output from the tests.
*/
public static void tearDownGcs(AmazonS3 s3Client, JsonNode config, Logger LOGGER) {
- if(s3Client == null) {
+ if (s3Client == null) {
return;
}
- if(BigQueryUtils.getLoadingMethod(config) != UploadingMethod.GCS) {
+ if (BigQueryUtils.getLoadingMethod(config) != UploadingMethod.GCS) {
return;
}
final JsonNode properties = config.get(BigQueryConsts.LOADING_METHOD);
@@ -130,4 +137,5 @@ public static void tearDownGcs(AmazonS3 s3Client, JsonNode config, Logger LOGGER
LOGGER.error("Failed to remove GCS resources after the test", ex);
}
}
+
}
diff --git a/airbyte-integrations/connectors/destination-bigquery/src/test-integration/java/io/airbyte/integrations/destination/bigquery/BigQueryGcsDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-bigquery/src/test-integration/java/io/airbyte/integrations/destination/bigquery/BigQueryGcsDestinationAcceptanceTest.java
index 4f13a6cf0d51b..bd115f09f76e3 100644
--- a/airbyte-integrations/connectors/destination-bigquery/src/test-integration/java/io/airbyte/integrations/destination/bigquery/BigQueryGcsDestinationAcceptanceTest.java
+++ b/airbyte-integrations/connectors/destination-bigquery/src/test-integration/java/io/airbyte/integrations/destination/bigquery/BigQueryGcsDestinationAcceptanceTest.java
@@ -16,32 +16,34 @@
@TestInstance(PER_CLASS)
public class BigQueryGcsDestinationAcceptanceTest extends AbstractBigQueryDestinationAcceptanceTest {
+
private AmazonS3 s3Client;
private static final Logger LOGGER = LoggerFactory.getLogger(BigQueryGcsDestinationAcceptanceTest.class);
/**
- * Sets up secretsFile path as well as BigQuery and GCS instances for verification and cleanup
- * This function will be called before EACH test.
- * @see DestinationAcceptanceTest#setUpInternal()
+ * Sets up secretsFile path as well as BigQuery and GCS instances for verification and cleanup.
+ * This function will be called before EACH test.
+ *
+ * @see DestinationAcceptanceTest#setUpInternal()
* @param testEnv - information about the test environment.
* @throws Exception - can throw any exception, test framework will handle.
*/
@Override
protected void setup(TestDestinationEnv testEnv) throws Exception {
- //use secrets file with GCS staging config
+ // use secrets file with GCS staging config
secretsFile = Path.of("secrets/credentials-gcs-staging.json");
setUpBigQuery();
- //the setup steps below are specific to GCS staging use case
+ // the setup steps below are specific to GCS staging use case
final GcsDestinationConfig gcsDestinationConfig = GcsDestinationConfig
.getGcsDestinationConfig(BigQueryUtils.getGcsJsonNodeConfig(config));
this.s3Client = gcsDestinationConfig.getS3Client();
}
/**
- * Removes data from bigquery and GCS
- * This function will be called after EACH test
- * @see DestinationAcceptanceTest#tearDownInternal()
+ * Removes data from BigQuery and GCS. This function will be called after EACH test.
+ *
+ * @see DestinationAcceptanceTest#tearDownInternal()
* @param testEnv - information about the test environment.
* @throws Exception - can throw any exception, test framework will handle.
*/
@@ -54,4 +56,5 @@ protected void tearDown(TestDestinationEnv testEnv) {
protected void tearDownGcs() {
BigQueryDestinationTestUtils.tearDownGcs(s3Client, config, LOGGER);
}
+
}
diff --git a/airbyte-integrations/connectors/destination-bigquery/src/test-integration/java/io/airbyte/integrations/destination/bigquery/BigQueryStandardDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-bigquery/src/test-integration/java/io/airbyte/integrations/destination/bigquery/BigQueryStandardDestinationAcceptanceTest.java
index 0542c8a3837a5..4ae95a63352ad 100644
--- a/airbyte-integrations/connectors/destination-bigquery/src/test-integration/java/io/airbyte/integrations/destination/bigquery/BigQueryStandardDestinationAcceptanceTest.java
+++ b/airbyte-integrations/connectors/destination-bigquery/src/test-integration/java/io/airbyte/integrations/destination/bigquery/BigQueryStandardDestinationAcceptanceTest.java
@@ -1,3 +1,7 @@
+/*
+ * Copyright (c) 2022 Airbyte, Inc., all rights reserved.
+ */
+
package io.airbyte.integrations.destination.bigquery;
import static org.junit.jupiter.api.TestInstance.Lifecycle.PER_CLASS;
@@ -10,12 +14,14 @@
@TestInstance(PER_CLASS)
public class BigQueryStandardDestinationAcceptanceTest extends AbstractBigQueryDestinationAcceptanceTest {
+
private static final Logger LOGGER = LoggerFactory.getLogger(BigQueryStandardDestinationAcceptanceTest.class);
/**
- * Sets up secretsFile path and BigQuery instance for verification and cleanup
- * This function will be called before EACH test.
- * @see DestinationAcceptanceTest#setUpInternal()
+ * Sets up secretsFile path and BigQuery instance for verification and cleanup. This function
+ * will be called before EACH test.
+ *
+ * @see DestinationAcceptanceTest#setUpInternal()
* @param testEnv - information about the test environment.
* @throws Exception - can throw any exception, test framework will handle.
*/
@@ -26,9 +32,9 @@ protected void setup(TestDestinationEnv testEnv) throws Exception {
}
/**
- * Removes data from bigquery
- * This function will be called after EACH test
- * @see DestinationAcceptanceTest#tearDownInternal()
+ * Removes data from BigQuery. This function will be called after EACH test.
+ *
+ * @see DestinationAcceptanceTest#tearDownInternal()
* @param testEnv - information about the test environment.
* @throws Exception - can throw any exception, test framework will handle.
*/
@@ -36,4 +42,5 @@ protected void setup(TestDestinationEnv testEnv) throws Exception {
protected void tearDown(TestDestinationEnv testEnv) {
tearDownBigQuery();
}
+
}
diff --git a/docs/integrations/destinations/bigquery.md b/docs/integrations/destinations/bigquery.md
index 449767925c63f..e418918e1f78d 100644
--- a/docs/integrations/destinations/bigquery.md
+++ b/docs/integrations/destinations/bigquery.md
@@ -136,6 +136,7 @@ Now that you have set up the BigQuery destination connector, check out the follo
| Version | Date | Pull Request | Subject |
|:--------|:-----------|:----------------------------------------------------------|:-------------------------------------------------------------------------------------------------------------------------|
+| 1.2.11 | 2023-01-18 | [#21144](https://github.com/airbytehq/airbyte/pull/21144) | Added explicit error message if sync fails due to a config issue |
| 1.2.9 | 2022-12-14 | [#20501](https://github.com/airbytehq/airbyte/pull/20501) | Report GCS staging failures that occur during connection check |
| 1.2.8 | 2022-11-22 | [#19489](https://github.com/airbytehq/airbyte/pull/19489) | Added non-billable projects handle to check connection stage |
| 1.2.7 | 2022-11-11 | [#19358](https://github.com/airbytehq/airbyte/pull/19358) | Fixed check method to capture mismatch dataset location |
@@ -191,6 +192,7 @@ Now that you have set up the BigQuery destination connector, check out the follo
| Version | Date | Pull Request | Subject |
|:--------|:-----------|:----------------------------------------------------------|:-------------------------------------------------------------------------------------------------------------------------|
+| 1.2.11 | 2023-01-18 | [#21144](https://github.com/airbytehq/airbyte/pull/21144) | Added explicit error message if sync fails due to a config issue |
| 1.2.10 | 2023-01-04 | [#20730](https://github.com/airbytehq/airbyte/pull/20730) | An incoming source Number type will create a big query integer rather than a float. |
| 1.2.9 | 2022-12-14 | [#20501](https://github.com/airbytehq/airbyte/pull/20501) | Report GCS staging failures that occur during connection check |
| 1.2.8 | 2022-11-22 | [#19489](https://github.com/airbytehq/airbyte/pull/19489) | Added non-billable projects handle to check connection stage |