Skip to content

[MVP] Integrate sentry to all java-based connectors #9745

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 29 commits into from
Jan 30, 2022
Merged
Show file tree
Hide file tree
Changes from 3 commits
Commits
Show all changes
29 commits
Select commit Hold shift + click to select a range
730ce51
airbyte-9328: Added Sentry integration to BigQuery and BigQuery denor…
alexandertsukanov Jan 19, 2022
b6fcde3
airbyte-5050: Added strategy for INSERT ROW.
alexandertsukanov Jan 19, 2022
7f581bb
airbyte-9328: Added Sentry integration to Snowflake.
alexandertsukanov Jan 24, 2022
a0fe903
airbyte-9328: Fix Sentry config.
alexandertsukanov Jan 25, 2022
460d959
airbyte-9328: Fixed PR comments.
alexandertsukanov Jan 26, 2022
760fc42
airbyte-9328: Fixed PR comments.
alexandertsukanov Jan 26, 2022
cdc47d8
airbyte-9328: Fix PR comments.
alexandertsukanov Jan 26, 2022
ba11f20
airbyte-9328: Fixed PR comments.
alexandertsukanov Jan 26, 2022
ed047b4
airbyte-9328: Fixed PR comments.
alexandertsukanov Jan 26, 2022
0f5b521
airbyte-9328: Fixed PR comments.
alexandertsukanov Jan 26, 2022
ad1824d
airbyte-9328: Small changes.
alexandertsukanov Jan 27, 2022
000328f
airbyte-9328: Small changes.
alexandertsukanov Jan 27, 2022
a89eb7d
airbyte-9328: Move SENTRY DSN keys to Dockerfiles.
alexandertsukanov Jan 28, 2022
9e23433
Merge branch 'master' into otsukanov/airbyte-9328
tuliren Jan 28, 2022
2881b0e
Use new dsn
tuliren Jan 29, 2022
4c0fa25
Revert format
tuliren Jan 29, 2022
9207afb
Remove sentry dsn from compose temporarily
tuliren Jan 29, 2022
a9b7fe5
Log sentry event id
tuliren Jan 29, 2022
74f20d0
Move sentry to java base
tuliren Jan 29, 2022
ef00a7b
Merge branch 'master' into otsukanov/airbyte-9328
tuliren Jan 29, 2022
6cb7ce2
Remove sentry code from bigquery
tuliren Jan 29, 2022
ef1c61d
Update dockerfiles
tuliren Jan 29, 2022
7bf53a2
Fix build
tuliren Jan 29, 2022
304520b
Update release tag format
tuliren Jan 29, 2022
9ba3d2f
Bump version
tuliren Jan 29, 2022
493d484
Add env to dockerfiles
tuliren Jan 29, 2022
d115b4b
Fix e2e test connector dockerfile
tuliren Jan 30, 2022
d416f07
Fix snowflake bigquery dockerfile
tuliren Jan 30, 2022
d1c881a
Mark new versions as unpublished
tuliren Jan 30, 2022
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@ FROM airbyte/integration-base-java:dev
WORKDIR /airbyte

ENV APPLICATION destination-bigquery-denormalized
ENV SENTRY_DSN "https://[email protected]/6145965"

COPY --from=build /airbyte /airbyte

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@ application {
dependencies {
implementation 'com.google.cloud:google-cloud-bigquery:1.122.2'
implementation 'org.apache.commons:commons-lang3:3.11'
implementation 'io.sentry:sentry-log4j2:5.5.3'

implementation project(':airbyte-config:models')
implementation project(':airbyte-integrations:bases:base-java')
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,9 @@
import io.airbyte.integrations.destination.bigquery.formatter.DefaultBigQueryDenormalizedRecordFormatter;
import io.airbyte.integrations.destination.bigquery.formatter.GcsBigQueryDenormalizedRecordFormatter;
import io.airbyte.integrations.destination.bigquery.uploader.UploaderType;
import io.sentry.ITransaction;
import io.sentry.Sentry;
import io.sentry.SpanStatus;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
Expand All @@ -19,6 +22,18 @@ public class BigQueryDenormalizedDestination extends BigQueryDestination {

private static final Logger LOGGER = LoggerFactory.getLogger(BigQueryDenormalizedDestination.class);

/**
 * Bootstraps the Sentry SDK for this connector process.
 * Must be called once, before any {@code Sentry.startTransaction(...)} call,
 * otherwise transactions are no-ops.
 */
public static void initSentry() {
  Sentry.init(sentryOptions -> {
    // Honor env-var configuration (SENTRY_DSN etc.) —
    // see https://docs.sentry.io/platforms/java/configuration/
    sentryOptions.setEnableExternalConfiguration(true);
    // Sample 100% of transactions.
    sentryOptions.setTracesSampleRate(1.0);
  });
  // Tag every event so it can be filtered per connector in Sentry.
  Sentry.configureScope(scope -> scope.setTag("connector", "destination-bigquery-denormalized"));
}

@Override
protected String getTargetTableName(final String streamName) {
// This BigQuery destination does not write to a staging "raw" table but directly to a normalized
Expand Down Expand Up @@ -46,10 +61,19 @@ protected boolean isDefaultAirbyteTmpTableSchema() {
}

/**
 * Connector entry point. Initializes Sentry, wraps the whole run in a Sentry
 * transaction, and rethrows any failure so the process exits non-zero.
 *
 * @param args standard Airbyte integration arguments (spec/check/discover/write)
 * @throws Exception any failure from the integration run, after being recorded in Sentry
 */
public static void main(final String[] args) throws Exception {
  initSentry();
  final Destination destination = new BigQueryDenormalizedDestination();
  final ITransaction transaction = Sentry.startTransaction("IntegrationRunner()", "run");
  try {
    // Log this class, not the parent BigQueryDestination, so operators can tell the connectors apart.
    LOGGER.info("starting destination: {}", BigQueryDenormalizedDestination.class);
    new IntegrationRunner(destination).run(args);
  } catch (final Exception e) {
    transaction.setThrowable(e);
    transaction.setStatus(SpanStatus.INTERNAL_ERROR);
    // Rethrow: swallowing here would make a failed sync exit with status 0.
    throw e;
  } finally {
    transaction.finish();
    LOGGER.info("completed destination: {}", BigQueryDenormalizedDestination.class);
  }
}

}
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
<?xml version="1.0" encoding="UTF-8"?>
<!-- Log4j2 configuration: forwards ERROR-level log events to Sentry via the
     io.sentry.log4j2 appender. The DSN is taken from the SENTRY_DSN env var. -->
<Configuration status="warn" packages="org.apache.logging.log4j.core,io.sentry.log4j2">
<Appenders>
<Sentry name="Sentry"/>
</Appenders>
<Loggers>
<!-- Only ERROR and above reach Sentry; lower levels are not reported. -->
<Root level="ERROR">
<AppenderRef ref="Sentry"/>
</Root>
</Loggers>
</Configuration>
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@ FROM airbyte/integration-base-java:dev
WORKDIR /airbyte

ENV APPLICATION destination-bigquery
ENV SENTRY_DSN "https://[email protected]/6145965"

COPY --from=build /airbyte /airbyte

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@ application {
dependencies {
implementation 'com.google.cloud:google-cloud-bigquery:1.122.2'
implementation 'org.apache.commons:commons-lang3:3.11'
implementation 'io.sentry:sentry-log4j2:5.5.3'

// csv
implementation 'org.apache.commons:commons-csv:1.4'
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -34,6 +34,9 @@
import io.airbyte.protocol.models.AirbyteStream;
import io.airbyte.protocol.models.ConfiguredAirbyteCatalog;
import io.airbyte.protocol.models.ConfiguredAirbyteStream;
import io.sentry.ITransaction;
import io.sentry.Sentry;
import io.sentry.SpanStatus;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.util.HashMap;
Expand All @@ -53,6 +56,18 @@ public BigQueryDestination() {
namingResolver = new BigQuerySQLNameTransformer();
}

/**
 * Bootstraps the Sentry SDK for this connector process.
 * Must be called once, before any {@code Sentry.startTransaction(...)} call,
 * otherwise transactions are no-ops.
 */
public static void initSentry() {
  Sentry.init(sentryOptions -> {
    // Honor env-var configuration (SENTRY_DSN etc.) —
    // see https://docs.sentry.io/platforms/java/configuration/
    sentryOptions.setEnableExternalConfiguration(true);
    // Sample 100% of transactions.
    sentryOptions.setTracesSampleRate(1.0);
  });
  // Tag every event so it can be filtered per connector in Sentry.
  Sentry.configureScope(scope -> scope.setTag("connector", "destination-bigquery"));
}

@Override
public AirbyteConnectionStatus check(final JsonNode config) {
try {
Expand Down Expand Up @@ -124,32 +139,29 @@ protected BigQuery getBigQuery(final JsonNode config) {
* 1. Create a temporary table for each stream
* </p>
* <p>
* 2. Write records to each stream directly (the bigquery client handles managing when to push the
* records over the network)
* 2. Write records to each stream directly (the bigquery client handles managing when to push the records over the network)
* </p>
* <p>
* 4. Once all records have been written close the writers, so that any remaining records are
* flushed.
* 4. Once all records have been written close the writers, so that any remaining records are flushed.
* </p>
* <p>
* 5. Copy the temp tables to the final table name (overwriting if necessary).
* </p>
*
* @param config - integration-specific configuration object as json. e.g. { "username": "airbyte",
* "password": "super secure" }
* @param config - integration-specific configuration object as json. e.g. { "username": "airbyte", "password": "super secure" }
* @param catalog - schema of the incoming messages.
* @return consumer that writes singer messages to the database.
*/
@Override
public AirbyteMessageConsumer getConsumer(final JsonNode config,
final ConfiguredAirbyteCatalog catalog,
final Consumer<AirbyteMessage> outputRecordCollector)
final ConfiguredAirbyteCatalog catalog,
final Consumer<AirbyteMessage> outputRecordCollector)
throws IOException {
return getRecordConsumer(getUploaderMap(config, catalog), outputRecordCollector);
}

protected Map<AirbyteStreamNameNamespacePair, AbstractBigQueryUploader<?>> getUploaderMap(final JsonNode config,
final ConfiguredAirbyteCatalog catalog)
final ConfiguredAirbyteCatalog catalog)
throws IOException {
final BigQuery bigquery = getBigQuery(config);

Expand All @@ -176,9 +188,8 @@ protected Map<AirbyteStreamNameNamespacePair, AbstractBigQueryUploader<?>> getUp
}

/**
* BigQuery might have different structure of the Temporary table. If this method returns TRUE,
* temporary table will have only three common Airbyte attributes. In case of FALSE, temporary table
* structure will be in line with Airbyte message JsonSchema.
* BigQuery might have different structure of the Temporary table. If this method returns TRUE, temporary table will have only three common Airbyte
* attributes. In case of FALSE, temporary table structure will be in line with Airbyte message JsonSchema.
*
* @return use default AirbyteSchema or build using JsonSchema
*/
Expand All @@ -197,15 +208,23 @@ protected String getTargetTableName(final String streamName) {
}

protected AirbyteMessageConsumer getRecordConsumer(final Map<AirbyteStreamNameNamespacePair, AbstractBigQueryUploader<?>> writeConfigs,
final Consumer<AirbyteMessage> outputRecordCollector) {
final Consumer<AirbyteMessage> outputRecordCollector) {
return new BigQueryRecordConsumer(writeConfigs, outputRecordCollector);
}

/**
 * Connector entry point. Initializes Sentry, wraps the whole run in a Sentry
 * transaction, and rethrows any failure so the process exits non-zero.
 *
 * @param args standard Airbyte integration arguments (spec/check/discover/write)
 * @throws Exception any failure from the integration run, after being recorded in Sentry
 */
public static void main(final String[] args) throws Exception {
  // initSentry() was previously defined but never invoked here; without it the
  // transaction below runs against an uninitialized SDK and reports nothing.
  initSentry();
  final Destination destination = new BigQueryDestination();
  final ITransaction transaction = Sentry.startTransaction("IntegrationRunner()", "run");
  try {
    LOGGER.info("starting destination: {}", BigQueryDestination.class);
    new IntegrationRunner(destination).run(args);
  } catch (final Exception e) {
    transaction.setThrowable(e);
    transaction.setStatus(SpanStatus.INTERNAL_ERROR);
    // Rethrow: swallowing here would make a failed sync exit with status 0.
    throw e;
  } finally {
    transaction.finish();
    LOGGER.info("completed destination: {}", BigQueryDestination.class);
  }
}

}
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
<?xml version="1.0" encoding="UTF-8"?>
<!-- Log4j2 configuration: forwards ERROR-level log events to Sentry via the
     io.sentry.log4j2 appender. The DSN is taken from the SENTRY_DSN env var. -->
<Configuration status="warn" packages="org.apache.logging.log4j.core,io.sentry.log4j2">
<Appenders>
<Sentry name="Sentry"/>
</Appenders>
<Loggers>
<!-- Only ERROR and above reach Sentry; lower levels are not reported. -->
<Root level="ERROR">
<AppenderRef ref="Sentry"/>
</Root>
</Loggers>
</Configuration>
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@ WORKDIR /airbyte

ENV APPLICATION destination-snowflake

ENV SENTRY_DSN "https://[email protected]/6145965"
# Needed for JDK17 (in turn, needed on M1 macs) - see https://github.com/snowflakedb/snowflake-jdbc/issues/589#issuecomment-983944767
ENV DESTINATION_SNOWFLAKE_OPTS "--add-opens java.base/java.nio=ALL-UNNAMED"

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -30,6 +30,7 @@ dependencies {
implementation 'org.apache.commons:commons-csv:1.4'
implementation 'com.github.alexmojaki:s3-stream-upload:2.2.2'
implementation "io.aesy:datasize:1.0.0"
implementation 'io.sentry:sentry-log4j2:5.5.3'

implementation project(':airbyte-config:models')
implementation project(':airbyte-db:lib')
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,8 @@ public class SnowflakeDatabase {
private static final Duration QUERY_TIMEOUT = Duration.ofHours(3);
private static final SnowflakeSQLNameTransformer nameTransformer = new SnowflakeSQLNameTransformer();

public static Connection getConnection(final JsonNode config) throws SQLException {
public static Connection getConnection(final JsonNode config)
throws SQLException {
final String connectUrl = String.format("jdbc:snowflake://%s", config.get("host").asText());

final Properties properties = new Properties();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,9 @@
import io.airbyte.integrations.base.Destination;
import io.airbyte.integrations.base.IntegrationRunner;
import io.airbyte.integrations.destination.jdbc.copy.SwitchingDestination;
import io.sentry.ITransaction;
import io.sentry.Sentry;
import io.sentry.SpanStatus;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
Expand All @@ -17,6 +20,18 @@ public class SnowflakeDestination extends SwitchingDestination<SnowflakeDestinat

private static final Logger LOGGER = LoggerFactory.getLogger(SnowflakeDestination.class);

/**
 * Bootstraps the Sentry SDK for this connector process.
 * Must be called once, before any {@code Sentry.startTransaction(...)} call,
 * otherwise transactions are no-ops.
 */
public static void initSentry() {
  Sentry.init(sentryOptions -> {
    // Honor env-var configuration (SENTRY_DSN etc.) —
    // see https://docs.sentry.io/platforms/java/configuration/
    sentryOptions.setEnableExternalConfiguration(true);
    // Sample 100% of transactions.
    sentryOptions.setTracesSampleRate(1.0);
  });
  // Tag every event so it can be filtered per connector in Sentry.
  Sentry.configureScope(scope -> scope.setTag("connector", "destination-snowflake"));
}

enum DestinationType {
INSERT,
COPY_S3,
Expand Down Expand Up @@ -65,10 +80,24 @@ public static Map<DestinationType, Destination> getTypeToDestination() {
}

/**
 * Connector entry point. Initializes Sentry, wraps the whole run in a Sentry
 * transaction, and rethrows any failure so the process exits non-zero.
 *
 * @param args standard Airbyte integration arguments (spec/check/discover/write)
 * @throws Exception any failure from the integration run, after being recorded in Sentry
 */
public static void main(final String[] args) throws Exception {
  initSentry();
  // NOTE: removed leftover debug code that threw and logged
  // "This is hello from Sentry" on every startup — it polluted logs and
  // Sentry with a synthetic error on each run.
  final Destination destination = new SnowflakeDestination();
  final ITransaction transaction = Sentry.startTransaction("IntegrationRunner()", "run");
  try {
    LOGGER.info("starting destination: {}", SnowflakeDestination.class);
    new IntegrationRunner(destination).run(args);
  } catch (final Exception e) {
    transaction.setThrowable(e);
    transaction.setStatus(SpanStatus.INTERNAL_ERROR);
    // Rethrow: swallowing here would make a failed sync exit with status 0.
    throw e;
  } finally {
    transaction.finish();
    LOGGER.info("completed destination: {}", SnowflakeDestination.class);
  }
}

}
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
<?xml version="1.0" encoding="UTF-8"?>
<!-- Log4j2 configuration: forwards ERROR-level log events to Sentry via the
     io.sentry.log4j2 appender. The DSN is taken from the SENTRY_DSN env var. -->
<Configuration status="warn" packages="org.apache.logging.log4j.core,io.sentry.log4j2">
<Appenders>
<Sentry name="Sentry"/>
</Appenders>
<Loggers>
<!-- Only ERROR and above reach Sentry; lower levels are not reported. -->
<Root level="ERROR">
<AppenderRef ref="Sentry"/>
</Root>
</Loggers>
</Configuration>