 package io.airbyte.integrations.destination.databricks;

 import com.fasterxml.jackson.databind.JsonNode;
-import com.google.common.collect.ImmutableMap;
-import io.airbyte.commons.json.Jsons;
+import io.airbyte.db.Databases;
 import io.airbyte.db.jdbc.JdbcDatabase;
-import io.airbyte.integrations.base.Destination;
-import io.airbyte.integrations.base.IntegrationRunner;
-import io.airbyte.integrations.destination.jdbc.AbstractJdbcDestination;
-import io.airbyte.protocol.models.AirbyteConnectionStatus;
-import io.airbyte.protocol.models.AirbyteConnectionStatus.Status;
-import java.io.File;
-import java.net.MalformedURLException;
-import java.net.URL;
-import java.net.URLClassLoader;
-import java.util.Optional;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import io.airbyte.integrations.base.AirbyteMessageConsumer;
+import io.airbyte.integrations.destination.ExtendedNameTransformer;
+import io.airbyte.integrations.destination.jdbc.SqlOperations;
+import io.airbyte.integrations.destination.jdbc.copy.CopyConsumerFactory;
+import io.airbyte.integrations.destination.jdbc.copy.CopyDestination;
+import io.airbyte.integrations.destination.jdbc.copy.s3.S3Config;
+import io.airbyte.integrations.destination.jdbc.copy.s3.S3StreamCopier;
+import io.airbyte.protocol.models.AirbyteMessage;
+import io.airbyte.protocol.models.ConfiguredAirbyteCatalog;
+import java.util.function.Consumer;

-public class DatabricksDestination extends AbstractJdbcDestination implements Destination {
+public class DatabricksDestination extends CopyDestination {

-  private static final Logger LOGGER = LoggerFactory.getLogger(DatabricksDestination.class);
-
-  public static final String DRIVER_CLASS = "com.simba.spark.jdbc.Driver";
-
-  // TODO: this isn't working yet!
-  public static void getDriver() throws MalformedURLException, ClassNotFoundException {
-    File driverJar = new File("/Users/phlair/Downloads/SparkDriver/SparkJDBC42.jar");
-    URL jarUrl = new URL("jar", "", "file:" + driverJar.getAbsolutePath() + "!/");
-    URLClassLoader myLoader = new URLClassLoader(new URL[] { jarUrl });
-    myLoader.loadClass(DRIVER_CLASS);
-  }
+  private static final String DRIVER_CLASS = "com.simba.spark.jdbc.Driver";

   @Override
-  public AirbyteConnectionStatus check(JsonNode config) {
-    try (final JdbcDatabase database = getDatabase(config)) {
-      DatabricksSqlOperations databricksSqlOperations = (DatabricksSqlOperations) getSqlOperations();
-
-      String outputSchema = getNamingResolver().getIdentifier(config.get("database").asText());
-      attemptSQLCreateAndDropTableOperations(outputSchema, database, getNamingResolver(), databricksSqlOperations);
-
-      databricksSqlOperations.verifyLocalFileEnabled(database);
-
-      // TODO: enforce databricks runtime version instead of this mySql code
-//      VersionCompatibility compatibility = dbSqlOperations.isCompatibleVersion(database);
-//      if (!compatibility.isCompatible()) {
-//        throw new RuntimeException(String
-//            .format("Your MySQL version %s is not compatible with Airbyte",
-//                compatibility.getVersion()));
-//      }
-
-      return new AirbyteConnectionStatus().withStatus(Status.SUCCEEDED);
-    } catch (Exception e) {
-      LOGGER.error("Exception while checking connection: ", e);
-      return new AirbyteConnectionStatus()
-          .withStatus(Status.FAILED)
-          .withMessage("Could not connect with provided configuration. \n" + e.getMessage());
-    }
+  public AirbyteMessageConsumer getConsumer(JsonNode config, ConfiguredAirbyteCatalog catalog, Consumer<AirbyteMessage> outputRecordCollector) {
+    return CopyConsumerFactory.create(
+        outputRecordCollector,
+        getDatabase(config),
+        getSqlOperations(),
+        getNameTransformer(),
+        S3Config.getS3Config(config),
+        catalog,
+        new DatabricksStreamCopierFactory(),
+        config.get("schema").asText()
+    );
   }

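Note on the new write path: CopyConsumerFactory.create wires each configured stream to a DatabricksStreamCopierFactory, so records are staged to S3 and then copied into Databricks rather than inserted row-by-row over JDBC. Below is a minimal sketch of the consumer lifecycle a caller would drive; `config`, `catalog`, and `messages` are assumed inputs, and in practice Airbyte's IntegrationRunner owns this loop.

    // Sketch only, not part of this change. Imports (java.util.List) elided.
    static void runSketch(JsonNode config, ConfiguredAirbyteCatalog catalog,
                          List<AirbyteMessage> messages) throws Exception {
      DatabricksDestination destination = new DatabricksDestination();
      // The no-op lambda stands in for Airbyte's real output record collector.
      try (AirbyteMessageConsumer consumer =
          destination.getConsumer(config, catalog, message -> {})) {
        consumer.start();           // set up per-stream staging resources
        for (AirbyteMessage message : messages) {
          consumer.accept(message); // stage each record to S3
        }
      }                             // close() loads staged data and cleans up
    }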
-  public DatabricksDestination() {
-    super(DRIVER_CLASS, new DatabricksNameTransformer(), new DatabricksSqlOperations());
+  @Override
+  public void checkPersistence(JsonNode config) {
+    S3StreamCopier.attemptS3WriteAndDelete(S3Config.getS3Config(config));
   }

   @Override
-  public JsonNode toJdbcConfig(JsonNode databricksConfig) {
-    return getJdbcConfig(databricksConfig);
+  public ExtendedNameTransformer getNameTransformer() {
+    return new DatabricksNameTransformer();
   }

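A small illustration of where the transformer fits, assuming the inherited getIdentifier method (the removed check() made the same call via getNamingResolver()):

    // Derive a Databricks-safe identifier from a user-provided name.
    // Exact normalization rules live in DatabricksNameTransformer.
    ExtendedNameTransformer transformer = new DatabricksNameTransformer();
    String schemaName = transformer.getIdentifier(config.get("schema").asText());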
-  public static JsonNode getJdbcConfig(JsonNode databricksConfig) {
-    final String schema = Optional.ofNullable(databricksConfig.get("schema")).map(JsonNode::asText).orElse("default");
-
-    return Jsons.jsonNode(ImmutableMap.builder()
-        .put("username", "dummy")
-        .put("password", "dummy")
-//        .put("jdbc_url", String.format("jdbc:TODO://%s:%s/%s",
-//            databricksConfig.get("host").asText(),
-//            databricksConfig.get("port").asText(),
-//            databricksConfig.get("database").asText()))
-//        .put("schema", schema)
-        .put("jdbc_url", databricksConfig.get("jdbcUrl").asText())
-        .build());
+  @Override
+  public JdbcDatabase getDatabase(JsonNode databricksConfig) {
+    return Databases.createJdbcDatabase(
+        databricksConfig.get("username").asText(),
+        databricksConfig.has("password") ? databricksConfig.get("password").asText() : null,
+        databricksConfig.get("jdbc_url").asText(),
+        DRIVER_CLASS
+    );
   }

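For reference, getDatabase reads username, an optional password, and jdbc_url from the connector config, and getConsumer additionally reads schema; the S3 staging keys are parsed separately by S3Config.getS3Config and are omitted below rather than guessed. A hedged Jackson sketch of the JDBC-related fields only (the URL shape follows the Simba Spark driver convention and is an assumption, not taken from this diff):

    // Illustrative config fragment; values are placeholders.
    // Imports (ObjectMapper, ObjectNode) elided.
    ObjectMapper mapper = new ObjectMapper();
    ObjectNode config = mapper.createObjectNode()
        .put("username", "token")          // example value
        .put("password", "<access-token>") // optional per getDatabase
        .put("jdbc_url", "jdbc:spark://<server-hostname>:443/default;transportMode=http;ssl=1;httpPath=<http-path>")
        .put("schema", "default");         // read by getConsumer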
-  public static void main(String[] args) throws Exception {
-    LOGGER.info("starting destination: {}", DatabricksDestination.class);
-    getDriver();
-    new IntegrationRunner(new DatabricksDestination()).run(args);
-    LOGGER.info("completed destination: {}", DatabricksDestination.class);
+  @Override
+  public SqlOperations getSqlOperations() {
+    return new DatabricksSqlOperations();
   }

 }
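The removed main() (along with the non-functional getDriver() classloader hack it called) leaves this class without a standalone entry point in this diff. If one is still needed, the conventional shape, taken from the removed code minus the getDriver() call, would be:

    public static void main(String[] args) throws Exception {
      // Standard Airbyte entry point; the IntegrationRunner import
      // removed above would need restoring.
      new IntegrationRunner(new DatabricksDestination()).run(args);
    }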