Skip to content

Commit f74f5d9

Browse files
remove log4j from java cdk (#38583)
## What <!-- * Describe what the change is solving. Link all GitHub issues related to this change. --> ## How <!-- * Describe how code changes achieve the solution. --> ## Review guide <!-- 1. `x.py` 2. `y.py` --> ## User Impact <!-- * What is the end result perceived by the user? * If there are negative side effects, please list them. --> ## Can this PR be safely reverted and rolled back? <!-- * If unsure, leave it blank. --> - [ ] YES 💚 - [ ] NO ❌
1 parent d82639c commit f74f5d9

File tree

147 files changed

+1103
-1534
lines changed

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

147 files changed

+1103
-1534
lines changed

airbyte-cdk/java/airbyte-cdk/azure-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/jdbc/copy/azure/AzureBlobStorageStreamCopier.kt

+35-50
Original file line numberDiff line numberDiff line change
@@ -17,6 +17,7 @@ import io.airbyte.cdk.integrations.destination.jdbc.copy.StreamCopier
1717
import io.airbyte.commons.json.Jsons
1818
import io.airbyte.protocol.models.v0.AirbyteRecordMessage
1919
import io.airbyte.protocol.models.v0.DestinationSyncMode
20+
import io.github.oshai.kotlinlogging.KotlinLogging
2021
import java.io.*
2122
import java.nio.charset.StandardCharsets
2223
import java.sql.SQLException
@@ -26,8 +27,8 @@ import java.util.*
2627
import java.util.function.Consumer
2728
import org.apache.commons.csv.CSVFormat
2829
import org.apache.commons.csv.CSVPrinter
29-
import org.slf4j.Logger
30-
import org.slf4j.LoggerFactory
30+
31+
private val LOGGER = KotlinLogging.logger {}
3132

3233
abstract class AzureBlobStorageStreamCopier(
3334
protected val stagingFolder: String,
@@ -104,38 +105,32 @@ abstract class AzureBlobStorageStreamCopier(
104105

105106
@Throws(Exception::class)
106107
override fun closeStagingUploader(hasFailed: Boolean) {
107-
LOGGER.info("Uploading remaining data for {} stream.", streamName)
108+
LOGGER.info { "Uploading remaining data for $streamName stream." }
108109
for (csvPrinter in csvPrinters.values) {
109110
csvPrinter.close()
110111
}
111-
LOGGER.info("All data for {} stream uploaded.", streamName)
112+
LOGGER.info { "All data for $streamName stream uploaded." }
112113
}
113114

114115
@Throws(Exception::class)
115116
override fun createDestinationSchema() {
116-
LOGGER.info("Creating schema in destination if it doesn't exist: {}", schemaName)
117+
LOGGER.info { "Creating schema in destination if it doesn't exist: $schemaName" }
117118
sqlOperations.createSchemaIfNotExists(db, schemaName)
118119
}
119120

120121
@Throws(Exception::class)
121122
override fun createTemporaryTable() {
122-
LOGGER.info(
123-
"Preparing tmp table in destination for stream: {}, schema: {}, tmp table name: {}.",
124-
streamName,
125-
schemaName,
126-
tmpTableName
127-
)
123+
LOGGER.info {
124+
"Preparing tmp table in destination for stream: $streamName, schema: $schemaName, tmp table name: $tmpTableName."
125+
}
128126
sqlOperations.createTableIfNotExists(db, schemaName, tmpTableName)
129127
}
130128

131129
@Throws(Exception::class)
132130
override fun copyStagingFileToTemporaryTable() {
133-
LOGGER.info(
134-
"Starting copy to tmp table: {} in destination for stream: {}, schema: {}.",
135-
tmpTableName,
136-
streamName,
137-
schemaName
138-
)
131+
LOGGER.info {
132+
"Starting copy to tmp table: $tmpTableName in destination for stream: $streamName, schema: $schemaName."
133+
}
139134
for (azureStagingFile in azureStagingFiles) {
140135
copyAzureBlobCsvFileIntoTable(
141136
db,
@@ -145,11 +140,9 @@ abstract class AzureBlobStorageStreamCopier(
145140
azureBlobConfig
146141
)
147142
}
148-
LOGGER.info(
149-
"Copy to tmp table {} in destination for stream {} complete.",
150-
tmpTableName,
151-
streamName
152-
)
143+
LOGGER.info {
144+
"Copy to tmp table $tmpTableName in destination for stream $streamName complete."
145+
}
153146
}
154147

155148
private fun getFullAzurePath(azureStagingFile: String?): String {
@@ -166,50 +159,45 @@ abstract class AzureBlobStorageStreamCopier(
166159
@Throws(Exception::class)
167160
override fun createDestinationTable(): String? {
168161
@Suppress("DEPRECATION") val destTableName = nameTransformer.getRawTableName(streamName)
169-
LOGGER.info("Preparing table {} in destination.", destTableName)
162+
LOGGER.info { "Preparing table $destTableName in destination." }
170163
sqlOperations.createTableIfNotExists(db, schemaName, destTableName)
171-
LOGGER.info("Table {} in destination prepared.", tmpTableName)
164+
LOGGER.info { "Table $tmpTableName in destination prepared." }
172165

173166
return destTableName
174167
}
175168

176169
@Throws(Exception::class)
177170
override fun generateMergeStatement(destTableName: String?): String {
178-
LOGGER.info(
179-
"Preparing to merge tmp table {} to dest table: {}, schema: {}, in destination.",
180-
tmpTableName,
181-
destTableName,
182-
schemaName
183-
)
171+
LOGGER.info {
172+
"Preparing to merge tmp table $tmpTableName to dest table: $destTableName, schema: $schemaName, in destination."
173+
}
184174
val queries = StringBuilder()
185175
if (destSyncMode == DestinationSyncMode.OVERWRITE) {
186176
queries.append(sqlOperations.truncateTableQuery(db, schemaName, destTableName))
187-
LOGGER.info(
188-
"Destination OVERWRITE mode detected. Dest table: {}, schema: {}, truncated.",
189-
destTableName,
190-
schemaName
191-
)
177+
LOGGER.info {
178+
"Destination OVERWRITE mode detected. Dest table: $destTableName, schema: $schemaName, truncated."
179+
}
192180
}
193181
queries.append(sqlOperations.insertTableQuery(db, schemaName, tmpTableName, destTableName))
194182
return queries.toString()
195183
}
196184

197185
@Throws(Exception::class)
198186
override fun removeFileAndDropTmpTable() {
199-
LOGGER.info("Begin cleaning azure blob staging files.")
187+
LOGGER.info { "Begin cleaning azure blob staging files." }
200188
for (appendBlobClient in blobClients.values) {
201189
appendBlobClient.delete()
202190
}
203-
LOGGER.info("Azure Blob staging files cleaned.")
191+
LOGGER.info { "Azure Blob staging files cleaned." }
204192

205-
LOGGER.info("Begin cleaning {} tmp table in destination.", tmpTableName)
193+
LOGGER.info { "Begin cleaning $tmpTableName tmp table in destination." }
206194
sqlOperations.dropTableIfExists(db, schemaName, tmpTableName)
207-
LOGGER.info("{} tmp table in destination cleaned.", tmpTableName)
195+
LOGGER.info { "$tmpTableName tmp table in destination cleaned." }
208196
}
209197

210198
@Throws(Exception::class)
211199
override fun closeNonCurrentStagingFileWriters() {
212-
LOGGER.info("Begin closing non current file writers")
200+
LOGGER.info { "Begin closing non current file writers" }
213201
val removedKeys: MutableSet<String> = HashSet()
214202
for (key in activeStagingWriterFileNames) {
215203
if (key != currentFile) {
@@ -231,8 +219,7 @@ abstract class AzureBlobStorageStreamCopier(
231219
)
232220

233221
companion object {
234-
private val LOGGER: Logger =
235-
LoggerFactory.getLogger(AzureBlobStorageStreamCopier::class.java)
222+
236223
fun attemptAzureBlobWriteAndDelete(config: AzureBlobStorageConfig) {
237224
var appendBlobClient: AppendBlobClient? = null
238225
try {
@@ -249,7 +236,7 @@ abstract class AzureBlobStorageStreamCopier(
249236
listCreatedBlob(containerClient)
250237
} finally {
251238
if (appendBlobClient != null && appendBlobClient.exists()) {
252-
LOGGER.info("Deleting blob: " + appendBlobClient.blobName)
239+
LOGGER.info { "Deleting blob: ${appendBlobClient.blobName}" }
253240
appendBlobClient.delete()
254241
}
255242
}
@@ -260,16 +247,14 @@ abstract class AzureBlobStorageStreamCopier(
260247
.listBlobs()
261248
.forEach(
262249
Consumer { blobItem: BlobItem ->
263-
LOGGER.info(
264-
"Blob name: " + blobItem.name + "Snapshot: " + blobItem.snapshot
265-
)
250+
LOGGER.info { "Blob name: ${blobItem.name} Snapshot: ${blobItem.snapshot}" }
266251
}
267252
)
268253
}
269254

270255
private fun writeTestDataIntoBlob(appendBlobClient: AppendBlobClient?) {
271256
val test = "test_data"
272-
LOGGER.info("Writing test data to Azure Blob storage: $test")
257+
LOGGER.info { "Writing test data to Azure Blob storage: $test" }
273258
val dataStream: InputStream =
274259
ByteArrayInputStream(test.toByteArray(StandardCharsets.UTF_8))
275260

@@ -278,7 +263,7 @@ abstract class AzureBlobStorageStreamCopier(
278263
.appendBlock(dataStream, test.length.toLong())
279264
.blobCommittedBlockCount
280265

281-
LOGGER.info("blobCommittedBlockCount: $blobCommittedBlockCount")
266+
LOGGER.info { "blobCommittedBlockCount: $blobCommittedBlockCount" }
282267
}
283268

284269
private fun getBlobContainerClient(
@@ -291,9 +276,9 @@ abstract class AzureBlobStorageStreamCopier(
291276

292277
if (!appendBlobClient.exists()) {
293278
appendBlobClient.create()
294-
LOGGER.info("blobContainerClient created")
279+
LOGGER.info { "blobContainerClient created" }
295280
} else {
296-
LOGGER.info("blobContainerClient already exists")
281+
LOGGER.info { "blobContainerClient already exists" }
297282
}
298283
return containerClient
299284
}

airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/db/jdbc/AbstractJdbcCompatibleSourceOperations.kt

+5-11
Original file line numberDiff line numberDiff line change
@@ -13,6 +13,7 @@ import io.airbyte.cdk.integrations.base.AirbyteTraceMessageUtility
1313
import io.airbyte.commons.json.Jsons
1414
import io.airbyte.protocol.models.v0.AirbyteRecordMessageMeta
1515
import io.airbyte.protocol.models.v0.AirbyteRecordMessageMetaChange
16+
import io.github.oshai.kotlinlogging.KotlinLogging
1617
import java.math.BigDecimal
1718
import java.sql.*
1819
import java.sql.Date
@@ -21,16 +22,12 @@ import java.time.*
2122
import java.time.chrono.IsoEra
2223
import java.time.format.DateTimeParseException
2324
import java.util.*
24-
import org.slf4j.Logger
25-
import org.slf4j.LoggerFactory
2625

26+
private val LOGGER = KotlinLogging.logger {}
2727
/** Source operation skeleton for JDBC compatible databases. */
2828
abstract class AbstractJdbcCompatibleSourceOperations<Datatype> :
2929
JdbcCompatibleSourceOperations<Datatype> {
3030

31-
private val LOGGER: Logger =
32-
LoggerFactory.getLogger(AbstractJdbcCompatibleSourceOperations::class.java)
33-
3431
@Throws(SQLException::class)
3532
override fun convertDatabaseRowToAirbyteRecordData(queryContext: ResultSet): AirbyteRecordData {
3633
// the first call communicates with the database. after that the result is cached.
@@ -47,12 +44,9 @@ abstract class AbstractJdbcCompatibleSourceOperations<Datatype> :
4744
copyToJsonField(queryContext, i, jsonNode)
4845
} catch (e: java.lang.Exception) {
4946
jsonNode.putNull(columnName)
50-
LOGGER.info(
51-
"Failed to serialize column: {}, of type {}, with error {}",
52-
columnName,
53-
columnTypeName,
54-
e.message
55-
)
47+
LOGGER.info {
48+
"Failed to serialize column: $columnName, of type $columnTypeName, with error ${e.message}"
49+
}
5650
AirbyteTraceMessageUtility.emitAnalyticsTrace(dataTypesSerializationErrorMessage())
5751
metaChanges.add(
5852
AirbyteRecordMessageMetaChange()

airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/db/jdbc/DateTimeConverter.kt

+17-16
Original file line numberDiff line numberDiff line change
@@ -5,17 +5,18 @@ package io.airbyte.cdk.db.jdbc
55

66
import com.fasterxml.jackson.databind.node.ObjectNode
77
import io.airbyte.cdk.db.DataTypeUtils
8+
import io.github.oshai.kotlinlogging.KotlinLogging
89
import java.sql.*
910
import java.time.*
1011
import java.time.format.DateTimeFormatter
1112
import java.util.concurrent.*
1213
import kotlin.math.abs
1314
import kotlin.math.min
14-
import org.slf4j.Logger
15-
import org.slf4j.LoggerFactory
15+
16+
private val LOGGER = KotlinLogging.logger {}
1617

1718
object DateTimeConverter {
18-
private val LOGGER: Logger = LoggerFactory.getLogger(DateTimeConverter::class.java)
19+
1920
val TIME_WITH_TIMEZONE_FORMATTER: DateTimeFormatter =
2021
DateTimeFormatter.ofPattern(
2122
"HH:mm:ss[.][SSSSSSSSS][SSSSSSS][SSSSSS][SSSSS][SSSS][SSS][SS][S][''][XXX][XX][X]"
@@ -34,7 +35,7 @@ object DateTimeConverter {
3435
else time.toString()
3536
} else {
3637
if (!loggedUnknownTimeWithTimeZoneClass) {
37-
LOGGER.info("Unknown class for Time with timezone data type" + time.javaClass)
38+
LOGGER.info { "Unknown class for Time with timezone data type ${time.javaClass}" }
3839
loggedUnknownTimeWithTimeZoneClass = true
3940
}
4041
val timetz = OffsetTime.parse(time.toString(), TIME_WITH_TIMEZONE_FORMATTER)
@@ -78,9 +79,9 @@ object DateTimeConverter {
7879
return AbstractJdbcCompatibleSourceOperations.Companion.resolveEra(localDate, value)
7980
} else {
8081
if (!loggedUnknownTimestampWithTimeZoneClass) {
81-
LOGGER.info(
82-
"Unknown class for Timestamp with time zone data type" + timestamp.javaClass
83-
)
82+
LOGGER.info {
83+
"Unknown class for Timestamp with time zone data type ${timestamp.javaClass}"
84+
}
8485
loggedUnknownTimestampWithTimeZoneClass = true
8586
}
8687
val instant = Instant.parse(timestamp.toString())
@@ -123,7 +124,7 @@ object DateTimeConverter {
123124
)
124125
} else {
125126
if (!loggedUnknownTimestampClass) {
126-
LOGGER.info("Unknown class for Timestamp data type" + timestamp.javaClass)
127+
LOGGER.info { "Unknown class for Timestamp data type ${timestamp.javaClass}" }
127128
loggedUnknownTimestampClass = true
128129
}
129130
val localDateTime = LocalDateTime.parse(timestamp.toString())
@@ -158,7 +159,7 @@ object DateTimeConverter {
158159
return LocalDate.ofEpochDay(date.toLong()).format(DataTypeUtils.DATE_FORMATTER)
159160
} else {
160161
if (!loggedUnknownDateClass) {
161-
LOGGER.info("Unknown class for Date data type" + date.javaClass)
162+
LOGGER.info { "Unknown class for Date data type ${date.javaClass}" }
162163
loggedUnknownDateClass = true
163164
}
164165
val localDate = LocalDate.parse(date.toString())
@@ -182,22 +183,22 @@ object DateTimeConverter {
182183
} else {
183184
val updatedValue =
184185
min(abs(value.toDouble()), LocalTime.MAX.toNanoOfDay().toDouble()).toLong()
185-
LOGGER.debug(
186-
"Time values must use number of nanoseconds greater than 0 and less than 86400000000000 but its {}, converting to {} ",
187-
value,
188-
updatedValue
189-
)
186+
LOGGER.debug {
187+
"Time values must use number of nanoseconds greater than 0 and less than 86400000000000 but its $value, converting to $updatedValue "
188+
}
190189
return formatTime(LocalTime.ofNanoOfDay(updatedValue))
191190
}
192191
} else {
193192
if (!loggedUnknownTimeClass) {
194-
LOGGER.info("Unknown class for Time data type" + time.javaClass)
193+
LOGGER.info { "Unknown class for Time data type ${time.javaClass}" }
195194
loggedUnknownTimeClass = true
196195
}
197196

198197
val valueAsString = time.toString()
199198
if (valueAsString.startsWith("24")) {
200-
LOGGER.debug("Time value {} is above range, converting to 23:59:59", valueAsString)
199+
LOGGER.debug {
200+
"Time value ${valueAsString} is above range, converting to 23:59:59"
201+
}
201202
return LocalTime.MAX.toString()
202203
}
203204
return formatTime(LocalTime.parse(valueAsString))

airbyte-cdk/java/airbyte-cdk/core/src/main/kotlin/io/airbyte/cdk/db/jdbc/DefaultJdbcDatabase.kt

+9-15
Original file line numberDiff line numberDiff line change
@@ -8,14 +8,14 @@ import io.airbyte.cdk.db.JdbcCompatibleSourceOperations
88
import io.airbyte.commons.exceptions.ConnectionErrorException
99
import io.airbyte.commons.functional.CheckedConsumer
1010
import io.airbyte.commons.functional.CheckedFunction
11+
import io.github.oshai.kotlinlogging.KotlinLogging
1112
import java.sql.*
1213
import java.util.*
1314
import java.util.function.Function
1415
import java.util.stream.Stream
1516
import javax.sql.DataSource
16-
import org.slf4j.Logger
17-
import org.slf4j.LoggerFactory
1817

18+
private val LOGGER = KotlinLogging.logger {}
1919
/**
2020
* Database object for interacting with a JDBC connection. Can be used for any JDBC compliant db.
2121
*/
@@ -50,15 +50,13 @@ constructor(
5050
): Stream<T> {
5151
val connection = dataSource.connection
5252
return JdbcDatabase.Companion.toUnsafeStream<T>(query.apply(connection), recordTransform)
53-
.onClose(
54-
Runnable {
55-
try {
56-
connection.close()
57-
} catch (e: SQLException) {
58-
throw RuntimeException(e)
59-
}
53+
.onClose {
54+
try {
55+
connection.close()
56+
} catch (e: SQLException) {
57+
throw RuntimeException(e)
6058
}
61-
)
59+
}
6260
}
6361

6462
@get:Throws(SQLException::class)
@@ -125,16 +123,12 @@ constructor(
125123
.onClose(
126124
Runnable {
127125
try {
128-
LOGGER.info("closing connection")
126+
LOGGER.info { "closing connection" }
129127
connection.close()
130128
} catch (e: SQLException) {
131129
throw RuntimeException(e)
132130
}
133131
}
134132
)
135133
}
136-
137-
companion object {
138-
private val LOGGER: Logger = LoggerFactory.getLogger(DefaultJdbcDatabase::class.java)
139-
}
140134
}

0 commit comments

Comments
 (0)