 import io.airbyte.integrations.base.destination.typing_deduping.StreamConfig;
 import io.airbyte.integrations.base.destination.typing_deduping.TyperDeduper;
 import io.airbyte.integrations.destination.bigquery.formatter.BigQueryRecordFormatter;
-import io.airbyte.integrations.destination.bigquery.formatter.DefaultBigQueryRecordFormatter;
-import io.airbyte.integrations.destination.bigquery.formatter.GcsCsvBigQueryRecordFormatter;
 import io.airbyte.integrations.destination.bigquery.typing_deduping.BigQueryDestinationHandler;
 import io.airbyte.integrations.destination.bigquery.typing_deduping.BigQuerySqlGenerator;
 import io.airbyte.integrations.destination.bigquery.typing_deduping.BigQueryV1V2Migrator;
 import io.airbyte.integrations.destination.bigquery.typing_deduping.BigQueryV2TableMigrator;
-import io.airbyte.integrations.destination.bigquery.uploader.AbstractBigQueryUploader;
+import io.airbyte.integrations.destination.bigquery.uploader.BigQueryDirectUploader;
 import io.airbyte.integrations.destination.bigquery.uploader.BigQueryUploaderFactory;
 import io.airbyte.integrations.destination.bigquery.uploader.UploaderType;
 import io.airbyte.integrations.destination.bigquery.uploader.config.UploaderConfig;
@@ -292,14 +290,14 @@ public SerializedAirbyteMessageConsumer getSerializedMessageConsumer(final JsonN
         BigQueryUtils.getDatasetId(config));
   }
 
-  protected Supplier<ConcurrentMap<AirbyteStreamNameNamespacePair, AbstractBigQueryUploader<?>>> getUploaderMap(
-      final BigQuery bigquery,
-      final JsonNode config,
-      final ConfiguredAirbyteCatalog catalog,
-      final ParsedCatalog parsedCatalog)
+  protected Supplier<ConcurrentMap<AirbyteStreamNameNamespacePair, BigQueryDirectUploader>> getUploaderMap(
+      final BigQuery bigquery,
+      final JsonNode config,
+      final ConfiguredAirbyteCatalog catalog,
+      final ParsedCatalog parsedCatalog)
       throws IOException {
     return () -> {
-      final ConcurrentMap<AirbyteStreamNameNamespacePair, AbstractBigQueryUploader<?>> uploaderMap = new ConcurrentHashMap<>();
+      final ConcurrentMap<AirbyteStreamNameNamespacePair, BigQueryDirectUploader> uploaderMap = new ConcurrentHashMap<>();
       for (final ConfiguredAirbyteStream configStream : catalog.getStreams()) {
         final AirbyteStream stream = configStream.getStream();
         final StreamConfig parsedStream;
@@ -315,7 +313,7 @@ protected Supplier<ConcurrentMap<AirbyteStreamNameNamespacePair, AbstractBigQuer
             .configStream(configStream)
             .parsedStream(parsedStream)
             .config(config)
-            .formatterMap(getFormatterMap(stream.getJsonSchema()))
+            .formatterMap(getFormatterMap())
             .targetTableName(targetTableName)
             // This refers to whether this is BQ denormalized or not
             .isDefaultAirbyteTmpSchema(isDefaultAirbyteTmpTableSchema())
@@ -333,7 +331,7 @@ protected Supplier<ConcurrentMap<AirbyteStreamNameNamespacePair, AbstractBigQuer
 
   protected void putStreamIntoUploaderMap(final AirbyteStream stream,
                                           final UploaderConfig uploaderConfig,
-                                          final Map<AirbyteStreamNameNamespacePair, AbstractBigQueryUploader<?>> uploaderMap)
+                                          final Map<AirbyteStreamNameNamespacePair, BigQueryDirectUploader> uploaderMap)
       throws IOException {
     uploaderMap.put(
         AirbyteStreamNameNamespacePair.fromAirbyteStream(stream),
@@ -351,10 +349,10 @@ protected boolean isDefaultAirbyteTmpTableSchema() {
     return true;
   }
 
-  protected Map<UploaderType, BigQueryRecordFormatter> getFormatterMap(final JsonNode jsonSchema) {
+  protected Map<UploaderType, BigQueryRecordFormatter> getFormatterMap() {
     return Map.of(
-        UploaderType.STANDARD, new DefaultBigQueryRecordFormatter(jsonSchema, namingResolver),
-        UploaderType.CSV, new GcsCsvBigQueryRecordFormatter(jsonSchema, namingResolver));
+        UploaderType.STANDARD, new BigQueryRecordFormatter(namingResolver),
+        UploaderType.CSV, new BigQueryRecordFormatter(namingResolver));
   }
 
   private SerializedAirbyteMessageConsumer getStandardRecordConsumer(final BigQuery bigquery,
@@ -364,7 +362,7 @@ private SerializedAirbyteMessageConsumer getStandardRecordConsumer(final BigQuer
                                                                      final Consumer<AirbyteMessage> outputRecordCollector,
                                                                      final TyperDeduper typerDeduper)
       throws Exception {
-    final Supplier<ConcurrentMap<AirbyteStreamNameNamespacePair, AbstractBigQueryUploader<?>>> writeConfigs = getUploaderMap(
+    final Supplier<ConcurrentMap<AirbyteStreamNameNamespacePair, BigQueryDirectUploader>> writeConfigs = getUploaderMap(
         bigquery,
         config,
         catalog,
@@ -390,7 +388,7 @@ private SerializedAirbyteMessageConsumer getStandardRecordConsumer(final BigQuer
           LOGGER.info("Raw table {} not found, continuing with creation", rawTableId);
         }
         LOGGER.info("Creating table {}", rawTableId);
-        BigQueryUtils.createPartitionedTableIfNotExists(bigquery, rawTableId, DefaultBigQueryRecordFormatter.SCHEMA_V2);
+        BigQueryUtils.createPartitionedTableIfNotExists(bigquery, rawTableId, BigQueryRecordFormatter.SCHEMA_V2);
       } else {
         uploader.createRawTable();
       }
@@ -415,7 +413,7 @@ private SerializedAirbyteMessageConsumer getStandardRecordConsumer(final BigQuer
   }
 
   protected Function<JsonNode, BigQueryRecordFormatter> getCsvRecordFormatterCreator(final BigQuerySQLNameTransformer namingResolver) {
-    return streamSchema -> new GcsCsvBigQueryRecordFormatter(streamSchema, namingResolver);
+    return streamSchema -> new BigQueryRecordFormatter(namingResolver);
  }
 
   private void setDefaultStreamNamespace(final ConfiguredAirbyteCatalog catalog, final String namespace) {
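A note on the formatter change above: `getFormatterMap` can drop its `jsonSchema` parameter because, under the Destinations V2 raw-table layout, every stream is written to the same fixed set of raw columns, so a single schema-independent `BigQueryRecordFormatter` can back both the `STANDARD` and `CSV` uploader paths. The sketch below is illustrative only, not the connector's actual class: `FixedSchemaRecordFormatter` and `formatRecord` are hypothetical names, and the `_airbyte_*` columns follow the Destinations V2 convention rather than quoting the real `SCHEMA_V2` constant.

```java
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;
import java.time.Instant;
import java.util.UUID;

// Hypothetical stand-in for a schema-independent record formatter: the same
// fixed raw row is emitted for every stream, so no per-stream JSON schema is needed.
public final class FixedSchemaRecordFormatter {

  private static final ObjectMapper MAPPER = new ObjectMapper();

  public ObjectNode formatRecord(final JsonNode recordData, final long emittedAtMillis) {
    final ObjectNode row = MAPPER.createObjectNode();
    row.put("_airbyte_raw_id", UUID.randomUUID().toString()); // unique id per raw record
    row.put("_airbyte_extracted_at", Instant.ofEpochMilli(emittedAtMillis).toString());
    row.putNull("_airbyte_loaded_at"); // populated later by typing/deduping
    row.put("_airbyte_data", recordData.toString()); // payload serialized as JSON text
    return row;
  }
}
```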
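Similarly, `createPartitionedTableIfNotExists` can now take the constant `BigQueryRecordFormatter.SCHEMA_V2` because the raw-table schema no longer varies per stream. A minimal sketch of that create-if-absent pattern with the google-cloud-bigquery client follows, under stated assumptions: the dataset and table names are placeholders, and the column list assumes the V2 raw columns rather than the connector's actual constant.

```java
import com.google.cloud.bigquery.BigQuery;
import com.google.cloud.bigquery.BigQueryOptions;
import com.google.cloud.bigquery.Field;
import com.google.cloud.bigquery.Schema;
import com.google.cloud.bigquery.StandardSQLTypeName;
import com.google.cloud.bigquery.StandardTableDefinition;
import com.google.cloud.bigquery.TableId;
import com.google.cloud.bigquery.TableInfo;
import com.google.cloud.bigquery.TimePartitioning;

public final class RawTableBootstrap {

  public static void main(final String[] args) {
    final BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService();
    // Placeholder dataset/table names.
    final TableId tableId = TableId.of("airbyte_internal", "users_raw");

    // Assumed V2 raw columns; the connector keeps its own copy in SCHEMA_V2.
    final Schema schema = Schema.of(
        Field.of("_airbyte_raw_id", StandardSQLTypeName.STRING),
        Field.of("_airbyte_extracted_at", StandardSQLTypeName.TIMESTAMP),
        Field.of("_airbyte_loaded_at", StandardSQLTypeName.TIMESTAMP),
        Field.of("_airbyte_data", StandardSQLTypeName.STRING));

    // Day-partition on extraction time so pruning and retention work on raw data.
    final StandardTableDefinition definition = StandardTableDefinition.newBuilder()
        .setSchema(schema)
        .setTimePartitioning(TimePartitioning.newBuilder(TimePartitioning.Type.DAY)
            .setField("_airbyte_extracted_at")
            .build())
        .build();

    // Create only if absent, mirroring the "if not exists" behavior in the diff.
    if (bigquery.getTable(tableId) == null) {
      bigquery.create(TableInfo.of(tableId, definition));
    }
  }
}
```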