@@ -5325,9 +5325,74 @@ def test_load_table_from_dataframe_w_custom_job_config(self):
         )

         sent_config = load_table_from_file.mock_calls[0][2]["job_config"]
-        assert sent_config is job_config
         assert sent_config.source_format == job.SourceFormat.PARQUET

+    @unittest.skipIf(pandas is None, "Requires `pandas`")
+    @unittest.skipIf(pyarrow is None, "Requires `pyarrow`")
+    def test_load_table_from_dataframe_w_automatic_schema(self):
+        from google.cloud.bigquery.client import _DEFAULT_NUM_RETRIES
+        from google.cloud.bigquery import job
+        from google.cloud.bigquery.schema import SchemaField
+
+        client = self._make_client()
+        dt_col = pandas.Series(
+            [
+                datetime.datetime(2010, 1, 2, 3, 44, 50),
+                datetime.datetime(2011, 2, 3, 14, 50, 59),
+                datetime.datetime(2012, 3, 14, 15, 16),
+            ],
+            dtype="datetime64[ns]",
+        )
+        ts_col = pandas.Series(
+            [
+                datetime.datetime(2010, 1, 2, 3, 44, 50),
+                datetime.datetime(2011, 2, 3, 14, 50, 59),
+                datetime.datetime(2012, 3, 14, 15, 16),
+            ],
+            dtype="datetime64[ns]",
+        ).dt.tz_localize(pytz.utc)
+        df_data = {
+            "int_col": [1, 2, 3],
+            "float_col": [1.0, 2.0, 3.0],
+            "bool_col": [True, False, True],
+            "dt_col": dt_col,
+            "ts_col": ts_col,
+        }
+        dataframe = pandas.DataFrame(
+            df_data, columns=["int_col", "float_col", "bool_col", "dt_col", "ts_col"]
+        )
+        load_patch = mock.patch(
+            "google.cloud.bigquery.client.Client.load_table_from_file", autospec=True
+        )
+
+        with load_patch as load_table_from_file:
+            client.load_table_from_dataframe(
+                dataframe, self.TABLE_REF, location=self.LOCATION
+            )
+
+        load_table_from_file.assert_called_once_with(
+            client,
+            mock.ANY,
+            self.TABLE_REF,
+            num_retries=_DEFAULT_NUM_RETRIES,
+            rewind=True,
+            job_id=mock.ANY,
+            job_id_prefix=None,
+            location=self.LOCATION,
+            project=None,
+            job_config=mock.ANY,
+        )
+
+        sent_config = load_table_from_file.mock_calls[0][2]["job_config"]
+        assert sent_config.source_format == job.SourceFormat.PARQUET
+        assert tuple(sent_config.schema) == (
+            SchemaField("int_col", "INTEGER"),
+            SchemaField("float_col", "FLOAT"),
+            SchemaField("bool_col", "BOOLEAN"),
+            SchemaField("dt_col", "DATETIME"),
+            SchemaField("ts_col", "TIMESTAMP"),
+        )
+
     @unittest.skipIf(pandas is None, "Requires `pandas`")
     @unittest.skipIf(pyarrow is None, "Requires `pyarrow`")
     def test_load_table_from_dataframe_w_schema_wo_pyarrow(self):
@@ -5475,7 +5540,7 @@ def test_load_table_from_dataframe_w_nulls(self):
         )

         sent_config = load_table_from_file.mock_calls[0][2]["job_config"]
-        assert sent_config is job_config
+        assert sent_config.schema == schema
         assert sent_config.source_format == job.SourceFormat.PARQUET

     # Low-level tests