Commit a6126b7
Fix test coverage.
1 parent b901cf2 commit a6126b7

File tree

2 files changed (+74, -3)

bigquery/google/cloud/bigquery/client.py (+7, -1)

@@ -1532,7 +1532,13 @@ def load_table_from_dataframe(
             location = self.location
 
         if not job_config.schema:
-            job_config.schema = _pandas_helpers.dataframe_to_bq_schema(dataframe)
+            autodetected_schema = _pandas_helpers.dataframe_to_bq_schema(dataframe)
+
+            # Only use an explicit schema if we were able to determine one
+            # matching the dataframe. If not, fallback to the pandas to_parquet
+            # method.
+            if autodetected_schema:
+                job_config.schema = autodetected_schema
 
         tmpfd, tmppath = tempfile.mkstemp(suffix="_job_{}.parquet".format(job_id[:8]))
         os.close(tmpfd)
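For callers, the effect of this change is that an explicit schema is only attached to the job config when the dataframe's columns can be mapped to BigQuery types; otherwise the client skips setting a schema and lets the parquet serialization step infer the types. A minimal usage sketch, assuming a default client and a dataset/table name that are purely illustrative:

import datetime

import pandas
from google.cloud import bigquery

client = bigquery.Client()
table_ref = client.dataset("my_dataset").table("my_table")  # illustrative names

# Columns with well-known dtypes (int64, float64, bool, datetime64[ns]) can be
# mapped to a BigQuery schema before upload; no job_config.schema is required.
dataframe = pandas.DataFrame(
    {
        "int_col": [1, 2, 3],
        "dt_col": pandas.Series(
            [datetime.datetime(2012, 3, 14, 15, 16)] * 3, dtype="datetime64[ns]"
        ),
    }
)

# With this commit, if the schema cannot be autodetected, the client simply
# leaves job_config.schema unset and falls back to pandas' to_parquet type
# inference instead of sending an incomplete schema.
load_job = client.load_table_from_dataframe(dataframe, table_ref)
load_job.result()  # wait for the load to complete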

bigquery/tests/unit/test_client.py (+67, -2)

@@ -5325,9 +5325,74 @@ def test_load_table_from_dataframe_w_custom_job_config(self):
         )
 
         sent_config = load_table_from_file.mock_calls[0][2]["job_config"]
-        assert sent_config is job_config
         assert sent_config.source_format == job.SourceFormat.PARQUET
 
+    @unittest.skipIf(pandas is None, "Requires `pandas`")
+    @unittest.skipIf(pyarrow is None, "Requires `pyarrow`")
+    def test_load_table_from_dataframe_w_automatic_schema(self):
+        from google.cloud.bigquery.client import _DEFAULT_NUM_RETRIES
+        from google.cloud.bigquery import job
+        from google.cloud.bigquery.schema import SchemaField
+
+        client = self._make_client()
+        dt_col = pandas.Series(
+            [
+                datetime.datetime(2010, 1, 2, 3, 44, 50),
+                datetime.datetime(2011, 2, 3, 14, 50, 59),
+                datetime.datetime(2012, 3, 14, 15, 16),
+            ],
+            dtype="datetime64[ns]",
+        )
+        ts_col = pandas.Series(
+            [
+                datetime.datetime(2010, 1, 2, 3, 44, 50),
+                datetime.datetime(2011, 2, 3, 14, 50, 59),
+                datetime.datetime(2012, 3, 14, 15, 16),
+            ],
+            dtype="datetime64[ns]",
+        ).dt.tz_localize(pytz.utc)
+        df_data = {
+            "int_col": [1, 2, 3],
+            "float_col": [1.0, 2.0, 3.0],
+            "bool_col": [True, False, True],
+            "dt_col": dt_col,
+            "ts_col": ts_col,
+        }
+        dataframe = pandas.DataFrame(
+            df_data, columns=["int_col", "float_col", "bool_col", "dt_col", "ts_col"]
+        )
+        load_patch = mock.patch(
+            "google.cloud.bigquery.client.Client.load_table_from_file", autospec=True
+        )
+
+        with load_patch as load_table_from_file:
+            client.load_table_from_dataframe(
+                dataframe, self.TABLE_REF, location=self.LOCATION
+            )
+
+        load_table_from_file.assert_called_once_with(
+            client,
+            mock.ANY,
+            self.TABLE_REF,
+            num_retries=_DEFAULT_NUM_RETRIES,
+            rewind=True,
+            job_id=mock.ANY,
+            job_id_prefix=None,
+            location=self.LOCATION,
+            project=None,
+            job_config=mock.ANY,
+        )
+
+        sent_config = load_table_from_file.mock_calls[0][2]["job_config"]
+        assert sent_config.source_format == job.SourceFormat.PARQUET
+        assert tuple(sent_config.schema) == (
+            SchemaField("int_col", "INTEGER"),
+            SchemaField("float_col", "FLOAT"),
+            SchemaField("bool_col", "BOOLEAN"),
+            SchemaField("dt_col", "DATETIME"),
+            SchemaField("ts_col", "TIMESTAMP"),
+        )
+
     @unittest.skipIf(pandas is None, "Requires `pandas`")
     @unittest.skipIf(pyarrow is None, "Requires `pyarrow`")
     def test_load_table_from_dataframe_w_schema_wo_pyarrow(self):
@@ -5475,7 +5540,7 @@ def test_load_table_from_dataframe_w_nulls(self):
         )
 
         sent_config = load_table_from_file.mock_calls[0][2]["job_config"]
-        assert sent_config is job_config
+        assert sent_config.schema == schema
         assert sent_config.source_format == job.SourceFormat.PARQUET
 
     # Low-level tests
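The new test pins down the dtype-to-BigQuery-type mapping that the autodetection relies on: int64 to INTEGER, float64 to FLOAT, bool to BOOLEAN, naive datetime64[ns] to DATETIME, and tz-aware datetimes to TIMESTAMP. A minimal sketch of inspecting that mapping directly, assuming the private helper keeps the one-argument `dataframe_to_bq_schema(dataframe)` signature shown in the diff and returns None when a column dtype cannot be mapped (column names below are illustrative):

import datetime

import pandas
import pytz
from google.cloud.bigquery import _pandas_helpers

dataframe = pandas.DataFrame(
    {
        "int_col": [1, 2, 3],
        "ts_col": pandas.Series(
            [datetime.datetime(2010, 1, 2, 3, 44, 50)] * 3, dtype="datetime64[ns]"
        ).dt.tz_localize(pytz.utc),
    }
)

# Expected, per the test above: INTEGER for int64, TIMESTAMP for tz-aware datetimes.
schema = _pandas_helpers.dataframe_to_bq_schema(dataframe)
if schema is None:
    # At least one dtype could not be mapped; the client would fall back to
    # letting the parquet writer infer the types.
    print("schema could not be autodetected")
else:
    for field in schema:
        print(field.name, field.field_type)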
