Skip to content

Commit 8ad2e5b

Browse files
Mattix23gcf-owl-bot[bot]tswast
authored
docs: created samples for load table and create table from schema file (#1436)
* docs: created samples for load table and create table from schema file * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * Apply suggestions from code review Co-authored-by: Tim Swast <[email protected]> * Update samples/snippets/create_table_schema_from_json.py Co-authored-by: Tim Swast <[email protected]> Co-authored-by: Owl Bot <gcf-owl-bot[bot]@users.noreply.github.com> Co-authored-by: Tim Swast <[email protected]>
1 parent 89f8e9b commit 8ad2e5b

11 files changed

+201
-4
lines changed
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,42 @@
1+
# Copyright 2022 Google LLC
2+
#
3+
# Licensed under the Apache License, Version 2.0 (the "License");
4+
# you may not use this file except in compliance with the License.
5+
# You may obtain a copy of the License at
6+
#
7+
# https://www.apache.org/licenses/LICENSE-2.0
8+
#
9+
# Unless required by applicable law or agreed to in writing, software
10+
# distributed under the License is distributed on an "AS IS" BASIS,
11+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12+
# See the License for the specific language governing permissions and
13+
# limitations under the License.
14+
15+
import pathlib
16+
17+
18+
def create_table(table_id: str) -> None:
    """Create a BigQuery table whose schema is read from a local JSON file.

    Args:
        table_id: Full table ID ("project.dataset.table") of the table to create.
    """
    # Stash the real values so the doc-snippet placeholders below can be
    # swapped back in before any API call is made.
    saved_table_id = table_id
    samples_dir = pathlib.Path(__file__).parent
    saved_schema_path = str(samples_dir / "schema.json")
    # [START bigquery_schema_file_create]
    from google.cloud import bigquery

    client = bigquery.Client()

    # TODO(dev): Change table_id to the full name of the table you want to create.
    table_id = "your-project.your_dataset.your_table_name"
    # TODO(dev): Change schema_path variable to the path of your schema file.
    schema_path = "path/to/schema.json"
    # [END bigquery_schema_file_create]
    table_id = saved_table_id
    schema_path = saved_schema_path

    # [START bigquery_schema_file_create]
    # To load a schema file use the schema_from_json method.
    schema = client.schema_from_json(schema_path)

    table = bigquery.Table(table_id, schema=schema)
    table = client.create_table(table)  # API request
    print(f"Created table {table_id}.")
    # [END bigquery_schema_file_create]
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,32 @@
1+
# Copyright 2022 Google LLC
2+
#
3+
# Licensed under the Apache License, Version 2.0 (the "License");
4+
# you may not use this file except in compliance with the License.
5+
# You may obtain a copy of the License at
6+
#
7+
# https://www.apache.org/licenses/LICENSE-2.0
8+
#
9+
# Unless required by applicable law or agreed to in writing, software
10+
# distributed under the License is distributed on an "AS IS" BASIS,
11+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12+
# See the License for the specific language governing permissions and
13+
# limitations under the License.
14+
15+
import typing
16+
17+
import create_table_schema_from_json
18+
19+
if typing.TYPE_CHECKING:
20+
import pytest
21+
22+
23+
def test_create_table(
    capsys: "pytest.CaptureFixture[str]",
    random_table_id: str,
) -> None:
    """Smoke test: the sample should report the freshly created table on stdout."""
    create_table_schema_from_json.create_table(random_table_id)

    captured = capsys.readouterr()
    assert "Created" in captured.out
    assert random_table_id in captured.out

samples/snippets/dataset_access_test.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -18,8 +18,8 @@
1818
import update_dataset_access
1919

2020
if typing.TYPE_CHECKING:
21-
import pytest
2221
from google.cloud import bigquery
22+
import pytest
2323

2424

2525
def test_dataset_access_permissions(

samples/snippets/delete_job.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -17,8 +17,8 @@ def delete_job_metadata(job_id: str, location: str) -> None:
1717
orig_job_id = job_id
1818
orig_location = location
1919
# [START bigquery_delete_job]
20-
from google.cloud import bigquery
2120
from google.api_core import exceptions
21+
from google.cloud import bigquery
2222

2323
# TODO(developer): Set the job ID to the ID of the job whose metadata you
2424
# wish to delete.
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,60 @@
1+
# Copyright 2022 Google LLC
2+
#
3+
# Licensed under the Apache License, Version 2.0 (the "License");
4+
# you may not use this file except in compliance with the License.
5+
# You may obtain a copy of the License at
6+
#
7+
# https://www.apache.org/licenses/LICENSE-2.0
8+
#
9+
# Unless required by applicable law or agreed to in writing, software
10+
# distributed under the License is distributed on an "AS IS" BASIS,
11+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12+
# See the License for the specific language governing permissions and
13+
# limitations under the License.
14+
15+
import pathlib
16+
17+
18+
def load_table(table_id: str) -> None:
    """Load CSV data from GCS into a table using a schema read from a JSON file.

    Args:
        table_id: Full table ID ("project.dataset.table") of the destination table.
    """
    # Stash the real values so the doc-snippet placeholders below can be
    # swapped back in before any API call is made.
    saved_uri = "gs://cloud-samples-data/bigquery/us-states/us-states.csv"
    saved_table_id = table_id
    samples_dir = pathlib.Path(__file__).parent
    saved_schema_path = str(samples_dir / "schema_us_states.json")
    # [START bigquery_schema_file_load]
    from google.cloud import bigquery

    client = bigquery.Client()

    # TODO(dev): Change uri variable to the path of your data file.
    uri = "gs://your-bucket/path/to/your-file.csv"
    # TODO(dev): Change table_id to the full name of the table you want to create.
    table_id = "your-project.your_dataset.your_table"
    # TODO(dev): Change schema_path variable to the path of your schema file.
    schema_path = "path/to/schema.json"
    # [END bigquery_schema_file_load]
    uri = saved_uri
    table_id = saved_table_id
    schema_path = saved_schema_path
    # [START bigquery_schema_file_load]
    # To load a schema file use the schema_from_json method.
    schema = client.schema_from_json(schema_path)

    job_config = bigquery.LoadJobConfig(
        # To use the schema you loaded pass it into the
        # LoadJobConfig constructor.
        schema=schema,
        skip_leading_rows=1,
    )

    # Pass the job_config object to the load_table_from_file,
    # load_table_from_json, or load_table_from_uri method
    # to use the schema on a new table.
    load_job = client.load_table_from_uri(
        uri, table_id, job_config=job_config
    )  # Make an API request.

    load_job.result()  # Waits for the job to complete.

    destination_table = client.get_table(table_id)  # Make an API request.
    print(f"Loaded {destination_table.num_rows} rows to {table_id}.")
    # [END bigquery_schema_file_load]
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,32 @@
1+
# Copyright 2022 Google LLC
2+
#
3+
# Licensed under the Apache License, Version 2.0 (the "License");
4+
# you may not use this file except in compliance with the License.
5+
# You may obtain a copy of the License at
6+
#
7+
# https://www.apache.org/licenses/LICENSE-2.0
8+
#
9+
# Unless required by applicable law or agreed to in writing, software
10+
# distributed under the License is distributed on an "AS IS" BASIS,
11+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12+
# See the License for the specific language governing permissions and
13+
# limitations under the License.
14+
15+
import typing
16+
17+
import load_table_schema_from_json
18+
19+
if typing.TYPE_CHECKING:
20+
import pytest
21+
22+
23+
def test_load_table(
    capsys: "pytest.CaptureFixture[str]",
    random_table_id: str,
) -> None:
    """Smoke test: the sample should report the row count loaded into the table."""
    load_table_schema_from_json.load_table(random_table_id)

    captured = capsys.readouterr()
    assert "Loaded" in captured.out
    assert random_table_id in captured.out

samples/snippets/materialized_view.py

+1
Original file line numberDiff line numberDiff line change
@@ -60,6 +60,7 @@ def update_materialized_view(
6060

6161
# [START bigquery_update_materialized_view]
6262
import datetime
63+
6364
from google.cloud import bigquery
6465

6566
bigquery_client = bigquery.Client()

samples/snippets/quickstart_test.py

-1
Original file line numberDiff line numberDiff line change
@@ -20,7 +20,6 @@
2020

2121
import quickstart
2222

23-
2423
# Must match the dataset listed in quickstart.py (there's no easy way to
2524
# extract this).
2625
DATASET_ID = "my_new_dataset"

samples/snippets/schema.json

+20
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,20 @@
1+
[
2+
{
3+
"name": "qtr",
4+
"type": "STRING",
5+
"mode": "REQUIRED",
6+
"description": "quarter"
7+
},
8+
{
9+
"name": "rep",
10+
"type": "STRING",
11+
"mode": "NULLABLE",
12+
"description": "sales representative"
13+
},
14+
{
15+
"name": "sales",
16+
"type": "FLOAT",
17+
"mode": "NULLABLE",
18+
"defaultValueExpression": "2.55"
19+
}
20+
]
+12
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,12 @@
1+
[
2+
{
3+
"name": "name",
4+
"type": "STRING",
5+
"mode": "NULLABLE"
6+
},
7+
{
8+
"name": "post_abbr",
9+
"type": "STRING",
10+
"mode": "NULLABLE"
11+
}
12+
]

samples/snippets/user_credentials_test.py

-1
Original file line numberDiff line numberDiff line change
@@ -21,7 +21,6 @@
2121

2222
from user_credentials import main
2323

24-
2524
PROJECT = os.environ["GOOGLE_CLOUD_PROJECT"]
2625

2726
MockType = Union[mock.mock.MagicMock, mock.mock.AsyncMock]

0 commit comments

Comments
 (0)