hc99 committed on
Commit
c61e0be
·
verified ·
1 Parent(s): 8766bc5

Add files using upload-large-folder tool

Browse files
This view is limited to 50 files because it contains too many changes. See raw diff
Files changed (50) hide show
  1. testbed/googleapis__python-bigquery/samples/CONTRIBUTING.md +1 -0
  2. testbed/googleapis__python-bigquery/samples/__init__.py +0 -0
  3. testbed/googleapis__python-bigquery/samples/add_empty_column.py +40 -0
  4. testbed/googleapis__python-bigquery/samples/client_query_batch.py +53 -0
  5. testbed/googleapis__python-bigquery/samples/client_query_destination_table.py +40 -0
  6. testbed/googleapis__python-bigquery/samples/client_query_destination_table_clustered.py +43 -0
  7. testbed/googleapis__python-bigquery/samples/client_query_destination_table_cmek.py +48 -0
  8. testbed/googleapis__python-bigquery/samples/client_query_destination_table_legacy.py +44 -0
  9. testbed/googleapis__python-bigquery/samples/client_query_dry_run.py +44 -0
  10. testbed/googleapis__python-bigquery/samples/client_query_legacy_sql.py +40 -0
  11. testbed/googleapis__python-bigquery/samples/client_query_shortmode.py +53 -0
  12. testbed/googleapis__python-bigquery/samples/client_query_w_named_params.py +42 -0
  13. testbed/googleapis__python-bigquery/samples/client_query_w_positional_params.py +44 -0
  14. testbed/googleapis__python-bigquery/samples/client_query_w_struct_params.py +39 -0
  15. testbed/googleapis__python-bigquery/samples/client_query_w_timestamp_params.py +41 -0
  16. testbed/googleapis__python-bigquery/samples/copy_table.py +34 -0
  17. testbed/googleapis__python-bigquery/samples/copy_table_cmek.py +46 -0
  18. testbed/googleapis__python-bigquery/samples/copy_table_multiple_source.py +36 -0
  19. testbed/googleapis__python-bigquery/samples/create_dataset.py +37 -0
  20. testbed/googleapis__python-bigquery/samples/create_job.py +66 -0
  21. testbed/googleapis__python-bigquery/samples/create_routine.py +50 -0
  22. testbed/googleapis__python-bigquery/samples/create_routine_ddl.py +40 -0
  23. testbed/googleapis__python-bigquery/samples/create_table.py +36 -0
  24. testbed/googleapis__python-bigquery/samples/create_table_clustered.py +46 -0
  25. testbed/googleapis__python-bigquery/samples/create_table_range_partitioned.py +49 -0
  26. testbed/googleapis__python-bigquery/samples/dataset_exists.py +31 -0
  27. testbed/googleapis__python-bigquery/samples/delete_dataset.py +34 -0
  28. testbed/googleapis__python-bigquery/samples/delete_dataset_labels.py +40 -0
  29. testbed/googleapis__python-bigquery/samples/delete_model.py +32 -0
  30. testbed/googleapis__python-bigquery/samples/delete_routine.py +30 -0
  31. testbed/googleapis__python-bigquery/samples/delete_table.py +31 -0
  32. testbed/googleapis__python-bigquery/samples/download_public_data.py +32 -0
  33. testbed/googleapis__python-bigquery/samples/download_public_data_sandbox.py +35 -0
  34. testbed/googleapis__python-bigquery/samples/get_dataset.py +55 -0
  35. testbed/googleapis__python-bigquery/samples/get_dataset_labels.py +37 -0
  36. testbed/googleapis__python-bigquery/samples/get_model.py +36 -0
  37. testbed/googleapis__python-bigquery/samples/get_routine.py +43 -0
  38. testbed/googleapis__python-bigquery/samples/get_table.py +36 -0
  39. testbed/googleapis__python-bigquery/samples/label_dataset.py +32 -0
  40. testbed/googleapis__python-bigquery/samples/list_datasets.py +33 -0
  41. testbed/googleapis__python-bigquery/samples/list_datasets_by_label.py +33 -0
  42. testbed/googleapis__python-bigquery/samples/list_models.py +39 -0
  43. testbed/googleapis__python-bigquery/samples/list_routines.py +33 -0
  44. testbed/googleapis__python-bigquery/samples/list_tables.py +33 -0
  45. testbed/googleapis__python-bigquery/samples/load_table_clustered.py +59 -0
  46. testbed/googleapis__python-bigquery/samples/load_table_dataframe.py +120 -0
  47. testbed/googleapis__python-bigquery/samples/load_table_file.py +49 -0
  48. testbed/googleapis__python-bigquery/samples/load_table_uri_autodetect_csv.py +44 -0
  49. testbed/googleapis__python-bigquery/samples/load_table_uri_autodetect_json.py +41 -0
  50. testbed/googleapis__python-bigquery/samples/load_table_uri_avro.py +37 -0
testbed/googleapis__python-bigquery/samples/CONTRIBUTING.md ADDED
@@ -0,0 +1 @@
 
 
1
+ See https://github.com/GoogleCloudPlatform/python-docs-samples/blob/main/CONTRIBUTING.md
testbed/googleapis__python-bigquery/samples/__init__.py ADDED
File without changes
testbed/googleapis__python-bigquery/samples/add_empty_column.py ADDED
@@ -0,0 +1,40 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2019 Google LLC
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # https://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+
16
def add_empty_column(table_id: str) -> None:
    """Append a nullable STRING column named "phone" to an existing table."""
    # [START bigquery_add_empty_column]
    from google.cloud import bigquery

    # Create a client for the BigQuery API.
    client = bigquery.Client()

    # TODO(developer): Set table_id to the ID of the table
    # to add an empty column.
    # table_id = "your-project.your_dataset.your_table_name"

    table = client.get_table(table_id)  # Make an API request.

    # Work on a copy so the original schema stays available for comparison.
    original_schema = table.schema
    new_schema = list(original_schema)
    new_schema.append(bigquery.SchemaField("phone", "STRING"))

    table.schema = new_schema
    table = client.update_table(table, ["schema"])  # Make an API request.

    # The update succeeded iff the server-side schema grew by exactly one
    # field and now matches the locally extended schema.
    expected_len = len(original_schema) + 1
    if len(table.schema) == expected_len and len(new_schema) == expected_len:
        print("A new column has been added.")
    else:
        print("The column has not been added.")
    # [END bigquery_add_empty_column]
testbed/googleapis__python-bigquery/samples/client_query_batch.py ADDED
@@ -0,0 +1,53 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2019 Google LLC
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # https://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+ import typing
16
+
17
+ if typing.TYPE_CHECKING:
18
+ from google.cloud import bigquery
19
+
20
+
21
def client_query_batch() -> "bigquery.QueryJob":
    """Run a query at BATCH priority and return the job for inspection."""
    # [START bigquery_query_batch]
    from google.cloud import bigquery

    # Create a client for the BigQuery API.
    client = bigquery.Client()

    # Batch-priority jobs do not count toward the concurrent rate limit.
    job_config = bigquery.QueryJobConfig(priority=bigquery.QueryPriority.BATCH)

    sql = """
        SELECT corpus
        FROM `bigquery-public-data.samples.shakespeare`
        GROUP BY corpus;
    """

    # Start the query, passing in the extra configuration.
    query_job = client.query(sql, job_config=job_config)  # Make an API request.

    # Poll the job for its latest state; once the state reaches `DONE`
    # the results are ready to read.
    fetched_job = client.get_job(
        query_job.job_id, location=query_job.location
    )  # Make an API request.
    query_job = typing.cast("bigquery.QueryJob", fetched_job)

    print("Job {} is currently in state {}".format(query_job.job_id, query_job.state))
    # [END bigquery_query_batch]
    return query_job
testbed/googleapis__python-bigquery/samples/client_query_destination_table.py ADDED
@@ -0,0 +1,40 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2019 Google LLC
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # https://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+
16
def client_query_destination_table(table_id: str) -> None:
    """Write query results into the table identified by ``table_id``."""
    # [START bigquery_query_destination_table]
    from google.cloud import bigquery

    # Create a client for the BigQuery API.
    client = bigquery.Client()

    # TODO(developer): Set table_id to the ID of the destination table.
    # table_id = "your-project.your_dataset.your_table_name"

    # Point the query job at the destination table.
    job_config = bigquery.QueryJobConfig(destination=table_id)

    sql = """
        SELECT corpus
        FROM `bigquery-public-data.samples.shakespeare`
        GROUP BY corpus;
    """

    # Issue the query and block until it finishes.
    client.query_and_wait(sql, job_config=job_config)  # Make an API request.

    print("Query results loaded to the table {}".format(table_id))
    # [END bigquery_query_destination_table]
testbed/googleapis__python-bigquery/samples/client_query_destination_table_clustered.py ADDED
@@ -0,0 +1,43 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2020 Google LLC
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # https://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+
16
def client_query_destination_table_clustered(table_id: str) -> None:
    """Write query results into a destination table clustered by "corpus"."""
    # [START bigquery_query_clustered_table]
    from google.cloud import bigquery

    # Create a client for the BigQuery API.
    client = bigquery.Client()

    # TODO(developer): Set table_id to the ID of the destination table.
    # table_id = "your-project.your_dataset.your_table_name"

    sql = "SELECT * FROM `bigquery-public-data.samples.shakespeare`"
    cluster_fields = ["corpus"]

    # Cluster the destination table on the chosen fields.
    job_config = bigquery.QueryJobConfig(
        destination=table_id, clustering_fields=cluster_fields
    )

    # Issue the query and block until the job completes.
    client.query_and_wait(sql, job_config=job_config)  # Make an API request.

    # Confirm the clustering configuration actually took effect.
    table = client.get_table(table_id)  # Make an API request.
    if table.clustering_fields == cluster_fields:
        print(
            "The destination table is written using the cluster_fields configuration."
        )
    # [END bigquery_query_clustered_table]
testbed/googleapis__python-bigquery/samples/client_query_destination_table_cmek.py ADDED
@@ -0,0 +1,48 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2019 Google LLC
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # https://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+
16
def client_query_destination_table_cmek(table_id: str, kms_key_name: str) -> None:
    """Write query results to a destination table protected by a KMS key."""
    # [START bigquery_query_destination_table_cmek]
    from google.cloud import bigquery

    # Create a client for the BigQuery API.
    client = bigquery.Client()

    # TODO(developer): Set table_id to the ID of the destination table.
    # table_id = "your-project.your_dataset.your_table_name"

    # Set the encryption key to use for the destination.
    # TODO(developer): Replace this key with a key you have created in KMS.
    # kms_key_name = "projects/{}/locations/{}/keyRings/{}/cryptoKeys/{}".format(
    #     your-project, location, your-ring, your-key
    # )

    # Encrypt the destination table with the customer-managed key.
    encryption_config = bigquery.EncryptionConfiguration(kms_key_name=kms_key_name)
    job_config = bigquery.QueryJobConfig(
        destination=table_id,
        destination_encryption_configuration=encryption_config,
    )

    # Start the query, passing in the extra configuration.
    query_job = client.query(
        "SELECT 17 AS my_col;", job_config=job_config
    )  # Make an API request.
    query_job.result()  # Block until the query finishes.

    # Confirm the destination table picked up the requested key.
    table = client.get_table(table_id)  # Make an API request.
    if table.encryption_configuration.kms_key_name == kms_key_name:
        print("The destination table is written using the encryption configuration")
    # [END bigquery_query_destination_table_cmek]
testbed/googleapis__python-bigquery/samples/client_query_destination_table_legacy.py ADDED
@@ -0,0 +1,44 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2019 Google LLC
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # https://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+
16
def client_query_destination_table_legacy(table_id: str) -> None:
    """Run a legacy-SQL query whose large results land in ``table_id``."""
    # [START bigquery_query_legacy_large_results]
    from google.cloud import bigquery

    # Create a client for the BigQuery API.
    client = bigquery.Client()

    # TODO(developer): Set table_id to the ID of the destination table.
    # table_id = "your-project.your_dataset.your_table_name"

    # use_legacy_sql selects the legacy SQL dialect; large results also
    # require allow_large_results plus an explicit destination table.
    job_config = bigquery.QueryJobConfig(
        allow_large_results=True, destination=table_id, use_legacy_sql=True
    )

    sql = """
        SELECT corpus
        FROM [bigquery-public-data:samples.shakespeare]
        GROUP BY corpus;
    """

    # Issue the query and block until it finishes.
    client.query_and_wait(sql, job_config=job_config)  # Make an API request.

    print("Query results loaded to the table {}".format(table_id))
    # [END bigquery_query_legacy_large_results]
testbed/googleapis__python-bigquery/samples/client_query_dry_run.py ADDED
@@ -0,0 +1,44 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2019 Google LLC
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # https://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+ import typing
16
+
17
+ if typing.TYPE_CHECKING:
18
+ from google.cloud import bigquery
19
+
20
+
21
def client_query_dry_run() -> "bigquery.QueryJob":
    """Run a dry-run query and return the (immediately complete) job."""
    # [START bigquery_query_dry_run]
    from google.cloud import bigquery

    # Create a client for the BigQuery API.
    client = bigquery.Client()

    # A dry run validates the query and estimates bytes scanned without
    # executing it; disabling the cache keeps the estimate deterministic.
    job_config = bigquery.QueryJobConfig(dry_run=True, use_query_cache=False)

    sql = (
        "SELECT name, COUNT(*) as name_count "
        "FROM `bigquery-public-data.usa_names.usa_1910_2013` "
        "WHERE state = 'WA' "
        "GROUP BY name"
    )

    # Start the query, passing in the extra configuration.
    query_job = client.query(sql, job_config=job_config)  # Make an API request.

    # A dry run query completes immediately.
    print("This query will process {} bytes.".format(query_job.total_bytes_processed))
    # [END bigquery_query_dry_run]
    return query_job
testbed/googleapis__python-bigquery/samples/client_query_legacy_sql.py ADDED
@@ -0,0 +1,40 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2019 Google LLC
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # https://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+
16
def client_query_legacy_sql() -> None:
    """Run a query written in BigQuery legacy SQL and print each row."""
    # [START bigquery_query_legacy]
    from google.cloud import bigquery

    # Create a client for the BigQuery API.
    client = bigquery.Client()

    query = (
        "SELECT name FROM [bigquery-public-data:usa_names.usa_1910_2013] "
        'WHERE state = "TX" '
        "LIMIT 100"
    )

    # Set use_legacy_sql to True to use legacy SQL syntax.
    job_config = bigquery.QueryJobConfig(use_legacy_sql=True)

    # Run the query and block until the job completes.
    results = client.query_and_wait(
        query, job_config=job_config
    )  # Make an API request.

    print("The query data:")
    for row in results:
        print(row)
    # [END bigquery_query_legacy]
testbed/googleapis__python-bigquery/samples/client_query_shortmode.py ADDED
@@ -0,0 +1,53 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2024 Google LLC
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # https://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+
16
def client_query_shortmode() -> None:
    """Issue a query that may execute in short query mode and print results."""
    # [START bigquery_query_shortquery]
    # This example demonstrates issuing a query that may be run in short query mode.
    #
    # To enable the short query mode preview feature, the QUERY_PREVIEW_ENABLED
    # environmental variable should be set to `TRUE`.
    from google.cloud import bigquery

    # Create a client for the BigQuery API.
    client = bigquery.Client()

    query = """
        SELECT
            name,
            gender,
            SUM(number) AS total
        FROM
            bigquery-public-data.usa_names.usa_1910_2013
        GROUP BY
            name, gender
        ORDER BY
            total DESC
        LIMIT 10
    """
    # Run the query. Besides the row data, the returned iterator exposes
    # details about how the query was executed.
    rows = client.query_and_wait(query)

    # A job-backed query reports a job ID; a short-mode query reports
    # only a query ID.
    if rows.job_id is not None:
        print("Query was run with job state. Job ID: {}".format(rows.job_id))
    else:
        print("Query was run in short mode. Query ID: {}".format(rows.query_id))

    print("The query data:")
    for row in rows:
        # Row values can be accessed by field name or index.
        print("name={}, gender={}, total={}".format(row[0], row[1], row["total"]))
    # [END bigquery_query_shortquery]
testbed/googleapis__python-bigquery/samples/client_query_w_named_params.py ADDED
@@ -0,0 +1,42 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2019 Google LLC
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # https://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+
16
def client_query_w_named_params() -> None:
    """Run a parameterized query using named query parameters."""
    # [START bigquery_query_params_named]
    from google.cloud import bigquery

    # Create a client for the BigQuery API.
    client = bigquery.Client()

    query = """
        SELECT word, word_count
        FROM `bigquery-public-data.samples.shakespeare`
        WHERE corpus = @corpus
        AND word_count >= @min_word_count
        ORDER BY word_count DESC;
    """
    # Bind concrete values to the @corpus and @min_word_count placeholders.
    params = [
        bigquery.ScalarQueryParameter("corpus", "STRING", "romeoandjuliet"),
        bigquery.ScalarQueryParameter("min_word_count", "INT64", 250),
    ]
    job_config = bigquery.QueryJobConfig(query_parameters=params)
    results = client.query_and_wait(
        query, job_config=job_config
    )  # Make an API request.

    for row in results:
        print("{}: \t{}".format(row.word, row.word_count))
    # [END bigquery_query_params_named]
testbed/googleapis__python-bigquery/samples/client_query_w_positional_params.py ADDED
@@ -0,0 +1,44 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2019 Google LLC
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # https://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+
16
def client_query_w_positional_params() -> None:
    """Run a parameterized query using positional (?) query parameters."""
    # [START bigquery_query_params_positional]
    from google.cloud import bigquery

    # Create a client for the BigQuery API.
    client = bigquery.Client()

    query = """
        SELECT word, word_count
        FROM `bigquery-public-data.samples.shakespeare`
        WHERE corpus = ?
        AND word_count >= ?
        ORDER BY word_count DESC;
    """
    # Set the name to None to use positional parameters.
    # Note that you cannot mix named and positional parameters.
    params = [
        bigquery.ScalarQueryParameter(None, "STRING", "romeoandjuliet"),
        bigquery.ScalarQueryParameter(None, "INT64", 250),
    ]
    job_config = bigquery.QueryJobConfig(query_parameters=params)
    results = client.query_and_wait(
        query, job_config=job_config
    )  # Make an API request.

    for row in results:
        print("{}: \t{}".format(row.word, row.word_count))
    # [END bigquery_query_params_positional]
testbed/googleapis__python-bigquery/samples/client_query_w_struct_params.py ADDED
@@ -0,0 +1,39 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2019 Google LLC
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # https://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+
16
def client_query_w_struct_params() -> None:
    """Run a query that binds a STRUCT-typed query parameter."""
    # [START bigquery_query_params_structs]
    from google.cloud import bigquery

    # Create a client for the BigQuery API.
    client = bigquery.Client()

    query = "SELECT @struct_value AS s;"
    # A struct parameter is assembled from named scalar sub-parameters.
    struct_param = bigquery.StructQueryParameter(
        "struct_value",
        bigquery.ScalarQueryParameter("x", "INT64", 1),
        bigquery.ScalarQueryParameter("y", "STRING", "foo"),
    )
    job_config = bigquery.QueryJobConfig(query_parameters=[struct_param])
    results = client.query_and_wait(
        query, job_config=job_config
    )  # Make an API request and waits for results.

    for row in results:
        print(row.s)
    # [END bigquery_query_params_structs]
testbed/googleapis__python-bigquery/samples/client_query_w_timestamp_params.py ADDED
@@ -0,0 +1,41 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2019 Google LLC
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # https://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+
16
def client_query_w_timestamp_params() -> None:
    """Run a query that binds a timezone-aware TIMESTAMP query parameter."""
    # [START bigquery_query_params_timestamps]
    import datetime

    from google.cloud import bigquery

    # Create a client for the BigQuery API.
    client = bigquery.Client()

    query = "SELECT TIMESTAMP_ADD(@ts_value, INTERVAL 1 HOUR);"
    # The parameter value must be timezone-aware (UTC here).
    ts_param = bigquery.ScalarQueryParameter(
        "ts_value",
        "TIMESTAMP",
        datetime.datetime(2016, 12, 7, 8, 0, tzinfo=datetime.timezone.utc),
    )
    job_config = bigquery.QueryJobConfig(query_parameters=[ts_param])
    results = client.query_and_wait(
        query, job_config=job_config
    )  # Make an API request.

    for row in results:
        print(row)
    # [END bigquery_query_params_timestamps]
testbed/googleapis__python-bigquery/samples/copy_table.py ADDED
@@ -0,0 +1,34 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2019 Google LLC
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # https://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+
16
def copy_table(source_table_id: str, destination_table_id: str) -> None:
    """Copy one table to another and wait for the copy job to finish."""
    # [START bigquery_copy_table]

    from google.cloud import bigquery

    # Create a client for the BigQuery API.
    client = bigquery.Client()

    # TODO(developer): Set source_table_id to the ID of the original table.
    # source_table_id = "your-project.source_dataset.source_table"

    # TODO(developer): Set destination_table_id to the ID of the destination table.
    # destination_table_id = "your-project.destination_dataset.destination_table"

    copy_job = client.copy_table(source_table_id, destination_table_id)
    copy_job.result()  # Block until the copy completes.

    print("A copy of the table created.")
    # [END bigquery_copy_table]
testbed/googleapis__python-bigquery/samples/copy_table_cmek.py ADDED
@@ -0,0 +1,46 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2019 Google LLC
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # https://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+
16
def copy_table_cmek(dest_table_id: str, orig_table_id: str, kms_key_name: str) -> None:
    """Copy a table into a destination encrypted with a Cloud KMS key."""
    # [START bigquery_copy_table_cmek]
    from google.cloud import bigquery

    # Create a client for the BigQuery API.
    client = bigquery.Client()

    # TODO(developer): Set dest_table_id to the ID of the destination table.
    # dest_table_id = "your-project.your_dataset.your_table_name"

    # TODO(developer): Set orig_table_id to the ID of the original table.
    # orig_table_id = "your-project.your_dataset.your_table_name"

    # Set the encryption key to use for the destination.
    # TODO(developer): Replace this key with a key you have created in KMS.
    # kms_key_name = "projects/{}/locations/{}/keyRings/{}/cryptoKeys/{}".format(
    #     your-project, location, your-ring, your-key
    # )

    encryption_config = bigquery.EncryptionConfiguration(kms_key_name=kms_key_name)
    job_config = bigquery.CopyJobConfig(
        destination_encryption_configuration=encryption_config
    )
    copy_job = client.copy_table(orig_table_id, dest_table_id, job_config=job_config)
    copy_job.result()  # Block until the copy completes.

    # Confirm the destination table picked up the requested key.
    dest_table = client.get_table(dest_table_id)  # Make an API request.
    if dest_table.encryption_configuration.kms_key_name == kms_key_name:
        print("A copy of the table created")
    # [END bigquery_copy_table_cmek]
testbed/googleapis__python-bigquery/samples/copy_table_multiple_source.py ADDED
@@ -0,0 +1,36 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2019 Google LLC
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # https://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+ from typing import Sequence
16
+
17
+
18
def copy_table_multiple_source(dest_table_id: str, table_ids: Sequence[str]) -> None:
    """Append the contents of several source tables into one destination table."""
    # [START bigquery_copy_table_multiple_source]

    from google.cloud import bigquery

    # Construct a BigQuery client object.
    client = bigquery.Client()

    # TODO(developer): Set dest_table_id to the ID of the destination table.
    # dest_table_id = "your-project.your_dataset.your_table_name"

    # TODO(developer): Set table_ids to the list of the IDs of the original tables.
    # table_ids = ["your-project.your_dataset.your_table_name", ...]

    # copy_table accepts a sequence of source table IDs.
    copy_job = client.copy_table(table_ids, dest_table_id)  # Make an API request.
    copy_job.result()  # Wait for the job to complete.

    print("The tables {} have been appended to {}".format(table_ids, dest_table_id))
    # [END bigquery_copy_table_multiple_source]
testbed/googleapis__python-bigquery/samples/create_dataset.py ADDED
@@ -0,0 +1,37 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2019 Google LLC
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # https://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+
16
def create_dataset(dataset_id: str) -> None:
    """Create a new BigQuery dataset located in the US multi-region."""
    # [START bigquery_create_dataset]
    from google.cloud import bigquery

    # Construct a BigQuery client object.
    client = bigquery.Client()

    # TODO(developer): Set dataset_id to the ID of the dataset to create.
    # dataset_id = "{}.your_dataset".format(client.project)

    # Build a full Dataset resource to send to the API.
    dataset = bigquery.Dataset(dataset_id)

    # TODO(developer): Specify the geographic location where the dataset should reside.
    dataset.location = "US"

    # Send the dataset to the API for creation, with an explicit timeout.
    # Raises google.api_core.exceptions.Conflict if the Dataset already
    # exists within the project.
    dataset = client.create_dataset(dataset, timeout=30)  # Make an API request.
    print("Created dataset {}.{}".format(client.project, dataset.dataset_id))
    # [END bigquery_create_dataset]
testbed/googleapis__python-bigquery/samples/create_job.py ADDED
@@ -0,0 +1,66 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2019 Google LLC
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # https://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+ import typing
16
+ from typing import Union
17
+
18
+ if typing.TYPE_CHECKING:
19
+ from google.cloud.bigquery import LoadJob, CopyJob, ExtractJob, QueryJob
20
+
21
+
22
def create_job() -> "Union[LoadJob, CopyJob, ExtractJob, QueryJob]":
    """Start a query job via the generic create_job API and return the job."""
    # [START bigquery_create_job]
    from google.cloud import bigquery

    # Construct a BigQuery client object.
    client = bigquery.Client()

    # Specify a job configuration, providing a query
    # and/or optional job resource properties, as needed.
    # The job instance can be a LoadJob, CopyJob, ExtractJob, QueryJob
    # Here, we demonstrate a "query" job.
    # References:
    # https://googleapis.dev/python/bigquery/latest/generated/google.cloud.bigquery.client.Client.html#google.cloud.bigquery.client.Client.create_job
    # https://cloud.google.com/bigquery/docs/reference/rest/v2/Job
    #
    # Example use cases for .create_job() include:
    # * to retry failed jobs
    # * to generate jobs with an experimental API property that hasn't
    #   been added to one of the manually written job configuration
    #   classes yet
    #
    # NOTE: unless it is necessary to create a job in this way, the
    # preferred approach is to use one of the dedicated API calls:
    #   client.query()
    #   client.extract_table()
    #   client.copy_table()
    #   client.load_table_file(), client.load_table_from_dataframe(), etc
    job_config = {
        "query": {
            "query": """
          SELECT country_name
          FROM `bigquery-public-data.utility_us.country_code_iso`
          LIMIT 5
          """,
        },
        "labels": {"example-label": "example-value"},
        "maximum_bytes_billed": 10000000,
    }
    query_job = client.create_job(job_config=job_config)  # Make an API request.

    print(f"Started job: {query_job.job_id}")
    # [END bigquery_create_job]

    return query_job
testbed/googleapis__python-bigquery/samples/create_routine.py ADDED
@@ -0,0 +1,50 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2019 Google LLC
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # https://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+ import typing
16
+
17
+ if typing.TYPE_CHECKING:
18
+ from google.cloud import bigquery
19
+
20
+
21
def create_routine(routine_id: str) -> "bigquery.Routine":
    """Create a scalar SQL function routine and return the created routine."""
    # [START bigquery_create_routine]
    from google.cloud import bigquery

    # Construct a BigQuery client object.
    client = bigquery.Client()

    # TODO(developer): Choose a fully qualified ID for the routine.
    # routine_id = "my-project.my_dataset.my_routine"

    # The routine takes one INT64 argument named "x" and returns x * 3.
    int64_argument = bigquery.RoutineArgument(
        name="x",
        data_type=bigquery.StandardSqlDataType(
            type_kind=bigquery.StandardSqlTypeNames.INT64
        ),
    )
    routine = bigquery.Routine(
        routine_id,
        type_="SCALAR_FUNCTION",
        language="SQL",
        body="x * 3",
        arguments=[int64_argument],
    )

    routine = client.create_routine(routine)  # Make an API request.

    print("Created routine {}".format(routine.reference))
    # [END bigquery_create_routine]
    return routine
testbed/googleapis__python-bigquery/samples/create_routine_ddl.py ADDED
@@ -0,0 +1,40 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2019 Google LLC
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # https://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+
16
def create_routine_ddl(routine_id: str) -> None:
    """Create a routine by running a CREATE FUNCTION DDL statement."""
    # [START bigquery_create_routine_ddl]

    from google.cloud import bigquery

    # Construct a BigQuery client object.
    client = bigquery.Client()

    # TODO(developer): Choose a fully-qualified ID for the routine.
    # routine_id = "my-project.my_dataset.my_routine"

    ddl = """
    CREATE FUNCTION `{}`(
        arr ARRAY<STRUCT<name STRING, val INT64>>
    ) AS (
        (SELECT SUM(IF(elem.name = "foo",elem.val,null)) FROM UNNEST(arr) AS elem)
    )
    """.format(
        routine_id
    )
    # DDL statements run as ordinary query jobs.
    query_job = client.query(ddl)  # Make an API request.
    query_job.result()  # Wait for the job to complete.

    print("Created routine {}".format(query_job.ddl_target_routine))
    # [END bigquery_create_routine_ddl]
testbed/googleapis__python-bigquery/samples/create_table.py ADDED
@@ -0,0 +1,36 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2019 Google LLC
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # https://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+
16
def create_table(table_id: str) -> None:
    """Create an empty table with a two-column schema."""
    # [START bigquery_create_table]
    from google.cloud import bigquery

    # Construct a BigQuery client object.
    client = bigquery.Client()

    # TODO(developer): Set table_id to the ID of the table to create.
    # table_id = "your-project.your_dataset.your_table_name"

    # Both columns are REQUIRED (non-nullable).
    schema = [
        bigquery.SchemaField("full_name", "STRING", mode="REQUIRED"),
        bigquery.SchemaField("age", "INTEGER", mode="REQUIRED"),
    ]

    table = client.create_table(
        bigquery.Table(table_id, schema=schema)
    )  # Make an API request.
    print(
        "Created table {}.{}.{}".format(table.project, table.dataset_id, table.table_id)
    )
    # [END bigquery_create_table]
testbed/googleapis__python-bigquery/samples/create_table_clustered.py ADDED
@@ -0,0 +1,46 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2019 Google LLC
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # https://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+ import typing
16
+
17
+ if typing.TYPE_CHECKING:
18
+ from google.cloud import bigquery
19
+
20
+
21
def create_table_clustered(table_id: str) -> "bigquery.Table":
    """Create a table clustered by city and zipcode; return the new table."""
    # [START bigquery_create_table_clustered]
    from google.cloud import bigquery

    # Construct a BigQuery client object.
    client = bigquery.Client()

    # TODO(developer): Set table_id to the ID of the table to create.
    # table_id = "your-project.your_dataset.your_table_name"

    schema = [
        bigquery.SchemaField("full_name", "STRING"),
        bigquery.SchemaField("city", "STRING"),
        bigquery.SchemaField("zipcode", "INTEGER"),
    ]

    table = bigquery.Table(table_id, schema=schema)
    # Request clustering on the city and zipcode columns.
    table.clustering_fields = ["city", "zipcode"]
    table = client.create_table(table)  # Make an API request.
    print(
        "Created clustered table {}.{}.{}".format(
            table.project, table.dataset_id, table.table_id
        )
    )
    # [END bigquery_create_table_clustered]
    return table
testbed/googleapis__python-bigquery/samples/create_table_range_partitioned.py ADDED
@@ -0,0 +1,49 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2019 Google LLC
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # https://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+ import typing
16
+
17
+ if typing.TYPE_CHECKING:
18
+ from google.cloud import bigquery
19
+
20
+
21
def create_table_range_partitioned(table_id: str) -> "bigquery.Table":
    """Create a table with integer-range partitioning on zipcode; return it."""
    # [START bigquery_create_table_range_partitioned]
    from google.cloud import bigquery

    # Construct a BigQuery client object.
    client = bigquery.Client()

    # TODO(developer): Set table_id to the ID of the table to create.
    # table_id = "your-project.your_dataset.your_table_name"

    schema = [
        bigquery.SchemaField("full_name", "STRING"),
        bigquery.SchemaField("city", "STRING"),
        bigquery.SchemaField("zipcode", "INTEGER"),
    ]

    table = bigquery.Table(table_id, schema=schema)
    # To use integer range partitioning, select a top-level REQUIRED /
    # NULLABLE column with INTEGER / INT64 data type.
    table.range_partitioning = bigquery.RangePartitioning(
        field="zipcode",
        range_=bigquery.PartitionRange(start=0, end=100000, interval=10),
    )
    table = client.create_table(table)  # Make an API request.
    print(
        "Created table {}.{}.{}".format(table.project, table.dataset_id, table.table_id)
    )
    # [END bigquery_create_table_range_partitioned]
    return table
testbed/googleapis__python-bigquery/samples/dataset_exists.py ADDED
@@ -0,0 +1,31 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2019 Google LLC
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # https://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+
16
def dataset_exists(dataset_id: str) -> None:
    """Print whether the given dataset exists."""
    # [START bigquery_dataset_exists]
    from google.cloud import bigquery
    from google.cloud.exceptions import NotFound

    client = bigquery.Client()

    # TODO(developer): Set dataset_id to the ID of the dataset to determine existence.
    # dataset_id = "your-project.your_dataset"

    # EAFP: fetching the dataset raises NotFound when it does not exist.
    try:
        client.get_dataset(dataset_id)  # Make an API request.
    except NotFound:
        print("Dataset {} is not found".format(dataset_id))
    else:
        print("Dataset {} already exists".format(dataset_id))
    # [END bigquery_dataset_exists]
testbed/googleapis__python-bigquery/samples/delete_dataset.py ADDED
@@ -0,0 +1,34 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2019 Google LLC
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # https://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+
16
def delete_dataset(dataset_id: str) -> None:
    """Delete a dataset and all of its contents.

    Args:
        dataset_id: Fully-qualified dataset ID, e.g. "your-project.your_dataset".
    """
    # [START bigquery_delete_dataset]

    from google.cloud import bigquery

    # Construct a BigQuery client object.
    client = bigquery.Client()

    # TODO(developer): Set dataset_id to the ID of the dataset to delete.
    # dataset_id = 'your-project.your_dataset'

    # Use the delete_contents parameter to delete a dataset and its contents.
    # Use the not_found_ok parameter to not receive an error if the dataset has already been deleted.
    client.delete_dataset(
        dataset_id, delete_contents=True, not_found_ok=True
    )  # Make an API request.

    print("Deleted dataset '{}'.".format(dataset_id))
    # [END bigquery_delete_dataset]
testbed/googleapis__python-bigquery/samples/delete_dataset_labels.py ADDED
@@ -0,0 +1,40 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2019 Google LLC
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # https://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+ import typing
16
+
17
+ if typing.TYPE_CHECKING:
18
+ from google.cloud import bigquery
19
+
20
+
21
def delete_dataset_labels(dataset_id: str) -> "bigquery.Dataset":
    """Remove the "color" label from a dataset; return the updated dataset."""
    # [START bigquery_delete_label_dataset]

    from google.cloud import bigquery

    # Construct a BigQuery client object.
    client = bigquery.Client()

    # TODO(developer): Set dataset_id to the ID of the dataset to fetch.
    # dataset_id = "your-project.your_dataset"

    dataset = client.get_dataset(dataset_id)  # Make an API request.

    # To delete a label from a dataset, set its value to None.
    labels = dataset.labels
    labels["color"] = None

    dataset = client.update_dataset(dataset, ["labels"])  # Make an API request.
    print("Labels deleted from {}".format(dataset_id))
    # [END bigquery_delete_label_dataset]
    return dataset
testbed/googleapis__python-bigquery/samples/delete_model.py ADDED
@@ -0,0 +1,32 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2019 Google LLC
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # https://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+
16
def delete_model(model_id: str) -> None:
    """Delete a BigQuery ML model.

    Sample ID: go/samples-tracker/1534
    """
    # [START bigquery_delete_model]

    from google.cloud import bigquery

    # Construct a BigQuery client object.
    client = bigquery.Client()

    # TODO(developer): Set model_id to the ID of the model to fetch.
    # model_id = 'your-project.your_dataset.your_model'

    client.delete_model(model_id)  # Make an API request.

    print("Deleted model '{}'.".format(model_id))
    # [END bigquery_delete_model]
testbed/googleapis__python-bigquery/samples/delete_routine.py ADDED
@@ -0,0 +1,30 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2019 Google LLC
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # https://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+
16
def delete_routine(routine_id: str) -> None:
    """Delete a routine by its fully-qualified ID."""
    # [START bigquery_delete_routine]

    from google.cloud import bigquery

    # Construct a BigQuery client object.
    client = bigquery.Client()

    # TODO(developer): Set the fully-qualified ID for the routine.
    # routine_id = "my-project.my_dataset.my_routine"

    client.delete_routine(routine_id)  # Make an API request.

    print("Deleted routine {}.".format(routine_id))
    # [END bigquery_delete_routine]
testbed/googleapis__python-bigquery/samples/delete_table.py ADDED
@@ -0,0 +1,31 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2019 Google LLC
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # https://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+
16
def delete_table(table_id: str) -> None:
    """Delete a table, ignoring the case where it is already gone."""
    # [START bigquery_delete_table]

    from google.cloud import bigquery

    # Construct a BigQuery client object.
    client = bigquery.Client()

    # TODO(developer): Set table_id to the ID of the table to fetch.
    # table_id = 'your-project.your_dataset.your_table'

    # If the table does not exist, delete_table raises
    # google.api_core.exceptions.NotFound unless not_found_ok is True.
    client.delete_table(table_id, not_found_ok=True)  # Make an API request.
    print("Deleted table '{}'.".format(table_id))
    # [END bigquery_delete_table]
testbed/googleapis__python-bigquery/samples/download_public_data.py ADDED
@@ -0,0 +1,32 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2019 Google LLC
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # https://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+
16
def download_public_data() -> None:
    """Download a public table into a pandas DataFrame and print its info."""
    # [START bigquery_pandas_public_data]

    from google.cloud import bigquery

    # Construct a BigQuery client object.
    client = bigquery.Client()

    # TODO(developer): Set table_id to the fully-qualified table ID in standard
    # SQL format, including the project ID and dataset ID.
    table_id = "bigquery-public-data.usa_names.usa_1910_current"

    # Use the BigQuery Storage API to speed-up downloads of large tables.
    rows = client.list_rows(table_id)
    dataframe = rows.to_dataframe(create_bqstorage_client=True)

    print(dataframe.info())
    # [END bigquery_pandas_public_data]
testbed/googleapis__python-bigquery/samples/download_public_data_sandbox.py ADDED
@@ -0,0 +1,35 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2019 Google LLC
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # https://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+
16
def download_public_data_sandbox() -> None:
    """Download public-data query results into a DataFrame (Sandbox-friendly)."""
    # [START bigquery_pandas_public_data_sandbox]

    from google.cloud import bigquery

    # Construct a BigQuery client object.
    client = bigquery.Client()

    # `SELECT *` is an anti-pattern in BigQuery because it is cheaper and
    # faster to use the BigQuery Storage API directly, but BigQuery Sandbox
    # users can only use the BigQuery Storage API to download query results.
    query_string = "SELECT * FROM `bigquery-public-data.usa_names.usa_1910_current`"

    # Use the BigQuery Storage API to speed-up downloads of large tables.
    results = client.query_and_wait(query_string)
    dataframe = results.to_dataframe(create_bqstorage_client=True)

    print(dataframe.info())
    # [END bigquery_pandas_public_data_sandbox]
testbed/googleapis__python-bigquery/samples/get_dataset.py ADDED
@@ -0,0 +1,55 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2019 Google LLC
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # https://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+
16
def get_dataset(dataset_id: str) -> None:
    """Fetch a dataset and print its name, description, labels, and tables."""
    # [START bigquery_get_dataset]

    from google.cloud import bigquery

    # Construct a BigQuery client object.
    client = bigquery.Client()

    # TODO(developer): Set dataset_id to the ID of the dataset to fetch.
    # dataset_id = 'your-project.your_dataset'

    dataset = client.get_dataset(dataset_id)  # Make an API request.

    full_dataset_id = "{}.{}".format(dataset.project, dataset.dataset_id)
    friendly_name = dataset.friendly_name
    print(
        "Got dataset '{}' with friendly_name '{}'.".format(
            full_dataset_id, friendly_name
        )
    )

    # View dataset properties.
    print("Description: {}".format(dataset.description))
    print("Labels:")
    labels = dataset.labels
    if not labels:
        print("\tDataset has no labels defined.")
    else:
        for label, value in labels.items():
            print("\t{}: {}".format(label, value))

    # View tables in dataset.
    print("Tables:")
    tables = list(client.list_tables(dataset))  # Make an API request(s).
    if not tables:
        print("\tThis dataset does not contain any tables.")
    else:
        for table in tables:
            print("\t{}".format(table.table_id))
    # [END bigquery_get_dataset]
testbed/googleapis__python-bigquery/samples/get_dataset_labels.py ADDED
@@ -0,0 +1,37 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2019 Google LLC
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # https://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+
16
def get_dataset_labels(dataset_id: str) -> None:
    """Fetch a dataset and print its labels."""
    # [START bigquery_get_dataset_labels]

    from google.cloud import bigquery

    # Construct a BigQuery client object.
    client = bigquery.Client()

    # TODO(developer): Set dataset_id to the ID of the dataset to fetch.
    # dataset_id = "your-project.your_dataset"

    dataset = client.get_dataset(dataset_id)  # Make an API request.

    # View dataset labels.
    print("Dataset ID: {}".format(dataset_id))
    print("Labels:")
    if not dataset.labels:
        print("\tDataset has no labels defined.")
    else:
        for label, value in dataset.labels.items():
            print("\t{}: {}".format(label, value))
    # [END bigquery_get_dataset_labels]
testbed/googleapis__python-bigquery/samples/get_model.py ADDED
@@ -0,0 +1,36 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2019 Google LLC
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # https://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+
16
def get_model(model_id: str) -> None:
    """Fetch a BigQuery ML model and print its name.

    Sample ID: go/samples-tracker/1510
    """
    # [START bigquery_get_model]

    from google.cloud import bigquery

    # Construct a BigQuery client object.
    client = bigquery.Client()

    # TODO(developer): Set model_id to the ID of the model to fetch.
    # model_id = 'your-project.your_dataset.your_model'

    model = client.get_model(model_id)  # Make an API request.

    full_model_id = "{}.{}.{}".format(model.project, model.dataset_id, model.model_id)
    friendly_name = model.friendly_name
    print(
        "Got model '{}' with friendly_name '{}'.".format(full_model_id, friendly_name)
    )
    # [END bigquery_get_model]
testbed/googleapis__python-bigquery/samples/get_routine.py ADDED
@@ -0,0 +1,43 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2019 Google LLC
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # https://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
import typing

if typing.TYPE_CHECKING:
    from google.cloud import bigquery


def get_routine(routine_id: str) -> "bigquery.Routine":
    # [START bigquery_get_routine]

    from google.cloud import bigquery

    # Create the client used for the API call below.
    client = bigquery.Client()

    # TODO(developer): Set the fully-qualified ID for the routine.
    # routine_id = "my-project.my_dataset.my_routine"

    # Fetch the routine resource.
    routine = client.get_routine(routine_id)  # Make an API request.

    # Print the routine's headline properties, then each argument.
    print("Routine '{}':".format(routine.reference))
    print("\tType: '{}'".format(routine.type_))
    print("\tLanguage: '{}'".format(routine.language))
    print("\tArguments:")
    for arg in routine.arguments:
        print("\t\tName: '{}'".format(arg.name))
        print("\t\tType: '{}'".format(arg.data_type))
    # [END bigquery_get_routine]
    return routine
testbed/googleapis__python-bigquery/samples/get_table.py ADDED
@@ -0,0 +1,36 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2019 Google LLC
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # https://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+
16
def get_table(table_id: str) -> None:
    # [START bigquery_get_table]

    from google.cloud import bigquery

    # Create the client used for the API call below.
    client = bigquery.Client()

    # TODO(developer): Set table_id to the ID of the table to fetch.
    # table_id = 'your-project.your_dataset.your_table'

    # Fetch the table resource.
    table = client.get_table(table_id)  # Make an API request.

    # View table properties
    print(
        "Got table '{}.{}.{}'.".format(table.project, table.dataset_id, table.table_id)
    )
    for line in (
        "Table schema: {}".format(table.schema),
        "Table description: {}".format(table.description),
        "Table has {} rows".format(table.num_rows),
    ):
        print(line)
    # [END bigquery_get_table]
testbed/googleapis__python-bigquery/samples/label_dataset.py ADDED
@@ -0,0 +1,32 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2019 Google LLC
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # https://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+
16
def label_dataset(dataset_id: str) -> None:
    # [START bigquery_label_dataset]

    from google.cloud import bigquery

    # Create the client used for the API calls below.
    client = bigquery.Client()

    # TODO(developer): Set dataset_id to the ID of the dataset to fetch.
    # dataset_id = "your-project.your_dataset"

    # Fetch the dataset, attach the label locally, then push only the
    # "labels" field back to the API.
    ds = client.get_dataset(dataset_id)  # Make an API request.
    ds.labels = {"color": "green"}
    ds = client.update_dataset(ds, ["labels"])  # Make an API request.

    print("Labels added to {}".format(dataset_id))
    # [END bigquery_label_dataset]
testbed/googleapis__python-bigquery/samples/list_datasets.py ADDED
@@ -0,0 +1,33 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2019 Google LLC
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # https://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+
16
def list_datasets() -> None:
    # [START bigquery_list_datasets]

    from google.cloud import bigquery

    # Create the client used for the API call below.
    client = bigquery.Client()

    project = client.project
    # Materialize the iterator so we can tell empty from non-empty.
    datasets = list(client.list_datasets())  # Make an API request.

    if not datasets:
        print("{} project does not contain any datasets.".format(project))
    else:
        print("Datasets in project {}:".format(project))
        for ds in datasets:
            print("\t{}".format(ds.dataset_id))
    # [END bigquery_list_datasets]
testbed/googleapis__python-bigquery/samples/list_datasets_by_label.py ADDED
@@ -0,0 +1,33 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2019 Google LLC
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # https://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+
16
def list_datasets_by_label() -> None:
    # [START bigquery_list_datasets_by_label]

    from google.cloud import bigquery

    # Create the client used for the API call below.
    client = bigquery.Client()

    # Only datasets carrying this label are returned.
    label_filter = "labels.color:green"
    datasets = list(client.list_datasets(filter=label_filter))  # Make an API request.

    if not datasets:
        print("No datasets found with this filter.")
    else:
        print("Datasets filtered by {}:".format(label_filter))
        for ds in datasets:
            print("\t{}.{}".format(ds.project, ds.dataset_id))
    # [END bigquery_list_datasets_by_label]
testbed/googleapis__python-bigquery/samples/list_models.py ADDED
@@ -0,0 +1,39 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2019 Google LLC
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # https://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+
16
def list_models(dataset_id: str) -> None:
    """Sample ID: go/samples-tracker/1512"""

    # [START bigquery_list_models]

    from google.cloud import bigquery

    # Create the client used for the API call below.
    client = bigquery.Client()

    # TODO(developer): Set dataset_id to the ID of the dataset that contains
    # the models you are listing.
    # dataset_id = 'your-project.your_dataset'

    models = client.list_models(dataset_id)  # Make an API request.

    print("Models contained in '{}':".format(dataset_id))
    for model in models:
        # Print each model's fully-qualified ID and its friendly name.
        full_model_id = "{}.{}.{}".format(
            model.project, model.dataset_id, model.model_id
        )
        print("{}: friendly_name='{}'".format(full_model_id, model.friendly_name))
    # [END bigquery_list_models]
testbed/googleapis__python-bigquery/samples/list_routines.py ADDED
@@ -0,0 +1,33 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2019 Google LLC
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # https://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+
16
def list_routines(dataset_id: str) -> None:
    # [START bigquery_list_routines]

    from google.cloud import bigquery

    # Create the client used for the API call below.
    client = bigquery.Client()

    # TODO(developer): Set dataset_id to the ID of the dataset that contains
    # the routines you are listing.
    # dataset_id = 'your-project.your_dataset'

    routines = client.list_routines(dataset_id)  # Make an API request.

    print("Routines contained in dataset {}:".format(dataset_id))
    for rtn in routines:
        print(rtn.reference)
    # [END bigquery_list_routines]
testbed/googleapis__python-bigquery/samples/list_tables.py ADDED
@@ -0,0 +1,33 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2019 Google LLC
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # https://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+
16
def list_tables(dataset_id: str) -> None:
    # [START bigquery_list_tables]

    from google.cloud import bigquery

    # Create the client used for the API call below.
    client = bigquery.Client()

    # TODO(developer): Set dataset_id to the ID of the dataset that contains
    # the tables you are listing.
    # dataset_id = 'your-project.your_dataset'

    tables = client.list_tables(dataset_id)  # Make an API request.

    print("Tables contained in '{}':".format(dataset_id))
    for tbl in tables:
        # Each entry is printed as its fully-qualified "project.dataset.table" ID.
        print("{}.{}.{}".format(tbl.project, tbl.dataset_id, tbl.table_id))
    # [END bigquery_list_tables]
testbed/googleapis__python-bigquery/samples/load_table_clustered.py ADDED
@@ -0,0 +1,59 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2020 Google LLC
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
import typing

if typing.TYPE_CHECKING:
    from google.cloud import bigquery


def load_table_clustered(table_id: str) -> "bigquery.Table":
    # [START bigquery_load_table_clustered]
    from google.cloud import bigquery

    # Create the client used for the API calls below.
    client = bigquery.Client()

    # TODO(developer): Set table_id to the ID of the table to create.
    # table_id = "your-project.your_dataset.your_table_name"

    # Describe the CSV source and the clustered, time-partitioned destination.
    schema = [
        bigquery.SchemaField("timestamp", bigquery.SqlTypeNames.TIMESTAMP),
        bigquery.SchemaField("origin", bigquery.SqlTypeNames.STRING),
        bigquery.SchemaField("destination", bigquery.SqlTypeNames.STRING),
        bigquery.SchemaField("amount", bigquery.SqlTypeNames.NUMERIC),
    ]
    job_config = bigquery.LoadJobConfig(
        skip_leading_rows=1,
        source_format=bigquery.SourceFormat.CSV,
        schema=schema,
        time_partitioning=bigquery.TimePartitioning(field="timestamp"),
        clustering_fields=["origin", "destination"],
    )

    # Start the load job from the public sample CSV.
    load_job = client.load_table_from_uri(
        ["gs://cloud-samples-data/bigquery/sample-transactions/transactions.csv"],
        table_id,
        job_config=job_config,
    )

    load_job.result()  # Waits for the job to complete.

    # Re-fetch the destination table to report what was loaded.
    table = client.get_table(table_id)  # Make an API request.
    print(
        "Loaded {} rows and {} columns to {}".format(
            table.num_rows, len(table.schema), table_id
        )
    )
    # [END bigquery_load_table_clustered]
    return table
testbed/googleapis__python-bigquery/samples/load_table_dataframe.py ADDED
@@ -0,0 +1,120 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2019 Google LLC
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
import typing

if typing.TYPE_CHECKING:
    from google.cloud import bigquery


def load_table_dataframe(table_id: str) -> "bigquery.Table":
    # [START bigquery_load_table_dataframe]
    import datetime

    from google.cloud import bigquery
    import pandas
    import pytz

    # Create the client used for the API calls below.
    client = bigquery.Client()

    # TODO(developer): Set table_id to the ID of the table to create.
    # table_id = "your-project.your_dataset.your_table_name"

    records = [
        {
            "title": "The Meaning of Life",
            "release_year": 1983,
            "length_minutes": 112.5,
            "release_date": pytz.timezone("Europe/Paris")
            .localize(datetime.datetime(1983, 5, 9, 13, 0, 0))
            .astimezone(pytz.utc),
            # Assume UTC timezone when a datetime object contains no timezone.
            "dvd_release": datetime.datetime(2002, 1, 22, 7, 0, 0),
        },
        {
            "title": "Monty Python and the Holy Grail",
            "release_year": 1975,
            "length_minutes": 91.5,
            "release_date": pytz.timezone("Europe/London")
            .localize(datetime.datetime(1975, 4, 9, 23, 59, 2))
            .astimezone(pytz.utc),
            "dvd_release": datetime.datetime(2002, 7, 16, 9, 0, 0),
        },
        {
            "title": "Life of Brian",
            "release_year": 1979,
            "length_minutes": 94.25,
            "release_date": pytz.timezone("America/New_York")
            .localize(datetime.datetime(1979, 8, 17, 23, 59, 5))
            .astimezone(pytz.utc),
            "dvd_release": datetime.datetime(2008, 1, 14, 8, 0, 0),
        },
        {
            "title": "And Now for Something Completely Different",
            "release_year": 1971,
            "length_minutes": 88.0,
            "release_date": pytz.timezone("Europe/London")
            .localize(datetime.datetime(1971, 9, 28, 23, 59, 7))
            .astimezone(pytz.utc),
            "dvd_release": datetime.datetime(2003, 10, 22, 10, 0, 0),
        },
    ]

    # Optionally, set a named index, which can also be written to the
    # BigQuery table.
    wikidata_ids = pandas.Index(
        ["Q24980", "Q25043", "Q24953", "Q16403"], name="wikidata_id"
    )
    # In the loaded table, the column order reflects the order of the
    # columns in the DataFrame.
    df = pandas.DataFrame(
        records,
        columns=[
            "title",
            "release_year",
            "length_minutes",
            "release_date",
            "dvd_release",
        ],
        index=wikidata_ids,
    )

    # Specify a (partial) schema. All columns are always written to the
    # table. The schema is used to assist in data type definitions.
    job_config = bigquery.LoadJobConfig(
        schema=[
            # Specify the type of columns whose type cannot be auto-detected. For
            # example the "title" column uses pandas dtype "object", so its
            # data type is ambiguous.
            bigquery.SchemaField("title", bigquery.enums.SqlTypeNames.STRING),
            # Indexes are written if included in the schema by name.
            bigquery.SchemaField("wikidata_id", bigquery.enums.SqlTypeNames.STRING),
        ],
        # Optionally, set the write disposition. BigQuery appends loaded rows
        # to an existing table by default, but with WRITE_TRUNCATE write
        # disposition it replaces the table with the loaded data.
        write_disposition="WRITE_TRUNCATE",
    )

    load_job = client.load_table_from_dataframe(
        df, table_id, job_config=job_config
    )  # Make an API request.
    load_job.result()  # Wait for the job to complete.

    # Re-fetch the destination table to report what was loaded.
    table = client.get_table(table_id)  # Make an API request.
    print(
        "Loaded {} rows and {} columns to {}".format(
            table.num_rows, len(table.schema), table_id
        )
    )
    # [END bigquery_load_table_dataframe]
    return table
testbed/googleapis__python-bigquery/samples/load_table_file.py ADDED
@@ -0,0 +1,49 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2020 Google LLC
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
import typing

if typing.TYPE_CHECKING:
    from google.cloud import bigquery


def load_table_file(file_path: str, table_id: str) -> "bigquery.Table":
    # [START bigquery_load_from_file]
    from google.cloud import bigquery

    # Create the client used for the API calls below.
    client = bigquery.Client()

    # TODO(developer): Set table_id to the ID of the table to create.
    # table_id = "your-project.your_dataset.your_table_name"

    # Source is a local CSV with a header row; let BigQuery infer the schema.
    job_config = bigquery.LoadJobConfig(
        source_format=bigquery.SourceFormat.CSV,
        skip_leading_rows=1,
        autodetect=True,
    )

    # Stream the file contents to the load job.
    with open(file_path, "rb") as source_file:
        load_job = client.load_table_from_file(
            source_file, table_id, job_config=job_config
        )

    load_job.result()  # Waits for the job to complete.

    # Re-fetch the destination table to report what was loaded.
    table = client.get_table(table_id)  # Make an API request.
    print(
        "Loaded {} rows and {} columns to {}".format(
            table.num_rows, len(table.schema), table_id
        )
    )
    # [END bigquery_load_from_file]
    return table
testbed/googleapis__python-bigquery/samples/load_table_uri_autodetect_csv.py ADDED
@@ -0,0 +1,44 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2020 Google LLC
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+
16
def load_table_uri_autodetect_csv(table_id: str) -> None:
    # [START bigquery_load_table_gcs_csv_autodetect]
    from google.cloud import bigquery

    # Create the client used for the API calls below.
    client = bigquery.Client()

    # TODO(developer): Set table_id to the ID of the table to create.
    # table_id = "your-project.your_dataset.your_table_name"

    # Skip the header row and let BigQuery infer the schema from the data.
    job_config = bigquery.LoadJobConfig(
        autodetect=True,
        skip_leading_rows=1,
        # The source format defaults to CSV, so the line below is optional.
        source_format=bigquery.SourceFormat.CSV,
    )

    uri = "gs://cloud-samples-data/bigquery/us-states/us-states.csv"
    load_job = client.load_table_from_uri(
        uri, table_id, job_config=job_config
    )  # Make an API request.
    load_job.result()  # Waits for the job to complete.

    destination_table = client.get_table(table_id)
    print("Loaded {} rows.".format(destination_table.num_rows))
    # [END bigquery_load_table_gcs_csv_autodetect]
testbed/googleapis__python-bigquery/samples/load_table_uri_autodetect_json.py ADDED
@@ -0,0 +1,41 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2020 Google LLC
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+
16
def load_table_uri_autodetect_json(table_id: str) -> None:
    # [START bigquery_load_table_gcs_json_autodetect]
    from google.cloud import bigquery

    # Create the client used for the API calls below.
    client = bigquery.Client()

    # TODO(developer): Set table_id to the ID of the table to create.
    # table_id = "your-project.your_dataset.your_table_name"

    # Newline-delimited JSON source; let BigQuery infer the schema.
    job_config = bigquery.LoadJobConfig(
        autodetect=True, source_format=bigquery.SourceFormat.NEWLINE_DELIMITED_JSON
    )

    uri = "gs://cloud-samples-data/bigquery/us-states/us-states.json"
    load_job = client.load_table_from_uri(
        uri, table_id, job_config=job_config
    )  # Make an API request.
    load_job.result()  # Waits for the job to complete.

    destination_table = client.get_table(table_id)
    print("Loaded {} rows.".format(destination_table.num_rows))
    # [END bigquery_load_table_gcs_json_autodetect]
testbed/googleapis__python-bigquery/samples/load_table_uri_avro.py ADDED
@@ -0,0 +1,37 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2020 Google LLC
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+
16
def load_table_uri_avro(table_id: str) -> None:
    # [START bigquery_load_table_gcs_avro]
    from google.cloud import bigquery

    # Create the client used for the API calls below.
    client = bigquery.Client()

    # TODO(developer): Set table_id to the ID of the table to create.
    # table_id = "your-project.your_dataset.your_table_name"

    # Avro files embed their own schema, so no schema/autodetect is needed.
    job_config = bigquery.LoadJobConfig(source_format=bigquery.SourceFormat.AVRO)
    uri = "gs://cloud-samples-data/bigquery/us-states/us-states.avro"

    load_job = client.load_table_from_uri(
        uri, table_id, job_config=job_config
    )  # Make an API request.

    load_job.result()  # Waits for the job to complete.

    destination_table = client.get_table(table_id)
    print("Loaded {} rows.".format(destination_table.num_rows))
    # [END bigquery_load_table_gcs_avro]