diff --git a/.librarian/generator-input/client-post-processing/bigtable-integration.yaml b/.librarian/generator-input/client-post-processing/bigtable-integration.yaml
new file mode 100644
index 000000000000..4ed17231fd7e
--- /dev/null
+++ b/.librarian/generator-input/client-post-processing/bigtable-integration.yaml
@@ -0,0 +1,965 @@
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+description: Integrate Google Cloud Bigtable handwritten code
+url: https://github.com/googleapis/gapic-generator-python/issues/123
+replacements:
+ - paths: [
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_create_app_profile_async.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_create_app_profile_sync.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_create_cluster_async.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_create_cluster_sync.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_create_instance_async.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_create_instance_sync.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_create_logical_view_async.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_create_logical_view_sync.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_create_materialized_view_async.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_create_materialized_view_sync.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_get_app_profile_async.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_get_app_profile_sync.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_get_cluster_async.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_get_cluster_sync.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_get_iam_policy_async.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_get_iam_policy_sync.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_get_instance_async.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_get_instance_sync.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_get_logical_view_async.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_get_logical_view_sync.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_get_materialized_view_async.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_get_materialized_view_sync.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_list_app_profiles_async.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_list_app_profiles_sync.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_list_clusters_async.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_list_clusters_sync.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_list_hot_tablets_async.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_list_hot_tablets_sync.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_list_instances_async.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_list_instances_sync.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_list_logical_views_async.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_list_logical_views_sync.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_list_materialized_views_async.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_list_materialized_views_sync.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_partial_update_cluster_async.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_partial_update_cluster_sync.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_partial_update_instance_async.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_partial_update_instance_sync.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_set_iam_policy_async.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_set_iam_policy_sync.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_test_iam_permissions_async.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_test_iam_permissions_sync.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_update_app_profile_async.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_update_app_profile_sync.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_update_cluster_async.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_update_cluster_sync.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_update_instance_async.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_update_instance_sync.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_update_logical_view_async.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_update_logical_view_sync.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_update_materialized_view_async.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_update_materialized_view_sync.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_check_consistency_async.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_check_consistency_sync.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_copy_backup_async.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_copy_backup_sync.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_create_authorized_view_async.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_create_authorized_view_sync.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_create_backup_async.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_create_backup_sync.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_create_schema_bundle_async.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_create_schema_bundle_sync.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_create_table_async.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_create_table_from_snapshot_async.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_create_table_from_snapshot_sync.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_create_table_sync.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_delete_authorized_view_async.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_delete_authorized_view_sync.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_delete_backup_async.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_delete_backup_sync.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_delete_schema_bundle_async.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_delete_schema_bundle_sync.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_delete_snapshot_async.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_delete_snapshot_sync.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_delete_table_async.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_delete_table_sync.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_drop_row_range_async.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_drop_row_range_sync.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_generate_consistency_token_async.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_generate_consistency_token_sync.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_get_authorized_view_async.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_get_authorized_view_sync.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_get_backup_async.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_get_backup_sync.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_get_iam_policy_async.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_get_iam_policy_sync.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_get_schema_bundle_async.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_get_schema_bundle_sync.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_get_snapshot_async.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_get_snapshot_sync.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_get_table_async.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_get_table_sync.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_list_authorized_views_async.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_list_authorized_views_sync.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_list_backups_async.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_list_backups_sync.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_list_schema_bundles_async.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_list_schema_bundles_sync.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_list_snapshots_async.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_list_snapshots_sync.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_list_tables_async.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_list_tables_sync.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_modify_column_families_async.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_modify_column_families_sync.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_restore_table_async_internal.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_restore_table_sync_internal.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_set_iam_policy_async.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_set_iam_policy_sync.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_snapshot_table_async.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_snapshot_table_sync.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_test_iam_permissions_async.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_test_iam_permissions_sync.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_undelete_table_async.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_undelete_table_sync.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_update_authorized_view_async.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_update_authorized_view_sync.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_update_backup_async.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_update_backup_sync.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_update_schema_bundle_async.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_update_schema_bundle_sync.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_update_table_async.py,
+ packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_update_table_sync.py,
+ packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/client.py,
+ packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/async_client.py,
+ packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/client.py,
+ packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/async_client.py,
+ ]
+ before: "client = bigtable_admin_v2.BaseBigtable"
+ after: "client = bigtable_admin_v2.Bigtable"
+ count: 140
+ - paths: [
+ packages/google-cloud-bigtable/tests/unit/gapic/bigtable_v2/test_bigtable.py
+ ]
+ before: |
+ assert \(\n\s*gapic_v1\.routing_header\.to_grpc_metadata\(expected_headers\) in kw\["metadata"\]\n.*
+ after: |
+ # assert the expected headers are present, in any order
+ routing_string = next(
+ iter([m[1] for m in kw["metadata"] if m[0] == "x-goog-request-params"])
+ )
+ assert all([f"{k}={v}" in routing_string for k, v in expected_headers.items()])
+ count: 78
+ - paths: [
+ packages/google-cloud-bigtable/tests/unit/gapic/bigtable_v2/test_bigtable.py
+ ]
+ before: |
+ expected_headers = \{"name": "projects/sample1/instances/sample2"\}
+ after: |
+ expected_headers = {
+ "name": "projects/sample1/instances/sample2",
+ "app_profile_id": "",
+ }
+ count: 15
+ - paths: [
+ packages/google-cloud-bigtable/tests/unit/gapic/bigtable_v2/test_bigtable.py
+ ]
+ before: |
+ expected_headers = {
+ "table_name": "projects/sample1/instances/sample2/tables/sample3"
+ }
+ after: |
+ expected_headers = {
+ "table_name": "projects/sample1/instances/sample2/tables/sample3",
+ "app_profile_id": "",
+ }
+ count: 36
+ - paths: [
+ packages/google-cloud-bigtable/setup.py,
+ ]
+ before: extras = \{\}
+ after: |
+ extras = {
+ "libcst": "libcst >= 0.2.5",
+ }
+ count: 1
+ - paths: [
+ packages/google-cloud-bigtable/setup.py,
+ ]
+ before: |
+ "protobuf >= 4.25.8, < 8.0.0",
+ \]
+ after: |
+ "protobuf >= 4.25.8, < 8.0.0",
+ "google-cloud-core >= 1.4.4, <3.0.0",
+ "grpc-google-iam-v1 >= 0.12.4, <1.0.0",
+ "google-crc32c>=1.5.0, <2.0.0dev",
+ ]
+ count: 1
+ - paths: [
+ packages/google-cloud-bigtable/testing/constraints-3.9.txt
+ ]
+ before: |
+ google-api-core==2.21.0
+ google-auth==2.35.0
+ after: |
+ google-api-core==2.21.0
+ google-cloud-core==2.0.0
+ grpc-google-iam-v1==0.12.4
+ google-auth==2.35.0
+ count: 1
+ - paths: [
+ packages/google-cloud-bigtable/google/cloud/bigtable_v2/services/bigtable/async_client.py,
+ packages/google-cloud-bigtable/google/cloud/bigtable_v2/services/bigtable/client.py,
+ ]
+ before: |
+ if request.app_profile_id:
+ after: |
+ if True: # always attach app_profile_id, even if empty string
+ count: 18
+ - paths: [
+ packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/types/table.py
+ ]
+ before: |
+ from google.cloud.bigtable_admin_v2.types import types\n
+ __protobuf__ = proto.module\(
+ after: |
+ from google.cloud.bigtable_admin_v2.types import types
+ from google.cloud.bigtable_admin_v2.utils import oneof_message
+
+ __protobuf__ = proto.module(
+ count: 1
+ - paths: [
+ packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/types/table.py
+ ]
+ before: |
+ class GcRule\(proto.Message\):
+ after: |
+ class GcRule(oneof_message.OneofMessage):
+ count: 1
+ - paths: [
+ packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/__init__.py
+ ]
+ before: '"UpdateTableRequest",\n\)\n\z'
+ after: |
+ "UpdateTableRequest",
+ )
+
+ from .overlay import * # noqa: F403
+
+ __all__ += overlay.__all__ # noqa: F405
+ count: 1
+ - paths: [
+ packages/google-cloud-bigtable/google/cloud/bigtable_admin/__init__.py
+ ]
+ before: '"Type",\n\)\n\z'
+ after: |
+ "Type",
+ )
+
+ import google.cloud.bigtable_admin_v2.overlay # noqa: F401
+ from google.cloud.bigtable_admin_v2.overlay import * # noqa: F401, F403
+
+ __all__ += google.cloud.bigtable_admin_v2.overlay.__all__
+ count: 1
+ - paths: [
+ packages/google-cloud-bigtable/google/cloud/bigtable/__init__.py
+ ]
+ before: |
+ from google.cloud.bigtable import gapic_version as package_version
+ [\s\S]*"Type",
+ \)
+ after: |
+ """Google Cloud Bigtable API package."""
+
+ from google.cloud.bigtable import gapic_version as package_version
+ from google.cloud.bigtable.client import Client
+
+ __version__: str
+
+ __version__ = package_version.__version__
+
+ __all__ = ["__version__", "Client"]
+ count: 1
+ - paths: [
+ "packages/google-cloud-bigtable/docs/conf.py",
+ ]
+ before: |
+ exclude_patterns = \[
+ \ "_build",
+ after: |
+ exclude_patterns = [
+ "bigtable_v2/**",
+ "bigtable_admin_v2/**",
+ "_build",
+ count: 1
+ - paths: [
+ packages/google-cloud-bigtable/docs/index.rst
+ ]
+ before: |
+ API Reference
+ -------------
+ .. toctree::
+ :maxdepth: 2
+
+ bigtable_admin_v2/services_
+ bigtable_admin_v2/types_
+ after: |
+ Client Types
+ -------------
+ .. toctree::
+ :maxdepth: 3
+
+ data_client/data_client_usage
+ classic_client/usage
+ admin_client/admin_client_usage
+ count: 1
+ - paths: [
+ packages/google-cloud-bigtable/README.rst
+ ]
+ before: |
+ .. _Product Documentation: https://cloud.google.com/bigtable\n
+ Quick Start
+ after: |
+ .. _Product Documentation: https://cloud.google.com/bigtable/docs
+
+
+ Async Data Client
+ -------------------------
+
+ :code:`v2.23.0` includes a release of the new :code:`BigtableDataClientAsync` client, accessible at the import path
+ :code:`google.cloud.bigtable.data`.
+
+ The new client brings a simplified API and increased performance using asyncio.
+ The new client is focused on the data API (i.e. reading and writing Bigtable data), with admin operations
+ remaining exclusively in the existing synchronous client.
+
+ Feedback and bug reports are welcome at cbt-python-client-v3-feedback@google.com,
+ or through the Github `issue tracker`_.
+
+
+ .. note::
+
+ It is generally not recommended to use the async client in an otherwise synchronous codebase. To make use of asyncio's
+ performance benefits, the codebase should be designed to be async from the ground up.
+
+
+ .. _issue tracker: https://github.com/googleapis/google-cloud-python/issues
+
+
+ Quick Start
+ count: 1
+ - paths: [
+ packages/google-cloud-bigtable/mypy.ini
+ ]
+ before: |
+ \[mypy\]
+ [\s\S]*?incremental = True
+ after: |
+ [mypy]
+ python_version = 3.13
+ namespace_packages = True
+ check_untyped_defs = True
+ warn_unreachable = True
+ disallow_any_generics = True
+ exclude = tests/unit/gapic/
+
+ [mypy-grpc.*]
+ ignore_missing_imports = True
+
+ [mypy-google.auth.*]
+ ignore_missing_imports = True
+
+ [mypy-google.iam.*]
+ ignore_missing_imports = True
+
+ [mypy-google.longrunning.*]
+ ignore_missing_imports = True
+
+ [mypy-google.oauth2.*]
+ ignore_missing_imports = True
+
+ [mypy-google.rpc.*]
+ ignore_missing_imports = True
+
+ [mypy-proto.*]
+ ignore_missing_imports = True
+
+ [mypy-pytest]
+ ignore_missing_imports = True
+
+ [mypy-google.cloud.*]
+ ignore_errors = True
+
+ # only verify data client
+ [mypy-google.cloud.bigtable.data.*]
+ ignore_errors = False
+ count: 1
+  # Note: noxfile.py is heavily customized, so we replace the bulk of the generated file.
+ - paths: [
+ packages/google-cloud-bigtable/noxfile.py
+ ]
+ before: |
+ import os
+ [\s\S]*?Run all tests with core dependencies installed from source
+ after: |
+ import os
+ import pathlib
+ import re
+ import shutil
+ import warnings
+ from typing import Dict, List
+
+ import nox
+
+ RUFF_VERSION = "ruff==0.14.14"
+ BLACK_VERSION = "black[jupyter]==23.7.0"
+ LINT_PATHS = ["google", "samples", "tests", "noxfile.py", "setup.py"]
+
+ DEFAULT_PYTHON_VERSION = "3.14"
+
+ ALL_PYTHON = [
+ "3.9",
+ "3.10",
+ "3.11",
+ "3.12",
+ "3.13",
+ "3.14",
+ ]
+
+ UNIT_TEST_STANDARD_DEPENDENCIES = [
+ "mock",
+ "asyncmock",
+ "pytest",
+ "pytest-cov",
+ "pytest-asyncio",
+ BLACK_VERSION,
+ "autoflake",
+ ]
+ UNIT_TEST_EXTERNAL_DEPENDENCIES: List[str] = []
+ UNIT_TEST_LOCAL_DEPENDENCIES: List[str] = []
+ UNIT_TEST_DEPENDENCIES: List[str] = []
+ UNIT_TEST_EXTRAS: List[str] = []
+ UNIT_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {}
+
+ SYSTEM_TEST_PYTHON_VERSIONS: List[str] = ["3.10", "3.14"]
+ SYSTEM_TEST_STANDARD_DEPENDENCIES: List[str] = [
+ "mock",
+ "pytest",
+ "google-cloud-testutils",
+ ]
+ SYSTEM_TEST_EXTERNAL_DEPENDENCIES: List[str] = [
+ "pytest-asyncio==0.21.2",
+ BLACK_VERSION,
+ "pyyaml==6.0.2",
+ ]
+ SYSTEM_TEST_LOCAL_DEPENDENCIES: List[str] = []
+ SYSTEM_TEST_DEPENDENCIES: List[str] = []
+ SYSTEM_TEST_EXTRAS: List[str] = []
+ SYSTEM_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {}
+
+ CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute()
+
+ # 'docfx' is excluded since it only needs to run in 'docs-presubmit'
+ nox.options.sessions = [
+ "unit-3.10",
+ "unit-3.11",
+ "unit-3.12",
+ "unit-3.13",
+ "unit-3.14",
+ "system_emulated",
+ "system",
+ "mypy",
+ "cover",
+ "lint",
+ "lint_setup_py",
+ "blacken",
+ "docs",
+ "format",
+ ]
+
+ # Error if a python version is missing
+ nox.options.error_on_missing_interpreters = True
+
+
+ @nox.session(python=DEFAULT_PYTHON_VERSION)
+ def lint(session):
+ """Run linters.
+
+ Returns a failure if the linters find linting errors or sufficiently
+ serious code quality issues.
+ """
+ session.install("flake8", RUFF_VERSION)
+
+ # 2. Check formatting
+ session.run(
+ "ruff",
+ "format",
+ "--check",
+ f"--target-version=py{ALL_PYTHON[0].replace('.', '')}",
+ "--line-length=88",
+ *LINT_PATHS,
+ )
+
+ session.run("flake8", "google", "tests")
+
+
+ @nox.session(python=DEFAULT_PYTHON_VERSION)
+ def blacken(session):
+ """(Deprecated) Legacy session. Please use 'nox -s format'."""
+ session.log(
+ "WARNING: The 'blacken' session is deprecated and will be removed in a future release. Please use 'nox -s format' in the future."
+ )
+
+ # Just run the ruff formatter (keeping legacy behavior of only formatting, not sorting imports)
+ session.install(RUFF_VERSION)
+ session.run(
+ "ruff",
+ "format",
+ f"--target-version=py{ALL_PYTHON[0].replace('.', '')}",
+ "--line-length=88",
+ *LINT_PATHS,
+ )
+
+
+ @nox.session(python=DEFAULT_PYTHON_VERSION)
+ def format(session):
+ """
+ Run ruff to sort imports and format code.
+ """
+ # 1. Install ruff (skipped automatically if you run with --no-venv)
+ session.install(RUFF_VERSION)
+
+ # 2. Run Ruff to fix imports
+ # check --select I: Enables strict import sorting
+ # --fix: Applies the changes automatically
+ session.run(
+ "ruff",
+ "check",
+ "--select",
+ "I",
+ "--fix",
+ f"--target-version=py{ALL_PYTHON[0].replace('.', '')}",
+ "--line-length=88", # Standard Black line length
+ *LINT_PATHS,
+ )
+
+ # 3. Run Ruff to format code
+ session.run(
+ "ruff",
+ "format",
+ f"--target-version=py{ALL_PYTHON[0].replace('.', '')}",
+ "--line-length=88", # Standard Black line length
+ *LINT_PATHS,
+ )
+
+
+ @nox.session(python=DEFAULT_PYTHON_VERSION)
+ def mypy(session):
+ """Verify type hints are mypy compatible."""
+ session.install("-e", ".")
+ session.install(
+ "mypy", "types-setuptools", "types-protobuf", "types-mock", "types-requests"
+ )
+ session.install("google-cloud-testutils")
+ session.run("mypy", "-p", "google.cloud.bigtable.data")
+
+
+ @nox.session(python=DEFAULT_PYTHON_VERSION)
+ def lint_setup_py(session):
+ """Verify that setup.py is valid (including RST check)."""
+ session.install("setuptools", "docutils", "pygments")
+ session.run("python", "setup.py", "check", "--restructuredtext", "--strict")
+
+
+ def install_unittest_dependencies(session, *constraints):
+ standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES
+ session.install(*standard_deps, *constraints)
+
+ if UNIT_TEST_EXTERNAL_DEPENDENCIES:
+ warnings.warn(
+ "'unit_test_external_dependencies' is deprecated. Instead, please "
+ "use 'unit_test_dependencies' or 'unit_test_local_dependencies'.",
+ DeprecationWarning,
+ )
+ session.install(*UNIT_TEST_EXTERNAL_DEPENDENCIES, *constraints)
+
+ if UNIT_TEST_LOCAL_DEPENDENCIES:
+ session.install(*UNIT_TEST_LOCAL_DEPENDENCIES, *constraints)
+
+ if UNIT_TEST_EXTRAS_BY_PYTHON:
+ extras = UNIT_TEST_EXTRAS_BY_PYTHON.get(session.python, [])
+ elif UNIT_TEST_EXTRAS:
+ extras = UNIT_TEST_EXTRAS
+ else:
+ extras = []
+
+ if extras:
+ session.install("-e", f".[{','.join(extras)}]", *constraints)
+ else:
+ session.install("-e", ".", *constraints)
+
+
+ @nox.session(python=ALL_PYTHON)
+ @nox.parametrize(
+ "protobuf_implementation",
+ ["python", "upb", "cpp"],
+ )
+ def unit(session, protobuf_implementation):
+    if session.python in ("3.7",):
+ session.skip("Python 3.7 is no longer supported")
+
+ # Install all test dependencies, then install this package in-place.
+ py_version = tuple([int(v) for v in session.python.split(".")])
+ if protobuf_implementation == "cpp" and py_version >= (3, 11):
+ session.skip("cpp implementation is not supported in python 3.11+")
+
+ constraints_path = str(
+ CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt"
+ )
+ install_unittest_dependencies(session, "-c", constraints_path)
+
+ # TODO(https://github.com/googleapis/synthtool/issues/1976):
+ # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped.
+ # The 'cpp' implementation requires Protobuf<4.
+ if protobuf_implementation == "cpp":
+ session.install("protobuf<4")
+
+ # Run py.test against the unit tests.
+ session.run(
+ "py.test",
+ "--quiet",
+ f"--junitxml=unit_{session.python}_sponge_log.xml",
+ "--cov=google",
+ "--cov=tests/unit",
+ "--cov-append",
+ "--cov-config=.coveragerc",
+ "--cov-report=",
+ "--cov-fail-under=0",
+ os.path.join("tests", "unit"),
+ *session.posargs,
+ env={
+ "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation,
+ },
+ )
+
+
+ def install_systemtest_dependencies(session, *constraints):
+ # Use pre-release gRPC for system tests.
+ # Exclude version 1.52.0rc1 which has a known issue.
+ # See https://github.com/grpc/grpc/issues/32163
+ session.install("--pre", "grpcio!=1.52.0rc1")
+
+ session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints)
+
+ if SYSTEM_TEST_EXTERNAL_DEPENDENCIES:
+ session.install(*SYSTEM_TEST_EXTERNAL_DEPENDENCIES, *constraints)
+
+ if SYSTEM_TEST_LOCAL_DEPENDENCIES:
+ session.install("-e", *SYSTEM_TEST_LOCAL_DEPENDENCIES, *constraints)
+
+ if SYSTEM_TEST_DEPENDENCIES:
+ session.install("-e", *SYSTEM_TEST_DEPENDENCIES, *constraints)
+
+ if SYSTEM_TEST_EXTRAS_BY_PYTHON:
+ extras = SYSTEM_TEST_EXTRAS_BY_PYTHON.get(session.python, [])
+ elif SYSTEM_TEST_EXTRAS:
+ extras = SYSTEM_TEST_EXTRAS
+ else:
+ extras = []
+
+ if extras:
+ session.install("-e", f".[{','.join(extras)}]", *constraints)
+ else:
+ session.install("-e", ".", *constraints)
+
+
+ @nox.session(python=DEFAULT_PYTHON_VERSION)
+ def system_emulated(session):
+ import signal
+ import subprocess
+
+ try:
+ subprocess.call(["gcloud", "--version"])
+ except OSError:
+ session.skip("gcloud not found but required for emulator support")
+
+ # Currently, CI/CD doesn't have beta component of gcloud.
+ subprocess.call(["gcloud", "components", "install", "beta", "bigtable"])
+
+ hostport = "localhost:8789"
+ session.env["BIGTABLE_EMULATOR_HOST"] = hostport
+
+ p = subprocess.Popen(
+ ["gcloud", "beta", "emulators", "bigtable", "start", "--host-port", hostport]
+ )
+
+ try:
+ system_default(session)
+ finally:
+ # Stop Emulator
+ os.killpg(os.getpgid(p.pid), signal.SIGKILL)
+
+
+ # Run the system/emulator tests
+ @nox.session(py="3.12")
+ @nox.parametrize(
+ "test_type",
+ ["system_default", "system_emulated"],
+ )
+ def system(session, test_type):
+ """Run the system/emulator tests."""
+ # system and emulator tests
+ test_map = {
+ "system_default": system_default,
+ "system_emulated": system_emulated,
+ }
+ test_map[test_type](session)
+
+
+ def system_default(session):
+ """Run the system test suite."""
+ constraints_path = str(
+ CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt"
+ )
+ system_test_path = os.path.join("tests", "system.py")
+ system_test_folder_path = os.path.join("tests", "system")
+
+ # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true.
+ if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false":
+ session.skip("RUN_SYSTEM_TESTS is set to false, skipping")
+ # Install pyopenssl for mTLS testing.
+ if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true":
+ session.install("pyopenssl")
+
+ system_test_exists = os.path.exists(system_test_path)
+ system_test_folder_exists = os.path.exists(system_test_folder_path)
+ # Sanity check: only run tests if found.
+ if not system_test_exists and not system_test_folder_exists:
+ session.skip("System tests were not found")
+
+ install_systemtest_dependencies(session, "-c", constraints_path)
+
+ # Run py.test against the system tests.
+ if system_test_exists:
+ session.run(
+ "py.test",
+ "--quiet",
+ f"--junitxml=system_{session.python}_sponge_log.xml",
+ system_test_path,
+ *session.posargs,
+ )
+ if system_test_folder_exists:
+ session.run(
+ "py.test",
+ "--quiet",
+ f"--junitxml=system_{session.python}_sponge_log.xml",
+ system_test_folder_path,
+ *session.posargs,
+ )
+
+
+ @nox.session(python=DEFAULT_PYTHON_VERSION)
+ def cover(session):
+ """Run the final coverage report.
+
+ This outputs the coverage report aggregating coverage from the unit
+ test runs (not system test runs), and then erases coverage data.
+ """
+ session.install("coverage", "pytest-cov")
+ session.run("coverage", "report", "--show-missing", "--fail-under=99")
+
+ session.run("coverage", "erase")
+
+
+ @nox.session(python="3.10")
+ def docs(session):
+ """Build the docs for this library."""
+
+ session.install("-e", ".")
+ session.install(
+ # We need to pin to specific versions of the `sphinxcontrib-*` packages
+ # which still support sphinx 4.x.
+ # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344
+ # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345.
+ "sphinxcontrib-applehelp==1.0.4",
+ "sphinxcontrib-devhelp==1.0.2",
+ "sphinxcontrib-htmlhelp==2.0.1",
+ "sphinxcontrib-qthelp==1.0.3",
+ "sphinxcontrib-serializinghtml==1.1.5",
+ "sphinx==4.5.0",
+ "alabaster",
+ "recommonmark",
+ )
+
+ shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True)
+ session.run(
+ "sphinx-build",
+ "-W", # warnings as errors
+ "-T", # show full traceback on exception
+ "-N", # no colors
+ "-b",
+ "html",
+ "-d",
+ os.path.join("docs", "_build", "doctrees", ""),
+ os.path.join("docs", ""),
+ os.path.join("docs", "_build", "html", ""),
+ )
+
+
+ @nox.session(python="3.10")
+ def docfx(session):
+ """Build the docfx yaml files for this library."""
+
+ session.install("-e", ".")
+ session.install(
+ # We need to pin to specific versions of the `sphinxcontrib-*` packages
+ # which still support sphinx 4.x.
+ # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344
+ # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345.
+ "sphinxcontrib-applehelp==1.0.4",
+ "sphinxcontrib-devhelp==1.0.2",
+ "sphinxcontrib-htmlhelp==2.0.1",
+ "sphinxcontrib-qthelp==1.0.3",
+ "sphinxcontrib-serializinghtml==1.1.5",
+ "gcp-sphinx-docfx-yaml",
+ "alabaster",
+ "recommonmark",
+ )
+
+ shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True)
+ session.run(
+ "sphinx-build",
+ "-T", # show full traceback on exception
+ "-N", # no colors
+ "-D",
+ (
+ "extensions=sphinx.ext.autodoc,"
+ "sphinx.ext.autosummary,"
+ "docfx_yaml.extension,"
+ "sphinx.ext.intersphinx,"
+ "sphinx.ext.coverage,"
+ "sphinx.ext.napoleon,"
+ "sphinx.ext.todo,"
+ "sphinx.ext.viewcode,"
+ "recommonmark"
+ ),
+ "-b",
+ "html",
+ "-d",
+ os.path.join("docs", "_build", "doctrees", ""),
+ os.path.join("docs", ""),
+ os.path.join("docs", "_build", "html", ""),
+ )
+ # Customization: Add extra sections to the table of contents for the Classic vs Async clients
+ session.install("pyyaml")
+ session.run("python", "docs/scripts/patch_devsite_toc.py")
+
+
+ @nox.session(python=DEFAULT_PYTHON_VERSION)
+ @nox.parametrize(
+ "protobuf_implementation",
+ ["python", "upb", "cpp"],
+ )
+ def prerelease_deps(session, protobuf_implementation):
+ """Run all tests with prerelease versions of dependencies installed."""
+
+ py_version = tuple([int(v) for v in session.python.split(".")])
+ if protobuf_implementation == "cpp" and py_version >= (3, 11):
+ session.skip("cpp implementation is not supported in python 3.11+")
+
+ # Install all dependencies
+ session.install("-e", ".[all, tests, tracing]")
+ unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES
+ session.install(*unit_deps_all)
+ system_deps_all = (
+ SYSTEM_TEST_STANDARD_DEPENDENCIES + SYSTEM_TEST_EXTERNAL_DEPENDENCIES
+ )
+ session.install(*system_deps_all)
+
+ # Because we test minimum dependency versions on the minimum Python
+ # version, the first version we test with in the unit tests sessions has a
+ # constraints file containing all dependencies and extras.
+ with open(
+ CURRENT_DIRECTORY / "testing" / f"constraints-{ALL_PYTHON[0]}.txt",
+ encoding="utf-8",
+ ) as constraints_file:
+ constraints_text = constraints_file.read()
+
+ # Ignore leading whitespace and comment lines.
+ constraints_deps = [
+ match.group(1)
+ for match in re.finditer(
+ r"^\\s*(\\S+)(?===\\S+)", constraints_text, flags=re.MULTILINE
+ )
+ ]
+
+ session.install(*constraints_deps)
+
+ prerel_deps = [
+ "protobuf",
+ # dependency of grpc
+ "six",
+ "grpc-google-iam-v1",
+ "googleapis-common-protos",
+ "grpcio",
+ "grpcio-status",
+ "google-api-core",
+ "google-auth",
+ "proto-plus",
+ "google-cloud-testutils",
+ # dependencies of google-cloud-testutils"
+ "click",
+ ]
+
+ for dep in prerel_deps:
+ session.install("--pre", "--no-deps", "--upgrade", dep)
+
+ # Remaining dependencies
+ other_deps = [
+ "requests",
+ "cryptography",
+ ]
+ session.install(*other_deps)
+
+ # Print out prerelease package versions
+ session.run(
+ "python", "-c", "import google.protobuf; print(google.protobuf.__version__)"
+ )
+ session.run("python", "-c", "import grpc; print(grpc.__version__)")
+ session.run("python", "-c", "import google.auth; print(google.auth.__version__)")
+
+ session.run(
+ "py.test",
+ "tests/unit",
+ env={
+ "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation,
+ },
+ )
+
+
+ @nox.session(python=DEFAULT_PYTHON_VERSION)
+ def generate_sync(session):
+ """
+ Re-generate sync files for the library from CrossSync-annotated async source
+ """
+ session.install(BLACK_VERSION)
+ session.install("autoflake")
+ session.run("python", ".cross_sync/generate.py", ".")
+
+
+ @nox.session(python=DEFAULT_PYTHON_VERSION)
+ @nox.parametrize(
+ "protobuf_implementation",
+ ["python", "upb"],
+ )
+ def core_deps_from_source(session, protobuf_implementation):
+ """Run all tests with core dependencies installed from source,
+ count: 1
diff --git a/.librarian/generator-input/packages/google-cloud-bigtable/.repo-metadata.json b/.librarian/generator-input/packages/google-cloud-bigtable/.repo-metadata.json
new file mode 100644
index 000000000000..011989a371e5
--- /dev/null
+++ b/.librarian/generator-input/packages/google-cloud-bigtable/.repo-metadata.json
@@ -0,0 +1,81 @@
+{
+ "name": "bigtable",
+ "name_pretty": "Google Cloud Bigtable",
+ "api_description": "is Google's NoSQL Big Data database service. It's the\nsame database that powers many core Google services, including Search,\nAnalytics, Maps, and Gmail.",
+ "product_documentation": "https://cloud.google.com/bigtable",
+ "client_documentation": "https://cloud.google.com/python/docs/reference/bigtable/latest",
+ "issue_tracker": "https://issuetracker.google.com/savedsearches/559777",
+ "release_level": "stable",
+ "language": "python",
+ "library_type": "GAPIC_COMBO",
+ "repo": "googleapis/google-cloud-python",
+ "distribution_name": "google-cloud-bigtable",
+ "api_id": "bigtable.googleapis.com",
+ "requires_billing": true,
+ "samples": [
+ {
+ "name": "Hello World in Cloud Bigtable",
+ "description": "Demonstrates how to connect to Cloud Bigtable and run some basic operations. More information available at: https://cloud.google.com/bigtable/docs/samples-python-hello",
+ "file": "main.py",
+ "runnable": true,
+ "custom_content": "
usage: main.py [-h] [--table TABLE] project_id instance_id
Demonstrates how to connect to Cloud Bigtable and run some basic operations.
Prerequisites: - Create a Cloud Bigtable cluster.
https://cloud.google.com/bigtable/docs/creating-cluster - Set your Google
Application Default Credentials.
https://developers.google.com/identity/protocols/application-default-
credentials
positional arguments:
project_id Your Cloud Platform project ID.
instance_id ID of the Cloud Bigtable instance to connect to.
optional arguments:
-h, --help show this help message and exit
--table TABLE Table to create and destroy. (default: Hello-Bigtable)
",
+ "override_path": "hello"
+ },
+ {
+ "name": "Hello World using HappyBase",
+ "description": "This sample demonstrates using the Google Cloud Client Library HappyBase package, an implementation of the HappyBase API to connect to and interact with Cloud Bigtable. More information available at: https://cloud.google.com/bigtable/docs/samples-python-hello-happybase",
+ "file": "main.py",
+ "runnable": true,
+ "custom_content": "usage: main.py [-h] [--table TABLE] project_id instance_id
Demonstrates how to connect to Cloud Bigtable and run some basic operations.
Prerequisites: - Create a Cloud Bigtable cluster.
https://cloud.google.com/bigtable/docs/creating-cluster - Set your Google
Application Default Credentials.
https://developers.google.com/identity/protocols/application-default-
credentials
positional arguments:
project_id Your Cloud Platform project ID.
instance_id ID of the Cloud Bigtable instance to connect to.
optional arguments:
-h, --help show this help message and exit
--table TABLE Table to create and destroy. (default: Hello-Bigtable)
",
+ "override_path": "hello_happybase"
+ },
+ {
+ "name": "cbt Command Demonstration",
+ "description": "This page explains how to use the cbt command to connect to a Cloud Bigtable instance, perform basic administrative tasks, and read and write data in a table. More information about this quickstart is available at https://cloud.google.com/bigtable/docs/quickstart-cbt",
+ "file": "instanceadmin.py",
+ "runnable": true,
+ "custom_content": "usage: instanceadmin.py [-h] [run] [dev-instance] [del-instance] [add-cluster] [del-cluster] project_id instance_id cluster_id
Demonstrates how to connect to Cloud Bigtable and run some basic operations.
Prerequisites: - Create a Cloud Bigtable cluster.
https://cloud.google.com/bigtable/docs/creating-cluster - Set your Google
Application Default Credentials.
https://developers.google.com/identity/protocols/application-default-
credentials
positional arguments:
project_id Your Cloud Platform project ID.
instance_id ID of the Cloud Bigtable instance to connect to.
optional arguments:
-h, --help show this help message and exit
--table TABLE Table to create and destroy. (default: Hello-Bigtable)
",
+ "override_path": "instanceadmin"
+ },
+ {
+ "name": "Metric Scaler",
+ "description": "This sample demonstrates how to use Stackdriver Monitoring to scale Cloud Bigtable based on CPU usage.",
+ "file": "metricscaler.py",
+ "runnable": true,
+ "custom_content": "usage: metricscaler.py [-h] [--high_cpu_threshold HIGH_CPU_THRESHOLD] [--low_cpu_threshold LOW_CPU_THRESHOLD] [--short_sleep SHORT_SLEEP] [--long_sleep LONG_SLEEP] bigtable_instance bigtable_cluster
usage: metricscaler.py [-h] [--high_cpu_threshold HIGH_CPU_THRESHOLD]
[--low_cpu_threshold LOW_CPU_THRESHOLD]
[--short_sleep SHORT_SLEEP] [--long_sleep LONG_SLEEP]
bigtable_instance bigtable_cluster
Scales Cloud Bigtable clusters based on CPU usage.
positional arguments:
bigtable_instance ID of the Cloud Bigtable instance to connect to.
bigtable_cluster ID of the Cloud Bigtable cluster to connect to.
optional arguments:
-h, --help show this help message and exit
--high_cpu_threshold HIGH_CPU_THRESHOLD
If Cloud Bigtable CPU usage is above this threshold,
scale up
--low_cpu_threshold LOW_CPU_THRESHOLD
If Cloud Bigtable CPU usage is below this threshold,
scale down
--short_sleep SHORT_SLEEP
How long to sleep in seconds between checking metrics
after no scale operation
--long_sleep LONG_SLEEP
How long to sleep in seconds between checking metrics
after a scaling operation
",
+ "override_path": "metricscaler"
+ },
+ {
+ "name": "Quickstart",
+ "description": "Demonstrates of Cloud Bigtable. This sample creates a Bigtable client, connects to an instance and then to a table, then closes the connection.",
+ "file": "main.py",
+ "runnable": true,
+ "custom_content": "usage: main.py [-h] [--table TABLE] project_id instance_id
positional arguments:
project_id Your Cloud Platform project ID.
instance_id ID of the Cloud Bigtable instance to connect to.
optional arguments:
-h, --help show this help message and exit
--table TABLE Existing table used in the quickstart. (default: my-table)
",
+ "override_path": "quickstart"
+ },
+ {
+ "name": "Quickstart using HappyBase",
+ "description": "Demonstrates of Cloud Bigtable using HappyBase. This sample creates a Bigtable client, connects to an instance and then to a table, then closes the connection.",
+ "file": "main.py",
+ "runnable": true,
+ "custom_content": "usage: main.py [-h] [--table TABLE] project_id instance_id
usage: main.py [-h] [--table TABLE] project_id instance_id
positional arguments:
project_id Your Cloud Platform project ID.
instance_id ID of the Cloud Bigtable instance to connect to.
optional arguments:
-h, --help show this help message and exit
--table TABLE Existing table used in the quickstart. (default: my-table)
",
+ "override_path": "quickstart_happybase"
+ },
+ {
+ "name": "Snippets",
+ "description": "This folder contains snippets for Python Cloud Bigtable.",
+ "override_path": "snippets"
+ },
+ {
+ "name": "Table Admin",
+ "description": "Demonstrates how to connect to Cloud Bigtable and run some basic operations.",
+ "file": "tableadmin.py",
+ "runnable": true,
+ "custom_content": "usage: tableadmin.py [-h] [run] [delete] [--table TABLE] project_id instance_id
Demonstrates how to connect to Cloud Bigtable and run some basic operations.
Prerequisites: - Create a Cloud Bigtable cluster.
https://cloud.google.com/bigtable/docs/creating-cluster - Set your Google
Application Default Credentials.
https://developers.google.com/identity/protocols/application-default-
credentials
positional arguments:
project_id Your Cloud Platform project ID.
instance_id ID of the Cloud Bigtable instance to connect to.
optional arguments:
-h, --help show this help message and exit
--table TABLE Table to create and destroy. (default: Hello-Bigtable)
",
+ "override_path": "tableadmin"
+ }
+ ],
+ "default_version": "v2",
+ "codeowner_team": "@googleapis/api-bigtable @googleapis/api-bigtable-partners",
+ "api_shortname": "bigtable"
+}
diff --git a/.librarian/state.yaml b/.librarian/state.yaml
index f4e53f215001..faec54146997 100644
--- a/.librarian/state.yaml
+++ b/.librarian/state.yaml
@@ -954,12 +954,50 @@ libraries:
tag_format: '{id}-v{version}'
- id: google-cloud-bigtable
version: 2.36.0
- last_generated_commit: ""
- apis: []
+ last_generated_commit: a6cbf809c4c165e618ee23a059442af90a80a0f5
+ apis:
+ - path: google/bigtable/admin/v2
+ service_config: bigtableadmin_v2.yaml
+ - path: google/bigtable/v2
+ service_config: bigtable_v2.yaml
source_roots:
- packages/google-cloud-bigtable
- preserve_regex: []
- remove_regex: []
+ preserve_regex:
+ - packages/google-cloud-bigtable/CHANGELOG.md
+ - docs/CHANGELOG.md
+ remove_regex:
+ - ^packages/google-cloud-bigtable/.coveragerc
+ - ^packages/google-cloud-bigtable/.flake8
+ - ^packages/google-cloud-bigtable/.repo-metadata.json
+ - ^packages/google-cloud-bigtable/LICENSE
+ - ^packages/google-cloud-bigtable/MANIFEST.in
+ - ^packages/google-cloud-bigtable/README.rst
+ - ^packages/google-cloud-bigtable/mypy.ini
+ - ^packages/google-cloud-bigtable/noxfile.py
+ - ^packages/google-cloud-bigtable/setup.py
+ - ^packages/google-cloud-bigtable/docs/conf.py
+ - ^packages/google-cloud-bigtable/docs/index.rst
+ - ^packages/google-cloud-bigtable/docs/multiprocessing.rst
+ - ^packages/google-cloud-bigtable/docs/summary_overview.md
+ - ^packages/google-cloud-bigtable/docs/README.rst
+ - ^packages/google-cloud-bigtable/docs/_static/custom.css
+ - ^packages/google-cloud-bigtable/docs/_templates
+ - ^packages/google-cloud-bigtable/docs/bigtable
+ - ^packages/google-cloud-bigtable/google/cloud/bigtable/__init__.py
+ - ^packages/google-cloud-bigtable/google/cloud/bigtable/gapic_version.py
+ - ^packages/google-cloud-bigtable/google/cloud/bigtable/py.typed
+ - ^packages/google-cloud-bigtable/google/cloud/bigtable_v2
+ - ^packages/google-cloud-bigtable/google/cloud/bigtable_admin/
+ - ^packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services
+ - ^packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/types
+ - ^packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/__init__.py
+ - ^packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/gapic
+ - ^packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/py.typed
+ - ^packages/google-cloud-bigtable/samples/generated_samples
+ - ^packages/google-cloud-bigtable/testing
+ - ^packages/google-cloud-bigtable/tests/__init__.py
+ - ^packages/google-cloud-bigtable/tests/unit/__init__.py
+ - ^packages/google-cloud-bigtable/tests/unit/gapic
tag_format: '{id}-v{version}'
- id: google-cloud-billing
version: 1.19.0
diff --git a/packages/google-cloud-bigtable/.coveragerc b/packages/google-cloud-bigtable/.coveragerc
index f12d4dc21a9f..9b4f2d9d52fc 100644
--- a/packages/google-cloud-bigtable/.coveragerc
+++ b/packages/google-cloud-bigtable/.coveragerc
@@ -1,35 +1,13 @@
-# -*- coding: utf-8 -*-
-#
-# Copyright 2020 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# https://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Generated by synthtool. DO NOT EDIT!
[run]
branch = True
-omit =
- google/cloud/bigtable_admin/__init__.py
- google/cloud/bigtable_admin/gapic_version.py
[report]
-fail_under = 99
show_missing = True
+omit =
+ google/cloud/bigtable/__init__.py
+ google/cloud/bigtable/gapic_version.py
exclude_lines =
# Re-enable the standard pragma
pragma: NO COVER
# Ignore debug-only repr
def __repr__
- # Ignore abstract methods
- raise NotImplementedError
-omit =
- */site-packages/*.py
diff --git a/packages/google-cloud-bigtable/.flake8 b/packages/google-cloud-bigtable/.flake8
index 32986c79287a..90316de21489 100644
--- a/packages/google-cloud-bigtable/.flake8
+++ b/packages/google-cloud-bigtable/.flake8
@@ -1,28 +1,29 @@
# -*- coding: utf-8 -*-
-#
-# Copyright 2024 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
-# https://www.apache.org/licenses/LICENSE-2.0
+# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-
-# Generated by synthtool. DO NOT EDIT!
+#
[flake8]
+# TODO(https://github.com/googleapis/gapic-generator-python/issues/2333):
+# Resolve flake8 lint issues
ignore = E203, E231, E266, E501, W503
exclude =
- # Exclude generated code.
- **/proto/**
+ # TODO(https://github.com/googleapis/gapic-generator-python/issues/2333):
+ # Ensure that generated code passes flake8 lint
**/gapic/**
**/services/**
**/types/**
+ # Exclude Protobuf gencode
*_pb2.py
# Standard linting exemptions.
diff --git a/packages/google-cloud-bigtable/.repo-metadata.json b/packages/google-cloud-bigtable/.repo-metadata.json
index 7c2effe06c93..011989a371e5 100644
--- a/packages/google-cloud-bigtable/.repo-metadata.json
+++ b/packages/google-cloud-bigtable/.repo-metadata.json
@@ -1,6 +1,7 @@
{
"name": "bigtable",
- "name_pretty": "Cloud Bigtable",
+ "name_pretty": "Google Cloud Bigtable",
+ "api_description": "is Google's NoSQL Big Data database service. It's the\nsame database that powers many core Google services, including Search,\nAnalytics, Maps, and Gmail.",
"product_documentation": "https://cloud.google.com/bigtable",
"client_documentation": "https://cloud.google.com/python/docs/reference/bigtable/latest",
"issue_tracker": "https://issuetracker.google.com/savedsearches/559777",
@@ -57,7 +58,7 @@
"description": "Demonstrates of Cloud Bigtable using HappyBase. This sample creates a Bigtable client, connects to an instance and then to a table, then closes the connection.",
"file": "main.py",
"runnable": true,
- "custom_content": "usage: main.py [-h] [--table TABLE] project_id instance_id
usage: main.py [-h] [--table TABLE] project_id instance_id
positional arguments:
project_id Your Cloud Platform project ID.
instance_id ID of the Cloud Bigtable instance to connect to.
optional arguments:
-h, --help show this help message and exit
--table TABLE Existing table used in the quickstart. (default: my-table)usage: main.py [-h] [--table TABLE] project_id instance_id
usage: main.py [-h] [--table TABLE] project_id instance_id
positional arguments:
project_id Your Cloud Platform project ID.
instance_id ID of the Cloud Bigtable instance to connect to.
optional arguments:
-h, --help show this help message and exit
--table TABLE Existing table used in the quickstart. (default: my-table)
",
"override_path": "quickstart_happybase"
},
{
diff --git a/packages/google-cloud-bigtable/MANIFEST.in b/packages/google-cloud-bigtable/MANIFEST.in
index d6814cd60037..dae249ec8976 100644
--- a/packages/google-cloud-bigtable/MANIFEST.in
+++ b/packages/google-cloud-bigtable/MANIFEST.in
@@ -1,25 +1,20 @@
# -*- coding: utf-8 -*-
-#
-# Copyright 2024 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
-# https://www.apache.org/licenses/LICENSE-2.0
+# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-
-# Generated by synthtool. DO NOT EDIT!
+#
include README.rst LICENSE
-recursive-include google *.json *.proto py.typed
+recursive-include google *.py *.pyi *.json *.proto py.typed
recursive-include tests *
global-exclude *.py[co]
global-exclude __pycache__
-
-# Exclude scripts for samples readmegen
-prune scripts/readme-gen
diff --git a/packages/google-cloud-bigtable/README.rst b/packages/google-cloud-bigtable/README.rst
index b054a1c2917c..8d1740221806 100644
--- a/packages/google-cloud-bigtable/README.rst
+++ b/packages/google-cloud-bigtable/README.rst
@@ -1,23 +1,23 @@
Python Client for Google Cloud Bigtable
=======================================
-|GA| |pypi| |versions|
+|stable| |pypi| |versions|
-`Google Cloud Bigtable`_ is Google's NoSQL Big Data database service. It's the
+`Google Cloud Bigtable`_: is Google's NoSQL Big Data database service. It's the
same database that powers many core Google services, including Search,
Analytics, Maps, and Gmail.
- `Client Library Documentation`_
- `Product Documentation`_
-.. |GA| image:: https://img.shields.io/badge/support-GA-gold.svg
- :target: https://github.com/googleapis/google-cloud-python/blob/main/README.rst#general-availability
+.. |stable| image:: https://img.shields.io/badge/support-stable-gold.svg
+ :target: https://github.com/googleapis/google-cloud-python/blob/main/README.rst#stability-levels
.. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-bigtable.svg
:target: https://pypi.org/project/google-cloud-bigtable/
.. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-bigtable.svg
:target: https://pypi.org/project/google-cloud-bigtable/
.. _Google Cloud Bigtable: https://cloud.google.com/bigtable
-.. _Client Library Documentation: https://googleapis.dev/python/bigtable/latest
+.. _Client Library Documentation: https://cloud.google.com/python/docs/reference/bigtable/latest/summary_overview
.. _Product Documentation: https://cloud.google.com/bigtable/docs
@@ -51,54 +51,64 @@ In order to use this library, you first need to go through the following steps:
1. `Select or create a Cloud Platform project.`_
2. `Enable billing for your project.`_
-3. `Enable the Cloud Bigtable API.`_
-4. `Setup Authentication.`_
+3. `Enable the Google Cloud Bigtable.`_
+4. `Set up Authentication.`_
.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project
.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project
-.. _Enable the Cloud Bigtable API.: https://cloud.google.com/bigtable
-.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html
+.. _Enable the Google Cloud Bigtable.: https://cloud.google.com/bigtable
+.. _Set up Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html
Installation
~~~~~~~~~~~~
-Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to
-create isolated Python environments. The basic problem it addresses is one of
-dependencies and versions, and indirectly permissions.
+Install this library in a virtual environment using `venv`_. `venv`_ is a tool that
+creates isolated Python environments. These isolated environments can have separate
+versions of Python packages, which allows you to isolate one project's dependencies
+from the dependencies of other projects.
-With `virtualenv`_, it's possible to install this library without needing system
+With `venv`_, it's possible to install this library without needing system
install permissions, and without clashing with the installed system
dependencies.
-.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/
+.. _`venv`: https://docs.python.org/3/library/venv.html
+
+
+Code samples and snippets
+~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Code samples and snippets live in the `samples/`_ folder.
+
+.. _samples/: https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigtable/samples
Supported Python Versions
^^^^^^^^^^^^^^^^^^^^^^^^^
+Our client libraries are compatible with all current `active`_ and `maintenance`_ versions of
+Python.
-Python >= 3.7
+Python >= 3.9, including 3.14
-Deprecated Python Versions
-^^^^^^^^^^^^^^^^^^^^^^^^^^
+.. _active: https://devguide.python.org/devcycle/#in-development-main-branch
+.. _maintenance: https://devguide.python.org/devcycle/#maintenance-branches
-- Python 2.7: the last released version which supported Python 2.7 was
- version 1.7.0, released 2021-02-09.
+Unsupported Python Versions
+^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Python <= 3.8
-- Python 3.5: the last released version which supported Python 3.5 was
- version 1.7.0, released 2021-02-09.
+If you are using an `end-of-life`_
+version of Python, we recommend that you update as soon as possible to an actively supported version.
-- Python 3.6: the last released version which supported Python 3.6 was
- version v2.10.1, released 2022-06-03.
+.. _end-of-life: https://devguide.python.org/devcycle/#end-of-life-branches
Mac/Linux
^^^^^^^^^
.. code-block:: console
- pip install virtualenv
- virtualenv
+ python3 -m venv
source /bin/activate
- /bin/pip install google-cloud-bigtable
+ pip install google-cloud-bigtable
Windows
@@ -106,15 +116,108 @@ Windows
.. code-block:: console
- pip install virtualenv
- virtualenv
- \Scripts\activate
- \Scripts\pip.exe install google-cloud-bigtable
+    py -m venv <your-env>
+    .\<your-env>\Scripts\activate
+ pip install google-cloud-bigtable
Next Steps
~~~~~~~~~~
-- Read the `Client Library Documentation`_ for Cloud Bigtable API
+- Read the `Client Library Documentation`_ for Google Cloud Bigtable
to see other available methods on the client.
-- Read the `Product documentation`_ to learn
+- Read the `Google Cloud Bigtable Product documentation`_ to learn
more about the product and see How-to Guides.
+- View this `README`_ to see the full list of Cloud
+ APIs that we cover.
+
+.. _Google Cloud Bigtable Product documentation: https://cloud.google.com/bigtable
+.. _README: https://github.com/googleapis/google-cloud-python/blob/main/README.rst
+
+Logging
+-------
+
+This library uses the standard Python :code:`logging` functionality to log some RPC events that could be of interest for debugging and monitoring purposes.
+Note the following:
+
+#. Logs may contain sensitive information. Take care to **restrict access to the logs** if they are saved, whether it be on local storage or on Google Cloud Logging.
+#. Google may refine the occurrence, level, and content of various log messages in this library without flagging such changes as breaking. **Do not depend on immutability of the logging events**.
+#. By default, the logging events from this library are not handled. You must **explicitly configure log handling** using one of the mechanisms below.
+
+Simple, environment-based configuration
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+To enable logging for this library without any changes in your code, set the :code:`GOOGLE_SDK_PYTHON_LOGGING_SCOPE` environment variable to a valid Google
+logging scope. This configures handling of logging events (at level :code:`logging.DEBUG` or higher) from this library in a default manner, emitting the logged
+messages in a structured format. It does not currently allow customizing the logging levels captured nor the handlers, formatters, etc. used for any logging
+event.
+
+A logging scope is a period-separated namespace that begins with :code:`google`, identifying the Python module or package to log.
+
+- Valid logging scopes: :code:`google`, :code:`google.cloud.asset.v1`, :code:`google.api`, :code:`google.auth`, etc.
+- Invalid logging scopes: :code:`foo`, :code:`123`, etc.
+
+**NOTE**: If the logging scope is invalid, the library does not set up any logging handlers.
+
+Environment-Based Examples
+^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+- Enabling the default handler for all Google-based loggers
+
+.. code-block:: console
+
+ export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google
+
+- Enabling the default handler for a specific Google module (for a client library called :code:`library_v1`):
+
+.. code-block:: console
+
+ export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google.cloud.library_v1
+
+
+Advanced, code-based configuration
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+You can also configure a valid logging scope using Python's standard `logging` mechanism.
+
+Code-Based Examples
+^^^^^^^^^^^^^^^^^^^
+
+- Configuring a handler for all Google-based loggers
+
+.. code-block:: python
+
+ import logging
+
+ from google.cloud import library_v1
+
+ base_logger = logging.getLogger("google")
+ base_logger.addHandler(logging.StreamHandler())
+ base_logger.setLevel(logging.DEBUG)
+
+- Configuring a handler for a specific Google module (for a client library called :code:`library_v1`):
+
+.. code-block:: python
+
+ import logging
+
+ from google.cloud import library_v1
+
+ base_logger = logging.getLogger("google.cloud.library_v1")
+ base_logger.addHandler(logging.StreamHandler())
+ base_logger.setLevel(logging.DEBUG)
+
+Logging details
+~~~~~~~~~~~~~~~
+
+#. Regardless of which of the mechanisms above you use to configure logging for this library, by default logging events are not propagated up to the root
+ logger from the `google`-level logger. If you need the events to be propagated to the root logger, you must explicitly set
+ :code:`logging.getLogger("google").propagate = True` in your code.
+#. You can mix the different logging configurations above for different Google modules. For example, you may want to use a code-based logging configuration for
+ one library, but decide you need to also set up environment-based logging configuration for another library.
+
+ #. If you attempt to use both code-based and environment-based configuration for the same module, the environment-based configuration will be ineffectual
+   if the code-based configuration gets applied first.
+
+#. The Google-specific logging configurations (default handlers for environment-based configuration; not propagating logging events to the root logger) get
+ executed the first time *any* client library is instantiated in your application, and only if the affected loggers have not been previously configured.
+ (This is the reason for 2.i. above.)
diff --git a/packages/google-cloud-bigtable/docs/README.rst b/packages/google-cloud-bigtable/docs/README.rst
deleted file mode 120000
index 89a0106941ff..000000000000
--- a/packages/google-cloud-bigtable/docs/README.rst
+++ /dev/null
@@ -1 +0,0 @@
-../README.rst
\ No newline at end of file
diff --git a/packages/google-cloud-bigtable/docs/README.rst b/packages/google-cloud-bigtable/docs/README.rst
new file mode 100644
index 000000000000..8d1740221806
--- /dev/null
+++ b/packages/google-cloud-bigtable/docs/README.rst
@@ -0,0 +1,223 @@
+Python Client for Google Cloud Bigtable
+=======================================
+
+|stable| |pypi| |versions|
+
+`Google Cloud Bigtable`_ is Google's NoSQL Big Data database service. It's the
+same database that powers many core Google services, including Search,
+Analytics, Maps, and Gmail.
+
+- `Client Library Documentation`_
+- `Product Documentation`_
+
+.. |stable| image:: https://img.shields.io/badge/support-stable-gold.svg
+ :target: https://github.com/googleapis/google-cloud-python/blob/main/README.rst#stability-levels
+.. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-bigtable.svg
+ :target: https://pypi.org/project/google-cloud-bigtable/
+.. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-bigtable.svg
+ :target: https://pypi.org/project/google-cloud-bigtable/
+.. _Google Cloud Bigtable: https://cloud.google.com/bigtable
+.. _Client Library Documentation: https://cloud.google.com/python/docs/reference/bigtable/latest/summary_overview
+.. _Product Documentation: https://cloud.google.com/bigtable/docs
+
+
+Async Data Client
+-------------------------
+
+:code:`v2.23.0` includes a release of the new :code:`BigtableDataClientAsync` client, accessible at the import path
+:code:`google.cloud.bigtable.data`.
+
+The new client brings a simplified API and increased performance using asyncio.
+The new client is focused on the data API (i.e. reading and writing Bigtable data), with admin operations
+remaining exclusively in the existing synchronous client.
+
+Feedback and bug reports are welcome at cbt-python-client-v3-feedback@google.com,
+or through the Github `issue tracker`_.
+
+
+ .. note::
+
+ It is generally not recommended to use the async client in an otherwise synchronous codebase. To make use of asyncio's
+ performance benefits, the codebase should be designed to be async from the ground up.
+
+
+.. _issue tracker: https://github.com/googleapis/google-cloud-python/issues
+
+
+Quick Start
+-----------
+
+In order to use this library, you first need to go through the following steps:
+
+1. `Select or create a Cloud Platform project.`_
+2. `Enable billing for your project.`_
+3. `Enable the Google Cloud Bigtable.`_
+4. `Set up Authentication.`_
+
+.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project
+.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project
+.. _Enable the Google Cloud Bigtable.: https://cloud.google.com/bigtable
+.. _Set up Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html
+
+Installation
+~~~~~~~~~~~~
+
+Install this library in a virtual environment using `venv`_. `venv`_ is a tool that
+creates isolated Python environments. These isolated environments can have separate
+versions of Python packages, which allows you to isolate one project's dependencies
+from the dependencies of other projects.
+
+With `venv`_, it's possible to install this library without needing system
+install permissions, and without clashing with the installed system
+dependencies.
+
+.. _`venv`: https://docs.python.org/3/library/venv.html
+
+
+Code samples and snippets
+~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Code samples and snippets live in the `samples/`_ folder.
+
+.. _samples/: https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigtable/samples
+
+
+Supported Python Versions
+^^^^^^^^^^^^^^^^^^^^^^^^^
+Our client libraries are compatible with all current `active`_ and `maintenance`_ versions of
+Python.
+
+Python >= 3.9, including 3.14
+
+.. _active: https://devguide.python.org/devcycle/#in-development-main-branch
+.. _maintenance: https://devguide.python.org/devcycle/#maintenance-branches
+
+Unsupported Python Versions
+^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Python <= 3.8
+
+If you are using an `end-of-life`_
+version of Python, we recommend that you update as soon as possible to an actively supported version.
+
+.. _end-of-life: https://devguide.python.org/devcycle/#end-of-life-branches
+
+Mac/Linux
+^^^^^^^^^
+
+.. code-block:: console
+
+    python3 -m venv <your-env>
+    source <your-env>/bin/activate
+ pip install google-cloud-bigtable
+
+
+Windows
+^^^^^^^
+
+.. code-block:: console
+
+    py -m venv <your-env>
+    .\<your-env>\Scripts\activate
+ pip install google-cloud-bigtable
+
+Next Steps
+~~~~~~~~~~
+
+- Read the `Client Library Documentation`_ for Google Cloud Bigtable
+ to see other available methods on the client.
+- Read the `Google Cloud Bigtable Product documentation`_ to learn
+ more about the product and see How-to Guides.
+- View this `README`_ to see the full list of Cloud
+ APIs that we cover.
+
+.. _Google Cloud Bigtable Product documentation: https://cloud.google.com/bigtable
+.. _README: https://github.com/googleapis/google-cloud-python/blob/main/README.rst
+
+Logging
+-------
+
+This library uses the standard Python :code:`logging` functionality to log some RPC events that could be of interest for debugging and monitoring purposes.
+Note the following:
+
+#. Logs may contain sensitive information. Take care to **restrict access to the logs** if they are saved, whether it be on local storage or on Google Cloud Logging.
+#. Google may refine the occurrence, level, and content of various log messages in this library without flagging such changes as breaking. **Do not depend on immutability of the logging events**.
+#. By default, the logging events from this library are not handled. You must **explicitly configure log handling** using one of the mechanisms below.
+
+Simple, environment-based configuration
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+To enable logging for this library without any changes in your code, set the :code:`GOOGLE_SDK_PYTHON_LOGGING_SCOPE` environment variable to a valid Google
+logging scope. This configures handling of logging events (at level :code:`logging.DEBUG` or higher) from this library in a default manner, emitting the logged
+messages in a structured format. It does not currently allow customizing the logging levels captured nor the handlers, formatters, etc. used for any logging
+event.
+
+A logging scope is a period-separated namespace that begins with :code:`google`, identifying the Python module or package to log.
+
+- Valid logging scopes: :code:`google`, :code:`google.cloud.asset.v1`, :code:`google.api`, :code:`google.auth`, etc.
+- Invalid logging scopes: :code:`foo`, :code:`123`, etc.
+
+**NOTE**: If the logging scope is invalid, the library does not set up any logging handlers.
+
+Environment-Based Examples
+^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+- Enabling the default handler for all Google-based loggers
+
+.. code-block:: console
+
+ export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google
+
+- Enabling the default handler for a specific Google module (for a client library called :code:`library_v1`):
+
+.. code-block:: console
+
+ export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google.cloud.library_v1
+
+
+Advanced, code-based configuration
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+You can also configure a valid logging scope using Python's standard `logging` mechanism.
+
+Code-Based Examples
+^^^^^^^^^^^^^^^^^^^
+
+- Configuring a handler for all Google-based loggers
+
+.. code-block:: python
+
+ import logging
+
+ from google.cloud import library_v1
+
+ base_logger = logging.getLogger("google")
+ base_logger.addHandler(logging.StreamHandler())
+ base_logger.setLevel(logging.DEBUG)
+
+- Configuring a handler for a specific Google module (for a client library called :code:`library_v1`):
+
+.. code-block:: python
+
+ import logging
+
+ from google.cloud import library_v1
+
+ base_logger = logging.getLogger("google.cloud.library_v1")
+ base_logger.addHandler(logging.StreamHandler())
+ base_logger.setLevel(logging.DEBUG)
+
+Logging details
+~~~~~~~~~~~~~~~
+
+#. Regardless of which of the mechanisms above you use to configure logging for this library, by default logging events are not propagated up to the root
+ logger from the `google`-level logger. If you need the events to be propagated to the root logger, you must explicitly set
+ :code:`logging.getLogger("google").propagate = True` in your code.
+#. You can mix the different logging configurations above for different Google modules. For example, you may want to use a code-based logging configuration for
+ one library, but decide you need to also set up environment-based logging configuration for another library.
+
+ #. If you attempt to use both code-based and environment-based configuration for the same module, the environment-based configuration will be ineffectual
+   if the code-based configuration gets applied first.
+
+#. The Google-specific logging configurations (default handlers for environment-based configuration; not propagating logging events to the root logger) get
+ executed the first time *any* client library is instantiated in your application, and only if the affected loggers have not been previously configured.
+ (This is the reason for 2.i. above.)
diff --git a/packages/google-cloud-bigtable/docs/_templates/layout.html b/packages/google-cloud-bigtable/docs/_templates/layout.html
index 6316a537f72b..95e9c77fcfe1 100644
--- a/packages/google-cloud-bigtable/docs/_templates/layout.html
+++ b/packages/google-cloud-bigtable/docs/_templates/layout.html
@@ -20,8 +20,8 @@
{% endblock %}
-
- As of January 1, 2020 this library no longer supports Python 2 on the latest released version.
+
+ As of January 1, 2020 this library no longer supports Python 2 on the latest released version.
Library versions released prior to that date will continue to be available. For more information please
visit
Python 2 support on Google Cloud.
diff --git a/packages/google-cloud-bigtable/docs/bigtable_admin_v2/bigtable_instance_admin.rst b/packages/google-cloud-bigtable/docs/bigtable_admin_v2/bigtable_instance_admin.rst
new file mode 100644
index 000000000000..42f7caad7cb1
--- /dev/null
+++ b/packages/google-cloud-bigtable/docs/bigtable_admin_v2/bigtable_instance_admin.rst
@@ -0,0 +1,10 @@
+BigtableInstanceAdmin
+---------------------------------------
+
+.. automodule:: google.cloud.bigtable_admin_v2.services.bigtable_instance_admin
+ :members:
+ :inherited-members:
+
+.. automodule:: google.cloud.bigtable_admin_v2.services.bigtable_instance_admin.pagers
+ :members:
+ :inherited-members:
diff --git a/packages/google-cloud-bigtable/docs/bigtable_admin_v2/bigtable_table_admin.rst b/packages/google-cloud-bigtable/docs/bigtable_admin_v2/bigtable_table_admin.rst
new file mode 100644
index 000000000000..e10ff3ac6e02
--- /dev/null
+++ b/packages/google-cloud-bigtable/docs/bigtable_admin_v2/bigtable_table_admin.rst
@@ -0,0 +1,10 @@
+BigtableTableAdmin
+------------------------------------
+
+.. automodule:: google.cloud.bigtable_admin_v2.services.bigtable_table_admin
+ :members:
+ :inherited-members:
+
+.. automodule:: google.cloud.bigtable_admin_v2.services.bigtable_table_admin.pagers
+ :members:
+ :inherited-members:
diff --git a/packages/google-cloud-bigtable/docs/bigtable_admin_v2/services_.rst b/packages/google-cloud-bigtable/docs/bigtable_admin_v2/services_.rst
new file mode 100644
index 000000000000..ea55c7da14a3
--- /dev/null
+++ b/packages/google-cloud-bigtable/docs/bigtable_admin_v2/services_.rst
@@ -0,0 +1,7 @@
+Services for Google Cloud Bigtable Admin v2 API
+===============================================
+.. toctree::
+ :maxdepth: 2
+
+ bigtable_instance_admin
+ bigtable_table_admin
diff --git a/packages/google-cloud-bigtable/docs/bigtable_admin_v2/types_.rst b/packages/google-cloud-bigtable/docs/bigtable_admin_v2/types_.rst
new file mode 100644
index 000000000000..2f935927abbd
--- /dev/null
+++ b/packages/google-cloud-bigtable/docs/bigtable_admin_v2/types_.rst
@@ -0,0 +1,6 @@
+Types for Google Cloud Bigtable Admin v2 API
+============================================
+
+.. automodule:: google.cloud.bigtable_admin_v2.types
+ :members:
+ :show-inheritance:
diff --git a/packages/google-cloud-bigtable/docs/bigtable_v2/bigtable.rst b/packages/google-cloud-bigtable/docs/bigtable_v2/bigtable.rst
new file mode 100644
index 000000000000..9f92e0fee6fb
--- /dev/null
+++ b/packages/google-cloud-bigtable/docs/bigtable_v2/bigtable.rst
@@ -0,0 +1,6 @@
+Bigtable
+--------------------------
+
+.. automodule:: google.cloud.bigtable_v2.services.bigtable
+ :members:
+ :inherited-members:
diff --git a/packages/google-cloud-bigtable/docs/bigtable_v2/services_.rst b/packages/google-cloud-bigtable/docs/bigtable_v2/services_.rst
new file mode 100644
index 000000000000..1de472763075
--- /dev/null
+++ b/packages/google-cloud-bigtable/docs/bigtable_v2/services_.rst
@@ -0,0 +1,6 @@
+Services for Google Cloud Bigtable v2 API
+=========================================
+.. toctree::
+ :maxdepth: 2
+
+ bigtable
diff --git a/packages/google-cloud-bigtable/docs/bigtable_v2/types_.rst b/packages/google-cloud-bigtable/docs/bigtable_v2/types_.rst
new file mode 100644
index 000000000000..56a8941a2b7d
--- /dev/null
+++ b/packages/google-cloud-bigtable/docs/bigtable_v2/types_.rst
@@ -0,0 +1,6 @@
+Types for Google Cloud Bigtable v2 API
+======================================
+
+.. automodule:: google.cloud.bigtable_v2.types
+ :members:
+ :show-inheritance:
diff --git a/packages/google-cloud-bigtable/docs/conf.py b/packages/google-cloud-bigtable/docs/conf.py
index d8f0352cdd1c..18fe3a5dc5db 100644
--- a/packages/google-cloud-bigtable/docs/conf.py
+++ b/packages/google-cloud-bigtable/docs/conf.py
@@ -1,18 +1,18 @@
# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
-# http://www.apache.org/licenses/LICENSE-2.0
+# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-#
+##
# google-cloud-bigtable documentation build configuration file
#
# This file is execfile()d with the current directory set to its
@@ -24,9 +24,11 @@
# All configuration values have a default; values that are commented out
# serve to show the default.
-import sys
+import logging
import os
import shlex
+import sys
+from typing import Any
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
@@ -42,7 +44,7 @@
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
-needs_sphinx = "1.5.5"
+needs_sphinx = "4.5.0"
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
@@ -81,7 +83,7 @@
# General information about the project.
project = "google-cloud-bigtable"
-copyright = "2019, Google"
+copyright = "2025, Google, LLC"
author = "Google APIs"
# The version info for the project you're documenting, acts as replacement for
@@ -109,6 +111,8 @@
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = [
+ "bigtable_v2/**",
+ "bigtable_admin_v2/**",
"_build",
"**/.nox/**/*",
"samples/AUTHORING_GUIDE.md",
@@ -156,7 +160,7 @@
html_theme_options = {
"description": "Google Cloud Client Libraries for google-cloud-bigtable",
"github_user": "googleapis",
- "github_repo": "python-bigtable",
+ "github_repo": "google-cloud-python",
"github_banner": True,
"font_family": "'Roboto', Georgia, sans",
"head_font_family": "'Roboto', Georgia, serif",
@@ -266,13 +270,13 @@
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
- #'papersize': 'letterpaper',
+ # 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
- #'pointsize': '10pt',
+ # 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
- #'preamble': '',
+ # 'preamble': '',
# Latex figure (float) alignment
- #'figure_align': 'htbp',
+ # 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
@@ -382,3 +386,34 @@
napoleon_use_ivar = False
napoleon_use_param = True
napoleon_use_rtype = True
+
+
+# Setup for sphinx behaviors such as warning filters.
+class UnexpectedUnindentFilter(logging.Filter):
+ """Filter out warnings about unexpected unindentation following bullet lists."""
+
+ def filter(self, record: logging.LogRecord) -> bool:
+ """Filter the log record.
+
+ Args:
+ record (logging.LogRecord): The log record.
+
+ Returns:
+ bool: False to suppress the warning, True to allow it.
+ """
+ msg = record.getMessage()
+ if "Bullet list ends without a blank line" in msg:
+ return False
+ return True
+
+
+def setup(app: Any) -> None:
+ """Setup the Sphinx application.
+
+ Args:
+ app (Any): The Sphinx application.
+ """
+ # Sphinx's logger is hierarchical. Adding a filter to the
+ # root 'sphinx' logger will catch warnings from all sub-loggers.
+ logger = logging.getLogger("sphinx")
+ logger.addFilter(UnexpectedUnindentFilter())
diff --git a/packages/google-cloud-bigtable/docs/index.rst b/packages/google-cloud-bigtable/docs/index.rst
index 1eb603684846..1c6a396f6090 100644
--- a/packages/google-cloud-bigtable/docs/index.rst
+++ b/packages/google-cloud-bigtable/docs/index.rst
@@ -2,6 +2,7 @@
.. include:: multiprocessing.rst
+
Client Types
-------------
.. toctree::
@@ -11,15 +12,16 @@ Client Types
classic_client/usage
admin_client/admin_client_usage
+
Changelog
---------
For a list of all ``google-cloud-bigtable`` releases:
.. toctree::
- :maxdepth: 2
+ :maxdepth: 2
- CHANGELOG
+ CHANGELOG
.. toctree::
:hidden:
diff --git a/packages/google-cloud-bigtable/docs/summary_overview.md b/packages/google-cloud-bigtable/docs/summary_overview.md
index 2379e8b6bc1f..b13302894649 100644
--- a/packages/google-cloud-bigtable/docs/summary_overview.md
+++ b/packages/google-cloud-bigtable/docs/summary_overview.md
@@ -5,14 +5,14 @@ reverted. Instead, if you want to place additional content, create an
pick up on the content and merge the content.
]: #
-# Cloud Bigtable API
+# Google Cloud Bigtable API
-Overview of the APIs available for Cloud Bigtable API.
+Overview of the APIs available for Google Cloud Bigtable API.
## All entries
Classes, methods and properties & attributes for
-Cloud Bigtable API.
+Google Cloud Bigtable API.
[classes](https://cloud.google.com/python/docs/reference/bigtable/latest/summary_class.html)
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable/__init__.py b/packages/google-cloud-bigtable/google/cloud/bigtable/__init__.py
index 7331ff24150c..bc469b893859 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable/__init__.py
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable/__init__.py
@@ -1,4 +1,5 @@
-# Copyright 2015 Google LLC
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -11,12 +12,11 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-
+#
"""Google Cloud Bigtable API package."""
-from google.cloud.bigtable.client import Client
-
from google.cloud.bigtable import gapic_version as package_version
+from google.cloud.bigtable.client import Client
__version__: str
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable/app_profile.py b/packages/google-cloud-bigtable/google/cloud/bigtable/app_profile.py
index 8cde66146f9a..12af35aa984a 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable/app_profile.py
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable/app_profile.py
@@ -14,13 +14,13 @@
"""User-friendly container for Google Cloud Bigtable AppProfile."""
-
import re
+from google.api_core.exceptions import NotFound
+from google.protobuf import field_mask_pb2
+
from google.cloud.bigtable.enums import RoutingPolicyType
from google.cloud.bigtable_admin_v2.types import instance
-from google.protobuf import field_mask_pb2
-from google.api_core.exceptions import NotFound
_APP_PROFILE_NAME_RE = re.compile(
    r"^projects/(?P<project>[^/]+)/"
@@ -165,7 +165,7 @@ def from_pb(cls, app_profile_pb, instance):
match_app_profile_name = _APP_PROFILE_NAME_RE.match(app_profile_pb.name)
if match_app_profile_name is None:
raise ValueError(
- "AppProfile protobuf name was not in the " "expected format.",
+ "AppProfile protobuf name was not in the expected format.",
app_profile_pb.name,
)
if match_app_profile_name.group("instance") != instance.instance_id:
@@ -175,8 +175,7 @@ def from_pb(cls, app_profile_pb, instance):
)
if match_app_profile_name.group("project") != instance._client.project:
raise ValueError(
- "Project ID on app_profile does not match the "
- "project ID on the client"
+ "Project ID on app_profile does not match the project ID on the client"
)
app_profile_id = match_app_profile_name.group("app_profile_id")
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable/backup.py b/packages/google-cloud-bigtable/google/cloud/bigtable/backup.py
index f6fa24421f02..ef3c97543818 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable/backup.py
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable/backup.py
@@ -17,13 +17,14 @@
import re
from google.cloud._helpers import _datetime_to_pb_timestamp # type: ignore
-from google.cloud.bigtable_admin_v2 import BaseBigtableTableAdminClient
-from google.cloud.bigtable_admin_v2.types import table
-from google.cloud.bigtable.encryption_info import EncryptionInfo
-from google.cloud.bigtable.policy import Policy
from google.cloud.exceptions import NotFound # type: ignore
from google.protobuf import field_mask_pb2
+from google.cloud.bigtable.encryption_info import EncryptionInfo
+from google.cloud.bigtable.policy import Policy
+from google.cloud.bigtable_admin_v2 import BaseBigtableTableAdminClient
+from google.cloud.bigtable_admin_v2.types import table
+
_BACKUP_NAME_RE = re.compile(
    r"^projects/(?P<project>[^/]+)/"
    r"instances/(?P<instance_id>[a-z][-a-z0-9]*)/"
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable/batcher.py b/packages/google-cloud-bigtable/google/cloud/bigtable/batcher.py
index f9b85386d827..4643c3402af4 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable/batcher.py
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable/batcher.py
@@ -13,15 +13,14 @@
# limitations under the License.
"""User friendly container for Google Cloud Bigtable MutationBatcher."""
-import threading
-import queue
-import concurrent.futures
-import atexit
-
-from google.api_core.exceptions import from_grpc_status
+import atexit
+import concurrent.futures
+import queue
+import threading
from dataclasses import dataclass
+from google.api_core.exceptions import from_grpc_status
FLUSH_COUNT = 100 # after this many elements, send out the batch
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable/client.py b/packages/google-cloud-bigtable/google/cloud/bigtable/client.py
index 37de10b6e772..165d55243c15 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable/client.py
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable/client.py
@@ -27,33 +27,27 @@
* a :class:`~google.cloud.bigtable.table.Table` owns a
:class:`~google.cloud.bigtable.row.Row` (and all the cells in the row)
"""
+
import os
import warnings
-import grpc # type: ignore
+import grpc # type: ignore
from google.api_core.gapic_v1 import client_info as client_info_lib
from google.auth.credentials import AnonymousCredentials # type: ignore
+from google.cloud.client import ClientWithProject # type: ignore
+from google.cloud.environment_vars import BIGTABLE_EMULATOR # type: ignore
-from google.cloud import bigtable_v2
-from google.cloud import bigtable_admin_v2
-from google.cloud.bigtable_v2.services.bigtable.transports import BigtableGrpcTransport
+from google.cloud import bigtable, bigtable_admin_v2, bigtable_v2
+from google.cloud.bigtable.cluster import _CLUSTER_NAME_RE, Cluster
+from google.cloud.bigtable.instance import Instance
from google.cloud.bigtable_admin_v2.services.bigtable_instance_admin.transports import (
BigtableInstanceAdminGrpcTransport,
)
from google.cloud.bigtable_admin_v2.services.bigtable_table_admin.transports import (
BigtableTableAdminGrpcTransport,
)
-
-from google.cloud import bigtable
-from google.cloud.bigtable.instance import Instance
-from google.cloud.bigtable.cluster import Cluster
-
-from google.cloud.client import ClientWithProject # type: ignore
-
from google.cloud.bigtable_admin_v2.types import instance
-from google.cloud.bigtable.cluster import _CLUSTER_NAME_RE
-from google.cloud.environment_vars import BIGTABLE_EMULATOR # type: ignore
-
+from google.cloud.bigtable_v2.services.bigtable.transports import BigtableGrpcTransport
INSTANCE_TYPE_PRODUCTION = instance.Instance.Type.PRODUCTION
INSTANCE_TYPE_DEVELOPMENT = instance.Instance.Type.DEVELOPMENT
@@ -164,7 +158,7 @@ def __init__(
)
if read_only and admin:
raise ValueError(
- "A read-only client cannot also perform" "administrative actions."
+            "A read-only client cannot also perform administrative actions."
)
# NOTE: We set the scopes **before** calling the parent constructor.
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable/cluster.py b/packages/google-cloud-bigtable/google/cloud/bigtable/cluster.py
index 967ec707e1c3..a15b24dc1ec7 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable/cluster.py
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable/cluster.py
@@ -14,12 +14,12 @@
"""User friendly container for Google Cloud Bigtable Cluster."""
-
import re
-from google.cloud.bigtable_admin_v2.types import instance
+
from google.api_core.exceptions import NotFound
from google.protobuf import field_mask_pb2
+from google.cloud.bigtable_admin_v2.types import instance
_CLUSTER_NAME_RE = re.compile(
    r"^projects/(?P<project>[^/]+)/"
@@ -166,16 +166,16 @@ def from_pb(cls, cluster_pb, instance):
match_cluster_name = _CLUSTER_NAME_RE.match(cluster_pb.name)
if match_cluster_name is None:
raise ValueError(
- "Cluster protobuf name was not in the " "expected format.",
+ "Cluster protobuf name was not in the expected format.",
cluster_pb.name,
)
if match_cluster_name.group("instance") != instance.instance_id:
raise ValueError(
- "Instance ID on cluster does not match the " "instance ID on the client"
+ "Instance ID on cluster does not match the instance ID on the client"
)
if match_cluster_name.group("project") != instance._client.project:
raise ValueError(
- "Project ID on cluster does not match the " "project ID on the client"
+ "Project ID on cluster does not match the project ID on the client"
)
cluster_id = match_cluster_name.group("cluster_id")
@@ -191,15 +191,9 @@ def _update_from_pb(self, cluster_pb):
self.location_id = cluster_pb.location.split("/")[-1]
self.serve_nodes = cluster_pb.serve_nodes
- self.min_serve_nodes = (
- cluster_pb.cluster_config.cluster_autoscaling_config.autoscaling_limits.min_serve_nodes
- )
- self.max_serve_nodes = (
- cluster_pb.cluster_config.cluster_autoscaling_config.autoscaling_limits.max_serve_nodes
- )
- self.cpu_utilization_percent = (
- cluster_pb.cluster_config.cluster_autoscaling_config.autoscaling_targets.cpu_utilization_percent
- )
+ self.min_serve_nodes = cluster_pb.cluster_config.cluster_autoscaling_config.autoscaling_limits.min_serve_nodes
+ self.max_serve_nodes = cluster_pb.cluster_config.cluster_autoscaling_config.autoscaling_limits.max_serve_nodes
+ self.cpu_utilization_percent = cluster_pb.cluster_config.cluster_autoscaling_config.autoscaling_targets.cpu_utilization_percent
self.default_storage_type = cluster_pb.default_storage_type
if cluster_pb.encryption_config:
@@ -528,16 +522,10 @@ def _to_pb(self):
)
if self.min_serve_nodes:
- cluster_pb.cluster_config.cluster_autoscaling_config.autoscaling_limits.min_serve_nodes = (
- self.min_serve_nodes
- )
+ cluster_pb.cluster_config.cluster_autoscaling_config.autoscaling_limits.min_serve_nodes = self.min_serve_nodes
if self.max_serve_nodes:
- cluster_pb.cluster_config.cluster_autoscaling_config.autoscaling_limits.max_serve_nodes = (
- self.max_serve_nodes
- )
+ cluster_pb.cluster_config.cluster_autoscaling_config.autoscaling_limits.max_serve_nodes = self.max_serve_nodes
if self.cpu_utilization_percent:
- cluster_pb.cluster_config.cluster_autoscaling_config.autoscaling_targets.cpu_utilization_percent = (
- self.cpu_utilization_percent
- )
+ cluster_pb.cluster_config.cluster_autoscaling_config.autoscaling_targets.cpu_utilization_percent = self.cpu_utilization_percent
return cluster_pb
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable/column_family.py b/packages/google-cloud-bigtable/google/cloud/bigtable/column_family.py
index 80232958d492..0adfb93fb3fb 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable/column_family.py
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable/column_family.py
@@ -14,13 +14,13 @@
"""User friendly container for Google Cloud Bigtable Column Family."""
+from google.api_core.gapic_v1.method import DEFAULT
from google.cloud import _helpers
-from google.cloud.bigtable_admin_v2.types import table as table_v2_pb2
from google.cloud.bigtable_admin_v2.types import (
bigtable_table_admin as table_admin_v2_pb2,
)
-from google.api_core.gapic_v1.method import DEFAULT
+from google.cloud.bigtable_admin_v2.types import table as table_v2_pb2
class GarbageCollectionRule(object):
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable/data/__init__.py b/packages/google-cloud-bigtable/google/cloud/bigtable/data/__init__.py
index c18eae683461..0cec39103b8e 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable/data/__init__.py
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable/data/__init__.py
@@ -14,56 +14,56 @@
# limitations under the License.
#
from google.cloud.bigtable import gapic_version as package_version
-
-from google.cloud.bigtable.data._async.client import BigtableDataClientAsync
-from google.cloud.bigtable.data._async.client import TableAsync
-from google.cloud.bigtable.data._async.client import AuthorizedViewAsync
+from google.cloud.bigtable.data._async._mutate_rows import _MutateRowsOperationAsync
+from google.cloud.bigtable.data._async._read_rows import _ReadRowsOperationAsync
+from google.cloud.bigtable.data._async.client import (
+ AuthorizedViewAsync,
+ BigtableDataClientAsync,
+ TableAsync,
+)
from google.cloud.bigtable.data._async.mutations_batcher import MutationsBatcherAsync
-from google.cloud.bigtable.data._sync_autogen.client import BigtableDataClient
-from google.cloud.bigtable.data._sync_autogen.client import Table
-from google.cloud.bigtable.data._sync_autogen.client import AuthorizedView
+from google.cloud.bigtable.data._cross_sync import CrossSync
+from google.cloud.bigtable.data._helpers import (
+ TABLE_DEFAULT,
+ RowKeySamples,
+ ShardedQuery,
+)
+from google.cloud.bigtable.data._sync_autogen._mutate_rows import _MutateRowsOperation
+from google.cloud.bigtable.data._sync_autogen._read_rows import _ReadRowsOperation
+from google.cloud.bigtable.data._sync_autogen.client import (
+ AuthorizedView,
+ BigtableDataClient,
+ Table,
+)
from google.cloud.bigtable.data._sync_autogen.mutations_batcher import MutationsBatcher
-
-from google.cloud.bigtable.data.read_rows_query import ReadRowsQuery
-from google.cloud.bigtable.data.read_rows_query import RowRange
-from google.cloud.bigtable.data.row import Row
-from google.cloud.bigtable.data.row import Cell
-
-from google.cloud.bigtable.data.mutations import Mutation
-from google.cloud.bigtable.data.mutations import RowMutationEntry
-from google.cloud.bigtable.data.mutations import AddToCell
-from google.cloud.bigtable.data.mutations import SetCell
-from google.cloud.bigtable.data.mutations import DeleteRangeFromColumn
-from google.cloud.bigtable.data.mutations import DeleteAllFromFamily
-from google.cloud.bigtable.data.mutations import DeleteAllFromRow
-
-from google.cloud.bigtable.data.exceptions import InvalidChunk
-from google.cloud.bigtable.data.exceptions import FailedMutationEntryError
-from google.cloud.bigtable.data.exceptions import FailedQueryShardError
-
-from google.cloud.bigtable.data.exceptions import RetryExceptionGroup
-from google.cloud.bigtable.data.exceptions import MutationsExceptionGroup
-from google.cloud.bigtable.data.exceptions import ShardedReadRowsExceptionGroup
-from google.cloud.bigtable.data.exceptions import ParameterTypeInferenceFailed
-
-from google.cloud.bigtable.data._helpers import TABLE_DEFAULT
-from google.cloud.bigtable.data._helpers import RowKeySamples
-from google.cloud.bigtable.data._helpers import ShardedQuery
+from google.cloud.bigtable.data.exceptions import (
+ FailedMutationEntryError,
+ FailedQueryShardError,
+ InvalidChunk,
+ MutationsExceptionGroup,
+ ParameterTypeInferenceFailed,
+ RetryExceptionGroup,
+ ShardedReadRowsExceptionGroup,
+)
+from google.cloud.bigtable.data.mutations import (
+ AddToCell,
+ DeleteAllFromFamily,
+ DeleteAllFromRow,
+ DeleteRangeFromColumn,
+ Mutation,
+ RowMutationEntry,
+ SetCell,
+)
+from google.cloud.bigtable.data.read_rows_query import ReadRowsQuery, RowRange
+from google.cloud.bigtable.data.row import Cell, Row
# setup custom CrossSync mappings for library
from google.cloud.bigtable_v2.services.bigtable.async_client import (
BigtableAsyncClient,
)
-from google.cloud.bigtable.data._async._read_rows import _ReadRowsOperationAsync
-from google.cloud.bigtable.data._async._mutate_rows import _MutateRowsOperationAsync
-
from google.cloud.bigtable_v2.services.bigtable.client import (
BigtableClient,
)
-from google.cloud.bigtable.data._sync_autogen._read_rows import _ReadRowsOperation
-from google.cloud.bigtable.data._sync_autogen._mutate_rows import _MutateRowsOperation
-
-from google.cloud.bigtable.data._cross_sync import CrossSync
CrossSync.add_mapping("GapicClient", BigtableAsyncClient)
CrossSync._Sync_Impl.add_mapping("GapicClient", BigtableClient)
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable/data/_async/__init__.py b/packages/google-cloud-bigtable/google/cloud/bigtable/data/_async/__init__.py
index e13c9acb7c0e..395c314c1030 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable/data/_async/__init__.py
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable/data/_async/__init__.py
@@ -12,12 +12,9 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-from google.cloud.bigtable.data._async.client import BigtableDataClientAsync
-from google.cloud.bigtable.data._async.client import TableAsync
-
+from google.cloud.bigtable.data._async.client import BigtableDataClientAsync, TableAsync
from google.cloud.bigtable.data._async.mutations_batcher import MutationsBatcherAsync
-
__all__ = [
"BigtableDataClientAsync",
"TableAsync",
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable/data/_async/_mutate_rows.py b/packages/google-cloud-bigtable/google/cloud/bigtable/data/_async/_mutate_rows.py
index 8e6833bcafee..6efb9e5f25be 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable/data/_async/_mutate_rows.py
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable/data/_async/_mutate_rows.py
@@ -14,38 +14,42 @@
#
from __future__ import annotations
-from typing import Sequence, TYPE_CHECKING
+from typing import TYPE_CHECKING, Sequence
from google.api_core import exceptions as core_exceptions
from google.api_core import retry as retries
-import google.cloud.bigtable_v2.types.bigtable as types_pb
+
import google.cloud.bigtable.data.exceptions as bt_exceptions
-from google.cloud.bigtable.data._helpers import _attempt_timeout_generator
-from google.cloud.bigtable.data._helpers import _retry_exception_factory
+import google.cloud.bigtable_v2.types.bigtable as types_pb
+from google.cloud.bigtable.data._cross_sync import CrossSync
+from google.cloud.bigtable.data._helpers import (
+ _attempt_timeout_generator,
+ _retry_exception_factory,
+)
# mutate_rows requests are limited to this number of mutations
-from google.cloud.bigtable.data.mutations import _MUTATE_ROWS_REQUEST_MUTATION_LIMIT
-from google.cloud.bigtable.data.mutations import _EntryWithProto
-
-from google.cloud.bigtable.data._cross_sync import CrossSync
+from google.cloud.bigtable.data.mutations import (
+ _MUTATE_ROWS_REQUEST_MUTATION_LIMIT,
+ _EntryWithProto,
+)
if TYPE_CHECKING:
from google.cloud.bigtable.data.mutations import RowMutationEntry
if CrossSync.is_async:
- from google.cloud.bigtable_v2.services.bigtable.async_client import (
- BigtableAsyncClient as GapicClientType,
- )
from google.cloud.bigtable.data._async.client import ( # type: ignore
_DataApiTargetAsync as TargetType,
)
- else:
- from google.cloud.bigtable_v2.services.bigtable.client import ( # type: ignore
- BigtableClient as GapicClientType,
+ from google.cloud.bigtable_v2.services.bigtable.async_client import (
+ BigtableAsyncClient as GapicClientType,
)
+ else:
from google.cloud.bigtable.data._sync_autogen.client import ( # type: ignore
_DataApiTarget as TargetType,
)
+ from google.cloud.bigtable_v2.services.bigtable.client import ( # type: ignore
+ BigtableClient as GapicClientType,
+ )
__CROSS_SYNC_OUTPUT__ = "google.cloud.bigtable.data._sync_autogen._mutate_rows"
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable/data/_async/_read_rows.py b/packages/google-cloud-bigtable/google/cloud/bigtable/data/_async/_read_rows.py
index 8787bfa71411..f8e203bc10b3 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable/data/_async/_read_rows.py
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable/data/_async/_read_rows.py
@@ -15,25 +15,27 @@
from __future__ import annotations
-from typing import Sequence, TYPE_CHECKING
-
-from google.cloud.bigtable_v2.types import ReadRowsRequest as ReadRowsRequestPB
-from google.cloud.bigtable_v2.types import ReadRowsResponse as ReadRowsResponsePB
-from google.cloud.bigtable_v2.types import RowSet as RowSetPB
-from google.cloud.bigtable_v2.types import RowRange as RowRangePB
-
-from google.cloud.bigtable.data.row import Row, Cell
-from google.cloud.bigtable.data.read_rows_query import ReadRowsQuery
-from google.cloud.bigtable.data.exceptions import InvalidChunk
-from google.cloud.bigtable.data.exceptions import _RowSetComplete
-from google.cloud.bigtable.data.exceptions import _ResetRow
-from google.cloud.bigtable.data._helpers import _attempt_timeout_generator
-from google.cloud.bigtable.data._helpers import _retry_exception_factory
+from typing import TYPE_CHECKING, Sequence
from google.api_core import retry as retries
from google.api_core.retry import exponential_sleep_generator
from google.cloud.bigtable.data._cross_sync import CrossSync
+from google.cloud.bigtable.data._helpers import (
+ _attempt_timeout_generator,
+ _retry_exception_factory,
+)
+from google.cloud.bigtable.data.exceptions import (
+ InvalidChunk,
+ _ResetRow,
+ _RowSetComplete,
+)
+from google.cloud.bigtable.data.read_rows_query import ReadRowsQuery
+from google.cloud.bigtable.data.row import Cell, Row
+from google.cloud.bigtable_v2.types import ReadRowsRequest as ReadRowsRequestPB
+from google.cloud.bigtable_v2.types import ReadRowsResponse as ReadRowsResponsePB
+from google.cloud.bigtable_v2.types import RowRange as RowRangePB
+from google.cloud.bigtable_v2.types import RowSet as RowSetPB
if TYPE_CHECKING:
if CrossSync.is_async:
@@ -41,7 +43,9 @@
_DataApiTargetAsync as TargetType,
)
else:
- from google.cloud.bigtable.data._sync_autogen.client import _DataApiTarget as TargetType # type: ignore
+ from google.cloud.bigtable.data._sync_autogen.client import (
+ _DataApiTarget as TargetType, # type: ignore
+ )
__CROSS_SYNC_OUTPUT__ = "google.cloud.bigtable.data._sync_autogen._read_rows"
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable/data/_async/_swappable_channel.py b/packages/google-cloud-bigtable/google/cloud/bigtable/data/_async/_swappable_channel.py
index bbc9a0d47ec1..d65f833a8f9d 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable/data/_async/_swappable_channel.py
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable/data/_async/_swappable_channel.py
@@ -16,10 +16,10 @@
from typing import Callable
-from google.cloud.bigtable.data._cross_sync import CrossSync
-
from grpc import ChannelConnectivity
+from google.cloud.bigtable.data._cross_sync import CrossSync
+
if CrossSync.is_async:
from grpc.aio import Channel
else:
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable/data/_async/client.py b/packages/google-cloud-bigtable/google/cloud/bigtable/data/_async/client.py
index f86c886f032f..62d233bed3ba 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable/data/_async/client.py
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable/data/_async/client.py
@@ -15,115 +15,126 @@
from __future__ import annotations
+import abc
+import concurrent.futures
+import os
+import random
+import time
+import warnings
+from functools import partial
from typing import (
- cast,
+ TYPE_CHECKING,
Any,
AsyncIterable,
Callable,
Optional,
- Set,
Sequence,
- TYPE_CHECKING,
+ Set,
+ cast,
)
-import abc
-import time
-import warnings
-import random
-import os
-import concurrent.futures
-
-from functools import partial
+import google.auth._default
+import google.auth.credentials
+from google.api_core import client_options as client_options_lib
+from google.api_core import retry as retries
+from google.api_core.exceptions import (
+ Aborted,
+ Cancelled,
+ DeadlineExceeded,
+ ServiceUnavailable,
+)
+from google.cloud.client import ClientWithProject
+from google.cloud.environment_vars import BIGTABLE_EMULATOR # type: ignore
+from google.protobuf.internal.enum_type_wrapper import EnumTypeWrapper
+from google.protobuf.message import Message
from grpc import Channel
-from google.cloud.bigtable.data.execute_query.values import ExecuteQueryValueType
-from google.cloud.bigtable.data.execute_query.metadata import (
- SqlType,
- _pb_metadata_to_metadata_types,
+from google.cloud.bigtable.client import _DEFAULT_BIGTABLE_EMULATOR_CLIENT
+from google.cloud.bigtable.data._cross_sync import CrossSync
+from google.cloud.bigtable.data._helpers import (
+ _CONCURRENCY_LIMIT,
+ TABLE_DEFAULT,
+ _align_timeouts,
+ _attempt_timeout_generator,
+ _get_error_type,
+ _get_retryable_errors,
+ _get_timeouts,
+ _retry_exception_factory,
+ _validate_timeouts,
+ _WarmedInstanceKey,
+)
+from google.cloud.bigtable.data._metrics import BigtableClientSideMetricsController
+from google.cloud.bigtable.data.exceptions import (
+ FailedQueryShardError,
+ ShardedReadRowsExceptionGroup,
)
from google.cloud.bigtable.data.execute_query._parameters_formatting import (
_format_execute_query_params,
_to_param_types,
)
-from google.cloud.bigtable_v2.services.bigtable.transports.base import (
- DEFAULT_CLIENT_INFO,
+from google.cloud.bigtable.data.execute_query.metadata import (
+ SqlType,
+ _pb_metadata_to_metadata_types,
)
-from google.cloud.bigtable_v2.types.bigtable import PingAndWarmRequest
-from google.cloud.bigtable_v2.types.bigtable import SampleRowKeysRequest
-from google.cloud.bigtable_v2.types.bigtable import MutateRowRequest
-from google.cloud.bigtable_v2.types.bigtable import CheckAndMutateRowRequest
-from google.cloud.bigtable_v2.types.bigtable import ReadModifyWriteRowRequest
-from google.cloud.client import ClientWithProject
-from google.cloud.environment_vars import BIGTABLE_EMULATOR # type: ignore
-from google.api_core import retry as retries
-from google.api_core.exceptions import DeadlineExceeded
-from google.api_core.exceptions import ServiceUnavailable
-from google.api_core.exceptions import Aborted
-from google.api_core.exceptions import Cancelled
-from google.protobuf.message import Message
-from google.protobuf.internal.enum_type_wrapper import EnumTypeWrapper
-
-import google.auth.credentials
-import google.auth._default
-from google.api_core import client_options as client_options_lib
-from google.cloud.bigtable.client import _DEFAULT_BIGTABLE_EMULATOR_CLIENT
-from google.cloud.bigtable.data.row import Row
-from google.cloud.bigtable.data.read_rows_query import ReadRowsQuery
-from google.cloud.bigtable.data.exceptions import FailedQueryShardError
-from google.cloud.bigtable.data.exceptions import ShardedReadRowsExceptionGroup
-
-from google.cloud.bigtable.data._helpers import TABLE_DEFAULT, _align_timeouts
-from google.cloud.bigtable.data._helpers import _WarmedInstanceKey
-from google.cloud.bigtable.data._helpers import _CONCURRENCY_LIMIT
-from google.cloud.bigtable.data._helpers import _retry_exception_factory
-from google.cloud.bigtable.data._helpers import _validate_timeouts
-from google.cloud.bigtable.data._helpers import _get_error_type
-from google.cloud.bigtable.data._helpers import _get_retryable_errors
-from google.cloud.bigtable.data._helpers import _get_timeouts
-from google.cloud.bigtable.data._helpers import _attempt_timeout_generator
+from google.cloud.bigtable.data.execute_query.values import ExecuteQueryValueType
from google.cloud.bigtable.data.mutations import Mutation, RowMutationEntry
-
from google.cloud.bigtable.data.read_modify_write_rules import ReadModifyWriteRule
-from google.cloud.bigtable.data.row_filters import RowFilter
-from google.cloud.bigtable.data.row_filters import StripValueTransformerFilter
-from google.cloud.bigtable.data.row_filters import CellsRowLimitFilter
-from google.cloud.bigtable.data.row_filters import RowFilterChain
-from google.cloud.bigtable.data._metrics import BigtableClientSideMetricsController
-
-from google.cloud.bigtable.data._cross_sync import CrossSync
+from google.cloud.bigtable.data.read_rows_query import ReadRowsQuery
+from google.cloud.bigtable.data.row import Row
+from google.cloud.bigtable.data.row_filters import (
+ CellsRowLimitFilter,
+ RowFilter,
+ RowFilterChain,
+ StripValueTransformerFilter,
+)
+from google.cloud.bigtable_v2.services.bigtable.transports.base import (
+ DEFAULT_CLIENT_INFO,
+)
+from google.cloud.bigtable_v2.types.bigtable import (
+ CheckAndMutateRowRequest,
+ MutateRowRequest,
+ PingAndWarmRequest,
+ ReadModifyWriteRowRequest,
+ SampleRowKeysRequest,
+)
if CrossSync.is_async:
from grpc.aio import insecure_channel
- from google.cloud.bigtable_v2.services.bigtable.transports import (
- BigtableGrpcAsyncIOTransport as TransportType,
- )
- from google.cloud.bigtable_v2.services.bigtable import (
- BigtableAsyncClient as GapicClient,
- )
- from google.cloud.bigtable.data._async.mutations_batcher import _MB_SIZE
+
from google.cloud.bigtable.data._async._swappable_channel import (
AsyncSwappableChannel as SwappableChannelType,
)
from google.cloud.bigtable.data._async.metrics_interceptor import (
AsyncBigtableMetricsInterceptor as MetricsInterceptorType,
)
+ from google.cloud.bigtable.data._async.mutations_batcher import _MB_SIZE
+ from google.cloud.bigtable_v2.services.bigtable import (
+ BigtableAsyncClient as GapicClient,
+ )
+ from google.cloud.bigtable_v2.services.bigtable.transports import (
+ BigtableGrpcAsyncIOTransport as TransportType,
+ )
else:
from typing import Iterable # noqa: F401
- from grpc import insecure_channel
- from grpc import intercept_channel
- from google.cloud.bigtable_v2.services.bigtable.transports import BigtableGrpcTransport as TransportType # type: ignore
- from google.cloud.bigtable_v2.services.bigtable import BigtableClient as GapicClient # type: ignore
- from google.cloud.bigtable.data._sync_autogen.mutations_batcher import _MB_SIZE
+
+ from grpc import insecure_channel, intercept_channel
+
from google.cloud.bigtable.data._sync_autogen._swappable_channel import ( # noqa: F401
SwappableChannel as SwappableChannelType,
)
from google.cloud.bigtable.data._sync_autogen.metrics_interceptor import ( # noqa: F401
BigtableMetricsInterceptor as MetricsInterceptorType,
)
+ from google.cloud.bigtable.data._sync_autogen.mutations_batcher import _MB_SIZE
+ from google.cloud.bigtable_v2.services.bigtable import ( # type: ignore
+ BigtableClient as GapicClient,
+ )
+ from google.cloud.bigtable_v2.services.bigtable.transports import ( # type: ignore
+ BigtableGrpcTransport as TransportType,
+ )
if TYPE_CHECKING:
- from google.cloud.bigtable.data._helpers import RowKeySamples
- from google.cloud.bigtable.data._helpers import ShardedQuery
+ from google.cloud.bigtable.data._helpers import RowKeySamples, ShardedQuery
if CrossSync.is_async:
from google.cloud.bigtable.data._async.mutations_batcher import (
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable/data/_async/metrics_interceptor.py b/packages/google-cloud-bigtable/google/cloud/bigtable/data/_async/metrics_interceptor.py
index 249dcdcc97c4..4fe213ac0cb0 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable/data/_async/metrics_interceptor.py
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable/data/_async/metrics_interceptor.py
@@ -13,24 +13,25 @@
# limitations under the License.
from __future__ import annotations
-from typing import Sequence
-
import time
from functools import wraps
-
-from google.cloud.bigtable.data._metrics.data_model import ActiveOperationMetric
-from google.cloud.bigtable.data._metrics.data_model import OperationState
-from google.cloud.bigtable.data._metrics.data_model import OperationType
+from typing import Sequence
from google.cloud.bigtable.data._cross_sync import CrossSync
+from google.cloud.bigtable.data._metrics.data_model import (
+ ActiveOperationMetric,
+ OperationState,
+ OperationType,
+)
if CrossSync.is_async:
- from grpc.aio import UnaryUnaryClientInterceptor
- from grpc.aio import UnaryStreamClientInterceptor
- from grpc.aio import AioRpcError
+ from grpc.aio import (
+ AioRpcError,
+ UnaryStreamClientInterceptor,
+ UnaryUnaryClientInterceptor,
+ )
else:
- from grpc import UnaryUnaryClientInterceptor
- from grpc import UnaryStreamClientInterceptor
+ from grpc import UnaryStreamClientInterceptor, UnaryUnaryClientInterceptor
__CROSS_SYNC_OUTPUT__ = "google.cloud.bigtable.data._sync_autogen.metrics_interceptor"
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable/data/_async/mutations_batcher.py b/packages/google-cloud-bigtable/google/cloud/bigtable/data/_async/mutations_batcher.py
index a8e99ea9e91b..405983393ee7 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable/data/_async/mutations_batcher.py
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable/data/_async/mutations_batcher.py
@@ -14,24 +14,26 @@
#
from __future__ import annotations
-from typing import Sequence, TYPE_CHECKING, cast
import atexit
+import concurrent.futures
import warnings
from collections import deque
-import concurrent.futures
-
-from google.cloud.bigtable.data.exceptions import MutationsExceptionGroup
-from google.cloud.bigtable.data.exceptions import FailedMutationEntryError
-from google.cloud.bigtable.data._helpers import _get_retryable_errors
-from google.cloud.bigtable.data._helpers import _get_timeouts
-from google.cloud.bigtable.data._helpers import TABLE_DEFAULT
+from typing import TYPE_CHECKING, Sequence, cast
+from google.cloud.bigtable.data._cross_sync import CrossSync
+from google.cloud.bigtable.data._helpers import (
+ TABLE_DEFAULT,
+ _get_retryable_errors,
+ _get_timeouts,
+)
+from google.cloud.bigtable.data.exceptions import (
+ FailedMutationEntryError,
+ MutationsExceptionGroup,
+)
from google.cloud.bigtable.data.mutations import (
_MUTATE_ROWS_REQUEST_MUTATION_LIMIT,
+ Mutation,
)
-from google.cloud.bigtable.data.mutations import Mutation
-
-from google.cloud.bigtable.data._cross_sync import CrossSync
if TYPE_CHECKING:
from google.cloud.bigtable.data.mutations import RowMutationEntry
@@ -41,7 +43,9 @@
_DataApiTargetAsync as TargetType,
)
else:
- from google.cloud.bigtable.data._sync_autogen.client import _DataApiTarget as TargetType # type: ignore
+ from google.cloud.bigtable.data._sync_autogen.client import (
+ _DataApiTarget as TargetType, # type: ignore
+ )
__CROSS_SYNC_OUTPUT__ = "google.cloud.bigtable.data._sync_autogen.mutations_batcher"
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable/data/_cross_sync/__init__.py b/packages/google-cloud-bigtable/google/cloud/bigtable/data/_cross_sync/__init__.py
index 77a9ddae9d38..a392baa16709 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable/data/_cross_sync/__init__.py
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable/data/_cross_sync/__init__.py
@@ -14,7 +14,6 @@
from .cross_sync import CrossSync
-
__all__ = [
"CrossSync",
]
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable/data/_cross_sync/_decorators.py b/packages/google-cloud-bigtable/google/cloud/bigtable/data/_cross_sync/_decorators.py
index a0dd140dd01d..02cc761956e3 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable/data/_cross_sync/_decorators.py
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable/data/_cross_sync/_decorators.py
@@ -15,12 +15,14 @@
Contains a set of AstDecorator classes, which define the behavior of CrossSync decorators.
Each AstDecorator class is used through @CrossSync.
"""
+
from __future__ import annotations
+
from typing import TYPE_CHECKING, Iterable
if TYPE_CHECKING:
import ast
- from typing import Callable, Any
+ from typing import Any, Callable
class AstDecorator:
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable/data/_cross_sync/_mapping_meta.py b/packages/google-cloud-bigtable/google/cloud/bigtable/data/_cross_sync/_mapping_meta.py
index 5312708ccc46..4e9324d79a59 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable/data/_cross_sync/_mapping_meta.py
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable/data/_cross_sync/_mapping_meta.py
@@ -12,6 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import annotations
+
from typing import Any
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable/data/_cross_sync/cross_sync.py b/packages/google-cloud-bigtable/google/cloud/bigtable/data/_cross_sync/cross_sync.py
index 1f1ee111aee9..7dc2916c3479 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable/data/_cross_sync/cross_sync.py
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable/data/_cross_sync/cross_sync.py
@@ -38,30 +38,31 @@ async def async_func(self, arg: int) -> int:
from __future__ import annotations
+import asyncio
+import concurrent.futures
+import queue
+import sys
+import threading
+import time
+import typing
from typing import (
- TypeVar,
+ TYPE_CHECKING,
Any,
+ AsyncGenerator,
+ AsyncIterable,
+ AsyncIterator,
Callable,
Coroutine,
Sequence,
+ TypeVar,
Union,
- AsyncIterable,
- AsyncIterator,
- AsyncGenerator,
- TYPE_CHECKING,
)
-import typing
-import asyncio
-import sys
-import concurrent.futures
import google.api_core.retry as retries
-import queue
-import threading
-import time
+
from ._decorators import (
- ConvertClass,
Convert,
+ ConvertClass,
Drop,
Pytest,
PytestFixture,
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable/data/_helpers.py b/packages/google-cloud-bigtable/google/cloud/bigtable/data/_helpers.py
index e848ebc6f1dc..01eda4ec7591 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable/data/_helpers.py
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable/data/_helpers.py
@@ -14,21 +14,23 @@
"""
Helper functions used in various places in the library.
"""
+
from __future__ import annotations
-from typing import Sequence, List, Tuple, TYPE_CHECKING, Union
-import time
import enum
+import time
from collections import namedtuple
-from google.cloud.bigtable.data.read_rows_query import ReadRowsQuery
+from typing import TYPE_CHECKING, List, Sequence, Tuple, Union
from google.api_core import exceptions as core_exceptions
-from google.api_core.retry import exponential_sleep_generator
-from google.api_core.retry import RetryFailureReason
+from google.api_core.retry import RetryFailureReason, exponential_sleep_generator
+
from google.cloud.bigtable.data.exceptions import RetryExceptionGroup
+from google.cloud.bigtable.data.read_rows_query import ReadRowsQuery
if TYPE_CHECKING:
import grpc
+
from google.cloud.bigtable.data._async.client import _DataApiTargetAsync
from google.cloud.bigtable.data._sync_autogen.client import _DataApiTarget
@@ -211,7 +213,7 @@ def _validate_timeouts(
def _get_error_type(
- call_code: Union["grpc.StatusCode", int, type[Exception]]
+ call_code: Union["grpc.StatusCode", int, type[Exception]],
) -> type[Exception]:
"""Helper function for ensuring the object is an exception type.
If it is not, the proper GoogleAPICallError type is infered from the status
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable/data/_metrics/__init__.py b/packages/google-cloud-bigtable/google/cloud/bigtable/data/_metrics/__init__.py
index 26cfc1326bbe..77b8580bc524 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable/data/_metrics/__init__.py
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable/data/_metrics/__init__.py
@@ -11,16 +11,17 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
+from google.cloud.bigtable.data._metrics.data_model import (
+ ActiveAttemptMetric,
+ ActiveOperationMetric,
+ CompletedAttemptMetric,
+ CompletedOperationMetric,
+ OperationState,
+ OperationType,
+)
from google.cloud.bigtable.data._metrics.metrics_controller import (
BigtableClientSideMetricsController,
)
-
-from google.cloud.bigtable.data._metrics.data_model import ActiveOperationMetric
-from google.cloud.bigtable.data._metrics.data_model import ActiveAttemptMetric
-from google.cloud.bigtable.data._metrics.data_model import CompletedOperationMetric
-from google.cloud.bigtable.data._metrics.data_model import CompletedAttemptMetric
-from google.cloud.bigtable.data._metrics.data_model import OperationState
-from google.cloud.bigtable.data._metrics.data_model import OperationType
from google.cloud.bigtable.data._metrics.tracked_retry import tracked_retry
__all__ = (
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable/data/_metrics/data_model.py b/packages/google-cloud-bigtable/google/cloud/bigtable/data/_metrics/data_model.py
index 64dd63bfa32e..78eb15bb15d6 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable/data/_metrics/data_model.py
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable/data/_metrics/data_model.py
@@ -13,25 +13,22 @@
# limitations under the License.
from __future__ import annotations
-from typing import ClassVar, Tuple, cast, TYPE_CHECKING
-
-import time
-import re
-import logging
import contextvars
-
+import logging
+import re
+import time
+from dataclasses import dataclass, field
from enum import Enum
from functools import lru_cache
-from dataclasses import dataclass
-from dataclasses import field
-from grpc import StatusCode
-from grpc import RpcError
+from typing import TYPE_CHECKING, ClassVar, Tuple, cast
+
+from google.protobuf.message import DecodeError
+from grpc import RpcError, StatusCode
from grpc.aio import AioRpcError
import google.cloud.bigtable.data.exceptions as bt_exceptions
-from google.cloud.bigtable_v2.types.response_params import ResponseParams
from google.cloud.bigtable.data._helpers import TrackedBackoffGenerator
-from google.protobuf.message import DecodeError
+from google.cloud.bigtable_v2.types.response_params import ResponseParams
if TYPE_CHECKING:
from google.cloud.bigtable.data._metrics.handlers._base import MetricsHandler
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable/data/_metrics/handlers/_base.py b/packages/google-cloud-bigtable/google/cloud/bigtable/data/_metrics/handlers/_base.py
index 884091fddf54..020e3943d23f 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable/data/_metrics/handlers/_base.py
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable/data/_metrics/handlers/_base.py
@@ -11,9 +11,11 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-from google.cloud.bigtable.data._metrics.data_model import ActiveOperationMetric
-from google.cloud.bigtable.data._metrics.data_model import CompletedAttemptMetric
-from google.cloud.bigtable.data._metrics.data_model import CompletedOperationMetric
+from google.cloud.bigtable.data._metrics.data_model import (
+ ActiveOperationMetric,
+ CompletedAttemptMetric,
+ CompletedOperationMetric,
+)
class MetricsHandler:
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable/data/_metrics/metrics_controller.py b/packages/google-cloud-bigtable/google/cloud/bigtable/data/_metrics/metrics_controller.py
index e9815f201930..2ee1d9eb07e8 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable/data/_metrics/metrics_controller.py
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable/data/_metrics/metrics_controller.py
@@ -13,9 +13,11 @@
# limitations under the License.
from __future__ import annotations
-from google.cloud.bigtable.data._metrics.data_model import ActiveOperationMetric
+from google.cloud.bigtable.data._metrics.data_model import (
+ ActiveOperationMetric,
+ OperationType,
+)
from google.cloud.bigtable.data._metrics.handlers._base import MetricsHandler
-from google.cloud.bigtable.data._metrics.data_model import OperationType
class BigtableClientSideMetricsController:
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable/data/_metrics/tracked_retry.py b/packages/google-cloud-bigtable/google/cloud/bigtable/data/_metrics/tracked_retry.py
index 94d2e5dcb66d..9fd3550d9512 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable/data/_metrics/tracked_retry.py
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable/data/_metrics/tracked_retry.py
@@ -19,18 +19,18 @@
methods to update the associated ActiveOperationMetric when exceptions
are encountered through the retryable rpc.
"""
+
from __future__ import annotations
from typing import Callable, List, Optional, Tuple, TypeVar
-from grpc import StatusCode
from google.api_core.exceptions import GoogleAPICallError
from google.api_core.retry import RetryFailureReason
-from google.cloud.bigtable.data.exceptions import _MutateRowsIncomplete
-from google.cloud.bigtable.data._helpers import _retry_exception_factory
-from google.cloud.bigtable.data._metrics import ActiveOperationMetric
-from google.cloud.bigtable.data._metrics import OperationState
+from grpc import StatusCode
+from google.cloud.bigtable.data._helpers import _retry_exception_factory
+from google.cloud.bigtable.data._metrics import ActiveOperationMetric, OperationState
+from google.cloud.bigtable.data.exceptions import _MutateRowsIncomplete
T = TypeVar("T")
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable/data/_sync_autogen/_mutate_rows.py b/packages/google-cloud-bigtable/google/cloud/bigtable/data/_sync_autogen/_mutate_rows.py
index 3bf7b562f1db..fad8e2469ecb 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable/data/_sync_autogen/_mutate_rows.py
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable/data/_sync_autogen/_mutate_rows.py
@@ -16,25 +16,29 @@
# This file is automatically generated by CrossSync. Do not edit manually.
from __future__ import annotations
-from typing import Sequence, TYPE_CHECKING
+from typing import TYPE_CHECKING, Sequence
from google.api_core import exceptions as core_exceptions
from google.api_core import retry as retries
-import google.cloud.bigtable_v2.types.bigtable as types_pb
import google.cloud.bigtable.data.exceptions as bt_exceptions
-from google.cloud.bigtable.data._helpers import _attempt_timeout_generator
-from google.cloud.bigtable.data._helpers import _retry_exception_factory
-from google.cloud.bigtable.data.mutations import _MUTATE_ROWS_REQUEST_MUTATION_LIMIT
-from google.cloud.bigtable.data.mutations import _EntryWithProto
+import google.cloud.bigtable_v2.types.bigtable as types_pb
from google.cloud.bigtable.data._cross_sync import CrossSync
+from google.cloud.bigtable.data._helpers import (
+ _attempt_timeout_generator,
+ _retry_exception_factory,
+)
+from google.cloud.bigtable.data.mutations import (
+ _MUTATE_ROWS_REQUEST_MUTATION_LIMIT,
+ _EntryWithProto,
+)
if TYPE_CHECKING:
from google.cloud.bigtable.data.mutations import RowMutationEntry
- from google.cloud.bigtable_v2.services.bigtable.client import (
- BigtableClient as GapicClientType,
- )
from google.cloud.bigtable.data._sync_autogen.client import (
_DataApiTarget as TargetType,
)
+ from google.cloud.bigtable_v2.services.bigtable.client import (
+ BigtableClient as GapicClientType,
+ )
class _MutateRowsOperation:
@@ -130,8 +134,7 @@ def _run_attempt(self):
GoogleAPICallError: if the gapic rpc fails"""
request_entries = [self.mutations[idx].proto for idx in self.remaining_indices]
active_request_indices = {
- req_idx: orig_idx
- for (req_idx, orig_idx) in enumerate(self.remaining_indices)
+ req_idx: orig_idx for req_idx, orig_idx in enumerate(self.remaining_indices)
}
self.remaining_indices = []
if not request_entries:
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable/data/_sync_autogen/_read_rows.py b/packages/google-cloud-bigtable/google/cloud/bigtable/data/_sync_autogen/_read_rows.py
index 3593475a98d2..9ccde8b07761 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable/data/_sync_autogen/_read_rows.py
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable/data/_sync_autogen/_read_rows.py
@@ -17,21 +17,25 @@
# This file is automatically generated by CrossSync. Do not edit manually.
from __future__ import annotations
-from typing import Sequence, TYPE_CHECKING
-from google.cloud.bigtable_v2.types import ReadRowsRequest as ReadRowsRequestPB
-from google.cloud.bigtable_v2.types import ReadRowsResponse as ReadRowsResponsePB
-from google.cloud.bigtable_v2.types import RowSet as RowSetPB
-from google.cloud.bigtable_v2.types import RowRange as RowRangePB
-from google.cloud.bigtable.data.row import Row, Cell
-from google.cloud.bigtable.data.read_rows_query import ReadRowsQuery
-from google.cloud.bigtable.data.exceptions import InvalidChunk
-from google.cloud.bigtable.data.exceptions import _RowSetComplete
-from google.cloud.bigtable.data.exceptions import _ResetRow
-from google.cloud.bigtable.data._helpers import _attempt_timeout_generator
-from google.cloud.bigtable.data._helpers import _retry_exception_factory
+from typing import TYPE_CHECKING, Sequence
from google.api_core import retry as retries
from google.api_core.retry import exponential_sleep_generator
from google.cloud.bigtable.data._cross_sync import CrossSync
+from google.cloud.bigtable.data._helpers import (
+ _attempt_timeout_generator,
+ _retry_exception_factory,
+)
+from google.cloud.bigtable.data.exceptions import (
+ InvalidChunk,
+ _ResetRow,
+ _RowSetComplete,
+)
+from google.cloud.bigtable.data.read_rows_query import ReadRowsQuery
+from google.cloud.bigtable.data.row import Cell, Row
+from google.cloud.bigtable_v2.types import ReadRowsRequest as ReadRowsRequestPB
+from google.cloud.bigtable_v2.types import ReadRowsResponse as ReadRowsResponsePB
+from google.cloud.bigtable_v2.types import RowRange as RowRangePB
+from google.cloud.bigtable_v2.types import RowSet as RowSetPB
if TYPE_CHECKING:
from google.cloud.bigtable.data._sync_autogen.client import (
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable/data/_sync_autogen/client.py b/packages/google-cloud-bigtable/google/cloud/bigtable/data/_sync_autogen/client.py
index 62200276380a..a5873ecc0931 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable/data/_sync_autogen/client.py
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable/data/_sync_autogen/client.py
@@ -17,84 +17,93 @@
# This file is automatically generated by CrossSync. Do not edit manually.
from __future__ import annotations
-from typing import cast, Any, Callable, Optional, Set, Sequence, TYPE_CHECKING
import abc
+import concurrent.futures
+import os
+import random
import time
import warnings
-import random
-import os
-import concurrent.futures
from functools import partial
+from typing import TYPE_CHECKING, Any, Callable, Optional, Sequence, Set, cast
+import google.auth._default
+import google.auth.credentials
+from google.api_core import client_options as client_options_lib
+from google.api_core import retry as retries
+from google.api_core.exceptions import (
+ Aborted,
+ Cancelled,
+ DeadlineExceeded,
+ ServiceUnavailable,
+)
+from google.cloud.client import ClientWithProject
+from google.cloud.environment_vars import BIGTABLE_EMULATOR
+from google.protobuf.internal.enum_type_wrapper import EnumTypeWrapper
+from google.protobuf.message import Message
from grpc import Channel
-from google.cloud.bigtable.data.execute_query.values import ExecuteQueryValueType
-from google.cloud.bigtable.data.execute_query.metadata import (
- SqlType,
- _pb_metadata_to_metadata_types,
+from google.cloud.bigtable.client import _DEFAULT_BIGTABLE_EMULATOR_CLIENT
+from google.cloud.bigtable.data._cross_sync import CrossSync
+from google.cloud.bigtable.data._helpers import (
+ _CONCURRENCY_LIMIT,
+ TABLE_DEFAULT,
+ _align_timeouts,
+ _attempt_timeout_generator,
+ _get_error_type,
+ _get_retryable_errors,
+ _get_timeouts,
+ _retry_exception_factory,
+ _validate_timeouts,
+ _WarmedInstanceKey,
+)
+from google.cloud.bigtable.data._metrics import BigtableClientSideMetricsController
+from google.cloud.bigtable.data.exceptions import (
+ FailedQueryShardError,
+ ShardedReadRowsExceptionGroup,
)
from google.cloud.bigtable.data.execute_query._parameters_formatting import (
_format_execute_query_params,
_to_param_types,
)
-from google.cloud.bigtable_v2.services.bigtable.transports.base import (
- DEFAULT_CLIENT_INFO,
+from google.cloud.bigtable.data.execute_query.metadata import (
+ SqlType,
+ _pb_metadata_to_metadata_types,
)
-from google.cloud.bigtable_v2.types.bigtable import PingAndWarmRequest
-from google.cloud.bigtable_v2.types.bigtable import SampleRowKeysRequest
-from google.cloud.bigtable_v2.types.bigtable import MutateRowRequest
-from google.cloud.bigtable_v2.types.bigtable import CheckAndMutateRowRequest
-from google.cloud.bigtable_v2.types.bigtable import ReadModifyWriteRowRequest
-from google.cloud.client import ClientWithProject
-from google.cloud.environment_vars import BIGTABLE_EMULATOR
-from google.api_core import retry as retries
-from google.api_core.exceptions import DeadlineExceeded
-from google.api_core.exceptions import ServiceUnavailable
-from google.api_core.exceptions import Aborted
-from google.api_core.exceptions import Cancelled
-from google.protobuf.message import Message
-from google.protobuf.internal.enum_type_wrapper import EnumTypeWrapper
-import google.auth.credentials
-import google.auth._default
-from google.api_core import client_options as client_options_lib
-from google.cloud.bigtable.client import _DEFAULT_BIGTABLE_EMULATOR_CLIENT
-from google.cloud.bigtable.data.row import Row
-from google.cloud.bigtable.data.read_rows_query import ReadRowsQuery
-from google.cloud.bigtable.data.exceptions import FailedQueryShardError
-from google.cloud.bigtable.data.exceptions import ShardedReadRowsExceptionGroup
-from google.cloud.bigtable.data._helpers import TABLE_DEFAULT, _align_timeouts
-from google.cloud.bigtable.data._helpers import _WarmedInstanceKey
-from google.cloud.bigtable.data._helpers import _CONCURRENCY_LIMIT
-from google.cloud.bigtable.data._helpers import _retry_exception_factory
-from google.cloud.bigtable.data._helpers import _validate_timeouts
-from google.cloud.bigtable.data._helpers import _get_error_type
-from google.cloud.bigtable.data._helpers import _get_retryable_errors
-from google.cloud.bigtable.data._helpers import _get_timeouts
-from google.cloud.bigtable.data._helpers import _attempt_timeout_generator
+from google.cloud.bigtable.data.execute_query.values import ExecuteQueryValueType
from google.cloud.bigtable.data.mutations import Mutation, RowMutationEntry
from google.cloud.bigtable.data.read_modify_write_rules import ReadModifyWriteRule
-from google.cloud.bigtable.data.row_filters import RowFilter
-from google.cloud.bigtable.data.row_filters import StripValueTransformerFilter
-from google.cloud.bigtable.data.row_filters import CellsRowLimitFilter
-from google.cloud.bigtable.data.row_filters import RowFilterChain
-from google.cloud.bigtable.data._metrics import BigtableClientSideMetricsController
-from google.cloud.bigtable.data._cross_sync import CrossSync
-from typing import Iterable
-from grpc import insecure_channel
-from grpc import intercept_channel
-from google.cloud.bigtable_v2.services.bigtable.transports import (
- BigtableGrpcTransport as TransportType,
+from google.cloud.bigtable.data.read_rows_query import ReadRowsQuery
+from google.cloud.bigtable.data.row import Row
+from google.cloud.bigtable.data.row_filters import (
+ CellsRowLimitFilter,
+ RowFilter,
+ RowFilterChain,
+ StripValueTransformerFilter,
)
-from google.cloud.bigtable_v2.services.bigtable import BigtableClient as GapicClient
-from google.cloud.bigtable.data._sync_autogen.mutations_batcher import _MB_SIZE
+from google.cloud.bigtable_v2.services.bigtable.transports.base import (
+ DEFAULT_CLIENT_INFO,
+)
+from google.cloud.bigtable_v2.types.bigtable import (
+ CheckAndMutateRowRequest,
+ MutateRowRequest,
+ PingAndWarmRequest,
+ ReadModifyWriteRowRequest,
+ SampleRowKeysRequest,
+)
+from typing import Iterable
+from grpc import insecure_channel, intercept_channel
from google.cloud.bigtable.data._sync_autogen._swappable_channel import (
SwappableChannel as SwappableChannelType,
)
from google.cloud.bigtable.data._sync_autogen.metrics_interceptor import (
BigtableMetricsInterceptor as MetricsInterceptorType,
)
+from google.cloud.bigtable.data._sync_autogen.mutations_batcher import _MB_SIZE
+from google.cloud.bigtable_v2.services.bigtable import BigtableClient as GapicClient
+from google.cloud.bigtable_v2.services.bigtable.transports import (
+ BigtableGrpcTransport as TransportType,
+)
if TYPE_CHECKING:
- from google.cloud.bigtable.data._helpers import RowKeySamples
- from google.cloud.bigtable.data._helpers import ShardedQuery
+ from google.cloud.bigtable.data._helpers import RowKeySamples, ShardedQuery
from google.cloud.bigtable.data._sync_autogen.mutations_batcher import (
MutationsBatcher,
)
@@ -310,7 +319,7 @@ def _ping_and_warm_instances(
],
wait_for_ready=True,
)
- for (instance_name, app_profile_id) in instance_list
+ for instance_name, app_profile_id in instance_list
]
result_list = CrossSync._Sync_Impl.gather_partials(
partial_list, return_exceptions=True, sync_executor=self._executor
@@ -659,7 +668,7 @@ def execute_query(
prepare_predicate = retries.if_exception_type(
*[_get_error_type(e) for e in prepare_retryable_errors]
)
- (prepare_operation_timeout, prepare_attempt_timeout) = _align_timeouts(
+ prepare_operation_timeout, prepare_attempt_timeout = _align_timeouts(
prepare_operation_timeout, prepare_attempt_timeout
)
prepare_sleep_generator = retries.exponential_sleep_generator(0.01, 2, 60)
@@ -685,7 +694,7 @@ def execute_query(
"prepared_query": prepare_result.prepared_query,
"params": pb_params,
}
- (operation_timeout, attempt_timeout) = _align_timeouts(
+ operation_timeout, attempt_timeout = _align_timeouts(
operation_timeout, attempt_timeout
)
return CrossSync._Sync_Impl.ExecuteQueryIterator(
@@ -886,7 +895,7 @@ def read_rows_stream(
from any retries that failed
google.api_core.exceptions.GoogleAPIError: raised if the request encounters an unrecoverable error
"""
- (operation_timeout, attempt_timeout) = _get_timeouts(
+ operation_timeout, attempt_timeout = _get_timeouts(
operation_timeout, attempt_timeout, self
)
retryable_excs = _get_retryable_errors(retryable_errors, self)
@@ -1032,7 +1041,7 @@ def read_rows_sharded(
ValueError: if the query_list is empty"""
if not sharded_query:
raise ValueError("empty sharded_query")
- (operation_timeout, attempt_timeout) = _get_timeouts(
+ operation_timeout, attempt_timeout = _get_timeouts(
operation_timeout, attempt_timeout, self
)
rpc_timeout_generator = _attempt_timeout_generator(
@@ -1075,7 +1084,7 @@ def read_rows_with_semaphore(query):
raise ShardedReadRowsExceptionGroup(
[
FailedQueryShardError(idx, sharded_query[idx], e)
- for (idx, e) in error_dict.items()
+ for idx, e in error_dict.items()
],
results_list,
len(sharded_query),
@@ -1165,7 +1174,7 @@ def sample_row_keys(
from any retries that failed
google.api_core.exceptions.GoogleAPIError: raised if the request encounters an unrecoverable error
"""
- (operation_timeout, attempt_timeout) = _get_timeouts(
+ operation_timeout, attempt_timeout = _get_timeouts(
operation_timeout, attempt_timeout, self
)
attempt_timeout_gen = _attempt_timeout_generator(
@@ -1280,7 +1289,7 @@ def mutate_row(
google.api_core.exceptions.GoogleAPIError: raised on non-idempotent operations that cannot be
safely retried.
ValueError: if invalid arguments are provided"""
- (operation_timeout, attempt_timeout) = _get_timeouts(
+ operation_timeout, attempt_timeout = _get_timeouts(
operation_timeout, attempt_timeout, self
)
if not mutations:
@@ -1352,7 +1361,7 @@ def bulk_mutate_rows(
MutationsExceptionGroup: if one or more mutations fails
Contains details about any failed entries in .exceptions
ValueError: if invalid arguments are provided"""
- (operation_timeout, attempt_timeout) = _get_timeouts(
+ operation_timeout, attempt_timeout = _get_timeouts(
operation_timeout, attempt_timeout, self
)
retryable_excs = _get_retryable_errors(retryable_errors, self)
@@ -1405,7 +1414,7 @@ def check_and_mutate_row(
bool indicating whether the predicate was true or false
Raises:
google.api_core.exceptions.GoogleAPIError: exceptions from grpc call"""
- (operation_timeout, _) = _get_timeouts(operation_timeout, None, self)
+ operation_timeout, _ = _get_timeouts(operation_timeout, None, self)
if true_case_mutations is not None and (
not isinstance(true_case_mutations, list)
):
@@ -1460,7 +1469,7 @@ def read_modify_write_row(
Raises:
google.api_core.exceptions.GoogleAPIError: exceptions from grpc call
ValueError: if invalid arguments are provided"""
- (operation_timeout, _) = _get_timeouts(operation_timeout, None, self)
+ operation_timeout, _ = _get_timeouts(operation_timeout, None, self)
if operation_timeout <= 0:
raise ValueError("operation_timeout must be greater than 0")
if rules is not None and (not isinstance(rules, list)):
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable/data/_sync_autogen/metrics_interceptor.py b/packages/google-cloud-bigtable/google/cloud/bigtable/data/_sync_autogen/metrics_interceptor.py
index c5a59787c0fd..fc60c6e4e89b 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable/data/_sync_autogen/metrics_interceptor.py
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable/data/_sync_autogen/metrics_interceptor.py
@@ -15,14 +15,15 @@
# This file is automatically generated by CrossSync. Do not edit manually.
from __future__ import annotations
-from typing import Sequence
import time
from functools import wraps
-from google.cloud.bigtable.data._metrics.data_model import ActiveOperationMetric
-from google.cloud.bigtable.data._metrics.data_model import OperationState
-from google.cloud.bigtable.data._metrics.data_model import OperationType
-from grpc import UnaryUnaryClientInterceptor
-from grpc import UnaryStreamClientInterceptor
+from typing import Sequence
+from google.cloud.bigtable.data._metrics.data_model import (
+ ActiveOperationMetric,
+ OperationState,
+ OperationType,
+)
+from grpc import UnaryStreamClientInterceptor, UnaryUnaryClientInterceptor
def _with_active_operation(func):
@@ -50,7 +51,7 @@ def _get_metadata(source) -> dict[str, str | bytes] | None:
try:
metadata: Sequence[tuple[str, str | bytes]]
metadata = source.trailing_metadata() + source.initial_metadata()
- return {k: v for (k, v) in metadata}
+ return {k: v for k, v in metadata}
except Exception:
return None
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable/data/_sync_autogen/mutations_batcher.py b/packages/google-cloud-bigtable/google/cloud/bigtable/data/_sync_autogen/mutations_batcher.py
index 84f0ba8c0618..41952f73ba98 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable/data/_sync_autogen/mutations_batcher.py
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable/data/_sync_autogen/mutations_batcher.py
@@ -16,19 +16,25 @@
# This file is automatically generated by CrossSync. Do not edit manually.
from __future__ import annotations
-from typing import Sequence, TYPE_CHECKING, cast
import atexit
+import concurrent.futures
import warnings
from collections import deque
-import concurrent.futures
-from google.cloud.bigtable.data.exceptions import MutationsExceptionGroup
-from google.cloud.bigtable.data.exceptions import FailedMutationEntryError
-from google.cloud.bigtable.data._helpers import _get_retryable_errors
-from google.cloud.bigtable.data._helpers import _get_timeouts
-from google.cloud.bigtable.data._helpers import TABLE_DEFAULT
-from google.cloud.bigtable.data.mutations import _MUTATE_ROWS_REQUEST_MUTATION_LIMIT
-from google.cloud.bigtable.data.mutations import Mutation
+from typing import TYPE_CHECKING, Sequence, cast
from google.cloud.bigtable.data._cross_sync import CrossSync
+from google.cloud.bigtable.data._helpers import (
+ TABLE_DEFAULT,
+ _get_retryable_errors,
+ _get_timeouts,
+)
+from google.cloud.bigtable.data.exceptions import (
+ FailedMutationEntryError,
+ MutationsExceptionGroup,
+)
+from google.cloud.bigtable.data.mutations import (
+ _MUTATE_ROWS_REQUEST_MUTATION_LIMIT,
+ Mutation,
+)
if TYPE_CHECKING:
from google.cloud.bigtable.data.mutations import RowMutationEntry
@@ -193,7 +199,7 @@ def __init__(
batch_retryable_errors: Sequence[type[Exception]]
| TABLE_DEFAULT = TABLE_DEFAULT.MUTATE_ROWS,
):
- (self._operation_timeout, self._attempt_timeout) = _get_timeouts(
+ self._operation_timeout, self._attempt_timeout = _get_timeouts(
batch_operation_timeout, batch_attempt_timeout, table
)
self._retryable_errors: list[type[Exception]] = _get_retryable_errors(
@@ -202,7 +208,7 @@ def __init__(
self._closed = CrossSync._Sync_Impl.Event()
self._target = table
self._staged_entries: list[RowMutationEntry] = []
- (self._staged_count, self._staged_bytes) = (0, 0)
+ self._staged_count, self._staged_bytes = (0, 0)
self._flow_control = CrossSync._Sync_Impl._FlowControl(
flow_control_max_mutation_count, flow_control_max_bytes
)
@@ -283,8 +289,8 @@ def _schedule_flush(self) -> CrossSync._Sync_Impl.Future[None] | None:
Future[None] | None:
future representing the background task, if started"""
if self._staged_entries:
- (entries, self._staged_entries) = (self._staged_entries, [])
- (self._staged_count, self._staged_bytes) = (0, 0)
+ entries, self._staged_entries = (self._staged_entries, [])
+ self._staged_count, self._staged_bytes = (0, 0)
new_task = CrossSync._Sync_Impl.create_task(
self._flush_internal, entries, sync_executor=self._sync_flush_executor
)
@@ -363,14 +369,14 @@ def _raise_exceptions(self):
Raises:
MutationsExceptionGroup: exception group with all unreported exceptions"""
if self._oldest_exceptions or self._newest_exceptions:
- (oldest, self._oldest_exceptions) = (self._oldest_exceptions, [])
+ oldest, self._oldest_exceptions = (self._oldest_exceptions, [])
newest = list(self._newest_exceptions)
self._newest_exceptions.clear()
- (entry_count, self._entries_processed_since_last_raise) = (
+ entry_count, self._entries_processed_since_last_raise = (
self._entries_processed_since_last_raise,
0,
)
- (exc_count, self._exceptions_since_last_raise) = (
+ exc_count, self._exceptions_since_last_raise = (
self._exceptions_since_last_raise,
0,
)
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable/data/exceptions.py b/packages/google-cloud-bigtable/google/cloud/bigtable/data/exceptions.py
index b19e0e5ea126..bfc1a9eadddd 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable/data/exceptions.py
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable/data/exceptions.py
@@ -15,10 +15,10 @@
from __future__ import annotations
import sys
-
-from typing import Any, TYPE_CHECKING
+from typing import TYPE_CHECKING, Any
from google.api_core import exceptions as core_exceptions
+
from google.cloud.bigtable.data.row import Row
is_311_plus = sys.version_info >= (3, 11)
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable/data/execute_query/__init__.py b/packages/google-cloud-bigtable/google/cloud/bigtable/data/execute_query/__init__.py
index 029e79b9390a..65ff8733134e 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable/data/execute_query/__init__.py
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable/data/execute_query/__init__.py
@@ -12,6 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+from google.cloud.bigtable.data._cross_sync import CrossSync
from google.cloud.bigtable.data.execute_query._async.execute_query_iterator import (
ExecuteQueryIteratorAsync,
)
@@ -27,7 +28,6 @@
QueryResultRow,
Struct,
)
-from google.cloud.bigtable.data._cross_sync import CrossSync
CrossSync.add_mapping("ExecuteQueryIterator", ExecuteQueryIteratorAsync)
CrossSync._Sync_Impl.add_mapping("ExecuteQueryIterator", ExecuteQueryIterator)
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable/data/execute_query/_async/execute_query_iterator.py b/packages/google-cloud-bigtable/google/cloud/bigtable/data/execute_query/_async/execute_query_iterator.py
index 2beda4cd65be..4a207b057bdd 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable/data/execute_query/_async/execute_query_iterator.py
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable/data/execute_query/_async/execute_query_iterator.py
@@ -15,18 +15,19 @@
from __future__ import annotations
from typing import (
+ TYPE_CHECKING,
Any,
Dict,
Optional,
Sequence,
Tuple,
- TYPE_CHECKING,
)
+
from google.api_core import retry as retries
-from google.protobuf.message import Message
from google.protobuf.internal.enum_type_wrapper import EnumTypeWrapper
+from google.protobuf.message import Message
-from google.cloud.bigtable.data.execute_query._byte_cursor import _ByteCursor
+from google.cloud.bigtable.data._cross_sync import CrossSync
from google.cloud.bigtable.data._helpers import (
_attempt_timeout_generator,
_retry_exception_factory,
@@ -35,19 +36,20 @@
EarlyMetadataCallError,
InvalidExecuteQueryResponse,
)
-from google.cloud.bigtable.data.execute_query.values import QueryResultRow
-from google.cloud.bigtable.data.execute_query.metadata import Metadata
+from google.cloud.bigtable.data.execute_query._byte_cursor import _ByteCursor
from google.cloud.bigtable.data.execute_query._reader import (
_QueryResultRowReader,
_Reader,
)
+from google.cloud.bigtable.data.execute_query.metadata import Metadata
+from google.cloud.bigtable.data.execute_query.values import QueryResultRow
from google.cloud.bigtable_v2.types.bigtable import (
ExecuteQueryRequest as ExecuteQueryRequestPB,
+)
+from google.cloud.bigtable_v2.types.bigtable import (
ExecuteQueryResponse,
)
-from google.cloud.bigtable.data._cross_sync import CrossSync
-
if TYPE_CHECKING:
if CrossSync.is_async:
from google.cloud.bigtable.data import BigtableDataClientAsync as DataClientType
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable/data/execute_query/_query_result_parsing_utils.py b/packages/google-cloud-bigtable/google/cloud/bigtable/data/execute_query/_query_result_parsing_utils.py
index a43539e55de0..b7eb1e9e9163 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable/data/execute_query/_query_result_parsing_utils.py
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable/data/execute_query/_query_result_parsing_utils.py
@@ -13,14 +13,15 @@
# limitations under the License.
from __future__ import annotations
-from typing import Any, Callable, Dict, Type, Optional, Union
+from typing import Any, Callable, Dict, Optional, Type, Union
-from google.protobuf.message import Message
+from google.api_core.datetime_helpers import DatetimeWithNanoseconds
from google.protobuf.internal.enum_type_wrapper import EnumTypeWrapper
-from google.cloud.bigtable.data.execute_query.values import Struct
+from google.protobuf.message import Message
+
from google.cloud.bigtable.data.execute_query.metadata import SqlType
+from google.cloud.bigtable.data.execute_query.values import Struct
from google.cloud.bigtable_v2 import Value as PBValue
-from google.api_core.datetime_helpers import DatetimeWithNanoseconds
_REQUIRED_PROTO_FIELDS = {
SqlType.Bytes: "bytes_value",
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable/data/execute_query/_reader.py b/packages/google-cloud-bigtable/google/cloud/bigtable/data/execute_query/_reader.py
index 467c2030fe67..d6c6c9ee6fb1 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable/data/execute_query/_reader.py
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable/data/execute_query/_reader.py
@@ -13,29 +13,27 @@
# limitations under the License.
from __future__ import annotations
+from abc import ABC, abstractmethod
from typing import (
- List,
- TypeVar,
Generic,
Iterable,
+ List,
Optional,
Sequence,
+ TypeVar,
)
-from abc import ABC, abstractmethod
-from google.protobuf.message import Message
-from google.protobuf.internal.enum_type_wrapper import EnumTypeWrapper
-from google.cloud.bigtable_v2 import ProtoRows, Value as PBValue
+from google.protobuf.internal.enum_type_wrapper import EnumTypeWrapper
+from google.protobuf.message import Message
from google.cloud.bigtable.data.execute_query._query_result_parsing_utils import (
_parse_pb_value_to_python_value,
)
-
-from google.cloud.bigtable.helpers import batched
-
-from google.cloud.bigtable.data.execute_query.values import QueryResultRow
from google.cloud.bigtable.data.execute_query.metadata import Metadata
-
+from google.cloud.bigtable.data.execute_query.values import QueryResultRow
+from google.cloud.bigtable.helpers import batched
+from google.cloud.bigtable_v2 import ProtoRows
+from google.cloud.bigtable_v2 import Value as PBValue
T = TypeVar("T")
@@ -105,9 +103,9 @@ def _construct_query_result_row(
result = QueryResultRow()
columns = metadata.columns
- assert len(values) == len(
- columns
- ), "This function should be called only when count of values matches count of columns."
+ assert len(values) == len(columns), (
+ "This function should be called only when count of values matches count of columns."
+ )
for column, value in zip(columns, values):
parsed_value = _parse_pb_value_to_python_value(
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable/data/execute_query/_sync_autogen/execute_query_iterator.py b/packages/google-cloud-bigtable/google/cloud/bigtable/data/execute_query/_sync_autogen/execute_query_iterator.py
index 68594d0e867a..d00457724eb5 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable/data/execute_query/_sync_autogen/execute_query_iterator.py
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable/data/execute_query/_sync_autogen/execute_query_iterator.py
@@ -16,11 +16,11 @@
# This file is automatically generated by CrossSync. Do not edit manually.
from __future__ import annotations
-from typing import Any, Dict, Optional, Sequence, Tuple, TYPE_CHECKING
+from typing import TYPE_CHECKING, Any, Dict, Optional, Sequence, Tuple
from google.api_core import retry as retries
-from google.protobuf.message import Message
from google.protobuf.internal.enum_type_wrapper import EnumTypeWrapper
-from google.cloud.bigtable.data.execute_query._byte_cursor import _ByteCursor
+from google.protobuf.message import Message
+from google.cloud.bigtable.data._cross_sync import CrossSync
from google.cloud.bigtable.data._helpers import (
_attempt_timeout_generator,
_retry_exception_factory,
@@ -29,17 +29,17 @@
EarlyMetadataCallError,
InvalidExecuteQueryResponse,
)
-from google.cloud.bigtable.data.execute_query.values import QueryResultRow
-from google.cloud.bigtable.data.execute_query.metadata import Metadata
+from google.cloud.bigtable.data.execute_query._byte_cursor import _ByteCursor
from google.cloud.bigtable.data.execute_query._reader import (
_QueryResultRowReader,
_Reader,
)
+from google.cloud.bigtable.data.execute_query.metadata import Metadata
+from google.cloud.bigtable.data.execute_query.values import QueryResultRow
from google.cloud.bigtable_v2.types.bigtable import (
ExecuteQueryRequest as ExecuteQueryRequestPB,
- ExecuteQueryResponse,
)
-from google.cloud.bigtable.data._cross_sync import CrossSync
+from google.cloud.bigtable_v2.types.bigtable import ExecuteQueryResponse
if TYPE_CHECKING:
from google.cloud.bigtable.data import BigtableDataClient as DataClientType
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable/data/execute_query/metadata.py b/packages/google-cloud-bigtable/google/cloud/bigtable/data/execute_query/metadata.py
index 74b6cb836688..2bf675e5ccb3 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable/data/execute_query/metadata.py
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable/data/execute_query/metadata.py
@@ -20,8 +20,8 @@
as for specifying query parameter types explicitly.
"""
-from collections import defaultdict
import datetime
+from collections import defaultdict
from typing import Any, Dict, List, Optional, Set, Tuple, Type, Union
from google.api_core.datetime_helpers import DatetimeWithNanoseconds
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable/data/execute_query/values.py b/packages/google-cloud-bigtable/google/cloud/bigtable/data/execute_query/values.py
index 80a0bff6f7b9..76af13d9b8eb 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable/data/execute_query/values.py
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable/data/execute_query/values.py
@@ -14,18 +14,19 @@
from collections import defaultdict
from typing import (
- Optional,
- List,
Dict,
- Set,
- Union,
- TypeVar,
Generic,
- Tuple,
+ List,
Mapping,
+ Optional,
+ Set,
+ Tuple,
+ TypeVar,
+ Union,
)
-from google.type import date_pb2 # type: ignore
+
from google.api_core.datetime_helpers import DatetimeWithNanoseconds
+from google.type import date_pb2 # type: ignore
T = TypeVar("T")
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable/data/mutations.py b/packages/google-cloud-bigtable/google/cloud/bigtable/data/mutations.py
index f19b1e49e862..41c067f345b1 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable/data/mutations.py
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable/data/mutations.py
@@ -13,18 +13,17 @@
# limitations under the License.
#
from __future__ import annotations
-from typing import Any
+
import time
-from dataclasses import dataclass
from abc import ABC, abstractmethod
+from dataclasses import dataclass
from sys import getsizeof
+from typing import Any
import google.cloud.bigtable_v2.types.bigtable as types_pb
import google.cloud.bigtable_v2.types.data as data_pb
-
from google.cloud.bigtable.data.read_modify_write_rules import _MAX_INCREMENT_VALUE
-
# special value for SetCell mutation timestamps. If set, server will assign a timestamp
_SERVER_SIDE_TIMESTAMP = -1
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable/data/read_rows_query.py b/packages/google-cloud-bigtable/google/cloud/bigtable/data/read_rows_query.py
index 7652bfbb9af7..9fd42e2d871a 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable/data/read_rows_query.py
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable/data/read_rows_query.py
@@ -13,19 +13,18 @@
# limitations under the License.
#
from __future__ import annotations
-from typing import TYPE_CHECKING, Any
-from bisect import bisect_left
-from bisect import bisect_right
+
+from bisect import bisect_left, bisect_right
from collections import defaultdict
-from google.cloud.bigtable.data.row_filters import RowFilter
+from typing import TYPE_CHECKING, Any
+from google.cloud.bigtable.data.row_filters import RowFilter
+from google.cloud.bigtable_v2.types import ReadRowsRequest as ReadRowsRequestPB
from google.cloud.bigtable_v2.types import RowRange as RowRangePB
from google.cloud.bigtable_v2.types import RowSet as RowSetPB
-from google.cloud.bigtable_v2.types import ReadRowsRequest as ReadRowsRequestPB
if TYPE_CHECKING:
- from google.cloud.bigtable.data import RowKeySamples
- from google.cloud.bigtable.data import ShardedQuery
+ from google.cloud.bigtable.data import RowKeySamples, ShardedQuery
class RowRange:
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable/data/row.py b/packages/google-cloud-bigtable/google/cloud/bigtable/data/row.py
index 50e65a958c51..9c47612c415c 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable/data/row.py
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable/data/row.py
@@ -15,8 +15,8 @@
from __future__ import annotations
from collections import OrderedDict
-from typing import Generator, overload, Any
from functools import total_ordering
+from typing import Any, Generator, overload
from google.cloud.bigtable_v2.types import Row as RowPB
@@ -56,9 +56,9 @@ def __init__(
self.row_key = key
self.cells: list[Cell] = cells
# index is lazily created when needed
- self._index_data: OrderedDict[
- _family_type, OrderedDict[_qualifier_type, list[Cell]]
- ] | None = None
+ self._index_data: (
+ OrderedDict[_family_type, OrderedDict[_qualifier_type, list[Cell]]] | None
+ ) = None
@property
def _index(
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable/data/row_filters.py b/packages/google-cloud-bigtable/google/cloud/bigtable/data/row_filters.py
index 9f09133d533d..007a09f5f830 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable/data/row_filters.py
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable/data/row_filters.py
@@ -13,15 +13,18 @@
# limitations under the License.
"""Filters for Google Cloud Bigtable Row classes."""
+
from __future__ import annotations
import struct
-
-from typing import Any, Sequence, TYPE_CHECKING, overload
from abc import ABC, abstractmethod
+from typing import TYPE_CHECKING, Any, Sequence, overload
+
+from google.cloud._helpers import (
+ _microseconds_from_datetime, # type: ignore
+ _to_bytes, # type: ignore
+)
-from google.cloud._helpers import _microseconds_from_datetime # type: ignore
-from google.cloud._helpers import _to_bytes # type: ignore
from google.cloud.bigtable_v2.types import data as data_v2_pb2
if TYPE_CHECKING:
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable/enums.py b/packages/google-cloud-bigtable/google/cloud/bigtable/enums.py
index 327b2f828c3b..2fa55da8056c 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable/enums.py
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable/enums.py
@@ -13,9 +13,7 @@
# limitations under the License.
"""Wrappers for gapic enum types."""
-from google.cloud.bigtable_admin_v2.types import common
-from google.cloud.bigtable_admin_v2.types import instance
-from google.cloud.bigtable_admin_v2.types import table
+from google.cloud.bigtable_admin_v2.types import common, instance, table
class StorageType(object):
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable/gapic_version.py b/packages/google-cloud-bigtable/google/cloud/bigtable/gapic_version.py
index d0a538508f75..e138acef04f7 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable/gapic_version.py
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable/gapic_version.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2022 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable/helpers.py b/packages/google-cloud-bigtable/google/cloud/bigtable/helpers.py
index 78af430892fc..6bc423f50439 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable/helpers.py
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable/helpers.py
@@ -12,9 +12,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-from typing import TypeVar, Iterable, Generator, Tuple
-
from itertools import islice
+from typing import Generator, Iterable, Tuple, TypeVar
T = TypeVar("T")
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable/instance.py b/packages/google-cloud-bigtable/google/cloud/bigtable/instance.py
index 23fb1c95dece..db5d67e270d8 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable/instance.py
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable/instance.py
@@ -15,24 +15,18 @@
"""User-friendly container for Google Cloud Bigtable Instance."""
import re
+import warnings
+
+from google.api_core.exceptions import NotFound
+from google.iam.v1 import options_pb2 # type: ignore
+from google.protobuf import field_mask_pb2
from google.cloud.bigtable.app_profile import AppProfile
from google.cloud.bigtable.cluster import Cluster
+from google.cloud.bigtable.policy import Policy
from google.cloud.bigtable.table import Table
-
-from google.protobuf import field_mask_pb2
-
from google.cloud.bigtable_admin_v2.types import instance
-from google.iam.v1 import options_pb2 # type: ignore
-
-from google.api_core.exceptions import NotFound
-
-from google.cloud.bigtable.policy import Policy
-
-import warnings
-
-
_INSTANCE_NAME_RE = re.compile(
r"^projects/(?P[^/]+)/" r"instances/(?P[a-z][-a-z0-9]*)$"
)
@@ -156,12 +150,12 @@ def from_pb(cls, instance_pb, client):
match = _INSTANCE_NAME_RE.match(instance_pb.name)
if match is None:
raise ValueError(
- "Instance protobuf name was not in the " "expected format.",
+ "Instance protobuf name was not in the expected format.",
instance_pb.name,
)
if match.group("project") != client.project:
raise ValueError(
- "Project ID on instance does not match the " "project ID on the client"
+ "Project ID on instance does not match the project ID on the client"
)
instance_id = match.group("instance_id")
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable/row.py b/packages/google-cloud-bigtable/google/cloud/bigtable/row.py
index 752458a08a79..bca23eca7381 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable/row.py
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable/row.py
@@ -14,14 +14,15 @@
"""User-friendly container for Google Cloud Bigtable Row."""
-
import struct
-from google.cloud._helpers import _datetime_from_microseconds # type: ignore
-from google.cloud._helpers import _microseconds_from_datetime # type: ignore
-from google.cloud._helpers import _to_bytes # type: ignore
-from google.cloud.bigtable_v2.types import data as data_v2_pb2
+from google.cloud._helpers import (
+ _datetime_from_microseconds, # type: ignore
+ _microseconds_from_datetime, # type: ignore
+ _to_bytes, # type: ignore
+)
+from google.cloud.bigtable_v2.types import data as data_v2_pb2
_PACK_I64 = struct.Struct(">q").pack
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable/row_data.py b/packages/google-cloud-bigtable/google/cloud/bigtable/row_data.py
index e11379108c4f..e7d45a2e6ee4 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable/row_data.py
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable/row_data.py
@@ -14,20 +14,17 @@
"""Container for Google Cloud Bigtable Cells and Streaming Row Contents."""
-
import copy
+import warnings
import grpc # type: ignore
-import warnings
-from google.api_core import exceptions
-from google.api_core import retry
+from google.api_core import exceptions, retry
from google.cloud._helpers import _to_bytes # type: ignore
+from google.cloud.bigtable.row import Cell, InvalidChunk, PartialRowData
from google.cloud.bigtable.row_merger import _RowMerger, _State
from google.cloud.bigtable_v2.types import bigtable as data_messages_v2_pb2
from google.cloud.bigtable_v2.types import data as data_v2_pb2
-from google.cloud.bigtable.row import Cell, InvalidChunk, PartialRowData
-
# Some classes need to be re-exported here to keep backwards
# compatibility. Those classes were moved to row_merger, but we dont want to
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable/row_filters.py b/packages/google-cloud-bigtable/google/cloud/bigtable/row_filters.py
index 53192acc86d0..b2c1349503e8 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable/row_filters.py
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable/row_filters.py
@@ -16,9 +16,11 @@
import struct
+from google.cloud._helpers import (
+ _microseconds_from_datetime, # type: ignore
+ _to_bytes, # type: ignore
+)
-from google.cloud._helpers import _microseconds_from_datetime # type: ignore
-from google.cloud._helpers import _to_bytes # type: ignore
from google.cloud.bigtable_v2.types import data as data_v2_pb2
_PACK_I64 = struct.Struct(">q").pack
@@ -374,7 +376,7 @@ def __init__(
inclusive_start = True
elif start_column is None:
raise ValueError(
- "Inclusive start was specified but no " "start column was given."
+ "Inclusive start was specified but no start column was given."
)
self.start_column = start_column
self.inclusive_start = inclusive_start
@@ -382,9 +384,7 @@ def __init__(
if inclusive_end is None:
inclusive_end = True
elif end_column is None:
- raise ValueError(
- "Inclusive end was specified but no " "end column was given."
- )
+ raise ValueError("Inclusive end was specified but no end column was given.")
self.end_column = end_column
self.inclusive_end = inclusive_end
@@ -516,7 +516,7 @@ def __init__(
inclusive_start = True
elif start_value is None:
raise ValueError(
- "Inclusive start was specified but no " "start value was given."
+ "Inclusive start was specified but no start value was given."
)
if isinstance(start_value, int):
start_value = _PACK_I64(start_value)
@@ -526,9 +526,7 @@ def __init__(
if inclusive_end is None:
inclusive_end = True
elif end_value is None:
- raise ValueError(
- "Inclusive end was specified but no " "end value was given."
- )
+ raise ValueError("Inclusive end was specified but no end value was given.")
if isinstance(end_value, int):
end_value = _PACK_I64(end_value)
self.end_value = end_value
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable/row_merger.py b/packages/google-cloud-bigtable/google/cloud/bigtable/row_merger.py
index 515b91df7ef2..6fe9cdca505a 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable/row_merger.py
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable/row_merger.py
@@ -1,6 +1,7 @@
-from enum import Enum
from collections import OrderedDict
-from google.cloud.bigtable.row import Cell, PartialRowData, InvalidChunk
+from enum import Enum
+
+from google.cloud.bigtable.row import Cell, InvalidChunk, PartialRowData
_MISSING_COLUMN_FAMILY = "Column family {} is not among the cells stored in this row."
_MISSING_COLUMN = (
@@ -204,15 +205,15 @@ def _handle_cell_complete(self, chunk):
if self.row.last_family != self.row.cell.family:
family_changed = True
self.row.last_family = self.row.cell.family
- self.row.cells[
- self.row.cell.family
- ] = self.row.last_family_cells = OrderedDict()
+ self.row.cells[self.row.cell.family] = self.row.last_family_cells = (
+ OrderedDict()
+ )
if family_changed or self.row.last_qualifier != self.row.cell.qualifier:
self.row.last_qualifier = self.row.cell.qualifier
- self.row.last_family_cells[
- self.row.cell.qualifier
- ] = self.row.last_qualifier_cells = []
+ self.row.last_family_cells[self.row.cell.qualifier] = (
+ self.row.last_qualifier_cells
+ ) = []
self.row.last_qualifier_cells.append(
Cell(
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable/row_set.py b/packages/google-cloud-bigtable/google/cloud/bigtable/row_set.py
index 2bc436d54c0c..99dae972a6be 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable/row_set.py
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable/row_set.py
@@ -12,8 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-"""User-friendly container for Google Cloud Bigtable RowSet """
-
+"""User-friendly container for Google Cloud Bigtable RowSet"""
from google.cloud._helpers import _to_bytes # type: ignore
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable/table.py b/packages/google-cloud-bigtable/google/cloud/bigtable/table.py
index 0009f287ef85..1ded1d6f0948 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable/table.py
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable/table.py
@@ -14,44 +14,45 @@
"""User-friendly container for Google Cloud Bigtable Table."""
-from typing import Set
import warnings
+from typing import Set
from google.api_core import timeout
-from google.api_core.exceptions import Aborted
-from google.api_core.exceptions import DeadlineExceeded
-from google.api_core.exceptions import NotFound
-from google.api_core.exceptions import RetryError
-from google.api_core.exceptions import ServiceUnavailable
-from google.api_core.exceptions import InternalServerError
+from google.api_core.exceptions import (
+ Aborted,
+ DeadlineExceeded,
+ InternalServerError,
+ NotFound,
+ RetryError,
+ ServiceUnavailable,
+)
from google.api_core.gapic_v1.method import DEFAULT
-from google.api_core.retry import if_exception_type
-from google.api_core.retry import Retry
+from google.api_core.retry import Retry, if_exception_type
from google.cloud._helpers import _to_bytes # type: ignore
+
+from google.cloud.bigtable import enums
from google.cloud.bigtable.backup import Backup
-from google.cloud.bigtable.column_family import _gc_rule_from_pb
-from google.cloud.bigtable.column_family import ColumnFamily
-from google.cloud.bigtable.batcher import MutationsBatcher
-from google.cloud.bigtable.batcher import FLUSH_COUNT, MAX_MUTATION_SIZE
+from google.cloud.bigtable.batcher import (
+ FLUSH_COUNT,
+ MAX_MUTATION_SIZE,
+ MutationsBatcher,
+)
+from google.cloud.bigtable.column_family import ColumnFamily, _gc_rule_from_pb
from google.cloud.bigtable.encryption_info import EncryptionInfo
from google.cloud.bigtable.policy import Policy
-from google.cloud.bigtable.row import AppendRow
-from google.cloud.bigtable.row import ConditionalRow
-from google.cloud.bigtable.row import DirectRow
+from google.cloud.bigtable.row import AppendRow, ConditionalRow, DirectRow
from google.cloud.bigtable.row_data import (
+ DEFAULT_RETRY_READ_ROWS,
PartialRowsData,
_retriable_internal_server_error,
)
-from google.cloud.bigtable.row_data import DEFAULT_RETRY_READ_ROWS
-from google.cloud.bigtable.row_set import RowSet
-from google.cloud.bigtable.row_set import RowRange
-from google.cloud.bigtable import enums
-from google.cloud.bigtable_v2.types import bigtable as data_messages_v2_pb2
+from google.cloud.bigtable.row_set import RowRange, RowSet
from google.cloud.bigtable_admin_v2 import BaseBigtableTableAdminClient
-from google.cloud.bigtable_admin_v2.types import table as admin_messages_v2_pb2
from google.cloud.bigtable_admin_v2.types import (
bigtable_table_admin as table_admin_messages_v2_pb2,
)
+from google.cloud.bigtable_admin_v2.types import table as admin_messages_v2_pb2
+from google.cloud.bigtable_v2.types import bigtable as data_messages_v2_pb2
# Maximum number of mutations in bulk (MutateRowsRequest message):
# (https://cloud.google.com/bigtable/docs/reference/data/rpc/
@@ -1150,7 +1151,7 @@ def _do_mutate_retryable_rows(self):
entries=entries,
app_profile_id=self.app_profile_id,
retry=None,
- **kwargs
+ **kwargs,
)
except RETRYABLE_MUTATION_ERRORS as exc:
# If an exception, considered retryable by `RETRYABLE_MUTATION_ERRORS`, is
@@ -1318,7 +1319,7 @@ def _create_row_request(
"""
request_kwargs = {"table_name": table_name}
if (start_key is not None or end_key is not None) and row_set is not None:
- raise ValueError("Row range and row set cannot be " "set simultaneously")
+ raise ValueError("Row range and row set cannot be set simultaneously")
if filter_ is not None:
request_kwargs["filter"] = filter_.to_pb()
@@ -1352,9 +1353,7 @@ def _compile_mutation_entries(table_name, rows):
:returns: entries corresponding to the inputs.
:raises: :exc:`~.table.TooManyMutationsError` if the number of mutations is
greater than the max ({})
- """.format(
- _MAX_BULK_MUTATIONS
- )
+ """.format(_MAX_BULK_MUTATIONS)
entries = []
mutations_count = 0
entry_klass = data_messages_v2_pb2.MutateRowsRequest.Entry
@@ -1405,5 +1404,5 @@ def _check_row_type(row):
"""
if not isinstance(row, DirectRow):
raise TypeError(
- "Bulk processing can not be applied for " "conditional or append mutations."
+ "Bulk processing can not be applied for conditional or append mutations."
)
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_admin/__init__.py b/packages/google-cloud-bigtable/google/cloud/bigtable_admin/__init__.py
index 2d95b06c849f..faa7a5506811 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable_admin/__init__.py
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable_admin/__init__.py
@@ -18,307 +18,145 @@
__version__ = package_version.__version__
-from google.cloud.bigtable_admin_v2.services.bigtable_instance_admin.client import (
- BigtableInstanceAdminClient,
-)
from google.cloud.bigtable_admin_v2.services.bigtable_instance_admin.async_client import (
BigtableInstanceAdminAsyncClient,
)
-from google.cloud.bigtable_admin_v2.services.bigtable_table_admin.client import (
- BaseBigtableTableAdminClient,
+from google.cloud.bigtable_admin_v2.services.bigtable_instance_admin.client import (
+ BigtableInstanceAdminClient,
)
from google.cloud.bigtable_admin_v2.services.bigtable_table_admin.async_client import (
BaseBigtableTableAdminAsyncClient,
)
-
-from google.cloud.bigtable_admin_v2.types.bigtable_instance_admin import (
- CreateAppProfileRequest,
+from google.cloud.bigtable_admin_v2.services.bigtable_table_admin.client import (
+ BaseBigtableTableAdminClient,
)
from google.cloud.bigtable_admin_v2.types.bigtable_instance_admin import (
+ CreateAppProfileRequest,
CreateClusterMetadata,
-)
-from google.cloud.bigtable_admin_v2.types.bigtable_instance_admin import (
CreateClusterRequest,
-)
-from google.cloud.bigtable_admin_v2.types.bigtable_instance_admin import (
CreateInstanceMetadata,
-)
-from google.cloud.bigtable_admin_v2.types.bigtable_instance_admin import (
CreateInstanceRequest,
-)
-from google.cloud.bigtable_admin_v2.types.bigtable_instance_admin import (
CreateLogicalViewMetadata,
-)
-from google.cloud.bigtable_admin_v2.types.bigtable_instance_admin import (
CreateLogicalViewRequest,
-)
-from google.cloud.bigtable_admin_v2.types.bigtable_instance_admin import (
CreateMaterializedViewMetadata,
-)
-from google.cloud.bigtable_admin_v2.types.bigtable_instance_admin import (
CreateMaterializedViewRequest,
-)
-from google.cloud.bigtable_admin_v2.types.bigtable_instance_admin import (
DeleteAppProfileRequest,
-)
-from google.cloud.bigtable_admin_v2.types.bigtable_instance_admin import (
DeleteClusterRequest,
-)
-from google.cloud.bigtable_admin_v2.types.bigtable_instance_admin import (
DeleteInstanceRequest,
-)
-from google.cloud.bigtable_admin_v2.types.bigtable_instance_admin import (
DeleteLogicalViewRequest,
-)
-from google.cloud.bigtable_admin_v2.types.bigtable_instance_admin import (
DeleteMaterializedViewRequest,
-)
-from google.cloud.bigtable_admin_v2.types.bigtable_instance_admin import (
GetAppProfileRequest,
-)
-from google.cloud.bigtable_admin_v2.types.bigtable_instance_admin import (
GetClusterRequest,
-)
-from google.cloud.bigtable_admin_v2.types.bigtable_instance_admin import (
GetInstanceRequest,
-)
-from google.cloud.bigtable_admin_v2.types.bigtable_instance_admin import (
GetLogicalViewRequest,
-)
-from google.cloud.bigtable_admin_v2.types.bigtable_instance_admin import (
GetMaterializedViewRequest,
-)
-from google.cloud.bigtable_admin_v2.types.bigtable_instance_admin import (
ListAppProfilesRequest,
-)
-from google.cloud.bigtable_admin_v2.types.bigtable_instance_admin import (
ListAppProfilesResponse,
-)
-from google.cloud.bigtable_admin_v2.types.bigtable_instance_admin import (
ListClustersRequest,
-)
-from google.cloud.bigtable_admin_v2.types.bigtable_instance_admin import (
ListClustersResponse,
-)
-from google.cloud.bigtable_admin_v2.types.bigtable_instance_admin import (
ListHotTabletsRequest,
-)
-from google.cloud.bigtable_admin_v2.types.bigtable_instance_admin import (
ListHotTabletsResponse,
-)
-from google.cloud.bigtable_admin_v2.types.bigtable_instance_admin import (
ListInstancesRequest,
-)
-from google.cloud.bigtable_admin_v2.types.bigtable_instance_admin import (
ListInstancesResponse,
-)
-from google.cloud.bigtable_admin_v2.types.bigtable_instance_admin import (
ListLogicalViewsRequest,
-)
-from google.cloud.bigtable_admin_v2.types.bigtable_instance_admin import (
ListLogicalViewsResponse,
-)
-from google.cloud.bigtable_admin_v2.types.bigtable_instance_admin import (
ListMaterializedViewsRequest,
-)
-from google.cloud.bigtable_admin_v2.types.bigtable_instance_admin import (
ListMaterializedViewsResponse,
-)
-from google.cloud.bigtable_admin_v2.types.bigtable_instance_admin import (
PartialUpdateClusterMetadata,
-)
-from google.cloud.bigtable_admin_v2.types.bigtable_instance_admin import (
PartialUpdateClusterRequest,
-)
-from google.cloud.bigtable_admin_v2.types.bigtable_instance_admin import (
PartialUpdateInstanceRequest,
-)
-from google.cloud.bigtable_admin_v2.types.bigtable_instance_admin import (
UpdateAppProfileMetadata,
-)
-from google.cloud.bigtable_admin_v2.types.bigtable_instance_admin import (
UpdateAppProfileRequest,
-)
-from google.cloud.bigtable_admin_v2.types.bigtable_instance_admin import (
UpdateClusterMetadata,
-)
-from google.cloud.bigtable_admin_v2.types.bigtable_instance_admin import (
UpdateInstanceMetadata,
-)
-from google.cloud.bigtable_admin_v2.types.bigtable_instance_admin import (
UpdateLogicalViewMetadata,
-)
-from google.cloud.bigtable_admin_v2.types.bigtable_instance_admin import (
UpdateLogicalViewRequest,
-)
-from google.cloud.bigtable_admin_v2.types.bigtable_instance_admin import (
UpdateMaterializedViewMetadata,
-)
-from google.cloud.bigtable_admin_v2.types.bigtable_instance_admin import (
UpdateMaterializedViewRequest,
)
from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import (
CheckConsistencyRequest,
-)
-from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import (
CheckConsistencyResponse,
-)
-from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import CopyBackupMetadata
-from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import CopyBackupRequest
-from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import (
+ CopyBackupMetadata,
+ CopyBackupRequest,
CreateAuthorizedViewMetadata,
-)
-from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import (
CreateAuthorizedViewRequest,
-)
-from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import (
CreateBackupMetadata,
-)
-from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import (
CreateBackupRequest,
-)
-from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import (
CreateSchemaBundleMetadata,
-)
-from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import (
CreateSchemaBundleRequest,
-)
-from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import (
CreateTableFromSnapshotMetadata,
-)
-from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import (
CreateTableFromSnapshotRequest,
-)
-from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import CreateTableRequest
-from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import (
+ CreateTableRequest,
DataBoostReadLocalWrites,
-)
-from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import (
DeleteAuthorizedViewRequest,
-)
-from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import (
DeleteBackupRequest,
-)
-from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import (
DeleteSchemaBundleRequest,
-)
-from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import (
DeleteSnapshotRequest,
-)
-from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import DeleteTableRequest
-from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import (
+ DeleteTableRequest,
DropRowRangeRequest,
-)
-from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import (
GenerateConsistencyTokenRequest,
-)
-from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import (
GenerateConsistencyTokenResponse,
-)
-from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import (
GetAuthorizedViewRequest,
-)
-from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import GetBackupRequest
-from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import (
+ GetBackupRequest,
GetSchemaBundleRequest,
-)
-from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import GetSnapshotRequest
-from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import GetTableRequest
-from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import (
+ GetSnapshotRequest,
+ GetTableRequest,
ListAuthorizedViewsRequest,
-)
-from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import (
ListAuthorizedViewsResponse,
-)
-from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import ListBackupsRequest
-from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import (
+ ListBackupsRequest,
ListBackupsResponse,
-)
-from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import (
ListSchemaBundlesRequest,
-)
-from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import (
ListSchemaBundlesResponse,
-)
-from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import (
ListSnapshotsRequest,
-)
-from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import (
ListSnapshotsResponse,
-)
-from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import ListTablesRequest
-from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import ListTablesResponse
-from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import (
+ ListTablesRequest,
+ ListTablesResponse,
ModifyColumnFamiliesRequest,
-)
-from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import (
OptimizeRestoredTableMetadata,
-)
-from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import (
RestoreTableMetadata,
-)
-from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import (
RestoreTableRequest,
-)
-from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import (
SnapshotTableMetadata,
-)
-from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import (
SnapshotTableRequest,
-)
-from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import (
StandardReadRemoteWrites,
-)
-from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import (
UndeleteTableMetadata,
-)
-from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import (
UndeleteTableRequest,
-)
-from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import (
UpdateAuthorizedViewMetadata,
-)
-from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import (
UpdateAuthorizedViewRequest,
-)
-from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import (
UpdateBackupRequest,
-)
-from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import (
UpdateSchemaBundleMetadata,
-)
-from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import (
UpdateSchemaBundleRequest,
-)
-from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import (
UpdateTableMetadata,
+ UpdateTableRequest,
+)
+from google.cloud.bigtable_admin_v2.types.common import OperationProgress, StorageType
+from google.cloud.bigtable_admin_v2.types.instance import (
+ AppProfile,
+ AutoscalingLimits,
+ AutoscalingTargets,
+ Cluster,
+ HotTablet,
+ Instance,
+ LogicalView,
+ MaterializedView,
+)
+from google.cloud.bigtable_admin_v2.types.table import (
+ AuthorizedView,
+ Backup,
+ BackupInfo,
+ ChangeStreamConfig,
+ ColumnFamily,
+ EncryptionInfo,
+ GcRule,
+ ProtoSchema,
+ RestoreInfo,
+ RestoreSourceType,
+ SchemaBundle,
+ Snapshot,
+ Table,
+ TieredStorageConfig,
+ TieredStorageRule,
)
-from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import UpdateTableRequest
-from google.cloud.bigtable_admin_v2.types.common import OperationProgress
-from google.cloud.bigtable_admin_v2.types.common import StorageType
-from google.cloud.bigtable_admin_v2.types.instance import AppProfile
-from google.cloud.bigtable_admin_v2.types.instance import AutoscalingLimits
-from google.cloud.bigtable_admin_v2.types.instance import AutoscalingTargets
-from google.cloud.bigtable_admin_v2.types.instance import Cluster
-from google.cloud.bigtable_admin_v2.types.instance import HotTablet
-from google.cloud.bigtable_admin_v2.types.instance import Instance
-from google.cloud.bigtable_admin_v2.types.instance import LogicalView
-from google.cloud.bigtable_admin_v2.types.instance import MaterializedView
-from google.cloud.bigtable_admin_v2.types.table import AuthorizedView
-from google.cloud.bigtable_admin_v2.types.table import Backup
-from google.cloud.bigtable_admin_v2.types.table import BackupInfo
-from google.cloud.bigtable_admin_v2.types.table import ChangeStreamConfig
-from google.cloud.bigtable_admin_v2.types.table import ColumnFamily
-from google.cloud.bigtable_admin_v2.types.table import EncryptionInfo
-from google.cloud.bigtable_admin_v2.types.table import GcRule
-from google.cloud.bigtable_admin_v2.types.table import ProtoSchema
-from google.cloud.bigtable_admin_v2.types.table import RestoreInfo
-from google.cloud.bigtable_admin_v2.types.table import SchemaBundle
-from google.cloud.bigtable_admin_v2.types.table import Snapshot
-from google.cloud.bigtable_admin_v2.types.table import Table
-from google.cloud.bigtable_admin_v2.types.table import TieredStorageConfig
-from google.cloud.bigtable_admin_v2.types.table import TieredStorageRule
-from google.cloud.bigtable_admin_v2.types.table import RestoreSourceType
from google.cloud.bigtable_admin_v2.types.types import Type
__all__ = (
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/__init__.py b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/__init__.py
index 6a47979fd5ec..3c23c54ef4c8 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/__init__.py
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/__init__.py
@@ -13,10 +13,11 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
-from google.cloud.bigtable_admin_v2 import gapic_version as package_version
+import sys
import google.api_core as api_core
-import sys
+
+from google.cloud.bigtable_admin_v2 import gapic_version as package_version
__version__ = package_version.__version__
@@ -28,131 +29,141 @@
import importlib_metadata as metadata
-from .services.bigtable_instance_admin import BigtableInstanceAdminClient
-from .services.bigtable_instance_admin import BigtableInstanceAdminAsyncClient
-from .services.bigtable_table_admin import BaseBigtableTableAdminClient
-from .services.bigtable_table_admin import BaseBigtableTableAdminAsyncClient
-
-from .types.bigtable_instance_admin import CreateAppProfileRequest
-from .types.bigtable_instance_admin import CreateClusterMetadata
-from .types.bigtable_instance_admin import CreateClusterRequest
-from .types.bigtable_instance_admin import CreateInstanceMetadata
-from .types.bigtable_instance_admin import CreateInstanceRequest
-from .types.bigtable_instance_admin import CreateLogicalViewMetadata
-from .types.bigtable_instance_admin import CreateLogicalViewRequest
-from .types.bigtable_instance_admin import CreateMaterializedViewMetadata
-from .types.bigtable_instance_admin import CreateMaterializedViewRequest
-from .types.bigtable_instance_admin import DeleteAppProfileRequest
-from .types.bigtable_instance_admin import DeleteClusterRequest
-from .types.bigtable_instance_admin import DeleteInstanceRequest
-from .types.bigtable_instance_admin import DeleteLogicalViewRequest
-from .types.bigtable_instance_admin import DeleteMaterializedViewRequest
-from .types.bigtable_instance_admin import GetAppProfileRequest
-from .types.bigtable_instance_admin import GetClusterRequest
-from .types.bigtable_instance_admin import GetInstanceRequest
-from .types.bigtable_instance_admin import GetLogicalViewRequest
-from .types.bigtable_instance_admin import GetMaterializedViewRequest
-from .types.bigtable_instance_admin import ListAppProfilesRequest
-from .types.bigtable_instance_admin import ListAppProfilesResponse
-from .types.bigtable_instance_admin import ListClustersRequest
-from .types.bigtable_instance_admin import ListClustersResponse
-from .types.bigtable_instance_admin import ListHotTabletsRequest
-from .types.bigtable_instance_admin import ListHotTabletsResponse
-from .types.bigtable_instance_admin import ListInstancesRequest
-from .types.bigtable_instance_admin import ListInstancesResponse
-from .types.bigtable_instance_admin import ListLogicalViewsRequest
-from .types.bigtable_instance_admin import ListLogicalViewsResponse
-from .types.bigtable_instance_admin import ListMaterializedViewsRequest
-from .types.bigtable_instance_admin import ListMaterializedViewsResponse
-from .types.bigtable_instance_admin import PartialUpdateClusterMetadata
-from .types.bigtable_instance_admin import PartialUpdateClusterRequest
-from .types.bigtable_instance_admin import PartialUpdateInstanceRequest
-from .types.bigtable_instance_admin import UpdateAppProfileMetadata
-from .types.bigtable_instance_admin import UpdateAppProfileRequest
-from .types.bigtable_instance_admin import UpdateClusterMetadata
-from .types.bigtable_instance_admin import UpdateInstanceMetadata
-from .types.bigtable_instance_admin import UpdateLogicalViewMetadata
-from .types.bigtable_instance_admin import UpdateLogicalViewRequest
-from .types.bigtable_instance_admin import UpdateMaterializedViewMetadata
-from .types.bigtable_instance_admin import UpdateMaterializedViewRequest
-from .types.bigtable_table_admin import CheckConsistencyRequest
-from .types.bigtable_table_admin import CheckConsistencyResponse
-from .types.bigtable_table_admin import CopyBackupMetadata
-from .types.bigtable_table_admin import CopyBackupRequest
-from .types.bigtable_table_admin import CreateAuthorizedViewMetadata
-from .types.bigtable_table_admin import CreateAuthorizedViewRequest
-from .types.bigtable_table_admin import CreateBackupMetadata
-from .types.bigtable_table_admin import CreateBackupRequest
-from .types.bigtable_table_admin import CreateSchemaBundleMetadata
-from .types.bigtable_table_admin import CreateSchemaBundleRequest
-from .types.bigtable_table_admin import CreateTableFromSnapshotMetadata
-from .types.bigtable_table_admin import CreateTableFromSnapshotRequest
-from .types.bigtable_table_admin import CreateTableRequest
-from .types.bigtable_table_admin import DataBoostReadLocalWrites
-from .types.bigtable_table_admin import DeleteAuthorizedViewRequest
-from .types.bigtable_table_admin import DeleteBackupRequest
-from .types.bigtable_table_admin import DeleteSchemaBundleRequest
-from .types.bigtable_table_admin import DeleteSnapshotRequest
-from .types.bigtable_table_admin import DeleteTableRequest
-from .types.bigtable_table_admin import DropRowRangeRequest
-from .types.bigtable_table_admin import GenerateConsistencyTokenRequest
-from .types.bigtable_table_admin import GenerateConsistencyTokenResponse
-from .types.bigtable_table_admin import GetAuthorizedViewRequest
-from .types.bigtable_table_admin import GetBackupRequest
-from .types.bigtable_table_admin import GetSchemaBundleRequest
-from .types.bigtable_table_admin import GetSnapshotRequest
-from .types.bigtable_table_admin import GetTableRequest
-from .types.bigtable_table_admin import ListAuthorizedViewsRequest
-from .types.bigtable_table_admin import ListAuthorizedViewsResponse
-from .types.bigtable_table_admin import ListBackupsRequest
-from .types.bigtable_table_admin import ListBackupsResponse
-from .types.bigtable_table_admin import ListSchemaBundlesRequest
-from .types.bigtable_table_admin import ListSchemaBundlesResponse
-from .types.bigtable_table_admin import ListSnapshotsRequest
-from .types.bigtable_table_admin import ListSnapshotsResponse
-from .types.bigtable_table_admin import ListTablesRequest
-from .types.bigtable_table_admin import ListTablesResponse
-from .types.bigtable_table_admin import ModifyColumnFamiliesRequest
-from .types.bigtable_table_admin import OptimizeRestoredTableMetadata
-from .types.bigtable_table_admin import RestoreTableMetadata
-from .types.bigtable_table_admin import RestoreTableRequest
-from .types.bigtable_table_admin import SnapshotTableMetadata
-from .types.bigtable_table_admin import SnapshotTableRequest
-from .types.bigtable_table_admin import StandardReadRemoteWrites
-from .types.bigtable_table_admin import UndeleteTableMetadata
-from .types.bigtable_table_admin import UndeleteTableRequest
-from .types.bigtable_table_admin import UpdateAuthorizedViewMetadata
-from .types.bigtable_table_admin import UpdateAuthorizedViewRequest
-from .types.bigtable_table_admin import UpdateBackupRequest
-from .types.bigtable_table_admin import UpdateSchemaBundleMetadata
-from .types.bigtable_table_admin import UpdateSchemaBundleRequest
-from .types.bigtable_table_admin import UpdateTableMetadata
-from .types.bigtable_table_admin import UpdateTableRequest
-from .types.common import OperationProgress
-from .types.common import StorageType
-from .types.instance import AppProfile
-from .types.instance import AutoscalingLimits
-from .types.instance import AutoscalingTargets
-from .types.instance import Cluster
-from .types.instance import HotTablet
-from .types.instance import Instance
-from .types.instance import LogicalView
-from .types.instance import MaterializedView
-from .types.table import AuthorizedView
-from .types.table import Backup
-from .types.table import BackupInfo
-from .types.table import ChangeStreamConfig
-from .types.table import ColumnFamily
-from .types.table import EncryptionInfo
-from .types.table import GcRule
-from .types.table import ProtoSchema
-from .types.table import RestoreInfo
-from .types.table import SchemaBundle
-from .types.table import Snapshot
-from .types.table import Table
-from .types.table import TieredStorageConfig
-from .types.table import TieredStorageRule
-from .types.table import RestoreSourceType
+from .services.bigtable_instance_admin import (
+ BigtableInstanceAdminAsyncClient,
+ BigtableInstanceAdminClient,
+)
+from .services.bigtable_table_admin import (
+ BaseBigtableTableAdminAsyncClient,
+ BaseBigtableTableAdminClient,
+)
+from .types.bigtable_instance_admin import (
+ CreateAppProfileRequest,
+ CreateClusterMetadata,
+ CreateClusterRequest,
+ CreateInstanceMetadata,
+ CreateInstanceRequest,
+ CreateLogicalViewMetadata,
+ CreateLogicalViewRequest,
+ CreateMaterializedViewMetadata,
+ CreateMaterializedViewRequest,
+ DeleteAppProfileRequest,
+ DeleteClusterRequest,
+ DeleteInstanceRequest,
+ DeleteLogicalViewRequest,
+ DeleteMaterializedViewRequest,
+ GetAppProfileRequest,
+ GetClusterRequest,
+ GetInstanceRequest,
+ GetLogicalViewRequest,
+ GetMaterializedViewRequest,
+ ListAppProfilesRequest,
+ ListAppProfilesResponse,
+ ListClustersRequest,
+ ListClustersResponse,
+ ListHotTabletsRequest,
+ ListHotTabletsResponse,
+ ListInstancesRequest,
+ ListInstancesResponse,
+ ListLogicalViewsRequest,
+ ListLogicalViewsResponse,
+ ListMaterializedViewsRequest,
+ ListMaterializedViewsResponse,
+ PartialUpdateClusterMetadata,
+ PartialUpdateClusterRequest,
+ PartialUpdateInstanceRequest,
+ UpdateAppProfileMetadata,
+ UpdateAppProfileRequest,
+ UpdateClusterMetadata,
+ UpdateInstanceMetadata,
+ UpdateLogicalViewMetadata,
+ UpdateLogicalViewRequest,
+ UpdateMaterializedViewMetadata,
+ UpdateMaterializedViewRequest,
+)
+from .types.bigtable_table_admin import (
+ CheckConsistencyRequest,
+ CheckConsistencyResponse,
+ CopyBackupMetadata,
+ CopyBackupRequest,
+ CreateAuthorizedViewMetadata,
+ CreateAuthorizedViewRequest,
+ CreateBackupMetadata,
+ CreateBackupRequest,
+ CreateSchemaBundleMetadata,
+ CreateSchemaBundleRequest,
+ CreateTableFromSnapshotMetadata,
+ CreateTableFromSnapshotRequest,
+ CreateTableRequest,
+ DataBoostReadLocalWrites,
+ DeleteAuthorizedViewRequest,
+ DeleteBackupRequest,
+ DeleteSchemaBundleRequest,
+ DeleteSnapshotRequest,
+ DeleteTableRequest,
+ DropRowRangeRequest,
+ GenerateConsistencyTokenRequest,
+ GenerateConsistencyTokenResponse,
+ GetAuthorizedViewRequest,
+ GetBackupRequest,
+ GetSchemaBundleRequest,
+ GetSnapshotRequest,
+ GetTableRequest,
+ ListAuthorizedViewsRequest,
+ ListAuthorizedViewsResponse,
+ ListBackupsRequest,
+ ListBackupsResponse,
+ ListSchemaBundlesRequest,
+ ListSchemaBundlesResponse,
+ ListSnapshotsRequest,
+ ListSnapshotsResponse,
+ ListTablesRequest,
+ ListTablesResponse,
+ ModifyColumnFamiliesRequest,
+ OptimizeRestoredTableMetadata,
+ RestoreTableMetadata,
+ RestoreTableRequest,
+ SnapshotTableMetadata,
+ SnapshotTableRequest,
+ StandardReadRemoteWrites,
+ UndeleteTableMetadata,
+ UndeleteTableRequest,
+ UpdateAuthorizedViewMetadata,
+ UpdateAuthorizedViewRequest,
+ UpdateBackupRequest,
+ UpdateSchemaBundleMetadata,
+ UpdateSchemaBundleRequest,
+ UpdateTableMetadata,
+ UpdateTableRequest,
+)
+from .types.common import OperationProgress, StorageType
+from .types.instance import (
+ AppProfile,
+ AutoscalingLimits,
+ AutoscalingTargets,
+ Cluster,
+ HotTablet,
+ Instance,
+ LogicalView,
+ MaterializedView,
+)
+from .types.table import (
+ AuthorizedView,
+ Backup,
+ BackupInfo,
+ ChangeStreamConfig,
+ ColumnFamily,
+ EncryptionInfo,
+ GcRule,
+ ProtoSchema,
+ RestoreInfo,
+ RestoreSourceType,
+ SchemaBundle,
+ Snapshot,
+ Table,
+ TieredStorageConfig,
+ TieredStorageRule,
+)
from .types.types import Type
if hasattr(api_core, "check_python_version") and hasattr(
@@ -164,8 +175,8 @@
# An older version of api_core is installed which does not define the
# functions above. We do equivalent checks manually.
try:
- import warnings
import sys
+ import warnings
_py_version_str = sys.version.split()[0]
_package_label = "google.cloud.bigtable_admin_v2"
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/overlay/__init__.py b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/overlay/__init__.py
index f66c7f8dd885..a3ad5728cd0d 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/overlay/__init__.py
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/overlay/__init__.py
@@ -29,17 +29,16 @@
# currently implemented as either types under overlay/types or in methods in an overwritten
# client class under overlay/services.
+from .services.bigtable_table_admin import (
+ BigtableTableAdminAsyncClient,
+ BigtableTableAdminClient,
+)
from .types import (
AsyncRestoreTableOperation,
RestoreTableOperation,
WaitForConsistencyRequest,
)
-from .services.bigtable_table_admin import (
- BigtableTableAdminAsyncClient,
- BigtableTableAdminClient,
-)
-
__all__ = (
"AsyncRestoreTableOperation",
"RestoreTableOperation",
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/overlay/services/bigtable_table_admin/async_client.py b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/overlay/services/bigtable_table_admin/async_client.py
index ee8e5757d23a..bd642eb15661 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/overlay/services/bigtable_table_admin/async_client.py
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/overlay/services/bigtable_table_admin/async_client.py
@@ -29,8 +29,8 @@
import copy
import functools
-
from typing import Callable, Optional, Sequence, Tuple, Union
+
from google.api_core import gapic_v1
from google.api_core import retry as retries
@@ -42,22 +42,19 @@
from google.api_core import client_options as client_options_lib
from google.auth import credentials as ga_credentials # type: ignore
-from google.cloud.bigtable_admin_v2.types import bigtable_table_admin
-
+from google.cloud.bigtable.gapic_version import __version__ as bigtable_version
+from google.cloud.bigtable_admin_v2.overlay.types import (
+ async_consistency,
+ async_restore_table,
+ wait_for_consistency_request,
+)
from google.cloud.bigtable_admin_v2.services.bigtable_table_admin import (
async_client as base_client,
)
from google.cloud.bigtable_admin_v2.services.bigtable_table_admin.transports.base import (
BigtableTableAdminTransport,
)
-from google.cloud.bigtable_admin_v2.overlay.types import (
- async_consistency,
- async_restore_table,
- wait_for_consistency_request,
-)
-
-from google.cloud.bigtable.gapic_version import __version__ as bigtable_version
-
+from google.cloud.bigtable_admin_v2.types import bigtable_table_admin
DEFAULT_CLIENT_INFO = copy.copy(base_client.DEFAULT_CLIENT_INFO)
DEFAULT_CLIENT_INFO.client_library_version = f"{bigtable_version}-admin-overlay-async"
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/overlay/services/bigtable_table_admin/client.py b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/overlay/services/bigtable_table_admin/client.py
index 1b6770b10195..56ec13b157d5 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/overlay/services/bigtable_table_admin/client.py
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/overlay/services/bigtable_table_admin/client.py
@@ -29,8 +29,8 @@
import copy
import functools
-
from typing import Callable, Optional, Sequence, Tuple, Union
+
from google.api_core import gapic_v1
from google.api_core import retry as retries
@@ -42,22 +42,19 @@
from google.api_core import client_options as client_options_lib
from google.auth import credentials as ga_credentials # type: ignore
-from google.cloud.bigtable_admin_v2.types import bigtable_table_admin
-
+from google.cloud.bigtable.gapic_version import __version__ as bigtable_version
+from google.cloud.bigtable_admin_v2.overlay.types import (
+ consistency,
+ restore_table,
+ wait_for_consistency_request,
+)
from google.cloud.bigtable_admin_v2.services.bigtable_table_admin import (
client as base_client,
)
from google.cloud.bigtable_admin_v2.services.bigtable_table_admin.transports.base import (
BigtableTableAdminTransport,
)
-from google.cloud.bigtable_admin_v2.overlay.types import (
- consistency,
- restore_table,
- wait_for_consistency_request,
-)
-
-from google.cloud.bigtable.gapic_version import __version__ as bigtable_version
-
+from google.cloud.bigtable_admin_v2.types import bigtable_table_admin
DEFAULT_CLIENT_INFO = copy.copy(base_client.DEFAULT_CLIENT_INFO)
DEFAULT_CLIENT_INFO.client_library_version = f"{bigtable_version}-admin-overlay"
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/overlay/types/__init__.py b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/overlay/types/__init__.py
index 16b032ac4743..5d026bd1ef43 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/overlay/types/__init__.py
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/overlay/types/__init__.py
@@ -15,11 +15,9 @@
from .async_restore_table import (
AsyncRestoreTableOperation,
)
-
from .restore_table import (
RestoreTableOperation,
)
-
from .wait_for_consistency_request import (
WaitForConsistencyRequest,
)
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/overlay/types/async_consistency.py b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/overlay/types/async_consistency.py
index 0703940d5138..bf82a864bf0a 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/overlay/types/async_consistency.py
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/overlay/types/async_consistency.py
@@ -12,11 +12,12 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-from typing import Awaitable, Union, Callable
+from typing import Awaitable, Callable, Union
-from google.api_core.future import async_future
from google.api_core import gapic_v1
from google.api_core import retry as retries
+from google.api_core.future import async_future
+
from google.cloud.bigtable_admin_v2.types import bigtable_table_admin
try:
@@ -64,7 +65,7 @@ def __init__(
[OptionalRetry], Awaitable[bigtable_table_admin.CheckConsistencyResponse]
],
retry: retries.AsyncRetry = DEFAULT_RETRY,
- **kwargs
+ **kwargs,
):
super(_AsyncCheckConsistencyPollingFuture, self).__init__(retry=retry, **kwargs)
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/overlay/types/async_restore_table.py b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/overlay/types/async_restore_table.py
index 9edfb4963cd3..ee834713b5d6 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/overlay/types/async_restore_table.py
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/overlay/types/async_restore_table.py
@@ -14,8 +14,7 @@
from typing import Optional
-from google.api_core import exceptions
-from google.api_core import operation_async
+from google.api_core import exceptions, operation_async
from google.protobuf import empty_pb2
from google.cloud.bigtable_admin_v2.types import OptimizeRestoredTableMetadata
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/overlay/types/consistency.py b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/overlay/types/consistency.py
index 63a110975442..0894b6c3d82b 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/overlay/types/consistency.py
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/overlay/types/consistency.py
@@ -12,11 +12,12 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-from typing import Union, Callable
+from typing import Callable, Union
-from google.api_core.future import polling
from google.api_core import gapic_v1
from google.api_core import retry as retries
+from google.api_core.future import polling
+
from google.cloud.bigtable_admin_v2.types import bigtable_table_admin
try:
@@ -60,7 +61,7 @@ def __init__(
[OptionalRetry], bigtable_table_admin.CheckConsistencyResponse
],
polling: retries.Retry = DEFAULT_RETRY,
- **kwargs
+ **kwargs,
):
super(_CheckConsistencyPollingFuture, self).__init__(polling=polling, **kwargs)
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/overlay/types/restore_table.py b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/overlay/types/restore_table.py
index 84c9c5d91644..e8201d0c0a62 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/overlay/types/restore_table.py
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/overlay/types/restore_table.py
@@ -14,8 +14,7 @@
from typing import Optional
-from google.api_core import exceptions
-from google.api_core import operation
+from google.api_core import exceptions, operation
from google.protobuf import empty_pb2
from google.cloud.bigtable_admin_v2.types import OptimizeRestoredTableMetadata
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/__init__.py b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/__init__.py
index 20ac9e4fc5f6..eee3c02dcb5b 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/__init__.py
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/__init__.py
@@ -13,8 +13,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
-from .client import BigtableInstanceAdminClient
from .async_client import BigtableInstanceAdminAsyncClient
+from .client import BigtableInstanceAdminClient
__all__ = (
"BigtableInstanceAdminClient",
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/async_client.py b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/async_client.py
index 632496543912..532e46290b47 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/async_client.py
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/async_client.py
@@ -14,11 +14,11 @@
# limitations under the License.
#
import logging as std_logging
-from collections import OrderedDict
import re
+from collections import OrderedDict
from typing import (
- Dict,
Callable,
+ Dict,
Mapping,
MutableMapping,
MutableSequence,
@@ -29,36 +29,39 @@
Union,
)
-from google.cloud.bigtable_admin_v2 import gapic_version as package_version
-
-from google.api_core.client_options import ClientOptions
+import google.protobuf
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
from google.api_core import retry_async as retries
+from google.api_core.client_options import ClientOptions
from google.auth import credentials as ga_credentials # type: ignore
from google.oauth2 import service_account # type: ignore
-import google.protobuf
+from google.cloud.bigtable_admin_v2 import gapic_version as package_version
try:
OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None]
except AttributeError: # pragma: NO COVER
OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore
-from google.api_core import operation # type: ignore
-from google.api_core import operation_async # type: ignore
+import google.api_core.operation as operation # type: ignore
+import google.api_core.operation_async as operation_async # type: ignore
+import google.iam.v1.iam_policy_pb2 as iam_policy_pb2 # type: ignore
+import google.iam.v1.policy_pb2 as policy_pb2 # type: ignore
+import google.protobuf.field_mask_pb2 as field_mask_pb2 # type: ignore
+import google.protobuf.timestamp_pb2 as timestamp_pb2 # type: ignore
+
from google.cloud.bigtable_admin_v2.services.bigtable_instance_admin import pagers
-from google.cloud.bigtable_admin_v2.types import bigtable_instance_admin
-from google.cloud.bigtable_admin_v2.types import common
-from google.cloud.bigtable_admin_v2.types import instance
+from google.cloud.bigtable_admin_v2.types import (
+ bigtable_instance_admin,
+ common,
+ instance,
+)
from google.cloud.bigtable_admin_v2.types import instance as gba_instance
-from google.iam.v1 import iam_policy_pb2 # type: ignore
-from google.iam.v1 import policy_pb2 # type: ignore
-from google.protobuf import field_mask_pb2 # type: ignore
-from google.protobuf import timestamp_pb2 # type: ignore
-from .transports.base import BigtableInstanceAdminTransport, DEFAULT_CLIENT_INFO
-from .transports.grpc_asyncio import BigtableInstanceAdminGrpcAsyncIOTransport
+
from .client import BigtableInstanceAdminClient
+from .transports.base import DEFAULT_CLIENT_INFO, BigtableInstanceAdminTransport
+from .transports.grpc_asyncio import BigtableInstanceAdminGrpcAsyncIOTransport
try:
from google.api_core import client_logging # type: ignore
@@ -154,7 +157,10 @@ def from_service_account_info(cls, info: dict, *args, **kwargs):
Returns:
BigtableInstanceAdminAsyncClient: The constructed client.
"""
- return BigtableInstanceAdminClient.from_service_account_info.__func__(BigtableInstanceAdminAsyncClient, info, *args, **kwargs) # type: ignore
+ sa_info_func = (
+ BigtableInstanceAdminClient.from_service_account_info.__func__ # type: ignore
+ )
+ return sa_info_func(BigtableInstanceAdminAsyncClient, info, *args, **kwargs)
@classmethod
def from_service_account_file(cls, filename: str, *args, **kwargs):
@@ -170,7 +176,10 @@ def from_service_account_file(cls, filename: str, *args, **kwargs):
Returns:
BigtableInstanceAdminAsyncClient: The constructed client.
"""
- return BigtableInstanceAdminClient.from_service_account_file.__func__(BigtableInstanceAdminAsyncClient, filename, *args, **kwargs) # type: ignore
+ sa_file_func = (
+ BigtableInstanceAdminClient.from_service_account_file.__func__ # type: ignore
+ )
+ return sa_file_func(BigtableInstanceAdminAsyncClient, filename, *args, **kwargs)
from_service_account_json = from_service_account_file
@@ -208,7 +217,9 @@ def get_mtls_endpoint_and_cert_source(
Raises:
google.auth.exceptions.MutualTLSChannelError: If any errors happen.
"""
- return BigtableInstanceAdminClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore
+ return BigtableInstanceAdminClient.get_mtls_endpoint_and_cert_source(
+ client_options
+ ) # type: ignore
@property
def transport(self) -> BigtableInstanceAdminTransport:
@@ -220,7 +231,7 @@ def transport(self) -> BigtableInstanceAdminTransport:
return self._client.transport
@property
- def api_endpoint(self):
+ def api_endpoint(self) -> str:
"""Return the API endpoint used by the client instance.
Returns:
@@ -2509,7 +2520,7 @@ async def get_iam_policy(
# client as shown in:
# https://googleapis.dev/python/google-api-core/latest/client_options.html
from google.cloud import bigtable_admin_v2
- from google.iam.v1 import iam_policy_pb2 # type: ignore
+ import google.iam.v1.iam_policy_pb2 as iam_policy_pb2 # type: ignore
async def sample_get_iam_policy():
# Create a client
@@ -2648,7 +2659,7 @@ async def set_iam_policy(
# client as shown in:
# https://googleapis.dev/python/google-api-core/latest/client_options.html
from google.cloud import bigtable_admin_v2
- from google.iam.v1 import iam_policy_pb2 # type: ignore
+ import google.iam.v1.iam_policy_pb2 as iam_policy_pb2 # type: ignore
async def sample_set_iam_policy():
# Create a client
@@ -2788,7 +2799,7 @@ async def test_iam_permissions(
# client as shown in:
# https://googleapis.dev/python/google-api-core/latest/client_options.html
from google.cloud import bigtable_admin_v2
- from google.iam.v1 import iam_policy_pb2 # type: ignore
+ import google.iam.v1.iam_policy_pb2 as iam_policy_pb2 # type: ignore
async def sample_test_iam_permissions():
# Create a client
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/client.py b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/client.py
index 9d64108bb4a1..aeb1ea6e4948 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/client.py
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/client.py
@@ -13,15 +13,16 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
-from collections import OrderedDict
-from http import HTTPStatus
import json
import logging as std_logging
import os
import re
+import warnings
+from collections import OrderedDict
+from http import HTTPStatus
from typing import (
- Dict,
Callable,
+ Dict,
Mapping,
MutableMapping,
MutableSequence,
@@ -32,20 +33,19 @@
Union,
cast,
)
-import warnings
-
-from google.cloud.bigtable_admin_v2 import gapic_version as package_version
+import google.protobuf
from google.api_core import client_options as client_options_lib
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
from google.api_core import retry as retries
from google.auth import credentials as ga_credentials # type: ignore
+from google.auth.exceptions import MutualTLSChannelError # type: ignore
from google.auth.transport import mtls # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
-from google.auth.exceptions import MutualTLSChannelError # type: ignore
from google.oauth2 import service_account # type: ignore
-import google.protobuf
+
+from google.cloud.bigtable_admin_v2 import gapic_version as package_version
try:
OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None]
@@ -61,18 +61,22 @@
_LOGGER = std_logging.getLogger(__name__)
-from google.api_core import operation # type: ignore
-from google.api_core import operation_async # type: ignore
+import google.api_core.operation as operation # type: ignore
+import google.api_core.operation_async as operation_async # type: ignore
+import google.iam.v1.iam_policy_pb2 as iam_policy_pb2 # type: ignore
+import google.iam.v1.policy_pb2 as policy_pb2 # type: ignore
+import google.protobuf.field_mask_pb2 as field_mask_pb2 # type: ignore
+import google.protobuf.timestamp_pb2 as timestamp_pb2 # type: ignore
+
from google.cloud.bigtable_admin_v2.services.bigtable_instance_admin import pagers
-from google.cloud.bigtable_admin_v2.types import bigtable_instance_admin
-from google.cloud.bigtable_admin_v2.types import common
-from google.cloud.bigtable_admin_v2.types import instance
+from google.cloud.bigtable_admin_v2.types import (
+ bigtable_instance_admin,
+ common,
+ instance,
+)
from google.cloud.bigtable_admin_v2.types import instance as gba_instance
-from google.iam.v1 import iam_policy_pb2 # type: ignore
-from google.iam.v1 import policy_pb2 # type: ignore
-from google.protobuf import field_mask_pb2 # type: ignore
-from google.protobuf import timestamp_pb2 # type: ignore
-from .transports.base import BigtableInstanceAdminTransport, DEFAULT_CLIENT_INFO
+
+from .transports.base import DEFAULT_CLIENT_INFO, BigtableInstanceAdminTransport
from .transports.grpc import BigtableInstanceAdminGrpcTransport
from .transports.grpc_asyncio import BigtableInstanceAdminGrpcAsyncIOTransport
from .transports.rest import BigtableInstanceAdminRestTransport
@@ -86,9 +90,7 @@ class BigtableInstanceAdminClientMeta(type):
objects.
"""
- _transport_registry = (
- OrderedDict()
- ) # type: Dict[str, Type[BigtableInstanceAdminTransport]]
+ _transport_registry = OrderedDict() # type: Dict[str, Type[BigtableInstanceAdminTransport]]
_transport_registry["grpc"] = BigtableInstanceAdminGrpcTransport
_transport_registry["grpc_asyncio"] = BigtableInstanceAdminGrpcAsyncIOTransport
_transport_registry["rest"] = BigtableInstanceAdminRestTransport
@@ -123,7 +125,7 @@ class BigtableInstanceAdminClient(metaclass=BigtableInstanceAdminClientMeta):
"""
@staticmethod
- def _get_default_mtls_endpoint(api_endpoint):
+ def _get_default_mtls_endpoint(api_endpoint) -> Optional[str]:
"""Converts api endpoint to mTLS endpoint.
Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
@@ -131,7 +133,7 @@ def _get_default_mtls_endpoint(api_endpoint):
Args:
api_endpoint (Optional[str]): the api endpoint to convert.
Returns:
- str: converted mTLS api endpoint.
+ Optional[str]: converted mTLS api endpoint.
"""
if not api_endpoint:
return api_endpoint
@@ -141,6 +143,10 @@ def _get_default_mtls_endpoint(api_endpoint):
)
m = mtls_endpoint_re.match(api_endpoint)
+ if m is None:
+ # Could not parse api_endpoint; return as-is.
+ return api_endpoint
+
name, mtls, sandbox, googledomain = m.groups()
if mtls or not googledomain:
return api_endpoint
@@ -603,7 +609,7 @@ def _get_client_cert_source(provided_cert_source, use_cert_flag):
@staticmethod
def _get_api_endpoint(
api_override, client_cert_source, universe_domain, use_mtls_endpoint
- ):
+ ) -> str:
"""Return the API endpoint used by the client.
Args:
@@ -702,7 +708,7 @@ def _add_cred_info_for_auth_errors(
error._details.append(json.dumps(cred_info))
@property
- def api_endpoint(self):
+ def api_endpoint(self) -> str:
"""Return the API endpoint used by the client instance.
Returns:
@@ -793,18 +799,16 @@ def __init__(
universe_domain_opt = getattr(self._client_options, "universe_domain", None)
- (
- self._use_client_cert,
- self._use_mtls_endpoint,
- self._universe_domain_env,
- ) = BigtableInstanceAdminClient._read_environment_variables()
+ self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = (
+ BigtableInstanceAdminClient._read_environment_variables()
+ )
self._client_cert_source = BigtableInstanceAdminClient._get_client_cert_source(
self._client_options.client_cert_source, self._use_client_cert
)
self._universe_domain = BigtableInstanceAdminClient._get_universe_domain(
universe_domain_opt, self._universe_domain_env
)
- self._api_endpoint = None # updated below, depending on `transport`
+ self._api_endpoint: str = "" # updated below, depending on `transport`
# Initialize the universe domain validation.
self._is_universe_domain_valid = False
@@ -832,8 +836,7 @@ def __init__(
)
if self._client_options.scopes:
raise ValueError(
- "When providing a transport instance, provide its scopes "
- "directly."
+ "When providing a transport instance, provide its scopes directly."
)
self._transport = cast(BigtableInstanceAdminTransport, transport)
self._api_endpoint = self._transport.host
@@ -3029,7 +3032,7 @@ def get_iam_policy(
# client as shown in:
# https://googleapis.dev/python/google-api-core/latest/client_options.html
from google.cloud import bigtable_admin_v2
- from google.iam.v1 import iam_policy_pb2 # type: ignore
+ import google.iam.v1.iam_policy_pb2 as iam_policy_pb2 # type: ignore
def sample_get_iam_policy():
# Create a client
@@ -3169,7 +3172,7 @@ def set_iam_policy(
# client as shown in:
# https://googleapis.dev/python/google-api-core/latest/client_options.html
from google.cloud import bigtable_admin_v2
- from google.iam.v1 import iam_policy_pb2 # type: ignore
+ import google.iam.v1.iam_policy_pb2 as iam_policy_pb2 # type: ignore
def sample_set_iam_policy():
# Create a client
@@ -3310,7 +3313,7 @@ def test_iam_permissions(
# client as shown in:
# https://googleapis.dev/python/google-api-core/latest/client_options.html
from google.cloud import bigtable_admin_v2
- from google.iam.v1 import iam_policy_pb2 # type: ignore
+ import google.iam.v1.iam_policy_pb2 as iam_policy_pb2 # type: ignore
def sample_test_iam_permissions():
# Create a client
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/pagers.py b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/pagers.py
index ce5b67b27324..431aaf942390 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/pagers.py
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/pagers.py
@@ -13,21 +13,22 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
-from google.api_core import gapic_v1
-from google.api_core import retry as retries
-from google.api_core import retry_async as retries_async
from typing import (
Any,
AsyncIterator,
Awaitable,
Callable,
+ Iterator,
+ Optional,
Sequence,
Tuple,
- Optional,
- Iterator,
Union,
)
+from google.api_core import gapic_v1
+from google.api_core import retry as retries
+from google.api_core import retry_async as retries_async
+
try:
OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None]
OptionalAsyncRetry = Union[
@@ -37,8 +38,7 @@
OptionalRetry = Union[retries.Retry, object, None] # type: ignore
OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore
-from google.cloud.bigtable_admin_v2.types import bigtable_instance_admin
-from google.cloud.bigtable_admin_v2.types import instance
+from google.cloud.bigtable_admin_v2.types import bigtable_instance_admin, instance
class ListAppProfilesPager:
@@ -67,7 +67,7 @@ def __init__(
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
):
"""Instantiate the pager.
@@ -145,7 +145,7 @@ def __init__(
*,
retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
):
"""Instantiates the pager.
@@ -227,7 +227,7 @@ def __init__(
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
):
"""Instantiate the pager.
@@ -305,7 +305,7 @@ def __init__(
*,
retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
):
"""Instantiates the pager.
@@ -387,7 +387,7 @@ def __init__(
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
):
"""Instantiate the pager.
@@ -465,7 +465,7 @@ def __init__(
*,
retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
):
"""Instantiates the pager.
@@ -547,7 +547,7 @@ def __init__(
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
):
"""Instantiate the pager.
@@ -625,7 +625,7 @@ def __init__(
*,
retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
):
"""Instantiates the pager.
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/transports/README.rst b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/transports/README.rst
index 9a01ee7c3032..bf1871a63f9e 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/transports/README.rst
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/transports/README.rst
@@ -2,8 +2,9 @@
transport inheritance structure
_______________________________
-`BigtableInstanceAdminTransport` is the ABC for all transports.
-- public child `BigtableInstanceAdminGrpcTransport` for sync gRPC transport (defined in `grpc.py`).
-- public child `BigtableInstanceAdminGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`).
-- private child `_BaseBigtableInstanceAdminRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`).
-- public child `BigtableInstanceAdminRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`).
+``BigtableInstanceAdminTransport`` is the ABC for all transports.
+
+- public child ``BigtableInstanceAdminGrpcTransport`` for sync gRPC transport (defined in ``grpc.py``).
+- public child ``BigtableInstanceAdminGrpcAsyncIOTransport`` for async gRPC transport (defined in ``grpc_asyncio.py``).
+- private child ``_BaseBigtableInstanceAdminRestTransport`` for base REST transport with inner classes ``_BaseMETHOD`` (defined in ``rest_base.py``).
+- public child ``BigtableInstanceAdminRestTransport`` for sync REST transport with inner classes ``METHOD`` derived from the parent's corresponding ``_BaseMETHOD`` classes (defined in ``rest.py``).
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/transports/__init__.py b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/transports/__init__.py
index 021458f35945..28676517575f 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/transports/__init__.py
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/transports/__init__.py
@@ -19,14 +19,13 @@
from .base import BigtableInstanceAdminTransport
from .grpc import BigtableInstanceAdminGrpcTransport
from .grpc_asyncio import BigtableInstanceAdminGrpcAsyncIOTransport
-from .rest import BigtableInstanceAdminRestTransport
-from .rest import BigtableInstanceAdminRestInterceptor
-
+from .rest import (
+ BigtableInstanceAdminRestInterceptor,
+ BigtableInstanceAdminRestTransport,
+)
# Compile a registry of transports.
-_transport_registry = (
- OrderedDict()
-) # type: Dict[str, Type[BigtableInstanceAdminTransport]]
+_transport_registry = OrderedDict() # type: Dict[str, Type[BigtableInstanceAdminTransport]]
_transport_registry["grpc"] = BigtableInstanceAdminGrpcTransport
_transport_registry["grpc_asyncio"] = BigtableInstanceAdminGrpcAsyncIOTransport
_transport_registry["rest"] = BigtableInstanceAdminRestTransport
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/transports/base.py b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/transports/base.py
index 3a05dd6631ca..5882a27de7aa 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/transports/base.py
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/transports/base.py
@@ -16,24 +16,21 @@
import abc
from typing import Awaitable, Callable, Dict, Optional, Sequence, Union
-from google.cloud.bigtable_admin_v2 import gapic_version as package_version
-
-import google.auth # type: ignore
import google.api_core
+import google.auth # type: ignore
+import google.iam.v1.iam_policy_pb2 as iam_policy_pb2 # type: ignore
+import google.iam.v1.policy_pb2 as policy_pb2 # type: ignore
+import google.protobuf
+import google.protobuf.empty_pb2 as empty_pb2 # type: ignore
from google.api_core import exceptions as core_exceptions
-from google.api_core import gapic_v1
+from google.api_core import gapic_v1, operations_v1
from google.api_core import retry as retries
-from google.api_core import operations_v1
from google.auth import credentials as ga_credentials # type: ignore
+from google.longrunning import operations_pb2 # type: ignore
from google.oauth2 import service_account # type: ignore
-import google.protobuf
-from google.cloud.bigtable_admin_v2.types import bigtable_instance_admin
-from google.cloud.bigtable_admin_v2.types import instance
-from google.iam.v1 import iam_policy_pb2 # type: ignore
-from google.iam.v1 import policy_pb2 # type: ignore
-from google.longrunning import operations_pb2 # type: ignore
-from google.protobuf import empty_pb2 # type: ignore
+from google.cloud.bigtable_admin_v2 import gapic_version as package_version
+from google.cloud.bigtable_admin_v2.types import bigtable_instance_admin, instance
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
gapic_version=package_version.__version__
@@ -95,10 +92,12 @@ def __init__(
your own client library.
always_use_jwt_access (Optional[bool]): Whether self signed JWT should
be used for service account credentials.
+ api_audience (Optional[str]): The intended audience for the API calls
+ to the service that will be set when using certain 3rd party
+ authentication flows. Audience is typically a resource identifier.
+ If not set, the host value will be used as a default.
"""
- scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES}
-
# Save the scopes.
self._scopes = scopes
if not hasattr(self, "_ignore_credentials"):
@@ -113,11 +112,16 @@ def __init__(
if credentials_file is not None:
credentials, _ = google.auth.load_credentials_from_file(
- credentials_file, **scopes_kwargs, quota_project_id=quota_project_id
+ credentials_file,
+ scopes=scopes,
+ quota_project_id=quota_project_id,
+ default_scopes=self.AUTH_SCOPES,
)
elif credentials is None and not self._ignore_credentials:
credentials, _ = google.auth.default(
- **scopes_kwargs, quota_project_id=quota_project_id
+ scopes=scopes,
+ quota_project_id=quota_project_id,
+ default_scopes=self.AUTH_SCOPES,
)
# Don't apply audience if the credentials file passed from user.
if hasattr(credentials, "with_gdch_audience"):
@@ -141,6 +145,8 @@ def __init__(
host += ":443"
self._host = host
+ self._wrapped_methods: Dict[Callable, Callable] = {}
+
@property
def host(self):
return self._host
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/transports/grpc.py b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/transports/grpc.py
index d5d5cf1e53b9..f2cc6e40304f 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/transports/grpc.py
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/transports/grpc.py
@@ -19,25 +19,22 @@
import warnings
from typing import Callable, Dict, Optional, Sequence, Tuple, Union
-from google.api_core import grpc_helpers
-from google.api_core import operations_v1
-from google.api_core import gapic_v1
import google.auth # type: ignore
+import google.iam.v1.iam_policy_pb2 as iam_policy_pb2 # type: ignore
+import google.iam.v1.policy_pb2 as policy_pb2 # type: ignore
+import google.protobuf.empty_pb2 as empty_pb2 # type: ignore
+import google.protobuf.message
+import grpc # type: ignore
+import proto # type: ignore
+from google.api_core import gapic_v1, grpc_helpers, operations_v1
from google.auth import credentials as ga_credentials # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
+from google.longrunning import operations_pb2 # type: ignore
from google.protobuf.json_format import MessageToJson
-import google.protobuf.message
-import grpc # type: ignore
-import proto # type: ignore
+from google.cloud.bigtable_admin_v2.types import bigtable_instance_admin, instance
-from google.cloud.bigtable_admin_v2.types import bigtable_instance_admin
-from google.cloud.bigtable_admin_v2.types import instance
-from google.iam.v1 import iam_policy_pb2 # type: ignore
-from google.iam.v1 import policy_pb2 # type: ignore
-from google.longrunning import operations_pb2 # type: ignore
-from google.protobuf import empty_pb2 # type: ignore
-from .base import BigtableInstanceAdminTransport, DEFAULT_CLIENT_INFO
+from .base import DEFAULT_CLIENT_INFO, BigtableInstanceAdminTransport
try:
from google.api_core import client_logging # type: ignore
@@ -61,7 +58,7 @@ def intercept_unary_unary(self, continuation, client_call_details, request):
elif isinstance(request, google.protobuf.message.Message):
request_payload = MessageToJson(request)
else:
- request_payload = f"{type(request).__name__}: {pickle.dumps(request)}"
+ request_payload = f"{type(request).__name__}: {pickle.dumps(request)!r}"
request_metadata = {
key: value.decode("utf-8") if isinstance(value, bytes) else value
@@ -96,7 +93,7 @@ def intercept_unary_unary(self, continuation, client_call_details, request):
elif isinstance(result, google.protobuf.message.Message):
response_payload = MessageToJson(result)
else:
- response_payload = f"{type(result).__name__}: {pickle.dumps(result)}"
+ response_payload = f"{type(result).__name__}: {pickle.dumps(result)!r}"
grpc_response = {
"payload": response_payload,
"metadata": metadata,
@@ -194,6 +191,10 @@ def __init__(
your own client library.
always_use_jwt_access (Optional[bool]): Whether self signed JWT should
be used for service account credentials.
+ api_audience (Optional[str]): The intended audience for the API calls
+ to the service that will be set when using certain 3rd party
+ authentication flows. Audience is typically a resource identifier.
+ If not set, the host value will be used as a default.
Raises:
google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/transports/grpc_asyncio.py b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/transports/grpc_asyncio.py
index 7ce7627649fe..e444a9c07ad1 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/transports/grpc_asyncio.py
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/transports/grpc_asyncio.py
@@ -15,32 +15,29 @@
#
import inspect
import json
-import pickle
import logging as std_logging
+import pickle
import warnings
from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
-from google.api_core import gapic_v1
-from google.api_core import grpc_helpers_async
+import google.iam.v1.iam_policy_pb2 as iam_policy_pb2 # type: ignore
+import google.iam.v1.policy_pb2 as policy_pb2 # type: ignore
+import google.protobuf.empty_pb2 as empty_pb2 # type: ignore
+import google.protobuf.message
+import grpc # type: ignore
+import proto # type: ignore
from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1, grpc_helpers_async, operations_v1
from google.api_core import retry_async as retries
-from google.api_core import operations_v1
from google.auth import credentials as ga_credentials # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
+from google.longrunning import operations_pb2 # type: ignore
from google.protobuf.json_format import MessageToJson
-import google.protobuf.message
-
-import grpc # type: ignore
-import proto # type: ignore
from grpc.experimental import aio # type: ignore
-from google.cloud.bigtable_admin_v2.types import bigtable_instance_admin
-from google.cloud.bigtable_admin_v2.types import instance
-from google.iam.v1 import iam_policy_pb2 # type: ignore
-from google.iam.v1 import policy_pb2 # type: ignore
-from google.longrunning import operations_pb2 # type: ignore
-from google.protobuf import empty_pb2 # type: ignore
-from .base import BigtableInstanceAdminTransport, DEFAULT_CLIENT_INFO
+from google.cloud.bigtable_admin_v2.types import bigtable_instance_admin, instance
+
+from .base import DEFAULT_CLIENT_INFO, BigtableInstanceAdminTransport
from .grpc import BigtableInstanceAdminGrpcTransport
try:
@@ -67,7 +64,7 @@ async def intercept_unary_unary(self, continuation, client_call_details, request
elif isinstance(request, google.protobuf.message.Message):
request_payload = MessageToJson(request)
else:
- request_payload = f"{type(request).__name__}: {pickle.dumps(request)}"
+ request_payload = f"{type(request).__name__}: {pickle.dumps(request)!r}"
request_metadata = {
key: value.decode("utf-8") if isinstance(value, bytes) else value
@@ -102,7 +99,7 @@ async def intercept_unary_unary(self, continuation, client_call_details, request
elif isinstance(result, google.protobuf.message.Message):
response_payload = MessageToJson(result)
else:
- response_payload = f"{type(result).__name__}: {pickle.dumps(result)}"
+ response_payload = f"{type(result).__name__}: {pickle.dumps(result)!r}"
grpc_response = {
"payload": response_payload,
"metadata": metadata,
@@ -245,6 +242,10 @@ def __init__(
your own client library.
always_use_jwt_access (Optional[bool]): Whether self signed JWT should
be used for service account credentials.
+ api_audience (Optional[str]): The intended audience for the API calls
+ to the service that will be set when using certain 3rd party
+ authentication flows. Audience is typically a resource identifier.
+ If not set, the host value will be used as a default.
Raises:
google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/transports/rest.py b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/transports/rest.py
index 9879c4c45360..30f13fddd614 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/transports/rest.py
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/transports/rest.py
@@ -13,37 +13,29 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
-import logging
+import dataclasses
import json # type: ignore
+import logging
+import warnings
+from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union
-from google.auth.transport.requests import AuthorizedSession # type: ignore
-from google.auth import credentials as ga_credentials # type: ignore
+import google.iam.v1.iam_policy_pb2 as iam_policy_pb2 # type: ignore
+import google.iam.v1.policy_pb2 as policy_pb2 # type: ignore
+import google.protobuf
+import google.protobuf.empty_pb2 as empty_pb2 # type: ignore
from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1, operations_v1, rest_helpers, rest_streaming
from google.api_core import retry as retries
-from google.api_core import rest_helpers
-from google.api_core import rest_streaming
-from google.api_core import gapic_v1
-import google.protobuf
-
+from google.auth import credentials as ga_credentials # type: ignore
+from google.auth.transport.requests import AuthorizedSession # type: ignore
+from google.longrunning import operations_pb2 # type: ignore
from google.protobuf import json_format
-from google.api_core import operations_v1
-
from requests import __version__ as requests_version
-import dataclasses
-from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union
-import warnings
-
-from google.cloud.bigtable_admin_v2.types import bigtable_instance_admin
-from google.cloud.bigtable_admin_v2.types import instance
-from google.iam.v1 import iam_policy_pb2 # type: ignore
-from google.iam.v1 import policy_pb2 # type: ignore
-from google.protobuf import empty_pb2 # type: ignore
-from google.longrunning import operations_pb2 # type: ignore
+from google.cloud.bigtable_admin_v2.types import bigtable_instance_admin, instance
-
-from .rest_base import _BaseBigtableInstanceAdminRestTransport
from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO
+from .rest_base import _BaseBigtableInstanceAdminRestTransport
try:
OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None]
@@ -1740,6 +1732,12 @@ def __init__(
url_scheme: the protocol scheme for the API endpoint. Normally
"https", but for testing or local servers,
"http" can be specified.
+ interceptor (Optional[BigtableInstanceAdminRestInterceptor]): Interceptor used
+ to manipulate requests, request metadata, and responses.
+ api_audience (Optional[str]): The intended audience for the API calls
+ to the service that will be set when using certain 3rd party
+ authentication flows. Audience is typically a resource identifier.
+ If not set, the host value will be used as a default.
"""
# Run the base constructor
# TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
@@ -1874,9 +1872,7 @@ def __call__(
"""
- http_options = (
- _BaseBigtableInstanceAdminRestTransport._BaseCreateAppProfile._get_http_options()
- )
+ http_options = _BaseBigtableInstanceAdminRestTransport._BaseCreateAppProfile._get_http_options()
request, metadata = self._interceptor.pre_create_app_profile(
request, metadata
@@ -2033,9 +2029,7 @@ def __call__(
"""
- http_options = (
- _BaseBigtableInstanceAdminRestTransport._BaseCreateCluster._get_http_options()
- )
+ http_options = _BaseBigtableInstanceAdminRestTransport._BaseCreateCluster._get_http_options()
request, metadata = self._interceptor.pre_create_cluster(request, metadata)
transcoded_request = _BaseBigtableInstanceAdminRestTransport._BaseCreateCluster._get_transcoded_request(
@@ -2059,7 +2053,7 @@ def __call__(
)
method = transcoded_request["method"]
try:
- request_payload = json_format.MessageToJson(request)
+ request_payload = type(request).to_json(request)
except:
request_payload = None
http_request = {
@@ -2186,9 +2180,7 @@ def __call__(
"""
- http_options = (
- _BaseBigtableInstanceAdminRestTransport._BaseCreateInstance._get_http_options()
- )
+ http_options = _BaseBigtableInstanceAdminRestTransport._BaseCreateInstance._get_http_options()
request, metadata = self._interceptor.pre_create_instance(request, metadata)
transcoded_request = _BaseBigtableInstanceAdminRestTransport._BaseCreateInstance._get_transcoded_request(
@@ -2212,7 +2204,7 @@ def __call__(
)
method = transcoded_request["method"]
try:
- request_payload = json_format.MessageToJson(request)
+ request_payload = type(request).to_json(request)
except:
request_payload = None
http_request = {
@@ -2339,9 +2331,7 @@ def __call__(
"""
- http_options = (
- _BaseBigtableInstanceAdminRestTransport._BaseCreateLogicalView._get_http_options()
- )
+ http_options = _BaseBigtableInstanceAdminRestTransport._BaseCreateLogicalView._get_http_options()
request, metadata = self._interceptor.pre_create_logical_view(
request, metadata
@@ -2367,7 +2357,7 @@ def __call__(
)
method = transcoded_request["method"]
try:
- request_payload = json_format.MessageToJson(request)
+ request_payload = type(request).to_json(request)
except:
request_payload = None
http_request = {
@@ -2496,9 +2486,7 @@ def __call__(
"""
- http_options = (
- _BaseBigtableInstanceAdminRestTransport._BaseCreateMaterializedView._get_http_options()
- )
+ http_options = _BaseBigtableInstanceAdminRestTransport._BaseCreateMaterializedView._get_http_options()
request, metadata = self._interceptor.pre_create_materialized_view(
request, metadata
@@ -2524,7 +2512,7 @@ def __call__(
)
method = transcoded_request["method"]
try:
- request_payload = json_format.MessageToJson(request)
+ request_payload = type(request).to_json(request)
except:
request_payload = None
http_request = {
@@ -2643,9 +2631,7 @@ def __call__(
be of type `bytes`.
"""
- http_options = (
- _BaseBigtableInstanceAdminRestTransport._BaseDeleteAppProfile._get_http_options()
- )
+ http_options = _BaseBigtableInstanceAdminRestTransport._BaseDeleteAppProfile._get_http_options()
request, metadata = self._interceptor.pre_delete_app_profile(
request, metadata
@@ -2667,7 +2653,7 @@ def __call__(
)
method = transcoded_request["method"]
try:
- request_payload = json_format.MessageToJson(request)
+ request_payload = type(request).to_json(request)
except:
request_payload = None
http_request = {
@@ -2755,9 +2741,7 @@ def __call__(
be of type `bytes`.
"""
- http_options = (
- _BaseBigtableInstanceAdminRestTransport._BaseDeleteCluster._get_http_options()
- )
+ http_options = _BaseBigtableInstanceAdminRestTransport._BaseDeleteCluster._get_http_options()
request, metadata = self._interceptor.pre_delete_cluster(request, metadata)
transcoded_request = _BaseBigtableInstanceAdminRestTransport._BaseDeleteCluster._get_transcoded_request(
@@ -2777,7 +2761,7 @@ def __call__(
)
method = transcoded_request["method"]
try:
- request_payload = json_format.MessageToJson(request)
+ request_payload = type(request).to_json(request)
except:
request_payload = None
http_request = {
@@ -2863,9 +2847,7 @@ def __call__(
be of type `bytes`.
"""
- http_options = (
- _BaseBigtableInstanceAdminRestTransport._BaseDeleteInstance._get_http_options()
- )
+ http_options = _BaseBigtableInstanceAdminRestTransport._BaseDeleteInstance._get_http_options()
request, metadata = self._interceptor.pre_delete_instance(request, metadata)
transcoded_request = _BaseBigtableInstanceAdminRestTransport._BaseDeleteInstance._get_transcoded_request(
@@ -2885,7 +2867,7 @@ def __call__(
)
method = transcoded_request["method"]
try:
- request_payload = json_format.MessageToJson(request)
+ request_payload = type(request).to_json(request)
except:
request_payload = None
http_request = {
@@ -2971,9 +2953,7 @@ def __call__(
be of type `bytes`.
"""
- http_options = (
- _BaseBigtableInstanceAdminRestTransport._BaseDeleteLogicalView._get_http_options()
- )
+ http_options = _BaseBigtableInstanceAdminRestTransport._BaseDeleteLogicalView._get_http_options()
request, metadata = self._interceptor.pre_delete_logical_view(
request, metadata
@@ -2995,7 +2975,7 @@ def __call__(
)
method = transcoded_request["method"]
try:
- request_payload = json_format.MessageToJson(request)
+ request_payload = type(request).to_json(request)
except:
request_payload = None
http_request = {
@@ -3083,9 +3063,7 @@ def __call__(
be of type `bytes`.
"""
- http_options = (
- _BaseBigtableInstanceAdminRestTransport._BaseDeleteMaterializedView._get_http_options()
- )
+ http_options = _BaseBigtableInstanceAdminRestTransport._BaseDeleteMaterializedView._get_http_options()
request, metadata = self._interceptor.pre_delete_materialized_view(
request, metadata
@@ -3107,7 +3085,7 @@ def __call__(
)
method = transcoded_request["method"]
try:
- request_payload = json_format.MessageToJson(request)
+ request_payload = type(request).to_json(request)
except:
request_payload = None
http_request = {
@@ -3200,9 +3178,7 @@ def __call__(
"""
- http_options = (
- _BaseBigtableInstanceAdminRestTransport._BaseGetAppProfile._get_http_options()
- )
+ http_options = _BaseBigtableInstanceAdminRestTransport._BaseGetAppProfile._get_http_options()
request, metadata = self._interceptor.pre_get_app_profile(request, metadata)
transcoded_request = _BaseBigtableInstanceAdminRestTransport._BaseGetAppProfile._get_transcoded_request(
@@ -3350,9 +3326,7 @@ def __call__(
"""
- http_options = (
- _BaseBigtableInstanceAdminRestTransport._BaseGetCluster._get_http_options()
- )
+ http_options = _BaseBigtableInstanceAdminRestTransport._BaseGetCluster._get_http_options()
request, metadata = self._interceptor.pre_get_cluster(request, metadata)
transcoded_request = _BaseBigtableInstanceAdminRestTransport._BaseGetCluster._get_transcoded_request(
@@ -3571,9 +3545,7 @@ def __call__(
"""
- http_options = (
- _BaseBigtableInstanceAdminRestTransport._BaseGetIamPolicy._get_http_options()
- )
+ http_options = _BaseBigtableInstanceAdminRestTransport._BaseGetIamPolicy._get_http_options()
request, metadata = self._interceptor.pre_get_iam_policy(request, metadata)
transcoded_request = _BaseBigtableInstanceAdminRestTransport._BaseGetIamPolicy._get_transcoded_request(
@@ -3728,9 +3700,7 @@ def __call__(
"""
- http_options = (
- _BaseBigtableInstanceAdminRestTransport._BaseGetInstance._get_http_options()
- )
+ http_options = _BaseBigtableInstanceAdminRestTransport._BaseGetInstance._get_http_options()
request, metadata = self._interceptor.pre_get_instance(request, metadata)
transcoded_request = _BaseBigtableInstanceAdminRestTransport._BaseGetInstance._get_transcoded_request(
@@ -3876,9 +3846,7 @@ def __call__(
"""
- http_options = (
- _BaseBigtableInstanceAdminRestTransport._BaseGetLogicalView._get_http_options()
- )
+ http_options = _BaseBigtableInstanceAdminRestTransport._BaseGetLogicalView._get_http_options()
request, metadata = self._interceptor.pre_get_logical_view(
request, metadata
@@ -4026,9 +3994,7 @@ def __call__(
"""
- http_options = (
- _BaseBigtableInstanceAdminRestTransport._BaseGetMaterializedView._get_http_options()
- )
+ http_options = _BaseBigtableInstanceAdminRestTransport._BaseGetMaterializedView._get_http_options()
request, metadata = self._interceptor.pre_get_materialized_view(
request, metadata
@@ -4178,9 +4144,7 @@ def __call__(
"""
- http_options = (
- _BaseBigtableInstanceAdminRestTransport._BaseListAppProfiles._get_http_options()
- )
+ http_options = _BaseBigtableInstanceAdminRestTransport._BaseListAppProfiles._get_http_options()
request, metadata = self._interceptor.pre_list_app_profiles(
request, metadata
@@ -4334,9 +4298,7 @@ def __call__(
"""
- http_options = (
- _BaseBigtableInstanceAdminRestTransport._BaseListClusters._get_http_options()
- )
+ http_options = _BaseBigtableInstanceAdminRestTransport._BaseListClusters._get_http_options()
request, metadata = self._interceptor.pre_list_clusters(request, metadata)
transcoded_request = _BaseBigtableInstanceAdminRestTransport._BaseListClusters._get_transcoded_request(
@@ -4484,9 +4446,7 @@ def __call__(
"""
- http_options = (
- _BaseBigtableInstanceAdminRestTransport._BaseListHotTablets._get_http_options()
- )
+ http_options = _BaseBigtableInstanceAdminRestTransport._BaseListHotTablets._get_http_options()
request, metadata = self._interceptor.pre_list_hot_tablets(
request, metadata
@@ -4636,9 +4596,7 @@ def __call__(
"""
- http_options = (
- _BaseBigtableInstanceAdminRestTransport._BaseListInstances._get_http_options()
- )
+ http_options = _BaseBigtableInstanceAdminRestTransport._BaseListInstances._get_http_options()
request, metadata = self._interceptor.pre_list_instances(request, metadata)
transcoded_request = _BaseBigtableInstanceAdminRestTransport._BaseListInstances._get_transcoded_request(
@@ -4786,9 +4744,7 @@ def __call__(
"""
- http_options = (
- _BaseBigtableInstanceAdminRestTransport._BaseListLogicalViews._get_http_options()
- )
+ http_options = _BaseBigtableInstanceAdminRestTransport._BaseListLogicalViews._get_http_options()
request, metadata = self._interceptor.pre_list_logical_views(
request, metadata
@@ -4942,9 +4898,7 @@ def __call__(
"""
- http_options = (
- _BaseBigtableInstanceAdminRestTransport._BaseListMaterializedViews._get_http_options()
- )
+ http_options = _BaseBigtableInstanceAdminRestTransport._BaseListMaterializedViews._get_http_options()
request, metadata = self._interceptor.pre_list_materialized_views(
request, metadata
@@ -5100,9 +5054,7 @@ def __call__(
"""
- http_options = (
- _BaseBigtableInstanceAdminRestTransport._BasePartialUpdateCluster._get_http_options()
- )
+ http_options = _BaseBigtableInstanceAdminRestTransport._BasePartialUpdateCluster._get_http_options()
request, metadata = self._interceptor.pre_partial_update_cluster(
request, metadata
@@ -5128,7 +5080,7 @@ def __call__(
)
method = transcoded_request["method"]
try:
- request_payload = json_format.MessageToJson(request)
+ request_payload = type(request).to_json(request)
except:
request_payload = None
http_request = {
@@ -5257,9 +5209,7 @@ def __call__(
"""
- http_options = (
- _BaseBigtableInstanceAdminRestTransport._BasePartialUpdateInstance._get_http_options()
- )
+ http_options = _BaseBigtableInstanceAdminRestTransport._BasePartialUpdateInstance._get_http_options()
request, metadata = self._interceptor.pre_partial_update_instance(
request, metadata
@@ -5285,7 +5235,7 @@ def __call__(
)
method = transcoded_request["method"]
try:
- request_payload = json_format.MessageToJson(request)
+ request_payload = type(request).to_json(request)
except:
request_payload = None
http_request = {
@@ -5485,9 +5435,7 @@ def __call__(
"""
- http_options = (
- _BaseBigtableInstanceAdminRestTransport._BaseSetIamPolicy._get_http_options()
- )
+ http_options = _BaseBigtableInstanceAdminRestTransport._BaseSetIamPolicy._get_http_options()
request, metadata = self._interceptor.pre_set_iam_policy(request, metadata)
transcoded_request = _BaseBigtableInstanceAdminRestTransport._BaseSetIamPolicy._get_transcoded_request(
@@ -5636,9 +5584,7 @@ def __call__(
Response message for ``TestIamPermissions`` method.
"""
- http_options = (
- _BaseBigtableInstanceAdminRestTransport._BaseTestIamPermissions._get_http_options()
- )
+ http_options = _BaseBigtableInstanceAdminRestTransport._BaseTestIamPermissions._get_http_options()
request, metadata = self._interceptor.pre_test_iam_permissions(
request, metadata
@@ -5795,9 +5741,7 @@ def __call__(
"""
- http_options = (
- _BaseBigtableInstanceAdminRestTransport._BaseUpdateAppProfile._get_http_options()
- )
+ http_options = _BaseBigtableInstanceAdminRestTransport._BaseUpdateAppProfile._get_http_options()
request, metadata = self._interceptor.pre_update_app_profile(
request, metadata
@@ -5823,7 +5767,7 @@ def __call__(
)
method = transcoded_request["method"]
try:
- request_payload = json_format.MessageToJson(request)
+ request_payload = type(request).to_json(request)
except:
request_payload = None
http_request = {
@@ -5954,9 +5898,7 @@ def __call__(
"""
- http_options = (
- _BaseBigtableInstanceAdminRestTransport._BaseUpdateCluster._get_http_options()
- )
+ http_options = _BaseBigtableInstanceAdminRestTransport._BaseUpdateCluster._get_http_options()
request, metadata = self._interceptor.pre_update_cluster(request, metadata)
transcoded_request = _BaseBigtableInstanceAdminRestTransport._BaseUpdateCluster._get_transcoded_request(
@@ -5980,7 +5922,7 @@ def __call__(
)
method = transcoded_request["method"]
try:
- request_payload = json_format.MessageToJson(request)
+ request_payload = type(request).to_json(request)
except:
request_payload = None
http_request = {
@@ -6114,9 +6056,7 @@ def __call__(
"""
- http_options = (
- _BaseBigtableInstanceAdminRestTransport._BaseUpdateInstance._get_http_options()
- )
+ http_options = _BaseBigtableInstanceAdminRestTransport._BaseUpdateInstance._get_http_options()
request, metadata = self._interceptor.pre_update_instance(request, metadata)
transcoded_request = _BaseBigtableInstanceAdminRestTransport._BaseUpdateInstance._get_transcoded_request(
@@ -6269,9 +6209,7 @@ def __call__(
"""
- http_options = (
- _BaseBigtableInstanceAdminRestTransport._BaseUpdateLogicalView._get_http_options()
- )
+ http_options = _BaseBigtableInstanceAdminRestTransport._BaseUpdateLogicalView._get_http_options()
request, metadata = self._interceptor.pre_update_logical_view(
request, metadata
@@ -6297,7 +6235,7 @@ def __call__(
)
method = transcoded_request["method"]
try:
- request_payload = json_format.MessageToJson(request)
+ request_payload = type(request).to_json(request)
except:
request_payload = None
http_request = {
@@ -6426,9 +6364,7 @@ def __call__(
"""
- http_options = (
- _BaseBigtableInstanceAdminRestTransport._BaseUpdateMaterializedView._get_http_options()
- )
+ http_options = _BaseBigtableInstanceAdminRestTransport._BaseUpdateMaterializedView._get_http_options()
request, metadata = self._interceptor.pre_update_materialized_view(
request, metadata
@@ -6454,7 +6390,7 @@ def __call__(
)
method = transcoded_request["method"]
try:
- request_payload = json_format.MessageToJson(request)
+ request_payload = type(request).to_json(request)
except:
request_payload = None
http_request = {
@@ -6570,7 +6506,9 @@ def create_materialized_view(
]:
# The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
# In C++ this would require a dynamic_cast
- return self._CreateMaterializedView(self._session, self._host, self._interceptor) # type: ignore
+ return self._CreateMaterializedView(
+ self._session, self._host, self._interceptor
+ ) # type: ignore
@property
def delete_app_profile(
@@ -6612,7 +6550,9 @@ def delete_materialized_view(
]:
# The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
# In C++ this would require a dynamic_cast
- return self._DeleteMaterializedView(self._session, self._host, self._interceptor) # type: ignore
+ return self._DeleteMaterializedView(
+ self._session, self._host, self._interceptor
+ ) # type: ignore
@property
def get_app_profile(
@@ -6812,7 +6752,9 @@ def update_materialized_view(
]:
# The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
# In C++ this would require a dynamic_cast
- return self._UpdateMaterializedView(self._session, self._host, self._interceptor) # type: ignore
+ return self._UpdateMaterializedView(
+ self._session, self._host, self._interceptor
+ ) # type: ignore
@property
def kind(self) -> str:
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/transports/rest_base.py b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/transports/rest_base.py
index 9855756b8ee3..991f8400a905 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/transports/rest_base.py
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/transports/rest_base.py
@@ -14,22 +14,19 @@
# limitations under the License.
#
import json # type: ignore
-from google.api_core import path_template
-from google.api_core import gapic_v1
-
-from google.protobuf import json_format
-from .base import BigtableInstanceAdminTransport, DEFAULT_CLIENT_INFO
-
import re
from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union
-
-from google.cloud.bigtable_admin_v2.types import bigtable_instance_admin
-from google.cloud.bigtable_admin_v2.types import instance
-from google.iam.v1 import iam_policy_pb2 # type: ignore
-from google.iam.v1 import policy_pb2 # type: ignore
-from google.protobuf import empty_pb2 # type: ignore
+import google.iam.v1.iam_policy_pb2 as iam_policy_pb2 # type: ignore
+import google.iam.v1.policy_pb2 as policy_pb2 # type: ignore
+import google.protobuf.empty_pb2 as empty_pb2 # type: ignore
+from google.api_core import gapic_v1, path_template
from google.longrunning import operations_pb2 # type: ignore
+from google.protobuf import json_format
+
+from google.cloud.bigtable_admin_v2.types import bigtable_instance_admin, instance
+
+from .base import DEFAULT_CLIENT_INFO, BigtableInstanceAdminTransport
class _BaseBigtableInstanceAdminRestTransport(BigtableInstanceAdminTransport):
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/__init__.py b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/__init__.py
index c5e8544d6423..c709fd07b84c 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/__init__.py
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/__init__.py
@@ -13,8 +13,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
-from .client import BaseBigtableTableAdminClient
from .async_client import BaseBigtableTableAdminAsyncClient
+from .client import BaseBigtableTableAdminClient
__all__ = (
"BaseBigtableTableAdminClient",
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/async_client.py b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/async_client.py
index 7f772c87c77f..91f5a9042485 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/async_client.py
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/async_client.py
@@ -14,11 +14,11 @@
# limitations under the License.
#
import logging as std_logging
-from collections import OrderedDict
import re
+from collections import OrderedDict
from typing import (
- Dict,
Callable,
+ Dict,
Mapping,
MutableMapping,
MutableSequence,
@@ -29,36 +29,35 @@
Union,
)
-from google.cloud.bigtable_admin_v2 import gapic_version as package_version
-
-from google.api_core.client_options import ClientOptions
+import google.protobuf
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
from google.api_core import retry_async as retries
+from google.api_core.client_options import ClientOptions
from google.auth import credentials as ga_credentials # type: ignore
from google.oauth2 import service_account # type: ignore
-import google.protobuf
+from google.cloud.bigtable_admin_v2 import gapic_version as package_version
try:
OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None]
except AttributeError: # pragma: NO COVER
OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore
-from google.api_core import operation # type: ignore
-from google.api_core import operation_async # type: ignore
+import google.api_core.operation as operation # type: ignore
+import google.api_core.operation_async as operation_async # type: ignore
+import google.iam.v1.iam_policy_pb2 as iam_policy_pb2 # type: ignore
+import google.iam.v1.policy_pb2 as policy_pb2 # type: ignore
+import google.protobuf.field_mask_pb2 as field_mask_pb2 # type: ignore
+import google.protobuf.timestamp_pb2 as timestamp_pb2 # type: ignore
+
from google.cloud.bigtable_admin_v2.services.bigtable_table_admin import pagers
-from google.cloud.bigtable_admin_v2.types import bigtable_table_admin
-from google.cloud.bigtable_admin_v2.types import table
+from google.cloud.bigtable_admin_v2.types import bigtable_table_admin, table, types
from google.cloud.bigtable_admin_v2.types import table as gba_table
-from google.cloud.bigtable_admin_v2.types import types
-from google.iam.v1 import iam_policy_pb2 # type: ignore
-from google.iam.v1 import policy_pb2 # type: ignore
-from google.protobuf import field_mask_pb2 # type: ignore
-from google.protobuf import timestamp_pb2 # type: ignore
-from .transports.base import BigtableTableAdminTransport, DEFAULT_CLIENT_INFO
-from .transports.grpc_asyncio import BigtableTableAdminGrpcAsyncIOTransport
+
from .client import BaseBigtableTableAdminClient
+from .transports.base import DEFAULT_CLIENT_INFO, BigtableTableAdminTransport
+from .transports.grpc_asyncio import BigtableTableAdminGrpcAsyncIOTransport
try:
from google.api_core import client_logging # type: ignore
@@ -153,7 +152,10 @@ def from_service_account_info(cls, info: dict, *args, **kwargs):
Returns:
BaseBigtableTableAdminAsyncClient: The constructed client.
"""
- return BaseBigtableTableAdminClient.from_service_account_info.__func__(BaseBigtableTableAdminAsyncClient, info, *args, **kwargs) # type: ignore
+ sa_info_func = (
+ BaseBigtableTableAdminClient.from_service_account_info.__func__ # type: ignore
+ )
+ return sa_info_func(BaseBigtableTableAdminAsyncClient, info, *args, **kwargs)
@classmethod
def from_service_account_file(cls, filename: str, *args, **kwargs):
@@ -169,7 +171,12 @@ def from_service_account_file(cls, filename: str, *args, **kwargs):
Returns:
BaseBigtableTableAdminAsyncClient: The constructed client.
"""
- return BaseBigtableTableAdminClient.from_service_account_file.__func__(BaseBigtableTableAdminAsyncClient, filename, *args, **kwargs) # type: ignore
+ sa_file_func = (
+ BaseBigtableTableAdminClient.from_service_account_file.__func__ # type: ignore
+ )
+ return sa_file_func(
+ BaseBigtableTableAdminAsyncClient, filename, *args, **kwargs
+ )
from_service_account_json = from_service_account_file
@@ -207,7 +214,9 @@ def get_mtls_endpoint_and_cert_source(
Raises:
google.auth.exceptions.MutualTLSChannelError: If any errors happen.
"""
- return BaseBigtableTableAdminClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore
+ return BaseBigtableTableAdminClient.get_mtls_endpoint_and_cert_source(
+ client_options
+ ) # type: ignore
@property
def transport(self) -> BigtableTableAdminTransport:
@@ -219,7 +228,7 @@ def transport(self) -> BigtableTableAdminTransport:
return self._client.transport
@property
- def api_endpoint(self):
+ def api_endpoint(self) -> str:
"""Return the API endpoint used by the client instance.
Returns:
@@ -3915,7 +3924,7 @@ async def get_iam_policy(
# client as shown in:
# https://googleapis.dev/python/google-api-core/latest/client_options.html
from google.cloud import bigtable_admin_v2
- from google.iam.v1 import iam_policy_pb2 # type: ignore
+ import google.iam.v1.iam_policy_pb2 as iam_policy_pb2 # type: ignore
async def sample_get_iam_policy():
# Create a client
@@ -4054,7 +4063,7 @@ async def set_iam_policy(
# client as shown in:
# https://googleapis.dev/python/google-api-core/latest/client_options.html
from google.cloud import bigtable_admin_v2
- from google.iam.v1 import iam_policy_pb2 # type: ignore
+ import google.iam.v1.iam_policy_pb2 as iam_policy_pb2 # type: ignore
async def sample_set_iam_policy():
# Create a client
@@ -4194,7 +4203,7 @@ async def test_iam_permissions(
# client as shown in:
# https://googleapis.dev/python/google-api-core/latest/client_options.html
from google.cloud import bigtable_admin_v2
- from google.iam.v1 import iam_policy_pb2 # type: ignore
+ import google.iam.v1.iam_policy_pb2 as iam_policy_pb2 # type: ignore
async def sample_test_iam_permissions():
# Create a client
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/client.py b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/client.py
index ce251db7d714..95f2716ed6e1 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/client.py
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/client.py
@@ -13,15 +13,16 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
-from collections import OrderedDict
-from http import HTTPStatus
import json
import logging as std_logging
import os
import re
+import warnings
+from collections import OrderedDict
+from http import HTTPStatus
from typing import (
- Dict,
Callable,
+ Dict,
Mapping,
MutableMapping,
MutableSequence,
@@ -32,20 +33,19 @@
Union,
cast,
)
-import warnings
-
-from google.cloud.bigtable_admin_v2 import gapic_version as package_version
+import google.protobuf
from google.api_core import client_options as client_options_lib
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
from google.api_core import retry as retries
from google.auth import credentials as ga_credentials # type: ignore
+from google.auth.exceptions import MutualTLSChannelError # type: ignore
from google.auth.transport import mtls # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
-from google.auth.exceptions import MutualTLSChannelError # type: ignore
from google.oauth2 import service_account # type: ignore
-import google.protobuf
+
+from google.cloud.bigtable_admin_v2 import gapic_version as package_version
try:
OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None]
@@ -61,18 +61,18 @@
_LOGGER = std_logging.getLogger(__name__)
-from google.api_core import operation # type: ignore
-from google.api_core import operation_async # type: ignore
+import google.api_core.operation as operation # type: ignore
+import google.api_core.operation_async as operation_async # type: ignore
+import google.iam.v1.iam_policy_pb2 as iam_policy_pb2 # type: ignore
+import google.iam.v1.policy_pb2 as policy_pb2 # type: ignore
+import google.protobuf.field_mask_pb2 as field_mask_pb2 # type: ignore
+import google.protobuf.timestamp_pb2 as timestamp_pb2 # type: ignore
+
from google.cloud.bigtable_admin_v2.services.bigtable_table_admin import pagers
-from google.cloud.bigtable_admin_v2.types import bigtable_table_admin
-from google.cloud.bigtable_admin_v2.types import table
+from google.cloud.bigtable_admin_v2.types import bigtable_table_admin, table, types
from google.cloud.bigtable_admin_v2.types import table as gba_table
-from google.cloud.bigtable_admin_v2.types import types
-from google.iam.v1 import iam_policy_pb2 # type: ignore
-from google.iam.v1 import policy_pb2 # type: ignore
-from google.protobuf import field_mask_pb2 # type: ignore
-from google.protobuf import timestamp_pb2 # type: ignore
-from .transports.base import BigtableTableAdminTransport, DEFAULT_CLIENT_INFO
+
+from .transports.base import DEFAULT_CLIENT_INFO, BigtableTableAdminTransport
from .transports.grpc import BigtableTableAdminGrpcTransport
from .transports.grpc_asyncio import BigtableTableAdminGrpcAsyncIOTransport
from .transports.rest import BigtableTableAdminRestTransport
@@ -86,9 +86,7 @@ class BaseBigtableTableAdminClientMeta(type):
objects.
"""
- _transport_registry = (
- OrderedDict()
- ) # type: Dict[str, Type[BigtableTableAdminTransport]]
+ _transport_registry = OrderedDict() # type: Dict[str, Type[BigtableTableAdminTransport]]
_transport_registry["grpc"] = BigtableTableAdminGrpcTransport
_transport_registry["grpc_asyncio"] = BigtableTableAdminGrpcAsyncIOTransport
_transport_registry["rest"] = BigtableTableAdminRestTransport
@@ -124,7 +122,7 @@ class BaseBigtableTableAdminClient(metaclass=BaseBigtableTableAdminClientMeta):
"""
@staticmethod
- def _get_default_mtls_endpoint(api_endpoint):
+ def _get_default_mtls_endpoint(api_endpoint) -> Optional[str]:
"""Converts api endpoint to mTLS endpoint.
Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
@@ -132,7 +130,7 @@ def _get_default_mtls_endpoint(api_endpoint):
Args:
api_endpoint (Optional[str]): the api endpoint to convert.
Returns:
- str: converted mTLS api endpoint.
+ Optional[str]: converted mTLS api endpoint.
"""
if not api_endpoint:
return api_endpoint
@@ -142,6 +140,10 @@ def _get_default_mtls_endpoint(api_endpoint):
)
m = mtls_endpoint_re.match(api_endpoint)
+ if m is None:
+ # Could not parse api_endpoint; return as-is.
+ return api_endpoint
+
name, mtls, sandbox, googledomain = m.groups()
if mtls or not googledomain:
return api_endpoint
@@ -610,7 +612,7 @@ def _get_client_cert_source(provided_cert_source, use_cert_flag):
@staticmethod
def _get_api_endpoint(
api_override, client_cert_source, universe_domain, use_mtls_endpoint
- ):
+ ) -> str:
"""Return the API endpoint used by the client.
Args:
@@ -709,7 +711,7 @@ def _add_cred_info_for_auth_errors(
error._details.append(json.dumps(cred_info))
@property
- def api_endpoint(self):
+ def api_endpoint(self) -> str:
"""Return the API endpoint used by the client instance.
Returns:
@@ -800,18 +802,16 @@ def __init__(
universe_domain_opt = getattr(self._client_options, "universe_domain", None)
- (
- self._use_client_cert,
- self._use_mtls_endpoint,
- self._universe_domain_env,
- ) = BaseBigtableTableAdminClient._read_environment_variables()
+ self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = (
+ BaseBigtableTableAdminClient._read_environment_variables()
+ )
self._client_cert_source = BaseBigtableTableAdminClient._get_client_cert_source(
self._client_options.client_cert_source, self._use_client_cert
)
self._universe_domain = BaseBigtableTableAdminClient._get_universe_domain(
universe_domain_opt, self._universe_domain_env
)
- self._api_endpoint = None # updated below, depending on `transport`
+ self._api_endpoint: str = "" # updated below, depending on `transport`
# Initialize the universe domain validation.
self._is_universe_domain_valid = False
@@ -839,8 +839,7 @@ def __init__(
)
if self._client_options.scopes:
raise ValueError(
- "When providing a transport instance, provide its scopes "
- "directly."
+ "When providing a transport instance, provide its scopes directly."
)
self._transport = cast(BigtableTableAdminTransport, transport)
self._api_endpoint = self._transport.host
@@ -4418,7 +4417,7 @@ def get_iam_policy(
# client as shown in:
# https://googleapis.dev/python/google-api-core/latest/client_options.html
from google.cloud import bigtable_admin_v2
- from google.iam.v1 import iam_policy_pb2 # type: ignore
+ import google.iam.v1.iam_policy_pb2 as iam_policy_pb2 # type: ignore
def sample_get_iam_policy():
# Create a client
@@ -4558,7 +4557,7 @@ def set_iam_policy(
# client as shown in:
# https://googleapis.dev/python/google-api-core/latest/client_options.html
from google.cloud import bigtable_admin_v2
- from google.iam.v1 import iam_policy_pb2 # type: ignore
+ import google.iam.v1.iam_policy_pb2 as iam_policy_pb2 # type: ignore
def sample_set_iam_policy():
# Create a client
@@ -4699,7 +4698,7 @@ def test_iam_permissions(
# client as shown in:
# https://googleapis.dev/python/google-api-core/latest/client_options.html
from google.cloud import bigtable_admin_v2
- from google.iam.v1 import iam_policy_pb2 # type: ignore
+ import google.iam.v1.iam_policy_pb2 as iam_policy_pb2 # type: ignore
def sample_test_iam_permissions():
# Create a client
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/pagers.py b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/pagers.py
index e6d83ba63a0e..9141d9c64505 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/pagers.py
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/pagers.py
@@ -13,21 +13,22 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
-from google.api_core import gapic_v1
-from google.api_core import retry as retries
-from google.api_core import retry_async as retries_async
from typing import (
Any,
AsyncIterator,
Awaitable,
Callable,
+ Iterator,
+ Optional,
Sequence,
Tuple,
- Optional,
- Iterator,
Union,
)
+from google.api_core import gapic_v1
+from google.api_core import retry as retries
+from google.api_core import retry_async as retries_async
+
try:
OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None]
OptionalAsyncRetry = Union[
@@ -37,8 +38,7 @@
OptionalRetry = Union[retries.Retry, object, None] # type: ignore
OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore
-from google.cloud.bigtable_admin_v2.types import bigtable_table_admin
-from google.cloud.bigtable_admin_v2.types import table
+from google.cloud.bigtable_admin_v2.types import bigtable_table_admin, table
class ListTablesPager:
@@ -67,7 +67,7 @@ def __init__(
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
):
"""Instantiate the pager.
@@ -143,7 +143,7 @@ def __init__(
*,
retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
):
"""Instantiates the pager.
@@ -223,7 +223,7 @@ def __init__(
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
):
"""Instantiate the pager.
@@ -301,7 +301,7 @@ def __init__(
*,
retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
):
"""Instantiates the pager.
@@ -383,7 +383,7 @@ def __init__(
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
):
"""Instantiate the pager.
@@ -459,7 +459,7 @@ def __init__(
*,
retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
):
"""Instantiates the pager.
@@ -539,7 +539,7 @@ def __init__(
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
):
"""Instantiate the pager.
@@ -615,7 +615,7 @@ def __init__(
*,
retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
):
"""Instantiates the pager.
@@ -695,7 +695,7 @@ def __init__(
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
):
"""Instantiate the pager.
@@ -773,7 +773,7 @@ def __init__(
*,
retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
):
"""Instantiates the pager.
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/transports/README.rst b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/transports/README.rst
index 0e8f40ec3a60..1e0bce38758d 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/transports/README.rst
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/transports/README.rst
@@ -2,8 +2,9 @@
transport inheritance structure
_______________________________
-`BigtableTableAdminTransport` is the ABC for all transports.
-- public child `BigtableTableAdminGrpcTransport` for sync gRPC transport (defined in `grpc.py`).
-- public child `BigtableTableAdminGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`).
-- private child `_BaseBigtableTableAdminRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`).
-- public child `BigtableTableAdminRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`).
+``BigtableTableAdminTransport`` is the ABC for all transports.
+
+- public child ``BigtableTableAdminGrpcTransport`` for sync gRPC transport (defined in ``grpc.py``).
+- public child ``BigtableTableAdminGrpcAsyncIOTransport`` for async gRPC transport (defined in ``grpc_asyncio.py``).
+- private child ``_BaseBigtableTableAdminRestTransport`` for base REST transport with inner classes ``_BaseMETHOD`` (defined in ``rest_base.py``).
+- public child ``BigtableTableAdminRestTransport`` for sync REST transport with inner classes ``METHOD`` derived from the parent's corresponding ``_BaseMETHOD`` classes (defined in ``rest.py``).
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/transports/__init__.py b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/transports/__init__.py
index e7621f781d0b..d1abd008f5c8 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/transports/__init__.py
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/transports/__init__.py
@@ -19,14 +19,10 @@
from .base import BigtableTableAdminTransport
from .grpc import BigtableTableAdminGrpcTransport
from .grpc_asyncio import BigtableTableAdminGrpcAsyncIOTransport
-from .rest import BigtableTableAdminRestTransport
-from .rest import BigtableTableAdminRestInterceptor
-
+from .rest import BigtableTableAdminRestInterceptor, BigtableTableAdminRestTransport
# Compile a registry of transports.
-_transport_registry = (
- OrderedDict()
-) # type: Dict[str, Type[BigtableTableAdminTransport]]
+_transport_registry = OrderedDict() # type: Dict[str, Type[BigtableTableAdminTransport]]
_transport_registry["grpc"] = BigtableTableAdminGrpcTransport
_transport_registry["grpc_asyncio"] = BigtableTableAdminGrpcAsyncIOTransport
_transport_registry["rest"] = BigtableTableAdminRestTransport
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/transports/base.py b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/transports/base.py
index 8ad08df3ffa0..164a36c8fdf9 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/transports/base.py
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/transports/base.py
@@ -16,25 +16,22 @@
import abc
from typing import Awaitable, Callable, Dict, Optional, Sequence, Union
-from google.cloud.bigtable_admin_v2 import gapic_version as package_version
-
-import google.auth # type: ignore
import google.api_core
+import google.auth # type: ignore
+import google.iam.v1.iam_policy_pb2 as iam_policy_pb2 # type: ignore
+import google.iam.v1.policy_pb2 as policy_pb2 # type: ignore
+import google.protobuf
+import google.protobuf.empty_pb2 as empty_pb2 # type: ignore
from google.api_core import exceptions as core_exceptions
-from google.api_core import gapic_v1
+from google.api_core import gapic_v1, operations_v1
from google.api_core import retry as retries
-from google.api_core import operations_v1
from google.auth import credentials as ga_credentials # type: ignore
+from google.longrunning import operations_pb2 # type: ignore
from google.oauth2 import service_account # type: ignore
-import google.protobuf
-from google.cloud.bigtable_admin_v2.types import bigtable_table_admin
-from google.cloud.bigtable_admin_v2.types import table
+from google.cloud.bigtable_admin_v2 import gapic_version as package_version
+from google.cloud.bigtable_admin_v2.types import bigtable_table_admin, table
from google.cloud.bigtable_admin_v2.types import table as gba_table
-from google.iam.v1 import iam_policy_pb2 # type: ignore
-from google.iam.v1 import policy_pb2 # type: ignore
-from google.longrunning import operations_pb2 # type: ignore
-from google.protobuf import empty_pb2 # type: ignore
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
gapic_version=package_version.__version__
@@ -95,10 +92,12 @@ def __init__(
your own client library.
always_use_jwt_access (Optional[bool]): Whether self signed JWT should
be used for service account credentials.
+ api_audience (Optional[str]): The intended audience for the API calls
+ to the service that will be set when using certain 3rd party
+ authentication flows. Audience is typically a resource identifier.
+ If not set, the host value will be used as a default.
"""
- scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES}
-
# Save the scopes.
self._scopes = scopes
if not hasattr(self, "_ignore_credentials"):
@@ -113,11 +112,16 @@ def __init__(
if credentials_file is not None:
credentials, _ = google.auth.load_credentials_from_file(
- credentials_file, **scopes_kwargs, quota_project_id=quota_project_id
+ credentials_file,
+ scopes=scopes,
+ quota_project_id=quota_project_id,
+ default_scopes=self.AUTH_SCOPES,
)
elif credentials is None and not self._ignore_credentials:
credentials, _ = google.auth.default(
- **scopes_kwargs, quota_project_id=quota_project_id
+ scopes=scopes,
+ quota_project_id=quota_project_id,
+ default_scopes=self.AUTH_SCOPES,
)
# Don't apply audience if the credentials file passed from user.
if hasattr(credentials, "with_gdch_audience"):
@@ -141,6 +145,8 @@ def __init__(
host += ":443"
self._host = host
+ self._wrapped_methods: Dict[Callable, Callable] = {}
+
@property
def host(self):
return self._host
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/transports/grpc.py b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/transports/grpc.py
index f8d1058c8c32..3d39f4382ec6 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/transports/grpc.py
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/transports/grpc.py
@@ -19,26 +19,23 @@
import warnings
from typing import Callable, Dict, Optional, Sequence, Tuple, Union
-from google.api_core import grpc_helpers
-from google.api_core import operations_v1
-from google.api_core import gapic_v1
import google.auth # type: ignore
-from google.auth import credentials as ga_credentials # type: ignore
-from google.auth.transport.grpc import SslCredentials # type: ignore
-from google.protobuf.json_format import MessageToJson
+import google.iam.v1.iam_policy_pb2 as iam_policy_pb2 # type: ignore
+import google.iam.v1.policy_pb2 as policy_pb2 # type: ignore
+import google.protobuf.empty_pb2 as empty_pb2 # type: ignore
import google.protobuf.message
-
import grpc # type: ignore
import proto # type: ignore
+from google.api_core import gapic_v1, grpc_helpers, operations_v1
+from google.auth import credentials as ga_credentials # type: ignore
+from google.auth.transport.grpc import SslCredentials # type: ignore
+from google.longrunning import operations_pb2 # type: ignore
+from google.protobuf.json_format import MessageToJson
-from google.cloud.bigtable_admin_v2.types import bigtable_table_admin
-from google.cloud.bigtable_admin_v2.types import table
+from google.cloud.bigtable_admin_v2.types import bigtable_table_admin, table
from google.cloud.bigtable_admin_v2.types import table as gba_table
-from google.iam.v1 import iam_policy_pb2 # type: ignore
-from google.iam.v1 import policy_pb2 # type: ignore
-from google.longrunning import operations_pb2 # type: ignore
-from google.protobuf import empty_pb2 # type: ignore
-from .base import BigtableTableAdminTransport, DEFAULT_CLIENT_INFO
+
+from .base import DEFAULT_CLIENT_INFO, BigtableTableAdminTransport
try:
from google.api_core import client_logging # type: ignore
@@ -62,7 +59,7 @@ def intercept_unary_unary(self, continuation, client_call_details, request):
elif isinstance(request, google.protobuf.message.Message):
request_payload = MessageToJson(request)
else:
- request_payload = f"{type(request).__name__}: {pickle.dumps(request)}"
+ request_payload = f"{type(request).__name__}: {pickle.dumps(request)!r}"
request_metadata = {
key: value.decode("utf-8") if isinstance(value, bytes) else value
@@ -97,7 +94,7 @@ def intercept_unary_unary(self, continuation, client_call_details, request):
elif isinstance(result, google.protobuf.message.Message):
response_payload = MessageToJson(result)
else:
- response_payload = f"{type(result).__name__}: {pickle.dumps(result)}"
+ response_payload = f"{type(result).__name__}: {pickle.dumps(result)!r}"
grpc_response = {
"payload": response_payload,
"metadata": metadata,
@@ -196,6 +193,10 @@ def __init__(
your own client library.
always_use_jwt_access (Optional[bool]): Whether self signed JWT should
be used for service account credentials.
+ api_audience (Optional[str]): The intended audience for the API calls
+ to the service that will be set when using certain 3rd party
+ authentication flows. Audience is typically a resource identifier.
+ If not set, the host value will be used as a default.
Raises:
google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
@@ -408,12 +409,12 @@ def create_table_from_snapshot(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "create_table_from_snapshot" not in self._stubs:
- self._stubs[
- "create_table_from_snapshot"
- ] = self._logged_channel.unary_unary(
- "/google.bigtable.admin.v2.BigtableTableAdmin/CreateTableFromSnapshot",
- request_serializer=bigtable_table_admin.CreateTableFromSnapshotRequest.serialize,
- response_deserializer=operations_pb2.Operation.FromString,
+ self._stubs["create_table_from_snapshot"] = (
+ self._logged_channel.unary_unary(
+ "/google.bigtable.admin.v2.BigtableTableAdmin/CreateTableFromSnapshot",
+ request_serializer=bigtable_table_admin.CreateTableFromSnapshotRequest.serialize,
+ response_deserializer=operations_pb2.Operation.FromString,
+ )
)
return self._stubs["create_table_from_snapshot"]
@@ -778,12 +779,12 @@ def generate_consistency_token(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "generate_consistency_token" not in self._stubs:
- self._stubs[
- "generate_consistency_token"
- ] = self._logged_channel.unary_unary(
- "/google.bigtable.admin.v2.BigtableTableAdmin/GenerateConsistencyToken",
- request_serializer=bigtable_table_admin.GenerateConsistencyTokenRequest.serialize,
- response_deserializer=bigtable_table_admin.GenerateConsistencyTokenResponse.deserialize,
+ self._stubs["generate_consistency_token"] = (
+ self._logged_channel.unary_unary(
+ "/google.bigtable.admin.v2.BigtableTableAdmin/GenerateConsistencyToken",
+ request_serializer=bigtable_table_admin.GenerateConsistencyTokenRequest.serialize,
+ response_deserializer=bigtable_table_admin.GenerateConsistencyTokenResponse.deserialize,
+ )
)
return self._stubs["generate_consistency_token"]
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/transports/grpc_asyncio.py b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/transports/grpc_asyncio.py
index 5017f17d0575..58a472e7c18d 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/transports/grpc_asyncio.py
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/transports/grpc_asyncio.py
@@ -15,33 +15,30 @@
#
import inspect
import json
-import pickle
import logging as std_logging
+import pickle
import warnings
from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
-from google.api_core import gapic_v1
-from google.api_core import grpc_helpers_async
+import google.iam.v1.iam_policy_pb2 as iam_policy_pb2 # type: ignore
+import google.iam.v1.policy_pb2 as policy_pb2 # type: ignore
+import google.protobuf.empty_pb2 as empty_pb2 # type: ignore
+import google.protobuf.message
+import grpc # type: ignore
+import proto # type: ignore
from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1, grpc_helpers_async, operations_v1
from google.api_core import retry_async as retries
-from google.api_core import operations_v1
from google.auth import credentials as ga_credentials # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
+from google.longrunning import operations_pb2 # type: ignore
from google.protobuf.json_format import MessageToJson
-import google.protobuf.message
-
-import grpc # type: ignore
-import proto # type: ignore
from grpc.experimental import aio # type: ignore
-from google.cloud.bigtable_admin_v2.types import bigtable_table_admin
-from google.cloud.bigtable_admin_v2.types import table
+from google.cloud.bigtable_admin_v2.types import bigtable_table_admin, table
from google.cloud.bigtable_admin_v2.types import table as gba_table
-from google.iam.v1 import iam_policy_pb2 # type: ignore
-from google.iam.v1 import policy_pb2 # type: ignore
-from google.longrunning import operations_pb2 # type: ignore
-from google.protobuf import empty_pb2 # type: ignore
-from .base import BigtableTableAdminTransport, DEFAULT_CLIENT_INFO
+
+from .base import DEFAULT_CLIENT_INFO, BigtableTableAdminTransport
from .grpc import BigtableTableAdminGrpcTransport
try:
@@ -68,7 +65,7 @@ async def intercept_unary_unary(self, continuation, client_call_details, request
elif isinstance(request, google.protobuf.message.Message):
request_payload = MessageToJson(request)
else:
- request_payload = f"{type(request).__name__}: {pickle.dumps(request)}"
+ request_payload = f"{type(request).__name__}: {pickle.dumps(request)!r}"
request_metadata = {
key: value.decode("utf-8") if isinstance(value, bytes) else value
@@ -103,7 +100,7 @@ async def intercept_unary_unary(self, continuation, client_call_details, request
elif isinstance(result, google.protobuf.message.Message):
response_payload = MessageToJson(result)
else:
- response_payload = f"{type(result).__name__}: {pickle.dumps(result)}"
+ response_payload = f"{type(result).__name__}: {pickle.dumps(result)!r}"
grpc_response = {
"payload": response_payload,
"metadata": metadata,
@@ -247,6 +244,10 @@ def __init__(
your own client library.
always_use_jwt_access (Optional[bool]): Whether self signed JWT should
be used for service account credentials.
+ api_audience (Optional[str]): The intended audience for the API calls
+ to the service that will be set when using certain 3rd party
+ authentication flows. Audience is typically a resource identifier.
+ If not set, the host value will be used as a default.
Raises:
google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
@@ -419,12 +420,12 @@ def create_table_from_snapshot(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "create_table_from_snapshot" not in self._stubs:
- self._stubs[
- "create_table_from_snapshot"
- ] = self._logged_channel.unary_unary(
- "/google.bigtable.admin.v2.BigtableTableAdmin/CreateTableFromSnapshot",
- request_serializer=bigtable_table_admin.CreateTableFromSnapshotRequest.serialize,
- response_deserializer=operations_pb2.Operation.FromString,
+ self._stubs["create_table_from_snapshot"] = (
+ self._logged_channel.unary_unary(
+ "/google.bigtable.admin.v2.BigtableTableAdmin/CreateTableFromSnapshot",
+ request_serializer=bigtable_table_admin.CreateTableFromSnapshotRequest.serialize,
+ response_deserializer=operations_pb2.Operation.FromString,
+ )
)
return self._stubs["create_table_from_snapshot"]
@@ -801,12 +802,12 @@ def generate_consistency_token(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "generate_consistency_token" not in self._stubs:
- self._stubs[
- "generate_consistency_token"
- ] = self._logged_channel.unary_unary(
- "/google.bigtable.admin.v2.BigtableTableAdmin/GenerateConsistencyToken",
- request_serializer=bigtable_table_admin.GenerateConsistencyTokenRequest.serialize,
- response_deserializer=bigtable_table_admin.GenerateConsistencyTokenResponse.deserialize,
+ self._stubs["generate_consistency_token"] = (
+ self._logged_channel.unary_unary(
+ "/google.bigtable.admin.v2.BigtableTableAdmin/GenerateConsistencyToken",
+ request_serializer=bigtable_table_admin.GenerateConsistencyTokenRequest.serialize,
+ response_deserializer=bigtable_table_admin.GenerateConsistencyTokenResponse.deserialize,
+ )
)
return self._stubs["generate_consistency_token"]
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/transports/rest.py b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/transports/rest.py
index 6c3815f79437..adb950e08e20 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/transports/rest.py
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/transports/rest.py
@@ -13,38 +13,30 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
-import logging
+import dataclasses
import json # type: ignore
+import logging
+import warnings
+from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union
-from google.auth.transport.requests import AuthorizedSession # type: ignore
-from google.auth import credentials as ga_credentials # type: ignore
+import google.iam.v1.iam_policy_pb2 as iam_policy_pb2 # type: ignore
+import google.iam.v1.policy_pb2 as policy_pb2 # type: ignore
+import google.protobuf
+import google.protobuf.empty_pb2 as empty_pb2 # type: ignore
from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1, operations_v1, rest_helpers, rest_streaming
from google.api_core import retry as retries
-from google.api_core import rest_helpers
-from google.api_core import rest_streaming
-from google.api_core import gapic_v1
-import google.protobuf
-
+from google.auth import credentials as ga_credentials # type: ignore
+from google.auth.transport.requests import AuthorizedSession # type: ignore
+from google.longrunning import operations_pb2 # type: ignore
from google.protobuf import json_format
-from google.api_core import operations_v1
-
from requests import __version__ as requests_version
-import dataclasses
-from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union
-import warnings
-
-from google.cloud.bigtable_admin_v2.types import bigtable_table_admin
-from google.cloud.bigtable_admin_v2.types import table
+from google.cloud.bigtable_admin_v2.types import bigtable_table_admin, table
from google.cloud.bigtable_admin_v2.types import table as gba_table
-from google.iam.v1 import iam_policy_pb2 # type: ignore
-from google.iam.v1 import policy_pb2 # type: ignore
-from google.protobuf import empty_pb2 # type: ignore
-from google.longrunning import operations_pb2 # type: ignore
-
-from .rest_base import _BaseBigtableTableAdminRestTransport
from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO
+from .rest_base import _BaseBigtableTableAdminRestTransport
try:
OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None]
@@ -1917,6 +1909,12 @@ def __init__(
url_scheme: the protocol scheme for the API endpoint. Normally
"https", but for testing or local servers,
"http" can be specified.
+ interceptor (Optional[BigtableTableAdminRestInterceptor]): Interceptor used
+ to manipulate requests, request metadata, and responses.
+ api_audience (Optional[str]): The intended audience for the API calls
+ to the service that will be set when using certain 3rd party
+ authentication flows. Audience is typically a resource identifier.
+ If not set, the host value will be used as a default.
"""
# Run the base constructor
# TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
@@ -2050,9 +2048,7 @@ def __call__(
"""
- http_options = (
- _BaseBigtableTableAdminRestTransport._BaseCheckConsistency._get_http_options()
- )
+ http_options = _BaseBigtableTableAdminRestTransport._BaseCheckConsistency._get_http_options()
request, metadata = self._interceptor.pre_check_consistency(
request, metadata
@@ -2234,7 +2230,7 @@ def __call__(
)
method = transcoded_request["method"]
try:
- request_payload = json_format.MessageToJson(request)
+ request_payload = type(request).to_json(request)
except:
request_payload = None
http_request = {
@@ -2361,9 +2357,7 @@ def __call__(
"""
- http_options = (
- _BaseBigtableTableAdminRestTransport._BaseCreateAuthorizedView._get_http_options()
- )
+ http_options = _BaseBigtableTableAdminRestTransport._BaseCreateAuthorizedView._get_http_options()
request, metadata = self._interceptor.pre_create_authorized_view(
request, metadata
@@ -2389,7 +2383,7 @@ def __call__(
)
method = transcoded_request["method"]
try:
- request_payload = json_format.MessageToJson(request)
+ request_payload = type(request).to_json(request)
except:
request_payload = None
http_request = {
@@ -2518,9 +2512,7 @@ def __call__(
"""
- http_options = (
- _BaseBigtableTableAdminRestTransport._BaseCreateBackup._get_http_options()
- )
+ http_options = _BaseBigtableTableAdminRestTransport._BaseCreateBackup._get_http_options()
request, metadata = self._interceptor.pre_create_backup(request, metadata)
transcoded_request = _BaseBigtableTableAdminRestTransport._BaseCreateBackup._get_transcoded_request(
@@ -2544,7 +2536,7 @@ def __call__(
)
method = transcoded_request["method"]
try:
- request_payload = json_format.MessageToJson(request)
+ request_payload = type(request).to_json(request)
except:
request_payload = None
http_request = {
@@ -2671,9 +2663,7 @@ def __call__(
"""
- http_options = (
- _BaseBigtableTableAdminRestTransport._BaseCreateSchemaBundle._get_http_options()
- )
+ http_options = _BaseBigtableTableAdminRestTransport._BaseCreateSchemaBundle._get_http_options()
request, metadata = self._interceptor.pre_create_schema_bundle(
request, metadata
@@ -2699,7 +2689,7 @@ def __call__(
)
method = transcoded_request["method"]
try:
- request_payload = json_format.MessageToJson(request)
+ request_payload = type(request).to_json(request)
except:
request_payload = None
http_request = {
@@ -2829,9 +2819,7 @@ def __call__(
"""
- http_options = (
- _BaseBigtableTableAdminRestTransport._BaseCreateTable._get_http_options()
- )
+ http_options = _BaseBigtableTableAdminRestTransport._BaseCreateTable._get_http_options()
request, metadata = self._interceptor.pre_create_table(request, metadata)
transcoded_request = _BaseBigtableTableAdminRestTransport._BaseCreateTable._get_transcoded_request(
@@ -2992,9 +2980,7 @@ def __call__(
"""
- http_options = (
- _BaseBigtableTableAdminRestTransport._BaseCreateTableFromSnapshot._get_http_options()
- )
+ http_options = _BaseBigtableTableAdminRestTransport._BaseCreateTableFromSnapshot._get_http_options()
request, metadata = self._interceptor.pre_create_table_from_snapshot(
request, metadata
@@ -3020,7 +3006,7 @@ def __call__(
)
method = transcoded_request["method"]
try:
- request_payload = json_format.MessageToJson(request)
+ request_payload = type(request).to_json(request)
except:
request_payload = None
http_request = {
@@ -3141,9 +3127,7 @@ def __call__(
be of type `bytes`.
"""
- http_options = (
- _BaseBigtableTableAdminRestTransport._BaseDeleteAuthorizedView._get_http_options()
- )
+ http_options = _BaseBigtableTableAdminRestTransport._BaseDeleteAuthorizedView._get_http_options()
request, metadata = self._interceptor.pre_delete_authorized_view(
request, metadata
@@ -3165,7 +3149,7 @@ def __call__(
)
method = transcoded_request["method"]
try:
- request_payload = json_format.MessageToJson(request)
+ request_payload = type(request).to_json(request)
except:
request_payload = None
http_request = {
@@ -3253,9 +3237,7 @@ def __call__(
be of type `bytes`.
"""
- http_options = (
- _BaseBigtableTableAdminRestTransport._BaseDeleteBackup._get_http_options()
- )
+ http_options = _BaseBigtableTableAdminRestTransport._BaseDeleteBackup._get_http_options()
request, metadata = self._interceptor.pre_delete_backup(request, metadata)
transcoded_request = _BaseBigtableTableAdminRestTransport._BaseDeleteBackup._get_transcoded_request(
@@ -3275,7 +3257,7 @@ def __call__(
)
method = transcoded_request["method"]
try:
- request_payload = json_format.MessageToJson(request)
+ request_payload = type(request).to_json(request)
except:
request_payload = None
http_request = {
@@ -3361,9 +3343,7 @@ def __call__(
be of type `bytes`.
"""
- http_options = (
- _BaseBigtableTableAdminRestTransport._BaseDeleteSchemaBundle._get_http_options()
- )
+ http_options = _BaseBigtableTableAdminRestTransport._BaseDeleteSchemaBundle._get_http_options()
request, metadata = self._interceptor.pre_delete_schema_bundle(
request, metadata
@@ -3385,7 +3365,7 @@ def __call__(
)
method = transcoded_request["method"]
try:
- request_payload = json_format.MessageToJson(request)
+ request_payload = type(request).to_json(request)
except:
request_payload = None
http_request = {
@@ -3480,9 +3460,7 @@ def __call__(
be of type `bytes`.
"""
- http_options = (
- _BaseBigtableTableAdminRestTransport._BaseDeleteSnapshot._get_http_options()
- )
+ http_options = _BaseBigtableTableAdminRestTransport._BaseDeleteSnapshot._get_http_options()
request, metadata = self._interceptor.pre_delete_snapshot(request, metadata)
transcoded_request = _BaseBigtableTableAdminRestTransport._BaseDeleteSnapshot._get_transcoded_request(
@@ -3502,7 +3480,7 @@ def __call__(
)
method = transcoded_request["method"]
try:
- request_payload = json_format.MessageToJson(request)
+ request_payload = type(request).to_json(request)
except:
request_payload = None
http_request = {
@@ -3588,9 +3566,7 @@ def __call__(
be of type `bytes`.
"""
- http_options = (
- _BaseBigtableTableAdminRestTransport._BaseDeleteTable._get_http_options()
- )
+ http_options = _BaseBigtableTableAdminRestTransport._BaseDeleteTable._get_http_options()
request, metadata = self._interceptor.pre_delete_table(request, metadata)
transcoded_request = _BaseBigtableTableAdminRestTransport._BaseDeleteTable._get_transcoded_request(
@@ -3610,7 +3586,7 @@ def __call__(
)
method = transcoded_request["method"]
try:
- request_payload = json_format.MessageToJson(request)
+ request_payload = type(request).to_json(request)
except:
request_payload = None
http_request = {
@@ -3697,9 +3673,7 @@ def __call__(
be of type `bytes`.
"""
- http_options = (
- _BaseBigtableTableAdminRestTransport._BaseDropRowRange._get_http_options()
- )
+ http_options = _BaseBigtableTableAdminRestTransport._BaseDropRowRange._get_http_options()
request, metadata = self._interceptor.pre_drop_row_range(request, metadata)
transcoded_request = _BaseBigtableTableAdminRestTransport._BaseDropRowRange._get_transcoded_request(
@@ -3723,7 +3697,7 @@ def __call__(
)
method = transcoded_request["method"]
try:
- request_payload = json_format.MessageToJson(request)
+ request_payload = type(request).to_json(request)
except:
request_payload = None
http_request = {
@@ -3818,9 +3792,7 @@ def __call__(
"""
- http_options = (
- _BaseBigtableTableAdminRestTransport._BaseGenerateConsistencyToken._get_http_options()
- )
+ http_options = _BaseBigtableTableAdminRestTransport._BaseGenerateConsistencyToken._get_http_options()
request, metadata = self._interceptor.pre_generate_consistency_token(
request, metadata
@@ -3983,9 +3955,7 @@ def __call__(
"""
- http_options = (
- _BaseBigtableTableAdminRestTransport._BaseGetAuthorizedView._get_http_options()
- )
+ http_options = _BaseBigtableTableAdminRestTransport._BaseGetAuthorizedView._get_http_options()
request, metadata = self._interceptor.pre_get_authorized_view(
request, metadata
@@ -4351,9 +4321,7 @@ def __call__(
"""
- http_options = (
- _BaseBigtableTableAdminRestTransport._BaseGetIamPolicy._get_http_options()
- )
+ http_options = _BaseBigtableTableAdminRestTransport._BaseGetIamPolicy._get_http_options()
request, metadata = self._interceptor.pre_get_iam_policy(request, metadata)
transcoded_request = _BaseBigtableTableAdminRestTransport._BaseGetIamPolicy._get_transcoded_request(
@@ -4504,9 +4472,7 @@ def __call__(
"""
- http_options = (
- _BaseBigtableTableAdminRestTransport._BaseGetSchemaBundle._get_http_options()
- )
+ http_options = _BaseBigtableTableAdminRestTransport._BaseGetSchemaBundle._get_http_options()
request, metadata = self._interceptor.pre_get_schema_bundle(
request, metadata
@@ -4672,9 +4638,7 @@ def __call__(
"""
- http_options = (
- _BaseBigtableTableAdminRestTransport._BaseGetSnapshot._get_http_options()
- )
+ http_options = _BaseBigtableTableAdminRestTransport._BaseGetSnapshot._get_http_options()
request, metadata = self._interceptor.pre_get_snapshot(request, metadata)
transcoded_request = _BaseBigtableTableAdminRestTransport._BaseGetSnapshot._get_transcoded_request(
@@ -4969,9 +4933,7 @@ def __call__(
"""
- http_options = (
- _BaseBigtableTableAdminRestTransport._BaseListAuthorizedViews._get_http_options()
- )
+ http_options = _BaseBigtableTableAdminRestTransport._BaseListAuthorizedViews._get_http_options()
request, metadata = self._interceptor.pre_list_authorized_views(
request, metadata
@@ -5125,9 +5087,7 @@ def __call__(
"""
- http_options = (
- _BaseBigtableTableAdminRestTransport._BaseListBackups._get_http_options()
- )
+ http_options = _BaseBigtableTableAdminRestTransport._BaseListBackups._get_http_options()
request, metadata = self._interceptor.pre_list_backups(request, metadata)
transcoded_request = _BaseBigtableTableAdminRestTransport._BaseListBackups._get_transcoded_request(
@@ -5275,9 +5235,7 @@ def __call__(
"""
- http_options = (
- _BaseBigtableTableAdminRestTransport._BaseListSchemaBundles._get_http_options()
- )
+ http_options = _BaseBigtableTableAdminRestTransport._BaseListSchemaBundles._get_http_options()
request, metadata = self._interceptor.pre_list_schema_bundles(
request, metadata
@@ -5441,9 +5399,7 @@ def __call__(
"""
- http_options = (
- _BaseBigtableTableAdminRestTransport._BaseListSnapshots._get_http_options()
- )
+ http_options = _BaseBigtableTableAdminRestTransport._BaseListSnapshots._get_http_options()
request, metadata = self._interceptor.pre_list_snapshots(request, metadata)
transcoded_request = _BaseBigtableTableAdminRestTransport._BaseListSnapshots._get_transcoded_request(
@@ -5743,9 +5699,7 @@ def __call__(
"""
- http_options = (
- _BaseBigtableTableAdminRestTransport._BaseModifyColumnFamilies._get_http_options()
- )
+ http_options = _BaseBigtableTableAdminRestTransport._BaseModifyColumnFamilies._get_http_options()
request, metadata = self._interceptor.pre_modify_column_families(
request, metadata
@@ -5902,9 +5856,7 @@ def __call__(
"""
- http_options = (
- _BaseBigtableTableAdminRestTransport._BaseRestoreTable._get_http_options()
- )
+ http_options = _BaseBigtableTableAdminRestTransport._BaseRestoreTable._get_http_options()
request, metadata = self._interceptor.pre_restore_table(request, metadata)
transcoded_request = _BaseBigtableTableAdminRestTransport._BaseRestoreTable._get_transcoded_request(
@@ -5928,7 +5880,7 @@ def __call__(
)
method = transcoded_request["method"]
try:
- request_payload = json_format.MessageToJson(request)
+ request_payload = type(request).to_json(request)
except:
request_payload = None
http_request = {
@@ -6126,9 +6078,7 @@ def __call__(
"""
- http_options = (
- _BaseBigtableTableAdminRestTransport._BaseSetIamPolicy._get_http_options()
- )
+ http_options = _BaseBigtableTableAdminRestTransport._BaseSetIamPolicy._get_http_options()
request, metadata = self._interceptor.pre_set_iam_policy(request, metadata)
transcoded_request = _BaseBigtableTableAdminRestTransport._BaseSetIamPolicy._get_transcoded_request(
@@ -6288,9 +6238,7 @@ def __call__(
"""
- http_options = (
- _BaseBigtableTableAdminRestTransport._BaseSnapshotTable._get_http_options()
- )
+ http_options = _BaseBigtableTableAdminRestTransport._BaseSnapshotTable._get_http_options()
request, metadata = self._interceptor.pre_snapshot_table(request, metadata)
transcoded_request = _BaseBigtableTableAdminRestTransport._BaseSnapshotTable._get_transcoded_request(
@@ -6314,7 +6262,7 @@ def __call__(
)
method = transcoded_request["method"]
try:
- request_payload = json_format.MessageToJson(request)
+ request_payload = type(request).to_json(request)
except:
request_payload = None
http_request = {
@@ -6437,9 +6385,7 @@ def __call__(
Response message for ``TestIamPermissions`` method.
"""
- http_options = (
- _BaseBigtableTableAdminRestTransport._BaseTestIamPermissions._get_http_options()
- )
+ http_options = _BaseBigtableTableAdminRestTransport._BaseTestIamPermissions._get_http_options()
request, metadata = self._interceptor.pre_test_iam_permissions(
request, metadata
@@ -6596,9 +6542,7 @@ def __call__(
"""
- http_options = (
- _BaseBigtableTableAdminRestTransport._BaseUndeleteTable._get_http_options()
- )
+ http_options = _BaseBigtableTableAdminRestTransport._BaseUndeleteTable._get_http_options()
request, metadata = self._interceptor.pre_undelete_table(request, metadata)
transcoded_request = _BaseBigtableTableAdminRestTransport._BaseUndeleteTable._get_transcoded_request(
@@ -6622,7 +6566,7 @@ def __call__(
)
method = transcoded_request["method"]
try:
- request_payload = json_format.MessageToJson(request)
+ request_payload = type(request).to_json(request)
except:
request_payload = None
http_request = {
@@ -6749,9 +6693,7 @@ def __call__(
"""
- http_options = (
- _BaseBigtableTableAdminRestTransport._BaseUpdateAuthorizedView._get_http_options()
- )
+ http_options = _BaseBigtableTableAdminRestTransport._BaseUpdateAuthorizedView._get_http_options()
request, metadata = self._interceptor.pre_update_authorized_view(
request, metadata
@@ -6777,7 +6719,7 @@ def __call__(
)
method = transcoded_request["method"]
try:
- request_payload = json_format.MessageToJson(request)
+ request_payload = type(request).to_json(request)
except:
request_payload = None
http_request = {
@@ -6903,9 +6845,7 @@ def __call__(
A backup of a Cloud Bigtable table.
"""
- http_options = (
- _BaseBigtableTableAdminRestTransport._BaseUpdateBackup._get_http_options()
- )
+ http_options = _BaseBigtableTableAdminRestTransport._BaseUpdateBackup._get_http_options()
request, metadata = self._interceptor.pre_update_backup(request, metadata)
transcoded_request = _BaseBigtableTableAdminRestTransport._BaseUpdateBackup._get_transcoded_request(
@@ -7058,9 +6998,7 @@ def __call__(
"""
- http_options = (
- _BaseBigtableTableAdminRestTransport._BaseUpdateSchemaBundle._get_http_options()
- )
+ http_options = _BaseBigtableTableAdminRestTransport._BaseUpdateSchemaBundle._get_http_options()
request, metadata = self._interceptor.pre_update_schema_bundle(
request, metadata
@@ -7086,7 +7024,7 @@ def __call__(
)
method = transcoded_request["method"]
try:
- request_payload = json_format.MessageToJson(request)
+ request_payload = type(request).to_json(request)
except:
request_payload = None
http_request = {
@@ -7215,9 +7153,7 @@ def __call__(
"""
- http_options = (
- _BaseBigtableTableAdminRestTransport._BaseUpdateTable._get_http_options()
- )
+ http_options = _BaseBigtableTableAdminRestTransport._BaseUpdateTable._get_http_options()
request, metadata = self._interceptor.pre_update_table(request, metadata)
transcoded_request = _BaseBigtableTableAdminRestTransport._BaseUpdateTable._get_transcoded_request(
@@ -7241,7 +7177,7 @@ def __call__(
)
method = transcoded_request["method"]
try:
- request_payload = json_format.MessageToJson(request)
+ request_payload = type(request).to_json(request)
except:
request_payload = None
http_request = {
@@ -7371,7 +7307,9 @@ def create_table_from_snapshot(
]:
# The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
# In C++ this would require a dynamic_cast
- return self._CreateTableFromSnapshot(self._session, self._host, self._interceptor) # type: ignore
+ return self._CreateTableFromSnapshot(
+ self._session, self._host, self._interceptor
+ ) # type: ignore
@property
def delete_authorized_view(
@@ -7430,7 +7368,9 @@ def generate_consistency_token(
]:
# The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
# In C++ this would require a dynamic_cast
- return self._GenerateConsistencyToken(self._session, self._host, self._interceptor) # type: ignore
+ return self._GenerateConsistencyToken(
+ self._session, self._host, self._interceptor
+ ) # type: ignore
@property
def get_authorized_view(
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/transports/rest_base.py b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/transports/rest_base.py
index ef6c2374d2a2..1b6b7d48bdc2 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/transports/rest_base.py
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/transports/rest_base.py
@@ -14,23 +14,20 @@
# limitations under the License.
#
import json # type: ignore
-from google.api_core import path_template
-from google.api_core import gapic_v1
-
-from google.protobuf import json_format
-from .base import BigtableTableAdminTransport, DEFAULT_CLIENT_INFO
-
import re
from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union
+import google.iam.v1.iam_policy_pb2 as iam_policy_pb2 # type: ignore
+import google.iam.v1.policy_pb2 as policy_pb2 # type: ignore
+import google.protobuf.empty_pb2 as empty_pb2 # type: ignore
+from google.api_core import gapic_v1, path_template
+from google.longrunning import operations_pb2 # type: ignore
+from google.protobuf import json_format
-from google.cloud.bigtable_admin_v2.types import bigtable_table_admin
-from google.cloud.bigtable_admin_v2.types import table
+from google.cloud.bigtable_admin_v2.types import bigtable_table_admin, table
from google.cloud.bigtable_admin_v2.types import table as gba_table
-from google.iam.v1 import iam_policy_pb2 # type: ignore
-from google.iam.v1 import policy_pb2 # type: ignore
-from google.protobuf import empty_pb2 # type: ignore
-from google.longrunning import operations_pb2 # type: ignore
+
+from .base import DEFAULT_CLIENT_INFO, BigtableTableAdminTransport
class _BaseBigtableTableAdminRestTransport(BigtableTableAdminTransport):
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/types/__init__.py b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/types/__init__.py
index d2036c7a3cd0..3a26c0e58837 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/types/__init__.py
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/types/__init__.py
@@ -136,12 +136,12 @@
GcRule,
ProtoSchema,
RestoreInfo,
+ RestoreSourceType,
SchemaBundle,
Snapshot,
Table,
TieredStorageConfig,
TieredStorageRule,
- RestoreSourceType,
)
from .types import (
Type,
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/types/bigtable_instance_admin.py b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/types/bigtable_instance_admin.py
index 4197ed0b7424..0f2cbfd91bce 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/types/bigtable_instance_admin.py
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/types/bigtable_instance_admin.py
@@ -17,12 +17,11 @@
from typing import MutableMapping, MutableSequence
+import google.protobuf.field_mask_pb2 as field_mask_pb2 # type: ignore
+import google.protobuf.timestamp_pb2 as timestamp_pb2 # type: ignore
import proto # type: ignore
from google.cloud.bigtable_admin_v2.types import instance as gba_instance
-from google.protobuf import field_mask_pb2 # type: ignore
-from google.protobuf import timestamp_pb2 # type: ignore
-
__protobuf__ = proto.module(
package="google.bigtable.admin.v2",
@@ -481,6 +480,7 @@ class State(proto.Enum):
deleted after completion will stay marked as
COMPLETED, not CANCELLED.
"""
+
STATE_UNSPECIFIED = 0
PENDING = 1
COPYING = 2
@@ -1262,12 +1262,12 @@ class ListMaterializedViewsResponse(proto.Message):
def raw_page(self):
return self
- materialized_views: MutableSequence[
- gba_instance.MaterializedView
- ] = proto.RepeatedField(
- proto.MESSAGE,
- number=1,
- message=gba_instance.MaterializedView,
+ materialized_views: MutableSequence[gba_instance.MaterializedView] = (
+ proto.RepeatedField(
+ proto.MESSAGE,
+ number=1,
+ message=gba_instance.MaterializedView,
+ )
)
next_page_token: str = proto.Field(
proto.STRING,
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/types/bigtable_table_admin.py b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/types/bigtable_table_admin.py
index 69de07a2ab8e..508b1fcba337 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/types/bigtable_table_admin.py
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/types/bigtable_table_admin.py
@@ -17,14 +17,13 @@
from typing import MutableMapping, MutableSequence
+import google.protobuf.duration_pb2 as duration_pb2 # type: ignore
+import google.protobuf.field_mask_pb2 as field_mask_pb2 # type: ignore
+import google.protobuf.timestamp_pb2 as timestamp_pb2 # type: ignore
import proto # type: ignore
from google.cloud.bigtable_admin_v2.types import common
from google.cloud.bigtable_admin_v2.types import table as gba_table
-from google.protobuf import duration_pb2 # type: ignore
-from google.protobuf import field_mask_pb2 # type: ignore
-from google.protobuf import timestamp_pb2 # type: ignore
-
__protobuf__ = proto.module(
package="google.bigtable.admin.v2",
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/types/common.py b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/types/common.py
index 7b05e5ff5a2e..7f03891307e5 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/types/common.py
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/types/common.py
@@ -17,11 +17,9 @@
from typing import MutableMapping, MutableSequence
+import google.protobuf.timestamp_pb2 as timestamp_pb2 # type: ignore
import proto # type: ignore
-from google.protobuf import timestamp_pb2 # type: ignore
-
-
__protobuf__ = proto.module(
package="google.bigtable.admin.v2",
manifest={
@@ -42,6 +40,7 @@ class StorageType(proto.Enum):
HDD (2):
Magnetic drive (HDD) storage should be used.
"""
+
STORAGE_TYPE_UNSPECIFIED = 0
SSD = 1
HDD = 2
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/types/instance.py b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/types/instance.py
index f07414d56957..9d6878706b54 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/types/instance.py
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/types/instance.py
@@ -17,11 +17,10 @@
from typing import MutableMapping, MutableSequence
+import google.protobuf.timestamp_pb2 as timestamp_pb2 # type: ignore
import proto # type: ignore
from google.cloud.bigtable_admin_v2.types import common
-from google.protobuf import timestamp_pb2 # type: ignore
-
__protobuf__ = proto.module(
package="google.bigtable.admin.v2",
@@ -120,6 +119,7 @@ class State(proto.Enum):
may be destroyed if the creation process
encounters an error.
"""
+
STATE_NOT_KNOWN = 0
READY = 1
CREATING = 2
@@ -141,6 +141,7 @@ class Type(proto.Enum):
cases, as it no longer enforces a higher minimum
node count than DEVELOPMENT.
"""
+
TYPE_UNSPECIFIED = 0
PRODUCTION = 1
DEVELOPMENT = 2
@@ -313,6 +314,7 @@ class State(proto.Enum):
(tables) still exist, but no operations can be
performed on the cluster.
"""
+
STATE_NOT_KNOWN = 0
READY = 1
CREATING = 2
@@ -337,6 +339,7 @@ class NodeScalingFactor(proto.Enum):
factor enabled, otherwise an INVALID_ARGUMENT error will be
returned.
"""
+
NODE_SCALING_FACTOR_UNSPECIFIED = 0
NODE_SCALING_FACTOR_1X = 1
NODE_SCALING_FACTOR_2X = 2
@@ -517,6 +520,7 @@ class Priority(proto.Enum):
PRIORITY_HIGH (3):
No description available.
"""
+
PRIORITY_UNSPECIFIED = 0
PRIORITY_LOW = 1
PRIORITY_MEDIUM = 2
@@ -643,6 +647,7 @@ class ComputeBillingOwner(proto.Enum):
targeted Bigtable Instance / Table pays for
compute.
"""
+
COMPUTE_BILLING_OWNER_UNSPECIFIED = 0
HOST_PAYS = 1
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/types/table.py b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/types/table.py
index c4f23d5fa7bc..66e1626f95e3 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/types/table.py
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/types/table.py
@@ -17,14 +17,13 @@
from typing import MutableMapping, MutableSequence
+import google.protobuf.duration_pb2 as duration_pb2 # type: ignore
+import google.protobuf.timestamp_pb2 as timestamp_pb2 # type: ignore
+import google.rpc.status_pb2 as status_pb2 # type: ignore
import proto # type: ignore
from google.cloud.bigtable_admin_v2.types import types
from google.cloud.bigtable_admin_v2.utils import oneof_message
-from google.protobuf import duration_pb2 # type: ignore
-from google.protobuf import timestamp_pb2 # type: ignore
-from google.rpc import status_pb2 # type: ignore
-
__protobuf__ = proto.module(
package="google.bigtable.admin.v2",
@@ -58,6 +57,7 @@ class RestoreSourceType(proto.Enum):
A backup was used as the source of the
restore.
"""
+
RESTORE_SOURCE_TYPE_UNSPECIFIED = 0
BACKUP = 1
@@ -247,6 +247,7 @@ class TimestampGranularity(proto.Enum):
The table keeps data versioned at a
granularity of 1ms.
"""
+
TIMESTAMP_GRANULARITY_UNSPECIFIED = 0
MILLIS = 1
@@ -271,6 +272,7 @@ class View(proto.Enum):
FULL (4):
Populates all fields.
"""
+
VIEW_UNSPECIFIED = 0
NAME_ONLY = 1
SCHEMA_VIEW = 2
@@ -328,6 +330,7 @@ class ReplicationState(proto.Enum):
optimizations are complete, the table will transition to
``READY`` state.
"""
+
STATE_NOT_KNOWN = 0
INITIALIZING = 1
PLANNED_MAINTENANCE = 2
@@ -470,6 +473,7 @@ class ResponseView(proto.Enum):
FULL (3):
Populates every fields.
"""
+
RESPONSE_VIEW_UNSPECIFIED = 0
NAME_ONLY = 1
BASIC = 2
@@ -520,13 +524,13 @@ class SubsetView(proto.Message):
proto.BYTES,
number=1,
)
- family_subsets: MutableMapping[
- str, "AuthorizedView.FamilySubsets"
- ] = proto.MapField(
- proto.STRING,
- proto.MESSAGE,
- number=2,
- message="AuthorizedView.FamilySubsets",
+ family_subsets: MutableMapping[str, "AuthorizedView.FamilySubsets"] = (
+ proto.MapField(
+ proto.STRING,
+ proto.MESSAGE,
+ number=2,
+ message="AuthorizedView.FamilySubsets",
+ )
)
name: str = proto.Field(
@@ -723,6 +727,7 @@ class EncryptionType(proto.Enum):
version is populated but its status is not tracked and is
reported as ``UNKNOWN``.
"""
+
ENCRYPTION_TYPE_UNSPECIFIED = 0
GOOGLE_DEFAULT_ENCRYPTION = 1
CUSTOMER_MANAGED_ENCRYPTION = 2
@@ -798,6 +803,7 @@ class State(proto.Enum):
encounters an error. A snapshot may not be
restored to a table while it is being created.
"""
+
STATE_NOT_KNOWN = 0
READY = 1
CREATING = 2
@@ -917,6 +923,7 @@ class State(proto.Enum):
READY (2):
The backup is complete and ready for use.
"""
+
STATE_UNSPECIFIED = 0
CREATING = 1
READY = 2
@@ -939,6 +946,7 @@ class BackupType(proto.Enum):
a hot backup reaches production performance more
quickly than a standard backup.
"""
+
BACKUP_TYPE_UNSPECIFIED = 0
STANDARD = 1
HOT = 2
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/types/types.py b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/types/types.py
index 4f56429dabff..0b89a8e1d22e 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/types/types.py
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/types/types.py
@@ -19,7 +19,6 @@
import proto # type: ignore
-
__protobuf__ = proto.module(
package="google.bigtable.admin.v2",
manifest={
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/utils/oneof_message.py b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/utils/oneof_message.py
index e110d8fa6cf1..9c5f08615c98 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/utils/oneof_message.py
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/utils/oneof_message.py
@@ -15,6 +15,7 @@
#
#
import collections.abc
+
import proto
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_v2/__init__.py b/packages/google-cloud-bigtable/google/cloud/bigtable_v2/__init__.py
index ec552a85dbad..6dcbba6e4912 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable_v2/__init__.py
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable_v2/__init__.py
@@ -13,10 +13,11 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
-from google.cloud.bigtable_v2 import gapic_version as package_version
+import sys
import google.api_core as api_core
-import sys
+
+from google.cloud.bigtable_v2 import gapic_version as package_version
__version__ = package_version.__version__
@@ -28,63 +29,67 @@
import importlib_metadata as metadata
-from .services.bigtable import BigtableClient
-from .services.bigtable import BigtableAsyncClient
-
-from .types.bigtable import CheckAndMutateRowRequest
-from .types.bigtable import CheckAndMutateRowResponse
-from .types.bigtable import ExecuteQueryRequest
-from .types.bigtable import ExecuteQueryResponse
-from .types.bigtable import GenerateInitialChangeStreamPartitionsRequest
-from .types.bigtable import GenerateInitialChangeStreamPartitionsResponse
-from .types.bigtable import MutateRowRequest
-from .types.bigtable import MutateRowResponse
-from .types.bigtable import MutateRowsRequest
-from .types.bigtable import MutateRowsResponse
-from .types.bigtable import PingAndWarmRequest
-from .types.bigtable import PingAndWarmResponse
-from .types.bigtable import PrepareQueryRequest
-from .types.bigtable import PrepareQueryResponse
-from .types.bigtable import RateLimitInfo
-from .types.bigtable import ReadChangeStreamRequest
-from .types.bigtable import ReadChangeStreamResponse
-from .types.bigtable import ReadModifyWriteRowRequest
-from .types.bigtable import ReadModifyWriteRowResponse
-from .types.bigtable import ReadRowsRequest
-from .types.bigtable import ReadRowsResponse
-from .types.bigtable import SampleRowKeysRequest
-from .types.bigtable import SampleRowKeysResponse
-from .types.data import ArrayValue
-from .types.data import Cell
-from .types.data import Column
-from .types.data import ColumnMetadata
-from .types.data import ColumnRange
-from .types.data import Family
-from .types.data import Idempotency
-from .types.data import Mutation
-from .types.data import PartialResultSet
-from .types.data import ProtoFormat
-from .types.data import ProtoRows
-from .types.data import ProtoRowsBatch
-from .types.data import ProtoSchema
-from .types.data import ReadModifyWriteRule
-from .types.data import ResultSetMetadata
-from .types.data import Row
-from .types.data import RowFilter
-from .types.data import RowRange
-from .types.data import RowSet
-from .types.data import StreamContinuationToken
-from .types.data import StreamContinuationTokens
-from .types.data import StreamPartition
-from .types.data import TimestampRange
-from .types.data import Value
-from .types.data import ValueRange
+from .services.bigtable import BigtableAsyncClient, BigtableClient
+from .types.bigtable import (
+ CheckAndMutateRowRequest,
+ CheckAndMutateRowResponse,
+ ExecuteQueryRequest,
+ ExecuteQueryResponse,
+ GenerateInitialChangeStreamPartitionsRequest,
+ GenerateInitialChangeStreamPartitionsResponse,
+ MutateRowRequest,
+ MutateRowResponse,
+ MutateRowsRequest,
+ MutateRowsResponse,
+ PingAndWarmRequest,
+ PingAndWarmResponse,
+ PrepareQueryRequest,
+ PrepareQueryResponse,
+ RateLimitInfo,
+ ReadChangeStreamRequest,
+ ReadChangeStreamResponse,
+ ReadModifyWriteRowRequest,
+ ReadModifyWriteRowResponse,
+ ReadRowsRequest,
+ ReadRowsResponse,
+ SampleRowKeysRequest,
+ SampleRowKeysResponse,
+)
+from .types.data import (
+ ArrayValue,
+ Cell,
+ Column,
+ ColumnMetadata,
+ ColumnRange,
+ Family,
+ Idempotency,
+ Mutation,
+ PartialResultSet,
+ ProtoFormat,
+ ProtoRows,
+ ProtoRowsBatch,
+ ProtoSchema,
+ ReadModifyWriteRule,
+ ResultSetMetadata,
+ Row,
+ RowFilter,
+ RowRange,
+ RowSet,
+ StreamContinuationToken,
+ StreamContinuationTokens,
+ StreamPartition,
+ TimestampRange,
+ Value,
+ ValueRange,
+)
from .types.feature_flags import FeatureFlags
from .types.peer_info import PeerInfo
-from .types.request_stats import FullReadStatsView
-from .types.request_stats import ReadIterationStats
-from .types.request_stats import RequestLatencyStats
-from .types.request_stats import RequestStats
+from .types.request_stats import (
+ FullReadStatsView,
+ ReadIterationStats,
+ RequestLatencyStats,
+ RequestStats,
+)
from .types.response_params import ResponseParams
from .types.types import Type
@@ -97,8 +102,8 @@
# An older version of api_core is installed which does not define the
# functions above. We do equivalent checks manually.
try:
- import warnings
import sys
+ import warnings
_py_version_str = sys.version.split()[0]
_package_label = "google.cloud.bigtable_v2"
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_v2/services/bigtable/__init__.py b/packages/google-cloud-bigtable/google/cloud/bigtable_v2/services/bigtable/__init__.py
index c74141156324..d24937f6abc5 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable_v2/services/bigtable/__init__.py
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable_v2/services/bigtable/__init__.py
@@ -13,8 +13,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
-from .client import BigtableClient
from .async_client import BigtableAsyncClient
+from .client import BigtableClient
__all__ = (
"BigtableClient",
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_v2/services/bigtable/async_client.py b/packages/google-cloud-bigtable/google/cloud/bigtable_v2/services/bigtable/async_client.py
index 0a9442287e22..90732351193a 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable_v2/services/bigtable/async_client.py
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable_v2/services/bigtable/async_client.py
@@ -14,46 +14,45 @@
# limitations under the License.
#
import logging as std_logging
-from collections import OrderedDict
import re
+from collections import OrderedDict
from typing import (
- Dict,
+ AsyncIterable,
+ Awaitable,
Callable,
+ Dict,
Mapping,
MutableMapping,
MutableSequence,
Optional,
- AsyncIterable,
- Awaitable,
Sequence,
Tuple,
Type,
Union,
)
-from google.cloud.bigtable_v2 import gapic_version as package_version
-
-from google.api_core.client_options import ClientOptions
+import google.protobuf
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
from google.api_core import retry_async as retries
+from google.api_core.client_options import ClientOptions
from google.auth import credentials as ga_credentials # type: ignore
from google.oauth2 import service_account # type: ignore
-import google.protobuf
+from google.cloud.bigtable_v2 import gapic_version as package_version
try:
OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None]
except AttributeError: # pragma: NO COVER
OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore
-from google.cloud.bigtable_v2.types import bigtable
-from google.cloud.bigtable_v2.types import data
-from google.cloud.bigtable_v2.types import request_stats
-from google.protobuf import timestamp_pb2 # type: ignore
-from .transports.base import BigtableTransport, DEFAULT_CLIENT_INFO
-from .transports.grpc_asyncio import BigtableGrpcAsyncIOTransport
+import google.protobuf.timestamp_pb2 as timestamp_pb2 # type: ignore
+
+from google.cloud.bigtable_v2.types import bigtable, data, request_stats
+
from .client import BigtableClient
+from .transports.base import DEFAULT_CLIENT_INFO, BigtableTransport
+from .transports.grpc_asyncio import BigtableGrpcAsyncIOTransport
try:
from google.api_core import client_logging # type: ignore
@@ -119,7 +118,10 @@ def from_service_account_info(cls, info: dict, *args, **kwargs):
Returns:
BigtableAsyncClient: The constructed client.
"""
- return BigtableClient.from_service_account_info.__func__(BigtableAsyncClient, info, *args, **kwargs) # type: ignore
+ sa_info_func = (
+ BigtableClient.from_service_account_info.__func__ # type: ignore
+ )
+ return sa_info_func(BigtableAsyncClient, info, *args, **kwargs)
@classmethod
def from_service_account_file(cls, filename: str, *args, **kwargs):
@@ -135,7 +137,10 @@ def from_service_account_file(cls, filename: str, *args, **kwargs):
Returns:
BigtableAsyncClient: The constructed client.
"""
- return BigtableClient.from_service_account_file.__func__(BigtableAsyncClient, filename, *args, **kwargs) # type: ignore
+ sa_file_func = (
+ BigtableClient.from_service_account_file.__func__ # type: ignore
+ )
+ return sa_file_func(BigtableAsyncClient, filename, *args, **kwargs)
from_service_account_json = from_service_account_file
@@ -185,7 +190,7 @@ def transport(self) -> BigtableTransport:
return self._client.transport
@property
- def api_endpoint(self):
+ def api_endpoint(self) -> str:
"""Return the API endpoint used by the client instance.
Returns:
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_v2/services/bigtable/client.py b/packages/google-cloud-bigtable/google/cloud/bigtable_v2/services/bigtable/client.py
index 5eb6ba894cba..d0dbe91866ac 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable_v2/services/bigtable/client.py
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable_v2/services/bigtable/client.py
@@ -13,40 +13,40 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
-from collections import OrderedDict
-from http import HTTPStatus
import json
import logging as std_logging
import os
import re
+import warnings
+from collections import OrderedDict
+from http import HTTPStatus
from typing import (
- Dict,
Callable,
+ Dict,
+ Iterable,
Mapping,
MutableMapping,
MutableSequence,
Optional,
- Iterable,
Sequence,
Tuple,
Type,
Union,
cast,
)
-import warnings
-
-from google.cloud.bigtable_v2 import gapic_version as package_version
+import google.protobuf
from google.api_core import client_options as client_options_lib
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
from google.api_core import retry as retries
from google.auth import credentials as ga_credentials # type: ignore
+from google.auth.exceptions import MutualTLSChannelError # type: ignore
from google.auth.transport import mtls # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
-from google.auth.exceptions import MutualTLSChannelError # type: ignore
from google.oauth2 import service_account # type: ignore
-import google.protobuf
+
+from google.cloud.bigtable_v2 import gapic_version as package_version
try:
OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None]
@@ -62,11 +62,11 @@
_LOGGER = std_logging.getLogger(__name__)
-from google.cloud.bigtable_v2.types import bigtable
-from google.cloud.bigtable_v2.types import data
-from google.cloud.bigtable_v2.types import request_stats
-from google.protobuf import timestamp_pb2 # type: ignore
-from .transports.base import BigtableTransport, DEFAULT_CLIENT_INFO
+import google.protobuf.timestamp_pb2 as timestamp_pb2 # type: ignore
+
+from google.cloud.bigtable_v2.types import bigtable, data, request_stats
+
+from .transports.base import DEFAULT_CLIENT_INFO, BigtableTransport
from .transports.grpc import BigtableGrpcTransport
from .transports.grpc_asyncio import BigtableGrpcAsyncIOTransport
from .transports.rest import BigtableRestTransport
@@ -113,7 +113,7 @@ class BigtableClient(metaclass=BigtableClientMeta):
"""
@staticmethod
- def _get_default_mtls_endpoint(api_endpoint):
+ def _get_default_mtls_endpoint(api_endpoint) -> Optional[str]:
"""Converts api endpoint to mTLS endpoint.
Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
@@ -121,7 +121,7 @@ def _get_default_mtls_endpoint(api_endpoint):
Args:
api_endpoint (Optional[str]): the api endpoint to convert.
Returns:
- str: converted mTLS api endpoint.
+ Optional[str]: converted mTLS api endpoint.
"""
if not api_endpoint:
return api_endpoint
@@ -131,6 +131,10 @@ def _get_default_mtls_endpoint(api_endpoint):
)
m = mtls_endpoint_re.match(api_endpoint)
+ if m is None:
+ # Could not parse api_endpoint; return as-is.
+ return api_endpoint
+
name, mtls, sandbox, googledomain = m.groups()
if mtls or not googledomain:
return api_endpoint
@@ -501,7 +505,7 @@ def _get_client_cert_source(provided_cert_source, use_cert_flag):
@staticmethod
def _get_api_endpoint(
api_override, client_cert_source, universe_domain, use_mtls_endpoint
- ):
+ ) -> str:
"""Return the API endpoint used by the client.
Args:
@@ -598,7 +602,7 @@ def _add_cred_info_for_auth_errors(
error._details.append(json.dumps(cred_info))
@property
- def api_endpoint(self):
+ def api_endpoint(self) -> str:
"""Return the API endpoint used by the client instance.
Returns:
@@ -685,18 +689,16 @@ def __init__(
universe_domain_opt = getattr(self._client_options, "universe_domain", None)
- (
- self._use_client_cert,
- self._use_mtls_endpoint,
- self._universe_domain_env,
- ) = BigtableClient._read_environment_variables()
+ self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = (
+ BigtableClient._read_environment_variables()
+ )
self._client_cert_source = BigtableClient._get_client_cert_source(
self._client_options.client_cert_source, self._use_client_cert
)
self._universe_domain = BigtableClient._get_universe_domain(
universe_domain_opt, self._universe_domain_env
)
- self._api_endpoint = None # updated below, depending on `transport`
+ self._api_endpoint: str = "" # updated below, depending on `transport`
# Initialize the universe domain validation.
self._is_universe_domain_valid = False
@@ -724,8 +726,7 @@ def __init__(
)
if self._client_options.scopes:
raise ValueError(
- "When providing a transport instance, provide its scopes "
- "directly."
+ "When providing a transport instance, provide its scopes directly."
)
self._transport = cast(BigtableTransport, transport)
self._api_endpoint = self._transport.host
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_v2/services/bigtable/transports/README.rst b/packages/google-cloud-bigtable/google/cloud/bigtable_v2/services/bigtable/transports/README.rst
index 254812cd366f..674da52de70f 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable_v2/services/bigtable/transports/README.rst
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable_v2/services/bigtable/transports/README.rst
@@ -2,8 +2,9 @@
transport inheritance structure
_______________________________
-`BigtableTransport` is the ABC for all transports.
-- public child `BigtableGrpcTransport` for sync gRPC transport (defined in `grpc.py`).
-- public child `BigtableGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`).
-- private child `_BaseBigtableRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`).
-- public child `BigtableRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`).
+``BigtableTransport`` is the ABC for all transports.
+
+- public child ``BigtableGrpcTransport`` for sync gRPC transport (defined in ``grpc.py``).
+- public child ``BigtableGrpcAsyncIOTransport`` for async gRPC transport (defined in ``grpc_asyncio.py``).
+- private child ``_BaseBigtableRestTransport`` for base REST transport with inner classes ``_BaseMETHOD`` (defined in ``rest_base.py``).
+- public child ``BigtableRestTransport`` for sync REST transport with inner classes ``METHOD`` derived from the parent's corresponding ``_BaseMETHOD`` classes (defined in ``rest.py``).
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_v2/services/bigtable/transports/__init__.py b/packages/google-cloud-bigtable/google/cloud/bigtable_v2/services/bigtable/transports/__init__.py
index b35e85534182..c8cf1876eefd 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable_v2/services/bigtable/transports/__init__.py
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable_v2/services/bigtable/transports/__init__.py
@@ -19,9 +19,7 @@
from .base import BigtableTransport
from .grpc import BigtableGrpcTransport
from .grpc_asyncio import BigtableGrpcAsyncIOTransport
-from .rest import BigtableRestTransport
-from .rest import BigtableRestInterceptor
-
+from .rest import BigtableRestInterceptor, BigtableRestTransport
# Compile a registry of transports.
_transport_registry = OrderedDict() # type: Dict[str, Type[BigtableTransport]]
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_v2/services/bigtable/transports/base.py b/packages/google-cloud-bigtable/google/cloud/bigtable_v2/services/bigtable/transports/base.py
index f08bca73ede0..27a6204cf903 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable_v2/services/bigtable/transports/base.py
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable_v2/services/bigtable/transports/base.py
@@ -16,17 +16,16 @@
import abc
from typing import Awaitable, Callable, Dict, Optional, Sequence, Union
-from google.cloud.bigtable_v2 import gapic_version as package_version
-
-import google.auth # type: ignore
import google.api_core
+import google.auth # type: ignore
+import google.protobuf
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
from google.api_core import retry as retries
from google.auth import credentials as ga_credentials # type: ignore
from google.oauth2 import service_account # type: ignore
-import google.protobuf
+from google.cloud.bigtable_v2 import gapic_version as package_version
from google.cloud.bigtable_v2.types import bigtable
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
@@ -88,10 +87,12 @@ def __init__(
your own client library.
always_use_jwt_access (Optional[bool]): Whether self signed JWT should
be used for service account credentials.
+ api_audience (Optional[str]): The intended audience for the API calls
+ to the service that will be set when using certain 3rd party
+ authentication flows. Audience is typically a resource identifier.
+ If not set, the host value will be used as a default.
"""
- scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES}
-
# Save the scopes.
self._scopes = scopes
if not hasattr(self, "_ignore_credentials"):
@@ -106,11 +107,16 @@ def __init__(
if credentials_file is not None:
credentials, _ = google.auth.load_credentials_from_file(
- credentials_file, **scopes_kwargs, quota_project_id=quota_project_id
+ credentials_file,
+ scopes=scopes,
+ quota_project_id=quota_project_id,
+ default_scopes=self.AUTH_SCOPES,
)
elif credentials is None and not self._ignore_credentials:
credentials, _ = google.auth.default(
- **scopes_kwargs, quota_project_id=quota_project_id
+ scopes=scopes,
+ quota_project_id=quota_project_id,
+ default_scopes=self.AUTH_SCOPES,
)
# Don't apply audience if the credentials file passed from user.
if hasattr(credentials, "with_gdch_audience"):
@@ -134,6 +140,8 @@ def __init__(
host += ":443"
self._host = host
+ self._wrapped_methods: Dict[Callable, Callable] = {}
+
@property
def host(self):
return self._host
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_v2/services/bigtable/transports/grpc.py b/packages/google-cloud-bigtable/google/cloud/bigtable_v2/services/bigtable/transports/grpc.py
index 8ddbf15a20c0..addbd2b89ce2 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable_v2/services/bigtable/transports/grpc.py
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable_v2/services/bigtable/transports/grpc.py
@@ -19,19 +19,18 @@
import warnings
from typing import Callable, Dict, Optional, Sequence, Tuple, Union
-from google.api_core import grpc_helpers
-from google.api_core import gapic_v1
import google.auth # type: ignore
-from google.auth import credentials as ga_credentials # type: ignore
-from google.auth.transport.grpc import SslCredentials # type: ignore
-from google.protobuf.json_format import MessageToJson
import google.protobuf.message
-
import grpc # type: ignore
import proto # type: ignore
+from google.api_core import gapic_v1, grpc_helpers
+from google.auth import credentials as ga_credentials # type: ignore
+from google.auth.transport.grpc import SslCredentials # type: ignore
+from google.protobuf.json_format import MessageToJson
from google.cloud.bigtable_v2.types import bigtable
-from .base import BigtableTransport, DEFAULT_CLIENT_INFO
+
+from .base import DEFAULT_CLIENT_INFO, BigtableTransport
try:
from google.api_core import client_logging # type: ignore
@@ -55,7 +54,7 @@ def intercept_unary_unary(self, continuation, client_call_details, request):
elif isinstance(request, google.protobuf.message.Message):
request_payload = MessageToJson(request)
else:
- request_payload = f"{type(request).__name__}: {pickle.dumps(request)}"
+ request_payload = f"{type(request).__name__}: {pickle.dumps(request)!r}"
request_metadata = {
key: value.decode("utf-8") if isinstance(value, bytes) else value
@@ -90,7 +89,7 @@ def intercept_unary_unary(self, continuation, client_call_details, request):
elif isinstance(result, google.protobuf.message.Message):
response_payload = MessageToJson(result)
else:
- response_payload = f"{type(result).__name__}: {pickle.dumps(result)}"
+ response_payload = f"{type(result).__name__}: {pickle.dumps(result)!r}"
grpc_response = {
"payload": response_payload,
"metadata": metadata,
@@ -186,6 +185,10 @@ def __init__(
your own client library.
always_use_jwt_access (Optional[bool]): Whether self signed JWT should
be used for service account credentials.
+ api_audience (Optional[str]): The intended audience for the API calls
+ to the service that will be set when using certain 3rd party
+ authentication flows. Audience is typically a resource identifier.
+ If not set, the host value will be used as a default.
Raises:
google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
@@ -557,12 +560,12 @@ def generate_initial_change_stream_partitions(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "generate_initial_change_stream_partitions" not in self._stubs:
- self._stubs[
- "generate_initial_change_stream_partitions"
- ] = self._logged_channel.unary_stream(
- "/google.bigtable.v2.Bigtable/GenerateInitialChangeStreamPartitions",
- request_serializer=bigtable.GenerateInitialChangeStreamPartitionsRequest.serialize,
- response_deserializer=bigtable.GenerateInitialChangeStreamPartitionsResponse.deserialize,
+ self._stubs["generate_initial_change_stream_partitions"] = (
+ self._logged_channel.unary_stream(
+ "/google.bigtable.v2.Bigtable/GenerateInitialChangeStreamPartitions",
+ request_serializer=bigtable.GenerateInitialChangeStreamPartitionsRequest.serialize,
+ response_deserializer=bigtable.GenerateInitialChangeStreamPartitionsResponse.deserialize,
+ )
)
return self._stubs["generate_initial_change_stream_partitions"]
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_v2/services/bigtable/transports/grpc_asyncio.py b/packages/google-cloud-bigtable/google/cloud/bigtable_v2/services/bigtable/transports/grpc_asyncio.py
index 3e6b70832307..54e68eacefab 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable_v2/services/bigtable/transports/grpc_asyncio.py
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable_v2/services/bigtable/transports/grpc_asyncio.py
@@ -15,26 +15,25 @@
#
import inspect
import json
-import pickle
import logging as std_logging
+import pickle
import warnings
from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
-from google.api_core import gapic_v1
-from google.api_core import grpc_helpers_async
+import google.protobuf.message
+import grpc # type: ignore
+import proto # type: ignore
from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1, grpc_helpers_async
from google.api_core import retry_async as retries
from google.auth import credentials as ga_credentials # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
from google.protobuf.json_format import MessageToJson
-import google.protobuf.message
-
-import grpc # type: ignore
-import proto # type: ignore
from grpc.experimental import aio # type: ignore
from google.cloud.bigtable_v2.types import bigtable
-from .base import BigtableTransport, DEFAULT_CLIENT_INFO
+
+from .base import DEFAULT_CLIENT_INFO, BigtableTransport
from .grpc import BigtableGrpcTransport
try:
@@ -61,7 +60,7 @@ async def intercept_unary_unary(self, continuation, client_call_details, request
elif isinstance(request, google.protobuf.message.Message):
request_payload = MessageToJson(request)
else:
- request_payload = f"{type(request).__name__}: {pickle.dumps(request)}"
+ request_payload = f"{type(request).__name__}: {pickle.dumps(request)!r}"
request_metadata = {
key: value.decode("utf-8") if isinstance(value, bytes) else value
@@ -96,7 +95,7 @@ async def intercept_unary_unary(self, continuation, client_call_details, request
elif isinstance(result, google.protobuf.message.Message):
response_payload = MessageToJson(result)
else:
- response_payload = f"{type(result).__name__}: {pickle.dumps(result)}"
+ response_payload = f"{type(result).__name__}: {pickle.dumps(result)!r}"
grpc_response = {
"payload": response_payload,
"metadata": metadata,
@@ -237,6 +236,10 @@ def __init__(
your own client library.
always_use_jwt_access (Optional[bool]): Whether self signed JWT should
be used for service account credentials.
+ api_audience (Optional[str]): The intended audience for the API calls
+ to the service that will be set when using certain 3rd party
+ authentication flows. Audience is typically a resource identifier.
+ If not set, the host value will be used as a default.
Raises:
google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
@@ -571,12 +574,12 @@ def generate_initial_change_stream_partitions(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "generate_initial_change_stream_partitions" not in self._stubs:
- self._stubs[
- "generate_initial_change_stream_partitions"
- ] = self._logged_channel.unary_stream(
- "/google.bigtable.v2.Bigtable/GenerateInitialChangeStreamPartitions",
- request_serializer=bigtable.GenerateInitialChangeStreamPartitionsRequest.serialize,
- response_deserializer=bigtable.GenerateInitialChangeStreamPartitionsResponse.deserialize,
+ self._stubs["generate_initial_change_stream_partitions"] = (
+ self._logged_channel.unary_stream(
+ "/google.bigtable.v2.Bigtable/GenerateInitialChangeStreamPartitions",
+ request_serializer=bigtable.GenerateInitialChangeStreamPartitionsRequest.serialize,
+ response_deserializer=bigtable.GenerateInitialChangeStreamPartitionsResponse.deserialize,
+ )
)
return self._stubs["generate_initial_change_stream_partitions"]
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_v2/services/bigtable/transports/rest.py b/packages/google-cloud-bigtable/google/cloud/bigtable_v2/services/bigtable/transports/rest.py
index f0a761a360c3..c233f68ea815 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable_v2/services/bigtable/transports/rest.py
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable_v2/services/bigtable/transports/rest.py
@@ -13,31 +13,25 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
-import logging
+import dataclasses
import json # type: ignore
+import logging
+import warnings
+from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union
-from google.auth.transport.requests import AuthorizedSession # type: ignore
-from google.auth import credentials as ga_credentials # type: ignore
+import google.protobuf
from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1, rest_helpers, rest_streaming
from google.api_core import retry as retries
-from google.api_core import rest_helpers
-from google.api_core import rest_streaming
-from google.api_core import gapic_v1
-import google.protobuf
-
+from google.auth import credentials as ga_credentials # type: ignore
+from google.auth.transport.requests import AuthorizedSession # type: ignore
from google.protobuf import json_format
-
from requests import __version__ as requests_version
-import dataclasses
-from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union
-import warnings
-
from google.cloud.bigtable_v2.types import bigtable
-
-from .rest_base import _BaseBigtableRestTransport
from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO
+from .rest_base import _BaseBigtableRestTransport
try:
OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None]
@@ -771,6 +765,12 @@ def __init__(
url_scheme: the protocol scheme for the API endpoint. Normally
"https", but for testing or local servers,
"http" can be specified.
+ interceptor (Optional[BigtableRestInterceptor]): Interceptor used
+ to manipulate requests, request metadata, and responses.
+ api_audience (Optional[str]): The intended audience for the API calls
+ to the service that will be set when using certain 3rd party
+ authentication flows. Audience is typically a resource identifier.
+ If not set, the host value will be used as a default.
"""
# Run the base constructor
# TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
@@ -1164,15 +1164,12 @@ def __call__(
"""
- http_options = (
- _BaseBigtableRestTransport._BaseGenerateInitialChangeStreamPartitions._get_http_options()
- )
+ http_options = _BaseBigtableRestTransport._BaseGenerateInitialChangeStreamPartitions._get_http_options()
- (
- request,
- metadata,
- ) = self._interceptor.pre_generate_initial_change_stream_partitions(
- request, metadata
+ request, metadata = (
+ self._interceptor.pre_generate_initial_change_stream_partitions(
+ request, metadata
+ )
)
transcoded_request = _BaseBigtableRestTransport._BaseGenerateInitialChangeStreamPartitions._get_transcoded_request(
http_options, request
@@ -1239,11 +1236,10 @@ def __call__(
resp
)
response_metadata = [(k, str(v)) for k, v in response.headers.items()]
- (
- resp,
- _,
- ) = self._interceptor.post_generate_initial_change_stream_partitions_with_metadata(
- resp, response_metadata
+ resp, _ = (
+ self._interceptor.post_generate_initial_change_stream_partitions_with_metadata(
+ resp, response_metadata
+ )
)
if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(
logging.DEBUG
@@ -2509,7 +2505,9 @@ def generate_initial_change_stream_partitions(
]:
# The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
# In C++ this would require a dynamic_cast
- return self._GenerateInitialChangeStreamPartitions(self._session, self._host, self._interceptor) # type: ignore
+ return self._GenerateInitialChangeStreamPartitions(
+ self._session, self._host, self._interceptor
+ ) # type: ignore
@property
def mutate_row(
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_v2/services/bigtable/transports/rest_base.py b/packages/google-cloud-bigtable/google/cloud/bigtable_v2/services/bigtable/transports/rest_base.py
index 5eab0ded45e2..429dcc9604b8 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable_v2/services/bigtable/transports/rest_base.py
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable_v2/services/bigtable/transports/rest_base.py
@@ -14,18 +14,16 @@
# limitations under the License.
#
import json # type: ignore
-from google.api_core import path_template
-from google.api_core import gapic_v1
-
-from google.protobuf import json_format
-from .base import BigtableTransport, DEFAULT_CLIENT_INFO
-
import re
from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union
+from google.api_core import gapic_v1, path_template
+from google.protobuf import json_format
from google.cloud.bigtable_v2.types import bigtable
+from .base import DEFAULT_CLIENT_INFO, BigtableTransport
+
class _BaseBigtableRestTransport(BigtableTransport):
"""Base REST backend transport for Bigtable.
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_v2/types/bigtable.py b/packages/google-cloud-bigtable/google/cloud/bigtable_v2/types/bigtable.py
index 19abba67b7d6..b6eb9ce21359 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable_v2/types/bigtable.py
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable_v2/types/bigtable.py
@@ -17,16 +17,14 @@
from typing import MutableMapping, MutableSequence
+import google.protobuf.duration_pb2 as duration_pb2 # type: ignore
+import google.protobuf.timestamp_pb2 as timestamp_pb2 # type: ignore
+import google.protobuf.wrappers_pb2 as wrappers_pb2 # type: ignore
+import google.rpc.status_pb2 as status_pb2 # type: ignore
import proto # type: ignore
-from google.cloud.bigtable_v2.types import data
+from google.cloud.bigtable_v2.types import data, types
from google.cloud.bigtable_v2.types import request_stats as gb_request_stats
-from google.cloud.bigtable_v2.types import types
-from google.protobuf import duration_pb2 # type: ignore
-from google.protobuf import timestamp_pb2 # type: ignore
-from google.protobuf import wrappers_pb2 # type: ignore
-from google.rpc import status_pb2 # type: ignore
-
__protobuf__ = proto.module(
package="google.bigtable.v2",
@@ -133,6 +131,7 @@ class RequestStatsView(proto.Enum):
RequestStats in the response, applicable to this
read.
"""
+
REQUEST_STATS_VIEW_UNSPECIFIED = 0
REQUEST_STATS_NONE = 1
REQUEST_STATS_FULL = 2
@@ -1145,6 +1144,7 @@ class Type(proto.Enum):
This is a continuation of a multi-message
change.
"""
+
TYPE_UNSPECIFIED = 0
USER = 1
GARBAGE_COLLECTION = 2
@@ -1172,12 +1172,12 @@ class Type(proto.Enum):
proto.INT32,
number=5,
)
- chunks: MutableSequence[
- "ReadChangeStreamResponse.MutationChunk"
- ] = proto.RepeatedField(
- proto.MESSAGE,
- number=6,
- message="ReadChangeStreamResponse.MutationChunk",
+ chunks: MutableSequence["ReadChangeStreamResponse.MutationChunk"] = (
+ proto.RepeatedField(
+ proto.MESSAGE,
+ number=6,
+ message="ReadChangeStreamResponse.MutationChunk",
+ )
)
done: bool = proto.Field(
proto.BOOL,
@@ -1268,12 +1268,12 @@ class CloseStream(proto.Message):
number=1,
message=status_pb2.Status,
)
- continuation_tokens: MutableSequence[
- data.StreamContinuationToken
- ] = proto.RepeatedField(
- proto.MESSAGE,
- number=2,
- message=data.StreamContinuationToken,
+ continuation_tokens: MutableSequence[data.StreamContinuationToken] = (
+ proto.RepeatedField(
+ proto.MESSAGE,
+ number=2,
+ message=data.StreamContinuationToken,
+ )
)
new_partitions: MutableSequence[data.StreamPartition] = proto.RepeatedField(
proto.MESSAGE,
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_v2/types/data.py b/packages/google-cloud-bigtable/google/cloud/bigtable_v2/types/data.py
index 12ac8b2b1cbb..d4c481c93988 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable_v2/types/data.py
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable_v2/types/data.py
@@ -17,12 +17,11 @@
from typing import MutableMapping, MutableSequence
+import google.protobuf.timestamp_pb2 as timestamp_pb2 # type: ignore
+import google.type.date_pb2 as date_pb2 # type: ignore
import proto # type: ignore
from google.cloud.bigtable_v2.types import types
-from google.protobuf import timestamp_pb2 # type: ignore
-from google.type import date_pb2 # type: ignore
-
__protobuf__ = proto.module(
package="google.bigtable.v2",
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_v2/types/feature_flags.py b/packages/google-cloud-bigtable/google/cloud/bigtable_v2/types/feature_flags.py
index 2c8ea8732746..b7cbaac422c2 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable_v2/types/feature_flags.py
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable_v2/types/feature_flags.py
@@ -19,7 +19,6 @@
import proto # type: ignore
-
__protobuf__ = proto.module(
package="google.bigtable.v2",
manifest={
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_v2/types/peer_info.py b/packages/google-cloud-bigtable/google/cloud/bigtable_v2/types/peer_info.py
index b3f1203cc9e4..8420a7b47a7f 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable_v2/types/peer_info.py
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable_v2/types/peer_info.py
@@ -19,7 +19,6 @@
import proto # type: ignore
-
__protobuf__ = proto.module(
package="google.bigtable.v2",
manifest={
@@ -83,6 +82,7 @@ class TransportType(proto.Enum):
The client connected to this peer via
Bigtable Sessions using DirectAccess.
"""
+
TRANSPORT_TYPE_UNKNOWN = 0
TRANSPORT_TYPE_EXTERNAL = 1
TRANSPORT_TYPE_CLOUD_PATH = 2
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_v2/types/request_stats.py b/packages/google-cloud-bigtable/google/cloud/bigtable_v2/types/request_stats.py
index 540e6548d052..3e53b8fe2872 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable_v2/types/request_stats.py
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable_v2/types/request_stats.py
@@ -17,11 +17,9 @@
from typing import MutableMapping, MutableSequence
+import google.protobuf.duration_pb2 as duration_pb2 # type: ignore
import proto # type: ignore
-from google.protobuf import duration_pb2 # type: ignore
-
-
__protobuf__ = proto.module(
package="google.bigtable.v2",
manifest={
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_v2/types/response_params.py b/packages/google-cloud-bigtable/google/cloud/bigtable_v2/types/response_params.py
index cc6384ab3465..51b87712ff43 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable_v2/types/response_params.py
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable_v2/types/response_params.py
@@ -19,7 +19,6 @@
import proto # type: ignore
-
__protobuf__ = proto.module(
package="google.bigtable.v2",
manifest={
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_v2/types/types.py b/packages/google-cloud-bigtable/google/cloud/bigtable_v2/types/types.py
index 0b4ddb57a6f5..f9344f293d42 100644
--- a/packages/google-cloud-bigtable/google/cloud/bigtable_v2/types/types.py
+++ b/packages/google-cloud-bigtable/google/cloud/bigtable_v2/types/types.py
@@ -19,7 +19,6 @@
import proto # type: ignore
-
__protobuf__ = proto.module(
package="google.bigtable.v2",
manifest={
diff --git a/packages/google-cloud-bigtable/noxfile.py b/packages/google-cloud-bigtable/noxfile.py
index 72ab3f2db08b..b9883960a59e 100644
--- a/packages/google-cloud-bigtable/noxfile.py
+++ b/packages/google-cloud-bigtable/noxfile.py
@@ -1,46 +1,34 @@
# -*- coding: utf-8 -*-
-#
-# Copyright 2024 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
-# https://www.apache.org/licenses/LICENSE-2.0
+# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-
-# DO NOT EDIT THIS FILE OUTSIDE OF `.librarian/generator-input`
-# The source of truth for this file is `.librarian/generator-input`
-
-
-# Generated by synthtool. DO NOT EDIT!
-
-from __future__ import absolute_import
-
+#
import os
import pathlib
import re
import shutil
-from typing import Dict, List
import warnings
+from typing import Dict, List
import nox
-FLAKE8_VERSION = "flake8==6.1.0"
+RUFF_VERSION = "ruff==0.14.14"
BLACK_VERSION = "black[jupyter]==23.7.0"
-ISORT_VERSION = "isort==5.11.0"
-LINT_PATHS = ["google", "tests", "noxfile.py", "setup.py"]
+LINT_PATHS = ["google", "samples", "tests", "noxfile.py", "setup.py"]
DEFAULT_PYTHON_VERSION = "3.14"
-UNIT_TEST_PYTHON_VERSIONS: List[str] = [
- "3.7",
- "3.8",
+ALL_PYTHON = [
"3.9",
"3.10",
"3.11",
@@ -48,6 +36,7 @@
"3.13",
"3.14",
]
+
UNIT_TEST_STANDARD_DEPENDENCIES = [
"mock",
"asyncmock",
@@ -110,21 +99,35 @@ def lint(session):
Returns a failure if the linters find linting errors or sufficiently
serious code quality issues.
"""
- session.install(FLAKE8_VERSION, BLACK_VERSION)
+ session.install("flake8", RUFF_VERSION)
+
+ # Check formatting with ruff
session.run(
- "black",
+ "ruff",
+ "format",
"--check",
+ f"--target-version=py{ALL_PYTHON[0].replace('.', '')}",
+ "--line-length=88",
*LINT_PATHS,
)
+
session.run("flake8", "google", "tests")
@nox.session(python=DEFAULT_PYTHON_VERSION)
def blacken(session):
- """Run black. Format code to uniform standard."""
- session.install(BLACK_VERSION)
+ """(Deprecated) Legacy session. Please use 'nox -s format'."""
+ session.log(
+ "WARNING: The 'blacken' session is deprecated and will be removed in a future release. Please use 'nox -s format' in the future."
+ )
+
+ # Just run the ruff formatter (keeping legacy behavior of only formatting, not sorting imports)
+ session.install(RUFF_VERSION)
session.run(
- "black",
+ "ruff",
+ "format",
+ f"--target-version=py{ALL_PYTHON[0].replace('.', '')}",
+ "--line-length=88",
*LINT_PATHS,
)
@@ -132,19 +135,31 @@ def blacken(session):
@nox.session(python=DEFAULT_PYTHON_VERSION)
def format(session):
"""
- Run isort to sort imports. Then run black
- to format code to uniform standard.
+ Run ruff to sort imports and format code.
"""
- session.install(BLACK_VERSION, ISORT_VERSION)
- # Use the --fss option to sort imports using strict alphabetical order.
- # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections
+ # 1. Install ruff (skipped automatically if you run with --no-venv)
+ session.install(RUFF_VERSION)
+
+ # 2. Run Ruff to fix imports
+ # check --select I: Enables strict import sorting
+ # --fix: Applies the changes automatically
session.run(
- "isort",
- "--fss",
+ "ruff",
+ "check",
+ "--select",
+ "I",
+ "--fix",
+ f"--target-version=py{ALL_PYTHON[0].replace('.', '')}",
+ "--line-length=88", # Standard Black line length
*LINT_PATHS,
)
+
+ # 3. Run Ruff to format code
session.run(
- "black",
+ "ruff",
+ "format",
+ f"--target-version=py{ALL_PYTHON[0].replace('.', '')}",
+ "--line-length=88", # Standard Black line length
*LINT_PATHS,
)
@@ -195,7 +210,7 @@ def install_unittest_dependencies(session, *constraints):
session.install("-e", ".", *constraints)
-@nox.session(python=UNIT_TEST_PYTHON_VERSIONS)
+@nox.session(python=ALL_PYTHON)
@nox.parametrize(
"protobuf_implementation",
["python", "upb", "cpp"],
@@ -271,8 +286,8 @@ def install_systemtest_dependencies(session, *constraints):
@nox.session(python=DEFAULT_PYTHON_VERSION)
def system_emulated(session):
- import subprocess
import signal
+ import subprocess
try:
subprocess.call(["gcloud", "--version"])
@@ -476,9 +491,7 @@ def prerelease_deps(session, protobuf_implementation):
# version, the first version we test with in the unit tests sessions has a
# constraints file containing all dependencies and extras.
with open(
- CURRENT_DIRECTORY
- / "testing"
- / f"constraints-{UNIT_TEST_PYTHON_VERSIONS[0]}.txt",
+ CURRENT_DIRECTORY / "testing" / f"constraints-{ALL_PYTHON[0]}.txt",
encoding="utf-8",
) as constraints_file:
constraints_text = constraints_file.read()
@@ -546,10 +559,73 @@ def generate_sync(session):
@nox.session(python=DEFAULT_PYTHON_VERSION)
-def core_deps_from_source(session):
- """Run all tests with core dependencies installed from source
+@nox.parametrize(
+ "protobuf_implementation",
+ ["python", "upb"],
+)
+def core_deps_from_source(session, protobuf_implementation):
+ """Run all tests with core dependencies installed from source,
rather than pulling the dependencies from PyPI.
"""
- # TODO(https://github.com/googleapis/google-cloud-python/issues/16014):
- # Add core deps from source tests
- session.skip("Core deps from source tests are not yet supported")
+
+ # Install all dependencies
+ session.install("-e", ".")
+
+ # Install dependencies for the unit test environment
+ unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES
+ session.install(*unit_deps_all)
+
+ # Install dependencies for the system test environment
+ system_deps_all = (
+ SYSTEM_TEST_STANDARD_DEPENDENCIES
+ + SYSTEM_TEST_EXTERNAL_DEPENDENCIES
+ + SYSTEM_TEST_EXTRAS
+ )
+ session.install(*system_deps_all)
+
+ # Because we test minimum dependency versions on the minimum Python
+ # version, the first version we test with in the unit tests sessions has a
+ # constraints file containing all dependencies and extras.
+ with open(
+ CURRENT_DIRECTORY / "testing" / f"constraints-{ALL_PYTHON[0]}.txt",
+ encoding="utf-8",
+ ) as constraints_file:
+ constraints_text = constraints_file.read()
+
+ # Ignore leading whitespace and comment lines.
+ constraints_deps = [
+ match.group(1)
+ for match in re.finditer(
+ r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE
+ )
+ ]
+
+ # Install dependencies specified in `testing/constraints-X.txt`.
+ session.install(*constraints_deps)
+
+ # TODO(https://github.com/googleapis/gapic-generator-python/issues/2358): `grpcio` and
+ # `grpcio-status` should be added to the list below so that they are installed from source,
+ # rather than PyPI.
+ # TODO(https://github.com/googleapis/gapic-generator-python/issues/2357): `protobuf` should be
+ # added to the list below so that it is installed from source, rather than PyPI
+ # Note: If a dependency is added to the `core_dependencies_from_source` list,
+ # the `prerel_deps` list in the `prerelease_deps` nox session should also be updated.
+ core_dependencies_from_source = [
+ "googleapis-common-protos @ git+https://github.com/googleapis/google-cloud-python#egg=googleapis-common-protos&subdirectory=packages/googleapis-common-protos",
+ "google-api-core @ git+https://github.com/googleapis/google-cloud-python#egg=google-api-core&subdirectory=packages/google-api-core",
+ "google-auth @ git+https://github.com/googleapis/google-cloud-python#egg=google-auth&subdirectory=packages/google-auth",
+ "grpc-google-iam-v1 @ git+https://github.com/googleapis/google-cloud-python#egg=grpc-google-iam-v1&subdirectory=packages/grpc-google-iam-v1",
+ "proto-plus @ git+https://github.com/googleapis/google-cloud-python#egg=proto-plus&subdirectory=packages/proto-plus",
+ ]
+
+ for dep in core_dependencies_from_source:
+ session.install(dep, "--no-deps", "--ignore-installed")
+ print(f"Installed {dep}")
+
+ session.run(
+ "py.test",
+ "tests/unit",
+ env={
+ "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation,
+ },
+ )
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_create_app_profile_async.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_create_app_profile_async.py
index 82dafab44fb2..c6195bfe56b4 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_create_app_profile_async.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_create_app_profile_async.py
@@ -54,4 +54,5 @@ async def sample_create_app_profile():
# Handle the response
print(response)
+
# [END bigtableadmin_v2_generated_BigtableInstanceAdmin_CreateAppProfile_async]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_create_app_profile_sync.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_create_app_profile_sync.py
index 82ff382b76a8..6913e777690a 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_create_app_profile_sync.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_create_app_profile_sync.py
@@ -54,4 +54,5 @@ def sample_create_app_profile():
# Handle the response
print(response)
+
# [END bigtableadmin_v2_generated_BigtableInstanceAdmin_CreateAppProfile_sync]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_create_cluster_async.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_create_cluster_async.py
index fb9fac60f732..d59f32054880 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_create_cluster_async.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_create_cluster_async.py
@@ -54,4 +54,5 @@ async def sample_create_cluster():
# Handle the response
print(response)
+
# [END bigtableadmin_v2_generated_BigtableInstanceAdmin_CreateCluster_async]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_create_cluster_sync.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_create_cluster_sync.py
index d8d5f99582fd..61599beff695 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_create_cluster_sync.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_create_cluster_sync.py
@@ -54,4 +54,5 @@ def sample_create_cluster():
# Handle the response
print(response)
+
# [END bigtableadmin_v2_generated_BigtableInstanceAdmin_CreateCluster_sync]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_create_instance_async.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_create_instance_async.py
index dbde6c4bc007..bdb4a5a2210b 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_create_instance_async.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_create_instance_async.py
@@ -58,4 +58,5 @@ async def sample_create_instance():
# Handle the response
print(response)
+
# [END bigtableadmin_v2_generated_BigtableInstanceAdmin_CreateInstance_async]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_create_instance_sync.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_create_instance_sync.py
index 83ec90e53d31..80b75bdb9a6e 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_create_instance_sync.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_create_instance_sync.py
@@ -58,4 +58,5 @@ def sample_create_instance():
# Handle the response
print(response)
+
# [END bigtableadmin_v2_generated_BigtableInstanceAdmin_CreateInstance_sync]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_create_logical_view_async.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_create_logical_view_async.py
index 6dfb1d6124f9..395f3a0e9247 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_create_logical_view_async.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_create_logical_view_async.py
@@ -58,4 +58,5 @@ async def sample_create_logical_view():
# Handle the response
print(response)
+
# [END bigtableadmin_v2_generated_BigtableInstanceAdmin_CreateLogicalView_async]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_create_logical_view_sync.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_create_logical_view_sync.py
index f0214acbf298..7afaac40108f 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_create_logical_view_sync.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_create_logical_view_sync.py
@@ -58,4 +58,5 @@ def sample_create_logical_view():
# Handle the response
print(response)
+
# [END bigtableadmin_v2_generated_BigtableInstanceAdmin_CreateLogicalView_sync]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_create_materialized_view_async.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_create_materialized_view_async.py
index 30481d2f3186..d4d577129489 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_create_materialized_view_async.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_create_materialized_view_async.py
@@ -58,4 +58,5 @@ async def sample_create_materialized_view():
# Handle the response
print(response)
+
# [END bigtableadmin_v2_generated_BigtableInstanceAdmin_CreateMaterializedView_async]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_create_materialized_view_sync.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_create_materialized_view_sync.py
index 45116fb49f2e..2483e58c149f 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_create_materialized_view_sync.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_create_materialized_view_sync.py
@@ -58,4 +58,5 @@ def sample_create_materialized_view():
# Handle the response
print(response)
+
# [END bigtableadmin_v2_generated_BigtableInstanceAdmin_CreateMaterializedView_sync]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_get_app_profile_async.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_get_app_profile_async.py
index 3a59ca599370..3fdbea234f9e 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_get_app_profile_async.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_get_app_profile_async.py
@@ -49,4 +49,5 @@ async def sample_get_app_profile():
# Handle the response
print(response)
+
# [END bigtableadmin_v2_generated_BigtableInstanceAdmin_GetAppProfile_async]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_get_app_profile_sync.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_get_app_profile_sync.py
index 2e54bfcad292..d2af61d37739 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_get_app_profile_sync.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_get_app_profile_sync.py
@@ -49,4 +49,5 @@ def sample_get_app_profile():
# Handle the response
print(response)
+
# [END bigtableadmin_v2_generated_BigtableInstanceAdmin_GetAppProfile_sync]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_get_cluster_async.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_get_cluster_async.py
index b4d89a11d819..72e78f8a741a 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_get_cluster_async.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_get_cluster_async.py
@@ -49,4 +49,5 @@ async def sample_get_cluster():
# Handle the response
print(response)
+
# [END bigtableadmin_v2_generated_BigtableInstanceAdmin_GetCluster_async]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_get_cluster_sync.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_get_cluster_sync.py
index 25a80a8718ac..bff29a61a26b 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_get_cluster_sync.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_get_cluster_sync.py
@@ -49,4 +49,5 @@ def sample_get_cluster():
# Handle the response
print(response)
+
# [END bigtableadmin_v2_generated_BigtableInstanceAdmin_GetCluster_sync]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_get_iam_policy_async.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_get_iam_policy_async.py
index b2e479c11d4a..0aaf2680fb86 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_get_iam_policy_async.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_get_iam_policy_async.py
@@ -31,8 +31,9 @@
# - It may require specifying regional endpoints when creating the service
# client as shown in:
# https://googleapis.dev/python/google-api-core/latest/client_options.html
+import google.iam.v1.iam_policy_pb2 as iam_policy_pb2 # type: ignore
+
from google.cloud import bigtable_admin_v2
-from google.iam.v1 import iam_policy_pb2 # type: ignore
async def sample_get_iam_policy():
@@ -50,4 +51,5 @@ async def sample_get_iam_policy():
# Handle the response
print(response)
+
# [END bigtableadmin_v2_generated_BigtableInstanceAdmin_GetIamPolicy_async]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_get_iam_policy_sync.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_get_iam_policy_sync.py
index ffb2a81b02fa..9a1ff3c7ac1b 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_get_iam_policy_sync.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_get_iam_policy_sync.py
@@ -31,8 +31,9 @@
# - It may require specifying regional endpoints when creating the service
# client as shown in:
# https://googleapis.dev/python/google-api-core/latest/client_options.html
+import google.iam.v1.iam_policy_pb2 as iam_policy_pb2 # type: ignore
+
from google.cloud import bigtable_admin_v2
-from google.iam.v1 import iam_policy_pb2 # type: ignore
def sample_get_iam_policy():
@@ -50,4 +51,5 @@ def sample_get_iam_policy():
# Handle the response
print(response)
+
# [END bigtableadmin_v2_generated_BigtableInstanceAdmin_GetIamPolicy_sync]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_get_instance_async.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_get_instance_async.py
index b76fac83a53f..088aeee15a6f 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_get_instance_async.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_get_instance_async.py
@@ -49,4 +49,5 @@ async def sample_get_instance():
# Handle the response
print(response)
+
# [END bigtableadmin_v2_generated_BigtableInstanceAdmin_GetInstance_async]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_get_instance_sync.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_get_instance_sync.py
index 711ed99a5ced..2f0495adec2b 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_get_instance_sync.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_get_instance_sync.py
@@ -49,4 +49,5 @@ def sample_get_instance():
# Handle the response
print(response)
+
# [END bigtableadmin_v2_generated_BigtableInstanceAdmin_GetInstance_sync]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_get_logical_view_async.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_get_logical_view_async.py
index 4ce25cdda60a..afd050f5185a 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_get_logical_view_async.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_get_logical_view_async.py
@@ -49,4 +49,5 @@ async def sample_get_logical_view():
# Handle the response
print(response)
+
# [END bigtableadmin_v2_generated_BigtableInstanceAdmin_GetLogicalView_async]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_get_logical_view_sync.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_get_logical_view_sync.py
index daaf7fa63580..23dc14a75eef 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_get_logical_view_sync.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_get_logical_view_sync.py
@@ -49,4 +49,5 @@ def sample_get_logical_view():
# Handle the response
print(response)
+
# [END bigtableadmin_v2_generated_BigtableInstanceAdmin_GetLogicalView_sync]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_get_materialized_view_async.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_get_materialized_view_async.py
index 165fb262c062..8ecc6763c436 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_get_materialized_view_async.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_get_materialized_view_async.py
@@ -49,4 +49,5 @@ async def sample_get_materialized_view():
# Handle the response
print(response)
+
# [END bigtableadmin_v2_generated_BigtableInstanceAdmin_GetMaterializedView_async]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_get_materialized_view_sync.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_get_materialized_view_sync.py
index 1f94e3954879..efb078c6a9e7 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_get_materialized_view_sync.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_get_materialized_view_sync.py
@@ -49,4 +49,5 @@ def sample_get_materialized_view():
# Handle the response
print(response)
+
# [END bigtableadmin_v2_generated_BigtableInstanceAdmin_GetMaterializedView_sync]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_list_app_profiles_async.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_list_app_profiles_async.py
index d377fc6784e7..312191095e46 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_list_app_profiles_async.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_list_app_profiles_async.py
@@ -50,4 +50,5 @@ async def sample_list_app_profiles():
async for response in page_result:
print(response)
+
# [END bigtableadmin_v2_generated_BigtableInstanceAdmin_ListAppProfiles_async]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_list_app_profiles_sync.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_list_app_profiles_sync.py
index 07f49ba3977f..425897a4123d 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_list_app_profiles_sync.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_list_app_profiles_sync.py
@@ -50,4 +50,5 @@ def sample_list_app_profiles():
for response in page_result:
print(response)
+
# [END bigtableadmin_v2_generated_BigtableInstanceAdmin_ListAppProfiles_sync]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_list_clusters_async.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_list_clusters_async.py
index 71532d98aa3c..50039b707b53 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_list_clusters_async.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_list_clusters_async.py
@@ -49,4 +49,5 @@ async def sample_list_clusters():
# Handle the response
print(response)
+
# [END bigtableadmin_v2_generated_BigtableInstanceAdmin_ListClusters_async]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_list_clusters_sync.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_list_clusters_sync.py
index 1c36c098d536..60adea060038 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_list_clusters_sync.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_list_clusters_sync.py
@@ -49,4 +49,5 @@ def sample_list_clusters():
# Handle the response
print(response)
+
# [END bigtableadmin_v2_generated_BigtableInstanceAdmin_ListClusters_sync]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_list_hot_tablets_async.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_list_hot_tablets_async.py
index cb6d58847903..764d285fec4b 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_list_hot_tablets_async.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_list_hot_tablets_async.py
@@ -50,4 +50,5 @@ async def sample_list_hot_tablets():
async for response in page_result:
print(response)
+
# [END bigtableadmin_v2_generated_BigtableInstanceAdmin_ListHotTablets_async]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_list_hot_tablets_sync.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_list_hot_tablets_sync.py
index 5add7715d2f4..bb9688766d32 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_list_hot_tablets_sync.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_list_hot_tablets_sync.py
@@ -50,4 +50,5 @@ def sample_list_hot_tablets():
for response in page_result:
print(response)
+
# [END bigtableadmin_v2_generated_BigtableInstanceAdmin_ListHotTablets_sync]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_list_instances_async.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_list_instances_async.py
index 91c9a823024d..a0d6f1e2c3c1 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_list_instances_async.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_list_instances_async.py
@@ -49,4 +49,5 @@ async def sample_list_instances():
# Handle the response
print(response)
+
# [END bigtableadmin_v2_generated_BigtableInstanceAdmin_ListInstances_async]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_list_instances_sync.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_list_instances_sync.py
index bbe708c0e3c0..4ece97191a5c 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_list_instances_sync.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_list_instances_sync.py
@@ -49,4 +49,5 @@ def sample_list_instances():
# Handle the response
print(response)
+
# [END bigtableadmin_v2_generated_BigtableInstanceAdmin_ListInstances_sync]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_list_logical_views_async.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_list_logical_views_async.py
index 8de9bd06e9d0..bb8d5a7d47f0 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_list_logical_views_async.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_list_logical_views_async.py
@@ -50,4 +50,5 @@ async def sample_list_logical_views():
async for response in page_result:
print(response)
+
# [END bigtableadmin_v2_generated_BigtableInstanceAdmin_ListLogicalViews_async]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_list_logical_views_sync.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_list_logical_views_sync.py
index b5fb602cd656..c0ec2fb65d7e 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_list_logical_views_sync.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_list_logical_views_sync.py
@@ -50,4 +50,5 @@ def sample_list_logical_views():
for response in page_result:
print(response)
+
# [END bigtableadmin_v2_generated_BigtableInstanceAdmin_ListLogicalViews_sync]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_list_materialized_views_async.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_list_materialized_views_async.py
index 6fa672a2525a..3bc4cb9d97c0 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_list_materialized_views_async.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_list_materialized_views_async.py
@@ -50,4 +50,5 @@ async def sample_list_materialized_views():
async for response in page_result:
print(response)
+
# [END bigtableadmin_v2_generated_BigtableInstanceAdmin_ListMaterializedViews_async]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_list_materialized_views_sync.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_list_materialized_views_sync.py
index 5a25da88ac06..48d6a55e8f03 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_list_materialized_views_sync.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_list_materialized_views_sync.py
@@ -50,4 +50,5 @@ def sample_list_materialized_views():
for response in page_result:
print(response)
+
# [END bigtableadmin_v2_generated_BigtableInstanceAdmin_ListMaterializedViews_sync]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_partial_update_cluster_async.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_partial_update_cluster_async.py
index dab73b9cb659..a24476ca4969 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_partial_update_cluster_async.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_partial_update_cluster_async.py
@@ -39,8 +39,7 @@ async def sample_partial_update_cluster():
client = bigtable_admin_v2.BigtableInstanceAdminAsyncClient()
# Initialize request argument(s)
- request = bigtable_admin_v2.PartialUpdateClusterRequest(
- )
+ request = bigtable_admin_v2.PartialUpdateClusterRequest()
# Make the request
operation = client.partial_update_cluster(request=request)
@@ -52,4 +51,5 @@ async def sample_partial_update_cluster():
# Handle the response
print(response)
+
# [END bigtableadmin_v2_generated_BigtableInstanceAdmin_PartialUpdateCluster_async]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_partial_update_cluster_sync.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_partial_update_cluster_sync.py
index bab63c6ed1bd..4841ee635f11 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_partial_update_cluster_sync.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_partial_update_cluster_sync.py
@@ -39,8 +39,7 @@ def sample_partial_update_cluster():
client = bigtable_admin_v2.BigtableInstanceAdminClient()
# Initialize request argument(s)
- request = bigtable_admin_v2.PartialUpdateClusterRequest(
- )
+ request = bigtable_admin_v2.PartialUpdateClusterRequest()
# Make the request
operation = client.partial_update_cluster(request=request)
@@ -52,4 +51,5 @@ def sample_partial_update_cluster():
# Handle the response
print(response)
+
# [END bigtableadmin_v2_generated_BigtableInstanceAdmin_PartialUpdateCluster_sync]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_partial_update_instance_async.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_partial_update_instance_async.py
index 4c5e53ebe929..502a2dfd9030 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_partial_update_instance_async.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_partial_update_instance_async.py
@@ -56,4 +56,5 @@ async def sample_partial_update_instance():
# Handle the response
print(response)
+
# [END bigtableadmin_v2_generated_BigtableInstanceAdmin_PartialUpdateInstance_async]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_partial_update_instance_sync.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_partial_update_instance_sync.py
index 0d2a74cfcc30..6eb69450928e 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_partial_update_instance_sync.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_partial_update_instance_sync.py
@@ -56,4 +56,5 @@ def sample_partial_update_instance():
# Handle the response
print(response)
+
# [END bigtableadmin_v2_generated_BigtableInstanceAdmin_PartialUpdateInstance_sync]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_set_iam_policy_async.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_set_iam_policy_async.py
index b389b76791b9..e6e0a0f13e28 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_set_iam_policy_async.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_set_iam_policy_async.py
@@ -31,8 +31,9 @@
# - It may require specifying regional endpoints when creating the service
# client as shown in:
# https://googleapis.dev/python/google-api-core/latest/client_options.html
+import google.iam.v1.iam_policy_pb2 as iam_policy_pb2 # type: ignore
+
from google.cloud import bigtable_admin_v2
-from google.iam.v1 import iam_policy_pb2 # type: ignore
async def sample_set_iam_policy():
@@ -50,4 +51,5 @@ async def sample_set_iam_policy():
# Handle the response
print(response)
+
# [END bigtableadmin_v2_generated_BigtableInstanceAdmin_SetIamPolicy_async]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_set_iam_policy_sync.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_set_iam_policy_sync.py
index 97bc29d65589..9281dbcc63b3 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_set_iam_policy_sync.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_set_iam_policy_sync.py
@@ -31,8 +31,9 @@
# - It may require specifying regional endpoints when creating the service
# client as shown in:
# https://googleapis.dev/python/google-api-core/latest/client_options.html
+import google.iam.v1.iam_policy_pb2 as iam_policy_pb2 # type: ignore
+
from google.cloud import bigtable_admin_v2
-from google.iam.v1 import iam_policy_pb2 # type: ignore
def sample_set_iam_policy():
@@ -50,4 +51,5 @@ def sample_set_iam_policy():
# Handle the response
print(response)
+
# [END bigtableadmin_v2_generated_BigtableInstanceAdmin_SetIamPolicy_sync]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_test_iam_permissions_async.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_test_iam_permissions_async.py
index 977f79d9b148..cb1f7844b171 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_test_iam_permissions_async.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_test_iam_permissions_async.py
@@ -31,8 +31,9 @@
# - It may require specifying regional endpoints when creating the service
# client as shown in:
# https://googleapis.dev/python/google-api-core/latest/client_options.html
+import google.iam.v1.iam_policy_pb2 as iam_policy_pb2 # type: ignore
+
from google.cloud import bigtable_admin_v2
-from google.iam.v1 import iam_policy_pb2 # type: ignore
async def sample_test_iam_permissions():
@@ -42,7 +43,7 @@ async def sample_test_iam_permissions():
# Initialize request argument(s)
request = iam_policy_pb2.TestIamPermissionsRequest(
resource="resource_value",
- permissions=['permissions_value1', 'permissions_value2'],
+ permissions=["permissions_value1", "permissions_value2"],
)
# Make the request
@@ -51,4 +52,5 @@ async def sample_test_iam_permissions():
# Handle the response
print(response)
+
# [END bigtableadmin_v2_generated_BigtableInstanceAdmin_TestIamPermissions_async]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_test_iam_permissions_sync.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_test_iam_permissions_sync.py
index db047d3670f4..38ccd693845f 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_test_iam_permissions_sync.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_test_iam_permissions_sync.py
@@ -31,8 +31,9 @@
# - It may require specifying regional endpoints when creating the service
# client as shown in:
# https://googleapis.dev/python/google-api-core/latest/client_options.html
+import google.iam.v1.iam_policy_pb2 as iam_policy_pb2 # type: ignore
+
from google.cloud import bigtable_admin_v2
-from google.iam.v1 import iam_policy_pb2 # type: ignore
def sample_test_iam_permissions():
@@ -42,7 +43,7 @@ def sample_test_iam_permissions():
# Initialize request argument(s)
request = iam_policy_pb2.TestIamPermissionsRequest(
resource="resource_value",
- permissions=['permissions_value1', 'permissions_value2'],
+ permissions=["permissions_value1", "permissions_value2"],
)
# Make the request
@@ -51,4 +52,5 @@ def sample_test_iam_permissions():
# Handle the response
print(response)
+
# [END bigtableadmin_v2_generated_BigtableInstanceAdmin_TestIamPermissions_sync]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_update_app_profile_async.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_update_app_profile_async.py
index 2c55a45bd474..459abe2ebd47 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_update_app_profile_async.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_update_app_profile_async.py
@@ -56,4 +56,5 @@ async def sample_update_app_profile():
# Handle the response
print(response)
+
# [END bigtableadmin_v2_generated_BigtableInstanceAdmin_UpdateAppProfile_async]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_update_app_profile_sync.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_update_app_profile_sync.py
index a7b683426695..7af2c8def593 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_update_app_profile_sync.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_update_app_profile_sync.py
@@ -56,4 +56,5 @@ def sample_update_app_profile():
# Handle the response
print(response)
+
# [END bigtableadmin_v2_generated_BigtableInstanceAdmin_UpdateAppProfile_sync]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_update_cluster_async.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_update_cluster_async.py
index af3abde41cb6..3f0aa895eb49 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_update_cluster_async.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_update_cluster_async.py
@@ -39,8 +39,7 @@ async def sample_update_cluster():
client = bigtable_admin_v2.BigtableInstanceAdminAsyncClient()
# Initialize request argument(s)
- request = bigtable_admin_v2.Cluster(
- )
+ request = bigtable_admin_v2.Cluster()
# Make the request
operation = client.update_cluster(request=request)
@@ -52,4 +51,5 @@ async def sample_update_cluster():
# Handle the response
print(response)
+
# [END bigtableadmin_v2_generated_BigtableInstanceAdmin_UpdateCluster_async]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_update_cluster_sync.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_update_cluster_sync.py
index ec02a64aff7c..35302580b025 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_update_cluster_sync.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_update_cluster_sync.py
@@ -39,8 +39,7 @@ def sample_update_cluster():
client = bigtable_admin_v2.BigtableInstanceAdminClient()
# Initialize request argument(s)
- request = bigtable_admin_v2.Cluster(
- )
+ request = bigtable_admin_v2.Cluster()
# Make the request
operation = client.update_cluster(request=request)
@@ -52,4 +51,5 @@ def sample_update_cluster():
# Handle the response
print(response)
+
# [END bigtableadmin_v2_generated_BigtableInstanceAdmin_UpdateCluster_sync]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_update_instance_async.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_update_instance_async.py
index 798afaf801a5..b5bf904b8d18 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_update_instance_async.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_update_instance_async.py
@@ -49,4 +49,5 @@ async def sample_update_instance():
# Handle the response
print(response)
+
# [END bigtableadmin_v2_generated_BigtableInstanceAdmin_UpdateInstance_async]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_update_instance_sync.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_update_instance_sync.py
index fb6e5e2d3cc0..21231e7ca09a 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_update_instance_sync.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_update_instance_sync.py
@@ -49,4 +49,5 @@ def sample_update_instance():
# Handle the response
print(response)
+
# [END bigtableadmin_v2_generated_BigtableInstanceAdmin_UpdateInstance_sync]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_update_logical_view_async.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_update_logical_view_async.py
index 9bdd620e6d1f..f5e2f06a8641 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_update_logical_view_async.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_update_logical_view_async.py
@@ -56,4 +56,5 @@ async def sample_update_logical_view():
# Handle the response
print(response)
+
# [END bigtableadmin_v2_generated_BigtableInstanceAdmin_UpdateLogicalView_async]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_update_logical_view_sync.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_update_logical_view_sync.py
index 10d962205fed..f9c84b4aaa1b 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_update_logical_view_sync.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_update_logical_view_sync.py
@@ -56,4 +56,5 @@ def sample_update_logical_view():
# Handle the response
print(response)
+
# [END bigtableadmin_v2_generated_BigtableInstanceAdmin_UpdateLogicalView_sync]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_update_materialized_view_async.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_update_materialized_view_async.py
index ddd93047521d..05d2a8f497b4 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_update_materialized_view_async.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_update_materialized_view_async.py
@@ -56,4 +56,5 @@ async def sample_update_materialized_view():
# Handle the response
print(response)
+
# [END bigtableadmin_v2_generated_BigtableInstanceAdmin_UpdateMaterializedView_async]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_update_materialized_view_sync.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_update_materialized_view_sync.py
index a2ef78bd3dad..30849d47a41a 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_update_materialized_view_sync.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_instance_admin_update_materialized_view_sync.py
@@ -56,4 +56,5 @@ def sample_update_materialized_view():
# Handle the response
print(response)
+
# [END bigtableadmin_v2_generated_BigtableInstanceAdmin_UpdateMaterializedView_sync]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_check_consistency_async.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_check_consistency_async.py
index 4cd57edc8245..adf3fbb8d65d 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_check_consistency_async.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_check_consistency_async.py
@@ -50,4 +50,5 @@ async def sample_check_consistency():
# Handle the response
print(response)
+
# [END bigtableadmin_v2_generated_BigtableTableAdmin_CheckConsistency_async]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_check_consistency_sync.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_check_consistency_sync.py
index ec6085b3f596..261f9bb919e3 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_check_consistency_sync.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_check_consistency_sync.py
@@ -50,4 +50,5 @@ def sample_check_consistency():
# Handle the response
print(response)
+
# [END bigtableadmin_v2_generated_BigtableTableAdmin_CheckConsistency_sync]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_copy_backup_async.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_copy_backup_async.py
index 9355b7d44196..f50ba425dbd8 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_copy_backup_async.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_copy_backup_async.py
@@ -55,4 +55,5 @@ async def sample_copy_backup():
# Handle the response
print(response)
+
# [END bigtableadmin_v2_generated_BigtableTableAdmin_CopyBackup_async]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_copy_backup_sync.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_copy_backup_sync.py
index 25456ad2176f..a7634710a3ce 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_copy_backup_sync.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_copy_backup_sync.py
@@ -55,4 +55,5 @@ def sample_copy_backup():
# Handle the response
print(response)
+
# [END bigtableadmin_v2_generated_BigtableTableAdmin_CopyBackup_sync]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_create_authorized_view_async.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_create_authorized_view_async.py
index 135bbe220738..a942ea6f7cb7 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_create_authorized_view_async.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_create_authorized_view_async.py
@@ -54,4 +54,5 @@ async def sample_create_authorized_view():
# Handle the response
print(response)
+
# [END bigtableadmin_v2_generated_BigtableTableAdmin_CreateAuthorizedView_async]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_create_authorized_view_sync.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_create_authorized_view_sync.py
index cafbf56cb3f9..a0cad5c4c0ad 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_create_authorized_view_sync.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_create_authorized_view_sync.py
@@ -54,4 +54,5 @@ def sample_create_authorized_view():
# Handle the response
print(response)
+
# [END bigtableadmin_v2_generated_BigtableTableAdmin_CreateAuthorizedView_sync]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_create_backup_async.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_create_backup_async.py
index d9bd402b47c3..efa32c8b263c 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_create_backup_async.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_create_backup_async.py
@@ -58,4 +58,5 @@ async def sample_create_backup():
# Handle the response
print(response)
+
# [END bigtableadmin_v2_generated_BigtableTableAdmin_CreateBackup_async]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_create_backup_sync.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_create_backup_sync.py
index 835f0573c95e..f459cd3238ca 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_create_backup_sync.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_create_backup_sync.py
@@ -58,4 +58,5 @@ def sample_create_backup():
# Handle the response
print(response)
+
# [END bigtableadmin_v2_generated_BigtableTableAdmin_CreateBackup_sync]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_create_schema_bundle_async.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_create_schema_bundle_async.py
index 8e4992635e97..13777848af6c 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_create_schema_bundle_async.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_create_schema_bundle_async.py
@@ -40,7 +40,7 @@ async def sample_create_schema_bundle():
# Initialize request argument(s)
schema_bundle = bigtable_admin_v2.SchemaBundle()
- schema_bundle.proto_schema.proto_descriptors = b'proto_descriptors_blob'
+ schema_bundle.proto_schema.proto_descriptors = b"proto_descriptors_blob"
request = bigtable_admin_v2.CreateSchemaBundleRequest(
parent="parent_value",
@@ -58,4 +58,5 @@ async def sample_create_schema_bundle():
# Handle the response
print(response)
+
# [END bigtableadmin_v2_generated_BigtableTableAdmin_CreateSchemaBundle_async]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_create_schema_bundle_sync.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_create_schema_bundle_sync.py
index a5911497d4aa..bae26df07427 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_create_schema_bundle_sync.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_create_schema_bundle_sync.py
@@ -40,7 +40,7 @@ def sample_create_schema_bundle():
# Initialize request argument(s)
schema_bundle = bigtable_admin_v2.SchemaBundle()
- schema_bundle.proto_schema.proto_descriptors = b'proto_descriptors_blob'
+ schema_bundle.proto_schema.proto_descriptors = b"proto_descriptors_blob"
request = bigtable_admin_v2.CreateSchemaBundleRequest(
parent="parent_value",
@@ -58,4 +58,5 @@ def sample_create_schema_bundle():
# Handle the response
print(response)
+
# [END bigtableadmin_v2_generated_BigtableTableAdmin_CreateSchemaBundle_sync]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_create_table_async.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_create_table_async.py
index 3096539b98f3..85eb802935d7 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_create_table_async.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_create_table_async.py
@@ -50,4 +50,5 @@ async def sample_create_table():
# Handle the response
print(response)
+
# [END bigtableadmin_v2_generated_BigtableTableAdmin_CreateTable_async]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_create_table_from_snapshot_async.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_create_table_from_snapshot_async.py
index f7767438ef71..bc06756684be 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_create_table_from_snapshot_async.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_create_table_from_snapshot_async.py
@@ -55,4 +55,5 @@ async def sample_create_table_from_snapshot():
# Handle the response
print(response)
+
# [END bigtableadmin_v2_generated_BigtableTableAdmin_CreateTableFromSnapshot_async]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_create_table_from_snapshot_sync.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_create_table_from_snapshot_sync.py
index ff1dd7899c38..68edc0129337 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_create_table_from_snapshot_sync.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_create_table_from_snapshot_sync.py
@@ -55,4 +55,5 @@ def sample_create_table_from_snapshot():
# Handle the response
print(response)
+
# [END bigtableadmin_v2_generated_BigtableTableAdmin_CreateTableFromSnapshot_sync]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_create_table_sync.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_create_table_sync.py
index 552a1095f3ee..c1614857f68b 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_create_table_sync.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_create_table_sync.py
@@ -50,4 +50,5 @@ def sample_create_table():
# Handle the response
print(response)
+
# [END bigtableadmin_v2_generated_BigtableTableAdmin_CreateTable_sync]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_drop_row_range_async.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_drop_row_range_async.py
index 391205c7c1d8..46a83dfa17da 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_drop_row_range_async.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_drop_row_range_async.py
@@ -40,7 +40,7 @@ async def sample_drop_row_range():
# Initialize request argument(s)
request = bigtable_admin_v2.DropRowRangeRequest(
- row_key_prefix=b'row_key_prefix_blob',
+ row_key_prefix=b"row_key_prefix_blob",
name="name_value",
)
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_drop_row_range_sync.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_drop_row_range_sync.py
index bcd528f1ae77..13949a21a2b1 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_drop_row_range_sync.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_drop_row_range_sync.py
@@ -40,7 +40,7 @@ def sample_drop_row_range():
# Initialize request argument(s)
request = bigtable_admin_v2.DropRowRangeRequest(
- row_key_prefix=b'row_key_prefix_blob',
+ row_key_prefix=b"row_key_prefix_blob",
name="name_value",
)
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_generate_consistency_token_async.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_generate_consistency_token_async.py
index 1953441b6d61..a72f31f116da 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_generate_consistency_token_async.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_generate_consistency_token_async.py
@@ -49,4 +49,5 @@ async def sample_generate_consistency_token():
# Handle the response
print(response)
+
# [END bigtableadmin_v2_generated_BigtableTableAdmin_GenerateConsistencyToken_async]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_generate_consistency_token_sync.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_generate_consistency_token_sync.py
index 4ae52264d270..6a06837ef548 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_generate_consistency_token_sync.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_generate_consistency_token_sync.py
@@ -49,4 +49,5 @@ def sample_generate_consistency_token():
# Handle the response
print(response)
+
# [END bigtableadmin_v2_generated_BigtableTableAdmin_GenerateConsistencyToken_sync]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_get_authorized_view_async.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_get_authorized_view_async.py
index 129948bc5dd3..c1355d7dfdef 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_get_authorized_view_async.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_get_authorized_view_async.py
@@ -49,4 +49,5 @@ async def sample_get_authorized_view():
# Handle the response
print(response)
+
# [END bigtableadmin_v2_generated_BigtableTableAdmin_GetAuthorizedView_async]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_get_authorized_view_sync.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_get_authorized_view_sync.py
index 9cc63538c03a..358582b3e080 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_get_authorized_view_sync.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_get_authorized_view_sync.py
@@ -49,4 +49,5 @@ def sample_get_authorized_view():
# Handle the response
print(response)
+
# [END bigtableadmin_v2_generated_BigtableTableAdmin_GetAuthorizedView_sync]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_get_backup_async.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_get_backup_async.py
index 524d63e8638b..f2a0c9808d23 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_get_backup_async.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_get_backup_async.py
@@ -49,4 +49,5 @@ async def sample_get_backup():
# Handle the response
print(response)
+
# [END bigtableadmin_v2_generated_BigtableTableAdmin_GetBackup_async]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_get_backup_sync.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_get_backup_sync.py
index 5ed91b80c4db..ceca44fdd803 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_get_backup_sync.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_get_backup_sync.py
@@ -49,4 +49,5 @@ def sample_get_backup():
# Handle the response
print(response)
+
# [END bigtableadmin_v2_generated_BigtableTableAdmin_GetBackup_sync]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_get_iam_policy_async.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_get_iam_policy_async.py
index a599239d587e..ab44d558886f 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_get_iam_policy_async.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_get_iam_policy_async.py
@@ -31,8 +31,9 @@
# - It may require specifying regional endpoints when creating the service
# client as shown in:
# https://googleapis.dev/python/google-api-core/latest/client_options.html
+import google.iam.v1.iam_policy_pb2 as iam_policy_pb2 # type: ignore
+
from google.cloud import bigtable_admin_v2
-from google.iam.v1 import iam_policy_pb2 # type: ignore
async def sample_get_iam_policy():
@@ -50,4 +51,5 @@ async def sample_get_iam_policy():
# Handle the response
print(response)
+
# [END bigtableadmin_v2_generated_BigtableTableAdmin_GetIamPolicy_async]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_get_iam_policy_sync.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_get_iam_policy_sync.py
index 2d6e71c27394..729cd4d70bc6 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_get_iam_policy_sync.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_get_iam_policy_sync.py
@@ -31,8 +31,9 @@
# - It may require specifying regional endpoints when creating the service
# client as shown in:
# https://googleapis.dev/python/google-api-core/latest/client_options.html
+import google.iam.v1.iam_policy_pb2 as iam_policy_pb2 # type: ignore
+
from google.cloud import bigtable_admin_v2
-from google.iam.v1 import iam_policy_pb2 # type: ignore
def sample_get_iam_policy():
@@ -50,4 +51,5 @@ def sample_get_iam_policy():
# Handle the response
print(response)
+
# [END bigtableadmin_v2_generated_BigtableTableAdmin_GetIamPolicy_sync]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_get_schema_bundle_async.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_get_schema_bundle_async.py
index b5e580276409..92c23ef7a7a8 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_get_schema_bundle_async.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_get_schema_bundle_async.py
@@ -49,4 +49,5 @@ async def sample_get_schema_bundle():
# Handle the response
print(response)
+
# [END bigtableadmin_v2_generated_BigtableTableAdmin_GetSchemaBundle_async]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_get_schema_bundle_sync.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_get_schema_bundle_sync.py
index 1ea7b69b70e9..886f09b1de86 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_get_schema_bundle_sync.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_get_schema_bundle_sync.py
@@ -49,4 +49,5 @@ def sample_get_schema_bundle():
# Handle the response
print(response)
+
# [END bigtableadmin_v2_generated_BigtableTableAdmin_GetSchemaBundle_sync]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_get_snapshot_async.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_get_snapshot_async.py
index ae48060bb882..9da2c9bad293 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_get_snapshot_async.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_get_snapshot_async.py
@@ -49,4 +49,5 @@ async def sample_get_snapshot():
# Handle the response
print(response)
+
# [END bigtableadmin_v2_generated_BigtableTableAdmin_GetSnapshot_async]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_get_snapshot_sync.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_get_snapshot_sync.py
index 8626549fda5c..e2a014026594 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_get_snapshot_sync.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_get_snapshot_sync.py
@@ -49,4 +49,5 @@ def sample_get_snapshot():
# Handle the response
print(response)
+
# [END bigtableadmin_v2_generated_BigtableTableAdmin_GetSnapshot_sync]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_get_table_async.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_get_table_async.py
index ff8dff1ae962..866048c0c3fd 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_get_table_async.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_get_table_async.py
@@ -49,4 +49,5 @@ async def sample_get_table():
# Handle the response
print(response)
+
# [END bigtableadmin_v2_generated_BigtableTableAdmin_GetTable_async]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_get_table_sync.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_get_table_sync.py
index ccb68b7664d2..ab160c9b1073 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_get_table_sync.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_get_table_sync.py
@@ -49,4 +49,5 @@ def sample_get_table():
# Handle the response
print(response)
+
# [END bigtableadmin_v2_generated_BigtableTableAdmin_GetTable_sync]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_list_authorized_views_async.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_list_authorized_views_async.py
index 658b8f96a136..14644c256157 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_list_authorized_views_async.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_list_authorized_views_async.py
@@ -50,4 +50,5 @@ async def sample_list_authorized_views():
async for response in page_result:
print(response)
+
# [END bigtableadmin_v2_generated_BigtableTableAdmin_ListAuthorizedViews_async]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_list_authorized_views_sync.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_list_authorized_views_sync.py
index a7bf4b6adaa5..779aee1be3ba 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_list_authorized_views_sync.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_list_authorized_views_sync.py
@@ -50,4 +50,5 @@ def sample_list_authorized_views():
for response in page_result:
print(response)
+
# [END bigtableadmin_v2_generated_BigtableTableAdmin_ListAuthorizedViews_sync]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_list_backups_async.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_list_backups_async.py
index 368c376f015b..512c2c68fc44 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_list_backups_async.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_list_backups_async.py
@@ -50,4 +50,5 @@ async def sample_list_backups():
async for response in page_result:
print(response)
+
# [END bigtableadmin_v2_generated_BigtableTableAdmin_ListBackups_async]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_list_backups_sync.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_list_backups_sync.py
index ca0e3e0f2329..06a414b36719 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_list_backups_sync.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_list_backups_sync.py
@@ -50,4 +50,5 @@ def sample_list_backups():
for response in page_result:
print(response)
+
# [END bigtableadmin_v2_generated_BigtableTableAdmin_ListBackups_sync]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_list_schema_bundles_async.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_list_schema_bundles_async.py
index 3daf30e6dd39..9c01f43cd1e6 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_list_schema_bundles_async.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_list_schema_bundles_async.py
@@ -50,4 +50,5 @@ async def sample_list_schema_bundles():
async for response in page_result:
print(response)
+
# [END bigtableadmin_v2_generated_BigtableTableAdmin_ListSchemaBundles_async]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_list_schema_bundles_sync.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_list_schema_bundles_sync.py
index 945d606bbf97..04eeae665240 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_list_schema_bundles_sync.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_list_schema_bundles_sync.py
@@ -50,4 +50,5 @@ def sample_list_schema_bundles():
for response in page_result:
print(response)
+
# [END bigtableadmin_v2_generated_BigtableTableAdmin_ListSchemaBundles_sync]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_list_snapshots_async.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_list_snapshots_async.py
index 91acb1d9e49e..141ee6ee6e4b 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_list_snapshots_async.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_list_snapshots_async.py
@@ -50,4 +50,5 @@ async def sample_list_snapshots():
async for response in page_result:
print(response)
+
# [END bigtableadmin_v2_generated_BigtableTableAdmin_ListSnapshots_async]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_list_snapshots_sync.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_list_snapshots_sync.py
index 7f809156fa44..e697887dc449 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_list_snapshots_sync.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_list_snapshots_sync.py
@@ -50,4 +50,5 @@ def sample_list_snapshots():
for response in page_result:
print(response)
+
# [END bigtableadmin_v2_generated_BigtableTableAdmin_ListSnapshots_sync]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_list_tables_async.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_list_tables_async.py
index 191de0fc738f..7666d7ad7eae 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_list_tables_async.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_list_tables_async.py
@@ -50,4 +50,5 @@ async def sample_list_tables():
async for response in page_result:
print(response)
+
# [END bigtableadmin_v2_generated_BigtableTableAdmin_ListTables_async]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_list_tables_sync.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_list_tables_sync.py
index 5d0f3a2781e4..5927bf5aefa0 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_list_tables_sync.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_list_tables_sync.py
@@ -50,4 +50,5 @@ def sample_list_tables():
for response in page_result:
print(response)
+
# [END bigtableadmin_v2_generated_BigtableTableAdmin_ListTables_sync]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_modify_column_families_async.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_modify_column_families_async.py
index 2c206eb44706..c21714329236 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_modify_column_families_async.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_modify_column_families_async.py
@@ -49,4 +49,5 @@ async def sample_modify_column_families():
# Handle the response
print(response)
+
# [END bigtableadmin_v2_generated_BigtableTableAdmin_ModifyColumnFamilies_async]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_modify_column_families_sync.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_modify_column_families_sync.py
index 6224f5c5e62c..2ff1eee3556f 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_modify_column_families_sync.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_modify_column_families_sync.py
@@ -49,4 +49,5 @@ def sample_modify_column_families():
# Handle the response
print(response)
+
# [END bigtableadmin_v2_generated_BigtableTableAdmin_ModifyColumnFamilies_sync]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_restore_table_async_internal.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_restore_table_async_internal.py
index f70b5da17f49..b0b0d0f2506d 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_restore_table_async_internal.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_restore_table_async_internal.py
@@ -55,4 +55,5 @@ async def sample_restore_table():
# Handle the response
print(response)
+
# [END bigtableadmin_v2_generated_BigtableTableAdmin_RestoreTable_async_internal]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_restore_table_sync_internal.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_restore_table_sync_internal.py
index 45621c22b068..f99a3d6aee8c 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_restore_table_sync_internal.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_restore_table_sync_internal.py
@@ -55,4 +55,5 @@ def sample_restore_table():
# Handle the response
print(response)
+
# [END bigtableadmin_v2_generated_BigtableTableAdmin_RestoreTable_sync_internal]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_set_iam_policy_async.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_set_iam_policy_async.py
index cbfafdc7728c..e21252ecabda 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_set_iam_policy_async.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_set_iam_policy_async.py
@@ -31,8 +31,9 @@
# - It may require specifying regional endpoints when creating the service
# client as shown in:
# https://googleapis.dev/python/google-api-core/latest/client_options.html
+import google.iam.v1.iam_policy_pb2 as iam_policy_pb2 # type: ignore
+
from google.cloud import bigtable_admin_v2
-from google.iam.v1 import iam_policy_pb2 # type: ignore
async def sample_set_iam_policy():
@@ -50,4 +51,5 @@ async def sample_set_iam_policy():
# Handle the response
print(response)
+
# [END bigtableadmin_v2_generated_BigtableTableAdmin_SetIamPolicy_async]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_set_iam_policy_sync.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_set_iam_policy_sync.py
index 9a6c5fcc23da..609086bc9243 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_set_iam_policy_sync.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_set_iam_policy_sync.py
@@ -31,8 +31,9 @@
# - It may require specifying regional endpoints when creating the service
# client as shown in:
# https://googleapis.dev/python/google-api-core/latest/client_options.html
+import google.iam.v1.iam_policy_pb2 as iam_policy_pb2 # type: ignore
+
from google.cloud import bigtable_admin_v2
-from google.iam.v1 import iam_policy_pb2 # type: ignore
def sample_set_iam_policy():
@@ -50,4 +51,5 @@ def sample_set_iam_policy():
# Handle the response
print(response)
+
# [END bigtableadmin_v2_generated_BigtableTableAdmin_SetIamPolicy_sync]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_snapshot_table_async.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_snapshot_table_async.py
index 6ff619e85bc8..74d8af24da07 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_snapshot_table_async.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_snapshot_table_async.py
@@ -55,4 +55,5 @@ async def sample_snapshot_table():
# Handle the response
print(response)
+
# [END bigtableadmin_v2_generated_BigtableTableAdmin_SnapshotTable_async]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_snapshot_table_sync.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_snapshot_table_sync.py
index f983f78240c0..27159da2f77b 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_snapshot_table_sync.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_snapshot_table_sync.py
@@ -55,4 +55,5 @@ def sample_snapshot_table():
# Handle the response
print(response)
+
# [END bigtableadmin_v2_generated_BigtableTableAdmin_SnapshotTable_sync]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_test_iam_permissions_async.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_test_iam_permissions_async.py
index ee5fe6027719..558c21079288 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_test_iam_permissions_async.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_test_iam_permissions_async.py
@@ -31,8 +31,9 @@
# - It may require specifying regional endpoints when creating the service
# client as shown in:
# https://googleapis.dev/python/google-api-core/latest/client_options.html
+import google.iam.v1.iam_policy_pb2 as iam_policy_pb2 # type: ignore
+
from google.cloud import bigtable_admin_v2
-from google.iam.v1 import iam_policy_pb2 # type: ignore
async def sample_test_iam_permissions():
@@ -42,7 +43,7 @@ async def sample_test_iam_permissions():
# Initialize request argument(s)
request = iam_policy_pb2.TestIamPermissionsRequest(
resource="resource_value",
- permissions=['permissions_value1', 'permissions_value2'],
+ permissions=["permissions_value1", "permissions_value2"],
)
# Make the request
@@ -51,4 +52,5 @@ async def sample_test_iam_permissions():
# Handle the response
print(response)
+
# [END bigtableadmin_v2_generated_BigtableTableAdmin_TestIamPermissions_async]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_test_iam_permissions_sync.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_test_iam_permissions_sync.py
index 46f0870b04e0..97c9138a1bd7 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_test_iam_permissions_sync.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_test_iam_permissions_sync.py
@@ -31,8 +31,9 @@
# - It may require specifying regional endpoints when creating the service
# client as shown in:
# https://googleapis.dev/python/google-api-core/latest/client_options.html
+import google.iam.v1.iam_policy_pb2 as iam_policy_pb2 # type: ignore
+
from google.cloud import bigtable_admin_v2
-from google.iam.v1 import iam_policy_pb2 # type: ignore
def sample_test_iam_permissions():
@@ -42,7 +43,7 @@ def sample_test_iam_permissions():
# Initialize request argument(s)
request = iam_policy_pb2.TestIamPermissionsRequest(
resource="resource_value",
- permissions=['permissions_value1', 'permissions_value2'],
+ permissions=["permissions_value1", "permissions_value2"],
)
# Make the request
@@ -51,4 +52,5 @@ def sample_test_iam_permissions():
# Handle the response
print(response)
+
# [END bigtableadmin_v2_generated_BigtableTableAdmin_TestIamPermissions_sync]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_undelete_table_async.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_undelete_table_async.py
index 1e2f6aa5afbf..2b9d2c4962bc 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_undelete_table_async.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_undelete_table_async.py
@@ -53,4 +53,5 @@ async def sample_undelete_table():
# Handle the response
print(response)
+
# [END bigtableadmin_v2_generated_BigtableTableAdmin_UndeleteTable_async]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_undelete_table_sync.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_undelete_table_sync.py
index 637afee8b270..b8315344f85a 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_undelete_table_sync.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_undelete_table_sync.py
@@ -53,4 +53,5 @@ def sample_undelete_table():
# Handle the response
print(response)
+
# [END bigtableadmin_v2_generated_BigtableTableAdmin_UndeleteTable_sync]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_update_authorized_view_async.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_update_authorized_view_async.py
index 541427d4894f..42ac40a61f5e 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_update_authorized_view_async.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_update_authorized_view_async.py
@@ -39,8 +39,7 @@ async def sample_update_authorized_view():
client = bigtable_admin_v2.BigtableTableAdminAsyncClient()
# Initialize request argument(s)
- request = bigtable_admin_v2.UpdateAuthorizedViewRequest(
- )
+ request = bigtable_admin_v2.UpdateAuthorizedViewRequest()
# Make the request
operation = client.update_authorized_view(request=request)
@@ -52,4 +51,5 @@ async def sample_update_authorized_view():
# Handle the response
print(response)
+
# [END bigtableadmin_v2_generated_BigtableTableAdmin_UpdateAuthorizedView_async]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_update_authorized_view_sync.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_update_authorized_view_sync.py
index 9c8198d9aceb..6ccccd34c434 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_update_authorized_view_sync.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_update_authorized_view_sync.py
@@ -39,8 +39,7 @@ def sample_update_authorized_view():
client = bigtable_admin_v2.BigtableTableAdminClient()
# Initialize request argument(s)
- request = bigtable_admin_v2.UpdateAuthorizedViewRequest(
- )
+ request = bigtable_admin_v2.UpdateAuthorizedViewRequest()
# Make the request
operation = client.update_authorized_view(request=request)
@@ -52,4 +51,5 @@ def sample_update_authorized_view():
# Handle the response
print(response)
+
# [END bigtableadmin_v2_generated_BigtableTableAdmin_UpdateAuthorizedView_sync]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_update_backup_async.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_update_backup_async.py
index f98e1e33a771..29d39bd90a2f 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_update_backup_async.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_update_backup_async.py
@@ -52,4 +52,5 @@ async def sample_update_backup():
# Handle the response
print(response)
+
# [END bigtableadmin_v2_generated_BigtableTableAdmin_UpdateBackup_async]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_update_backup_sync.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_update_backup_sync.py
index 466a3decb94e..7a14d56c95d0 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_update_backup_sync.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_update_backup_sync.py
@@ -52,4 +52,5 @@ def sample_update_backup():
# Handle the response
print(response)
+
# [END bigtableadmin_v2_generated_BigtableTableAdmin_UpdateBackup_sync]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_update_schema_bundle_async.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_update_schema_bundle_async.py
index 96447088e398..6e6da9fb4439 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_update_schema_bundle_async.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_update_schema_bundle_async.py
@@ -40,7 +40,7 @@ async def sample_update_schema_bundle():
# Initialize request argument(s)
schema_bundle = bigtable_admin_v2.SchemaBundle()
- schema_bundle.proto_schema.proto_descriptors = b'proto_descriptors_blob'
+ schema_bundle.proto_schema.proto_descriptors = b"proto_descriptors_blob"
request = bigtable_admin_v2.UpdateSchemaBundleRequest(
schema_bundle=schema_bundle,
@@ -56,4 +56,5 @@ async def sample_update_schema_bundle():
# Handle the response
print(response)
+
# [END bigtableadmin_v2_generated_BigtableTableAdmin_UpdateSchemaBundle_async]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_update_schema_bundle_sync.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_update_schema_bundle_sync.py
index 07568306045c..17f755a446bc 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_update_schema_bundle_sync.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_update_schema_bundle_sync.py
@@ -40,7 +40,7 @@ def sample_update_schema_bundle():
# Initialize request argument(s)
schema_bundle = bigtable_admin_v2.SchemaBundle()
- schema_bundle.proto_schema.proto_descriptors = b'proto_descriptors_blob'
+ schema_bundle.proto_schema.proto_descriptors = b"proto_descriptors_blob"
request = bigtable_admin_v2.UpdateSchemaBundleRequest(
schema_bundle=schema_bundle,
@@ -56,4 +56,5 @@ def sample_update_schema_bundle():
# Handle the response
print(response)
+
# [END bigtableadmin_v2_generated_BigtableTableAdmin_UpdateSchemaBundle_sync]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_update_table_async.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_update_table_async.py
index 93839d36f5ce..14c9838aca5f 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_update_table_async.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_update_table_async.py
@@ -39,8 +39,7 @@ async def sample_update_table():
client = bigtable_admin_v2.BigtableTableAdminAsyncClient()
# Initialize request argument(s)
- request = bigtable_admin_v2.UpdateTableRequest(
- )
+ request = bigtable_admin_v2.UpdateTableRequest()
# Make the request
operation = client.update_table(request=request)
@@ -52,4 +51,5 @@ async def sample_update_table():
# Handle the response
print(response)
+
# [END bigtableadmin_v2_generated_BigtableTableAdmin_UpdateTable_async]
diff --git a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_update_table_sync.py b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_update_table_sync.py
index fea09f6a890c..04224b2e766d 100644
--- a/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_update_table_sync.py
+++ b/packages/google-cloud-bigtable/samples/generated_samples/bigtableadmin_v2_generated_bigtable_table_admin_update_table_sync.py
@@ -39,8 +39,7 @@ def sample_update_table():
client = bigtable_admin_v2.BigtableTableAdminClient()
# Initialize request argument(s)
- request = bigtable_admin_v2.UpdateTableRequest(
- )
+ request = bigtable_admin_v2.UpdateTableRequest()
# Make the request
operation = client.update_table(request=request)
@@ -52,4 +51,5 @@ def sample_update_table():
# Handle the response
print(response)
+
# [END bigtableadmin_v2_generated_BigtableTableAdmin_UpdateTable_sync]
diff --git a/packages/google-cloud-bigtable/setup.py b/packages/google-cloud-bigtable/setup.py
index 7c005aa0b2bc..80b0380b2b96 100644
--- a/packages/google-cloud-bigtable/setup.py
+++ b/packages/google-cloud-bigtable/setup.py
@@ -1,4 +1,5 @@
-# Copyright 2018 Google LLC
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -11,49 +12,51 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-
-# DO NOT EDIT THIS FILE OUTSIDE OF `.librarian/generator-input`
-# The source of truth for this file is `.librarian/generator-input`
-
-
+#
import io
import os
+import re
-import setuptools
-
+import setuptools # type: ignore
package_root = os.path.abspath(os.path.dirname(__file__))
-# Package metadata.
-
name = "google-cloud-bigtable"
+
+
description = "Google Cloud Bigtable API client library"
-version = {}
+version = None
+
with open(os.path.join(package_root, "google/cloud/bigtable/gapic_version.py")) as fp:
- exec(fp.read(), version)
-version = version["__version__"]
+ version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", fp.read())
+ assert len(version_candidates) == 1
+ version = version_candidates[0]
+if version[0] == "0":
+ release_status = "Development Status :: 4 - Beta"
+else:
+ release_status = "Development Status :: 5 - Production/Stable"
-# Should be one of:
-# 'Development Status :: 3 - Alpha'
-# 'Development Status :: 4 - Beta'
-# 'Development Status :: 5 - Production/Stable'
-release_status = "Development Status :: 5 - Production/Stable"
dependencies = [
- "google-api-core[grpc] >= 2.17.0, <3.0.0",
+ "google-api-core[grpc] >= 2.11.0, <3.0.0",
+ # Exclude incompatible versions of `google-auth`
+ # See https://github.com/googleapis/google-cloud-python/issues/12364
+ "google-auth >= 2.14.1, <3.0.0,!=2.24.0,!=2.25.0",
+ "grpcio >= 1.33.2, < 2.0.0",
+ "grpcio >= 1.75.1, < 2.0.0; python_version >= '3.14'",
+ "proto-plus >= 1.22.3, <2.0.0",
+ "proto-plus >= 1.25.0, <2.0.0; python_version >= '3.13'",
+ "protobuf >= 4.25.8, < 8.0.0",
"google-cloud-core >= 1.4.4, <3.0.0",
- "google-auth >= 2.23.0, <3.0.0,!=2.24.0,!=2.25.0",
"grpc-google-iam-v1 >= 0.12.4, <1.0.0",
- "proto-plus >= 1.22.3, <2.0.0",
- "proto-plus >= 1.25.0, <2.0.0; python_version>='3.13'",
- "protobuf>=3.20.2,<7.0.0,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5",
"google-crc32c>=1.5.0, <2.0.0dev",
]
-extras = {"libcst": "libcst >= 0.2.5"}
-
+extras = {
+ "libcst": "libcst >= 0.2.5",
+}
-# Setup boilerplate below this line.
+url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigtable"
package_root = os.path.abspath(os.path.dirname(__file__))
@@ -61,8 +64,6 @@
with io.open(readme_filename, encoding="utf-8") as readme_file:
readme = readme_file.read()
-# Only include packages under the 'google' namespace. Do not include tests,
-# benchmarks, etc.
packages = [
package
for package in setuptools.find_namespace_packages()
@@ -77,14 +78,13 @@
author="Google LLC",
author_email="googleapis-packages@google.com",
license="Apache 2.0",
- url="https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigtable",
+ url=url,
classifiers=[
release_status,
"Intended Audience :: Developers",
"License :: OSI Approved :: Apache Software License",
"Programming Language :: Python",
- "Programming Language :: Python :: 3.7",
- "Programming Language :: Python :: 3.8",
+ "Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
@@ -96,9 +96,9 @@
],
platforms="Posix; MacOS X; Windows",
packages=packages,
+ python_requires=">=3.9",
install_requires=dependencies,
extras_require=extras,
- python_requires=">=3.7",
include_package_data=True,
zip_safe=False,
)
diff --git a/packages/google-cloud-bigtable/test_proxy/handlers/client_handler_data_sync_autogen.py b/packages/google-cloud-bigtable/test_proxy/handlers/client_handler_data_sync_autogen.py
index 0e557f058f39..16b8382fca41 100644
--- a/packages/google-cloud-bigtable/test_proxy/handlers/client_handler_data_sync_autogen.py
+++ b/packages/google-cloud-bigtable/test_proxy/handlers/client_handler_data_sync_autogen.py
@@ -190,9 +190,7 @@ async def ExecuteQuery(self, request, **kwargs):
app_profile_id = self.app_profile_id or request.get("app_profile_id", None)
query = request.get("query")
params = request.get("params") or {}
- (formatted_params, parameter_types) = sql_encoding_helpers.convert_params(
- params
- )
+ formatted_params, parameter_types = sql_encoding_helpers.convert_params(params)
operation_timeout = (
kwargs.get("operation_timeout", self.per_operation_timeout) or 20
)
diff --git a/packages/google-cloud-bigtable/testing/.gitignore b/packages/google-cloud-bigtable/testing/.gitignore
deleted file mode 100644
index b05fbd630881..000000000000
--- a/packages/google-cloud-bigtable/testing/.gitignore
+++ /dev/null
@@ -1,3 +0,0 @@
-test-env.sh
-service-account.json
-client-secrets.json
\ No newline at end of file
diff --git a/packages/google-cloud-bigtable/testing/constraints-3.10.txt b/packages/google-cloud-bigtable/testing/constraints-3.10.txt
index e69de29bb2d1..7599dea499ed 100644
--- a/packages/google-cloud-bigtable/testing/constraints-3.10.txt
+++ b/packages/google-cloud-bigtable/testing/constraints-3.10.txt
@@ -0,0 +1,10 @@
+# -*- coding: utf-8 -*-
+# This constraints file is required for unit tests.
+# List all library dependencies and extras in this file.
+google-api-core
+google-auth
+grpcio
+proto-plus
+protobuf
+# cryptography is a direct dependency of google-auth
+cryptography
diff --git a/packages/google-cloud-bigtable/testing/constraints-3.11.txt b/packages/google-cloud-bigtable/testing/constraints-3.11.txt
index e69de29bb2d1..7599dea499ed 100644
--- a/packages/google-cloud-bigtable/testing/constraints-3.11.txt
+++ b/packages/google-cloud-bigtable/testing/constraints-3.11.txt
@@ -0,0 +1,10 @@
+# -*- coding: utf-8 -*-
+# This constraints file is required for unit tests.
+# List all library dependencies and extras in this file.
+google-api-core
+google-auth
+grpcio
+proto-plus
+protobuf
+# cryptography is a direct dependency of google-auth
+cryptography
diff --git a/packages/google-cloud-bigtable/testing/constraints-3.12.txt b/packages/google-cloud-bigtable/testing/constraints-3.12.txt
index e69de29bb2d1..7599dea499ed 100644
--- a/packages/google-cloud-bigtable/testing/constraints-3.12.txt
+++ b/packages/google-cloud-bigtable/testing/constraints-3.12.txt
@@ -0,0 +1,10 @@
+# -*- coding: utf-8 -*-
+# This constraints file is required for unit tests.
+# List all library dependencies and extras in this file.
+google-api-core
+google-auth
+grpcio
+proto-plus
+protobuf
+# cryptography is a direct dependency of google-auth
+cryptography
diff --git a/packages/google-cloud-bigtable/testing/constraints-3.13.txt b/packages/google-cloud-bigtable/testing/constraints-3.13.txt
index e69de29bb2d1..1e93c60e50aa 100644
--- a/packages/google-cloud-bigtable/testing/constraints-3.13.txt
+++ b/packages/google-cloud-bigtable/testing/constraints-3.13.txt
@@ -0,0 +1,12 @@
+# We use the constraints file for the latest Python version
+# (currently this file) to check that the latest
+# major versions of dependencies are supported in setup.py.
+# List all library dependencies and extras in this file.
+# Require the latest major version be installed for each dependency.
+# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0",
+# Then this file should have google-cloud-foo>=1
+google-api-core>=2
+google-auth>=2
+grpcio>=1
+proto-plus>=1
+protobuf>=6
diff --git a/packages/google-cloud-bigtable/testing/constraints-3.14.txt b/packages/google-cloud-bigtable/testing/constraints-3.14.txt
index e69de29bb2d1..1e93c60e50aa 100644
--- a/packages/google-cloud-bigtable/testing/constraints-3.14.txt
+++ b/packages/google-cloud-bigtable/testing/constraints-3.14.txt
@@ -0,0 +1,12 @@
+# We use the constraints file for the latest Python version
+# (currently this file) to check that the latest
+# major versions of dependencies are supported in setup.py.
+# List all library dependencies and extras in this file.
+# Require the latest major version be installed for each dependency.
+# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0",
+# Then this file should have google-cloud-foo>=1
+google-api-core>=2
+google-auth>=2
+grpcio>=1
+proto-plus>=1
+protobuf>=6
diff --git a/packages/google-cloud-bigtable/testing/constraints-3.7.txt b/packages/google-cloud-bigtable/testing/constraints-3.7.txt
deleted file mode 100644
index 023133380894..000000000000
--- a/packages/google-cloud-bigtable/testing/constraints-3.7.txt
+++ /dev/null
@@ -1,15 +0,0 @@
-# This constraints file is used to check that lower bounds
-# are correct in setup.py
-# List *all* library dependencies and extras in this file.
-# Pin the version to the lower bound.
-#
-# e.g., if setup.py has "foo >= 1.14.0, < 2.0.0dev",
-# Then this file should have foo==1.14.0
-google-api-core==2.17.0
-google-auth==2.23.0
-google-cloud-core==2.0.0
-grpc-google-iam-v1==0.12.4
-proto-plus==1.22.3
-libcst==0.2.5
-protobuf==3.20.2
-
diff --git a/packages/google-cloud-bigtable/testing/constraints-3.8.txt b/packages/google-cloud-bigtable/testing/constraints-3.8.txt
deleted file mode 100644
index a7e4616c9efb..000000000000
--- a/packages/google-cloud-bigtable/testing/constraints-3.8.txt
+++ /dev/null
@@ -1,15 +0,0 @@
-# This constraints file is used to check that lower bounds
-# are correct in setup.py
-# List *all* library dependencies and extras in this file.
-# Pin the version to the lower bound.
-#
-# e.g., if setup.py has "foo >= 1.14.0, < 2.0.0dev",
-# Then this file should have foo==1.14.0
-google-api-core==2.17.0
-google-auth==2.23.0
-google-cloud-core==2.0.0
-grpc-google-iam-v1==0.12.4
-proto-plus==1.22.3
-libcst==0.2.5
-protobuf==3.20.2
-pytest-asyncio==0.21.2
diff --git a/packages/google-cloud-bigtable/testing/constraints-3.9.txt b/packages/google-cloud-bigtable/testing/constraints-3.9.txt
index e69de29bb2d1..dffda9791f3d 100644
--- a/packages/google-cloud-bigtable/testing/constraints-3.9.txt
+++ b/packages/google-cloud-bigtable/testing/constraints-3.9.txt
@@ -0,0 +1,15 @@
+# -*- coding: utf-8 -*-
+# This constraints file is used to check that lower bounds
+# are correct in setup.py
+# List all library dependencies and extras in this file,
+# pinning their versions to their lower bounds.
+# For example, if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0",
+# then this file should have google-cloud-foo==1.14.0
+google-api-core==2.21.0
+google-cloud-core==2.0.0
+grpc-google-iam-v1==0.12.4
+google-auth==2.35.0
+# TODO(https://github.com/googleapis/gapic-generator-python/issues/2453)
+# Add the minimum supported version of grpcio to constraints files
+proto-plus==1.22.3
+protobuf==4.25.8
diff --git a/packages/google-cloud-bigtable/tests/system/admin_overlay/conftest.py b/packages/google-cloud-bigtable/tests/system/admin_overlay/conftest.py
index 66baef3f4d7a..c3698e3ae3d2 100644
--- a/packages/google-cloud-bigtable/tests/system/admin_overlay/conftest.py
+++ b/packages/google-cloud-bigtable/tests/system/admin_overlay/conftest.py
@@ -1,9 +1,8 @@
-import google.auth
-
import os
-import pytest
import uuid
+import google.auth
+import pytest
INSTANCE_PREFIX = "admin-overlay-instance"
BACKUP_PREFIX = "admin-overlay-backup"
diff --git a/packages/google-cloud-bigtable/tests/system/admin_overlay/test_system_async.py b/packages/google-cloud-bigtable/tests/system/admin_overlay/test_system_async.py
index aa412569edd8..22747cf77adb 100644
--- a/packages/google-cloud-bigtable/tests/system/admin_overlay/test_system_async.py
+++ b/packages/google-cloud-bigtable/tests/system/admin_overlay/test_system_async.py
@@ -12,35 +12,33 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+import os
+from datetime import datetime, timedelta
from typing import Tuple
+import pytest
+from google.cloud.environment_vars import BIGTABLE_EMULATOR
+
from google.cloud import bigtable_admin_v2 as admin_v2
-from google.cloud.bigtable.data._cross_sync import CrossSync
from google.cloud.bigtable.data import mutations, read_rows_query
-from google.cloud.environment_vars import BIGTABLE_EMULATOR
+from google.cloud.bigtable.data._cross_sync import CrossSync
from .conftest import (
- INSTANCE_PREFIX,
BACKUP_PREFIX,
- ROW_PREFIX,
DEFAULT_CLUSTER_LOCATIONS,
+ INITIAL_CELL_VALUE,
+ INSTANCE_PREFIX,
+ NEW_CELL_VALUE,
+ NUM_ROWS,
REPLICATION_CLUSTER_LOCATIONS,
- TEST_TABLE_NAME,
+ ROW_PREFIX,
TEST_BACKUP_TABLE_NAME,
TEST_COLUMMN_FAMILY_NAME,
TEST_COLUMN_NAME,
- NUM_ROWS,
- INITIAL_CELL_VALUE,
- NEW_CELL_VALUE,
+ TEST_TABLE_NAME,
generate_unique_suffix,
)
-from datetime import datetime, timedelta
-
-import pytest
-import os
-
-
if CrossSync.is_async:
from google.api_core import operation_async as api_core_operation
else:
diff --git a/packages/google-cloud-bigtable/tests/system/admin_overlay/test_system_autogen.py b/packages/google-cloud-bigtable/tests/system/admin_overlay/test_system_autogen.py
index 4fde3571fa7e..d791da75ab13 100644
--- a/packages/google-cloud-bigtable/tests/system/admin_overlay/test_system_autogen.py
+++ b/packages/google-cloud-bigtable/tests/system/admin_overlay/test_system_autogen.py
@@ -15,29 +15,29 @@
# This file is automatically generated by CrossSync. Do not edit manually.
+import os
+from datetime import datetime, timedelta
from typing import Tuple
+import pytest
+from google.cloud.environment_vars import BIGTABLE_EMULATOR
from google.cloud import bigtable_admin_v2 as admin_v2
-from google.cloud.bigtable.data._cross_sync import CrossSync
from google.cloud.bigtable.data import mutations, read_rows_query
-from google.cloud.environment_vars import BIGTABLE_EMULATOR
+from google.cloud.bigtable.data._cross_sync import CrossSync
from .conftest import (
- INSTANCE_PREFIX,
BACKUP_PREFIX,
- ROW_PREFIX,
DEFAULT_CLUSTER_LOCATIONS,
+ INITIAL_CELL_VALUE,
+ INSTANCE_PREFIX,
+ NEW_CELL_VALUE,
+ NUM_ROWS,
REPLICATION_CLUSTER_LOCATIONS,
- TEST_TABLE_NAME,
+ ROW_PREFIX,
TEST_BACKUP_TABLE_NAME,
TEST_COLUMMN_FAMILY_NAME,
TEST_COLUMN_NAME,
- NUM_ROWS,
- INITIAL_CELL_VALUE,
- NEW_CELL_VALUE,
+ TEST_TABLE_NAME,
generate_unique_suffix,
)
-from datetime import datetime, timedelta
-import pytest
-import os
from google.api_core import operation as api_core_operation
if os.getenv(BIGTABLE_EMULATOR):
@@ -224,7 +224,7 @@ def test_optimize_restored_table(
second_instance_storage_type,
expect_optimize_operation,
):
- (instance_with_backup, table_to_backup) = create_instance(
+ instance_with_backup, table_to_backup = create_instance(
instance_admin_client,
table_admin_client,
data_client,
@@ -232,7 +232,7 @@ def test_optimize_restored_table(
instances_to_delete,
admin_v2.StorageType.HDD,
)
- (instance_to_restore, _) = create_instance(
+ instance_to_restore, _ = create_instance(
instance_admin_client,
table_admin_client,
data_client,
@@ -282,7 +282,7 @@ def test_wait_for_consistency(
instances_to_delete,
admin_overlay_project_id,
):
- (instance, table) = create_instance(
+ instance, table = create_instance(
instance_admin_client,
table_admin_client,
data_client,
diff --git a/packages/google-cloud-bigtable/tests/system/conftest.py b/packages/google-cloud-bigtable/tests/system/conftest.py
index 8c0eb30b1565..5557803fffc9 100644
--- a/packages/google-cloud-bigtable/tests/system/conftest.py
+++ b/packages/google-cloud-bigtable/tests/system/conftest.py
@@ -14,11 +14,12 @@
"""
Import pytest fixtures for setting up table for data client system tests
"""
-import sys
+
+import asyncio
import os
+import sys
import pytest
-import asyncio
script_path = os.path.dirname(os.path.realpath(__file__))
sys.path.append(script_path)
diff --git a/packages/google-cloud-bigtable/tests/system/cross_sync/test_cross_sync_e2e.py b/packages/google-cloud-bigtable/tests/system/cross_sync/test_cross_sync_e2e.py
index 86911b1631ea..e8a7f03da14f 100644
--- a/packages/google-cloud-bigtable/tests/system/cross_sync/test_cross_sync_e2e.py
+++ b/packages/google-cloud-bigtable/tests/system/cross_sync/test_cross_sync_e2e.py
@@ -1,6 +1,7 @@
import ast
-import sys
import os
+import sys
+
import black
import pytest
import yaml
@@ -11,11 +12,11 @@
sys.path.append(cross_sync_path)
from transformers import ( # noqa: F401 E402
- SymbolReplacer,
AsyncToSync,
+ CrossSyncFileProcessor,
RmAioFunctions,
StripAsyncConditionalBranches,
- CrossSyncFileProcessor,
+ SymbolReplacer,
)
diff --git a/packages/google-cloud-bigtable/tests/system/data/setup_fixtures.py b/packages/google-cloud-bigtable/tests/system/data/setup_fixtures.py
index 169e2396bdea..1416d6b7ab90 100644
--- a/packages/google-cloud-bigtable/tests/system/data/setup_fixtures.py
+++ b/packages/google-cloud-bigtable/tests/system/data/setup_fixtures.py
@@ -16,11 +16,12 @@
Bigtable database for testing purposes.
"""
-import pytest
import os
import uuid
-from . import TEST_FAMILY, TEST_FAMILY_2, TEST_AGGREGATE_FAMILY
+import pytest
+
+from . import TEST_AGGREGATE_FAMILY, TEST_FAMILY, TEST_FAMILY_2
# authorized view subset to allow all qualifiers
ALLOW_ALL = ""
@@ -43,10 +44,11 @@ def instance_id(admin_client, project_id, cluster_config):
"""
Returns BIGTABLE_TEST_INSTANCE if set, otherwise creates a new temporary instance for the test session
"""
- from google.cloud.bigtable_admin_v2 import types
from google.api_core import exceptions
from google.cloud.environment_vars import BIGTABLE_EMULATOR
+ from google.cloud.bigtable_admin_v2 import types
+
# use user-specified instance if available
user_specified_instance = os.getenv("BIGTABLE_TEST_INSTANCE")
if user_specified_instance:
@@ -109,8 +111,7 @@ def table_id(
Supplied by the init_table_id fixture.
- column_split_config: A list of row keys to use as initial splits when creating the test table.
"""
- from google.api_core import exceptions
- from google.api_core import retry
+ from google.api_core import exceptions, retry
# use user-specified instance if available
user_specified_table = os.getenv("BIGTABLE_TEST_TABLE")
@@ -162,8 +163,7 @@ def authorized_view_id(
- instance_id: The ID of the Bigtable instance to test against. Supplied by the instance_id fixture.
- table_id: The ID of the table to create the authorized view for. Supplied by the table_id fixture.
"""
- from google.api_core import exceptions
- from google.api_core import retry
+ from google.api_core import exceptions, retry
retry = retry.Retry(
predicate=retry.if_exception_type(exceptions.FailedPrecondition)
diff --git a/packages/google-cloud-bigtable/tests/system/data/test_system_async.py b/packages/google-cloud-bigtable/tests/system/data/test_system_async.py
index ac8a358a3ec1..b23b8f0aeff8 100644
--- a/packages/google-cloud-bigtable/tests/system/data/test_system_async.py
+++ b/packages/google-cloud-bigtable/tests/system/data/test_system_async.py
@@ -12,21 +12,21 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import pytest
import datetime
-import uuid
import os
+import uuid
+
+import pytest
from google.api_core import retry
from google.api_core.exceptions import ClientError, PermissionDenied
-
-from google.cloud.bigtable.data.execute_query.metadata import SqlType
-from google.cloud.bigtable.data.read_modify_write_rules import _MAX_INCREMENT_VALUE
from google.cloud.environment_vars import BIGTABLE_EMULATOR
from google.type import date_pb2
from google.cloud.bigtable.data._cross_sync import CrossSync
+from google.cloud.bigtable.data.execute_query.metadata import SqlType
+from google.cloud.bigtable.data.read_modify_write_rules import _MAX_INCREMENT_VALUE
-from . import TEST_FAMILY, TEST_FAMILY_2, TEST_AGGREGATE_FAMILY
+from . import TEST_AGGREGATE_FAMILY, TEST_FAMILY, TEST_FAMILY_2
if CrossSync.is_async:
from google.cloud.bigtable_v2.services.bigtable.transports.grpc_asyncio import (
@@ -413,9 +413,11 @@ async def test_bulk_mutations_raise_exception(self, client, target):
"""
If an invalid mutation is passed, an exception should be raised
"""
+ from google.cloud.bigtable.data.exceptions import (
+ FailedMutationEntryError,
+ MutationsExceptionGroup,
+ )
from google.cloud.bigtable.data.mutations import RowMutationEntry, SetCell
- from google.cloud.bigtable.data.exceptions import MutationsExceptionGroup
- from google.cloud.bigtable.data.exceptions import FailedMutationEntryError
row_key = uuid.uuid4().hex.encode()
mutation = SetCell(
@@ -723,8 +725,10 @@ async def test_read_modify_write_row_chained(self, client, target, temp_rows):
"""
test read_modify_write_row with multiple rules
"""
- from google.cloud.bigtable.data.read_modify_write_rules import AppendValueRule
- from google.cloud.bigtable.data.read_modify_write_rules import IncrementRule
+ from google.cloud.bigtable.data.read_modify_write_rules import (
+ AppendValueRule,
+ IncrementRule,
+ )
row_key = b"test-row-key"
family = TEST_FAMILY
@@ -893,8 +897,7 @@ async def test_read_rows_sharded_from_sample(self, target, temp_rows):
"""
Test end-to-end sharding
"""
- from google.cloud.bigtable.data.read_rows_query import ReadRowsQuery
- from google.cloud.bigtable.data.read_rows_query import RowRange
+ from google.cloud.bigtable.data.read_rows_query import ReadRowsQuery, RowRange
await temp_rows.add_row(b"a")
await temp_rows.add_row(b"b")
@@ -949,8 +952,7 @@ async def test_read_rows_range_query(self, target, temp_rows):
"""
Ensure that the read_rows method works
"""
- from google.cloud.bigtable.data import ReadRowsQuery
- from google.cloud.bigtable.data import RowRange
+ from google.cloud.bigtable.data import ReadRowsQuery, RowRange
await temp_rows.add_row(b"a")
await temp_rows.add_row(b"b")
@@ -1143,16 +1145,16 @@ async def test_literal_value_filter(
Literal value filter does complex escaping on re2 strings.
Make sure inputs are properly interpreted by the server
"""
- from google.cloud.bigtable.data.row_filters import LiteralValueFilter
from google.cloud.bigtable.data import ReadRowsQuery
+ from google.cloud.bigtable.data.row_filters import LiteralValueFilter
f = LiteralValueFilter(filter_input)
await temp_rows.add_row(b"row_key_1", value=cell_value)
query = ReadRowsQuery(row_filter=f)
row_list = await target.read_rows(query)
- assert len(row_list) == bool(
- expect_match
- ), f"row {type(cell_value)}({cell_value}) not found with {type(filter_input)}({filter_input}) filter"
+ assert len(row_list) == bool(expect_match), (
+ f"row {type(cell_value)}({cell_value}) not found with {type(filter_input)}({filter_input}) filter"
+ )
@pytest.mark.skipif(
bool(os.environ.get(BIGTABLE_EMULATOR)),
diff --git a/packages/google-cloud-bigtable/tests/system/data/test_system_autogen.py b/packages/google-cloud-bigtable/tests/system/data/test_system_autogen.py
index 463235087487..396f7b1a0605 100644
--- a/packages/google-cloud-bigtable/tests/system/data/test_system_autogen.py
+++ b/packages/google-cloud-bigtable/tests/system/data/test_system_autogen.py
@@ -15,18 +15,18 @@
# This file is automatically generated by CrossSync. Do not edit manually.
-import pytest
import datetime
-import uuid
import os
+import uuid
+import pytest
from google.api_core import retry
from google.api_core.exceptions import ClientError, PermissionDenied
-from google.cloud.bigtable.data.execute_query.metadata import SqlType
-from google.cloud.bigtable.data.read_modify_write_rules import _MAX_INCREMENT_VALUE
from google.cloud.environment_vars import BIGTABLE_EMULATOR
from google.type import date_pb2
from google.cloud.bigtable.data._cross_sync import CrossSync
-from . import TEST_FAMILY, TEST_FAMILY_2, TEST_AGGREGATE_FAMILY
+from google.cloud.bigtable.data.execute_query.metadata import SqlType
+from google.cloud.bigtable.data.read_modify_write_rules import _MAX_INCREMENT_VALUE
+from . import TEST_AGGREGATE_FAMILY, TEST_FAMILY, TEST_FAMILY_2
from google.cloud.bigtable_v2.services.bigtable.transports.grpc import (
_LoggingClientInterceptor as GapicInterceptor,
)
@@ -257,7 +257,7 @@ def test_mutation_set_cell(self, target, temp_rows):
"""Ensure cells can be set properly"""
row_key = b"bulk_mutate"
new_value = uuid.uuid4().hex.encode()
- (row_key, mutation) = self._create_row_and_mutation(
+ row_key, mutation = self._create_row_and_mutation(
target, temp_rows, new_value=new_value
)
target.mutate_row(row_key, mutation)
@@ -311,7 +311,7 @@ def test_bulk_mutations_set_cell(self, client, target, temp_rows):
from google.cloud.bigtable.data.mutations import RowMutationEntry
new_value = uuid.uuid4().hex.encode()
- (row_key, mutation) = self._create_row_and_mutation(
+ row_key, mutation = self._create_row_and_mutation(
target, temp_rows, new_value=new_value
)
bulk_mutation = RowMutationEntry(row_key, [mutation])
@@ -320,9 +320,11 @@ def test_bulk_mutations_set_cell(self, client, target, temp_rows):
def test_bulk_mutations_raise_exception(self, client, target):
"""If an invalid mutation is passed, an exception should be raised"""
+ from google.cloud.bigtable.data.exceptions import (
+ FailedMutationEntryError,
+ MutationsExceptionGroup,
+ )
from google.cloud.bigtable.data.mutations import RowMutationEntry, SetCell
- from google.cloud.bigtable.data.exceptions import MutationsExceptionGroup
- from google.cloud.bigtable.data.exceptions import FailedMutationEntryError
row_key = uuid.uuid4().hex.encode()
mutation = SetCell(
@@ -346,11 +348,11 @@ def test_mutations_batcher_context_manager(self, client, target, temp_rows):
"""test batcher with context manager. Should flush on exit"""
from google.cloud.bigtable.data.mutations import RowMutationEntry
- (new_value, new_value2) = [uuid.uuid4().hex.encode() for _ in range(2)]
- (row_key, mutation) = self._create_row_and_mutation(
+ new_value, new_value2 = [uuid.uuid4().hex.encode() for _ in range(2)]
+ row_key, mutation = self._create_row_and_mutation(
target, temp_rows, new_value=new_value
)
- (row_key2, mutation2) = self._create_row_and_mutation(
+ row_key2, mutation2 = self._create_row_and_mutation(
target, temp_rows, new_value=new_value2
)
bulk_mutation = RowMutationEntry(row_key, [mutation])
@@ -371,7 +373,7 @@ def test_mutations_batcher_timer_flush(self, client, target, temp_rows):
from google.cloud.bigtable.data.mutations import RowMutationEntry
new_value = uuid.uuid4().hex.encode()
- (row_key, mutation) = self._create_row_and_mutation(
+ row_key, mutation = self._create_row_and_mutation(
target, temp_rows, new_value=new_value
)
bulk_mutation = RowMutationEntry(row_key, [mutation])
@@ -393,12 +395,12 @@ def test_mutations_batcher_count_flush(self, client, target, temp_rows):
"""batch should flush after flush_limit_mutation_count mutations"""
from google.cloud.bigtable.data.mutations import RowMutationEntry
- (new_value, new_value2) = [uuid.uuid4().hex.encode() for _ in range(2)]
- (row_key, mutation) = self._create_row_and_mutation(
+ new_value, new_value2 = [uuid.uuid4().hex.encode() for _ in range(2)]
+ row_key, mutation = self._create_row_and_mutation(
target, temp_rows, new_value=new_value
)
bulk_mutation = RowMutationEntry(row_key, [mutation])
- (row_key2, mutation2) = self._create_row_and_mutation(
+ row_key2, mutation2 = self._create_row_and_mutation(
target, temp_rows, new_value=new_value2
)
bulk_mutation2 = RowMutationEntry(row_key2, [mutation2])
@@ -425,12 +427,12 @@ def test_mutations_batcher_bytes_flush(self, client, target, temp_rows):
"""batch should flush after flush_limit_bytes bytes"""
from google.cloud.bigtable.data.mutations import RowMutationEntry
- (new_value, new_value2) = [uuid.uuid4().hex.encode() for _ in range(2)]
- (row_key, mutation) = self._create_row_and_mutation(
+ new_value, new_value2 = [uuid.uuid4().hex.encode() for _ in range(2)]
+ row_key, mutation = self._create_row_and_mutation(
target, temp_rows, new_value=new_value
)
bulk_mutation = RowMutationEntry(row_key, [mutation])
- (row_key2, mutation2) = self._create_row_and_mutation(
+ row_key2, mutation2 = self._create_row_and_mutation(
target, temp_rows, new_value=new_value2
)
bulk_mutation2 = RowMutationEntry(row_key2, [mutation2])
@@ -456,11 +458,11 @@ def test_mutations_batcher_no_flush(self, client, target, temp_rows):
new_value = uuid.uuid4().hex.encode()
start_value = b"unchanged"
- (row_key, mutation) = self._create_row_and_mutation(
+ row_key, mutation = self._create_row_and_mutation(
target, temp_rows, start_value=start_value, new_value=new_value
)
bulk_mutation = RowMutationEntry(row_key, [mutation])
- (row_key2, mutation2) = self._create_row_and_mutation(
+ row_key2, mutation2 = self._create_row_and_mutation(
target, temp_rows, start_value=start_value, new_value=new_value
)
bulk_mutation2 = RowMutationEntry(row_key2, [mutation2])
@@ -573,8 +575,10 @@ def test_read_modify_write_row_append(
@pytest.mark.usefixtures("target")
def test_read_modify_write_row_chained(self, client, target, temp_rows):
"""test read_modify_write_row with multiple rules"""
- from google.cloud.bigtable.data.read_modify_write_rules import AppendValueRule
- from google.cloud.bigtable.data.read_modify_write_rules import IncrementRule
+ from google.cloud.bigtable.data.read_modify_write_rules import (
+ AppendValueRule,
+ IncrementRule,
+ )
row_key = b"test-row-key"
family = TEST_FAMILY
@@ -711,8 +715,7 @@ def test_read_rows_sharded_simple(self, target, temp_rows):
)
def test_read_rows_sharded_from_sample(self, target, temp_rows):
"""Test end-to-end sharding"""
- from google.cloud.bigtable.data.read_rows_query import ReadRowsQuery
- from google.cloud.bigtable.data.read_rows_query import RowRange
+ from google.cloud.bigtable.data.read_rows_query import ReadRowsQuery, RowRange
temp_rows.add_row(b"a")
temp_rows.add_row(b"b")
@@ -759,8 +762,7 @@ def test_read_rows_sharded_filters_limits(self, target, temp_rows):
)
def test_read_rows_range_query(self, target, temp_rows):
"""Ensure that the read_rows method works"""
- from google.cloud.bigtable.data import ReadRowsQuery
- from google.cloud.bigtable.data import RowRange
+ from google.cloud.bigtable.data import ReadRowsQuery, RowRange
temp_rows.add_row(b"a")
temp_rows.add_row(b"b")
@@ -924,16 +926,16 @@ def test_literal_value_filter(
):
"""Literal value filter does complex escaping on re2 strings.
Make sure inputs are properly interpreted by the server"""
- from google.cloud.bigtable.data.row_filters import LiteralValueFilter
from google.cloud.bigtable.data import ReadRowsQuery
+ from google.cloud.bigtable.data.row_filters import LiteralValueFilter
f = LiteralValueFilter(filter_input)
temp_rows.add_row(b"row_key_1", value=cell_value)
query = ReadRowsQuery(row_filter=f)
row_list = target.read_rows(query)
- assert len(row_list) == bool(
- expect_match
- ), f"row {type(cell_value)}({cell_value}) not found with {type(filter_input)}({filter_input}) filter"
+ assert len(row_list) == bool(expect_match), (
+ f"row {type(cell_value)}({cell_value}) not found with {type(filter_input)}({filter_input}) filter"
+ )
@pytest.mark.skipif(
bool(os.environ.get(BIGTABLE_EMULATOR)), reason="emulator doesn't support SQL"
diff --git a/packages/google-cloud-bigtable/tests/system/v2_client/_helpers.py b/packages/google-cloud-bigtable/tests/system/v2_client/_helpers.py
index e792def15914..29a517e71e52 100644
--- a/packages/google-cloud-bigtable/tests/system/v2_client/_helpers.py
+++ b/packages/google-cloud-bigtable/tests/system/v2_client/_helpers.py
@@ -16,9 +16,9 @@
import grpc
from google.api_core import exceptions
-from google.cloud import exceptions as core_exceptions
from test_utils import retry
+from google.cloud import exceptions as core_exceptions
retry_429 = retry.RetryErrors(exceptions.TooManyRequests, max_tries=9)
retry_504 = retry.RetryErrors(exceptions.DeadlineExceeded)
diff --git a/packages/google-cloud-bigtable/tests/system/v2_client/conftest.py b/packages/google-cloud-bigtable/tests/system/v2_client/conftest.py
index f39fcba88962..59e016f52af8 100644
--- a/packages/google-cloud-bigtable/tests/system/v2_client/conftest.py
+++ b/packages/google-cloud-bigtable/tests/system/v2_client/conftest.py
@@ -15,10 +15,10 @@
import os
import pytest
+from google.cloud.environment_vars import BIGTABLE_EMULATOR
from test_utils.system import unique_resource_id
from google.cloud.bigtable.client import Client
-from google.cloud.environment_vars import BIGTABLE_EMULATOR
from . import _helpers
diff --git a/packages/google-cloud-bigtable/tests/system/v2_client/test_data_api.py b/packages/google-cloud-bigtable/tests/system/v2_client/test_data_api.py
index c012eb32a414..232d0f02c6a4 100644
--- a/packages/google-cloud-bigtable/tests/system/v2_client/test_data_api.py
+++ b/packages/google-cloud-bigtable/tests/system/v2_client/test_data_api.py
@@ -12,8 +12,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-from datetime import datetime, timedelta, timezone
import operator
+from datetime import datetime, timedelta, timezone
import pytest
@@ -158,8 +158,7 @@ def test_table_drop_by_prefix(data_table, rows_to_delete):
def test_table_read_rows_w_row_set(data_table, rows_to_delete):
- from google.cloud.bigtable.row_set import RowSet
- from google.cloud.bigtable.row_set import RowRange
+ from google.cloud.bigtable.row_set import RowRange, RowSet
row_keys = [
b"row_key_1",
@@ -231,8 +230,11 @@ def test_table_read_row_large_cell(data_table, rows_to_delete, skip_on_emulator)
def _write_to_row(row1, row2, row3, row4):
- from google.cloud._helpers import _datetime_from_microseconds
- from google.cloud._helpers import _microseconds_from_datetime
+ from google.cloud._helpers import (
+ _datetime_from_microseconds,
+ _microseconds_from_datetime,
+ )
+
from google.cloud.bigtable.row_data import Cell
timestamp1 = datetime.now(timezone.utc)
@@ -325,10 +327,12 @@ def test_table_read_rows(data_table, rows_to_delete):
def test_read_with_label_applied(data_table, rows_to_delete, skip_on_emulator):
- from google.cloud.bigtable.row_filters import ApplyLabelFilter
- from google.cloud.bigtable.row_filters import ColumnQualifierRegexFilter
- from google.cloud.bigtable.row_filters import RowFilterChain
- from google.cloud.bigtable.row_filters import RowFilterUnion
+ from google.cloud.bigtable.row_filters import (
+ ApplyLabelFilter,
+ ColumnQualifierRegexFilter,
+ RowFilterChain,
+ RowFilterUnion,
+ )
row = data_table.direct_row(ROW_KEY)
rows_to_delete.append(row)
@@ -387,8 +391,10 @@ def test_mutations_batcher_threading(data_table, rows_to_delete):
Test the mutations batcher by sending a bunch of mutations using different
flush methods
"""
- import mock
import time
+
+ import mock
+
from google.cloud.bigtable.batcher import MutationsBatcher
num_sent = 20
diff --git a/packages/google-cloud-bigtable/tests/system/v2_client/test_table_admin.py b/packages/google-cloud-bigtable/tests/system/v2_client/test_table_admin.py
index c501890137a3..4cdbd1ce6847 100644
--- a/packages/google-cloud-bigtable/tests/system/v2_client/test_table_admin.py
+++ b/packages/google-cloud-bigtable/tests/system/v2_client/test_table_admin.py
@@ -219,8 +219,7 @@ def test_table_get_iam_policy(
def test_table_set_iam_policy(
service_account, data_instance_populated, tables_to_delete, skip_on_emulator
):
- from google.cloud.bigtable.policy import BIGTABLE_ADMIN_ROLE
- from google.cloud.bigtable.policy import Policy
+ from google.cloud.bigtable.policy import BIGTABLE_ADMIN_ROLE, Policy
temp_table_id = "test-set-iam-policy-table"
temp_table = data_instance_populated.table(temp_table_id)
@@ -264,6 +263,7 @@ def test_table_backup(
skip_on_emulator,
):
from google.cloud._helpers import _datetime_to_pb_timestamp
+
from google.cloud.bigtable import enums
temp_table_id = "test-backup-table"
diff --git a/packages/google-cloud-bigtable/tests/unit/admin_overlay/test_async_client.py b/packages/google-cloud-bigtable/tests/unit/admin_overlay/test_async_client.py
index 0d844a9e4ced..573e0e67af6d 100644
--- a/packages/google-cloud-bigtable/tests/unit/admin_overlay/test_async_client.py
+++ b/packages/google-cloud-bigtable/tests/unit/admin_overlay/test_async_client.py
@@ -20,30 +20,26 @@
except ImportError: # pragma: NO COVER
import mock
-from google.api_core import exceptions
-from google.api_core import gapic_v1
+import pytest
+from google.api_core import exceptions, gapic_v1
from google.api_core import retry as retries
from google.auth.credentials import AnonymousCredentials
-from google.cloud.bigtable_admin_v2.services.bigtable_table_admin import transports
-from google.cloud.bigtable_admin_v2.types import bigtable_table_admin
+from test_async_consistency import (
+ FALSE_CONSISTENCY_RESPONSE,
+ TRUE_CONSISTENCY_RESPONSE,
+)
+
+from google.cloud.bigtable import __version__ as bigtable_version
from google.cloud.bigtable_admin_v2.overlay.services.bigtable_table_admin.async_client import (
- BigtableTableAdminAsyncClient,
DEFAULT_CLIENT_INFO,
+ BigtableTableAdminAsyncClient,
)
from google.cloud.bigtable_admin_v2.overlay.types import (
async_restore_table,
wait_for_consistency_request,
)
-
-from google.cloud.bigtable import __version__ as bigtable_version
-
-from test_async_consistency import (
- FALSE_CONSISTENCY_RESPONSE,
- TRUE_CONSISTENCY_RESPONSE,
-)
-
-import pytest
-
+from google.cloud.bigtable_admin_v2.services.bigtable_table_admin import transports
+from google.cloud.bigtable_admin_v2.types import bigtable_table_admin
PARENT_NAME = "my_parent"
TABLE_NAME = "my_table"
diff --git a/packages/google-cloud-bigtable/tests/unit/admin_overlay/test_async_consistency.py b/packages/google-cloud-bigtable/tests/unit/admin_overlay/test_async_consistency.py
index b64ae1a117ff..d4a787fddd33 100644
--- a/packages/google-cloud-bigtable/tests/unit/admin_overlay/test_async_consistency.py
+++ b/packages/google-cloud-bigtable/tests/unit/admin_overlay/test_async_consistency.py
@@ -20,11 +20,10 @@
except ImportError: # pragma: NO COVER
import mock
-from google.cloud.bigtable_admin_v2.overlay.types import async_consistency
-from google.cloud.bigtable_admin_v2.types import bigtable_table_admin
-
import pytest
+from google.cloud.bigtable_admin_v2.overlay.types import async_consistency
+from google.cloud.bigtable_admin_v2.types import bigtable_table_admin
TRUE_CONSISTENCY_RESPONSE = bigtable_table_admin.CheckConsistencyResponse(
consistent=True
diff --git a/packages/google-cloud-bigtable/tests/unit/admin_overlay/test_async_restore_table.py b/packages/google-cloud-bigtable/tests/unit/admin_overlay/test_async_restore_table.py
index 95799fc147a4..252ef20a8335 100644
--- a/packages/google-cloud-bigtable/tests/unit/admin_overlay/test_async_restore_table.py
+++ b/packages/google-cloud-bigtable/tests/unit/admin_overlay/test_async_restore_table.py
@@ -19,17 +19,15 @@
except ImportError: # pragma: NO COVER
import mock
-from google.longrunning import operations_pb2
-from google.rpc import status_pb2, code_pb2
-
-from google.api_core import operation_async, exceptions
+import pytest
+from google.api_core import exceptions, operation_async
from google.api_core.future import async_future
from google.api_core.operations_v1 import operations_async_client
-from google.cloud.bigtable_admin_v2.types import bigtable_table_admin, table
-from google.cloud.bigtable_admin_v2.overlay.types import async_restore_table
-
-import pytest
+from google.longrunning import operations_pb2
+from google.rpc import code_pb2, status_pb2
+from google.cloud.bigtable_admin_v2.overlay.types import async_restore_table
+from google.cloud.bigtable_admin_v2.types import bigtable_table_admin, table
# Set up the mock operations
DEFAULT_MAX_POLL = 3
diff --git a/packages/google-cloud-bigtable/tests/unit/admin_overlay/test_client.py b/packages/google-cloud-bigtable/tests/unit/admin_overlay/test_client.py
index 07922b349458..176f3ddd1e16 100644
--- a/packages/google-cloud-bigtable/tests/unit/admin_overlay/test_client.py
+++ b/packages/google-cloud-bigtable/tests/unit/admin_overlay/test_client.py
@@ -19,30 +19,26 @@
except ImportError: # pragma: NO COVER
import mock
-from google.api_core import exceptions
-from google.api_core import gapic_v1
+import pytest
+from google.api_core import exceptions, gapic_v1
from google.api_core import retry as retries
from google.auth.credentials import AnonymousCredentials
-from google.cloud.bigtable_admin_v2.services.bigtable_table_admin import transports
-from google.cloud.bigtable_admin_v2.types import bigtable_table_admin
+from test_consistency import (
+ FALSE_CONSISTENCY_RESPONSE,
+ TRUE_CONSISTENCY_RESPONSE,
+)
+
+from google.cloud.bigtable import __version__ as bigtable_version
from google.cloud.bigtable_admin_v2.overlay.services.bigtable_table_admin.client import (
- BigtableTableAdminClient,
DEFAULT_CLIENT_INFO,
+ BigtableTableAdminClient,
)
from google.cloud.bigtable_admin_v2.overlay.types import (
restore_table,
wait_for_consistency_request,
)
-
-from google.cloud.bigtable import __version__ as bigtable_version
-
-from test_consistency import (
- FALSE_CONSISTENCY_RESPONSE,
- TRUE_CONSISTENCY_RESPONSE,
-)
-
-import pytest
-
+from google.cloud.bigtable_admin_v2.services.bigtable_table_admin import transports
+from google.cloud.bigtable_admin_v2.types import bigtable_table_admin
PARENT_NAME = "my_parent"
TABLE_NAME = "my_table"
diff --git a/packages/google-cloud-bigtable/tests/unit/admin_overlay/test_consistency.py b/packages/google-cloud-bigtable/tests/unit/admin_overlay/test_consistency.py
index 29bc0c4817ac..92e941773e0f 100644
--- a/packages/google-cloud-bigtable/tests/unit/admin_overlay/test_consistency.py
+++ b/packages/google-cloud-bigtable/tests/unit/admin_overlay/test_consistency.py
@@ -19,11 +19,10 @@
except ImportError: # pragma: NO COVER
import mock
-from google.cloud.bigtable_admin_v2.overlay.types import consistency
-from google.cloud.bigtable_admin_v2.types import bigtable_table_admin
-
import pytest
+from google.cloud.bigtable_admin_v2.overlay.types import consistency
+from google.cloud.bigtable_admin_v2.types import bigtable_table_admin
TRUE_CONSISTENCY_RESPONSE = bigtable_table_admin.CheckConsistencyResponse(
consistent=True
diff --git a/packages/google-cloud-bigtable/tests/unit/admin_overlay/test_oneof_message.py b/packages/google-cloud-bigtable/tests/unit/admin_overlay/test_oneof_message.py
index b9c521235caf..d812e8fb1207 100644
--- a/packages/google-cloud-bigtable/tests/unit/admin_overlay/test_oneof_message.py
+++ b/packages/google-cloud-bigtable/tests/unit/admin_overlay/test_oneof_message.py
@@ -13,13 +13,11 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
-from google.cloud.bigtable_admin_v2.types import GcRule
-from google.protobuf import duration_pb2
-
import my_oneof_message
-
import pytest
+from google.protobuf import duration_pb2
+from google.cloud.bigtable_admin_v2.types import GcRule
# The following proto bytestring was constructed running printproto in
# text-to-binary mode on the following textproto for GcRule:
diff --git a/packages/google-cloud-bigtable/tests/unit/admin_overlay/test_restore_table.py b/packages/google-cloud-bigtable/tests/unit/admin_overlay/test_restore_table.py
index 23c6609e46d4..1c9194c77896 100644
--- a/packages/google-cloud-bigtable/tests/unit/admin_overlay/test_restore_table.py
+++ b/packages/google-cloud-bigtable/tests/unit/admin_overlay/test_restore_table.py
@@ -18,16 +18,14 @@
except ImportError: # pragma: NO COVER
import mock
+import pytest
+from google.api_core import exceptions, operation
+from google.api_core.operations_v1 import operations_client
from google.longrunning import operations_pb2
-from google.rpc import status_pb2, code_pb2
+from google.rpc import code_pb2, status_pb2
-from google.api_core import operation, exceptions
-from google.api_core.operations_v1 import operations_client
-from google.cloud.bigtable_admin_v2.types import bigtable_table_admin, table
from google.cloud.bigtable_admin_v2.overlay.types import restore_table
-
-import pytest
-
+from google.cloud.bigtable_admin_v2.types import bigtable_table_admin, table
# Set up the mock operations
DEFAULT_MAX_POLL = 3
diff --git a/packages/google-cloud-bigtable/tests/unit/data/_async/test__mutate_rows.py b/packages/google-cloud-bigtable/tests/unit/data/_async/test__mutate_rows.py
index f14fa6dee12a..82f234350a8c 100644
--- a/packages/google-cloud-bigtable/tests/unit/data/_async/test__mutate_rows.py
+++ b/packages/google-cloud-bigtable/tests/unit/data/_async/test__mutate_rows.py
@@ -13,15 +13,12 @@
# limitations under the License.
import pytest
-
-from google.cloud.bigtable_v2.types import MutateRowsResponse
-from google.cloud.bigtable.data.mutations import RowMutationEntry
-from google.cloud.bigtable.data.mutations import DeleteAllFromRow
+from google.api_core.exceptions import DeadlineExceeded, Forbidden
from google.rpc import status_pb2
-from google.api_core.exceptions import DeadlineExceeded
-from google.api_core.exceptions import Forbidden
from google.cloud.bigtable.data._cross_sync import CrossSync
+from google.cloud.bigtable.data.mutations import DeleteAllFromRow, RowMutationEntry
+from google.cloud.bigtable_v2.types import MutateRowsResponse
# try/except added for compatibility with python < 3.8
try:
@@ -80,10 +77,10 @@ def test_ctor(self):
"""
test that constructor sets all the attributes correctly
"""
+ from google.api_core.exceptions import Aborted, DeadlineExceeded
+
from google.cloud.bigtable.data._async._mutate_rows import _EntryWithProto
from google.cloud.bigtable.data.exceptions import _MutateRowsIncomplete
- from google.api_core.exceptions import DeadlineExceeded
- from google.api_core.exceptions import Aborted
client = mock.Mock()
table = mock.Mock()
@@ -198,8 +195,10 @@ async def test_mutate_rows_exception(self, exc_type):
"""
exceptions raised from retryable should be raised in MutationsExceptionGroup
"""
- from google.cloud.bigtable.data.exceptions import MutationsExceptionGroup
- from google.cloud.bigtable.data.exceptions import FailedMutationEntryError
+ from google.cloud.bigtable.data.exceptions import (
+ FailedMutationEntryError,
+ MutationsExceptionGroup,
+ )
client = mock.Mock()
table = mock.Mock()
@@ -265,10 +264,13 @@ async def test_mutate_rows_incomplete_ignored(self):
"""
MutateRowsIncomplete exceptions should not be added to error list
"""
- from google.cloud.bigtable.data.exceptions import _MutateRowsIncomplete
- from google.cloud.bigtable.data.exceptions import MutationsExceptionGroup
from google.api_core.exceptions import DeadlineExceeded
+ from google.cloud.bigtable.data.exceptions import (
+ MutationsExceptionGroup,
+ _MutateRowsIncomplete,
+ )
+
client = mock.Mock()
table = mock.Mock()
entries = [self._make_mutation()]
diff --git a/packages/google-cloud-bigtable/tests/unit/data/_async/test__read_rows.py b/packages/google-cloud-bigtable/tests/unit/data/_async/test__read_rows.py
index c43f46d5a66b..7fad973c43a3 100644
--- a/packages/google-cloud-bigtable/tests/unit/data/_async/test__read_rows.py
+++ b/packages/google-cloud-bigtable/tests/unit/data/_async/test__read_rows.py
@@ -96,9 +96,9 @@ def test_ctor(self):
def test_revise_request_rowset_keys_with_range(
self, in_keys, last_key, expected, with_range
):
- from google.cloud.bigtable_v2.types import RowSet as RowSetPB
- from google.cloud.bigtable_v2.types import RowRange as RowRangePB
from google.cloud.bigtable.data.exceptions import _RowSetComplete
+ from google.cloud.bigtable_v2.types import RowRange as RowRangePB
+ from google.cloud.bigtable_v2.types import RowSet as RowSetPB
in_keys = [key.encode("utf-8") for key in in_keys]
expected = [key.encode("utf-8") for key in expected]
@@ -167,9 +167,9 @@ def test_revise_request_rowset_keys_with_range(
def test_revise_request_rowset_ranges(
self, in_ranges, last_key, expected, with_key
):
- from google.cloud.bigtable_v2.types import RowSet as RowSetPB
- from google.cloud.bigtable_v2.types import RowRange as RowRangePB
from google.cloud.bigtable.data.exceptions import _RowSetComplete
+ from google.cloud.bigtable_v2.types import RowRange as RowRangePB
+ from google.cloud.bigtable_v2.types import RowSet as RowSetPB
# convert to protobuf
next_key = (last_key + "a").encode("utf-8")
@@ -199,8 +199,8 @@ def test_revise_request_rowset_ranges(
@pytest.mark.parametrize("last_key", ["a", "b", "c"])
def test_revise_request_full_table(self, last_key):
- from google.cloud.bigtable_v2.types import RowSet as RowSetPB
from google.cloud.bigtable_v2.types import RowRange as RowRangePB
+ from google.cloud.bigtable_v2.types import RowSet as RowSetPB
# convert to protobuf
last_key = last_key.encode("utf-8")
@@ -216,8 +216,8 @@ def test_revise_request_full_table(self, last_key):
def test_revise_to_empty_rowset(self):
"""revising to an empty rowset should raise error"""
from google.cloud.bigtable.data.exceptions import _RowSetComplete
- from google.cloud.bigtable_v2.types import RowSet as RowSetPB
from google.cloud.bigtable_v2.types import RowRange as RowRangePB
+ from google.cloud.bigtable_v2.types import RowSet as RowSetPB
row_keys = [b"a", b"b", b"c"]
row_range = RowRangePB(end_key_open=b"c")
@@ -284,8 +284,8 @@ async def test_revise_limit_over_limit(self, start_limit, emit_num):
(unless start_num == 0, which represents unlimited)
"""
from google.cloud.bigtable.data import ReadRowsQuery
- from google.cloud.bigtable_v2.types import ReadRowsResponse
from google.cloud.bigtable.data.exceptions import InvalidChunk
+ from google.cloud.bigtable_v2.types import ReadRowsResponse
async def awaitable_stream():
async def mock_stream():
diff --git a/packages/google-cloud-bigtable/tests/unit/data/_async/test_client.py b/packages/google-cloud-bigtable/tests/unit/data/_async/test_client.py
index 9f65d120bba6..f40059823773 100644
--- a/packages/google-cloud-bigtable/tests/unit/data/_async/test_client.py
+++ b/packages/google-cloud-bigtable/tests/unit/data/_async/test_client.py
@@ -13,30 +13,28 @@
# limitations under the License.
from __future__ import annotations
-import grpc
import asyncio
import re
import sys
-import pytest
+import grpc
import mock
-
-from google.cloud.bigtable.data import mutations
-from google.auth.credentials import AnonymousCredentials
-from google.cloud.bigtable_v2.types import ReadRowsResponse
-from google.cloud.bigtable.data.read_rows_query import ReadRowsQuery
-from google.api_core import exceptions as core_exceptions
+import pytest
from google.api_core import client_options
-from google.cloud.bigtable.data.exceptions import InvalidChunk
-from google.cloud.bigtable.data.exceptions import _MutateRowsIncomplete
-from google.cloud.bigtable.data.mutations import DeleteAllFromRow
-from google.cloud.bigtable.data import TABLE_DEFAULT
-
-from google.cloud.bigtable.data.read_modify_write_rules import IncrementRule
-from google.cloud.bigtable.data.read_modify_write_rules import AppendValueRule
-from google.cloud.bigtable_v2.types.bigtable import ExecuteQueryResponse
+from google.api_core import exceptions as core_exceptions
+from google.auth.credentials import AnonymousCredentials
+from google.cloud.bigtable.data import TABLE_DEFAULT, mutations
from google.cloud.bigtable.data._cross_sync import CrossSync
+from google.cloud.bigtable.data.exceptions import InvalidChunk, _MutateRowsIncomplete
+from google.cloud.bigtable.data.mutations import DeleteAllFromRow
+from google.cloud.bigtable.data.read_modify_write_rules import (
+ AppendValueRule,
+ IncrementRule,
+)
+from google.cloud.bigtable.data.read_rows_query import ReadRowsQuery
+from google.cloud.bigtable_v2.types import ReadRowsResponse
+from google.cloud.bigtable_v2.types.bigtable import ExecuteQueryResponse
from tests.unit.data.execute_query.sql_helpers import (
chunked_responses,
column,
@@ -51,10 +49,11 @@
if CrossSync.is_async:
from google.api_core import grpc_helpers_async
- from google.cloud.bigtable.data._async.client import TableAsync
+
from google.cloud.bigtable.data._async._swappable_channel import (
AsyncSwappableChannel,
)
+ from google.cloud.bigtable.data._async.client import TableAsync
from google.cloud.bigtable.data._async.metrics_interceptor import (
AsyncBigtableMetricsInterceptor,
)
@@ -64,10 +63,11 @@
CrossSync.add_mapping("MetricsInterceptor", AsyncBigtableMetricsInterceptor)
else:
from google.api_core import grpc_helpers
- from google.cloud.bigtable.data._sync_autogen.client import Table # noqa: F401
+
from google.cloud.bigtable.data._sync_autogen._swappable_channel import (
SwappableChannel,
)
+ from google.cloud.bigtable.data._sync_autogen.client import Table # noqa: F401
from google.cloud.bigtable.data._sync_autogen.metrics_interceptor import (
BigtableMetricsInterceptor,
)
@@ -127,8 +127,8 @@ async def test_ctor(self):
@CrossSync.pytest
async def test_ctor_super_inits(self):
- from google.cloud.client import ClientWithProject
from google.api_core import client_options as client_options_lib
+ from google.cloud.client import ClientWithProject
project = "project-id"
credentials = AnonymousCredentials()
@@ -215,9 +215,9 @@ async def test_veneer_grpc_headers(self):
wrapped_user_agent_sorted = " ".join(
sorted(client_info.to_user_agent().split(" "))
)
- assert VENEER_HEADER_REGEX.match(
- wrapped_user_agent_sorted
- ), f"'{wrapped_user_agent_sorted}' does not match {VENEER_HEADER_REGEX}"
+ assert VENEER_HEADER_REGEX.match(wrapped_user_agent_sorted), (
+ f"'{wrapped_user_agent_sorted}' does not match {VENEER_HEADER_REGEX}"
+ )
await client.close()
@CrossSync.drop
@@ -392,9 +392,9 @@ async def test__manage_channel_first_sleep(
pass
sleep.assert_called_once()
call_time = sleep.call_args[0][1]
- assert (
- abs(call_time - expected_sleep) < 0.1
- ), f"refresh_interval: {refresh_interval}, wait_time: {wait_time}, expected_sleep: {expected_sleep}"
+ assert abs(call_time - expected_sleep) < 0.1, (
+ f"refresh_interval: {refresh_interval}, wait_time: {wait_time}, expected_sleep: {expected_sleep}"
+ )
await client.close()
@CrossSync.pytest
@@ -441,8 +441,8 @@ async def test__manage_channel_sleeps(
self, refresh_interval, num_cycles, expected_sleep
):
# make sure that sleeps work as expected
- import time
import random
+ import time
with mock.patch.object(random, "uniform") as uniform:
uniform.side_effect = lambda min_, max_: min_
@@ -467,9 +467,9 @@ async def test__manage_channel_sleeps(
pass
assert sleep.call_count == num_cycles
total_sleep = sum([call[0][1] for call in sleep.call_args_list])
- assert (
- abs(total_sleep - expected_sleep) < 0.5
- ), f"refresh_interval={refresh_interval}, num_cycles={num_cycles}, expected_sleep={expected_sleep}"
+ assert abs(total_sleep - expected_sleep) < 0.5, (
+ f"refresh_interval={refresh_interval}, num_cycles={num_cycles}, expected_sleep={expected_sleep}"
+ )
await client.close()
@CrossSync.pytest
@@ -923,6 +923,7 @@ async def test_api_surface_context_manager(self, method):
get_table and get_authorized_view should work as context managers
"""
from functools import partial
+
from google.cloud.bigtable.data._helpers import _WarmedInstanceKey
expected_table_id = "table-id"
@@ -1628,9 +1629,11 @@ def _make_table(self, *args, **kwargs):
return CrossSync.TestTable._get_target_class()(client_mock, *args, **kwargs)
def _make_stats(self):
- from google.cloud.bigtable_v2.types import RequestStats
- from google.cloud.bigtable_v2.types import FullReadStatsView
- from google.cloud.bigtable_v2.types import ReadIterationStats
+ from google.cloud.bigtable_v2.types import (
+ FullReadStatsView,
+ ReadIterationStats,
+ RequestStats,
+ )
return RequestStats(
full_read_stats_view=FullReadStatsView(
@@ -2125,11 +2128,14 @@ async def test_read_rows_sharded_multiple_queries(self):
with mock.patch.object(
table.client._gapic_client, "read_rows"
) as read_rows:
- read_rows.side_effect = lambda *args, **kwargs: CrossSync.TestReadRows._make_gapic_stream(
- [
- CrossSync.TestReadRows._make_chunk(row_key=k)
- for k in args[0].rows.row_keys
- ]
+ read_rows.side_effect = (
+ lambda *args,
+ **kwargs: CrossSync.TestReadRows._make_gapic_stream(
+ [
+ CrossSync.TestReadRows._make_chunk(row_key=k)
+ for k in args[0].rows.row_keys
+ ]
+ )
)
query_1 = ReadRowsQuery(b"test_1")
query_2 = ReadRowsQuery(b"test_2")
@@ -2156,8 +2162,10 @@ async def test_read_rows_sharded_errors(self):
"""
Errors should be exposed as ShardedReadRowsExceptionGroups
"""
- from google.cloud.bigtable.data.exceptions import ShardedReadRowsExceptionGroup
- from google.cloud.bigtable.data.exceptions import FailedQueryShardError
+ from google.cloud.bigtable.data.exceptions import (
+ FailedQueryShardError,
+ ShardedReadRowsExceptionGroup,
+ )
async with self._make_client() as client:
async with client.get_table("instance", "table") as table:
@@ -2258,9 +2266,10 @@ async def test_read_rows_sharded_expirary(self):
If the operation times out before all shards complete, should raise
a ShardedReadRowsExceptionGroup
"""
+ from google.api_core.exceptions import DeadlineExceeded
+
from google.cloud.bigtable.data._helpers import _CONCURRENCY_LIMIT
from google.cloud.bigtable.data.exceptions import ShardedReadRowsExceptionGroup
- from google.api_core.exceptions import DeadlineExceeded
operation_timeout = 0.1
@@ -2299,10 +2308,11 @@ async def test_read_rows_sharded_negative_batch_timeout(self):
They should raise DeadlineExceeded errors
"""
- from google.cloud.bigtable.data.exceptions import ShardedReadRowsExceptionGroup
- from google.cloud.bigtable.data._helpers import _CONCURRENCY_LIMIT
from google.api_core.exceptions import DeadlineExceeded
+ from google.cloud.bigtable.data._helpers import _CONCURRENCY_LIMIT
+ from google.cloud.bigtable.data.exceptions import ShardedReadRowsExceptionGroup
+
async def mock_call(*args, **kwargs):
await CrossSync.sleep(0.06)
return [mock.Mock()]
@@ -2438,6 +2448,7 @@ async def test_sample_row_keys_retryable_errors(self, retryable_exception):
retryable errors should be retried until timeout
"""
from google.api_core.exceptions import DeadlineExceeded
+
from google.cloud.bigtable.data.exceptions import RetryExceptionGroup
async with self._make_client() as client:
@@ -2545,6 +2556,7 @@ async def test_mutate_row(self, mutation_arg):
@CrossSync.pytest
async def test_mutate_row_retryable_errors(self, retryable_exception):
from google.api_core.exceptions import DeadlineExceeded
+
from google.cloud.bigtable.data.exceptions import RetryExceptionGroup
async with self._make_client(project="project") as client:
@@ -2641,9 +2653,10 @@ def _make_client(self, *args, **kwargs):
@CrossSync.convert
async def _mock_response(self, response_list):
- from google.cloud.bigtable_v2.types import MutateRowsResponse
from google.rpc import status_pb2
+ from google.cloud.bigtable_v2.types import MutateRowsResponse
+
statuses = []
for response in response_list:
if isinstance(response, core_exceptions.GoogleAPICallError):
@@ -2751,9 +2764,9 @@ async def test_bulk_mutate_rows_idempotent_mutation_error_retryable(
Individual idempotent mutations should be retried if they fail with a retryable error
"""
from google.cloud.bigtable.data.exceptions import (
- RetryExceptionGroup,
FailedMutationEntryError,
MutationsExceptionGroup,
+ RetryExceptionGroup,
)
async with self._make_client(project="project") as client:
@@ -2837,9 +2850,9 @@ async def test_bulk_mutate_idempotent_retryable_request_errors(
Individual idempotent mutations should be retried if the request fails with a retryable error
"""
from google.cloud.bigtable.data.exceptions import (
- RetryExceptionGroup,
FailedMutationEntryError,
MutationsExceptionGroup,
+ RetryExceptionGroup,
)
async with self._make_client(project="project") as client:
@@ -2949,13 +2962,14 @@ async def test_bulk_mutate_error_index(self):
"""
from google.api_core.exceptions import (
DeadlineExceeded,
- ServiceUnavailable,
FailedPrecondition,
+ ServiceUnavailable,
)
+
from google.cloud.bigtable.data.exceptions import (
- RetryExceptionGroup,
FailedMutationEntryError,
MutationsExceptionGroup,
+ RetryExceptionGroup,
)
async with self._make_client(project="project") as client:
@@ -3141,8 +3155,8 @@ async def test_check_and_mutate_predicate_object(self):
@CrossSync.pytest
async def test_check_and_mutate_mutations_parsing(self):
"""mutations objects should be converted to protos"""
- from google.cloud.bigtable_v2.types import CheckAndMutateRowResponse
from google.cloud.bigtable.data.mutations import DeleteAllFromFamily
+ from google.cloud.bigtable_v2.types import CheckAndMutateRowResponse
mutations = [mock.Mock() for _ in range(5)]
for idx, mutation in enumerate(mutations):
diff --git a/packages/google-cloud-bigtable/tests/unit/data/_async/test_metrics_interceptor.py b/packages/google-cloud-bigtable/tests/unit/data/_async/test_metrics_interceptor.py
index 1593b8c99acd..dfe861b16d46 100644
--- a/packages/google-cloud-bigtable/tests/unit/data/_async/test_metrics_interceptor.py
+++ b/packages/google-cloud-bigtable/tests/unit/data/_async/test_metrics_interceptor.py
@@ -13,12 +13,13 @@
# limitations under the License.
import pytest
-from grpc import RpcError
-from grpc import ClientCallDetails
+from grpc import ClientCallDetails, RpcError
-from google.cloud.bigtable.data._metrics.data_model import ActiveOperationMetric
-from google.cloud.bigtable.data._metrics.data_model import OperationState
from google.cloud.bigtable.data._cross_sync import CrossSync
+from google.cloud.bigtable.data._metrics.data_model import (
+ ActiveOperationMetric,
+ OperationState,
+)
# try/except added for compatibility with python < 3.8
try:
diff --git a/packages/google-cloud-bigtable/tests/unit/data/_async/test_mutations_batcher.py b/packages/google-cloud-bigtable/tests/unit/data/_async/test_mutations_batcher.py
index b139f31f1c7c..75de7c281332 100644
--- a/packages/google-cloud-bigtable/tests/unit/data/_async/test_mutations_batcher.py
+++ b/packages/google-cloud-bigtable/tests/unit/data/_async/test_mutations_batcher.py
@@ -12,18 +12,18 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import pytest
-import mock
import asyncio
import time
+
import google.api_core.exceptions as core_exceptions
import google.api_core.retry
-from google.cloud.bigtable.data.exceptions import _MutateRowsIncomplete
-from google.cloud.bigtable.data.mutations import RowMutationEntry
-from google.cloud.bigtable.data.mutations import DeleteAllFromRow
-from google.cloud.bigtable.data import TABLE_DEFAULT
+import mock
+import pytest
+from google.cloud.bigtable.data import TABLE_DEFAULT
from google.cloud.bigtable.data._cross_sync import CrossSync
+from google.cloud.bigtable.data.exceptions import _MutateRowsIncomplete
+from google.cloud.bigtable.data.mutations import DeleteAllFromRow, RowMutationEntry
__CROSS_SYNC_OUTPUT__ = "tests.unit.data._sync_autogen.test_mutations_batcher"
@@ -305,8 +305,7 @@ def _get_target_class(self):
return CrossSync.MutationsBatcher
def _make_one(self, table=None, **kwargs):
- from google.api_core.exceptions import DeadlineExceeded
- from google.api_core.exceptions import ServiceUnavailable
+ from google.api_core.exceptions import DeadlineExceeded, ServiceUnavailable
if table is None:
table = mock.Mock()
@@ -887,9 +886,10 @@ async def gen(x):
@CrossSync.convert
async def _mock_gapic_return(self, num=5):
- from google.cloud.bigtable_v2.types import MutateRowsResponse
from google.rpc import status_pb2
+ from google.cloud.bigtable_v2.types import MutateRowsResponse
+
@CrossSync.convert
async def gen(num):
for i in range(num):
@@ -949,8 +949,8 @@ async def test__execute_mutate_rows(self):
async def test__execute_mutate_rows_returns_errors(self):
"""Errors from operation should be retruned as list"""
from google.cloud.bigtable.data.exceptions import (
- MutationsExceptionGroup,
FailedMutationEntryError,
+ MutationsExceptionGroup,
)
with mock.patch.object(CrossSync._MutateRowsOperation, "start") as mutate_rows:
diff --git a/packages/google-cloud-bigtable/tests/unit/data/_async/test_read_rows_acceptance.py b/packages/google-cloud-bigtable/tests/unit/data/_async/test_read_rows_acceptance.py
index ab9502223c21..d69b776bfe42 100644
--- a/packages/google-cloud-bigtable/tests/unit/data/_async/test_read_rows_acceptance.py
+++ b/packages/google-cloud-bigtable/tests/unit/data/_async/test_read_rows_acceptance.py
@@ -15,21 +15,18 @@
import os
import warnings
-import pytest
-import mock
-
from itertools import zip_longest
-from google.cloud.bigtable_v2 import ReadRowsResponse
+import mock
+import pytest
+from google.cloud.bigtable.data._cross_sync import CrossSync
from google.cloud.bigtable.data.exceptions import InvalidChunk
from google.cloud.bigtable.data.row import Row
+from google.cloud.bigtable_v2 import ReadRowsResponse
from ...v2_client.test_row_merger import ReadRowsTest, TestFile
-from google.cloud.bigtable.data._cross_sync import CrossSync
-
-
__CROSS_SYNC_OUTPUT__ = "tests.unit.data._sync_autogen.test_read_rows_acceptance"
diff --git a/packages/google-cloud-bigtable/tests/unit/data/_cross_sync/test_cross_sync.py b/packages/google-cloud-bigtable/tests/unit/data/_cross_sync/test_cross_sync.py
index 410f59437711..bac6b61f0081 100644
--- a/packages/google-cloud-bigtable/tests/unit/data/_cross_sync/test_cross_sync.py
+++ b/packages/google-cloud-bigtable/tests/unit/data/_cross_sync/test_cross_sync.py
@@ -11,16 +11,18 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-import typing
import asyncio
-import pytest
-import pytest_asyncio
-import threading
import concurrent.futures
-import time
-import queue
import functools
+import queue
import sys
+import threading
+import time
+import typing
+
+import pytest
+import pytest_asyncio
+
from google import api_core
from google.cloud.bigtable.data._cross_sync.cross_sync import CrossSync, T
@@ -83,9 +85,9 @@ def test_alias_attributes(
Test basic alias attributes, to ensure they point to the right place
in both sync and async versions.
"""
- assert (
- getattr(cs_async, attr) == async_version
- ), f"Failed async version for {attr}"
+ assert getattr(cs_async, attr) == async_version, (
+ f"Failed async version for {attr}"
+ )
assert getattr(cs_sync, attr) == sync_version, f"Failed sync version for {attr}"
@pytest.mark.asyncio
diff --git a/packages/google-cloud-bigtable/tests/unit/data/_cross_sync/test_cross_sync_decorators.py b/packages/google-cloud-bigtable/tests/unit/data/_cross_sync/test_cross_sync_decorators.py
index 3be579379597..26640a959f11 100644
--- a/packages/google-cloud-bigtable/tests/unit/data/_cross_sync/test_cross_sync_decorators.py
+++ b/packages/google-cloud-bigtable/tests/unit/data/_cross_sync/test_cross_sync_decorators.py
@@ -12,18 +12,20 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import pytest
-import pytest_asyncio
import ast
from unittest import mock
-from google.cloud.bigtable.data._cross_sync.cross_sync import CrossSync
+
+import pytest
+import pytest_asyncio
+
from google.cloud.bigtable.data._cross_sync._decorators import (
- ConvertClass,
Convert,
+ ConvertClass,
Drop,
Pytest,
PytestFixture,
)
+from google.cloud.bigtable.data._cross_sync.cross_sync import CrossSync
@pytest.fixture
diff --git a/packages/google-cloud-bigtable/tests/unit/data/_metrics/test_data_model.py b/packages/google-cloud-bigtable/tests/unit/data/_metrics/test_data_model.py
index 93e73c9d8603..307d8ba4ce87 100644
--- a/packages/google-cloud-bigtable/tests/unit/data/_metrics/test_data_model.py
+++ b/packages/google-cloud-bigtable/tests/unit/data/_metrics/test_data_model.py
@@ -12,8 +12,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import pytest
import mock
+import pytest
from google.cloud.bigtable.data._metrics.data_model import OperationState as State
from google.cloud.bigtable_v2.types import ResponseParams
@@ -627,8 +627,9 @@ def test__exc_to_status(self):
If BigtableExceptionGroup, use the most recent exception in the group
"""
- from grpc import StatusCode
from google.api_core import exceptions as core_exc
+ from grpc import StatusCode
+
from google.cloud.bigtable.data import exceptions as bt_exc
cls = type(self._make_one(object()))
diff --git a/packages/google-cloud-bigtable/tests/unit/data/_metrics/test_tracked_retry.py b/packages/google-cloud-bigtable/tests/unit/data/_metrics/test_tracked_retry.py
index 39713dc694f5..55d09c7829c5 100644
--- a/packages/google-cloud-bigtable/tests/unit/data/_metrics/test_tracked_retry.py
+++ b/packages/google-cloud-bigtable/tests/unit/data/_metrics/test_tracked_retry.py
@@ -12,14 +12,15 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import pytest
import inspect
-import mock
import sys
-from grpc import StatusCode
+
+import google.api_core.retry as retry_module
+import mock
+import pytest
from google.api_core import exceptions as core_exceptions
from google.api_core.retry import RetryFailureReason
-import google.api_core.retry as retry_module
+from grpc import StatusCode
class TestTrackRetryableError:
diff --git a/packages/google-cloud-bigtable/tests/unit/data/_sync_autogen/test__mutate_rows.py b/packages/google-cloud-bigtable/tests/unit/data/_sync_autogen/test__mutate_rows.py
index b198df01b9c8..a9c841399098 100644
--- a/packages/google-cloud-bigtable/tests/unit/data/_sync_autogen/test__mutate_rows.py
+++ b/packages/google-cloud-bigtable/tests/unit/data/_sync_autogen/test__mutate_rows.py
@@ -16,13 +16,11 @@
# This file is automatically generated by CrossSync. Do not edit manually.
import pytest
-from google.cloud.bigtable_v2.types import MutateRowsResponse
-from google.cloud.bigtable.data.mutations import RowMutationEntry
-from google.cloud.bigtable.data.mutations import DeleteAllFromRow
+from google.api_core.exceptions import DeadlineExceeded, Forbidden
from google.rpc import status_pb2
-from google.api_core.exceptions import DeadlineExceeded
-from google.api_core.exceptions import Forbidden
from google.cloud.bigtable.data._cross_sync import CrossSync
+from google.cloud.bigtable.data.mutations import DeleteAllFromRow, RowMutationEntry
+from google.cloud.bigtable_v2.types import MutateRowsResponse
try:
from unittest import mock
@@ -74,10 +72,9 @@ def _make_mock_gapic(self, mutation_list, error_dict=None):
def test_ctor(self):
"""test that constructor sets all the attributes correctly"""
+ from google.api_core.exceptions import Aborted, DeadlineExceeded
from google.cloud.bigtable.data._async._mutate_rows import _EntryWithProto
from google.cloud.bigtable.data.exceptions import _MutateRowsIncomplete
- from google.api_core.exceptions import DeadlineExceeded
- from google.api_core.exceptions import Aborted
client = mock.Mock()
table = mock.Mock()
@@ -170,8 +167,10 @@ def test_mutate_rows_attempt_exception(self, exc_type):
@pytest.mark.parametrize("exc_type", [RuntimeError, ZeroDivisionError, Forbidden])
def test_mutate_rows_exception(self, exc_type):
"""exceptions raised from retryable should be raised in MutationsExceptionGroup"""
- from google.cloud.bigtable.data.exceptions import MutationsExceptionGroup
- from google.cloud.bigtable.data.exceptions import FailedMutationEntryError
+ from google.cloud.bigtable.data.exceptions import (
+ FailedMutationEntryError,
+ MutationsExceptionGroup,
+ )
client = mock.Mock()
table = mock.Mock()
@@ -223,9 +222,11 @@ def test_mutate_rows_exception_retryable_eventually_pass(self, exc_type):
def test_mutate_rows_incomplete_ignored(self):
"""MutateRowsIncomplete exceptions should not be added to error list"""
- from google.cloud.bigtable.data.exceptions import _MutateRowsIncomplete
- from google.cloud.bigtable.data.exceptions import MutationsExceptionGroup
from google.api_core.exceptions import DeadlineExceeded
+ from google.cloud.bigtable.data.exceptions import (
+ MutationsExceptionGroup,
+ _MutateRowsIncomplete,
+ )
client = mock.Mock()
table = mock.Mock()
@@ -259,7 +260,7 @@ def test_run_attempt_single_entry_success(self):
instance._run_attempt()
assert len(instance.remaining_indices) == 0
assert mock_gapic_fn.call_count == 1
- (_, kwargs) = mock_gapic_fn.call_args
+ _, kwargs = mock_gapic_fn.call_args
assert kwargs["timeout"] == expected_timeout
request = kwargs["request"]
assert request.entries == [mutation._to_pb()]
diff --git a/packages/google-cloud-bigtable/tests/unit/data/_sync_autogen/test__read_rows.py b/packages/google-cloud-bigtable/tests/unit/data/_sync_autogen/test__read_rows.py
index a545142d3dfb..eb0343dd4a20 100644
--- a/packages/google-cloud-bigtable/tests/unit/data/_sync_autogen/test__read_rows.py
+++ b/packages/google-cloud-bigtable/tests/unit/data/_sync_autogen/test__read_rows.py
@@ -90,9 +90,9 @@ def test_ctor(self):
def test_revise_request_rowset_keys_with_range(
self, in_keys, last_key, expected, with_range
):
- from google.cloud.bigtable_v2.types import RowSet as RowSetPB
- from google.cloud.bigtable_v2.types import RowRange as RowRangePB
from google.cloud.bigtable.data.exceptions import _RowSetComplete
+ from google.cloud.bigtable_v2.types import RowRange as RowRangePB
+ from google.cloud.bigtable_v2.types import RowSet as RowSetPB
in_keys = [key.encode("utf-8") for key in in_keys]
expected = [key.encode("utf-8") for key in expected]
@@ -159,19 +159,18 @@ def test_revise_request_rowset_keys_with_range(
def test_revise_request_rowset_ranges(
self, in_ranges, last_key, expected, with_key
):
- from google.cloud.bigtable_v2.types import RowSet as RowSetPB
- from google.cloud.bigtable_v2.types import RowRange as RowRangePB
from google.cloud.bigtable.data.exceptions import _RowSetComplete
+ from google.cloud.bigtable_v2.types import RowRange as RowRangePB
+ from google.cloud.bigtable_v2.types import RowSet as RowSetPB
next_key = (last_key + "a").encode("utf-8")
last_key = last_key.encode("utf-8")
in_ranges = [
- RowRangePB(**{k: v.encode("utf-8") for (k, v) in r.items()})
+ RowRangePB(**{k: v.encode("utf-8") for k, v in r.items()})
for r in in_ranges
]
expected = [
- RowRangePB(**{k: v.encode("utf-8") for (k, v) in r.items()})
- for r in expected
+ RowRangePB(**{k: v.encode("utf-8") for k, v in r.items()}) for r in expected
]
if with_key:
row_keys = [next_key]
@@ -188,8 +187,8 @@ def test_revise_request_rowset_ranges(
@pytest.mark.parametrize("last_key", ["a", "b", "c"])
def test_revise_request_full_table(self, last_key):
- from google.cloud.bigtable_v2.types import RowSet as RowSetPB
from google.cloud.bigtable_v2.types import RowRange as RowRangePB
+ from google.cloud.bigtable_v2.types import RowSet as RowSetPB
last_key = last_key.encode("utf-8")
row_set = RowSetPB()
@@ -204,8 +203,8 @@ def test_revise_request_full_table(self, last_key):
def test_revise_to_empty_rowset(self):
"""revising to an empty rowset should raise error"""
from google.cloud.bigtable.data.exceptions import _RowSetComplete
- from google.cloud.bigtable_v2.types import RowSet as RowSetPB
from google.cloud.bigtable_v2.types import RowRange as RowRangePB
+ from google.cloud.bigtable_v2.types import RowSet as RowSetPB
row_keys = [b"a", b"b", b"c"]
row_range = RowRangePB(end_key_open=b"c")
@@ -265,8 +264,8 @@ def test_revise_limit_over_limit(self, start_limit, emit_num):
"""Should raise runtime error if we get in state where emit_num > start_num
(unless start_num == 0, which represents unlimited)"""
from google.cloud.bigtable.data import ReadRowsQuery
- from google.cloud.bigtable_v2.types import ReadRowsResponse
from google.cloud.bigtable.data.exceptions import InvalidChunk
+ from google.cloud.bigtable_v2.types import ReadRowsResponse
def awaitable_stream():
def mock_stream():
diff --git a/packages/google-cloud-bigtable/tests/unit/data/_sync_autogen/test_client.py b/packages/google-cloud-bigtable/tests/unit/data/_sync_autogen/test_client.py
index 54be1f17c1b4..0d5e47b1072b 100644
--- a/packages/google-cloud-bigtable/tests/unit/data/_sync_autogen/test_client.py
+++ b/packages/google-cloud-bigtable/tests/unit/data/_sync_autogen/test_client.py
@@ -15,25 +15,25 @@
# This file is automatically generated by CrossSync. Do not edit manually.
from __future__ import annotations
-import grpc
import asyncio
import re
-import pytest
+import grpc
import mock
-from google.cloud.bigtable.data import mutations
-from google.auth.credentials import AnonymousCredentials
-from google.cloud.bigtable_v2.types import ReadRowsResponse
-from google.cloud.bigtable.data.read_rows_query import ReadRowsQuery
-from google.api_core import exceptions as core_exceptions
+import pytest
from google.api_core import client_options
-from google.cloud.bigtable.data.exceptions import InvalidChunk
-from google.cloud.bigtable.data.exceptions import _MutateRowsIncomplete
+from google.api_core import exceptions as core_exceptions
+from google.auth.credentials import AnonymousCredentials
+from google.cloud.bigtable.data import TABLE_DEFAULT, mutations
+from google.cloud.bigtable.data._cross_sync import CrossSync
+from google.cloud.bigtable.data.exceptions import InvalidChunk, _MutateRowsIncomplete
from google.cloud.bigtable.data.mutations import DeleteAllFromRow
-from google.cloud.bigtable.data import TABLE_DEFAULT
-from google.cloud.bigtable.data.read_modify_write_rules import IncrementRule
-from google.cloud.bigtable.data.read_modify_write_rules import AppendValueRule
+from google.cloud.bigtable.data.read_modify_write_rules import (
+ AppendValueRule,
+ IncrementRule,
+)
+from google.cloud.bigtable.data.read_rows_query import ReadRowsQuery
+from google.cloud.bigtable_v2.types import ReadRowsResponse
from google.cloud.bigtable_v2.types.bigtable import ExecuteQueryResponse
-from google.cloud.bigtable.data._cross_sync import CrossSync
from tests.unit.data.execute_query.sql_helpers import (
chunked_responses,
column,
@@ -95,8 +95,8 @@ def test_ctor(self):
client.close()
def test_ctor_super_inits(self):
- from google.cloud.client import ClientWithProject
from google.api_core import client_options as client_options_lib
+ from google.cloud.client import ClientWithProject
project = "project-id"
credentials = AnonymousCredentials()
@@ -172,9 +172,9 @@ def test_veneer_grpc_headers(self):
wrapped_user_agent_sorted = " ".join(
sorted(client_info.to_user_agent().split(" "))
)
- assert VENEER_HEADER_REGEX.match(
- wrapped_user_agent_sorted
- ), f"'{wrapped_user_agent_sorted}' does not match {VENEER_HEADER_REGEX}"
+ assert VENEER_HEADER_REGEX.match(wrapped_user_agent_sorted), (
+ f"'{wrapped_user_agent_sorted}' does not match {VENEER_HEADER_REGEX}"
+ )
client.close()
def test__start_background_channel_refresh_task_exists(self):
@@ -233,10 +233,9 @@ def test__ping_and_warm_instances(self):
assert len(partial_list) == 4
grpc_call_args = channel.unary_unary().call_args_list
for idx, (_, kwargs) in enumerate(grpc_call_args):
- (
- expected_instance,
- expected_app_profile,
- ) = client_mock._active_instances[idx]
+ expected_instance, expected_app_profile = client_mock._active_instances[
+ idx
+ ]
request = kwargs["request"]
assert request["name"] == expected_instance
assert request["app_profile_id"] == expected_app_profile
@@ -302,9 +301,9 @@ def test__manage_channel_first_sleep(
pass
sleep.assert_called_once()
call_time = sleep.call_args[0][1]
- assert (
- abs(call_time - expected_sleep) < 0.1
- ), f"refresh_interval: {refresh_interval}, wait_time: {wait_time}, expected_sleep: {expected_sleep}"
+ assert abs(call_time - expected_sleep) < 0.1, (
+ f"refresh_interval: {refresh_interval}, wait_time: {wait_time}, expected_sleep: {expected_sleep}"
+ )
client.close()
def test__manage_channel_ping_and_warm(self):
@@ -320,9 +319,9 @@ def test__manage_channel_ping_and_warm(self):
)
with mock.patch.object(*sleep_tuple) as sleep_mock:
sleep_mock.side_effect = [None, asyncio.CancelledError]
- ping_and_warm = (
- client._ping_and_warm_instances
- ) = CrossSync._Sync_Impl.Mock()
+ ping_and_warm = client._ping_and_warm_instances = (
+ CrossSync._Sync_Impl.Mock()
+ )
try:
client._manage_channel(10)
except asyncio.CancelledError:
@@ -338,8 +337,8 @@ def test__manage_channel_ping_and_warm(self):
[(None, 1, 60 * 35), (10, 10, 100), (10, 1, 10)],
)
def test__manage_channel_sleeps(self, refresh_interval, num_cycles, expected_sleep):
- import time
import random
+ import time
with mock.patch.object(random, "uniform") as uniform:
uniform.side_effect = lambda min_, max_: min_
@@ -364,9 +363,9 @@ def test__manage_channel_sleeps(self, refresh_interval, num_cycles, expected_sle
pass
assert sleep.call_count == num_cycles
total_sleep = sum([call[0][1] for call in sleep.call_args_list])
- assert (
- abs(total_sleep - expected_sleep) < 0.5
- ), f"refresh_interval={refresh_interval}, num_cycles={num_cycles}, expected_sleep={expected_sleep}"
+ assert abs(total_sleep - expected_sleep) < 0.5, (
+ f"refresh_interval={refresh_interval}, num_cycles={num_cycles}, expected_sleep={expected_sleep}"
+ )
client.close()
def test__manage_channel_random(self):
@@ -1326,9 +1325,11 @@ def _make_table(self, *args, **kwargs):
)
def _make_stats(self):
- from google.cloud.bigtable_v2.types import RequestStats
- from google.cloud.bigtable_v2.types import FullReadStatsView
- from google.cloud.bigtable_v2.types import ReadIterationStats
+ from google.cloud.bigtable_v2.types import (
+ FullReadStatsView,
+ ReadIterationStats,
+ RequestStats,
+ )
return RequestStats(
full_read_stats_view=FullReadStatsView(
@@ -1647,7 +1648,7 @@ def test_read_row(self):
)
assert row == expected_result
assert read_rows.call_count == 1
- (args, kwargs) = read_rows.call_args_list[0]
+ args, kwargs = read_rows.call_args_list[0]
assert kwargs["operation_timeout"] == expected_op_timeout
assert kwargs["attempt_timeout"] == expected_req_timeout
assert len(args) == 1
@@ -1678,7 +1679,7 @@ def test_read_row_w_filter(self):
)
assert row == expected_result
assert read_rows.call_count == 1
- (args, kwargs) = read_rows.call_args_list[0]
+ args, kwargs = read_rows.call_args_list[0]
assert kwargs["operation_timeout"] == expected_op_timeout
assert kwargs["attempt_timeout"] == expected_req_timeout
assert len(args) == 1
@@ -1705,7 +1706,7 @@ def test_read_row_no_response(self):
)
assert result is None
assert read_rows.call_count == 1
- (args, kwargs) = read_rows.call_args_list[0]
+ args, kwargs = read_rows.call_args_list[0]
assert kwargs["operation_timeout"] == expected_op_timeout
assert kwargs["attempt_timeout"] == expected_req_timeout
assert isinstance(args[0], ReadRowsQuery)
@@ -1734,7 +1735,7 @@ def test_row_exists(self, return_value, expected_result):
)
assert expected_result == result
assert read_rows.call_count == 1
- (args, kwargs) = read_rows.call_args_list[0]
+ args, kwargs = read_rows.call_args_list[0]
assert kwargs["operation_timeout"] == expected_op_timeout
assert kwargs["attempt_timeout"] == expected_req_timeout
assert isinstance(args[0], ReadRowsQuery)
@@ -1771,11 +1772,14 @@ def test_read_rows_sharded_multiple_queries(self):
with mock.patch.object(
table.client._gapic_client, "read_rows"
) as read_rows:
- read_rows.side_effect = lambda *args, **kwargs: CrossSync._Sync_Impl.TestReadRows._make_gapic_stream(
- [
- CrossSync._Sync_Impl.TestReadRows._make_chunk(row_key=k)
- for k in args[0].rows.row_keys
- ]
+ read_rows.side_effect = (
+ lambda *args,
+ **kwargs: CrossSync._Sync_Impl.TestReadRows._make_gapic_stream(
+ [
+ CrossSync._Sync_Impl.TestReadRows._make_chunk(row_key=k)
+ for k in args[0].rows.row_keys
+ ]
+ )
)
query_1 = ReadRowsQuery(b"test_1")
query_2 = ReadRowsQuery(b"test_2")
@@ -1796,8 +1800,10 @@ def test_read_rows_sharded_multiple_queries_calls(self, n_queries):
def test_read_rows_sharded_errors(self):
"""Errors should be exposed as ShardedReadRowsExceptionGroups"""
- from google.cloud.bigtable.data.exceptions import ShardedReadRowsExceptionGroup
- from google.cloud.bigtable.data.exceptions import FailedQueryShardError
+ from google.cloud.bigtable.data.exceptions import (
+ FailedQueryShardError,
+ ShardedReadRowsExceptionGroup,
+ )
with self._make_client() as client:
with client.get_table("instance", "table") as table:
@@ -1866,7 +1872,7 @@ def mock_call(*args, **kwargs):
assert read_rows.call_count == num_queries
rpc_start_list = [
starting_timeout - kwargs["operation_timeout"]
- for (_, kwargs) in read_rows.call_args_list
+ for _, kwargs in read_rows.call_args_list
]
eps = 0.01
assert all(
@@ -1879,9 +1885,9 @@ def mock_call(*args, **kwargs):
def test_read_rows_sharded_expirary(self):
"""If the operation times out before all shards complete, should raise
a ShardedReadRowsExceptionGroup"""
+ from google.api_core.exceptions import DeadlineExceeded
from google.cloud.bigtable.data._helpers import _CONCURRENCY_LIMIT
from google.cloud.bigtable.data.exceptions import ShardedReadRowsExceptionGroup
- from google.api_core.exceptions import DeadlineExceeded
operation_timeout = 0.1
num_queries = 15
@@ -1914,9 +1920,9 @@ def test_read_rows_sharded_negative_batch_timeout(self):
"""try to run with batch that starts after operation timeout
They should raise DeadlineExceeded errors"""
- from google.cloud.bigtable.data.exceptions import ShardedReadRowsExceptionGroup
- from google.cloud.bigtable.data._helpers import _CONCURRENCY_LIMIT
from google.api_core.exceptions import DeadlineExceeded
+ from google.cloud.bigtable.data._helpers import _CONCURRENCY_LIMIT
+ from google.cloud.bigtable.data.exceptions import ShardedReadRowsExceptionGroup
def mock_call(*args, **kwargs):
CrossSync._Sync_Impl.sleep(0.06)
@@ -1998,7 +2004,7 @@ def test_sample_row_keys_default_timeout(self):
) as sample_row_keys:
sample_row_keys.return_value = self._make_gapic_stream([])
result = table.sample_row_keys()
- (_, kwargs) = sample_row_keys.call_args
+ _, kwargs = sample_row_keys.call_args
assert abs(kwargs["timeout"] - expected_timeout) < 0.1
assert result == []
assert kwargs["retry"] is None
@@ -2020,7 +2026,7 @@ def test_sample_row_keys_gapic_params(self):
) as sample_row_keys:
sample_row_keys.return_value = self._make_gapic_stream([])
table.sample_row_keys(attempt_timeout=expected_timeout)
- (args, kwargs) = sample_row_keys.call_args
+ args, kwargs = sample_row_keys.call_args
assert len(args) == 0
assert len(kwargs) == 3
assert kwargs["timeout"] == expected_timeout
@@ -2213,8 +2219,8 @@ def _make_client(self, *args, **kwargs):
return CrossSync._Sync_Impl.TestBigtableDataClient._make_client(*args, **kwargs)
def _mock_response(self, response_list):
- from google.cloud.bigtable_v2.types import MutateRowsResponse
from google.rpc import status_pb2
+ from google.cloud.bigtable_v2.types import MutateRowsResponse
statuses = []
for response in response_list:
@@ -2308,9 +2314,9 @@ def test_bulk_mutate_rows_multiple_entries(self):
def test_bulk_mutate_rows_idempotent_mutation_error_retryable(self, exception):
"""Individual idempotent mutations should be retried if they fail with a retryable error"""
from google.cloud.bigtable.data.exceptions import (
- RetryExceptionGroup,
FailedMutationEntryError,
MutationsExceptionGroup,
+ RetryExceptionGroup,
)
with self._make_client(project="project") as client:
@@ -2380,9 +2386,9 @@ def test_bulk_mutate_rows_idempotent_mutation_error_non_retryable(self, exceptio
def test_bulk_mutate_idempotent_retryable_request_errors(self, retryable_exception):
"""Individual idempotent mutations should be retried if the request fails with a retryable error"""
from google.cloud.bigtable.data.exceptions import (
- RetryExceptionGroup,
FailedMutationEntryError,
MutationsExceptionGroup,
+ RetryExceptionGroup,
)
with self._make_client(project="project") as client:
@@ -2482,13 +2488,13 @@ def test_bulk_mutate_error_index(self):
"""Test partial failure, partial success. Errors should be associated with the correct index"""
from google.api_core.exceptions import (
DeadlineExceeded,
- ServiceUnavailable,
FailedPrecondition,
+ ServiceUnavailable,
)
from google.cloud.bigtable.data.exceptions import (
- RetryExceptionGroup,
FailedMutationEntryError,
MutationsExceptionGroup,
+ RetryExceptionGroup,
)
with self._make_client(project="project") as client:
@@ -2662,8 +2668,8 @@ def test_check_and_mutate_predicate_object(self):
def test_check_and_mutate_mutations_parsing(self):
"""mutations objects should be converted to protos"""
- from google.cloud.bigtable_v2.types import CheckAndMutateRowResponse
from google.cloud.bigtable.data.mutations import DeleteAllFromFamily
+ from google.cloud.bigtable_v2.types import CheckAndMutateRowResponse
mutations = [mock.Mock() for _ in range(5)]
for idx, mutation in enumerate(mutations):
diff --git a/packages/google-cloud-bigtable/tests/unit/data/_sync_autogen/test_metrics_interceptor.py b/packages/google-cloud-bigtable/tests/unit/data/_sync_autogen/test_metrics_interceptor.py
index c4efcc5b96c3..e89108e12bc6 100644
--- a/packages/google-cloud-bigtable/tests/unit/data/_sync_autogen/test_metrics_interceptor.py
+++ b/packages/google-cloud-bigtable/tests/unit/data/_sync_autogen/test_metrics_interceptor.py
@@ -16,11 +16,12 @@
# This file is automatically generated by CrossSync. Do not edit manually.
import pytest
-from grpc import RpcError
-from grpc import ClientCallDetails
-from google.cloud.bigtable.data._metrics.data_model import ActiveOperationMetric
-from google.cloud.bigtable.data._metrics.data_model import OperationState
+from grpc import ClientCallDetails, RpcError
from google.cloud.bigtable.data._cross_sync import CrossSync
+from google.cloud.bigtable.data._metrics.data_model import (
+ ActiveOperationMetric,
+ OperationState,
+)
try:
from unittest import mock
diff --git a/packages/google-cloud-bigtable/tests/unit/data/_sync_autogen/test_mutations_batcher.py b/packages/google-cloud-bigtable/tests/unit/data/_sync_autogen/test_mutations_batcher.py
index 92d16b349273..9f5bec2efb82 100644
--- a/packages/google-cloud-bigtable/tests/unit/data/_sync_autogen/test_mutations_batcher.py
+++ b/packages/google-cloud-bigtable/tests/unit/data/_sync_autogen/test_mutations_batcher.py
@@ -15,17 +15,16 @@
# This file is automatically generated by CrossSync. Do not edit manually.
-import pytest
-import mock
import asyncio
import time
import google.api_core.exceptions as core_exceptions
import google.api_core.retry
-from google.cloud.bigtable.data.exceptions import _MutateRowsIncomplete
-from google.cloud.bigtable.data.mutations import RowMutationEntry
-from google.cloud.bigtable.data.mutations import DeleteAllFromRow
+import mock
+import pytest
from google.cloud.bigtable.data import TABLE_DEFAULT
from google.cloud.bigtable.data._cross_sync import CrossSync
+from google.cloud.bigtable.data.exceptions import _MutateRowsIncomplete
+from google.cloud.bigtable.data.mutations import DeleteAllFromRow, RowMutationEntry
class Test_FlowControl:
@@ -255,8 +254,7 @@ def _get_target_class(self):
return CrossSync._Sync_Impl.MutationsBatcher
def _make_one(self, table=None, **kwargs):
- from google.api_core.exceptions import DeadlineExceeded
- from google.api_core.exceptions import ServiceUnavailable
+ from google.api_core.exceptions import DeadlineExceeded, ServiceUnavailable
if table is None:
table = mock.Mock()
@@ -445,7 +443,7 @@ def test__start_flush_timer_w_empty_input(self, input_val):
self._get_target_class(), "_schedule_flush"
) as flush_mock:
with self._make_one() as instance:
- (sleep_obj, sleep_method) = (instance._closed, "wait")
+ sleep_obj, sleep_method = (instance._closed, "wait")
with mock.patch.object(sleep_obj, sleep_method) as sleep_mock:
result = instance._timer_routine(input_val)
assert sleep_mock.call_count == 0
@@ -461,7 +459,7 @@ def test__start_flush_timer_call_when_closed(self):
with self._make_one() as instance:
instance.close()
flush_mock.reset_mock()
- (sleep_obj, sleep_method) = (instance._closed, "wait")
+ sleep_obj, sleep_method = (instance._closed, "wait")
with mock.patch.object(sleep_obj, sleep_method) as sleep_mock:
instance._timer_routine(10)
assert sleep_mock.call_count == 0
@@ -770,8 +768,8 @@ def gen(x):
instance._newest_exceptions.clear()
def _mock_gapic_return(self, num=5):
- from google.cloud.bigtable_v2.types import MutateRowsResponse
from google.rpc import status_pb2
+ from google.cloud.bigtable_v2.types import MutateRowsResponse
def gen(num):
for i in range(num):
@@ -817,7 +815,7 @@ def test__execute_mutate_rows(self):
batch = [self._make_mutation()]
result = instance._execute_mutate_rows(batch)
assert start_operation.call_count == 1
- (args, kwargs) = mutate_rows.call_args
+ args, kwargs = mutate_rows.call_args
assert args[0] == table.client._gapic_client
assert args[1] == table
assert args[2] == batch
@@ -828,8 +826,8 @@ def test__execute_mutate_rows(self):
def test__execute_mutate_rows_returns_errors(self):
"""Errors from operation should be retruned as list"""
from google.cloud.bigtable.data.exceptions import (
- MutationsExceptionGroup,
FailedMutationEntryError,
+ MutationsExceptionGroup,
)
with mock.patch.object(
@@ -866,7 +864,7 @@ def test__raise_exceptions(self):
assert list(exc.exceptions) == expected_exceptions
assert str(expected_total) in str(exc)
assert instance._entries_processed_since_last_raise == 0
- (instance._oldest_exceptions, instance._newest_exceptions) = ([], [])
+ instance._oldest_exceptions, instance._newest_exceptions = ([], [])
instance._raise_exceptions()
def test___enter__(self):
@@ -908,7 +906,7 @@ def test_close_w_exceptions(self):
assert list(exc.exceptions) == expected_exceptions
assert str(expected_total) in str(exc)
assert instance._entries_processed_since_last_raise == 0
- (instance._oldest_exceptions, instance._newest_exceptions) = ([], [])
+ instance._oldest_exceptions, instance._newest_exceptions = ([], [])
def test__on_exit(self, recwarn):
"""Should raise warnings if unflushed mutations exist"""
diff --git a/packages/google-cloud-bigtable/tests/unit/data/_sync_autogen/test_read_rows_acceptance.py b/packages/google-cloud-bigtable/tests/unit/data/_sync_autogen/test_read_rows_acceptance.py
index 8ceb0daf764d..0950576645e8 100644
--- a/packages/google-cloud-bigtable/tests/unit/data/_sync_autogen/test_read_rows_acceptance.py
+++ b/packages/google-cloud-bigtable/tests/unit/data/_sync_autogen/test_read_rows_acceptance.py
@@ -17,14 +17,14 @@
from __future__ import annotations
import os
import warnings
-import pytest
-import mock
from itertools import zip_longest
-from google.cloud.bigtable_v2 import ReadRowsResponse
+import mock
+import pytest
+from google.cloud.bigtable.data._cross_sync import CrossSync
from google.cloud.bigtable.data.exceptions import InvalidChunk
from google.cloud.bigtable.data.row import Row
+from google.cloud.bigtable_v2 import ReadRowsResponse
from ...v2_client.test_row_merger import ReadRowsTest, TestFile
-from google.cloud.bigtable.data._cross_sync import CrossSync
class TestReadRowsAcceptance:
diff --git a/packages/google-cloud-bigtable/tests/unit/data/execute_query/_async/test_query_iterator.py b/packages/google-cloud-bigtable/tests/unit/data/execute_query/_async/test_query_iterator.py
index df6321f7f5e0..ee193e4b451c 100644
--- a/packages/google-cloud-bigtable/tests/unit/data/execute_query/_async/test_query_iterator.py
+++ b/packages/google-cloud-bigtable/tests/unit/data/execute_query/_async/test_query_iterator.py
@@ -12,23 +12,25 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+import concurrent.futures
import gc
+
+import pytest
+
from google.cloud.bigtable.data import exceptions
+from google.cloud.bigtable.data._cross_sync import CrossSync
from google.cloud.bigtable.data.execute_query.metadata import (
_pb_metadata_to_metadata_types,
)
-import pytest
-import concurrent.futures
+
from ..sql_helpers import (
chunked_responses,
- int_val,
column,
- metadata,
int64_type,
+ int_val,
+ metadata,
)
-from google.cloud.bigtable.data._cross_sync import CrossSync
-
# try/except added for compatibility with python < 3.8
try:
from unittest import mock
diff --git a/packages/google-cloud-bigtable/tests/unit/data/execute_query/_sync_autogen/test_query_iterator.py b/packages/google-cloud-bigtable/tests/unit/data/execute_query/_sync_autogen/test_query_iterator.py
index 3915693cd2cc..25666e8b9d3b 100644
--- a/packages/google-cloud-bigtable/tests/unit/data/execute_query/_sync_autogen/test_query_iterator.py
+++ b/packages/google-cloud-bigtable/tests/unit/data/execute_query/_sync_autogen/test_query_iterator.py
@@ -15,15 +15,15 @@
# This file is automatically generated by CrossSync. Do not edit manually.
+import concurrent.futures
import gc
+import pytest
from google.cloud.bigtable.data import exceptions
+from google.cloud.bigtable.data._cross_sync import CrossSync
from google.cloud.bigtable.data.execute_query.metadata import (
_pb_metadata_to_metadata_types,
)
-import pytest
-import concurrent.futures
-from ..sql_helpers import chunked_responses, int_val, column, metadata, int64_type
-from google.cloud.bigtable.data._cross_sync import CrossSync
+from ..sql_helpers import chunked_responses, column, int64_type, int_val, metadata
try:
from unittest import mock
diff --git a/packages/google-cloud-bigtable/tests/unit/data/execute_query/resources/singer_pb2.py b/packages/google-cloud-bigtable/tests/unit/data/execute_query/resources/singer_pb2.py
index b4481db4bc80..f5da249d4811 100644
--- a/packages/google-cloud-bigtable/tests/unit/data/execute_query/resources/singer_pb2.py
+++ b/packages/google-cloud-bigtable/tests/unit/data/execute_query/resources/singer_pb2.py
@@ -2,10 +2,11 @@
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: singer.proto
"""Generated protocol buffer code."""
-from google.protobuf.internal import builder as _builder
+
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import symbol_database as _symbol_database
+from google.protobuf.internal import builder as _builder
# @@protoc_insertion_point(imports)
diff --git a/packages/google-cloud-bigtable/tests/unit/data/execute_query/sql_helpers.py b/packages/google-cloud-bigtable/tests/unit/data/execute_query/sql_helpers.py
index 119bb2d50862..fbce8fe486ec 100644
--- a/packages/google-cloud-bigtable/tests/unit/data/execute_query/sql_helpers.py
+++ b/packages/google-cloud-bigtable/tests/unit/data/execute_query/sql_helpers.py
@@ -15,6 +15,7 @@
from datetime import datetime, timedelta
from typing import List
+import google_crc32c # type: ignore
from google.protobuf import timestamp_pb2
from google.cloud.bigtable_v2.types.bigtable import (
@@ -22,14 +23,13 @@
PrepareQueryResponse,
)
from google.cloud.bigtable_v2.types.data import (
- Value,
+ ColumnMetadata,
ProtoRows,
ProtoRowsBatch,
ResultSetMetadata,
- ColumnMetadata,
+ Value,
)
from google.cloud.bigtable_v2.types.types import Type
-import google_crc32c # type: ignore
def checksum(data: bytearray) -> int:
diff --git a/packages/google-cloud-bigtable/tests/unit/data/execute_query/test_checksum.py b/packages/google-cloud-bigtable/tests/unit/data/execute_query/test_checksum.py
index 2a391882dc0e..e1317ab79a16 100644
--- a/packages/google-cloud-bigtable/tests/unit/data/execute_query/test_checksum.py
+++ b/packages/google-cloud-bigtable/tests/unit/data/execute_query/test_checksum.py
@@ -11,11 +11,11 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-import pytest
-
import sys
-from unittest import mock
import warnings
+from unittest import mock
+
+import pytest
with warnings.catch_warnings(record=True) as suppressed_warning:
warnings.warn("Supressed warning", RuntimeWarning)
diff --git a/packages/google-cloud-bigtable/tests/unit/data/execute_query/test_execute_query_parameters_parsing.py b/packages/google-cloud-bigtable/tests/unit/data/execute_query/test_execute_query_parameters_parsing.py
index a49d25788deb..290efb0223af 100644
--- a/packages/google-cloud-bigtable/tests/unit/data/execute_query/test_execute_query_parameters_parsing.py
+++ b/packages/google-cloud-bigtable/tests/unit/data/execute_query/test_execute_query_parameters_parsing.py
@@ -14,9 +14,10 @@
import datetime
+import pytest
from google.api_core.datetime_helpers import DatetimeWithNanoseconds
+from google.protobuf import timestamp_pb2
from google.type import date_pb2
-import pytest
from google.cloud.bigtable.data.execute_query._parameters_formatting import (
_format_execute_query_params,
@@ -24,7 +25,7 @@
)
from google.cloud.bigtable.data.execute_query.metadata import SqlType
from google.cloud.bigtable.data.execute_query.values import Struct
-from google.protobuf import timestamp_pb2
+
from .resources import singer_pb2
timestamp = int(
diff --git a/packages/google-cloud-bigtable/tests/unit/data/execute_query/test_query_result_parsing_utils.py b/packages/google-cloud-bigtable/tests/unit/data/execute_query/test_query_result_parsing_utils.py
index 4d1068c7490f..6f2b0a3f6e2f 100644
--- a/packages/google-cloud-bigtable/tests/unit/data/execute_query/test_query_result_parsing_utils.py
+++ b/packages/google-cloud-bigtable/tests/unit/data/execute_query/test_query_result_parsing_utils.py
@@ -12,23 +12,24 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+import datetime
+
import pytest
-from google.cloud.bigtable.data.execute_query.values import Struct
-from google.cloud.bigtable_v2 import Type as PBType, Value as PBValue
+from google.api_core.datetime_helpers import DatetimeWithNanoseconds
+from google.type import date_pb2
+
from google.cloud.bigtable.data.execute_query._query_result_parsing_utils import (
_parse_pb_value_to_python_value,
)
from google.cloud.bigtable.data.execute_query.metadata import (
- _pb_type_to_metadata_type,
SqlType,
+ _pb_type_to_metadata_type,
)
+from google.cloud.bigtable.data.execute_query.values import Struct
+from google.cloud.bigtable_v2 import Type as PBType
+from google.cloud.bigtable_v2 import Value as PBValue
+from tests.unit.data.execute_query.sql_helpers import enum_type, int64_type, proto_type
-from google.type import date_pb2
-from google.api_core.datetime_helpers import DatetimeWithNanoseconds
-
-import datetime
-
-from tests.unit.data.execute_query.sql_helpers import int64_type, proto_type, enum_type
from .resources import singer_pb2
TYPE_BYTES = {"bytes_type": {}}
diff --git a/packages/google-cloud-bigtable/tests/unit/data/execute_query/test_query_result_row_reader.py b/packages/google-cloud-bigtable/tests/unit/data/execute_query/test_query_result_row_reader.py
index 8667643a123c..e14b41826cdd 100644
--- a/packages/google-cloud-bigtable/tests/unit/data/execute_query/test_query_result_row_reader.py
+++ b/packages/google-cloud-bigtable/tests/unit/data/execute_query/test_query_result_row_reader.py
@@ -12,19 +12,20 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import pytest
from unittest import mock
-from google.cloud.bigtable_v2.types.data import Value as PBValue
-from google.cloud.bigtable.data.execute_query._reader import _QueryResultRowReader
+import pytest
+
+import google.cloud.bigtable.data.execute_query._reader
+from google.cloud.bigtable.data.execute_query._reader import _QueryResultRowReader
from google.cloud.bigtable.data.execute_query.metadata import (
Metadata,
SqlType,
_pb_metadata_to_metadata_types,
)
-
-import google.cloud.bigtable.data.execute_query._reader
+from google.cloud.bigtable_v2.types.data import Value as PBValue
from tests.unit.data.execute_query.sql_helpers import (
+ bytes_val,
chunked_responses,
column,
int64_type,
@@ -32,8 +33,8 @@
metadata,
proto_rows_bytes,
str_val,
- bytes_val,
)
+
from .resources import singer_pb2
@@ -169,7 +170,7 @@ def pass_values_to_byte_cursor(byte_cursor, iterable):
returned_values = []
def intercept_return_values(func):
- nonlocal intercept_return_values
+ nonlocal intercept_return_values # noqa: F824
def wrapped(*args, **kwargs):
value = func(*args, **kwargs)
diff --git a/packages/google-cloud-bigtable/tests/unit/data/test__helpers.py b/packages/google-cloud-bigtable/tests/unit/data/test__helpers.py
index c8540024d60c..289b5ba7408b 100644
--- a/packages/google-cloud-bigtable/tests/unit/data/test__helpers.py
+++ b/packages/google-cloud-bigtable/tests/unit/data/test__helpers.py
@@ -12,14 +12,14 @@
# limitations under the License.
#
-import pytest
import grpc
+import mock
+import pytest
from google.api_core import exceptions as core_exceptions
+
import google.cloud.bigtable.data._helpers as _helpers
from google.cloud.bigtable.data._helpers import TABLE_DEFAULT
-import mock
-
class TestAttemptTimeoutGenerator:
@pytest.mark.parametrize(
diff --git a/packages/google-cloud-bigtable/tests/unit/data/test_exceptions.py b/packages/google-cloud-bigtable/tests/unit/data/test_exceptions.py
index bc921717e596..3095065817f8 100644
--- a/packages/google-cloud-bigtable/tests/unit/data/test_exceptions.py
+++ b/packages/google-cloud-bigtable/tests/unit/data/test_exceptions.py
@@ -12,9 +12,10 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+import sys
import unittest
+
import pytest
-import sys
import google.cloud.bigtable.data.exceptions as bigtable_exceptions
diff --git a/packages/google-cloud-bigtable/tests/unit/data/test_helpers.py b/packages/google-cloud-bigtable/tests/unit/data/test_helpers.py
index 5d1ad70f8e97..7668f51d0cad 100644
--- a/packages/google-cloud-bigtable/tests/unit/data/test_helpers.py
+++ b/packages/google-cloud-bigtable/tests/unit/data/test_helpers.py
@@ -14,6 +14,7 @@
#
import pytest
+
from google.cloud.bigtable.helpers import batched
diff --git a/packages/google-cloud-bigtable/tests/unit/data/test_read_modify_write_rules.py b/packages/google-cloud-bigtable/tests/unit/data/test_read_modify_write_rules.py
index 1f67da13b170..c2e042ef640f 100644
--- a/packages/google-cloud-bigtable/tests/unit/data/test_read_modify_write_rules.py
+++ b/packages/google-cloud-bigtable/tests/unit/data/test_read_modify_write_rules.py
@@ -72,9 +72,7 @@ def test_ctor_bad_input(self, input_amount):
self._target_class()("fam", b"qual", input_amount)
assert "increment_amount must be an integer" in str(e.value)
- @pytest.mark.parametrize(
- "large_value", [2**64, 2**64 + 1, -(2**64), -(2**64) - 1]
- )
+ @pytest.mark.parametrize("large_value", [2**64, 2**64 + 1, -(2**64), -(2**64) - 1])
def test_ctor_large_values(self, large_value):
with pytest.raises(ValueError) as e:
self._target_class()("fam", b"qual", large_value)
diff --git a/packages/google-cloud-bigtable/tests/unit/data/test_read_rows_query.py b/packages/google-cloud-bigtable/tests/unit/data/test_read_rows_query.py
index ba3b0468bbb8..e88ca4a9bb71 100644
--- a/packages/google-cloud-bigtable/tests/unit/data/test_read_rows_query.py
+++ b/packages/google-cloud-bigtable/tests/unit/data/test_read_rows_query.py
@@ -206,8 +206,8 @@ def test_ctor_defaults(self):
assert query.limit is None
def test_ctor_explicit(self):
- from google.cloud.bigtable.data.row_filters import RowFilterChain
from google.cloud.bigtable.data.read_rows_query import RowRange
+ from google.cloud.bigtable.data.row_filters import RowFilterChain
filter_ = RowFilterChain()
query = self._make_one(
@@ -559,8 +559,7 @@ def test_shard_limit_exception(self):
],
)
def test___eq__(self, first_args, second_args, expected):
- from google.cloud.bigtable.data.read_rows_query import ReadRowsQuery
- from google.cloud.bigtable.data.read_rows_query import RowRange
+ from google.cloud.bigtable.data.read_rows_query import ReadRowsQuery, RowRange
# replace row_range placeholders with a RowRange object
if len(first_args) > 1:
diff --git a/packages/google-cloud-bigtable/tests/unit/data/test_row.py b/packages/google-cloud-bigtable/tests/unit/data/test_row.py
index 10b5bdb2316f..95a72399acff 100644
--- a/packages/google-cloud-bigtable/tests/unit/data/test_row.py
+++ b/packages/google-cloud-bigtable/tests/unit/data/test_row.py
@@ -12,9 +12,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import unittest
-
import time
+import unittest
TEST_VALUE = b"1234"
TEST_ROW_KEY = b"row"
@@ -59,10 +58,10 @@ def test__from_pb(self):
"""
Construct from protobuf.
"""
- from google.cloud.bigtable_v2.types import Row as RowPB
- from google.cloud.bigtable_v2.types import Family as FamilyPB
- from google.cloud.bigtable_v2.types import Column as ColumnPB
from google.cloud.bigtable_v2.types import Cell as CellPB
+ from google.cloud.bigtable_v2.types import Column as ColumnPB
+ from google.cloud.bigtable_v2.types import Family as FamilyPB
+ from google.cloud.bigtable_v2.types import Row as RowPB
row_key = b"row_key"
cells = [
diff --git a/packages/google-cloud-bigtable/tests/unit/data/test_row_filters.py b/packages/google-cloud-bigtable/tests/unit/data/test_row_filters.py
index e90b6f270a61..6be9b4a2b252 100644
--- a/packages/google-cloud-bigtable/tests/unit/data/test_row_filters.py
+++ b/packages/google-cloud-bigtable/tests/unit/data/test_row_filters.py
@@ -17,10 +17,12 @@
def test_abstract_class_constructors():
- from google.cloud.bigtable.data.row_filters import RowFilter
- from google.cloud.bigtable.data.row_filters import _BoolFilter
- from google.cloud.bigtable.data.row_filters import _FilterCombination
- from google.cloud.bigtable.data.row_filters import _CellCountFilter
+ from google.cloud.bigtable.data.row_filters import (
+ RowFilter,
+ _BoolFilter,
+ _CellCountFilter,
+ _FilterCombination,
+ )
with pytest.raises(TypeError):
RowFilter()
@@ -392,7 +394,9 @@ def test_timestamp_range___ne__same_value():
def _timestamp_range_to_pb_helper(pb_kwargs, start=None, end=None):
import datetime
+
from google.cloud._helpers import _EPOCH
+
from google.cloud.bigtable.data.row_filters import TimestampRange
if start is not None:
@@ -421,9 +425,10 @@ def test_timestamp_range_to_pb():
def test_timestamp_range_to_dict():
+ import datetime
+
from google.cloud.bigtable.data.row_filters import TimestampRange
from google.cloud.bigtable_v2.types import data as data_v2_pb2
- import datetime
row_filter = TimestampRange(
start=datetime.datetime(2019, 1, 1), end=datetime.datetime(2019, 1, 2)
@@ -448,9 +453,10 @@ def test_timestamp_range_to_pb_start_only():
def test_timestamp_range_to_dict_start_only():
+ import datetime
+
from google.cloud.bigtable.data.row_filters import TimestampRange
from google.cloud.bigtable_v2.types import data as data_v2_pb2
- import datetime
row_filter = TimestampRange(start=datetime.datetime(2019, 1, 1))
expected_dict = {"start_timestamp_micros": 1546300800000000}
@@ -470,9 +476,10 @@ def test_timestamp_range_to_pb_end_only():
def test_timestamp_range_to_dict_end_only():
+ import datetime
+
from google.cloud.bigtable.data.row_filters import TimestampRange
from google.cloud.bigtable_v2.types import data as data_v2_pb2
- import datetime
row_filter = TimestampRange(end=datetime.datetime(2019, 1, 2))
expected_dict = {"end_timestamp_micros": 1546387200000000}
@@ -530,9 +537,10 @@ def test_timestamp_range_filter_to_pb():
def test_timestamp_range_filter_to_dict():
+ import datetime
+
from google.cloud.bigtable.data.row_filters import TimestampRangeFilter
from google.cloud.bigtable_v2.types import data as data_v2_pb2
- import datetime
row_filter = TimestampRangeFilter(
start=datetime.datetime(2019, 1, 1), end=datetime.datetime(2019, 1, 2)
@@ -560,9 +568,10 @@ def test_timestamp_range_filter_empty_to_dict():
def test_timestamp_range_filter___repr__():
- from google.cloud.bigtable.data.row_filters import TimestampRangeFilter
import datetime
+ from google.cloud.bigtable.data.row_filters import TimestampRangeFilter
+
start = datetime.datetime(2019, 1, 1)
end = datetime.datetime(2019, 1, 2)
row_filter = TimestampRangeFilter(start, end)
@@ -944,9 +953,10 @@ def test_value_range_filter_constructor_explicit():
def test_value_range_filter_constructor_w_int_values():
- from google.cloud.bigtable.data.row_filters import ValueRangeFilter
import struct
+ from google.cloud.bigtable.data.row_filters import ValueRangeFilter
+
start_value = 1
end_value = 10
@@ -1411,9 +1421,11 @@ def test_filter_combination___str__():
def test_row_filter_chain_to_pb():
- from google.cloud.bigtable.data.row_filters import RowFilterChain
- from google.cloud.bigtable.data.row_filters import RowSampleFilter
- from google.cloud.bigtable.data.row_filters import StripValueTransformerFilter
+ from google.cloud.bigtable.data.row_filters import (
+ RowFilterChain,
+ RowSampleFilter,
+ StripValueTransformerFilter,
+ )
row_filter1 = StripValueTransformerFilter(True)
row_filter1_pb = row_filter1._to_pb()
@@ -1431,9 +1443,11 @@ def test_row_filter_chain_to_pb():
def test_row_filter_chain_to_dict():
- from google.cloud.bigtable.data.row_filters import RowFilterChain
- from google.cloud.bigtable.data.row_filters import RowSampleFilter
- from google.cloud.bigtable.data.row_filters import StripValueTransformerFilter
+ from google.cloud.bigtable.data.row_filters import (
+ RowFilterChain,
+ RowSampleFilter,
+ StripValueTransformerFilter,
+ )
from google.cloud.bigtable_v2.types import data as data_v2_pb2
row_filter1 = StripValueTransformerFilter(True)
@@ -1452,10 +1466,12 @@ def test_row_filter_chain_to_dict():
def test_row_filter_chain_to_pb_nested():
- from google.cloud.bigtable.data.row_filters import CellsRowLimitFilter
- from google.cloud.bigtable.data.row_filters import RowFilterChain
- from google.cloud.bigtable.data.row_filters import RowSampleFilter
- from google.cloud.bigtable.data.row_filters import StripValueTransformerFilter
+ from google.cloud.bigtable.data.row_filters import (
+ CellsRowLimitFilter,
+ RowFilterChain,
+ RowSampleFilter,
+ StripValueTransformerFilter,
+ )
row_filter1 = StripValueTransformerFilter(True)
row_filter2 = RowSampleFilter(0.25)
@@ -1476,10 +1492,12 @@ def test_row_filter_chain_to_pb_nested():
def test_row_filter_chain_to_dict_nested():
- from google.cloud.bigtable.data.row_filters import CellsRowLimitFilter
- from google.cloud.bigtable.data.row_filters import RowFilterChain
- from google.cloud.bigtable.data.row_filters import RowSampleFilter
- from google.cloud.bigtable.data.row_filters import StripValueTransformerFilter
+ from google.cloud.bigtable.data.row_filters import (
+ CellsRowLimitFilter,
+ RowFilterChain,
+ RowSampleFilter,
+ StripValueTransformerFilter,
+ )
from google.cloud.bigtable_v2.types import data as data_v2_pb2
row_filter1 = StripValueTransformerFilter(True)
@@ -1502,9 +1520,11 @@ def test_row_filter_chain_to_dict_nested():
def test_row_filter_chain___repr__():
- from google.cloud.bigtable.data.row_filters import RowFilterChain
- from google.cloud.bigtable.data.row_filters import RowSampleFilter
- from google.cloud.bigtable.data.row_filters import StripValueTransformerFilter
+ from google.cloud.bigtable.data.row_filters import (
+ RowFilterChain,
+ RowSampleFilter,
+ StripValueTransformerFilter,
+ )
row_filter1 = StripValueTransformerFilter(True)
row_filter2 = RowSampleFilter(0.25)
@@ -1516,9 +1536,11 @@ def test_row_filter_chain___repr__():
def test_row_filter_chain___str__():
- from google.cloud.bigtable.data.row_filters import RowFilterChain
- from google.cloud.bigtable.data.row_filters import RowSampleFilter
- from google.cloud.bigtable.data.row_filters import StripValueTransformerFilter
+ from google.cloud.bigtable.data.row_filters import (
+ RowFilterChain,
+ RowSampleFilter,
+ StripValueTransformerFilter,
+ )
row_filter1 = StripValueTransformerFilter(True)
row_filter2 = RowSampleFilter(0.25)
@@ -1533,9 +1555,11 @@ def test_row_filter_chain___str__():
def test_row_filter_union_to_pb():
- from google.cloud.bigtable.data.row_filters import RowFilterUnion
- from google.cloud.bigtable.data.row_filters import RowSampleFilter
- from google.cloud.bigtable.data.row_filters import StripValueTransformerFilter
+ from google.cloud.bigtable.data.row_filters import (
+ RowFilterUnion,
+ RowSampleFilter,
+ StripValueTransformerFilter,
+ )
row_filter1 = StripValueTransformerFilter(True)
row_filter1_pb = row_filter1._to_pb()
@@ -1553,9 +1577,11 @@ def test_row_filter_union_to_pb():
def test_row_filter_union_to_dict():
- from google.cloud.bigtable.data.row_filters import RowFilterUnion
- from google.cloud.bigtable.data.row_filters import RowSampleFilter
- from google.cloud.bigtable.data.row_filters import StripValueTransformerFilter
+ from google.cloud.bigtable.data.row_filters import (
+ RowFilterUnion,
+ RowSampleFilter,
+ StripValueTransformerFilter,
+ )
from google.cloud.bigtable_v2.types import data as data_v2_pb2
row_filter1 = StripValueTransformerFilter(True)
@@ -1574,10 +1600,12 @@ def test_row_filter_union_to_dict():
def test_row_filter_union_to_pb_nested():
- from google.cloud.bigtable.data.row_filters import CellsRowLimitFilter
- from google.cloud.bigtable.data.row_filters import RowFilterUnion
- from google.cloud.bigtable.data.row_filters import RowSampleFilter
- from google.cloud.bigtable.data.row_filters import StripValueTransformerFilter
+ from google.cloud.bigtable.data.row_filters import (
+ CellsRowLimitFilter,
+ RowFilterUnion,
+ RowSampleFilter,
+ StripValueTransformerFilter,
+ )
row_filter1 = StripValueTransformerFilter(True)
row_filter2 = RowSampleFilter(0.25)
@@ -1598,10 +1626,12 @@ def test_row_filter_union_to_pb_nested():
def test_row_filter_union_to_dict_nested():
- from google.cloud.bigtable.data.row_filters import CellsRowLimitFilter
- from google.cloud.bigtable.data.row_filters import RowFilterUnion
- from google.cloud.bigtable.data.row_filters import RowSampleFilter
- from google.cloud.bigtable.data.row_filters import StripValueTransformerFilter
+ from google.cloud.bigtable.data.row_filters import (
+ CellsRowLimitFilter,
+ RowFilterUnion,
+ RowSampleFilter,
+ StripValueTransformerFilter,
+ )
from google.cloud.bigtable_v2.types import data as data_v2_pb2
row_filter1 = StripValueTransformerFilter(True)
@@ -1624,9 +1654,11 @@ def test_row_filter_union_to_dict_nested():
def test_row_filter_union___repr__():
- from google.cloud.bigtable.data.row_filters import RowFilterUnion
- from google.cloud.bigtable.data.row_filters import RowSampleFilter
- from google.cloud.bigtable.data.row_filters import StripValueTransformerFilter
+ from google.cloud.bigtable.data.row_filters import (
+ RowFilterUnion,
+ RowSampleFilter,
+ StripValueTransformerFilter,
+ )
row_filter1 = StripValueTransformerFilter(True)
row_filter2 = RowSampleFilter(0.25)
@@ -1638,9 +1670,11 @@ def test_row_filter_union___repr__():
def test_row_filter_union___str__():
- from google.cloud.bigtable.data.row_filters import RowFilterUnion
- from google.cloud.bigtable.data.row_filters import RowSampleFilter
- from google.cloud.bigtable.data.row_filters import StripValueTransformerFilter
+ from google.cloud.bigtable.data.row_filters import (
+ RowFilterUnion,
+ RowSampleFilter,
+ StripValueTransformerFilter,
+ )
row_filter1 = StripValueTransformerFilter(True)
row_filter2 = RowSampleFilter(0.25)
@@ -1713,10 +1747,12 @@ def test_conditional_row_filter___ne__():
def test_conditional_row_filter_to_pb():
- from google.cloud.bigtable.data.row_filters import ConditionalRowFilter
- from google.cloud.bigtable.data.row_filters import CellsRowOffsetFilter
- from google.cloud.bigtable.data.row_filters import RowSampleFilter
- from google.cloud.bigtable.data.row_filters import StripValueTransformerFilter
+ from google.cloud.bigtable.data.row_filters import (
+ CellsRowOffsetFilter,
+ ConditionalRowFilter,
+ RowSampleFilter,
+ StripValueTransformerFilter,
+ )
row_filter1 = StripValueTransformerFilter(True)
row_filter1_pb = row_filter1._to_pb()
@@ -1743,10 +1779,12 @@ def test_conditional_row_filter_to_pb():
def test_conditional_row_filter_to_dict():
- from google.cloud.bigtable.data.row_filters import ConditionalRowFilter
- from google.cloud.bigtable.data.row_filters import CellsRowOffsetFilter
- from google.cloud.bigtable.data.row_filters import RowSampleFilter
- from google.cloud.bigtable.data.row_filters import StripValueTransformerFilter
+ from google.cloud.bigtable.data.row_filters import (
+ CellsRowOffsetFilter,
+ ConditionalRowFilter,
+ RowSampleFilter,
+ StripValueTransformerFilter,
+ )
from google.cloud.bigtable_v2.types import data as data_v2_pb2
row_filter1 = StripValueTransformerFilter(True)
@@ -1776,9 +1814,11 @@ def test_conditional_row_filter_to_dict():
def test_conditional_row_filter_to_pb_true_only():
- from google.cloud.bigtable.data.row_filters import ConditionalRowFilter
- from google.cloud.bigtable.data.row_filters import RowSampleFilter
- from google.cloud.bigtable.data.row_filters import StripValueTransformerFilter
+ from google.cloud.bigtable.data.row_filters import (
+ ConditionalRowFilter,
+ RowSampleFilter,
+ StripValueTransformerFilter,
+ )
row_filter1 = StripValueTransformerFilter(True)
row_filter1_pb = row_filter1._to_pb()
@@ -1798,9 +1838,11 @@ def test_conditional_row_filter_to_pb_true_only():
def test_conditional_row_filter_to_dict_true_only():
- from google.cloud.bigtable.data.row_filters import ConditionalRowFilter
- from google.cloud.bigtable.data.row_filters import RowSampleFilter
- from google.cloud.bigtable.data.row_filters import StripValueTransformerFilter
+ from google.cloud.bigtable.data.row_filters import (
+ ConditionalRowFilter,
+ RowSampleFilter,
+ StripValueTransformerFilter,
+ )
from google.cloud.bigtable_v2.types import data as data_v2_pb2
row_filter1 = StripValueTransformerFilter(True)
@@ -1824,9 +1866,11 @@ def test_conditional_row_filter_to_dict_true_only():
def test_conditional_row_filter_to_pb_false_only():
- from google.cloud.bigtable.data.row_filters import ConditionalRowFilter
- from google.cloud.bigtable.data.row_filters import RowSampleFilter
- from google.cloud.bigtable.data.row_filters import StripValueTransformerFilter
+ from google.cloud.bigtable.data.row_filters import (
+ ConditionalRowFilter,
+ RowSampleFilter,
+ StripValueTransformerFilter,
+ )
row_filter1 = StripValueTransformerFilter(True)
row_filter1_pb = row_filter1._to_pb()
@@ -1846,9 +1890,11 @@ def test_conditional_row_filter_to_pb_false_only():
def test_conditional_row_filter_to_dict_false_only():
- from google.cloud.bigtable.data.row_filters import ConditionalRowFilter
- from google.cloud.bigtable.data.row_filters import RowSampleFilter
- from google.cloud.bigtable.data.row_filters import StripValueTransformerFilter
+ from google.cloud.bigtable.data.row_filters import (
+ ConditionalRowFilter,
+ RowSampleFilter,
+ StripValueTransformerFilter,
+ )
from google.cloud.bigtable_v2.types import data as data_v2_pb2
row_filter1 = StripValueTransformerFilter(True)
@@ -1872,9 +1918,11 @@ def test_conditional_row_filter_to_dict_false_only():
def test_conditional_row_filter___repr__():
- from google.cloud.bigtable.data.row_filters import ConditionalRowFilter
- from google.cloud.bigtable.data.row_filters import RowSampleFilter
- from google.cloud.bigtable.data.row_filters import StripValueTransformerFilter
+ from google.cloud.bigtable.data.row_filters import (
+ ConditionalRowFilter,
+ RowSampleFilter,
+ StripValueTransformerFilter,
+ )
row_filter1 = StripValueTransformerFilter(True)
row_filter2 = RowSampleFilter(0.25)
@@ -1893,10 +1941,12 @@ def test_conditional_row_filter___repr__():
def test_conditional_row_filter___str__():
- from google.cloud.bigtable.data.row_filters import ConditionalRowFilter
- from google.cloud.bigtable.data.row_filters import RowSampleFilter
- from google.cloud.bigtable.data.row_filters import RowFilterUnion
- from google.cloud.bigtable.data.row_filters import StripValueTransformerFilter
+ from google.cloud.bigtable.data.row_filters import (
+ ConditionalRowFilter,
+ RowFilterUnion,
+ RowSampleFilter,
+ StripValueTransformerFilter,
+ )
row_filter1 = StripValueTransformerFilter(True)
row_filter2 = RowSampleFilter(0.25)
@@ -1981,11 +2031,11 @@ def _ValueRangePB(*args, **kw):
def _get_regex_filters():
from google.cloud.bigtable.data.row_filters import (
- RowKeyRegexFilter,
- FamilyNameRegexFilter,
ColumnQualifierRegexFilter,
- ValueRegexFilter,
+ FamilyNameRegexFilter,
LiteralValueFilter,
+ RowKeyRegexFilter,
+ ValueRegexFilter,
)
return [
@@ -1999,9 +2049,9 @@ def _get_regex_filters():
def _get_bool_filters():
from google.cloud.bigtable.data.row_filters import (
- SinkFilter,
- PassAllFilter,
BlockAllFilter,
+ PassAllFilter,
+ SinkFilter,
StripValueTransformerFilter,
)
@@ -2015,9 +2065,9 @@ def _get_bool_filters():
def _get_cell_count_filters():
from google.cloud.bigtable.data.row_filters import (
+ CellsColumnLimitFilter,
CellsRowLimitFilter,
CellsRowOffsetFilter,
- CellsColumnLimitFilter,
)
return [
diff --git a/packages/google-cloud-bigtable/tests/unit/data/test_sync_up_to_date.py b/packages/google-cloud-bigtable/tests/unit/data/test_sync_up_to_date.py
index e6bce9cf6266..4efaa4e903df 100644
--- a/packages/google-cloud-bigtable/tests/unit/data/test_sync_up_to_date.py
+++ b/packages/google-cloud-bigtable/tests/unit/data/test_sync_up_to_date.py
@@ -11,14 +11,15 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-import os
-import sys
-import hashlib
-import pytest
import ast
+import hashlib
+import os
import re
+import sys
from difflib import unified_diff
+import pytest
+
if sys.version_info < (3, 9):
pytest.skip("ast.unparse is only available in 3.9+", allow_module_level=True)
@@ -28,7 +29,7 @@
cross_sync_path = os.path.join(repo_root, ".cross_sync")
sys.path.append(cross_sync_path)
-from generate import convert_files_in_dir, CrossSyncOutputFile # noqa: E402
+from generate import CrossSyncOutputFile, convert_files_in_dir # noqa: E402
sync_files = list(convert_files_in_dir(repo_root))
@@ -46,9 +47,9 @@ def test_found_files():
assert "execute_query_iterator.py" in outputs
assert "test_client.py" in outputs
assert "test_system_autogen.py" in outputs, "system tests not found"
- assert (
- "client_handler_data_sync_autogen.py" in outputs
- ), "test proxy handler not found"
+ assert "client_handler_data_sync_autogen.py" in outputs, (
+ "test proxy handler not found"
+ )
@pytest.mark.parametrize("sync_file", sync_files, ids=lambda f: f.output_path)
@@ -66,9 +67,9 @@ def test_sync_up_to_date(sync_file):
# compare by content
diff = unified_diff(found_render.splitlines(), new_render.splitlines(), lineterm="")
diff_str = "\n".join(diff)
- assert (
- not diff_str
- ), f"Found differences. Run `nox -s generate_sync` to update:\n{diff_str}"
+ assert not diff_str, (
+ f"Found differences. Run `nox -s generate_sync` to update:\n{diff_str}"
+ )
# compare by hash
new_hash = hashlib.md5(new_render.encode()).hexdigest()
found_hash = hashlib.md5(found_render.encode()).hexdigest()
diff --git a/packages/google-cloud-bigtable/tests/unit/gapic/bigtable_admin_v2/test_bigtable_instance_admin.py b/packages/google-cloud-bigtable/tests/unit/gapic/bigtable_admin_v2/test_bigtable_instance_admin.py
index b0ba35f0c3de..53f15a597a29 100644
--- a/packages/google-cloud-bigtable/tests/unit/gapic/bigtable_admin_v2/test_bigtable_instance_admin.py
+++ b/packages/google-cloud-bigtable/tests/unit/gapic/bigtable_admin_v2/test_bigtable_instance_admin.py
@@ -22,20 +22,19 @@
except ImportError: # pragma: NO COVER
import mock
-import grpc
-from grpc.experimental import aio
-from collections.abc import Iterable, AsyncIterable
-from google.protobuf import json_format
import json
import math
+from collections.abc import AsyncIterable, Iterable, Mapping, Sequence
+
+import grpc
import pytest
from google.api_core import api_core_version
-from proto.marshal.rules.dates import DurationRule, TimestampRule
+from google.protobuf import json_format
+from grpc.experimental import aio
from proto.marshal.rules import wrappers
-from requests import Response
-from requests import Request, PreparedRequest
+from proto.marshal.rules.dates import DurationRule, TimestampRule
+from requests import PreparedRequest, Request, Response
from requests.sessions import Session
-from google.protobuf import json_format
try:
from google.auth.aio import credentials as ga_credentials_async
@@ -44,41 +43,43 @@
except ImportError: # pragma: NO COVER
HAS_GOOGLE_AUTH_AIO = False
-from google.api_core import client_options
+import google.api_core.operation_async as operation_async # type: ignore
+import google.auth
+import google.iam.v1.iam_policy_pb2 as iam_policy_pb2 # type: ignore
+import google.iam.v1.options_pb2 as options_pb2 # type: ignore
+import google.iam.v1.policy_pb2 as policy_pb2 # type: ignore
+import google.protobuf.field_mask_pb2 as field_mask_pb2 # type: ignore
+import google.protobuf.timestamp_pb2 as timestamp_pb2 # type: ignore
+import google.type.expr_pb2 as expr_pb2 # type: ignore
+from google.api_core import (
+ client_options,
+ future,
+ gapic_v1,
+ grpc_helpers,
+ grpc_helpers_async,
+ operation,
+ operations_v1,
+ path_template,
+)
from google.api_core import exceptions as core_exceptions
-from google.api_core import future
-from google.api_core import gapic_v1
-from google.api_core import grpc_helpers
-from google.api_core import grpc_helpers_async
-from google.api_core import operation
-from google.api_core import operation_async # type: ignore
-from google.api_core import operations_v1
-from google.api_core import path_template
from google.api_core import retry as retries
from google.auth import credentials as ga_credentials
from google.auth.exceptions import MutualTLSChannelError
+from google.longrunning import operations_pb2 # type: ignore
+from google.oauth2 import service_account
+
from google.cloud.bigtable_admin_v2.services.bigtable_instance_admin import (
BigtableInstanceAdminAsyncClient,
-)
-from google.cloud.bigtable_admin_v2.services.bigtable_instance_admin import (
BigtableInstanceAdminClient,
+ pagers,
+ transports,
+)
+from google.cloud.bigtable_admin_v2.types import (
+ bigtable_instance_admin,
+ common,
+ instance,
)
-from google.cloud.bigtable_admin_v2.services.bigtable_instance_admin import pagers
-from google.cloud.bigtable_admin_v2.services.bigtable_instance_admin import transports
-from google.cloud.bigtable_admin_v2.types import bigtable_instance_admin
-from google.cloud.bigtable_admin_v2.types import common
-from google.cloud.bigtable_admin_v2.types import instance
from google.cloud.bigtable_admin_v2.types import instance as gba_instance
-from google.iam.v1 import iam_policy_pb2 # type: ignore
-from google.iam.v1 import options_pb2 # type: ignore
-from google.iam.v1 import policy_pb2 # type: ignore
-from google.longrunning import operations_pb2 # type: ignore
-from google.oauth2 import service_account
-from google.protobuf import field_mask_pb2 # type: ignore
-from google.protobuf import timestamp_pb2 # type: ignore
-from google.type import expr_pb2 # type: ignore
-import google.auth
-
CRED_INFO_JSON = {
"credential_source": "/path/to/file",
@@ -134,6 +135,7 @@ def test__get_default_mtls_endpoint():
sandbox_endpoint = "example.sandbox.googleapis.com"
sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com"
non_googleapi = "api.example.com"
+ custom_endpoint = ".custom"
assert BigtableInstanceAdminClient._get_default_mtls_endpoint(None) is None
assert (
@@ -156,6 +158,10 @@ def test__get_default_mtls_endpoint():
BigtableInstanceAdminClient._get_default_mtls_endpoint(non_googleapi)
== non_googleapi
)
+ assert (
+ BigtableInstanceAdminClient._get_default_mtls_endpoint(custom_endpoint)
+ == custom_endpoint
+ )
def test__read_environment_variables():
@@ -1014,10 +1020,9 @@ def test_bigtable_instance_admin_client_get_mtls_endpoint_and_cert_source(client
client_cert_source=mock_client_cert_source,
api_endpoint=mock_api_endpoint,
)
- (
- api_endpoint,
- cert_source,
- ) = client_class.get_mtls_endpoint_and_cert_source(options)
+ api_endpoint, cert_source = (
+ client_class.get_mtls_endpoint_and_cert_source(options)
+ )
assert api_endpoint == mock_api_endpoint
assert cert_source is expected_cert_source
@@ -1062,10 +1067,9 @@ def test_bigtable_instance_admin_client_get_mtls_endpoint_and_cert_source(client
client_cert_source=mock_client_cert_source,
api_endpoint=mock_api_endpoint,
)
- (
- api_endpoint,
- cert_source,
- ) = client_class.get_mtls_endpoint_and_cert_source(options)
+ api_endpoint, cert_source = (
+ client_class.get_mtls_endpoint_and_cert_source(options)
+ )
assert api_endpoint == mock_api_endpoint
assert cert_source is expected_cert_source
@@ -1101,10 +1105,9 @@ def test_bigtable_instance_admin_client_get_mtls_endpoint_and_cert_source(client
"google.auth.transport.mtls.default_client_cert_source",
return_value=mock_client_cert_source,
):
- (
- api_endpoint,
- cert_source,
- ) = client_class.get_mtls_endpoint_and_cert_source()
+ api_endpoint, cert_source = (
+ client_class.get_mtls_endpoint_and_cert_source()
+ )
assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT
assert cert_source == mock_client_cert_source
@@ -1360,13 +1363,13 @@ def test_bigtable_instance_admin_client_create_channel_credentials_file(
)
# test that the credentials from file are saved and used as the credentials.
- with mock.patch.object(
- google.auth, "load_credentials_from_file", autospec=True
- ) as load_creds, mock.patch.object(
- google.auth, "default", autospec=True
- ) as adc, mock.patch.object(
- grpc_helpers, "create_channel"
- ) as create_channel:
+ with (
+ mock.patch.object(
+ google.auth, "load_credentials_from_file", autospec=True
+ ) as load_creds,
+ mock.patch.object(google.auth, "default", autospec=True) as adc,
+ mock.patch.object(grpc_helpers, "create_channel") as create_channel,
+ ):
creds = ga_credentials.AnonymousCredentials()
file_creds = ga_credentials.AnonymousCredentials()
load_creds.return_value = (file_creds, None)
@@ -4712,9 +4715,9 @@ def test_partial_update_cluster_use_cached_wrapped_rpc():
mock_rpc.return_value.name = (
"foo" # operation_request.operation in compute client(s) expect a string.
)
- client._transport._wrapped_methods[
- client._transport.partial_update_cluster
- ] = mock_rpc
+ client._transport._wrapped_methods[client._transport.partial_update_cluster] = (
+ mock_rpc
+ )
request = {}
client.partial_update_cluster(request)
@@ -5390,9 +5393,9 @@ def test_create_app_profile_use_cached_wrapped_rpc():
mock_rpc.return_value.name = (
"foo" # operation_request.operation in compute client(s) expect a string.
)
- client._transport._wrapped_methods[
- client._transport.create_app_profile
- ] = mock_rpc
+ client._transport._wrapped_methods[client._transport.create_app_profile] = (
+ mock_rpc
+ )
request = {}
client.create_app_profile(request)
@@ -6083,9 +6086,9 @@ def test_list_app_profiles_use_cached_wrapped_rpc():
mock_rpc.return_value.name = (
"foo" # operation_request.operation in compute client(s) expect a string.
)
- client._transport._wrapped_methods[
- client._transport.list_app_profiles
- ] = mock_rpc
+ client._transport._wrapped_methods[client._transport.list_app_profiles] = (
+ mock_rpc
+ )
request = {}
client.list_app_profiles(request)
@@ -6622,9 +6625,9 @@ def test_update_app_profile_use_cached_wrapped_rpc():
mock_rpc.return_value.name = (
"foo" # operation_request.operation in compute client(s) expect a string.
)
- client._transport._wrapped_methods[
- client._transport.update_app_profile
- ] = mock_rpc
+ client._transport._wrapped_methods[client._transport.update_app_profile] = (
+ mock_rpc
+ )
request = {}
client.update_app_profile(request)
@@ -6978,9 +6981,9 @@ def test_delete_app_profile_use_cached_wrapped_rpc():
mock_rpc.return_value.name = (
"foo" # operation_request.operation in compute client(s) expect a string.
)
- client._transport._wrapped_methods[
- client._transport.delete_app_profile
- ] = mock_rpc
+ client._transport._wrapped_methods[client._transport.delete_app_profile] = (
+ mock_rpc
+ )
request = {}
client.delete_app_profile(request)
@@ -8002,9 +8005,9 @@ def test_test_iam_permissions_use_cached_wrapped_rpc():
mock_rpc.return_value.name = (
"foo" # operation_request.operation in compute client(s) expect a string.
)
- client._transport._wrapped_methods[
- client._transport.test_iam_permissions
- ] = mock_rpc
+ client._transport._wrapped_methods[client._transport.test_iam_permissions] = (
+ mock_rpc
+ )
request = {}
client.test_iam_permissions(request)
@@ -8369,9 +8372,9 @@ def test_list_hot_tablets_use_cached_wrapped_rpc():
mock_rpc.return_value.name = (
"foo" # operation_request.operation in compute client(s) expect a string.
)
- client._transport._wrapped_methods[
- client._transport.list_hot_tablets
- ] = mock_rpc
+ client._transport._wrapped_methods[client._transport.list_hot_tablets] = (
+ mock_rpc
+ )
request = {}
client.list_hot_tablets(request)
@@ -8894,9 +8897,9 @@ def test_create_logical_view_use_cached_wrapped_rpc():
mock_rpc.return_value.name = (
"foo" # operation_request.operation in compute client(s) expect a string.
)
- client._transport._wrapped_methods[
- client._transport.create_logical_view
- ] = mock_rpc
+ client._transport._wrapped_methods[client._transport.create_logical_view] = (
+ mock_rpc
+ )
request = {}
client.create_logical_view(request)
@@ -9263,9 +9266,9 @@ def test_get_logical_view_use_cached_wrapped_rpc():
mock_rpc.return_value.name = (
"foo" # operation_request.operation in compute client(s) expect a string.
)
- client._transport._wrapped_methods[
- client._transport.get_logical_view
- ] = mock_rpc
+ client._transport._wrapped_methods[client._transport.get_logical_view] = (
+ mock_rpc
+ )
request = {}
client.get_logical_view(request)
@@ -9603,9 +9606,9 @@ def test_list_logical_views_use_cached_wrapped_rpc():
mock_rpc.return_value.name = (
"foo" # operation_request.operation in compute client(s) expect a string.
)
- client._transport._wrapped_methods[
- client._transport.list_logical_views
- ] = mock_rpc
+ client._transport._wrapped_methods[client._transport.list_logical_views] = (
+ mock_rpc
+ )
request = {}
client.list_logical_views(request)
@@ -10140,9 +10143,9 @@ def test_update_logical_view_use_cached_wrapped_rpc():
mock_rpc.return_value.name = (
"foo" # operation_request.operation in compute client(s) expect a string.
)
- client._transport._wrapped_methods[
- client._transport.update_logical_view
- ] = mock_rpc
+ client._transport._wrapped_methods[client._transport.update_logical_view] = (
+ mock_rpc
+ )
request = {}
client.update_logical_view(request)
@@ -10498,9 +10501,9 @@ def test_delete_logical_view_use_cached_wrapped_rpc():
mock_rpc.return_value.name = (
"foo" # operation_request.operation in compute client(s) expect a string.
)
- client._transport._wrapped_methods[
- client._transport.delete_logical_view
- ] = mock_rpc
+ client._transport._wrapped_methods[client._transport.delete_logical_view] = (
+ mock_rpc
+ )
request = {}
client.delete_logical_view(request)
@@ -11207,9 +11210,9 @@ def test_get_materialized_view_use_cached_wrapped_rpc():
mock_rpc.return_value.name = (
"foo" # operation_request.operation in compute client(s) expect a string.
)
- client._transport._wrapped_methods[
- client._transport.get_materialized_view
- ] = mock_rpc
+ client._transport._wrapped_methods[client._transport.get_materialized_view] = (
+ mock_rpc
+ )
request = {}
client.get_materialized_view(request)
@@ -14353,9 +14356,9 @@ def test_partial_update_cluster_rest_use_cached_wrapped_rpc():
mock_rpc.return_value.name = (
"foo" # operation_request.operation in compute client(s) expect a string.
)
- client._transport._wrapped_methods[
- client._transport.partial_update_cluster
- ] = mock_rpc
+ client._transport._wrapped_methods[client._transport.partial_update_cluster] = (
+ mock_rpc
+ )
request = {}
client.partial_update_cluster(request)
@@ -14716,9 +14719,9 @@ def test_create_app_profile_rest_use_cached_wrapped_rpc():
mock_rpc.return_value.name = (
"foo" # operation_request.operation in compute client(s) expect a string.
)
- client._transport._wrapped_methods[
- client._transport.create_app_profile
- ] = mock_rpc
+ client._transport._wrapped_methods[client._transport.create_app_profile] = (
+ mock_rpc
+ )
request = {}
client.create_app_profile(request)
@@ -15115,9 +15118,9 @@ def test_list_app_profiles_rest_use_cached_wrapped_rpc():
mock_rpc.return_value.name = (
"foo" # operation_request.operation in compute client(s) expect a string.
)
- client._transport._wrapped_methods[
- client._transport.list_app_profiles
- ] = mock_rpc
+ client._transport._wrapped_methods[client._transport.list_app_profiles] = (
+ mock_rpc
+ )
request = {}
client.list_app_profiles(request)
@@ -15377,9 +15380,9 @@ def test_update_app_profile_rest_use_cached_wrapped_rpc():
mock_rpc.return_value.name = (
"foo" # operation_request.operation in compute client(s) expect a string.
)
- client._transport._wrapped_methods[
- client._transport.update_app_profile
- ] = mock_rpc
+ client._transport._wrapped_methods[client._transport.update_app_profile] = (
+ mock_rpc
+ )
request = {}
client.update_app_profile(request)
@@ -15580,9 +15583,9 @@ def test_delete_app_profile_rest_use_cached_wrapped_rpc():
mock_rpc.return_value.name = (
"foo" # operation_request.operation in compute client(s) expect a string.
)
- client._transport._wrapped_methods[
- client._transport.delete_app_profile
- ] = mock_rpc
+ client._transport._wrapped_methods[client._transport.delete_app_profile] = (
+ mock_rpc
+ )
request = {}
client.delete_app_profile(request)
@@ -16142,9 +16145,9 @@ def test_test_iam_permissions_rest_use_cached_wrapped_rpc():
mock_rpc.return_value.name = (
"foo" # operation_request.operation in compute client(s) expect a string.
)
- client._transport._wrapped_methods[
- client._transport.test_iam_permissions
- ] = mock_rpc
+ client._transport._wrapped_methods[client._transport.test_iam_permissions] = (
+ mock_rpc
+ )
request = {}
client.test_iam_permissions(request)
@@ -16333,9 +16336,9 @@ def test_list_hot_tablets_rest_use_cached_wrapped_rpc():
mock_rpc.return_value.name = (
"foo" # operation_request.operation in compute client(s) expect a string.
)
- client._transport._wrapped_methods[
- client._transport.list_hot_tablets
- ] = mock_rpc
+ client._transport._wrapped_methods[client._transport.list_hot_tablets] = (
+ mock_rpc
+ )
request = {}
client.list_hot_tablets(request)
@@ -16603,9 +16606,9 @@ def test_create_logical_view_rest_use_cached_wrapped_rpc():
mock_rpc.return_value.name = (
"foo" # operation_request.operation in compute client(s) expect a string.
)
- client._transport._wrapped_methods[
- client._transport.create_logical_view
- ] = mock_rpc
+ client._transport._wrapped_methods[client._transport.create_logical_view] = (
+ mock_rpc
+ )
request = {}
client.create_logical_view(request)
@@ -16811,9 +16814,9 @@ def test_get_logical_view_rest_use_cached_wrapped_rpc():
mock_rpc.return_value.name = (
"foo" # operation_request.operation in compute client(s) expect a string.
)
- client._transport._wrapped_methods[
- client._transport.get_logical_view
- ] = mock_rpc
+ client._transport._wrapped_methods[client._transport.get_logical_view] = (
+ mock_rpc
+ )
request = {}
client.get_logical_view(request)
@@ -16995,9 +16998,9 @@ def test_list_logical_views_rest_use_cached_wrapped_rpc():
mock_rpc.return_value.name = (
"foo" # operation_request.operation in compute client(s) expect a string.
)
- client._transport._wrapped_methods[
- client._transport.list_logical_views
- ] = mock_rpc
+ client._transport._wrapped_methods[client._transport.list_logical_views] = (
+ mock_rpc
+ )
request = {}
client.list_logical_views(request)
@@ -17258,9 +17261,9 @@ def test_update_logical_view_rest_use_cached_wrapped_rpc():
mock_rpc.return_value.name = (
"foo" # operation_request.operation in compute client(s) expect a string.
)
- client._transport._wrapped_methods[
- client._transport.update_logical_view
- ] = mock_rpc
+ client._transport._wrapped_methods[client._transport.update_logical_view] = (
+ mock_rpc
+ )
request = {}
client.update_logical_view(request)
@@ -17443,9 +17446,9 @@ def test_delete_logical_view_rest_use_cached_wrapped_rpc():
mock_rpc.return_value.name = (
"foo" # operation_request.operation in compute client(s) expect a string.
)
- client._transport._wrapped_methods[
- client._transport.delete_logical_view
- ] = mock_rpc
+ client._transport._wrapped_methods[client._transport.delete_logical_view] = (
+ mock_rpc
+ )
request = {}
client.delete_logical_view(request)
@@ -17838,9 +17841,9 @@ def test_get_materialized_view_rest_use_cached_wrapped_rpc():
mock_rpc.return_value.name = (
"foo" # operation_request.operation in compute client(s) expect a string.
)
- client._transport._wrapped_methods[
- client._transport.get_materialized_view
- ] = mock_rpc
+ client._transport._wrapped_methods[client._transport.get_materialized_view] = (
+ mock_rpc
+ )
request = {}
client.get_materialized_view(request)
@@ -20313,8 +20316,9 @@ def test_create_instance_rest_bad_request(
request = request_type(**request_init)
# Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, "request") as req, pytest.raises(
- core_exceptions.BadRequest
+ with (
+ mock.patch.object(Session, "request") as req,
+ pytest.raises(core_exceptions.BadRequest),
):
# Wrap the value into a proper Response obj
response_value = mock.Mock()
@@ -20371,20 +20375,21 @@ def test_create_instance_rest_interceptors(null_interceptor):
)
client = BigtableInstanceAdminClient(transport=transport)
- with mock.patch.object(
- type(client.transport._session), "request"
- ) as req, mock.patch.object(
- path_template, "transcode"
- ) as transcode, mock.patch.object(
- operation.Operation, "_set_result_from_operation"
- ), mock.patch.object(
- transports.BigtableInstanceAdminRestInterceptor, "post_create_instance"
- ) as post, mock.patch.object(
- transports.BigtableInstanceAdminRestInterceptor,
- "post_create_instance_with_metadata",
- ) as post_with_metadata, mock.patch.object(
- transports.BigtableInstanceAdminRestInterceptor, "pre_create_instance"
- ) as pre:
+ with (
+ mock.patch.object(type(client.transport._session), "request") as req,
+ mock.patch.object(path_template, "transcode") as transcode,
+ mock.patch.object(operation.Operation, "_set_result_from_operation"),
+ mock.patch.object(
+ transports.BigtableInstanceAdminRestInterceptor, "post_create_instance"
+ ) as post,
+ mock.patch.object(
+ transports.BigtableInstanceAdminRestInterceptor,
+ "post_create_instance_with_metadata",
+ ) as post_with_metadata,
+ mock.patch.object(
+ transports.BigtableInstanceAdminRestInterceptor, "pre_create_instance"
+ ) as pre,
+ ):
pre.assert_not_called()
post.assert_not_called()
post_with_metadata.assert_not_called()
@@ -20437,8 +20442,9 @@ def test_get_instance_rest_bad_request(
request = request_type(**request_init)
# Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, "request") as req, pytest.raises(
- core_exceptions.BadRequest
+ with (
+ mock.patch.object(Session, "request") as req,
+ pytest.raises(core_exceptions.BadRequest),
):
# Wrap the value into a proper Response obj
response_value = mock.Mock()
@@ -20511,18 +20517,20 @@ def test_get_instance_rest_interceptors(null_interceptor):
)
client = BigtableInstanceAdminClient(transport=transport)
- with mock.patch.object(
- type(client.transport._session), "request"
- ) as req, mock.patch.object(
- path_template, "transcode"
- ) as transcode, mock.patch.object(
- transports.BigtableInstanceAdminRestInterceptor, "post_get_instance"
- ) as post, mock.patch.object(
- transports.BigtableInstanceAdminRestInterceptor,
- "post_get_instance_with_metadata",
- ) as post_with_metadata, mock.patch.object(
- transports.BigtableInstanceAdminRestInterceptor, "pre_get_instance"
- ) as pre:
+ with (
+ mock.patch.object(type(client.transport._session), "request") as req,
+ mock.patch.object(path_template, "transcode") as transcode,
+ mock.patch.object(
+ transports.BigtableInstanceAdminRestInterceptor, "post_get_instance"
+ ) as post,
+ mock.patch.object(
+ transports.BigtableInstanceAdminRestInterceptor,
+ "post_get_instance_with_metadata",
+ ) as post_with_metadata,
+ mock.patch.object(
+ transports.BigtableInstanceAdminRestInterceptor, "pre_get_instance"
+ ) as pre,
+ ):
pre.assert_not_called()
post.assert_not_called()
post_with_metadata.assert_not_called()
@@ -20575,8 +20583,9 @@ def test_list_instances_rest_bad_request(
request = request_type(**request_init)
# Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, "request") as req, pytest.raises(
- core_exceptions.BadRequest
+ with (
+ mock.patch.object(Session, "request") as req,
+ pytest.raises(core_exceptions.BadRequest),
):
# Wrap the value into a proper Response obj
response_value = mock.Mock()
@@ -20643,18 +20652,20 @@ def test_list_instances_rest_interceptors(null_interceptor):
)
client = BigtableInstanceAdminClient(transport=transport)
- with mock.patch.object(
- type(client.transport._session), "request"
- ) as req, mock.patch.object(
- path_template, "transcode"
- ) as transcode, mock.patch.object(
- transports.BigtableInstanceAdminRestInterceptor, "post_list_instances"
- ) as post, mock.patch.object(
- transports.BigtableInstanceAdminRestInterceptor,
- "post_list_instances_with_metadata",
- ) as post_with_metadata, mock.patch.object(
- transports.BigtableInstanceAdminRestInterceptor, "pre_list_instances"
- ) as pre:
+ with (
+ mock.patch.object(type(client.transport._session), "request") as req,
+ mock.patch.object(path_template, "transcode") as transcode,
+ mock.patch.object(
+ transports.BigtableInstanceAdminRestInterceptor, "post_list_instances"
+ ) as post,
+ mock.patch.object(
+ transports.BigtableInstanceAdminRestInterceptor,
+ "post_list_instances_with_metadata",
+ ) as post_with_metadata,
+ mock.patch.object(
+ transports.BigtableInstanceAdminRestInterceptor, "pre_list_instances"
+ ) as pre,
+ ):
pre.assert_not_called()
post.assert_not_called()
post_with_metadata.assert_not_called()
@@ -20710,8 +20721,9 @@ def test_update_instance_rest_bad_request(request_type=instance.Instance):
request = request_type(**request_init)
# Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, "request") as req, pytest.raises(
- core_exceptions.BadRequest
+ with (
+ mock.patch.object(Session, "request") as req,
+ pytest.raises(core_exceptions.BadRequest),
):
# Wrap the value into a proper Response obj
response_value = mock.Mock()
@@ -20784,18 +20796,20 @@ def test_update_instance_rest_interceptors(null_interceptor):
)
client = BigtableInstanceAdminClient(transport=transport)
- with mock.patch.object(
- type(client.transport._session), "request"
- ) as req, mock.patch.object(
- path_template, "transcode"
- ) as transcode, mock.patch.object(
- transports.BigtableInstanceAdminRestInterceptor, "post_update_instance"
- ) as post, mock.patch.object(
- transports.BigtableInstanceAdminRestInterceptor,
- "post_update_instance_with_metadata",
- ) as post_with_metadata, mock.patch.object(
- transports.BigtableInstanceAdminRestInterceptor, "pre_update_instance"
- ) as pre:
+ with (
+ mock.patch.object(type(client.transport._session), "request") as req,
+ mock.patch.object(path_template, "transcode") as transcode,
+ mock.patch.object(
+ transports.BigtableInstanceAdminRestInterceptor, "post_update_instance"
+ ) as post,
+ mock.patch.object(
+ transports.BigtableInstanceAdminRestInterceptor,
+ "post_update_instance_with_metadata",
+ ) as post_with_metadata,
+ mock.patch.object(
+ transports.BigtableInstanceAdminRestInterceptor, "pre_update_instance"
+ ) as pre,
+ ):
pre.assert_not_called()
post.assert_not_called()
post_with_metadata.assert_not_called()
@@ -20846,8 +20860,9 @@ def test_partial_update_instance_rest_bad_request(
request = request_type(**request_init)
# Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, "request") as req, pytest.raises(
- core_exceptions.BadRequest
+ with (
+ mock.patch.object(Session, "request") as req,
+ pytest.raises(core_exceptions.BadRequest),
):
# Wrap the value into a proper Response obj
response_value = mock.Mock()
@@ -20984,20 +20999,23 @@ def test_partial_update_instance_rest_interceptors(null_interceptor):
)
client = BigtableInstanceAdminClient(transport=transport)
- with mock.patch.object(
- type(client.transport._session), "request"
- ) as req, mock.patch.object(
- path_template, "transcode"
- ) as transcode, mock.patch.object(
- operation.Operation, "_set_result_from_operation"
- ), mock.patch.object(
- transports.BigtableInstanceAdminRestInterceptor, "post_partial_update_instance"
- ) as post, mock.patch.object(
- transports.BigtableInstanceAdminRestInterceptor,
- "post_partial_update_instance_with_metadata",
- ) as post_with_metadata, mock.patch.object(
- transports.BigtableInstanceAdminRestInterceptor, "pre_partial_update_instance"
- ) as pre:
+ with (
+ mock.patch.object(type(client.transport._session), "request") as req,
+ mock.patch.object(path_template, "transcode") as transcode,
+ mock.patch.object(operation.Operation, "_set_result_from_operation"),
+ mock.patch.object(
+ transports.BigtableInstanceAdminRestInterceptor,
+ "post_partial_update_instance",
+ ) as post,
+ mock.patch.object(
+ transports.BigtableInstanceAdminRestInterceptor,
+ "post_partial_update_instance_with_metadata",
+ ) as post_with_metadata,
+ mock.patch.object(
+ transports.BigtableInstanceAdminRestInterceptor,
+ "pre_partial_update_instance",
+ ) as pre,
+ ):
pre.assert_not_called()
post.assert_not_called()
post_with_metadata.assert_not_called()
@@ -21050,8 +21068,9 @@ def test_delete_instance_rest_bad_request(
request = request_type(**request_init)
# Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, "request") as req, pytest.raises(
- core_exceptions.BadRequest
+ with (
+ mock.patch.object(Session, "request") as req,
+ pytest.raises(core_exceptions.BadRequest),
):
# Wrap the value into a proper Response obj
response_value = mock.Mock()
@@ -21108,13 +21127,13 @@ def test_delete_instance_rest_interceptors(null_interceptor):
)
client = BigtableInstanceAdminClient(transport=transport)
- with mock.patch.object(
- type(client.transport._session), "request"
- ) as req, mock.patch.object(
- path_template, "transcode"
- ) as transcode, mock.patch.object(
- transports.BigtableInstanceAdminRestInterceptor, "pre_delete_instance"
- ) as pre:
+ with (
+ mock.patch.object(type(client.transport._session), "request") as req,
+ mock.patch.object(path_template, "transcode") as transcode,
+ mock.patch.object(
+ transports.BigtableInstanceAdminRestInterceptor, "pre_delete_instance"
+ ) as pre,
+ ):
pre.assert_not_called()
pb_message = bigtable_instance_admin.DeleteInstanceRequest.pb(
bigtable_instance_admin.DeleteInstanceRequest()
@@ -21159,8 +21178,9 @@ def test_create_cluster_rest_bad_request(
request = request_type(**request_init)
# Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, "request") as req, pytest.raises(
- core_exceptions.BadRequest
+ with (
+ mock.patch.object(Session, "request") as req,
+ pytest.raises(core_exceptions.BadRequest),
):
# Wrap the value into a proper Response obj
response_value = mock.Mock()
@@ -21305,20 +21325,21 @@ def test_create_cluster_rest_interceptors(null_interceptor):
)
client = BigtableInstanceAdminClient(transport=transport)
- with mock.patch.object(
- type(client.transport._session), "request"
- ) as req, mock.patch.object(
- path_template, "transcode"
- ) as transcode, mock.patch.object(
- operation.Operation, "_set_result_from_operation"
- ), mock.patch.object(
- transports.BigtableInstanceAdminRestInterceptor, "post_create_cluster"
- ) as post, mock.patch.object(
- transports.BigtableInstanceAdminRestInterceptor,
- "post_create_cluster_with_metadata",
- ) as post_with_metadata, mock.patch.object(
- transports.BigtableInstanceAdminRestInterceptor, "pre_create_cluster"
- ) as pre:
+ with (
+ mock.patch.object(type(client.transport._session), "request") as req,
+ mock.patch.object(path_template, "transcode") as transcode,
+ mock.patch.object(operation.Operation, "_set_result_from_operation"),
+ mock.patch.object(
+ transports.BigtableInstanceAdminRestInterceptor, "post_create_cluster"
+ ) as post,
+ mock.patch.object(
+ transports.BigtableInstanceAdminRestInterceptor,
+ "post_create_cluster_with_metadata",
+ ) as post_with_metadata,
+ mock.patch.object(
+ transports.BigtableInstanceAdminRestInterceptor, "pre_create_cluster"
+ ) as pre,
+ ):
pre.assert_not_called()
post.assert_not_called()
post_with_metadata.assert_not_called()
@@ -21371,8 +21392,9 @@ def test_get_cluster_rest_bad_request(
request = request_type(**request_init)
# Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, "request") as req, pytest.raises(
- core_exceptions.BadRequest
+ with (
+ mock.patch.object(Session, "request") as req,
+ pytest.raises(core_exceptions.BadRequest),
):
# Wrap the value into a proper Response obj
response_value = mock.Mock()
@@ -21448,18 +21470,20 @@ def test_get_cluster_rest_interceptors(null_interceptor):
)
client = BigtableInstanceAdminClient(transport=transport)
- with mock.patch.object(
- type(client.transport._session), "request"
- ) as req, mock.patch.object(
- path_template, "transcode"
- ) as transcode, mock.patch.object(
- transports.BigtableInstanceAdminRestInterceptor, "post_get_cluster"
- ) as post, mock.patch.object(
- transports.BigtableInstanceAdminRestInterceptor,
- "post_get_cluster_with_metadata",
- ) as post_with_metadata, mock.patch.object(
- transports.BigtableInstanceAdminRestInterceptor, "pre_get_cluster"
- ) as pre:
+ with (
+ mock.patch.object(type(client.transport._session), "request") as req,
+ mock.patch.object(path_template, "transcode") as transcode,
+ mock.patch.object(
+ transports.BigtableInstanceAdminRestInterceptor, "post_get_cluster"
+ ) as post,
+ mock.patch.object(
+ transports.BigtableInstanceAdminRestInterceptor,
+ "post_get_cluster_with_metadata",
+ ) as post_with_metadata,
+ mock.patch.object(
+ transports.BigtableInstanceAdminRestInterceptor, "pre_get_cluster"
+ ) as pre,
+ ):
pre.assert_not_called()
post.assert_not_called()
post_with_metadata.assert_not_called()
@@ -21512,8 +21536,9 @@ def test_list_clusters_rest_bad_request(
request = request_type(**request_init)
# Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, "request") as req, pytest.raises(
- core_exceptions.BadRequest
+ with (
+ mock.patch.object(Session, "request") as req,
+ pytest.raises(core_exceptions.BadRequest),
):
# Wrap the value into a proper Response obj
response_value = mock.Mock()
@@ -21580,18 +21605,20 @@ def test_list_clusters_rest_interceptors(null_interceptor):
)
client = BigtableInstanceAdminClient(transport=transport)
- with mock.patch.object(
- type(client.transport._session), "request"
- ) as req, mock.patch.object(
- path_template, "transcode"
- ) as transcode, mock.patch.object(
- transports.BigtableInstanceAdminRestInterceptor, "post_list_clusters"
- ) as post, mock.patch.object(
- transports.BigtableInstanceAdminRestInterceptor,
- "post_list_clusters_with_metadata",
- ) as post_with_metadata, mock.patch.object(
- transports.BigtableInstanceAdminRestInterceptor, "pre_list_clusters"
- ) as pre:
+ with (
+ mock.patch.object(type(client.transport._session), "request") as req,
+ mock.patch.object(path_template, "transcode") as transcode,
+ mock.patch.object(
+ transports.BigtableInstanceAdminRestInterceptor, "post_list_clusters"
+ ) as post,
+ mock.patch.object(
+ transports.BigtableInstanceAdminRestInterceptor,
+ "post_list_clusters_with_metadata",
+ ) as post_with_metadata,
+ mock.patch.object(
+ transports.BigtableInstanceAdminRestInterceptor, "pre_list_clusters"
+ ) as pre,
+ ):
pre.assert_not_called()
post.assert_not_called()
post_with_metadata.assert_not_called()
@@ -21647,8 +21674,9 @@ def test_update_cluster_rest_bad_request(request_type=instance.Cluster):
request = request_type(**request_init)
# Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, "request") as req, pytest.raises(
- core_exceptions.BadRequest
+ with (
+ mock.patch.object(Session, "request") as req,
+ pytest.raises(core_exceptions.BadRequest),
):
# Wrap the value into a proper Response obj
response_value = mock.Mock()
@@ -21705,20 +21733,21 @@ def test_update_cluster_rest_interceptors(null_interceptor):
)
client = BigtableInstanceAdminClient(transport=transport)
- with mock.patch.object(
- type(client.transport._session), "request"
- ) as req, mock.patch.object(
- path_template, "transcode"
- ) as transcode, mock.patch.object(
- operation.Operation, "_set_result_from_operation"
- ), mock.patch.object(
- transports.BigtableInstanceAdminRestInterceptor, "post_update_cluster"
- ) as post, mock.patch.object(
- transports.BigtableInstanceAdminRestInterceptor,
- "post_update_cluster_with_metadata",
- ) as post_with_metadata, mock.patch.object(
- transports.BigtableInstanceAdminRestInterceptor, "pre_update_cluster"
- ) as pre:
+ with (
+ mock.patch.object(type(client.transport._session), "request") as req,
+ mock.patch.object(path_template, "transcode") as transcode,
+ mock.patch.object(operation.Operation, "_set_result_from_operation"),
+ mock.patch.object(
+ transports.BigtableInstanceAdminRestInterceptor, "post_update_cluster"
+ ) as post,
+ mock.patch.object(
+ transports.BigtableInstanceAdminRestInterceptor,
+ "post_update_cluster_with_metadata",
+ ) as post_with_metadata,
+ mock.patch.object(
+ transports.BigtableInstanceAdminRestInterceptor, "pre_update_cluster"
+ ) as pre,
+ ):
pre.assert_not_called()
post.assert_not_called()
post_with_metadata.assert_not_called()
@@ -21771,8 +21800,9 @@ def test_partial_update_cluster_rest_bad_request(
request = request_type(**request_init)
# Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, "request") as req, pytest.raises(
- core_exceptions.BadRequest
+ with (
+ mock.patch.object(Session, "request") as req,
+ pytest.raises(core_exceptions.BadRequest),
):
# Wrap the value into a proper Response obj
response_value = mock.Mock()
@@ -21921,20 +21951,23 @@ def test_partial_update_cluster_rest_interceptors(null_interceptor):
)
client = BigtableInstanceAdminClient(transport=transport)
- with mock.patch.object(
- type(client.transport._session), "request"
- ) as req, mock.patch.object(
- path_template, "transcode"
- ) as transcode, mock.patch.object(
- operation.Operation, "_set_result_from_operation"
- ), mock.patch.object(
- transports.BigtableInstanceAdminRestInterceptor, "post_partial_update_cluster"
- ) as post, mock.patch.object(
- transports.BigtableInstanceAdminRestInterceptor,
- "post_partial_update_cluster_with_metadata",
- ) as post_with_metadata, mock.patch.object(
- transports.BigtableInstanceAdminRestInterceptor, "pre_partial_update_cluster"
- ) as pre:
+ with (
+ mock.patch.object(type(client.transport._session), "request") as req,
+ mock.patch.object(path_template, "transcode") as transcode,
+ mock.patch.object(operation.Operation, "_set_result_from_operation"),
+ mock.patch.object(
+ transports.BigtableInstanceAdminRestInterceptor,
+ "post_partial_update_cluster",
+ ) as post,
+ mock.patch.object(
+ transports.BigtableInstanceAdminRestInterceptor,
+ "post_partial_update_cluster_with_metadata",
+ ) as post_with_metadata,
+ mock.patch.object(
+ transports.BigtableInstanceAdminRestInterceptor,
+ "pre_partial_update_cluster",
+ ) as pre,
+ ):
pre.assert_not_called()
post.assert_not_called()
post_with_metadata.assert_not_called()
@@ -21987,8 +22020,9 @@ def test_delete_cluster_rest_bad_request(
request = request_type(**request_init)
# Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, "request") as req, pytest.raises(
- core_exceptions.BadRequest
+ with (
+ mock.patch.object(Session, "request") as req,
+ pytest.raises(core_exceptions.BadRequest),
):
# Wrap the value into a proper Response obj
response_value = mock.Mock()
@@ -22045,13 +22079,13 @@ def test_delete_cluster_rest_interceptors(null_interceptor):
)
client = BigtableInstanceAdminClient(transport=transport)
- with mock.patch.object(
- type(client.transport._session), "request"
- ) as req, mock.patch.object(
- path_template, "transcode"
- ) as transcode, mock.patch.object(
- transports.BigtableInstanceAdminRestInterceptor, "pre_delete_cluster"
- ) as pre:
+ with (
+ mock.patch.object(type(client.transport._session), "request") as req,
+ mock.patch.object(path_template, "transcode") as transcode,
+ mock.patch.object(
+ transports.BigtableInstanceAdminRestInterceptor, "pre_delete_cluster"
+ ) as pre,
+ ):
pre.assert_not_called()
pb_message = bigtable_instance_admin.DeleteClusterRequest.pb(
bigtable_instance_admin.DeleteClusterRequest()
@@ -22096,8 +22130,9 @@ def test_create_app_profile_rest_bad_request(
request = request_type(**request_init)
# Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, "request") as req, pytest.raises(
- core_exceptions.BadRequest
+ with (
+ mock.patch.object(Session, "request") as req,
+ pytest.raises(core_exceptions.BadRequest),
):
# Wrap the value into a proper Response obj
response_value = mock.Mock()
@@ -22250,18 +22285,20 @@ def test_create_app_profile_rest_interceptors(null_interceptor):
)
client = BigtableInstanceAdminClient(transport=transport)
- with mock.patch.object(
- type(client.transport._session), "request"
- ) as req, mock.patch.object(
- path_template, "transcode"
- ) as transcode, mock.patch.object(
- transports.BigtableInstanceAdminRestInterceptor, "post_create_app_profile"
- ) as post, mock.patch.object(
- transports.BigtableInstanceAdminRestInterceptor,
- "post_create_app_profile_with_metadata",
- ) as post_with_metadata, mock.patch.object(
- transports.BigtableInstanceAdminRestInterceptor, "pre_create_app_profile"
- ) as pre:
+ with (
+ mock.patch.object(type(client.transport._session), "request") as req,
+ mock.patch.object(path_template, "transcode") as transcode,
+ mock.patch.object(
+ transports.BigtableInstanceAdminRestInterceptor, "post_create_app_profile"
+ ) as post,
+ mock.patch.object(
+ transports.BigtableInstanceAdminRestInterceptor,
+ "post_create_app_profile_with_metadata",
+ ) as post_with_metadata,
+ mock.patch.object(
+ transports.BigtableInstanceAdminRestInterceptor, "pre_create_app_profile"
+ ) as pre,
+ ):
pre.assert_not_called()
post.assert_not_called()
post_with_metadata.assert_not_called()
@@ -22314,8 +22351,9 @@ def test_get_app_profile_rest_bad_request(
request = request_type(**request_init)
# Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, "request") as req, pytest.raises(
- core_exceptions.BadRequest
+ with (
+ mock.patch.object(Session, "request") as req,
+ pytest.raises(core_exceptions.BadRequest),
):
# Wrap the value into a proper Response obj
response_value = mock.Mock()
@@ -22383,18 +22421,20 @@ def test_get_app_profile_rest_interceptors(null_interceptor):
)
client = BigtableInstanceAdminClient(transport=transport)
- with mock.patch.object(
- type(client.transport._session), "request"
- ) as req, mock.patch.object(
- path_template, "transcode"
- ) as transcode, mock.patch.object(
- transports.BigtableInstanceAdminRestInterceptor, "post_get_app_profile"
- ) as post, mock.patch.object(
- transports.BigtableInstanceAdminRestInterceptor,
- "post_get_app_profile_with_metadata",
- ) as post_with_metadata, mock.patch.object(
- transports.BigtableInstanceAdminRestInterceptor, "pre_get_app_profile"
- ) as pre:
+ with (
+ mock.patch.object(type(client.transport._session), "request") as req,
+ mock.patch.object(path_template, "transcode") as transcode,
+ mock.patch.object(
+ transports.BigtableInstanceAdminRestInterceptor, "post_get_app_profile"
+ ) as post,
+ mock.patch.object(
+ transports.BigtableInstanceAdminRestInterceptor,
+ "post_get_app_profile_with_metadata",
+ ) as post_with_metadata,
+ mock.patch.object(
+ transports.BigtableInstanceAdminRestInterceptor, "pre_get_app_profile"
+ ) as pre,
+ ):
pre.assert_not_called()
post.assert_not_called()
post_with_metadata.assert_not_called()
@@ -22447,8 +22487,9 @@ def test_list_app_profiles_rest_bad_request(
request = request_type(**request_init)
# Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, "request") as req, pytest.raises(
- core_exceptions.BadRequest
+ with (
+ mock.patch.object(Session, "request") as req,
+ pytest.raises(core_exceptions.BadRequest),
):
# Wrap the value into a proper Response obj
response_value = mock.Mock()
@@ -22513,18 +22554,20 @@ def test_list_app_profiles_rest_interceptors(null_interceptor):
)
client = BigtableInstanceAdminClient(transport=transport)
- with mock.patch.object(
- type(client.transport._session), "request"
- ) as req, mock.patch.object(
- path_template, "transcode"
- ) as transcode, mock.patch.object(
- transports.BigtableInstanceAdminRestInterceptor, "post_list_app_profiles"
- ) as post, mock.patch.object(
- transports.BigtableInstanceAdminRestInterceptor,
- "post_list_app_profiles_with_metadata",
- ) as post_with_metadata, mock.patch.object(
- transports.BigtableInstanceAdminRestInterceptor, "pre_list_app_profiles"
- ) as pre:
+ with (
+ mock.patch.object(type(client.transport._session), "request") as req,
+ mock.patch.object(path_template, "transcode") as transcode,
+ mock.patch.object(
+ transports.BigtableInstanceAdminRestInterceptor, "post_list_app_profiles"
+ ) as post,
+ mock.patch.object(
+ transports.BigtableInstanceAdminRestInterceptor,
+ "post_list_app_profiles_with_metadata",
+ ) as post_with_metadata,
+ mock.patch.object(
+ transports.BigtableInstanceAdminRestInterceptor, "pre_list_app_profiles"
+ ) as pre,
+ ):
pre.assert_not_called()
post.assert_not_called()
post_with_metadata.assert_not_called()
@@ -22586,8 +22629,9 @@ def test_update_app_profile_rest_bad_request(
request = request_type(**request_init)
# Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, "request") as req, pytest.raises(
- core_exceptions.BadRequest
+ with (
+ mock.patch.object(Session, "request") as req,
+ pytest.raises(core_exceptions.BadRequest),
):
# Wrap the value into a proper Response obj
response_value = mock.Mock()
@@ -22733,20 +22777,21 @@ def test_update_app_profile_rest_interceptors(null_interceptor):
)
client = BigtableInstanceAdminClient(transport=transport)
- with mock.patch.object(
- type(client.transport._session), "request"
- ) as req, mock.patch.object(
- path_template, "transcode"
- ) as transcode, mock.patch.object(
- operation.Operation, "_set_result_from_operation"
- ), mock.patch.object(
- transports.BigtableInstanceAdminRestInterceptor, "post_update_app_profile"
- ) as post, mock.patch.object(
- transports.BigtableInstanceAdminRestInterceptor,
- "post_update_app_profile_with_metadata",
- ) as post_with_metadata, mock.patch.object(
- transports.BigtableInstanceAdminRestInterceptor, "pre_update_app_profile"
- ) as pre:
+ with (
+ mock.patch.object(type(client.transport._session), "request") as req,
+ mock.patch.object(path_template, "transcode") as transcode,
+ mock.patch.object(operation.Operation, "_set_result_from_operation"),
+ mock.patch.object(
+ transports.BigtableInstanceAdminRestInterceptor, "post_update_app_profile"
+ ) as post,
+ mock.patch.object(
+ transports.BigtableInstanceAdminRestInterceptor,
+ "post_update_app_profile_with_metadata",
+ ) as post_with_metadata,
+ mock.patch.object(
+ transports.BigtableInstanceAdminRestInterceptor, "pre_update_app_profile"
+ ) as pre,
+ ):
pre.assert_not_called()
post.assert_not_called()
post_with_metadata.assert_not_called()
@@ -22799,8 +22844,9 @@ def test_delete_app_profile_rest_bad_request(
request = request_type(**request_init)
# Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, "request") as req, pytest.raises(
- core_exceptions.BadRequest
+ with (
+ mock.patch.object(Session, "request") as req,
+ pytest.raises(core_exceptions.BadRequest),
):
# Wrap the value into a proper Response obj
response_value = mock.Mock()
@@ -22857,13 +22903,13 @@ def test_delete_app_profile_rest_interceptors(null_interceptor):
)
client = BigtableInstanceAdminClient(transport=transport)
- with mock.patch.object(
- type(client.transport._session), "request"
- ) as req, mock.patch.object(
- path_template, "transcode"
- ) as transcode, mock.patch.object(
- transports.BigtableInstanceAdminRestInterceptor, "pre_delete_app_profile"
- ) as pre:
+ with (
+ mock.patch.object(type(client.transport._session), "request") as req,
+ mock.patch.object(path_template, "transcode") as transcode,
+ mock.patch.object(
+ transports.BigtableInstanceAdminRestInterceptor, "pre_delete_app_profile"
+ ) as pre,
+ ):
pre.assert_not_called()
pb_message = bigtable_instance_admin.DeleteAppProfileRequest.pb(
bigtable_instance_admin.DeleteAppProfileRequest()
@@ -22908,8 +22954,9 @@ def test_get_iam_policy_rest_bad_request(
request = request_type(**request_init)
# Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, "request") as req, pytest.raises(
- core_exceptions.BadRequest
+ with (
+ mock.patch.object(Session, "request") as req,
+ pytest.raises(core_exceptions.BadRequest),
):
# Wrap the value into a proper Response obj
response_value = mock.Mock()
@@ -22971,18 +23018,20 @@ def test_get_iam_policy_rest_interceptors(null_interceptor):
)
client = BigtableInstanceAdminClient(transport=transport)
- with mock.patch.object(
- type(client.transport._session), "request"
- ) as req, mock.patch.object(
- path_template, "transcode"
- ) as transcode, mock.patch.object(
- transports.BigtableInstanceAdminRestInterceptor, "post_get_iam_policy"
- ) as post, mock.patch.object(
- transports.BigtableInstanceAdminRestInterceptor,
- "post_get_iam_policy_with_metadata",
- ) as post_with_metadata, mock.patch.object(
- transports.BigtableInstanceAdminRestInterceptor, "pre_get_iam_policy"
- ) as pre:
+ with (
+ mock.patch.object(type(client.transport._session), "request") as req,
+ mock.patch.object(path_template, "transcode") as transcode,
+ mock.patch.object(
+ transports.BigtableInstanceAdminRestInterceptor, "post_get_iam_policy"
+ ) as post,
+ mock.patch.object(
+ transports.BigtableInstanceAdminRestInterceptor,
+ "post_get_iam_policy_with_metadata",
+ ) as post_with_metadata,
+ mock.patch.object(
+ transports.BigtableInstanceAdminRestInterceptor, "pre_get_iam_policy"
+ ) as pre,
+ ):
pre.assert_not_called()
post.assert_not_called()
post_with_metadata.assert_not_called()
@@ -23033,8 +23082,9 @@ def test_set_iam_policy_rest_bad_request(
request = request_type(**request_init)
# Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, "request") as req, pytest.raises(
- core_exceptions.BadRequest
+ with (
+ mock.patch.object(Session, "request") as req,
+ pytest.raises(core_exceptions.BadRequest),
):
# Wrap the value into a proper Response obj
response_value = mock.Mock()
@@ -23096,18 +23146,20 @@ def test_set_iam_policy_rest_interceptors(null_interceptor):
)
client = BigtableInstanceAdminClient(transport=transport)
- with mock.patch.object(
- type(client.transport._session), "request"
- ) as req, mock.patch.object(
- path_template, "transcode"
- ) as transcode, mock.patch.object(
- transports.BigtableInstanceAdminRestInterceptor, "post_set_iam_policy"
- ) as post, mock.patch.object(
- transports.BigtableInstanceAdminRestInterceptor,
- "post_set_iam_policy_with_metadata",
- ) as post_with_metadata, mock.patch.object(
- transports.BigtableInstanceAdminRestInterceptor, "pre_set_iam_policy"
- ) as pre:
+ with (
+ mock.patch.object(type(client.transport._session), "request") as req,
+ mock.patch.object(path_template, "transcode") as transcode,
+ mock.patch.object(
+ transports.BigtableInstanceAdminRestInterceptor, "post_set_iam_policy"
+ ) as post,
+ mock.patch.object(
+ transports.BigtableInstanceAdminRestInterceptor,
+ "post_set_iam_policy_with_metadata",
+ ) as post_with_metadata,
+ mock.patch.object(
+ transports.BigtableInstanceAdminRestInterceptor, "pre_set_iam_policy"
+ ) as pre,
+ ):
pre.assert_not_called()
post.assert_not_called()
post_with_metadata.assert_not_called()
@@ -23158,8 +23210,9 @@ def test_test_iam_permissions_rest_bad_request(
request = request_type(**request_init)
# Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, "request") as req, pytest.raises(
- core_exceptions.BadRequest
+ with (
+ mock.patch.object(Session, "request") as req,
+ pytest.raises(core_exceptions.BadRequest),
):
# Wrap the value into a proper Response obj
response_value = mock.Mock()
@@ -23219,18 +23272,20 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor):
)
client = BigtableInstanceAdminClient(transport=transport)
- with mock.patch.object(
- type(client.transport._session), "request"
- ) as req, mock.patch.object(
- path_template, "transcode"
- ) as transcode, mock.patch.object(
- transports.BigtableInstanceAdminRestInterceptor, "post_test_iam_permissions"
- ) as post, mock.patch.object(
- transports.BigtableInstanceAdminRestInterceptor,
- "post_test_iam_permissions_with_metadata",
- ) as post_with_metadata, mock.patch.object(
- transports.BigtableInstanceAdminRestInterceptor, "pre_test_iam_permissions"
- ) as pre:
+ with (
+ mock.patch.object(type(client.transport._session), "request") as req,
+ mock.patch.object(path_template, "transcode") as transcode,
+ mock.patch.object(
+ transports.BigtableInstanceAdminRestInterceptor, "post_test_iam_permissions"
+ ) as post,
+ mock.patch.object(
+ transports.BigtableInstanceAdminRestInterceptor,
+ "post_test_iam_permissions_with_metadata",
+ ) as post_with_metadata,
+ mock.patch.object(
+ transports.BigtableInstanceAdminRestInterceptor, "pre_test_iam_permissions"
+ ) as pre,
+ ):
pre.assert_not_called()
post.assert_not_called()
post_with_metadata.assert_not_called()
@@ -23286,8 +23341,9 @@ def test_list_hot_tablets_rest_bad_request(
request = request_type(**request_init)
# Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, "request") as req, pytest.raises(
- core_exceptions.BadRequest
+ with (
+ mock.patch.object(Session, "request") as req,
+ pytest.raises(core_exceptions.BadRequest),
):
# Wrap the value into a proper Response obj
response_value = mock.Mock()
@@ -23350,18 +23406,20 @@ def test_list_hot_tablets_rest_interceptors(null_interceptor):
)
client = BigtableInstanceAdminClient(transport=transport)
- with mock.patch.object(
- type(client.transport._session), "request"
- ) as req, mock.patch.object(
- path_template, "transcode"
- ) as transcode, mock.patch.object(
- transports.BigtableInstanceAdminRestInterceptor, "post_list_hot_tablets"
- ) as post, mock.patch.object(
- transports.BigtableInstanceAdminRestInterceptor,
- "post_list_hot_tablets_with_metadata",
- ) as post_with_metadata, mock.patch.object(
- transports.BigtableInstanceAdminRestInterceptor, "pre_list_hot_tablets"
- ) as pre:
+ with (
+ mock.patch.object(type(client.transport._session), "request") as req,
+ mock.patch.object(path_template, "transcode") as transcode,
+ mock.patch.object(
+ transports.BigtableInstanceAdminRestInterceptor, "post_list_hot_tablets"
+ ) as post,
+ mock.patch.object(
+ transports.BigtableInstanceAdminRestInterceptor,
+ "post_list_hot_tablets_with_metadata",
+ ) as post_with_metadata,
+ mock.patch.object(
+ transports.BigtableInstanceAdminRestInterceptor, "pre_list_hot_tablets"
+ ) as pre,
+ ):
pre.assert_not_called()
post.assert_not_called()
post_with_metadata.assert_not_called()
@@ -23419,8 +23477,9 @@ def test_create_logical_view_rest_bad_request(
request = request_type(**request_init)
# Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, "request") as req, pytest.raises(
- core_exceptions.BadRequest
+ with (
+ mock.patch.object(Session, "request") as req,
+ pytest.raises(core_exceptions.BadRequest),
):
# Wrap the value into a proper Response obj
response_value = mock.Mock()
@@ -23552,20 +23611,21 @@ def test_create_logical_view_rest_interceptors(null_interceptor):
)
client = BigtableInstanceAdminClient(transport=transport)
- with mock.patch.object(
- type(client.transport._session), "request"
- ) as req, mock.patch.object(
- path_template, "transcode"
- ) as transcode, mock.patch.object(
- operation.Operation, "_set_result_from_operation"
- ), mock.patch.object(
- transports.BigtableInstanceAdminRestInterceptor, "post_create_logical_view"
- ) as post, mock.patch.object(
- transports.BigtableInstanceAdminRestInterceptor,
- "post_create_logical_view_with_metadata",
- ) as post_with_metadata, mock.patch.object(
- transports.BigtableInstanceAdminRestInterceptor, "pre_create_logical_view"
- ) as pre:
+ with (
+ mock.patch.object(type(client.transport._session), "request") as req,
+ mock.patch.object(path_template, "transcode") as transcode,
+ mock.patch.object(operation.Operation, "_set_result_from_operation"),
+ mock.patch.object(
+ transports.BigtableInstanceAdminRestInterceptor, "post_create_logical_view"
+ ) as post,
+ mock.patch.object(
+ transports.BigtableInstanceAdminRestInterceptor,
+ "post_create_logical_view_with_metadata",
+ ) as post_with_metadata,
+ mock.patch.object(
+ transports.BigtableInstanceAdminRestInterceptor, "pre_create_logical_view"
+ ) as pre,
+ ):
pre.assert_not_called()
post.assert_not_called()
post_with_metadata.assert_not_called()
@@ -23618,8 +23678,9 @@ def test_get_logical_view_rest_bad_request(
request = request_type(**request_init)
# Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, "request") as req, pytest.raises(
- core_exceptions.BadRequest
+ with (
+ mock.patch.object(Session, "request") as req,
+ pytest.raises(core_exceptions.BadRequest),
):
# Wrap the value into a proper Response obj
response_value = mock.Mock()
@@ -23688,18 +23749,20 @@ def test_get_logical_view_rest_interceptors(null_interceptor):
)
client = BigtableInstanceAdminClient(transport=transport)
- with mock.patch.object(
- type(client.transport._session), "request"
- ) as req, mock.patch.object(
- path_template, "transcode"
- ) as transcode, mock.patch.object(
- transports.BigtableInstanceAdminRestInterceptor, "post_get_logical_view"
- ) as post, mock.patch.object(
- transports.BigtableInstanceAdminRestInterceptor,
- "post_get_logical_view_with_metadata",
- ) as post_with_metadata, mock.patch.object(
- transports.BigtableInstanceAdminRestInterceptor, "pre_get_logical_view"
- ) as pre:
+ with (
+ mock.patch.object(type(client.transport._session), "request") as req,
+ mock.patch.object(path_template, "transcode") as transcode,
+ mock.patch.object(
+ transports.BigtableInstanceAdminRestInterceptor, "post_get_logical_view"
+ ) as post,
+ mock.patch.object(
+ transports.BigtableInstanceAdminRestInterceptor,
+ "post_get_logical_view_with_metadata",
+ ) as post_with_metadata,
+ mock.patch.object(
+ transports.BigtableInstanceAdminRestInterceptor, "pre_get_logical_view"
+ ) as pre,
+ ):
pre.assert_not_called()
post.assert_not_called()
post_with_metadata.assert_not_called()
@@ -23752,8 +23815,9 @@ def test_list_logical_views_rest_bad_request(
request = request_type(**request_init)
# Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, "request") as req, pytest.raises(
- core_exceptions.BadRequest
+ with (
+ mock.patch.object(Session, "request") as req,
+ pytest.raises(core_exceptions.BadRequest),
):
# Wrap the value into a proper Response obj
response_value = mock.Mock()
@@ -23816,18 +23880,20 @@ def test_list_logical_views_rest_interceptors(null_interceptor):
)
client = BigtableInstanceAdminClient(transport=transport)
- with mock.patch.object(
- type(client.transport._session), "request"
- ) as req, mock.patch.object(
- path_template, "transcode"
- ) as transcode, mock.patch.object(
- transports.BigtableInstanceAdminRestInterceptor, "post_list_logical_views"
- ) as post, mock.patch.object(
- transports.BigtableInstanceAdminRestInterceptor,
- "post_list_logical_views_with_metadata",
- ) as post_with_metadata, mock.patch.object(
- transports.BigtableInstanceAdminRestInterceptor, "pre_list_logical_views"
- ) as pre:
+ with (
+ mock.patch.object(type(client.transport._session), "request") as req,
+ mock.patch.object(path_template, "transcode") as transcode,
+ mock.patch.object(
+ transports.BigtableInstanceAdminRestInterceptor, "post_list_logical_views"
+ ) as post,
+ mock.patch.object(
+ transports.BigtableInstanceAdminRestInterceptor,
+ "post_list_logical_views_with_metadata",
+ ) as post_with_metadata,
+ mock.patch.object(
+ transports.BigtableInstanceAdminRestInterceptor, "pre_list_logical_views"
+ ) as pre,
+ ):
pre.assert_not_called()
post.assert_not_called()
post_with_metadata.assert_not_called()
@@ -23889,8 +23955,9 @@ def test_update_logical_view_rest_bad_request(
request = request_type(**request_init)
# Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, "request") as req, pytest.raises(
- core_exceptions.BadRequest
+ with (
+ mock.patch.object(Session, "request") as req,
+ pytest.raises(core_exceptions.BadRequest),
):
# Wrap the value into a proper Response obj
response_value = mock.Mock()
@@ -24026,20 +24093,21 @@ def test_update_logical_view_rest_interceptors(null_interceptor):
)
client = BigtableInstanceAdminClient(transport=transport)
- with mock.patch.object(
- type(client.transport._session), "request"
- ) as req, mock.patch.object(
- path_template, "transcode"
- ) as transcode, mock.patch.object(
- operation.Operation, "_set_result_from_operation"
- ), mock.patch.object(
- transports.BigtableInstanceAdminRestInterceptor, "post_update_logical_view"
- ) as post, mock.patch.object(
- transports.BigtableInstanceAdminRestInterceptor,
- "post_update_logical_view_with_metadata",
- ) as post_with_metadata, mock.patch.object(
- transports.BigtableInstanceAdminRestInterceptor, "pre_update_logical_view"
- ) as pre:
+ with (
+ mock.patch.object(type(client.transport._session), "request") as req,
+ mock.patch.object(path_template, "transcode") as transcode,
+ mock.patch.object(operation.Operation, "_set_result_from_operation"),
+ mock.patch.object(
+ transports.BigtableInstanceAdminRestInterceptor, "post_update_logical_view"
+ ) as post,
+ mock.patch.object(
+ transports.BigtableInstanceAdminRestInterceptor,
+ "post_update_logical_view_with_metadata",
+ ) as post_with_metadata,
+ mock.patch.object(
+ transports.BigtableInstanceAdminRestInterceptor, "pre_update_logical_view"
+ ) as pre,
+ ):
pre.assert_not_called()
post.assert_not_called()
post_with_metadata.assert_not_called()
@@ -24092,8 +24160,9 @@ def test_delete_logical_view_rest_bad_request(
request = request_type(**request_init)
# Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, "request") as req, pytest.raises(
- core_exceptions.BadRequest
+ with (
+ mock.patch.object(Session, "request") as req,
+ pytest.raises(core_exceptions.BadRequest),
):
# Wrap the value into a proper Response obj
response_value = mock.Mock()
@@ -24150,13 +24219,13 @@ def test_delete_logical_view_rest_interceptors(null_interceptor):
)
client = BigtableInstanceAdminClient(transport=transport)
- with mock.patch.object(
- type(client.transport._session), "request"
- ) as req, mock.patch.object(
- path_template, "transcode"
- ) as transcode, mock.patch.object(
- transports.BigtableInstanceAdminRestInterceptor, "pre_delete_logical_view"
- ) as pre:
+ with (
+ mock.patch.object(type(client.transport._session), "request") as req,
+ mock.patch.object(path_template, "transcode") as transcode,
+ mock.patch.object(
+ transports.BigtableInstanceAdminRestInterceptor, "pre_delete_logical_view"
+ ) as pre,
+ ):
pre.assert_not_called()
pb_message = bigtable_instance_admin.DeleteLogicalViewRequest.pb(
bigtable_instance_admin.DeleteLogicalViewRequest()
@@ -24201,8 +24270,9 @@ def test_create_materialized_view_rest_bad_request(
request = request_type(**request_init)
# Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, "request") as req, pytest.raises(
- core_exceptions.BadRequest
+ with (
+ mock.patch.object(Session, "request") as req,
+ pytest.raises(core_exceptions.BadRequest),
):
# Wrap the value into a proper Response obj
response_value = mock.Mock()
@@ -24334,20 +24404,23 @@ def test_create_materialized_view_rest_interceptors(null_interceptor):
)
client = BigtableInstanceAdminClient(transport=transport)
- with mock.patch.object(
- type(client.transport._session), "request"
- ) as req, mock.patch.object(
- path_template, "transcode"
- ) as transcode, mock.patch.object(
- operation.Operation, "_set_result_from_operation"
- ), mock.patch.object(
- transports.BigtableInstanceAdminRestInterceptor, "post_create_materialized_view"
- ) as post, mock.patch.object(
- transports.BigtableInstanceAdminRestInterceptor,
- "post_create_materialized_view_with_metadata",
- ) as post_with_metadata, mock.patch.object(
- transports.BigtableInstanceAdminRestInterceptor, "pre_create_materialized_view"
- ) as pre:
+ with (
+ mock.patch.object(type(client.transport._session), "request") as req,
+ mock.patch.object(path_template, "transcode") as transcode,
+ mock.patch.object(operation.Operation, "_set_result_from_operation"),
+ mock.patch.object(
+ transports.BigtableInstanceAdminRestInterceptor,
+ "post_create_materialized_view",
+ ) as post,
+ mock.patch.object(
+ transports.BigtableInstanceAdminRestInterceptor,
+ "post_create_materialized_view_with_metadata",
+ ) as post_with_metadata,
+ mock.patch.object(
+ transports.BigtableInstanceAdminRestInterceptor,
+ "pre_create_materialized_view",
+ ) as pre,
+ ):
pre.assert_not_called()
post.assert_not_called()
post_with_metadata.assert_not_called()
@@ -24402,8 +24475,9 @@ def test_get_materialized_view_rest_bad_request(
request = request_type(**request_init)
# Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, "request") as req, pytest.raises(
- core_exceptions.BadRequest
+ with (
+ mock.patch.object(Session, "request") as req,
+ pytest.raises(core_exceptions.BadRequest),
):
# Wrap the value into a proper Response obj
response_value = mock.Mock()
@@ -24474,18 +24548,21 @@ def test_get_materialized_view_rest_interceptors(null_interceptor):
)
client = BigtableInstanceAdminClient(transport=transport)
- with mock.patch.object(
- type(client.transport._session), "request"
- ) as req, mock.patch.object(
- path_template, "transcode"
- ) as transcode, mock.patch.object(
- transports.BigtableInstanceAdminRestInterceptor, "post_get_materialized_view"
- ) as post, mock.patch.object(
- transports.BigtableInstanceAdminRestInterceptor,
- "post_get_materialized_view_with_metadata",
- ) as post_with_metadata, mock.patch.object(
- transports.BigtableInstanceAdminRestInterceptor, "pre_get_materialized_view"
- ) as pre:
+ with (
+ mock.patch.object(type(client.transport._session), "request") as req,
+ mock.patch.object(path_template, "transcode") as transcode,
+ mock.patch.object(
+ transports.BigtableInstanceAdminRestInterceptor,
+ "post_get_materialized_view",
+ ) as post,
+ mock.patch.object(
+ transports.BigtableInstanceAdminRestInterceptor,
+ "post_get_materialized_view_with_metadata",
+ ) as post_with_metadata,
+ mock.patch.object(
+ transports.BigtableInstanceAdminRestInterceptor, "pre_get_materialized_view"
+ ) as pre,
+ ):
pre.assert_not_called()
post.assert_not_called()
post_with_metadata.assert_not_called()
@@ -24538,8 +24615,9 @@ def test_list_materialized_views_rest_bad_request(
request = request_type(**request_init)
# Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, "request") as req, pytest.raises(
- core_exceptions.BadRequest
+ with (
+ mock.patch.object(Session, "request") as req,
+ pytest.raises(core_exceptions.BadRequest),
):
# Wrap the value into a proper Response obj
response_value = mock.Mock()
@@ -24604,18 +24682,22 @@ def test_list_materialized_views_rest_interceptors(null_interceptor):
)
client = BigtableInstanceAdminClient(transport=transport)
- with mock.patch.object(
- type(client.transport._session), "request"
- ) as req, mock.patch.object(
- path_template, "transcode"
- ) as transcode, mock.patch.object(
- transports.BigtableInstanceAdminRestInterceptor, "post_list_materialized_views"
- ) as post, mock.patch.object(
- transports.BigtableInstanceAdminRestInterceptor,
- "post_list_materialized_views_with_metadata",
- ) as post_with_metadata, mock.patch.object(
- transports.BigtableInstanceAdminRestInterceptor, "pre_list_materialized_views"
- ) as pre:
+ with (
+ mock.patch.object(type(client.transport._session), "request") as req,
+ mock.patch.object(path_template, "transcode") as transcode,
+ mock.patch.object(
+ transports.BigtableInstanceAdminRestInterceptor,
+ "post_list_materialized_views",
+ ) as post,
+ mock.patch.object(
+ transports.BigtableInstanceAdminRestInterceptor,
+ "post_list_materialized_views_with_metadata",
+ ) as post_with_metadata,
+ mock.patch.object(
+ transports.BigtableInstanceAdminRestInterceptor,
+ "pre_list_materialized_views",
+ ) as pre,
+ ):
pre.assert_not_called()
post.assert_not_called()
post_with_metadata.assert_not_called()
@@ -24677,8 +24759,9 @@ def test_update_materialized_view_rest_bad_request(
request = request_type(**request_init)
# Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, "request") as req, pytest.raises(
- core_exceptions.BadRequest
+ with (
+ mock.patch.object(Session, "request") as req,
+ pytest.raises(core_exceptions.BadRequest),
):
# Wrap the value into a proper Response obj
response_value = mock.Mock()
@@ -24814,20 +24897,23 @@ def test_update_materialized_view_rest_interceptors(null_interceptor):
)
client = BigtableInstanceAdminClient(transport=transport)
- with mock.patch.object(
- type(client.transport._session), "request"
- ) as req, mock.patch.object(
- path_template, "transcode"
- ) as transcode, mock.patch.object(
- operation.Operation, "_set_result_from_operation"
- ), mock.patch.object(
- transports.BigtableInstanceAdminRestInterceptor, "post_update_materialized_view"
- ) as post, mock.patch.object(
- transports.BigtableInstanceAdminRestInterceptor,
- "post_update_materialized_view_with_metadata",
- ) as post_with_metadata, mock.patch.object(
- transports.BigtableInstanceAdminRestInterceptor, "pre_update_materialized_view"
- ) as pre:
+ with (
+ mock.patch.object(type(client.transport._session), "request") as req,
+ mock.patch.object(path_template, "transcode") as transcode,
+ mock.patch.object(operation.Operation, "_set_result_from_operation"),
+ mock.patch.object(
+ transports.BigtableInstanceAdminRestInterceptor,
+ "post_update_materialized_view",
+ ) as post,
+ mock.patch.object(
+ transports.BigtableInstanceAdminRestInterceptor,
+ "post_update_materialized_view_with_metadata",
+ ) as post_with_metadata,
+ mock.patch.object(
+ transports.BigtableInstanceAdminRestInterceptor,
+ "pre_update_materialized_view",
+ ) as pre,
+ ):
pre.assert_not_called()
post.assert_not_called()
post_with_metadata.assert_not_called()
@@ -24882,8 +24968,9 @@ def test_delete_materialized_view_rest_bad_request(
request = request_type(**request_init)
# Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, "request") as req, pytest.raises(
- core_exceptions.BadRequest
+ with (
+ mock.patch.object(Session, "request") as req,
+ pytest.raises(core_exceptions.BadRequest),
):
# Wrap the value into a proper Response obj
response_value = mock.Mock()
@@ -24942,13 +25029,14 @@ def test_delete_materialized_view_rest_interceptors(null_interceptor):
)
client = BigtableInstanceAdminClient(transport=transport)
- with mock.patch.object(
- type(client.transport._session), "request"
- ) as req, mock.patch.object(
- path_template, "transcode"
- ) as transcode, mock.patch.object(
- transports.BigtableInstanceAdminRestInterceptor, "pre_delete_materialized_view"
- ) as pre:
+ with (
+ mock.patch.object(type(client.transport._session), "request") as req,
+ mock.patch.object(path_template, "transcode") as transcode,
+ mock.patch.object(
+ transports.BigtableInstanceAdminRestInterceptor,
+ "pre_delete_materialized_view",
+ ) as pre,
+ ):
pre.assert_not_called()
pb_message = bigtable_instance_admin.DeleteMaterializedViewRequest.pb(
bigtable_instance_admin.DeleteMaterializedViewRequest()
@@ -25746,11 +25834,14 @@ def test_bigtable_instance_admin_base_transport():
def test_bigtable_instance_admin_base_transport_with_credentials_file():
# Instantiate the base transport with a credentials file
- with mock.patch.object(
- google.auth, "load_credentials_from_file", autospec=True
- ) as load_creds, mock.patch(
- "google.cloud.bigtable_admin_v2.services.bigtable_instance_admin.transports.BigtableInstanceAdminTransport._prep_wrapped_messages"
- ) as Transport:
+ with (
+ mock.patch.object(
+ google.auth, "load_credentials_from_file", autospec=True
+ ) as load_creds,
+ mock.patch(
+ "google.cloud.bigtable_admin_v2.services.bigtable_instance_admin.transports.BigtableInstanceAdminTransport._prep_wrapped_messages"
+ ) as Transport,
+ ):
Transport.return_value = None
load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
transport = transports.BigtableInstanceAdminTransport(
@@ -25775,9 +25866,12 @@ def test_bigtable_instance_admin_base_transport_with_credentials_file():
def test_bigtable_instance_admin_base_transport_with_adc():
# Test the default credentials are used if credentials and credentials_file are None.
- with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch(
- "google.cloud.bigtable_admin_v2.services.bigtable_instance_admin.transports.BigtableInstanceAdminTransport._prep_wrapped_messages"
- ) as Transport:
+ with (
+ mock.patch.object(google.auth, "default", autospec=True) as adc,
+ mock.patch(
+ "google.cloud.bigtable_admin_v2.services.bigtable_instance_admin.transports.BigtableInstanceAdminTransport._prep_wrapped_messages"
+ ) as Transport,
+ ):
Transport.return_value = None
adc.return_value = (ga_credentials.AnonymousCredentials(), None)
transport = transports.BigtableInstanceAdminTransport()
@@ -25867,11 +25961,12 @@ def test_bigtable_instance_admin_transport_create_channel(
):
# If credentials and host are not provided, the transport class should use
# ADC credentials.
- with mock.patch.object(
- google.auth, "default", autospec=True
- ) as adc, mock.patch.object(
- grpc_helpers, "create_channel", autospec=True
- ) as create_channel:
+ with (
+ mock.patch.object(google.auth, "default", autospec=True) as adc,
+ mock.patch.object(
+ grpc_helpers, "create_channel", autospec=True
+ ) as create_channel,
+ ):
creds = ga_credentials.AnonymousCredentials()
adc.return_value = (creds, None)
transport_class(quota_project_id="octopus", scopes=["1", "2"])
diff --git a/packages/google-cloud-bigtable/tests/unit/gapic/bigtable_admin_v2/test_bigtable_table_admin.py b/packages/google-cloud-bigtable/tests/unit/gapic/bigtable_admin_v2/test_bigtable_table_admin.py
index bff2206931b6..b77c531feab0 100644
--- a/packages/google-cloud-bigtable/tests/unit/gapic/bigtable_admin_v2/test_bigtable_table_admin.py
+++ b/packages/google-cloud-bigtable/tests/unit/gapic/bigtable_admin_v2/test_bigtable_table_admin.py
@@ -22,20 +22,19 @@
except ImportError: # pragma: NO COVER
import mock
-import grpc
-from grpc.experimental import aio
-from collections.abc import Iterable, AsyncIterable
-from google.protobuf import json_format
import json
import math
+from collections.abc import AsyncIterable, Iterable, Mapping, Sequence
+
+import grpc
import pytest
from google.api_core import api_core_version
-from proto.marshal.rules.dates import DurationRule, TimestampRule
+from google.protobuf import json_format
+from grpc.experimental import aio
from proto.marshal.rules import wrappers
-from requests import Response
-from requests import Request, PreparedRequest
+from proto.marshal.rules.dates import DurationRule, TimestampRule
+from requests import PreparedRequest, Request, Response
from requests.sessions import Session
-from google.protobuf import json_format
try:
from google.auth.aio import credentials as ga_credentials_async
@@ -44,44 +43,42 @@
except ImportError: # pragma: NO COVER
HAS_GOOGLE_AUTH_AIO = False
-from google.api_core import client_options
+import google.api_core.operation_async as operation_async # type: ignore
+import google.auth
+import google.iam.v1.iam_policy_pb2 as iam_policy_pb2 # type: ignore
+import google.iam.v1.options_pb2 as options_pb2 # type: ignore
+import google.iam.v1.policy_pb2 as policy_pb2 # type: ignore
+import google.protobuf.any_pb2 as any_pb2 # type: ignore
+import google.protobuf.duration_pb2 as duration_pb2 # type: ignore
+import google.protobuf.field_mask_pb2 as field_mask_pb2 # type: ignore
+import google.protobuf.timestamp_pb2 as timestamp_pb2 # type: ignore
+import google.rpc.status_pb2 as status_pb2 # type: ignore
+import google.type.expr_pb2 as expr_pb2 # type: ignore
+from google.api_core import (
+ client_options,
+ future,
+ gapic_v1,
+ grpc_helpers,
+ grpc_helpers_async,
+ operation,
+ operations_v1,
+ path_template,
+)
from google.api_core import exceptions as core_exceptions
-from google.api_core import future
-from google.api_core import gapic_v1
-from google.api_core import grpc_helpers
-from google.api_core import grpc_helpers_async
-from google.api_core import operation
-from google.api_core import operation_async # type: ignore
-from google.api_core import operations_v1
-from google.api_core import path_template
from google.api_core import retry as retries
from google.auth import credentials as ga_credentials
from google.auth.exceptions import MutualTLSChannelError
+from google.longrunning import operations_pb2 # type: ignore
+from google.oauth2 import service_account
+
from google.cloud.bigtable_admin_v2.services.bigtable_table_admin import (
BaseBigtableTableAdminAsyncClient,
-)
-from google.cloud.bigtable_admin_v2.services.bigtable_table_admin import (
BaseBigtableTableAdminClient,
+ pagers,
+ transports,
)
-from google.cloud.bigtable_admin_v2.services.bigtable_table_admin import pagers
-from google.cloud.bigtable_admin_v2.services.bigtable_table_admin import transports
-from google.cloud.bigtable_admin_v2.types import bigtable_table_admin
-from google.cloud.bigtable_admin_v2.types import table
+from google.cloud.bigtable_admin_v2.types import bigtable_table_admin, table, types
from google.cloud.bigtable_admin_v2.types import table as gba_table
-from google.cloud.bigtable_admin_v2.types import types
-from google.iam.v1 import iam_policy_pb2 # type: ignore
-from google.iam.v1 import options_pb2 # type: ignore
-from google.iam.v1 import policy_pb2 # type: ignore
-from google.longrunning import operations_pb2 # type: ignore
-from google.oauth2 import service_account
-from google.protobuf import any_pb2 # type: ignore
-from google.protobuf import duration_pb2 # type: ignore
-from google.protobuf import field_mask_pb2 # type: ignore
-from google.protobuf import timestamp_pb2 # type: ignore
-from google.rpc import status_pb2 # type: ignore
-from google.type import expr_pb2 # type: ignore
-import google.auth
-
CRED_INFO_JSON = {
"credential_source": "/path/to/file",
@@ -137,6 +134,7 @@ def test__get_default_mtls_endpoint():
sandbox_endpoint = "example.sandbox.googleapis.com"
sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com"
non_googleapi = "api.example.com"
+ custom_endpoint = ".custom"
assert BaseBigtableTableAdminClient._get_default_mtls_endpoint(None) is None
assert (
@@ -159,6 +157,10 @@ def test__get_default_mtls_endpoint():
BaseBigtableTableAdminClient._get_default_mtls_endpoint(non_googleapi)
== non_googleapi
)
+ assert (
+ BaseBigtableTableAdminClient._get_default_mtls_endpoint(custom_endpoint)
+ == custom_endpoint
+ )
def test__read_environment_variables():
@@ -1021,10 +1023,9 @@ def test_base_bigtable_table_admin_client_get_mtls_endpoint_and_cert_source(
client_cert_source=mock_client_cert_source,
api_endpoint=mock_api_endpoint,
)
- (
- api_endpoint,
- cert_source,
- ) = client_class.get_mtls_endpoint_and_cert_source(options)
+ api_endpoint, cert_source = (
+ client_class.get_mtls_endpoint_and_cert_source(options)
+ )
assert api_endpoint == mock_api_endpoint
assert cert_source is expected_cert_source
@@ -1069,10 +1070,9 @@ def test_base_bigtable_table_admin_client_get_mtls_endpoint_and_cert_source(
client_cert_source=mock_client_cert_source,
api_endpoint=mock_api_endpoint,
)
- (
- api_endpoint,
- cert_source,
- ) = client_class.get_mtls_endpoint_and_cert_source(options)
+ api_endpoint, cert_source = (
+ client_class.get_mtls_endpoint_and_cert_source(options)
+ )
assert api_endpoint == mock_api_endpoint
assert cert_source is expected_cert_source
@@ -1108,10 +1108,9 @@ def test_base_bigtable_table_admin_client_get_mtls_endpoint_and_cert_source(
"google.auth.transport.mtls.default_client_cert_source",
return_value=mock_client_cert_source,
):
- (
- api_endpoint,
- cert_source,
- ) = client_class.get_mtls_endpoint_and_cert_source()
+ api_endpoint, cert_source = (
+ client_class.get_mtls_endpoint_and_cert_source()
+ )
assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT
assert cert_source == mock_client_cert_source
@@ -1367,13 +1366,13 @@ def test_base_bigtable_table_admin_client_create_channel_credentials_file(
)
# test that the credentials from file are saved and used as the credentials.
- with mock.patch.object(
- google.auth, "load_credentials_from_file", autospec=True
- ) as load_creds, mock.patch.object(
- google.auth, "default", autospec=True
- ) as adc, mock.patch.object(
- grpc_helpers, "create_channel"
- ) as create_channel:
+ with (
+ mock.patch.object(
+ google.auth, "load_credentials_from_file", autospec=True
+ ) as load_creds,
+ mock.patch.object(google.auth, "default", autospec=True) as adc,
+ mock.patch.object(grpc_helpers, "create_channel") as create_channel,
+ ):
creds = ga_credentials.AnonymousCredentials()
file_creds = ga_credentials.AnonymousCredentials()
load_creds.return_value = (file_creds, None)
@@ -4032,9 +4031,9 @@ def test_create_authorized_view_use_cached_wrapped_rpc():
mock_rpc.return_value.name = (
"foo" # operation_request.operation in compute client(s) expect a string.
)
- client._transport._wrapped_methods[
- client._transport.create_authorized_view
- ] = mock_rpc
+ client._transport._wrapped_methods[client._transport.create_authorized_view] = (
+ mock_rpc
+ )
request = {}
client.create_authorized_view(request)
@@ -4404,9 +4403,9 @@ def test_list_authorized_views_use_cached_wrapped_rpc():
mock_rpc.return_value.name = (
"foo" # operation_request.operation in compute client(s) expect a string.
)
- client._transport._wrapped_methods[
- client._transport.list_authorized_views
- ] = mock_rpc
+ client._transport._wrapped_methods[client._transport.list_authorized_views] = (
+ mock_rpc
+ )
request = {}
client.list_authorized_views(request)
@@ -4952,9 +4951,9 @@ def test_get_authorized_view_use_cached_wrapped_rpc():
mock_rpc.return_value.name = (
"foo" # operation_request.operation in compute client(s) expect a string.
)
- client._transport._wrapped_methods[
- client._transport.get_authorized_view
- ] = mock_rpc
+ client._transport._wrapped_methods[client._transport.get_authorized_view] = (
+ mock_rpc
+ )
request = {}
client.get_authorized_view(request)
@@ -5292,9 +5291,9 @@ def test_update_authorized_view_use_cached_wrapped_rpc():
mock_rpc.return_value.name = (
"foo" # operation_request.operation in compute client(s) expect a string.
)
- client._transport._wrapped_methods[
- client._transport.update_authorized_view
- ] = mock_rpc
+ client._transport._wrapped_methods[client._transport.update_authorized_view] = (
+ mock_rpc
+ )
request = {}
client.update_authorized_view(request)
@@ -5651,9 +5650,9 @@ def test_delete_authorized_view_use_cached_wrapped_rpc():
mock_rpc.return_value.name = (
"foo" # operation_request.operation in compute client(s) expect a string.
)
- client._transport._wrapped_methods[
- client._transport.delete_authorized_view
- ] = mock_rpc
+ client._transport._wrapped_methods[client._transport.delete_authorized_view] = (
+ mock_rpc
+ )
request = {}
client.delete_authorized_view(request)
@@ -5989,9 +5988,9 @@ def test_modify_column_families_use_cached_wrapped_rpc():
mock_rpc.return_value.name = (
"foo" # operation_request.operation in compute client(s) expect a string.
)
- client._transport._wrapped_methods[
- client._transport.modify_column_families
- ] = mock_rpc
+ client._transport._wrapped_methods[client._transport.modify_column_families] = (
+ mock_rpc
+ )
request = {}
client.modify_column_families(request)
@@ -6936,9 +6935,9 @@ def test_check_consistency_use_cached_wrapped_rpc():
mock_rpc.return_value.name = (
"foo" # operation_request.operation in compute client(s) expect a string.
)
- client._transport._wrapped_methods[
- client._transport.check_consistency
- ] = mock_rpc
+ client._transport._wrapped_methods[client._transport.check_consistency] = (
+ mock_rpc
+ )
request = {}
client.check_consistency(request)
@@ -11977,9 +11976,9 @@ def test_test_iam_permissions_use_cached_wrapped_rpc():
mock_rpc.return_value.name = (
"foo" # operation_request.operation in compute client(s) expect a string.
)
- client._transport._wrapped_methods[
- client._transport.test_iam_permissions
- ] = mock_rpc
+ client._transport._wrapped_methods[client._transport.test_iam_permissions] = (
+ mock_rpc
+ )
request = {}
client.test_iam_permissions(request)
@@ -12347,9 +12346,9 @@ def test_create_schema_bundle_use_cached_wrapped_rpc():
mock_rpc.return_value.name = (
"foo" # operation_request.operation in compute client(s) expect a string.
)
- client._transport._wrapped_methods[
- client._transport.create_schema_bundle
- ] = mock_rpc
+ client._transport._wrapped_methods[client._transport.create_schema_bundle] = (
+ mock_rpc
+ )
request = {}
client.create_schema_bundle(request)
@@ -12709,9 +12708,9 @@ def test_update_schema_bundle_use_cached_wrapped_rpc():
mock_rpc.return_value.name = (
"foo" # operation_request.operation in compute client(s) expect a string.
)
- client._transport._wrapped_methods[
- client._transport.update_schema_bundle
- ] = mock_rpc
+ client._transport._wrapped_methods[client._transport.update_schema_bundle] = (
+ mock_rpc
+ )
request = {}
client.update_schema_bundle(request)
@@ -13068,9 +13067,9 @@ def test_get_schema_bundle_use_cached_wrapped_rpc():
mock_rpc.return_value.name = (
"foo" # operation_request.operation in compute client(s) expect a string.
)
- client._transport._wrapped_methods[
- client._transport.get_schema_bundle
- ] = mock_rpc
+ client._transport._wrapped_methods[client._transport.get_schema_bundle] = (
+ mock_rpc
+ )
request = {}
client.get_schema_bundle(request)
@@ -13410,9 +13409,9 @@ def test_list_schema_bundles_use_cached_wrapped_rpc():
mock_rpc.return_value.name = (
"foo" # operation_request.operation in compute client(s) expect a string.
)
- client._transport._wrapped_methods[
- client._transport.list_schema_bundles
- ] = mock_rpc
+ client._transport._wrapped_methods[client._transport.list_schema_bundles] = (
+ mock_rpc
+ )
request = {}
client.list_schema_bundles(request)
@@ -13953,9 +13952,9 @@ def test_delete_schema_bundle_use_cached_wrapped_rpc():
mock_rpc.return_value.name = (
"foo" # operation_request.operation in compute client(s) expect a string.
)
- client._transport._wrapped_methods[
- client._transport.delete_schema_bundle
- ] = mock_rpc
+ client._transport._wrapped_methods[client._transport.delete_schema_bundle] = (
+ mock_rpc
+ )
request = {}
client.delete_schema_bundle(request)
@@ -15601,9 +15600,9 @@ def test_create_authorized_view_rest_use_cached_wrapped_rpc():
mock_rpc.return_value.name = (
"foo" # operation_request.operation in compute client(s) expect a string.
)
- client._transport._wrapped_methods[
- client._transport.create_authorized_view
- ] = mock_rpc
+ client._transport._wrapped_methods[client._transport.create_authorized_view] = (
+ mock_rpc
+ )
request = {}
client.create_authorized_view(request)
@@ -15812,9 +15811,9 @@ def test_list_authorized_views_rest_use_cached_wrapped_rpc():
mock_rpc.return_value.name = (
"foo" # operation_request.operation in compute client(s) expect a string.
)
- client._transport._wrapped_methods[
- client._transport.list_authorized_views
- ] = mock_rpc
+ client._transport._wrapped_methods[client._transport.list_authorized_views] = (
+ mock_rpc
+ )
request = {}
client.list_authorized_views(request)
@@ -16077,9 +16076,9 @@ def test_get_authorized_view_rest_use_cached_wrapped_rpc():
mock_rpc.return_value.name = (
"foo" # operation_request.operation in compute client(s) expect a string.
)
- client._transport._wrapped_methods[
- client._transport.get_authorized_view
- ] = mock_rpc
+ client._transport._wrapped_methods[client._transport.get_authorized_view] = (
+ mock_rpc
+ )
request = {}
client.get_authorized_view(request)
@@ -16264,9 +16263,9 @@ def test_update_authorized_view_rest_use_cached_wrapped_rpc():
mock_rpc.return_value.name = (
"foo" # operation_request.operation in compute client(s) expect a string.
)
- client._transport._wrapped_methods[
- client._transport.update_authorized_view
- ] = mock_rpc
+ client._transport._wrapped_methods[client._transport.update_authorized_view] = (
+ mock_rpc
+ )
request = {}
client.update_authorized_view(request)
@@ -16463,9 +16462,9 @@ def test_delete_authorized_view_rest_use_cached_wrapped_rpc():
mock_rpc.return_value.name = (
"foo" # operation_request.operation in compute client(s) expect a string.
)
- client._transport._wrapped_methods[
- client._transport.delete_authorized_view
- ] = mock_rpc
+ client._transport._wrapped_methods[client._transport.delete_authorized_view] = (
+ mock_rpc
+ )
request = {}
client.delete_authorized_view(request)
@@ -16645,9 +16644,9 @@ def test_modify_column_families_rest_use_cached_wrapped_rpc():
mock_rpc.return_value.name = (
"foo" # operation_request.operation in compute client(s) expect a string.
)
- client._transport._wrapped_methods[
- client._transport.modify_column_families
- ] = mock_rpc
+ client._transport._wrapped_methods[client._transport.modify_column_families] = (
+ mock_rpc
+ )
request = {}
client.modify_column_families(request)
@@ -17150,9 +17149,9 @@ def test_check_consistency_rest_use_cached_wrapped_rpc():
mock_rpc.return_value.name = (
"foo" # operation_request.operation in compute client(s) expect a string.
)
- client._transport._wrapped_methods[
- client._transport.check_consistency
- ] = mock_rpc
+ client._transport._wrapped_methods[client._transport.check_consistency] = (
+ mock_rpc
+ )
request = {}
client.check_consistency(request)
@@ -19882,9 +19881,9 @@ def test_test_iam_permissions_rest_use_cached_wrapped_rpc():
mock_rpc.return_value.name = (
"foo" # operation_request.operation in compute client(s) expect a string.
)
- client._transport._wrapped_methods[
- client._transport.test_iam_permissions
- ] = mock_rpc
+ client._transport._wrapped_methods[client._transport.test_iam_permissions] = (
+ mock_rpc
+ )
request = {}
client.test_iam_permissions(request)
@@ -20077,9 +20076,9 @@ def test_create_schema_bundle_rest_use_cached_wrapped_rpc():
mock_rpc.return_value.name = (
"foo" # operation_request.operation in compute client(s) expect a string.
)
- client._transport._wrapped_methods[
- client._transport.create_schema_bundle
- ] = mock_rpc
+ client._transport._wrapped_methods[client._transport.create_schema_bundle] = (
+ mock_rpc
+ )
request = {}
client.create_schema_bundle(request)
@@ -20287,9 +20286,9 @@ def test_update_schema_bundle_rest_use_cached_wrapped_rpc():
mock_rpc.return_value.name = (
"foo" # operation_request.operation in compute client(s) expect a string.
)
- client._transport._wrapped_methods[
- client._transport.update_schema_bundle
- ] = mock_rpc
+ client._transport._wrapped_methods[client._transport.update_schema_bundle] = (
+ mock_rpc
+ )
request = {}
client.update_schema_bundle(request)
@@ -20483,9 +20482,9 @@ def test_get_schema_bundle_rest_use_cached_wrapped_rpc():
mock_rpc.return_value.name = (
"foo" # operation_request.operation in compute client(s) expect a string.
)
- client._transport._wrapped_methods[
- client._transport.get_schema_bundle
- ] = mock_rpc
+ client._transport._wrapped_methods[client._transport.get_schema_bundle] = (
+ mock_rpc
+ )
request = {}
client.get_schema_bundle(request)
@@ -20667,9 +20666,9 @@ def test_list_schema_bundles_rest_use_cached_wrapped_rpc():
mock_rpc.return_value.name = (
"foo" # operation_request.operation in compute client(s) expect a string.
)
- client._transport._wrapped_methods[
- client._transport.list_schema_bundles
- ] = mock_rpc
+ client._transport._wrapped_methods[client._transport.list_schema_bundles] = (
+ mock_rpc
+ )
request = {}
client.list_schema_bundles(request)
@@ -20929,9 +20928,9 @@ def test_delete_schema_bundle_rest_use_cached_wrapped_rpc():
mock_rpc.return_value.name = (
"foo" # operation_request.operation in compute client(s) expect a string.
)
- client._transport._wrapped_methods[
- client._transport.delete_schema_bundle
- ] = mock_rpc
+ client._transport._wrapped_methods[client._transport.delete_schema_bundle] = (
+ mock_rpc
+ )
request = {}
client.delete_schema_bundle(request)
@@ -22943,8 +22942,9 @@ def test_create_table_rest_bad_request(
request = request_type(**request_init)
# Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, "request") as req, pytest.raises(
- core_exceptions.BadRequest
+ with (
+ mock.patch.object(Session, "request") as req,
+ pytest.raises(core_exceptions.BadRequest),
):
# Wrap the value into a proper Response obj
response_value = mock.Mock()
@@ -23011,17 +23011,20 @@ def test_create_table_rest_interceptors(null_interceptor):
)
client = BaseBigtableTableAdminClient(transport=transport)
- with mock.patch.object(
- type(client.transport._session), "request"
- ) as req, mock.patch.object(
- path_template, "transcode"
- ) as transcode, mock.patch.object(
- transports.BigtableTableAdminRestInterceptor, "post_create_table"
- ) as post, mock.patch.object(
- transports.BigtableTableAdminRestInterceptor, "post_create_table_with_metadata"
- ) as post_with_metadata, mock.patch.object(
- transports.BigtableTableAdminRestInterceptor, "pre_create_table"
- ) as pre:
+ with (
+ mock.patch.object(type(client.transport._session), "request") as req,
+ mock.patch.object(path_template, "transcode") as transcode,
+ mock.patch.object(
+ transports.BigtableTableAdminRestInterceptor, "post_create_table"
+ ) as post,
+ mock.patch.object(
+ transports.BigtableTableAdminRestInterceptor,
+ "post_create_table_with_metadata",
+ ) as post_with_metadata,
+ mock.patch.object(
+ transports.BigtableTableAdminRestInterceptor, "pre_create_table"
+ ) as pre,
+ ):
pre.assert_not_called()
post.assert_not_called()
post_with_metadata.assert_not_called()
@@ -23074,8 +23077,9 @@ def test_create_table_from_snapshot_rest_bad_request(
request = request_type(**request_init)
# Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, "request") as req, pytest.raises(
- core_exceptions.BadRequest
+ with (
+ mock.patch.object(Session, "request") as req,
+ pytest.raises(core_exceptions.BadRequest),
):
# Wrap the value into a proper Response obj
response_value = mock.Mock()
@@ -23132,20 +23136,23 @@ def test_create_table_from_snapshot_rest_interceptors(null_interceptor):
)
client = BaseBigtableTableAdminClient(transport=transport)
- with mock.patch.object(
- type(client.transport._session), "request"
- ) as req, mock.patch.object(
- path_template, "transcode"
- ) as transcode, mock.patch.object(
- operation.Operation, "_set_result_from_operation"
- ), mock.patch.object(
- transports.BigtableTableAdminRestInterceptor, "post_create_table_from_snapshot"
- ) as post, mock.patch.object(
- transports.BigtableTableAdminRestInterceptor,
- "post_create_table_from_snapshot_with_metadata",
- ) as post_with_metadata, mock.patch.object(
- transports.BigtableTableAdminRestInterceptor, "pre_create_table_from_snapshot"
- ) as pre:
+ with (
+ mock.patch.object(type(client.transport._session), "request") as req,
+ mock.patch.object(path_template, "transcode") as transcode,
+ mock.patch.object(operation.Operation, "_set_result_from_operation"),
+ mock.patch.object(
+ transports.BigtableTableAdminRestInterceptor,
+ "post_create_table_from_snapshot",
+ ) as post,
+ mock.patch.object(
+ transports.BigtableTableAdminRestInterceptor,
+ "post_create_table_from_snapshot_with_metadata",
+ ) as post_with_metadata,
+ mock.patch.object(
+ transports.BigtableTableAdminRestInterceptor,
+ "pre_create_table_from_snapshot",
+ ) as pre,
+ ):
pre.assert_not_called()
post.assert_not_called()
post_with_metadata.assert_not_called()
@@ -23198,8 +23205,9 @@ def test_list_tables_rest_bad_request(
request = request_type(**request_init)
# Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, "request") as req, pytest.raises(
- core_exceptions.BadRequest
+ with (
+ mock.patch.object(Session, "request") as req,
+ pytest.raises(core_exceptions.BadRequest),
):
# Wrap the value into a proper Response obj
response_value = mock.Mock()
@@ -23262,17 +23270,20 @@ def test_list_tables_rest_interceptors(null_interceptor):
)
client = BaseBigtableTableAdminClient(transport=transport)
- with mock.patch.object(
- type(client.transport._session), "request"
- ) as req, mock.patch.object(
- path_template, "transcode"
- ) as transcode, mock.patch.object(
- transports.BigtableTableAdminRestInterceptor, "post_list_tables"
- ) as post, mock.patch.object(
- transports.BigtableTableAdminRestInterceptor, "post_list_tables_with_metadata"
- ) as post_with_metadata, mock.patch.object(
- transports.BigtableTableAdminRestInterceptor, "pre_list_tables"
- ) as pre:
+ with (
+ mock.patch.object(type(client.transport._session), "request") as req,
+ mock.patch.object(path_template, "transcode") as transcode,
+ mock.patch.object(
+ transports.BigtableTableAdminRestInterceptor, "post_list_tables"
+ ) as post,
+ mock.patch.object(
+ transports.BigtableTableAdminRestInterceptor,
+ "post_list_tables_with_metadata",
+ ) as post_with_metadata,
+ mock.patch.object(
+ transports.BigtableTableAdminRestInterceptor, "pre_list_tables"
+ ) as pre,
+ ):
pre.assert_not_called()
post.assert_not_called()
post_with_metadata.assert_not_called()
@@ -23328,8 +23339,9 @@ def test_get_table_rest_bad_request(request_type=bigtable_table_admin.GetTableRe
request = request_type(**request_init)
# Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, "request") as req, pytest.raises(
- core_exceptions.BadRequest
+ with (
+ mock.patch.object(Session, "request") as req,
+ pytest.raises(core_exceptions.BadRequest),
):
# Wrap the value into a proper Response obj
response_value = mock.Mock()
@@ -23396,17 +23408,19 @@ def test_get_table_rest_interceptors(null_interceptor):
)
client = BaseBigtableTableAdminClient(transport=transport)
- with mock.patch.object(
- type(client.transport._session), "request"
- ) as req, mock.patch.object(
- path_template, "transcode"
- ) as transcode, mock.patch.object(
- transports.BigtableTableAdminRestInterceptor, "post_get_table"
- ) as post, mock.patch.object(
- transports.BigtableTableAdminRestInterceptor, "post_get_table_with_metadata"
- ) as post_with_metadata, mock.patch.object(
- transports.BigtableTableAdminRestInterceptor, "pre_get_table"
- ) as pre:
+ with (
+ mock.patch.object(type(client.transport._session), "request") as req,
+ mock.patch.object(path_template, "transcode") as transcode,
+ mock.patch.object(
+ transports.BigtableTableAdminRestInterceptor, "post_get_table"
+ ) as post,
+ mock.patch.object(
+ transports.BigtableTableAdminRestInterceptor, "post_get_table_with_metadata"
+ ) as post_with_metadata,
+ mock.patch.object(
+ transports.BigtableTableAdminRestInterceptor, "pre_get_table"
+ ) as pre,
+ ):
pre.assert_not_called()
post.assert_not_called()
post_with_metadata.assert_not_called()
@@ -23461,8 +23475,9 @@ def test_update_table_rest_bad_request(
request = request_type(**request_init)
# Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, "request") as req, pytest.raises(
- core_exceptions.BadRequest
+ with (
+ mock.patch.object(Session, "request") as req,
+ pytest.raises(core_exceptions.BadRequest),
):
# Wrap the value into a proper Response obj
response_value = mock.Mock()
@@ -23654,19 +23669,21 @@ def test_update_table_rest_interceptors(null_interceptor):
)
client = BaseBigtableTableAdminClient(transport=transport)
- with mock.patch.object(
- type(client.transport._session), "request"
- ) as req, mock.patch.object(
- path_template, "transcode"
- ) as transcode, mock.patch.object(
- operation.Operation, "_set_result_from_operation"
- ), mock.patch.object(
- transports.BigtableTableAdminRestInterceptor, "post_update_table"
- ) as post, mock.patch.object(
- transports.BigtableTableAdminRestInterceptor, "post_update_table_with_metadata"
- ) as post_with_metadata, mock.patch.object(
- transports.BigtableTableAdminRestInterceptor, "pre_update_table"
- ) as pre:
+ with (
+ mock.patch.object(type(client.transport._session), "request") as req,
+ mock.patch.object(path_template, "transcode") as transcode,
+ mock.patch.object(operation.Operation, "_set_result_from_operation"),
+ mock.patch.object(
+ transports.BigtableTableAdminRestInterceptor, "post_update_table"
+ ) as post,
+ mock.patch.object(
+ transports.BigtableTableAdminRestInterceptor,
+ "post_update_table_with_metadata",
+ ) as post_with_metadata,
+ mock.patch.object(
+ transports.BigtableTableAdminRestInterceptor, "pre_update_table"
+ ) as pre,
+ ):
pre.assert_not_called()
post.assert_not_called()
post_with_metadata.assert_not_called()
@@ -23719,8 +23736,9 @@ def test_delete_table_rest_bad_request(
request = request_type(**request_init)
# Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, "request") as req, pytest.raises(
- core_exceptions.BadRequest
+ with (
+ mock.patch.object(Session, "request") as req,
+ pytest.raises(core_exceptions.BadRequest),
):
# Wrap the value into a proper Response obj
response_value = mock.Mock()
@@ -23777,13 +23795,13 @@ def test_delete_table_rest_interceptors(null_interceptor):
)
client = BaseBigtableTableAdminClient(transport=transport)
- with mock.patch.object(
- type(client.transport._session), "request"
- ) as req, mock.patch.object(
- path_template, "transcode"
- ) as transcode, mock.patch.object(
- transports.BigtableTableAdminRestInterceptor, "pre_delete_table"
- ) as pre:
+ with (
+ mock.patch.object(type(client.transport._session), "request") as req,
+ mock.patch.object(path_template, "transcode") as transcode,
+ mock.patch.object(
+ transports.BigtableTableAdminRestInterceptor, "pre_delete_table"
+ ) as pre,
+ ):
pre.assert_not_called()
pb_message = bigtable_table_admin.DeleteTableRequest.pb(
bigtable_table_admin.DeleteTableRequest()
@@ -23828,8 +23846,9 @@ def test_undelete_table_rest_bad_request(
request = request_type(**request_init)
# Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, "request") as req, pytest.raises(
- core_exceptions.BadRequest
+ with (
+ mock.patch.object(Session, "request") as req,
+ pytest.raises(core_exceptions.BadRequest),
):
# Wrap the value into a proper Response obj
response_value = mock.Mock()
@@ -23886,20 +23905,21 @@ def test_undelete_table_rest_interceptors(null_interceptor):
)
client = BaseBigtableTableAdminClient(transport=transport)
- with mock.patch.object(
- type(client.transport._session), "request"
- ) as req, mock.patch.object(
- path_template, "transcode"
- ) as transcode, mock.patch.object(
- operation.Operation, "_set_result_from_operation"
- ), mock.patch.object(
- transports.BigtableTableAdminRestInterceptor, "post_undelete_table"
- ) as post, mock.patch.object(
- transports.BigtableTableAdminRestInterceptor,
- "post_undelete_table_with_metadata",
- ) as post_with_metadata, mock.patch.object(
- transports.BigtableTableAdminRestInterceptor, "pre_undelete_table"
- ) as pre:
+ with (
+ mock.patch.object(type(client.transport._session), "request") as req,
+ mock.patch.object(path_template, "transcode") as transcode,
+ mock.patch.object(operation.Operation, "_set_result_from_operation"),
+ mock.patch.object(
+ transports.BigtableTableAdminRestInterceptor, "post_undelete_table"
+ ) as post,
+ mock.patch.object(
+ transports.BigtableTableAdminRestInterceptor,
+ "post_undelete_table_with_metadata",
+ ) as post_with_metadata,
+ mock.patch.object(
+ transports.BigtableTableAdminRestInterceptor, "pre_undelete_table"
+ ) as pre,
+ ):
pre.assert_not_called()
post.assert_not_called()
post_with_metadata.assert_not_called()
@@ -23952,8 +23972,9 @@ def test_create_authorized_view_rest_bad_request(
request = request_type(**request_init)
# Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, "request") as req, pytest.raises(
- core_exceptions.BadRequest
+ with (
+ mock.patch.object(Session, "request") as req,
+ pytest.raises(core_exceptions.BadRequest),
):
# Wrap the value into a proper Response obj
response_value = mock.Mock()
@@ -24088,20 +24109,21 @@ def test_create_authorized_view_rest_interceptors(null_interceptor):
)
client = BaseBigtableTableAdminClient(transport=transport)
- with mock.patch.object(
- type(client.transport._session), "request"
- ) as req, mock.patch.object(
- path_template, "transcode"
- ) as transcode, mock.patch.object(
- operation.Operation, "_set_result_from_operation"
- ), mock.patch.object(
- transports.BigtableTableAdminRestInterceptor, "post_create_authorized_view"
- ) as post, mock.patch.object(
- transports.BigtableTableAdminRestInterceptor,
- "post_create_authorized_view_with_metadata",
- ) as post_with_metadata, mock.patch.object(
- transports.BigtableTableAdminRestInterceptor, "pre_create_authorized_view"
- ) as pre:
+ with (
+ mock.patch.object(type(client.transport._session), "request") as req,
+ mock.patch.object(path_template, "transcode") as transcode,
+ mock.patch.object(operation.Operation, "_set_result_from_operation"),
+ mock.patch.object(
+ transports.BigtableTableAdminRestInterceptor, "post_create_authorized_view"
+ ) as post,
+ mock.patch.object(
+ transports.BigtableTableAdminRestInterceptor,
+ "post_create_authorized_view_with_metadata",
+ ) as post_with_metadata,
+ mock.patch.object(
+ transports.BigtableTableAdminRestInterceptor, "pre_create_authorized_view"
+ ) as pre,
+ ):
pre.assert_not_called()
post.assert_not_called()
post_with_metadata.assert_not_called()
@@ -24154,8 +24176,9 @@ def test_list_authorized_views_rest_bad_request(
request = request_type(**request_init)
# Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, "request") as req, pytest.raises(
- core_exceptions.BadRequest
+ with (
+ mock.patch.object(Session, "request") as req,
+ pytest.raises(core_exceptions.BadRequest),
):
# Wrap the value into a proper Response obj
response_value = mock.Mock()
@@ -24218,18 +24241,20 @@ def test_list_authorized_views_rest_interceptors(null_interceptor):
)
client = BaseBigtableTableAdminClient(transport=transport)
- with mock.patch.object(
- type(client.transport._session), "request"
- ) as req, mock.patch.object(
- path_template, "transcode"
- ) as transcode, mock.patch.object(
- transports.BigtableTableAdminRestInterceptor, "post_list_authorized_views"
- ) as post, mock.patch.object(
- transports.BigtableTableAdminRestInterceptor,
- "post_list_authorized_views_with_metadata",
- ) as post_with_metadata, mock.patch.object(
- transports.BigtableTableAdminRestInterceptor, "pre_list_authorized_views"
- ) as pre:
+ with (
+ mock.patch.object(type(client.transport._session), "request") as req,
+ mock.patch.object(path_template, "transcode") as transcode,
+ mock.patch.object(
+ transports.BigtableTableAdminRestInterceptor, "post_list_authorized_views"
+ ) as post,
+ mock.patch.object(
+ transports.BigtableTableAdminRestInterceptor,
+ "post_list_authorized_views_with_metadata",
+ ) as post_with_metadata,
+ mock.patch.object(
+ transports.BigtableTableAdminRestInterceptor, "pre_list_authorized_views"
+ ) as pre,
+ ):
pre.assert_not_called()
post.assert_not_called()
post_with_metadata.assert_not_called()
@@ -24289,8 +24314,9 @@ def test_get_authorized_view_rest_bad_request(
request = request_type(**request_init)
# Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, "request") as req, pytest.raises(
- core_exceptions.BadRequest
+ with (
+ mock.patch.object(Session, "request") as req,
+ pytest.raises(core_exceptions.BadRequest),
):
# Wrap the value into a proper Response obj
response_value = mock.Mock()
@@ -24359,18 +24385,20 @@ def test_get_authorized_view_rest_interceptors(null_interceptor):
)
client = BaseBigtableTableAdminClient(transport=transport)
- with mock.patch.object(
- type(client.transport._session), "request"
- ) as req, mock.patch.object(
- path_template, "transcode"
- ) as transcode, mock.patch.object(
- transports.BigtableTableAdminRestInterceptor, "post_get_authorized_view"
- ) as post, mock.patch.object(
- transports.BigtableTableAdminRestInterceptor,
- "post_get_authorized_view_with_metadata",
- ) as post_with_metadata, mock.patch.object(
- transports.BigtableTableAdminRestInterceptor, "pre_get_authorized_view"
- ) as pre:
+ with (
+ mock.patch.object(type(client.transport._session), "request") as req,
+ mock.patch.object(path_template, "transcode") as transcode,
+ mock.patch.object(
+ transports.BigtableTableAdminRestInterceptor, "post_get_authorized_view"
+ ) as post,
+ mock.patch.object(
+ transports.BigtableTableAdminRestInterceptor,
+ "post_get_authorized_view_with_metadata",
+ ) as post_with_metadata,
+ mock.patch.object(
+ transports.BigtableTableAdminRestInterceptor, "pre_get_authorized_view"
+ ) as pre,
+ ):
pre.assert_not_called()
post.assert_not_called()
post_with_metadata.assert_not_called()
@@ -24427,8 +24455,9 @@ def test_update_authorized_view_rest_bad_request(
request = request_type(**request_init)
# Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, "request") as req, pytest.raises(
- core_exceptions.BadRequest
+ with (
+ mock.patch.object(Session, "request") as req,
+ pytest.raises(core_exceptions.BadRequest),
):
# Wrap the value into a proper Response obj
response_value = mock.Mock()
@@ -24567,20 +24596,21 @@ def test_update_authorized_view_rest_interceptors(null_interceptor):
)
client = BaseBigtableTableAdminClient(transport=transport)
- with mock.patch.object(
- type(client.transport._session), "request"
- ) as req, mock.patch.object(
- path_template, "transcode"
- ) as transcode, mock.patch.object(
- operation.Operation, "_set_result_from_operation"
- ), mock.patch.object(
- transports.BigtableTableAdminRestInterceptor, "post_update_authorized_view"
- ) as post, mock.patch.object(
- transports.BigtableTableAdminRestInterceptor,
- "post_update_authorized_view_with_metadata",
- ) as post_with_metadata, mock.patch.object(
- transports.BigtableTableAdminRestInterceptor, "pre_update_authorized_view"
- ) as pre:
+ with (
+ mock.patch.object(type(client.transport._session), "request") as req,
+ mock.patch.object(path_template, "transcode") as transcode,
+ mock.patch.object(operation.Operation, "_set_result_from_operation"),
+ mock.patch.object(
+ transports.BigtableTableAdminRestInterceptor, "post_update_authorized_view"
+ ) as post,
+ mock.patch.object(
+ transports.BigtableTableAdminRestInterceptor,
+ "post_update_authorized_view_with_metadata",
+ ) as post_with_metadata,
+ mock.patch.object(
+ transports.BigtableTableAdminRestInterceptor, "pre_update_authorized_view"
+ ) as pre,
+ ):
pre.assert_not_called()
post.assert_not_called()
post_with_metadata.assert_not_called()
@@ -24635,8 +24665,9 @@ def test_delete_authorized_view_rest_bad_request(
request = request_type(**request_init)
# Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, "request") as req, pytest.raises(
- core_exceptions.BadRequest
+ with (
+ mock.patch.object(Session, "request") as req,
+ pytest.raises(core_exceptions.BadRequest),
):
# Wrap the value into a proper Response obj
response_value = mock.Mock()
@@ -24695,13 +24726,13 @@ def test_delete_authorized_view_rest_interceptors(null_interceptor):
)
client = BaseBigtableTableAdminClient(transport=transport)
- with mock.patch.object(
- type(client.transport._session), "request"
- ) as req, mock.patch.object(
- path_template, "transcode"
- ) as transcode, mock.patch.object(
- transports.BigtableTableAdminRestInterceptor, "pre_delete_authorized_view"
- ) as pre:
+ with (
+ mock.patch.object(type(client.transport._session), "request") as req,
+ mock.patch.object(path_template, "transcode") as transcode,
+ mock.patch.object(
+ transports.BigtableTableAdminRestInterceptor, "pre_delete_authorized_view"
+ ) as pre,
+ ):
pre.assert_not_called()
pb_message = bigtable_table_admin.DeleteAuthorizedViewRequest.pb(
bigtable_table_admin.DeleteAuthorizedViewRequest()
@@ -24746,8 +24777,9 @@ def test_modify_column_families_rest_bad_request(
request = request_type(**request_init)
# Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, "request") as req, pytest.raises(
- core_exceptions.BadRequest
+ with (
+ mock.patch.object(Session, "request") as req,
+ pytest.raises(core_exceptions.BadRequest),
):
# Wrap the value into a proper Response obj
response_value = mock.Mock()
@@ -24814,18 +24846,20 @@ def test_modify_column_families_rest_interceptors(null_interceptor):
)
client = BaseBigtableTableAdminClient(transport=transport)
- with mock.patch.object(
- type(client.transport._session), "request"
- ) as req, mock.patch.object(
- path_template, "transcode"
- ) as transcode, mock.patch.object(
- transports.BigtableTableAdminRestInterceptor, "post_modify_column_families"
- ) as post, mock.patch.object(
- transports.BigtableTableAdminRestInterceptor,
- "post_modify_column_families_with_metadata",
- ) as post_with_metadata, mock.patch.object(
- transports.BigtableTableAdminRestInterceptor, "pre_modify_column_families"
- ) as pre:
+ with (
+ mock.patch.object(type(client.transport._session), "request") as req,
+ mock.patch.object(path_template, "transcode") as transcode,
+ mock.patch.object(
+ transports.BigtableTableAdminRestInterceptor, "post_modify_column_families"
+ ) as post,
+ mock.patch.object(
+ transports.BigtableTableAdminRestInterceptor,
+ "post_modify_column_families_with_metadata",
+ ) as post_with_metadata,
+ mock.patch.object(
+ transports.BigtableTableAdminRestInterceptor, "pre_modify_column_families"
+ ) as pre,
+ ):
pre.assert_not_called()
post.assert_not_called()
post_with_metadata.assert_not_called()
@@ -24878,8 +24912,9 @@ def test_drop_row_range_rest_bad_request(
request = request_type(**request_init)
# Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, "request") as req, pytest.raises(
- core_exceptions.BadRequest
+ with (
+ mock.patch.object(Session, "request") as req,
+ pytest.raises(core_exceptions.BadRequest),
):
# Wrap the value into a proper Response obj
response_value = mock.Mock()
@@ -24936,13 +24971,13 @@ def test_drop_row_range_rest_interceptors(null_interceptor):
)
client = BaseBigtableTableAdminClient(transport=transport)
- with mock.patch.object(
- type(client.transport._session), "request"
- ) as req, mock.patch.object(
- path_template, "transcode"
- ) as transcode, mock.patch.object(
- transports.BigtableTableAdminRestInterceptor, "pre_drop_row_range"
- ) as pre:
+ with (
+ mock.patch.object(type(client.transport._session), "request") as req,
+ mock.patch.object(path_template, "transcode") as transcode,
+ mock.patch.object(
+ transports.BigtableTableAdminRestInterceptor, "pre_drop_row_range"
+ ) as pre,
+ ):
pre.assert_not_called()
pb_message = bigtable_table_admin.DropRowRangeRequest.pb(
bigtable_table_admin.DropRowRangeRequest()
@@ -24987,8 +25022,9 @@ def test_generate_consistency_token_rest_bad_request(
request = request_type(**request_init)
# Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, "request") as req, pytest.raises(
- core_exceptions.BadRequest
+ with (
+ mock.patch.object(Session, "request") as req,
+ pytest.raises(core_exceptions.BadRequest),
):
# Wrap the value into a proper Response obj
response_value = mock.Mock()
@@ -25053,18 +25089,22 @@ def test_generate_consistency_token_rest_interceptors(null_interceptor):
)
client = BaseBigtableTableAdminClient(transport=transport)
- with mock.patch.object(
- type(client.transport._session), "request"
- ) as req, mock.patch.object(
- path_template, "transcode"
- ) as transcode, mock.patch.object(
- transports.BigtableTableAdminRestInterceptor, "post_generate_consistency_token"
- ) as post, mock.patch.object(
- transports.BigtableTableAdminRestInterceptor,
- "post_generate_consistency_token_with_metadata",
- ) as post_with_metadata, mock.patch.object(
- transports.BigtableTableAdminRestInterceptor, "pre_generate_consistency_token"
- ) as pre:
+ with (
+ mock.patch.object(type(client.transport._session), "request") as req,
+ mock.patch.object(path_template, "transcode") as transcode,
+ mock.patch.object(
+ transports.BigtableTableAdminRestInterceptor,
+ "post_generate_consistency_token",
+ ) as post,
+ mock.patch.object(
+ transports.BigtableTableAdminRestInterceptor,
+ "post_generate_consistency_token_with_metadata",
+ ) as post_with_metadata,
+ mock.patch.object(
+ transports.BigtableTableAdminRestInterceptor,
+ "pre_generate_consistency_token",
+ ) as pre,
+ ):
pre.assert_not_called()
post.assert_not_called()
post_with_metadata.assert_not_called()
@@ -25122,8 +25162,9 @@ def test_check_consistency_rest_bad_request(
request = request_type(**request_init)
# Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, "request") as req, pytest.raises(
- core_exceptions.BadRequest
+ with (
+ mock.patch.object(Session, "request") as req,
+ pytest.raises(core_exceptions.BadRequest),
):
# Wrap the value into a proper Response obj
response_value = mock.Mock()
@@ -25186,18 +25227,20 @@ def test_check_consistency_rest_interceptors(null_interceptor):
)
client = BaseBigtableTableAdminClient(transport=transport)
- with mock.patch.object(
- type(client.transport._session), "request"
- ) as req, mock.patch.object(
- path_template, "transcode"
- ) as transcode, mock.patch.object(
- transports.BigtableTableAdminRestInterceptor, "post_check_consistency"
- ) as post, mock.patch.object(
- transports.BigtableTableAdminRestInterceptor,
- "post_check_consistency_with_metadata",
- ) as post_with_metadata, mock.patch.object(
- transports.BigtableTableAdminRestInterceptor, "pre_check_consistency"
- ) as pre:
+ with (
+ mock.patch.object(type(client.transport._session), "request") as req,
+ mock.patch.object(path_template, "transcode") as transcode,
+ mock.patch.object(
+ transports.BigtableTableAdminRestInterceptor, "post_check_consistency"
+ ) as post,
+ mock.patch.object(
+ transports.BigtableTableAdminRestInterceptor,
+ "post_check_consistency_with_metadata",
+ ) as post_with_metadata,
+ mock.patch.object(
+ transports.BigtableTableAdminRestInterceptor, "pre_check_consistency"
+ ) as pre,
+ ):
pre.assert_not_called()
post.assert_not_called()
post_with_metadata.assert_not_called()
@@ -25255,8 +25298,9 @@ def test_snapshot_table_rest_bad_request(
request = request_type(**request_init)
# Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, "request") as req, pytest.raises(
- core_exceptions.BadRequest
+ with (
+ mock.patch.object(Session, "request") as req,
+ pytest.raises(core_exceptions.BadRequest),
):
# Wrap the value into a proper Response obj
response_value = mock.Mock()
@@ -25313,20 +25357,21 @@ def test_snapshot_table_rest_interceptors(null_interceptor):
)
client = BaseBigtableTableAdminClient(transport=transport)
- with mock.patch.object(
- type(client.transport._session), "request"
- ) as req, mock.patch.object(
- path_template, "transcode"
- ) as transcode, mock.patch.object(
- operation.Operation, "_set_result_from_operation"
- ), mock.patch.object(
- transports.BigtableTableAdminRestInterceptor, "post_snapshot_table"
- ) as post, mock.patch.object(
- transports.BigtableTableAdminRestInterceptor,
- "post_snapshot_table_with_metadata",
- ) as post_with_metadata, mock.patch.object(
- transports.BigtableTableAdminRestInterceptor, "pre_snapshot_table"
- ) as pre:
+ with (
+ mock.patch.object(type(client.transport._session), "request") as req,
+ mock.patch.object(path_template, "transcode") as transcode,
+ mock.patch.object(operation.Operation, "_set_result_from_operation"),
+ mock.patch.object(
+ transports.BigtableTableAdminRestInterceptor, "post_snapshot_table"
+ ) as post,
+ mock.patch.object(
+ transports.BigtableTableAdminRestInterceptor,
+ "post_snapshot_table_with_metadata",
+ ) as post_with_metadata,
+ mock.patch.object(
+ transports.BigtableTableAdminRestInterceptor, "pre_snapshot_table"
+ ) as pre,
+ ):
pre.assert_not_called()
post.assert_not_called()
post_with_metadata.assert_not_called()
@@ -25381,8 +25426,9 @@ def test_get_snapshot_rest_bad_request(
request = request_type(**request_init)
# Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, "request") as req, pytest.raises(
- core_exceptions.BadRequest
+ with (
+ mock.patch.object(Session, "request") as req,
+ pytest.raises(core_exceptions.BadRequest),
):
# Wrap the value into a proper Response obj
response_value = mock.Mock()
@@ -25453,17 +25499,20 @@ def test_get_snapshot_rest_interceptors(null_interceptor):
)
client = BaseBigtableTableAdminClient(transport=transport)
- with mock.patch.object(
- type(client.transport._session), "request"
- ) as req, mock.patch.object(
- path_template, "transcode"
- ) as transcode, mock.patch.object(
- transports.BigtableTableAdminRestInterceptor, "post_get_snapshot"
- ) as post, mock.patch.object(
- transports.BigtableTableAdminRestInterceptor, "post_get_snapshot_with_metadata"
- ) as post_with_metadata, mock.patch.object(
- transports.BigtableTableAdminRestInterceptor, "pre_get_snapshot"
- ) as pre:
+ with (
+ mock.patch.object(type(client.transport._session), "request") as req,
+ mock.patch.object(path_template, "transcode") as transcode,
+ mock.patch.object(
+ transports.BigtableTableAdminRestInterceptor, "post_get_snapshot"
+ ) as post,
+ mock.patch.object(
+ transports.BigtableTableAdminRestInterceptor,
+ "post_get_snapshot_with_metadata",
+ ) as post_with_metadata,
+ mock.patch.object(
+ transports.BigtableTableAdminRestInterceptor, "pre_get_snapshot"
+ ) as pre,
+ ):
pre.assert_not_called()
post.assert_not_called()
post_with_metadata.assert_not_called()
@@ -25516,8 +25565,9 @@ def test_list_snapshots_rest_bad_request(
request = request_type(**request_init)
# Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, "request") as req, pytest.raises(
- core_exceptions.BadRequest
+ with (
+ mock.patch.object(Session, "request") as req,
+ pytest.raises(core_exceptions.BadRequest),
):
# Wrap the value into a proper Response obj
response_value = mock.Mock()
@@ -25580,18 +25630,20 @@ def test_list_snapshots_rest_interceptors(null_interceptor):
)
client = BaseBigtableTableAdminClient(transport=transport)
- with mock.patch.object(
- type(client.transport._session), "request"
- ) as req, mock.patch.object(
- path_template, "transcode"
- ) as transcode, mock.patch.object(
- transports.BigtableTableAdminRestInterceptor, "post_list_snapshots"
- ) as post, mock.patch.object(
- transports.BigtableTableAdminRestInterceptor,
- "post_list_snapshots_with_metadata",
- ) as post_with_metadata, mock.patch.object(
- transports.BigtableTableAdminRestInterceptor, "pre_list_snapshots"
- ) as pre:
+ with (
+ mock.patch.object(type(client.transport._session), "request") as req,
+ mock.patch.object(path_template, "transcode") as transcode,
+ mock.patch.object(
+ transports.BigtableTableAdminRestInterceptor, "post_list_snapshots"
+ ) as post,
+ mock.patch.object(
+ transports.BigtableTableAdminRestInterceptor,
+ "post_list_snapshots_with_metadata",
+ ) as post_with_metadata,
+ mock.patch.object(
+ transports.BigtableTableAdminRestInterceptor, "pre_list_snapshots"
+ ) as pre,
+ ):
pre.assert_not_called()
post.assert_not_called()
post_with_metadata.assert_not_called()
@@ -25651,8 +25703,9 @@ def test_delete_snapshot_rest_bad_request(
request = request_type(**request_init)
# Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, "request") as req, pytest.raises(
- core_exceptions.BadRequest
+ with (
+ mock.patch.object(Session, "request") as req,
+ pytest.raises(core_exceptions.BadRequest),
):
# Wrap the value into a proper Response obj
response_value = mock.Mock()
@@ -25711,13 +25764,13 @@ def test_delete_snapshot_rest_interceptors(null_interceptor):
)
client = BaseBigtableTableAdminClient(transport=transport)
- with mock.patch.object(
- type(client.transport._session), "request"
- ) as req, mock.patch.object(
- path_template, "transcode"
- ) as transcode, mock.patch.object(
- transports.BigtableTableAdminRestInterceptor, "pre_delete_snapshot"
- ) as pre:
+ with (
+ mock.patch.object(type(client.transport._session), "request") as req,
+ mock.patch.object(path_template, "transcode") as transcode,
+ mock.patch.object(
+ transports.BigtableTableAdminRestInterceptor, "pre_delete_snapshot"
+ ) as pre,
+ ):
pre.assert_not_called()
pb_message = bigtable_table_admin.DeleteSnapshotRequest.pb(
bigtable_table_admin.DeleteSnapshotRequest()
@@ -25762,8 +25815,9 @@ def test_create_backup_rest_bad_request(
request = request_type(**request_init)
# Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, "request") as req, pytest.raises(
- core_exceptions.BadRequest
+ with (
+ mock.patch.object(Session, "request") as req,
+ pytest.raises(core_exceptions.BadRequest),
):
# Wrap the value into a proper Response obj
response_value = mock.Mock()
@@ -25913,19 +25967,21 @@ def test_create_backup_rest_interceptors(null_interceptor):
)
client = BaseBigtableTableAdminClient(transport=transport)
- with mock.patch.object(
- type(client.transport._session), "request"
- ) as req, mock.patch.object(
- path_template, "transcode"
- ) as transcode, mock.patch.object(
- operation.Operation, "_set_result_from_operation"
- ), mock.patch.object(
- transports.BigtableTableAdminRestInterceptor, "post_create_backup"
- ) as post, mock.patch.object(
- transports.BigtableTableAdminRestInterceptor, "post_create_backup_with_metadata"
- ) as post_with_metadata, mock.patch.object(
- transports.BigtableTableAdminRestInterceptor, "pre_create_backup"
- ) as pre:
+ with (
+ mock.patch.object(type(client.transport._session), "request") as req,
+ mock.patch.object(path_template, "transcode") as transcode,
+ mock.patch.object(operation.Operation, "_set_result_from_operation"),
+ mock.patch.object(
+ transports.BigtableTableAdminRestInterceptor, "post_create_backup"
+ ) as post,
+ mock.patch.object(
+ transports.BigtableTableAdminRestInterceptor,
+ "post_create_backup_with_metadata",
+ ) as post_with_metadata,
+ mock.patch.object(
+ transports.BigtableTableAdminRestInterceptor, "pre_create_backup"
+ ) as pre,
+ ):
pre.assert_not_called()
post.assert_not_called()
post_with_metadata.assert_not_called()
@@ -25980,8 +26036,9 @@ def test_get_backup_rest_bad_request(
request = request_type(**request_init)
# Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, "request") as req, pytest.raises(
- core_exceptions.BadRequest
+ with (
+ mock.patch.object(Session, "request") as req,
+ pytest.raises(core_exceptions.BadRequest),
):
# Wrap the value into a proper Response obj
response_value = mock.Mock()
@@ -26056,17 +26113,20 @@ def test_get_backup_rest_interceptors(null_interceptor):
)
client = BaseBigtableTableAdminClient(transport=transport)
- with mock.patch.object(
- type(client.transport._session), "request"
- ) as req, mock.patch.object(
- path_template, "transcode"
- ) as transcode, mock.patch.object(
- transports.BigtableTableAdminRestInterceptor, "post_get_backup"
- ) as post, mock.patch.object(
- transports.BigtableTableAdminRestInterceptor, "post_get_backup_with_metadata"
- ) as post_with_metadata, mock.patch.object(
- transports.BigtableTableAdminRestInterceptor, "pre_get_backup"
- ) as pre:
+ with (
+ mock.patch.object(type(client.transport._session), "request") as req,
+ mock.patch.object(path_template, "transcode") as transcode,
+ mock.patch.object(
+ transports.BigtableTableAdminRestInterceptor, "post_get_backup"
+ ) as post,
+ mock.patch.object(
+ transports.BigtableTableAdminRestInterceptor,
+ "post_get_backup_with_metadata",
+ ) as post_with_metadata,
+ mock.patch.object(
+ transports.BigtableTableAdminRestInterceptor, "pre_get_backup"
+ ) as pre,
+ ):
pre.assert_not_called()
post.assert_not_called()
post_with_metadata.assert_not_called()
@@ -26123,8 +26183,9 @@ def test_update_backup_rest_bad_request(
request = request_type(**request_init)
# Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, "request") as req, pytest.raises(
- core_exceptions.BadRequest
+ with (
+ mock.patch.object(Session, "request") as req,
+ pytest.raises(core_exceptions.BadRequest),
):
# Wrap the value into a proper Response obj
response_value = mock.Mock()
@@ -26294,17 +26355,20 @@ def test_update_backup_rest_interceptors(null_interceptor):
)
client = BaseBigtableTableAdminClient(transport=transport)
- with mock.patch.object(
- type(client.transport._session), "request"
- ) as req, mock.patch.object(
- path_template, "transcode"
- ) as transcode, mock.patch.object(
- transports.BigtableTableAdminRestInterceptor, "post_update_backup"
- ) as post, mock.patch.object(
- transports.BigtableTableAdminRestInterceptor, "post_update_backup_with_metadata"
- ) as post_with_metadata, mock.patch.object(
- transports.BigtableTableAdminRestInterceptor, "pre_update_backup"
- ) as pre:
+ with (
+ mock.patch.object(type(client.transport._session), "request") as req,
+ mock.patch.object(path_template, "transcode") as transcode,
+ mock.patch.object(
+ transports.BigtableTableAdminRestInterceptor, "post_update_backup"
+ ) as post,
+ mock.patch.object(
+ transports.BigtableTableAdminRestInterceptor,
+ "post_update_backup_with_metadata",
+ ) as post_with_metadata,
+ mock.patch.object(
+ transports.BigtableTableAdminRestInterceptor, "pre_update_backup"
+ ) as pre,
+ ):
pre.assert_not_called()
post.assert_not_called()
post_with_metadata.assert_not_called()
@@ -26359,8 +26423,9 @@ def test_delete_backup_rest_bad_request(
request = request_type(**request_init)
# Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, "request") as req, pytest.raises(
- core_exceptions.BadRequest
+ with (
+ mock.patch.object(Session, "request") as req,
+ pytest.raises(core_exceptions.BadRequest),
):
# Wrap the value into a proper Response obj
response_value = mock.Mock()
@@ -26419,13 +26484,13 @@ def test_delete_backup_rest_interceptors(null_interceptor):
)
client = BaseBigtableTableAdminClient(transport=transport)
- with mock.patch.object(
- type(client.transport._session), "request"
- ) as req, mock.patch.object(
- path_template, "transcode"
- ) as transcode, mock.patch.object(
- transports.BigtableTableAdminRestInterceptor, "pre_delete_backup"
- ) as pre:
+ with (
+ mock.patch.object(type(client.transport._session), "request") as req,
+ mock.patch.object(path_template, "transcode") as transcode,
+ mock.patch.object(
+ transports.BigtableTableAdminRestInterceptor, "pre_delete_backup"
+ ) as pre,
+ ):
pre.assert_not_called()
pb_message = bigtable_table_admin.DeleteBackupRequest.pb(
bigtable_table_admin.DeleteBackupRequest()
@@ -26470,8 +26535,9 @@ def test_list_backups_rest_bad_request(
request = request_type(**request_init)
# Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, "request") as req, pytest.raises(
- core_exceptions.BadRequest
+ with (
+ mock.patch.object(Session, "request") as req,
+ pytest.raises(core_exceptions.BadRequest),
):
# Wrap the value into a proper Response obj
response_value = mock.Mock()
@@ -26534,17 +26600,20 @@ def test_list_backups_rest_interceptors(null_interceptor):
)
client = BaseBigtableTableAdminClient(transport=transport)
- with mock.patch.object(
- type(client.transport._session), "request"
- ) as req, mock.patch.object(
- path_template, "transcode"
- ) as transcode, mock.patch.object(
- transports.BigtableTableAdminRestInterceptor, "post_list_backups"
- ) as post, mock.patch.object(
- transports.BigtableTableAdminRestInterceptor, "post_list_backups_with_metadata"
- ) as post_with_metadata, mock.patch.object(
- transports.BigtableTableAdminRestInterceptor, "pre_list_backups"
- ) as pre:
+ with (
+ mock.patch.object(type(client.transport._session), "request") as req,
+ mock.patch.object(path_template, "transcode") as transcode,
+ mock.patch.object(
+ transports.BigtableTableAdminRestInterceptor, "post_list_backups"
+ ) as post,
+ mock.patch.object(
+ transports.BigtableTableAdminRestInterceptor,
+ "post_list_backups_with_metadata",
+ ) as post_with_metadata,
+ mock.patch.object(
+ transports.BigtableTableAdminRestInterceptor, "pre_list_backups"
+ ) as pre,
+ ):
pre.assert_not_called()
post.assert_not_called()
post_with_metadata.assert_not_called()
@@ -26602,8 +26671,9 @@ def test__restore_table_rest_bad_request(
request = request_type(**request_init)
# Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, "request") as req, pytest.raises(
- core_exceptions.BadRequest
+ with (
+ mock.patch.object(Session, "request") as req,
+ pytest.raises(core_exceptions.BadRequest),
):
# Wrap the value into a proper Response obj
response_value = mock.Mock()
@@ -26660,19 +26730,21 @@ def test__restore_table_rest_interceptors(null_interceptor):
)
client = BaseBigtableTableAdminClient(transport=transport)
- with mock.patch.object(
- type(client.transport._session), "request"
- ) as req, mock.patch.object(
- path_template, "transcode"
- ) as transcode, mock.patch.object(
- operation.Operation, "_set_result_from_operation"
- ), mock.patch.object(
- transports.BigtableTableAdminRestInterceptor, "post_restore_table"
- ) as post, mock.patch.object(
- transports.BigtableTableAdminRestInterceptor, "post_restore_table_with_metadata"
- ) as post_with_metadata, mock.patch.object(
- transports.BigtableTableAdminRestInterceptor, "pre_restore_table"
- ) as pre:
+ with (
+ mock.patch.object(type(client.transport._session), "request") as req,
+ mock.patch.object(path_template, "transcode") as transcode,
+ mock.patch.object(operation.Operation, "_set_result_from_operation"),
+ mock.patch.object(
+ transports.BigtableTableAdminRestInterceptor, "post_restore_table"
+ ) as post,
+ mock.patch.object(
+ transports.BigtableTableAdminRestInterceptor,
+ "post_restore_table_with_metadata",
+ ) as post_with_metadata,
+ mock.patch.object(
+ transports.BigtableTableAdminRestInterceptor, "pre_restore_table"
+ ) as pre,
+ ):
pre.assert_not_called()
post.assert_not_called()
post_with_metadata.assert_not_called()
@@ -26725,8 +26797,9 @@ def test_copy_backup_rest_bad_request(
request = request_type(**request_init)
# Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, "request") as req, pytest.raises(
- core_exceptions.BadRequest
+ with (
+ mock.patch.object(Session, "request") as req,
+ pytest.raises(core_exceptions.BadRequest),
):
# Wrap the value into a proper Response obj
response_value = mock.Mock()
@@ -26783,19 +26856,21 @@ def test_copy_backup_rest_interceptors(null_interceptor):
)
client = BaseBigtableTableAdminClient(transport=transport)
- with mock.patch.object(
- type(client.transport._session), "request"
- ) as req, mock.patch.object(
- path_template, "transcode"
- ) as transcode, mock.patch.object(
- operation.Operation, "_set_result_from_operation"
- ), mock.patch.object(
- transports.BigtableTableAdminRestInterceptor, "post_copy_backup"
- ) as post, mock.patch.object(
- transports.BigtableTableAdminRestInterceptor, "post_copy_backup_with_metadata"
- ) as post_with_metadata, mock.patch.object(
- transports.BigtableTableAdminRestInterceptor, "pre_copy_backup"
- ) as pre:
+ with (
+ mock.patch.object(type(client.transport._session), "request") as req,
+ mock.patch.object(path_template, "transcode") as transcode,
+ mock.patch.object(operation.Operation, "_set_result_from_operation"),
+ mock.patch.object(
+ transports.BigtableTableAdminRestInterceptor, "post_copy_backup"
+ ) as post,
+ mock.patch.object(
+ transports.BigtableTableAdminRestInterceptor,
+ "post_copy_backup_with_metadata",
+ ) as post_with_metadata,
+ mock.patch.object(
+ transports.BigtableTableAdminRestInterceptor, "pre_copy_backup"
+ ) as pre,
+ ):
pre.assert_not_called()
post.assert_not_called()
post_with_metadata.assert_not_called()
@@ -26848,8 +26923,9 @@ def test_get_iam_policy_rest_bad_request(
request = request_type(**request_init)
# Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, "request") as req, pytest.raises(
- core_exceptions.BadRequest
+ with (
+ mock.patch.object(Session, "request") as req,
+ pytest.raises(core_exceptions.BadRequest),
):
# Wrap the value into a proper Response obj
response_value = mock.Mock()
@@ -26911,18 +26987,20 @@ def test_get_iam_policy_rest_interceptors(null_interceptor):
)
client = BaseBigtableTableAdminClient(transport=transport)
- with mock.patch.object(
- type(client.transport._session), "request"
- ) as req, mock.patch.object(
- path_template, "transcode"
- ) as transcode, mock.patch.object(
- transports.BigtableTableAdminRestInterceptor, "post_get_iam_policy"
- ) as post, mock.patch.object(
- transports.BigtableTableAdminRestInterceptor,
- "post_get_iam_policy_with_metadata",
- ) as post_with_metadata, mock.patch.object(
- transports.BigtableTableAdminRestInterceptor, "pre_get_iam_policy"
- ) as pre:
+ with (
+ mock.patch.object(type(client.transport._session), "request") as req,
+ mock.patch.object(path_template, "transcode") as transcode,
+ mock.patch.object(
+ transports.BigtableTableAdminRestInterceptor, "post_get_iam_policy"
+ ) as post,
+ mock.patch.object(
+ transports.BigtableTableAdminRestInterceptor,
+ "post_get_iam_policy_with_metadata",
+ ) as post_with_metadata,
+ mock.patch.object(
+ transports.BigtableTableAdminRestInterceptor, "pre_get_iam_policy"
+ ) as pre,
+ ):
pre.assert_not_called()
post.assert_not_called()
post_with_metadata.assert_not_called()
@@ -26973,8 +27051,9 @@ def test_set_iam_policy_rest_bad_request(
request = request_type(**request_init)
# Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, "request") as req, pytest.raises(
- core_exceptions.BadRequest
+ with (
+ mock.patch.object(Session, "request") as req,
+ pytest.raises(core_exceptions.BadRequest),
):
# Wrap the value into a proper Response obj
response_value = mock.Mock()
@@ -27036,18 +27115,20 @@ def test_set_iam_policy_rest_interceptors(null_interceptor):
)
client = BaseBigtableTableAdminClient(transport=transport)
- with mock.patch.object(
- type(client.transport._session), "request"
- ) as req, mock.patch.object(
- path_template, "transcode"
- ) as transcode, mock.patch.object(
- transports.BigtableTableAdminRestInterceptor, "post_set_iam_policy"
- ) as post, mock.patch.object(
- transports.BigtableTableAdminRestInterceptor,
- "post_set_iam_policy_with_metadata",
- ) as post_with_metadata, mock.patch.object(
- transports.BigtableTableAdminRestInterceptor, "pre_set_iam_policy"
- ) as pre:
+ with (
+ mock.patch.object(type(client.transport._session), "request") as req,
+ mock.patch.object(path_template, "transcode") as transcode,
+ mock.patch.object(
+ transports.BigtableTableAdminRestInterceptor, "post_set_iam_policy"
+ ) as post,
+ mock.patch.object(
+ transports.BigtableTableAdminRestInterceptor,
+ "post_set_iam_policy_with_metadata",
+ ) as post_with_metadata,
+ mock.patch.object(
+ transports.BigtableTableAdminRestInterceptor, "pre_set_iam_policy"
+ ) as pre,
+ ):
pre.assert_not_called()
post.assert_not_called()
post_with_metadata.assert_not_called()
@@ -27098,8 +27179,9 @@ def test_test_iam_permissions_rest_bad_request(
request = request_type(**request_init)
# Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, "request") as req, pytest.raises(
- core_exceptions.BadRequest
+ with (
+ mock.patch.object(Session, "request") as req,
+ pytest.raises(core_exceptions.BadRequest),
):
# Wrap the value into a proper Response obj
response_value = mock.Mock()
@@ -27159,18 +27241,20 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor):
)
client = BaseBigtableTableAdminClient(transport=transport)
- with mock.patch.object(
- type(client.transport._session), "request"
- ) as req, mock.patch.object(
- path_template, "transcode"
- ) as transcode, mock.patch.object(
- transports.BigtableTableAdminRestInterceptor, "post_test_iam_permissions"
- ) as post, mock.patch.object(
- transports.BigtableTableAdminRestInterceptor,
- "post_test_iam_permissions_with_metadata",
- ) as post_with_metadata, mock.patch.object(
- transports.BigtableTableAdminRestInterceptor, "pre_test_iam_permissions"
- ) as pre:
+ with (
+ mock.patch.object(type(client.transport._session), "request") as req,
+ mock.patch.object(path_template, "transcode") as transcode,
+ mock.patch.object(
+ transports.BigtableTableAdminRestInterceptor, "post_test_iam_permissions"
+ ) as post,
+ mock.patch.object(
+ transports.BigtableTableAdminRestInterceptor,
+ "post_test_iam_permissions_with_metadata",
+ ) as post_with_metadata,
+ mock.patch.object(
+ transports.BigtableTableAdminRestInterceptor, "pre_test_iam_permissions"
+ ) as pre,
+ ):
pre.assert_not_called()
post.assert_not_called()
post_with_metadata.assert_not_called()
@@ -27226,8 +27310,9 @@ def test_create_schema_bundle_rest_bad_request(
request = request_type(**request_init)
# Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, "request") as req, pytest.raises(
- core_exceptions.BadRequest
+ with (
+ mock.patch.object(Session, "request") as req,
+ pytest.raises(core_exceptions.BadRequest),
):
# Wrap the value into a proper Response obj
response_value = mock.Mock()
@@ -27358,20 +27443,21 @@ def test_create_schema_bundle_rest_interceptors(null_interceptor):
)
client = BaseBigtableTableAdminClient(transport=transport)
- with mock.patch.object(
- type(client.transport._session), "request"
- ) as req, mock.patch.object(
- path_template, "transcode"
- ) as transcode, mock.patch.object(
- operation.Operation, "_set_result_from_operation"
- ), mock.patch.object(
- transports.BigtableTableAdminRestInterceptor, "post_create_schema_bundle"
- ) as post, mock.patch.object(
- transports.BigtableTableAdminRestInterceptor,
- "post_create_schema_bundle_with_metadata",
- ) as post_with_metadata, mock.patch.object(
- transports.BigtableTableAdminRestInterceptor, "pre_create_schema_bundle"
- ) as pre:
+ with (
+ mock.patch.object(type(client.transport._session), "request") as req,
+ mock.patch.object(path_template, "transcode") as transcode,
+ mock.patch.object(operation.Operation, "_set_result_from_operation"),
+ mock.patch.object(
+ transports.BigtableTableAdminRestInterceptor, "post_create_schema_bundle"
+ ) as post,
+ mock.patch.object(
+ transports.BigtableTableAdminRestInterceptor,
+ "post_create_schema_bundle_with_metadata",
+ ) as post_with_metadata,
+ mock.patch.object(
+ transports.BigtableTableAdminRestInterceptor, "pre_create_schema_bundle"
+ ) as pre,
+ ):
pre.assert_not_called()
post.assert_not_called()
post_with_metadata.assert_not_called()
@@ -27428,8 +27514,9 @@ def test_update_schema_bundle_rest_bad_request(
request = request_type(**request_init)
# Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, "request") as req, pytest.raises(
- core_exceptions.BadRequest
+ with (
+ mock.patch.object(Session, "request") as req,
+ pytest.raises(core_exceptions.BadRequest),
):
# Wrap the value into a proper Response obj
response_value = mock.Mock()
@@ -27564,20 +27651,21 @@ def test_update_schema_bundle_rest_interceptors(null_interceptor):
)
client = BaseBigtableTableAdminClient(transport=transport)
- with mock.patch.object(
- type(client.transport._session), "request"
- ) as req, mock.patch.object(
- path_template, "transcode"
- ) as transcode, mock.patch.object(
- operation.Operation, "_set_result_from_operation"
- ), mock.patch.object(
- transports.BigtableTableAdminRestInterceptor, "post_update_schema_bundle"
- ) as post, mock.patch.object(
- transports.BigtableTableAdminRestInterceptor,
- "post_update_schema_bundle_with_metadata",
- ) as post_with_metadata, mock.patch.object(
- transports.BigtableTableAdminRestInterceptor, "pre_update_schema_bundle"
- ) as pre:
+ with (
+ mock.patch.object(type(client.transport._session), "request") as req,
+ mock.patch.object(path_template, "transcode") as transcode,
+ mock.patch.object(operation.Operation, "_set_result_from_operation"),
+ mock.patch.object(
+ transports.BigtableTableAdminRestInterceptor, "post_update_schema_bundle"
+ ) as post,
+ mock.patch.object(
+ transports.BigtableTableAdminRestInterceptor,
+ "post_update_schema_bundle_with_metadata",
+ ) as post_with_metadata,
+ mock.patch.object(
+ transports.BigtableTableAdminRestInterceptor, "pre_update_schema_bundle"
+ ) as pre,
+ ):
pre.assert_not_called()
post.assert_not_called()
post_with_metadata.assert_not_called()
@@ -27632,8 +27720,9 @@ def test_get_schema_bundle_rest_bad_request(
request = request_type(**request_init)
# Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, "request") as req, pytest.raises(
- core_exceptions.BadRequest
+ with (
+ mock.patch.object(Session, "request") as req,
+ pytest.raises(core_exceptions.BadRequest),
):
# Wrap the value into a proper Response obj
response_value = mock.Mock()
@@ -27700,18 +27789,20 @@ def test_get_schema_bundle_rest_interceptors(null_interceptor):
)
client = BaseBigtableTableAdminClient(transport=transport)
- with mock.patch.object(
- type(client.transport._session), "request"
- ) as req, mock.patch.object(
- path_template, "transcode"
- ) as transcode, mock.patch.object(
- transports.BigtableTableAdminRestInterceptor, "post_get_schema_bundle"
- ) as post, mock.patch.object(
- transports.BigtableTableAdminRestInterceptor,
- "post_get_schema_bundle_with_metadata",
- ) as post_with_metadata, mock.patch.object(
- transports.BigtableTableAdminRestInterceptor, "pre_get_schema_bundle"
- ) as pre:
+ with (
+ mock.patch.object(type(client.transport._session), "request") as req,
+ mock.patch.object(path_template, "transcode") as transcode,
+ mock.patch.object(
+ transports.BigtableTableAdminRestInterceptor, "post_get_schema_bundle"
+ ) as post,
+ mock.patch.object(
+ transports.BigtableTableAdminRestInterceptor,
+ "post_get_schema_bundle_with_metadata",
+ ) as post_with_metadata,
+ mock.patch.object(
+ transports.BigtableTableAdminRestInterceptor, "pre_get_schema_bundle"
+ ) as pre,
+ ):
pre.assert_not_called()
post.assert_not_called()
post_with_metadata.assert_not_called()
@@ -27764,8 +27855,9 @@ def test_list_schema_bundles_rest_bad_request(
request = request_type(**request_init)
# Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, "request") as req, pytest.raises(
- core_exceptions.BadRequest
+ with (
+ mock.patch.object(Session, "request") as req,
+ pytest.raises(core_exceptions.BadRequest),
):
# Wrap the value into a proper Response obj
response_value = mock.Mock()
@@ -27828,18 +27920,20 @@ def test_list_schema_bundles_rest_interceptors(null_interceptor):
)
client = BaseBigtableTableAdminClient(transport=transport)
- with mock.patch.object(
- type(client.transport._session), "request"
- ) as req, mock.patch.object(
- path_template, "transcode"
- ) as transcode, mock.patch.object(
- transports.BigtableTableAdminRestInterceptor, "post_list_schema_bundles"
- ) as post, mock.patch.object(
- transports.BigtableTableAdminRestInterceptor,
- "post_list_schema_bundles_with_metadata",
- ) as post_with_metadata, mock.patch.object(
- transports.BigtableTableAdminRestInterceptor, "pre_list_schema_bundles"
- ) as pre:
+ with (
+ mock.patch.object(type(client.transport._session), "request") as req,
+ mock.patch.object(path_template, "transcode") as transcode,
+ mock.patch.object(
+ transports.BigtableTableAdminRestInterceptor, "post_list_schema_bundles"
+ ) as post,
+ mock.patch.object(
+ transports.BigtableTableAdminRestInterceptor,
+ "post_list_schema_bundles_with_metadata",
+ ) as post_with_metadata,
+ mock.patch.object(
+ transports.BigtableTableAdminRestInterceptor, "pre_list_schema_bundles"
+ ) as pre,
+ ):
pre.assert_not_called()
post.assert_not_called()
post_with_metadata.assert_not_called()
@@ -27899,8 +27993,9 @@ def test_delete_schema_bundle_rest_bad_request(
request = request_type(**request_init)
# Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, "request") as req, pytest.raises(
- core_exceptions.BadRequest
+ with (
+ mock.patch.object(Session, "request") as req,
+ pytest.raises(core_exceptions.BadRequest),
):
# Wrap the value into a proper Response obj
response_value = mock.Mock()
@@ -27959,13 +28054,13 @@ def test_delete_schema_bundle_rest_interceptors(null_interceptor):
)
client = BaseBigtableTableAdminClient(transport=transport)
- with mock.patch.object(
- type(client.transport._session), "request"
- ) as req, mock.patch.object(
- path_template, "transcode"
- ) as transcode, mock.patch.object(
- transports.BigtableTableAdminRestInterceptor, "pre_delete_schema_bundle"
- ) as pre:
+ with (
+ mock.patch.object(type(client.transport._session), "request") as req,
+ mock.patch.object(path_template, "transcode") as transcode,
+ mock.patch.object(
+ transports.BigtableTableAdminRestInterceptor, "pre_delete_schema_bundle"
+ ) as pre,
+ ):
pre.assert_not_called()
pb_message = bigtable_table_admin.DeleteSchemaBundleRequest.pb(
bigtable_table_admin.DeleteSchemaBundleRequest()
@@ -28845,11 +28940,14 @@ def test_bigtable_table_admin_base_transport():
def test_bigtable_table_admin_base_transport_with_credentials_file():
# Instantiate the base transport with a credentials file
- with mock.patch.object(
- google.auth, "load_credentials_from_file", autospec=True
- ) as load_creds, mock.patch(
- "google.cloud.bigtable_admin_v2.services.bigtable_table_admin.transports.BigtableTableAdminTransport._prep_wrapped_messages"
- ) as Transport:
+ with (
+ mock.patch.object(
+ google.auth, "load_credentials_from_file", autospec=True
+ ) as load_creds,
+ mock.patch(
+ "google.cloud.bigtable_admin_v2.services.bigtable_table_admin.transports.BigtableTableAdminTransport._prep_wrapped_messages"
+ ) as Transport,
+ ):
Transport.return_value = None
load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
transport = transports.BigtableTableAdminTransport(
@@ -28873,9 +28971,12 @@ def test_bigtable_table_admin_base_transport_with_credentials_file():
def test_bigtable_table_admin_base_transport_with_adc():
# Test the default credentials are used if credentials and credentials_file are None.
- with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch(
- "google.cloud.bigtable_admin_v2.services.bigtable_table_admin.transports.BigtableTableAdminTransport._prep_wrapped_messages"
- ) as Transport:
+ with (
+ mock.patch.object(google.auth, "default", autospec=True) as adc,
+ mock.patch(
+ "google.cloud.bigtable_admin_v2.services.bigtable_table_admin.transports.BigtableTableAdminTransport._prep_wrapped_messages"
+ ) as Transport,
+ ):
Transport.return_value = None
adc.return_value = (ga_credentials.AnonymousCredentials(), None)
transport = transports.BigtableTableAdminTransport()
@@ -28961,11 +29062,12 @@ def test_bigtable_table_admin_transport_auth_gdch_credentials(transport_class):
def test_bigtable_table_admin_transport_create_channel(transport_class, grpc_helpers):
# If credentials and host are not provided, the transport class should use
# ADC credentials.
- with mock.patch.object(
- google.auth, "default", autospec=True
- ) as adc, mock.patch.object(
- grpc_helpers, "create_channel", autospec=True
- ) as create_channel:
+ with (
+ mock.patch.object(google.auth, "default", autospec=True) as adc,
+ mock.patch.object(
+ grpc_helpers, "create_channel", autospec=True
+ ) as create_channel,
+ ):
creds = ga_credentials.AnonymousCredentials()
adc.return_value = (creds, None)
transport_class(quota_project_id="octopus", scopes=["1", "2"])
diff --git a/packages/google-cloud-bigtable/tests/unit/gapic/bigtable_v2/test_bigtable.py b/packages/google-cloud-bigtable/tests/unit/gapic/bigtable_v2/test_bigtable.py
index ea7f0955d430..2ff52a61ebe7 100644
--- a/packages/google-cloud-bigtable/tests/unit/gapic/bigtable_v2/test_bigtable.py
+++ b/packages/google-cloud-bigtable/tests/unit/gapic/bigtable_v2/test_bigtable.py
@@ -22,20 +22,19 @@
except ImportError: # pragma: NO COVER
import mock
-import grpc
-from grpc.experimental import aio
-from collections.abc import Iterable, AsyncIterable
-from google.protobuf import json_format
import json
import math
+from collections.abc import AsyncIterable, Iterable, Mapping, Sequence
+
+import grpc
import pytest
from google.api_core import api_core_version
-from proto.marshal.rules.dates import DurationRule, TimestampRule
+from google.protobuf import json_format
+from grpc.experimental import aio
from proto.marshal.rules import wrappers
-from requests import Response
-from requests import Request, PreparedRequest
+from proto.marshal.rules.dates import DurationRule, TimestampRule
+from requests import PreparedRequest, Request, Response
from requests.sessions import Session
-from google.protobuf import json_format
try:
from google.auth.aio import credentials as ga_credentials_async
@@ -44,28 +43,29 @@
except ImportError: # pragma: NO COVER
HAS_GOOGLE_AUTH_AIO = False
-from google.api_core import client_options
+import google.auth
+import google.protobuf.duration_pb2 as duration_pb2 # type: ignore
+import google.protobuf.timestamp_pb2 as timestamp_pb2 # type: ignore
+import google.type.date_pb2 as date_pb2 # type: ignore
+from google.api_core import (
+ client_options,
+ gapic_v1,
+ grpc_helpers,
+ grpc_helpers_async,
+ path_template,
+)
from google.api_core import exceptions as core_exceptions
-from google.api_core import gapic_v1
-from google.api_core import grpc_helpers
-from google.api_core import grpc_helpers_async
-from google.api_core import path_template
from google.api_core import retry as retries
from google.auth import credentials as ga_credentials
from google.auth.exceptions import MutualTLSChannelError
-from google.cloud.bigtable_v2.services.bigtable import BigtableAsyncClient
-from google.cloud.bigtable_v2.services.bigtable import BigtableClient
-from google.cloud.bigtable_v2.services.bigtable import transports
-from google.cloud.bigtable_v2.types import bigtable
-from google.cloud.bigtable_v2.types import data
-from google.cloud.bigtable_v2.types import request_stats
-from google.cloud.bigtable_v2.types import types
from google.oauth2 import service_account
-from google.protobuf import duration_pb2 # type: ignore
-from google.protobuf import timestamp_pb2 # type: ignore
-from google.type import date_pb2 # type: ignore
-import google.auth
+from google.cloud.bigtable_v2.services.bigtable import (
+ BigtableAsyncClient,
+ BigtableClient,
+ transports,
+)
+from google.cloud.bigtable_v2.types import bigtable, data, request_stats, types
CRED_INFO_JSON = {
"credential_source": "/path/to/file",
@@ -121,6 +121,7 @@ def test__get_default_mtls_endpoint():
sandbox_endpoint = "example.sandbox.googleapis.com"
sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com"
non_googleapi = "api.example.com"
+ custom_endpoint = ".custom"
assert BigtableClient._get_default_mtls_endpoint(None) is None
assert BigtableClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint
@@ -137,6 +138,7 @@ def test__get_default_mtls_endpoint():
== sandbox_mtls_endpoint
)
assert BigtableClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi
+ assert BigtableClient._get_default_mtls_endpoint(custom_endpoint) == custom_endpoint
def test__read_environment_variables():
@@ -914,10 +916,9 @@ def test_bigtable_client_get_mtls_endpoint_and_cert_source(client_class):
client_cert_source=mock_client_cert_source,
api_endpoint=mock_api_endpoint,
)
- (
- api_endpoint,
- cert_source,
- ) = client_class.get_mtls_endpoint_and_cert_source(options)
+ api_endpoint, cert_source = (
+ client_class.get_mtls_endpoint_and_cert_source(options)
+ )
assert api_endpoint == mock_api_endpoint
assert cert_source is expected_cert_source
@@ -962,10 +963,9 @@ def test_bigtable_client_get_mtls_endpoint_and_cert_source(client_class):
client_cert_source=mock_client_cert_source,
api_endpoint=mock_api_endpoint,
)
- (
- api_endpoint,
- cert_source,
- ) = client_class.get_mtls_endpoint_and_cert_source(options)
+ api_endpoint, cert_source = (
+ client_class.get_mtls_endpoint_and_cert_source(options)
+ )
assert api_endpoint == mock_api_endpoint
assert cert_source is expected_cert_source
@@ -1001,10 +1001,9 @@ def test_bigtable_client_get_mtls_endpoint_and_cert_source(client_class):
"google.auth.transport.mtls.default_client_cert_source",
return_value=mock_client_cert_source,
):
- (
- api_endpoint,
- cert_source,
- ) = client_class.get_mtls_endpoint_and_cert_source()
+ api_endpoint, cert_source = (
+ client_class.get_mtls_endpoint_and_cert_source()
+ )
assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT
assert cert_source == mock_client_cert_source
@@ -1229,13 +1228,13 @@ def test_bigtable_client_create_channel_credentials_file(
)
# test that the credentials from file are saved and used as the credentials.
- with mock.patch.object(
- google.auth, "load_credentials_from_file", autospec=True
- ) as load_creds, mock.patch.object(
- google.auth, "default", autospec=True
- ) as adc, mock.patch.object(
- grpc_helpers, "create_channel"
- ) as create_channel:
+ with (
+ mock.patch.object(
+ google.auth, "load_credentials_from_file", autospec=True
+ ) as load_creds,
+ mock.patch.object(google.auth, "default", autospec=True) as adc,
+ mock.patch.object(grpc_helpers, "create_channel") as create_channel,
+ ):
creds = ga_credentials.AnonymousCredentials()
file_creds = ga_credentials.AnonymousCredentials()
load_creds.return_value = (file_creds, None)
@@ -2496,9 +2495,9 @@ def test_check_and_mutate_row_use_cached_wrapped_rpc():
mock_rpc.return_value.name = (
"foo" # operation_request.operation in compute client(s) expect a string.
)
- client._transport._wrapped_methods[
- client._transport.check_and_mutate_row
- ] = mock_rpc
+ client._transport._wrapped_methods[client._transport.check_and_mutate_row] = (
+ mock_rpc
+ )
request = {}
client.check_and_mutate_row(request)
@@ -3196,9 +3195,9 @@ def test_read_modify_write_row_use_cached_wrapped_rpc():
mock_rpc.return_value.name = (
"foo" # operation_request.operation in compute client(s) expect a string.
)
- client._transport._wrapped_methods[
- client._transport.read_modify_write_row
- ] = mock_rpc
+ client._transport._wrapped_methods[client._transport.read_modify_write_row] = (
+ mock_rpc
+ )
request = {}
client.read_modify_write_row(request)
@@ -3862,9 +3861,9 @@ def test_read_change_stream_use_cached_wrapped_rpc():
mock_rpc.return_value.name = (
"foo" # operation_request.operation in compute client(s) expect a string.
)
- client._transport._wrapped_methods[
- client._transport.read_change_stream
- ] = mock_rpc
+ client._transport._wrapped_methods[client._transport.read_change_stream] = (
+ mock_rpc
+ )
request = {}
client.read_change_stream(request)
@@ -5297,9 +5296,9 @@ def test_check_and_mutate_row_rest_use_cached_wrapped_rpc():
mock_rpc.return_value.name = (
"foo" # operation_request.operation in compute client(s) expect a string.
)
- client._transport._wrapped_methods[
- client._transport.check_and_mutate_row
- ] = mock_rpc
+ client._transport._wrapped_methods[client._transport.check_and_mutate_row] = (
+ mock_rpc
+ )
request = {}
client.check_and_mutate_row(request)
@@ -5706,9 +5705,9 @@ def test_read_modify_write_row_rest_use_cached_wrapped_rpc():
mock_rpc.return_value.name = (
"foo" # operation_request.operation in compute client(s) expect a string.
)
- client._transport._wrapped_methods[
- client._transport.read_modify_write_row
- ] = mock_rpc
+ client._transport._wrapped_methods[client._transport.read_modify_write_row] = (
+ mock_rpc
+ )
request = {}
client.read_modify_write_row(request)
@@ -6113,9 +6112,9 @@ def test_read_change_stream_rest_use_cached_wrapped_rpc():
mock_rpc.return_value.name = (
"foo" # operation_request.operation in compute client(s) expect a string.
)
- client._transport._wrapped_methods[
- client._transport.read_change_stream
- ] = mock_rpc
+ client._transport._wrapped_methods[client._transport.read_change_stream] = (
+ mock_rpc
+ )
request = {}
client.read_change_stream(request)
@@ -9146,8 +9145,9 @@ def test_read_rows_rest_bad_request(request_type=bigtable.ReadRowsRequest):
request = request_type(**request_init)
# Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, "request") as req, pytest.raises(
- core_exceptions.BadRequest
+ with (
+ mock.patch.object(Session, "request") as req,
+ pytest.raises(core_exceptions.BadRequest),
):
# Wrap the value into a proper Response obj
response_value = mock.Mock()
@@ -9212,17 +9212,15 @@ def test_read_rows_rest_interceptors(null_interceptor):
)
client = BigtableClient(transport=transport)
- with mock.patch.object(
- type(client.transport._session), "request"
- ) as req, mock.patch.object(
- path_template, "transcode"
- ) as transcode, mock.patch.object(
- transports.BigtableRestInterceptor, "post_read_rows"
- ) as post, mock.patch.object(
- transports.BigtableRestInterceptor, "post_read_rows_with_metadata"
- ) as post_with_metadata, mock.patch.object(
- transports.BigtableRestInterceptor, "pre_read_rows"
- ) as pre:
+ with (
+ mock.patch.object(type(client.transport._session), "request") as req,
+ mock.patch.object(path_template, "transcode") as transcode,
+ mock.patch.object(transports.BigtableRestInterceptor, "post_read_rows") as post,
+ mock.patch.object(
+ transports.BigtableRestInterceptor, "post_read_rows_with_metadata"
+ ) as post_with_metadata,
+ mock.patch.object(transports.BigtableRestInterceptor, "pre_read_rows") as pre,
+ ):
pre.assert_not_called()
post.assert_not_called()
post_with_metadata.assert_not_called()
@@ -9271,8 +9269,9 @@ def test_sample_row_keys_rest_bad_request(request_type=bigtable.SampleRowKeysReq
request = request_type(**request_init)
# Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, "request") as req, pytest.raises(
- core_exceptions.BadRequest
+ with (
+ mock.patch.object(Session, "request") as req,
+ pytest.raises(core_exceptions.BadRequest),
):
# Wrap the value into a proper Response obj
response_value = mock.Mock()
@@ -9339,17 +9338,19 @@ def test_sample_row_keys_rest_interceptors(null_interceptor):
)
client = BigtableClient(transport=transport)
- with mock.patch.object(
- type(client.transport._session), "request"
- ) as req, mock.patch.object(
- path_template, "transcode"
- ) as transcode, mock.patch.object(
- transports.BigtableRestInterceptor, "post_sample_row_keys"
- ) as post, mock.patch.object(
- transports.BigtableRestInterceptor, "post_sample_row_keys_with_metadata"
- ) as post_with_metadata, mock.patch.object(
- transports.BigtableRestInterceptor, "pre_sample_row_keys"
- ) as pre:
+ with (
+ mock.patch.object(type(client.transport._session), "request") as req,
+ mock.patch.object(path_template, "transcode") as transcode,
+ mock.patch.object(
+ transports.BigtableRestInterceptor, "post_sample_row_keys"
+ ) as post,
+ mock.patch.object(
+ transports.BigtableRestInterceptor, "post_sample_row_keys_with_metadata"
+ ) as post_with_metadata,
+ mock.patch.object(
+ transports.BigtableRestInterceptor, "pre_sample_row_keys"
+ ) as pre,
+ ):
pre.assert_not_called()
post.assert_not_called()
post_with_metadata.assert_not_called()
@@ -9400,8 +9401,9 @@ def test_mutate_row_rest_bad_request(request_type=bigtable.MutateRowRequest):
request = request_type(**request_init)
# Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, "request") as req, pytest.raises(
- core_exceptions.BadRequest
+ with (
+ mock.patch.object(Session, "request") as req,
+ pytest.raises(core_exceptions.BadRequest),
):
# Wrap the value into a proper Response obj
response_value = mock.Mock()
@@ -9459,17 +9461,17 @@ def test_mutate_row_rest_interceptors(null_interceptor):
)
client = BigtableClient(transport=transport)
- with mock.patch.object(
- type(client.transport._session), "request"
- ) as req, mock.patch.object(
- path_template, "transcode"
- ) as transcode, mock.patch.object(
- transports.BigtableRestInterceptor, "post_mutate_row"
- ) as post, mock.patch.object(
- transports.BigtableRestInterceptor, "post_mutate_row_with_metadata"
- ) as post_with_metadata, mock.patch.object(
- transports.BigtableRestInterceptor, "pre_mutate_row"
- ) as pre:
+ with (
+ mock.patch.object(type(client.transport._session), "request") as req,
+ mock.patch.object(path_template, "transcode") as transcode,
+ mock.patch.object(
+ transports.BigtableRestInterceptor, "post_mutate_row"
+ ) as post,
+ mock.patch.object(
+ transports.BigtableRestInterceptor, "post_mutate_row_with_metadata"
+ ) as post_with_metadata,
+ mock.patch.object(transports.BigtableRestInterceptor, "pre_mutate_row") as pre,
+ ):
pre.assert_not_called()
post.assert_not_called()
post_with_metadata.assert_not_called()
@@ -9518,8 +9520,9 @@ def test_mutate_rows_rest_bad_request(request_type=bigtable.MutateRowsRequest):
request = request_type(**request_init)
# Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, "request") as req, pytest.raises(
- core_exceptions.BadRequest
+ with (
+ mock.patch.object(Session, "request") as req,
+ pytest.raises(core_exceptions.BadRequest),
):
# Wrap the value into a proper Response obj
response_value = mock.Mock()
@@ -9581,17 +9584,17 @@ def test_mutate_rows_rest_interceptors(null_interceptor):
)
client = BigtableClient(transport=transport)
- with mock.patch.object(
- type(client.transport._session), "request"
- ) as req, mock.patch.object(
- path_template, "transcode"
- ) as transcode, mock.patch.object(
- transports.BigtableRestInterceptor, "post_mutate_rows"
- ) as post, mock.patch.object(
- transports.BigtableRestInterceptor, "post_mutate_rows_with_metadata"
- ) as post_with_metadata, mock.patch.object(
- transports.BigtableRestInterceptor, "pre_mutate_rows"
- ) as pre:
+ with (
+ mock.patch.object(type(client.transport._session), "request") as req,
+ mock.patch.object(path_template, "transcode") as transcode,
+ mock.patch.object(
+ transports.BigtableRestInterceptor, "post_mutate_rows"
+ ) as post,
+ mock.patch.object(
+ transports.BigtableRestInterceptor, "post_mutate_rows_with_metadata"
+ ) as post_with_metadata,
+ mock.patch.object(transports.BigtableRestInterceptor, "pre_mutate_rows") as pre,
+ ):
pre.assert_not_called()
post.assert_not_called()
post_with_metadata.assert_not_called()
@@ -9644,8 +9647,9 @@ def test_check_and_mutate_row_rest_bad_request(
request = request_type(**request_init)
# Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, "request") as req, pytest.raises(
- core_exceptions.BadRequest
+ with (
+ mock.patch.object(Session, "request") as req,
+ pytest.raises(core_exceptions.BadRequest),
):
# Wrap the value into a proper Response obj
response_value = mock.Mock()
@@ -9706,17 +9710,20 @@ def test_check_and_mutate_row_rest_interceptors(null_interceptor):
)
client = BigtableClient(transport=transport)
- with mock.patch.object(
- type(client.transport._session), "request"
- ) as req, mock.patch.object(
- path_template, "transcode"
- ) as transcode, mock.patch.object(
- transports.BigtableRestInterceptor, "post_check_and_mutate_row"
- ) as post, mock.patch.object(
- transports.BigtableRestInterceptor, "post_check_and_mutate_row_with_metadata"
- ) as post_with_metadata, mock.patch.object(
- transports.BigtableRestInterceptor, "pre_check_and_mutate_row"
- ) as pre:
+ with (
+ mock.patch.object(type(client.transport._session), "request") as req,
+ mock.patch.object(path_template, "transcode") as transcode,
+ mock.patch.object(
+ transports.BigtableRestInterceptor, "post_check_and_mutate_row"
+ ) as post,
+ mock.patch.object(
+ transports.BigtableRestInterceptor,
+ "post_check_and_mutate_row_with_metadata",
+ ) as post_with_metadata,
+ mock.patch.object(
+ transports.BigtableRestInterceptor, "pre_check_and_mutate_row"
+ ) as pre,
+ ):
pre.assert_not_called()
post.assert_not_called()
post_with_metadata.assert_not_called()
@@ -9769,8 +9776,9 @@ def test_ping_and_warm_rest_bad_request(request_type=bigtable.PingAndWarmRequest
request = request_type(**request_init)
# Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, "request") as req, pytest.raises(
- core_exceptions.BadRequest
+ with (
+ mock.patch.object(Session, "request") as req,
+ pytest.raises(core_exceptions.BadRequest),
):
# Wrap the value into a proper Response obj
response_value = mock.Mock()
@@ -9828,17 +9836,19 @@ def test_ping_and_warm_rest_interceptors(null_interceptor):
)
client = BigtableClient(transport=transport)
- with mock.patch.object(
- type(client.transport._session), "request"
- ) as req, mock.patch.object(
- path_template, "transcode"
- ) as transcode, mock.patch.object(
- transports.BigtableRestInterceptor, "post_ping_and_warm"
- ) as post, mock.patch.object(
- transports.BigtableRestInterceptor, "post_ping_and_warm_with_metadata"
- ) as post_with_metadata, mock.patch.object(
- transports.BigtableRestInterceptor, "pre_ping_and_warm"
- ) as pre:
+ with (
+ mock.patch.object(type(client.transport._session), "request") as req,
+ mock.patch.object(path_template, "transcode") as transcode,
+ mock.patch.object(
+ transports.BigtableRestInterceptor, "post_ping_and_warm"
+ ) as post,
+ mock.patch.object(
+ transports.BigtableRestInterceptor, "post_ping_and_warm_with_metadata"
+ ) as post_with_metadata,
+ mock.patch.object(
+ transports.BigtableRestInterceptor, "pre_ping_and_warm"
+ ) as pre,
+ ):
pre.assert_not_called()
post.assert_not_called()
post_with_metadata.assert_not_called()
@@ -9891,8 +9901,9 @@ def test_read_modify_write_row_rest_bad_request(
request = request_type(**request_init)
# Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, "request") as req, pytest.raises(
- core_exceptions.BadRequest
+ with (
+ mock.patch.object(Session, "request") as req,
+ pytest.raises(core_exceptions.BadRequest),
):
# Wrap the value into a proper Response obj
response_value = mock.Mock()
@@ -9950,17 +9961,20 @@ def test_read_modify_write_row_rest_interceptors(null_interceptor):
)
client = BigtableClient(transport=transport)
- with mock.patch.object(
- type(client.transport._session), "request"
- ) as req, mock.patch.object(
- path_template, "transcode"
- ) as transcode, mock.patch.object(
- transports.BigtableRestInterceptor, "post_read_modify_write_row"
- ) as post, mock.patch.object(
- transports.BigtableRestInterceptor, "post_read_modify_write_row_with_metadata"
- ) as post_with_metadata, mock.patch.object(
- transports.BigtableRestInterceptor, "pre_read_modify_write_row"
- ) as pre:
+ with (
+ mock.patch.object(type(client.transport._session), "request") as req,
+ mock.patch.object(path_template, "transcode") as transcode,
+ mock.patch.object(
+ transports.BigtableRestInterceptor, "post_read_modify_write_row"
+ ) as post,
+ mock.patch.object(
+ transports.BigtableRestInterceptor,
+ "post_read_modify_write_row_with_metadata",
+ ) as post_with_metadata,
+ mock.patch.object(
+ transports.BigtableRestInterceptor, "pre_read_modify_write_row"
+ ) as pre,
+ ):
pre.assert_not_called()
post.assert_not_called()
post_with_metadata.assert_not_called()
@@ -10018,8 +10032,9 @@ def test_generate_initial_change_stream_partitions_rest_bad_request(
request = request_type(**request_init)
# Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, "request") as req, pytest.raises(
- core_exceptions.BadRequest
+ with (
+ mock.patch.object(Session, "request") as req,
+ pytest.raises(core_exceptions.BadRequest),
):
# Wrap the value into a proper Response obj
response_value = mock.Mock()
@@ -10083,20 +10098,22 @@ def test_generate_initial_change_stream_partitions_rest_interceptors(null_interc
)
client = BigtableClient(transport=transport)
- with mock.patch.object(
- type(client.transport._session), "request"
- ) as req, mock.patch.object(
- path_template, "transcode"
- ) as transcode, mock.patch.object(
- transports.BigtableRestInterceptor,
- "post_generate_initial_change_stream_partitions",
- ) as post, mock.patch.object(
- transports.BigtableRestInterceptor,
- "post_generate_initial_change_stream_partitions_with_metadata",
- ) as post_with_metadata, mock.patch.object(
- transports.BigtableRestInterceptor,
- "pre_generate_initial_change_stream_partitions",
- ) as pre:
+ with (
+ mock.patch.object(type(client.transport._session), "request") as req,
+ mock.patch.object(path_template, "transcode") as transcode,
+ mock.patch.object(
+ transports.BigtableRestInterceptor,
+ "post_generate_initial_change_stream_partitions",
+ ) as post,
+ mock.patch.object(
+ transports.BigtableRestInterceptor,
+ "post_generate_initial_change_stream_partitions_with_metadata",
+ ) as post_with_metadata,
+ mock.patch.object(
+ transports.BigtableRestInterceptor,
+ "pre_generate_initial_change_stream_partitions",
+ ) as pre,
+ ):
pre.assert_not_called()
post.assert_not_called()
post_with_metadata.assert_not_called()
@@ -10154,8 +10171,9 @@ def test_read_change_stream_rest_bad_request(
request = request_type(**request_init)
# Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, "request") as req, pytest.raises(
- core_exceptions.BadRequest
+ with (
+ mock.patch.object(Session, "request") as req,
+ pytest.raises(core_exceptions.BadRequest),
):
# Wrap the value into a proper Response obj
response_value = mock.Mock()
@@ -10217,17 +10235,19 @@ def test_read_change_stream_rest_interceptors(null_interceptor):
)
client = BigtableClient(transport=transport)
- with mock.patch.object(
- type(client.transport._session), "request"
- ) as req, mock.patch.object(
- path_template, "transcode"
- ) as transcode, mock.patch.object(
- transports.BigtableRestInterceptor, "post_read_change_stream"
- ) as post, mock.patch.object(
- transports.BigtableRestInterceptor, "post_read_change_stream_with_metadata"
- ) as post_with_metadata, mock.patch.object(
- transports.BigtableRestInterceptor, "pre_read_change_stream"
- ) as pre:
+ with (
+ mock.patch.object(type(client.transport._session), "request") as req,
+ mock.patch.object(path_template, "transcode") as transcode,
+ mock.patch.object(
+ transports.BigtableRestInterceptor, "post_read_change_stream"
+ ) as post,
+ mock.patch.object(
+ transports.BigtableRestInterceptor, "post_read_change_stream_with_metadata"
+ ) as post_with_metadata,
+ mock.patch.object(
+ transports.BigtableRestInterceptor, "pre_read_change_stream"
+ ) as pre,
+ ):
pre.assert_not_called()
post.assert_not_called()
post_with_metadata.assert_not_called()
@@ -10280,8 +10300,9 @@ def test_prepare_query_rest_bad_request(request_type=bigtable.PrepareQueryReques
request = request_type(**request_init)
# Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, "request") as req, pytest.raises(
- core_exceptions.BadRequest
+ with (
+ mock.patch.object(Session, "request") as req,
+ pytest.raises(core_exceptions.BadRequest),
):
# Wrap the value into a proper Response obj
response_value = mock.Mock()
@@ -10342,17 +10363,19 @@ def test_prepare_query_rest_interceptors(null_interceptor):
)
client = BigtableClient(transport=transport)
- with mock.patch.object(
- type(client.transport._session), "request"
- ) as req, mock.patch.object(
- path_template, "transcode"
- ) as transcode, mock.patch.object(
- transports.BigtableRestInterceptor, "post_prepare_query"
- ) as post, mock.patch.object(
- transports.BigtableRestInterceptor, "post_prepare_query_with_metadata"
- ) as post_with_metadata, mock.patch.object(
- transports.BigtableRestInterceptor, "pre_prepare_query"
- ) as pre:
+ with (
+ mock.patch.object(type(client.transport._session), "request") as req,
+ mock.patch.object(path_template, "transcode") as transcode,
+ mock.patch.object(
+ transports.BigtableRestInterceptor, "post_prepare_query"
+ ) as post,
+ mock.patch.object(
+ transports.BigtableRestInterceptor, "post_prepare_query_with_metadata"
+ ) as post_with_metadata,
+ mock.patch.object(
+ transports.BigtableRestInterceptor, "pre_prepare_query"
+ ) as pre,
+ ):
pre.assert_not_called()
post.assert_not_called()
post_with_metadata.assert_not_called()
@@ -10403,8 +10426,9 @@ def test_execute_query_rest_bad_request(request_type=bigtable.ExecuteQueryReques
request = request_type(**request_init)
# Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, "request") as req, pytest.raises(
- core_exceptions.BadRequest
+ with (
+ mock.patch.object(Session, "request") as req,
+ pytest.raises(core_exceptions.BadRequest),
):
# Wrap the value into a proper Response obj
response_value = mock.Mock()
@@ -10466,17 +10490,19 @@ def test_execute_query_rest_interceptors(null_interceptor):
)
client = BigtableClient(transport=transport)
- with mock.patch.object(
- type(client.transport._session), "request"
- ) as req, mock.patch.object(
- path_template, "transcode"
- ) as transcode, mock.patch.object(
- transports.BigtableRestInterceptor, "post_execute_query"
- ) as post, mock.patch.object(
- transports.BigtableRestInterceptor, "post_execute_query_with_metadata"
- ) as post_with_metadata, mock.patch.object(
- transports.BigtableRestInterceptor, "pre_execute_query"
- ) as pre:
+ with (
+ mock.patch.object(type(client.transport._session), "request") as req,
+ mock.patch.object(path_template, "transcode") as transcode,
+ mock.patch.object(
+ transports.BigtableRestInterceptor, "post_execute_query"
+ ) as post,
+ mock.patch.object(
+ transports.BigtableRestInterceptor, "post_execute_query_with_metadata"
+ ) as post_with_metadata,
+ mock.patch.object(
+ transports.BigtableRestInterceptor, "pre_execute_query"
+ ) as pre,
+ ):
pre.assert_not_called()
post.assert_not_called()
post_with_metadata.assert_not_called()
@@ -11625,11 +11651,14 @@ def test_bigtable_base_transport():
def test_bigtable_base_transport_with_credentials_file():
# Instantiate the base transport with a credentials file
- with mock.patch.object(
- google.auth, "load_credentials_from_file", autospec=True
- ) as load_creds, mock.patch(
- "google.cloud.bigtable_v2.services.bigtable.transports.BigtableTransport._prep_wrapped_messages"
- ) as Transport:
+ with (
+ mock.patch.object(
+ google.auth, "load_credentials_from_file", autospec=True
+ ) as load_creds,
+ mock.patch(
+ "google.cloud.bigtable_v2.services.bigtable.transports.BigtableTransport._prep_wrapped_messages"
+ ) as Transport,
+ ):
Transport.return_value = None
load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
transport = transports.BigtableTransport(
@@ -11653,9 +11682,12 @@ def test_bigtable_base_transport_with_credentials_file():
def test_bigtable_base_transport_with_adc():
# Test the default credentials are used if credentials and credentials_file are None.
- with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch(
- "google.cloud.bigtable_v2.services.bigtable.transports.BigtableTransport._prep_wrapped_messages"
- ) as Transport:
+ with (
+ mock.patch.object(google.auth, "default", autospec=True) as adc,
+ mock.patch(
+ "google.cloud.bigtable_v2.services.bigtable.transports.BigtableTransport._prep_wrapped_messages"
+ ) as Transport,
+ ):
Transport.return_value = None
adc.return_value = (ga_credentials.AnonymousCredentials(), None)
transport = transports.BigtableTransport()
@@ -11741,11 +11773,12 @@ def test_bigtable_transport_auth_gdch_credentials(transport_class):
def test_bigtable_transport_create_channel(transport_class, grpc_helpers):
# If credentials and host are not provided, the transport class should use
# ADC credentials.
- with mock.patch.object(
- google.auth, "default", autospec=True
- ) as adc, mock.patch.object(
- grpc_helpers, "create_channel", autospec=True
- ) as create_channel:
+ with (
+ mock.patch.object(google.auth, "default", autospec=True) as adc,
+ mock.patch.object(
+ grpc_helpers, "create_channel", autospec=True
+ ) as create_channel,
+ ):
creds = ga_credentials.AnonymousCredentials()
adc.return_value = (creds, None)
transport_class(quota_project_id="octopus", scopes=["1", "2"])
diff --git a/packages/google-cloud-bigtable/tests/unit/test_sql_routing_parameters.py b/packages/google-cloud-bigtable/tests/unit/test_sql_routing_parameters.py
index fa9316369508..7e8842216cf8 100644
--- a/packages/google-cloud-bigtable/tests/unit/test_sql_routing_parameters.py
+++ b/packages/google-cloud-bigtable/tests/unit/test_sql_routing_parameters.py
@@ -21,7 +21,6 @@
except ImportError: # pragma: NO COVER
import mock
import pytest
-
from grpc.experimental import aio
try:
@@ -31,9 +30,9 @@
except ImportError: # pragma: NO COVER
HAS_GOOGLE_AUTH_AIO = False
-from google.api_core import gapic_v1
-from google.api_core import grpc_helpers_async
+from google.api_core import gapic_v1, grpc_helpers_async
from google.auth import credentials as ga_credentials
+
from google.cloud.bigtable_v2.services.bigtable.async_client import BigtableAsyncClient
from google.cloud.bigtable_v2.services.bigtable.client import BigtableClient
from google.cloud.bigtable_v2.types import bigtable
diff --git a/packages/google-cloud-bigtable/tests/unit/v2_client/_testing.py b/packages/google-cloud-bigtable/tests/unit/v2_client/_testing.py
index 302d33ac1540..18575350e5cf 100644
--- a/packages/google-cloud-bigtable/tests/unit/v2_client/_testing.py
+++ b/packages/google-cloud-bigtable/tests/unit/v2_client/_testing.py
@@ -14,7 +14,6 @@
"""Mocks used to emulate gRPC generated objects."""
-
import mock
diff --git a/packages/google-cloud-bigtable/tests/unit/v2_client/test_app_profile.py b/packages/google-cloud-bigtable/tests/unit/v2_client/test_app_profile.py
index 660ee78998b0..2a99621eaf91 100644
--- a/packages/google-cloud-bigtable/tests/unit/v2_client/test_app_profile.py
+++ b/packages/google-cloud-bigtable/tests/unit/v2_client/test_app_profile.py
@@ -165,9 +165,9 @@ def test_app_profile___ne__():
def test_app_profile_from_pb_success_w_routing_any():
- from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2
from google.cloud.bigtable.app_profile import AppProfile
from google.cloud.bigtable.enums import RoutingPolicyType
+ from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2
client = _Client(PROJECT)
instance = _Instance(INSTANCE_ID, client)
@@ -194,9 +194,9 @@ def test_app_profile_from_pb_success_w_routing_any():
def test_app_profile_from_pb_success_w_routing_any_multi_cluster_ids():
- from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2
from google.cloud.bigtable.app_profile import AppProfile
from google.cloud.bigtable.enums import RoutingPolicyType
+ from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2
client = _Client(PROJECT)
instance = _Instance(INSTANCE_ID, client)
@@ -225,9 +225,9 @@ def test_app_profile_from_pb_success_w_routing_any_multi_cluster_ids():
def test_app_profile_from_pb_success_w_routing_single():
- from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2
from google.cloud.bigtable.app_profile import AppProfile
from google.cloud.bigtable.enums import RoutingPolicyType
+ from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2
client = _Client(PROJECT)
instance = _Instance(INSTANCE_ID, client)
@@ -258,8 +258,8 @@ def test_app_profile_from_pb_success_w_routing_single():
def test_app_profile_from_pb_w_bad_app_profile_name():
- from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2
from google.cloud.bigtable.app_profile import AppProfile
+ from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2
bad_app_profile_name = "BAD_NAME"
@@ -270,8 +270,8 @@ def test_app_profile_from_pb_w_bad_app_profile_name():
def test_app_profile_from_pb_w_instance_id_mistmatch():
- from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2
from google.cloud.bigtable.app_profile import AppProfile
+ from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2
ALT_INSTANCE_ID = "ALT_INSTANCE_ID"
client = _Client(PROJECT)
@@ -285,8 +285,8 @@ def test_app_profile_from_pb_w_instance_id_mistmatch():
def test_app_profile_from_pb_w_project_mistmatch():
- from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2
from google.cloud.bigtable.app_profile import AppProfile
+ from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2
ALT_PROJECT = "ALT_PROJECT"
client = _Client(project=ALT_PROJECT)
@@ -300,11 +300,11 @@ def test_app_profile_from_pb_w_project_mistmatch():
def test_app_profile_reload_w_routing_any():
+ from google.cloud.bigtable.enums import RoutingPolicyType
from google.cloud.bigtable_admin_v2.services.bigtable_instance_admin import (
BigtableInstanceAdminClient,
)
from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2
- from google.cloud.bigtable.enums import RoutingPolicyType
api = mock.create_autospec(BigtableInstanceAdminClient)
credentials = _make_credentials()
@@ -362,11 +362,12 @@ def test_app_profile_reload_w_routing_any():
def test_app_profile_exists():
+ from google.api_core import exceptions
+
from google.cloud.bigtable_admin_v2.services.bigtable_instance_admin import (
BigtableInstanceAdminClient,
)
from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2
- from google.api_core import exceptions
instance_api = mock.create_autospec(BigtableInstanceAdminClient)
credentials = _make_credentials()
@@ -397,11 +398,11 @@ def test_app_profile_exists():
def test_app_profile_create_w_routing_any():
+ from google.cloud.bigtable.app_profile import AppProfile
+ from google.cloud.bigtable.enums import RoutingPolicyType
from google.cloud.bigtable_admin_v2.services.bigtable_instance_admin import (
BigtableInstanceAdminClient,
)
- from google.cloud.bigtable.app_profile import AppProfile
- from google.cloud.bigtable.enums import RoutingPolicyType
credentials = _make_credentials()
client = _make_client(project=PROJECT, credentials=credentials, admin=True)
@@ -458,11 +459,11 @@ def test_app_profile_create_w_routing_any():
def test_app_profile_create_w_routing_single():
+ from google.cloud.bigtable.app_profile import AppProfile
+ from google.cloud.bigtable.enums import RoutingPolicyType
from google.cloud.bigtable_admin_v2.services.bigtable_instance_admin import (
BigtableInstanceAdminClient,
)
- from google.cloud.bigtable.app_profile import AppProfile
- from google.cloud.bigtable.enums import RoutingPolicyType
credentials = _make_credentials()
client = _make_client(project=PROJECT, credentials=credentials, admin=True)
@@ -529,15 +530,16 @@ def test_app_profile_create_w_wrong_routing_policy():
def test_app_profile_update_w_routing_any():
from google.longrunning import operations_pb2
+ from google.protobuf import field_mask_pb2
from google.protobuf.any_pb2 import Any
- from google.cloud.bigtable_admin_v2.types import (
- bigtable_instance_admin as messages_v2_pb2,
- )
+
from google.cloud.bigtable.enums import RoutingPolicyType
from google.cloud.bigtable_admin_v2.services.bigtable_instance_admin import (
BigtableInstanceAdminClient,
)
- from google.protobuf import field_mask_pb2
+ from google.cloud.bigtable_admin_v2.types import (
+ bigtable_instance_admin as messages_v2_pb2,
+ )
credentials = _make_credentials()
client = _make_client(project=PROJECT, credentials=credentials, admin=True)
@@ -604,15 +606,16 @@ def test_app_profile_update_w_routing_any():
def test_app_profile_update_w_routing_any_multi_cluster_ids():
from google.longrunning import operations_pb2
+ from google.protobuf import field_mask_pb2
from google.protobuf.any_pb2 import Any
- from google.cloud.bigtable_admin_v2.types import (
- bigtable_instance_admin as messages_v2_pb2,
- )
+
from google.cloud.bigtable.enums import RoutingPolicyType
from google.cloud.bigtable_admin_v2.services.bigtable_instance_admin import (
BigtableInstanceAdminClient,
)
- from google.protobuf import field_mask_pb2
+ from google.cloud.bigtable_admin_v2.types import (
+ bigtable_instance_admin as messages_v2_pb2,
+ )
credentials = _make_credentials()
client = _make_client(project=PROJECT, credentials=credentials, admin=True)
@@ -680,15 +683,16 @@ def test_app_profile_update_w_routing_any_multi_cluster_ids():
def test_app_profile_update_w_routing_single():
from google.longrunning import operations_pb2
+ from google.protobuf import field_mask_pb2
from google.protobuf.any_pb2 import Any
- from google.cloud.bigtable_admin_v2.types import (
- bigtable_instance_admin as messages_v2_pb2,
- )
+
from google.cloud.bigtable.enums import RoutingPolicyType
from google.cloud.bigtable_admin_v2.services.bigtable_instance_admin import (
BigtableInstanceAdminClient,
)
- from google.protobuf import field_mask_pb2
+ from google.cloud.bigtable_admin_v2.types import (
+ bigtable_instance_admin as messages_v2_pb2,
+ )
credentials = _make_credentials()
client = _make_client(project=PROJECT, credentials=credentials, admin=True)
@@ -752,6 +756,7 @@ def test_app_profile_update_w_wrong_routing_policy():
def test_app_profile_delete():
from google.protobuf import empty_pb2
+
from google.cloud.bigtable_admin_v2.services.bigtable_instance_admin import (
BigtableInstanceAdminClient,
)
diff --git a/packages/google-cloud-bigtable/tests/unit/v2_client/test_backup.py b/packages/google-cloud-bigtable/tests/unit/v2_client/test_backup.py
index a5d205af652e..f8f96dcff22b 100644
--- a/packages/google-cloud-bigtable/tests/unit/v2_client/test_backup.py
+++ b/packages/google-cloud-bigtable/tests/unit/v2_client/test_backup.py
@@ -100,8 +100,8 @@ def test_backup_constructor_explicit():
def test_backup_from_pb_w_project_mismatch():
- from google.cloud.bigtable_admin_v2.types import table
from google.cloud.bigtable.backup import Backup
+ from google.cloud.bigtable_admin_v2.types import table
alt_project_id = "alt-project-id"
client = _Client(project=alt_project_id)
@@ -113,8 +113,8 @@ def test_backup_from_pb_w_project_mismatch():
def test_backup_from_pb_w_instance_mismatch():
- from google.cloud.bigtable_admin_v2.types import table
from google.cloud.bigtable.backup import Backup
+ from google.cloud.bigtable_admin_v2.types import table
alt_instance = "/projects/%s/instances/alt-instance" % PROJECT_ID
client = _Client()
@@ -126,8 +126,8 @@ def test_backup_from_pb_w_instance_mismatch():
def test_backup_from_pb_w_bad_name():
- from google.cloud.bigtable_admin_v2.types import table
from google.cloud.bigtable.backup import Backup
+ from google.cloud.bigtable_admin_v2.types import table
client = _Client()
instance = _Instance(INSTANCE_NAME, client)
@@ -138,12 +138,13 @@ def test_backup_from_pb_w_bad_name():
def test_backup_from_pb_success():
+ from google.cloud._helpers import _datetime_to_pb_timestamp
+ from google.rpc.code_pb2 import Code
+
+ from google.cloud.bigtable.backup import Backup
from google.cloud.bigtable.encryption_info import EncryptionInfo
from google.cloud.bigtable.error import Status
from google.cloud.bigtable_admin_v2.types import table
- from google.cloud.bigtable.backup import Backup
- from google.cloud._helpers import _datetime_to_pb_timestamp
- from google.rpc.code_pb2 import Code
client = _Client()
instance = _Instance(INSTANCE_NAME, client)
@@ -345,9 +346,9 @@ def test_backup___ne__():
def test_backup_create_w_grpc_error():
- from google.api_core.exceptions import GoogleAPICallError
- from google.api_core.exceptions import Unknown
+ from google.api_core.exceptions import GoogleAPICallError, Unknown
from google.cloud._helpers import _datetime_to_pb_timestamp
+
from google.cloud.bigtable_admin_v2.types import table
client = _Client()
@@ -377,9 +378,10 @@ def test_backup_create_w_grpc_error():
def test_backup_create_w_already_exists():
from google.cloud._helpers import _datetime_to_pb_timestamp
- from google.cloud.bigtable_admin_v2.types import table
from google.cloud.exceptions import Conflict
+ from google.cloud.bigtable_admin_v2.types import table
+
client = _Client()
api = client.table_admin_client = _make_table_admin_client()
api.create_backup.side_effect = Conflict("testing")
@@ -407,9 +409,10 @@ def test_backup_create_w_already_exists():
def test_backup_create_w_instance_not_found():
from google.cloud._helpers import _datetime_to_pb_timestamp
- from google.cloud.bigtable_admin_v2.types import table
from google.cloud.exceptions import NotFound
+ from google.cloud.bigtable_admin_v2.types import table
+
client = _Client()
api = client.table_admin_client = _make_table_admin_client()
api.create_backup.side_effect = NotFound("testing")
@@ -471,8 +474,9 @@ def test_backup_create_w_expire_time_not_set():
def test_backup_create_success():
from google.cloud._helpers import _datetime_to_pb_timestamp
- from google.cloud.bigtable_admin_v2.types import table
+
from google.cloud.bigtable import Client
+ from google.cloud.bigtable_admin_v2.types import table
op_future = object()
credentials = _make_credentials()
@@ -503,9 +507,10 @@ def test_backup_create_success():
def test_backup_get():
- from google.cloud.bigtable_admin_v2.types import table
from google.cloud._helpers import _datetime_to_pb_timestamp
+ from google.cloud.bigtable_admin_v2.types import table
+
timestamp = _datetime_to_pb_timestamp(_make_timestamp())
state = table.Backup.State.READY
@@ -529,9 +534,10 @@ def test_backup_get():
def test_backup_reload():
- from google.cloud.bigtable_admin_v2.types import table
from google.cloud._helpers import _datetime_to_pb_timestamp
+ from google.cloud.bigtable_admin_v2.types import table
+
timestamp = _datetime_to_pb_timestamp(_make_timestamp())
state = table.Backup.State.READY
@@ -655,9 +661,10 @@ def test_backup_delete_success():
def test_backup_update_expire_time_w_grpc_error():
from google.api_core.exceptions import Unknown
from google.cloud._helpers import _datetime_to_pb_timestamp
- from google.cloud.bigtable_admin_v2.types import table
from google.protobuf import field_mask_pb2
+ from google.cloud.bigtable_admin_v2.types import table
+
client = _Client()
api = client.table_admin_client = _make_table_admin_client()
api.update_backup.side_effect = Unknown("testing")
@@ -681,9 +688,10 @@ def test_backup_update_expire_time_w_grpc_error():
def test_backup_update_expire_time_w_not_found():
from google.api_core.exceptions import NotFound
from google.cloud._helpers import _datetime_to_pb_timestamp
- from google.cloud.bigtable_admin_v2.types import table
from google.protobuf import field_mask_pb2
+ from google.cloud.bigtable_admin_v2.types import table
+
client = _Client()
api = client.table_admin_client = _make_table_admin_client()
api.update_backup.side_effect = NotFound("testing")
@@ -706,9 +714,10 @@ def test_backup_update_expire_time_w_not_found():
def test_backup_update_expire_time_success():
from google.cloud._helpers import _datetime_to_pb_timestamp
- from google.cloud.bigtable_admin_v2.types import table
from google.protobuf import field_mask_pb2
+ from google.cloud.bigtable_admin_v2.types import table
+
client = _Client()
api = client.table_admin_client = _make_table_admin_client()
api.update_backup.return_type = table.Backup(name=BACKUP_NAME)
@@ -729,8 +738,7 @@ def test_backup_update_expire_time_success():
def test_backup_restore_w_grpc_error():
- from google.api_core.exceptions import GoogleAPICallError
- from google.api_core.exceptions import Unknown
+ from google.api_core.exceptions import GoogleAPICallError, Unknown
client = _Client()
api = client.table_admin_client = _make_table_admin_client()
@@ -805,12 +813,13 @@ def test_backup_restore_to_another_instance():
def test_backup_get_iam_policy():
+ from google.iam.v1 import policy_pb2
+
from google.cloud.bigtable.client import Client
+ from google.cloud.bigtable.policy import BIGTABLE_ADMIN_ROLE
from google.cloud.bigtable_admin_v2.services.bigtable_table_admin import (
BaseBigtableTableAdminClient,
)
- from google.iam.v1 import policy_pb2
- from google.cloud.bigtable.policy import BIGTABLE_ADMIN_ROLE
credentials = _make_credentials()
client = Client(project=PROJECT_ID, credentials=credentials, admin=True)
@@ -841,13 +850,13 @@ def test_backup_get_iam_policy():
def test_backup_set_iam_policy():
+ from google.iam.v1 import policy_pb2
+
from google.cloud.bigtable.client import Client
+ from google.cloud.bigtable.policy import BIGTABLE_ADMIN_ROLE, Policy
from google.cloud.bigtable_admin_v2.services.bigtable_table_admin import (
BaseBigtableTableAdminClient,
)
- from google.iam.v1 import policy_pb2
- from google.cloud.bigtable.policy import Policy
- from google.cloud.bigtable.policy import BIGTABLE_ADMIN_ROLE
credentials = _make_credentials()
client = Client(project=PROJECT_ID, credentials=credentials, admin=True)
@@ -886,11 +895,12 @@ def test_backup_set_iam_policy():
def test_backup_test_iam_permissions():
+ from google.iam.v1 import iam_policy_pb2
+
from google.cloud.bigtable.client import Client
from google.cloud.bigtable_admin_v2.services.bigtable_table_admin import (
BaseBigtableTableAdminClient,
)
- from google.iam.v1 import iam_policy_pb2
credentials = _make_credentials()
client = Client(project=PROJECT_ID, credentials=credentials, admin=True)
diff --git a/packages/google-cloud-bigtable/tests/unit/v2_client/test_batcher.py b/packages/google-cloud-bigtable/tests/unit/v2_client/test_batcher.py
index fcf6069725fc..4090c3c81cea 100644
--- a/packages/google-cloud-bigtable/tests/unit/v2_client/test_batcher.py
+++ b/packages/google-cloud-bigtable/tests/unit/v2_client/test_batcher.py
@@ -13,17 +13,17 @@
# limitations under the License.
-import mock
import time
+import mock
import pytest
-from google.cloud.bigtable.row import DirectRow
from google.cloud.bigtable.batcher import (
- _FlowControl,
MutationsBatcher,
MutationsBatchError,
+ _FlowControl,
)
+from google.cloud.bigtable.row import DirectRow
TABLE_ID = "table-id"
TABLE_NAME = "/tables/" + TABLE_ID
diff --git a/packages/google-cloud-bigtable/tests/unit/v2_client/test_client.py b/packages/google-cloud-bigtable/tests/unit/v2_client/test_client.py
index a4fc0f9cb40e..a2f4225a4eeb 100644
--- a/packages/google-cloud-bigtable/tests/unit/v2_client/test_client.py
+++ b/packages/google-cloud-bigtable/tests/unit/v2_client/test_client.py
@@ -109,6 +109,7 @@ def _make_client(*args, **kwargs):
@mock.patch("os.environ", {})
def test_client_constructor_defaults():
from google.api_core import client_info
+
from google.cloud.bigtable import __version__
from google.cloud.bigtable.client import DATA_SCOPE
@@ -131,8 +132,8 @@ def test_client_constructor_defaults():
def test_client_constructor_explicit():
import warnings
- from google.cloud.bigtable.client import ADMIN_SCOPE
- from google.cloud.bigtable.client import DATA_SCOPE
+
+ from google.cloud.bigtable.client import ADMIN_SCOPE, DATA_SCOPE
credentials = _make_credentials()
client_info = mock.Mock()
@@ -170,10 +171,13 @@ def test_client_constructor_w_both_admin_and_read_only():
def test_client_constructor_w_emulator_host():
- from google.cloud.environment_vars import BIGTABLE_EMULATOR
- from google.cloud.bigtable.client import _DEFAULT_BIGTABLE_EMULATOR_CLIENT
- from google.cloud.bigtable.client import _GRPC_CHANNEL_OPTIONS
import grpc
+ from google.cloud.environment_vars import BIGTABLE_EMULATOR
+
+ from google.cloud.bigtable.client import (
+ _DEFAULT_BIGTABLE_EMULATOR_CLIENT,
+ _GRPC_CHANNEL_OPTIONS,
+ )
emulator_host = "localhost:8081"
with mock.patch("os.environ", {BIGTABLE_EMULATOR: emulator_host}):
@@ -196,9 +200,10 @@ def test_client_constructor_w_emulator_host():
def test_client_constructor_w_emulator_host_w_project():
+ import grpc
from google.cloud.environment_vars import BIGTABLE_EMULATOR
+
from google.cloud.bigtable.client import _GRPC_CHANNEL_OPTIONS
- import grpc
emulator_host = "localhost:8081"
with mock.patch("os.environ", {BIGTABLE_EMULATOR: emulator_host}):
@@ -218,10 +223,13 @@ def test_client_constructor_w_emulator_host_w_project():
def test_client_constructor_w_emulator_host_w_credentials():
- from google.cloud.environment_vars import BIGTABLE_EMULATOR
- from google.cloud.bigtable.client import _DEFAULT_BIGTABLE_EMULATOR_CLIENT
- from google.cloud.bigtable.client import _GRPC_CHANNEL_OPTIONS
import grpc
+ from google.cloud.environment_vars import BIGTABLE_EMULATOR
+
+ from google.cloud.bigtable.client import (
+ _DEFAULT_BIGTABLE_EMULATOR_CLIENT,
+ _GRPC_CHANNEL_OPTIONS,
+ )
emulator_host = "localhost:8081"
credentials = _make_credentials()
@@ -249,8 +257,7 @@ def test_client__get_scopes_default():
def test_client__get_scopes_w_admin():
- from google.cloud.bigtable.client import ADMIN_SCOPE
- from google.cloud.bigtable.client import DATA_SCOPE
+ from google.cloud.bigtable.client import ADMIN_SCOPE, DATA_SCOPE
client = _make_client(project=PROJECT, credentials=_make_credentials(), admin=True)
expected_scopes = (DATA_SCOPE, ADMIN_SCOPE)
@@ -597,8 +604,8 @@ def test_client_instance_factory_defaults():
def test_client_instance_factory_non_defaults():
- from google.cloud.bigtable.instance import Instance
from google.cloud.bigtable import enums
+ from google.cloud.bigtable.instance import Instance
instance_type = enums.Instance.Type.DEVELOPMENT
labels = {"foo": "bar"}
@@ -621,14 +628,14 @@ def test_client_instance_factory_non_defaults():
def test_client_list_instances():
- from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2
- from google.cloud.bigtable_admin_v2.types import (
- bigtable_instance_admin as messages_v2_pb2,
- )
+ from google.cloud.bigtable.instance import Instance
from google.cloud.bigtable_admin_v2.services.bigtable_instance_admin import (
BigtableInstanceAdminClient,
)
- from google.cloud.bigtable.instance import Instance
+ from google.cloud.bigtable_admin_v2.types import (
+ bigtable_instance_admin as messages_v2_pb2,
+ )
+ from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2
FAILED_LOCATION = "FAILED"
INSTANCE_ID1 = "instance-id1"
@@ -673,6 +680,7 @@ def test_client_list_instances():
def test_client_list_clusters():
+ from google.cloud.bigtable.instance import Cluster
from google.cloud.bigtable_admin_v2.services.bigtable_instance_admin import (
BigtableInstanceAdminClient,
)
@@ -680,7 +688,6 @@ def test_client_list_clusters():
bigtable_instance_admin as messages_v2_pb2,
)
from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2
- from google.cloud.bigtable.instance import Cluster
instance_api = mock.create_autospec(BigtableInstanceAdminClient)
diff --git a/packages/google-cloud-bigtable/tests/unit/v2_client/test_cluster.py b/packages/google-cloud-bigtable/tests/unit/v2_client/test_cluster.py
index a21104549bc6..b25ca4ab7adc 100644
--- a/packages/google-cloud-bigtable/tests/unit/v2_client/test_cluster.py
+++ b/packages/google-cloud-bigtable/tests/unit/v2_client/test_cluster.py
@@ -72,8 +72,7 @@ def test_cluster_constructor_defaults():
def test_cluster_constructor_explicit():
- from google.cloud.bigtable.enums import StorageType
- from google.cloud.bigtable.enums import Cluster
+ from google.cloud.bigtable.enums import Cluster, StorageType
STATE = Cluster.State.READY
STORAGE_TYPE_SSD = StorageType.SSD
@@ -125,9 +124,9 @@ def test_cluster_kms_key_name_setter():
def test_cluster_from_pb_success():
- from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2
- from google.cloud.bigtable.cluster import Cluster
from google.cloud.bigtable import enums
+ from google.cloud.bigtable.cluster import Cluster
+ from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2
client = _Client(PROJECT)
instance = _Instance(INSTANCE_ID, client)
@@ -161,8 +160,8 @@ def test_cluster_from_pb_success():
def test_cluster_from_pb_w_bad_cluster_name():
- from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2
from google.cloud.bigtable.cluster import Cluster
+ from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2
bad_cluster_name = "BAD_NAME"
@@ -173,8 +172,8 @@ def test_cluster_from_pb_w_bad_cluster_name():
def test_cluster_from_pb_w_instance_id_mistmatch():
- from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2
from google.cloud.bigtable.cluster import Cluster
+ from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2
ALT_INSTANCE_ID = "ALT_INSTANCE_ID"
client = _Client(PROJECT)
@@ -188,8 +187,8 @@ def test_cluster_from_pb_w_instance_id_mistmatch():
def test_cluster_from_pb_w_project_mistmatch():
- from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2
from google.cloud.bigtable.cluster import Cluster
+ from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2
ALT_PROJECT = "ALT_PROJECT"
client = _Client(project=ALT_PROJECT)
@@ -203,9 +202,9 @@ def test_cluster_from_pb_w_project_mistmatch():
def test_cluster_from_pb_w_autoscaling():
- from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2
- from google.cloud.bigtable.cluster import Cluster
from google.cloud.bigtable import enums
+ from google.cloud.bigtable.cluster import Cluster
+ from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2
client = _Client(PROJECT)
instance = _Instance(INSTANCE_ID, client)
@@ -291,9 +290,8 @@ def _make_instance_admin_client():
def test_cluster_reload():
+ from google.cloud.bigtable.enums import Cluster, StorageType
from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2
- from google.cloud.bigtable.enums import StorageType
- from google.cloud.bigtable.enums import Cluster
credentials = _make_credentials()
client = _make_client(project=PROJECT, credentials=credentials, admin=True)
@@ -348,8 +346,8 @@ def test_cluster_reload():
def test_cluster_exists_hit():
- from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2
from google.cloud.bigtable.instance import Instance
+ from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2
credentials = _make_credentials()
client = _make_client(project=PROJECT, credentials=credentials, admin=True)
@@ -371,9 +369,10 @@ def test_cluster_exists_hit():
def test_cluster_exists_miss():
- from google.cloud.bigtable.instance import Instance
from google.api_core import exceptions
+ from google.cloud.bigtable.instance import Instance
+
credentials = _make_credentials()
client = _make_client(project=PROJECT, credentials=credentials, admin=True)
instance = Instance(INSTANCE_ID, client)
@@ -390,9 +389,10 @@ def test_cluster_exists_miss():
def test_cluster_exists_w_error():
- from google.cloud.bigtable.instance import Instance
from google.api_core import exceptions
+ from google.cloud.bigtable.instance import Instance
+
credentials = _make_credentials()
client = _make_client(project=PROJECT, credentials=credentials, admin=True)
instance = Instance(INSTANCE_ID, client)
@@ -410,15 +410,17 @@ def test_cluster_exists_w_error():
def test_cluster_create():
import datetime
+
+ from google.cloud._helpers import _datetime_to_pb_timestamp
from google.longrunning import operations_pb2
from google.protobuf.any_pb2 import Any
+
+ from google.cloud.bigtable.enums import StorageType
+ from google.cloud.bigtable.instance import Instance
from google.cloud.bigtable_admin_v2.types import (
bigtable_instance_admin as messages_v2_pb2,
)
- from google.cloud._helpers import _datetime_to_pb_timestamp
- from google.cloud.bigtable.instance import Instance
from google.cloud.bigtable_admin_v2.types import instance as instance_v2_pb2
- from google.cloud.bigtable.enums import StorageType
NOW = datetime.datetime.now(datetime.timezone.utc)
NOW_PB = _datetime_to_pb_timestamp(NOW)
@@ -465,15 +467,17 @@ def test_cluster_create():
def test_cluster_create_w_cmek():
import datetime
+
+ from google.cloud._helpers import _datetime_to_pb_timestamp
from google.longrunning import operations_pb2
from google.protobuf.any_pb2 import Any
+
+ from google.cloud.bigtable.enums import StorageType
+ from google.cloud.bigtable.instance import Instance
from google.cloud.bigtable_admin_v2.types import (
bigtable_instance_admin as messages_v2_pb2,
)
- from google.cloud._helpers import _datetime_to_pb_timestamp
- from google.cloud.bigtable.instance import Instance
from google.cloud.bigtable_admin_v2.types import instance as instance_v2_pb2
- from google.cloud.bigtable.enums import StorageType
NOW = datetime.datetime.now(datetime.timezone.utc)
NOW_PB = _datetime_to_pb_timestamp(NOW)
@@ -525,15 +529,17 @@ def test_cluster_create_w_cmek():
def test_cluster_create_w_autoscaling():
import datetime
+
+ from google.cloud._helpers import _datetime_to_pb_timestamp
from google.longrunning import operations_pb2
from google.protobuf.any_pb2 import Any
+
+ from google.cloud.bigtable.enums import StorageType
+ from google.cloud.bigtable.instance import Instance
from google.cloud.bigtable_admin_v2.types import (
bigtable_instance_admin as messages_v2_pb2,
)
- from google.cloud._helpers import _datetime_to_pb_timestamp
- from google.cloud.bigtable.instance import Instance
from google.cloud.bigtable_admin_v2.types import instance as instance_v2_pb2
- from google.cloud.bigtable.enums import StorageType
NOW = datetime.datetime.now(datetime.timezone.utc)
NOW_PB = _datetime_to_pb_timestamp(NOW)
@@ -593,14 +599,16 @@ def test_cluster_create_w_autoscaling():
def test_cluster_update():
import datetime
+
+ from google.cloud._helpers import _datetime_to_pb_timestamp
from google.longrunning import operations_pb2
from google.protobuf import field_mask_pb2
from google.protobuf.any_pb2 import Any
- from google.cloud._helpers import _datetime_to_pb_timestamp
+
+ from google.cloud.bigtable.enums import StorageType
from google.cloud.bigtable_admin_v2.types import (
bigtable_instance_admin as messages_v2_pb2,
)
- from google.cloud.bigtable.enums import StorageType
NOW = datetime.datetime.now(datetime.timezone.utc)
NOW_PB = _datetime_to_pb_timestamp(NOW)
@@ -660,14 +668,16 @@ def test_cluster_update():
def test_cluster_update_w_autoscaling():
import datetime
+
+ from google.cloud._helpers import _datetime_to_pb_timestamp
from google.longrunning import operations_pb2
from google.protobuf import field_mask_pb2
from google.protobuf.any_pb2 import Any
- from google.cloud._helpers import _datetime_to_pb_timestamp
+
+ from google.cloud.bigtable.enums import StorageType
from google.cloud.bigtable_admin_v2.types import (
bigtable_instance_admin as messages_v2_pb2,
)
- from google.cloud.bigtable.enums import StorageType
NOW = datetime.datetime.now(datetime.timezone.utc)
NOW_PB = _datetime_to_pb_timestamp(NOW)
@@ -719,14 +729,16 @@ def test_cluster_update_w_autoscaling():
def test_cluster_update_w_partial_autoscaling_config():
import datetime
+
+ from google.cloud._helpers import _datetime_to_pb_timestamp
from google.longrunning import operations_pb2
from google.protobuf import field_mask_pb2
from google.protobuf.any_pb2 import Any
- from google.cloud._helpers import _datetime_to_pb_timestamp
+
+ from google.cloud.bigtable.enums import StorageType
from google.cloud.bigtable_admin_v2.types import (
bigtable_instance_admin as messages_v2_pb2,
)
- from google.cloud.bigtable.enums import StorageType
NOW = datetime.datetime.now(datetime.timezone.utc)
NOW_PB = _datetime_to_pb_timestamp(NOW)
@@ -803,14 +815,16 @@ def test_cluster_update_w_partial_autoscaling_config():
def test_cluster_update_w_both_manual_and_autoscaling():
import datetime
+
+ from google.cloud._helpers import _datetime_to_pb_timestamp
from google.longrunning import operations_pb2
from google.protobuf import field_mask_pb2
from google.protobuf.any_pb2 import Any
- from google.cloud._helpers import _datetime_to_pb_timestamp
+
+ from google.cloud.bigtable.enums import StorageType
from google.cloud.bigtable_admin_v2.types import (
bigtable_instance_admin as messages_v2_pb2,
)
- from google.cloud.bigtable.enums import StorageType
NOW = datetime.datetime.now(datetime.timezone.utc)
NOW_PB = _datetime_to_pb_timestamp(NOW)
@@ -863,15 +877,17 @@ def test_cluster_update_w_both_manual_and_autoscaling():
def test_cluster_disable_autoscaling():
import datetime
+
+ from google.cloud._helpers import _datetime_to_pb_timestamp
from google.longrunning import operations_pb2
from google.protobuf import field_mask_pb2
from google.protobuf.any_pb2 import Any
+
+ from google.cloud.bigtable.enums import StorageType
+ from google.cloud.bigtable.instance import Instance
from google.cloud.bigtable_admin_v2.types import (
bigtable_instance_admin as messages_v2_pb2,
)
- from google.cloud._helpers import _datetime_to_pb_timestamp
- from google.cloud.bigtable.instance import Instance
- from google.cloud.bigtable.enums import StorageType
NOW = datetime.datetime.now(datetime.timezone.utc)
NOW_PB = _datetime_to_pb_timestamp(NOW)
@@ -926,8 +942,8 @@ def test_cluster_disable_autoscaling():
def test_create_cluster_with_both_manual_and_autoscaling():
- from google.cloud.bigtable.instance import Instance
from google.cloud.bigtable.enums import StorageType
+ from google.cloud.bigtable.instance import Instance
credentials = _make_credentials()
client = _make_client(project=PROJECT, credentials=credentials, admin=True)
@@ -953,8 +969,8 @@ def test_create_cluster_with_both_manual_and_autoscaling():
def test_create_cluster_with_partial_autoscaling_config():
- from google.cloud.bigtable.instance import Instance
from google.cloud.bigtable.enums import StorageType
+ from google.cloud.bigtable.instance import Instance
credentials = _make_credentials()
client = _make_client(project=PROJECT, credentials=credentials, admin=True)
@@ -993,8 +1009,8 @@ def test_create_cluster_with_partial_autoscaling_config():
def test_create_cluster_with_no_scaling_config():
- from google.cloud.bigtable.instance import Instance
from google.cloud.bigtable.enums import StorageType
+ from google.cloud.bigtable.instance import Instance
credentials = _make_credentials()
client = _make_client(project=PROJECT, credentials=credentials, admin=True)
diff --git a/packages/google-cloud-bigtable/tests/unit/v2_client/test_column_family.py b/packages/google-cloud-bigtable/tests/unit/v2_client/test_column_family.py
index 2480e11cba11..0a33785ac97a 100644
--- a/packages/google-cloud-bigtable/tests/unit/v2_client/test_column_family.py
+++ b/packages/google-cloud-bigtable/tests/unit/v2_client/test_column_family.py
@@ -79,6 +79,7 @@ def test_max_age_gc_rule___ne__same_value():
def test_max_age_gc_rule_to_pb():
import datetime
+
from google.protobuf import duration_pb2
max_age = datetime.timedelta(seconds=1)
@@ -123,9 +124,10 @@ def test_gc_rule_union___ne__same_value():
def test_gc_rule_union_to_pb():
import datetime
+
from google.protobuf import duration_pb2
- from google.cloud.bigtable.column_family import MaxAgeGCRule
- from google.cloud.bigtable.column_family import MaxVersionsGCRule
+
+ from google.cloud.bigtable.column_family import MaxAgeGCRule, MaxVersionsGCRule
max_num_versions = 42
rule1 = MaxVersionsGCRule(max_num_versions)
@@ -144,9 +146,10 @@ def test_gc_rule_union_to_pb():
def test_gc_rule_union_to_pb_nested():
import datetime
+
from google.protobuf import duration_pb2
- from google.cloud.bigtable.column_family import MaxAgeGCRule
- from google.cloud.bigtable.column_family import MaxVersionsGCRule
+
+ from google.cloud.bigtable.column_family import MaxAgeGCRule, MaxVersionsGCRule
max_num_versions1 = 42
rule1 = MaxVersionsGCRule(max_num_versions1)
@@ -205,9 +208,10 @@ def test_gc_rule_intersection___ne__same_value():
def test_gc_rule_intersection_to_pb():
import datetime
+
from google.protobuf import duration_pb2
- from google.cloud.bigtable.column_family import MaxAgeGCRule
- from google.cloud.bigtable.column_family import MaxVersionsGCRule
+
+ from google.cloud.bigtable.column_family import MaxAgeGCRule, MaxVersionsGCRule
max_num_versions = 42
rule1 = MaxVersionsGCRule(max_num_versions)
@@ -226,9 +230,10 @@ def test_gc_rule_intersection_to_pb():
def test_gc_rule_intersection_to_pb_nested():
import datetime
+
from google.protobuf import duration_pb2
- from google.cloud.bigtable.column_family import MaxAgeGCRule
- from google.cloud.bigtable.column_family import MaxVersionsGCRule
+
+ from google.cloud.bigtable.column_family import MaxAgeGCRule, MaxVersionsGCRule
max_num_versions1 = 42
rule1 = MaxVersionsGCRule(max_num_versions1)
@@ -333,13 +338,14 @@ def test_column_family_to_pb_with_rule():
def _create_test_helper(gc_rule=None):
+ from google.cloud.bigtable_admin_v2.services.bigtable_table_admin import (
+ BaseBigtableTableAdminClient,
+ )
from google.cloud.bigtable_admin_v2.types import (
bigtable_table_admin as table_admin_v2_pb2,
)
+
from ._testing import _FakeStub
- from google.cloud.bigtable_admin_v2.services.bigtable_table_admin import (
- BaseBigtableTableAdminClient,
- )
project_id = "project-id"
zone = "zone"
@@ -404,13 +410,14 @@ def test_column_family_create_with_gc_rule():
def _update_test_helper(gc_rule=None):
- from ._testing import _FakeStub
- from google.cloud.bigtable_admin_v2.types import (
- bigtable_table_admin as table_admin_v2_pb2,
- )
from google.cloud.bigtable_admin_v2.services.bigtable_table_admin import (
BaseBigtableTableAdminClient,
)
+ from google.cloud.bigtable_admin_v2.types import (
+ bigtable_table_admin as table_admin_v2_pb2,
+ )
+
+ from ._testing import _FakeStub
project_id = "project-id"
zone = "zone"
@@ -475,13 +482,15 @@ def test_column_family_update_with_gc_rule():
def test_column_family_delete():
from google.protobuf import empty_pb2
+
+ from google.cloud.bigtable_admin_v2.services.bigtable_table_admin import (
+ BaseBigtableTableAdminClient,
+ )
from google.cloud.bigtable_admin_v2.types import (
bigtable_table_admin as table_admin_v2_pb2,
)
+
from ._testing import _FakeStub
- from google.cloud.bigtable_admin_v2.services.bigtable_table_admin import (
- BaseBigtableTableAdminClient,
- )
project_id = "project-id"
zone = "zone"
@@ -537,8 +546,7 @@ def test__gc_rule_from_pb_empty():
def test__gc_rule_from_pb_max_num_versions():
- from google.cloud.bigtable.column_family import _gc_rule_from_pb
- from google.cloud.bigtable.column_family import MaxVersionsGCRule
+ from google.cloud.bigtable.column_family import MaxVersionsGCRule, _gc_rule_from_pb
orig_rule = MaxVersionsGCRule(1)
gc_rule_pb = orig_rule.to_pb()
@@ -549,8 +557,8 @@ def test__gc_rule_from_pb_max_num_versions():
def test__gc_rule_from_pb_max_age():
import datetime
- from google.cloud.bigtable.column_family import _gc_rule_from_pb
- from google.cloud.bigtable.column_family import MaxAgeGCRule
+
+ from google.cloud.bigtable.column_family import MaxAgeGCRule, _gc_rule_from_pb
orig_rule = MaxAgeGCRule(datetime.timedelta(seconds=1))
gc_rule_pb = orig_rule.to_pb()
@@ -561,10 +569,13 @@ def test__gc_rule_from_pb_max_age():
def test__gc_rule_from_pb_union():
import datetime
- from google.cloud.bigtable.column_family import _gc_rule_from_pb
- from google.cloud.bigtable.column_family import GCRuleUnion
- from google.cloud.bigtable.column_family import MaxAgeGCRule
- from google.cloud.bigtable.column_family import MaxVersionsGCRule
+
+ from google.cloud.bigtable.column_family import (
+ GCRuleUnion,
+ MaxAgeGCRule,
+ MaxVersionsGCRule,
+ _gc_rule_from_pb,
+ )
rule1 = MaxVersionsGCRule(1)
rule2 = MaxAgeGCRule(datetime.timedelta(seconds=1))
@@ -577,10 +588,13 @@ def test__gc_rule_from_pb_union():
def test__gc_rule_from_pb_intersection():
import datetime
- from google.cloud.bigtable.column_family import _gc_rule_from_pb
- from google.cloud.bigtable.column_family import GCRuleIntersection
- from google.cloud.bigtable.column_family import MaxAgeGCRule
- from google.cloud.bigtable.column_family import MaxVersionsGCRule
+
+ from google.cloud.bigtable.column_family import (
+ GCRuleIntersection,
+ MaxAgeGCRule,
+ MaxVersionsGCRule,
+ _gc_rule_from_pb,
+ )
rule1 = MaxVersionsGCRule(1)
rule2 = MaxAgeGCRule(datetime.timedelta(seconds=1))
diff --git a/packages/google-cloud-bigtable/tests/unit/v2_client/test_encryption_info.py b/packages/google-cloud-bigtable/tests/unit/v2_client/test_encryption_info.py
index 8b92a83ed980..32e32cffe038 100644
--- a/packages/google-cloud-bigtable/tests/unit/v2_client/test_encryption_info.py
+++ b/packages/google-cloud-bigtable/tests/unit/v2_client/test_encryption_info.py
@@ -16,7 +16,6 @@
from google.cloud.bigtable import enums
-
EncryptionType = enums.EncryptionInfo.EncryptionType
_STATUS_CODE = 123
_STATUS_MESSAGE = "message"
diff --git a/packages/google-cloud-bigtable/tests/unit/v2_client/test_instance.py b/packages/google-cloud-bigtable/tests/unit/v2_client/test_instance.py
index c5ef9c9b8c9b..546049360892 100644
--- a/packages/google-cloud-bigtable/tests/unit/v2_client/test_instance.py
+++ b/packages/google-cloud-bigtable/tests/unit/v2_client/test_instance.py
@@ -16,9 +16,9 @@
import mock
import pytest
-from ._testing import _make_credentials
from google.cloud.bigtable.cluster import Cluster
+from ._testing import _make_credentials
PROJECT = "project"
INSTANCE_ID = "instance-id"
@@ -103,8 +103,8 @@ def test_instance_constructor_non_default():
def test_instance__update_from_pb_success():
- from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2
from google.cloud.bigtable import enums
+ from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2
instance_type = data_v2_pb2.Instance.Type.PRODUCTION
state = enums.Instance.State.READY
@@ -128,8 +128,8 @@ def test_instance__update_from_pb_success():
def test_instance__update_from_pb_success_defaults():
- from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2
from google.cloud.bigtable import enums
+ from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2
instance_pb = data_v2_pb2.Instance(display_name=DISPLAY_NAME)
@@ -155,9 +155,9 @@ def test_instance__update_from_pb_wo_display_name():
def test_instance_from_pb_success():
- from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2
from google.cloud.bigtable import enums
from google.cloud.bigtable.instance import Instance
+ from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2
credentials = _make_credentials()
client = _make_client(project=PROJECT, credentials=credentials, admin=True)
@@ -183,8 +183,8 @@ def test_instance_from_pb_success():
def test_instance_from_pb_bad_instance_name():
- from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2
from google.cloud.bigtable.instance import Instance
+ from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2
instance_name = "INCORRECT_FORMAT"
instance_pb = data_v2_pb2.Instance(name=instance_name)
@@ -194,8 +194,8 @@ def test_instance_from_pb_bad_instance_name():
def test_instance_from_pb_project_mistmatch():
- from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2
from google.cloud.bigtable.instance import Instance
+ from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2
ALT_PROJECT = "ALT_PROJECT"
credentials = _make_credentials()
@@ -268,10 +268,12 @@ def test_instance_create_w_default_storage_type_and_clusters():
def _instance_api_response_for_create():
import datetime
+
from google.api_core import operation
+ from google.cloud._helpers import _datetime_to_pb_timestamp
from google.longrunning import operations_pb2
from google.protobuf.any_pb2 import Any
- from google.cloud._helpers import _datetime_to_pb_timestamp
+
from google.cloud.bigtable_admin_v2.types import (
bigtable_instance_admin as messages_v2_pb2,
)
@@ -304,11 +306,11 @@ def _instance_api_response_for_create():
def test_instance_create():
- from google.cloud.bigtable import enums
- from google.cloud.bigtable_admin_v2.types import Instance
- from google.cloud.bigtable_admin_v2.types import Cluster
import warnings
+ from google.cloud.bigtable import enums
+ from google.cloud.bigtable_admin_v2.types import Cluster, Instance
+
credentials = _make_credentials()
client = _make_client(project=PROJECT, credentials=credentials, admin=True)
instance = _make_instance(
@@ -472,8 +474,8 @@ def test_instance_exists_w_error():
def test_instance_reload():
- from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2
from google.cloud.bigtable import enums
+ from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2
DISPLAY_NAME = "hey-hi-hello"
credentials = _make_credentials()
@@ -494,10 +496,12 @@ def test_instance_reload():
def _instance_api_response_for_update():
import datetime
+
from google.api_core import operation
+ from google.cloud._helpers import _datetime_to_pb_timestamp
from google.longrunning import operations_pb2
from google.protobuf.any_pb2 import Any
- from google.cloud._helpers import _datetime_to_pb_timestamp
+
from google.cloud.bigtable_admin_v2.types import (
bigtable_instance_admin as messages_v2_pb2,
)
@@ -527,8 +531,9 @@ def _instance_api_response_for_update():
def test_instance_update():
- from google.cloud.bigtable import enums
from google.protobuf import field_mask_pb2
+
+ from google.cloud.bigtable import enums
from google.cloud.bigtable_admin_v2.types import Instance
credentials = _make_credentials()
@@ -562,6 +567,7 @@ def test_instance_update():
def test_instance_update_empty():
from google.protobuf import field_mask_pb2
+
from google.cloud.bigtable_admin_v2.types import Instance
credentials = _make_credentials()
@@ -603,6 +609,7 @@ def test_instance_delete():
def test_instance_get_iam_policy():
from google.iam.v1 import policy_pb2
+
from google.cloud.bigtable.policy import BIGTABLE_ADMIN_ROLE
credentials = _make_credentials()
@@ -630,7 +637,8 @@ def test_instance_get_iam_policy():
def test_instance_get_iam_policy_w_requested_policy_version():
- from google.iam.v1 import policy_pb2, options_pb2
+ from google.iam.v1 import options_pb2, policy_pb2
+
from google.cloud.bigtable.policy import BIGTABLE_ADMIN_ROLE
credentials = _make_credentials()
@@ -665,8 +673,8 @@ def test_instance_get_iam_policy_w_requested_policy_version():
def test_instance_set_iam_policy():
from google.iam.v1 import policy_pb2
- from google.cloud.bigtable.policy import Policy
- from google.cloud.bigtable.policy import BIGTABLE_ADMIN_ROLE
+
+ from google.cloud.bigtable.policy import BIGTABLE_ADMIN_ROLE, Policy
credentials = _make_credentials()
client = _make_client(project=PROJECT, credentials=credentials, admin=True)
@@ -745,12 +753,11 @@ def test_instance_cluster_factory():
def test_instance_list_clusters():
+ from google.cloud.bigtable.instance import Cluster, Instance
from google.cloud.bigtable_admin_v2.types import (
bigtable_instance_admin as messages_v2_pb2,
)
from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2
- from google.cloud.bigtable.instance import Instance
- from google.cloud.bigtable.instance import Cluster
credentials = _make_credentials()
client = _make_client(project=PROJECT, credentials=credentials, admin=True)
@@ -801,13 +808,13 @@ def test_instance_table_factory():
def _list_tables_helper(table_name=None):
- from google.cloud.bigtable_admin_v2.types import table as table_data_v2_pb2
- from google.cloud.bigtable_admin_v2.types import (
- bigtable_table_admin as table_messages_v1_pb2,
- )
from google.cloud.bigtable_admin_v2.services.bigtable_table_admin import (
BaseBigtableTableAdminClient,
)
+ from google.cloud.bigtable_admin_v2.types import (
+ bigtable_table_admin as table_messages_v1_pb2,
+ )
+ from google.cloud.bigtable_admin_v2.types import table as table_data_v2_pb2
credentials = _make_credentials()
client = _make_client(project=PROJECT, credentials=credentials, admin=True)
@@ -887,10 +894,10 @@ def test_instance_app_profile_factory():
def test_instance_list_app_profiles():
- from google.api_core.page_iterator import Iterator
- from google.api_core.page_iterator import Page
- from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2
+ from google.api_core.page_iterator import Iterator, Page
+
from google.cloud.bigtable.app_profile import AppProfile
+ from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2
class _Iterator(Iterator):
def __init__(self, pages):
diff --git a/packages/google-cloud-bigtable/tests/unit/v2_client/test_policy.py b/packages/google-cloud-bigtable/tests/unit/v2_client/test_policy.py
index 77674517e0d8..aab229a83cf9 100644
--- a/packages/google-cloud-bigtable/tests/unit/v2_client/test_policy.py
+++ b/packages/google-cloud-bigtable/tests/unit/v2_client/test_policy.py
@@ -89,6 +89,7 @@ def test_policy_bigtable_viewers():
def test_policy_from_pb_w_empty():
from google.iam.v1 import policy_pb2
+
from google.cloud.bigtable.policy import Policy
empty = frozenset()
@@ -106,8 +107,8 @@ def test_policy_from_pb_w_empty():
def test_policy_from_pb_w_non_empty():
from google.iam.v1 import policy_pb2
- from google.cloud.bigtable.policy import BIGTABLE_ADMIN_ROLE
- from google.cloud.bigtable.policy import Policy
+
+ from google.cloud.bigtable.policy import BIGTABLE_ADMIN_ROLE, Policy
ETAG = b"ETAG"
VERSION = 1
@@ -131,10 +132,10 @@ def test_policy_from_pb_w_non_empty():
def test_policy_from_pb_w_condition():
import pytest
+ from google.api_core.iam import _DICT_ACCESS_MSG, InvalidOperationException
from google.iam.v1 import policy_pb2
- from google.api_core.iam import InvalidOperationException, _DICT_ACCESS_MSG
- from google.cloud.bigtable.policy import BIGTABLE_ADMIN_ROLE
- from google.cloud.bigtable.policy import Policy
+
+ from google.cloud.bigtable.policy import BIGTABLE_ADMIN_ROLE, Policy
ETAG = b"ETAG"
VERSION = 3
@@ -184,6 +185,7 @@ def test_policy_to_pb_empty():
def test_policy_to_pb_explicit():
from google.iam.v1 import policy_pb2
+
from google.cloud.bigtable.policy import BIGTABLE_ADMIN_ROLE
VERSION = 1
@@ -204,6 +206,7 @@ def test_policy_to_pb_explicit():
def test_policy_to_pb_w_condition():
from google.iam.v1 import policy_pb2
+
from google.cloud.bigtable.policy import BIGTABLE_ADMIN_ROLE
VERSION = 3
@@ -252,6 +255,7 @@ def test_policy_from_api_repr_wo_etag():
def test_policy_from_api_repr_w_etag():
import base64
+
from google.cloud.bigtable.policy import Policy
ETAG = b"ETAG"
diff --git a/packages/google-cloud-bigtable/tests/unit/v2_client/test_row.py b/packages/google-cloud-bigtable/tests/unit/v2_client/test_row.py
index f04802f5cc07..b22cd69b3d08 100644
--- a/packages/google-cloud-bigtable/tests/unit/v2_client/test_row.py
+++ b/packages/google-cloud-bigtable/tests/unit/v2_client/test_row.py
@@ -174,6 +174,7 @@ def test_direct_row_set_cell_with_non_bytes_value():
def test_direct_row_set_cell_with_non_null_timestamp():
import datetime
+
from google.cloud._helpers import _EPOCH
microseconds = 898294371
@@ -292,7 +293,9 @@ def test_direct_row_delete_cells_no_time_range():
def test_direct_row_delete_cells_with_time_range():
import datetime
+
from google.cloud._helpers import _EPOCH
+
from google.cloud.bigtable.row_filters import TimestampRange
microseconds = 30871000 # Makes sure already milliseconds granularity
@@ -466,6 +469,7 @@ def test_conditional_row_commit():
def test_conditional_row_commit_too_many_mutations():
from google.cloud._testing import _Monkey
+
from google.cloud.bigtable import row as MUT
row_key = b"row_key"
@@ -564,6 +568,7 @@ def test_append_row_increment_cell_value():
def test_append_row_commit():
from google.cloud._testing import _Monkey
+
from google.cloud.bigtable import row as MUT
from google.cloud.bigtable_v2.services.bigtable import BigtableClient
@@ -630,6 +635,7 @@ def test_append_row_commit_no_rules():
def test_append_row_commit_too_many_mutations():
from google.cloud._testing import _Monkey
+
from google.cloud.bigtable import row as MUT
row_key = b"row_key"
@@ -644,6 +650,7 @@ def test_append_row_commit_too_many_mutations():
def test__parse_rmw_row_response():
from google.cloud._helpers import _datetime_from_microseconds
+
from google.cloud.bigtable.row import _parse_rmw_row_response
col_fam1 = "col-fam-id"
@@ -700,6 +707,7 @@ def test__parse_rmw_row_response():
def test__parse_family_pb():
from google.cloud._helpers import _datetime_from_microseconds
+
from google.cloud.bigtable.row import _parse_family_pb
col_fam1 = "col-fam-id"
diff --git a/packages/google-cloud-bigtable/tests/unit/v2_client/test_row_data.py b/packages/google-cloud-bigtable/tests/unit/v2_client/test_row_data.py
index 7c2987b56d18..c24f3c0faeb3 100644
--- a/packages/google-cloud-bigtable/tests/unit/v2_client/test_row_data.py
+++ b/packages/google-cloud-bigtable/tests/unit/v2_client/test_row_data.py
@@ -34,9 +34,11 @@ def _make_cell(*args, **kwargs):
def _cell_from_pb_test_helper(labels=None):
import datetime
+
from google.cloud._helpers import _EPOCH
- from google.cloud.bigtable_v2.types import data as data_v2_pb2
+
from google.cloud.bigtable.row_data import Cell
+ from google.cloud.bigtable_v2.types import data as data_v2_pb2
timestamp = _EPOCH + datetime.timedelta(microseconds=TIMESTAMP_MICROS)
value = b"value-bytes"
@@ -267,8 +269,7 @@ def test_partial_row_data_row_key_getter():
def _make_grpc_call_error(exception):
- from grpc import Call
- from grpc import RpcError
+ from grpc import Call, RpcError
class TestingException(Call, RpcError):
def __init__(self, exception):
@@ -288,6 +289,7 @@ def trailing_metadata(self):
def test__retry_read_rows_exception_miss():
from google.api_core.exceptions import Conflict
+
from google.cloud.bigtable.row_data import _retry_read_rows_exception
exception = Conflict("testing")
@@ -296,6 +298,7 @@ def test__retry_read_rows_exception_miss():
def test__retry_read_rows_exception_service_unavailable():
from google.api_core.exceptions import ServiceUnavailable
+
from google.cloud.bigtable.row_data import _retry_read_rows_exception
exception = ServiceUnavailable("testing")
@@ -304,6 +307,7 @@ def test__retry_read_rows_exception_service_unavailable():
def test__retry_read_rows_exception_deadline_exceeded():
from google.api_core.exceptions import DeadlineExceeded
+
from google.cloud.bigtable.row_data import _retry_read_rows_exception
exception = DeadlineExceeded("testing")
@@ -312,9 +316,10 @@ def test__retry_read_rows_exception_deadline_exceeded():
def test__retry_read_rows_exception_internal_server_not_retriable():
from google.api_core.exceptions import InternalServerError
+
from google.cloud.bigtable.row_data import (
- _retry_read_rows_exception,
RETRYABLE_INTERNAL_ERROR_MESSAGES,
+ _retry_read_rows_exception,
)
err_message = "500 Error"
@@ -325,9 +330,10 @@ def test__retry_read_rows_exception_internal_server_not_retriable():
def test__retry_read_rows_exception_internal_server_retriable():
from google.api_core.exceptions import InternalServerError
+
from google.cloud.bigtable.row_data import (
- _retry_read_rows_exception,
RETRYABLE_INTERNAL_ERROR_MESSAGES,
+ _retry_read_rows_exception,
)
for err_message in RETRYABLE_INTERNAL_ERROR_MESSAGES:
@@ -337,6 +343,7 @@ def test__retry_read_rows_exception_internal_server_retriable():
def test__retry_read_rows_exception_miss_wrapped_in_grpc():
from google.api_core.exceptions import Conflict
+
from google.cloud.bigtable.row_data import _retry_read_rows_exception
wrapped = Conflict("testing")
@@ -346,6 +353,7 @@ def test__retry_read_rows_exception_miss_wrapped_in_grpc():
def test__retry_read_rows_exception_service_unavailable_wrapped_in_grpc():
from google.api_core.exceptions import ServiceUnavailable
+
from google.cloud.bigtable.row_data import _retry_read_rows_exception
wrapped = ServiceUnavailable("testing")
@@ -355,6 +363,7 @@ def test__retry_read_rows_exception_service_unavailable_wrapped_in_grpc():
def test__retry_read_rows_exception_deadline_exceeded_wrapped_in_grpc():
from google.api_core.exceptions import DeadlineExceeded
+
from google.cloud.bigtable.row_data import _retry_read_rows_exception
wrapped = DeadlineExceeded("testing")
@@ -1099,8 +1108,8 @@ def test_RRRM_build_updated_request_last_row_read_raises_invalid_retry_request()
def test_RRRM_build_updated_request_row_ranges_read_raises_invalid_retry_request():
- from google.cloud.bigtable.row_data import InvalidRetryRequest
from google.cloud.bigtable import row_set
+ from google.cloud.bigtable.row_data import InvalidRetryRequest
row_range1 = row_set.RowRange(b"row_key21", b"row_key29")
@@ -1176,6 +1185,7 @@ def _ReadRowsResponseV2(chunks, last_scanned_row_key=b""):
def _generate_cell_chunks(chunk_text_pbs):
from google.protobuf.text_format import Merge
+
from google.cloud.bigtable_v2.types.bigtable import ReadRowsResponse
chunks = []
diff --git a/packages/google-cloud-bigtable/tests/unit/v2_client/test_row_filters.py b/packages/google-cloud-bigtable/tests/unit/v2_client/test_row_filters.py
index b312cb942fdd..447ca95b0ee4 100644
--- a/packages/google-cloud-bigtable/tests/unit/v2_client/test_row_filters.py
+++ b/packages/google-cloud-bigtable/tests/unit/v2_client/test_row_filters.py
@@ -242,7 +242,9 @@ def test_timestamp_range___ne__same_value():
def _timestamp_range_to_pb_helper(pb_kwargs, start=None, end=None):
import datetime
+
from google.cloud._helpers import _EPOCH
+
from google.cloud.bigtable.row_filters import TimestampRange
if start is not None:
@@ -327,8 +329,7 @@ def test_timestamp_range_filter___ne__():
def test_timestamp_range_filter_to_pb():
- from google.cloud.bigtable.row_filters import TimestampRangeFilter
- from google.cloud.bigtable.row_filters import TimestampRange
+ from google.cloud.bigtable.row_filters import TimestampRange, TimestampRangeFilter
range_ = TimestampRange()
row_filter = TimestampRangeFilter(range_)
@@ -557,6 +558,7 @@ def test_exact_value_filter_to_pb_w_str():
def test_exact_value_filter_to_pb_w_int():
import struct
+
from google.cloud.bigtable.row_filters import ExactValueFilter
value = 1
@@ -600,9 +602,10 @@ def test_value_range_filter_constructor_explicit():
def test_value_range_filter_constructor_w_int_values():
- from google.cloud.bigtable.row_filters import ValueRangeFilter
import struct
+ from google.cloud.bigtable.row_filters import ValueRangeFilter
+
start_value = 1
end_value = 10
@@ -897,9 +900,11 @@ def test_filter_combination___ne__():
def test_row_filter_chain_to_pb():
- from google.cloud.bigtable.row_filters import RowFilterChain
- from google.cloud.bigtable.row_filters import RowSampleFilter
- from google.cloud.bigtable.row_filters import StripValueTransformerFilter
+ from google.cloud.bigtable.row_filters import (
+ RowFilterChain,
+ RowSampleFilter,
+ StripValueTransformerFilter,
+ )
row_filter1 = StripValueTransformerFilter(True)
row_filter1_pb = row_filter1.to_pb()
@@ -917,10 +922,12 @@ def test_row_filter_chain_to_pb():
def test_row_filter_chain_to_pb_nested():
- from google.cloud.bigtable.row_filters import CellsRowLimitFilter
- from google.cloud.bigtable.row_filters import RowFilterChain
- from google.cloud.bigtable.row_filters import RowSampleFilter
- from google.cloud.bigtable.row_filters import StripValueTransformerFilter
+ from google.cloud.bigtable.row_filters import (
+ CellsRowLimitFilter,
+ RowFilterChain,
+ RowSampleFilter,
+ StripValueTransformerFilter,
+ )
row_filter1 = StripValueTransformerFilter(True)
row_filter2 = RowSampleFilter(0.25)
@@ -941,9 +948,11 @@ def test_row_filter_chain_to_pb_nested():
def test_row_filter_union_to_pb():
- from google.cloud.bigtable.row_filters import RowFilterUnion
- from google.cloud.bigtable.row_filters import RowSampleFilter
- from google.cloud.bigtable.row_filters import StripValueTransformerFilter
+ from google.cloud.bigtable.row_filters import (
+ RowFilterUnion,
+ RowSampleFilter,
+ StripValueTransformerFilter,
+ )
row_filter1 = StripValueTransformerFilter(True)
row_filter1_pb = row_filter1.to_pb()
@@ -961,10 +970,12 @@ def test_row_filter_union_to_pb():
def test_row_filter_union_to_pb_nested():
- from google.cloud.bigtable.row_filters import CellsRowLimitFilter
- from google.cloud.bigtable.row_filters import RowFilterUnion
- from google.cloud.bigtable.row_filters import RowSampleFilter
- from google.cloud.bigtable.row_filters import StripValueTransformerFilter
+ from google.cloud.bigtable.row_filters import (
+ CellsRowLimitFilter,
+ RowFilterUnion,
+ RowSampleFilter,
+ StripValueTransformerFilter,
+ )
row_filter1 = StripValueTransformerFilter(True)
row_filter2 = RowSampleFilter(0.25)
@@ -1043,10 +1054,12 @@ def test_conditional_row_filter___ne__():
def test_conditional_row_filter_to_pb():
- from google.cloud.bigtable.row_filters import ConditionalRowFilter
- from google.cloud.bigtable.row_filters import CellsRowOffsetFilter
- from google.cloud.bigtable.row_filters import RowSampleFilter
- from google.cloud.bigtable.row_filters import StripValueTransformerFilter
+ from google.cloud.bigtable.row_filters import (
+ CellsRowOffsetFilter,
+ ConditionalRowFilter,
+ RowSampleFilter,
+ StripValueTransformerFilter,
+ )
row_filter1 = StripValueTransformerFilter(True)
row_filter1_pb = row_filter1.to_pb()
@@ -1073,9 +1086,11 @@ def test_conditional_row_filter_to_pb():
def test_conditional_row_filter_to_pb_true_only():
- from google.cloud.bigtable.row_filters import ConditionalRowFilter
- from google.cloud.bigtable.row_filters import RowSampleFilter
- from google.cloud.bigtable.row_filters import StripValueTransformerFilter
+ from google.cloud.bigtable.row_filters import (
+ ConditionalRowFilter,
+ RowSampleFilter,
+ StripValueTransformerFilter,
+ )
row_filter1 = StripValueTransformerFilter(True)
row_filter1_pb = row_filter1.to_pb()
@@ -1095,9 +1110,11 @@ def test_conditional_row_filter_to_pb_true_only():
def test_conditional_row_filter_to_pb_false_only():
- from google.cloud.bigtable.row_filters import ConditionalRowFilter
- from google.cloud.bigtable.row_filters import RowSampleFilter
- from google.cloud.bigtable.row_filters import StripValueTransformerFilter
+ from google.cloud.bigtable.row_filters import (
+ ConditionalRowFilter,
+ RowSampleFilter,
+ StripValueTransformerFilter,
+ )
row_filter1 = StripValueTransformerFilter(True)
row_filter1_pb = row_filter1.to_pb()
diff --git a/packages/google-cloud-bigtable/tests/unit/v2_client/test_row_merger.py b/packages/google-cloud-bigtable/tests/unit/v2_client/test_row_merger.py
index 483c04536666..cc9df9c70822 100644
--- a/packages/google-cloud-bigtable/tests/unit/v2_client/test_row_merger.py
+++ b/packages/google-cloud-bigtable/tests/unit/v2_client/test_row_merger.py
@@ -5,9 +5,9 @@
import proto
import pytest
-from google.cloud.bigtable.row_data import PartialRowsData, PartialRowData, InvalidChunk
-from google.cloud.bigtable_v2.types.bigtable import ReadRowsResponse
+from google.cloud.bigtable.row_data import InvalidChunk, PartialRowData, PartialRowsData
from google.cloud.bigtable.row_merger import _RowMerger
+from google.cloud.bigtable_v2.types.bigtable import ReadRowsResponse
# TODO: autogenerate protos from
diff --git a/packages/google-cloud-bigtable/tests/unit/v2_client/test_row_set.py b/packages/google-cloud-bigtable/tests/unit/v2_client/test_row_set.py
index 1a33be7202e4..4142348ee195 100644
--- a/packages/google-cloud-bigtable/tests/unit/v2_client/test_row_set.py
+++ b/packages/google-cloud-bigtable/tests/unit/v2_client/test_row_set.py
@@ -22,8 +22,7 @@ def test_row_set_constructor():
def test_row_set__eq__():
- from google.cloud.bigtable.row_set import RowRange
- from google.cloud.bigtable.row_set import RowSet
+ from google.cloud.bigtable.row_set import RowRange, RowSet
row_key1 = b"row_key1"
row_key2 = b"row_key1"
@@ -66,8 +65,7 @@ def test_row_set__eq__len_row_keys_differ():
def test_row_set__eq__len_row_ranges_differ():
- from google.cloud.bigtable.row_set import RowRange
- from google.cloud.bigtable.row_set import RowSet
+ from google.cloud.bigtable.row_set import RowRange, RowSet
row_range1 = RowRange(b"row_key4", b"row_key9")
row_range2 = RowRange(b"row_key4", b"row_key9")
@@ -99,8 +97,7 @@ def test_row_set__eq__row_keys_differ():
def test_row_set__eq__row_ranges_differ():
- from google.cloud.bigtable.row_set import RowRange
- from google.cloud.bigtable.row_set import RowSet
+ from google.cloud.bigtable.row_set import RowRange, RowSet
row_range1 = RowRange(b"row_key4", b"row_key9")
row_range2 = RowRange(b"row_key14", b"row_key19")
@@ -119,8 +116,7 @@ def test_row_set__eq__row_ranges_differ():
def test_row_set__ne__():
- from google.cloud.bigtable.row_set import RowRange
- from google.cloud.bigtable.row_set import RowSet
+ from google.cloud.bigtable.row_set import RowRange, RowSet
row_key1 = b"row_key1"
row_key2 = b"row_key1"
@@ -139,8 +135,7 @@ def test_row_set__ne__():
def test_row_set__ne__same_value():
- from google.cloud.bigtable.row_set import RowRange
- from google.cloud.bigtable.row_set import RowSet
+ from google.cloud.bigtable.row_set import RowRange, RowSet
row_key1 = b"row_key1"
row_key2 = b"row_key1"
@@ -168,8 +163,7 @@ def test_row_set_add_row_key():
def test_row_set_add_row_range():
- from google.cloud.bigtable.row_set import RowRange
- from google.cloud.bigtable.row_set import RowSet
+ from google.cloud.bigtable.row_set import RowRange, RowSet
row_set = RowSet()
row_range1 = RowRange(b"row_key1", b"row_key9")
@@ -203,8 +197,8 @@ def test_row_set_add_row_range_with_prefix():
def test_row_set__update_message_request():
from google.cloud._helpers import _to_bytes
- from google.cloud.bigtable.row_set import RowRange
- from google.cloud.bigtable.row_set import RowSet
+
+ from google.cloud.bigtable.row_set import RowRange, RowSet
row_set = RowSet()
table_name = "table_name"
diff --git a/packages/google-cloud-bigtable/tests/unit/v2_client/test_table.py b/packages/google-cloud-bigtable/tests/unit/v2_client/test_table.py
index 6b31a5e23148..882fbbc413d0 100644
--- a/packages/google-cloud-bigtable/tests/unit/v2_client/test_table.py
+++ b/packages/google-cloud-bigtable/tests/unit/v2_client/test_table.py
@@ -17,9 +17,9 @@
import mock
import pytest
+from google.api_core.exceptions import DeadlineExceeded
from grpc import StatusCode
-from google.api_core.exceptions import DeadlineExceeded
from ._testing import _make_credentials
PROJECT_ID = "project-id"
@@ -53,8 +53,10 @@
@mock.patch("google.cloud.bigtable.table._MAX_BULK_MUTATIONS", new=3)
def test__compile_mutation_entries_w_too_many_mutations():
from google.cloud.bigtable.row import DirectRow
- from google.cloud.bigtable.table import TooManyMutationsError
- from google.cloud.bigtable.table import _compile_mutation_entries
+ from google.cloud.bigtable.table import (
+ TooManyMutationsError,
+ _compile_mutation_entries,
+ )
table = mock.Mock(name="table", spec=["name"])
table.name = "table"
@@ -74,8 +76,7 @@ def test__compile_mutation_entries_w_too_many_mutations():
def test__compile_mutation_entries_normal():
from google.cloud.bigtable.row import DirectRow
from google.cloud.bigtable.table import _compile_mutation_entries
- from google.cloud.bigtable_v2.types import MutateRowsRequest
- from google.cloud.bigtable_v2.types import data
+ from google.cloud.bigtable_v2.types import MutateRowsRequest, data
table = mock.Mock(spec=["name"])
table.name = "table"
@@ -109,9 +110,8 @@ def test__compile_mutation_entries_normal():
def test__check_row_table_name_w_wrong_table_name():
- from google.cloud.bigtable.table import _check_row_table_name
- from google.cloud.bigtable.table import TableMismatchError
from google.cloud.bigtable.row import DirectRow
+ from google.cloud.bigtable.table import TableMismatchError, _check_row_table_name
table = mock.Mock(name="table", spec=["name"])
table.name = "table"
@@ -353,11 +353,11 @@ def _make_table_api():
def _create_table_helper(split_keys=[], column_families={}):
- from google.cloud.bigtable_admin_v2.types import table as table_pb2
+ from google.cloud.bigtable.column_family import ColumnFamily
from google.cloud.bigtable_admin_v2.types import (
bigtable_table_admin as table_admin_messages_v2_pb2,
)
- from google.cloud.bigtable.column_family import ColumnFamily
+ from google.cloud.bigtable_admin_v2.types import table as table_pb2
credentials = _make_credentials()
client = _make_client(project="project-id", credentials=credentials, admin=True)
@@ -402,9 +402,8 @@ def test_table_create_with_split_keys():
def test_table_exists_hit():
- from google.cloud.bigtable_admin_v2.types import ListTablesResponse
- from google.cloud.bigtable_admin_v2.types import Table
from google.cloud.bigtable import enums
+ from google.cloud.bigtable_admin_v2.types import ListTablesResponse, Table
credentials = _make_credentials()
client = _make_client(project="project-id", credentials=credentials, admin=True)
@@ -426,6 +425,7 @@ def test_table_exists_hit():
def test_table_exists_miss():
from google.api_core.exceptions import NotFound
+
from google.cloud.bigtable import enums
credentials = _make_credentials()
@@ -447,6 +447,7 @@ def test_table_exists_miss():
def test_table_exists_error():
from google.api_core.exceptions import BadRequest
+
from google.cloud.bigtable import enums
credentials = _make_credentials()
@@ -557,6 +558,7 @@ def test_table_get_cluster_states():
def test_table_get_encryption_info():
from google.rpc.code_pb2 import Code
+
from google.cloud.bigtable.encryption_info import EncryptionInfo
from google.cloud.bigtable.enums import EncryptionInfo as enum_crypto
from google.cloud.bigtable.enums import Table as enum_table
@@ -640,10 +642,11 @@ def _make_data_api():
def _table_read_row_helper(chunks, expected_result, app_profile_id=None):
from google.cloud._testing import _Monkey
+
from google.cloud.bigtable import table as MUT
- from google.cloud.bigtable.row_set import RowSet
- from google.cloud.bigtable.row_filters import RowSampleFilter
from google.cloud.bigtable.row_data import DEFAULT_RETRY_READ_ROWS
+ from google.cloud.bigtable.row_filters import RowSampleFilter
+ from google.cloud.bigtable.row_set import RowSet
credentials = _make_credentials()
client = _make_client(project="project-id", credentials=credentials, admin=True)
@@ -707,8 +710,7 @@ def test_table_read_row_miss_no_chunks_in_response():
def test_table_read_row_complete():
- from google.cloud.bigtable.row_data import Cell
- from google.cloud.bigtable.row_data import PartialRowData
+ from google.cloud.bigtable.row_data import Cell, PartialRowData
app_profile_id = "app-profile-id"
chunk = _ReadRowsResponseCellChunkPB(
@@ -771,6 +773,7 @@ def _table_mutate_rows_helper(
mutation_timeout=None, app_profile_id=None, retry=None, timeout=None
):
from google.rpc.status_pb2 import Status
+
from google.cloud.bigtable.table import DEFAULT_RETRY
credentials = _make_credentials()
@@ -857,9 +860,9 @@ def test_table_mutate_rows_w_mutation_timeout_and_timeout_arg():
def test_table_read_rows():
from google.cloud._testing import _Monkey
- from google.cloud.bigtable.row_data import PartialRowsData
+
from google.cloud.bigtable import table as MUT
- from google.cloud.bigtable.row_data import DEFAULT_RETRY_READ_ROWS
+ from google.cloud.bigtable.row_data import DEFAULT_RETRY_READ_ROWS, PartialRowsData
credentials = _make_credentials()
client = _make_client(project="project-id", credentials=credentials, admin=True)
@@ -1082,10 +1085,9 @@ def test_table_yield_retry_rows():
def test_table_yield_rows_with_row_set():
- from google.cloud.bigtable.row_set import RowSet
- from google.cloud.bigtable.row_set import RowRange
- from google.cloud.bigtable.table import _create_row_request
from google.cloud.bigtable.row_data import DEFAULT_RETRY_READ_ROWS
+ from google.cloud.bigtable.row_set import RowRange, RowSet
+ from google.cloud.bigtable.table import _create_row_request
credentials = _make_credentials()
client = _make_client(project="project-id", credentials=credentials, admin=True)
@@ -1261,6 +1263,7 @@ def test_table_mutations_batcher_factory():
def test_table_get_iam_policy():
from google.iam.v1 import policy_pb2
+
from google.cloud.bigtable.policy import BIGTABLE_ADMIN_ROLE
credentials = _make_credentials()
@@ -1292,8 +1295,8 @@ def test_table_get_iam_policy():
def test_table_set_iam_policy():
from google.iam.v1 import policy_pb2
- from google.cloud.bigtable.policy import Policy
- from google.cloud.bigtable.policy import BIGTABLE_ADMIN_ROLE
+
+ from google.cloud.bigtable.policy import BIGTABLE_ADMIN_ROLE, Policy
credentials = _make_credentials()
client = _make_client(project="project-id", credentials=credentials, admin=True)
@@ -1378,6 +1381,7 @@ def test_table_backup_factory_defaults():
def test_table_backup_factory_non_defaults():
import datetime
+
from google.cloud.bigtable.backup import Backup
from google.cloud.bigtable.instance import Instance
@@ -1405,11 +1409,13 @@ def test_table_backup_factory_non_defaults():
def _table_list_backups_helper(cluster_id=None, filter_=None, **kwargs):
+ from google.cloud.bigtable.backup import Backup
from google.cloud.bigtable_admin_v2.types import (
Backup as backup_pb,
+ )
+ from google.cloud.bigtable_admin_v2.types import (
bigtable_table_admin,
)
- from google.cloud.bigtable.backup import Backup
client = _make_client(
project=PROJECT_ID, credentials=_make_credentials(), admin=True
@@ -1520,9 +1526,10 @@ def _make_responses_statuses(codes):
def _make_responses(codes):
- from google.cloud.bigtable_v2.types.bigtable import MutateRowsResponse
from google.rpc.status_pb2 import Status
+ from google.cloud.bigtable_v2.types.bigtable import MutateRowsResponse
+
entries = [
MutateRowsResponse.Entry(index=i, status=Status(code=codes[i]))
for i in range(len(codes))
@@ -1645,6 +1652,7 @@ def _do_mutate_retryable_rows_helper(
mutate_rows_side_effect=None,
):
from google.api_core.exceptions import ServiceUnavailable
+
from google.cloud.bigtable.row import DirectRow
from google.cloud.bigtable.table import _BigtableRetryableError
from google.cloud.bigtable_v2.types import bigtable as data_messages_v2_pb2
@@ -1801,6 +1809,7 @@ def test_rmrw_do_mutate_retryable_rows_w_retryable_error_internal_rst_stream_err
# Raise internal server error with RST STREAM error messages
# There should be no error raised and that the request is retried
from google.api_core.exceptions import InternalServerError
+
from google.cloud.bigtable.row_data import RETRYABLE_INTERNAL_ERROR_MESSAGES
row_cells = [
@@ -2079,8 +2088,8 @@ def test__create_row_request_row_range_both_keys_inclusive():
def test__create_row_request_with_filter():
- from google.cloud.bigtable.table import _create_row_request
from google.cloud.bigtable.row_filters import RowSampleFilter
+ from google.cloud.bigtable.table import _create_row_request
table_name = "table_name"
row_filter = RowSampleFilter(0.33)
@@ -2102,8 +2111,8 @@ def test__create_row_request_with_limit():
def test__create_row_request_with_row_set():
- from google.cloud.bigtable.table import _create_row_request
from google.cloud.bigtable.row_set import RowSet
+ from google.cloud.bigtable.table import _create_row_request
table_name = "table_name"
row_set = RowSet()