diff --git a/handwritten/bigquery-storage/.OwlBot.yaml b/handwritten/bigquery-storage/.OwlBot.yaml
index e35546ed08db..7dc2a309c91f 100644
--- a/handwritten/bigquery-storage/.OwlBot.yaml
+++ b/handwritten/bigquery-storage/.OwlBot.yaml
@@ -1,10 +1,10 @@
-# Copyright 2021 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
-# http://www.apache.org/licenses/LICENSE-2.0
+# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
@@ -12,17 +12,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-deep-preserve-regex:
- - /owl-bot-staging/v1alpha2
- - /owl-bot-staging/v1beta2
-
-
-deep-remove-regex:
- - /owl-bot-staging
-
deep-copy-regex:
- - source: /google/cloud/bigquery/storage/(v.*)/.*-nodejs
- dest: /owl-bot-staging/bigquery-storage/$1
-
-begin-after-commit-hash: e0ea8b51f30e2ff6104abd1e4c8d1eb67078c86a
+ - source: /google/cloud/bigquery/storage/google-cloud-bigquery-storage-nodejs
+ dest: /owl-bot-staging/google-cloud-bigquery-storage
+api-name: storage
\ No newline at end of file
diff --git a/handwritten/bigquery-storage/.jsdoc.js b/handwritten/bigquery-storage/.jsdoc.js
index a278e26223f0..e0a978946159 100644
--- a/handwritten/bigquery-storage/.jsdoc.js
+++ b/handwritten/bigquery-storage/.jsdoc.js
@@ -1,4 +1,4 @@
-// Copyright 2025 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -40,7 +40,7 @@ module.exports = {
includePattern: '\\.js$'
},
templates: {
- copyright: 'Copyright 2025 Google LLC',
+ copyright: 'Copyright 2026 Google LLC',
includeDate: false,
sourceFiles: false,
systemName: '@google-cloud/bigquery-storage',
diff --git a/handwritten/bigquery-storage/README.md b/handwritten/bigquery-storage/README.md
index 5e9efa548ee0..bfe59af0b8fe 100644
--- a/handwritten/bigquery-storage/README.md
+++ b/handwritten/bigquery-storage/README.md
@@ -1,129 +1,22 @@
[//]: # "This README.md file is auto-generated, all changes to this file will be lost."
-[//]: # "To regenerate it, use `python -m synthtool`."
+[//]: # "The comments you see below are used to generate those parts of the template in later states."
-# [Google BigQuery Storage: Node.js Client](https://github.com/googleapis/nodejs-bigquery-storage)
+# [BigQuery Storage API: Nodejs Client][homepage]
-[](https://cloud.google.com/terms/launch-stages)
-[](https://www.npmjs.org/package/@google-cloud/bigquery-storage)
-
-
-
-
-> Node.js idiomatic client for [BigQuery Storage](https://cloud.google.com/bigquery).
-
-The BigQuery Storage product is divided into two major APIs: Write and Read API.
-BigQuery Storage API does not provide functionality related to managing BigQuery
-resources such as datasets, jobs, or tables.
-
-The BigQuery Storage Write API is a unified data-ingestion API for BigQuery.
-It combines streaming ingestion and batch loading into a single high-performance API.
-You can use the Storage Write API to stream records into BigQuery in real time or
-to batch process an arbitrarily large number of records and commit them in a single
-atomic operation.
-
-Read more in our [introduction guide](https://cloud.google.com/bigquery/docs/write-api).
-
-Using a system provided default stream, this code sample demonstrates using the
-schema of a destination stream/table to construct a writer, and send several
-batches of row data to the table.
-
-```javascript
-const {adapt, managedwriter} = require('@google-cloud/bigquery-storage');
-const {WriterClient, JSONWriter} = managedwriter;
-
-async function appendJSONRowsDefaultStream() {
- const projectId = 'my_project';
- const datasetId = 'my_dataset';
- const tableId = 'my_table';
-
- const destinationTable = `projects/${projectId}/datasets/${datasetId}/tables/${tableId}`;
- const writeClient = new WriterClient({projectId});
-
- try {
- const writeStream = await writeClient.getWriteStream({
- streamId: `${destinationTable}/streams/_default`,
- view: 'FULL'
- });
- const protoDescriptor = adapt.convertStorageSchemaToProto2Descriptor(
- writeStream.tableSchema,
- 'root'
- );
-
- const connection = await writeClient.createStreamConnection({
- streamId: managedwriter.DefaultStream,
- destinationTable,
- });
- const streamId = connection.getStreamId();
-
- const writer = new JSONWriter({
- streamId,
- connection,
- protoDescriptor,
- });
-
- let rows = [];
- const pendingWrites = [];
-
- // Row 1
- let row = {
- row_num: 1,
- customer_name: 'Octavia',
- };
- rows.push(row);
-
- // Row 2
- row = {
- row_num: 2,
- customer_name: 'Turing',
- };
- rows.push(row);
-
- // Send batch.
- let pw = writer.appendRows(rows);
- pendingWrites.push(pw);
-
- rows = [];
-
- // Row 3
- row = {
- row_num: 3,
- customer_name: 'Bell',
- };
- rows.push(row);
-
- // Send batch.
- pw = writer.appendRows(rows);
- pendingWrites.push(pw);
-
- const results = await Promise.all(
- pendingWrites.map(pw => pw.getResult())
- );
- console.log('Write results:', results);
- } catch (err) {
- console.log(err);
- } finally {
- writeClient.close();
- }
-}
-```
-
-The BigQuery Storage Read API provides fast access to BigQuery-managed storage by
-using an gRPC based protocol. When you use the Storage Read API, structured data is
-sent over the wire in a binary serialization format. This allows for additional
-parallelism among multiple consumers for a set of results.
+[//]: # "releaseLevel"
-Read more how to [use the BigQuery Storage Read API](https://cloud.google.com/bigquery/docs/reference/storage).
+[](https://www.npmjs.org/package/@google-cloud/bigquery-storage)
-See sample code on the [Quickstart section](#quickstart).
+BigQuery Storage API client for Node.js
+[//]: # "partials.introduction"
A comprehensive list of changes in each version may be found in
-[the CHANGELOG](https://github.com/googleapis/nodejs-bigquery-storage/blob/main/CHANGELOG.md).
+[the CHANGELOG][homepage_changelog].
-* [Google BigQuery Storage Node.js Client API Reference][client-docs]
-* [Google BigQuery Storage Documentation][product-docs]
-* [github.com/googleapis/nodejs-bigquery-storage](https://github.com/googleapis/nodejs-bigquery-storage)
+* [BigQuery Storage API Nodejs Client API Reference](https://cloud.google.com/nodejs/docs/reference/storage/latest)
+* [BigQuery Storage API Documentation](https://cloud.google.com/bigquery/docs/reference/storage/)
Read more about the client libraries for Cloud APIs, including the older
Google APIs Client Libraries, in [Client Libraries Explained][explained].
@@ -132,178 +25,35 @@ Google APIs Client Libraries, in [Client Libraries Explained][explained].
**Table of contents:**
-
* [Quickstart](#quickstart)
* [Before you begin](#before-you-begin)
* [Installing the client library](#installing-the-client-library)
- * [Using the client library](#using-the-client-library)
-* [Samples](#samples)
+
* [Versioning](#versioning)
* [Contributing](#contributing)
* [License](#license)
## Quickstart
-
### Before you begin
1. [Select or create a Cloud Platform project][projects].
1. [Enable billing for your project][billing].
-1. [Enable the Google BigQuery Storage API][enable_api].
+1. [Enable the BigQuery Storage API][enable_api].
1. [Set up authentication][auth] so you can access the
API from your local workstation.
-
### Installing the client library
```bash
npm install @google-cloud/bigquery-storage
```
-
-### Using the client library
-
-```javascript
-
-// The read stream contains blocks of Avro-encoded bytes. We use the
-// 'avsc' library to decode these blocks. Install avsc with the following
-// command: npm install avsc
-const avro = require('avsc');
-
-// See reference documentation at
-// https://cloud.google.com/bigquery/docs/reference/storage
-const {BigQueryReadClient} = require('@google-cloud/bigquery-storage');
-
-const client = new BigQueryReadClient();
-
-async function bigqueryStorageQuickstart() {
- // Get current project ID. The read session is created in this project.
- // This project can be different from that which contains the table.
- const myProjectId = await client.getProjectId();
-
- // This example reads baby name data from the public datasets.
- const projectId = 'bigquery-public-data';
- const datasetId = 'usa_names';
- const tableId = 'usa_1910_current';
-
- const tableReference = `projects/${projectId}/datasets/${datasetId}/tables/${tableId}`;
-
- const parent = `projects/${myProjectId}`;
-
- /* We limit the output columns to a subset of those allowed in the table,
- * and set a simple filter to only report names from the state of
- * Washington (WA).
- */
- const readOptions = {
- selectedFields: ['name', 'number', 'state'],
- rowRestriction: 'state = "WA"',
- };
-
- let tableModifiers = null;
- const snapshotSeconds = 0;
-
- // Set a snapshot time if it's been specified.
- if (snapshotSeconds > 0) {
- tableModifiers = {snapshotTime: {seconds: snapshotSeconds}};
- }
-
- // API request.
- const request = {
- parent,
- readSession: {
- table: tableReference,
- // This API can also deliver data serialized in Apache Arrow format.
- // This example leverages Apache Avro.
- dataFormat: 'AVRO',
- readOptions,
- tableModifiers,
- },
- };
-
- const [session] = await client.createReadSession(request);
-
- const schema = JSON.parse(session.avroSchema.schema);
-
- const avroType = avro.Type.forSchema(schema);
-
- /* The offset requested must be less than the last
- * row read from ReadRows. Requesting a larger offset is
- * undefined.
- */
- let offset = 0;
-
- const readRowsRequest = {
- // Required stream name and optional offset. Offset requested must be less than
- // the last row read from readRows(). Requesting a larger offset is undefined.
- readStream: session.streams[0].name,
- offset,
- };
-
- const names = new Set();
- const states = [];
-
- /* We'll use only a single stream for reading data from the table. Because
- * of dynamic sharding, this will yield all the rows in the table. However,
- * if you wanted to fan out multiple readers you could do so by having a
- * reader process each individual stream.
- */
- client
- .readRows(readRowsRequest)
- .on('error', console.error)
- .on('data', data => {
- offset = data.avroRows.serializedBinaryRows.offset;
-
- try {
- // Decode all rows in buffer
- let pos;
- do {
- const decodedData = avroType.decode(
- data.avroRows.serializedBinaryRows,
- pos,
- );
-
- if (decodedData.value) {
- names.add(decodedData.value.name);
-
- if (!states.includes(decodedData.value.state)) {
- states.push(decodedData.value.state);
- }
- }
-
- pos = decodedData.offset;
- } while (pos > 0);
- } catch (error) {
- console.log(error);
- }
- })
- .on('end', () => {
- console.log(`Got ${names.size} unique names in states: ${states}`);
- console.log(`Last offset: ${offset}`);
- });
-}
-
-```
-
-
+[//]: # "partials.body"
## Samples
-Samples are in the [`samples/`](https://github.com/googleapis/nodejs-bigquery-storage/tree/main/samples) directory. Each sample's `README.md` has instructions for running its sample.
-
-| Sample | Source Code | Try it |
-| --------------------------- | --------------------------------- | ------ |
-| Append_rows_buffered | [source code](https://github.com/googleapis/nodejs-bigquery-storage/blob/main/samples/append_rows_buffered.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery-storage&page=editor&open_in_editor=samples/append_rows_buffered.js,samples/README.md) |
-| Append_rows_json_writer_committed | [source code](https://github.com/googleapis/nodejs-bigquery-storage/blob/main/samples/append_rows_json_writer_committed.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery-storage&page=editor&open_in_editor=samples/append_rows_json_writer_committed.js,samples/README.md) |
-| Append_rows_json_writer_default | [source code](https://github.com/googleapis/nodejs-bigquery-storage/blob/main/samples/append_rows_json_writer_default.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery-storage&page=editor&open_in_editor=samples/append_rows_json_writer_default.js,samples/README.md) |
-| Append_rows_pending | [source code](https://github.com/googleapis/nodejs-bigquery-storage/blob/main/samples/append_rows_pending.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery-storage&page=editor&open_in_editor=samples/append_rows_pending.js,samples/README.md) |
-| Append_rows_proto2 | [source code](https://github.com/googleapis/nodejs-bigquery-storage/blob/main/samples/append_rows_proto2.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery-storage&page=editor&open_in_editor=samples/append_rows_proto2.js,samples/README.md) |
-| Append_rows_table_to_proto2 | [source code](https://github.com/googleapis/nodejs-bigquery-storage/blob/main/samples/append_rows_table_to_proto2.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery-storage&page=editor&open_in_editor=samples/append_rows_table_to_proto2.js,samples/README.md) |
-| Customer_record_pb | [source code](https://github.com/googleapis/nodejs-bigquery-storage/blob/main/samples/customer_record_pb.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery-storage&page=editor&open_in_editor=samples/customer_record_pb.js,samples/README.md) |
-| BigQuery Storage Quickstart | [source code](https://github.com/googleapis/nodejs-bigquery-storage/blob/main/samples/quickstart.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery-storage&page=editor&open_in_editor=samples/quickstart.js,samples/README.md) |
-| Sample_data_pb | [source code](https://github.com/googleapis/nodejs-bigquery-storage/blob/main/samples/sample_data_pb.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery-storage&page=editor&open_in_editor=samples/sample_data_pb.js,samples/README.md) |
+Samples are in the [`samples/`][homepage_samples] directory. Each sample's `README.md` has instructions for running its sample.
-
-
-The [Google BigQuery Storage Node.js Client API Reference][client-docs] documentation
-also contains samples.
+[//]: # "samples"
## Supported Node.js Versions
@@ -330,42 +80,29 @@ for versions compatible with Node.js 8.
This library follows [Semantic Versioning](http://semver.org/).
-
-
-This library is considered to be **stable**. The code surface will not change in backwards-incompatible ways
-unless absolutely necessary (e.g. because of critical security issues) or with
-an extensive deprecation period. Issues and requests against **stable** libraries
-are addressed with the highest priority.
-
-
-
-
-
-
More Information: [Google Cloud Platform Launch Stages][launch_stages]
[launch_stages]: https://cloud.google.com/terms/launch-stages
## Contributing
-Contributions welcome! See the [Contributing Guide](https://github.com/googleapis/nodejs-bigquery-storage/blob/main/CONTRIBUTING.md).
+Contributions welcome! See the [Contributing Guide](https://github.com/googleapis/google-cloud-node/blob/main/CONTRIBUTING.md).
-Please note that this `README.md`, the `samples/README.md`,
+Please note that this `README.md`
and a variety of configuration files in this repository (including `.nycrc` and `tsconfig.json`)
-are generated from a central template. To edit one of these files, make an edit
-to its templates in
-[directory](https://github.com/googleapis/synthtool).
+are generated from a central template.
## License
Apache Version 2.0
-See [LICENSE](https://github.com/googleapis/nodejs-bigquery-storage/blob/main/LICENSE)
+See [LICENSE](https://github.com/googleapis/google-cloud-node/blob/main/LICENSE)
-[client-docs]: https://cloud.google.com/nodejs/docs/reference/bigquery-storage/latest
-[product-docs]: https://cloud.google.com/bigquery/docs/reference/storage
[shell_img]: https://gstatic.com/cloudssh/images/open-btn.png
[projects]: https://console.cloud.google.com/project
[billing]: https://support.google.com/cloud/answer/6293499#enable-billing
[enable_api]: https://console.cloud.google.com/flows/enableapi?apiid=bigquerystorage.googleapis.com
-[auth]: https://cloud.google.com/docs/authentication/external/set-up-adc-local
\ No newline at end of file
+[auth]: https://cloud.google.com/docs/authentication/external/set-up-adc-local
+[homepage_samples]: https://github.com/googleapis/google-cloud-node/blob/main/packages/google-cloud-bigquery-storage/samples
+[homepage_changelog]: https://github.com/googleapis/google-cloud-node/blob/main/packages/google-cloud-bigquery-storage/CHANGELOG.md
+[homepage]: https://github.com/googleapis/google-cloud-node/blob/main/packages/google-cloud-bigquery-storage
diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta2/arrow.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta2/arrow.proto
new file mode 100644
index 000000000000..7d17d559e244
--- /dev/null
+++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta2/arrow.proto
@@ -0,0 +1,57 @@
+// Copyright 2025 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+syntax = "proto3";
+
+package google.cloud.bigquery.storage.v1beta2;
+
+option go_package = "cloud.google.com/go/bigquery/storage/apiv1beta2/storagepb;storagepb";
+option java_multiple_files = true;
+option java_outer_classname = "ArrowProto";
+option java_package = "com.google.cloud.bigquery.storage.v1beta2";
+
+// Arrow schema as specified in
+// https://arrow.apache.org/docs/python/api/datatypes.html
+// and serialized to bytes using IPC:
+// https://arrow.apache.org/docs/format/Columnar.html#serialization-and-interprocess-communication-ipc
+//
+// See code samples on how this message can be deserialized.
+message ArrowSchema {
+ // IPC serialized Arrow schema.
+ bytes serialized_schema = 1;
+}
+
+// Arrow RecordBatch.
+message ArrowRecordBatch {
+ // IPC-serialized Arrow RecordBatch.
+ bytes serialized_record_batch = 1;
+}
+
+// Contains options specific to Arrow Serialization.
+message ArrowSerializationOptions {
+ // The IPC format to use when serializing Arrow streams.
+ enum Format {
+    // If unspecified, the IPC format as of the 0.15 release will be used.
+ FORMAT_UNSPECIFIED = 0;
+
+ // Use the legacy IPC message format as of Apache Arrow Release 0.14.
+ ARROW_0_14 = 1;
+
+ // Use the message format as of Apache Arrow Release 0.15.
+ ARROW_0_15 = 2;
+ }
+
+ // The Arrow IPC format to use.
+ Format format = 1;
+}
diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta2/avro.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta2/avro.proto
new file mode 100644
index 000000000000..bd48a5cd0d80
--- /dev/null
+++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta2/avro.proto
@@ -0,0 +1,35 @@
+// Copyright 2025 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+syntax = "proto3";
+
+package google.cloud.bigquery.storage.v1beta2;
+
+option go_package = "cloud.google.com/go/bigquery/storage/apiv1beta2/storagepb;storagepb";
+option java_multiple_files = true;
+option java_outer_classname = "AvroProto";
+option java_package = "com.google.cloud.bigquery.storage.v1beta2";
+
+// Avro schema.
+message AvroSchema {
+ // Json serialized schema, as described at
+ // https://avro.apache.org/docs/1.8.1/spec.html.
+ string schema = 1;
+}
+
+// Avro rows.
+message AvroRows {
+ // Binary serialized rows in a block.
+ bytes serialized_binary_rows = 1;
+}
diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta2/protobuf.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta2/protobuf.proto
new file mode 100644
index 000000000000..cdc77e7e6618
--- /dev/null
+++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta2/protobuf.proto
@@ -0,0 +1,40 @@
+// Copyright 2025 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+syntax = "proto3";
+
+package google.cloud.bigquery.storage.v1beta2;
+
+import "google/protobuf/descriptor.proto";
+
+option go_package = "cloud.google.com/go/bigquery/storage/apiv1beta2/storagepb;storagepb";
+option java_multiple_files = true;
+option java_outer_classname = "ProtoBufProto";
+option java_package = "com.google.cloud.bigquery.storage.v1beta2";
+
+// ProtoSchema describes the schema of the serialized protocol buffer data rows.
+message ProtoSchema {
+ // Descriptor for input message. The descriptor has to be self contained,
+ // including all the nested types, excepted for proto buffer well known types
+ // (https://developers.google.com/protocol-buffers/docs/reference/google.protobuf).
+ google.protobuf.DescriptorProto proto_descriptor = 1;
+}
+
+message ProtoRows {
+ // A sequence of rows serialized as a Protocol Buffer.
+ //
+ // See https://developers.google.com/protocol-buffers/docs/overview for more
+ // information on deserializing this field.
+ repeated bytes serialized_rows = 1;
+}
diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta2/storage.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta2/storage.proto
new file mode 100644
index 000000000000..35fb37a8202c
--- /dev/null
+++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta2/storage.proto
@@ -0,0 +1,577 @@
+// Copyright 2025 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+syntax = "proto3";
+
+package google.cloud.bigquery.storage.v1beta2;
+
+import "google/api/annotations.proto";
+import "google/api/client.proto";
+import "google/api/field_behavior.proto";
+import "google/api/resource.proto";
+import "google/cloud/bigquery/storage/v1beta2/arrow.proto";
+import "google/cloud/bigquery/storage/v1beta2/avro.proto";
+import "google/cloud/bigquery/storage/v1beta2/protobuf.proto";
+import "google/cloud/bigquery/storage/v1beta2/stream.proto";
+import "google/cloud/bigquery/storage/v1beta2/table.proto";
+import "google/protobuf/timestamp.proto";
+import "google/protobuf/wrappers.proto";
+import "google/rpc/status.proto";
+
+option go_package = "cloud.google.com/go/bigquery/storage/apiv1beta2/storagepb;storagepb";
+option java_multiple_files = true;
+option java_outer_classname = "StorageProto";
+option java_package = "com.google.cloud.bigquery.storage.v1beta2";
+
+// BigQuery Read API.
+//
+// The Read API can be used to read data from BigQuery.
+//
+// New code should use the v1 Read API going forward, if they don't use Write
+// API at the same time.
+service BigQueryRead {
+ option (google.api.default_host) = "bigquerystorage.googleapis.com";
+ option (google.api.oauth_scopes) =
+ "https://www.googleapis.com/auth/bigquery,"
+ "https://www.googleapis.com/auth/cloud-platform";
+
+ // Creates a new read session. A read session divides the contents of a
+ // BigQuery table into one or more streams, which can then be used to read
+ // data from the table. The read session also specifies properties of the
+ // data to be read, such as a list of columns or a push-down filter describing
+ // the rows to be returned.
+ //
+ // A particular row can be read by at most one stream. When the caller has
+ // reached the end of each stream in the session, then all the data in the
+ // table has been read.
+ //
+ // Data is assigned to each stream such that roughly the same number of
+ // rows can be read from each stream. Because the server-side unit for
+ // assigning data is collections of rows, the API does not guarantee that
+  // each stream will return the same number of rows. Additionally, the
+ // limits are enforced based on the number of pre-filtered rows, so some
+ // filters can lead to lopsided assignments.
+ //
+ // Read sessions automatically expire 6 hours after they are created and do
+ // not require manual clean-up by the caller.
+ rpc CreateReadSession(CreateReadSessionRequest) returns (ReadSession) {
+ option (google.api.http) = {
+ post: "/v1beta2/{read_session.table=projects/*/datasets/*/tables/*}"
+ body: "*"
+ };
+ option (google.api.method_signature) =
+ "parent,read_session,max_stream_count";
+ }
+
+ // Reads rows from the stream in the format prescribed by the ReadSession.
+ // Each response contains one or more table rows, up to a maximum of 100 MiB
+ // per response; read requests which attempt to read individual rows larger
+ // than 100 MiB will fail.
+ //
+ // Each request also returns a set of stream statistics reflecting the current
+ // state of the stream.
+ rpc ReadRows(ReadRowsRequest) returns (stream ReadRowsResponse) {
+ option (google.api.http) = {
+ get: "/v1beta2/{read_stream=projects/*/locations/*/sessions/*/streams/*}"
+ };
+ option (google.api.method_signature) = "read_stream,offset";
+ }
+
+ // Splits a given `ReadStream` into two `ReadStream` objects. These
+ // `ReadStream` objects are referred to as the primary and the residual
+ // streams of the split. The original `ReadStream` can still be read from in
+ // the same manner as before. Both of the returned `ReadStream` objects can
+ // also be read from, and the rows returned by both child streams will be
+ // the same as the rows read from the original stream.
+ //
+ // Moreover, the two child streams will be allocated back-to-back in the
+ // original `ReadStream`. Concretely, it is guaranteed that for streams
+ // original, primary, and residual, that original[0-j] = primary[0-j] and
+ // original[j-n] = residual[0-m] once the streams have been read to
+ // completion.
+ rpc SplitReadStream(SplitReadStreamRequest)
+ returns (SplitReadStreamResponse) {
+ option (google.api.http) = {
+ get: "/v1beta2/{name=projects/*/locations/*/sessions/*/streams/*}"
+ };
+ }
+}
+
+// BigQuery Write API.
+//
+// The Write API can be used to write data to BigQuery.
+//
+//
+// The [google.cloud.bigquery.storage.v1
+// API](/bigquery/docs/reference/storage/rpc/google.cloud.bigquery.storage.v1)
+// should be used instead of the v1beta2 API for BigQueryWrite operations.
+service BigQueryWrite {
+ option deprecated = true;
+ option (google.api.default_host) = "bigquerystorage.googleapis.com";
+ option (google.api.oauth_scopes) =
+ "https://www.googleapis.com/auth/bigquery,"
+ "https://www.googleapis.com/auth/bigquery.insertdata,"
+ "https://www.googleapis.com/auth/cloud-platform";
+
+ // Creates a write stream to the given table.
+ // Additionally, every table has a special COMMITTED stream named '_default'
+ // to which data can be written. This stream doesn't need to be created using
+ // CreateWriteStream. It is a stream that can be used simultaneously by any
+ // number of clients. Data written to this stream is considered committed as
+ // soon as an acknowledgement is received.
+ rpc CreateWriteStream(CreateWriteStreamRequest) returns (WriteStream) {
+ option deprecated = true;
+ option (google.api.http) = {
+ post: "/v1beta2/{parent=projects/*/datasets/*/tables/*}"
+ body: "write_stream"
+ };
+ option (google.api.method_signature) = "parent,write_stream";
+ }
+
+ // Appends data to the given stream.
+ //
+ // If `offset` is specified, the `offset` is checked against the end of
+ // stream. The server returns `OUT_OF_RANGE` in `AppendRowsResponse` if an
+ // attempt is made to append to an offset beyond the current end of the stream
+  // or `ALREADY_EXISTS` if user provides an `offset` that has already been
+ // written to. User can retry with adjusted offset within the same RPC
+ // stream. If `offset` is not specified, append happens at the end of the
+ // stream.
+ //
+ // The response contains the offset at which the append happened. Responses
+ // are received in the same order in which requests are sent. There will be
+ // one response for each successful request. If the `offset` is not set in
+ // response, it means append didn't happen due to some errors. If one request
+ // fails, all the subsequent requests will also fail until a success request
+ // is made again.
+ //
+ // If the stream is of `PENDING` type, data will only be available for read
+ // operations after the stream is committed.
+ rpc AppendRows(stream AppendRowsRequest) returns (stream AppendRowsResponse) {
+ option deprecated = true;
+ option (google.api.http) = {
+ post: "/v1beta2/{write_stream=projects/*/datasets/*/tables/*/streams/*}"
+ body: "*"
+ };
+ option (google.api.method_signature) = "write_stream";
+ }
+
+ // Gets a write stream.
+ rpc GetWriteStream(GetWriteStreamRequest) returns (WriteStream) {
+ option deprecated = true;
+ option (google.api.http) = {
+ post: "/v1beta2/{name=projects/*/datasets/*/tables/*/streams/*}"
+ body: "*"
+ };
+ option (google.api.method_signature) = "name";
+ }
+
+ // Finalize a write stream so that no new data can be appended to the
+ // stream. Finalize is not supported on the '_default' stream.
+ rpc FinalizeWriteStream(FinalizeWriteStreamRequest)
+ returns (FinalizeWriteStreamResponse) {
+ option deprecated = true;
+ option (google.api.http) = {
+ post: "/v1beta2/{name=projects/*/datasets/*/tables/*/streams/*}"
+ body: "*"
+ };
+ option (google.api.method_signature) = "name";
+ }
+
+ // Atomically commits a group of `PENDING` streams that belong to the same
+ // `parent` table.
+ // Streams must be finalized before commit and cannot be committed multiple
+ // times. Once a stream is committed, data in the stream becomes available
+ // for read operations.
+ rpc BatchCommitWriteStreams(BatchCommitWriteStreamsRequest)
+ returns (BatchCommitWriteStreamsResponse) {
+ option deprecated = true;
+ option (google.api.http) = {
+ get: "/v1beta2/{parent=projects/*/datasets/*/tables/*}"
+ };
+ option (google.api.method_signature) = "parent";
+ }
+
+ // Flushes rows to a BUFFERED stream.
+ // If users are appending rows to BUFFERED stream, flush operation is
+ // required in order for the rows to become available for reading. A
+ // Flush operation flushes up to any previously flushed offset in a BUFFERED
+ // stream, to the offset specified in the request.
+ // Flush is not supported on the _default stream, since it is not BUFFERED.
+ rpc FlushRows(FlushRowsRequest) returns (FlushRowsResponse) {
+ option deprecated = true;
+ option (google.api.http) = {
+ post: "/v1beta2/{write_stream=projects/*/datasets/*/tables/*/streams/*}"
+ body: "*"
+ };
+ option (google.api.method_signature) = "write_stream";
+ }
+}
+
+// Request message for `CreateReadSession`.
+message CreateReadSessionRequest {
+ // Required. The request project that owns the session, in the form of
+ // `projects/{project_id}`.
+ string parent = 1 [
+ (google.api.field_behavior) = REQUIRED,
+ (google.api.resource_reference) = {
+ type: "cloudresourcemanager.googleapis.com/Project"
+ }
+ ];
+
+ // Required. Session to be created.
+ ReadSession read_session = 2 [(google.api.field_behavior) = REQUIRED];
+
+ // Max initial number of streams. If unset or zero, the server will
+ // provide a value of streams so as to produce reasonable throughput. Must be
+ // non-negative. The number of streams may be lower than the requested number,
+ // depending on the amount of parallelism that is reasonable for the table. Error
+ // will be returned if the max count is greater than the current system
+ // max limit of 1,000.
+ //
+ // Streams must be read starting from offset 0.
+ int32 max_stream_count = 3;
+}
+
+// Request message for `ReadRows`.
+message ReadRowsRequest {
+ // Required. Stream to read rows from.
+ string read_stream = 1 [
+ (google.api.field_behavior) = REQUIRED,
+ (google.api.resource_reference) = {
+ type: "bigquerystorage.googleapis.com/ReadStream"
+ }
+ ];
+
+ // The offset requested must be less than the last row read from Read.
+ // Requesting a larger offset is undefined. If not specified, start reading
+ // from offset zero.
+ int64 offset = 2;
+}
+
+// Information on if the current connection is being throttled.
+message ThrottleState {
+ // How much this connection is being throttled. Zero means no throttling,
+ // 100 means fully throttled.
+ int32 throttle_percent = 1;
+}
+
+// Estimated stream statistics for a given Stream.
+message StreamStats {
+ message Progress {
+ // The fraction of rows assigned to the stream that have been processed by
+ // the server so far, not including the rows in the current response
+ // message.
+ //
+ // This value, along with `at_response_end`, can be used to interpolate
+ // the progress made as the rows in the message are being processed using
+ // the following formula: `at_response_start + (at_response_end -
+ // at_response_start) * rows_processed_from_response / rows_in_response`.
+ //
+ // Note that if a filter is provided, the `at_response_end` value of the
+ // previous response may not necessarily be equal to the
+ // `at_response_start` value of the current response.
+ double at_response_start = 1;
+
+ // Similar to `at_response_start`, except that this value includes the
+ // rows in the current response.
+ double at_response_end = 2;
+ }
+
+ // Represents the progress of the current stream.
+ Progress progress = 2;
+}
+
+// Response from calling `ReadRows` may include row data, progress and
+// throttling information.
+message ReadRowsResponse {
+ // Row data is returned in format specified during session creation.
+ oneof rows {
+ // Serialized row data in AVRO format.
+ AvroRows avro_rows = 3;
+
+ // Serialized row data in Arrow RecordBatch format.
+ ArrowRecordBatch arrow_record_batch = 4;
+ }
+
+ // Number of serialized rows in the rows block.
+ int64 row_count = 6;
+
+ // Statistics for the stream.
+ StreamStats stats = 2;
+
+ // Throttling state. If unset, the latest response still describes
+ // the current throttling status.
+ ThrottleState throttle_state = 5;
+
+ // The schema for the read. If read_options.selected_fields is set, the
+ // schema may be different from the table schema as it will only contain
+ // the selected fields. This schema is equivalent to the one returned by
+ // CreateSession. This field is only populated in the first ReadRowsResponse
+ // RPC.
+ oneof schema {
+ // Output only. Avro schema.
+ AvroSchema avro_schema = 7 [(google.api.field_behavior) = OUTPUT_ONLY];
+
+ // Output only. Arrow schema.
+ ArrowSchema arrow_schema = 8 [(google.api.field_behavior) = OUTPUT_ONLY];
+ }
+}
+
+// Request message for `SplitReadStream`.
+message SplitReadStreamRequest {
+ // Required. Name of the stream to split.
+ string name = 1 [
+ (google.api.field_behavior) = REQUIRED,
+ (google.api.resource_reference) = {
+ type: "bigquerystorage.googleapis.com/ReadStream"
+ }
+ ];
+
+ // A value in the range (0.0, 1.0) that specifies the fractional point at
+ // which the original stream should be split. The actual split point is
+ // evaluated on pre-filtered rows, so if a filter is provided, then there is
+ // no guarantee that the division of the rows between the new child streams
+ // will be proportional to this fractional value. Additionally, because the
+ // server-side unit for assigning data is collections of rows, this fraction
+ // will always map to a data storage boundary on the server side.
+ double fraction = 2;
+}
+
+message SplitReadStreamResponse {
+ // Primary stream, which contains the beginning portion of
+ // |original_stream|. An empty value indicates that the original stream can no
+ // longer be split.
+ ReadStream primary_stream = 1;
+
+ // Remainder stream, which contains the tail of |original_stream|. An empty
+ // value indicates that the original stream can no longer be split.
+ ReadStream remainder_stream = 2;
+}
+
+// Request message for `CreateWriteStream`.
+message CreateWriteStreamRequest {
+ // Required. Reference to the table to which the stream belongs, in the format
+ // of `projects/{project}/datasets/{dataset}/tables/{table}`.
+ string parent = 1 [
+ (google.api.field_behavior) = REQUIRED,
+ (google.api.resource_reference) = { type: "bigquery.googleapis.com/Table" }
+ ];
+
+ // Required. Stream to be created.
+ WriteStream write_stream = 2 [(google.api.field_behavior) = REQUIRED];
+}
+
+// Request message for `AppendRows`.
+message AppendRowsRequest {
+ // Proto schema and data.
+ message ProtoData {
+ // Proto schema used to serialize the data.
+ ProtoSchema writer_schema = 1;
+
+ // Serialized row data in protobuf message format.
+ ProtoRows rows = 2;
+ }
+
+ // Required. The stream that is the target of the append operation. This value
+ // must be specified for the initial request. If subsequent requests specify
+ // the stream name, it must equal to the value provided in the first request.
+ // To write to the _default stream, populate this field with a string in the
+ // format `projects/{project}/datasets/{dataset}/tables/{table}/_default`.
+ string write_stream = 1 [
+ (google.api.field_behavior) = REQUIRED,
+ (google.api.resource_reference) = {
+ type: "bigquerystorage.googleapis.com/WriteStream"
+ }
+ ];
+
+ // If present, the write is only performed if the next append offset is same
+ // as the provided value. If not present, the write is performed at the
+ // current end of stream. Specifying a value for this field is not allowed
+ // when calling AppendRows for the '_default' stream.
+ google.protobuf.Int64Value offset = 2;
+
+ // Input rows. The `writer_schema` field must be specified at the initial
+ // request and currently, it will be ignored if specified in following
+ // requests. Following requests must have data in the same format as the
+ // initial request.
+ oneof rows {
+ // Rows in proto format.
+ ProtoData proto_rows = 4;
+ }
+
+ // Id set by client to annotate its identity. Only initial request setting is
+ // respected.
+ string trace_id = 6;
+}
+
+// Response message for `AppendRows`.
+message AppendRowsResponse {
+ // AppendResult is returned for successful append requests.
+ message AppendResult {
+ // The row offset at which the last append occurred. The offset will not be
+ // set if appending using default streams.
+ google.protobuf.Int64Value offset = 1;
+ }
+
+ oneof response {
+ // Result if the append is successful.
+ AppendResult append_result = 1;
+
+ // Error returned when problems were encountered. If present,
+ // it indicates rows were not accepted into the system.
+ // Users can retry or continue with other append requests within the
+ // same connection.
+ //
+ // Additional information about error signalling:
+ //
+ // ALREADY_EXISTS: Happens when an append specified an offset, and the
+ // backend already has received data at this offset. Typically encountered
+ // in retry scenarios, and can be ignored.
+ //
+ // OUT_OF_RANGE: Returned when the specified offset in the stream is beyond
+ // the current end of the stream.
+ //
+ // INVALID_ARGUMENT: Indicates a malformed request or data.
+ //
+ // ABORTED: Request processing is aborted because of prior failures. The
+ // request can be retried if previous failure is addressed.
+ //
+ // INTERNAL: Indicates server side error(s) that can be retried.
+ google.rpc.Status error = 2;
+ }
+
+ // If backend detects a schema update, pass it to user so that user can
+ // use it to input new type of message. It will be empty when no schema
+ // updates have occurred.
+ TableSchema updated_schema = 3;
+}
+
+// Request message for `GetWriteStream`.
+message GetWriteStreamRequest {
+ // Required. Name of the stream to get, in the form of
+ // `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`.
+ string name = 1 [
+ (google.api.field_behavior) = REQUIRED,
+ (google.api.resource_reference) = {
+ type: "bigquerystorage.googleapis.com/WriteStream"
+ }
+ ];
+}
+
+// Request message for `BatchCommitWriteStreams`.
+message BatchCommitWriteStreamsRequest {
+ // Required. Parent table that all the streams should belong to, in the form
+ // of `projects/{project}/datasets/{dataset}/tables/{table}`.
+ string parent = 1 [(google.api.field_behavior) = REQUIRED];
+
+ // Required. The group of streams that will be committed atomically.
+ repeated string write_streams = 2 [(google.api.field_behavior) = REQUIRED];
+}
+
+// Response message for `BatchCommitWriteStreams`.
+message BatchCommitWriteStreamsResponse {
+ // The time at which streams were committed in microseconds granularity.
+ // This field will only exist when there are no stream errors.
+ // **Note** if this field is not set, it means the commit was not successful.
+ google.protobuf.Timestamp commit_time = 1;
+
+ // Stream level error if commit failed. Only streams with error will be in
+ // the list.
+ // If empty, there is no error and all streams are committed successfully.
+ // If non empty, certain streams have errors and ZERO stream is committed due
+ // to atomicity guarantee.
+ repeated StorageError stream_errors = 2;
+}
+
+// Request message for invoking `FinalizeWriteStream`.
+message FinalizeWriteStreamRequest {
+ // Required. Name of the stream to finalize, in the form of
+ // `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`.
+ string name = 1 [
+ (google.api.field_behavior) = REQUIRED,
+ (google.api.resource_reference) = {
+ type: "bigquerystorage.googleapis.com/WriteStream"
+ }
+ ];
+}
+
+// Response message for `FinalizeWriteStream`.
+message FinalizeWriteStreamResponse {
+ // Number of rows in the finalized stream.
+ int64 row_count = 1;
+}
+
+// Request message for `FlushRows`.
+message FlushRowsRequest {
+ // Required. The stream that is the target of the flush operation.
+ string write_stream = 1 [
+ (google.api.field_behavior) = REQUIRED,
+ (google.api.resource_reference) = {
+ type: "bigquerystorage.googleapis.com/WriteStream"
+ }
+ ];
+
+ // Ending offset of the flush operation. Rows before this offset (including
+ // this offset) will be flushed.
+ google.protobuf.Int64Value offset = 2;
+}
+
+// Response message for `FlushRows`.
+message FlushRowsResponse {
+ // The rows before this offset (including this offset) are flushed.
+ int64 offset = 1;
+}
+
+// Structured custom BigQuery Storage error message. The error can be attached
+// as error details in the returned rpc Status. In particular, the use of error
+// codes allows more structured error handling, and reduces the need to evaluate
+// unstructured error text strings.
+message StorageError {
+ // Error code for `StorageError`.
+ enum StorageErrorCode {
+ // Default error.
+ STORAGE_ERROR_CODE_UNSPECIFIED = 0;
+
+ // Table is not found in the system.
+ TABLE_NOT_FOUND = 1;
+
+ // Stream is already committed.
+ STREAM_ALREADY_COMMITTED = 2;
+
+ // Stream is not found.
+ STREAM_NOT_FOUND = 3;
+
+ // Invalid Stream type.
+ // For example, you try to commit a stream that is not pending.
+ INVALID_STREAM_TYPE = 4;
+
+ // Invalid Stream state.
+ // For example, you try to commit a stream that is not finalized or is
+ // garbage collected.
+ INVALID_STREAM_STATE = 5;
+
+ // Stream is finalized.
+ STREAM_FINALIZED = 6;
+ }
+
+ // BigQuery Storage specific error code.
+ StorageErrorCode code = 1;
+
+ // Name of the failed entity.
+ string entity = 2;
+
+ // Message that describes the error.
+ string error_message = 3;
+}
diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta2/stream.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta2/stream.proto
new file mode 100644
index 000000000000..c2d6d7b6a93a
--- /dev/null
+++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta2/stream.proto
@@ -0,0 +1,191 @@
+// Copyright 2025 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+syntax = "proto3";
+
+package google.cloud.bigquery.storage.v1beta2;
+
+import "google/api/field_behavior.proto";
+import "google/api/resource.proto";
+import "google/cloud/bigquery/storage/v1beta2/arrow.proto";
+import "google/cloud/bigquery/storage/v1beta2/avro.proto";
+import "google/cloud/bigquery/storage/v1beta2/table.proto";
+import "google/protobuf/timestamp.proto";
+
+option go_package = "cloud.google.com/go/bigquery/storage/apiv1beta2/storagepb;storagepb";
+option java_multiple_files = true;
+option java_outer_classname = "StreamProto";
+option java_package = "com.google.cloud.bigquery.storage.v1beta2";
+option (google.api.resource_definition) = {
+ type: "bigquery.googleapis.com/Table"
+ pattern: "projects/{project}/datasets/{dataset}/tables/{table}"
+};
+
+// Data format for input or output data.
+enum DataFormat {
+ DATA_FORMAT_UNSPECIFIED = 0;
+
+ // Avro is a standard open source row based file format.
+ // See https://avro.apache.org/ for more details.
+ AVRO = 1;
+
+ // Arrow is a standard open source column-based message format.
+ // See https://arrow.apache.org/ for more details.
+ ARROW = 2;
+}
+
+// Information about the ReadSession.
+message ReadSession {
+ option (google.api.resource) = {
+ type: "bigquerystorage.googleapis.com/ReadSession"
+ pattern: "projects/{project}/locations/{location}/sessions/{session}"
+ };
+
+ // Additional attributes when reading a table.
+ message TableModifiers {
+ // The snapshot time of the table. If not set, interpreted as now.
+ google.protobuf.Timestamp snapshot_time = 1;
+ }
+
+ // Options dictating how we read a table.
+ message TableReadOptions {
+ // Names of the fields in the table that should be read. If empty, all
+ // fields will be read. If the specified field is a nested field, all
+ // the sub-fields in the field will be selected. The output field order is
+ // unrelated to the order of fields in selected_fields.
+ repeated string selected_fields = 1;
+
+ // SQL text filtering statement, similar to a WHERE clause in a query.
+ // Aggregates are not supported.
+ //
+ // Examples: "int_field > 5"
+ // "date_field = CAST('2014-9-27' as DATE)"
+ // "nullable_field is not NULL"
+ // "st_equals(geo_field, st_geofromtext("POINT(2, 2)"))"
+ // "numeric_field BETWEEN 1.0 AND 5.0"
+ //
+ // Restricted to a maximum length of 1 MB.
+ string row_restriction = 2;
+
+ // Optional. Options specific to the Apache Arrow output format.
+ ArrowSerializationOptions arrow_serialization_options = 3 [(google.api.field_behavior) = OPTIONAL];
+ }
+
+ // Output only. Unique identifier for the session, in the form
+ // `projects/{project_id}/locations/{location}/sessions/{session_id}`.
+ string name = 1 [(google.api.field_behavior) = OUTPUT_ONLY];
+
+ // Output only. Time at which the session becomes invalid. After this time, subsequent
+ // requests to read this Session will return errors. The expire_time is
+ // automatically assigned and currently cannot be specified or updated.
+ google.protobuf.Timestamp expire_time = 2 [(google.api.field_behavior) = OUTPUT_ONLY];
+
+ // Immutable. Data format of the output data.
+ DataFormat data_format = 3 [(google.api.field_behavior) = IMMUTABLE];
+
+ // The schema for the read. If read_options.selected_fields is set, the
+ // schema may be different from the table schema as it will only contain
+ // the selected fields.
+ oneof schema {
+ // Output only. Avro schema.
+ AvroSchema avro_schema = 4 [(google.api.field_behavior) = OUTPUT_ONLY];
+
+ // Output only. Arrow schema.
+ ArrowSchema arrow_schema = 5 [(google.api.field_behavior) = OUTPUT_ONLY];
+ }
+
+ // Immutable. Table that this ReadSession is reading from, in the form
+ // `projects/{project_id}/datasets/{dataset_id}/tables/{table_id}`
+ string table = 6 [
+ (google.api.field_behavior) = IMMUTABLE,
+ (google.api.resource_reference) = {
+ type: "bigquery.googleapis.com/Table"
+ }
+ ];
+
+ // Optional. Any modifiers which are applied when reading from the specified table.
+ TableModifiers table_modifiers = 7 [(google.api.field_behavior) = OPTIONAL];
+
+ // Optional. Read options for this session (e.g. column selection, filters).
+ TableReadOptions read_options = 8 [(google.api.field_behavior) = OPTIONAL];
+
+ // Output only. A list of streams created with the session.
+ //
+ // At least one stream is created with the session. In the future, larger
+ // request_stream_count values *may* result in this list being unpopulated,
+ // in that case, the user will need to use a List method to get the streams
+ // instead, which is not yet available.
+ repeated ReadStream streams = 10 [(google.api.field_behavior) = OUTPUT_ONLY];
+}
+
+// Information about a single stream that gets data out of the storage system.
+// Most of the information about `ReadStream` instances is aggregated, making
+// `ReadStream` lightweight.
+message ReadStream {
+ option (google.api.resource) = {
+ type: "bigquerystorage.googleapis.com/ReadStream"
+ pattern: "projects/{project}/locations/{location}/sessions/{session}/streams/{stream}"
+ };
+
+ // Output only. Name of the stream, in the form
+ // `projects/{project_id}/locations/{location}/sessions/{session_id}/streams/{stream_id}`.
+ string name = 1 [(google.api.field_behavior) = OUTPUT_ONLY];
+}
+
+// Information about a single stream that gets data inside the storage system.
+message WriteStream {
+ option (google.api.resource) = {
+ type: "bigquerystorage.googleapis.com/WriteStream"
+ pattern: "projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}"
+ };
+
+ // Type enum of the stream.
+ enum Type {
+ // Unknown type.
+ TYPE_UNSPECIFIED = 0;
+
+ // Data will commit automatically and appear as soon as the write is
+ // acknowledged.
+ COMMITTED = 1;
+
+ // Data is invisible until the stream is committed.
+ PENDING = 2;
+
+ // Data is only visible up to the offset to which it was flushed.
+ BUFFERED = 3;
+ }
+
+ // Output only. Name of the stream, in the form
+ // `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`.
+ string name = 1 [(google.api.field_behavior) = OUTPUT_ONLY];
+
+ // Immutable. Type of the stream.
+ Type type = 2 [(google.api.field_behavior) = IMMUTABLE];
+
+ // Output only. Create time of the stream. For the _default stream, this is the
+ // creation_time of the table.
+ google.protobuf.Timestamp create_time = 3 [(google.api.field_behavior) = OUTPUT_ONLY];
+
+ // Output only. Commit time of the stream.
+ // If a stream is of `COMMITTED` type, then it will have a commit_time same as
+ // `create_time`. If the stream is of `PENDING` type, commit_time being empty
+ // means it is not committed.
+ google.protobuf.Timestamp commit_time = 4 [(google.api.field_behavior) = OUTPUT_ONLY];
+
+ // Output only. The schema of the destination table. It is only returned in
+ // `CreateWriteStream` response. Caller should generate data that's
+ // compatible with this schema to send in initial `AppendRowsRequest`.
+ // The table schema could go out of date during the life time of the stream.
+ TableSchema table_schema = 5 [(google.api.field_behavior) = OUTPUT_ONLY];
+}
diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta2/table.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta2/table.proto
new file mode 100644
index 000000000000..3dd27cf0f344
--- /dev/null
+++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta2/table.proto
@@ -0,0 +1,111 @@
+// Copyright 2025 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+syntax = "proto3";
+
+package google.cloud.bigquery.storage.v1beta2;
+
+import "google/api/field_behavior.proto";
+
+option go_package = "cloud.google.com/go/bigquery/storage/apiv1beta2/storagepb;storagepb";
+option java_multiple_files = true;
+option java_outer_classname = "TableProto";
+option java_package = "com.google.cloud.bigquery.storage.v1beta2";
+
+// Schema of a table
+message TableSchema {
+ // Describes the fields in a table.
+ repeated TableFieldSchema fields = 1;
+}
+
+// A field in TableSchema
+message TableFieldSchema {
+ enum Type {
+ // Illegal value
+ TYPE_UNSPECIFIED = 0;
+
+ // 64K, UTF8
+ STRING = 1;
+
+ // 64-bit signed
+ INT64 = 2;
+
+ // 64-bit IEEE floating point
+ DOUBLE = 3;
+
+ // Aggregate type
+ STRUCT = 4;
+
+ // 64K, Binary
+ BYTES = 5;
+
+ // 2-valued
+ BOOL = 6;
+
+ // 64-bit signed usec since UTC epoch
+ TIMESTAMP = 7;
+
+ // Civil date - Year, Month, Day
+ DATE = 8;
+
+ // Civil time - Hour, Minute, Second, Microseconds
+ TIME = 9;
+
+ // Combination of civil date and civil time
+ DATETIME = 10;
+
+ // Geography object
+ GEOGRAPHY = 11;
+
+ // Numeric value
+ NUMERIC = 12;
+
+ // BigNumeric value
+ BIGNUMERIC = 13;
+
+ // Interval
+ INTERVAL = 14;
+
+ // JSON, String
+ JSON = 15;
+ }
+
+ enum Mode {
+ // Illegal value
+ MODE_UNSPECIFIED = 0;
+
+ NULLABLE = 1;
+
+ REQUIRED = 2;
+
+ REPEATED = 3;
+ }
+
+ // Required. The field name. The name must contain only letters (a-z, A-Z),
+ // numbers (0-9), or underscores (_), and must start with a letter or
+ // underscore. The maximum length is 128 characters.
+ string name = 1 [(google.api.field_behavior) = REQUIRED];
+
+ // Required. The field data type.
+ Type type = 2 [(google.api.field_behavior) = REQUIRED];
+
+ // Optional. The field mode. The default value is NULLABLE.
+ Mode mode = 3 [(google.api.field_behavior) = OPTIONAL];
+
+ // Optional. Describes the nested schema fields if the type property is set to STRUCT.
+ repeated TableFieldSchema fields = 4 [(google.api.field_behavior) = OPTIONAL];
+
+ // Optional. The field description. The maximum length is 1,024 characters.
+ string description = 6 [(google.api.field_behavior) = OPTIONAL];
+}
diff --git a/handwritten/bigquery-storage/protos/protos.d.ts b/handwritten/bigquery-storage/protos/protos.d.ts
index b56668241417..78d959e8a7b5 100644
--- a/handwritten/bigquery-storage/protos/protos.d.ts
+++ b/handwritten/bigquery-storage/protos/protos.d.ts
@@ -11499,6 +11499,3994 @@ export namespace google {
public static getTypeUrl(typeUrlPrefix?: string): string;
}
}
+
+ /** Namespace v1beta2. */
+ namespace v1beta2 {
+
+ /** Properties of an ArrowSchema. */
+ interface IArrowSchema {
+
+ /** ArrowSchema serializedSchema */
+ serializedSchema?: (Uint8Array|Buffer|string|null);
+ }
+
+ /** Represents an ArrowSchema. */
+ class ArrowSchema implements IArrowSchema {
+
+ /**
+ * Constructs a new ArrowSchema.
+ * @param [properties] Properties to set
+ */
+ constructor(properties?: google.cloud.bigquery.storage.v1beta2.IArrowSchema);
+
+ /** ArrowSchema serializedSchema. */
+ public serializedSchema: (Uint8Array|Buffer|string);
+
+ /**
+ * Creates a new ArrowSchema instance using the specified properties.
+ * @param [properties] Properties to set
+ * @returns ArrowSchema instance
+ */
+ public static create(properties?: google.cloud.bigquery.storage.v1beta2.IArrowSchema): google.cloud.bigquery.storage.v1beta2.ArrowSchema;
+
+ /**
+ * Encodes the specified ArrowSchema message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.ArrowSchema.verify|verify} messages.
+ * @param message ArrowSchema message or plain object to encode
+ * @param [writer] Writer to encode to
+ * @returns Writer
+ */
+ public static encode(message: google.cloud.bigquery.storage.v1beta2.IArrowSchema, writer?: $protobuf.Writer): $protobuf.Writer;
+
+ /**
+ * Encodes the specified ArrowSchema message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.ArrowSchema.verify|verify} messages.
+ * @param message ArrowSchema message or plain object to encode
+ * @param [writer] Writer to encode to
+ * @returns Writer
+ */
+ public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta2.IArrowSchema, writer?: $protobuf.Writer): $protobuf.Writer;
+
+ /**
+ * Decodes an ArrowSchema message from the specified reader or buffer.
+ * @param reader Reader or buffer to decode from
+ * @param [length] Message length if known beforehand
+ * @returns ArrowSchema
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta2.ArrowSchema;
+
+ /**
+ * Decodes an ArrowSchema message from the specified reader or buffer, length delimited.
+ * @param reader Reader or buffer to decode from
+ * @returns ArrowSchema
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta2.ArrowSchema;
+
+ /**
+ * Verifies an ArrowSchema message.
+ * @param message Plain object to verify
+ * @returns `null` if valid, otherwise the reason why it is not
+ */
+ public static verify(message: { [k: string]: any }): (string|null);
+
+ /**
+ * Creates an ArrowSchema message from a plain object. Also converts values to their respective internal types.
+ * @param object Plain object
+ * @returns ArrowSchema
+ */
+ public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta2.ArrowSchema;
+
+ /**
+ * Creates a plain object from an ArrowSchema message. Also converts values to other types if specified.
+ * @param message ArrowSchema
+ * @param [options] Conversion options
+ * @returns Plain object
+ */
+ public static toObject(message: google.cloud.bigquery.storage.v1beta2.ArrowSchema, options?: $protobuf.IConversionOptions): { [k: string]: any };
+
+ /**
+ * Converts this ArrowSchema to JSON.
+ * @returns JSON object
+ */
+ public toJSON(): { [k: string]: any };
+
+ /**
+ * Gets the default type url for ArrowSchema
+ * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com")
+ * @returns The default type url
+ */
+ public static getTypeUrl(typeUrlPrefix?: string): string;
+ }
+
+ /** Properties of an ArrowRecordBatch. */
+ interface IArrowRecordBatch {
+
+ /** ArrowRecordBatch serializedRecordBatch */
+ serializedRecordBatch?: (Uint8Array|Buffer|string|null);
+ }
+
+ /** Represents an ArrowRecordBatch. */
+ class ArrowRecordBatch implements IArrowRecordBatch {
+
+ /**
+ * Constructs a new ArrowRecordBatch.
+ * @param [properties] Properties to set
+ */
+ constructor(properties?: google.cloud.bigquery.storage.v1beta2.IArrowRecordBatch);
+
+ /** ArrowRecordBatch serializedRecordBatch. */
+ public serializedRecordBatch: (Uint8Array|Buffer|string);
+
+ /**
+ * Creates a new ArrowRecordBatch instance using the specified properties.
+ * @param [properties] Properties to set
+ * @returns ArrowRecordBatch instance
+ */
+ public static create(properties?: google.cloud.bigquery.storage.v1beta2.IArrowRecordBatch): google.cloud.bigquery.storage.v1beta2.ArrowRecordBatch;
+
+ /**
+ * Encodes the specified ArrowRecordBatch message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.ArrowRecordBatch.verify|verify} messages.
+ * @param message ArrowRecordBatch message or plain object to encode
+ * @param [writer] Writer to encode to
+ * @returns Writer
+ */
+ public static encode(message: google.cloud.bigquery.storage.v1beta2.IArrowRecordBatch, writer?: $protobuf.Writer): $protobuf.Writer;
+
+ /**
+ * Encodes the specified ArrowRecordBatch message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.ArrowRecordBatch.verify|verify} messages.
+ * @param message ArrowRecordBatch message or plain object to encode
+ * @param [writer] Writer to encode to
+ * @returns Writer
+ */
+ public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta2.IArrowRecordBatch, writer?: $protobuf.Writer): $protobuf.Writer;
+
+ /**
+ * Decodes an ArrowRecordBatch message from the specified reader or buffer.
+ * @param reader Reader or buffer to decode from
+ * @param [length] Message length if known beforehand
+ * @returns ArrowRecordBatch
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta2.ArrowRecordBatch;
+
+ /**
+ * Decodes an ArrowRecordBatch message from the specified reader or buffer, length delimited.
+ * @param reader Reader or buffer to decode from
+ * @returns ArrowRecordBatch
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta2.ArrowRecordBatch;
+
+ /**
+ * Verifies an ArrowRecordBatch message.
+ * @param message Plain object to verify
+ * @returns `null` if valid, otherwise the reason why it is not
+ */
+ public static verify(message: { [k: string]: any }): (string|null);
+
+ /**
+ * Creates an ArrowRecordBatch message from a plain object. Also converts values to their respective internal types.
+ * @param object Plain object
+ * @returns ArrowRecordBatch
+ */
+ public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta2.ArrowRecordBatch;
+
+ /**
+ * Creates a plain object from an ArrowRecordBatch message. Also converts values to other types if specified.
+ * @param message ArrowRecordBatch
+ * @param [options] Conversion options
+ * @returns Plain object
+ */
+ public static toObject(message: google.cloud.bigquery.storage.v1beta2.ArrowRecordBatch, options?: $protobuf.IConversionOptions): { [k: string]: any };
+
+ /**
+ * Converts this ArrowRecordBatch to JSON.
+ * @returns JSON object
+ */
+ public toJSON(): { [k: string]: any };
+
+ /**
+ * Gets the default type url for ArrowRecordBatch
+ * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com")
+ * @returns The default type url
+ */
+ public static getTypeUrl(typeUrlPrefix?: string): string;
+ }
+
+ /** Properties of an ArrowSerializationOptions. */
+ interface IArrowSerializationOptions {
+
+ /** ArrowSerializationOptions format */
+ format?: (google.cloud.bigquery.storage.v1beta2.ArrowSerializationOptions.Format|keyof typeof google.cloud.bigquery.storage.v1beta2.ArrowSerializationOptions.Format|null);
+ }
+
+ /** Represents an ArrowSerializationOptions. */
+ class ArrowSerializationOptions implements IArrowSerializationOptions {
+
+ /**
+ * Constructs a new ArrowSerializationOptions.
+ * @param [properties] Properties to set
+ */
+ constructor(properties?: google.cloud.bigquery.storage.v1beta2.IArrowSerializationOptions);
+
+ /** ArrowSerializationOptions format. */
+ public format: (google.cloud.bigquery.storage.v1beta2.ArrowSerializationOptions.Format|keyof typeof google.cloud.bigquery.storage.v1beta2.ArrowSerializationOptions.Format);
+
+ /**
+ * Creates a new ArrowSerializationOptions instance using the specified properties.
+ * @param [properties] Properties to set
+ * @returns ArrowSerializationOptions instance
+ */
+ public static create(properties?: google.cloud.bigquery.storage.v1beta2.IArrowSerializationOptions): google.cloud.bigquery.storage.v1beta2.ArrowSerializationOptions;
+
+ /**
+ * Encodes the specified ArrowSerializationOptions message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.ArrowSerializationOptions.verify|verify} messages.
+ * @param message ArrowSerializationOptions message or plain object to encode
+ * @param [writer] Writer to encode to
+ * @returns Writer
+ */
+ public static encode(message: google.cloud.bigquery.storage.v1beta2.IArrowSerializationOptions, writer?: $protobuf.Writer): $protobuf.Writer;
+
+ /**
+ * Encodes the specified ArrowSerializationOptions message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.ArrowSerializationOptions.verify|verify} messages.
+ * @param message ArrowSerializationOptions message or plain object to encode
+ * @param [writer] Writer to encode to
+ * @returns Writer
+ */
+ public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta2.IArrowSerializationOptions, writer?: $protobuf.Writer): $protobuf.Writer;
+
+ /**
+ * Decodes an ArrowSerializationOptions message from the specified reader or buffer.
+ * @param reader Reader or buffer to decode from
+ * @param [length] Message length if known beforehand
+ * @returns ArrowSerializationOptions
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta2.ArrowSerializationOptions;
+
+ /**
+ * Decodes an ArrowSerializationOptions message from the specified reader or buffer, length delimited.
+ * @param reader Reader or buffer to decode from
+ * @returns ArrowSerializationOptions
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta2.ArrowSerializationOptions;
+
+ /**
+ * Verifies an ArrowSerializationOptions message.
+ * @param message Plain object to verify
+ * @returns `null` if valid, otherwise the reason why it is not
+ */
+ public static verify(message: { [k: string]: any }): (string|null);
+
+ /**
+ * Creates an ArrowSerializationOptions message from a plain object. Also converts values to their respective internal types.
+ * @param object Plain object
+ * @returns ArrowSerializationOptions
+ */
+ public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta2.ArrowSerializationOptions;
+
+ /**
+ * Creates a plain object from an ArrowSerializationOptions message. Also converts values to other types if specified.
+ * @param message ArrowSerializationOptions
+ * @param [options] Conversion options
+ * @returns Plain object
+ */
+ public static toObject(message: google.cloud.bigquery.storage.v1beta2.ArrowSerializationOptions, options?: $protobuf.IConversionOptions): { [k: string]: any };
+
+ /**
+ * Converts this ArrowSerializationOptions to JSON.
+ * @returns JSON object
+ */
+ public toJSON(): { [k: string]: any };
+
+ /**
+ * Gets the default type url for ArrowSerializationOptions
+ * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com")
+ * @returns The default type url
+ */
+ public static getTypeUrl(typeUrlPrefix?: string): string;
+ }
+
+ namespace ArrowSerializationOptions {
+
+ /** Format enum. */
+ enum Format {
+ FORMAT_UNSPECIFIED = 0,
+ ARROW_0_14 = 1,
+ ARROW_0_15 = 2
+ }
+ }
+
+ /** Properties of an AvroSchema. */
+ interface IAvroSchema {
+
+ /** AvroSchema schema */
+ schema?: (string|null);
+ }
+
+ /** Represents an AvroSchema. */
+ class AvroSchema implements IAvroSchema {
+
+ /**
+ * Constructs a new AvroSchema.
+ * @param [properties] Properties to set
+ */
+ constructor(properties?: google.cloud.bigquery.storage.v1beta2.IAvroSchema);
+
+ /** AvroSchema schema. */
+ public schema: string;
+
+ /**
+ * Creates a new AvroSchema instance using the specified properties.
+ * @param [properties] Properties to set
+ * @returns AvroSchema instance
+ */
+ public static create(properties?: google.cloud.bigquery.storage.v1beta2.IAvroSchema): google.cloud.bigquery.storage.v1beta2.AvroSchema;
+
+ /**
+ * Encodes the specified AvroSchema message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.AvroSchema.verify|verify} messages.
+ * @param message AvroSchema message or plain object to encode
+ * @param [writer] Writer to encode to
+ * @returns Writer
+ */
+ public static encode(message: google.cloud.bigquery.storage.v1beta2.IAvroSchema, writer?: $protobuf.Writer): $protobuf.Writer;
+
+ /**
+ * Encodes the specified AvroSchema message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.AvroSchema.verify|verify} messages.
+ * @param message AvroSchema message or plain object to encode
+ * @param [writer] Writer to encode to
+ * @returns Writer
+ */
+ public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta2.IAvroSchema, writer?: $protobuf.Writer): $protobuf.Writer;
+
+ /**
+ * Decodes an AvroSchema message from the specified reader or buffer.
+ * @param reader Reader or buffer to decode from
+ * @param [length] Message length if known beforehand
+ * @returns AvroSchema
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta2.AvroSchema;
+
+ /**
+ * Decodes an AvroSchema message from the specified reader or buffer, length delimited.
+ * @param reader Reader or buffer to decode from
+ * @returns AvroSchema
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta2.AvroSchema;
+
+ /**
+ * Verifies an AvroSchema message.
+ * @param message Plain object to verify
+ * @returns `null` if valid, otherwise the reason why it is not
+ */
+ public static verify(message: { [k: string]: any }): (string|null);
+
+ /**
+ * Creates an AvroSchema message from a plain object. Also converts values to their respective internal types.
+ * @param object Plain object
+ * @returns AvroSchema
+ */
+ public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta2.AvroSchema;
+
+ /**
+ * Creates a plain object from an AvroSchema message. Also converts values to other types if specified.
+ * @param message AvroSchema
+ * @param [options] Conversion options
+ * @returns Plain object
+ */
+ public static toObject(message: google.cloud.bigquery.storage.v1beta2.AvroSchema, options?: $protobuf.IConversionOptions): { [k: string]: any };
+
+ /**
+ * Converts this AvroSchema to JSON.
+ * @returns JSON object
+ */
+ public toJSON(): { [k: string]: any };
+
+ /**
+ * Gets the default type url for AvroSchema
+ * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com")
+ * @returns The default type url
+ */
+ public static getTypeUrl(typeUrlPrefix?: string): string;
+ }
+
+ /** Properties of an AvroRows. */
+ interface IAvroRows {
+
+ /** AvroRows serializedBinaryRows */
+ serializedBinaryRows?: (Uint8Array|Buffer|string|null);
+ }
+
+ /** Represents an AvroRows. */
+ class AvroRows implements IAvroRows {
+
+ /**
+ * Constructs a new AvroRows.
+ * @param [properties] Properties to set
+ */
+ constructor(properties?: google.cloud.bigquery.storage.v1beta2.IAvroRows);
+
+ /** AvroRows serializedBinaryRows. */
+ public serializedBinaryRows: (Uint8Array|Buffer|string);
+
+ /**
+ * Creates a new AvroRows instance using the specified properties.
+ * @param [properties] Properties to set
+ * @returns AvroRows instance
+ */
+ public static create(properties?: google.cloud.bigquery.storage.v1beta2.IAvroRows): google.cloud.bigquery.storage.v1beta2.AvroRows;
+
+ /**
+ * Encodes the specified AvroRows message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.AvroRows.verify|verify} messages.
+ * @param message AvroRows message or plain object to encode
+ * @param [writer] Writer to encode to
+ * @returns Writer
+ */
+ public static encode(message: google.cloud.bigquery.storage.v1beta2.IAvroRows, writer?: $protobuf.Writer): $protobuf.Writer;
+
+ /**
+ * Encodes the specified AvroRows message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.AvroRows.verify|verify} messages.
+ * @param message AvroRows message or plain object to encode
+ * @param [writer] Writer to encode to
+ * @returns Writer
+ */
+ public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta2.IAvroRows, writer?: $protobuf.Writer): $protobuf.Writer;
+
+ /**
+ * Decodes an AvroRows message from the specified reader or buffer.
+ * @param reader Reader or buffer to decode from
+ * @param [length] Message length if known beforehand
+ * @returns AvroRows
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta2.AvroRows;
+
+ /**
+ * Decodes an AvroRows message from the specified reader or buffer, length delimited.
+ * @param reader Reader or buffer to decode from
+ * @returns AvroRows
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta2.AvroRows;
+
+ /**
+ * Verifies an AvroRows message.
+ * @param message Plain object to verify
+ * @returns `null` if valid, otherwise the reason why it is not
+ */
+ public static verify(message: { [k: string]: any }): (string|null);
+
+ /**
+ * Creates an AvroRows message from a plain object. Also converts values to their respective internal types.
+ * @param object Plain object
+ * @returns AvroRows
+ */
+ public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta2.AvroRows;
+
+ /**
+ * Creates a plain object from an AvroRows message. Also converts values to other types if specified.
+ * @param message AvroRows
+ * @param [options] Conversion options
+ * @returns Plain object
+ */
+ public static toObject(message: google.cloud.bigquery.storage.v1beta2.AvroRows, options?: $protobuf.IConversionOptions): { [k: string]: any };
+
+ /**
+ * Converts this AvroRows to JSON.
+ * @returns JSON object
+ */
+ public toJSON(): { [k: string]: any };
+
+ /**
+ * Gets the default type url for AvroRows
+ * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com")
+ * @returns The default type url
+ */
+ public static getTypeUrl(typeUrlPrefix?: string): string;
+ }
+
+ /** Properties of a ProtoSchema. */
+ interface IProtoSchema {
+
+ /** ProtoSchema protoDescriptor */
+ protoDescriptor?: (google.protobuf.IDescriptorProto|null);
+ }
+
+ /** Represents a ProtoSchema. */
+ class ProtoSchema implements IProtoSchema {
+
+ /**
+ * Constructs a new ProtoSchema.
+ * @param [properties] Properties to set
+ */
+ constructor(properties?: google.cloud.bigquery.storage.v1beta2.IProtoSchema);
+
+ /** ProtoSchema protoDescriptor. */
+ public protoDescriptor?: (google.protobuf.IDescriptorProto|null);
+
+ /**
+ * Creates a new ProtoSchema instance using the specified properties.
+ * @param [properties] Properties to set
+ * @returns ProtoSchema instance
+ */
+ public static create(properties?: google.cloud.bigquery.storage.v1beta2.IProtoSchema): google.cloud.bigquery.storage.v1beta2.ProtoSchema;
+
+ /**
+ * Encodes the specified ProtoSchema message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.ProtoSchema.verify|verify} messages.
+ * @param message ProtoSchema message or plain object to encode
+ * @param [writer] Writer to encode to
+ * @returns Writer
+ */
+ public static encode(message: google.cloud.bigquery.storage.v1beta2.IProtoSchema, writer?: $protobuf.Writer): $protobuf.Writer;
+
+ /**
+ * Encodes the specified ProtoSchema message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.ProtoSchema.verify|verify} messages.
+ * @param message ProtoSchema message or plain object to encode
+ * @param [writer] Writer to encode to
+ * @returns Writer
+ */
+ public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta2.IProtoSchema, writer?: $protobuf.Writer): $protobuf.Writer;
+
+ /**
+ * Decodes a ProtoSchema message from the specified reader or buffer.
+ * @param reader Reader or buffer to decode from
+ * @param [length] Message length if known beforehand
+ * @returns ProtoSchema
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta2.ProtoSchema;
+
+ /**
+ * Decodes a ProtoSchema message from the specified reader or buffer, length delimited.
+ * @param reader Reader or buffer to decode from
+ * @returns ProtoSchema
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta2.ProtoSchema;
+
+ /**
+ * Verifies a ProtoSchema message.
+ * @param message Plain object to verify
+ * @returns `null` if valid, otherwise the reason why it is not
+ */
+ public static verify(message: { [k: string]: any }): (string|null);
+
+ /**
+ * Creates a ProtoSchema message from a plain object. Also converts values to their respective internal types.
+ * @param object Plain object
+ * @returns ProtoSchema
+ */
+ public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta2.ProtoSchema;
+
+ /**
+ * Creates a plain object from a ProtoSchema message. Also converts values to other types if specified.
+ * @param message ProtoSchema
+ * @param [options] Conversion options
+ * @returns Plain object
+ */
+ public static toObject(message: google.cloud.bigquery.storage.v1beta2.ProtoSchema, options?: $protobuf.IConversionOptions): { [k: string]: any };
+
+ /**
+ * Converts this ProtoSchema to JSON.
+ * @returns JSON object
+ */
+ public toJSON(): { [k: string]: any };
+
+ /**
+ * Gets the default type url for ProtoSchema
+ * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com")
+ * @returns The default type url
+ */
+ public static getTypeUrl(typeUrlPrefix?: string): string;
+ }
+
+ /** Properties of a ProtoRows. */
+ interface IProtoRows {
+
+ /** ProtoRows serializedRows */
+ serializedRows?: (Uint8Array[]|null);
+ }
+
+ /** Represents a ProtoRows. */
+ class ProtoRows implements IProtoRows {
+
+ /**
+ * Constructs a new ProtoRows.
+ * @param [properties] Properties to set
+ */
+ constructor(properties?: google.cloud.bigquery.storage.v1beta2.IProtoRows);
+
+ /** ProtoRows serializedRows. */
+ public serializedRows: Uint8Array[];
+
+ /**
+ * Creates a new ProtoRows instance using the specified properties.
+ * @param [properties] Properties to set
+ * @returns ProtoRows instance
+ */
+ public static create(properties?: google.cloud.bigquery.storage.v1beta2.IProtoRows): google.cloud.bigquery.storage.v1beta2.ProtoRows;
+
+ /**
+ * Encodes the specified ProtoRows message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.ProtoRows.verify|verify} messages.
+ * @param message ProtoRows message or plain object to encode
+ * @param [writer] Writer to encode to
+ * @returns Writer
+ */
+ public static encode(message: google.cloud.bigquery.storage.v1beta2.IProtoRows, writer?: $protobuf.Writer): $protobuf.Writer;
+
+ /**
+ * Encodes the specified ProtoRows message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.ProtoRows.verify|verify} messages.
+ * @param message ProtoRows message or plain object to encode
+ * @param [writer] Writer to encode to
+ * @returns Writer
+ */
+ public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta2.IProtoRows, writer?: $protobuf.Writer): $protobuf.Writer;
+
+ /**
+ * Decodes a ProtoRows message from the specified reader or buffer.
+ * @param reader Reader or buffer to decode from
+ * @param [length] Message length if known beforehand
+ * @returns ProtoRows
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta2.ProtoRows;
+
+ /**
+ * Decodes a ProtoRows message from the specified reader or buffer, length delimited.
+ * @param reader Reader or buffer to decode from
+ * @returns ProtoRows
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta2.ProtoRows;
+
+ /**
+ * Verifies a ProtoRows message.
+ * @param message Plain object to verify
+ * @returns `null` if valid, otherwise the reason why it is not
+ */
+ public static verify(message: { [k: string]: any }): (string|null);
+
+ /**
+ * Creates a ProtoRows message from a plain object. Also converts values to their respective internal types.
+ * @param object Plain object
+ * @returns ProtoRows
+ */
+ public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta2.ProtoRows;
+
+ /**
+ * Creates a plain object from a ProtoRows message. Also converts values to other types if specified.
+ * @param message ProtoRows
+ * @param [options] Conversion options
+ * @returns Plain object
+ */
+ public static toObject(message: google.cloud.bigquery.storage.v1beta2.ProtoRows, options?: $protobuf.IConversionOptions): { [k: string]: any };
+
+ /**
+ * Converts this ProtoRows to JSON.
+ * @returns JSON object
+ */
+ public toJSON(): { [k: string]: any };
+
+ /**
+ * Gets the default type url for ProtoRows
+ * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com")
+ * @returns The default type url
+ */
+ public static getTypeUrl(typeUrlPrefix?: string): string;
+ }
+
+ /** Represents a BigQueryRead */
+ class BigQueryRead extends $protobuf.rpc.Service {
+
+ /**
+ * Constructs a new BigQueryRead service.
+ * @param rpcImpl RPC implementation
+ * @param [requestDelimited=false] Whether requests are length-delimited
+ * @param [responseDelimited=false] Whether responses are length-delimited
+ */
+ constructor(rpcImpl: $protobuf.RPCImpl, requestDelimited?: boolean, responseDelimited?: boolean);
+
+ /**
+ * Creates new BigQueryRead service using the specified rpc implementation.
+ * @param rpcImpl RPC implementation
+ * @param [requestDelimited=false] Whether requests are length-delimited
+ * @param [responseDelimited=false] Whether responses are length-delimited
+ * @returns RPC service. Useful where requests and/or responses are streamed.
+ */
+ public static create(rpcImpl: $protobuf.RPCImpl, requestDelimited?: boolean, responseDelimited?: boolean): BigQueryRead;
+
+ /**
+ * Calls CreateReadSession.
+ * @param request CreateReadSessionRequest message or plain object
+ * @param callback Node-style callback called with the error, if any, and ReadSession
+ */
+ public createReadSession(request: google.cloud.bigquery.storage.v1beta2.ICreateReadSessionRequest, callback: google.cloud.bigquery.storage.v1beta2.BigQueryRead.CreateReadSessionCallback): void;
+
+ /**
+ * Calls CreateReadSession.
+ * @param request CreateReadSessionRequest message or plain object
+ * @returns Promise
+ */
+ public createReadSession(request: google.cloud.bigquery.storage.v1beta2.ICreateReadSessionRequest): Promise;
+
+ /**
+ * Calls ReadRows.
+ * @param request ReadRowsRequest message or plain object
+ * @param callback Node-style callback called with the error, if any, and ReadRowsResponse
+ */
+ public readRows(request: google.cloud.bigquery.storage.v1beta2.IReadRowsRequest, callback: google.cloud.bigquery.storage.v1beta2.BigQueryRead.ReadRowsCallback): void;
+
+ /**
+ * Calls ReadRows.
+ * @param request ReadRowsRequest message or plain object
+ * @returns Promise
+ */
+ public readRows(request: google.cloud.bigquery.storage.v1beta2.IReadRowsRequest): Promise;
+
+ /**
+ * Calls SplitReadStream.
+ * @param request SplitReadStreamRequest message or plain object
+ * @param callback Node-style callback called with the error, if any, and SplitReadStreamResponse
+ */
+ public splitReadStream(request: google.cloud.bigquery.storage.v1beta2.ISplitReadStreamRequest, callback: google.cloud.bigquery.storage.v1beta2.BigQueryRead.SplitReadStreamCallback): void;
+
+ /**
+ * Calls SplitReadStream.
+ * @param request SplitReadStreamRequest message or plain object
+ * @returns Promise
+ */
+ public splitReadStream(request: google.cloud.bigquery.storage.v1beta2.ISplitReadStreamRequest): Promise;
+ }
+
+ namespace BigQueryRead {
+
+ /**
+ * Callback as used by {@link google.cloud.bigquery.storage.v1beta2.BigQueryRead|createReadSession}.
+ * @param error Error, if any
+ * @param [response] ReadSession
+ */
+ type CreateReadSessionCallback = (error: (Error|null), response?: google.cloud.bigquery.storage.v1beta2.ReadSession) => void;
+
+ /**
+ * Callback as used by {@link google.cloud.bigquery.storage.v1beta2.BigQueryRead|readRows}.
+ * @param error Error, if any
+ * @param [response] ReadRowsResponse
+ */
+ type ReadRowsCallback = (error: (Error|null), response?: google.cloud.bigquery.storage.v1beta2.ReadRowsResponse) => void;
+
+ /**
+ * Callback as used by {@link google.cloud.bigquery.storage.v1beta2.BigQueryRead|splitReadStream}.
+ * @param error Error, if any
+ * @param [response] SplitReadStreamResponse
+ */
+ type SplitReadStreamCallback = (error: (Error|null), response?: google.cloud.bigquery.storage.v1beta2.SplitReadStreamResponse) => void;
+ }
+
+ /** Represents a BigQueryWrite */
+ class BigQueryWrite extends $protobuf.rpc.Service {
+
+ /**
+ * Constructs a new BigQueryWrite service.
+ * @param rpcImpl RPC implementation
+ * @param [requestDelimited=false] Whether requests are length-delimited
+ * @param [responseDelimited=false] Whether responses are length-delimited
+ */
+ constructor(rpcImpl: $protobuf.RPCImpl, requestDelimited?: boolean, responseDelimited?: boolean);
+
+ /**
+ * Creates new BigQueryWrite service using the specified rpc implementation.
+ * @param rpcImpl RPC implementation
+ * @param [requestDelimited=false] Whether requests are length-delimited
+ * @param [responseDelimited=false] Whether responses are length-delimited
+ * @returns RPC service. Useful where requests and/or responses are streamed.
+ */
+ public static create(rpcImpl: $protobuf.RPCImpl, requestDelimited?: boolean, responseDelimited?: boolean): BigQueryWrite;
+
+ /**
+ * Calls CreateWriteStream.
+ * @param request CreateWriteStreamRequest message or plain object
+ * @param callback Node-style callback called with the error, if any, and WriteStream
+ */
+ public createWriteStream(request: google.cloud.bigquery.storage.v1beta2.ICreateWriteStreamRequest, callback: google.cloud.bigquery.storage.v1beta2.BigQueryWrite.CreateWriteStreamCallback): void;
+
+ /**
+ * Calls CreateWriteStream.
+ * @param request CreateWriteStreamRequest message or plain object
+ * @returns Promise resolving to WriteStream
+ */
+ public createWriteStream(request: google.cloud.bigquery.storage.v1beta2.ICreateWriteStreamRequest): Promise;
+
+ /**
+ * Calls AppendRows.
+ * @param request AppendRowsRequest message or plain object
+ * @param callback Node-style callback called with the error, if any, and AppendRowsResponse
+ */
+ public appendRows(request: google.cloud.bigquery.storage.v1beta2.IAppendRowsRequest, callback: google.cloud.bigquery.storage.v1beta2.BigQueryWrite.AppendRowsCallback): void;
+
+ /**
+ * Calls AppendRows.
+ * @param request AppendRowsRequest message or plain object
+ * @returns Promise resolving to AppendRowsResponse
+ */
+ public appendRows(request: google.cloud.bigquery.storage.v1beta2.IAppendRowsRequest): Promise;
+
+ /**
+ * Calls GetWriteStream.
+ * @param request GetWriteStreamRequest message or plain object
+ * @param callback Node-style callback called with the error, if any, and WriteStream
+ */
+ public getWriteStream(request: google.cloud.bigquery.storage.v1beta2.IGetWriteStreamRequest, callback: google.cloud.bigquery.storage.v1beta2.BigQueryWrite.GetWriteStreamCallback): void;
+
+ /**
+ * Calls GetWriteStream.
+ * @param request GetWriteStreamRequest message or plain object
+ * @returns Promise resolving to WriteStream
+ */
+ public getWriteStream(request: google.cloud.bigquery.storage.v1beta2.IGetWriteStreamRequest): Promise;
+
+ /**
+ * Calls FinalizeWriteStream.
+ * @param request FinalizeWriteStreamRequest message or plain object
+ * @param callback Node-style callback called with the error, if any, and FinalizeWriteStreamResponse
+ */
+ public finalizeWriteStream(request: google.cloud.bigquery.storage.v1beta2.IFinalizeWriteStreamRequest, callback: google.cloud.bigquery.storage.v1beta2.BigQueryWrite.FinalizeWriteStreamCallback): void;
+
+ /**
+ * Calls FinalizeWriteStream.
+ * @param request FinalizeWriteStreamRequest message or plain object
+ * @returns Promise resolving to FinalizeWriteStreamResponse
+ */
+ public finalizeWriteStream(request: google.cloud.bigquery.storage.v1beta2.IFinalizeWriteStreamRequest): Promise;
+
+ /**
+ * Calls BatchCommitWriteStreams.
+ * @param request BatchCommitWriteStreamsRequest message or plain object
+ * @param callback Node-style callback called with the error, if any, and BatchCommitWriteStreamsResponse
+ */
+ public batchCommitWriteStreams(request: google.cloud.bigquery.storage.v1beta2.IBatchCommitWriteStreamsRequest, callback: google.cloud.bigquery.storage.v1beta2.BigQueryWrite.BatchCommitWriteStreamsCallback): void;
+
+ /**
+ * Calls BatchCommitWriteStreams.
+ * @param request BatchCommitWriteStreamsRequest message or plain object
+ * @returns Promise resolving to BatchCommitWriteStreamsResponse
+ */
+ public batchCommitWriteStreams(request: google.cloud.bigquery.storage.v1beta2.IBatchCommitWriteStreamsRequest): Promise;
+
+ /**
+ * Calls FlushRows.
+ * @param request FlushRowsRequest message or plain object
+ * @param callback Node-style callback called with the error, if any, and FlushRowsResponse
+ */
+ public flushRows(request: google.cloud.bigquery.storage.v1beta2.IFlushRowsRequest, callback: google.cloud.bigquery.storage.v1beta2.BigQueryWrite.FlushRowsCallback): void;
+
+ /**
+ * Calls FlushRows.
+ * @param request FlushRowsRequest message or plain object
+ * @returns Promise resolving to FlushRowsResponse
+ */
+ public flushRows(request: google.cloud.bigquery.storage.v1beta2.IFlushRowsRequest): Promise;
+ }
+
+ namespace BigQueryWrite {
+
+ /**
+ * Callback as used by {@link google.cloud.bigquery.storage.v1beta2.BigQueryWrite|createWriteStream}.
+ * @param error Error, if any
+ * @param [response] WriteStream, present only if the call succeeded
+ */
+ type CreateWriteStreamCallback = (error: (Error|null), response?: google.cloud.bigquery.storage.v1beta2.WriteStream) => void;
+
+ /**
+ * Callback as used by {@link google.cloud.bigquery.storage.v1beta2.BigQueryWrite|appendRows}.
+ * @param error Error, if any
+ * @param [response] AppendRowsResponse, present only if the call succeeded
+ */
+ type AppendRowsCallback = (error: (Error|null), response?: google.cloud.bigquery.storage.v1beta2.AppendRowsResponse) => void;
+
+ /**
+ * Callback as used by {@link google.cloud.bigquery.storage.v1beta2.BigQueryWrite|getWriteStream}.
+ * @param error Error, if any
+ * @param [response] WriteStream, present only if the call succeeded
+ */
+ type GetWriteStreamCallback = (error: (Error|null), response?: google.cloud.bigquery.storage.v1beta2.WriteStream) => void;
+
+ /**
+ * Callback as used by {@link google.cloud.bigquery.storage.v1beta2.BigQueryWrite|finalizeWriteStream}.
+ * @param error Error, if any
+ * @param [response] FinalizeWriteStreamResponse, present only if the call succeeded
+ */
+ type FinalizeWriteStreamCallback = (error: (Error|null), response?: google.cloud.bigquery.storage.v1beta2.FinalizeWriteStreamResponse) => void;
+
+ /**
+ * Callback as used by {@link google.cloud.bigquery.storage.v1beta2.BigQueryWrite|batchCommitWriteStreams}.
+ * @param error Error, if any
+ * @param [response] BatchCommitWriteStreamsResponse, present only if the call succeeded
+ */
+ type BatchCommitWriteStreamsCallback = (error: (Error|null), response?: google.cloud.bigquery.storage.v1beta2.BatchCommitWriteStreamsResponse) => void;
+
+ /**
+ * Callback as used by {@link google.cloud.bigquery.storage.v1beta2.BigQueryWrite|flushRows}.
+ * @param error Error, if any
+ * @param [response] FlushRowsResponse, present only if the call succeeded
+ */
+ type FlushRowsCallback = (error: (Error|null), response?: google.cloud.bigquery.storage.v1beta2.FlushRowsResponse) => void;
+ }
+
+ /** Properties of a CreateReadSessionRequest. */
+ interface ICreateReadSessionRequest {
+
+ /** CreateReadSessionRequest parent */
+ parent?: (string|null);
+
+ /** CreateReadSessionRequest readSession */
+ readSession?: (google.cloud.bigquery.storage.v1beta2.IReadSession|null);
+
+ /** CreateReadSessionRequest maxStreamCount */
+ maxStreamCount?: (number|null);
+ }
+
+ /** Represents a CreateReadSessionRequest. */
+ class CreateReadSessionRequest implements ICreateReadSessionRequest {
+
+ /**
+ * Constructs a new CreateReadSessionRequest.
+ * @param [properties] Properties to set
+ */
+ constructor(properties?: google.cloud.bigquery.storage.v1beta2.ICreateReadSessionRequest);
+
+ /** CreateReadSessionRequest parent. */
+ public parent: string;
+
+ /** CreateReadSessionRequest readSession. */
+ public readSession?: (google.cloud.bigquery.storage.v1beta2.IReadSession|null);
+
+ /** CreateReadSessionRequest maxStreamCount. */
+ public maxStreamCount: number;
+
+ /**
+ * Creates a new CreateReadSessionRequest instance using the specified properties.
+ * @param [properties] Properties to set
+ * @returns CreateReadSessionRequest instance
+ */
+ public static create(properties?: google.cloud.bigquery.storage.v1beta2.ICreateReadSessionRequest): google.cloud.bigquery.storage.v1beta2.CreateReadSessionRequest;
+
+ /**
+ * Encodes the specified CreateReadSessionRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.CreateReadSessionRequest.verify|verify} messages.
+ * @param message CreateReadSessionRequest message or plain object to encode
+ * @param [writer] Writer to encode to
+ * @returns Writer
+ */
+ public static encode(message: google.cloud.bigquery.storage.v1beta2.ICreateReadSessionRequest, writer?: $protobuf.Writer): $protobuf.Writer;
+
+ /**
+ * Encodes the specified CreateReadSessionRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.CreateReadSessionRequest.verify|verify} messages.
+ * @param message CreateReadSessionRequest message or plain object to encode
+ * @param [writer] Writer to encode to
+ * @returns Writer
+ */
+ public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta2.ICreateReadSessionRequest, writer?: $protobuf.Writer): $protobuf.Writer;
+
+ /**
+ * Decodes a CreateReadSessionRequest message from the specified reader or buffer.
+ * @param reader Reader or buffer to decode from
+ * @param [length] Message length if known beforehand
+ * @returns CreateReadSessionRequest
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta2.CreateReadSessionRequest;
+
+ /**
+ * Decodes a CreateReadSessionRequest message from the specified reader or buffer, length delimited.
+ * @param reader Reader or buffer to decode from
+ * @returns CreateReadSessionRequest
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta2.CreateReadSessionRequest;
+
+ /**
+ * Verifies a CreateReadSessionRequest message.
+ * @param message Plain object to verify
+ * @returns `null` if valid, otherwise the reason why it is not
+ */
+ public static verify(message: { [k: string]: any }): (string|null);
+
+ /**
+ * Creates a CreateReadSessionRequest message from a plain object. Also converts values to their respective internal types.
+ * @param object Plain object
+ * @returns CreateReadSessionRequest
+ */
+ public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta2.CreateReadSessionRequest;
+
+ /**
+ * Creates a plain object from a CreateReadSessionRequest message. Also converts values to other types if specified.
+ * @param message CreateReadSessionRequest
+ * @param [options] Conversion options
+ * @returns Plain object
+ */
+ public static toObject(message: google.cloud.bigquery.storage.v1beta2.CreateReadSessionRequest, options?: $protobuf.IConversionOptions): { [k: string]: any };
+
+ /**
+ * Converts this CreateReadSessionRequest to JSON.
+ * @returns JSON object
+ */
+ public toJSON(): { [k: string]: any };
+
+ /**
+ * Gets the default type URL for CreateReadSessionRequest
+ * @param [typeUrlPrefix] your custom typeUrlPrefix (default "type.googleapis.com")
+ * @returns The default type URL
+ */
+ public static getTypeUrl(typeUrlPrefix?: string): string;
+ }
+
+ /** Properties of a ReadRowsRequest. */
+ interface IReadRowsRequest {
+
+ /** ReadRowsRequest readStream */
+ readStream?: (string|null);
+
+ /** ReadRowsRequest offset */
+ offset?: (number|Long|string|null);
+ }
+
+ /** Represents a ReadRowsRequest. */
+ class ReadRowsRequest implements IReadRowsRequest {
+
+ /**
+ * Constructs a new ReadRowsRequest.
+ * @param [properties] Properties to set
+ */
+ constructor(properties?: google.cloud.bigquery.storage.v1beta2.IReadRowsRequest);
+
+ /** ReadRowsRequest readStream. */
+ public readStream: string;
+
+ /** ReadRowsRequest offset. */
+ public offset: (number|Long|string);
+
+ /**
+ * Creates a new ReadRowsRequest instance using the specified properties.
+ * @param [properties] Properties to set
+ * @returns ReadRowsRequest instance
+ */
+ public static create(properties?: google.cloud.bigquery.storage.v1beta2.IReadRowsRequest): google.cloud.bigquery.storage.v1beta2.ReadRowsRequest;
+
+ /**
+ * Encodes the specified ReadRowsRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.ReadRowsRequest.verify|verify} messages.
+ * @param message ReadRowsRequest message or plain object to encode
+ * @param [writer] Writer to encode to
+ * @returns Writer
+ */
+ public static encode(message: google.cloud.bigquery.storage.v1beta2.IReadRowsRequest, writer?: $protobuf.Writer): $protobuf.Writer;
+
+ /**
+ * Encodes the specified ReadRowsRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.ReadRowsRequest.verify|verify} messages.
+ * @param message ReadRowsRequest message or plain object to encode
+ * @param [writer] Writer to encode to
+ * @returns Writer
+ */
+ public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta2.IReadRowsRequest, writer?: $protobuf.Writer): $protobuf.Writer;
+
+ /**
+ * Decodes a ReadRowsRequest message from the specified reader or buffer.
+ * @param reader Reader or buffer to decode from
+ * @param [length] Message length if known beforehand
+ * @returns ReadRowsRequest
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta2.ReadRowsRequest;
+
+ /**
+ * Decodes a ReadRowsRequest message from the specified reader or buffer, length delimited.
+ * @param reader Reader or buffer to decode from
+ * @returns ReadRowsRequest
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta2.ReadRowsRequest;
+
+ /**
+ * Verifies a ReadRowsRequest message.
+ * @param message Plain object to verify
+ * @returns `null` if valid, otherwise the reason why it is not
+ */
+ public static verify(message: { [k: string]: any }): (string|null);
+
+ /**
+ * Creates a ReadRowsRequest message from a plain object. Also converts values to their respective internal types.
+ * @param object Plain object
+ * @returns ReadRowsRequest
+ */
+ public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta2.ReadRowsRequest;
+
+ /**
+ * Creates a plain object from a ReadRowsRequest message. Also converts values to other types if specified.
+ * @param message ReadRowsRequest
+ * @param [options] Conversion options
+ * @returns Plain object
+ */
+ public static toObject(message: google.cloud.bigquery.storage.v1beta2.ReadRowsRequest, options?: $protobuf.IConversionOptions): { [k: string]: any };
+
+ /**
+ * Converts this ReadRowsRequest to JSON.
+ * @returns JSON object
+ */
+ public toJSON(): { [k: string]: any };
+
+ /**
+ * Gets the default type URL for ReadRowsRequest
+ * @param [typeUrlPrefix] your custom typeUrlPrefix (default "type.googleapis.com")
+ * @returns The default type URL
+ */
+ public static getTypeUrl(typeUrlPrefix?: string): string;
+ }
+
+ /** Properties of a ThrottleState. */
+ interface IThrottleState {
+
+ /** ThrottleState throttlePercent */
+ throttlePercent?: (number|null);
+ }
+
+ /** Represents a ThrottleState. */
+ class ThrottleState implements IThrottleState {
+
+ /**
+ * Constructs a new ThrottleState.
+ * @param [properties] Properties to set
+ */
+ constructor(properties?: google.cloud.bigquery.storage.v1beta2.IThrottleState);
+
+ /** ThrottleState throttlePercent. */
+ public throttlePercent: number;
+
+ /**
+ * Creates a new ThrottleState instance using the specified properties.
+ * @param [properties] Properties to set
+ * @returns ThrottleState instance
+ */
+ public static create(properties?: google.cloud.bigquery.storage.v1beta2.IThrottleState): google.cloud.bigquery.storage.v1beta2.ThrottleState;
+
+ /**
+ * Encodes the specified ThrottleState message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.ThrottleState.verify|verify} messages.
+ * @param message ThrottleState message or plain object to encode
+ * @param [writer] Writer to encode to
+ * @returns Writer
+ */
+ public static encode(message: google.cloud.bigquery.storage.v1beta2.IThrottleState, writer?: $protobuf.Writer): $protobuf.Writer;
+
+ /**
+ * Encodes the specified ThrottleState message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.ThrottleState.verify|verify} messages.
+ * @param message ThrottleState message or plain object to encode
+ * @param [writer] Writer to encode to
+ * @returns Writer
+ */
+ public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta2.IThrottleState, writer?: $protobuf.Writer): $protobuf.Writer;
+
+ /**
+ * Decodes a ThrottleState message from the specified reader or buffer.
+ * @param reader Reader or buffer to decode from
+ * @param [length] Message length if known beforehand
+ * @returns ThrottleState
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta2.ThrottleState;
+
+ /**
+ * Decodes a ThrottleState message from the specified reader or buffer, length delimited.
+ * @param reader Reader or buffer to decode from
+ * @returns ThrottleState
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta2.ThrottleState;
+
+ /**
+ * Verifies a ThrottleState message.
+ * @param message Plain object to verify
+ * @returns `null` if valid, otherwise the reason why it is not
+ */
+ public static verify(message: { [k: string]: any }): (string|null);
+
+ /**
+ * Creates a ThrottleState message from a plain object. Also converts values to their respective internal types.
+ * @param object Plain object
+ * @returns ThrottleState
+ */
+ public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta2.ThrottleState;
+
+ /**
+ * Creates a plain object from a ThrottleState message. Also converts values to other types if specified.
+ * @param message ThrottleState
+ * @param [options] Conversion options
+ * @returns Plain object
+ */
+ public static toObject(message: google.cloud.bigquery.storage.v1beta2.ThrottleState, options?: $protobuf.IConversionOptions): { [k: string]: any };
+
+ /**
+ * Converts this ThrottleState to JSON.
+ * @returns JSON object
+ */
+ public toJSON(): { [k: string]: any };
+
+ /**
+ * Gets the default type URL for ThrottleState
+ * @param [typeUrlPrefix] your custom typeUrlPrefix (default "type.googleapis.com")
+ * @returns The default type URL
+ */
+ public static getTypeUrl(typeUrlPrefix?: string): string;
+ }
+
+ /** Properties of a StreamStats. */
+ interface IStreamStats {
+
+ /** StreamStats progress */
+ progress?: (google.cloud.bigquery.storage.v1beta2.StreamStats.IProgress|null);
+ }
+
+ /** Represents a StreamStats. */
+ class StreamStats implements IStreamStats {
+
+ /**
+ * Constructs a new StreamStats.
+ * @param [properties] Properties to set
+ */
+ constructor(properties?: google.cloud.bigquery.storage.v1beta2.IStreamStats);
+
+ /** StreamStats progress. */
+ public progress?: (google.cloud.bigquery.storage.v1beta2.StreamStats.IProgress|null);
+
+ /**
+ * Creates a new StreamStats instance using the specified properties.
+ * @param [properties] Properties to set
+ * @returns StreamStats instance
+ */
+ public static create(properties?: google.cloud.bigquery.storage.v1beta2.IStreamStats): google.cloud.bigquery.storage.v1beta2.StreamStats;
+
+ /**
+ * Encodes the specified StreamStats message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.StreamStats.verify|verify} messages.
+ * @param message StreamStats message or plain object to encode
+ * @param [writer] Writer to encode to
+ * @returns Writer
+ */
+ public static encode(message: google.cloud.bigquery.storage.v1beta2.IStreamStats, writer?: $protobuf.Writer): $protobuf.Writer;
+
+ /**
+ * Encodes the specified StreamStats message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.StreamStats.verify|verify} messages.
+ * @param message StreamStats message or plain object to encode
+ * @param [writer] Writer to encode to
+ * @returns Writer
+ */
+ public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta2.IStreamStats, writer?: $protobuf.Writer): $protobuf.Writer;
+
+ /**
+ * Decodes a StreamStats message from the specified reader or buffer.
+ * @param reader Reader or buffer to decode from
+ * @param [length] Message length if known beforehand
+ * @returns StreamStats
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta2.StreamStats;
+
+ /**
+ * Decodes a StreamStats message from the specified reader or buffer, length delimited.
+ * @param reader Reader or buffer to decode from
+ * @returns StreamStats
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta2.StreamStats;
+
+ /**
+ * Verifies a StreamStats message.
+ * @param message Plain object to verify
+ * @returns `null` if valid, otherwise the reason why it is not
+ */
+ public static verify(message: { [k: string]: any }): (string|null);
+
+ /**
+ * Creates a StreamStats message from a plain object. Also converts values to their respective internal types.
+ * @param object Plain object
+ * @returns StreamStats
+ */
+ public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta2.StreamStats;
+
+ /**
+ * Creates a plain object from a StreamStats message. Also converts values to other types if specified.
+ * @param message StreamStats
+ * @param [options] Conversion options
+ * @returns Plain object
+ */
+ public static toObject(message: google.cloud.bigquery.storage.v1beta2.StreamStats, options?: $protobuf.IConversionOptions): { [k: string]: any };
+
+ /**
+ * Converts this StreamStats to JSON.
+ * @returns JSON object
+ */
+ public toJSON(): { [k: string]: any };
+
+ /**
+ * Gets the default type URL for StreamStats
+ * @param [typeUrlPrefix] your custom typeUrlPrefix (default "type.googleapis.com")
+ * @returns The default type URL
+ */
+ public static getTypeUrl(typeUrlPrefix?: string): string;
+ }
+
+ namespace StreamStats {
+
+ /** Properties of a Progress. */
+ interface IProgress {
+
+ /** Progress atResponseStart */
+ atResponseStart?: (number|null);
+
+ /** Progress atResponseEnd */
+ atResponseEnd?: (number|null);
+ }
+
+ /** Represents a Progress. */
+ class Progress implements IProgress {
+
+ /**
+ * Constructs a new Progress.
+ * @param [properties] Properties to set
+ */
+ constructor(properties?: google.cloud.bigquery.storage.v1beta2.StreamStats.IProgress);
+
+ /** Progress atResponseStart. */
+ public atResponseStart: number;
+
+ /** Progress atResponseEnd. */
+ public atResponseEnd: number;
+
+ /**
+ * Creates a new Progress instance using the specified properties.
+ * @param [properties] Properties to set
+ * @returns Progress instance
+ */
+ public static create(properties?: google.cloud.bigquery.storage.v1beta2.StreamStats.IProgress): google.cloud.bigquery.storage.v1beta2.StreamStats.Progress;
+
+ /**
+ * Encodes the specified Progress message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.StreamStats.Progress.verify|verify} messages.
+ * @param message Progress message or plain object to encode
+ * @param [writer] Writer to encode to
+ * @returns Writer
+ */
+ public static encode(message: google.cloud.bigquery.storage.v1beta2.StreamStats.IProgress, writer?: $protobuf.Writer): $protobuf.Writer;
+
+ /**
+ * Encodes the specified Progress message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.StreamStats.Progress.verify|verify} messages.
+ * @param message Progress message or plain object to encode
+ * @param [writer] Writer to encode to
+ * @returns Writer
+ */
+ public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta2.StreamStats.IProgress, writer?: $protobuf.Writer): $protobuf.Writer;
+
+ /**
+ * Decodes a Progress message from the specified reader or buffer.
+ * @param reader Reader or buffer to decode from
+ * @param [length] Message length if known beforehand
+ * @returns Progress
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta2.StreamStats.Progress;
+
+ /**
+ * Decodes a Progress message from the specified reader or buffer, length delimited.
+ * @param reader Reader or buffer to decode from
+ * @returns Progress
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta2.StreamStats.Progress;
+
+ /**
+ * Verifies a Progress message.
+ * @param message Plain object to verify
+ * @returns `null` if valid, otherwise the reason why it is not
+ */
+ public static verify(message: { [k: string]: any }): (string|null);
+
+ /**
+ * Creates a Progress message from a plain object. Also converts values to their respective internal types.
+ * @param object Plain object
+ * @returns Progress
+ */
+ public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta2.StreamStats.Progress;
+
+ /**
+ * Creates a plain object from a Progress message. Also converts values to other types if specified.
+ * @param message Progress
+ * @param [options] Conversion options
+ * @returns Plain object
+ */
+ public static toObject(message: google.cloud.bigquery.storage.v1beta2.StreamStats.Progress, options?: $protobuf.IConversionOptions): { [k: string]: any };
+
+ /**
+ * Converts this Progress to JSON.
+ * @returns JSON object
+ */
+ public toJSON(): { [k: string]: any };
+
+ /**
+ * Gets the default type URL for Progress
+ * @param [typeUrlPrefix] your custom typeUrlPrefix (default "type.googleapis.com")
+ * @returns The default type URL
+ */
+ public static getTypeUrl(typeUrlPrefix?: string): string;
+ }
+ }
+
+ /** Properties of a ReadRowsResponse. */
+ interface IReadRowsResponse {
+
+ /** ReadRowsResponse avroRows */
+ avroRows?: (google.cloud.bigquery.storage.v1beta2.IAvroRows|null);
+
+ /** ReadRowsResponse arrowRecordBatch */
+ arrowRecordBatch?: (google.cloud.bigquery.storage.v1beta2.IArrowRecordBatch|null);
+
+ /** ReadRowsResponse rowCount */
+ rowCount?: (number|Long|string|null);
+
+ /** ReadRowsResponse stats */
+ stats?: (google.cloud.bigquery.storage.v1beta2.IStreamStats|null);
+
+ /** ReadRowsResponse throttleState */
+ throttleState?: (google.cloud.bigquery.storage.v1beta2.IThrottleState|null);
+
+ /** ReadRowsResponse avroSchema */
+ avroSchema?: (google.cloud.bigquery.storage.v1beta2.IAvroSchema|null);
+
+ /** ReadRowsResponse arrowSchema */
+ arrowSchema?: (google.cloud.bigquery.storage.v1beta2.IArrowSchema|null);
+ }
+
+ /** Represents a ReadRowsResponse. */
+ class ReadRowsResponse implements IReadRowsResponse {
+
+ /**
+ * Constructs a new ReadRowsResponse.
+ * @param [properties] Properties to set
+ */
+ constructor(properties?: google.cloud.bigquery.storage.v1beta2.IReadRowsResponse);
+
+ /** ReadRowsResponse avroRows. */
+ public avroRows?: (google.cloud.bigquery.storage.v1beta2.IAvroRows|null);
+
+ /** ReadRowsResponse arrowRecordBatch. */
+ public arrowRecordBatch?: (google.cloud.bigquery.storage.v1beta2.IArrowRecordBatch|null);
+
+ /** ReadRowsResponse rowCount. */
+ public rowCount: (number|Long|string);
+
+ /** ReadRowsResponse stats. */
+ public stats?: (google.cloud.bigquery.storage.v1beta2.IStreamStats|null);
+
+ /** ReadRowsResponse throttleState. */
+ public throttleState?: (google.cloud.bigquery.storage.v1beta2.IThrottleState|null);
+
+ /** ReadRowsResponse avroSchema. */
+ public avroSchema?: (google.cloud.bigquery.storage.v1beta2.IAvroSchema|null);
+
+ /** ReadRowsResponse arrowSchema. */
+ public arrowSchema?: (google.cloud.bigquery.storage.v1beta2.IArrowSchema|null);
+
+ /** ReadRowsResponse rows. Name of the row-data field that is currently set ("avroRows" or "arrowRecordBatch") — the rows oneof discriminator. */
+ public rows?: ("avroRows"|"arrowRecordBatch");
+
+ /** ReadRowsResponse schema. Name of the schema field that is currently set ("avroSchema" or "arrowSchema") — the schema oneof discriminator. */
+ public schema?: ("avroSchema"|"arrowSchema");
+
+ /**
+ * Creates a new ReadRowsResponse instance using the specified properties.
+ * @param [properties] Properties to set
+ * @returns ReadRowsResponse instance
+ */
+ public static create(properties?: google.cloud.bigquery.storage.v1beta2.IReadRowsResponse): google.cloud.bigquery.storage.v1beta2.ReadRowsResponse;
+
+ /**
+ * Encodes the specified ReadRowsResponse message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.ReadRowsResponse.verify|verify} messages.
+ * @param message ReadRowsResponse message or plain object to encode
+ * @param [writer] Writer to encode to
+ * @returns Writer
+ */
+ public static encode(message: google.cloud.bigquery.storage.v1beta2.IReadRowsResponse, writer?: $protobuf.Writer): $protobuf.Writer;
+
+ /**
+ * Encodes the specified ReadRowsResponse message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.ReadRowsResponse.verify|verify} messages.
+ * @param message ReadRowsResponse message or plain object to encode
+ * @param [writer] Writer to encode to
+ * @returns Writer
+ */
+ public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta2.IReadRowsResponse, writer?: $protobuf.Writer): $protobuf.Writer;
+
+ /**
+ * Decodes a ReadRowsResponse message from the specified reader or buffer.
+ * @param reader Reader or buffer to decode from
+ * @param [length] Message length if known beforehand
+ * @returns ReadRowsResponse
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta2.ReadRowsResponse;
+
+ /**
+ * Decodes a ReadRowsResponse message from the specified reader or buffer, length delimited.
+ * @param reader Reader or buffer to decode from
+ * @returns ReadRowsResponse
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta2.ReadRowsResponse;
+
+ /**
+ * Verifies a ReadRowsResponse message.
+ * @param message Plain object to verify
+ * @returns `null` if valid, otherwise the reason why it is not
+ */
+ public static verify(message: { [k: string]: any }): (string|null);
+
+ /**
+ * Creates a ReadRowsResponse message from a plain object. Also converts values to their respective internal types.
+ * @param object Plain object
+ * @returns ReadRowsResponse
+ */
+ public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta2.ReadRowsResponse;
+
+ /**
+ * Creates a plain object from a ReadRowsResponse message. Also converts values to other types if specified.
+ * @param message ReadRowsResponse
+ * @param [options] Conversion options
+ * @returns Plain object
+ */
+ public static toObject(message: google.cloud.bigquery.storage.v1beta2.ReadRowsResponse, options?: $protobuf.IConversionOptions): { [k: string]: any };
+
+ /**
+ * Converts this ReadRowsResponse to JSON.
+ * @returns JSON object
+ */
+ public toJSON(): { [k: string]: any };
+
+ /**
+ * Gets the default type URL for ReadRowsResponse
+ * @param [typeUrlPrefix] your custom typeUrlPrefix (default "type.googleapis.com")
+ * @returns The default type URL
+ */
+ public static getTypeUrl(typeUrlPrefix?: string): string;
+ }
+
+ /** Properties of a SplitReadStreamRequest. */
+ interface ISplitReadStreamRequest {
+
+ /** SplitReadStreamRequest name. NOTE(review): presumably the resource name of the ReadStream to split — confirm against the .proto definition. */
+ name?: (string|null);
+
+ /** SplitReadStreamRequest fraction. NOTE(review): likely the fractional point at which to split the stream — confirm range/semantics against the .proto definition. */
+ fraction?: (number|null);
+ }
+
+ /** Represents a SplitReadStreamRequest. Generated protobufjs static declaration — do not hand-edit; regeneration overwrites changes. */
+ class SplitReadStreamRequest implements ISplitReadStreamRequest {
+
+ /**
+ * Constructs a new SplitReadStreamRequest.
+ * @param [properties] Properties to set
+ */
+ constructor(properties?: google.cloud.bigquery.storage.v1beta2.ISplitReadStreamRequest);
+
+ /** SplitReadStreamRequest name. */
+ public name: string;
+
+ /** SplitReadStreamRequest fraction. */
+ public fraction: number;
+
+ /**
+ * Creates a new SplitReadStreamRequest instance using the specified properties.
+ * @param [properties] Properties to set
+ * @returns SplitReadStreamRequest instance
+ */
+ public static create(properties?: google.cloud.bigquery.storage.v1beta2.ISplitReadStreamRequest): google.cloud.bigquery.storage.v1beta2.SplitReadStreamRequest;
+
+ /**
+ * Encodes the specified SplitReadStreamRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.SplitReadStreamRequest.verify|verify} messages.
+ * @param message SplitReadStreamRequest message or plain object to encode
+ * @param [writer] Writer to encode to
+ * @returns Writer
+ */
+ public static encode(message: google.cloud.bigquery.storage.v1beta2.ISplitReadStreamRequest, writer?: $protobuf.Writer): $protobuf.Writer;
+
+ /**
+ * Encodes the specified SplitReadStreamRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.SplitReadStreamRequest.verify|verify} messages.
+ * @param message SplitReadStreamRequest message or plain object to encode
+ * @param [writer] Writer to encode to
+ * @returns Writer
+ */
+ public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta2.ISplitReadStreamRequest, writer?: $protobuf.Writer): $protobuf.Writer;
+
+ /**
+ * Decodes a SplitReadStreamRequest message from the specified reader or buffer.
+ * @param reader Reader or buffer to decode from
+ * @param [length] Message length if known beforehand
+ * @returns SplitReadStreamRequest
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta2.SplitReadStreamRequest;
+
+ /**
+ * Decodes a SplitReadStreamRequest message from the specified reader or buffer, length delimited.
+ * @param reader Reader or buffer to decode from
+ * @returns SplitReadStreamRequest
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta2.SplitReadStreamRequest;
+
+ /**
+ * Verifies a SplitReadStreamRequest message.
+ * @param message Plain object to verify
+ * @returns `null` if valid, otherwise the reason why it is not
+ */
+ public static verify(message: { [k: string]: any }): (string|null);
+
+ /**
+ * Creates a SplitReadStreamRequest message from a plain object. Also converts values to their respective internal types.
+ * @param object Plain object
+ * @returns SplitReadStreamRequest
+ */
+ public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta2.SplitReadStreamRequest;
+
+ /**
+ * Creates a plain object from a SplitReadStreamRequest message. Also converts values to other types if specified.
+ * @param message SplitReadStreamRequest
+ * @param [options] Conversion options
+ * @returns Plain object
+ */
+ public static toObject(message: google.cloud.bigquery.storage.v1beta2.SplitReadStreamRequest, options?: $protobuf.IConversionOptions): { [k: string]: any };
+
+ /**
+ * Converts this SplitReadStreamRequest to JSON.
+ * @returns JSON object
+ */
+ public toJSON(): { [k: string]: any };
+
+ /**
+ * Gets the default type url for SplitReadStreamRequest
+ * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com")
+ * @returns The default type url
+ */
+ public static getTypeUrl(typeUrlPrefix?: string): string;
+ }
+
+ /** Properties of a SplitReadStreamResponse. */
+ interface ISplitReadStreamResponse {
+
+ /** SplitReadStreamResponse primaryStream. NOTE(review): presumably the stream covering the first portion of the split — confirm against the .proto definition. */
+ primaryStream?: (google.cloud.bigquery.storage.v1beta2.IReadStream|null);
+
+ /** SplitReadStreamResponse remainderStream. NOTE(review): presumably the stream covering the remaining portion — confirm against the .proto definition. */
+ remainderStream?: (google.cloud.bigquery.storage.v1beta2.IReadStream|null);
+ }
+
+ /** Represents a SplitReadStreamResponse. Generated protobufjs static declaration — do not hand-edit; regeneration overwrites changes. */
+ class SplitReadStreamResponse implements ISplitReadStreamResponse {
+
+ /**
+ * Constructs a new SplitReadStreamResponse.
+ * @param [properties] Properties to set
+ */
+ constructor(properties?: google.cloud.bigquery.storage.v1beta2.ISplitReadStreamResponse);
+
+ /** SplitReadStreamResponse primaryStream. */
+ public primaryStream?: (google.cloud.bigquery.storage.v1beta2.IReadStream|null);
+
+ /** SplitReadStreamResponse remainderStream. */
+ public remainderStream?: (google.cloud.bigquery.storage.v1beta2.IReadStream|null);
+
+ /**
+ * Creates a new SplitReadStreamResponse instance using the specified properties.
+ * @param [properties] Properties to set
+ * @returns SplitReadStreamResponse instance
+ */
+ public static create(properties?: google.cloud.bigquery.storage.v1beta2.ISplitReadStreamResponse): google.cloud.bigquery.storage.v1beta2.SplitReadStreamResponse;
+
+ /**
+ * Encodes the specified SplitReadStreamResponse message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.SplitReadStreamResponse.verify|verify} messages.
+ * @param message SplitReadStreamResponse message or plain object to encode
+ * @param [writer] Writer to encode to
+ * @returns Writer
+ */
+ public static encode(message: google.cloud.bigquery.storage.v1beta2.ISplitReadStreamResponse, writer?: $protobuf.Writer): $protobuf.Writer;
+
+ /**
+ * Encodes the specified SplitReadStreamResponse message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.SplitReadStreamResponse.verify|verify} messages.
+ * @param message SplitReadStreamResponse message or plain object to encode
+ * @param [writer] Writer to encode to
+ * @returns Writer
+ */
+ public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta2.ISplitReadStreamResponse, writer?: $protobuf.Writer): $protobuf.Writer;
+
+ /**
+ * Decodes a SplitReadStreamResponse message from the specified reader or buffer.
+ * @param reader Reader or buffer to decode from
+ * @param [length] Message length if known beforehand
+ * @returns SplitReadStreamResponse
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta2.SplitReadStreamResponse;
+
+ /**
+ * Decodes a SplitReadStreamResponse message from the specified reader or buffer, length delimited.
+ * @param reader Reader or buffer to decode from
+ * @returns SplitReadStreamResponse
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta2.SplitReadStreamResponse;
+
+ /**
+ * Verifies a SplitReadStreamResponse message.
+ * @param message Plain object to verify
+ * @returns `null` if valid, otherwise the reason why it is not
+ */
+ public static verify(message: { [k: string]: any }): (string|null);
+
+ /**
+ * Creates a SplitReadStreamResponse message from a plain object. Also converts values to their respective internal types.
+ * @param object Plain object
+ * @returns SplitReadStreamResponse
+ */
+ public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta2.SplitReadStreamResponse;
+
+ /**
+ * Creates a plain object from a SplitReadStreamResponse message. Also converts values to other types if specified.
+ * @param message SplitReadStreamResponse
+ * @param [options] Conversion options
+ * @returns Plain object
+ */
+ public static toObject(message: google.cloud.bigquery.storage.v1beta2.SplitReadStreamResponse, options?: $protobuf.IConversionOptions): { [k: string]: any };
+
+ /**
+ * Converts this SplitReadStreamResponse to JSON.
+ * @returns JSON object
+ */
+ public toJSON(): { [k: string]: any };
+
+ /**
+ * Gets the default type url for SplitReadStreamResponse
+ * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com")
+ * @returns The default type url
+ */
+ public static getTypeUrl(typeUrlPrefix?: string): string;
+ }
+
+ /** Properties of a CreateWriteStreamRequest. */
+ interface ICreateWriteStreamRequest {
+
+ /** CreateWriteStreamRequest parent. NOTE(review): presumably the parent table resource name under which the stream is created — confirm against the .proto definition. */
+ parent?: (string|null);
+
+ /** CreateWriteStreamRequest writeStream. The WriteStream to create. */
+ writeStream?: (google.cloud.bigquery.storage.v1beta2.IWriteStream|null);
+ }
+
+ /** Represents a CreateWriteStreamRequest. Generated protobufjs static declaration — do not hand-edit; regeneration overwrites changes. */
+ class CreateWriteStreamRequest implements ICreateWriteStreamRequest {
+
+ /**
+ * Constructs a new CreateWriteStreamRequest.
+ * @param [properties] Properties to set
+ */
+ constructor(properties?: google.cloud.bigquery.storage.v1beta2.ICreateWriteStreamRequest);
+
+ /** CreateWriteStreamRequest parent. */
+ public parent: string;
+
+ /** CreateWriteStreamRequest writeStream. */
+ public writeStream?: (google.cloud.bigquery.storage.v1beta2.IWriteStream|null);
+
+ /**
+ * Creates a new CreateWriteStreamRequest instance using the specified properties.
+ * @param [properties] Properties to set
+ * @returns CreateWriteStreamRequest instance
+ */
+ public static create(properties?: google.cloud.bigquery.storage.v1beta2.ICreateWriteStreamRequest): google.cloud.bigquery.storage.v1beta2.CreateWriteStreamRequest;
+
+ /**
+ * Encodes the specified CreateWriteStreamRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.CreateWriteStreamRequest.verify|verify} messages.
+ * @param message CreateWriteStreamRequest message or plain object to encode
+ * @param [writer] Writer to encode to
+ * @returns Writer
+ */
+ public static encode(message: google.cloud.bigquery.storage.v1beta2.ICreateWriteStreamRequest, writer?: $protobuf.Writer): $protobuf.Writer;
+
+ /**
+ * Encodes the specified CreateWriteStreamRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.CreateWriteStreamRequest.verify|verify} messages.
+ * @param message CreateWriteStreamRequest message or plain object to encode
+ * @param [writer] Writer to encode to
+ * @returns Writer
+ */
+ public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta2.ICreateWriteStreamRequest, writer?: $protobuf.Writer): $protobuf.Writer;
+
+ /**
+ * Decodes a CreateWriteStreamRequest message from the specified reader or buffer.
+ * @param reader Reader or buffer to decode from
+ * @param [length] Message length if known beforehand
+ * @returns CreateWriteStreamRequest
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta2.CreateWriteStreamRequest;
+
+ /**
+ * Decodes a CreateWriteStreamRequest message from the specified reader or buffer, length delimited.
+ * @param reader Reader or buffer to decode from
+ * @returns CreateWriteStreamRequest
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta2.CreateWriteStreamRequest;
+
+ /**
+ * Verifies a CreateWriteStreamRequest message.
+ * @param message Plain object to verify
+ * @returns `null` if valid, otherwise the reason why it is not
+ */
+ public static verify(message: { [k: string]: any }): (string|null);
+
+ /**
+ * Creates a CreateWriteStreamRequest message from a plain object. Also converts values to their respective internal types.
+ * @param object Plain object
+ * @returns CreateWriteStreamRequest
+ */
+ public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta2.CreateWriteStreamRequest;
+
+ /**
+ * Creates a plain object from a CreateWriteStreamRequest message. Also converts values to other types if specified.
+ * @param message CreateWriteStreamRequest
+ * @param [options] Conversion options
+ * @returns Plain object
+ */
+ public static toObject(message: google.cloud.bigquery.storage.v1beta2.CreateWriteStreamRequest, options?: $protobuf.IConversionOptions): { [k: string]: any };
+
+ /**
+ * Converts this CreateWriteStreamRequest to JSON.
+ * @returns JSON object
+ */
+ public toJSON(): { [k: string]: any };
+
+ /**
+ * Gets the default type url for CreateWriteStreamRequest
+ * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com")
+ * @returns The default type url
+ */
+ public static getTypeUrl(typeUrlPrefix?: string): string;
+ }
+
+ /** Properties of an AppendRowsRequest. */
+ interface IAppendRowsRequest {
+
+ /** AppendRowsRequest writeStream. NOTE(review): presumably the resource name of the WriteStream to append to — confirm against the .proto definition. */
+ writeStream?: (string|null);
+
+ /** AppendRowsRequest offset. Wrapped int64 so that "unset" is distinguishable from 0. */
+ offset?: (google.protobuf.IInt64Value|null);
+
+ /** AppendRowsRequest protoRows. Row payload in protocol-buffer form (the only `rows` variant declared here). */
+ protoRows?: (google.cloud.bigquery.storage.v1beta2.AppendRowsRequest.IProtoData|null);
+
+ /** AppendRowsRequest traceId. */
+ traceId?: (string|null);
+ }
+
+ /** Represents an AppendRowsRequest. Generated protobufjs static declaration — do not hand-edit; regeneration overwrites changes. */
+ class AppendRowsRequest implements IAppendRowsRequest {
+
+ /**
+ * Constructs a new AppendRowsRequest.
+ * @param [properties] Properties to set
+ */
+ constructor(properties?: google.cloud.bigquery.storage.v1beta2.IAppendRowsRequest);
+
+ /** AppendRowsRequest writeStream. */
+ public writeStream: string;
+
+ /** AppendRowsRequest offset. */
+ public offset?: (google.protobuf.IInt64Value|null);
+
+ /** AppendRowsRequest protoRows. */
+ public protoRows?: (google.cloud.bigquery.storage.v1beta2.AppendRowsRequest.IProtoData|null);
+
+ /** AppendRowsRequest traceId. */
+ public traceId: string;
+
+ /** AppendRowsRequest rows. protobufjs oneof discriminator: names the currently-set variant ("protoRows" is the only variant declared). */
+ public rows?: "protoRows";
+
+ /**
+ * Creates a new AppendRowsRequest instance using the specified properties.
+ * @param [properties] Properties to set
+ * @returns AppendRowsRequest instance
+ */
+ public static create(properties?: google.cloud.bigquery.storage.v1beta2.IAppendRowsRequest): google.cloud.bigquery.storage.v1beta2.AppendRowsRequest;
+
+ /**
+ * Encodes the specified AppendRowsRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.AppendRowsRequest.verify|verify} messages.
+ * @param message AppendRowsRequest message or plain object to encode
+ * @param [writer] Writer to encode to
+ * @returns Writer
+ */
+ public static encode(message: google.cloud.bigquery.storage.v1beta2.IAppendRowsRequest, writer?: $protobuf.Writer): $protobuf.Writer;
+
+ /**
+ * Encodes the specified AppendRowsRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.AppendRowsRequest.verify|verify} messages.
+ * @param message AppendRowsRequest message or plain object to encode
+ * @param [writer] Writer to encode to
+ * @returns Writer
+ */
+ public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta2.IAppendRowsRequest, writer?: $protobuf.Writer): $protobuf.Writer;
+
+ /**
+ * Decodes an AppendRowsRequest message from the specified reader or buffer.
+ * @param reader Reader or buffer to decode from
+ * @param [length] Message length if known beforehand
+ * @returns AppendRowsRequest
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta2.AppendRowsRequest;
+
+ /**
+ * Decodes an AppendRowsRequest message from the specified reader or buffer, length delimited.
+ * @param reader Reader or buffer to decode from
+ * @returns AppendRowsRequest
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta2.AppendRowsRequest;
+
+ /**
+ * Verifies an AppendRowsRequest message.
+ * @param message Plain object to verify
+ * @returns `null` if valid, otherwise the reason why it is not
+ */
+ public static verify(message: { [k: string]: any }): (string|null);
+
+ /**
+ * Creates an AppendRowsRequest message from a plain object. Also converts values to their respective internal types.
+ * @param object Plain object
+ * @returns AppendRowsRequest
+ */
+ public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta2.AppendRowsRequest;
+
+ /**
+ * Creates a plain object from an AppendRowsRequest message. Also converts values to other types if specified.
+ * @param message AppendRowsRequest
+ * @param [options] Conversion options
+ * @returns Plain object
+ */
+ public static toObject(message: google.cloud.bigquery.storage.v1beta2.AppendRowsRequest, options?: $protobuf.IConversionOptions): { [k: string]: any };
+
+ /**
+ * Converts this AppendRowsRequest to JSON.
+ * @returns JSON object
+ */
+ public toJSON(): { [k: string]: any };
+
+ /**
+ * Gets the default type url for AppendRowsRequest
+ * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com")
+ * @returns The default type url
+ */
+ public static getTypeUrl(typeUrlPrefix?: string): string;
+ }
+
+ /** Nested message types of AppendRowsRequest (TypeScript declaration merging with the class above). */
+ namespace AppendRowsRequest {
+
+ /** Properties of a ProtoData. */
+ interface IProtoData {
+
+ /** ProtoData writerSchema. Schema describing the serialized rows. */
+ writerSchema?: (google.cloud.bigquery.storage.v1beta2.IProtoSchema|null);
+
+ /** ProtoData rows. Serialized row data. */
+ rows?: (google.cloud.bigquery.storage.v1beta2.IProtoRows|null);
+ }
+
+ /** Represents a ProtoData. */
+ class ProtoData implements IProtoData {
+
+ /**
+ * Constructs a new ProtoData.
+ * @param [properties] Properties to set
+ */
+ constructor(properties?: google.cloud.bigquery.storage.v1beta2.AppendRowsRequest.IProtoData);
+
+ /** ProtoData writerSchema. */
+ public writerSchema?: (google.cloud.bigquery.storage.v1beta2.IProtoSchema|null);
+
+ /** ProtoData rows. */
+ public rows?: (google.cloud.bigquery.storage.v1beta2.IProtoRows|null);
+
+ /**
+ * Creates a new ProtoData instance using the specified properties.
+ * @param [properties] Properties to set
+ * @returns ProtoData instance
+ */
+ public static create(properties?: google.cloud.bigquery.storage.v1beta2.AppendRowsRequest.IProtoData): google.cloud.bigquery.storage.v1beta2.AppendRowsRequest.ProtoData;
+
+ /**
+ * Encodes the specified ProtoData message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.AppendRowsRequest.ProtoData.verify|verify} messages.
+ * @param message ProtoData message or plain object to encode
+ * @param [writer] Writer to encode to
+ * @returns Writer
+ */
+ public static encode(message: google.cloud.bigquery.storage.v1beta2.AppendRowsRequest.IProtoData, writer?: $protobuf.Writer): $protobuf.Writer;
+
+ /**
+ * Encodes the specified ProtoData message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.AppendRowsRequest.ProtoData.verify|verify} messages.
+ * @param message ProtoData message or plain object to encode
+ * @param [writer] Writer to encode to
+ * @returns Writer
+ */
+ public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta2.AppendRowsRequest.IProtoData, writer?: $protobuf.Writer): $protobuf.Writer;
+
+ /**
+ * Decodes a ProtoData message from the specified reader or buffer.
+ * @param reader Reader or buffer to decode from
+ * @param [length] Message length if known beforehand
+ * @returns ProtoData
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta2.AppendRowsRequest.ProtoData;
+
+ /**
+ * Decodes a ProtoData message from the specified reader or buffer, length delimited.
+ * @param reader Reader or buffer to decode from
+ * @returns ProtoData
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta2.AppendRowsRequest.ProtoData;
+
+ /**
+ * Verifies a ProtoData message.
+ * @param message Plain object to verify
+ * @returns `null` if valid, otherwise the reason why it is not
+ */
+ public static verify(message: { [k: string]: any }): (string|null);
+
+ /**
+ * Creates a ProtoData message from a plain object. Also converts values to their respective internal types.
+ * @param object Plain object
+ * @returns ProtoData
+ */
+ public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta2.AppendRowsRequest.ProtoData;
+
+ /**
+ * Creates a plain object from a ProtoData message. Also converts values to other types if specified.
+ * @param message ProtoData
+ * @param [options] Conversion options
+ * @returns Plain object
+ */
+ public static toObject(message: google.cloud.bigquery.storage.v1beta2.AppendRowsRequest.ProtoData, options?: $protobuf.IConversionOptions): { [k: string]: any };
+
+ /**
+ * Converts this ProtoData to JSON.
+ * @returns JSON object
+ */
+ public toJSON(): { [k: string]: any };
+
+ /**
+ * Gets the default type url for ProtoData
+ * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com")
+ * @returns The default type url
+ */
+ public static getTypeUrl(typeUrlPrefix?: string): string;
+ }
+ }
+
+ /** Properties of an AppendRowsResponse. */
+ interface IAppendRowsResponse {
+
+ /** AppendRowsResponse appendResult. Success variant of the `response` oneof. */
+ appendResult?: (google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.IAppendResult|null);
+
+ /** AppendRowsResponse error. Failure variant of the `response` oneof (google.rpc.Status). */
+ error?: (google.rpc.IStatus|null);
+
+ /** AppendRowsResponse updatedSchema. NOTE(review): presumably set when the destination table schema has changed — confirm against the .proto definition. */
+ updatedSchema?: (google.cloud.bigquery.storage.v1beta2.ITableSchema|null);
+ }
+
+ /** Represents an AppendRowsResponse. Generated protobufjs static declaration — do not hand-edit; regeneration overwrites changes. */
+ class AppendRowsResponse implements IAppendRowsResponse {
+
+ /**
+ * Constructs a new AppendRowsResponse.
+ * @param [properties] Properties to set
+ */
+ constructor(properties?: google.cloud.bigquery.storage.v1beta2.IAppendRowsResponse);
+
+ /** AppendRowsResponse appendResult. */
+ public appendResult?: (google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.IAppendResult|null);
+
+ /** AppendRowsResponse error. */
+ public error?: (google.rpc.IStatus|null);
+
+ /** AppendRowsResponse updatedSchema. */
+ public updatedSchema?: (google.cloud.bigquery.storage.v1beta2.ITableSchema|null);
+
+ /** AppendRowsResponse response. protobufjs oneof discriminator: names whichever of "appendResult"/"error" is currently set. */
+ public response?: ("appendResult"|"error");
+
+ /**
+ * Creates a new AppendRowsResponse instance using the specified properties.
+ * @param [properties] Properties to set
+ * @returns AppendRowsResponse instance
+ */
+ public static create(properties?: google.cloud.bigquery.storage.v1beta2.IAppendRowsResponse): google.cloud.bigquery.storage.v1beta2.AppendRowsResponse;
+
+ /**
+ * Encodes the specified AppendRowsResponse message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.verify|verify} messages.
+ * @param message AppendRowsResponse message or plain object to encode
+ * @param [writer] Writer to encode to
+ * @returns Writer
+ */
+ public static encode(message: google.cloud.bigquery.storage.v1beta2.IAppendRowsResponse, writer?: $protobuf.Writer): $protobuf.Writer;
+
+ /**
+ * Encodes the specified AppendRowsResponse message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.verify|verify} messages.
+ * @param message AppendRowsResponse message or plain object to encode
+ * @param [writer] Writer to encode to
+ * @returns Writer
+ */
+ public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta2.IAppendRowsResponse, writer?: $protobuf.Writer): $protobuf.Writer;
+
+ /**
+ * Decodes an AppendRowsResponse message from the specified reader or buffer.
+ * @param reader Reader or buffer to decode from
+ * @param [length] Message length if known beforehand
+ * @returns AppendRowsResponse
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta2.AppendRowsResponse;
+
+ /**
+ * Decodes an AppendRowsResponse message from the specified reader or buffer, length delimited.
+ * @param reader Reader or buffer to decode from
+ * @returns AppendRowsResponse
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta2.AppendRowsResponse;
+
+ /**
+ * Verifies an AppendRowsResponse message.
+ * @param message Plain object to verify
+ * @returns `null` if valid, otherwise the reason why it is not
+ */
+ public static verify(message: { [k: string]: any }): (string|null);
+
+ /**
+ * Creates an AppendRowsResponse message from a plain object. Also converts values to their respective internal types.
+ * @param object Plain object
+ * @returns AppendRowsResponse
+ */
+ public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta2.AppendRowsResponse;
+
+ /**
+ * Creates a plain object from an AppendRowsResponse message. Also converts values to other types if specified.
+ * @param message AppendRowsResponse
+ * @param [options] Conversion options
+ * @returns Plain object
+ */
+ public static toObject(message: google.cloud.bigquery.storage.v1beta2.AppendRowsResponse, options?: $protobuf.IConversionOptions): { [k: string]: any };
+
+ /**
+ * Converts this AppendRowsResponse to JSON.
+ * @returns JSON object
+ */
+ public toJSON(): { [k: string]: any };
+
+ /**
+ * Gets the default type url for AppendRowsResponse
+ * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com")
+ * @returns The default type url
+ */
+ public static getTypeUrl(typeUrlPrefix?: string): string;
+ }
+
+ /** Nested message types of AppendRowsResponse (TypeScript declaration merging with the class above). */
+ namespace AppendRowsResponse {
+
+ /** Properties of an AppendResult. */
+ interface IAppendResult {
+
+ /** AppendResult offset. Wrapped int64 so that "unset" is distinguishable from 0. */
+ offset?: (google.protobuf.IInt64Value|null);
+ }
+
+ /** Represents an AppendResult. */
+ class AppendResult implements IAppendResult {
+
+ /**
+ * Constructs a new AppendResult.
+ * @param [properties] Properties to set
+ */
+ constructor(properties?: google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.IAppendResult);
+
+ /** AppendResult offset. */
+ public offset?: (google.protobuf.IInt64Value|null);
+
+ /**
+ * Creates a new AppendResult instance using the specified properties.
+ * @param [properties] Properties to set
+ * @returns AppendResult instance
+ */
+ public static create(properties?: google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.IAppendResult): google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult;
+
+ /**
+ * Encodes the specified AppendResult message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult.verify|verify} messages.
+ * @param message AppendResult message or plain object to encode
+ * @param [writer] Writer to encode to
+ * @returns Writer
+ */
+ public static encode(message: google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.IAppendResult, writer?: $protobuf.Writer): $protobuf.Writer;
+
+ /**
+ * Encodes the specified AppendResult message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult.verify|verify} messages.
+ * @param message AppendResult message or plain object to encode
+ * @param [writer] Writer to encode to
+ * @returns Writer
+ */
+ public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.IAppendResult, writer?: $protobuf.Writer): $protobuf.Writer;
+
+ /**
+ * Decodes an AppendResult message from the specified reader or buffer.
+ * @param reader Reader or buffer to decode from
+ * @param [length] Message length if known beforehand
+ * @returns AppendResult
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult;
+
+ /**
+ * Decodes an AppendResult message from the specified reader or buffer, length delimited.
+ * @param reader Reader or buffer to decode from
+ * @returns AppendResult
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult;
+
+ /**
+ * Verifies an AppendResult message.
+ * @param message Plain object to verify
+ * @returns `null` if valid, otherwise the reason why it is not
+ */
+ public static verify(message: { [k: string]: any }): (string|null);
+
+ /**
+ * Creates an AppendResult message from a plain object. Also converts values to their respective internal types.
+ * @param object Plain object
+ * @returns AppendResult
+ */
+ public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult;
+
+ /**
+ * Creates a plain object from an AppendResult message. Also converts values to other types if specified.
+ * @param message AppendResult
+ * @param [options] Conversion options
+ * @returns Plain object
+ */
+ public static toObject(message: google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult, options?: $protobuf.IConversionOptions): { [k: string]: any };
+
+ /**
+ * Converts this AppendResult to JSON.
+ * @returns JSON object
+ */
+ public toJSON(): { [k: string]: any };
+
+ /**
+ * Gets the default type url for AppendResult
+ * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com")
+ * @returns The default type url
+ */
+ public static getTypeUrl(typeUrlPrefix?: string): string;
+ }
+ }
+
+ /** Properties of a GetWriteStreamRequest. */
+ interface IGetWriteStreamRequest {
+
+ /** GetWriteStreamRequest name. NOTE(review): presumably the resource name of the WriteStream to fetch — confirm against the .proto definition. */
+ name?: (string|null);
+ }
+
+ /** Represents a GetWriteStreamRequest. */
+ class GetWriteStreamRequest implements IGetWriteStreamRequest {
+
+ /**
+ * Constructs a new GetWriteStreamRequest.
+ * @param [properties] Properties to set
+ */
+ constructor(properties?: google.cloud.bigquery.storage.v1beta2.IGetWriteStreamRequest);
+
+ /** GetWriteStreamRequest name. */
+ public name: string;
+
+ /**
+ * Creates a new GetWriteStreamRequest instance using the specified properties.
+ * @param [properties] Properties to set
+ * @returns GetWriteStreamRequest instance
+ */
+ public static create(properties?: google.cloud.bigquery.storage.v1beta2.IGetWriteStreamRequest): google.cloud.bigquery.storage.v1beta2.GetWriteStreamRequest;
+
+ /**
+ * Encodes the specified GetWriteStreamRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.GetWriteStreamRequest.verify|verify} messages.
+ * @param message GetWriteStreamRequest message or plain object to encode
+ * @param [writer] Writer to encode to
+ * @returns Writer
+ */
+ public static encode(message: google.cloud.bigquery.storage.v1beta2.IGetWriteStreamRequest, writer?: $protobuf.Writer): $protobuf.Writer;
+
+ /**
+ * Encodes the specified GetWriteStreamRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.GetWriteStreamRequest.verify|verify} messages.
+ * @param message GetWriteStreamRequest message or plain object to encode
+ * @param [writer] Writer to encode to
+ * @returns Writer
+ */
+ public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta2.IGetWriteStreamRequest, writer?: $protobuf.Writer): $protobuf.Writer;
+
+ /**
+ * Decodes a GetWriteStreamRequest message from the specified reader or buffer.
+ * @param reader Reader or buffer to decode from
+ * @param [length] Message length if known beforehand
+ * @returns GetWriteStreamRequest
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta2.GetWriteStreamRequest;
+
+ /**
+ * Decodes a GetWriteStreamRequest message from the specified reader or buffer, length delimited.
+ * @param reader Reader or buffer to decode from
+ * @returns GetWriteStreamRequest
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta2.GetWriteStreamRequest;
+
+ /**
+ * Verifies a GetWriteStreamRequest message.
+ * @param message Plain object to verify
+ * @returns `null` if valid, otherwise the reason why it is not
+ */
+ public static verify(message: { [k: string]: any }): (string|null);
+
+ /**
+ * Creates a GetWriteStreamRequest message from a plain object. Also converts values to their respective internal types.
+ * @param object Plain object
+ * @returns GetWriteStreamRequest
+ */
+ public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta2.GetWriteStreamRequest;
+
+ /**
+ * Creates a plain object from a GetWriteStreamRequest message. Also converts values to other types if specified.
+ * @param message GetWriteStreamRequest
+ * @param [options] Conversion options
+ * @returns Plain object
+ */
+ public static toObject(message: google.cloud.bigquery.storage.v1beta2.GetWriteStreamRequest, options?: $protobuf.IConversionOptions): { [k: string]: any };
+
+ /**
+ * Converts this GetWriteStreamRequest to JSON.
+ * @returns JSON object
+ */
+ public toJSON(): { [k: string]: any };
+
+ /**
+ * Gets the default type url for GetWriteStreamRequest
+ * @param [typeUrlPrefix] your custom typeUrlPrefix (default "type.googleapis.com")
+ * @returns The default type url
+ */
+ public static getTypeUrl(typeUrlPrefix?: string): string;
+ }
+
+ /** Properties of a BatchCommitWriteStreamsRequest (plain-object form accepted by create/encode/fromObject). */
+ interface IBatchCommitWriteStreamsRequest {
+
+ /** BatchCommitWriteStreamsRequest parent. */
+ parent?: (string|null);
+
+ /** BatchCommitWriteStreamsRequest writeStreams. */
+ writeStreams?: (string[]|null);
+ }
+
+ /** Represents a BatchCommitWriteStreamsRequest. */
+ class BatchCommitWriteStreamsRequest implements IBatchCommitWriteStreamsRequest {
+
+ /**
+ * Constructs a new BatchCommitWriteStreamsRequest.
+ * @param [properties] Properties to set
+ */
+ constructor(properties?: google.cloud.bigquery.storage.v1beta2.IBatchCommitWriteStreamsRequest);
+
+ /** BatchCommitWriteStreamsRequest parent. */
+ public parent: string;
+
+ /** BatchCommitWriteStreamsRequest writeStreams. */
+ public writeStreams: string[];
+
+ /**
+ * Creates a new BatchCommitWriteStreamsRequest instance using the specified properties.
+ * @param [properties] Properties to set
+ * @returns BatchCommitWriteStreamsRequest instance
+ */
+ public static create(properties?: google.cloud.bigquery.storage.v1beta2.IBatchCommitWriteStreamsRequest): google.cloud.bigquery.storage.v1beta2.BatchCommitWriteStreamsRequest;
+
+ /**
+ * Encodes the specified BatchCommitWriteStreamsRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.BatchCommitWriteStreamsRequest.verify|verify} messages.
+ * @param message BatchCommitWriteStreamsRequest message or plain object to encode
+ * @param [writer] Writer to encode to
+ * @returns Writer
+ */
+ public static encode(message: google.cloud.bigquery.storage.v1beta2.IBatchCommitWriteStreamsRequest, writer?: $protobuf.Writer): $protobuf.Writer;
+
+ /**
+ * Encodes the specified BatchCommitWriteStreamsRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.BatchCommitWriteStreamsRequest.verify|verify} messages.
+ * @param message BatchCommitWriteStreamsRequest message or plain object to encode
+ * @param [writer] Writer to encode to
+ * @returns Writer
+ */
+ public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta2.IBatchCommitWriteStreamsRequest, writer?: $protobuf.Writer): $protobuf.Writer;
+
+ /**
+ * Decodes a BatchCommitWriteStreamsRequest message from the specified reader or buffer.
+ * @param reader Reader or buffer to decode from
+ * @param [length] Message length if known beforehand
+ * @returns BatchCommitWriteStreamsRequest
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta2.BatchCommitWriteStreamsRequest;
+
+ /**
+ * Decodes a BatchCommitWriteStreamsRequest message from the specified reader or buffer, length delimited.
+ * @param reader Reader or buffer to decode from
+ * @returns BatchCommitWriteStreamsRequest
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta2.BatchCommitWriteStreamsRequest;
+
+ /**
+ * Verifies a BatchCommitWriteStreamsRequest message.
+ * @param message Plain object to verify
+ * @returns `null` if valid, otherwise the reason why it is not
+ */
+ public static verify(message: { [k: string]: any }): (string|null);
+
+ /**
+ * Creates a BatchCommitWriteStreamsRequest message from a plain object. Also converts values to their respective internal types.
+ * @param object Plain object
+ * @returns BatchCommitWriteStreamsRequest
+ */
+ public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta2.BatchCommitWriteStreamsRequest;
+
+ /**
+ * Creates a plain object from a BatchCommitWriteStreamsRequest message. Also converts values to other types if specified.
+ * @param message BatchCommitWriteStreamsRequest
+ * @param [options] Conversion options
+ * @returns Plain object
+ */
+ public static toObject(message: google.cloud.bigquery.storage.v1beta2.BatchCommitWriteStreamsRequest, options?: $protobuf.IConversionOptions): { [k: string]: any };
+
+ /**
+ * Converts this BatchCommitWriteStreamsRequest to JSON.
+ * @returns JSON object
+ */
+ public toJSON(): { [k: string]: any };
+
+ /**
+ * Gets the default type url for BatchCommitWriteStreamsRequest
+ * @param [typeUrlPrefix] your custom typeUrlPrefix (default "type.googleapis.com")
+ * @returns The default type url
+ */
+ public static getTypeUrl(typeUrlPrefix?: string): string;
+ }
+
+ /** Properties of a BatchCommitWriteStreamsResponse (plain-object form accepted by create/encode/fromObject). */
+ interface IBatchCommitWriteStreamsResponse {
+
+ /** BatchCommitWriteStreamsResponse commitTime. */
+ commitTime?: (google.protobuf.ITimestamp|null);
+
+ /** BatchCommitWriteStreamsResponse streamErrors. */
+ streamErrors?: (google.cloud.bigquery.storage.v1beta2.IStorageError[]|null);
+ }
+
+ /** Represents a BatchCommitWriteStreamsResponse. */
+ class BatchCommitWriteStreamsResponse implements IBatchCommitWriteStreamsResponse {
+
+ /**
+ * Constructs a new BatchCommitWriteStreamsResponse.
+ * @param [properties] Properties to set
+ */
+ constructor(properties?: google.cloud.bigquery.storage.v1beta2.IBatchCommitWriteStreamsResponse);
+
+ /** BatchCommitWriteStreamsResponse commitTime. */
+ public commitTime?: (google.protobuf.ITimestamp|null);
+
+ /** BatchCommitWriteStreamsResponse streamErrors. */
+ public streamErrors: google.cloud.bigquery.storage.v1beta2.IStorageError[];
+
+ /**
+ * Creates a new BatchCommitWriteStreamsResponse instance using the specified properties.
+ * @param [properties] Properties to set
+ * @returns BatchCommitWriteStreamsResponse instance
+ */
+ public static create(properties?: google.cloud.bigquery.storage.v1beta2.IBatchCommitWriteStreamsResponse): google.cloud.bigquery.storage.v1beta2.BatchCommitWriteStreamsResponse;
+
+ /**
+ * Encodes the specified BatchCommitWriteStreamsResponse message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.BatchCommitWriteStreamsResponse.verify|verify} messages.
+ * @param message BatchCommitWriteStreamsResponse message or plain object to encode
+ * @param [writer] Writer to encode to
+ * @returns Writer
+ */
+ public static encode(message: google.cloud.bigquery.storage.v1beta2.IBatchCommitWriteStreamsResponse, writer?: $protobuf.Writer): $protobuf.Writer;
+
+ /**
+ * Encodes the specified BatchCommitWriteStreamsResponse message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.BatchCommitWriteStreamsResponse.verify|verify} messages.
+ * @param message BatchCommitWriteStreamsResponse message or plain object to encode
+ * @param [writer] Writer to encode to
+ * @returns Writer
+ */
+ public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta2.IBatchCommitWriteStreamsResponse, writer?: $protobuf.Writer): $protobuf.Writer;
+
+ /**
+ * Decodes a BatchCommitWriteStreamsResponse message from the specified reader or buffer.
+ * @param reader Reader or buffer to decode from
+ * @param [length] Message length if known beforehand
+ * @returns BatchCommitWriteStreamsResponse
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta2.BatchCommitWriteStreamsResponse;
+
+ /**
+ * Decodes a BatchCommitWriteStreamsResponse message from the specified reader or buffer, length delimited.
+ * @param reader Reader or buffer to decode from
+ * @returns BatchCommitWriteStreamsResponse
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta2.BatchCommitWriteStreamsResponse;
+
+ /**
+ * Verifies a BatchCommitWriteStreamsResponse message.
+ * @param message Plain object to verify
+ * @returns `null` if valid, otherwise the reason why it is not
+ */
+ public static verify(message: { [k: string]: any }): (string|null);
+
+ /**
+ * Creates a BatchCommitWriteStreamsResponse message from a plain object. Also converts values to their respective internal types.
+ * @param object Plain object
+ * @returns BatchCommitWriteStreamsResponse
+ */
+ public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta2.BatchCommitWriteStreamsResponse;
+
+ /**
+ * Creates a plain object from a BatchCommitWriteStreamsResponse message. Also converts values to other types if specified.
+ * @param message BatchCommitWriteStreamsResponse
+ * @param [options] Conversion options
+ * @returns Plain object
+ */
+ public static toObject(message: google.cloud.bigquery.storage.v1beta2.BatchCommitWriteStreamsResponse, options?: $protobuf.IConversionOptions): { [k: string]: any };
+
+ /**
+ * Converts this BatchCommitWriteStreamsResponse to JSON.
+ * @returns JSON object
+ */
+ public toJSON(): { [k: string]: any };
+
+ /**
+ * Gets the default type url for BatchCommitWriteStreamsResponse
+ * @param [typeUrlPrefix] your custom typeUrlPrefix (default "type.googleapis.com")
+ * @returns The default type url
+ */
+ public static getTypeUrl(typeUrlPrefix?: string): string;
+ }
+
+ /** Properties of a FinalizeWriteStreamRequest (plain-object form accepted by create/encode/fromObject). */
+ interface IFinalizeWriteStreamRequest {
+
+ /** FinalizeWriteStreamRequest name. */
+ name?: (string|null);
+ }
+
+ /** Represents a FinalizeWriteStreamRequest. */
+ class FinalizeWriteStreamRequest implements IFinalizeWriteStreamRequest {
+
+ /**
+ * Constructs a new FinalizeWriteStreamRequest.
+ * @param [properties] Properties to set
+ */
+ constructor(properties?: google.cloud.bigquery.storage.v1beta2.IFinalizeWriteStreamRequest);
+
+ /** FinalizeWriteStreamRequest name. */
+ public name: string;
+
+ /**
+ * Creates a new FinalizeWriteStreamRequest instance using the specified properties.
+ * @param [properties] Properties to set
+ * @returns FinalizeWriteStreamRequest instance
+ */
+ public static create(properties?: google.cloud.bigquery.storage.v1beta2.IFinalizeWriteStreamRequest): google.cloud.bigquery.storage.v1beta2.FinalizeWriteStreamRequest;
+
+ /**
+ * Encodes the specified FinalizeWriteStreamRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.FinalizeWriteStreamRequest.verify|verify} messages.
+ * @param message FinalizeWriteStreamRequest message or plain object to encode
+ * @param [writer] Writer to encode to
+ * @returns Writer
+ */
+ public static encode(message: google.cloud.bigquery.storage.v1beta2.IFinalizeWriteStreamRequest, writer?: $protobuf.Writer): $protobuf.Writer;
+
+ /**
+ * Encodes the specified FinalizeWriteStreamRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.FinalizeWriteStreamRequest.verify|verify} messages.
+ * @param message FinalizeWriteStreamRequest message or plain object to encode
+ * @param [writer] Writer to encode to
+ * @returns Writer
+ */
+ public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta2.IFinalizeWriteStreamRequest, writer?: $protobuf.Writer): $protobuf.Writer;
+
+ /**
+ * Decodes a FinalizeWriteStreamRequest message from the specified reader or buffer.
+ * @param reader Reader or buffer to decode from
+ * @param [length] Message length if known beforehand
+ * @returns FinalizeWriteStreamRequest
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta2.FinalizeWriteStreamRequest;
+
+ /**
+ * Decodes a FinalizeWriteStreamRequest message from the specified reader or buffer, length delimited.
+ * @param reader Reader or buffer to decode from
+ * @returns FinalizeWriteStreamRequest
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta2.FinalizeWriteStreamRequest;
+
+ /**
+ * Verifies a FinalizeWriteStreamRequest message.
+ * @param message Plain object to verify
+ * @returns `null` if valid, otherwise the reason why it is not
+ */
+ public static verify(message: { [k: string]: any }): (string|null);
+
+ /**
+ * Creates a FinalizeWriteStreamRequest message from a plain object. Also converts values to their respective internal types.
+ * @param object Plain object
+ * @returns FinalizeWriteStreamRequest
+ */
+ public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta2.FinalizeWriteStreamRequest;
+
+ /**
+ * Creates a plain object from a FinalizeWriteStreamRequest message. Also converts values to other types if specified.
+ * @param message FinalizeWriteStreamRequest
+ * @param [options] Conversion options
+ * @returns Plain object
+ */
+ public static toObject(message: google.cloud.bigquery.storage.v1beta2.FinalizeWriteStreamRequest, options?: $protobuf.IConversionOptions): { [k: string]: any };
+
+ /**
+ * Converts this FinalizeWriteStreamRequest to JSON.
+ * @returns JSON object
+ */
+ public toJSON(): { [k: string]: any };
+
+ /**
+ * Gets the default type url for FinalizeWriteStreamRequest
+ * @param [typeUrlPrefix] your custom typeUrlPrefix (default "type.googleapis.com")
+ * @returns The default type url
+ */
+ public static getTypeUrl(typeUrlPrefix?: string): string;
+ }
+
+ /** Properties of a FinalizeWriteStreamResponse (plain-object form accepted by create/encode/fromObject). */
+ interface IFinalizeWriteStreamResponse {
+
+ /** FinalizeWriteStreamResponse rowCount. 64-bit value; `Long` when the long.js dependency is present, otherwise number/string. */
+ rowCount?: (number|Long|string|null);
+ }
+
+ /** Represents a FinalizeWriteStreamResponse. */
+ class FinalizeWriteStreamResponse implements IFinalizeWriteStreamResponse {
+
+ /**
+ * Constructs a new FinalizeWriteStreamResponse.
+ * @param [properties] Properties to set
+ */
+ constructor(properties?: google.cloud.bigquery.storage.v1beta2.IFinalizeWriteStreamResponse);
+
+ /** FinalizeWriteStreamResponse rowCount. */
+ public rowCount: (number|Long|string);
+
+ /**
+ * Creates a new FinalizeWriteStreamResponse instance using the specified properties.
+ * @param [properties] Properties to set
+ * @returns FinalizeWriteStreamResponse instance
+ */
+ public static create(properties?: google.cloud.bigquery.storage.v1beta2.IFinalizeWriteStreamResponse): google.cloud.bigquery.storage.v1beta2.FinalizeWriteStreamResponse;
+
+ /**
+ * Encodes the specified FinalizeWriteStreamResponse message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.FinalizeWriteStreamResponse.verify|verify} messages.
+ * @param message FinalizeWriteStreamResponse message or plain object to encode
+ * @param [writer] Writer to encode to
+ * @returns Writer
+ */
+ public static encode(message: google.cloud.bigquery.storage.v1beta2.IFinalizeWriteStreamResponse, writer?: $protobuf.Writer): $protobuf.Writer;
+
+ /**
+ * Encodes the specified FinalizeWriteStreamResponse message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.FinalizeWriteStreamResponse.verify|verify} messages.
+ * @param message FinalizeWriteStreamResponse message or plain object to encode
+ * @param [writer] Writer to encode to
+ * @returns Writer
+ */
+ public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta2.IFinalizeWriteStreamResponse, writer?: $protobuf.Writer): $protobuf.Writer;
+
+ /**
+ * Decodes a FinalizeWriteStreamResponse message from the specified reader or buffer.
+ * @param reader Reader or buffer to decode from
+ * @param [length] Message length if known beforehand
+ * @returns FinalizeWriteStreamResponse
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta2.FinalizeWriteStreamResponse;
+
+ /**
+ * Decodes a FinalizeWriteStreamResponse message from the specified reader or buffer, length delimited.
+ * @param reader Reader or buffer to decode from
+ * @returns FinalizeWriteStreamResponse
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta2.FinalizeWriteStreamResponse;
+
+ /**
+ * Verifies a FinalizeWriteStreamResponse message.
+ * @param message Plain object to verify
+ * @returns `null` if valid, otherwise the reason why it is not
+ */
+ public static verify(message: { [k: string]: any }): (string|null);
+
+ /**
+ * Creates a FinalizeWriteStreamResponse message from a plain object. Also converts values to their respective internal types.
+ * @param object Plain object
+ * @returns FinalizeWriteStreamResponse
+ */
+ public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta2.FinalizeWriteStreamResponse;
+
+ /**
+ * Creates a plain object from a FinalizeWriteStreamResponse message. Also converts values to other types if specified.
+ * @param message FinalizeWriteStreamResponse
+ * @param [options] Conversion options
+ * @returns Plain object
+ */
+ public static toObject(message: google.cloud.bigquery.storage.v1beta2.FinalizeWriteStreamResponse, options?: $protobuf.IConversionOptions): { [k: string]: any };
+
+ /**
+ * Converts this FinalizeWriteStreamResponse to JSON.
+ * @returns JSON object
+ */
+ public toJSON(): { [k: string]: any };
+
+ /**
+ * Gets the default type url for FinalizeWriteStreamResponse
+ * @param [typeUrlPrefix] your custom typeUrlPrefix (default "type.googleapis.com")
+ * @returns The default type url
+ */
+ public static getTypeUrl(typeUrlPrefix?: string): string;
+ }
+
+ /** Properties of a FlushRowsRequest (plain-object form accepted by create/encode/fromObject). */
+ interface IFlushRowsRequest {
+
+ /** FlushRowsRequest writeStream. */
+ writeStream?: (string|null);
+
+ /** FlushRowsRequest offset. */
+ offset?: (google.protobuf.IInt64Value|null);
+ }
+
+ /** Represents a FlushRowsRequest. */
+ class FlushRowsRequest implements IFlushRowsRequest {
+
+ /**
+ * Constructs a new FlushRowsRequest.
+ * @param [properties] Properties to set
+ */
+ constructor(properties?: google.cloud.bigquery.storage.v1beta2.IFlushRowsRequest);
+
+ /** FlushRowsRequest writeStream. */
+ public writeStream: string;
+
+ /** FlushRowsRequest offset. */
+ public offset?: (google.protobuf.IInt64Value|null);
+
+ /**
+ * Creates a new FlushRowsRequest instance using the specified properties.
+ * @param [properties] Properties to set
+ * @returns FlushRowsRequest instance
+ */
+ public static create(properties?: google.cloud.bigquery.storage.v1beta2.IFlushRowsRequest): google.cloud.bigquery.storage.v1beta2.FlushRowsRequest;
+
+ /**
+ * Encodes the specified FlushRowsRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.FlushRowsRequest.verify|verify} messages.
+ * @param message FlushRowsRequest message or plain object to encode
+ * @param [writer] Writer to encode to
+ * @returns Writer
+ */
+ public static encode(message: google.cloud.bigquery.storage.v1beta2.IFlushRowsRequest, writer?: $protobuf.Writer): $protobuf.Writer;
+
+ /**
+ * Encodes the specified FlushRowsRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.FlushRowsRequest.verify|verify} messages.
+ * @param message FlushRowsRequest message or plain object to encode
+ * @param [writer] Writer to encode to
+ * @returns Writer
+ */
+ public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta2.IFlushRowsRequest, writer?: $protobuf.Writer): $protobuf.Writer;
+
+ /**
+ * Decodes a FlushRowsRequest message from the specified reader or buffer.
+ * @param reader Reader or buffer to decode from
+ * @param [length] Message length if known beforehand
+ * @returns FlushRowsRequest
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta2.FlushRowsRequest;
+
+ /**
+ * Decodes a FlushRowsRequest message from the specified reader or buffer, length delimited.
+ * @param reader Reader or buffer to decode from
+ * @returns FlushRowsRequest
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta2.FlushRowsRequest;
+
+ /**
+ * Verifies a FlushRowsRequest message.
+ * @param message Plain object to verify
+ * @returns `null` if valid, otherwise the reason why it is not
+ */
+ public static verify(message: { [k: string]: any }): (string|null);
+
+ /**
+ * Creates a FlushRowsRequest message from a plain object. Also converts values to their respective internal types.
+ * @param object Plain object
+ * @returns FlushRowsRequest
+ */
+ public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta2.FlushRowsRequest;
+
+ /**
+ * Creates a plain object from a FlushRowsRequest message. Also converts values to other types if specified.
+ * @param message FlushRowsRequest
+ * @param [options] Conversion options
+ * @returns Plain object
+ */
+ public static toObject(message: google.cloud.bigquery.storage.v1beta2.FlushRowsRequest, options?: $protobuf.IConversionOptions): { [k: string]: any };
+
+ /**
+ * Converts this FlushRowsRequest to JSON.
+ * @returns JSON object
+ */
+ public toJSON(): { [k: string]: any };
+
+ /**
+ * Gets the default type url for FlushRowsRequest
+ * @param [typeUrlPrefix] your custom typeUrlPrefix (default "type.googleapis.com")
+ * @returns The default type url
+ */
+ public static getTypeUrl(typeUrlPrefix?: string): string;
+ }
+
+ /** Properties of a FlushRowsResponse (plain-object form accepted by create/encode/fromObject). */
+ interface IFlushRowsResponse {
+
+ /** FlushRowsResponse offset. 64-bit value; `Long` when the long.js dependency is present, otherwise number/string. */
+ offset?: (number|Long|string|null);
+ }
+
+ /** Represents a FlushRowsResponse. */
+ class FlushRowsResponse implements IFlushRowsResponse {
+
+ /**
+ * Constructs a new FlushRowsResponse.
+ * @param [properties] Properties to set
+ */
+ constructor(properties?: google.cloud.bigquery.storage.v1beta2.IFlushRowsResponse);
+
+ /** FlushRowsResponse offset. */
+ public offset: (number|Long|string);
+
+ /**
+ * Creates a new FlushRowsResponse instance using the specified properties.
+ * @param [properties] Properties to set
+ * @returns FlushRowsResponse instance
+ */
+ public static create(properties?: google.cloud.bigquery.storage.v1beta2.IFlushRowsResponse): google.cloud.bigquery.storage.v1beta2.FlushRowsResponse;
+
+ /**
+ * Encodes the specified FlushRowsResponse message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.FlushRowsResponse.verify|verify} messages.
+ * @param message FlushRowsResponse message or plain object to encode
+ * @param [writer] Writer to encode to
+ * @returns Writer
+ */
+ public static encode(message: google.cloud.bigquery.storage.v1beta2.IFlushRowsResponse, writer?: $protobuf.Writer): $protobuf.Writer;
+
+ /**
+ * Encodes the specified FlushRowsResponse message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.FlushRowsResponse.verify|verify} messages.
+ * @param message FlushRowsResponse message or plain object to encode
+ * @param [writer] Writer to encode to
+ * @returns Writer
+ */
+ public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta2.IFlushRowsResponse, writer?: $protobuf.Writer): $protobuf.Writer;
+
+ /**
+ * Decodes a FlushRowsResponse message from the specified reader or buffer.
+ * @param reader Reader or buffer to decode from
+ * @param [length] Message length if known beforehand
+ * @returns FlushRowsResponse
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta2.FlushRowsResponse;
+
+ /**
+ * Decodes a FlushRowsResponse message from the specified reader or buffer, length delimited.
+ * @param reader Reader or buffer to decode from
+ * @returns FlushRowsResponse
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta2.FlushRowsResponse;
+
+ /**
+ * Verifies a FlushRowsResponse message.
+ * @param message Plain object to verify
+ * @returns `null` if valid, otherwise the reason why it is not
+ */
+ public static verify(message: { [k: string]: any }): (string|null);
+
+ /**
+ * Creates a FlushRowsResponse message from a plain object. Also converts values to their respective internal types.
+ * @param object Plain object
+ * @returns FlushRowsResponse
+ */
+ public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta2.FlushRowsResponse;
+
+ /**
+ * Creates a plain object from a FlushRowsResponse message. Also converts values to other types if specified.
+ * @param message FlushRowsResponse
+ * @param [options] Conversion options
+ * @returns Plain object
+ */
+ public static toObject(message: google.cloud.bigquery.storage.v1beta2.FlushRowsResponse, options?: $protobuf.IConversionOptions): { [k: string]: any };
+
+ /**
+ * Converts this FlushRowsResponse to JSON.
+ * @returns JSON object
+ */
+ public toJSON(): { [k: string]: any };
+
+ /**
+ * Gets the default type url for FlushRowsResponse
+ * @param [typeUrlPrefix] your custom typeUrlPrefix (default "type.googleapis.com")
+ * @returns The default type url
+ */
+ public static getTypeUrl(typeUrlPrefix?: string): string;
+ }
+
+ /** Properties of a StorageError (plain-object form accepted by create/encode/fromObject). */
+ interface IStorageError {
+
+ /** StorageError code. Accepts the numeric enum value or its string key. */
+ code?: (google.cloud.bigquery.storage.v1beta2.StorageError.StorageErrorCode|keyof typeof google.cloud.bigquery.storage.v1beta2.StorageError.StorageErrorCode|null);
+
+ /** StorageError entity. */
+ entity?: (string|null);
+
+ /** StorageError errorMessage. */
+ errorMessage?: (string|null);
+ }
+
+ /** Represents a StorageError. */
+ class StorageError implements IStorageError {
+
+ /**
+ * Constructs a new StorageError.
+ * @param [properties] Properties to set
+ */
+ constructor(properties?: google.cloud.bigquery.storage.v1beta2.IStorageError);
+
+ /** StorageError code. */
+ public code: (google.cloud.bigquery.storage.v1beta2.StorageError.StorageErrorCode|keyof typeof google.cloud.bigquery.storage.v1beta2.StorageError.StorageErrorCode);
+
+ /** StorageError entity. */
+ public entity: string;
+
+ /** StorageError errorMessage. */
+ public errorMessage: string;
+
+ /**
+ * Creates a new StorageError instance using the specified properties.
+ * @param [properties] Properties to set
+ * @returns StorageError instance
+ */
+ public static create(properties?: google.cloud.bigquery.storage.v1beta2.IStorageError): google.cloud.bigquery.storage.v1beta2.StorageError;
+
+ /**
+ * Encodes the specified StorageError message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.StorageError.verify|verify} messages.
+ * @param message StorageError message or plain object to encode
+ * @param [writer] Writer to encode to
+ * @returns Writer
+ */
+ public static encode(message: google.cloud.bigquery.storage.v1beta2.IStorageError, writer?: $protobuf.Writer): $protobuf.Writer;
+
+ /**
+ * Encodes the specified StorageError message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.StorageError.verify|verify} messages.
+ * @param message StorageError message or plain object to encode
+ * @param [writer] Writer to encode to
+ * @returns Writer
+ */
+ public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta2.IStorageError, writer?: $protobuf.Writer): $protobuf.Writer;
+
+ /**
+ * Decodes a StorageError message from the specified reader or buffer.
+ * @param reader Reader or buffer to decode from
+ * @param [length] Message length if known beforehand
+ * @returns StorageError
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta2.StorageError;
+
+ /**
+ * Decodes a StorageError message from the specified reader or buffer, length delimited.
+ * @param reader Reader or buffer to decode from
+ * @returns StorageError
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta2.StorageError;
+
+ /**
+ * Verifies a StorageError message.
+ * @param message Plain object to verify
+ * @returns `null` if valid, otherwise the reason why it is not
+ */
+ public static verify(message: { [k: string]: any }): (string|null);
+
+ /**
+ * Creates a StorageError message from a plain object. Also converts values to their respective internal types.
+ * @param object Plain object
+ * @returns StorageError
+ */
+ public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta2.StorageError;
+
+ /**
+ * Creates a plain object from a StorageError message. Also converts values to other types if specified.
+ * @param message StorageError
+ * @param [options] Conversion options
+ * @returns Plain object
+ */
+ public static toObject(message: google.cloud.bigquery.storage.v1beta2.StorageError, options?: $protobuf.IConversionOptions): { [k: string]: any };
+
+ /**
+ * Converts this StorageError to JSON.
+ * @returns JSON object
+ */
+ public toJSON(): { [k: string]: any };
+
+ /**
+ * Gets the default type url for StorageError
+ * @param [typeUrlPrefix] your custom typeUrlPrefix (default "type.googleapis.com")
+ * @returns The default type url
+ */
+ public static getTypeUrl(typeUrlPrefix?: string): string;
+ }
+
+            namespace StorageError {
+
+                /** StorageErrorCode enum. Error codes carried in a StorageError: table/stream lookup failures and invalid or finalized stream states. */
+                enum StorageErrorCode {
+                    STORAGE_ERROR_CODE_UNSPECIFIED = 0,
+                    TABLE_NOT_FOUND = 1,
+                    STREAM_ALREADY_COMMITTED = 2,
+                    STREAM_NOT_FOUND = 3,
+                    INVALID_STREAM_TYPE = 4,
+                    INVALID_STREAM_STATE = 5,
+                    STREAM_FINALIZED = 6
+                }
+            }
+
+            /** DataFormat enum. Serialization format for data returned by a read session: AVRO or ARROW (0 = unspecified). */
+            enum DataFormat {
+                DATA_FORMAT_UNSPECIFIED = 0,
+                AVRO = 1,
+                ARROW = 2
+            }
+
+            /** Properties of a ReadSession. Plain-object form accepted wherever an IReadSession is expected; every field is optional and nullable. */
+            interface IReadSession {
+
+                /** ReadSession name */
+                name?: (string|null);
+
+                /** ReadSession expireTime */
+                expireTime?: (google.protobuf.ITimestamp|null);
+
+                /** ReadSession dataFormat */
+                dataFormat?: (google.cloud.bigquery.storage.v1beta2.DataFormat|keyof typeof google.cloud.bigquery.storage.v1beta2.DataFormat|null);
+
+                /** ReadSession avroSchema */
+                avroSchema?: (google.cloud.bigquery.storage.v1beta2.IAvroSchema|null);
+
+                /** ReadSession arrowSchema */
+                arrowSchema?: (google.cloud.bigquery.storage.v1beta2.IArrowSchema|null);
+
+                /** ReadSession table */
+                table?: (string|null);
+
+                /** ReadSession tableModifiers */
+                tableModifiers?: (google.cloud.bigquery.storage.v1beta2.ReadSession.ITableModifiers|null);
+
+                /** ReadSession readOptions */
+                readOptions?: (google.cloud.bigquery.storage.v1beta2.ReadSession.ITableReadOptions|null);
+
+                /** ReadSession streams */
+                streams?: (google.cloud.bigquery.storage.v1beta2.IReadStream[]|null);
+            }
+
+            /** Represents a ReadSession. Declared protobufjs message class; the "schema" member names which oneof field (avroSchema or arrowSchema) is currently set. */
+            class ReadSession implements IReadSession {
+
+                /**
+                 * Constructs a new ReadSession.
+                 * @param [properties] Properties to set
+                 */
+                constructor(properties?: google.cloud.bigquery.storage.v1beta2.IReadSession);
+
+                /** ReadSession name. */
+                public name: string;
+
+                /** ReadSession expireTime. */
+                public expireTime?: (google.protobuf.ITimestamp|null);
+
+                /** ReadSession dataFormat. */
+                public dataFormat: (google.cloud.bigquery.storage.v1beta2.DataFormat|keyof typeof google.cloud.bigquery.storage.v1beta2.DataFormat);
+
+                /** ReadSession avroSchema. */
+                public avroSchema?: (google.cloud.bigquery.storage.v1beta2.IAvroSchema|null);
+
+                /** ReadSession arrowSchema. */
+                public arrowSchema?: (google.cloud.bigquery.storage.v1beta2.IArrowSchema|null);
+
+                /** ReadSession table. */
+                public table: string;
+
+                /** ReadSession tableModifiers. */
+                public tableModifiers?: (google.cloud.bigquery.storage.v1beta2.ReadSession.ITableModifiers|null);
+
+                /** ReadSession readOptions. */
+                public readOptions?: (google.cloud.bigquery.storage.v1beta2.ReadSession.ITableReadOptions|null);
+
+                /** ReadSession streams. */
+                public streams: google.cloud.bigquery.storage.v1beta2.IReadStream[];
+
+                /** ReadSession schema. */
+                public schema?: ("avroSchema"|"arrowSchema");
+
+                /**
+                 * Creates a new ReadSession instance using the specified properties.
+                 * @param [properties] Properties to set
+                 * @returns ReadSession instance
+                 */
+                public static create(properties?: google.cloud.bigquery.storage.v1beta2.IReadSession): google.cloud.bigquery.storage.v1beta2.ReadSession;
+
+                /**
+                 * Encodes the specified ReadSession message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.ReadSession.verify|verify} messages.
+                 * @param message ReadSession message or plain object to encode
+                 * @param [writer] Writer to encode to
+                 * @returns Writer
+                 */
+                public static encode(message: google.cloud.bigquery.storage.v1beta2.IReadSession, writer?: $protobuf.Writer): $protobuf.Writer;
+
+                /**
+                 * Encodes the specified ReadSession message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.ReadSession.verify|verify} messages.
+                 * @param message ReadSession message or plain object to encode
+                 * @param [writer] Writer to encode to
+                 * @returns Writer
+                 */
+                public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta2.IReadSession, writer?: $protobuf.Writer): $protobuf.Writer;
+
+                /**
+                 * Decodes a ReadSession message from the specified reader or buffer.
+                 * @param reader Reader or buffer to decode from
+                 * @param [length] Message length if known beforehand
+                 * @returns ReadSession
+                 * @throws {Error} If the payload is not a reader or valid buffer
+                 * @throws {$protobuf.util.ProtocolError} If required fields are missing
+                 */
+                public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta2.ReadSession;
+
+                /**
+                 * Decodes a ReadSession message from the specified reader or buffer, length delimited.
+                 * @param reader Reader or buffer to decode from
+                 * @returns ReadSession
+                 * @throws {Error} If the payload is not a reader or valid buffer
+                 * @throws {$protobuf.util.ProtocolError} If required fields are missing
+                 */
+                public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta2.ReadSession;
+
+                /**
+                 * Verifies a ReadSession message.
+                 * @param message Plain object to verify
+                 * @returns `null` if valid, otherwise the reason why it is not
+                 */
+                public static verify(message: { [k: string]: any }): (string|null);
+
+                /**
+                 * Creates a ReadSession message from a plain object. Also converts values to their respective internal types.
+                 * @param object Plain object
+                 * @returns ReadSession
+                 */
+                public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta2.ReadSession;
+
+                /**
+                 * Creates a plain object from a ReadSession message. Also converts values to other types if specified.
+                 * @param message ReadSession
+                 * @param [options] Conversion options
+                 * @returns Plain object
+                 */
+                public static toObject(message: google.cloud.bigquery.storage.v1beta2.ReadSession, options?: $protobuf.IConversionOptions): { [k: string]: any };
+
+                /**
+                 * Converts this ReadSession to JSON.
+                 * @returns JSON object
+                 */
+                public toJSON(): { [k: string]: any };
+
+                /**
+                 * Gets the default type url for ReadSession
+                 * @param [typeUrlPrefix] your custom typeUrlPrefix (default "type.googleapis.com")
+                 * @returns The default type url
+                 */
+                public static getTypeUrl(typeUrlPrefix?: string): string;
+            }
+
+            namespace ReadSession {
+
+                /** Properties of a TableModifiers. Plain-object form; wraps the optional snapshotTime timestamp. */
+                interface ITableModifiers {
+
+                    /** TableModifiers snapshotTime */
+                    snapshotTime?: (google.protobuf.ITimestamp|null);
+                }
+
+                /** Represents a TableModifiers. Declared protobufjs message class for the single snapshotTime field. */
+                class TableModifiers implements ITableModifiers {
+
+                    /**
+                     * Constructs a new TableModifiers.
+                     * @param [properties] Properties to set
+                     */
+                    constructor(properties?: google.cloud.bigquery.storage.v1beta2.ReadSession.ITableModifiers);
+
+                    /** TableModifiers snapshotTime. */
+                    public snapshotTime?: (google.protobuf.ITimestamp|null);
+
+                    /**
+                     * Creates a new TableModifiers instance using the specified properties.
+                     * @param [properties] Properties to set
+                     * @returns TableModifiers instance
+                     */
+                    public static create(properties?: google.cloud.bigquery.storage.v1beta2.ReadSession.ITableModifiers): google.cloud.bigquery.storage.v1beta2.ReadSession.TableModifiers;
+
+                    /**
+                     * Encodes the specified TableModifiers message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.ReadSession.TableModifiers.verify|verify} messages.
+                     * @param message TableModifiers message or plain object to encode
+                     * @param [writer] Writer to encode to
+                     * @returns Writer
+                     */
+                    public static encode(message: google.cloud.bigquery.storage.v1beta2.ReadSession.ITableModifiers, writer?: $protobuf.Writer): $protobuf.Writer;
+
+                    /**
+                     * Encodes the specified TableModifiers message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.ReadSession.TableModifiers.verify|verify} messages.
+                     * @param message TableModifiers message or plain object to encode
+                     * @param [writer] Writer to encode to
+                     * @returns Writer
+                     */
+                    public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta2.ReadSession.ITableModifiers, writer?: $protobuf.Writer): $protobuf.Writer;
+
+                    /**
+                     * Decodes a TableModifiers message from the specified reader or buffer.
+                     * @param reader Reader or buffer to decode from
+                     * @param [length] Message length if known beforehand
+                     * @returns TableModifiers
+                     * @throws {Error} If the payload is not a reader or valid buffer
+                     * @throws {$protobuf.util.ProtocolError} If required fields are missing
+                     */
+                    public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta2.ReadSession.TableModifiers;
+
+                    /**
+                     * Decodes a TableModifiers message from the specified reader or buffer, length delimited.
+                     * @param reader Reader or buffer to decode from
+                     * @returns TableModifiers
+                     * @throws {Error} If the payload is not a reader or valid buffer
+                     * @throws {$protobuf.util.ProtocolError} If required fields are missing
+                     */
+                    public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta2.ReadSession.TableModifiers;
+
+                    /**
+                     * Verifies a TableModifiers message.
+                     * @param message Plain object to verify
+                     * @returns `null` if valid, otherwise the reason why it is not
+                     */
+                    public static verify(message: { [k: string]: any }): (string|null);
+
+                    /**
+                     * Creates a TableModifiers message from a plain object. Also converts values to their respective internal types.
+                     * @param object Plain object
+                     * @returns TableModifiers
+                     */
+                    public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta2.ReadSession.TableModifiers;
+
+                    /**
+                     * Creates a plain object from a TableModifiers message. Also converts values to other types if specified.
+                     * @param message TableModifiers
+                     * @param [options] Conversion options
+                     * @returns Plain object
+                     */
+                    public static toObject(message: google.cloud.bigquery.storage.v1beta2.ReadSession.TableModifiers, options?: $protobuf.IConversionOptions): { [k: string]: any };
+
+                    /**
+                     * Converts this TableModifiers to JSON.
+                     * @returns JSON object
+                     */
+                    public toJSON(): { [k: string]: any };
+
+                    /**
+                     * Gets the default type url for TableModifiers
+                     * @param [typeUrlPrefix] your custom typeUrlPrefix (default "type.googleapis.com")
+                     * @returns The default type url
+                     */
+                    public static getTypeUrl(typeUrlPrefix?: string): string;
+                }
+
+                /** Properties of a TableReadOptions. Plain-object form: column projection (selectedFields), row filter (rowRestriction), and Arrow serialization options. */
+                interface ITableReadOptions {
+
+                    /** TableReadOptions selectedFields */
+                    selectedFields?: (string[]|null);
+
+                    /** TableReadOptions rowRestriction */
+                    rowRestriction?: (string|null);
+
+                    /** TableReadOptions arrowSerializationOptions */
+                    arrowSerializationOptions?: (google.cloud.bigquery.storage.v1beta2.IArrowSerializationOptions|null);
+                }
+
+                /** Represents a TableReadOptions. Declared protobufjs message class for read-session filtering/projection options. */
+                class TableReadOptions implements ITableReadOptions {
+
+                    /**
+                     * Constructs a new TableReadOptions.
+                     * @param [properties] Properties to set
+                     */
+                    constructor(properties?: google.cloud.bigquery.storage.v1beta2.ReadSession.ITableReadOptions);
+
+                    /** TableReadOptions selectedFields. */
+                    public selectedFields: string[];
+
+                    /** TableReadOptions rowRestriction. */
+                    public rowRestriction: string;
+
+                    /** TableReadOptions arrowSerializationOptions. */
+                    public arrowSerializationOptions?: (google.cloud.bigquery.storage.v1beta2.IArrowSerializationOptions|null);
+
+                    /**
+                     * Creates a new TableReadOptions instance using the specified properties.
+                     * @param [properties] Properties to set
+                     * @returns TableReadOptions instance
+                     */
+                    public static create(properties?: google.cloud.bigquery.storage.v1beta2.ReadSession.ITableReadOptions): google.cloud.bigquery.storage.v1beta2.ReadSession.TableReadOptions;
+
+                    /**
+                     * Encodes the specified TableReadOptions message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.ReadSession.TableReadOptions.verify|verify} messages.
+                     * @param message TableReadOptions message or plain object to encode
+                     * @param [writer] Writer to encode to
+                     * @returns Writer
+                     */
+                    public static encode(message: google.cloud.bigquery.storage.v1beta2.ReadSession.ITableReadOptions, writer?: $protobuf.Writer): $protobuf.Writer;
+
+                    /**
+                     * Encodes the specified TableReadOptions message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.ReadSession.TableReadOptions.verify|verify} messages.
+                     * @param message TableReadOptions message or plain object to encode
+                     * @param [writer] Writer to encode to
+                     * @returns Writer
+                     */
+                    public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta2.ReadSession.ITableReadOptions, writer?: $protobuf.Writer): $protobuf.Writer;
+
+                    /**
+                     * Decodes a TableReadOptions message from the specified reader or buffer.
+                     * @param reader Reader or buffer to decode from
+                     * @param [length] Message length if known beforehand
+                     * @returns TableReadOptions
+                     * @throws {Error} If the payload is not a reader or valid buffer
+                     * @throws {$protobuf.util.ProtocolError} If required fields are missing
+                     */
+                    public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta2.ReadSession.TableReadOptions;
+
+                    /**
+                     * Decodes a TableReadOptions message from the specified reader or buffer, length delimited.
+                     * @param reader Reader or buffer to decode from
+                     * @returns TableReadOptions
+                     * @throws {Error} If the payload is not a reader or valid buffer
+                     * @throws {$protobuf.util.ProtocolError} If required fields are missing
+                     */
+                    public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta2.ReadSession.TableReadOptions;
+
+                    /**
+                     * Verifies a TableReadOptions message.
+                     * @param message Plain object to verify
+                     * @returns `null` if valid, otherwise the reason why it is not
+                     */
+                    public static verify(message: { [k: string]: any }): (string|null);
+
+                    /**
+                     * Creates a TableReadOptions message from a plain object. Also converts values to their respective internal types.
+                     * @param object Plain object
+                     * @returns TableReadOptions
+                     */
+                    public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta2.ReadSession.TableReadOptions;
+
+                    /**
+                     * Creates a plain object from a TableReadOptions message. Also converts values to other types if specified.
+                     * @param message TableReadOptions
+                     * @param [options] Conversion options
+                     * @returns Plain object
+                     */
+                    public static toObject(message: google.cloud.bigquery.storage.v1beta2.ReadSession.TableReadOptions, options?: $protobuf.IConversionOptions): { [k: string]: any };
+
+                    /**
+                     * Converts this TableReadOptions to JSON.
+                     * @returns JSON object
+                     */
+                    public toJSON(): { [k: string]: any };
+
+                    /**
+                     * Gets the default type url for TableReadOptions
+                     * @param [typeUrlPrefix] your custom typeUrlPrefix (default "type.googleapis.com")
+                     * @returns The default type url
+                     */
+                    public static getTypeUrl(typeUrlPrefix?: string): string;
+                }
+            }
+
+            /** Properties of a ReadStream. Plain-object form; the stream's resource name is its only field. */
+            interface IReadStream {
+
+                /** ReadStream name */
+                name?: (string|null);
+            }
+
+            /** Represents a ReadStream. Declared protobufjs message class exposing static encode/decode/verify/convert helpers for a stream's resource name. */
+            class ReadStream implements IReadStream {
+
+                /**
+                 * Constructs a new ReadStream.
+                 * @param [properties] Properties to set
+                 */
+                constructor(properties?: google.cloud.bigquery.storage.v1beta2.IReadStream);
+
+                /** ReadStream name. */
+                public name: string;
+
+                /**
+                 * Creates a new ReadStream instance using the specified properties.
+                 * @param [properties] Properties to set
+                 * @returns ReadStream instance
+                 */
+                public static create(properties?: google.cloud.bigquery.storage.v1beta2.IReadStream): google.cloud.bigquery.storage.v1beta2.ReadStream;
+
+                /**
+                 * Encodes the specified ReadStream message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.ReadStream.verify|verify} messages.
+                 * @param message ReadStream message or plain object to encode
+                 * @param [writer] Writer to encode to
+                 * @returns Writer
+                 */
+                public static encode(message: google.cloud.bigquery.storage.v1beta2.IReadStream, writer?: $protobuf.Writer): $protobuf.Writer;
+
+                /**
+                 * Encodes the specified ReadStream message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.ReadStream.verify|verify} messages.
+                 * @param message ReadStream message or plain object to encode
+                 * @param [writer] Writer to encode to
+                 * @returns Writer
+                 */
+                public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta2.IReadStream, writer?: $protobuf.Writer): $protobuf.Writer;
+
+                /**
+                 * Decodes a ReadStream message from the specified reader or buffer.
+                 * @param reader Reader or buffer to decode from
+                 * @param [length] Message length if known beforehand
+                 * @returns ReadStream
+                 * @throws {Error} If the payload is not a reader or valid buffer
+                 * @throws {$protobuf.util.ProtocolError} If required fields are missing
+                 */
+                public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta2.ReadStream;
+
+                /**
+                 * Decodes a ReadStream message from the specified reader or buffer, length delimited.
+                 * @param reader Reader or buffer to decode from
+                 * @returns ReadStream
+                 * @throws {Error} If the payload is not a reader or valid buffer
+                 * @throws {$protobuf.util.ProtocolError} If required fields are missing
+                 */
+                public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta2.ReadStream;
+
+                /**
+                 * Verifies a ReadStream message.
+                 * @param message Plain object to verify
+                 * @returns `null` if valid, otherwise the reason why it is not
+                 */
+                public static verify(message: { [k: string]: any }): (string|null);
+
+                /**
+                 * Creates a ReadStream message from a plain object. Also converts values to their respective internal types.
+                 * @param object Plain object
+                 * @returns ReadStream
+                 */
+                public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta2.ReadStream;
+
+                /**
+                 * Creates a plain object from a ReadStream message. Also converts values to other types if specified.
+                 * @param message ReadStream
+                 * @param [options] Conversion options
+                 * @returns Plain object
+                 */
+                public static toObject(message: google.cloud.bigquery.storage.v1beta2.ReadStream, options?: $protobuf.IConversionOptions): { [k: string]: any };
+
+                /**
+                 * Converts this ReadStream to JSON.
+                 * @returns JSON object
+                 */
+                public toJSON(): { [k: string]: any };
+
+                /**
+                 * Gets the default type url for ReadStream
+                 * @param [typeUrlPrefix] your custom typeUrlPrefix (default "type.googleapis.com")
+                 * @returns The default type url
+                 */
+                public static getTypeUrl(typeUrlPrefix?: string): string;
+            }
+
+            /** Properties of a WriteStream. Plain-object form accepted by the WriteStream class; every field is optional and nullable. */
+            interface IWriteStream {
+
+                /** WriteStream name */
+                name?: (string|null);
+
+                /** WriteStream type */
+                type?: (google.cloud.bigquery.storage.v1beta2.WriteStream.Type|keyof typeof google.cloud.bigquery.storage.v1beta2.WriteStream.Type|null);
+
+                /** WriteStream createTime */
+                createTime?: (google.protobuf.ITimestamp|null);
+
+                /** WriteStream commitTime */
+                commitTime?: (google.protobuf.ITimestamp|null);
+
+                /** WriteStream tableSchema */
+                tableSchema?: (google.cloud.bigquery.storage.v1beta2.ITableSchema|null);
+            }
+
+            /** Represents a WriteStream. Declared protobufjs message class; "type" takes one of the WriteStream.Type values (COMMITTED, PENDING, BUFFERED). */
+            class WriteStream implements IWriteStream {
+
+                /**
+                 * Constructs a new WriteStream.
+                 * @param [properties] Properties to set
+                 */
+                constructor(properties?: google.cloud.bigquery.storage.v1beta2.IWriteStream);
+
+                /** WriteStream name. */
+                public name: string;
+
+                /** WriteStream type. */
+                public type: (google.cloud.bigquery.storage.v1beta2.WriteStream.Type|keyof typeof google.cloud.bigquery.storage.v1beta2.WriteStream.Type);
+
+                /** WriteStream createTime. */
+                public createTime?: (google.protobuf.ITimestamp|null);
+
+                /** WriteStream commitTime. */
+                public commitTime?: (google.protobuf.ITimestamp|null);
+
+                /** WriteStream tableSchema. */
+                public tableSchema?: (google.cloud.bigquery.storage.v1beta2.ITableSchema|null);
+
+                /**
+                 * Creates a new WriteStream instance using the specified properties.
+                 * @param [properties] Properties to set
+                 * @returns WriteStream instance
+                 */
+                public static create(properties?: google.cloud.bigquery.storage.v1beta2.IWriteStream): google.cloud.bigquery.storage.v1beta2.WriteStream;
+
+                /**
+                 * Encodes the specified WriteStream message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.WriteStream.verify|verify} messages.
+                 * @param message WriteStream message or plain object to encode
+                 * @param [writer] Writer to encode to
+                 * @returns Writer
+                 */
+                public static encode(message: google.cloud.bigquery.storage.v1beta2.IWriteStream, writer?: $protobuf.Writer): $protobuf.Writer;
+
+                /**
+                 * Encodes the specified WriteStream message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.WriteStream.verify|verify} messages.
+                 * @param message WriteStream message or plain object to encode
+                 * @param [writer] Writer to encode to
+                 * @returns Writer
+                 */
+                public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta2.IWriteStream, writer?: $protobuf.Writer): $protobuf.Writer;
+
+                /**
+                 * Decodes a WriteStream message from the specified reader or buffer.
+                 * @param reader Reader or buffer to decode from
+                 * @param [length] Message length if known beforehand
+                 * @returns WriteStream
+                 * @throws {Error} If the payload is not a reader or valid buffer
+                 * @throws {$protobuf.util.ProtocolError} If required fields are missing
+                 */
+                public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta2.WriteStream;
+
+                /**
+                 * Decodes a WriteStream message from the specified reader or buffer, length delimited.
+                 * @param reader Reader or buffer to decode from
+                 * @returns WriteStream
+                 * @throws {Error} If the payload is not a reader or valid buffer
+                 * @throws {$protobuf.util.ProtocolError} If required fields are missing
+                 */
+                public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta2.WriteStream;
+
+                /**
+                 * Verifies a WriteStream message.
+                 * @param message Plain object to verify
+                 * @returns `null` if valid, otherwise the reason why it is not
+                 */
+                public static verify(message: { [k: string]: any }): (string|null);
+
+                /**
+                 * Creates a WriteStream message from a plain object. Also converts values to their respective internal types.
+                 * @param object Plain object
+                 * @returns WriteStream
+                 */
+                public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta2.WriteStream;
+
+                /**
+                 * Creates a plain object from a WriteStream message. Also converts values to other types if specified.
+                 * @param message WriteStream
+                 * @param [options] Conversion options
+                 * @returns Plain object
+                 */
+                public static toObject(message: google.cloud.bigquery.storage.v1beta2.WriteStream, options?: $protobuf.IConversionOptions): { [k: string]: any };
+
+                /**
+                 * Converts this WriteStream to JSON.
+                 * @returns JSON object
+                 */
+                public toJSON(): { [k: string]: any };
+
+                /**
+                 * Gets the default type url for WriteStream
+                 * @param [typeUrlPrefix] your custom typeUrlPrefix (default "type.googleapis.com")
+                 * @returns The default type url
+                 */
+                public static getTypeUrl(typeUrlPrefix?: string): string;
+            }
+
+            namespace WriteStream {
+
+                /** Type enum. Kinds of write stream: COMMITTED, PENDING, or BUFFERED (0 = unspecified). */
+                enum Type {
+                    TYPE_UNSPECIFIED = 0,
+                    COMMITTED = 1,
+                    PENDING = 2,
+                    BUFFERED = 3
+                }
+            }
+
+            /** Properties of a TableSchema. Plain-object form; holds the repeated list of field schemas. */
+            interface ITableSchema {
+
+                /** TableSchema fields */
+                fields?: (google.cloud.bigquery.storage.v1beta2.ITableFieldSchema[]|null);
+            }
+
+            /** Represents a TableSchema. Declared protobufjs message class wrapping a list of TableFieldSchema entries. */
+            class TableSchema implements ITableSchema {
+
+                /**
+                 * Constructs a new TableSchema.
+                 * @param [properties] Properties to set
+                 */
+                constructor(properties?: google.cloud.bigquery.storage.v1beta2.ITableSchema);
+
+                /** TableSchema fields. */
+                public fields: google.cloud.bigquery.storage.v1beta2.ITableFieldSchema[];
+
+                /**
+                 * Creates a new TableSchema instance using the specified properties.
+                 * @param [properties] Properties to set
+                 * @returns TableSchema instance
+                 */
+                public static create(properties?: google.cloud.bigquery.storage.v1beta2.ITableSchema): google.cloud.bigquery.storage.v1beta2.TableSchema;
+
+                /**
+                 * Encodes the specified TableSchema message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.TableSchema.verify|verify} messages.
+                 * @param message TableSchema message or plain object to encode
+                 * @param [writer] Writer to encode to
+                 * @returns Writer
+                 */
+                public static encode(message: google.cloud.bigquery.storage.v1beta2.ITableSchema, writer?: $protobuf.Writer): $protobuf.Writer;
+
+                /**
+                 * Encodes the specified TableSchema message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.TableSchema.verify|verify} messages.
+                 * @param message TableSchema message or plain object to encode
+                 * @param [writer] Writer to encode to
+                 * @returns Writer
+                 */
+                public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta2.ITableSchema, writer?: $protobuf.Writer): $protobuf.Writer;
+
+                /**
+                 * Decodes a TableSchema message from the specified reader or buffer.
+                 * @param reader Reader or buffer to decode from
+                 * @param [length] Message length if known beforehand
+                 * @returns TableSchema
+                 * @throws {Error} If the payload is not a reader or valid buffer
+                 * @throws {$protobuf.util.ProtocolError} If required fields are missing
+                 */
+                public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta2.TableSchema;
+
+                /**
+                 * Decodes a TableSchema message from the specified reader or buffer, length delimited.
+                 * @param reader Reader or buffer to decode from
+                 * @returns TableSchema
+                 * @throws {Error} If the payload is not a reader or valid buffer
+                 * @throws {$protobuf.util.ProtocolError} If required fields are missing
+                 */
+                public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta2.TableSchema;
+
+                /**
+                 * Verifies a TableSchema message.
+                 * @param message Plain object to verify
+                 * @returns `null` if valid, otherwise the reason why it is not
+                 */
+                public static verify(message: { [k: string]: any }): (string|null);
+
+                /**
+                 * Creates a TableSchema message from a plain object. Also converts values to their respective internal types.
+                 * @param object Plain object
+                 * @returns TableSchema
+                 */
+                public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta2.TableSchema;
+
+                /**
+                 * Creates a plain object from a TableSchema message. Also converts values to other types if specified.
+                 * @param message TableSchema
+                 * @param [options] Conversion options
+                 * @returns Plain object
+                 */
+                public static toObject(message: google.cloud.bigquery.storage.v1beta2.TableSchema, options?: $protobuf.IConversionOptions): { [k: string]: any };
+
+                /**
+                 * Converts this TableSchema to JSON.
+                 * @returns JSON object
+                 */
+                public toJSON(): { [k: string]: any };
+
+                /**
+                 * Gets the default type url for TableSchema
+                 * @param [typeUrlPrefix] your custom typeUrlPrefix (default "type.googleapis.com")
+                 * @returns The default type url
+                 */
+                public static getTypeUrl(typeUrlPrefix?: string): string;
+            }
+
+            /** Properties of a TableFieldSchema. Plain-object form; "fields" nests further field schemas, so the shape is recursive. */
+            interface ITableFieldSchema {
+
+                /** TableFieldSchema name */
+                name?: (string|null);
+
+                /** TableFieldSchema type */
+                type?: (google.cloud.bigquery.storage.v1beta2.TableFieldSchema.Type|keyof typeof google.cloud.bigquery.storage.v1beta2.TableFieldSchema.Type|null);
+
+                /** TableFieldSchema mode */
+                mode?: (google.cloud.bigquery.storage.v1beta2.TableFieldSchema.Mode|keyof typeof google.cloud.bigquery.storage.v1beta2.TableFieldSchema.Mode|null);
+
+                /** TableFieldSchema fields */
+                fields?: (google.cloud.bigquery.storage.v1beta2.ITableFieldSchema[]|null);
+
+                /** TableFieldSchema description */
+                description?: (string|null);
+            }
+
+            /** Represents a TableFieldSchema. Declared protobufjs message class; "type" and "mode" use the TableFieldSchema.Type/Mode enums, and "fields" nests child field schemas. */
+            class TableFieldSchema implements ITableFieldSchema {
+
+                /**
+                 * Constructs a new TableFieldSchema.
+                 * @param [properties] Properties to set
+                 */
+                constructor(properties?: google.cloud.bigquery.storage.v1beta2.ITableFieldSchema);
+
+                /** TableFieldSchema name. */
+                public name: string;
+
+                /** TableFieldSchema type. */
+                public type: (google.cloud.bigquery.storage.v1beta2.TableFieldSchema.Type|keyof typeof google.cloud.bigquery.storage.v1beta2.TableFieldSchema.Type);
+
+                /** TableFieldSchema mode. */
+                public mode: (google.cloud.bigquery.storage.v1beta2.TableFieldSchema.Mode|keyof typeof google.cloud.bigquery.storage.v1beta2.TableFieldSchema.Mode);
+
+                /** TableFieldSchema fields. */
+                public fields: google.cloud.bigquery.storage.v1beta2.ITableFieldSchema[];
+
+                /** TableFieldSchema description. */
+                public description: string;
+
+                /**
+                 * Creates a new TableFieldSchema instance using the specified properties.
+                 * @param [properties] Properties to set
+                 * @returns TableFieldSchema instance
+                 */
+                public static create(properties?: google.cloud.bigquery.storage.v1beta2.ITableFieldSchema): google.cloud.bigquery.storage.v1beta2.TableFieldSchema;
+
+                /**
+                 * Encodes the specified TableFieldSchema message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.TableFieldSchema.verify|verify} messages.
+                 * @param message TableFieldSchema message or plain object to encode
+                 * @param [writer] Writer to encode to
+                 * @returns Writer
+                 */
+                public static encode(message: google.cloud.bigquery.storage.v1beta2.ITableFieldSchema, writer?: $protobuf.Writer): $protobuf.Writer;
+
+                /**
+                 * Encodes the specified TableFieldSchema message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.TableFieldSchema.verify|verify} messages.
+                 * @param message TableFieldSchema message or plain object to encode
+                 * @param [writer] Writer to encode to
+                 * @returns Writer
+                 */
+                public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta2.ITableFieldSchema, writer?: $protobuf.Writer): $protobuf.Writer;
+
+                /**
+                 * Decodes a TableFieldSchema message from the specified reader or buffer.
+                 * @param reader Reader or buffer to decode from
+                 * @param [length] Message length if known beforehand
+                 * @returns TableFieldSchema
+                 * @throws {Error} If the payload is not a reader or valid buffer
+                 * @throws {$protobuf.util.ProtocolError} If required fields are missing
+                 */
+                public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta2.TableFieldSchema;
+
+                /**
+                 * Decodes a TableFieldSchema message from the specified reader or buffer, length delimited.
+                 * @param reader Reader or buffer to decode from
+                 * @returns TableFieldSchema
+                 * @throws {Error} If the payload is not a reader or valid buffer
+                 * @throws {$protobuf.util.ProtocolError} If required fields are missing
+                 */
+                public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta2.TableFieldSchema;
+
+                /**
+                 * Verifies a TableFieldSchema message.
+                 * @param message Plain object to verify
+                 * @returns `null` if valid, otherwise the reason why it is not
+                 */
+                public static verify(message: { [k: string]: any }): (string|null);
+
+                /**
+                 * Creates a TableFieldSchema message from a plain object. Also converts values to their respective internal types.
+                 * @param object Plain object
+                 * @returns TableFieldSchema
+                 */
+                public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta2.TableFieldSchema;
+
+                /**
+                 * Creates a plain object from a TableFieldSchema message. Also converts values to other types if specified.
+                 * @param message TableFieldSchema
+                 * @param [options] Conversion options
+                 * @returns Plain object
+                 */
+                public static toObject(message: google.cloud.bigquery.storage.v1beta2.TableFieldSchema, options?: $protobuf.IConversionOptions): { [k: string]: any };
+
+                /**
+                 * Converts this TableFieldSchema to JSON.
+                 * @returns JSON object
+                 */
+                public toJSON(): { [k: string]: any };
+
+                /**
+                 * Gets the default type url for TableFieldSchema
+                 * @param [typeUrlPrefix] your custom typeUrlPrefix (default "type.googleapis.com")
+                 * @returns The default type url
+                 */
+                public static getTypeUrl(typeUrlPrefix?: string): string;
+            }
+
+            namespace TableFieldSchema {
+
+                /** Type enum. Field value types a TableFieldSchema may declare (0 = unspecified). */
+                enum Type {
+                    TYPE_UNSPECIFIED = 0,
+                    STRING = 1,
+                    INT64 = 2,
+                    DOUBLE = 3,
+                    STRUCT = 4,
+                    BYTES = 5,
+                    BOOL = 6,
+                    TIMESTAMP = 7,
+                    DATE = 8,
+                    TIME = 9,
+                    DATETIME = 10,
+                    GEOGRAPHY = 11,
+                    NUMERIC = 12,
+                    BIGNUMERIC = 13,
+                    INTERVAL = 14,
+                    JSON = 15
+                }
+
+                /** Mode enum. Field cardinality: NULLABLE, REQUIRED, or REPEATED (0 = unspecified). */
+                enum Mode {
+                    MODE_UNSPECIFIED = 0,
+                    NULLABLE = 1,
+                    REQUIRED = 2,
+                    REPEATED = 3
+                }
+            }
+ }
}
}
}
diff --git a/handwritten/bigquery-storage/protos/protos.js b/handwritten/bigquery-storage/protos/protos.js
index ff0c3f1b261e..4e93d6735ead 100644
--- a/handwritten/bigquery-storage/protos/protos.js
+++ b/handwritten/bigquery-storage/protos/protos.js
@@ -29359,6 +29359,9841 @@
return v1beta1;
})();
+ storage.v1beta2 = (function() {
+
+ /**
+ * Namespace v1beta2.
+ * @memberof google.cloud.bigquery.storage
+ * @namespace
+ */
+ var v1beta2 = {};
+
+ v1beta2.ArrowSchema = (function() {
+
+ /**
+ * Properties of an ArrowSchema.
+ * @memberof google.cloud.bigquery.storage.v1beta2
+ * @interface IArrowSchema
+ * @property {Uint8Array|null} [serializedSchema] ArrowSchema serializedSchema
+ */
+
+ /**
+ * Constructs a new ArrowSchema.
+ * @memberof google.cloud.bigquery.storage.v1beta2
+ * @classdesc Represents an ArrowSchema.
+ * @implements IArrowSchema
+ * @constructor
+ * @param {google.cloud.bigquery.storage.v1beta2.IArrowSchema=} [properties] Properties to set
+ */
+ function ArrowSchema(properties) {
+ if (properties)
+ for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i)
+ if (properties[keys[i]] != null && keys[i] !== "__proto__")
+ this[keys[i]] = properties[keys[i]];
+ }
+
+ /**
+ * ArrowSchema serializedSchema.
+ * @member {Uint8Array} serializedSchema
+ * @memberof google.cloud.bigquery.storage.v1beta2.ArrowSchema
+ * @instance
+ */
+ ArrowSchema.prototype.serializedSchema = $util.newBuffer([]);
+
+ /**
+ * Creates a new ArrowSchema instance using the specified properties.
+ * @function create
+ * @memberof google.cloud.bigquery.storage.v1beta2.ArrowSchema
+ * @static
+ * @param {google.cloud.bigquery.storage.v1beta2.IArrowSchema=} [properties] Properties to set
+ * @returns {google.cloud.bigquery.storage.v1beta2.ArrowSchema} ArrowSchema instance
+ */
+ ArrowSchema.create = function create(properties) {
+ return new ArrowSchema(properties);
+ };
+
+ /**
+ * Encodes the specified ArrowSchema message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.ArrowSchema.verify|verify} messages.
+ * @function encode
+ * @memberof google.cloud.bigquery.storage.v1beta2.ArrowSchema
+ * @static
+ * @param {google.cloud.bigquery.storage.v1beta2.IArrowSchema} message ArrowSchema message or plain object to encode
+ * @param {$protobuf.Writer} [writer] Writer to encode to
+ * @returns {$protobuf.Writer} Writer
+ */
+ ArrowSchema.encode = function encode(message, writer) {
+ if (!writer)
+ writer = $Writer.create();
+ if (message.serializedSchema != null && Object.hasOwnProperty.call(message, "serializedSchema"))
+ writer.uint32(/* id 1, wireType 2 =*/10).bytes(message.serializedSchema);
+ return writer;
+ };
+
+ /**
+ * Encodes the specified ArrowSchema message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.ArrowSchema.verify|verify} messages.
+ * @function encodeDelimited
+ * @memberof google.cloud.bigquery.storage.v1beta2.ArrowSchema
+ * @static
+ * @param {google.cloud.bigquery.storage.v1beta2.IArrowSchema} message ArrowSchema message or plain object to encode
+ * @param {$protobuf.Writer} [writer] Writer to encode to
+ * @returns {$protobuf.Writer} Writer
+ */
+ ArrowSchema.encodeDelimited = function encodeDelimited(message, writer) {
+ return this.encode(message, writer).ldelim();
+ };
+
+ /**
+ * Decodes an ArrowSchema message from the specified reader or buffer.
+ * @function decode
+ * @memberof google.cloud.bigquery.storage.v1beta2.ArrowSchema
+ * @static
+ * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from
+ * @param {number} [length] Message length if known beforehand
+ * @returns {google.cloud.bigquery.storage.v1beta2.ArrowSchema} ArrowSchema
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ ArrowSchema.decode = function decode(reader, length, error, long) {
+ if (!(reader instanceof $Reader))
+ reader = $Reader.create(reader);
+ if (long === undefined)
+ long = 0;
+ if (long > $Reader.recursionLimit)
+ throw Error("maximum nesting depth exceeded");
+ var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta2.ArrowSchema();
+ while (reader.pos < end) {
+ var tag = reader.uint32();
+ if (tag === error)
+ break;
+ switch (tag >>> 3) {
+ case 1: {
+ message.serializedSchema = reader.bytes();
+ break;
+ }
+ default:
+ reader.skipType(tag & 7, long);
+ break;
+ }
+ }
+ return message;
+ };
+
+ /**
+ * Decodes an ArrowSchema message from the specified reader or buffer, length delimited.
+ * @function decodeDelimited
+ * @memberof google.cloud.bigquery.storage.v1beta2.ArrowSchema
+ * @static
+ * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from
+ * @returns {google.cloud.bigquery.storage.v1beta2.ArrowSchema} ArrowSchema
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ ArrowSchema.decodeDelimited = function decodeDelimited(reader) {
+ if (!(reader instanceof $Reader))
+ reader = new $Reader(reader);
+ return this.decode(reader, reader.uint32());
+ };
+
+ /**
+ * Verifies an ArrowSchema message.
+ * @function verify
+ * @memberof google.cloud.bigquery.storage.v1beta2.ArrowSchema
+ * @static
+                     * @param {Object.<string,*>} message Plain object to verify
+ * @returns {string|null} `null` if valid, otherwise the reason why it is not
+ */
+ ArrowSchema.verify = function verify(message, long) {
+ if (typeof message !== "object" || message === null)
+ return "object expected";
+ if (long === undefined)
+ long = 0;
+ if (long > $util.recursionLimit)
+ return "maximum nesting depth exceeded";
+ if (message.serializedSchema != null && message.hasOwnProperty("serializedSchema"))
+ if (!(message.serializedSchema && typeof message.serializedSchema.length === "number" || $util.isString(message.serializedSchema)))
+ return "serializedSchema: buffer expected";
+ return null;
+ };
+
+ /**
+ * Creates an ArrowSchema message from a plain object. Also converts values to their respective internal types.
+ * @function fromObject
+ * @memberof google.cloud.bigquery.storage.v1beta2.ArrowSchema
+ * @static
+                     * @param {Object.<string,*>} object Plain object
+ * @returns {google.cloud.bigquery.storage.v1beta2.ArrowSchema} ArrowSchema
+ */
+ ArrowSchema.fromObject = function fromObject(object, long) {
+ if (object instanceof $root.google.cloud.bigquery.storage.v1beta2.ArrowSchema)
+ return object;
+ if (long === undefined)
+ long = 0;
+ if (long > $util.recursionLimit)
+ throw Error("maximum nesting depth exceeded");
+ var message = new $root.google.cloud.bigquery.storage.v1beta2.ArrowSchema();
+ if (object.serializedSchema != null)
+ if (typeof object.serializedSchema === "string")
+ $util.base64.decode(object.serializedSchema, message.serializedSchema = $util.newBuffer($util.base64.length(object.serializedSchema)), 0);
+ else if (object.serializedSchema.length >= 0)
+ message.serializedSchema = object.serializedSchema;
+ return message;
+ };
+
+ /**
+ * Creates a plain object from an ArrowSchema message. Also converts values to other types if specified.
+ * @function toObject
+ * @memberof google.cloud.bigquery.storage.v1beta2.ArrowSchema
+ * @static
+ * @param {google.cloud.bigquery.storage.v1beta2.ArrowSchema} message ArrowSchema
+ * @param {$protobuf.IConversionOptions} [options] Conversion options
+                     * @returns {Object.<string,*>} Plain object
+ */
+ ArrowSchema.toObject = function toObject(message, options) {
+ if (!options)
+ options = {};
+ var object = {};
+ if (options.defaults)
+ if (options.bytes === String)
+ object.serializedSchema = "";
+ else {
+ object.serializedSchema = [];
+ if (options.bytes !== Array)
+ object.serializedSchema = $util.newBuffer(object.serializedSchema);
+ }
+ if (message.serializedSchema != null && message.hasOwnProperty("serializedSchema"))
+ object.serializedSchema = options.bytes === String ? $util.base64.encode(message.serializedSchema, 0, message.serializedSchema.length) : options.bytes === Array ? Array.prototype.slice.call(message.serializedSchema) : message.serializedSchema;
+ return object;
+ };
+
+ /**
+ * Converts this ArrowSchema to JSON.
+ * @function toJSON
+ * @memberof google.cloud.bigquery.storage.v1beta2.ArrowSchema
+ * @instance
+                     * @returns {Object.<string,*>} JSON object
+ */
+ ArrowSchema.prototype.toJSON = function toJSON() {
+ return this.constructor.toObject(this, $protobuf.util.toJSONOptions);
+ };
+
+ /**
+ * Gets the default type url for ArrowSchema
+ * @function getTypeUrl
+ * @memberof google.cloud.bigquery.storage.v1beta2.ArrowSchema
+ * @static
+ * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com")
+ * @returns {string} The default type url
+ */
+ ArrowSchema.getTypeUrl = function getTypeUrl(typeUrlPrefix) {
+ if (typeUrlPrefix === undefined) {
+ typeUrlPrefix = "type.googleapis.com";
+ }
+ return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta2.ArrowSchema";
+ };
+
+ return ArrowSchema;
+ })();
+
+ v1beta2.ArrowRecordBatch = (function() {
+
+ /**
+ * Properties of an ArrowRecordBatch.
+ * @memberof google.cloud.bigquery.storage.v1beta2
+ * @interface IArrowRecordBatch
+ * @property {Uint8Array|null} [serializedRecordBatch] ArrowRecordBatch serializedRecordBatch
+ */
+
+ /**
+ * Constructs a new ArrowRecordBatch.
+ * @memberof google.cloud.bigquery.storage.v1beta2
+ * @classdesc Represents an ArrowRecordBatch.
+ * @implements IArrowRecordBatch
+ * @constructor
+ * @param {google.cloud.bigquery.storage.v1beta2.IArrowRecordBatch=} [properties] Properties to set
+ */
+ function ArrowRecordBatch(properties) {
+ if (properties)
+ for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i)
+ if (properties[keys[i]] != null && keys[i] !== "__proto__")
+ this[keys[i]] = properties[keys[i]];
+ }
+
+ /**
+ * ArrowRecordBatch serializedRecordBatch.
+ * @member {Uint8Array} serializedRecordBatch
+ * @memberof google.cloud.bigquery.storage.v1beta2.ArrowRecordBatch
+ * @instance
+ */
+ ArrowRecordBatch.prototype.serializedRecordBatch = $util.newBuffer([]);
+
+ /**
+ * Creates a new ArrowRecordBatch instance using the specified properties.
+ * @function create
+ * @memberof google.cloud.bigquery.storage.v1beta2.ArrowRecordBatch
+ * @static
+ * @param {google.cloud.bigquery.storage.v1beta2.IArrowRecordBatch=} [properties] Properties to set
+ * @returns {google.cloud.bigquery.storage.v1beta2.ArrowRecordBatch} ArrowRecordBatch instance
+ */
+ ArrowRecordBatch.create = function create(properties) {
+ return new ArrowRecordBatch(properties);
+ };
+
+ /**
+ * Encodes the specified ArrowRecordBatch message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.ArrowRecordBatch.verify|verify} messages.
+ * @function encode
+ * @memberof google.cloud.bigquery.storage.v1beta2.ArrowRecordBatch
+ * @static
+ * @param {google.cloud.bigquery.storage.v1beta2.IArrowRecordBatch} message ArrowRecordBatch message or plain object to encode
+ * @param {$protobuf.Writer} [writer] Writer to encode to
+ * @returns {$protobuf.Writer} Writer
+ */
+ ArrowRecordBatch.encode = function encode(message, writer) {
+ if (!writer)
+ writer = $Writer.create();
+ if (message.serializedRecordBatch != null && Object.hasOwnProperty.call(message, "serializedRecordBatch"))
+ writer.uint32(/* id 1, wireType 2 =*/10).bytes(message.serializedRecordBatch);
+ return writer;
+ };
+
+ /**
+ * Encodes the specified ArrowRecordBatch message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.ArrowRecordBatch.verify|verify} messages.
+ * @function encodeDelimited
+ * @memberof google.cloud.bigquery.storage.v1beta2.ArrowRecordBatch
+ * @static
+ * @param {google.cloud.bigquery.storage.v1beta2.IArrowRecordBatch} message ArrowRecordBatch message or plain object to encode
+ * @param {$protobuf.Writer} [writer] Writer to encode to
+ * @returns {$protobuf.Writer} Writer
+ */
+ ArrowRecordBatch.encodeDelimited = function encodeDelimited(message, writer) {
+ return this.encode(message, writer).ldelim();
+ };
+
+ /**
+ * Decodes an ArrowRecordBatch message from the specified reader or buffer.
+ * @function decode
+ * @memberof google.cloud.bigquery.storage.v1beta2.ArrowRecordBatch
+ * @static
+ * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from
+ * @param {number} [length] Message length if known beforehand
+ * @returns {google.cloud.bigquery.storage.v1beta2.ArrowRecordBatch} ArrowRecordBatch
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ ArrowRecordBatch.decode = function decode(reader, length, error, long) {
+ if (!(reader instanceof $Reader))
+ reader = $Reader.create(reader);
+ if (long === undefined)
+ long = 0;
+ if (long > $Reader.recursionLimit)
+ throw Error("maximum nesting depth exceeded");
+ var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta2.ArrowRecordBatch();
+ while (reader.pos < end) {
+ var tag = reader.uint32();
+ if (tag === error)
+ break;
+ switch (tag >>> 3) {
+ case 1: {
+ message.serializedRecordBatch = reader.bytes();
+ break;
+ }
+ default:
+ reader.skipType(tag & 7, long);
+ break;
+ }
+ }
+ return message;
+ };
+
+ /**
+ * Decodes an ArrowRecordBatch message from the specified reader or buffer, length delimited.
+ * @function decodeDelimited
+ * @memberof google.cloud.bigquery.storage.v1beta2.ArrowRecordBatch
+ * @static
+ * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from
+ * @returns {google.cloud.bigquery.storage.v1beta2.ArrowRecordBatch} ArrowRecordBatch
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ ArrowRecordBatch.decodeDelimited = function decodeDelimited(reader) {
+ if (!(reader instanceof $Reader))
+ reader = new $Reader(reader);
+ return this.decode(reader, reader.uint32());
+ };
+
+ /**
+ * Verifies an ArrowRecordBatch message.
+ * @function verify
+ * @memberof google.cloud.bigquery.storage.v1beta2.ArrowRecordBatch
+ * @static
+                     * @param {Object.<string,*>} message Plain object to verify
+ * @returns {string|null} `null` if valid, otherwise the reason why it is not
+ */
+ ArrowRecordBatch.verify = function verify(message, long) {
+ if (typeof message !== "object" || message === null)
+ return "object expected";
+ if (long === undefined)
+ long = 0;
+ if (long > $util.recursionLimit)
+ return "maximum nesting depth exceeded";
+ if (message.serializedRecordBatch != null && message.hasOwnProperty("serializedRecordBatch"))
+ if (!(message.serializedRecordBatch && typeof message.serializedRecordBatch.length === "number" || $util.isString(message.serializedRecordBatch)))
+ return "serializedRecordBatch: buffer expected";
+ return null;
+ };
+
+ /**
+ * Creates an ArrowRecordBatch message from a plain object. Also converts values to their respective internal types.
+ * @function fromObject
+ * @memberof google.cloud.bigquery.storage.v1beta2.ArrowRecordBatch
+ * @static
+                     * @param {Object.<string,*>} object Plain object
+ * @returns {google.cloud.bigquery.storage.v1beta2.ArrowRecordBatch} ArrowRecordBatch
+ */
+ ArrowRecordBatch.fromObject = function fromObject(object, long) {
+ if (object instanceof $root.google.cloud.bigquery.storage.v1beta2.ArrowRecordBatch)
+ return object;
+ if (long === undefined)
+ long = 0;
+ if (long > $util.recursionLimit)
+ throw Error("maximum nesting depth exceeded");
+ var message = new $root.google.cloud.bigquery.storage.v1beta2.ArrowRecordBatch();
+ if (object.serializedRecordBatch != null)
+ if (typeof object.serializedRecordBatch === "string")
+ $util.base64.decode(object.serializedRecordBatch, message.serializedRecordBatch = $util.newBuffer($util.base64.length(object.serializedRecordBatch)), 0);
+ else if (object.serializedRecordBatch.length >= 0)
+ message.serializedRecordBatch = object.serializedRecordBatch;
+ return message;
+ };
+
+ /**
+ * Creates a plain object from an ArrowRecordBatch message. Also converts values to other types if specified.
+ * @function toObject
+ * @memberof google.cloud.bigquery.storage.v1beta2.ArrowRecordBatch
+ * @static
+ * @param {google.cloud.bigquery.storage.v1beta2.ArrowRecordBatch} message ArrowRecordBatch
+ * @param {$protobuf.IConversionOptions} [options] Conversion options
+                     * @returns {Object.<string,*>} Plain object
+ */
+ ArrowRecordBatch.toObject = function toObject(message, options) {
+ if (!options)
+ options = {};
+ var object = {};
+ if (options.defaults)
+ if (options.bytes === String)
+ object.serializedRecordBatch = "";
+ else {
+ object.serializedRecordBatch = [];
+ if (options.bytes !== Array)
+ object.serializedRecordBatch = $util.newBuffer(object.serializedRecordBatch);
+ }
+ if (message.serializedRecordBatch != null && message.hasOwnProperty("serializedRecordBatch"))
+ object.serializedRecordBatch = options.bytes === String ? $util.base64.encode(message.serializedRecordBatch, 0, message.serializedRecordBatch.length) : options.bytes === Array ? Array.prototype.slice.call(message.serializedRecordBatch) : message.serializedRecordBatch;
+ return object;
+ };
+
+ /**
+ * Converts this ArrowRecordBatch to JSON.
+ * @function toJSON
+ * @memberof google.cloud.bigquery.storage.v1beta2.ArrowRecordBatch
+ * @instance
+                     * @returns {Object.<string,*>} JSON object
+ */
+ ArrowRecordBatch.prototype.toJSON = function toJSON() {
+ return this.constructor.toObject(this, $protobuf.util.toJSONOptions);
+ };
+
+ /**
+ * Gets the default type url for ArrowRecordBatch
+ * @function getTypeUrl
+ * @memberof google.cloud.bigquery.storage.v1beta2.ArrowRecordBatch
+ * @static
+ * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com")
+ * @returns {string} The default type url
+ */
+ ArrowRecordBatch.getTypeUrl = function getTypeUrl(typeUrlPrefix) {
+ if (typeUrlPrefix === undefined) {
+ typeUrlPrefix = "type.googleapis.com";
+ }
+ return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta2.ArrowRecordBatch";
+ };
+
+ return ArrowRecordBatch;
+ })();
+
+ v1beta2.ArrowSerializationOptions = (function() {
+
+ /**
+ * Properties of an ArrowSerializationOptions.
+ * @memberof google.cloud.bigquery.storage.v1beta2
+ * @interface IArrowSerializationOptions
+ * @property {google.cloud.bigquery.storage.v1beta2.ArrowSerializationOptions.Format|null} [format] ArrowSerializationOptions format
+ */
+
+ /**
+ * Constructs a new ArrowSerializationOptions.
+ * @memberof google.cloud.bigquery.storage.v1beta2
+ * @classdesc Represents an ArrowSerializationOptions.
+ * @implements IArrowSerializationOptions
+ * @constructor
+ * @param {google.cloud.bigquery.storage.v1beta2.IArrowSerializationOptions=} [properties] Properties to set
+ */
+ function ArrowSerializationOptions(properties) {
+ if (properties)
+ for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i)
+ if (properties[keys[i]] != null && keys[i] !== "__proto__")
+ this[keys[i]] = properties[keys[i]];
+ }
+
+ /**
+ * ArrowSerializationOptions format.
+ * @member {google.cloud.bigquery.storage.v1beta2.ArrowSerializationOptions.Format} format
+ * @memberof google.cloud.bigquery.storage.v1beta2.ArrowSerializationOptions
+ * @instance
+ */
+ ArrowSerializationOptions.prototype.format = 0;
+
+ /**
+ * Creates a new ArrowSerializationOptions instance using the specified properties.
+ * @function create
+ * @memberof google.cloud.bigquery.storage.v1beta2.ArrowSerializationOptions
+ * @static
+ * @param {google.cloud.bigquery.storage.v1beta2.IArrowSerializationOptions=} [properties] Properties to set
+ * @returns {google.cloud.bigquery.storage.v1beta2.ArrowSerializationOptions} ArrowSerializationOptions instance
+ */
+ ArrowSerializationOptions.create = function create(properties) {
+ return new ArrowSerializationOptions(properties);
+ };
+
+ /**
+ * Encodes the specified ArrowSerializationOptions message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.ArrowSerializationOptions.verify|verify} messages.
+ * @function encode
+ * @memberof google.cloud.bigquery.storage.v1beta2.ArrowSerializationOptions
+ * @static
+ * @param {google.cloud.bigquery.storage.v1beta2.IArrowSerializationOptions} message ArrowSerializationOptions message or plain object to encode
+ * @param {$protobuf.Writer} [writer] Writer to encode to
+ * @returns {$protobuf.Writer} Writer
+ */
+ ArrowSerializationOptions.encode = function encode(message, writer) {
+ if (!writer)
+ writer = $Writer.create();
+ if (message.format != null && Object.hasOwnProperty.call(message, "format"))
+ writer.uint32(/* id 1, wireType 0 =*/8).int32(message.format);
+ return writer;
+ };
+
+ /**
+ * Encodes the specified ArrowSerializationOptions message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.ArrowSerializationOptions.verify|verify} messages.
+ * @function encodeDelimited
+ * @memberof google.cloud.bigquery.storage.v1beta2.ArrowSerializationOptions
+ * @static
+ * @param {google.cloud.bigquery.storage.v1beta2.IArrowSerializationOptions} message ArrowSerializationOptions message or plain object to encode
+ * @param {$protobuf.Writer} [writer] Writer to encode to
+ * @returns {$protobuf.Writer} Writer
+ */
+ ArrowSerializationOptions.encodeDelimited = function encodeDelimited(message, writer) {
+ return this.encode(message, writer).ldelim();
+ };
+
+ /**
+ * Decodes an ArrowSerializationOptions message from the specified reader or buffer.
+ * @function decode
+ * @memberof google.cloud.bigquery.storage.v1beta2.ArrowSerializationOptions
+ * @static
+ * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from
+ * @param {number} [length] Message length if known beforehand
+ * @returns {google.cloud.bigquery.storage.v1beta2.ArrowSerializationOptions} ArrowSerializationOptions
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ ArrowSerializationOptions.decode = function decode(reader, length, error, long) {
+ if (!(reader instanceof $Reader))
+ reader = $Reader.create(reader);
+ if (long === undefined)
+ long = 0;
+ if (long > $Reader.recursionLimit)
+ throw Error("maximum nesting depth exceeded");
+ var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta2.ArrowSerializationOptions();
+ while (reader.pos < end) {
+ var tag = reader.uint32();
+ if (tag === error)
+ break;
+ switch (tag >>> 3) {
+ case 1: {
+ message.format = reader.int32();
+ break;
+ }
+ default:
+ reader.skipType(tag & 7, long);
+ break;
+ }
+ }
+ return message;
+ };
+
+ /**
+ * Decodes an ArrowSerializationOptions message from the specified reader or buffer, length delimited.
+ * @function decodeDelimited
+ * @memberof google.cloud.bigquery.storage.v1beta2.ArrowSerializationOptions
+ * @static
+ * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from
+ * @returns {google.cloud.bigquery.storage.v1beta2.ArrowSerializationOptions} ArrowSerializationOptions
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ ArrowSerializationOptions.decodeDelimited = function decodeDelimited(reader) {
+ if (!(reader instanceof $Reader))
+ reader = new $Reader(reader);
+ return this.decode(reader, reader.uint32());
+ };
+
+ /**
+ * Verifies an ArrowSerializationOptions message.
+ * @function verify
+ * @memberof google.cloud.bigquery.storage.v1beta2.ArrowSerializationOptions
+ * @static
+                     * @param {Object.<string,*>} message Plain object to verify
+ * @returns {string|null} `null` if valid, otherwise the reason why it is not
+ */
+ ArrowSerializationOptions.verify = function verify(message, long) {
+ if (typeof message !== "object" || message === null)
+ return "object expected";
+ if (long === undefined)
+ long = 0;
+ if (long > $util.recursionLimit)
+ return "maximum nesting depth exceeded";
+ if (message.format != null && message.hasOwnProperty("format"))
+ switch (message.format) {
+ default:
+ return "format: enum value expected";
+ case 0:
+ case 1:
+ case 2:
+ break;
+ }
+ return null;
+ };
+
+ /**
+ * Creates an ArrowSerializationOptions message from a plain object. Also converts values to their respective internal types.
+ * @function fromObject
+ * @memberof google.cloud.bigquery.storage.v1beta2.ArrowSerializationOptions
+ * @static
+                     * @param {Object.<string,*>} object Plain object
+ * @returns {google.cloud.bigquery.storage.v1beta2.ArrowSerializationOptions} ArrowSerializationOptions
+ */
+ ArrowSerializationOptions.fromObject = function fromObject(object, long) {
+ if (object instanceof $root.google.cloud.bigquery.storage.v1beta2.ArrowSerializationOptions)
+ return object;
+ if (long === undefined)
+ long = 0;
+ if (long > $util.recursionLimit)
+ throw Error("maximum nesting depth exceeded");
+ var message = new $root.google.cloud.bigquery.storage.v1beta2.ArrowSerializationOptions();
+ switch (object.format) {
+ default:
+ if (typeof object.format === "number") {
+ message.format = object.format;
+ break;
+ }
+ break;
+ case "FORMAT_UNSPECIFIED":
+ case 0:
+ message.format = 0;
+ break;
+ case "ARROW_0_14":
+ case 1:
+ message.format = 1;
+ break;
+ case "ARROW_0_15":
+ case 2:
+ message.format = 2;
+ break;
+ }
+ return message;
+ };
+
+ /**
+ * Creates a plain object from an ArrowSerializationOptions message. Also converts values to other types if specified.
+ * @function toObject
+ * @memberof google.cloud.bigquery.storage.v1beta2.ArrowSerializationOptions
+ * @static
+ * @param {google.cloud.bigquery.storage.v1beta2.ArrowSerializationOptions} message ArrowSerializationOptions
+ * @param {$protobuf.IConversionOptions} [options] Conversion options
+                     * @returns {Object.<string,*>} Plain object
+ */
+ ArrowSerializationOptions.toObject = function toObject(message, options) {
+ if (!options)
+ options = {};
+ var object = {};
+ if (options.defaults)
+ object.format = options.enums === String ? "FORMAT_UNSPECIFIED" : 0;
+ if (message.format != null && message.hasOwnProperty("format"))
+ object.format = options.enums === String ? $root.google.cloud.bigquery.storage.v1beta2.ArrowSerializationOptions.Format[message.format] === undefined ? message.format : $root.google.cloud.bigquery.storage.v1beta2.ArrowSerializationOptions.Format[message.format] : message.format;
+ return object;
+ };
+
+ /**
+ * Converts this ArrowSerializationOptions to JSON.
+ * @function toJSON
+ * @memberof google.cloud.bigquery.storage.v1beta2.ArrowSerializationOptions
+ * @instance
+                     * @returns {Object.<string,*>} JSON object
+ */
+ ArrowSerializationOptions.prototype.toJSON = function toJSON() {
+ return this.constructor.toObject(this, $protobuf.util.toJSONOptions);
+ };
+
+ /**
+ * Gets the default type url for ArrowSerializationOptions
+ * @function getTypeUrl
+ * @memberof google.cloud.bigquery.storage.v1beta2.ArrowSerializationOptions
+ * @static
+ * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com")
+ * @returns {string} The default type url
+ */
+ ArrowSerializationOptions.getTypeUrl = function getTypeUrl(typeUrlPrefix) {
+ if (typeUrlPrefix === undefined) {
+ typeUrlPrefix = "type.googleapis.com";
+ }
+ return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta2.ArrowSerializationOptions";
+ };
+
+ /**
+ * Format enum.
+ * @name google.cloud.bigquery.storage.v1beta2.ArrowSerializationOptions.Format
+ * @enum {number}
+ * @property {number} FORMAT_UNSPECIFIED=0 FORMAT_UNSPECIFIED value
+ * @property {number} ARROW_0_14=1 ARROW_0_14 value
+ * @property {number} ARROW_0_15=2 ARROW_0_15 value
+ */
+ ArrowSerializationOptions.Format = (function() {
+ var valuesById = {}, values = Object.create(valuesById);
+ values[valuesById[0] = "FORMAT_UNSPECIFIED"] = 0;
+ values[valuesById[1] = "ARROW_0_14"] = 1;
+ values[valuesById[2] = "ARROW_0_15"] = 2;
+ return values;
+ })();
+
+ return ArrowSerializationOptions;
+ })();
+
+ v1beta2.AvroSchema = (function() {
+
+ /**
+ * Properties of an AvroSchema.
+ * @memberof google.cloud.bigquery.storage.v1beta2
+ * @interface IAvroSchema
+ * @property {string|null} [schema] AvroSchema schema
+ */
+
+ /**
+ * Constructs a new AvroSchema.
+ * @memberof google.cloud.bigquery.storage.v1beta2
+ * @classdesc Represents an AvroSchema.
+ * @implements IAvroSchema
+ * @constructor
+ * @param {google.cloud.bigquery.storage.v1beta2.IAvroSchema=} [properties] Properties to set
+ */
+ function AvroSchema(properties) {
+ if (properties)
+ for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i)
+ if (properties[keys[i]] != null && keys[i] !== "__proto__")
+ this[keys[i]] = properties[keys[i]];
+ }
+
+ /**
+ * AvroSchema schema.
+ * @member {string} schema
+ * @memberof google.cloud.bigquery.storage.v1beta2.AvroSchema
+ * @instance
+ */
+ AvroSchema.prototype.schema = "";
+
+ /**
+ * Creates a new AvroSchema instance using the specified properties.
+ * @function create
+ * @memberof google.cloud.bigquery.storage.v1beta2.AvroSchema
+ * @static
+ * @param {google.cloud.bigquery.storage.v1beta2.IAvroSchema=} [properties] Properties to set
+ * @returns {google.cloud.bigquery.storage.v1beta2.AvroSchema} AvroSchema instance
+ */
+ AvroSchema.create = function create(properties) {
+ return new AvroSchema(properties);
+ };
+
+ /**
+ * Encodes the specified AvroSchema message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.AvroSchema.verify|verify} messages.
+ * @function encode
+ * @memberof google.cloud.bigquery.storage.v1beta2.AvroSchema
+ * @static
+ * @param {google.cloud.bigquery.storage.v1beta2.IAvroSchema} message AvroSchema message or plain object to encode
+ * @param {$protobuf.Writer} [writer] Writer to encode to
+ * @returns {$protobuf.Writer} Writer
+ */
+ AvroSchema.encode = function encode(message, writer) {
+ if (!writer)
+ writer = $Writer.create();
+ if (message.schema != null && Object.hasOwnProperty.call(message, "schema"))
+ writer.uint32(/* id 1, wireType 2 =*/10).string(message.schema);
+ return writer;
+ };
+
+ /**
+ * Encodes the specified AvroSchema message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.AvroSchema.verify|verify} messages.
+ * @function encodeDelimited
+ * @memberof google.cloud.bigquery.storage.v1beta2.AvroSchema
+ * @static
+ * @param {google.cloud.bigquery.storage.v1beta2.IAvroSchema} message AvroSchema message or plain object to encode
+ * @param {$protobuf.Writer} [writer] Writer to encode to
+ * @returns {$protobuf.Writer} Writer
+ */
+ AvroSchema.encodeDelimited = function encodeDelimited(message, writer) {
+ return this.encode(message, writer).ldelim();
+ };
+
+ /**
+ * Decodes an AvroSchema message from the specified reader or buffer.
+ * @function decode
+ * @memberof google.cloud.bigquery.storage.v1beta2.AvroSchema
+ * @static
+ * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from
+ * @param {number} [length] Message length if known beforehand
+ * @returns {google.cloud.bigquery.storage.v1beta2.AvroSchema} AvroSchema
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ AvroSchema.decode = function decode(reader, length, error, long) {
+ if (!(reader instanceof $Reader))
+ reader = $Reader.create(reader);
+ if (long === undefined)
+ long = 0;
+ if (long > $Reader.recursionLimit)
+ throw Error("maximum nesting depth exceeded");
+ var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta2.AvroSchema();
+ while (reader.pos < end) {
+ var tag = reader.uint32();
+ if (tag === error)
+ break;
+ switch (tag >>> 3) {
+ case 1: {
+ message.schema = reader.string();
+ break;
+ }
+ default:
+ reader.skipType(tag & 7, long);
+ break;
+ }
+ }
+ return message;
+ };
+
+                        /**
+                         * Decodes an AvroSchema message from the specified reader or buffer, length delimited.
+                         * @function decodeDelimited
+                         * @memberof google.cloud.bigquery.storage.v1beta2.AvroSchema
+                         * @static
+                         * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from
+                         * @returns {google.cloud.bigquery.storage.v1beta2.AvroSchema} AvroSchema
+                         * @throws {Error} If the payload is not a reader or valid buffer
+                         * @throws {$protobuf.util.ProtocolError} If required fields are missing
+                         */
+                        AvroSchema.decodeDelimited = function decodeDelimited(reader) {
+                            if (!(reader instanceof $Reader))
+                                reader = new $Reader(reader);
+                            // The leading varint carries the byte length of the message that follows.
+                            return this.decode(reader, reader.uint32());
+                        };
+
+                        /**
+                         * Verifies an AvroSchema message.
+                         * @function verify
+                         * @memberof google.cloud.bigquery.storage.v1beta2.AvroSchema
+                         * @static
+                         * @param {Object.<string,*>} message Plain object to verify
+                         * @param {number} [long] Current recursion depth (defaults to 0); exceeding the recursion limit fails verification
+                         * @returns {string|null} `null` if valid, otherwise the reason why it is not
+                         */
+                        AvroSchema.verify = function verify(message, long) {
+                            if (typeof message !== "object" || message === null)
+                                return "object expected";
+                            if (long === undefined)
+                                long = 0;
+                            if (long > $util.recursionLimit)
+                                return "maximum nesting depth exceeded";
+                            if (message.schema != null && message.hasOwnProperty("schema"))
+                                if (!$util.isString(message.schema))
+                                    return "schema: string expected";
+                            return null;
+                        };
+
+                        /**
+                         * Creates an AvroSchema message from a plain object. Also converts values to their respective internal types.
+                         * @function fromObject
+                         * @memberof google.cloud.bigquery.storage.v1beta2.AvroSchema
+                         * @static
+                         * @param {Object.<string,*>} object Plain object
+                         * @param {number} [long] Current recursion depth (defaults to 0); exceeding the recursion limit throws
+                         * @returns {google.cloud.bigquery.storage.v1beta2.AvroSchema} AvroSchema
+                         */
+                        AvroSchema.fromObject = function fromObject(object, long) {
+                            if (object instanceof $root.google.cloud.bigquery.storage.v1beta2.AvroSchema)
+                                return object;
+                            if (long === undefined)
+                                long = 0;
+                            if (long > $util.recursionLimit)
+                                throw Error("maximum nesting depth exceeded");
+                            var message = new $root.google.cloud.bigquery.storage.v1beta2.AvroSchema();
+                            if (object.schema != null)
+                                message.schema = String(object.schema);
+                            return message;
+                        };
+
+                        /**
+                         * Creates a plain object from an AvroSchema message. Also converts values to other types if specified.
+                         * @function toObject
+                         * @memberof google.cloud.bigquery.storage.v1beta2.AvroSchema
+                         * @static
+                         * @param {google.cloud.bigquery.storage.v1beta2.AvroSchema} message AvroSchema
+                         * @param {$protobuf.IConversionOptions} [options] Conversion options
+                         * @returns {Object.<string,*>} Plain object
+                         */
+                        AvroSchema.toObject = function toObject(message, options) {
+                            if (!options)
+                                options = {};
+                            var object = {};
+                            if (options.defaults)
+                                object.schema = "";
+                            if (message.schema != null && message.hasOwnProperty("schema"))
+                                object.schema = message.schema;
+                            return object;
+                        };
+
+                        /**
+                         * Converts this AvroSchema to JSON.
+                         * @function toJSON
+                         * @memberof google.cloud.bigquery.storage.v1beta2.AvroSchema
+                         * @instance
+                         * @returns {Object.<string,*>} JSON object
+                         */
+                        AvroSchema.prototype.toJSON = function toJSON() {
+                            return this.constructor.toObject(this, $protobuf.util.toJSONOptions);
+                        };
+
+                        /**
+                         * Gets the default type url for AvroSchema
+                         * @function getTypeUrl
+                         * @memberof google.cloud.bigquery.storage.v1beta2.AvroSchema
+                         * @static
+                         * @param {string} [typeUrlPrefix] your custom typeUrlPrefix (default "type.googleapis.com")
+                         * @returns {string} The default type url
+                         */
+                        AvroSchema.getTypeUrl = function getTypeUrl(typeUrlPrefix) {
+                            if (typeUrlPrefix === undefined) {
+                                typeUrlPrefix = "type.googleapis.com";
+                            }
+                            return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta2.AvroSchema";
+                        };
+
+ return AvroSchema;
+ })();
+
+                    v1beta2.AvroRows = (function() {
+
+                        /**
+                         * Properties of an AvroRows.
+                         * @memberof google.cloud.bigquery.storage.v1beta2
+                         * @interface IAvroRows
+                         * @property {Uint8Array|null} [serializedBinaryRows] AvroRows serializedBinaryRows
+                         */
+
+                        /**
+                         * Constructs a new AvroRows.
+                         * @memberof google.cloud.bigquery.storage.v1beta2
+                         * @classdesc Represents an AvroRows.
+                         * @implements IAvroRows
+                         * @constructor
+                         * @param {google.cloud.bigquery.storage.v1beta2.IAvroRows=} [properties] Properties to set
+                         */
+                        function AvroRows(properties) {
+                            if (properties)
+                                for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i)
+                                    if (properties[keys[i]] != null && keys[i] !== "__proto__")
+                                        this[keys[i]] = properties[keys[i]];
+                        }
+
+                        /**
+                         * AvroRows serializedBinaryRows.
+                         * @member {Uint8Array} serializedBinaryRows
+                         * @memberof google.cloud.bigquery.storage.v1beta2.AvroRows
+                         * @instance
+                         */
+                        AvroRows.prototype.serializedBinaryRows = $util.newBuffer([]);
+
+                        /**
+                         * Creates a new AvroRows instance using the specified properties.
+                         * @function create
+                         * @memberof google.cloud.bigquery.storage.v1beta2.AvroRows
+                         * @static
+                         * @param {google.cloud.bigquery.storage.v1beta2.IAvroRows=} [properties] Properties to set
+                         * @returns {google.cloud.bigquery.storage.v1beta2.AvroRows} AvroRows instance
+                         */
+                        AvroRows.create = function create(properties) {
+                            return new AvroRows(properties);
+                        };
+
+                        /**
+                         * Encodes the specified AvroRows message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.AvroRows.verify|verify} messages.
+                         * @function encode
+                         * @memberof google.cloud.bigquery.storage.v1beta2.AvroRows
+                         * @static
+                         * @param {google.cloud.bigquery.storage.v1beta2.IAvroRows} message AvroRows message or plain object to encode
+                         * @param {$protobuf.Writer} [writer] Writer to encode to
+                         * @returns {$protobuf.Writer} Writer
+                         */
+                        AvroRows.encode = function encode(message, writer) {
+                            if (!writer)
+                                writer = $Writer.create();
+                            if (message.serializedBinaryRows != null && Object.hasOwnProperty.call(message, "serializedBinaryRows"))
+                                writer.uint32(/* id 1, wireType 2 =*/10).bytes(message.serializedBinaryRows);
+                            return writer;
+                        };
+
+                        /**
+                         * Encodes the specified AvroRows message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.AvroRows.verify|verify} messages.
+                         * @function encodeDelimited
+                         * @memberof google.cloud.bigquery.storage.v1beta2.AvroRows
+                         * @static
+                         * @param {google.cloud.bigquery.storage.v1beta2.IAvroRows} message AvroRows message or plain object to encode
+                         * @param {$protobuf.Writer} [writer] Writer to encode to
+                         * @returns {$protobuf.Writer} Writer
+                         */
+                        AvroRows.encodeDelimited = function encodeDelimited(message, writer) {
+                            return this.encode(message, writer).ldelim();
+                        };
+
+                        /**
+                         * Decodes an AvroRows message from the specified reader or buffer.
+                         * @function decode
+                         * @memberof google.cloud.bigquery.storage.v1beta2.AvroRows
+                         * @static
+                         * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from
+                         * @param {number} [length] Message length if known beforehand
+                         * @param {number} [error] Sentinel tag value; decoding stops early when a tag equal to it is read
+                         * @param {number} [long] Current recursion depth (defaults to 0); exceeding the reader's recursion limit throws
+                         * @returns {google.cloud.bigquery.storage.v1beta2.AvroRows} AvroRows
+                         * @throws {Error} If the payload is not a reader or valid buffer
+                         * @throws {$protobuf.util.ProtocolError} If required fields are missing
+                         */
+                        AvroRows.decode = function decode(reader, length, error, long) {
+                            if (!(reader instanceof $Reader))
+                                reader = $Reader.create(reader);
+                            if (long === undefined)
+                                long = 0;
+                            if (long > $Reader.recursionLimit)
+                                throw Error("maximum nesting depth exceeded");
+                            var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta2.AvroRows();
+                            while (reader.pos < end) {
+                                var tag = reader.uint32();
+                                if (tag === error)
+                                    break;
+                                switch (tag >>> 3) {
+                                case 1: {
+                                        message.serializedBinaryRows = reader.bytes();
+                                        break;
+                                    }
+                                default:
+                                    reader.skipType(tag & 7, long);
+                                    break;
+                                }
+                            }
+                            return message;
+                        };
+
+                        /**
+                         * Decodes an AvroRows message from the specified reader or buffer, length delimited.
+                         * @function decodeDelimited
+                         * @memberof google.cloud.bigquery.storage.v1beta2.AvroRows
+                         * @static
+                         * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from
+                         * @returns {google.cloud.bigquery.storage.v1beta2.AvroRows} AvroRows
+                         * @throws {Error} If the payload is not a reader or valid buffer
+                         * @throws {$protobuf.util.ProtocolError} If required fields are missing
+                         */
+                        AvroRows.decodeDelimited = function decodeDelimited(reader) {
+                            if (!(reader instanceof $Reader))
+                                reader = new $Reader(reader);
+                            return this.decode(reader, reader.uint32());
+                        };
+
+                        /**
+                         * Verifies an AvroRows message.
+                         * @function verify
+                         * @memberof google.cloud.bigquery.storage.v1beta2.AvroRows
+                         * @static
+                         * @param {Object.<string,*>} message Plain object to verify
+                         * @param {number} [long] Current recursion depth (defaults to 0); exceeding the recursion limit fails verification
+                         * @returns {string|null} `null` if valid, otherwise the reason why it is not
+                         */
+                        AvroRows.verify = function verify(message, long) {
+                            if (typeof message !== "object" || message === null)
+                                return "object expected";
+                            if (long === undefined)
+                                long = 0;
+                            if (long > $util.recursionLimit)
+                                return "maximum nesting depth exceeded";
+                            if (message.serializedBinaryRows != null && message.hasOwnProperty("serializedBinaryRows"))
+                                if (!(message.serializedBinaryRows && typeof message.serializedBinaryRows.length === "number" || $util.isString(message.serializedBinaryRows)))
+                                    return "serializedBinaryRows: buffer expected";
+                            return null;
+                        };
+
+                        /**
+                         * Creates an AvroRows message from a plain object. Also converts values to their respective internal types.
+                         * @function fromObject
+                         * @memberof google.cloud.bigquery.storage.v1beta2.AvroRows
+                         * @static
+                         * @param {Object.<string,*>} object Plain object
+                         * @param {number} [long] Current recursion depth (defaults to 0); exceeding the recursion limit throws
+                         * @returns {google.cloud.bigquery.storage.v1beta2.AvroRows} AvroRows
+                         */
+                        AvroRows.fromObject = function fromObject(object, long) {
+                            if (object instanceof $root.google.cloud.bigquery.storage.v1beta2.AvroRows)
+                                return object;
+                            if (long === undefined)
+                                long = 0;
+                            if (long > $util.recursionLimit)
+                                throw Error("maximum nesting depth exceeded");
+                            var message = new $root.google.cloud.bigquery.storage.v1beta2.AvroRows();
+                            if (object.serializedBinaryRows != null)
+                                if (typeof object.serializedBinaryRows === "string")
+                                    $util.base64.decode(object.serializedBinaryRows, message.serializedBinaryRows = $util.newBuffer($util.base64.length(object.serializedBinaryRows)), 0);
+                                else if (object.serializedBinaryRows.length >= 0)
+                                    message.serializedBinaryRows = object.serializedBinaryRows;
+                            return message;
+                        };
+
+                        /**
+                         * Creates a plain object from an AvroRows message. Also converts values to other types if specified.
+                         * @function toObject
+                         * @memberof google.cloud.bigquery.storage.v1beta2.AvroRows
+                         * @static
+                         * @param {google.cloud.bigquery.storage.v1beta2.AvroRows} message AvroRows
+                         * @param {$protobuf.IConversionOptions} [options] Conversion options
+                         * @returns {Object.<string,*>} Plain object
+                         */
+                        AvroRows.toObject = function toObject(message, options) {
+                            if (!options)
+                                options = {};
+                            var object = {};
+                            if (options.defaults)
+                                if (options.bytes === String)
+                                    object.serializedBinaryRows = "";
+                                else {
+                                    object.serializedBinaryRows = [];
+                                    if (options.bytes !== Array)
+                                        object.serializedBinaryRows = $util.newBuffer(object.serializedBinaryRows);
+                                }
+                            if (message.serializedBinaryRows != null && message.hasOwnProperty("serializedBinaryRows"))
+                                object.serializedBinaryRows = options.bytes === String ? $util.base64.encode(message.serializedBinaryRows, 0, message.serializedBinaryRows.length) : options.bytes === Array ? Array.prototype.slice.call(message.serializedBinaryRows) : message.serializedBinaryRows;
+                            return object;
+                        };
+
+                        /**
+                         * Converts this AvroRows to JSON.
+                         * @function toJSON
+                         * @memberof google.cloud.bigquery.storage.v1beta2.AvroRows
+                         * @instance
+                         * @returns {Object.<string,*>} JSON object
+                         */
+                        AvroRows.prototype.toJSON = function toJSON() {
+                            return this.constructor.toObject(this, $protobuf.util.toJSONOptions);
+                        };
+
+                        /**
+                         * Gets the default type url for AvroRows
+                         * @function getTypeUrl
+                         * @memberof google.cloud.bigquery.storage.v1beta2.AvroRows
+                         * @static
+                         * @param {string} [typeUrlPrefix] your custom typeUrlPrefix (default "type.googleapis.com")
+                         * @returns {string} The default type url
+                         */
+                        AvroRows.getTypeUrl = function getTypeUrl(typeUrlPrefix) {
+                            if (typeUrlPrefix === undefined) {
+                                typeUrlPrefix = "type.googleapis.com";
+                            }
+                            return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta2.AvroRows";
+                        };
+
+                        return AvroRows;
+                    })();
+
+                    v1beta2.ProtoSchema = (function() {
+
+                        /**
+                         * Properties of a ProtoSchema.
+                         * @memberof google.cloud.bigquery.storage.v1beta2
+                         * @interface IProtoSchema
+                         * @property {google.protobuf.IDescriptorProto|null} [protoDescriptor] ProtoSchema protoDescriptor
+                         */
+
+                        /**
+                         * Constructs a new ProtoSchema.
+                         * @memberof google.cloud.bigquery.storage.v1beta2
+                         * @classdesc Represents a ProtoSchema.
+                         * @implements IProtoSchema
+                         * @constructor
+                         * @param {google.cloud.bigquery.storage.v1beta2.IProtoSchema=} [properties] Properties to set
+                         */
+                        function ProtoSchema(properties) {
+                            if (properties)
+                                for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i)
+                                    if (properties[keys[i]] != null && keys[i] !== "__proto__")
+                                        this[keys[i]] = properties[keys[i]];
+                        }
+
+                        /**
+                         * ProtoSchema protoDescriptor.
+                         * @member {google.protobuf.IDescriptorProto|null|undefined} protoDescriptor
+                         * @memberof google.cloud.bigquery.storage.v1beta2.ProtoSchema
+                         * @instance
+                         */
+                        ProtoSchema.prototype.protoDescriptor = null;
+
+                        /**
+                         * Creates a new ProtoSchema instance using the specified properties.
+                         * @function create
+                         * @memberof google.cloud.bigquery.storage.v1beta2.ProtoSchema
+                         * @static
+                         * @param {google.cloud.bigquery.storage.v1beta2.IProtoSchema=} [properties] Properties to set
+                         * @returns {google.cloud.bigquery.storage.v1beta2.ProtoSchema} ProtoSchema instance
+                         */
+                        ProtoSchema.create = function create(properties) {
+                            return new ProtoSchema(properties);
+                        };
+
+                        /**
+                         * Encodes the specified ProtoSchema message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.ProtoSchema.verify|verify} messages.
+                         * @function encode
+                         * @memberof google.cloud.bigquery.storage.v1beta2.ProtoSchema
+                         * @static
+                         * @param {google.cloud.bigquery.storage.v1beta2.IProtoSchema} message ProtoSchema message or plain object to encode
+                         * @param {$protobuf.Writer} [writer] Writer to encode to
+                         * @returns {$protobuf.Writer} Writer
+                         */
+                        ProtoSchema.encode = function encode(message, writer) {
+                            if (!writer)
+                                writer = $Writer.create();
+                            if (message.protoDescriptor != null && Object.hasOwnProperty.call(message, "protoDescriptor"))
+                                $root.google.protobuf.DescriptorProto.encode(message.protoDescriptor, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim();
+                            return writer;
+                        };
+
+                        /**
+                         * Encodes the specified ProtoSchema message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.ProtoSchema.verify|verify} messages.
+                         * @function encodeDelimited
+                         * @memberof google.cloud.bigquery.storage.v1beta2.ProtoSchema
+                         * @static
+                         * @param {google.cloud.bigquery.storage.v1beta2.IProtoSchema} message ProtoSchema message or plain object to encode
+                         * @param {$protobuf.Writer} [writer] Writer to encode to
+                         * @returns {$protobuf.Writer} Writer
+                         */
+                        ProtoSchema.encodeDelimited = function encodeDelimited(message, writer) {
+                            return this.encode(message, writer).ldelim();
+                        };
+
+                        /**
+                         * Decodes a ProtoSchema message from the specified reader or buffer.
+                         * @function decode
+                         * @memberof google.cloud.bigquery.storage.v1beta2.ProtoSchema
+                         * @static
+                         * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from
+                         * @param {number} [length] Message length if known beforehand
+                         * @param {number} [error] Sentinel tag value; decoding stops early when a tag equal to it is read
+                         * @param {number} [long] Current recursion depth (defaults to 0); incremented for the nested DescriptorProto decode
+                         * @returns {google.cloud.bigquery.storage.v1beta2.ProtoSchema} ProtoSchema
+                         * @throws {Error} If the payload is not a reader or valid buffer
+                         * @throws {$protobuf.util.ProtocolError} If required fields are missing
+                         */
+                        ProtoSchema.decode = function decode(reader, length, error, long) {
+                            if (!(reader instanceof $Reader))
+                                reader = $Reader.create(reader);
+                            if (long === undefined)
+                                long = 0;
+                            if (long > $Reader.recursionLimit)
+                                throw Error("maximum nesting depth exceeded");
+                            var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta2.ProtoSchema();
+                            while (reader.pos < end) {
+                                var tag = reader.uint32();
+                                if (tag === error)
+                                    break;
+                                switch (tag >>> 3) {
+                                case 1: {
+                                        message.protoDescriptor = $root.google.protobuf.DescriptorProto.decode(reader, reader.uint32(), undefined, long + 1);
+                                        break;
+                                    }
+                                default:
+                                    reader.skipType(tag & 7, long);
+                                    break;
+                                }
+                            }
+                            return message;
+                        };
+
+                        /**
+                         * Decodes a ProtoSchema message from the specified reader or buffer, length delimited.
+                         * @function decodeDelimited
+                         * @memberof google.cloud.bigquery.storage.v1beta2.ProtoSchema
+                         * @static
+                         * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from
+                         * @returns {google.cloud.bigquery.storage.v1beta2.ProtoSchema} ProtoSchema
+                         * @throws {Error} If the payload is not a reader or valid buffer
+                         * @throws {$protobuf.util.ProtocolError} If required fields are missing
+                         */
+                        ProtoSchema.decodeDelimited = function decodeDelimited(reader) {
+                            if (!(reader instanceof $Reader))
+                                reader = new $Reader(reader);
+                            return this.decode(reader, reader.uint32());
+                        };
+
+                        /**
+                         * Verifies a ProtoSchema message.
+                         * @function verify
+                         * @memberof google.cloud.bigquery.storage.v1beta2.ProtoSchema
+                         * @static
+                         * @param {Object.<string,*>} message Plain object to verify
+                         * @param {number} [long] Current recursion depth (defaults to 0); passed incremented to the nested verify
+                         * @returns {string|null} `null` if valid, otherwise the reason why it is not
+                         */
+                        ProtoSchema.verify = function verify(message, long) {
+                            if (typeof message !== "object" || message === null)
+                                return "object expected";
+                            if (long === undefined)
+                                long = 0;
+                            if (long > $util.recursionLimit)
+                                return "maximum nesting depth exceeded";
+                            if (message.protoDescriptor != null && message.hasOwnProperty("protoDescriptor")) {
+                                var error = $root.google.protobuf.DescriptorProto.verify(message.protoDescriptor, long + 1);
+                                if (error)
+                                    return "protoDescriptor." + error;
+                            }
+                            return null;
+                        };
+
+                        /**
+                         * Creates a ProtoSchema message from a plain object. Also converts values to their respective internal types.
+                         * @function fromObject
+                         * @memberof google.cloud.bigquery.storage.v1beta2.ProtoSchema
+                         * @static
+                         * @param {Object.<string,*>} object Plain object
+                         * @param {number} [long] Current recursion depth (defaults to 0); passed incremented to the nested fromObject
+                         * @returns {google.cloud.bigquery.storage.v1beta2.ProtoSchema} ProtoSchema
+                         */
+                        ProtoSchema.fromObject = function fromObject(object, long) {
+                            if (object instanceof $root.google.cloud.bigquery.storage.v1beta2.ProtoSchema)
+                                return object;
+                            if (long === undefined)
+                                long = 0;
+                            if (long > $util.recursionLimit)
+                                throw Error("maximum nesting depth exceeded");
+                            var message = new $root.google.cloud.bigquery.storage.v1beta2.ProtoSchema();
+                            if (object.protoDescriptor != null) {
+                                if (typeof object.protoDescriptor !== "object")
+                                    throw TypeError(".google.cloud.bigquery.storage.v1beta2.ProtoSchema.protoDescriptor: object expected");
+                                message.protoDescriptor = $root.google.protobuf.DescriptorProto.fromObject(object.protoDescriptor, long + 1);
+                            }
+                            return message;
+                        };
+
+                        /**
+                         * Creates a plain object from a ProtoSchema message. Also converts values to other types if specified.
+                         * @function toObject
+                         * @memberof google.cloud.bigquery.storage.v1beta2.ProtoSchema
+                         * @static
+                         * @param {google.cloud.bigquery.storage.v1beta2.ProtoSchema} message ProtoSchema
+                         * @param {$protobuf.IConversionOptions} [options] Conversion options
+                         * @returns {Object.<string,*>} Plain object
+                         */
+                        ProtoSchema.toObject = function toObject(message, options) {
+                            if (!options)
+                                options = {};
+                            var object = {};
+                            if (options.defaults)
+                                object.protoDescriptor = null;
+                            if (message.protoDescriptor != null && message.hasOwnProperty("protoDescriptor"))
+                                object.protoDescriptor = $root.google.protobuf.DescriptorProto.toObject(message.protoDescriptor, options);
+                            return object;
+                        };
+
+                        /**
+                         * Converts this ProtoSchema to JSON.
+                         * @function toJSON
+                         * @memberof google.cloud.bigquery.storage.v1beta2.ProtoSchema
+                         * @instance
+                         * @returns {Object.<string,*>} JSON object
+                         */
+                        ProtoSchema.prototype.toJSON = function toJSON() {
+                            return this.constructor.toObject(this, $protobuf.util.toJSONOptions);
+                        };
+
+                        /**
+                         * Gets the default type url for ProtoSchema
+                         * @function getTypeUrl
+                         * @memberof google.cloud.bigquery.storage.v1beta2.ProtoSchema
+                         * @static
+                         * @param {string} [typeUrlPrefix] your custom typeUrlPrefix (default "type.googleapis.com")
+                         * @returns {string} The default type url
+                         */
+                        ProtoSchema.getTypeUrl = function getTypeUrl(typeUrlPrefix) {
+                            if (typeUrlPrefix === undefined) {
+                                typeUrlPrefix = "type.googleapis.com";
+                            }
+                            return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta2.ProtoSchema";
+                        };
+
+                        return ProtoSchema;
+                    })();
+
+                    v1beta2.ProtoRows = (function() {
+
+                        /**
+                         * Properties of a ProtoRows.
+                         * @memberof google.cloud.bigquery.storage.v1beta2
+                         * @interface IProtoRows
+                         * @property {Array.<Uint8Array>|null} [serializedRows] ProtoRows serializedRows
+                         */
+
+                        /**
+                         * Constructs a new ProtoRows.
+                         * @memberof google.cloud.bigquery.storage.v1beta2
+                         * @classdesc Represents a ProtoRows.
+                         * @implements IProtoRows
+                         * @constructor
+                         * @param {google.cloud.bigquery.storage.v1beta2.IProtoRows=} [properties] Properties to set
+                         */
+                        function ProtoRows(properties) {
+                            this.serializedRows = [];
+                            if (properties)
+                                for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i)
+                                    if (properties[keys[i]] != null && keys[i] !== "__proto__")
+                                        this[keys[i]] = properties[keys[i]];
+                        }
+
+                        /**
+                         * ProtoRows serializedRows.
+                         * @member {Array.<Uint8Array>} serializedRows
+                         * @memberof google.cloud.bigquery.storage.v1beta2.ProtoRows
+                         * @instance
+                         */
+                        ProtoRows.prototype.serializedRows = $util.emptyArray;
+
+                        /**
+                         * Creates a new ProtoRows instance using the specified properties.
+                         * @function create
+                         * @memberof google.cloud.bigquery.storage.v1beta2.ProtoRows
+                         * @static
+                         * @param {google.cloud.bigquery.storage.v1beta2.IProtoRows=} [properties] Properties to set
+                         * @returns {google.cloud.bigquery.storage.v1beta2.ProtoRows} ProtoRows instance
+                         */
+                        ProtoRows.create = function create(properties) {
+                            return new ProtoRows(properties);
+                        };
+
+                        /**
+                         * Encodes the specified ProtoRows message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.ProtoRows.verify|verify} messages.
+                         * @function encode
+                         * @memberof google.cloud.bigquery.storage.v1beta2.ProtoRows
+                         * @static
+                         * @param {google.cloud.bigquery.storage.v1beta2.IProtoRows} message ProtoRows message or plain object to encode
+                         * @param {$protobuf.Writer} [writer] Writer to encode to
+                         * @returns {$protobuf.Writer} Writer
+                         */
+                        ProtoRows.encode = function encode(message, writer) {
+                            if (!writer)
+                                writer = $Writer.create();
+                            if (message.serializedRows != null && message.serializedRows.length)
+                                for (var i = 0; i < message.serializedRows.length; ++i)
+                                    writer.uint32(/* id 1, wireType 2 =*/10).bytes(message.serializedRows[i]);
+                            return writer;
+                        };
+
+                        /**
+                         * Encodes the specified ProtoRows message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.ProtoRows.verify|verify} messages.
+                         * @function encodeDelimited
+                         * @memberof google.cloud.bigquery.storage.v1beta2.ProtoRows
+                         * @static
+                         * @param {google.cloud.bigquery.storage.v1beta2.IProtoRows} message ProtoRows message or plain object to encode
+                         * @param {$protobuf.Writer} [writer] Writer to encode to
+                         * @returns {$protobuf.Writer} Writer
+                         */
+                        ProtoRows.encodeDelimited = function encodeDelimited(message, writer) {
+                            return this.encode(message, writer).ldelim();
+                        };
+
+                        /**
+                         * Decodes a ProtoRows message from the specified reader or buffer.
+                         * @function decode
+                         * @memberof google.cloud.bigquery.storage.v1beta2.ProtoRows
+                         * @static
+                         * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from
+                         * @param {number} [length] Message length if known beforehand
+                         * @param {number} [error] Sentinel tag value; decoding stops early when a tag equal to it is read
+                         * @param {number} [long] Current recursion depth (defaults to 0); exceeding the reader's recursion limit throws
+                         * @returns {google.cloud.bigquery.storage.v1beta2.ProtoRows} ProtoRows
+                         * @throws {Error} If the payload is not a reader or valid buffer
+                         * @throws {$protobuf.util.ProtocolError} If required fields are missing
+                         */
+                        ProtoRows.decode = function decode(reader, length, error, long) {
+                            if (!(reader instanceof $Reader))
+                                reader = $Reader.create(reader);
+                            if (long === undefined)
+                                long = 0;
+                            if (long > $Reader.recursionLimit)
+                                throw Error("maximum nesting depth exceeded");
+                            var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta2.ProtoRows();
+                            while (reader.pos < end) {
+                                var tag = reader.uint32();
+                                if (tag === error)
+                                    break;
+                                switch (tag >>> 3) {
+                                case 1: {
+                                        if (!(message.serializedRows && message.serializedRows.length))
+                                            message.serializedRows = [];
+                                        message.serializedRows.push(reader.bytes());
+                                        break;
+                                    }
+                                default:
+                                    reader.skipType(tag & 7, long);
+                                    break;
+                                }
+                            }
+                            return message;
+                        };
+
+                        /**
+                         * Decodes a ProtoRows message from the specified reader or buffer, length delimited.
+                         * @function decodeDelimited
+                         * @memberof google.cloud.bigquery.storage.v1beta2.ProtoRows
+                         * @static
+                         * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from
+                         * @returns {google.cloud.bigquery.storage.v1beta2.ProtoRows} ProtoRows
+                         * @throws {Error} If the payload is not a reader or valid buffer
+                         * @throws {$protobuf.util.ProtocolError} If required fields are missing
+                         */
+                        ProtoRows.decodeDelimited = function decodeDelimited(reader) {
+                            if (!(reader instanceof $Reader))
+                                reader = new $Reader(reader);
+                            return this.decode(reader, reader.uint32());
+                        };
+
+                        /**
+                         * Verifies a ProtoRows message.
+                         * @function verify
+                         * @memberof google.cloud.bigquery.storage.v1beta2.ProtoRows
+                         * @static
+                         * @param {Object.<string,*>} message Plain object to verify
+                         * @param {number} [long] Current recursion depth (defaults to 0); exceeding the recursion limit fails verification
+                         * @returns {string|null} `null` if valid, otherwise the reason why it is not
+                         */
+                        ProtoRows.verify = function verify(message, long) {
+                            if (typeof message !== "object" || message === null)
+                                return "object expected";
+                            if (long === undefined)
+                                long = 0;
+                            if (long > $util.recursionLimit)
+                                return "maximum nesting depth exceeded";
+                            if (message.serializedRows != null && message.hasOwnProperty("serializedRows")) {
+                                if (!Array.isArray(message.serializedRows))
+                                    return "serializedRows: array expected";
+                                for (var i = 0; i < message.serializedRows.length; ++i)
+                                    if (!(message.serializedRows[i] && typeof message.serializedRows[i].length === "number" || $util.isString(message.serializedRows[i])))
+                                        return "serializedRows: buffer[] expected";
+                            }
+                            return null;
+                        };
+
+                        /**
+                         * Creates a ProtoRows message from a plain object. Also converts values to their respective internal types.
+                         * @function fromObject
+                         * @memberof google.cloud.bigquery.storage.v1beta2.ProtoRows
+                         * @static
+                         * @param {Object.<string,*>} object Plain object
+                         * @param {number} [long] Current recursion depth (defaults to 0); exceeding the recursion limit throws
+                         * @returns {google.cloud.bigquery.storage.v1beta2.ProtoRows} ProtoRows
+                         */
+                        ProtoRows.fromObject = function fromObject(object, long) {
+                            if (object instanceof $root.google.cloud.bigquery.storage.v1beta2.ProtoRows)
+                                return object;
+                            if (long === undefined)
+                                long = 0;
+                            if (long > $util.recursionLimit)
+                                throw Error("maximum nesting depth exceeded");
+                            var message = new $root.google.cloud.bigquery.storage.v1beta2.ProtoRows();
+                            if (object.serializedRows) {
+                                if (!Array.isArray(object.serializedRows))
+                                    throw TypeError(".google.cloud.bigquery.storage.v1beta2.ProtoRows.serializedRows: array expected");
+                                message.serializedRows = [];
+                                for (var i = 0; i < object.serializedRows.length; ++i)
+                                    if (typeof object.serializedRows[i] === "string")
+                                        $util.base64.decode(object.serializedRows[i], message.serializedRows[i] = $util.newBuffer($util.base64.length(object.serializedRows[i])), 0);
+                                    else if (object.serializedRows[i].length >= 0)
+                                        message.serializedRows[i] = object.serializedRows[i];
+                            }
+                            return message;
+                        };
+
+                        /**
+                         * Creates a plain object from a ProtoRows message. Also converts values to other types if specified.
+                         * @function toObject
+                         * @memberof google.cloud.bigquery.storage.v1beta2.ProtoRows
+                         * @static
+                         * @param {google.cloud.bigquery.storage.v1beta2.ProtoRows} message ProtoRows
+                         * @param {$protobuf.IConversionOptions} [options] Conversion options
+                         * @returns {Object.<string,*>} Plain object
+                         */
+                        ProtoRows.toObject = function toObject(message, options) {
+                            if (!options)
+                                options = {};
+                            var object = {};
+                            if (options.arrays || options.defaults)
+                                object.serializedRows = [];
+                            if (message.serializedRows && message.serializedRows.length) {
+                                object.serializedRows = [];
+                                for (var j = 0; j < message.serializedRows.length; ++j)
+                                    object.serializedRows[j] = options.bytes === String ? $util.base64.encode(message.serializedRows[j], 0, message.serializedRows[j].length) : options.bytes === Array ? Array.prototype.slice.call(message.serializedRows[j]) : message.serializedRows[j];
+                            }
+                            return object;
+                        };
+
+                        /**
+                         * Converts this ProtoRows to JSON.
+                         * @function toJSON
+                         * @memberof google.cloud.bigquery.storage.v1beta2.ProtoRows
+                         * @instance
+                         * @returns {Object.<string,*>} JSON object
+                         */
+                        ProtoRows.prototype.toJSON = function toJSON() {
+                            return this.constructor.toObject(this, $protobuf.util.toJSONOptions);
+                        };
+
+                        /**
+                         * Gets the default type url for ProtoRows
+                         * @function getTypeUrl
+                         * @memberof google.cloud.bigquery.storage.v1beta2.ProtoRows
+                         * @static
+                         * @param {string} [typeUrlPrefix] your custom typeUrlPrefix (default "type.googleapis.com")
+                         * @returns {string} The default type url
+                         */
+                        ProtoRows.getTypeUrl = function getTypeUrl(typeUrlPrefix) {
+                            if (typeUrlPrefix === undefined) {
+                                typeUrlPrefix = "type.googleapis.com";
+                            }
+                            return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta2.ProtoRows";
+                        };
+
+                        return ProtoRows;
+                    })();
+
+ v1beta2.BigQueryRead = (function() {
+
+                        /**
+                         * Constructs a new BigQueryRead service.
+                         * @memberof google.cloud.bigquery.storage.v1beta2
+                         * @classdesc Represents a BigQueryRead
+                         * @extends $protobuf.rpc.Service
+                         * @constructor
+                         * @param {$protobuf.RPCImpl} rpcImpl RPC implementation
+                         * @param {boolean} [requestDelimited=false] Whether requests are length-delimited
+                         * @param {boolean} [responseDelimited=false] Whether responses are length-delimited
+                         */
+                        function BigQueryRead(rpcImpl, requestDelimited, responseDelimited) {
+                            $protobuf.rpc.Service.call(this, rpcImpl, requestDelimited, responseDelimited);
+                        }
+
+                        // Inherit from $protobuf.rpc.Service and restore the constructor reference on the new prototype.
+                        (BigQueryRead.prototype = Object.create($protobuf.rpc.Service.prototype)).constructor = BigQueryRead;
+
+                        /**
+                         * Creates new BigQueryRead service using the specified rpc implementation.
+                         * @function create
+                         * @memberof google.cloud.bigquery.storage.v1beta2.BigQueryRead
+                         * @static
+                         * @param {$protobuf.RPCImpl} rpcImpl RPC implementation
+                         * @param {boolean} [requestDelimited=false] Whether requests are length-delimited
+                         * @param {boolean} [responseDelimited=false] Whether responses are length-delimited
+                         * @returns {BigQueryRead} RPC service. Useful where requests and/or responses are streamed.
+                         */
+                        BigQueryRead.create = function create(rpcImpl, requestDelimited, responseDelimited) {
+                            // "new this" (not "new BigQueryRead") so subclasses create instances of themselves.
+                            return new this(rpcImpl, requestDelimited, responseDelimited);
+                        };
+
+                        /**
+                         * Callback as used by {@link google.cloud.bigquery.storage.v1beta2.BigQueryRead|createReadSession}.
+                         * @memberof google.cloud.bigquery.storage.v1beta2.BigQueryRead
+                         * @typedef CreateReadSessionCallback
+                         * @type {function}
+                         * @param {Error|null} error Error, if any
+                         * @param {google.cloud.bigquery.storage.v1beta2.ReadSession} [response] ReadSession
+                         */
+
+                        /**
+                         * Calls CreateReadSession.
+                         * @function createReadSession
+                         * @memberof google.cloud.bigquery.storage.v1beta2.BigQueryRead
+                         * @instance
+                         * @param {google.cloud.bigquery.storage.v1beta2.ICreateReadSessionRequest} request CreateReadSessionRequest message or plain object
+                         * @param {google.cloud.bigquery.storage.v1beta2.BigQueryRead.CreateReadSessionCallback} callback Node-style callback called with the error, if any, and ReadSession
+                         * @returns {undefined}
+                         * @variation 1
+                         */
+                        Object.defineProperty(BigQueryRead.prototype.createReadSession = function createReadSession(request, callback) {
+                            // Passes the named function expression itself as the method; defineProperty sets its "name" to the proto RPC name.
+                            return this.rpcCall(createReadSession, $root.google.cloud.bigquery.storage.v1beta2.CreateReadSessionRequest, $root.google.cloud.bigquery.storage.v1beta2.ReadSession, request, callback);
+                        }, "name", { value: "CreateReadSession" });
+
+                        /**
+                         * Calls CreateReadSession.
+                         * @function createReadSession
+                         * @memberof google.cloud.bigquery.storage.v1beta2.BigQueryRead
+                         * @instance
+                         * @param {google.cloud.bigquery.storage.v1beta2.ICreateReadSessionRequest} request CreateReadSessionRequest message or plain object
+                         * @returns {Promise} Promise
+                         * @variation 2
+                         */
+
+ /**
+ * Callback as used by {@link google.cloud.bigquery.storage.v1beta2.BigQueryRead|readRows}.
+ * @memberof google.cloud.bigquery.storage.v1beta2.BigQueryRead
+ * @typedef ReadRowsCallback
+ * @type {function}
+ * @param {Error|null} error Error, if any
+ * @param {google.cloud.bigquery.storage.v1beta2.ReadRowsResponse} [response] ReadRowsResponse
+ */
+
+ /**
+ * Calls ReadRows.
+ * @function readRows
+ * @memberof google.cloud.bigquery.storage.v1beta2.BigQueryRead
+ * @instance
+ * @param {google.cloud.bigquery.storage.v1beta2.IReadRowsRequest} request ReadRowsRequest message or plain object
+ * @param {google.cloud.bigquery.storage.v1beta2.BigQueryRead.ReadRowsCallback} callback Node-style callback called with the error, if any, and ReadRowsResponse
+ * @returns {undefined}
+ * @variation 1
+ */
+ Object.defineProperty(BigQueryRead.prototype.readRows = function readRows(request, callback) {
+ return this.rpcCall(readRows, $root.google.cloud.bigquery.storage.v1beta2.ReadRowsRequest, $root.google.cloud.bigquery.storage.v1beta2.ReadRowsResponse, request, callback);
+ }, "name", { value: "ReadRows" });
+
+ /**
+ * Calls ReadRows.
+ * @function readRows
+ * @memberof google.cloud.bigquery.storage.v1beta2.BigQueryRead
+ * @instance
+ * @param {google.cloud.bigquery.storage.v1beta2.IReadRowsRequest} request ReadRowsRequest message or plain object
+ * @returns {Promise} Promise
+ * @variation 2
+ */
+
+ /**
+ * Callback as used by {@link google.cloud.bigquery.storage.v1beta2.BigQueryRead|splitReadStream}.
+ * @memberof google.cloud.bigquery.storage.v1beta2.BigQueryRead
+ * @typedef SplitReadStreamCallback
+ * @type {function}
+ * @param {Error|null} error Error, if any
+ * @param {google.cloud.bigquery.storage.v1beta2.SplitReadStreamResponse} [response] SplitReadStreamResponse
+ */
+
+ /**
+ * Calls SplitReadStream.
+ * @function splitReadStream
+ * @memberof google.cloud.bigquery.storage.v1beta2.BigQueryRead
+ * @instance
+ * @param {google.cloud.bigquery.storage.v1beta2.ISplitReadStreamRequest} request SplitReadStreamRequest message or plain object
+ * @param {google.cloud.bigquery.storage.v1beta2.BigQueryRead.SplitReadStreamCallback} callback Node-style callback called with the error, if any, and SplitReadStreamResponse
+ * @returns {undefined}
+ * @variation 1
+ */
+ Object.defineProperty(BigQueryRead.prototype.splitReadStream = function splitReadStream(request, callback) {
+ return this.rpcCall(splitReadStream, $root.google.cloud.bigquery.storage.v1beta2.SplitReadStreamRequest, $root.google.cloud.bigquery.storage.v1beta2.SplitReadStreamResponse, request, callback);
+ }, "name", { value: "SplitReadStream" });
+
+ /**
+ * Calls SplitReadStream.
+ * @function splitReadStream
+ * @memberof google.cloud.bigquery.storage.v1beta2.BigQueryRead
+ * @instance
+ * @param {google.cloud.bigquery.storage.v1beta2.ISplitReadStreamRequest} request SplitReadStreamRequest message or plain object
+ * @returns {Promise} Promise
+ * @variation 2
+ */
+
+ return BigQueryRead;
+ })();
+
+ v1beta2.BigQueryWrite = (function() {
+
+ /**
+ * Constructs a new BigQueryWrite service.
+ * @memberof google.cloud.bigquery.storage.v1beta2
+ * @classdesc Represents a BigQueryWrite
+ * @extends $protobuf.rpc.Service
+ * @constructor
+ * @param {$protobuf.RPCImpl} rpcImpl RPC implementation
+ * @param {boolean} [requestDelimited=false] Whether requests are length-delimited
+ * @param {boolean} [responseDelimited=false] Whether responses are length-delimited
+ */
+ function BigQueryWrite(rpcImpl, requestDelimited, responseDelimited) {
+ $protobuf.rpc.Service.call(this, rpcImpl, requestDelimited, responseDelimited);
+ }
+
+ (BigQueryWrite.prototype = Object.create($protobuf.rpc.Service.prototype)).constructor = BigQueryWrite;
+
+ /**
+ * Creates new BigQueryWrite service using the specified rpc implementation.
+ * @function create
+ * @memberof google.cloud.bigquery.storage.v1beta2.BigQueryWrite
+ * @static
+ * @param {$protobuf.RPCImpl} rpcImpl RPC implementation
+ * @param {boolean} [requestDelimited=false] Whether requests are length-delimited
+ * @param {boolean} [responseDelimited=false] Whether responses are length-delimited
+ * @returns {BigQueryWrite} RPC service. Useful where requests and/or responses are streamed.
+ */
+ BigQueryWrite.create = function create(rpcImpl, requestDelimited, responseDelimited) {
+ return new this(rpcImpl, requestDelimited, responseDelimited);
+ };
+
+ /**
+ * Callback as used by {@link google.cloud.bigquery.storage.v1beta2.BigQueryWrite|createWriteStream}.
+ * @memberof google.cloud.bigquery.storage.v1beta2.BigQueryWrite
+ * @typedef CreateWriteStreamCallback
+ * @type {function}
+ * @param {Error|null} error Error, if any
+ * @param {google.cloud.bigquery.storage.v1beta2.WriteStream} [response] WriteStream
+ */
+
+ /**
+ * Calls CreateWriteStream.
+ * @function createWriteStream
+ * @memberof google.cloud.bigquery.storage.v1beta2.BigQueryWrite
+ * @instance
+ * @param {google.cloud.bigquery.storage.v1beta2.ICreateWriteStreamRequest} request CreateWriteStreamRequest message or plain object
+ * @param {google.cloud.bigquery.storage.v1beta2.BigQueryWrite.CreateWriteStreamCallback} callback Node-style callback called with the error, if any, and WriteStream
+ * @returns {undefined}
+ * @variation 1
+ */
+ Object.defineProperty(BigQueryWrite.prototype.createWriteStream = function createWriteStream(request, callback) {
+ return this.rpcCall(createWriteStream, $root.google.cloud.bigquery.storage.v1beta2.CreateWriteStreamRequest, $root.google.cloud.bigquery.storage.v1beta2.WriteStream, request, callback);
+ }, "name", { value: "CreateWriteStream" });
+
+ /**
+ * Calls CreateWriteStream.
+ * @function createWriteStream
+ * @memberof google.cloud.bigquery.storage.v1beta2.BigQueryWrite
+ * @instance
+ * @param {google.cloud.bigquery.storage.v1beta2.ICreateWriteStreamRequest} request CreateWriteStreamRequest message or plain object
+ * @returns {Promise} Promise
+ * @variation 2
+ */
+
+ /**
+ * Callback as used by {@link google.cloud.bigquery.storage.v1beta2.BigQueryWrite|appendRows}.
+ * @memberof google.cloud.bigquery.storage.v1beta2.BigQueryWrite
+ * @typedef AppendRowsCallback
+ * @type {function}
+ * @param {Error|null} error Error, if any
+ * @param {google.cloud.bigquery.storage.v1beta2.AppendRowsResponse} [response] AppendRowsResponse
+ */
+
+ /**
+ * Calls AppendRows.
+ * @function appendRows
+ * @memberof google.cloud.bigquery.storage.v1beta2.BigQueryWrite
+ * @instance
+ * @param {google.cloud.bigquery.storage.v1beta2.IAppendRowsRequest} request AppendRowsRequest message or plain object
+ * @param {google.cloud.bigquery.storage.v1beta2.BigQueryWrite.AppendRowsCallback} callback Node-style callback called with the error, if any, and AppendRowsResponse
+ * @returns {undefined}
+ * @variation 1
+ */
+ Object.defineProperty(BigQueryWrite.prototype.appendRows = function appendRows(request, callback) {
+ return this.rpcCall(appendRows, $root.google.cloud.bigquery.storage.v1beta2.AppendRowsRequest, $root.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse, request, callback);
+ }, "name", { value: "AppendRows" });
+
+ /**
+ * Calls AppendRows.
+ * @function appendRows
+ * @memberof google.cloud.bigquery.storage.v1beta2.BigQueryWrite
+ * @instance
+ * @param {google.cloud.bigquery.storage.v1beta2.IAppendRowsRequest} request AppendRowsRequest message or plain object
+ * @returns {Promise} Promise
+ * @variation 2
+ */
+
+ /**
+ * Callback as used by {@link google.cloud.bigquery.storage.v1beta2.BigQueryWrite|getWriteStream}.
+ * @memberof google.cloud.bigquery.storage.v1beta2.BigQueryWrite
+ * @typedef GetWriteStreamCallback
+ * @type {function}
+ * @param {Error|null} error Error, if any
+ * @param {google.cloud.bigquery.storage.v1beta2.WriteStream} [response] WriteStream
+ */
+
+ /**
+ * Calls GetWriteStream.
+ * @function getWriteStream
+ * @memberof google.cloud.bigquery.storage.v1beta2.BigQueryWrite
+ * @instance
+ * @param {google.cloud.bigquery.storage.v1beta2.IGetWriteStreamRequest} request GetWriteStreamRequest message or plain object
+ * @param {google.cloud.bigquery.storage.v1beta2.BigQueryWrite.GetWriteStreamCallback} callback Node-style callback called with the error, if any, and WriteStream
+ * @returns {undefined}
+ * @variation 1
+ */
+ Object.defineProperty(BigQueryWrite.prototype.getWriteStream = function getWriteStream(request, callback) {
+ return this.rpcCall(getWriteStream, $root.google.cloud.bigquery.storage.v1beta2.GetWriteStreamRequest, $root.google.cloud.bigquery.storage.v1beta2.WriteStream, request, callback);
+ }, "name", { value: "GetWriteStream" });
+
+ /**
+ * Calls GetWriteStream.
+ * @function getWriteStream
+ * @memberof google.cloud.bigquery.storage.v1beta2.BigQueryWrite
+ * @instance
+ * @param {google.cloud.bigquery.storage.v1beta2.IGetWriteStreamRequest} request GetWriteStreamRequest message or plain object
+ * @returns {Promise} Promise
+ * @variation 2
+ */
+
+ /**
+ * Callback as used by {@link google.cloud.bigquery.storage.v1beta2.BigQueryWrite|finalizeWriteStream}.
+ * @memberof google.cloud.bigquery.storage.v1beta2.BigQueryWrite
+ * @typedef FinalizeWriteStreamCallback
+ * @type {function}
+ * @param {Error|null} error Error, if any
+ * @param {google.cloud.bigquery.storage.v1beta2.FinalizeWriteStreamResponse} [response] FinalizeWriteStreamResponse
+ */
+
+ /**
+ * Calls FinalizeWriteStream.
+ * @function finalizeWriteStream
+ * @memberof google.cloud.bigquery.storage.v1beta2.BigQueryWrite
+ * @instance
+ * @param {google.cloud.bigquery.storage.v1beta2.IFinalizeWriteStreamRequest} request FinalizeWriteStreamRequest message or plain object
+ * @param {google.cloud.bigquery.storage.v1beta2.BigQueryWrite.FinalizeWriteStreamCallback} callback Node-style callback called with the error, if any, and FinalizeWriteStreamResponse
+ * @returns {undefined}
+ * @variation 1
+ */
+ Object.defineProperty(BigQueryWrite.prototype.finalizeWriteStream = function finalizeWriteStream(request, callback) {
+ return this.rpcCall(finalizeWriteStream, $root.google.cloud.bigquery.storage.v1beta2.FinalizeWriteStreamRequest, $root.google.cloud.bigquery.storage.v1beta2.FinalizeWriteStreamResponse, request, callback);
+ }, "name", { value: "FinalizeWriteStream" });
+
+ /**
+ * Calls FinalizeWriteStream.
+ * @function finalizeWriteStream
+ * @memberof google.cloud.bigquery.storage.v1beta2.BigQueryWrite
+ * @instance
+ * @param {google.cloud.bigquery.storage.v1beta2.IFinalizeWriteStreamRequest} request FinalizeWriteStreamRequest message or plain object
+ * @returns {Promise} Promise
+ * @variation 2
+ */
+
+ /**
+ * Callback as used by {@link google.cloud.bigquery.storage.v1beta2.BigQueryWrite|batchCommitWriteStreams}.
+ * @memberof google.cloud.bigquery.storage.v1beta2.BigQueryWrite
+ * @typedef BatchCommitWriteStreamsCallback
+ * @type {function}
+ * @param {Error|null} error Error, if any
+ * @param {google.cloud.bigquery.storage.v1beta2.BatchCommitWriteStreamsResponse} [response] BatchCommitWriteStreamsResponse
+ */
+
+ /**
+ * Calls BatchCommitWriteStreams.
+ * @function batchCommitWriteStreams
+ * @memberof google.cloud.bigquery.storage.v1beta2.BigQueryWrite
+ * @instance
+ * @param {google.cloud.bigquery.storage.v1beta2.IBatchCommitWriteStreamsRequest} request BatchCommitWriteStreamsRequest message or plain object
+ * @param {google.cloud.bigquery.storage.v1beta2.BigQueryWrite.BatchCommitWriteStreamsCallback} callback Node-style callback called with the error, if any, and BatchCommitWriteStreamsResponse
+ * @returns {undefined}
+ * @variation 1
+ */
+ Object.defineProperty(BigQueryWrite.prototype.batchCommitWriteStreams = function batchCommitWriteStreams(request, callback) {
+ return this.rpcCall(batchCommitWriteStreams, $root.google.cloud.bigquery.storage.v1beta2.BatchCommitWriteStreamsRequest, $root.google.cloud.bigquery.storage.v1beta2.BatchCommitWriteStreamsResponse, request, callback);
+ }, "name", { value: "BatchCommitWriteStreams" });
+
+ /**
+ * Calls BatchCommitWriteStreams.
+ * @function batchCommitWriteStreams
+ * @memberof google.cloud.bigquery.storage.v1beta2.BigQueryWrite
+ * @instance
+ * @param {google.cloud.bigquery.storage.v1beta2.IBatchCommitWriteStreamsRequest} request BatchCommitWriteStreamsRequest message or plain object
+ * @returns {Promise} Promise
+ * @variation 2
+ */
+
+ /**
+ * Callback as used by {@link google.cloud.bigquery.storage.v1beta2.BigQueryWrite|flushRows}.
+ * @memberof google.cloud.bigquery.storage.v1beta2.BigQueryWrite
+ * @typedef FlushRowsCallback
+ * @type {function}
+ * @param {Error|null} error Error, if any
+ * @param {google.cloud.bigquery.storage.v1beta2.FlushRowsResponse} [response] FlushRowsResponse
+ */
+
+ /**
+ * Calls FlushRows.
+ * @function flushRows
+ * @memberof google.cloud.bigquery.storage.v1beta2.BigQueryWrite
+ * @instance
+ * @param {google.cloud.bigquery.storage.v1beta2.IFlushRowsRequest} request FlushRowsRequest message or plain object
+ * @param {google.cloud.bigquery.storage.v1beta2.BigQueryWrite.FlushRowsCallback} callback Node-style callback called with the error, if any, and FlushRowsResponse
+ * @returns {undefined}
+ * @variation 1
+ */
+ Object.defineProperty(BigQueryWrite.prototype.flushRows = function flushRows(request, callback) {
+ return this.rpcCall(flushRows, $root.google.cloud.bigquery.storage.v1beta2.FlushRowsRequest, $root.google.cloud.bigquery.storage.v1beta2.FlushRowsResponse, request, callback);
+ }, "name", { value: "FlushRows" });
+
+ /**
+ * Calls FlushRows.
+ * @function flushRows
+ * @memberof google.cloud.bigquery.storage.v1beta2.BigQueryWrite
+ * @instance
+ * @param {google.cloud.bigquery.storage.v1beta2.IFlushRowsRequest} request FlushRowsRequest message or plain object
+ * @returns {Promise} Promise
+ * @variation 2
+ */
+
+ return BigQueryWrite;
+ })();
+
+ v1beta2.CreateReadSessionRequest = (function() {
+
+ /**
+ * Properties of a CreateReadSessionRequest.
+ * @memberof google.cloud.bigquery.storage.v1beta2
+ * @interface ICreateReadSessionRequest
+ * @property {string|null} [parent] CreateReadSessionRequest parent
+ * @property {google.cloud.bigquery.storage.v1beta2.IReadSession|null} [readSession] CreateReadSessionRequest readSession
+ * @property {number|null} [maxStreamCount] CreateReadSessionRequest maxStreamCount
+ */
+
+ /**
+ * Constructs a new CreateReadSessionRequest.
+ * @memberof google.cloud.bigquery.storage.v1beta2
+ * @classdesc Represents a CreateReadSessionRequest.
+ * @implements ICreateReadSessionRequest
+ * @constructor
+ * @param {google.cloud.bigquery.storage.v1beta2.ICreateReadSessionRequest=} [properties] Properties to set
+ */
+ function CreateReadSessionRequest(properties) {
+ if (properties)
+ for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i)
+ if (properties[keys[i]] != null && keys[i] !== "__proto__")
+ this[keys[i]] = properties[keys[i]];
+ }
+
+ /**
+ * CreateReadSessionRequest parent.
+ * @member {string} parent
+ * @memberof google.cloud.bigquery.storage.v1beta2.CreateReadSessionRequest
+ * @instance
+ */
+ CreateReadSessionRequest.prototype.parent = "";
+
+ /**
+ * CreateReadSessionRequest readSession.
+ * @member {google.cloud.bigquery.storage.v1beta2.IReadSession|null|undefined} readSession
+ * @memberof google.cloud.bigquery.storage.v1beta2.CreateReadSessionRequest
+ * @instance
+ */
+ CreateReadSessionRequest.prototype.readSession = null;
+
+ /**
+ * CreateReadSessionRequest maxStreamCount.
+ * @member {number} maxStreamCount
+ * @memberof google.cloud.bigquery.storage.v1beta2.CreateReadSessionRequest
+ * @instance
+ */
+ CreateReadSessionRequest.prototype.maxStreamCount = 0;
+
+ /**
+ * Creates a new CreateReadSessionRequest instance using the specified properties.
+ * @function create
+ * @memberof google.cloud.bigquery.storage.v1beta2.CreateReadSessionRequest
+ * @static
+ * @param {google.cloud.bigquery.storage.v1beta2.ICreateReadSessionRequest=} [properties] Properties to set
+ * @returns {google.cloud.bigquery.storage.v1beta2.CreateReadSessionRequest} CreateReadSessionRequest instance
+ */
+ CreateReadSessionRequest.create = function create(properties) {
+ return new CreateReadSessionRequest(properties);
+ };
+
+ /**
+ * Encodes the specified CreateReadSessionRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.CreateReadSessionRequest.verify|verify} messages.
+ * @function encode
+ * @memberof google.cloud.bigquery.storage.v1beta2.CreateReadSessionRequest
+ * @static
+ * @param {google.cloud.bigquery.storage.v1beta2.ICreateReadSessionRequest} message CreateReadSessionRequest message or plain object to encode
+ * @param {$protobuf.Writer} [writer] Writer to encode to
+ * @returns {$protobuf.Writer} Writer
+ */
+ CreateReadSessionRequest.encode = function encode(message, writer) {
+ if (!writer)
+ writer = $Writer.create();
+ if (message.parent != null && Object.hasOwnProperty.call(message, "parent"))
+ writer.uint32(/* id 1, wireType 2 =*/10).string(message.parent);
+ if (message.readSession != null && Object.hasOwnProperty.call(message, "readSession"))
+ $root.google.cloud.bigquery.storage.v1beta2.ReadSession.encode(message.readSession, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim();
+ if (message.maxStreamCount != null && Object.hasOwnProperty.call(message, "maxStreamCount"))
+ writer.uint32(/* id 3, wireType 0 =*/24).int32(message.maxStreamCount);
+ return writer;
+ };
+
+ /**
+ * Encodes the specified CreateReadSessionRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.CreateReadSessionRequest.verify|verify} messages.
+ * @function encodeDelimited
+ * @memberof google.cloud.bigquery.storage.v1beta2.CreateReadSessionRequest
+ * @static
+ * @param {google.cloud.bigquery.storage.v1beta2.ICreateReadSessionRequest} message CreateReadSessionRequest message or plain object to encode
+ * @param {$protobuf.Writer} [writer] Writer to encode to
+ * @returns {$protobuf.Writer} Writer
+ */
+ CreateReadSessionRequest.encodeDelimited = function encodeDelimited(message, writer) {
+ return this.encode(message, writer).ldelim();
+ };
+
+ /**
+ * Decodes a CreateReadSessionRequest message from the specified reader or buffer.
+ * @function decode
+ * @memberof google.cloud.bigquery.storage.v1beta2.CreateReadSessionRequest
+ * @static
+ * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from
+ * @param {number} [length] Message length if known beforehand
+ * @returns {google.cloud.bigquery.storage.v1beta2.CreateReadSessionRequest} CreateReadSessionRequest
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ CreateReadSessionRequest.decode = function decode(reader, length, error, long) {
+ if (!(reader instanceof $Reader))
+ reader = $Reader.create(reader);
+ if (long === undefined)
+ long = 0;
+ if (long > $Reader.recursionLimit)
+ throw Error("maximum nesting depth exceeded");
+ var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta2.CreateReadSessionRequest();
+ while (reader.pos < end) {
+ var tag = reader.uint32();
+ if (tag === error)
+ break;
+ switch (tag >>> 3) {
+ case 1: {
+ message.parent = reader.string();
+ break;
+ }
+ case 2: {
+ message.readSession = $root.google.cloud.bigquery.storage.v1beta2.ReadSession.decode(reader, reader.uint32(), undefined, long + 1);
+ break;
+ }
+ case 3: {
+ message.maxStreamCount = reader.int32();
+ break;
+ }
+ default:
+ reader.skipType(tag & 7, long);
+ break;
+ }
+ }
+ return message;
+ };
+
+ /**
+ * Decodes a CreateReadSessionRequest message from the specified reader or buffer, length delimited.
+ * @function decodeDelimited
+ * @memberof google.cloud.bigquery.storage.v1beta2.CreateReadSessionRequest
+ * @static
+ * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from
+ * @returns {google.cloud.bigquery.storage.v1beta2.CreateReadSessionRequest} CreateReadSessionRequest
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ CreateReadSessionRequest.decodeDelimited = function decodeDelimited(reader) {
+ if (!(reader instanceof $Reader))
+ reader = new $Reader(reader);
+ return this.decode(reader, reader.uint32());
+ };
+
+ /**
+ * Verifies a CreateReadSessionRequest message.
+ * @function verify
+ * @memberof google.cloud.bigquery.storage.v1beta2.CreateReadSessionRequest
+ * @static
+ * @param {Object.<string,*>} message Plain object to verify
+ * @returns {string|null} `null` if valid, otherwise the reason why it is not
+ */
+ CreateReadSessionRequest.verify = function verify(message, long) {
+ if (typeof message !== "object" || message === null)
+ return "object expected";
+ if (long === undefined)
+ long = 0;
+ if (long > $util.recursionLimit)
+ return "maximum nesting depth exceeded";
+ if (message.parent != null && message.hasOwnProperty("parent"))
+ if (!$util.isString(message.parent))
+ return "parent: string expected";
+ if (message.readSession != null && message.hasOwnProperty("readSession")) {
+ var error = $root.google.cloud.bigquery.storage.v1beta2.ReadSession.verify(message.readSession, long + 1);
+ if (error)
+ return "readSession." + error;
+ }
+ if (message.maxStreamCount != null && message.hasOwnProperty("maxStreamCount"))
+ if (!$util.isInteger(message.maxStreamCount))
+ return "maxStreamCount: integer expected";
+ return null;
+ };
+
+ /**
+ * Creates a CreateReadSessionRequest message from a plain object. Also converts values to their respective internal types.
+ * @function fromObject
+ * @memberof google.cloud.bigquery.storage.v1beta2.CreateReadSessionRequest
+ * @static
+ * @param {Object.<string,*>} object Plain object
+ * @returns {google.cloud.bigquery.storage.v1beta2.CreateReadSessionRequest} CreateReadSessionRequest
+ */
+ CreateReadSessionRequest.fromObject = function fromObject(object, long) {
+ if (object instanceof $root.google.cloud.bigquery.storage.v1beta2.CreateReadSessionRequest)
+ return object;
+ if (long === undefined)
+ long = 0;
+ if (long > $util.recursionLimit)
+ throw Error("maximum nesting depth exceeded");
+ var message = new $root.google.cloud.bigquery.storage.v1beta2.CreateReadSessionRequest();
+ if (object.parent != null)
+ message.parent = String(object.parent);
+ if (object.readSession != null) {
+ if (typeof object.readSession !== "object")
+ throw TypeError(".google.cloud.bigquery.storage.v1beta2.CreateReadSessionRequest.readSession: object expected");
+ message.readSession = $root.google.cloud.bigquery.storage.v1beta2.ReadSession.fromObject(object.readSession, long + 1);
+ }
+ if (object.maxStreamCount != null)
+ message.maxStreamCount = object.maxStreamCount | 0;
+ return message;
+ };
+
+ /**
+ * Creates a plain object from a CreateReadSessionRequest message. Also converts values to other types if specified.
+ * @function toObject
+ * @memberof google.cloud.bigquery.storage.v1beta2.CreateReadSessionRequest
+ * @static
+ * @param {google.cloud.bigquery.storage.v1beta2.CreateReadSessionRequest} message CreateReadSessionRequest
+ * @param {$protobuf.IConversionOptions} [options] Conversion options
+ * @returns {Object.<string,*>} Plain object
+ */
+ CreateReadSessionRequest.toObject = function toObject(message, options) {
+ if (!options)
+ options = {};
+ var object = {};
+ if (options.defaults) {
+ object.parent = "";
+ object.readSession = null;
+ object.maxStreamCount = 0;
+ }
+ if (message.parent != null && message.hasOwnProperty("parent"))
+ object.parent = message.parent;
+ if (message.readSession != null && message.hasOwnProperty("readSession"))
+ object.readSession = $root.google.cloud.bigquery.storage.v1beta2.ReadSession.toObject(message.readSession, options);
+ if (message.maxStreamCount != null && message.hasOwnProperty("maxStreamCount"))
+ object.maxStreamCount = message.maxStreamCount;
+ return object;
+ };
+
+ /**
+ * Converts this CreateReadSessionRequest to JSON.
+ * @function toJSON
+ * @memberof google.cloud.bigquery.storage.v1beta2.CreateReadSessionRequest
+ * @instance
+ * @returns {Object.<string,*>} JSON object
+ */
+ CreateReadSessionRequest.prototype.toJSON = function toJSON() {
+ return this.constructor.toObject(this, $protobuf.util.toJSONOptions);
+ };
+
+ /**
+ * Gets the default type url for CreateReadSessionRequest
+ * @function getTypeUrl
+ * @memberof google.cloud.bigquery.storage.v1beta2.CreateReadSessionRequest
+ * @static
+ * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com")
+ * @returns {string} The default type url
+ */
+ CreateReadSessionRequest.getTypeUrl = function getTypeUrl(typeUrlPrefix) {
+ if (typeUrlPrefix === undefined) {
+ typeUrlPrefix = "type.googleapis.com";
+ }
+ return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta2.CreateReadSessionRequest";
+ };
+
+ return CreateReadSessionRequest;
+ })();
+
+ v1beta2.ReadRowsRequest = (function() {
+
+ /**
+ * Properties of a ReadRowsRequest.
+ * @memberof google.cloud.bigquery.storage.v1beta2
+ * @interface IReadRowsRequest
+ * @property {string|null} [readStream] ReadRowsRequest readStream
+ * @property {number|Long|null} [offset] ReadRowsRequest offset
+ */
+
+ /**
+ * Constructs a new ReadRowsRequest.
+ * @memberof google.cloud.bigquery.storage.v1beta2
+ * @classdesc Represents a ReadRowsRequest.
+ * @implements IReadRowsRequest
+ * @constructor
+ * @param {google.cloud.bigquery.storage.v1beta2.IReadRowsRequest=} [properties] Properties to set
+ */
+ function ReadRowsRequest(properties) {
+ if (properties)
+ for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i)
+ if (properties[keys[i]] != null && keys[i] !== "__proto__")
+ this[keys[i]] = properties[keys[i]];
+ }
+
+ /**
+ * ReadRowsRequest readStream.
+ * @member {string} readStream
+ * @memberof google.cloud.bigquery.storage.v1beta2.ReadRowsRequest
+ * @instance
+ */
+ ReadRowsRequest.prototype.readStream = "";
+
+ /**
+ * ReadRowsRequest offset.
+ * @member {number|Long} offset
+ * @memberof google.cloud.bigquery.storage.v1beta2.ReadRowsRequest
+ * @instance
+ */
+ ReadRowsRequest.prototype.offset = $util.Long ? $util.Long.fromBits(0,0,false) : 0;
+
+ /**
+ * Creates a new ReadRowsRequest instance using the specified properties.
+ * @function create
+ * @memberof google.cloud.bigquery.storage.v1beta2.ReadRowsRequest
+ * @static
+ * @param {google.cloud.bigquery.storage.v1beta2.IReadRowsRequest=} [properties] Properties to set
+ * @returns {google.cloud.bigquery.storage.v1beta2.ReadRowsRequest} ReadRowsRequest instance
+ */
+ ReadRowsRequest.create = function create(properties) {
+ return new ReadRowsRequest(properties);
+ };
+
+ /**
+ * Encodes the specified ReadRowsRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.ReadRowsRequest.verify|verify} messages.
+ * @function encode
+ * @memberof google.cloud.bigquery.storage.v1beta2.ReadRowsRequest
+ * @static
+ * @param {google.cloud.bigquery.storage.v1beta2.IReadRowsRequest} message ReadRowsRequest message or plain object to encode
+ * @param {$protobuf.Writer} [writer] Writer to encode to
+ * @returns {$protobuf.Writer} Writer
+ */
+ ReadRowsRequest.encode = function encode(message, writer) {
+ if (!writer)
+ writer = $Writer.create();
+ if (message.readStream != null && Object.hasOwnProperty.call(message, "readStream"))
+ writer.uint32(/* id 1, wireType 2 =*/10).string(message.readStream);
+ if (message.offset != null && Object.hasOwnProperty.call(message, "offset"))
+ writer.uint32(/* id 2, wireType 0 =*/16).int64(message.offset);
+ return writer;
+ };
+
+ /**
+ * Encodes the specified ReadRowsRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.ReadRowsRequest.verify|verify} messages.
+ * @function encodeDelimited
+ * @memberof google.cloud.bigquery.storage.v1beta2.ReadRowsRequest
+ * @static
+ * @param {google.cloud.bigquery.storage.v1beta2.IReadRowsRequest} message ReadRowsRequest message or plain object to encode
+ * @param {$protobuf.Writer} [writer] Writer to encode to
+ * @returns {$protobuf.Writer} Writer
+ */
+ ReadRowsRequest.encodeDelimited = function encodeDelimited(message, writer) {
+ return this.encode(message, writer).ldelim();
+ };
+
+ /**
+ * Decodes a ReadRowsRequest message from the specified reader or buffer.
+ * @function decode
+ * @memberof google.cloud.bigquery.storage.v1beta2.ReadRowsRequest
+ * @static
+ * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from
+ * @param {number} [length] Message length if known beforehand
+ * @returns {google.cloud.bigquery.storage.v1beta2.ReadRowsRequest} ReadRowsRequest
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ ReadRowsRequest.decode = function decode(reader, length, error, long) {
+ if (!(reader instanceof $Reader))
+ reader = $Reader.create(reader);
+ if (long === undefined)
+ long = 0;
+ if (long > $Reader.recursionLimit)
+ throw Error("maximum nesting depth exceeded");
+ var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta2.ReadRowsRequest();
+ while (reader.pos < end) {
+ var tag = reader.uint32();
+ if (tag === error)
+ break;
+ switch (tag >>> 3) {
+ case 1: {
+ message.readStream = reader.string();
+ break;
+ }
+ case 2: {
+ message.offset = reader.int64();
+ break;
+ }
+ default:
+ reader.skipType(tag & 7, long);
+ break;
+ }
+ }
+ return message;
+ };
+
+ /**
+ * Decodes a ReadRowsRequest message from the specified reader or buffer, length delimited.
+ * @function decodeDelimited
+ * @memberof google.cloud.bigquery.storage.v1beta2.ReadRowsRequest
+ * @static
+ * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from
+ * @returns {google.cloud.bigquery.storage.v1beta2.ReadRowsRequest} ReadRowsRequest
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ ReadRowsRequest.decodeDelimited = function decodeDelimited(reader) {
+ if (!(reader instanceof $Reader))
+ reader = new $Reader(reader);
+ return this.decode(reader, reader.uint32());
+ };
+
+ /**
+ * Verifies a ReadRowsRequest message.
+ * @function verify
+ * @memberof google.cloud.bigquery.storage.v1beta2.ReadRowsRequest
+ * @static
+ * @param {Object.<string,*>} message Plain object to verify
+ * @returns {string|null} `null` if valid, otherwise the reason why it is not
+ */
+ ReadRowsRequest.verify = function verify(message, long) {
+ if (typeof message !== "object" || message === null)
+ return "object expected";
+ if (long === undefined)
+ long = 0;
+ if (long > $util.recursionLimit)
+ return "maximum nesting depth exceeded";
+ if (message.readStream != null && message.hasOwnProperty("readStream"))
+ if (!$util.isString(message.readStream))
+ return "readStream: string expected";
+ if (message.offset != null && message.hasOwnProperty("offset"))
+ if (!$util.isInteger(message.offset) && !(message.offset && $util.isInteger(message.offset.low) && $util.isInteger(message.offset.high)))
+ return "offset: integer|Long expected";
+ return null;
+ };
+
+ /**
+ * Creates a ReadRowsRequest message from a plain object. Also converts values to their respective internal types.
+ * @function fromObject
+ * @memberof google.cloud.bigquery.storage.v1beta2.ReadRowsRequest
+ * @static
+ * @param {Object.<string,*>} object Plain object
+ * @returns {google.cloud.bigquery.storage.v1beta2.ReadRowsRequest} ReadRowsRequest
+ */
+ ReadRowsRequest.fromObject = function fromObject(object, long) {
+ if (object instanceof $root.google.cloud.bigquery.storage.v1beta2.ReadRowsRequest)
+ return object;
+ if (long === undefined)
+ long = 0;
+ if (long > $util.recursionLimit)
+ throw Error("maximum nesting depth exceeded");
+ var message = new $root.google.cloud.bigquery.storage.v1beta2.ReadRowsRequest();
+ if (object.readStream != null)
+ message.readStream = String(object.readStream);
+ if (object.offset != null)
+ if ($util.Long)
+ (message.offset = $util.Long.fromValue(object.offset)).unsigned = false;
+ else if (typeof object.offset === "string")
+ message.offset = parseInt(object.offset, 10);
+ else if (typeof object.offset === "number")
+ message.offset = object.offset;
+ else if (typeof object.offset === "object")
+ message.offset = new $util.LongBits(object.offset.low >>> 0, object.offset.high >>> 0).toNumber();
+ return message;
+ };
+
+ /**
+ * Creates a plain object from a ReadRowsRequest message. Also converts values to other types if specified.
+ * @function toObject
+ * @memberof google.cloud.bigquery.storage.v1beta2.ReadRowsRequest
+ * @static
+ * @param {google.cloud.bigquery.storage.v1beta2.ReadRowsRequest} message ReadRowsRequest
+ * @param {$protobuf.IConversionOptions} [options] Conversion options
+ * @returns {Object.<string,*>} Plain object
+ */
+ ReadRowsRequest.toObject = function toObject(message, options) {
+ if (!options)
+ options = {};
+ var object = {};
+ if (options.defaults) {
+ object.readStream = "";
+ if ($util.Long) {
+ var long = new $util.Long(0, 0, false);
+ object.offset = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long;
+ } else
+ object.offset = options.longs === String ? "0" : 0;
+ }
+ if (message.readStream != null && message.hasOwnProperty("readStream"))
+ object.readStream = message.readStream;
+ if (message.offset != null && message.hasOwnProperty("offset"))
+ if (typeof message.offset === "number")
+ object.offset = options.longs === String ? String(message.offset) : message.offset;
+ else
+ object.offset = options.longs === String ? $util.Long.prototype.toString.call(message.offset) : options.longs === Number ? new $util.LongBits(message.offset.low >>> 0, message.offset.high >>> 0).toNumber() : message.offset;
+ return object;
+ };
+
+ /**
+ * Converts this ReadRowsRequest to JSON.
+ * @function toJSON
+ * @memberof google.cloud.bigquery.storage.v1beta2.ReadRowsRequest
+ * @instance
+ * @returns {Object.<string,*>} JSON object
+ */
+ ReadRowsRequest.prototype.toJSON = function toJSON() {
+ return this.constructor.toObject(this, $protobuf.util.toJSONOptions);
+ };
+
+ /**
+ * Gets the default type url for ReadRowsRequest
+ * @function getTypeUrl
+ * @memberof google.cloud.bigquery.storage.v1beta2.ReadRowsRequest
+ * @static
+ * @param {string} [typeUrlPrefix] your custom typeUrlPrefix (default "type.googleapis.com")
+ * @returns {string} The default type url
+ */
+ ReadRowsRequest.getTypeUrl = function getTypeUrl(typeUrlPrefix) {
+ if (typeUrlPrefix === undefined) {
+ typeUrlPrefix = "type.googleapis.com";
+ }
+ return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta2.ReadRowsRequest";
+ };
+
+ return ReadRowsRequest;
+ })();
+
+ v1beta2.ThrottleState = (function() {
+
+ /**
+ * Properties of a ThrottleState.
+ * @memberof google.cloud.bigquery.storage.v1beta2
+ * @interface IThrottleState
+ * @property {number|null} [throttlePercent] ThrottleState throttlePercent
+ */
+
+ /**
+ * Constructs a new ThrottleState.
+ * @memberof google.cloud.bigquery.storage.v1beta2
+ * @classdesc Represents a ThrottleState.
+ * @implements IThrottleState
+ * @constructor
+ * @param {google.cloud.bigquery.storage.v1beta2.IThrottleState=} [properties] Properties to set
+ */
+ function ThrottleState(properties) {
+ if (properties)
+ for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i)
+ if (properties[keys[i]] != null && keys[i] !== "__proto__")
+ this[keys[i]] = properties[keys[i]];
+ }
+
+ /**
+ * ThrottleState throttlePercent.
+ * @member {number} throttlePercent
+ * @memberof google.cloud.bigquery.storage.v1beta2.ThrottleState
+ * @instance
+ */
+ ThrottleState.prototype.throttlePercent = 0;
+
+ /**
+ * Creates a new ThrottleState instance using the specified properties.
+ * @function create
+ * @memberof google.cloud.bigquery.storage.v1beta2.ThrottleState
+ * @static
+ * @param {google.cloud.bigquery.storage.v1beta2.IThrottleState=} [properties] Properties to set
+ * @returns {google.cloud.bigquery.storage.v1beta2.ThrottleState} ThrottleState instance
+ */
+ ThrottleState.create = function create(properties) {
+ return new ThrottleState(properties);
+ };
+
+ /**
+ * Encodes the specified ThrottleState message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.ThrottleState.verify|verify} messages.
+ * @function encode
+ * @memberof google.cloud.bigquery.storage.v1beta2.ThrottleState
+ * @static
+ * @param {google.cloud.bigquery.storage.v1beta2.IThrottleState} message ThrottleState message or plain object to encode
+ * @param {$protobuf.Writer} [writer] Writer to encode to
+ * @returns {$protobuf.Writer} Writer
+ */
+ ThrottleState.encode = function encode(message, writer) {
+ if (!writer)
+ writer = $Writer.create();
+ if (message.throttlePercent != null && Object.hasOwnProperty.call(message, "throttlePercent"))
+ writer.uint32(/* id 1, wireType 0 =*/8).int32(message.throttlePercent);
+ return writer;
+ };
+
+ /**
+ * Encodes the specified ThrottleState message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.ThrottleState.verify|verify} messages.
+ * @function encodeDelimited
+ * @memberof google.cloud.bigquery.storage.v1beta2.ThrottleState
+ * @static
+ * @param {google.cloud.bigquery.storage.v1beta2.IThrottleState} message ThrottleState message or plain object to encode
+ * @param {$protobuf.Writer} [writer] Writer to encode to
+ * @returns {$protobuf.Writer} Writer
+ */
+ ThrottleState.encodeDelimited = function encodeDelimited(message, writer) {
+ return this.encode(message, writer).ldelim();
+ };
+
+ /**
+ * Decodes a ThrottleState message from the specified reader or buffer.
+ * @function decode
+ * @memberof google.cloud.bigquery.storage.v1beta2.ThrottleState
+ * @static
+ * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from
+ * @param {number} [length] Message length if known beforehand
+ * @returns {google.cloud.bigquery.storage.v1beta2.ThrottleState} ThrottleState
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ ThrottleState.decode = function decode(reader, length, error, long) {
+ if (!(reader instanceof $Reader))
+ reader = $Reader.create(reader);
+ if (long === undefined)
+ long = 0;
+ if (long > $Reader.recursionLimit) // NOTE(review): decode() checks $Reader.recursionLimit but verify()/fromObject() use $util.recursionLimit — confirm both are defined on this protobufjs build
+ throw Error("maximum nesting depth exceeded");
+ var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta2.ThrottleState();
+ while (reader.pos < end) {
+ var tag = reader.uint32();
+ if (tag === error)
+ break;
+ switch (tag >>> 3) {
+ case 1: {
+ message.throttlePercent = reader.int32();
+ break;
+ }
+ default:
+ reader.skipType(tag & 7, long);
+ break;
+ }
+ }
+ return message;
+ };
+
+ /**
+ * Decodes a ThrottleState message from the specified reader or buffer, length delimited.
+ * @function decodeDelimited
+ * @memberof google.cloud.bigquery.storage.v1beta2.ThrottleState
+ * @static
+ * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from
+ * @returns {google.cloud.bigquery.storage.v1beta2.ThrottleState} ThrottleState
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ ThrottleState.decodeDelimited = function decodeDelimited(reader) {
+ if (!(reader instanceof $Reader))
+ reader = new $Reader(reader);
+ return this.decode(reader, reader.uint32());
+ };
+
+ /**
+ * Verifies a ThrottleState message.
+ * @function verify
+ * @memberof google.cloud.bigquery.storage.v1beta2.ThrottleState
+ * @static
+ * @param {Object.<string,*>} message Plain object to verify
+ * @returns {string|null} `null` if valid, otherwise the reason why it is not
+ */
+ ThrottleState.verify = function verify(message, long) {
+ if (typeof message !== "object" || message === null)
+ return "object expected";
+ if (long === undefined)
+ long = 0;
+ if (long > $util.recursionLimit)
+ return "maximum nesting depth exceeded";
+ if (message.throttlePercent != null && message.hasOwnProperty("throttlePercent"))
+ if (!$util.isInteger(message.throttlePercent))
+ return "throttlePercent: integer expected";
+ return null;
+ };
+
+ /**
+ * Creates a ThrottleState message from a plain object. Also converts values to their respective internal types.
+ * @function fromObject
+ * @memberof google.cloud.bigquery.storage.v1beta2.ThrottleState
+ * @static
+ * @param {Object.<string,*>} object Plain object
+ * @returns {google.cloud.bigquery.storage.v1beta2.ThrottleState} ThrottleState
+ */
+ ThrottleState.fromObject = function fromObject(object, long) {
+ if (object instanceof $root.google.cloud.bigquery.storage.v1beta2.ThrottleState)
+ return object;
+ if (long === undefined)
+ long = 0;
+ if (long > $util.recursionLimit)
+ throw Error("maximum nesting depth exceeded");
+ var message = new $root.google.cloud.bigquery.storage.v1beta2.ThrottleState();
+ if (object.throttlePercent != null)
+ message.throttlePercent = object.throttlePercent | 0;
+ return message;
+ };
+
+ /**
+ * Creates a plain object from a ThrottleState message. Also converts values to other types if specified.
+ * @function toObject
+ * @memberof google.cloud.bigquery.storage.v1beta2.ThrottleState
+ * @static
+ * @param {google.cloud.bigquery.storage.v1beta2.ThrottleState} message ThrottleState
+ * @param {$protobuf.IConversionOptions} [options] Conversion options
+ * @returns {Object.<string,*>} Plain object
+ */
+ ThrottleState.toObject = function toObject(message, options) {
+ if (!options)
+ options = {};
+ var object = {};
+ if (options.defaults)
+ object.throttlePercent = 0;
+ if (message.throttlePercent != null && message.hasOwnProperty("throttlePercent"))
+ object.throttlePercent = message.throttlePercent;
+ return object;
+ };
+
+ /**
+ * Converts this ThrottleState to JSON.
+ * @function toJSON
+ * @memberof google.cloud.bigquery.storage.v1beta2.ThrottleState
+ * @instance
+ * @returns {Object.<string,*>} JSON object
+ */
+ ThrottleState.prototype.toJSON = function toJSON() {
+ return this.constructor.toObject(this, $protobuf.util.toJSONOptions);
+ };
+
+ /**
+ * Gets the default type url for ThrottleState
+ * @function getTypeUrl
+ * @memberof google.cloud.bigquery.storage.v1beta2.ThrottleState
+ * @static
+ * @param {string} [typeUrlPrefix] your custom typeUrlPrefix (default "type.googleapis.com")
+ * @returns {string} The default type url
+ */
+ ThrottleState.getTypeUrl = function getTypeUrl(typeUrlPrefix) {
+ if (typeUrlPrefix === undefined) {
+ typeUrlPrefix = "type.googleapis.com";
+ }
+ return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta2.ThrottleState";
+ };
+
+ return ThrottleState;
+ })();
+
+ v1beta2.StreamStats = (function() {
+
+ /**
+ * Properties of a StreamStats.
+ * @memberof google.cloud.bigquery.storage.v1beta2
+ * @interface IStreamStats
+ * @property {google.cloud.bigquery.storage.v1beta2.StreamStats.IProgress|null} [progress] StreamStats progress
+ */
+
+ /**
+ * Constructs a new StreamStats.
+ * @memberof google.cloud.bigquery.storage.v1beta2
+ * @classdesc Represents a StreamStats.
+ * @implements IStreamStats
+ * @constructor
+ * @param {google.cloud.bigquery.storage.v1beta2.IStreamStats=} [properties] Properties to set
+ */
+ function StreamStats(properties) {
+ if (properties)
+ for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i)
+ if (properties[keys[i]] != null && keys[i] !== "__proto__")
+ this[keys[i]] = properties[keys[i]];
+ }
+
+ /**
+ * StreamStats progress.
+ * @member {google.cloud.bigquery.storage.v1beta2.StreamStats.IProgress|null|undefined} progress
+ * @memberof google.cloud.bigquery.storage.v1beta2.StreamStats
+ * @instance
+ */
+ StreamStats.prototype.progress = null;
+
+ /**
+ * Creates a new StreamStats instance using the specified properties.
+ * @function create
+ * @memberof google.cloud.bigquery.storage.v1beta2.StreamStats
+ * @static
+ * @param {google.cloud.bigquery.storage.v1beta2.IStreamStats=} [properties] Properties to set
+ * @returns {google.cloud.bigquery.storage.v1beta2.StreamStats} StreamStats instance
+ */
+ StreamStats.create = function create(properties) {
+ return new StreamStats(properties);
+ };
+
+ /**
+ * Encodes the specified StreamStats message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.StreamStats.verify|verify} messages.
+ * @function encode
+ * @memberof google.cloud.bigquery.storage.v1beta2.StreamStats
+ * @static
+ * @param {google.cloud.bigquery.storage.v1beta2.IStreamStats} message StreamStats message or plain object to encode
+ * @param {$protobuf.Writer} [writer] Writer to encode to
+ * @returns {$protobuf.Writer} Writer
+ */
+ StreamStats.encode = function encode(message, writer) {
+ if (!writer)
+ writer = $Writer.create();
+ if (message.progress != null && Object.hasOwnProperty.call(message, "progress"))
+ $root.google.cloud.bigquery.storage.v1beta2.StreamStats.Progress.encode(message.progress, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim();
+ return writer;
+ };
+
+ /**
+ * Encodes the specified StreamStats message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.StreamStats.verify|verify} messages.
+ * @function encodeDelimited
+ * @memberof google.cloud.bigquery.storage.v1beta2.StreamStats
+ * @static
+ * @param {google.cloud.bigquery.storage.v1beta2.IStreamStats} message StreamStats message or plain object to encode
+ * @param {$protobuf.Writer} [writer] Writer to encode to
+ * @returns {$protobuf.Writer} Writer
+ */
+ StreamStats.encodeDelimited = function encodeDelimited(message, writer) {
+ return this.encode(message, writer).ldelim();
+ };
+
+ /**
+ * Decodes a StreamStats message from the specified reader or buffer.
+ * @function decode
+ * @memberof google.cloud.bigquery.storage.v1beta2.StreamStats
+ * @static
+ * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from
+ * @param {number} [length] Message length if known beforehand
+ * @returns {google.cloud.bigquery.storage.v1beta2.StreamStats} StreamStats
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ StreamStats.decode = function decode(reader, length, error, long) {
+ if (!(reader instanceof $Reader))
+ reader = $Reader.create(reader);
+ if (long === undefined)
+ long = 0;
+ if (long > $Reader.recursionLimit)
+ throw Error("maximum nesting depth exceeded");
+ var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta2.StreamStats();
+ while (reader.pos < end) {
+ var tag = reader.uint32();
+ if (tag === error)
+ break;
+ switch (tag >>> 3) {
+ case 2: {
+ message.progress = $root.google.cloud.bigquery.storage.v1beta2.StreamStats.Progress.decode(reader, reader.uint32(), undefined, long + 1);
+ break;
+ }
+ default:
+ reader.skipType(tag & 7, long);
+ break;
+ }
+ }
+ return message;
+ };
+
+ /**
+ * Decodes a StreamStats message from the specified reader or buffer, length delimited.
+ * @function decodeDelimited
+ * @memberof google.cloud.bigquery.storage.v1beta2.StreamStats
+ * @static
+ * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from
+ * @returns {google.cloud.bigquery.storage.v1beta2.StreamStats} StreamStats
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ StreamStats.decodeDelimited = function decodeDelimited(reader) {
+ if (!(reader instanceof $Reader))
+ reader = new $Reader(reader);
+ return this.decode(reader, reader.uint32());
+ };
+
+ /**
+ * Verifies a StreamStats message.
+ * @function verify
+ * @memberof google.cloud.bigquery.storage.v1beta2.StreamStats
+ * @static
+ * @param {Object.<string,*>} message Plain object to verify
+ * @returns {string|null} `null` if valid, otherwise the reason why it is not
+ */
+ StreamStats.verify = function verify(message, long) {
+ if (typeof message !== "object" || message === null)
+ return "object expected";
+ if (long === undefined)
+ long = 0;
+ if (long > $util.recursionLimit)
+ return "maximum nesting depth exceeded";
+ if (message.progress != null && message.hasOwnProperty("progress")) {
+ var error = $root.google.cloud.bigquery.storage.v1beta2.StreamStats.Progress.verify(message.progress, long + 1);
+ if (error)
+ return "progress." + error;
+ }
+ return null;
+ };
+
+ /**
+ * Creates a StreamStats message from a plain object. Also converts values to their respective internal types.
+ * @function fromObject
+ * @memberof google.cloud.bigquery.storage.v1beta2.StreamStats
+ * @static
+ * @param {Object.<string,*>} object Plain object
+ * @returns {google.cloud.bigquery.storage.v1beta2.StreamStats} StreamStats
+ */
+ StreamStats.fromObject = function fromObject(object, long) {
+ if (object instanceof $root.google.cloud.bigquery.storage.v1beta2.StreamStats)
+ return object;
+ if (long === undefined)
+ long = 0;
+ if (long > $util.recursionLimit)
+ throw Error("maximum nesting depth exceeded");
+ var message = new $root.google.cloud.bigquery.storage.v1beta2.StreamStats();
+ if (object.progress != null) {
+ if (typeof object.progress !== "object")
+ throw TypeError(".google.cloud.bigquery.storage.v1beta2.StreamStats.progress: object expected");
+ message.progress = $root.google.cloud.bigquery.storage.v1beta2.StreamStats.Progress.fromObject(object.progress, long + 1);
+ }
+ return message;
+ };
+
+ /**
+ * Creates a plain object from a StreamStats message. Also converts values to other types if specified.
+ * @function toObject
+ * @memberof google.cloud.bigquery.storage.v1beta2.StreamStats
+ * @static
+ * @param {google.cloud.bigquery.storage.v1beta2.StreamStats} message StreamStats
+ * @param {$protobuf.IConversionOptions} [options] Conversion options
+ * @returns {Object.<string,*>} Plain object
+ */
+ StreamStats.toObject = function toObject(message, options) {
+ if (!options)
+ options = {};
+ var object = {};
+ if (options.defaults)
+ object.progress = null;
+ if (message.progress != null && message.hasOwnProperty("progress"))
+ object.progress = $root.google.cloud.bigquery.storage.v1beta2.StreamStats.Progress.toObject(message.progress, options);
+ return object;
+ };
+
+ /**
+ * Converts this StreamStats to JSON.
+ * @function toJSON
+ * @memberof google.cloud.bigquery.storage.v1beta2.StreamStats
+ * @instance
+ * @returns {Object.<string,*>} JSON object
+ */
+ StreamStats.prototype.toJSON = function toJSON() {
+ return this.constructor.toObject(this, $protobuf.util.toJSONOptions);
+ };
+
+ /**
+ * Gets the default type url for StreamStats
+ * @function getTypeUrl
+ * @memberof google.cloud.bigquery.storage.v1beta2.StreamStats
+ * @static
+ * @param {string} [typeUrlPrefix] your custom typeUrlPrefix (default "type.googleapis.com")
+ * @returns {string} The default type url
+ */
+ StreamStats.getTypeUrl = function getTypeUrl(typeUrlPrefix) {
+ if (typeUrlPrefix === undefined) {
+ typeUrlPrefix = "type.googleapis.com";
+ }
+ return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta2.StreamStats";
+ };
+
+ StreamStats.Progress = (function() {
+
+ /**
+ * Properties of a Progress.
+ * @memberof google.cloud.bigquery.storage.v1beta2.StreamStats
+ * @interface IProgress
+ * @property {number|null} [atResponseStart] Progress atResponseStart
+ * @property {number|null} [atResponseEnd] Progress atResponseEnd
+ */
+
+ /**
+ * Constructs a new Progress.
+ * @memberof google.cloud.bigquery.storage.v1beta2.StreamStats
+ * @classdesc Represents a Progress.
+ * @implements IProgress
+ * @constructor
+ * @param {google.cloud.bigquery.storage.v1beta2.StreamStats.IProgress=} [properties] Properties to set
+ */
+ function Progress(properties) {
+ if (properties)
+ for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i)
+ if (properties[keys[i]] != null && keys[i] !== "__proto__")
+ this[keys[i]] = properties[keys[i]];
+ }
+
+ /**
+ * Progress atResponseStart.
+ * @member {number} atResponseStart
+ * @memberof google.cloud.bigquery.storage.v1beta2.StreamStats.Progress
+ * @instance
+ */
+ Progress.prototype.atResponseStart = 0;
+
+ /**
+ * Progress atResponseEnd.
+ * @member {number} atResponseEnd
+ * @memberof google.cloud.bigquery.storage.v1beta2.StreamStats.Progress
+ * @instance
+ */
+ Progress.prototype.atResponseEnd = 0;
+
+ /**
+ * Creates a new Progress instance using the specified properties.
+ * @function create
+ * @memberof google.cloud.bigquery.storage.v1beta2.StreamStats.Progress
+ * @static
+ * @param {google.cloud.bigquery.storage.v1beta2.StreamStats.IProgress=} [properties] Properties to set
+ * @returns {google.cloud.bigquery.storage.v1beta2.StreamStats.Progress} Progress instance
+ */
+ Progress.create = function create(properties) {
+ return new Progress(properties);
+ };
+
+ /**
+ * Encodes the specified Progress message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.StreamStats.Progress.verify|verify} messages.
+ * @function encode
+ * @memberof google.cloud.bigquery.storage.v1beta2.StreamStats.Progress
+ * @static
+ * @param {google.cloud.bigquery.storage.v1beta2.StreamStats.IProgress} message Progress message or plain object to encode
+ * @param {$protobuf.Writer} [writer] Writer to encode to
+ * @returns {$protobuf.Writer} Writer
+ */
+ Progress.encode = function encode(message, writer) {
+ if (!writer)
+ writer = $Writer.create();
+ if (message.atResponseStart != null && Object.hasOwnProperty.call(message, "atResponseStart"))
+ writer.uint32(/* id 1, wireType 1 =*/9).double(message.atResponseStart);
+ if (message.atResponseEnd != null && Object.hasOwnProperty.call(message, "atResponseEnd"))
+ writer.uint32(/* id 2, wireType 1 =*/17).double(message.atResponseEnd);
+ return writer;
+ };
+
+ /**
+ * Encodes the specified Progress message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.StreamStats.Progress.verify|verify} messages.
+ * @function encodeDelimited
+ * @memberof google.cloud.bigquery.storage.v1beta2.StreamStats.Progress
+ * @static
+ * @param {google.cloud.bigquery.storage.v1beta2.StreamStats.IProgress} message Progress message or plain object to encode
+ * @param {$protobuf.Writer} [writer] Writer to encode to
+ * @returns {$protobuf.Writer} Writer
+ */
+ Progress.encodeDelimited = function encodeDelimited(message, writer) {
+ return this.encode(message, writer).ldelim();
+ };
+
+ /**
+ * Decodes a Progress message from the specified reader or buffer.
+ * @function decode
+ * @memberof google.cloud.bigquery.storage.v1beta2.StreamStats.Progress
+ * @static
+ * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from
+ * @param {number} [length] Message length if known beforehand
+ * @returns {google.cloud.bigquery.storage.v1beta2.StreamStats.Progress} Progress
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ Progress.decode = function decode(reader, length, error, long) {
+ if (!(reader instanceof $Reader))
+ reader = $Reader.create(reader);
+ if (long === undefined)
+ long = 0;
+ if (long > $Reader.recursionLimit)
+ throw Error("maximum nesting depth exceeded");
+ var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta2.StreamStats.Progress();
+ while (reader.pos < end) {
+ var tag = reader.uint32();
+ if (tag === error)
+ break;
+ switch (tag >>> 3) {
+ case 1: {
+ message.atResponseStart = reader.double();
+ break;
+ }
+ case 2: {
+ message.atResponseEnd = reader.double();
+ break;
+ }
+ default:
+ reader.skipType(tag & 7, long);
+ break;
+ }
+ }
+ return message;
+ };
+
+ /**
+ * Decodes a Progress message from the specified reader or buffer, length delimited.
+ * @function decodeDelimited
+ * @memberof google.cloud.bigquery.storage.v1beta2.StreamStats.Progress
+ * @static
+ * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from
+ * @returns {google.cloud.bigquery.storage.v1beta2.StreamStats.Progress} Progress
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ Progress.decodeDelimited = function decodeDelimited(reader) {
+ if (!(reader instanceof $Reader))
+ reader = new $Reader(reader);
+ return this.decode(reader, reader.uint32());
+ };
+
+ /**
+ * Verifies a Progress message.
+ * @function verify
+ * @memberof google.cloud.bigquery.storage.v1beta2.StreamStats.Progress
+ * @static
+ * @param {Object.<string,*>} message Plain object to verify
+ * @returns {string|null} `null` if valid, otherwise the reason why it is not
+ */
+ Progress.verify = function verify(message, long) {
+ if (typeof message !== "object" || message === null)
+ return "object expected";
+ if (long === undefined)
+ long = 0;
+ if (long > $util.recursionLimit)
+ return "maximum nesting depth exceeded";
+ if (message.atResponseStart != null && message.hasOwnProperty("atResponseStart"))
+ if (typeof message.atResponseStart !== "number")
+ return "atResponseStart: number expected";
+ if (message.atResponseEnd != null && message.hasOwnProperty("atResponseEnd"))
+ if (typeof message.atResponseEnd !== "number")
+ return "atResponseEnd: number expected";
+ return null;
+ };
+
+ /**
+ * Creates a Progress message from a plain object. Also converts values to their respective internal types.
+ * @function fromObject
+ * @memberof google.cloud.bigquery.storage.v1beta2.StreamStats.Progress
+ * @static
+ * @param {Object.<string,*>} object Plain object
+ * @returns {google.cloud.bigquery.storage.v1beta2.StreamStats.Progress} Progress
+ */
+ Progress.fromObject = function fromObject(object, long) {
+ if (object instanceof $root.google.cloud.bigquery.storage.v1beta2.StreamStats.Progress)
+ return object;
+ if (long === undefined)
+ long = 0;
+ if (long > $util.recursionLimit)
+ throw Error("maximum nesting depth exceeded");
+ var message = new $root.google.cloud.bigquery.storage.v1beta2.StreamStats.Progress();
+ if (object.atResponseStart != null)
+ message.atResponseStart = Number(object.atResponseStart);
+ if (object.atResponseEnd != null)
+ message.atResponseEnd = Number(object.atResponseEnd);
+ return message;
+ };
+
+ /**
+ * Creates a plain object from a Progress message. Also converts values to other types if specified.
+ * @function toObject
+ * @memberof google.cloud.bigquery.storage.v1beta2.StreamStats.Progress
+ * @static
+ * @param {google.cloud.bigquery.storage.v1beta2.StreamStats.Progress} message Progress
+ * @param {$protobuf.IConversionOptions} [options] Conversion options
+ * @returns {Object.<string,*>} Plain object
+ */
+ Progress.toObject = function toObject(message, options) {
+ if (!options)
+ options = {};
+ var object = {};
+ if (options.defaults) {
+ object.atResponseStart = 0;
+ object.atResponseEnd = 0;
+ }
+ if (message.atResponseStart != null && message.hasOwnProperty("atResponseStart"))
+ object.atResponseStart = options.json && !isFinite(message.atResponseStart) ? String(message.atResponseStart) : message.atResponseStart;
+ if (message.atResponseEnd != null && message.hasOwnProperty("atResponseEnd"))
+ object.atResponseEnd = options.json && !isFinite(message.atResponseEnd) ? String(message.atResponseEnd) : message.atResponseEnd;
+ return object;
+ };
+
+ /**
+ * Converts this Progress to JSON.
+ * @function toJSON
+ * @memberof google.cloud.bigquery.storage.v1beta2.StreamStats.Progress
+ * @instance
+ * @returns {Object.<string,*>} JSON object
+ */
+ Progress.prototype.toJSON = function toJSON() {
+ return this.constructor.toObject(this, $protobuf.util.toJSONOptions);
+ };
+
+ /**
+ * Gets the default type url for Progress
+ * @function getTypeUrl
+ * @memberof google.cloud.bigquery.storage.v1beta2.StreamStats.Progress
+ * @static
+ * @param {string} [typeUrlPrefix] your custom typeUrlPrefix (default "type.googleapis.com")
+ * @returns {string} The default type url
+ */
+ Progress.getTypeUrl = function getTypeUrl(typeUrlPrefix) {
+ if (typeUrlPrefix === undefined) {
+ typeUrlPrefix = "type.googleapis.com";
+ }
+ return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta2.StreamStats.Progress";
+ };
+
+ return Progress;
+ })();
+
+ return StreamStats;
+ })();
+
+ v1beta2.ReadRowsResponse = (function() {
+
+ /**
+ * Properties of a ReadRowsResponse.
+ * @memberof google.cloud.bigquery.storage.v1beta2
+ * @interface IReadRowsResponse
+ * @property {google.cloud.bigquery.storage.v1beta2.IAvroRows|null} [avroRows] ReadRowsResponse avroRows
+ * @property {google.cloud.bigquery.storage.v1beta2.IArrowRecordBatch|null} [arrowRecordBatch] ReadRowsResponse arrowRecordBatch
+ * @property {number|Long|null} [rowCount] ReadRowsResponse rowCount
+ * @property {google.cloud.bigquery.storage.v1beta2.IStreamStats|null} [stats] ReadRowsResponse stats
+ * @property {google.cloud.bigquery.storage.v1beta2.IThrottleState|null} [throttleState] ReadRowsResponse throttleState
+ * @property {google.cloud.bigquery.storage.v1beta2.IAvroSchema|null} [avroSchema] ReadRowsResponse avroSchema
+ * @property {google.cloud.bigquery.storage.v1beta2.IArrowSchema|null} [arrowSchema] ReadRowsResponse arrowSchema
+ */
+
+ /**
+ * Constructs a new ReadRowsResponse.
+ * @memberof google.cloud.bigquery.storage.v1beta2
+ * @classdesc Represents a ReadRowsResponse.
+ * @implements IReadRowsResponse
+ * @constructor
+ * @param {google.cloud.bigquery.storage.v1beta2.IReadRowsResponse=} [properties] Properties to set
+ */
+ function ReadRowsResponse(properties) {
+ if (properties)
+ for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i)
+ if (properties[keys[i]] != null && keys[i] !== "__proto__")
+ this[keys[i]] = properties[keys[i]];
+ }
+
+ /**
+ * ReadRowsResponse avroRows.
+ * @member {google.cloud.bigquery.storage.v1beta2.IAvroRows|null|undefined} avroRows
+ * @memberof google.cloud.bigquery.storage.v1beta2.ReadRowsResponse
+ * @instance
+ */
+ ReadRowsResponse.prototype.avroRows = null;
+
+ /**
+ * ReadRowsResponse arrowRecordBatch.
+ * @member {google.cloud.bigquery.storage.v1beta2.IArrowRecordBatch|null|undefined} arrowRecordBatch
+ * @memberof google.cloud.bigquery.storage.v1beta2.ReadRowsResponse
+ * @instance
+ */
+ ReadRowsResponse.prototype.arrowRecordBatch = null;
+
+ /**
+ * ReadRowsResponse rowCount.
+ * @member {number|Long} rowCount
+ * @memberof google.cloud.bigquery.storage.v1beta2.ReadRowsResponse
+ * @instance
+ */
+ // int64 default: a zero Long when long.js is available, plain number 0 otherwise.
+ ReadRowsResponse.prototype.rowCount = $util.Long ? $util.Long.fromBits(0,0,false) : 0;
+
+ /**
+ * ReadRowsResponse stats.
+ * @member {google.cloud.bigquery.storage.v1beta2.IStreamStats|null|undefined} stats
+ * @memberof google.cloud.bigquery.storage.v1beta2.ReadRowsResponse
+ * @instance
+ */
+ ReadRowsResponse.prototype.stats = null;
+
+ /**
+ * ReadRowsResponse throttleState.
+ * @member {google.cloud.bigquery.storage.v1beta2.IThrottleState|null|undefined} throttleState
+ * @memberof google.cloud.bigquery.storage.v1beta2.ReadRowsResponse
+ * @instance
+ */
+ ReadRowsResponse.prototype.throttleState = null;
+
+ /**
+ * ReadRowsResponse avroSchema.
+ * @member {google.cloud.bigquery.storage.v1beta2.IAvroSchema|null|undefined} avroSchema
+ * @memberof google.cloud.bigquery.storage.v1beta2.ReadRowsResponse
+ * @instance
+ */
+ ReadRowsResponse.prototype.avroSchema = null;
+
+ /**
+ * ReadRowsResponse arrowSchema.
+ * @member {google.cloud.bigquery.storage.v1beta2.IArrowSchema|null|undefined} arrowSchema
+ * @memberof google.cloud.bigquery.storage.v1beta2.ReadRowsResponse
+ * @instance
+ */
+ ReadRowsResponse.prototype.arrowSchema = null;
+
+ // OneOf field names bound to virtual getters and setters
+ var $oneOfFields;
+ // NOTE(review): $util.oneOfSetter presumably clears the other members of the
+ // group when one is assigned — confirm against the protobufjs util docs.
+
+ /**
+ * ReadRowsResponse rows.
+ * @member {"avroRows"|"arrowRecordBatch"|undefined} rows
+ * @memberof google.cloud.bigquery.storage.v1beta2.ReadRowsResponse
+ * @instance
+ */
+ Object.defineProperty(ReadRowsResponse.prototype, "rows", {
+ get: $util.oneOfGetter($oneOfFields = ["avroRows", "arrowRecordBatch"]),
+ set: $util.oneOfSetter($oneOfFields)
+ });
+
+ /**
+ * ReadRowsResponse schema.
+ * @member {"avroSchema"|"arrowSchema"|undefined} schema
+ * @memberof google.cloud.bigquery.storage.v1beta2.ReadRowsResponse
+ * @instance
+ */
+ Object.defineProperty(ReadRowsResponse.prototype, "schema", {
+ get: $util.oneOfGetter($oneOfFields = ["avroSchema", "arrowSchema"]),
+ set: $util.oneOfSetter($oneOfFields)
+ });
+
+ /**
+ * Creates a new ReadRowsResponse instance using the specified properties.
+ * @function create
+ * @memberof google.cloud.bigquery.storage.v1beta2.ReadRowsResponse
+ * @static
+ * @param {google.cloud.bigquery.storage.v1beta2.IReadRowsResponse=} [properties] Properties to set
+ * @returns {google.cloud.bigquery.storage.v1beta2.ReadRowsResponse} ReadRowsResponse instance
+ */
+ ReadRowsResponse.create = function create(properties) {
+ return new ReadRowsResponse(properties);
+ };
+
+ /**
+ * Encodes the specified ReadRowsResponse message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.ReadRowsResponse.verify|verify} messages.
+ * @function encode
+ * @memberof google.cloud.bigquery.storage.v1beta2.ReadRowsResponse
+ * @static
+ * @param {google.cloud.bigquery.storage.v1beta2.IReadRowsResponse} message ReadRowsResponse message or plain object to encode
+ * @param {$protobuf.Writer} [writer] Writer to encode to
+ * @returns {$protobuf.Writer} Writer
+ */
+ ReadRowsResponse.encode = function encode(message, writer) {
+ if (!writer)
+ writer = $Writer.create();
+ // Only fields that are present (non-null own properties) are written;
+ // each write emits the precomputed tag byte (field id << 3 | wire type).
+ if (message.stats != null && Object.hasOwnProperty.call(message, "stats"))
+ $root.google.cloud.bigquery.storage.v1beta2.StreamStats.encode(message.stats, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim();
+ if (message.avroRows != null && Object.hasOwnProperty.call(message, "avroRows"))
+ $root.google.cloud.bigquery.storage.v1beta2.AvroRows.encode(message.avroRows, writer.uint32(/* id 3, wireType 2 =*/26).fork()).ldelim();
+ if (message.arrowRecordBatch != null && Object.hasOwnProperty.call(message, "arrowRecordBatch"))
+ $root.google.cloud.bigquery.storage.v1beta2.ArrowRecordBatch.encode(message.arrowRecordBatch, writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim();
+ if (message.throttleState != null && Object.hasOwnProperty.call(message, "throttleState"))
+ $root.google.cloud.bigquery.storage.v1beta2.ThrottleState.encode(message.throttleState, writer.uint32(/* id 5, wireType 2 =*/42).fork()).ldelim();
+ if (message.rowCount != null && Object.hasOwnProperty.call(message, "rowCount"))
+ writer.uint32(/* id 6, wireType 0 =*/48).int64(message.rowCount);
+ if (message.avroSchema != null && Object.hasOwnProperty.call(message, "avroSchema"))
+ $root.google.cloud.bigquery.storage.v1beta2.AvroSchema.encode(message.avroSchema, writer.uint32(/* id 7, wireType 2 =*/58).fork()).ldelim();
+ if (message.arrowSchema != null && Object.hasOwnProperty.call(message, "arrowSchema"))
+ $root.google.cloud.bigquery.storage.v1beta2.ArrowSchema.encode(message.arrowSchema, writer.uint32(/* id 8, wireType 2 =*/66).fork()).ldelim();
+ return writer;
+ };
+
+ /**
+ * Encodes the specified ReadRowsResponse message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.ReadRowsResponse.verify|verify} messages.
+ * @function encodeDelimited
+ * @memberof google.cloud.bigquery.storage.v1beta2.ReadRowsResponse
+ * @static
+ * @param {google.cloud.bigquery.storage.v1beta2.IReadRowsResponse} message ReadRowsResponse message or plain object to encode
+ * @param {$protobuf.Writer} [writer] Writer to encode to
+ * @returns {$protobuf.Writer} Writer
+ */
+ ReadRowsResponse.encodeDelimited = function encodeDelimited(message, writer) {
+ return this.encode(message, writer).ldelim();
+ };
+
+ /**
+ * Decodes a ReadRowsResponse message from the specified reader or buffer.
+ * @function decode
+ * @memberof google.cloud.bigquery.storage.v1beta2.ReadRowsResponse
+ * @static
+ * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from
+ * @param {number} [length] Message length if known beforehand
+ * @returns {google.cloud.bigquery.storage.v1beta2.ReadRowsResponse} ReadRowsResponse
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ // "error" is an optional sentinel tag that stops decoding early, and "long"
+ // doubles as the current recursion depth (extra, undocumented generator params).
+ ReadRowsResponse.decode = function decode(reader, length, error, long) {
+ if (!(reader instanceof $Reader))
+ reader = $Reader.create(reader);
+ if (long === undefined)
+ long = 0;
+ // Guard against unbounded nesting of embedded messages.
+ if (long > $Reader.recursionLimit)
+ throw Error("maximum nesting depth exceeded");
+ var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta2.ReadRowsResponse();
+ while (reader.pos < end) {
+ var tag = reader.uint32();
+ if (tag === error)
+ break;
+ // Dispatch on the field number (tag >>> 3); unknown fields are skipped.
+ switch (tag >>> 3) {
+ case 3: {
+ message.avroRows = $root.google.cloud.bigquery.storage.v1beta2.AvroRows.decode(reader, reader.uint32(), undefined, long + 1);
+ break;
+ }
+ case 4: {
+ message.arrowRecordBatch = $root.google.cloud.bigquery.storage.v1beta2.ArrowRecordBatch.decode(reader, reader.uint32(), undefined, long + 1);
+ break;
+ }
+ case 6: {
+ message.rowCount = reader.int64();
+ break;
+ }
+ case 2: {
+ message.stats = $root.google.cloud.bigquery.storage.v1beta2.StreamStats.decode(reader, reader.uint32(), undefined, long + 1);
+ break;
+ }
+ case 5: {
+ message.throttleState = $root.google.cloud.bigquery.storage.v1beta2.ThrottleState.decode(reader, reader.uint32(), undefined, long + 1);
+ break;
+ }
+ case 7: {
+ message.avroSchema = $root.google.cloud.bigquery.storage.v1beta2.AvroSchema.decode(reader, reader.uint32(), undefined, long + 1);
+ break;
+ }
+ case 8: {
+ message.arrowSchema = $root.google.cloud.bigquery.storage.v1beta2.ArrowSchema.decode(reader, reader.uint32(), undefined, long + 1);
+ break;
+ }
+ default:
+ reader.skipType(tag & 7, long);
+ break;
+ }
+ }
+ return message;
+ };
+
+ /**
+ * Decodes a ReadRowsResponse message from the specified reader or buffer, length delimited.
+ * @function decodeDelimited
+ * @memberof google.cloud.bigquery.storage.v1beta2.ReadRowsResponse
+ * @static
+ * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from
+ * @returns {google.cloud.bigquery.storage.v1beta2.ReadRowsResponse} ReadRowsResponse
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ ReadRowsResponse.decodeDelimited = function decodeDelimited(reader) {
+ if (!(reader instanceof $Reader))
+ reader = new $Reader(reader);
+ return this.decode(reader, reader.uint32());
+ };
+
+ /**
+ * Verifies a ReadRowsResponse message.
+ * @function verify
+ * @memberof google.cloud.bigquery.storage.v1beta2.ReadRowsResponse
+ * @static
+ * @param {Object.} message Plain object to verify
+ * @returns {string|null} `null` if valid, otherwise the reason why it is not
+ */
+ ReadRowsResponse.verify = function verify(message, long) {
+ if (typeof message !== "object" || message === null)
+ return "object expected";
+ if (long === undefined)
+ long = 0;
+ if (long > $util.recursionLimit)
+ return "maximum nesting depth exceeded";
+ // "properties" records which member of each oneof group has been seen,
+ // so a second member of the same group is rejected below.
+ var properties = {};
+ if (message.avroRows != null && message.hasOwnProperty("avroRows")) {
+ properties.rows = 1;
+ {
+ var error = $root.google.cloud.bigquery.storage.v1beta2.AvroRows.verify(message.avroRows, long + 1);
+ if (error)
+ return "avroRows." + error;
+ }
+ }
+ if (message.arrowRecordBatch != null && message.hasOwnProperty("arrowRecordBatch")) {
+ if (properties.rows === 1)
+ return "rows: multiple values";
+ properties.rows = 1;
+ {
+ var error = $root.google.cloud.bigquery.storage.v1beta2.ArrowRecordBatch.verify(message.arrowRecordBatch, long + 1);
+ if (error)
+ return "arrowRecordBatch." + error;
+ }
+ }
+ // rowCount accepts a plain integer or a Long-like {low, high} pair.
+ if (message.rowCount != null && message.hasOwnProperty("rowCount"))
+ if (!$util.isInteger(message.rowCount) && !(message.rowCount && $util.isInteger(message.rowCount.low) && $util.isInteger(message.rowCount.high)))
+ return "rowCount: integer|Long expected";
+ if (message.stats != null && message.hasOwnProperty("stats")) {
+ var error = $root.google.cloud.bigquery.storage.v1beta2.StreamStats.verify(message.stats, long + 1);
+ if (error)
+ return "stats." + error;
+ }
+ if (message.throttleState != null && message.hasOwnProperty("throttleState")) {
+ var error = $root.google.cloud.bigquery.storage.v1beta2.ThrottleState.verify(message.throttleState, long + 1);
+ if (error)
+ return "throttleState." + error;
+ }
+ if (message.avroSchema != null && message.hasOwnProperty("avroSchema")) {
+ properties.schema = 1;
+ {
+ var error = $root.google.cloud.bigquery.storage.v1beta2.AvroSchema.verify(message.avroSchema, long + 1);
+ if (error)
+ return "avroSchema." + error;
+ }
+ }
+ if (message.arrowSchema != null && message.hasOwnProperty("arrowSchema")) {
+ if (properties.schema === 1)
+ return "schema: multiple values";
+ properties.schema = 1;
+ {
+ var error = $root.google.cloud.bigquery.storage.v1beta2.ArrowSchema.verify(message.arrowSchema, long + 1);
+ if (error)
+ return "arrowSchema." + error;
+ }
+ }
+ return null;
+ };
+
+ /**
+ * Creates a ReadRowsResponse message from a plain object. Also converts values to their respective internal types.
+ * @function fromObject
+ * @memberof google.cloud.bigquery.storage.v1beta2.ReadRowsResponse
+ * @static
+ * @param {Object.} object Plain object
+ * @returns {google.cloud.bigquery.storage.v1beta2.ReadRowsResponse} ReadRowsResponse
+ */
+ ReadRowsResponse.fromObject = function fromObject(object, long) {
+ // Already a message instance: return it unchanged.
+ if (object instanceof $root.google.cloud.bigquery.storage.v1beta2.ReadRowsResponse)
+ return object;
+ if (long === undefined)
+ long = 0;
+ if (long > $util.recursionLimit)
+ throw Error("maximum nesting depth exceeded");
+ var message = new $root.google.cloud.bigquery.storage.v1beta2.ReadRowsResponse();
+ if (object.avroRows != null) {
+ if (typeof object.avroRows !== "object")
+ throw TypeError(".google.cloud.bigquery.storage.v1beta2.ReadRowsResponse.avroRows: object expected");
+ message.avroRows = $root.google.cloud.bigquery.storage.v1beta2.AvroRows.fromObject(object.avroRows, long + 1);
+ }
+ if (object.arrowRecordBatch != null) {
+ if (typeof object.arrowRecordBatch !== "object")
+ throw TypeError(".google.cloud.bigquery.storage.v1beta2.ReadRowsResponse.arrowRecordBatch: object expected");
+ message.arrowRecordBatch = $root.google.cloud.bigquery.storage.v1beta2.ArrowRecordBatch.fromObject(object.arrowRecordBatch, long + 1);
+ }
+ // int64 coercion: prefer Long when available, else accept string, number,
+ // or a Long-like {low, high} object via LongBits.
+ if (object.rowCount != null)
+ if ($util.Long)
+ (message.rowCount = $util.Long.fromValue(object.rowCount)).unsigned = false;
+ else if (typeof object.rowCount === "string")
+ message.rowCount = parseInt(object.rowCount, 10);
+ else if (typeof object.rowCount === "number")
+ message.rowCount = object.rowCount;
+ else if (typeof object.rowCount === "object")
+ message.rowCount = new $util.LongBits(object.rowCount.low >>> 0, object.rowCount.high >>> 0).toNumber();
+ if (object.stats != null) {
+ if (typeof object.stats !== "object")
+ throw TypeError(".google.cloud.bigquery.storage.v1beta2.ReadRowsResponse.stats: object expected");
+ message.stats = $root.google.cloud.bigquery.storage.v1beta2.StreamStats.fromObject(object.stats, long + 1);
+ }
+ if (object.throttleState != null) {
+ if (typeof object.throttleState !== "object")
+ throw TypeError(".google.cloud.bigquery.storage.v1beta2.ReadRowsResponse.throttleState: object expected");
+ message.throttleState = $root.google.cloud.bigquery.storage.v1beta2.ThrottleState.fromObject(object.throttleState, long + 1);
+ }
+ if (object.avroSchema != null) {
+ if (typeof object.avroSchema !== "object")
+ throw TypeError(".google.cloud.bigquery.storage.v1beta2.ReadRowsResponse.avroSchema: object expected");
+ message.avroSchema = $root.google.cloud.bigquery.storage.v1beta2.AvroSchema.fromObject(object.avroSchema, long + 1);
+ }
+ if (object.arrowSchema != null) {
+ if (typeof object.arrowSchema !== "object")
+ throw TypeError(".google.cloud.bigquery.storage.v1beta2.ReadRowsResponse.arrowSchema: object expected");
+ message.arrowSchema = $root.google.cloud.bigquery.storage.v1beta2.ArrowSchema.fromObject(object.arrowSchema, long + 1);
+ }
+ return message;
+ };
+
+ /**
+ * Creates a plain object from a ReadRowsResponse message. Also converts values to other types if specified.
+ * @function toObject
+ * @memberof google.cloud.bigquery.storage.v1beta2.ReadRowsResponse
+ * @static
+ * @param {google.cloud.bigquery.storage.v1beta2.ReadRowsResponse} message ReadRowsResponse
+ * @param {$protobuf.IConversionOptions} [options] Conversion options
+ * @returns {Object.} Plain object
+ */
+ ReadRowsResponse.toObject = function toObject(message, options) {
+ if (!options)
+ options = {};
+ var object = {};
+ // With options.defaults, only non-oneof fields (stats, throttleState,
+ // rowCount) get default placeholders; oneof members appear only when set.
+ if (options.defaults) {
+ object.stats = null;
+ object.throttleState = null;
+ if ($util.Long) {
+ var long = new $util.Long(0, 0, false);
+ object.rowCount = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long;
+ } else
+ object.rowCount = options.longs === String ? "0" : 0;
+ }
+ if (message.stats != null && message.hasOwnProperty("stats"))
+ object.stats = $root.google.cloud.bigquery.storage.v1beta2.StreamStats.toObject(message.stats, options);
+ if (message.avroRows != null && message.hasOwnProperty("avroRows")) {
+ object.avroRows = $root.google.cloud.bigquery.storage.v1beta2.AvroRows.toObject(message.avroRows, options);
+ // options.oneofs additionally emits the virtual discriminator field.
+ if (options.oneofs)
+ object.rows = "avroRows";
+ }
+ if (message.arrowRecordBatch != null && message.hasOwnProperty("arrowRecordBatch")) {
+ object.arrowRecordBatch = $root.google.cloud.bigquery.storage.v1beta2.ArrowRecordBatch.toObject(message.arrowRecordBatch, options);
+ if (options.oneofs)
+ object.rows = "arrowRecordBatch";
+ }
+ if (message.throttleState != null && message.hasOwnProperty("throttleState"))
+ object.throttleState = $root.google.cloud.bigquery.storage.v1beta2.ThrottleState.toObject(message.throttleState, options);
+ if (message.rowCount != null && message.hasOwnProperty("rowCount"))
+ if (typeof message.rowCount === "number")
+ object.rowCount = options.longs === String ? String(message.rowCount) : message.rowCount;
+ else
+ object.rowCount = options.longs === String ? $util.Long.prototype.toString.call(message.rowCount) : options.longs === Number ? new $util.LongBits(message.rowCount.low >>> 0, message.rowCount.high >>> 0).toNumber() : message.rowCount;
+ if (message.avroSchema != null && message.hasOwnProperty("avroSchema")) {
+ object.avroSchema = $root.google.cloud.bigquery.storage.v1beta2.AvroSchema.toObject(message.avroSchema, options);
+ if (options.oneofs)
+ object.schema = "avroSchema";
+ }
+ if (message.arrowSchema != null && message.hasOwnProperty("arrowSchema")) {
+ object.arrowSchema = $root.google.cloud.bigquery.storage.v1beta2.ArrowSchema.toObject(message.arrowSchema, options);
+ if (options.oneofs)
+ object.schema = "arrowSchema";
+ }
+ return object;
+ };
+
+ /**
+ * Converts this ReadRowsResponse to JSON.
+ * @function toJSON
+ * @memberof google.cloud.bigquery.storage.v1beta2.ReadRowsResponse
+ * @instance
+ * @returns {Object.} JSON object
+ */
+ ReadRowsResponse.prototype.toJSON = function toJSON() {
+ return this.constructor.toObject(this, $protobuf.util.toJSONOptions);
+ };
+
+ /**
+ * Gets the default type url for ReadRowsResponse
+ * @function getTypeUrl
+ * @memberof google.cloud.bigquery.storage.v1beta2.ReadRowsResponse
+ * @static
+ * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com")
+ * @returns {string} The default type url
+ */
+ ReadRowsResponse.getTypeUrl = function getTypeUrl(typeUrlPrefix) {
+ if (typeUrlPrefix === undefined) {
+ typeUrlPrefix = "type.googleapis.com";
+ }
+ return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta2.ReadRowsResponse";
+ };
+
+ return ReadRowsResponse;
+ })();
+
+ v1beta2.SplitReadStreamRequest = (function() {
+
+ /**
+ * Properties of a SplitReadStreamRequest.
+ * @memberof google.cloud.bigquery.storage.v1beta2
+ * @interface ISplitReadStreamRequest
+ * @property {string|null} [name] SplitReadStreamRequest name
+ * @property {number|null} [fraction] SplitReadStreamRequest fraction
+ */
+ // NOTE(review): fraction is a double on the wire and looks like a split
+ // point — confirm its exact semantics against the BigQuery Storage API docs.
+
+ /**
+ * Constructs a new SplitReadStreamRequest.
+ * @memberof google.cloud.bigquery.storage.v1beta2
+ * @classdesc Represents a SplitReadStreamRequest.
+ * @implements ISplitReadStreamRequest
+ * @constructor
+ * @param {google.cloud.bigquery.storage.v1beta2.ISplitReadStreamRequest=} [properties] Properties to set
+ */
+ function SplitReadStreamRequest(properties) {
+ if (properties)
+ for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i)
+ if (properties[keys[i]] != null && keys[i] !== "__proto__")
+ this[keys[i]] = properties[keys[i]];
+ }
+
+ /**
+ * SplitReadStreamRequest name.
+ * @member {string} name
+ * @memberof google.cloud.bigquery.storage.v1beta2.SplitReadStreamRequest
+ * @instance
+ */
+ SplitReadStreamRequest.prototype.name = "";
+
+ /**
+ * SplitReadStreamRequest fraction.
+ * @member {number} fraction
+ * @memberof google.cloud.bigquery.storage.v1beta2.SplitReadStreamRequest
+ * @instance
+ */
+ SplitReadStreamRequest.prototype.fraction = 0;
+
+ /**
+ * Creates a new SplitReadStreamRequest instance using the specified properties.
+ * @function create
+ * @memberof google.cloud.bigquery.storage.v1beta2.SplitReadStreamRequest
+ * @static
+ * @param {google.cloud.bigquery.storage.v1beta2.ISplitReadStreamRequest=} [properties] Properties to set
+ * @returns {google.cloud.bigquery.storage.v1beta2.SplitReadStreamRequest} SplitReadStreamRequest instance
+ */
+ SplitReadStreamRequest.create = function create(properties) {
+ return new SplitReadStreamRequest(properties);
+ };
+
+ /**
+ * Encodes the specified SplitReadStreamRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.SplitReadStreamRequest.verify|verify} messages.
+ * @function encode
+ * @memberof google.cloud.bigquery.storage.v1beta2.SplitReadStreamRequest
+ * @static
+ * @param {google.cloud.bigquery.storage.v1beta2.ISplitReadStreamRequest} message SplitReadStreamRequest message or plain object to encode
+ * @param {$protobuf.Writer} [writer] Writer to encode to
+ * @returns {$protobuf.Writer} Writer
+ */
+ SplitReadStreamRequest.encode = function encode(message, writer) {
+ if (!writer)
+ writer = $Writer.create();
+ // Field 1 (name): length-delimited string; field 2 (fraction): 64-bit double.
+ if (message.name != null && Object.hasOwnProperty.call(message, "name"))
+ writer.uint32(/* id 1, wireType 2 =*/10).string(message.name);
+ if (message.fraction != null && Object.hasOwnProperty.call(message, "fraction"))
+ writer.uint32(/* id 2, wireType 1 =*/17).double(message.fraction);
+ return writer;
+ };
+
+ /**
+ * Encodes the specified SplitReadStreamRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.SplitReadStreamRequest.verify|verify} messages.
+ * @function encodeDelimited
+ * @memberof google.cloud.bigquery.storage.v1beta2.SplitReadStreamRequest
+ * @static
+ * @param {google.cloud.bigquery.storage.v1beta2.ISplitReadStreamRequest} message SplitReadStreamRequest message or plain object to encode
+ * @param {$protobuf.Writer} [writer] Writer to encode to
+ * @returns {$protobuf.Writer} Writer
+ */
+ SplitReadStreamRequest.encodeDelimited = function encodeDelimited(message, writer) {
+ return this.encode(message, writer).ldelim();
+ };
+
+ /**
+ * Decodes a SplitReadStreamRequest message from the specified reader or buffer.
+ * @function decode
+ * @memberof google.cloud.bigquery.storage.v1beta2.SplitReadStreamRequest
+ * @static
+ * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from
+ * @param {number} [length] Message length if known beforehand
+ * @returns {google.cloud.bigquery.storage.v1beta2.SplitReadStreamRequest} SplitReadStreamRequest
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ // "error" is an optional sentinel tag that stops decoding early; "long"
+ // doubles as the recursion depth counter.
+ SplitReadStreamRequest.decode = function decode(reader, length, error, long) {
+ if (!(reader instanceof $Reader))
+ reader = $Reader.create(reader);
+ if (long === undefined)
+ long = 0;
+ if (long > $Reader.recursionLimit)
+ throw Error("maximum nesting depth exceeded");
+ var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta2.SplitReadStreamRequest();
+ while (reader.pos < end) {
+ var tag = reader.uint32();
+ if (tag === error)
+ break;
+ switch (tag >>> 3) {
+ case 1: {
+ message.name = reader.string();
+ break;
+ }
+ case 2: {
+ message.fraction = reader.double();
+ break;
+ }
+ default:
+ reader.skipType(tag & 7, long);
+ break;
+ }
+ }
+ return message;
+ };
+
+ /**
+ * Decodes a SplitReadStreamRequest message from the specified reader or buffer, length delimited.
+ * @function decodeDelimited
+ * @memberof google.cloud.bigquery.storage.v1beta2.SplitReadStreamRequest
+ * @static
+ * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from
+ * @returns {google.cloud.bigquery.storage.v1beta2.SplitReadStreamRequest} SplitReadStreamRequest
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ SplitReadStreamRequest.decodeDelimited = function decodeDelimited(reader) {
+ if (!(reader instanceof $Reader))
+ reader = new $Reader(reader);
+ return this.decode(reader, reader.uint32());
+ };
+
+ /**
+ * Verifies a SplitReadStreamRequest message.
+ * @function verify
+ * @memberof google.cloud.bigquery.storage.v1beta2.SplitReadStreamRequest
+ * @static
+ * @param {Object.} message Plain object to verify
+ * @returns {string|null} `null` if valid, otherwise the reason why it is not
+ */
+ SplitReadStreamRequest.verify = function verify(message, long) {
+ if (typeof message !== "object" || message === null)
+ return "object expected";
+ if (long === undefined)
+ long = 0;
+ if (long > $util.recursionLimit)
+ return "maximum nesting depth exceeded";
+ if (message.name != null && message.hasOwnProperty("name"))
+ if (!$util.isString(message.name))
+ return "name: string expected";
+ // Any finite or non-finite number is accepted here; range is not checked.
+ if (message.fraction != null && message.hasOwnProperty("fraction"))
+ if (typeof message.fraction !== "number")
+ return "fraction: number expected";
+ return null;
+ };
+
+ /**
+ * Creates a SplitReadStreamRequest message from a plain object. Also converts values to their respective internal types.
+ * @function fromObject
+ * @memberof google.cloud.bigquery.storage.v1beta2.SplitReadStreamRequest
+ * @static
+ * @param {Object.} object Plain object
+ * @returns {google.cloud.bigquery.storage.v1beta2.SplitReadStreamRequest} SplitReadStreamRequest
+ */
+ SplitReadStreamRequest.fromObject = function fromObject(object, long) {
+ if (object instanceof $root.google.cloud.bigquery.storage.v1beta2.SplitReadStreamRequest)
+ return object;
+ if (long === undefined)
+ long = 0;
+ if (long > $util.recursionLimit)
+ throw Error("maximum nesting depth exceeded");
+ var message = new $root.google.cloud.bigquery.storage.v1beta2.SplitReadStreamRequest();
+ if (object.name != null)
+ message.name = String(object.name);
+ if (object.fraction != null)
+ message.fraction = Number(object.fraction);
+ return message;
+ };
+
+ /**
+ * Creates a plain object from a SplitReadStreamRequest message. Also converts values to other types if specified.
+ * @function toObject
+ * @memberof google.cloud.bigquery.storage.v1beta2.SplitReadStreamRequest
+ * @static
+ * @param {google.cloud.bigquery.storage.v1beta2.SplitReadStreamRequest} message SplitReadStreamRequest
+ * @param {$protobuf.IConversionOptions} [options] Conversion options
+ * @returns {Object.} Plain object
+ */
+ SplitReadStreamRequest.toObject = function toObject(message, options) {
+ if (!options)
+ options = {};
+ var object = {};
+ if (options.defaults) {
+ object.name = "";
+ object.fraction = 0;
+ }
+ if (message.name != null && message.hasOwnProperty("name"))
+ object.name = message.name;
+ // Non-finite doubles (NaN, +/-Infinity) are stringified for JSON output.
+ if (message.fraction != null && message.hasOwnProperty("fraction"))
+ object.fraction = options.json && !isFinite(message.fraction) ? String(message.fraction) : message.fraction;
+ return object;
+ };
+
+ /**
+ * Converts this SplitReadStreamRequest to JSON.
+ * @function toJSON
+ * @memberof google.cloud.bigquery.storage.v1beta2.SplitReadStreamRequest
+ * @instance
+ * @returns {Object.} JSON object
+ */
+ SplitReadStreamRequest.prototype.toJSON = function toJSON() {
+ return this.constructor.toObject(this, $protobuf.util.toJSONOptions);
+ };
+
+ /**
+ * Gets the default type url for SplitReadStreamRequest
+ * @function getTypeUrl
+ * @memberof google.cloud.bigquery.storage.v1beta2.SplitReadStreamRequest
+ * @static
+ * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com")
+ * @returns {string} The default type url
+ */
+ SplitReadStreamRequest.getTypeUrl = function getTypeUrl(typeUrlPrefix) {
+ if (typeUrlPrefix === undefined) {
+ typeUrlPrefix = "type.googleapis.com";
+ }
+ return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta2.SplitReadStreamRequest";
+ };
+
+ return SplitReadStreamRequest;
+ })();
+
+ v1beta2.SplitReadStreamResponse = (function() {
+
+ /**
+ * Properties of a SplitReadStreamResponse.
+ * @memberof google.cloud.bigquery.storage.v1beta2
+ * @interface ISplitReadStreamResponse
+ * @property {google.cloud.bigquery.storage.v1beta2.IReadStream|null} [primaryStream] SplitReadStreamResponse primaryStream
+ * @property {google.cloud.bigquery.storage.v1beta2.IReadStream|null} [remainderStream] SplitReadStreamResponse remainderStream
+ */
+
+ /**
+ * Constructs a new SplitReadStreamResponse.
+ * @memberof google.cloud.bigquery.storage.v1beta2
+ * @classdesc Represents a SplitReadStreamResponse.
+ * @implements ISplitReadStreamResponse
+ * @constructor
+ * @param {google.cloud.bigquery.storage.v1beta2.ISplitReadStreamResponse=} [properties] Properties to set
+ */
+ function SplitReadStreamResponse(properties) {
+ if (properties)
+ for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i)
+ if (properties[keys[i]] != null && keys[i] !== "__proto__")
+ this[keys[i]] = properties[keys[i]];
+ }
+
+ /**
+ * SplitReadStreamResponse primaryStream.
+ * @member {google.cloud.bigquery.storage.v1beta2.IReadStream|null|undefined} primaryStream
+ * @memberof google.cloud.bigquery.storage.v1beta2.SplitReadStreamResponse
+ * @instance
+ */
+ SplitReadStreamResponse.prototype.primaryStream = null;
+
+ /**
+ * SplitReadStreamResponse remainderStream.
+ * @member {google.cloud.bigquery.storage.v1beta2.IReadStream|null|undefined} remainderStream
+ * @memberof google.cloud.bigquery.storage.v1beta2.SplitReadStreamResponse
+ * @instance
+ */
+ SplitReadStreamResponse.prototype.remainderStream = null;
+
+ /**
+ * Creates a new SplitReadStreamResponse instance using the specified properties.
+ * @function create
+ * @memberof google.cloud.bigquery.storage.v1beta2.SplitReadStreamResponse
+ * @static
+ * @param {google.cloud.bigquery.storage.v1beta2.ISplitReadStreamResponse=} [properties] Properties to set
+ * @returns {google.cloud.bigquery.storage.v1beta2.SplitReadStreamResponse} SplitReadStreamResponse instance
+ */
+ SplitReadStreamResponse.create = function create(properties) {
+ return new SplitReadStreamResponse(properties);
+ };
+
+ /**
+ * Encodes the specified SplitReadStreamResponse message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.SplitReadStreamResponse.verify|verify} messages.
+ * @function encode
+ * @memberof google.cloud.bigquery.storage.v1beta2.SplitReadStreamResponse
+ * @static
+ * @param {google.cloud.bigquery.storage.v1beta2.ISplitReadStreamResponse} message SplitReadStreamResponse message or plain object to encode
+ * @param {$protobuf.Writer} [writer] Writer to encode to
+ * @returns {$protobuf.Writer} Writer
+ */
+ SplitReadStreamResponse.encode = function encode(message, writer) {
+ if (!writer)
+ writer = $Writer.create();
+ // Fields 1 and 2 are both length-delimited embedded ReadStream messages.
+ if (message.primaryStream != null && Object.hasOwnProperty.call(message, "primaryStream"))
+ $root.google.cloud.bigquery.storage.v1beta2.ReadStream.encode(message.primaryStream, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim();
+ if (message.remainderStream != null && Object.hasOwnProperty.call(message, "remainderStream"))
+ $root.google.cloud.bigquery.storage.v1beta2.ReadStream.encode(message.remainderStream, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim();
+ return writer;
+ };
+
+ /**
+ * Encodes the specified SplitReadStreamResponse message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.SplitReadStreamResponse.verify|verify} messages.
+ * @function encodeDelimited
+ * @memberof google.cloud.bigquery.storage.v1beta2.SplitReadStreamResponse
+ * @static
+ * @param {google.cloud.bigquery.storage.v1beta2.ISplitReadStreamResponse} message SplitReadStreamResponse message or plain object to encode
+ * @param {$protobuf.Writer} [writer] Writer to encode to
+ * @returns {$protobuf.Writer} Writer
+ */
+ SplitReadStreamResponse.encodeDelimited = function encodeDelimited(message, writer) {
+ return this.encode(message, writer).ldelim();
+ };
+
+ /**
+ * Decodes a SplitReadStreamResponse message from the specified reader or buffer.
+ * @function decode
+ * @memberof google.cloud.bigquery.storage.v1beta2.SplitReadStreamResponse
+ * @static
+ * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from
+ * @param {number} [length] Message length if known beforehand
+ * @returns {google.cloud.bigquery.storage.v1beta2.SplitReadStreamResponse} SplitReadStreamResponse
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ // "error" is an optional sentinel tag that stops decoding early; "long"
+ // doubles as the recursion depth counter.
+ SplitReadStreamResponse.decode = function decode(reader, length, error, long) {
+ if (!(reader instanceof $Reader))
+ reader = $Reader.create(reader);
+ if (long === undefined)
+ long = 0;
+ if (long > $Reader.recursionLimit)
+ throw Error("maximum nesting depth exceeded");
+ var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta2.SplitReadStreamResponse();
+ while (reader.pos < end) {
+ var tag = reader.uint32();
+ if (tag === error)
+ break;
+ // Dispatch on the field number (tag >>> 3); unknown fields are skipped.
+ switch (tag >>> 3) {
+ case 1: {
+ message.primaryStream = $root.google.cloud.bigquery.storage.v1beta2.ReadStream.decode(reader, reader.uint32(), undefined, long + 1);
+ break;
+ }
+ case 2: {
+ message.remainderStream = $root.google.cloud.bigquery.storage.v1beta2.ReadStream.decode(reader, reader.uint32(), undefined, long + 1);
+ break;
+ }
+ default:
+ reader.skipType(tag & 7, long);
+ break;
+ }
+ }
+ return message;
+ };
+
+ /**
+ * Decodes a SplitReadStreamResponse message from the specified reader or buffer, length delimited.
+ * @function decodeDelimited
+ * @memberof google.cloud.bigquery.storage.v1beta2.SplitReadStreamResponse
+ * @static
+ * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from
+ * @returns {google.cloud.bigquery.storage.v1beta2.SplitReadStreamResponse} SplitReadStreamResponse
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ SplitReadStreamResponse.decodeDelimited = function decodeDelimited(reader) {
+ if (!(reader instanceof $Reader))
+ reader = new $Reader(reader);
+ return this.decode(reader, reader.uint32());
+ };
+
+ /**
+ * Verifies a SplitReadStreamResponse message.
+ * @function verify
+ * @memberof google.cloud.bigquery.storage.v1beta2.SplitReadStreamResponse
+ * @static
+ * @param {Object.} message Plain object to verify
+ * @returns {string|null} `null` if valid, otherwise the reason why it is not
+ */
+ SplitReadStreamResponse.verify = function verify(message, long) {
+ if (typeof message !== "object" || message === null)
+ return "object expected";
+ if (long === undefined)
+ long = 0;
+ if (long > $util.recursionLimit)
+ return "maximum nesting depth exceeded";
+ if (message.primaryStream != null && message.hasOwnProperty("primaryStream")) {
+ var error = $root.google.cloud.bigquery.storage.v1beta2.ReadStream.verify(message.primaryStream, long + 1);
+ if (error)
+ return "primaryStream." + error;
+ }
+ if (message.remainderStream != null && message.hasOwnProperty("remainderStream")) {
+ var error = $root.google.cloud.bigquery.storage.v1beta2.ReadStream.verify(message.remainderStream, long + 1);
+ if (error)
+ return "remainderStream." + error;
+ }
+ return null;
+ };
+
+ /**
+ * Creates a SplitReadStreamResponse message from a plain object. Also converts values to their respective internal types.
+ * @function fromObject
+ * @memberof google.cloud.bigquery.storage.v1beta2.SplitReadStreamResponse
+ * @static
+ * @param {Object.} object Plain object
+ * @returns {google.cloud.bigquery.storage.v1beta2.SplitReadStreamResponse} SplitReadStreamResponse
+ */
+ SplitReadStreamResponse.fromObject = function fromObject(object, long) {
+ if (object instanceof $root.google.cloud.bigquery.storage.v1beta2.SplitReadStreamResponse)
+ return object;
+ if (long === undefined)
+ long = 0;
+ if (long > $util.recursionLimit)
+ throw Error("maximum nesting depth exceeded");
+ var message = new $root.google.cloud.bigquery.storage.v1beta2.SplitReadStreamResponse();
+ if (object.primaryStream != null) {
+ if (typeof object.primaryStream !== "object")
+ throw TypeError(".google.cloud.bigquery.storage.v1beta2.SplitReadStreamResponse.primaryStream: object expected");
+ message.primaryStream = $root.google.cloud.bigquery.storage.v1beta2.ReadStream.fromObject(object.primaryStream, long + 1);
+ }
+ if (object.remainderStream != null) {
+ if (typeof object.remainderStream !== "object")
+ throw TypeError(".google.cloud.bigquery.storage.v1beta2.SplitReadStreamResponse.remainderStream: object expected");
+ message.remainderStream = $root.google.cloud.bigquery.storage.v1beta2.ReadStream.fromObject(object.remainderStream, long + 1);
+ }
+ return message;
+ };
+
+ /**
+ * Creates a plain object from a SplitReadStreamResponse message. Also converts values to other types if specified.
+ * @function toObject
+ * @memberof google.cloud.bigquery.storage.v1beta2.SplitReadStreamResponse
+ * @static
+ * @param {google.cloud.bigquery.storage.v1beta2.SplitReadStreamResponse} message SplitReadStreamResponse
+ * @param {$protobuf.IConversionOptions} [options] Conversion options
+ * @returns {Object.} Plain object
+ */
+ SplitReadStreamResponse.toObject = function toObject(message, options) {
+ if (!options)
+ options = {};
+ var object = {};
+ if (options.defaults) {
+ object.primaryStream = null;
+ object.remainderStream = null;
+ }
+ if (message.primaryStream != null && message.hasOwnProperty("primaryStream"))
+ object.primaryStream = $root.google.cloud.bigquery.storage.v1beta2.ReadStream.toObject(message.primaryStream, options);
+ if (message.remainderStream != null && message.hasOwnProperty("remainderStream"))
+ object.remainderStream = $root.google.cloud.bigquery.storage.v1beta2.ReadStream.toObject(message.remainderStream, options);
+ return object;
+ };
+
+ /**
+ * Converts this SplitReadStreamResponse to JSON.
+ * @function toJSON
+ * @memberof google.cloud.bigquery.storage.v1beta2.SplitReadStreamResponse
+ * @instance
+ * @returns {Object.} JSON object
+ */
+ SplitReadStreamResponse.prototype.toJSON = function toJSON() {
+ return this.constructor.toObject(this, $protobuf.util.toJSONOptions);
+ };
+
+ /**
+ * Gets the default type url for SplitReadStreamResponse
+ * @function getTypeUrl
+ * @memberof google.cloud.bigquery.storage.v1beta2.SplitReadStreamResponse
+ * @static
+ * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com")
+ * @returns {string} The default type url
+ */
+ SplitReadStreamResponse.getTypeUrl = function getTypeUrl(typeUrlPrefix) {
+ if (typeUrlPrefix === undefined) {
+ typeUrlPrefix = "type.googleapis.com";
+ }
+ return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta2.SplitReadStreamResponse";
+ };
+
+ return SplitReadStreamResponse;
+ })();
+
+ v1beta2.CreateWriteStreamRequest = (function() {
+
+ /**
+ * Properties of a CreateWriteStreamRequest.
+ * @memberof google.cloud.bigquery.storage.v1beta2
+ * @interface ICreateWriteStreamRequest
+ * @property {string|null} [parent] CreateWriteStreamRequest parent
+ * @property {google.cloud.bigquery.storage.v1beta2.IWriteStream|null} [writeStream] CreateWriteStreamRequest writeStream
+ */
+
+ /**
+ * Constructs a new CreateWriteStreamRequest.
+ * @memberof google.cloud.bigquery.storage.v1beta2
+ * @classdesc Represents a CreateWriteStreamRequest.
+ * @implements ICreateWriteStreamRequest
+ * @constructor
+ * @param {google.cloud.bigquery.storage.v1beta2.ICreateWriteStreamRequest=} [properties] Properties to set
+ */
+ function CreateWriteStreamRequest(properties) {
+ if (properties)
+ for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i)
+ if (properties[keys[i]] != null && keys[i] !== "__proto__")
+ this[keys[i]] = properties[keys[i]];
+ }
+
+ /**
+ * CreateWriteStreamRequest parent.
+ * @member {string} parent
+ * @memberof google.cloud.bigquery.storage.v1beta2.CreateWriteStreamRequest
+ * @instance
+ */
+ CreateWriteStreamRequest.prototype.parent = "";
+
+ /**
+ * CreateWriteStreamRequest writeStream.
+ * @member {google.cloud.bigquery.storage.v1beta2.IWriteStream|null|undefined} writeStream
+ * @memberof google.cloud.bigquery.storage.v1beta2.CreateWriteStreamRequest
+ * @instance
+ */
+ CreateWriteStreamRequest.prototype.writeStream = null;
+
+ /**
+ * Creates a new CreateWriteStreamRequest instance using the specified properties.
+ * @function create
+ * @memberof google.cloud.bigquery.storage.v1beta2.CreateWriteStreamRequest
+ * @static
+ * @param {google.cloud.bigquery.storage.v1beta2.ICreateWriteStreamRequest=} [properties] Properties to set
+ * @returns {google.cloud.bigquery.storage.v1beta2.CreateWriteStreamRequest} CreateWriteStreamRequest instance
+ */
+ CreateWriteStreamRequest.create = function create(properties) {
+ return new CreateWriteStreamRequest(properties);
+ };
+
+ /**
+ * Encodes the specified CreateWriteStreamRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.CreateWriteStreamRequest.verify|verify} messages.
+ * @function encode
+ * @memberof google.cloud.bigquery.storage.v1beta2.CreateWriteStreamRequest
+ * @static
+ * @param {google.cloud.bigquery.storage.v1beta2.ICreateWriteStreamRequest} message CreateWriteStreamRequest message or plain object to encode
+ * @param {$protobuf.Writer} [writer] Writer to encode to
+ * @returns {$protobuf.Writer} Writer
+ */
+ CreateWriteStreamRequest.encode = function encode(message, writer) {
+ if (!writer)
+ writer = $Writer.create();
+ if (message.parent != null && Object.hasOwnProperty.call(message, "parent"))
+ writer.uint32(/* id 1, wireType 2 =*/10).string(message.parent);
+ if (message.writeStream != null && Object.hasOwnProperty.call(message, "writeStream"))
+ $root.google.cloud.bigquery.storage.v1beta2.WriteStream.encode(message.writeStream, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim();
+ return writer;
+ };
+
+ /**
+ * Encodes the specified CreateWriteStreamRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.CreateWriteStreamRequest.verify|verify} messages.
+ * @function encodeDelimited
+ * @memberof google.cloud.bigquery.storage.v1beta2.CreateWriteStreamRequest
+ * @static
+ * @param {google.cloud.bigquery.storage.v1beta2.ICreateWriteStreamRequest} message CreateWriteStreamRequest message or plain object to encode
+ * @param {$protobuf.Writer} [writer] Writer to encode to
+ * @returns {$protobuf.Writer} Writer
+ */
+ CreateWriteStreamRequest.encodeDelimited = function encodeDelimited(message, writer) {
+ return this.encode(message, writer).ldelim();
+ };
+
+ /**
+ * Decodes a CreateWriteStreamRequest message from the specified reader or buffer.
+ * @function decode
+ * @memberof google.cloud.bigquery.storage.v1beta2.CreateWriteStreamRequest
+ * @static
+ * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from
+ * @param {number} [length] Message length if known beforehand
+ * @returns {google.cloud.bigquery.storage.v1beta2.CreateWriteStreamRequest} CreateWriteStreamRequest
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ CreateWriteStreamRequest.decode = function decode(reader, length, error, long) {
+ if (!(reader instanceof $Reader))
+ reader = $Reader.create(reader);
+ if (long === undefined)
+ long = 0;
+ if (long > $Reader.recursionLimit)
+ throw Error("maximum nesting depth exceeded");
+ var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta2.CreateWriteStreamRequest();
+ while (reader.pos < end) {
+ var tag = reader.uint32();
+ if (tag === error)
+ break;
+ switch (tag >>> 3) {
+ case 1: {
+ message.parent = reader.string();
+ break;
+ }
+ case 2: {
+ message.writeStream = $root.google.cloud.bigquery.storage.v1beta2.WriteStream.decode(reader, reader.uint32(), undefined, long + 1);
+ break;
+ }
+ default:
+ reader.skipType(tag & 7, long);
+ break;
+ }
+ }
+ return message;
+ };
+
+ /**
+ * Decodes a CreateWriteStreamRequest message from the specified reader or buffer, length delimited.
+ * @function decodeDelimited
+ * @memberof google.cloud.bigquery.storage.v1beta2.CreateWriteStreamRequest
+ * @static
+ * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from
+ * @returns {google.cloud.bigquery.storage.v1beta2.CreateWriteStreamRequest} CreateWriteStreamRequest
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ CreateWriteStreamRequest.decodeDelimited = function decodeDelimited(reader) {
+ if (!(reader instanceof $Reader))
+ reader = new $Reader(reader);
+ return this.decode(reader, reader.uint32());
+ };
+
+ /**
+ * Verifies a CreateWriteStreamRequest message.
+ * @function verify
+ * @memberof google.cloud.bigquery.storage.v1beta2.CreateWriteStreamRequest
+ * @static
+ * @param {Object.} message Plain object to verify
+ * @returns {string|null} `null` if valid, otherwise the reason why it is not
+ */
+ CreateWriteStreamRequest.verify = function verify(message, long) {
+ if (typeof message !== "object" || message === null)
+ return "object expected";
+ if (long === undefined)
+ long = 0;
+ if (long > $util.recursionLimit)
+ return "maximum nesting depth exceeded";
+ if (message.parent != null && message.hasOwnProperty("parent"))
+ if (!$util.isString(message.parent))
+ return "parent: string expected";
+ if (message.writeStream != null && message.hasOwnProperty("writeStream")) {
+ var error = $root.google.cloud.bigquery.storage.v1beta2.WriteStream.verify(message.writeStream, long + 1);
+ if (error)
+ return "writeStream." + error;
+ }
+ return null;
+ };
+
+ /**
+ * Creates a CreateWriteStreamRequest message from a plain object. Also converts values to their respective internal types.
+ * @function fromObject
+ * @memberof google.cloud.bigquery.storage.v1beta2.CreateWriteStreamRequest
+ * @static
+ * @param {Object.} object Plain object
+ * @returns {google.cloud.bigquery.storage.v1beta2.CreateWriteStreamRequest} CreateWriteStreamRequest
+ */
+ CreateWriteStreamRequest.fromObject = function fromObject(object, long) {
+ if (object instanceof $root.google.cloud.bigquery.storage.v1beta2.CreateWriteStreamRequest)
+ return object;
+ if (long === undefined)
+ long = 0;
+ if (long > $util.recursionLimit)
+ throw Error("maximum nesting depth exceeded");
+ var message = new $root.google.cloud.bigquery.storage.v1beta2.CreateWriteStreamRequest();
+ if (object.parent != null)
+ message.parent = String(object.parent);
+ if (object.writeStream != null) {
+ if (typeof object.writeStream !== "object")
+ throw TypeError(".google.cloud.bigquery.storage.v1beta2.CreateWriteStreamRequest.writeStream: object expected");
+ message.writeStream = $root.google.cloud.bigquery.storage.v1beta2.WriteStream.fromObject(object.writeStream, long + 1);
+ }
+ return message;
+ };
+
+ /**
+ * Creates a plain object from a CreateWriteStreamRequest message. Also converts values to other types if specified.
+ * @function toObject
+ * @memberof google.cloud.bigquery.storage.v1beta2.CreateWriteStreamRequest
+ * @static
+ * @param {google.cloud.bigquery.storage.v1beta2.CreateWriteStreamRequest} message CreateWriteStreamRequest
+ * @param {$protobuf.IConversionOptions} [options] Conversion options
+ * @returns {Object.} Plain object
+ */
+ CreateWriteStreamRequest.toObject = function toObject(message, options) {
+ if (!options)
+ options = {};
+ var object = {};
+ if (options.defaults) {
+ object.parent = "";
+ object.writeStream = null;
+ }
+ if (message.parent != null && message.hasOwnProperty("parent"))
+ object.parent = message.parent;
+ if (message.writeStream != null && message.hasOwnProperty("writeStream"))
+ object.writeStream = $root.google.cloud.bigquery.storage.v1beta2.WriteStream.toObject(message.writeStream, options);
+ return object;
+ };
+
+ /**
+ * Converts this CreateWriteStreamRequest to JSON.
+ * @function toJSON
+ * @memberof google.cloud.bigquery.storage.v1beta2.CreateWriteStreamRequest
+ * @instance
+ * @returns {Object.} JSON object
+ */
+ CreateWriteStreamRequest.prototype.toJSON = function toJSON() {
+ return this.constructor.toObject(this, $protobuf.util.toJSONOptions);
+ };
+
+ /**
+ * Gets the default type url for CreateWriteStreamRequest
+ * @function getTypeUrl
+ * @memberof google.cloud.bigquery.storage.v1beta2.CreateWriteStreamRequest
+ * @static
+ * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com")
+ * @returns {string} The default type url
+ */
+ CreateWriteStreamRequest.getTypeUrl = function getTypeUrl(typeUrlPrefix) {
+ if (typeUrlPrefix === undefined) {
+ typeUrlPrefix = "type.googleapis.com";
+ }
+ return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta2.CreateWriteStreamRequest";
+ };
+
+ return CreateWriteStreamRequest;
+ })();
+
+ v1beta2.AppendRowsRequest = (function() {
+
+ /**
+ * Properties of an AppendRowsRequest.
+ * @memberof google.cloud.bigquery.storage.v1beta2
+ * @interface IAppendRowsRequest
+ * @property {string|null} [writeStream] AppendRowsRequest writeStream
+ * @property {google.protobuf.IInt64Value|null} [offset] AppendRowsRequest offset
+ * @property {google.cloud.bigquery.storage.v1beta2.AppendRowsRequest.IProtoData|null} [protoRows] AppendRowsRequest protoRows
+ * @property {string|null} [traceId] AppendRowsRequest traceId
+ */
+
+ /**
+ * Constructs a new AppendRowsRequest.
+ * @memberof google.cloud.bigquery.storage.v1beta2
+ * @classdesc Represents an AppendRowsRequest.
+ * @implements IAppendRowsRequest
+ * @constructor
+ * @param {google.cloud.bigquery.storage.v1beta2.IAppendRowsRequest=} [properties] Properties to set
+ */
+ function AppendRowsRequest(properties) {
+ if (properties)
+ for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i)
+ if (properties[keys[i]] != null && keys[i] !== "__proto__")
+ this[keys[i]] = properties[keys[i]];
+ }
+
+ /**
+ * AppendRowsRequest writeStream.
+ * @member {string} writeStream
+ * @memberof google.cloud.bigquery.storage.v1beta2.AppendRowsRequest
+ * @instance
+ */
+ AppendRowsRequest.prototype.writeStream = "";
+
+ /**
+ * AppendRowsRequest offset.
+ * @member {google.protobuf.IInt64Value|null|undefined} offset
+ * @memberof google.cloud.bigquery.storage.v1beta2.AppendRowsRequest
+ * @instance
+ */
+ AppendRowsRequest.prototype.offset = null;
+
+ /**
+ * AppendRowsRequest protoRows.
+ * @member {google.cloud.bigquery.storage.v1beta2.AppendRowsRequest.IProtoData|null|undefined} protoRows
+ * @memberof google.cloud.bigquery.storage.v1beta2.AppendRowsRequest
+ * @instance
+ */
+ AppendRowsRequest.prototype.protoRows = null;
+
+ /**
+ * AppendRowsRequest traceId.
+ * @member {string} traceId
+ * @memberof google.cloud.bigquery.storage.v1beta2.AppendRowsRequest
+ * @instance
+ */
+ AppendRowsRequest.prototype.traceId = "";
+
+ // OneOf field names bound to virtual getters and setters
+ var $oneOfFields;
+
+ /**
+ * AppendRowsRequest rows.
+ * @member {"protoRows"|undefined} rows
+ * @memberof google.cloud.bigquery.storage.v1beta2.AppendRowsRequest
+ * @instance
+ */
+ Object.defineProperty(AppendRowsRequest.prototype, "rows", {
+ get: $util.oneOfGetter($oneOfFields = ["protoRows"]),
+ set: $util.oneOfSetter($oneOfFields)
+ });
+
+ /**
+ * Creates a new AppendRowsRequest instance using the specified properties.
+ * @function create
+ * @memberof google.cloud.bigquery.storage.v1beta2.AppendRowsRequest
+ * @static
+ * @param {google.cloud.bigquery.storage.v1beta2.IAppendRowsRequest=} [properties] Properties to set
+ * @returns {google.cloud.bigquery.storage.v1beta2.AppendRowsRequest} AppendRowsRequest instance
+ */
+ AppendRowsRequest.create = function create(properties) {
+ return new AppendRowsRequest(properties);
+ };
+
+ /**
+ * Encodes the specified AppendRowsRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.AppendRowsRequest.verify|verify} messages.
+ * @function encode
+ * @memberof google.cloud.bigquery.storage.v1beta2.AppendRowsRequest
+ * @static
+ * @param {google.cloud.bigquery.storage.v1beta2.IAppendRowsRequest} message AppendRowsRequest message or plain object to encode
+ * @param {$protobuf.Writer} [writer] Writer to encode to
+ * @returns {$protobuf.Writer} Writer
+ */
+ AppendRowsRequest.encode = function encode(message, writer) {
+ if (!writer)
+ writer = $Writer.create();
+ if (message.writeStream != null && Object.hasOwnProperty.call(message, "writeStream"))
+ writer.uint32(/* id 1, wireType 2 =*/10).string(message.writeStream);
+ if (message.offset != null && Object.hasOwnProperty.call(message, "offset"))
+ $root.google.protobuf.Int64Value.encode(message.offset, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim();
+ if (message.protoRows != null && Object.hasOwnProperty.call(message, "protoRows"))
+ $root.google.cloud.bigquery.storage.v1beta2.AppendRowsRequest.ProtoData.encode(message.protoRows, writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim();
+ if (message.traceId != null && Object.hasOwnProperty.call(message, "traceId"))
+ writer.uint32(/* id 6, wireType 2 =*/50).string(message.traceId);
+ return writer;
+ };
+
+ /**
+ * Encodes the specified AppendRowsRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.AppendRowsRequest.verify|verify} messages.
+ * @function encodeDelimited
+ * @memberof google.cloud.bigquery.storage.v1beta2.AppendRowsRequest
+ * @static
+ * @param {google.cloud.bigquery.storage.v1beta2.IAppendRowsRequest} message AppendRowsRequest message or plain object to encode
+ * @param {$protobuf.Writer} [writer] Writer to encode to
+ * @returns {$protobuf.Writer} Writer
+ */
+ AppendRowsRequest.encodeDelimited = function encodeDelimited(message, writer) {
+ return this.encode(message, writer).ldelim();
+ };
+
+ /**
+ * Decodes an AppendRowsRequest message from the specified reader or buffer.
+ * @function decode
+ * @memberof google.cloud.bigquery.storage.v1beta2.AppendRowsRequest
+ * @static
+ * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from
+ * @param {number} [length] Message length if known beforehand
+ * @returns {google.cloud.bigquery.storage.v1beta2.AppendRowsRequest} AppendRowsRequest
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ AppendRowsRequest.decode = function decode(reader, length, error, long) {
+ if (!(reader instanceof $Reader))
+ reader = $Reader.create(reader);
+ if (long === undefined)
+ long = 0;
+ if (long > $Reader.recursionLimit)
+ throw Error("maximum nesting depth exceeded");
+ var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta2.AppendRowsRequest();
+ while (reader.pos < end) {
+ var tag = reader.uint32();
+ if (tag === error)
+ break;
+ switch (tag >>> 3) {
+ case 1: {
+ message.writeStream = reader.string();
+ break;
+ }
+ case 2: {
+ message.offset = $root.google.protobuf.Int64Value.decode(reader, reader.uint32(), undefined, long + 1);
+ break;
+ }
+ case 4: {
+ message.protoRows = $root.google.cloud.bigquery.storage.v1beta2.AppendRowsRequest.ProtoData.decode(reader, reader.uint32(), undefined, long + 1);
+ break;
+ }
+ case 6: {
+ message.traceId = reader.string();
+ break;
+ }
+ default:
+ reader.skipType(tag & 7, long);
+ break;
+ }
+ }
+ return message;
+ };
+
+ /**
+ * Decodes an AppendRowsRequest message from the specified reader or buffer, length delimited.
+ * @function decodeDelimited
+ * @memberof google.cloud.bigquery.storage.v1beta2.AppendRowsRequest
+ * @static
+ * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from
+ * @returns {google.cloud.bigquery.storage.v1beta2.AppendRowsRequest} AppendRowsRequest
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ AppendRowsRequest.decodeDelimited = function decodeDelimited(reader) {
+ if (!(reader instanceof $Reader))
+ reader = new $Reader(reader);
+ return this.decode(reader, reader.uint32());
+ };
+
+ /**
+ * Verifies an AppendRowsRequest message.
+ * @function verify
+ * @memberof google.cloud.bigquery.storage.v1beta2.AppendRowsRequest
+ * @static
+ * @param {Object.} message Plain object to verify
+ * @returns {string|null} `null` if valid, otherwise the reason why it is not
+ */
+ AppendRowsRequest.verify = function verify(message, long) {
+ if (typeof message !== "object" || message === null)
+ return "object expected";
+ if (long === undefined)
+ long = 0;
+ if (long > $util.recursionLimit)
+ return "maximum nesting depth exceeded";
+ var properties = {};
+ if (message.writeStream != null && message.hasOwnProperty("writeStream"))
+ if (!$util.isString(message.writeStream))
+ return "writeStream: string expected";
+ if (message.offset != null && message.hasOwnProperty("offset")) {
+ var error = $root.google.protobuf.Int64Value.verify(message.offset, long + 1);
+ if (error)
+ return "offset." + error;
+ }
+ if (message.protoRows != null && message.hasOwnProperty("protoRows")) {
+ properties.rows = 1;
+ {
+ var error = $root.google.cloud.bigquery.storage.v1beta2.AppendRowsRequest.ProtoData.verify(message.protoRows, long + 1);
+ if (error)
+ return "protoRows." + error;
+ }
+ }
+ if (message.traceId != null && message.hasOwnProperty("traceId"))
+ if (!$util.isString(message.traceId))
+ return "traceId: string expected";
+ return null;
+ };
+
+ /**
+ * Creates an AppendRowsRequest message from a plain object. Also converts values to their respective internal types.
+ * @function fromObject
+ * @memberof google.cloud.bigquery.storage.v1beta2.AppendRowsRequest
+ * @static
+ * @param {Object.} object Plain object
+ * @returns {google.cloud.bigquery.storage.v1beta2.AppendRowsRequest} AppendRowsRequest
+ */
+ AppendRowsRequest.fromObject = function fromObject(object, long) {
+ if (object instanceof $root.google.cloud.bigquery.storage.v1beta2.AppendRowsRequest)
+ return object;
+ if (long === undefined)
+ long = 0;
+ if (long > $util.recursionLimit)
+ throw Error("maximum nesting depth exceeded");
+ var message = new $root.google.cloud.bigquery.storage.v1beta2.AppendRowsRequest();
+ if (object.writeStream != null)
+ message.writeStream = String(object.writeStream);
+ if (object.offset != null) {
+ if (typeof object.offset !== "object")
+ throw TypeError(".google.cloud.bigquery.storage.v1beta2.AppendRowsRequest.offset: object expected");
+ message.offset = $root.google.protobuf.Int64Value.fromObject(object.offset, long + 1);
+ }
+ if (object.protoRows != null) {
+ if (typeof object.protoRows !== "object")
+ throw TypeError(".google.cloud.bigquery.storage.v1beta2.AppendRowsRequest.protoRows: object expected");
+ message.protoRows = $root.google.cloud.bigquery.storage.v1beta2.AppendRowsRequest.ProtoData.fromObject(object.protoRows, long + 1);
+ }
+ if (object.traceId != null)
+ message.traceId = String(object.traceId);
+ return message;
+ };
+
+ /**
+ * Creates a plain object from an AppendRowsRequest message. Also converts values to other types if specified.
+ * @function toObject
+ * @memberof google.cloud.bigquery.storage.v1beta2.AppendRowsRequest
+ * @static
+ * @param {google.cloud.bigquery.storage.v1beta2.AppendRowsRequest} message AppendRowsRequest
+ * @param {$protobuf.IConversionOptions} [options] Conversion options
+ * @returns {Object.} Plain object
+ */
+ AppendRowsRequest.toObject = function toObject(message, options) {
+ if (!options)
+ options = {};
+ var object = {};
+ if (options.defaults) {
+ object.writeStream = "";
+ object.offset = null;
+ object.traceId = "";
+ }
+ if (message.writeStream != null && message.hasOwnProperty("writeStream"))
+ object.writeStream = message.writeStream;
+ if (message.offset != null && message.hasOwnProperty("offset"))
+ object.offset = $root.google.protobuf.Int64Value.toObject(message.offset, options);
+ if (message.protoRows != null && message.hasOwnProperty("protoRows")) {
+ object.protoRows = $root.google.cloud.bigquery.storage.v1beta2.AppendRowsRequest.ProtoData.toObject(message.protoRows, options);
+ if (options.oneofs)
+ object.rows = "protoRows";
+ }
+ if (message.traceId != null && message.hasOwnProperty("traceId"))
+ object.traceId = message.traceId;
+ return object;
+ };
+
+ /**
+ * Converts this AppendRowsRequest to JSON.
+ * @function toJSON
+ * @memberof google.cloud.bigquery.storage.v1beta2.AppendRowsRequest
+ * @instance
+ * @returns {Object.} JSON object
+ */
+ AppendRowsRequest.prototype.toJSON = function toJSON() {
+ return this.constructor.toObject(this, $protobuf.util.toJSONOptions);
+ };
+
+ /**
+ * Gets the default type url for AppendRowsRequest
+ * @function getTypeUrl
+ * @memberof google.cloud.bigquery.storage.v1beta2.AppendRowsRequest
+ * @static
+ * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com")
+ * @returns {string} The default type url
+ */
+ AppendRowsRequest.getTypeUrl = function getTypeUrl(typeUrlPrefix) {
+ if (typeUrlPrefix === undefined) {
+ typeUrlPrefix = "type.googleapis.com";
+ }
+ return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta2.AppendRowsRequest";
+ };
+
+ AppendRowsRequest.ProtoData = (function() {
+
+ /**
+ * Properties of a ProtoData.
+ * @memberof google.cloud.bigquery.storage.v1beta2.AppendRowsRequest
+ * @interface IProtoData
+ * @property {google.cloud.bigquery.storage.v1beta2.IProtoSchema|null} [writerSchema] ProtoData writerSchema
+ * @property {google.cloud.bigquery.storage.v1beta2.IProtoRows|null} [rows] ProtoData rows
+ */
+
+ /**
+ * Constructs a new ProtoData.
+ * @memberof google.cloud.bigquery.storage.v1beta2.AppendRowsRequest
+ * @classdesc Represents a ProtoData.
+ * @implements IProtoData
+ * @constructor
+ * @param {google.cloud.bigquery.storage.v1beta2.AppendRowsRequest.IProtoData=} [properties] Properties to set
+ */
+ function ProtoData(properties) {
+ if (properties)
+ for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i)
+ if (properties[keys[i]] != null && keys[i] !== "__proto__")
+ this[keys[i]] = properties[keys[i]];
+ }
+
+ /**
+ * ProtoData writerSchema.
+ * @member {google.cloud.bigquery.storage.v1beta2.IProtoSchema|null|undefined} writerSchema
+ * @memberof google.cloud.bigquery.storage.v1beta2.AppendRowsRequest.ProtoData
+ * @instance
+ */
+ ProtoData.prototype.writerSchema = null;
+
+ /**
+ * ProtoData rows.
+ * @member {google.cloud.bigquery.storage.v1beta2.IProtoRows|null|undefined} rows
+ * @memberof google.cloud.bigquery.storage.v1beta2.AppendRowsRequest.ProtoData
+ * @instance
+ */
+ ProtoData.prototype.rows = null;
+
+ /**
+ * Creates a new ProtoData instance using the specified properties.
+ * @function create
+ * @memberof google.cloud.bigquery.storage.v1beta2.AppendRowsRequest.ProtoData
+ * @static
+ * @param {google.cloud.bigquery.storage.v1beta2.AppendRowsRequest.IProtoData=} [properties] Properties to set
+ * @returns {google.cloud.bigquery.storage.v1beta2.AppendRowsRequest.ProtoData} ProtoData instance
+ */
+ ProtoData.create = function create(properties) {
+ return new ProtoData(properties);
+ };
+
+ /**
+ * Encodes the specified ProtoData message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.AppendRowsRequest.ProtoData.verify|verify} messages.
+ * @function encode
+ * @memberof google.cloud.bigquery.storage.v1beta2.AppendRowsRequest.ProtoData
+ * @static
+ * @param {google.cloud.bigquery.storage.v1beta2.AppendRowsRequest.IProtoData} message ProtoData message or plain object to encode
+ * @param {$protobuf.Writer} [writer] Writer to encode to
+ * @returns {$protobuf.Writer} Writer
+ */
+ ProtoData.encode = function encode(message, writer) {
+ if (!writer)
+ writer = $Writer.create();
+ if (message.writerSchema != null && Object.hasOwnProperty.call(message, "writerSchema"))
+ $root.google.cloud.bigquery.storage.v1beta2.ProtoSchema.encode(message.writerSchema, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim();
+ if (message.rows != null && Object.hasOwnProperty.call(message, "rows"))
+ $root.google.cloud.bigquery.storage.v1beta2.ProtoRows.encode(message.rows, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim();
+ return writer;
+ };
+
+ /**
+ * Encodes the specified ProtoData message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.AppendRowsRequest.ProtoData.verify|verify} messages.
+ * @function encodeDelimited
+ * @memberof google.cloud.bigquery.storage.v1beta2.AppendRowsRequest.ProtoData
+ * @static
+ * @param {google.cloud.bigquery.storage.v1beta2.AppendRowsRequest.IProtoData} message ProtoData message or plain object to encode
+ * @param {$protobuf.Writer} [writer] Writer to encode to
+ * @returns {$protobuf.Writer} Writer
+ */
+ ProtoData.encodeDelimited = function encodeDelimited(message, writer) {
+ return this.encode(message, writer).ldelim();
+ };
+
+ /**
+ * Decodes a ProtoData message from the specified reader or buffer.
+ * @function decode
+ * @memberof google.cloud.bigquery.storage.v1beta2.AppendRowsRequest.ProtoData
+ * @static
+ * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from
+ * @param {number} [length] Message length if known beforehand
+ * @returns {google.cloud.bigquery.storage.v1beta2.AppendRowsRequest.ProtoData} ProtoData
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ ProtoData.decode = function decode(reader, length, error, long) {
+ if (!(reader instanceof $Reader))
+ reader = $Reader.create(reader);
+ if (long === undefined)
+ long = 0;
+ if (long > $Reader.recursionLimit)
+ throw Error("maximum nesting depth exceeded");
+ var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta2.AppendRowsRequest.ProtoData();
+ while (reader.pos < end) {
+ var tag = reader.uint32();
+ if (tag === error)
+ break;
+ switch (tag >>> 3) {
+ case 1: {
+ message.writerSchema = $root.google.cloud.bigquery.storage.v1beta2.ProtoSchema.decode(reader, reader.uint32(), undefined, long + 1);
+ break;
+ }
+ case 2: {
+ message.rows = $root.google.cloud.bigquery.storage.v1beta2.ProtoRows.decode(reader, reader.uint32(), undefined, long + 1);
+ break;
+ }
+ default:
+ reader.skipType(tag & 7, long);
+ break;
+ }
+ }
+ return message;
+ };
+
+ /**
+ * Decodes a ProtoData message from the specified reader or buffer, length delimited.
+ * @function decodeDelimited
+ * @memberof google.cloud.bigquery.storage.v1beta2.AppendRowsRequest.ProtoData
+ * @static
+ * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from
+ * @returns {google.cloud.bigquery.storage.v1beta2.AppendRowsRequest.ProtoData} ProtoData
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ ProtoData.decodeDelimited = function decodeDelimited(reader) {
+ if (!(reader instanceof $Reader))
+ reader = new $Reader(reader);
+ return this.decode(reader, reader.uint32());
+ };
+
+ /**
+ * Verifies a ProtoData message.
+ * @function verify
+ * @memberof google.cloud.bigquery.storage.v1beta2.AppendRowsRequest.ProtoData
+ * @static
+                                 * @param {Object.<string,*>} message Plain object to verify
+ * @returns {string|null} `null` if valid, otherwise the reason why it is not
+ */
+ ProtoData.verify = function verify(message, long) {
+ if (typeof message !== "object" || message === null)
+ return "object expected";
+ if (long === undefined)
+ long = 0;
+ if (long > $util.recursionLimit)
+ return "maximum nesting depth exceeded";
+ if (message.writerSchema != null && message.hasOwnProperty("writerSchema")) {
+ var error = $root.google.cloud.bigquery.storage.v1beta2.ProtoSchema.verify(message.writerSchema, long + 1);
+ if (error)
+ return "writerSchema." + error;
+ }
+ if (message.rows != null && message.hasOwnProperty("rows")) {
+ var error = $root.google.cloud.bigquery.storage.v1beta2.ProtoRows.verify(message.rows, long + 1);
+ if (error)
+ return "rows." + error;
+ }
+ return null;
+ };
+
+ /**
+ * Creates a ProtoData message from a plain object. Also converts values to their respective internal types.
+ * @function fromObject
+ * @memberof google.cloud.bigquery.storage.v1beta2.AppendRowsRequest.ProtoData
+ * @static
+                                 * @param {Object.<string,*>} object Plain object
+ * @returns {google.cloud.bigquery.storage.v1beta2.AppendRowsRequest.ProtoData} ProtoData
+ */
+ ProtoData.fromObject = function fromObject(object, long) {
+ if (object instanceof $root.google.cloud.bigquery.storage.v1beta2.AppendRowsRequest.ProtoData)
+ return object;
+ if (long === undefined)
+ long = 0;
+ if (long > $util.recursionLimit)
+ throw Error("maximum nesting depth exceeded");
+ var message = new $root.google.cloud.bigquery.storage.v1beta2.AppendRowsRequest.ProtoData();
+ if (object.writerSchema != null) {
+ if (typeof object.writerSchema !== "object")
+ throw TypeError(".google.cloud.bigquery.storage.v1beta2.AppendRowsRequest.ProtoData.writerSchema: object expected");
+ message.writerSchema = $root.google.cloud.bigquery.storage.v1beta2.ProtoSchema.fromObject(object.writerSchema, long + 1);
+ }
+ if (object.rows != null) {
+ if (typeof object.rows !== "object")
+ throw TypeError(".google.cloud.bigquery.storage.v1beta2.AppendRowsRequest.ProtoData.rows: object expected");
+ message.rows = $root.google.cloud.bigquery.storage.v1beta2.ProtoRows.fromObject(object.rows, long + 1);
+ }
+ return message;
+ };
+
+ /**
+ * Creates a plain object from a ProtoData message. Also converts values to other types if specified.
+ * @function toObject
+ * @memberof google.cloud.bigquery.storage.v1beta2.AppendRowsRequest.ProtoData
+ * @static
+ * @param {google.cloud.bigquery.storage.v1beta2.AppendRowsRequest.ProtoData} message ProtoData
+ * @param {$protobuf.IConversionOptions} [options] Conversion options
+                                 * @returns {Object.<string,*>} Plain object
+ */
+ ProtoData.toObject = function toObject(message, options) {
+ if (!options)
+ options = {};
+ var object = {};
+ if (options.defaults) {
+ object.writerSchema = null;
+ object.rows = null;
+ }
+ if (message.writerSchema != null && message.hasOwnProperty("writerSchema"))
+ object.writerSchema = $root.google.cloud.bigquery.storage.v1beta2.ProtoSchema.toObject(message.writerSchema, options);
+ if (message.rows != null && message.hasOwnProperty("rows"))
+ object.rows = $root.google.cloud.bigquery.storage.v1beta2.ProtoRows.toObject(message.rows, options);
+ return object;
+ };
+
+ /**
+ * Converts this ProtoData to JSON.
+ * @function toJSON
+ * @memberof google.cloud.bigquery.storage.v1beta2.AppendRowsRequest.ProtoData
+ * @instance
+                                 * @returns {Object.<string,*>} JSON object
+ */
+ ProtoData.prototype.toJSON = function toJSON() {
+ return this.constructor.toObject(this, $protobuf.util.toJSONOptions);
+ };
+
+ /**
+ * Gets the default type url for ProtoData
+ * @function getTypeUrl
+ * @memberof google.cloud.bigquery.storage.v1beta2.AppendRowsRequest.ProtoData
+ * @static
+ * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com")
+ * @returns {string} The default type url
+ */
+ ProtoData.getTypeUrl = function getTypeUrl(typeUrlPrefix) {
+ if (typeUrlPrefix === undefined) {
+ typeUrlPrefix = "type.googleapis.com";
+ }
+ return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta2.AppendRowsRequest.ProtoData";
+ };
+
+ return ProtoData;
+ })();
+
+ return AppendRowsRequest;
+ })();
+
+ v1beta2.AppendRowsResponse = (function() {
+
+ /**
+ * Properties of an AppendRowsResponse.
+ * @memberof google.cloud.bigquery.storage.v1beta2
+ * @interface IAppendRowsResponse
+ * @property {google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.IAppendResult|null} [appendResult] AppendRowsResponse appendResult
+ * @property {google.rpc.IStatus|null} [error] AppendRowsResponse error
+ * @property {google.cloud.bigquery.storage.v1beta2.ITableSchema|null} [updatedSchema] AppendRowsResponse updatedSchema
+ */
+
+ /**
+ * Constructs a new AppendRowsResponse.
+ * @memberof google.cloud.bigquery.storage.v1beta2
+ * @classdesc Represents an AppendRowsResponse.
+ * @implements IAppendRowsResponse
+ * @constructor
+ * @param {google.cloud.bigquery.storage.v1beta2.IAppendRowsResponse=} [properties] Properties to set
+ */
+ function AppendRowsResponse(properties) {
+ if (properties)
+ for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i)
+ if (properties[keys[i]] != null && keys[i] !== "__proto__")
+ this[keys[i]] = properties[keys[i]];
+ }
+
+ /**
+ * AppendRowsResponse appendResult.
+ * @member {google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.IAppendResult|null|undefined} appendResult
+ * @memberof google.cloud.bigquery.storage.v1beta2.AppendRowsResponse
+ * @instance
+ */
+ AppendRowsResponse.prototype.appendResult = null;
+
+ /**
+ * AppendRowsResponse error.
+ * @member {google.rpc.IStatus|null|undefined} error
+ * @memberof google.cloud.bigquery.storage.v1beta2.AppendRowsResponse
+ * @instance
+ */
+ AppendRowsResponse.prototype.error = null;
+
+ /**
+ * AppendRowsResponse updatedSchema.
+ * @member {google.cloud.bigquery.storage.v1beta2.ITableSchema|null|undefined} updatedSchema
+ * @memberof google.cloud.bigquery.storage.v1beta2.AppendRowsResponse
+ * @instance
+ */
+ AppendRowsResponse.prototype.updatedSchema = null;
+
+ // OneOf field names bound to virtual getters and setters
+ var $oneOfFields;
+
+ /**
+ * AppendRowsResponse response.
+ * @member {"appendResult"|"error"|undefined} response
+ * @memberof google.cloud.bigquery.storage.v1beta2.AppendRowsResponse
+ * @instance
+ */
+ Object.defineProperty(AppendRowsResponse.prototype, "response", {
+ get: $util.oneOfGetter($oneOfFields = ["appendResult", "error"]),
+ set: $util.oneOfSetter($oneOfFields)
+ });
+
+ /**
+ * Creates a new AppendRowsResponse instance using the specified properties.
+ * @function create
+ * @memberof google.cloud.bigquery.storage.v1beta2.AppendRowsResponse
+ * @static
+ * @param {google.cloud.bigquery.storage.v1beta2.IAppendRowsResponse=} [properties] Properties to set
+ * @returns {google.cloud.bigquery.storage.v1beta2.AppendRowsResponse} AppendRowsResponse instance
+ */
+ AppendRowsResponse.create = function create(properties) {
+ return new AppendRowsResponse(properties);
+ };
+
+ /**
+ * Encodes the specified AppendRowsResponse message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.verify|verify} messages.
+ * @function encode
+ * @memberof google.cloud.bigquery.storage.v1beta2.AppendRowsResponse
+ * @static
+ * @param {google.cloud.bigquery.storage.v1beta2.IAppendRowsResponse} message AppendRowsResponse message or plain object to encode
+ * @param {$protobuf.Writer} [writer] Writer to encode to
+ * @returns {$protobuf.Writer} Writer
+ */
+ AppendRowsResponse.encode = function encode(message, writer) {
+ if (!writer)
+ writer = $Writer.create();
+ if (message.appendResult != null && Object.hasOwnProperty.call(message, "appendResult"))
+ $root.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult.encode(message.appendResult, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim();
+ if (message.error != null && Object.hasOwnProperty.call(message, "error"))
+ $root.google.rpc.Status.encode(message.error, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim();
+ if (message.updatedSchema != null && Object.hasOwnProperty.call(message, "updatedSchema"))
+ $root.google.cloud.bigquery.storage.v1beta2.TableSchema.encode(message.updatedSchema, writer.uint32(/* id 3, wireType 2 =*/26).fork()).ldelim();
+ return writer;
+ };
+
+ /**
+ * Encodes the specified AppendRowsResponse message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.verify|verify} messages.
+ * @function encodeDelimited
+ * @memberof google.cloud.bigquery.storage.v1beta2.AppendRowsResponse
+ * @static
+ * @param {google.cloud.bigquery.storage.v1beta2.IAppendRowsResponse} message AppendRowsResponse message or plain object to encode
+ * @param {$protobuf.Writer} [writer] Writer to encode to
+ * @returns {$protobuf.Writer} Writer
+ */
+ AppendRowsResponse.encodeDelimited = function encodeDelimited(message, writer) {
+ return this.encode(message, writer).ldelim();
+ };
+
+ /**
+ * Decodes an AppendRowsResponse message from the specified reader or buffer.
+ * @function decode
+ * @memberof google.cloud.bigquery.storage.v1beta2.AppendRowsResponse
+ * @static
+ * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from
+ * @param {number} [length] Message length if known beforehand
+ * @returns {google.cloud.bigquery.storage.v1beta2.AppendRowsResponse} AppendRowsResponse
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ AppendRowsResponse.decode = function decode(reader, length, error, long) {
+ if (!(reader instanceof $Reader))
+ reader = $Reader.create(reader);
+ if (long === undefined)
+ long = 0;
+ if (long > $Reader.recursionLimit)
+ throw Error("maximum nesting depth exceeded");
+ var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse();
+ while (reader.pos < end) {
+ var tag = reader.uint32();
+ if (tag === error)
+ break;
+ switch (tag >>> 3) {
+ case 1: {
+ message.appendResult = $root.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult.decode(reader, reader.uint32(), undefined, long + 1);
+ break;
+ }
+ case 2: {
+ message.error = $root.google.rpc.Status.decode(reader, reader.uint32(), undefined, long + 1);
+ break;
+ }
+ case 3: {
+ message.updatedSchema = $root.google.cloud.bigquery.storage.v1beta2.TableSchema.decode(reader, reader.uint32(), undefined, long + 1);
+ break;
+ }
+ default:
+ reader.skipType(tag & 7, long);
+ break;
+ }
+ }
+ return message;
+ };
+
+ /**
+ * Decodes an AppendRowsResponse message from the specified reader or buffer, length delimited.
+ * @function decodeDelimited
+ * @memberof google.cloud.bigquery.storage.v1beta2.AppendRowsResponse
+ * @static
+ * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from
+ * @returns {google.cloud.bigquery.storage.v1beta2.AppendRowsResponse} AppendRowsResponse
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ AppendRowsResponse.decodeDelimited = function decodeDelimited(reader) {
+ if (!(reader instanceof $Reader))
+ reader = new $Reader(reader);
+ return this.decode(reader, reader.uint32());
+ };
+
+ /**
+ * Verifies an AppendRowsResponse message.
+ * @function verify
+ * @memberof google.cloud.bigquery.storage.v1beta2.AppendRowsResponse
+ * @static
+                             * @param {Object.<string,*>} message Plain object to verify
+ * @returns {string|null} `null` if valid, otherwise the reason why it is not
+ */
+ AppendRowsResponse.verify = function verify(message, long) {
+ if (typeof message !== "object" || message === null)
+ return "object expected";
+ if (long === undefined)
+ long = 0;
+ if (long > $util.recursionLimit)
+ return "maximum nesting depth exceeded";
+ var properties = {};
+ if (message.appendResult != null && message.hasOwnProperty("appendResult")) {
+ properties.response = 1;
+ {
+ var error = $root.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult.verify(message.appendResult, long + 1);
+ if (error)
+ return "appendResult." + error;
+ }
+ }
+ if (message.error != null && message.hasOwnProperty("error")) {
+ if (properties.response === 1)
+ return "response: multiple values";
+ properties.response = 1;
+ {
+ var error = $root.google.rpc.Status.verify(message.error, long + 1);
+ if (error)
+ return "error." + error;
+ }
+ }
+ if (message.updatedSchema != null && message.hasOwnProperty("updatedSchema")) {
+ var error = $root.google.cloud.bigquery.storage.v1beta2.TableSchema.verify(message.updatedSchema, long + 1);
+ if (error)
+ return "updatedSchema." + error;
+ }
+ return null;
+ };
+
+ /**
+ * Creates an AppendRowsResponse message from a plain object. Also converts values to their respective internal types.
+ * @function fromObject
+ * @memberof google.cloud.bigquery.storage.v1beta2.AppendRowsResponse
+ * @static
+                             * @param {Object.<string,*>} object Plain object
+ * @returns {google.cloud.bigquery.storage.v1beta2.AppendRowsResponse} AppendRowsResponse
+ */
+ AppendRowsResponse.fromObject = function fromObject(object, long) {
+ if (object instanceof $root.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse)
+ return object;
+ if (long === undefined)
+ long = 0;
+ if (long > $util.recursionLimit)
+ throw Error("maximum nesting depth exceeded");
+ var message = new $root.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse();
+ if (object.appendResult != null) {
+ if (typeof object.appendResult !== "object")
+ throw TypeError(".google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.appendResult: object expected");
+ message.appendResult = $root.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult.fromObject(object.appendResult, long + 1);
+ }
+ if (object.error != null) {
+ if (typeof object.error !== "object")
+ throw TypeError(".google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.error: object expected");
+ message.error = $root.google.rpc.Status.fromObject(object.error, long + 1);
+ }
+ if (object.updatedSchema != null) {
+ if (typeof object.updatedSchema !== "object")
+ throw TypeError(".google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.updatedSchema: object expected");
+ message.updatedSchema = $root.google.cloud.bigquery.storage.v1beta2.TableSchema.fromObject(object.updatedSchema, long + 1);
+ }
+ return message;
+ };
+
+ /**
+ * Creates a plain object from an AppendRowsResponse message. Also converts values to other types if specified.
+ * @function toObject
+ * @memberof google.cloud.bigquery.storage.v1beta2.AppendRowsResponse
+ * @static
+ * @param {google.cloud.bigquery.storage.v1beta2.AppendRowsResponse} message AppendRowsResponse
+ * @param {$protobuf.IConversionOptions} [options] Conversion options
+                             * @returns {Object.<string,*>} Plain object
+ */
+ AppendRowsResponse.toObject = function toObject(message, options) {
+ if (!options)
+ options = {};
+ var object = {};
+ if (options.defaults)
+ object.updatedSchema = null;
+ if (message.appendResult != null && message.hasOwnProperty("appendResult")) {
+ object.appendResult = $root.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult.toObject(message.appendResult, options);
+ if (options.oneofs)
+ object.response = "appendResult";
+ }
+ if (message.error != null && message.hasOwnProperty("error")) {
+ object.error = $root.google.rpc.Status.toObject(message.error, options);
+ if (options.oneofs)
+ object.response = "error";
+ }
+ if (message.updatedSchema != null && message.hasOwnProperty("updatedSchema"))
+ object.updatedSchema = $root.google.cloud.bigquery.storage.v1beta2.TableSchema.toObject(message.updatedSchema, options);
+ return object;
+ };
+
+ /**
+ * Converts this AppendRowsResponse to JSON.
+ * @function toJSON
+ * @memberof google.cloud.bigquery.storage.v1beta2.AppendRowsResponse
+ * @instance
+                             * @returns {Object.<string,*>} JSON object
+ */
+ AppendRowsResponse.prototype.toJSON = function toJSON() {
+ return this.constructor.toObject(this, $protobuf.util.toJSONOptions);
+ };
+
+ /**
+ * Gets the default type url for AppendRowsResponse
+ * @function getTypeUrl
+ * @memberof google.cloud.bigquery.storage.v1beta2.AppendRowsResponse
+ * @static
+ * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com")
+ * @returns {string} The default type url
+ */
+ AppendRowsResponse.getTypeUrl = function getTypeUrl(typeUrlPrefix) {
+ if (typeUrlPrefix === undefined) {
+ typeUrlPrefix = "type.googleapis.com";
+ }
+ return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta2.AppendRowsResponse";
+ };
+
+ AppendRowsResponse.AppendResult = (function() {
+
+ /**
+ * Properties of an AppendResult.
+ * @memberof google.cloud.bigquery.storage.v1beta2.AppendRowsResponse
+ * @interface IAppendResult
+ * @property {google.protobuf.IInt64Value|null} [offset] AppendResult offset
+ */
+
+ /**
+ * Constructs a new AppendResult.
+ * @memberof google.cloud.bigquery.storage.v1beta2.AppendRowsResponse
+ * @classdesc Represents an AppendResult.
+ * @implements IAppendResult
+ * @constructor
+ * @param {google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.IAppendResult=} [properties] Properties to set
+ */
+ function AppendResult(properties) {
+ if (properties)
+ for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i)
+ if (properties[keys[i]] != null && keys[i] !== "__proto__")
+ this[keys[i]] = properties[keys[i]];
+ }
+
+ /**
+ * AppendResult offset.
+ * @member {google.protobuf.IInt64Value|null|undefined} offset
+ * @memberof google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult
+ * @instance
+ */
+ AppendResult.prototype.offset = null;
+
+ /**
+ * Creates a new AppendResult instance using the specified properties.
+ * @function create
+ * @memberof google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult
+ * @static
+ * @param {google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.IAppendResult=} [properties] Properties to set
+ * @returns {google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult} AppendResult instance
+ */
+ AppendResult.create = function create(properties) {
+ return new AppendResult(properties);
+ };
+
+ /**
+ * Encodes the specified AppendResult message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult.verify|verify} messages.
+ * @function encode
+ * @memberof google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult
+ * @static
+ * @param {google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.IAppendResult} message AppendResult message or plain object to encode
+ * @param {$protobuf.Writer} [writer] Writer to encode to
+ * @returns {$protobuf.Writer} Writer
+ */
+ AppendResult.encode = function encode(message, writer) {
+ if (!writer)
+ writer = $Writer.create();
+ if (message.offset != null && Object.hasOwnProperty.call(message, "offset"))
+ $root.google.protobuf.Int64Value.encode(message.offset, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim();
+ return writer;
+ };
+
+ /**
+ * Encodes the specified AppendResult message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult.verify|verify} messages.
+ * @function encodeDelimited
+ * @memberof google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult
+ * @static
+ * @param {google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.IAppendResult} message AppendResult message or plain object to encode
+ * @param {$protobuf.Writer} [writer] Writer to encode to
+ * @returns {$protobuf.Writer} Writer
+ */
+ AppendResult.encodeDelimited = function encodeDelimited(message, writer) {
+ return this.encode(message, writer).ldelim();
+ };
+
+ /**
+ * Decodes an AppendResult message from the specified reader or buffer.
+ * @function decode
+ * @memberof google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult
+ * @static
+ * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from
+ * @param {number} [length] Message length if known beforehand
+ * @returns {google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult} AppendResult
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ AppendResult.decode = function decode(reader, length, error, long) {
+ if (!(reader instanceof $Reader))
+ reader = $Reader.create(reader);
+ if (long === undefined)
+ long = 0;
+ if (long > $Reader.recursionLimit)
+ throw Error("maximum nesting depth exceeded");
+ var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult();
+ while (reader.pos < end) {
+ var tag = reader.uint32();
+ if (tag === error)
+ break;
+ switch (tag >>> 3) {
+ case 1: {
+ message.offset = $root.google.protobuf.Int64Value.decode(reader, reader.uint32(), undefined, long + 1);
+ break;
+ }
+ default:
+ reader.skipType(tag & 7, long);
+ break;
+ }
+ }
+ return message;
+ };
+
+ /**
+ * Decodes an AppendResult message from the specified reader or buffer, length delimited.
+ * @function decodeDelimited
+ * @memberof google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult
+ * @static
+ * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from
+ * @returns {google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult} AppendResult
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ AppendResult.decodeDelimited = function decodeDelimited(reader) {
+ if (!(reader instanceof $Reader))
+ reader = new $Reader(reader);
+ return this.decode(reader, reader.uint32());
+ };
+
+ /**
+ * Verifies an AppendResult message.
+ * @function verify
+ * @memberof google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult
+ * @static
+                                 * @param {Object.<string,*>} message Plain object to verify
+ * @returns {string|null} `null` if valid, otherwise the reason why it is not
+ */
+ AppendResult.verify = function verify(message, long) {
+ if (typeof message !== "object" || message === null)
+ return "object expected";
+ if (long === undefined)
+ long = 0;
+ if (long > $util.recursionLimit)
+ return "maximum nesting depth exceeded";
+ if (message.offset != null && message.hasOwnProperty("offset")) {
+ var error = $root.google.protobuf.Int64Value.verify(message.offset, long + 1);
+ if (error)
+ return "offset." + error;
+ }
+ return null;
+ };
+
+ /**
+ * Creates an AppendResult message from a plain object. Also converts values to their respective internal types.
+ * @function fromObject
+ * @memberof google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult
+ * @static
+                                 * @param {Object.<string,*>} object Plain object
+ * @returns {google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult} AppendResult
+ */
+ AppendResult.fromObject = function fromObject(object, long) {
+ if (object instanceof $root.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult)
+ return object;
+ if (long === undefined)
+ long = 0;
+ if (long > $util.recursionLimit)
+ throw Error("maximum nesting depth exceeded");
+ var message = new $root.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult();
+ if (object.offset != null) {
+ if (typeof object.offset !== "object")
+ throw TypeError(".google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult.offset: object expected");
+ message.offset = $root.google.protobuf.Int64Value.fromObject(object.offset, long + 1);
+ }
+ return message;
+ };
+
+ /**
+ * Creates a plain object from an AppendResult message. Also converts values to other types if specified.
+ * @function toObject
+ * @memberof google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult
+ * @static
+ * @param {google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult} message AppendResult
+ * @param {$protobuf.IConversionOptions} [options] Conversion options
+                                 * @returns {Object.<string,*>} Plain object
+ */
+ AppendResult.toObject = function toObject(message, options) {
+ if (!options)
+ options = {};
+ var object = {};
+ if (options.defaults)
+ object.offset = null;
+ if (message.offset != null && message.hasOwnProperty("offset"))
+ object.offset = $root.google.protobuf.Int64Value.toObject(message.offset, options);
+ return object;
+ };
+
+ /**
+ * Converts this AppendResult to JSON.
+ * @function toJSON
+ * @memberof google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult
+ * @instance
+                                 * @returns {Object.<string,*>} JSON object
+ */
+ AppendResult.prototype.toJSON = function toJSON() {
+ return this.constructor.toObject(this, $protobuf.util.toJSONOptions);
+ };
+
+ /**
+ * Gets the default type url for AppendResult
+ * @function getTypeUrl
+ * @memberof google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult
+ * @static
+ * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com")
+ * @returns {string} The default type url
+ */
+ AppendResult.getTypeUrl = function getTypeUrl(typeUrlPrefix) {
+ if (typeUrlPrefix === undefined) {
+ typeUrlPrefix = "type.googleapis.com";
+ }
+ return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult";
+ };
+
+ return AppendResult;
+ })();
+
+ return AppendRowsResponse;
+ })();
+
+ v1beta2.GetWriteStreamRequest = (function() {
+
+ /**
+ * Properties of a GetWriteStreamRequest.
+ * @memberof google.cloud.bigquery.storage.v1beta2
+ * @interface IGetWriteStreamRequest
+ * @property {string|null} [name] GetWriteStreamRequest name
+ */
+
+ /**
+ * Constructs a new GetWriteStreamRequest.
+ * @memberof google.cloud.bigquery.storage.v1beta2
+ * @classdesc Represents a GetWriteStreamRequest.
+ * @implements IGetWriteStreamRequest
+ * @constructor
+ * @param {google.cloud.bigquery.storage.v1beta2.IGetWriteStreamRequest=} [properties] Properties to set
+ */
+ function GetWriteStreamRequest(properties) {
+ if (properties)
+ for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i)
+ if (properties[keys[i]] != null && keys[i] !== "__proto__")
+ this[keys[i]] = properties[keys[i]];
+ }
+
+ /**
+ * GetWriteStreamRequest name.
+ * @member {string} name
+ * @memberof google.cloud.bigquery.storage.v1beta2.GetWriteStreamRequest
+ * @instance
+ */
+ GetWriteStreamRequest.prototype.name = "";
+
+ /**
+ * Creates a new GetWriteStreamRequest instance using the specified properties.
+ * @function create
+ * @memberof google.cloud.bigquery.storage.v1beta2.GetWriteStreamRequest
+ * @static
+ * @param {google.cloud.bigquery.storage.v1beta2.IGetWriteStreamRequest=} [properties] Properties to set
+ * @returns {google.cloud.bigquery.storage.v1beta2.GetWriteStreamRequest} GetWriteStreamRequest instance
+ */
+ GetWriteStreamRequest.create = function create(properties) {
+ return new GetWriteStreamRequest(properties);
+ };
+
+ /**
+ * Encodes the specified GetWriteStreamRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.GetWriteStreamRequest.verify|verify} messages.
+ * @function encode
+ * @memberof google.cloud.bigquery.storage.v1beta2.GetWriteStreamRequest
+ * @static
+ * @param {google.cloud.bigquery.storage.v1beta2.IGetWriteStreamRequest} message GetWriteStreamRequest message or plain object to encode
+ * @param {$protobuf.Writer} [writer] Writer to encode to
+ * @returns {$protobuf.Writer} Writer
+ */
+ GetWriteStreamRequest.encode = function encode(message, writer) {
+ if (!writer)
+ writer = $Writer.create();
+ if (message.name != null && Object.hasOwnProperty.call(message, "name"))
+ writer.uint32(/* id 1, wireType 2 =*/10).string(message.name);
+ return writer;
+ };
+
+ /**
+ * Encodes the specified GetWriteStreamRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.GetWriteStreamRequest.verify|verify} messages.
+ * @function encodeDelimited
+ * @memberof google.cloud.bigquery.storage.v1beta2.GetWriteStreamRequest
+ * @static
+ * @param {google.cloud.bigquery.storage.v1beta2.IGetWriteStreamRequest} message GetWriteStreamRequest message or plain object to encode
+ * @param {$protobuf.Writer} [writer] Writer to encode to
+ * @returns {$protobuf.Writer} Writer
+ */
+ GetWriteStreamRequest.encodeDelimited = function encodeDelimited(message, writer) {
+ return this.encode(message, writer).ldelim();
+ };
+
+ /**
+ * Decodes a GetWriteStreamRequest message from the specified reader or buffer.
+ * @function decode
+ * @memberof google.cloud.bigquery.storage.v1beta2.GetWriteStreamRequest
+ * @static
+ * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from
+ * @param {number} [length] Message length if known beforehand
+ * @returns {google.cloud.bigquery.storage.v1beta2.GetWriteStreamRequest} GetWriteStreamRequest
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ GetWriteStreamRequest.decode = function decode(reader, length, error, long) {
+ if (!(reader instanceof $Reader))
+ reader = $Reader.create(reader);
+ if (long === undefined)
+ long = 0;
+ if (long > $Reader.recursionLimit)
+ throw Error("maximum nesting depth exceeded");
+ var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta2.GetWriteStreamRequest();
+ while (reader.pos < end) {
+ var tag = reader.uint32();
+ if (tag === error)
+ break;
+ switch (tag >>> 3) {
+ case 1: {
+ message.name = reader.string();
+ break;
+ }
+ default:
+ reader.skipType(tag & 7, long);
+ break;
+ }
+ }
+ return message;
+ };
+
+ /**
+ * Decodes a GetWriteStreamRequest message from the specified reader or buffer, length delimited.
+ * @function decodeDelimited
+ * @memberof google.cloud.bigquery.storage.v1beta2.GetWriteStreamRequest
+ * @static
+ * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from
+ * @returns {google.cloud.bigquery.storage.v1beta2.GetWriteStreamRequest} GetWriteStreamRequest
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ GetWriteStreamRequest.decodeDelimited = function decodeDelimited(reader) {
+ if (!(reader instanceof $Reader))
+ reader = new $Reader(reader);
+ // The payload is prefixed with its varint-encoded byte length.
+ return this.decode(reader, reader.uint32());
+ };
+
+ /**
+ * Verifies a GetWriteStreamRequest message.
+ * @function verify
+ * @memberof google.cloud.bigquery.storage.v1beta2.GetWriteStreamRequest
+ * @static
+ * @param {Object.<string,*>} message Plain object to verify
+ * @param {number} [long] Current recursion depth, checked against $util.recursionLimit
+ * @returns {string|null} `null` if valid, otherwise the reason why it is not
+ */
+ GetWriteStreamRequest.verify = function verify(message, long) {
+ if (typeof message !== "object" || message === null)
+ return "object expected";
+ if (long === undefined)
+ long = 0;
+ if (long > $util.recursionLimit)
+ return "maximum nesting depth exceeded";
+ if (message.name != null && message.hasOwnProperty("name"))
+ if (!$util.isString(message.name))
+ return "name: string expected";
+ return null;
+ };
+
+ /**
+ * Creates a GetWriteStreamRequest message from a plain object. Also converts values to their respective internal types.
+ * @function fromObject
+ * @memberof google.cloud.bigquery.storage.v1beta2.GetWriteStreamRequest
+ * @static
+ * @param {Object.<string,*>} object Plain object
+ * @param {number} [long] Current recursion depth, checked against $util.recursionLimit
+ * @returns {google.cloud.bigquery.storage.v1beta2.GetWriteStreamRequest} GetWriteStreamRequest
+ */
+ GetWriteStreamRequest.fromObject = function fromObject(object, long) {
+ // Already an instance: return it unchanged.
+ if (object instanceof $root.google.cloud.bigquery.storage.v1beta2.GetWriteStreamRequest)
+ return object;
+ if (long === undefined)
+ long = 0;
+ if (long > $util.recursionLimit)
+ throw Error("maximum nesting depth exceeded");
+ var message = new $root.google.cloud.bigquery.storage.v1beta2.GetWriteStreamRequest();
+ if (object.name != null)
+ message.name = String(object.name);
+ return message;
+ };
+
+ /**
+ * Creates a plain object from a GetWriteStreamRequest message. Also converts values to other types if specified.
+ * @function toObject
+ * @memberof google.cloud.bigquery.storage.v1beta2.GetWriteStreamRequest
+ * @static
+ * @param {google.cloud.bigquery.storage.v1beta2.GetWriteStreamRequest} message GetWriteStreamRequest
+ * @param {$protobuf.IConversionOptions} [options] Conversion options
+ * @returns {Object.<string,*>} Plain object
+ */
+ GetWriteStreamRequest.toObject = function toObject(message, options) {
+ if (!options)
+ options = {};
+ var object = {};
+ // options.defaults: emit default values for unset fields.
+ if (options.defaults)
+ object.name = "";
+ if (message.name != null && message.hasOwnProperty("name"))
+ object.name = message.name;
+ return object;
+ };
+
+ /**
+ * Converts this GetWriteStreamRequest to JSON.
+ * @function toJSON
+ * @memberof google.cloud.bigquery.storage.v1beta2.GetWriteStreamRequest
+ * @instance
+ * @returns {Object.<string,*>} JSON object
+ */
+ GetWriteStreamRequest.prototype.toJSON = function toJSON() {
+ // Delegates to toObject with the library-wide JSON conversion options.
+ return this.constructor.toObject(this, $protobuf.util.toJSONOptions);
+ };
+
+ /**
+ * Gets the default type url for GetWriteStreamRequest
+ * @function getTypeUrl
+ * @memberof google.cloud.bigquery.storage.v1beta2.GetWriteStreamRequest
+ * @static
+ * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com")
+ * @returns {string} The default type url
+ */
+ GetWriteStreamRequest.getTypeUrl = function getTypeUrl(typeUrlPrefix) {
+ if (typeUrlPrefix === undefined) {
+ typeUrlPrefix = "type.googleapis.com";
+ }
+ // Format matches google.protobuf.Any type_url: "<prefix>/<fully.qualified.Name>".
+ return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta2.GetWriteStreamRequest";
+ };
+
+ return GetWriteStreamRequest;
+ })();
+
+ // NOTE(review): protobufjs-generated message class — prefer regenerating from the .proto over hand edits.
+ v1beta2.BatchCommitWriteStreamsRequest = (function() {
+
+ /**
+ * Properties of a BatchCommitWriteStreamsRequest.
+ * @memberof google.cloud.bigquery.storage.v1beta2
+ * @interface IBatchCommitWriteStreamsRequest
+ * @property {string|null} [parent] BatchCommitWriteStreamsRequest parent
+ * @property {Array.<string>|null} [writeStreams] BatchCommitWriteStreamsRequest writeStreams
+ */
+
+ /**
+ * Constructs a new BatchCommitWriteStreamsRequest.
+ * @memberof google.cloud.bigquery.storage.v1beta2
+ * @classdesc Represents a BatchCommitWriteStreamsRequest.
+ * @implements IBatchCommitWriteStreamsRequest
+ * @constructor
+ * @param {google.cloud.bigquery.storage.v1beta2.IBatchCommitWriteStreamsRequest=} [properties] Properties to set
+ */
+ function BatchCommitWriteStreamsRequest(properties) {
+ this.writeStreams = [];
+ if (properties)
+ for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i)
+ if (properties[keys[i]] != null && keys[i] !== "__proto__")
+ this[keys[i]] = properties[keys[i]];
+ }
+
+ /**
+ * BatchCommitWriteStreamsRequest parent.
+ * @member {string} parent
+ * @memberof google.cloud.bigquery.storage.v1beta2.BatchCommitWriteStreamsRequest
+ * @instance
+ */
+ BatchCommitWriteStreamsRequest.prototype.parent = "";
+
+ /**
+ * BatchCommitWriteStreamsRequest writeStreams.
+ * @member {Array.<string>} writeStreams
+ * @memberof google.cloud.bigquery.storage.v1beta2.BatchCommitWriteStreamsRequest
+ * @instance
+ */
+ BatchCommitWriteStreamsRequest.prototype.writeStreams = $util.emptyArray;
+
+ /**
+ * Creates a new BatchCommitWriteStreamsRequest instance using the specified properties.
+ * @function create
+ * @memberof google.cloud.bigquery.storage.v1beta2.BatchCommitWriteStreamsRequest
+ * @static
+ * @param {google.cloud.bigquery.storage.v1beta2.IBatchCommitWriteStreamsRequest=} [properties] Properties to set
+ * @returns {google.cloud.bigquery.storage.v1beta2.BatchCommitWriteStreamsRequest} BatchCommitWriteStreamsRequest instance
+ */
+ BatchCommitWriteStreamsRequest.create = function create(properties) {
+ return new BatchCommitWriteStreamsRequest(properties);
+ };
+
+ /**
+ * Encodes the specified BatchCommitWriteStreamsRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.BatchCommitWriteStreamsRequest.verify|verify} messages.
+ * @function encode
+ * @memberof google.cloud.bigquery.storage.v1beta2.BatchCommitWriteStreamsRequest
+ * @static
+ * @param {google.cloud.bigquery.storage.v1beta2.IBatchCommitWriteStreamsRequest} message BatchCommitWriteStreamsRequest message or plain object to encode
+ * @param {$protobuf.Writer} [writer] Writer to encode to
+ * @returns {$protobuf.Writer} Writer
+ */
+ BatchCommitWriteStreamsRequest.encode = function encode(message, writer) {
+ if (!writer)
+ writer = $Writer.create();
+ if (message.parent != null && Object.hasOwnProperty.call(message, "parent"))
+ writer.uint32(/* id 1, wireType 2 =*/10).string(message.parent);
+ if (message.writeStreams != null && message.writeStreams.length)
+ for (var i = 0; i < message.writeStreams.length; ++i)
+ writer.uint32(/* id 2, wireType 2 =*/18).string(message.writeStreams[i]);
+ return writer;
+ };
+
+ /**
+ * Encodes the specified BatchCommitWriteStreamsRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.BatchCommitWriteStreamsRequest.verify|verify} messages.
+ * @function encodeDelimited
+ * @memberof google.cloud.bigquery.storage.v1beta2.BatchCommitWriteStreamsRequest
+ * @static
+ * @param {google.cloud.bigquery.storage.v1beta2.IBatchCommitWriteStreamsRequest} message BatchCommitWriteStreamsRequest message or plain object to encode
+ * @param {$protobuf.Writer} [writer] Writer to encode to
+ * @returns {$protobuf.Writer} Writer
+ */
+ BatchCommitWriteStreamsRequest.encodeDelimited = function encodeDelimited(message, writer) {
+ return this.encode(message, writer).ldelim();
+ };
+
+ /**
+ * Decodes a BatchCommitWriteStreamsRequest message from the specified reader or buffer.
+ * @function decode
+ * @memberof google.cloud.bigquery.storage.v1beta2.BatchCommitWriteStreamsRequest
+ * @static
+ * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from
+ * @param {number} [length] Message length if known beforehand
+ * @param {number} [error] Sentinel tag value; decoding stops early when this tag is read
+ * @param {number} [long] Current recursion depth, checked against $Reader.recursionLimit
+ * @returns {google.cloud.bigquery.storage.v1beta2.BatchCommitWriteStreamsRequest} BatchCommitWriteStreamsRequest
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ BatchCommitWriteStreamsRequest.decode = function decode(reader, length, error, long) {
+ if (!(reader instanceof $Reader))
+ reader = $Reader.create(reader);
+ if (long === undefined)
+ long = 0;
+ if (long > $Reader.recursionLimit)
+ throw Error("maximum nesting depth exceeded");
+ var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta2.BatchCommitWriteStreamsRequest();
+ while (reader.pos < end) {
+ var tag = reader.uint32();
+ // Optional early-exit sentinel supplied by the caller.
+ if (tag === error)
+ break;
+ switch (tag >>> 3) {
+ case 1: {
+ message.parent = reader.string();
+ break;
+ }
+ case 2: {
+ if (!(message.writeStreams && message.writeStreams.length))
+ message.writeStreams = [];
+ message.writeStreams.push(reader.string());
+ break;
+ }
+ default:
+ reader.skipType(tag & 7, long);
+ break;
+ }
+ }
+ return message;
+ };
+
+ /**
+ * Decodes a BatchCommitWriteStreamsRequest message from the specified reader or buffer, length delimited.
+ * @function decodeDelimited
+ * @memberof google.cloud.bigquery.storage.v1beta2.BatchCommitWriteStreamsRequest
+ * @static
+ * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from
+ * @returns {google.cloud.bigquery.storage.v1beta2.BatchCommitWriteStreamsRequest} BatchCommitWriteStreamsRequest
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ BatchCommitWriteStreamsRequest.decodeDelimited = function decodeDelimited(reader) {
+ if (!(reader instanceof $Reader))
+ reader = new $Reader(reader);
+ return this.decode(reader, reader.uint32());
+ };
+
+ /**
+ * Verifies a BatchCommitWriteStreamsRequest message.
+ * @function verify
+ * @memberof google.cloud.bigquery.storage.v1beta2.BatchCommitWriteStreamsRequest
+ * @static
+ * @param {Object.<string,*>} message Plain object to verify
+ * @param {number} [long] Current recursion depth, checked against $util.recursionLimit
+ * @returns {string|null} `null` if valid, otherwise the reason why it is not
+ */
+ BatchCommitWriteStreamsRequest.verify = function verify(message, long) {
+ if (typeof message !== "object" || message === null)
+ return "object expected";
+ if (long === undefined)
+ long = 0;
+ if (long > $util.recursionLimit)
+ return "maximum nesting depth exceeded";
+ if (message.parent != null && message.hasOwnProperty("parent"))
+ if (!$util.isString(message.parent))
+ return "parent: string expected";
+ if (message.writeStreams != null && message.hasOwnProperty("writeStreams")) {
+ if (!Array.isArray(message.writeStreams))
+ return "writeStreams: array expected";
+ for (var i = 0; i < message.writeStreams.length; ++i)
+ if (!$util.isString(message.writeStreams[i]))
+ return "writeStreams: string[] expected";
+ }
+ return null;
+ };
+
+ /**
+ * Creates a BatchCommitWriteStreamsRequest message from a plain object. Also converts values to their respective internal types.
+ * @function fromObject
+ * @memberof google.cloud.bigquery.storage.v1beta2.BatchCommitWriteStreamsRequest
+ * @static
+ * @param {Object.<string,*>} object Plain object
+ * @param {number} [long] Current recursion depth, checked against $util.recursionLimit
+ * @returns {google.cloud.bigquery.storage.v1beta2.BatchCommitWriteStreamsRequest} BatchCommitWriteStreamsRequest
+ */
+ BatchCommitWriteStreamsRequest.fromObject = function fromObject(object, long) {
+ if (object instanceof $root.google.cloud.bigquery.storage.v1beta2.BatchCommitWriteStreamsRequest)
+ return object;
+ if (long === undefined)
+ long = 0;
+ if (long > $util.recursionLimit)
+ throw Error("maximum nesting depth exceeded");
+ var message = new $root.google.cloud.bigquery.storage.v1beta2.BatchCommitWriteStreamsRequest();
+ if (object.parent != null)
+ message.parent = String(object.parent);
+ if (object.writeStreams) {
+ if (!Array.isArray(object.writeStreams))
+ throw TypeError(".google.cloud.bigquery.storage.v1beta2.BatchCommitWriteStreamsRequest.writeStreams: array expected");
+ message.writeStreams = [];
+ for (var i = 0; i < object.writeStreams.length; ++i)
+ message.writeStreams[i] = String(object.writeStreams[i]);
+ }
+ return message;
+ };
+
+ /**
+ * Creates a plain object from a BatchCommitWriteStreamsRequest message. Also converts values to other types if specified.
+ * @function toObject
+ * @memberof google.cloud.bigquery.storage.v1beta2.BatchCommitWriteStreamsRequest
+ * @static
+ * @param {google.cloud.bigquery.storage.v1beta2.BatchCommitWriteStreamsRequest} message BatchCommitWriteStreamsRequest
+ * @param {$protobuf.IConversionOptions} [options] Conversion options
+ * @returns {Object.<string,*>} Plain object
+ */
+ BatchCommitWriteStreamsRequest.toObject = function toObject(message, options) {
+ if (!options)
+ options = {};
+ var object = {};
+ if (options.arrays || options.defaults)
+ object.writeStreams = [];
+ if (options.defaults)
+ object.parent = "";
+ if (message.parent != null && message.hasOwnProperty("parent"))
+ object.parent = message.parent;
+ if (message.writeStreams && message.writeStreams.length) {
+ object.writeStreams = [];
+ for (var j = 0; j < message.writeStreams.length; ++j)
+ object.writeStreams[j] = message.writeStreams[j];
+ }
+ return object;
+ };
+
+ /**
+ * Converts this BatchCommitWriteStreamsRequest to JSON.
+ * @function toJSON
+ * @memberof google.cloud.bigquery.storage.v1beta2.BatchCommitWriteStreamsRequest
+ * @instance
+ * @returns {Object.<string,*>} JSON object
+ */
+ BatchCommitWriteStreamsRequest.prototype.toJSON = function toJSON() {
+ return this.constructor.toObject(this, $protobuf.util.toJSONOptions);
+ };
+
+ /**
+ * Gets the default type url for BatchCommitWriteStreamsRequest
+ * @function getTypeUrl
+ * @memberof google.cloud.bigquery.storage.v1beta2.BatchCommitWriteStreamsRequest
+ * @static
+ * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com")
+ * @returns {string} The default type url
+ */
+ BatchCommitWriteStreamsRequest.getTypeUrl = function getTypeUrl(typeUrlPrefix) {
+ if (typeUrlPrefix === undefined) {
+ typeUrlPrefix = "type.googleapis.com";
+ }
+ return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta2.BatchCommitWriteStreamsRequest";
+ };
+
+ return BatchCommitWriteStreamsRequest;
+ })();
+
+ // NOTE(review): protobufjs-generated message class — prefer regenerating from the .proto over hand edits.
+ v1beta2.BatchCommitWriteStreamsResponse = (function() {
+
+ /**
+ * Properties of a BatchCommitWriteStreamsResponse.
+ * @memberof google.cloud.bigquery.storage.v1beta2
+ * @interface IBatchCommitWriteStreamsResponse
+ * @property {google.protobuf.ITimestamp|null} [commitTime] BatchCommitWriteStreamsResponse commitTime
+ * @property {Array.<google.cloud.bigquery.storage.v1beta2.IStorageError>|null} [streamErrors] BatchCommitWriteStreamsResponse streamErrors
+ */
+
+ /**
+ * Constructs a new BatchCommitWriteStreamsResponse.
+ * @memberof google.cloud.bigquery.storage.v1beta2
+ * @classdesc Represents a BatchCommitWriteStreamsResponse.
+ * @implements IBatchCommitWriteStreamsResponse
+ * @constructor
+ * @param {google.cloud.bigquery.storage.v1beta2.IBatchCommitWriteStreamsResponse=} [properties] Properties to set
+ */
+ function BatchCommitWriteStreamsResponse(properties) {
+ this.streamErrors = [];
+ if (properties)
+ for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i)
+ if (properties[keys[i]] != null && keys[i] !== "__proto__")
+ this[keys[i]] = properties[keys[i]];
+ }
+
+ /**
+ * BatchCommitWriteStreamsResponse commitTime.
+ * @member {google.protobuf.ITimestamp|null|undefined} commitTime
+ * @memberof google.cloud.bigquery.storage.v1beta2.BatchCommitWriteStreamsResponse
+ * @instance
+ */
+ BatchCommitWriteStreamsResponse.prototype.commitTime = null;
+
+ /**
+ * BatchCommitWriteStreamsResponse streamErrors.
+ * @member {Array.<google.cloud.bigquery.storage.v1beta2.IStorageError>} streamErrors
+ * @memberof google.cloud.bigquery.storage.v1beta2.BatchCommitWriteStreamsResponse
+ * @instance
+ */
+ BatchCommitWriteStreamsResponse.prototype.streamErrors = $util.emptyArray;
+
+ /**
+ * Creates a new BatchCommitWriteStreamsResponse instance using the specified properties.
+ * @function create
+ * @memberof google.cloud.bigquery.storage.v1beta2.BatchCommitWriteStreamsResponse
+ * @static
+ * @param {google.cloud.bigquery.storage.v1beta2.IBatchCommitWriteStreamsResponse=} [properties] Properties to set
+ * @returns {google.cloud.bigquery.storage.v1beta2.BatchCommitWriteStreamsResponse} BatchCommitWriteStreamsResponse instance
+ */
+ BatchCommitWriteStreamsResponse.create = function create(properties) {
+ return new BatchCommitWriteStreamsResponse(properties);
+ };
+
+ /**
+ * Encodes the specified BatchCommitWriteStreamsResponse message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.BatchCommitWriteStreamsResponse.verify|verify} messages.
+ * @function encode
+ * @memberof google.cloud.bigquery.storage.v1beta2.BatchCommitWriteStreamsResponse
+ * @static
+ * @param {google.cloud.bigquery.storage.v1beta2.IBatchCommitWriteStreamsResponse} message BatchCommitWriteStreamsResponse message or plain object to encode
+ * @param {$protobuf.Writer} [writer] Writer to encode to
+ * @returns {$protobuf.Writer} Writer
+ */
+ BatchCommitWriteStreamsResponse.encode = function encode(message, writer) {
+ if (!writer)
+ writer = $Writer.create();
+ if (message.commitTime != null && Object.hasOwnProperty.call(message, "commitTime"))
+ $root.google.protobuf.Timestamp.encode(message.commitTime, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim();
+ if (message.streamErrors != null && message.streamErrors.length)
+ for (var i = 0; i < message.streamErrors.length; ++i)
+ $root.google.cloud.bigquery.storage.v1beta2.StorageError.encode(message.streamErrors[i], writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim();
+ return writer;
+ };
+
+ /**
+ * Encodes the specified BatchCommitWriteStreamsResponse message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.BatchCommitWriteStreamsResponse.verify|verify} messages.
+ * @function encodeDelimited
+ * @memberof google.cloud.bigquery.storage.v1beta2.BatchCommitWriteStreamsResponse
+ * @static
+ * @param {google.cloud.bigquery.storage.v1beta2.IBatchCommitWriteStreamsResponse} message BatchCommitWriteStreamsResponse message or plain object to encode
+ * @param {$protobuf.Writer} [writer] Writer to encode to
+ * @returns {$protobuf.Writer} Writer
+ */
+ BatchCommitWriteStreamsResponse.encodeDelimited = function encodeDelimited(message, writer) {
+ return this.encode(message, writer).ldelim();
+ };
+
+ /**
+ * Decodes a BatchCommitWriteStreamsResponse message from the specified reader or buffer.
+ * @function decode
+ * @memberof google.cloud.bigquery.storage.v1beta2.BatchCommitWriteStreamsResponse
+ * @static
+ * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from
+ * @param {number} [length] Message length if known beforehand
+ * @param {number} [error] Sentinel tag value; decoding stops early when this tag is read
+ * @param {number} [long] Current recursion depth, checked against $Reader.recursionLimit
+ * @returns {google.cloud.bigquery.storage.v1beta2.BatchCommitWriteStreamsResponse} BatchCommitWriteStreamsResponse
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ BatchCommitWriteStreamsResponse.decode = function decode(reader, length, error, long) {
+ if (!(reader instanceof $Reader))
+ reader = $Reader.create(reader);
+ if (long === undefined)
+ long = 0;
+ if (long > $Reader.recursionLimit)
+ throw Error("maximum nesting depth exceeded");
+ var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta2.BatchCommitWriteStreamsResponse();
+ while (reader.pos < end) {
+ var tag = reader.uint32();
+ // Optional early-exit sentinel supplied by the caller.
+ if (tag === error)
+ break;
+ switch (tag >>> 3) {
+ case 1: {
+ // Nested message: recursion depth is bumped by one.
+ message.commitTime = $root.google.protobuf.Timestamp.decode(reader, reader.uint32(), undefined, long + 1);
+ break;
+ }
+ case 2: {
+ if (!(message.streamErrors && message.streamErrors.length))
+ message.streamErrors = [];
+ message.streamErrors.push($root.google.cloud.bigquery.storage.v1beta2.StorageError.decode(reader, reader.uint32(), undefined, long + 1));
+ break;
+ }
+ default:
+ reader.skipType(tag & 7, long);
+ break;
+ }
+ }
+ return message;
+ };
+
+ /**
+ * Decodes a BatchCommitWriteStreamsResponse message from the specified reader or buffer, length delimited.
+ * @function decodeDelimited
+ * @memberof google.cloud.bigquery.storage.v1beta2.BatchCommitWriteStreamsResponse
+ * @static
+ * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from
+ * @returns {google.cloud.bigquery.storage.v1beta2.BatchCommitWriteStreamsResponse} BatchCommitWriteStreamsResponse
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ BatchCommitWriteStreamsResponse.decodeDelimited = function decodeDelimited(reader) {
+ if (!(reader instanceof $Reader))
+ reader = new $Reader(reader);
+ return this.decode(reader, reader.uint32());
+ };
+
+ /**
+ * Verifies a BatchCommitWriteStreamsResponse message.
+ * @function verify
+ * @memberof google.cloud.bigquery.storage.v1beta2.BatchCommitWriteStreamsResponse
+ * @static
+ * @param {Object.<string,*>} message Plain object to verify
+ * @param {number} [long] Current recursion depth, checked against $util.recursionLimit
+ * @returns {string|null} `null` if valid, otherwise the reason why it is not
+ */
+ BatchCommitWriteStreamsResponse.verify = function verify(message, long) {
+ if (typeof message !== "object" || message === null)
+ return "object expected";
+ if (long === undefined)
+ long = 0;
+ if (long > $util.recursionLimit)
+ return "maximum nesting depth exceeded";
+ if (message.commitTime != null && message.hasOwnProperty("commitTime")) {
+ var error = $root.google.protobuf.Timestamp.verify(message.commitTime, long + 1);
+ if (error)
+ return "commitTime." + error;
+ }
+ if (message.streamErrors != null && message.hasOwnProperty("streamErrors")) {
+ if (!Array.isArray(message.streamErrors))
+ return "streamErrors: array expected";
+ for (var i = 0; i < message.streamErrors.length; ++i) {
+ var error = $root.google.cloud.bigquery.storage.v1beta2.StorageError.verify(message.streamErrors[i], long + 1);
+ if (error)
+ return "streamErrors." + error;
+ }
+ }
+ return null;
+ };
+
+ /**
+ * Creates a BatchCommitWriteStreamsResponse message from a plain object. Also converts values to their respective internal types.
+ * @function fromObject
+ * @memberof google.cloud.bigquery.storage.v1beta2.BatchCommitWriteStreamsResponse
+ * @static
+ * @param {Object.<string,*>} object Plain object
+ * @param {number} [long] Current recursion depth, checked against $util.recursionLimit
+ * @returns {google.cloud.bigquery.storage.v1beta2.BatchCommitWriteStreamsResponse} BatchCommitWriteStreamsResponse
+ */
+ BatchCommitWriteStreamsResponse.fromObject = function fromObject(object, long) {
+ if (object instanceof $root.google.cloud.bigquery.storage.v1beta2.BatchCommitWriteStreamsResponse)
+ return object;
+ if (long === undefined)
+ long = 0;
+ if (long > $util.recursionLimit)
+ throw Error("maximum nesting depth exceeded");
+ var message = new $root.google.cloud.bigquery.storage.v1beta2.BatchCommitWriteStreamsResponse();
+ if (object.commitTime != null) {
+ if (typeof object.commitTime !== "object")
+ throw TypeError(".google.cloud.bigquery.storage.v1beta2.BatchCommitWriteStreamsResponse.commitTime: object expected");
+ message.commitTime = $root.google.protobuf.Timestamp.fromObject(object.commitTime, long + 1);
+ }
+ if (object.streamErrors) {
+ if (!Array.isArray(object.streamErrors))
+ throw TypeError(".google.cloud.bigquery.storage.v1beta2.BatchCommitWriteStreamsResponse.streamErrors: array expected");
+ message.streamErrors = [];
+ for (var i = 0; i < object.streamErrors.length; ++i) {
+ if (typeof object.streamErrors[i] !== "object")
+ throw TypeError(".google.cloud.bigquery.storage.v1beta2.BatchCommitWriteStreamsResponse.streamErrors: object expected");
+ message.streamErrors[i] = $root.google.cloud.bigquery.storage.v1beta2.StorageError.fromObject(object.streamErrors[i], long + 1);
+ }
+ }
+ return message;
+ };
+
+ /**
+ * Creates a plain object from a BatchCommitWriteStreamsResponse message. Also converts values to other types if specified.
+ * @function toObject
+ * @memberof google.cloud.bigquery.storage.v1beta2.BatchCommitWriteStreamsResponse
+ * @static
+ * @param {google.cloud.bigquery.storage.v1beta2.BatchCommitWriteStreamsResponse} message BatchCommitWriteStreamsResponse
+ * @param {$protobuf.IConversionOptions} [options] Conversion options
+ * @returns {Object.<string,*>} Plain object
+ */
+ BatchCommitWriteStreamsResponse.toObject = function toObject(message, options) {
+ if (!options)
+ options = {};
+ var object = {};
+ if (options.arrays || options.defaults)
+ object.streamErrors = [];
+ if (options.defaults)
+ object.commitTime = null;
+ if (message.commitTime != null && message.hasOwnProperty("commitTime"))
+ object.commitTime = $root.google.protobuf.Timestamp.toObject(message.commitTime, options);
+ if (message.streamErrors && message.streamErrors.length) {
+ object.streamErrors = [];
+ for (var j = 0; j < message.streamErrors.length; ++j)
+ object.streamErrors[j] = $root.google.cloud.bigquery.storage.v1beta2.StorageError.toObject(message.streamErrors[j], options);
+ }
+ return object;
+ };
+
+ /**
+ * Converts this BatchCommitWriteStreamsResponse to JSON.
+ * @function toJSON
+ * @memberof google.cloud.bigquery.storage.v1beta2.BatchCommitWriteStreamsResponse
+ * @instance
+ * @returns {Object.<string,*>} JSON object
+ */
+ BatchCommitWriteStreamsResponse.prototype.toJSON = function toJSON() {
+ return this.constructor.toObject(this, $protobuf.util.toJSONOptions);
+ };
+
+ /**
+ * Gets the default type url for BatchCommitWriteStreamsResponse
+ * @function getTypeUrl
+ * @memberof google.cloud.bigquery.storage.v1beta2.BatchCommitWriteStreamsResponse
+ * @static
+ * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com")
+ * @returns {string} The default type url
+ */
+ BatchCommitWriteStreamsResponse.getTypeUrl = function getTypeUrl(typeUrlPrefix) {
+ if (typeUrlPrefix === undefined) {
+ typeUrlPrefix = "type.googleapis.com";
+ }
+ return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta2.BatchCommitWriteStreamsResponse";
+ };
+
+ return BatchCommitWriteStreamsResponse;
+ })();
+
+ v1beta2.FinalizeWriteStreamRequest = (function() {
+
+ /**
+ * Properties of a FinalizeWriteStreamRequest.
+ * @memberof google.cloud.bigquery.storage.v1beta2
+ * @interface IFinalizeWriteStreamRequest
+ * @property {string|null} [name] FinalizeWriteStreamRequest name
+ */
+
+ /**
+ * Constructs a new FinalizeWriteStreamRequest.
+ * @memberof google.cloud.bigquery.storage.v1beta2
+ * @classdesc Represents a FinalizeWriteStreamRequest.
+ * @implements IFinalizeWriteStreamRequest
+ * @constructor
+ * @param {google.cloud.bigquery.storage.v1beta2.IFinalizeWriteStreamRequest=} [properties] Properties to set
+ */
+ function FinalizeWriteStreamRequest(properties) {
+ // Copy own, non-null properties; "__proto__" is excluded to avoid prototype pollution.
+ if (properties)
+ for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i)
+ if (properties[keys[i]] != null && keys[i] !== "__proto__")
+ this[keys[i]] = properties[keys[i]];
+ }
+
+ /**
+ * FinalizeWriteStreamRequest name.
+ * @member {string} name
+ * @memberof google.cloud.bigquery.storage.v1beta2.FinalizeWriteStreamRequest
+ * @instance
+ */
+ FinalizeWriteStreamRequest.prototype.name = "";
+
+ /**
+ * Creates a new FinalizeWriteStreamRequest instance using the specified properties.
+ * @function create
+ * @memberof google.cloud.bigquery.storage.v1beta2.FinalizeWriteStreamRequest
+ * @static
+ * @param {google.cloud.bigquery.storage.v1beta2.IFinalizeWriteStreamRequest=} [properties] Properties to set
+ * @returns {google.cloud.bigquery.storage.v1beta2.FinalizeWriteStreamRequest} FinalizeWriteStreamRequest instance
+ */
+ FinalizeWriteStreamRequest.create = function create(properties) {
+ // Thin factory over the constructor.
+ return new FinalizeWriteStreamRequest(properties);
+ };
+
+ /**
+ * Encodes the specified FinalizeWriteStreamRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.FinalizeWriteStreamRequest.verify|verify} messages.
+ * @function encode
+ * @memberof google.cloud.bigquery.storage.v1beta2.FinalizeWriteStreamRequest
+ * @static
+ * @param {google.cloud.bigquery.storage.v1beta2.IFinalizeWriteStreamRequest} message FinalizeWriteStreamRequest message or plain object to encode
+ * @param {$protobuf.Writer} [writer] Writer to encode to
+ * @returns {$protobuf.Writer} Writer
+ */
+ FinalizeWriteStreamRequest.encode = function encode(message, writer) {
+ if (!writer)
+ writer = $Writer.create();
+ // Field 1 (name), wire type 2 (length-delimited): tag byte 10 = (1 << 3) | 2.
+ if (message.name != null && Object.hasOwnProperty.call(message, "name"))
+ writer.uint32(/* id 1, wireType 2 =*/10).string(message.name);
+ return writer;
+ };
+
+ /**
+ * Encodes the specified FinalizeWriteStreamRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.FinalizeWriteStreamRequest.verify|verify} messages.
+ * @function encodeDelimited
+ * @memberof google.cloud.bigquery.storage.v1beta2.FinalizeWriteStreamRequest
+ * @static
+ * @param {google.cloud.bigquery.storage.v1beta2.IFinalizeWriteStreamRequest} message FinalizeWriteStreamRequest message or plain object to encode
+ * @param {$protobuf.Writer} [writer] Writer to encode to
+ * @returns {$protobuf.Writer} Writer
+ */
+ FinalizeWriteStreamRequest.encodeDelimited = function encodeDelimited(message, writer) {
+ // Same as encode, but prefixes the message with its byte length.
+ return this.encode(message, writer).ldelim();
+ };
+
+ /**
+ * Decodes a FinalizeWriteStreamRequest message from the specified reader or buffer.
+ * @function decode
+ * @memberof google.cloud.bigquery.storage.v1beta2.FinalizeWriteStreamRequest
+ * @static
+ * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from
+ * @param {number} [length] Message length if known beforehand
+ * @param {number} [error] Sentinel tag value; decoding stops early when this tag is read
+ * @param {number} [long] Current recursion depth, checked against $Reader.recursionLimit
+ * @returns {google.cloud.bigquery.storage.v1beta2.FinalizeWriteStreamRequest} FinalizeWriteStreamRequest
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ FinalizeWriteStreamRequest.decode = function decode(reader, length, error, long) {
+ if (!(reader instanceof $Reader))
+ reader = $Reader.create(reader);
+ // Depth guard: refuse to decode past the reader's recursion limit.
+ if (long === undefined)
+ long = 0;
+ if (long > $Reader.recursionLimit)
+ throw Error("maximum nesting depth exceeded");
+ var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta2.FinalizeWriteStreamRequest();
+ while (reader.pos < end) {
+ var tag = reader.uint32();
+ // Optional early-exit sentinel supplied by the caller.
+ if (tag === error)
+ break;
+ switch (tag >>> 3) {
+ case 1: {
+ message.name = reader.string();
+ break;
+ }
+ default:
+ reader.skipType(tag & 7, long);
+ break;
+ }
+ }
+ return message;
+ };
+
+ /**
+ * Decodes a FinalizeWriteStreamRequest message from the specified reader or buffer, length delimited.
+ * @function decodeDelimited
+ * @memberof google.cloud.bigquery.storage.v1beta2.FinalizeWriteStreamRequest
+ * @static
+ * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from
+ * @returns {google.cloud.bigquery.storage.v1beta2.FinalizeWriteStreamRequest} FinalizeWriteStreamRequest
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ FinalizeWriteStreamRequest.decodeDelimited = function decodeDelimited(reader) {
+ if (!(reader instanceof $Reader))
+ reader = new $Reader(reader);
+ // The payload is prefixed with its varint-encoded byte length.
+ return this.decode(reader, reader.uint32());
+ };
+
+ /**
+ * Verifies a FinalizeWriteStreamRequest message.
+ * @function verify
+ * @memberof google.cloud.bigquery.storage.v1beta2.FinalizeWriteStreamRequest
+ * @static
+ * @param {Object.<string,*>} message Plain object to verify
+ * @param {number} [long] Current recursion depth, checked against $util.recursionLimit
+ * @returns {string|null} `null` if valid, otherwise the reason why it is not
+ */
+ FinalizeWriteStreamRequest.verify = function verify(message, long) {
+ if (typeof message !== "object" || message === null)
+ return "object expected";
+ if (long === undefined)
+ long = 0;
+ if (long > $util.recursionLimit)
+ return "maximum nesting depth exceeded";
+ if (message.name != null && message.hasOwnProperty("name"))
+ if (!$util.isString(message.name))
+ return "name: string expected";
+ return null;
+ };
+
+ /**
+ * Creates a FinalizeWriteStreamRequest message from a plain object. Also converts values to their respective internal types.
+ * @function fromObject
+ * @memberof google.cloud.bigquery.storage.v1beta2.FinalizeWriteStreamRequest
+ * @static
+ * @param {Object.<string,*>} object Plain object
+ * @param {number} [long] Current recursion depth, checked against $util.recursionLimit
+ * @returns {google.cloud.bigquery.storage.v1beta2.FinalizeWriteStreamRequest} FinalizeWriteStreamRequest
+ */
+ FinalizeWriteStreamRequest.fromObject = function fromObject(object, long) {
+ // Already an instance: return it unchanged.
+ if (object instanceof $root.google.cloud.bigquery.storage.v1beta2.FinalizeWriteStreamRequest)
+ return object;
+ if (long === undefined)
+ long = 0;
+ if (long > $util.recursionLimit)
+ throw Error("maximum nesting depth exceeded");
+ var message = new $root.google.cloud.bigquery.storage.v1beta2.FinalizeWriteStreamRequest();
+ if (object.name != null)
+ message.name = String(object.name);
+ return message;
+ };
+
+ /**
+ * Creates a plain object from a FinalizeWriteStreamRequest message. Also converts values to other types if specified.
+ * @function toObject
+ * @memberof google.cloud.bigquery.storage.v1beta2.FinalizeWriteStreamRequest
+ * @static
+ * @param {google.cloud.bigquery.storage.v1beta2.FinalizeWriteStreamRequest} message FinalizeWriteStreamRequest
+ * @param {$protobuf.IConversionOptions} [options] Conversion options
+ * @returns {Object.} Plain object
+ */
+ FinalizeWriteStreamRequest.toObject = function toObject(message, options) {
+ if (!options)
+ options = {};
+ var object = {};
+ if (options.defaults)
+ object.name = "";
+ if (message.name != null && message.hasOwnProperty("name"))
+ object.name = message.name;
+ return object;
+ };
+
+ /**
+ * Converts this FinalizeWriteStreamRequest to JSON.
+ * @function toJSON
+ * @memberof google.cloud.bigquery.storage.v1beta2.FinalizeWriteStreamRequest
+ * @instance
+ * @returns {Object.} JSON object
+ */
+ FinalizeWriteStreamRequest.prototype.toJSON = function toJSON() {
+ return this.constructor.toObject(this, $protobuf.util.toJSONOptions);
+ };
+
+ /**
+ * Gets the default type url for FinalizeWriteStreamRequest
+ * @function getTypeUrl
+ * @memberof google.cloud.bigquery.storage.v1beta2.FinalizeWriteStreamRequest
+ * @static
+ * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com")
+ * @returns {string} The default type url
+ */
+ FinalizeWriteStreamRequest.getTypeUrl = function getTypeUrl(typeUrlPrefix) {
+ if (typeUrlPrefix === undefined) {
+ typeUrlPrefix = "type.googleapis.com";
+ }
+ return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta2.FinalizeWriteStreamRequest";
+ };
+
+ return FinalizeWriteStreamRequest;
+ })();
+
+ v1beta2.FinalizeWriteStreamResponse = (function() {
+
+ /**
+ * Properties of a FinalizeWriteStreamResponse.
+ * @memberof google.cloud.bigquery.storage.v1beta2
+ * @interface IFinalizeWriteStreamResponse
+ * @property {number|Long|null} [rowCount] FinalizeWriteStreamResponse rowCount
+ */
+
+ /**
+ * Constructs a new FinalizeWriteStreamResponse.
+ * @memberof google.cloud.bigquery.storage.v1beta2
+ * @classdesc Represents a FinalizeWriteStreamResponse.
+ * @implements IFinalizeWriteStreamResponse
+ * @constructor
+ * @param {google.cloud.bigquery.storage.v1beta2.IFinalizeWriteStreamResponse=} [properties] Properties to set
+ */
+ function FinalizeWriteStreamResponse(properties) {
+ if (properties)
+ for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i)
+ if (properties[keys[i]] != null && keys[i] !== "__proto__")
+ this[keys[i]] = properties[keys[i]];
+ }
+
+ /**
+ * FinalizeWriteStreamResponse rowCount.
+ * @member {number|Long} rowCount
+ * @memberof google.cloud.bigquery.storage.v1beta2.FinalizeWriteStreamResponse
+ * @instance
+ */
+ FinalizeWriteStreamResponse.prototype.rowCount = $util.Long ? $util.Long.fromBits(0,0,false) : 0;
+
+ /**
+ * Creates a new FinalizeWriteStreamResponse instance using the specified properties.
+ * @function create
+ * @memberof google.cloud.bigquery.storage.v1beta2.FinalizeWriteStreamResponse
+ * @static
+ * @param {google.cloud.bigquery.storage.v1beta2.IFinalizeWriteStreamResponse=} [properties] Properties to set
+ * @returns {google.cloud.bigquery.storage.v1beta2.FinalizeWriteStreamResponse} FinalizeWriteStreamResponse instance
+ */
+ FinalizeWriteStreamResponse.create = function create(properties) {
+ return new FinalizeWriteStreamResponse(properties);
+ };
+
+ /**
+ * Encodes the specified FinalizeWriteStreamResponse message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.FinalizeWriteStreamResponse.verify|verify} messages.
+ * @function encode
+ * @memberof google.cloud.bigquery.storage.v1beta2.FinalizeWriteStreamResponse
+ * @static
+ * @param {google.cloud.bigquery.storage.v1beta2.IFinalizeWriteStreamResponse} message FinalizeWriteStreamResponse message or plain object to encode
+ * @param {$protobuf.Writer} [writer] Writer to encode to
+ * @returns {$protobuf.Writer} Writer
+ */
+ FinalizeWriteStreamResponse.encode = function encode(message, writer) {
+ if (!writer)
+ writer = $Writer.create();
+ if (message.rowCount != null && Object.hasOwnProperty.call(message, "rowCount"))
+ writer.uint32(/* id 1, wireType 0 =*/8).int64(message.rowCount);
+ return writer;
+ };
+
+ /**
+ * Encodes the specified FinalizeWriteStreamResponse message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.FinalizeWriteStreamResponse.verify|verify} messages.
+ * @function encodeDelimited
+ * @memberof google.cloud.bigquery.storage.v1beta2.FinalizeWriteStreamResponse
+ * @static
+ * @param {google.cloud.bigquery.storage.v1beta2.IFinalizeWriteStreamResponse} message FinalizeWriteStreamResponse message or plain object to encode
+ * @param {$protobuf.Writer} [writer] Writer to encode to
+ * @returns {$protobuf.Writer} Writer
+ */
+ FinalizeWriteStreamResponse.encodeDelimited = function encodeDelimited(message, writer) {
+ return this.encode(message, writer).ldelim();
+ };
+
+ /**
+ * Decodes a FinalizeWriteStreamResponse message from the specified reader or buffer.
+ * @function decode
+ * @memberof google.cloud.bigquery.storage.v1beta2.FinalizeWriteStreamResponse
+ * @static
+ * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from
+ * @param {number} [length] Message length if known beforehand
+ * @returns {google.cloud.bigquery.storage.v1beta2.FinalizeWriteStreamResponse} FinalizeWriteStreamResponse
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ FinalizeWriteStreamResponse.decode = function decode(reader, length, error, long) {
+ if (!(reader instanceof $Reader))
+ reader = $Reader.create(reader);
+ if (long === undefined)
+ long = 0;
+ if (long > $Reader.recursionLimit)
+ throw Error("maximum nesting depth exceeded");
+ var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta2.FinalizeWriteStreamResponse();
+ while (reader.pos < end) {
+ var tag = reader.uint32();
+ if (tag === error)
+ break;
+ switch (tag >>> 3) {
+ case 1: {
+ message.rowCount = reader.int64();
+ break;
+ }
+ default:
+ reader.skipType(tag & 7, long);
+ break;
+ }
+ }
+ return message;
+ };
+
+ /**
+ * Decodes a FinalizeWriteStreamResponse message from the specified reader or buffer, length delimited.
+ * @function decodeDelimited
+ * @memberof google.cloud.bigquery.storage.v1beta2.FinalizeWriteStreamResponse
+ * @static
+ * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from
+ * @returns {google.cloud.bigquery.storage.v1beta2.FinalizeWriteStreamResponse} FinalizeWriteStreamResponse
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ FinalizeWriteStreamResponse.decodeDelimited = function decodeDelimited(reader) {
+ if (!(reader instanceof $Reader))
+ reader = new $Reader(reader);
+ return this.decode(reader, reader.uint32());
+ };
+
+ /**
+ * Verifies a FinalizeWriteStreamResponse message.
+ * @function verify
+ * @memberof google.cloud.bigquery.storage.v1beta2.FinalizeWriteStreamResponse
+ * @static
+ * @param {Object.} message Plain object to verify
+ * @returns {string|null} `null` if valid, otherwise the reason why it is not
+ */
+ FinalizeWriteStreamResponse.verify = function verify(message, long) {
+ if (typeof message !== "object" || message === null)
+ return "object expected";
+ if (long === undefined)
+ long = 0;
+ if (long > $util.recursionLimit)
+ return "maximum nesting depth exceeded";
+ if (message.rowCount != null && message.hasOwnProperty("rowCount"))
+ if (!$util.isInteger(message.rowCount) && !(message.rowCount && $util.isInteger(message.rowCount.low) && $util.isInteger(message.rowCount.high)))
+ return "rowCount: integer|Long expected";
+ return null;
+ };
+
+ /**
+ * Creates a FinalizeWriteStreamResponse message from a plain object. Also converts values to their respective internal types.
+ * @function fromObject
+ * @memberof google.cloud.bigquery.storage.v1beta2.FinalizeWriteStreamResponse
+ * @static
+ * @param {Object.} object Plain object
+ * @returns {google.cloud.bigquery.storage.v1beta2.FinalizeWriteStreamResponse} FinalizeWriteStreamResponse
+ */
+ FinalizeWriteStreamResponse.fromObject = function fromObject(object, long) {
+ if (object instanceof $root.google.cloud.bigquery.storage.v1beta2.FinalizeWriteStreamResponse)
+ return object;
+ if (long === undefined)
+ long = 0;
+ if (long > $util.recursionLimit)
+ throw Error("maximum nesting depth exceeded");
+ var message = new $root.google.cloud.bigquery.storage.v1beta2.FinalizeWriteStreamResponse();
+ if (object.rowCount != null)
+ if ($util.Long)
+ (message.rowCount = $util.Long.fromValue(object.rowCount)).unsigned = false;
+ else if (typeof object.rowCount === "string")
+ message.rowCount = parseInt(object.rowCount, 10);
+ else if (typeof object.rowCount === "number")
+ message.rowCount = object.rowCount;
+ else if (typeof object.rowCount === "object")
+ message.rowCount = new $util.LongBits(object.rowCount.low >>> 0, object.rowCount.high >>> 0).toNumber();
+ return message;
+ };
+
+ /**
+ * Creates a plain object from a FinalizeWriteStreamResponse message. Also converts values to other types if specified.
+ * @function toObject
+ * @memberof google.cloud.bigquery.storage.v1beta2.FinalizeWriteStreamResponse
+ * @static
+ * @param {google.cloud.bigquery.storage.v1beta2.FinalizeWriteStreamResponse} message FinalizeWriteStreamResponse
+ * @param {$protobuf.IConversionOptions} [options] Conversion options
+ * @returns {Object.} Plain object
+ */
+ FinalizeWriteStreamResponse.toObject = function toObject(message, options) {
+ if (!options)
+ options = {};
+ var object = {};
+ if (options.defaults)
+ if ($util.Long) {
+ var long = new $util.Long(0, 0, false);
+ object.rowCount = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long;
+ } else
+ object.rowCount = options.longs === String ? "0" : 0;
+ if (message.rowCount != null && message.hasOwnProperty("rowCount"))
+ if (typeof message.rowCount === "number")
+ object.rowCount = options.longs === String ? String(message.rowCount) : message.rowCount;
+ else
+ object.rowCount = options.longs === String ? $util.Long.prototype.toString.call(message.rowCount) : options.longs === Number ? new $util.LongBits(message.rowCount.low >>> 0, message.rowCount.high >>> 0).toNumber() : message.rowCount;
+ return object;
+ };
+
+ /**
+ * Converts this FinalizeWriteStreamResponse to JSON.
+ * @function toJSON
+ * @memberof google.cloud.bigquery.storage.v1beta2.FinalizeWriteStreamResponse
+ * @instance
+ * @returns {Object.} JSON object
+ */
+ FinalizeWriteStreamResponse.prototype.toJSON = function toJSON() {
+ return this.constructor.toObject(this, $protobuf.util.toJSONOptions);
+ };
+
+ /**
+ * Gets the default type url for FinalizeWriteStreamResponse
+ * @function getTypeUrl
+ * @memberof google.cloud.bigquery.storage.v1beta2.FinalizeWriteStreamResponse
+ * @static
+ * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com")
+ * @returns {string} The default type url
+ */
+ FinalizeWriteStreamResponse.getTypeUrl = function getTypeUrl(typeUrlPrefix) {
+ if (typeUrlPrefix === undefined) {
+ typeUrlPrefix = "type.googleapis.com";
+ }
+ return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta2.FinalizeWriteStreamResponse";
+ };
+
+ return FinalizeWriteStreamResponse;
+ })();
+
+ v1beta2.FlushRowsRequest = (function() {
+
+ /**
+ * Properties of a FlushRowsRequest.
+ * @memberof google.cloud.bigquery.storage.v1beta2
+ * @interface IFlushRowsRequest
+ * @property {string|null} [writeStream] FlushRowsRequest writeStream
+ * @property {google.protobuf.IInt64Value|null} [offset] FlushRowsRequest offset
+ */
+
+ /**
+ * Constructs a new FlushRowsRequest.
+ * @memberof google.cloud.bigquery.storage.v1beta2
+ * @classdesc Represents a FlushRowsRequest.
+ * @implements IFlushRowsRequest
+ * @constructor
+ * @param {google.cloud.bigquery.storage.v1beta2.IFlushRowsRequest=} [properties] Properties to set
+ */
+ function FlushRowsRequest(properties) {
+ if (properties)
+ for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i)
+ if (properties[keys[i]] != null && keys[i] !== "__proto__")
+ this[keys[i]] = properties[keys[i]];
+ }
+
+ /**
+ * FlushRowsRequest writeStream.
+ * @member {string} writeStream
+ * @memberof google.cloud.bigquery.storage.v1beta2.FlushRowsRequest
+ * @instance
+ */
+ FlushRowsRequest.prototype.writeStream = "";
+
+ /**
+ * FlushRowsRequest offset.
+ * @member {google.protobuf.IInt64Value|null|undefined} offset
+ * @memberof google.cloud.bigquery.storage.v1beta2.FlushRowsRequest
+ * @instance
+ */
+ FlushRowsRequest.prototype.offset = null;
+
+ /**
+ * Creates a new FlushRowsRequest instance using the specified properties.
+ * @function create
+ * @memberof google.cloud.bigquery.storage.v1beta2.FlushRowsRequest
+ * @static
+ * @param {google.cloud.bigquery.storage.v1beta2.IFlushRowsRequest=} [properties] Properties to set
+ * @returns {google.cloud.bigquery.storage.v1beta2.FlushRowsRequest} FlushRowsRequest instance
+ */
+ FlushRowsRequest.create = function create(properties) {
+ return new FlushRowsRequest(properties);
+ };
+
+ /**
+ * Encodes the specified FlushRowsRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.FlushRowsRequest.verify|verify} messages.
+ * @function encode
+ * @memberof google.cloud.bigquery.storage.v1beta2.FlushRowsRequest
+ * @static
+ * @param {google.cloud.bigquery.storage.v1beta2.IFlushRowsRequest} message FlushRowsRequest message or plain object to encode
+ * @param {$protobuf.Writer} [writer] Writer to encode to
+ * @returns {$protobuf.Writer} Writer
+ */
+ FlushRowsRequest.encode = function encode(message, writer) {
+ if (!writer)
+ writer = $Writer.create();
+ if (message.writeStream != null && Object.hasOwnProperty.call(message, "writeStream"))
+ writer.uint32(/* id 1, wireType 2 =*/10).string(message.writeStream);
+ if (message.offset != null && Object.hasOwnProperty.call(message, "offset"))
+ $root.google.protobuf.Int64Value.encode(message.offset, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim();
+ return writer;
+ };
+
+ /**
+ * Encodes the specified FlushRowsRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.FlushRowsRequest.verify|verify} messages.
+ * @function encodeDelimited
+ * @memberof google.cloud.bigquery.storage.v1beta2.FlushRowsRequest
+ * @static
+ * @param {google.cloud.bigquery.storage.v1beta2.IFlushRowsRequest} message FlushRowsRequest message or plain object to encode
+ * @param {$protobuf.Writer} [writer] Writer to encode to
+ * @returns {$protobuf.Writer} Writer
+ */
+ FlushRowsRequest.encodeDelimited = function encodeDelimited(message, writer) {
+ return this.encode(message, writer).ldelim();
+ };
+
+ /**
+ * Decodes a FlushRowsRequest message from the specified reader or buffer.
+ * @function decode
+ * @memberof google.cloud.bigquery.storage.v1beta2.FlushRowsRequest
+ * @static
+ * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from
+ * @param {number} [length] Message length if known beforehand
+ * @returns {google.cloud.bigquery.storage.v1beta2.FlushRowsRequest} FlushRowsRequest
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ FlushRowsRequest.decode = function decode(reader, length, error, long) {
+ if (!(reader instanceof $Reader))
+ reader = $Reader.create(reader);
+ if (long === undefined)
+ long = 0;
+ if (long > $Reader.recursionLimit)
+ throw Error("maximum nesting depth exceeded");
+ var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta2.FlushRowsRequest();
+ while (reader.pos < end) {
+ var tag = reader.uint32();
+ if (tag === error)
+ break;
+ switch (tag >>> 3) {
+ case 1: {
+ message.writeStream = reader.string();
+ break;
+ }
+ case 2: {
+ message.offset = $root.google.protobuf.Int64Value.decode(reader, reader.uint32(), undefined, long + 1);
+ break;
+ }
+ default:
+ reader.skipType(tag & 7, long);
+ break;
+ }
+ }
+ return message;
+ };
+
+ /**
+ * Decodes a FlushRowsRequest message from the specified reader or buffer, length delimited.
+ * @function decodeDelimited
+ * @memberof google.cloud.bigquery.storage.v1beta2.FlushRowsRequest
+ * @static
+ * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from
+ * @returns {google.cloud.bigquery.storage.v1beta2.FlushRowsRequest} FlushRowsRequest
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ FlushRowsRequest.decodeDelimited = function decodeDelimited(reader) {
+ if (!(reader instanceof $Reader))
+ reader = new $Reader(reader);
+ return this.decode(reader, reader.uint32());
+ };
+
+ /**
+ * Verifies a FlushRowsRequest message.
+ * @function verify
+ * @memberof google.cloud.bigquery.storage.v1beta2.FlushRowsRequest
+ * @static
+ * @param {Object.} message Plain object to verify
+ * @returns {string|null} `null` if valid, otherwise the reason why it is not
+ */
+ FlushRowsRequest.verify = function verify(message, long) {
+ if (typeof message !== "object" || message === null)
+ return "object expected";
+ if (long === undefined)
+ long = 0;
+ if (long > $util.recursionLimit)
+ return "maximum nesting depth exceeded";
+ if (message.writeStream != null && message.hasOwnProperty("writeStream"))
+ if (!$util.isString(message.writeStream))
+ return "writeStream: string expected";
+ if (message.offset != null && message.hasOwnProperty("offset")) {
+ var error = $root.google.protobuf.Int64Value.verify(message.offset, long + 1);
+ if (error)
+ return "offset." + error;
+ }
+ return null;
+ };
+
+ /**
+ * Creates a FlushRowsRequest message from a plain object. Also converts values to their respective internal types.
+ * @function fromObject
+ * @memberof google.cloud.bigquery.storage.v1beta2.FlushRowsRequest
+ * @static
+ * @param {Object.} object Plain object
+ * @returns {google.cloud.bigquery.storage.v1beta2.FlushRowsRequest} FlushRowsRequest
+ */
+ FlushRowsRequest.fromObject = function fromObject(object, long) {
+ if (object instanceof $root.google.cloud.bigquery.storage.v1beta2.FlushRowsRequest)
+ return object;
+ if (long === undefined)
+ long = 0;
+ if (long > $util.recursionLimit)
+ throw Error("maximum nesting depth exceeded");
+ var message = new $root.google.cloud.bigquery.storage.v1beta2.FlushRowsRequest();
+ if (object.writeStream != null)
+ message.writeStream = String(object.writeStream);
+ if (object.offset != null) {
+ if (typeof object.offset !== "object")
+ throw TypeError(".google.cloud.bigquery.storage.v1beta2.FlushRowsRequest.offset: object expected");
+ message.offset = $root.google.protobuf.Int64Value.fromObject(object.offset, long + 1);
+ }
+ return message;
+ };
+
+ /**
+ * Creates a plain object from a FlushRowsRequest message. Also converts values to other types if specified.
+ * @function toObject
+ * @memberof google.cloud.bigquery.storage.v1beta2.FlushRowsRequest
+ * @static
+ * @param {google.cloud.bigquery.storage.v1beta2.FlushRowsRequest} message FlushRowsRequest
+ * @param {$protobuf.IConversionOptions} [options] Conversion options
+ * @returns {Object.} Plain object
+ */
+ FlushRowsRequest.toObject = function toObject(message, options) {
+ if (!options)
+ options = {};
+ var object = {};
+ if (options.defaults) {
+ object.writeStream = "";
+ object.offset = null;
+ }
+ if (message.writeStream != null && message.hasOwnProperty("writeStream"))
+ object.writeStream = message.writeStream;
+ if (message.offset != null && message.hasOwnProperty("offset"))
+ object.offset = $root.google.protobuf.Int64Value.toObject(message.offset, options);
+ return object;
+ };
+
+ /**
+ * Converts this FlushRowsRequest to JSON.
+ * @function toJSON
+ * @memberof google.cloud.bigquery.storage.v1beta2.FlushRowsRequest
+ * @instance
+ * @returns {Object.} JSON object
+ */
+ FlushRowsRequest.prototype.toJSON = function toJSON() {
+ return this.constructor.toObject(this, $protobuf.util.toJSONOptions);
+ };
+
+ /**
+ * Gets the default type url for FlushRowsRequest
+ * @function getTypeUrl
+ * @memberof google.cloud.bigquery.storage.v1beta2.FlushRowsRequest
+ * @static
+ * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com")
+ * @returns {string} The default type url
+ */
+ FlushRowsRequest.getTypeUrl = function getTypeUrl(typeUrlPrefix) {
+ if (typeUrlPrefix === undefined) {
+ typeUrlPrefix = "type.googleapis.com";
+ }
+ return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta2.FlushRowsRequest";
+ };
+
+ return FlushRowsRequest;
+ })();
+
+ v1beta2.FlushRowsResponse = (function() {
+
+ /**
+ * Properties of a FlushRowsResponse.
+ * @memberof google.cloud.bigquery.storage.v1beta2
+ * @interface IFlushRowsResponse
+ * @property {number|Long|null} [offset] FlushRowsResponse offset
+ */
+
+ /**
+ * Constructs a new FlushRowsResponse.
+ * @memberof google.cloud.bigquery.storage.v1beta2
+ * @classdesc Represents a FlushRowsResponse.
+ * @implements IFlushRowsResponse
+ * @constructor
+ * @param {google.cloud.bigquery.storage.v1beta2.IFlushRowsResponse=} [properties] Properties to set
+ */
+ function FlushRowsResponse(properties) {
+ if (properties)
+ for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i)
+ if (properties[keys[i]] != null && keys[i] !== "__proto__")
+ this[keys[i]] = properties[keys[i]];
+ }
+
+ /**
+ * FlushRowsResponse offset.
+ * @member {number|Long} offset
+ * @memberof google.cloud.bigquery.storage.v1beta2.FlushRowsResponse
+ * @instance
+ */
+ FlushRowsResponse.prototype.offset = $util.Long ? $util.Long.fromBits(0,0,false) : 0;
+
+ /**
+ * Creates a new FlushRowsResponse instance using the specified properties.
+ * @function create
+ * @memberof google.cloud.bigquery.storage.v1beta2.FlushRowsResponse
+ * @static
+ * @param {google.cloud.bigquery.storage.v1beta2.IFlushRowsResponse=} [properties] Properties to set
+ * @returns {google.cloud.bigquery.storage.v1beta2.FlushRowsResponse} FlushRowsResponse instance
+ */
+ FlushRowsResponse.create = function create(properties) {
+ return new FlushRowsResponse(properties);
+ };
+
+ /**
+ * Encodes the specified FlushRowsResponse message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.FlushRowsResponse.verify|verify} messages.
+ * @function encode
+ * @memberof google.cloud.bigquery.storage.v1beta2.FlushRowsResponse
+ * @static
+ * @param {google.cloud.bigquery.storage.v1beta2.IFlushRowsResponse} message FlushRowsResponse message or plain object to encode
+ * @param {$protobuf.Writer} [writer] Writer to encode to
+ * @returns {$protobuf.Writer} Writer
+ */
+ FlushRowsResponse.encode = function encode(message, writer) {
+ if (!writer)
+ writer = $Writer.create();
+ if (message.offset != null && Object.hasOwnProperty.call(message, "offset"))
+ writer.uint32(/* id 1, wireType 0 =*/8).int64(message.offset);
+ return writer;
+ };
+
+ /**
+ * Encodes the specified FlushRowsResponse message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.FlushRowsResponse.verify|verify} messages.
+ * @function encodeDelimited
+ * @memberof google.cloud.bigquery.storage.v1beta2.FlushRowsResponse
+ * @static
+ * @param {google.cloud.bigquery.storage.v1beta2.IFlushRowsResponse} message FlushRowsResponse message or plain object to encode
+ * @param {$protobuf.Writer} [writer] Writer to encode to
+ * @returns {$protobuf.Writer} Writer
+ */
+ FlushRowsResponse.encodeDelimited = function encodeDelimited(message, writer) {
+ return this.encode(message, writer).ldelim();
+ };
+
+ /**
+ * Decodes a FlushRowsResponse message from the specified reader or buffer.
+ * @function decode
+ * @memberof google.cloud.bigquery.storage.v1beta2.FlushRowsResponse
+ * @static
+ * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from
+ * @param {number} [length] Message length if known beforehand
+ * @returns {google.cloud.bigquery.storage.v1beta2.FlushRowsResponse} FlushRowsResponse
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ FlushRowsResponse.decode = function decode(reader, length, error, long) {
+ if (!(reader instanceof $Reader))
+ reader = $Reader.create(reader);
+ if (long === undefined)
+ long = 0;
+ if (long > $Reader.recursionLimit)
+ throw Error("maximum nesting depth exceeded");
+ var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta2.FlushRowsResponse();
+ while (reader.pos < end) {
+ var tag = reader.uint32();
+ if (tag === error)
+ break;
+ switch (tag >>> 3) {
+ case 1: {
+ message.offset = reader.int64();
+ break;
+ }
+ default:
+ reader.skipType(tag & 7, long);
+ break;
+ }
+ }
+ return message;
+ };
+
+ /**
+ * Decodes a FlushRowsResponse message from the specified reader or buffer, length delimited.
+ * @function decodeDelimited
+ * @memberof google.cloud.bigquery.storage.v1beta2.FlushRowsResponse
+ * @static
+ * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from
+ * @returns {google.cloud.bigquery.storage.v1beta2.FlushRowsResponse} FlushRowsResponse
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ FlushRowsResponse.decodeDelimited = function decodeDelimited(reader) {
+ if (!(reader instanceof $Reader))
+ reader = new $Reader(reader);
+ return this.decode(reader, reader.uint32());
+ };
+
+ /**
+ * Verifies a FlushRowsResponse message.
+ * @function verify
+ * @memberof google.cloud.bigquery.storage.v1beta2.FlushRowsResponse
+ * @static
+ * @param {Object.} message Plain object to verify
+ * @returns {string|null} `null` if valid, otherwise the reason why it is not
+ */
+ FlushRowsResponse.verify = function verify(message, long) {
+ if (typeof message !== "object" || message === null)
+ return "object expected";
+ if (long === undefined)
+ long = 0;
+ if (long > $util.recursionLimit)
+ return "maximum nesting depth exceeded";
+ if (message.offset != null && message.hasOwnProperty("offset"))
+ if (!$util.isInteger(message.offset) && !(message.offset && $util.isInteger(message.offset.low) && $util.isInteger(message.offset.high)))
+ return "offset: integer|Long expected";
+ return null;
+ };
+
+ /**
+ * Creates a FlushRowsResponse message from a plain object. Also converts values to their respective internal types.
+ * @function fromObject
+ * @memberof google.cloud.bigquery.storage.v1beta2.FlushRowsResponse
+ * @static
+ * @param {Object.} object Plain object
+ * @returns {google.cloud.bigquery.storage.v1beta2.FlushRowsResponse} FlushRowsResponse
+ */
+ FlushRowsResponse.fromObject = function fromObject(object, long) {
+ if (object instanceof $root.google.cloud.bigquery.storage.v1beta2.FlushRowsResponse)
+ return object;
+ if (long === undefined)
+ long = 0;
+ if (long > $util.recursionLimit)
+ throw Error("maximum nesting depth exceeded");
+ var message = new $root.google.cloud.bigquery.storage.v1beta2.FlushRowsResponse();
+ if (object.offset != null)
+ if ($util.Long)
+ (message.offset = $util.Long.fromValue(object.offset)).unsigned = false;
+ else if (typeof object.offset === "string")
+ message.offset = parseInt(object.offset, 10);
+ else if (typeof object.offset === "number")
+ message.offset = object.offset;
+ else if (typeof object.offset === "object")
+ message.offset = new $util.LongBits(object.offset.low >>> 0, object.offset.high >>> 0).toNumber();
+ return message;
+ };
+
+ /**
+ * Creates a plain object from a FlushRowsResponse message. Also converts values to other types if specified.
+ * @function toObject
+ * @memberof google.cloud.bigquery.storage.v1beta2.FlushRowsResponse
+ * @static
+ * @param {google.cloud.bigquery.storage.v1beta2.FlushRowsResponse} message FlushRowsResponse
+ * @param {$protobuf.IConversionOptions} [options] Conversion options
+ * @returns {Object.} Plain object
+ */
+ FlushRowsResponse.toObject = function toObject(message, options) {
+ if (!options)
+ options = {};
+ var object = {};
+ if (options.defaults)
+ if ($util.Long) {
+ var long = new $util.Long(0, 0, false);
+ object.offset = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long;
+ } else
+ object.offset = options.longs === String ? "0" : 0;
+ if (message.offset != null && message.hasOwnProperty("offset"))
+ if (typeof message.offset === "number")
+ object.offset = options.longs === String ? String(message.offset) : message.offset;
+ else
+ object.offset = options.longs === String ? $util.Long.prototype.toString.call(message.offset) : options.longs === Number ? new $util.LongBits(message.offset.low >>> 0, message.offset.high >>> 0).toNumber() : message.offset;
+ return object;
+ };
+
+ /**
+ * Converts this FlushRowsResponse to JSON.
+ * @function toJSON
+ * @memberof google.cloud.bigquery.storage.v1beta2.FlushRowsResponse
+ * @instance
+ * @returns {Object.} JSON object
+ */
+ FlushRowsResponse.prototype.toJSON = function toJSON() {
+ return this.constructor.toObject(this, $protobuf.util.toJSONOptions);
+ };
+
+ /**
+ * Gets the default type url for FlushRowsResponse
+ * @function getTypeUrl
+ * @memberof google.cloud.bigquery.storage.v1beta2.FlushRowsResponse
+ * @static
+ * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com")
+ * @returns {string} The default type url
+ */
+ FlushRowsResponse.getTypeUrl = function getTypeUrl(typeUrlPrefix) {
+ if (typeUrlPrefix === undefined) {
+ typeUrlPrefix = "type.googleapis.com";
+ }
+ return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta2.FlushRowsResponse";
+ };
+
+ return FlushRowsResponse;
+ })();
+
+    v1beta2.StorageError = (function() {
+
+        /**
+         * Properties of a StorageError.
+         * @memberof google.cloud.bigquery.storage.v1beta2
+         * @interface IStorageError
+         * @property {google.cloud.bigquery.storage.v1beta2.StorageError.StorageErrorCode|null} [code] StorageError code
+         * @property {string|null} [entity] StorageError entity
+         * @property {string|null} [errorMessage] StorageError errorMessage
+         */
+
+        /**
+         * Constructs a new StorageError.
+         * @memberof google.cloud.bigquery.storage.v1beta2
+         * @classdesc Represents a StorageError.
+         * @implements IStorageError
+         * @constructor
+         * @param {google.cloud.bigquery.storage.v1beta2.IStorageError=} [properties] Properties to set
+         */
+        function StorageError(properties) {
+            if (properties)
+                for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i)
+                    // Skip "__proto__" to avoid prototype pollution via plain objects.
+                    if (properties[keys[i]] != null && keys[i] !== "__proto__")
+                        this[keys[i]] = properties[keys[i]];
+        }
+
+        /**
+         * StorageError code.
+         * @member {google.cloud.bigquery.storage.v1beta2.StorageError.StorageErrorCode} code
+         * @memberof google.cloud.bigquery.storage.v1beta2.StorageError
+         * @instance
+         */
+        StorageError.prototype.code = 0;
+
+        /**
+         * StorageError entity.
+         * @member {string} entity
+         * @memberof google.cloud.bigquery.storage.v1beta2.StorageError
+         * @instance
+         */
+        StorageError.prototype.entity = "";
+
+        /**
+         * StorageError errorMessage.
+         * @member {string} errorMessage
+         * @memberof google.cloud.bigquery.storage.v1beta2.StorageError
+         * @instance
+         */
+        StorageError.prototype.errorMessage = "";
+
+        /**
+         * Creates a new StorageError instance using the specified properties.
+         * @function create
+         * @memberof google.cloud.bigquery.storage.v1beta2.StorageError
+         * @static
+         * @param {google.cloud.bigquery.storage.v1beta2.IStorageError=} [properties] Properties to set
+         * @returns {google.cloud.bigquery.storage.v1beta2.StorageError} StorageError instance
+         */
+        StorageError.create = function create(properties) {
+            return new StorageError(properties);
+        };
+
+        /**
+         * Encodes the specified StorageError message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.StorageError.verify|verify} messages.
+         * Fields are written in ascending field-number order; unset fields are skipped.
+         * @function encode
+         * @memberof google.cloud.bigquery.storage.v1beta2.StorageError
+         * @static
+         * @param {google.cloud.bigquery.storage.v1beta2.IStorageError} message StorageError message or plain object to encode
+         * @param {$protobuf.Writer} [writer] Writer to encode to
+         * @returns {$protobuf.Writer} Writer
+         */
+        StorageError.encode = function encode(message, writer) {
+            if (!writer)
+                writer = $Writer.create();
+            if (message.code != null && Object.hasOwnProperty.call(message, "code"))
+                writer.uint32(/* id 1, wireType 0 =*/8).int32(message.code);
+            if (message.entity != null && Object.hasOwnProperty.call(message, "entity"))
+                writer.uint32(/* id 2, wireType 2 =*/18).string(message.entity);
+            if (message.errorMessage != null && Object.hasOwnProperty.call(message, "errorMessage"))
+                writer.uint32(/* id 3, wireType 2 =*/26).string(message.errorMessage);
+            return writer;
+        };
+
+        /**
+         * Encodes the specified StorageError message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.StorageError.verify|verify} messages.
+         * @function encodeDelimited
+         * @memberof google.cloud.bigquery.storage.v1beta2.StorageError
+         * @static
+         * @param {google.cloud.bigquery.storage.v1beta2.IStorageError} message StorageError message or plain object to encode
+         * @param {$protobuf.Writer} [writer] Writer to encode to
+         * @returns {$protobuf.Writer} Writer
+         */
+        StorageError.encodeDelimited = function encodeDelimited(message, writer) {
+            return this.encode(message, writer).ldelim();
+        };
+
+        /**
+         * Decodes a StorageError message from the specified reader or buffer.
+         * @function decode
+         * @memberof google.cloud.bigquery.storage.v1beta2.StorageError
+         * @static
+         * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from
+         * @param {number} [length] Message length if known beforehand
+         * @param {number} [error] Terminating tag value; reading stops early when this tag is encountered (internal use)
+         * @param {number} [long] Current recursion depth, checked against $Reader.recursionLimit (internal use)
+         * @returns {google.cloud.bigquery.storage.v1beta2.StorageError} StorageError
+         * @throws {Error} If the payload is not a reader or valid buffer
+         * @throws {$protobuf.util.ProtocolError} If required fields are missing
+         */
+        StorageError.decode = function decode(reader, length, error, long) {
+            if (!(reader instanceof $Reader))
+                reader = $Reader.create(reader);
+            if (long === undefined)
+                long = 0;
+            if (long > $Reader.recursionLimit)
+                throw Error("maximum nesting depth exceeded");
+            var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta2.StorageError();
+            while (reader.pos < end) {
+                var tag = reader.uint32();
+                if (tag === error)
+                    break;
+                switch (tag >>> 3) {
+                case 1: {
+                        message.code = reader.int32();
+                        break;
+                    }
+                case 2: {
+                        message.entity = reader.string();
+                        break;
+                    }
+                case 3: {
+                        message.errorMessage = reader.string();
+                        break;
+                    }
+                default:
+                    // Unknown fields are skipped by wire type for forward compatibility.
+                    reader.skipType(tag & 7, long);
+                    break;
+                }
+            }
+            return message;
+        };
+
+        /**
+         * Decodes a StorageError message from the specified reader or buffer, length delimited.
+         * @function decodeDelimited
+         * @memberof google.cloud.bigquery.storage.v1beta2.StorageError
+         * @static
+         * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from
+         * @returns {google.cloud.bigquery.storage.v1beta2.StorageError} StorageError
+         * @throws {Error} If the payload is not a reader or valid buffer
+         * @throws {$protobuf.util.ProtocolError} If required fields are missing
+         */
+        StorageError.decodeDelimited = function decodeDelimited(reader) {
+            if (!(reader instanceof $Reader))
+                reader = new $Reader(reader);
+            return this.decode(reader, reader.uint32());
+        };
+
+        /**
+         * Verifies a StorageError message.
+         * @function verify
+         * @memberof google.cloud.bigquery.storage.v1beta2.StorageError
+         * @static
+         * @param {Object.<string,*>} message Plain object to verify
+         * @returns {string|null} `null` if valid, otherwise the reason why it is not
+         */
+        StorageError.verify = function verify(message, long) {
+            if (typeof message !== "object" || message === null)
+                return "object expected";
+            if (long === undefined)
+                long = 0;
+            if (long > $util.recursionLimit)
+                return "maximum nesting depth exceeded";
+            if (message.code != null && message.hasOwnProperty("code"))
+                switch (message.code) {
+                default:
+                    return "code: enum value expected";
+                case 0:
+                case 1:
+                case 2:
+                case 3:
+                case 4:
+                case 5:
+                case 6:
+                    break;
+                }
+            if (message.entity != null && message.hasOwnProperty("entity"))
+                if (!$util.isString(message.entity))
+                    return "entity: string expected";
+            if (message.errorMessage != null && message.hasOwnProperty("errorMessage"))
+                if (!$util.isString(message.errorMessage))
+                    return "errorMessage: string expected";
+            return null;
+        };
+
+        /**
+         * Creates a StorageError message from a plain object. Also converts values to their respective internal types.
+         * @function fromObject
+         * @memberof google.cloud.bigquery.storage.v1beta2.StorageError
+         * @static
+         * @param {Object.<string,*>} object Plain object
+         * @returns {google.cloud.bigquery.storage.v1beta2.StorageError} StorageError
+         */
+        StorageError.fromObject = function fromObject(object, long) {
+            if (object instanceof $root.google.cloud.bigquery.storage.v1beta2.StorageError)
+                return object;
+            if (long === undefined)
+                long = 0;
+            if (long > $util.recursionLimit)
+                throw Error("maximum nesting depth exceeded");
+            var message = new $root.google.cloud.bigquery.storage.v1beta2.StorageError();
+            switch (object.code) {
+            default:
+                // Unknown enum string names are ignored; raw numbers are kept as-is.
+                if (typeof object.code === "number") {
+                    message.code = object.code;
+                    break;
+                }
+                break;
+            case "STORAGE_ERROR_CODE_UNSPECIFIED":
+            case 0:
+                message.code = 0;
+                break;
+            case "TABLE_NOT_FOUND":
+            case 1:
+                message.code = 1;
+                break;
+            case "STREAM_ALREADY_COMMITTED":
+            case 2:
+                message.code = 2;
+                break;
+            case "STREAM_NOT_FOUND":
+            case 3:
+                message.code = 3;
+                break;
+            case "INVALID_STREAM_TYPE":
+            case 4:
+                message.code = 4;
+                break;
+            case "INVALID_STREAM_STATE":
+            case 5:
+                message.code = 5;
+                break;
+            case "STREAM_FINALIZED":
+            case 6:
+                message.code = 6;
+                break;
+            }
+            if (object.entity != null)
+                message.entity = String(object.entity);
+            if (object.errorMessage != null)
+                message.errorMessage = String(object.errorMessage);
+            return message;
+        };
+
+        /**
+         * Creates a plain object from a StorageError message. Also converts values to other types if specified.
+         * @function toObject
+         * @memberof google.cloud.bigquery.storage.v1beta2.StorageError
+         * @static
+         * @param {google.cloud.bigquery.storage.v1beta2.StorageError} message StorageError
+         * @param {$protobuf.IConversionOptions} [options] Conversion options
+         * @returns {Object.<string,*>} Plain object
+         */
+        StorageError.toObject = function toObject(message, options) {
+            if (!options)
+                options = {};
+            var object = {};
+            if (options.defaults) {
+                object.code = options.enums === String ? "STORAGE_ERROR_CODE_UNSPECIFIED" : 0;
+                object.entity = "";
+                object.errorMessage = "";
+            }
+            if (message.code != null && message.hasOwnProperty("code"))
+                object.code = options.enums === String ? $root.google.cloud.bigquery.storage.v1beta2.StorageError.StorageErrorCode[message.code] === undefined ? message.code : $root.google.cloud.bigquery.storage.v1beta2.StorageError.StorageErrorCode[message.code] : message.code;
+            if (message.entity != null && message.hasOwnProperty("entity"))
+                object.entity = message.entity;
+            if (message.errorMessage != null && message.hasOwnProperty("errorMessage"))
+                object.errorMessage = message.errorMessage;
+            return object;
+        };
+
+        /**
+         * Converts this StorageError to JSON.
+         * @function toJSON
+         * @memberof google.cloud.bigquery.storage.v1beta2.StorageError
+         * @instance
+         * @returns {Object.<string,*>} JSON object
+         */
+        StorageError.prototype.toJSON = function toJSON() {
+            return this.constructor.toObject(this, $protobuf.util.toJSONOptions);
+        };
+
+        /**
+         * Gets the default type url for StorageError
+         * @function getTypeUrl
+         * @memberof google.cloud.bigquery.storage.v1beta2.StorageError
+         * @static
+         * @param {string} [typeUrlPrefix] your custom typeUrlPrefix (default "type.googleapis.com")
+         * @returns {string} The default type url
+         */
+        StorageError.getTypeUrl = function getTypeUrl(typeUrlPrefix) {
+            if (typeUrlPrefix === undefined) {
+                typeUrlPrefix = "type.googleapis.com";
+            }
+            return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta2.StorageError";
+        };
+
+        /**
+         * StorageErrorCode enum.
+         * @name google.cloud.bigquery.storage.v1beta2.StorageError.StorageErrorCode
+         * @enum {number}
+         * @property {number} STORAGE_ERROR_CODE_UNSPECIFIED=0 STORAGE_ERROR_CODE_UNSPECIFIED value
+         * @property {number} TABLE_NOT_FOUND=1 TABLE_NOT_FOUND value
+         * @property {number} STREAM_ALREADY_COMMITTED=2 STREAM_ALREADY_COMMITTED value
+         * @property {number} STREAM_NOT_FOUND=3 STREAM_NOT_FOUND value
+         * @property {number} INVALID_STREAM_TYPE=4 INVALID_STREAM_TYPE value
+         * @property {number} INVALID_STREAM_STATE=5 INVALID_STREAM_STATE value
+         * @property {number} STREAM_FINALIZED=6 STREAM_FINALIZED value
+         */
+        StorageError.StorageErrorCode = (function() {
+            // Bidirectional mapping: valuesById maps id -> name; values inherits
+            // from it prototypally and additionally maps name -> id.
+            var valuesById = {}, values = Object.create(valuesById);
+            values[valuesById[0] = "STORAGE_ERROR_CODE_UNSPECIFIED"] = 0;
+            values[valuesById[1] = "TABLE_NOT_FOUND"] = 1;
+            values[valuesById[2] = "STREAM_ALREADY_COMMITTED"] = 2;
+            values[valuesById[3] = "STREAM_NOT_FOUND"] = 3;
+            values[valuesById[4] = "INVALID_STREAM_TYPE"] = 4;
+            values[valuesById[5] = "INVALID_STREAM_STATE"] = 5;
+            values[valuesById[6] = "STREAM_FINALIZED"] = 6;
+            return values;
+        })();
+
+        return StorageError;
+    })();
+
+    /**
+     * DataFormat enum.
+     * @name google.cloud.bigquery.storage.v1beta2.DataFormat
+     * @enum {number}
+     * @property {number} DATA_FORMAT_UNSPECIFIED=0 DATA_FORMAT_UNSPECIFIED value
+     * @property {number} AVRO=1 AVRO value
+     * @property {number} ARROW=2 ARROW value
+     */
+    v1beta2.DataFormat = (function() {
+        // Bidirectional mapping: valuesById maps id -> name; values inherits
+        // from it prototypally and additionally maps name -> id.
+        var valuesById = {}, values = Object.create(valuesById);
+        values[valuesById[0] = "DATA_FORMAT_UNSPECIFIED"] = 0;
+        values[valuesById[1] = "AVRO"] = 1;
+        values[valuesById[2] = "ARROW"] = 2;
+        return values;
+    })();
+
+ v1beta2.ReadSession = (function() {
+
+        /**
+         * Properties of a ReadSession.
+         * @memberof google.cloud.bigquery.storage.v1beta2
+         * @interface IReadSession
+         * @property {string|null} [name] ReadSession name
+         * @property {google.protobuf.ITimestamp|null} [expireTime] ReadSession expireTime
+         * @property {google.cloud.bigquery.storage.v1beta2.DataFormat|null} [dataFormat] ReadSession dataFormat
+         * @property {google.cloud.bigquery.storage.v1beta2.IAvroSchema|null} [avroSchema] ReadSession avroSchema
+         * @property {google.cloud.bigquery.storage.v1beta2.IArrowSchema|null} [arrowSchema] ReadSession arrowSchema
+         * @property {string|null} [table] ReadSession table
+         * @property {google.cloud.bigquery.storage.v1beta2.ReadSession.ITableModifiers|null} [tableModifiers] ReadSession tableModifiers
+         * @property {google.cloud.bigquery.storage.v1beta2.ReadSession.ITableReadOptions|null} [readOptions] ReadSession readOptions
+         * @property {Array.<google.cloud.bigquery.storage.v1beta2.IReadStream>|null} [streams] ReadSession streams
+         */
+
+        /**
+         * Constructs a new ReadSession.
+         * @memberof google.cloud.bigquery.storage.v1beta2
+         * @classdesc Represents a ReadSession.
+         * @implements IReadSession
+         * @constructor
+         * @param {google.cloud.bigquery.storage.v1beta2.IReadSession=} [properties] Properties to set
+         */
+        function ReadSession(properties) {
+            this.streams = [];
+            if (properties)
+                for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i)
+                    // Skip "__proto__" to avoid prototype pollution via plain objects.
+                    if (properties[keys[i]] != null && keys[i] !== "__proto__")
+                        this[keys[i]] = properties[keys[i]];
+        }
+
+        /**
+         * ReadSession name.
+         * @member {string} name
+         * @memberof google.cloud.bigquery.storage.v1beta2.ReadSession
+         * @instance
+         */
+        ReadSession.prototype.name = "";
+
+        /**
+         * ReadSession expireTime.
+         * @member {google.protobuf.ITimestamp|null|undefined} expireTime
+         * @memberof google.cloud.bigquery.storage.v1beta2.ReadSession
+         * @instance
+         */
+        ReadSession.prototype.expireTime = null;
+
+        /**
+         * ReadSession dataFormat.
+         * @member {google.cloud.bigquery.storage.v1beta2.DataFormat} dataFormat
+         * @memberof google.cloud.bigquery.storage.v1beta2.ReadSession
+         * @instance
+         */
+        ReadSession.prototype.dataFormat = 0;
+
+        /**
+         * ReadSession avroSchema.
+         * @member {google.cloud.bigquery.storage.v1beta2.IAvroSchema|null|undefined} avroSchema
+         * @memberof google.cloud.bigquery.storage.v1beta2.ReadSession
+         * @instance
+         */
+        ReadSession.prototype.avroSchema = null;
+
+        /**
+         * ReadSession arrowSchema.
+         * @member {google.cloud.bigquery.storage.v1beta2.IArrowSchema|null|undefined} arrowSchema
+         * @memberof google.cloud.bigquery.storage.v1beta2.ReadSession
+         * @instance
+         */
+        ReadSession.prototype.arrowSchema = null;
+
+        /**
+         * ReadSession table.
+         * @member {string} table
+         * @memberof google.cloud.bigquery.storage.v1beta2.ReadSession
+         * @instance
+         */
+        ReadSession.prototype.table = "";
+
+        /**
+         * ReadSession tableModifiers.
+         * @member {google.cloud.bigquery.storage.v1beta2.ReadSession.ITableModifiers|null|undefined} tableModifiers
+         * @memberof google.cloud.bigquery.storage.v1beta2.ReadSession
+         * @instance
+         */
+        ReadSession.prototype.tableModifiers = null;
+
+        /**
+         * ReadSession readOptions.
+         * @member {google.cloud.bigquery.storage.v1beta2.ReadSession.ITableReadOptions|null|undefined} readOptions
+         * @memberof google.cloud.bigquery.storage.v1beta2.ReadSession
+         * @instance
+         */
+        ReadSession.prototype.readOptions = null;
+
+        /**
+         * ReadSession streams.
+         * @member {Array.<google.cloud.bigquery.storage.v1beta2.IReadStream>} streams
+         * @memberof google.cloud.bigquery.storage.v1beta2.ReadSession
+         * @instance
+         */
+        ReadSession.prototype.streams = $util.emptyArray;
+
+        // OneOf field names bound to virtual getters and setters
+        var $oneOfFields;
+
+        /**
+         * ReadSession schema.
+         * Virtual oneof accessor: reads as the name of the set member
+         * ("avroSchema" or "arrowSchema"), or undefined if neither is set.
+         * @member {"avroSchema"|"arrowSchema"|undefined} schema
+         * @memberof google.cloud.bigquery.storage.v1beta2.ReadSession
+         * @instance
+         */
+        Object.defineProperty(ReadSession.prototype, "schema", {
+            get: $util.oneOfGetter($oneOfFields = ["avroSchema", "arrowSchema"]),
+            set: $util.oneOfSetter($oneOfFields)
+        });
+
+        /**
+         * Creates a new ReadSession instance using the specified properties.
+         * Shortcut for `new ReadSession(properties)`.
+         * @function create
+         * @memberof google.cloud.bigquery.storage.v1beta2.ReadSession
+         * @static
+         * @param {google.cloud.bigquery.storage.v1beta2.IReadSession=} [properties] Properties to set
+         * @returns {google.cloud.bigquery.storage.v1beta2.ReadSession} ReadSession instance
+         */
+        ReadSession.create = function create(properties) {
+            return new ReadSession(properties);
+        };
+
+        /**
+         * Encodes the specified ReadSession message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.ReadSession.verify|verify} messages.
+         * Fields are written in ascending field-number order; note that streams
+         * uses field number 10 (no field 9 is written by this message).
+         * @function encode
+         * @memberof google.cloud.bigquery.storage.v1beta2.ReadSession
+         * @static
+         * @param {google.cloud.bigquery.storage.v1beta2.IReadSession} message ReadSession message or plain object to encode
+         * @param {$protobuf.Writer} [writer] Writer to encode to
+         * @returns {$protobuf.Writer} Writer
+         */
+        ReadSession.encode = function encode(message, writer) {
+            if (!writer)
+                writer = $Writer.create();
+            if (message.name != null && Object.hasOwnProperty.call(message, "name"))
+                writer.uint32(/* id 1, wireType 2 =*/10).string(message.name);
+            if (message.expireTime != null && Object.hasOwnProperty.call(message, "expireTime"))
+                $root.google.protobuf.Timestamp.encode(message.expireTime, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim();
+            if (message.dataFormat != null && Object.hasOwnProperty.call(message, "dataFormat"))
+                writer.uint32(/* id 3, wireType 0 =*/24).int32(message.dataFormat);
+            if (message.avroSchema != null && Object.hasOwnProperty.call(message, "avroSchema"))
+                $root.google.cloud.bigquery.storage.v1beta2.AvroSchema.encode(message.avroSchema, writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim();
+            if (message.arrowSchema != null && Object.hasOwnProperty.call(message, "arrowSchema"))
+                $root.google.cloud.bigquery.storage.v1beta2.ArrowSchema.encode(message.arrowSchema, writer.uint32(/* id 5, wireType 2 =*/42).fork()).ldelim();
+            if (message.table != null && Object.hasOwnProperty.call(message, "table"))
+                writer.uint32(/* id 6, wireType 2 =*/50).string(message.table);
+            if (message.tableModifiers != null && Object.hasOwnProperty.call(message, "tableModifiers"))
+                $root.google.cloud.bigquery.storage.v1beta2.ReadSession.TableModifiers.encode(message.tableModifiers, writer.uint32(/* id 7, wireType 2 =*/58).fork()).ldelim();
+            if (message.readOptions != null && Object.hasOwnProperty.call(message, "readOptions"))
+                $root.google.cloud.bigquery.storage.v1beta2.ReadSession.TableReadOptions.encode(message.readOptions, writer.uint32(/* id 8, wireType 2 =*/66).fork()).ldelim();
+            if (message.streams != null && message.streams.length)
+                for (var i = 0; i < message.streams.length; ++i)
+                    $root.google.cloud.bigquery.storage.v1beta2.ReadStream.encode(message.streams[i], writer.uint32(/* id 10, wireType 2 =*/82).fork()).ldelim();
+            return writer;
+        };
+
+        /**
+         * Encodes the specified ReadSession message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.ReadSession.verify|verify} messages.
+         * @function encodeDelimited
+         * @memberof google.cloud.bigquery.storage.v1beta2.ReadSession
+         * @static
+         * @param {google.cloud.bigquery.storage.v1beta2.IReadSession} message ReadSession message or plain object to encode
+         * @param {$protobuf.Writer} [writer] Writer to encode to
+         * @returns {$protobuf.Writer} Writer
+         */
+        ReadSession.encodeDelimited = function encodeDelimited(message, writer) {
+            return this.encode(message, writer).ldelim();
+        };
+
+        /**
+         * Decodes a ReadSession message from the specified reader or buffer.
+         * @function decode
+         * @memberof google.cloud.bigquery.storage.v1beta2.ReadSession
+         * @static
+         * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from
+         * @param {number} [length] Message length if known beforehand
+         * @param {number} [error] Terminating tag value; reading stops early when this tag is encountered (internal use)
+         * @param {number} [long] Current recursion depth, checked against $Reader.recursionLimit (internal use)
+         * @returns {google.cloud.bigquery.storage.v1beta2.ReadSession} ReadSession
+         * @throws {Error} If the payload is not a reader or valid buffer
+         * @throws {$protobuf.util.ProtocolError} If required fields are missing
+         */
+        ReadSession.decode = function decode(reader, length, error, long) {
+            if (!(reader instanceof $Reader))
+                reader = $Reader.create(reader);
+            if (long === undefined)
+                long = 0;
+            if (long > $Reader.recursionLimit)
+                throw Error("maximum nesting depth exceeded");
+            var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta2.ReadSession();
+            while (reader.pos < end) {
+                var tag = reader.uint32();
+                if (tag === error)
+                    break;
+                switch (tag >>> 3) {
+                case 1: {
+                        message.name = reader.string();
+                        break;
+                    }
+                case 2: {
+                        // Nested message decodes bump the recursion depth (long + 1).
+                        message.expireTime = $root.google.protobuf.Timestamp.decode(reader, reader.uint32(), undefined, long + 1);
+                        break;
+                    }
+                case 3: {
+                        message.dataFormat = reader.int32();
+                        break;
+                    }
+                case 4: {
+                        message.avroSchema = $root.google.cloud.bigquery.storage.v1beta2.AvroSchema.decode(reader, reader.uint32(), undefined, long + 1);
+                        break;
+                    }
+                case 5: {
+                        message.arrowSchema = $root.google.cloud.bigquery.storage.v1beta2.ArrowSchema.decode(reader, reader.uint32(), undefined, long + 1);
+                        break;
+                    }
+                case 6: {
+                        message.table = reader.string();
+                        break;
+                    }
+                case 7: {
+                        message.tableModifiers = $root.google.cloud.bigquery.storage.v1beta2.ReadSession.TableModifiers.decode(reader, reader.uint32(), undefined, long + 1);
+                        break;
+                    }
+                case 8: {
+                        message.readOptions = $root.google.cloud.bigquery.storage.v1beta2.ReadSession.TableReadOptions.decode(reader, reader.uint32(), undefined, long + 1);
+                        break;
+                    }
+                case 10: {
+                        if (!(message.streams && message.streams.length))
+                            message.streams = [];
+                        message.streams.push($root.google.cloud.bigquery.storage.v1beta2.ReadStream.decode(reader, reader.uint32(), undefined, long + 1));
+                        break;
+                    }
+                default:
+                    // Unknown fields are skipped by wire type for forward compatibility.
+                    reader.skipType(tag & 7, long);
+                    break;
+                }
+            }
+            return message;
+        };
+
+        /**
+         * Decodes a ReadSession message from the specified reader or buffer, length delimited.
+         * @function decodeDelimited
+         * @memberof google.cloud.bigquery.storage.v1beta2.ReadSession
+         * @static
+         * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from
+         * @returns {google.cloud.bigquery.storage.v1beta2.ReadSession} ReadSession
+         * @throws {Error} If the payload is not a reader or valid buffer
+         * @throws {$protobuf.util.ProtocolError} If required fields are missing
+         */
+        ReadSession.decodeDelimited = function decodeDelimited(reader) {
+            if (!(reader instanceof $Reader))
+                reader = new $Reader(reader);
+            return this.decode(reader, reader.uint32());
+        };
+
+        /**
+         * Verifies a ReadSession message.
+         * @function verify
+         * @memberof google.cloud.bigquery.storage.v1beta2.ReadSession
+         * @static
+         * @param {Object.<string,*>} message Plain object to verify
+         * @returns {string|null} `null` if valid, otherwise the reason why it is not
+         */
+        ReadSession.verify = function verify(message, long) {
+            if (typeof message !== "object" || message === null)
+                return "object expected";
+            if (long === undefined)
+                long = 0;
+            if (long > $util.recursionLimit)
+                return "maximum nesting depth exceeded";
+            // Tracks which member of the "schema" oneof has been seen.
+            var properties = {};
+            if (message.name != null && message.hasOwnProperty("name"))
+                if (!$util.isString(message.name))
+                    return "name: string expected";
+            if (message.expireTime != null && message.hasOwnProperty("expireTime")) {
+                var error = $root.google.protobuf.Timestamp.verify(message.expireTime, long + 1);
+                if (error)
+                    return "expireTime." + error;
+            }
+            if (message.dataFormat != null && message.hasOwnProperty("dataFormat"))
+                switch (message.dataFormat) {
+                default:
+                    return "dataFormat: enum value expected";
+                case 0:
+                case 1:
+                case 2:
+                    break;
+                }
+            if (message.avroSchema != null && message.hasOwnProperty("avroSchema")) {
+                properties.schema = 1;
+                {
+                    var error = $root.google.cloud.bigquery.storage.v1beta2.AvroSchema.verify(message.avroSchema, long + 1);
+                    if (error)
+                        return "avroSchema." + error;
+                }
+            }
+            if (message.arrowSchema != null && message.hasOwnProperty("arrowSchema")) {
+                if (properties.schema === 1)
+                    return "schema: multiple values";
+                properties.schema = 1;
+                {
+                    var error = $root.google.cloud.bigquery.storage.v1beta2.ArrowSchema.verify(message.arrowSchema, long + 1);
+                    if (error)
+                        return "arrowSchema." + error;
+                }
+            }
+            if (message.table != null && message.hasOwnProperty("table"))
+                if (!$util.isString(message.table))
+                    return "table: string expected";
+            if (message.tableModifiers != null && message.hasOwnProperty("tableModifiers")) {
+                var error = $root.google.cloud.bigquery.storage.v1beta2.ReadSession.TableModifiers.verify(message.tableModifiers, long + 1);
+                if (error)
+                    return "tableModifiers." + error;
+            }
+            if (message.readOptions != null && message.hasOwnProperty("readOptions")) {
+                var error = $root.google.cloud.bigquery.storage.v1beta2.ReadSession.TableReadOptions.verify(message.readOptions, long + 1);
+                if (error)
+                    return "readOptions." + error;
+            }
+            if (message.streams != null && message.hasOwnProperty("streams")) {
+                if (!Array.isArray(message.streams))
+                    return "streams: array expected";
+                for (var i = 0; i < message.streams.length; ++i) {
+                    var error = $root.google.cloud.bigquery.storage.v1beta2.ReadStream.verify(message.streams[i], long + 1);
+                    if (error)
+                        return "streams." + error;
+                }
+            }
+            return null;
+        };
+
+        /**
+         * Creates a ReadSession message from a plain object. Also converts values to their respective internal types.
+         * @function fromObject
+         * @memberof google.cloud.bigquery.storage.v1beta2.ReadSession
+         * @static
+         * @param {Object.<string,*>} object Plain object
+         * @returns {google.cloud.bigquery.storage.v1beta2.ReadSession} ReadSession
+         */
+        ReadSession.fromObject = function fromObject(object, long) {
+            if (object instanceof $root.google.cloud.bigquery.storage.v1beta2.ReadSession)
+                return object;
+            if (long === undefined)
+                long = 0;
+            if (long > $util.recursionLimit)
+                throw Error("maximum nesting depth exceeded");
+            var message = new $root.google.cloud.bigquery.storage.v1beta2.ReadSession();
+            if (object.name != null)
+                message.name = String(object.name);
+            if (object.expireTime != null) {
+                if (typeof object.expireTime !== "object")
+                    throw TypeError(".google.cloud.bigquery.storage.v1beta2.ReadSession.expireTime: object expected");
+                message.expireTime = $root.google.protobuf.Timestamp.fromObject(object.expireTime, long + 1);
+            }
+            switch (object.dataFormat) {
+            default:
+                // Unknown enum string names are ignored; raw numbers are kept as-is.
+                if (typeof object.dataFormat === "number") {
+                    message.dataFormat = object.dataFormat;
+                    break;
+                }
+                break;
+            case "DATA_FORMAT_UNSPECIFIED":
+            case 0:
+                message.dataFormat = 0;
+                break;
+            case "AVRO":
+            case 1:
+                message.dataFormat = 1;
+                break;
+            case "ARROW":
+            case 2:
+                message.dataFormat = 2;
+                break;
+            }
+            if (object.avroSchema != null) {
+                if (typeof object.avroSchema !== "object")
+                    throw TypeError(".google.cloud.bigquery.storage.v1beta2.ReadSession.avroSchema: object expected");
+                message.avroSchema = $root.google.cloud.bigquery.storage.v1beta2.AvroSchema.fromObject(object.avroSchema, long + 1);
+            }
+            if (object.arrowSchema != null) {
+                if (typeof object.arrowSchema !== "object")
+                    throw TypeError(".google.cloud.bigquery.storage.v1beta2.ReadSession.arrowSchema: object expected");
+                message.arrowSchema = $root.google.cloud.bigquery.storage.v1beta2.ArrowSchema.fromObject(object.arrowSchema, long + 1);
+            }
+            if (object.table != null)
+                message.table = String(object.table);
+            if (object.tableModifiers != null) {
+                if (typeof object.tableModifiers !== "object")
+                    throw TypeError(".google.cloud.bigquery.storage.v1beta2.ReadSession.tableModifiers: object expected");
+                message.tableModifiers = $root.google.cloud.bigquery.storage.v1beta2.ReadSession.TableModifiers.fromObject(object.tableModifiers, long + 1);
+            }
+            if (object.readOptions != null) {
+                if (typeof object.readOptions !== "object")
+                    throw TypeError(".google.cloud.bigquery.storage.v1beta2.ReadSession.readOptions: object expected");
+                message.readOptions = $root.google.cloud.bigquery.storage.v1beta2.ReadSession.TableReadOptions.fromObject(object.readOptions, long + 1);
+            }
+            if (object.streams) {
+                if (!Array.isArray(object.streams))
+                    throw TypeError(".google.cloud.bigquery.storage.v1beta2.ReadSession.streams: array expected");
+                message.streams = [];
+                for (var i = 0; i < object.streams.length; ++i) {
+                    if (typeof object.streams[i] !== "object")
+                        throw TypeError(".google.cloud.bigquery.storage.v1beta2.ReadSession.streams: object expected");
+                    message.streams[i] = $root.google.cloud.bigquery.storage.v1beta2.ReadStream.fromObject(object.streams[i], long + 1);
+                }
+            }
+            return message;
+        };
+
+        /**
+         * Creates a plain object from a ReadSession message. Also converts values to other types if specified.
+         * @function toObject
+         * @memberof google.cloud.bigquery.storage.v1beta2.ReadSession
+         * @static
+         * @param {google.cloud.bigquery.storage.v1beta2.ReadSession} message ReadSession
+         * @param {$protobuf.IConversionOptions} [options] Conversion options
+         * @returns {Object.<string,*>} Plain object
+         */
+        ReadSession.toObject = function toObject(message, options) {
+            if (!options)
+                options = {};
+            var object = {};
+            if (options.arrays || options.defaults)
+                object.streams = [];
+            if (options.defaults) {
+                object.name = "";
+                object.expireTime = null;
+                object.dataFormat = options.enums === String ? "DATA_FORMAT_UNSPECIFIED" : 0;
+                object.table = "";
+                object.tableModifiers = null;
+                object.readOptions = null;
+            }
+            if (message.name != null && message.hasOwnProperty("name"))
+                object.name = message.name;
+            if (message.expireTime != null && message.hasOwnProperty("expireTime"))
+                object.expireTime = $root.google.protobuf.Timestamp.toObject(message.expireTime, options);
+            if (message.dataFormat != null && message.hasOwnProperty("dataFormat"))
+                object.dataFormat = options.enums === String ? $root.google.cloud.bigquery.storage.v1beta2.DataFormat[message.dataFormat] === undefined ? message.dataFormat : $root.google.cloud.bigquery.storage.v1beta2.DataFormat[message.dataFormat] : message.dataFormat;
+            if (message.avroSchema != null && message.hasOwnProperty("avroSchema")) {
+                object.avroSchema = $root.google.cloud.bigquery.storage.v1beta2.AvroSchema.toObject(message.avroSchema, options);
+                // With options.oneofs, object.schema names which oneof member is set.
+                if (options.oneofs)
+                    object.schema = "avroSchema";
+            }
+            if (message.arrowSchema != null && message.hasOwnProperty("arrowSchema")) {
+                object.arrowSchema = $root.google.cloud.bigquery.storage.v1beta2.ArrowSchema.toObject(message.arrowSchema, options);
+                if (options.oneofs)
+                    object.schema = "arrowSchema";
+            }
+            if (message.table != null && message.hasOwnProperty("table"))
+                object.table = message.table;
+            if (message.tableModifiers != null && message.hasOwnProperty("tableModifiers"))
+                object.tableModifiers = $root.google.cloud.bigquery.storage.v1beta2.ReadSession.TableModifiers.toObject(message.tableModifiers, options);
+            if (message.readOptions != null && message.hasOwnProperty("readOptions"))
+                object.readOptions = $root.google.cloud.bigquery.storage.v1beta2.ReadSession.TableReadOptions.toObject(message.readOptions, options);
+            if (message.streams && message.streams.length) {
+                object.streams = [];
+                for (var j = 0; j < message.streams.length; ++j)
+                    object.streams[j] = $root.google.cloud.bigquery.storage.v1beta2.ReadStream.toObject(message.streams[j], options);
+            }
+            return object;
+        };
+
+        /**
+         * Converts this ReadSession to JSON.
+         * @function toJSON
+         * @memberof google.cloud.bigquery.storage.v1beta2.ReadSession
+         * @instance
+         * @returns {Object.<string,*>} JSON object
+         */
+        ReadSession.prototype.toJSON = function toJSON() {
+            return this.constructor.toObject(this, $protobuf.util.toJSONOptions);
+        };
+
+        /**
+         * Gets the default type url for ReadSession
+         * @function getTypeUrl
+         * @memberof google.cloud.bigquery.storage.v1beta2.ReadSession
+         * @static
+         * @param {string} [typeUrlPrefix] your custom typeUrlPrefix (default "type.googleapis.com")
+         * @returns {string} The default type url
+         */
+        ReadSession.getTypeUrl = function getTypeUrl(typeUrlPrefix) {
+            if (typeUrlPrefix === undefined) {
+                typeUrlPrefix = "type.googleapis.com";
+            }
+            return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta2.ReadSession";
+        };
+
+ ReadSession.TableModifiers = (function() {
+
+            /**
+             * Properties of a TableModifiers.
+             * @memberof google.cloud.bigquery.storage.v1beta2.ReadSession
+             * @interface ITableModifiers
+             * @property {google.protobuf.ITimestamp|null} [snapshotTime] TableModifiers snapshotTime
+             */
+
+            /**
+             * Constructs a new TableModifiers.
+             * @memberof google.cloud.bigquery.storage.v1beta2.ReadSession
+             * @classdesc Represents a TableModifiers.
+             * @implements ITableModifiers
+             * @constructor
+             * @param {google.cloud.bigquery.storage.v1beta2.ReadSession.ITableModifiers=} [properties] Properties to set
+             */
+            function TableModifiers(properties) {
+                if (properties)
+                    for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i)
+                        // Skip "__proto__" to avoid prototype pollution via plain objects.
+                        if (properties[keys[i]] != null && keys[i] !== "__proto__")
+                            this[keys[i]] = properties[keys[i]];
+            }
+
+ /**
+ * TableModifiers snapshotTime.
+ * @member {google.protobuf.ITimestamp|null|undefined} snapshotTime
+ * @memberof google.cloud.bigquery.storage.v1beta2.ReadSession.TableModifiers
+ * @instance
+ */
+ TableModifiers.prototype.snapshotTime = null;
+
+ /**
+ * Creates a new TableModifiers instance using the specified properties.
+ * @function create
+ * @memberof google.cloud.bigquery.storage.v1beta2.ReadSession.TableModifiers
+ * @static
+ * @param {google.cloud.bigquery.storage.v1beta2.ReadSession.ITableModifiers=} [properties] Properties to set
+ * @returns {google.cloud.bigquery.storage.v1beta2.ReadSession.TableModifiers} TableModifiers instance
+ */
+ TableModifiers.create = function create(properties) {
+ return new TableModifiers(properties);
+ };
+
+ /**
+ * Encodes the specified TableModifiers message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.ReadSession.TableModifiers.verify|verify} messages.
+ * @function encode
+ * @memberof google.cloud.bigquery.storage.v1beta2.ReadSession.TableModifiers
+ * @static
+ * @param {google.cloud.bigquery.storage.v1beta2.ReadSession.ITableModifiers} message TableModifiers message or plain object to encode
+ * @param {$protobuf.Writer} [writer] Writer to encode to
+ * @returns {$protobuf.Writer} Writer
+ */
+ TableModifiers.encode = function encode(message, writer) {
+ if (!writer)
+ writer = $Writer.create();
+ if (message.snapshotTime != null && Object.hasOwnProperty.call(message, "snapshotTime"))
+ $root.google.protobuf.Timestamp.encode(message.snapshotTime, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim();
+ return writer;
+ };
+
+ /**
+ * Encodes the specified TableModifiers message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.ReadSession.TableModifiers.verify|verify} messages.
+ * @function encodeDelimited
+ * @memberof google.cloud.bigquery.storage.v1beta2.ReadSession.TableModifiers
+ * @static
+ * @param {google.cloud.bigquery.storage.v1beta2.ReadSession.ITableModifiers} message TableModifiers message or plain object to encode
+ * @param {$protobuf.Writer} [writer] Writer to encode to
+ * @returns {$protobuf.Writer} Writer
+ */
+ TableModifiers.encodeDelimited = function encodeDelimited(message, writer) {
+ return this.encode(message, writer).ldelim();
+ };
+
+ /**
+ * Decodes a TableModifiers message from the specified reader or buffer.
+ * @function decode
+ * @memberof google.cloud.bigquery.storage.v1beta2.ReadSession.TableModifiers
+ * @static
+ * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from
+ * @param {number} [length] Message length if known beforehand
+ * @returns {google.cloud.bigquery.storage.v1beta2.ReadSession.TableModifiers} TableModifiers
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ TableModifiers.decode = function decode(reader, length, error, long) {
+ if (!(reader instanceof $Reader))
+ reader = $Reader.create(reader);
+ if (long === undefined)
+ long = 0;
+ if (long > $Reader.recursionLimit)
+ throw Error("maximum nesting depth exceeded");
+ var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta2.ReadSession.TableModifiers();
+ while (reader.pos < end) {
+ var tag = reader.uint32();
+ if (tag === error)
+ break;
+ switch (tag >>> 3) {
+ case 1: {
+ message.snapshotTime = $root.google.protobuf.Timestamp.decode(reader, reader.uint32(), undefined, long + 1);
+ break;
+ }
+ default:
+ reader.skipType(tag & 7, long);
+ break;
+ }
+ }
+ return message;
+ };
+
+ /**
+ * Decodes a TableModifiers message from the specified reader or buffer, length delimited.
+ * @function decodeDelimited
+ * @memberof google.cloud.bigquery.storage.v1beta2.ReadSession.TableModifiers
+ * @static
+ * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from
+ * @returns {google.cloud.bigquery.storage.v1beta2.ReadSession.TableModifiers} TableModifiers
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ TableModifiers.decodeDelimited = function decodeDelimited(reader) {
+ if (!(reader instanceof $Reader))
+ reader = new $Reader(reader);
+ return this.decode(reader, reader.uint32());
+ };
+
+ /**
+ * Verifies a TableModifiers message.
+ * @function verify
+ * @memberof google.cloud.bigquery.storage.v1beta2.ReadSession.TableModifiers
+ * @static
+ * @param {Object.} message Plain object to verify
+ * @returns {string|null} `null` if valid, otherwise the reason why it is not
+ */
+ TableModifiers.verify = function verify(message, long) {
+ if (typeof message !== "object" || message === null)
+ return "object expected";
+ if (long === undefined)
+ long = 0;
+ if (long > $util.recursionLimit)
+ return "maximum nesting depth exceeded";
+ if (message.snapshotTime != null && message.hasOwnProperty("snapshotTime")) {
+ var error = $root.google.protobuf.Timestamp.verify(message.snapshotTime, long + 1);
+ if (error)
+ return "snapshotTime." + error;
+ }
+ return null;
+ };
+
+ /**
+ * Creates a TableModifiers message from a plain object. Also converts values to their respective internal types.
+ * @function fromObject
+ * @memberof google.cloud.bigquery.storage.v1beta2.ReadSession.TableModifiers
+ * @static
+ * @param {Object.} object Plain object
+ * @returns {google.cloud.bigquery.storage.v1beta2.ReadSession.TableModifiers} TableModifiers
+ */
+ TableModifiers.fromObject = function fromObject(object, long) {
+ if (object instanceof $root.google.cloud.bigquery.storage.v1beta2.ReadSession.TableModifiers)
+ return object;
+ if (long === undefined)
+ long = 0;
+ if (long > $util.recursionLimit)
+ throw Error("maximum nesting depth exceeded");
+ var message = new $root.google.cloud.bigquery.storage.v1beta2.ReadSession.TableModifiers();
+ if (object.snapshotTime != null) {
+ if (typeof object.snapshotTime !== "object")
+ throw TypeError(".google.cloud.bigquery.storage.v1beta2.ReadSession.TableModifiers.snapshotTime: object expected");
+ message.snapshotTime = $root.google.protobuf.Timestamp.fromObject(object.snapshotTime, long + 1);
+ }
+ return message;
+ };
+
+ /**
+ * Creates a plain object from a TableModifiers message. Also converts values to other types if specified.
+ * @function toObject
+ * @memberof google.cloud.bigquery.storage.v1beta2.ReadSession.TableModifiers
+ * @static
+ * @param {google.cloud.bigquery.storage.v1beta2.ReadSession.TableModifiers} message TableModifiers
+ * @param {$protobuf.IConversionOptions} [options] Conversion options
+ * @returns {Object.} Plain object
+ */
+ TableModifiers.toObject = function toObject(message, options) {
+ if (!options)
+ options = {};
+ var object = {};
+ if (options.defaults)
+ object.snapshotTime = null;
+ if (message.snapshotTime != null && message.hasOwnProperty("snapshotTime"))
+ object.snapshotTime = $root.google.protobuf.Timestamp.toObject(message.snapshotTime, options);
+ return object;
+ };
+
+ /**
+ * Converts this TableModifiers to JSON.
+ * @function toJSON
+ * @memberof google.cloud.bigquery.storage.v1beta2.ReadSession.TableModifiers
+ * @instance
+ * @returns {Object.} JSON object
+ */
+ TableModifiers.prototype.toJSON = function toJSON() {
+ return this.constructor.toObject(this, $protobuf.util.toJSONOptions);
+ };
+
+ /**
+ * Gets the default type url for TableModifiers
+ * @function getTypeUrl
+ * @memberof google.cloud.bigquery.storage.v1beta2.ReadSession.TableModifiers
+ * @static
+ * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com")
+ * @returns {string} The default type url
+ */
+ TableModifiers.getTypeUrl = function getTypeUrl(typeUrlPrefix) {
+ if (typeUrlPrefix === undefined) {
+ typeUrlPrefix = "type.googleapis.com";
+ }
+ return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta2.ReadSession.TableModifiers";
+ };
+
+ return TableModifiers;
+ })();
+
+ ReadSession.TableReadOptions = (function() {
+
+ /**
+ * Properties of a TableReadOptions.
+ * @memberof google.cloud.bigquery.storage.v1beta2.ReadSession
+ * @interface ITableReadOptions
+ * @property {Array.<string>|null} [selectedFields] TableReadOptions selectedFields
+ * @property {string|null} [rowRestriction] TableReadOptions rowRestriction
+ * @property {google.cloud.bigquery.storage.v1beta2.IArrowSerializationOptions|null} [arrowSerializationOptions] TableReadOptions arrowSerializationOptions
+ */
+
+ /**
+ * Constructs a new TableReadOptions.
+ * @memberof google.cloud.bigquery.storage.v1beta2.ReadSession
+ * @classdesc Represents a TableReadOptions.
+ * @implements ITableReadOptions
+ * @constructor
+ * @param {google.cloud.bigquery.storage.v1beta2.ReadSession.ITableReadOptions=} [properties] Properties to set
+ */
+ function TableReadOptions(properties) {
+ this.selectedFields = [];
+ if (properties)
+ for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i)
+ if (properties[keys[i]] != null && keys[i] !== "__proto__")
+ this[keys[i]] = properties[keys[i]];
+ }
+
+ /**
+ * TableReadOptions selectedFields.
+ * @member {Array.<string>} selectedFields
+ * @memberof google.cloud.bigquery.storage.v1beta2.ReadSession.TableReadOptions
+ * @instance
+ */
+ TableReadOptions.prototype.selectedFields = $util.emptyArray;
+
+ /**
+ * TableReadOptions rowRestriction.
+ * @member {string} rowRestriction
+ * @memberof google.cloud.bigquery.storage.v1beta2.ReadSession.TableReadOptions
+ * @instance
+ */
+ TableReadOptions.prototype.rowRestriction = "";
+
+ /**
+ * TableReadOptions arrowSerializationOptions.
+ * @member {google.cloud.bigquery.storage.v1beta2.IArrowSerializationOptions|null|undefined} arrowSerializationOptions
+ * @memberof google.cloud.bigquery.storage.v1beta2.ReadSession.TableReadOptions
+ * @instance
+ */
+ TableReadOptions.prototype.arrowSerializationOptions = null;
+
+ /**
+ * Creates a new TableReadOptions instance using the specified properties.
+ * @function create
+ * @memberof google.cloud.bigquery.storage.v1beta2.ReadSession.TableReadOptions
+ * @static
+ * @param {google.cloud.bigquery.storage.v1beta2.ReadSession.ITableReadOptions=} [properties] Properties to set
+ * @returns {google.cloud.bigquery.storage.v1beta2.ReadSession.TableReadOptions} TableReadOptions instance
+ */
+ TableReadOptions.create = function create(properties) {
+ return new TableReadOptions(properties);
+ };
+
+ /**
+ * Encodes the specified TableReadOptions message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.ReadSession.TableReadOptions.verify|verify} messages.
+ * @function encode
+ * @memberof google.cloud.bigquery.storage.v1beta2.ReadSession.TableReadOptions
+ * @static
+ * @param {google.cloud.bigquery.storage.v1beta2.ReadSession.ITableReadOptions} message TableReadOptions message or plain object to encode
+ * @param {$protobuf.Writer} [writer] Writer to encode to
+ * @returns {$protobuf.Writer} Writer
+ */
+ TableReadOptions.encode = function encode(message, writer) {
+ if (!writer)
+ writer = $Writer.create();
+ if (message.selectedFields != null && message.selectedFields.length)
+ for (var i = 0; i < message.selectedFields.length; ++i)
+ writer.uint32(/* id 1, wireType 2 =*/10).string(message.selectedFields[i]);
+ if (message.rowRestriction != null && Object.hasOwnProperty.call(message, "rowRestriction"))
+ writer.uint32(/* id 2, wireType 2 =*/18).string(message.rowRestriction);
+ if (message.arrowSerializationOptions != null && Object.hasOwnProperty.call(message, "arrowSerializationOptions"))
+ $root.google.cloud.bigquery.storage.v1beta2.ArrowSerializationOptions.encode(message.arrowSerializationOptions, writer.uint32(/* id 3, wireType 2 =*/26).fork()).ldelim();
+ return writer;
+ };
+
+ /**
+ * Encodes the specified TableReadOptions message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.ReadSession.TableReadOptions.verify|verify} messages.
+ * @function encodeDelimited
+ * @memberof google.cloud.bigquery.storage.v1beta2.ReadSession.TableReadOptions
+ * @static
+ * @param {google.cloud.bigquery.storage.v1beta2.ReadSession.ITableReadOptions} message TableReadOptions message or plain object to encode
+ * @param {$protobuf.Writer} [writer] Writer to encode to
+ * @returns {$protobuf.Writer} Writer
+ */
+ TableReadOptions.encodeDelimited = function encodeDelimited(message, writer) {
+ return this.encode(message, writer).ldelim();
+ };
+
+ /**
+ * Decodes a TableReadOptions message from the specified reader or buffer.
+ * @function decode
+ * @memberof google.cloud.bigquery.storage.v1beta2.ReadSession.TableReadOptions
+ * @static
+ * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from
+ * @param {number} [length] Message length if known beforehand
+ * @returns {google.cloud.bigquery.storage.v1beta2.ReadSession.TableReadOptions} TableReadOptions
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ TableReadOptions.decode = function decode(reader, length, error, long) {
+ if (!(reader instanceof $Reader))
+ reader = $Reader.create(reader);
+ if (long === undefined)
+ long = 0;
+ if (long > $Reader.recursionLimit)
+ throw Error("maximum nesting depth exceeded");
+ var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta2.ReadSession.TableReadOptions();
+ while (reader.pos < end) {
+ var tag = reader.uint32();
+ if (tag === error)
+ break;
+ switch (tag >>> 3) {
+ case 1: {
+ if (!(message.selectedFields && message.selectedFields.length))
+ message.selectedFields = [];
+ message.selectedFields.push(reader.string());
+ break;
+ }
+ case 2: {
+ message.rowRestriction = reader.string();
+ break;
+ }
+ case 3: {
+ message.arrowSerializationOptions = $root.google.cloud.bigquery.storage.v1beta2.ArrowSerializationOptions.decode(reader, reader.uint32(), undefined, long + 1);
+ break;
+ }
+ default:
+ reader.skipType(tag & 7, long);
+ break;
+ }
+ }
+ return message;
+ };
+
+ /**
+ * Decodes a TableReadOptions message from the specified reader or buffer, length delimited.
+ * @function decodeDelimited
+ * @memberof google.cloud.bigquery.storage.v1beta2.ReadSession.TableReadOptions
+ * @static
+ * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from
+ * @returns {google.cloud.bigquery.storage.v1beta2.ReadSession.TableReadOptions} TableReadOptions
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ TableReadOptions.decodeDelimited = function decodeDelimited(reader) {
+ if (!(reader instanceof $Reader))
+ reader = new $Reader(reader);
+ return this.decode(reader, reader.uint32());
+ };
+
+ /**
+ * Verifies a TableReadOptions message.
+ * @function verify
+ * @memberof google.cloud.bigquery.storage.v1beta2.ReadSession.TableReadOptions
+ * @static
+ * @param {Object.<string,*>} message Plain object to verify
+ * @returns {string|null} `null` if valid, otherwise the reason why it is not
+ */
+ TableReadOptions.verify = function verify(message, long) {
+ if (typeof message !== "object" || message === null)
+ return "object expected";
+ if (long === undefined)
+ long = 0;
+ if (long > $util.recursionLimit)
+ return "maximum nesting depth exceeded";
+ if (message.selectedFields != null && message.hasOwnProperty("selectedFields")) {
+ if (!Array.isArray(message.selectedFields))
+ return "selectedFields: array expected";
+ for (var i = 0; i < message.selectedFields.length; ++i)
+ if (!$util.isString(message.selectedFields[i]))
+ return "selectedFields: string[] expected";
+ }
+ if (message.rowRestriction != null && message.hasOwnProperty("rowRestriction"))
+ if (!$util.isString(message.rowRestriction))
+ return "rowRestriction: string expected";
+ if (message.arrowSerializationOptions != null && message.hasOwnProperty("arrowSerializationOptions")) {
+ var error = $root.google.cloud.bigquery.storage.v1beta2.ArrowSerializationOptions.verify(message.arrowSerializationOptions, long + 1);
+ if (error)
+ return "arrowSerializationOptions." + error;
+ }
+ return null;
+ };
+
+ /**
+ * Creates a TableReadOptions message from a plain object. Also converts values to their respective internal types.
+ * @function fromObject
+ * @memberof google.cloud.bigquery.storage.v1beta2.ReadSession.TableReadOptions
+ * @static
+ * @param {Object.<string,*>} object Plain object
+ * @returns {google.cloud.bigquery.storage.v1beta2.ReadSession.TableReadOptions} TableReadOptions
+ */
+ TableReadOptions.fromObject = function fromObject(object, long) {
+ if (object instanceof $root.google.cloud.bigquery.storage.v1beta2.ReadSession.TableReadOptions)
+ return object;
+ if (long === undefined)
+ long = 0;
+ if (long > $util.recursionLimit)
+ throw Error("maximum nesting depth exceeded");
+ var message = new $root.google.cloud.bigquery.storage.v1beta2.ReadSession.TableReadOptions();
+ if (object.selectedFields) {
+ if (!Array.isArray(object.selectedFields))
+ throw TypeError(".google.cloud.bigquery.storage.v1beta2.ReadSession.TableReadOptions.selectedFields: array expected");
+ message.selectedFields = [];
+ for (var i = 0; i < object.selectedFields.length; ++i)
+ message.selectedFields[i] = String(object.selectedFields[i]);
+ }
+ if (object.rowRestriction != null)
+ message.rowRestriction = String(object.rowRestriction);
+ if (object.arrowSerializationOptions != null) {
+ if (typeof object.arrowSerializationOptions !== "object")
+ throw TypeError(".google.cloud.bigquery.storage.v1beta2.ReadSession.TableReadOptions.arrowSerializationOptions: object expected");
+ message.arrowSerializationOptions = $root.google.cloud.bigquery.storage.v1beta2.ArrowSerializationOptions.fromObject(object.arrowSerializationOptions, long + 1);
+ }
+ return message;
+ };
+
+ /**
+ * Creates a plain object from a TableReadOptions message. Also converts values to other types if specified.
+ * @function toObject
+ * @memberof google.cloud.bigquery.storage.v1beta2.ReadSession.TableReadOptions
+ * @static
+ * @param {google.cloud.bigquery.storage.v1beta2.ReadSession.TableReadOptions} message TableReadOptions
+ * @param {$protobuf.IConversionOptions} [options] Conversion options
+ * @returns {Object.<string,*>} Plain object
+ */
+ TableReadOptions.toObject = function toObject(message, options) {
+ if (!options)
+ options = {};
+ var object = {};
+ if (options.arrays || options.defaults)
+ object.selectedFields = [];
+ if (options.defaults) {
+ object.rowRestriction = "";
+ object.arrowSerializationOptions = null;
+ }
+ if (message.selectedFields && message.selectedFields.length) {
+ object.selectedFields = [];
+ for (var j = 0; j < message.selectedFields.length; ++j)
+ object.selectedFields[j] = message.selectedFields[j];
+ }
+ if (message.rowRestriction != null && message.hasOwnProperty("rowRestriction"))
+ object.rowRestriction = message.rowRestriction;
+ if (message.arrowSerializationOptions != null && message.hasOwnProperty("arrowSerializationOptions"))
+ object.arrowSerializationOptions = $root.google.cloud.bigquery.storage.v1beta2.ArrowSerializationOptions.toObject(message.arrowSerializationOptions, options);
+ return object;
+ };
+
+ /**
+ * Converts this TableReadOptions to JSON.
+ * @function toJSON
+ * @memberof google.cloud.bigquery.storage.v1beta2.ReadSession.TableReadOptions
+ * @instance
+ * @returns {Object.<string,*>} JSON object
+ */
+ TableReadOptions.prototype.toJSON = function toJSON() {
+ return this.constructor.toObject(this, $protobuf.util.toJSONOptions);
+ };
+
+ /**
+ * Gets the default type url for TableReadOptions
+ * @function getTypeUrl
+ * @memberof google.cloud.bigquery.storage.v1beta2.ReadSession.TableReadOptions
+ * @static
+ * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com")
+ * @returns {string} The default type url
+ */
+ TableReadOptions.getTypeUrl = function getTypeUrl(typeUrlPrefix) {
+ if (typeUrlPrefix === undefined) {
+ typeUrlPrefix = "type.googleapis.com";
+ }
+ return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta2.ReadSession.TableReadOptions";
+ };
+
+ return TableReadOptions;
+ })();
+
+ return ReadSession;
+ })();
+
+ v1beta2.ReadStream = (function() {
+
+ /**
+ * Properties of a ReadStream.
+ * @memberof google.cloud.bigquery.storage.v1beta2
+ * @interface IReadStream
+ * @property {string|null} [name] ReadStream name
+ */
+
+ /**
+ * Constructs a new ReadStream.
+ * @memberof google.cloud.bigquery.storage.v1beta2
+ * @classdesc Represents a ReadStream.
+ * @implements IReadStream
+ * @constructor
+ * @param {google.cloud.bigquery.storage.v1beta2.IReadStream=} [properties] Properties to set
+ */
+ function ReadStream(properties) {
+ if (properties)
+ for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i)
+ if (properties[keys[i]] != null && keys[i] !== "__proto__")
+ this[keys[i]] = properties[keys[i]];
+ }
+
+ /**
+ * ReadStream name.
+ * @member {string} name
+ * @memberof google.cloud.bigquery.storage.v1beta2.ReadStream
+ * @instance
+ */
+ ReadStream.prototype.name = "";
+
+ /**
+ * Creates a new ReadStream instance using the specified properties.
+ * @function create
+ * @memberof google.cloud.bigquery.storage.v1beta2.ReadStream
+ * @static
+ * @param {google.cloud.bigquery.storage.v1beta2.IReadStream=} [properties] Properties to set
+ * @returns {google.cloud.bigquery.storage.v1beta2.ReadStream} ReadStream instance
+ */
+ ReadStream.create = function create(properties) {
+ return new ReadStream(properties);
+ };
+
+ /**
+ * Encodes the specified ReadStream message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.ReadStream.verify|verify} messages.
+ * @function encode
+ * @memberof google.cloud.bigquery.storage.v1beta2.ReadStream
+ * @static
+ * @param {google.cloud.bigquery.storage.v1beta2.IReadStream} message ReadStream message or plain object to encode
+ * @param {$protobuf.Writer} [writer] Writer to encode to
+ * @returns {$protobuf.Writer} Writer
+ */
+ ReadStream.encode = function encode(message, writer) {
+ if (!writer)
+ writer = $Writer.create();
+ if (message.name != null && Object.hasOwnProperty.call(message, "name"))
+ writer.uint32(/* id 1, wireType 2 =*/10).string(message.name);
+ return writer;
+ };
+
+ /**
+ * Encodes the specified ReadStream message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.ReadStream.verify|verify} messages.
+ * @function encodeDelimited
+ * @memberof google.cloud.bigquery.storage.v1beta2.ReadStream
+ * @static
+ * @param {google.cloud.bigquery.storage.v1beta2.IReadStream} message ReadStream message or plain object to encode
+ * @param {$protobuf.Writer} [writer] Writer to encode to
+ * @returns {$protobuf.Writer} Writer
+ */
+ ReadStream.encodeDelimited = function encodeDelimited(message, writer) {
+ return this.encode(message, writer).ldelim();
+ };
+
+ /**
+ * Decodes a ReadStream message from the specified reader or buffer.
+ * @function decode
+ * @memberof google.cloud.bigquery.storage.v1beta2.ReadStream
+ * @static
+ * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from
+ * @param {number} [length] Message length if known beforehand
+ * @returns {google.cloud.bigquery.storage.v1beta2.ReadStream} ReadStream
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ ReadStream.decode = function decode(reader, length, error, long) {
+ if (!(reader instanceof $Reader))
+ reader = $Reader.create(reader);
+ if (long === undefined)
+ long = 0;
+ if (long > $Reader.recursionLimit)
+ throw Error("maximum nesting depth exceeded");
+ var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta2.ReadStream();
+ while (reader.pos < end) {
+ var tag = reader.uint32();
+ if (tag === error)
+ break;
+ switch (tag >>> 3) {
+ case 1: {
+ message.name = reader.string();
+ break;
+ }
+ default:
+ reader.skipType(tag & 7, long);
+ break;
+ }
+ }
+ return message;
+ };
+
+ /**
+ * Decodes a ReadStream message from the specified reader or buffer, length delimited.
+ * @function decodeDelimited
+ * @memberof google.cloud.bigquery.storage.v1beta2.ReadStream
+ * @static
+ * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from
+ * @returns {google.cloud.bigquery.storage.v1beta2.ReadStream} ReadStream
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ ReadStream.decodeDelimited = function decodeDelimited(reader) {
+ if (!(reader instanceof $Reader))
+ reader = new $Reader(reader);
+ return this.decode(reader, reader.uint32());
+ };
+
+ /**
+ * Verifies a ReadStream message.
+ * @function verify
+ * @memberof google.cloud.bigquery.storage.v1beta2.ReadStream
+ * @static
+ * @param {Object.<string,*>} message Plain object to verify
+ * @returns {string|null} `null` if valid, otherwise the reason why it is not
+ */
+ ReadStream.verify = function verify(message, long) {
+ if (typeof message !== "object" || message === null)
+ return "object expected";
+ if (long === undefined)
+ long = 0;
+ if (long > $util.recursionLimit)
+ return "maximum nesting depth exceeded";
+ if (message.name != null && message.hasOwnProperty("name"))
+ if (!$util.isString(message.name))
+ return "name: string expected";
+ return null;
+ };
+
+ /**
+ * Creates a ReadStream message from a plain object. Also converts values to their respective internal types.
+ * @function fromObject
+ * @memberof google.cloud.bigquery.storage.v1beta2.ReadStream
+ * @static
+ * @param {Object.<string,*>} object Plain object
+ * @returns {google.cloud.bigquery.storage.v1beta2.ReadStream} ReadStream
+ */
+ ReadStream.fromObject = function fromObject(object, long) {
+ if (object instanceof $root.google.cloud.bigquery.storage.v1beta2.ReadStream)
+ return object;
+ if (long === undefined)
+ long = 0;
+ if (long > $util.recursionLimit)
+ throw Error("maximum nesting depth exceeded");
+ var message = new $root.google.cloud.bigquery.storage.v1beta2.ReadStream();
+ if (object.name != null)
+ message.name = String(object.name);
+ return message;
+ };
+
+ /**
+ * Creates a plain object from a ReadStream message. Also converts values to other types if specified.
+ * @function toObject
+ * @memberof google.cloud.bigquery.storage.v1beta2.ReadStream
+ * @static
+ * @param {google.cloud.bigquery.storage.v1beta2.ReadStream} message ReadStream
+ * @param {$protobuf.IConversionOptions} [options] Conversion options
+ * @returns {Object.<string,*>} Plain object
+ */
+ ReadStream.toObject = function toObject(message, options) {
+ if (!options)
+ options = {};
+ var object = {};
+ if (options.defaults)
+ object.name = "";
+ if (message.name != null && message.hasOwnProperty("name"))
+ object.name = message.name;
+ return object;
+ };
+
+ /**
+ * Converts this ReadStream to JSON.
+ * @function toJSON
+ * @memberof google.cloud.bigquery.storage.v1beta2.ReadStream
+ * @instance
+ * @returns {Object.<string,*>} JSON object
+ */
+ ReadStream.prototype.toJSON = function toJSON() {
+ return this.constructor.toObject(this, $protobuf.util.toJSONOptions);
+ };
+
+ /**
+ * Gets the default type url for ReadStream
+ * @function getTypeUrl
+ * @memberof google.cloud.bigquery.storage.v1beta2.ReadStream
+ * @static
+ * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com")
+ * @returns {string} The default type url
+ */
+ ReadStream.getTypeUrl = function getTypeUrl(typeUrlPrefix) {
+ if (typeUrlPrefix === undefined) {
+ typeUrlPrefix = "type.googleapis.com";
+ }
+ return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta2.ReadStream";
+ };
+
+ return ReadStream;
+ })();
+
+ v1beta2.WriteStream = (function() {
+
+ /**
+ * Properties of a WriteStream.
+ * @memberof google.cloud.bigquery.storage.v1beta2
+ * @interface IWriteStream
+ * @property {string|null} [name] WriteStream name
+ * @property {google.cloud.bigquery.storage.v1beta2.WriteStream.Type|null} [type] WriteStream type
+ * @property {google.protobuf.ITimestamp|null} [createTime] WriteStream createTime
+ * @property {google.protobuf.ITimestamp|null} [commitTime] WriteStream commitTime
+ * @property {google.cloud.bigquery.storage.v1beta2.ITableSchema|null} [tableSchema] WriteStream tableSchema
+ */
+
+ /**
+ * Constructs a new WriteStream.
+ * @memberof google.cloud.bigquery.storage.v1beta2
+ * @classdesc Represents a WriteStream.
+ * @implements IWriteStream
+ * @constructor
+ * @param {google.cloud.bigquery.storage.v1beta2.IWriteStream=} [properties] Properties to set
+ */
+ function WriteStream(properties) {
+ if (properties)
+ for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i)
+ if (properties[keys[i]] != null && keys[i] !== "__proto__")
+ this[keys[i]] = properties[keys[i]];
+ }
+
+ /**
+ * WriteStream name.
+ * @member {string} name
+ * @memberof google.cloud.bigquery.storage.v1beta2.WriteStream
+ * @instance
+ */
+ WriteStream.prototype.name = "";
+
+ /**
+ * WriteStream type.
+ * @member {google.cloud.bigquery.storage.v1beta2.WriteStream.Type} type
+ * @memberof google.cloud.bigquery.storage.v1beta2.WriteStream
+ * @instance
+ */
+ WriteStream.prototype.type = 0;
+
+ /**
+ * WriteStream createTime.
+ * @member {google.protobuf.ITimestamp|null|undefined} createTime
+ * @memberof google.cloud.bigquery.storage.v1beta2.WriteStream
+ * @instance
+ */
+ WriteStream.prototype.createTime = null;
+
+ /**
+ * WriteStream commitTime.
+ * @member {google.protobuf.ITimestamp|null|undefined} commitTime
+ * @memberof google.cloud.bigquery.storage.v1beta2.WriteStream
+ * @instance
+ */
+ WriteStream.prototype.commitTime = null;
+
+ /**
+ * WriteStream tableSchema.
+ * @member {google.cloud.bigquery.storage.v1beta2.ITableSchema|null|undefined} tableSchema
+ * @memberof google.cloud.bigquery.storage.v1beta2.WriteStream
+ * @instance
+ */
+ WriteStream.prototype.tableSchema = null;
+
+ /**
+ * Creates a new WriteStream instance using the specified properties.
+ * @function create
+ * @memberof google.cloud.bigquery.storage.v1beta2.WriteStream
+ * @static
+ * @param {google.cloud.bigquery.storage.v1beta2.IWriteStream=} [properties] Properties to set
+ * @returns {google.cloud.bigquery.storage.v1beta2.WriteStream} WriteStream instance
+ */
+ WriteStream.create = function create(properties) {
+ return new WriteStream(properties);
+ };
+
/**
 * Encodes the specified WriteStream message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.WriteStream.verify|verify} messages.
 * @function encode
 * @memberof google.cloud.bigquery.storage.v1beta2.WriteStream
 * @static
 * @param {google.cloud.bigquery.storage.v1beta2.IWriteStream} message WriteStream message or plain object to encode
 * @param {$protobuf.Writer} [writer] Writer to encode to
 * @returns {$protobuf.Writer} Writer
 */
WriteStream.encode = function encode(message, writer) {
    // Lazily create a writer when the caller did not supply one.
    if (!writer)
        writer = $Writer.create();
    // Fields are emitted in ascending field-number order; each uint32 tag
    // encodes (fieldNumber << 3 | wireType). Only own, non-null properties
    // are written, so absent fields stay absent on the wire.
    if (message.name != null && Object.hasOwnProperty.call(message, "name"))
        writer.uint32(/* id 1, wireType 2 =*/10).string(message.name);
    if (message.type != null && Object.hasOwnProperty.call(message, "type"))
        writer.uint32(/* id 2, wireType 0 =*/16).int32(message.type);
    // Nested messages are length-delimited: fork() starts a sub-buffer and
    // ldelim() prefixes it with its byte length.
    if (message.createTime != null && Object.hasOwnProperty.call(message, "createTime"))
        $root.google.protobuf.Timestamp.encode(message.createTime, writer.uint32(/* id 3, wireType 2 =*/26).fork()).ldelim();
    if (message.commitTime != null && Object.hasOwnProperty.call(message, "commitTime"))
        $root.google.protobuf.Timestamp.encode(message.commitTime, writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim();
    if (message.tableSchema != null && Object.hasOwnProperty.call(message, "tableSchema"))
        $root.google.cloud.bigquery.storage.v1beta2.TableSchema.encode(message.tableSchema, writer.uint32(/* id 5, wireType 2 =*/42).fork()).ldelim();
    return writer;
};
+
/**
 * Encodes the specified WriteStream message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.WriteStream.verify|verify} messages.
 * @function encodeDelimited
 * @memberof google.cloud.bigquery.storage.v1beta2.WriteStream
 * @static
 * @param {google.cloud.bigquery.storage.v1beta2.IWriteStream} message WriteStream message or plain object to encode
 * @param {$protobuf.Writer} [writer] Writer to encode to
 * @returns {$protobuf.Writer} Writer
 */
WriteStream.encodeDelimited = function encodeDelimited(message, writer) {
    // Encode normally, then prefix the message with its byte length.
    var encoded = this.encode(message, writer);
    return encoded.ldelim();
};
+
/**
 * Decodes a WriteStream message from the specified reader or buffer.
 * @function decode
 * @memberof google.cloud.bigquery.storage.v1beta2.WriteStream
 * @static
 * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from
 * @param {number} [length] Message length if known beforehand
 * @param {number} [error] Tag value that, when encountered, stops decoding early
 * @param {number} [long] Current recursion depth; external callers normally omit it
 * @returns {google.cloud.bigquery.storage.v1beta2.WriteStream} WriteStream
 * @throws {Error} If the payload is not a reader or valid buffer
 * @throws {$protobuf.util.ProtocolError} If required fields are missing
 */
WriteStream.decode = function decode(reader, length, error, long) {
    if (!(reader instanceof $Reader))
        reader = $Reader.create(reader);
    // "long" doubles as a recursion-depth counter to guard against
    // maliciously deep nesting of sub-messages.
    if (long === undefined)
        long = 0;
    if (long > $Reader.recursionLimit)
        throw Error("maximum nesting depth exceeded");
    var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta2.WriteStream();
    while (reader.pos < end) {
        var tag = reader.uint32();
        if (tag === error)
            break;
        // Dispatch on the field number (tag >>> 3); unknown fields are
        // skipped by wire type so forward compatibility is preserved.
        switch (tag >>> 3) {
        case 1: {
            message.name = reader.string();
            break;
        }
        case 2: {
            message.type = reader.int32();
            break;
        }
        case 3: {
            message.createTime = $root.google.protobuf.Timestamp.decode(reader, reader.uint32(), undefined, long + 1);
            break;
        }
        case 4: {
            message.commitTime = $root.google.protobuf.Timestamp.decode(reader, reader.uint32(), undefined, long + 1);
            break;
        }
        case 5: {
            message.tableSchema = $root.google.cloud.bigquery.storage.v1beta2.TableSchema.decode(reader, reader.uint32(), undefined, long + 1);
            break;
        }
        default:
            reader.skipType(tag & 7, long);
            break;
        }
    }
    return message;
};
+
/**
 * Decodes a WriteStream message from the specified reader or buffer, length delimited.
 * @function decodeDelimited
 * @memberof google.cloud.bigquery.storage.v1beta2.WriteStream
 * @static
 * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from
 * @returns {google.cloud.bigquery.storage.v1beta2.WriteStream} WriteStream
 * @throws {Error} If the payload is not a reader or valid buffer
 * @throws {$protobuf.util.ProtocolError} If required fields are missing
 */
WriteStream.decodeDelimited = function decodeDelimited(reader) {
    if (!(reader instanceof $Reader))
        reader = new $Reader(reader);
    // Read the varint length prefix, then decode exactly that many bytes.
    var size = reader.uint32();
    return this.decode(reader, size);
};
+
/**
 * Verifies a WriteStream message.
 * @function verify
 * @memberof google.cloud.bigquery.storage.v1beta2.WriteStream
 * @static
 * @param {Object.<string,*>} message Plain object to verify
 * @param {number} [long] Current recursion depth; external callers normally omit it
 * @returns {string|null} `null` if valid, otherwise the reason why it is not
 */
WriteStream.verify = function verify(message, long) {
    if (typeof message !== "object" || message === null)
        return "object expected";
    // "long" is the recursion-depth counter guarding nested verification.
    if (long === undefined)
        long = 0;
    if (long > $util.recursionLimit)
        return "maximum nesting depth exceeded";
    if (message.name != null && message.hasOwnProperty("name"))
        if (!$util.isString(message.name))
            return "name: string expected";
    // type must be one of the known WriteStream.Type values (0..3).
    if (message.type != null && message.hasOwnProperty("type"))
        switch (message.type) {
        default:
            return "type: enum value expected";
        case 0:
        case 1:
        case 2:
        case 3:
            break;
        }
    // Nested messages are verified recursively; any failure is prefixed
    // with the field path for easier diagnosis.
    if (message.createTime != null && message.hasOwnProperty("createTime")) {
        var error = $root.google.protobuf.Timestamp.verify(message.createTime, long + 1);
        if (error)
            return "createTime." + error;
    }
    if (message.commitTime != null && message.hasOwnProperty("commitTime")) {
        var error = $root.google.protobuf.Timestamp.verify(message.commitTime, long + 1);
        if (error)
            return "commitTime." + error;
    }
    if (message.tableSchema != null && message.hasOwnProperty("tableSchema")) {
        var error = $root.google.cloud.bigquery.storage.v1beta2.TableSchema.verify(message.tableSchema, long + 1);
        if (error)
            return "tableSchema." + error;
    }
    return null;
};
+
/**
 * Creates a WriteStream message from a plain object. Also converts values to their respective internal types.
 * @function fromObject
 * @memberof google.cloud.bigquery.storage.v1beta2.WriteStream
 * @static
 * @param {Object.<string,*>} object Plain object
 * @param {number} [long] Current recursion depth; external callers normally omit it
 * @returns {google.cloud.bigquery.storage.v1beta2.WriteStream} WriteStream
 */
WriteStream.fromObject = function fromObject(object, long) {
    // Already a typed message: return it unchanged.
    if (object instanceof $root.google.cloud.bigquery.storage.v1beta2.WriteStream)
        return object;
    if (long === undefined)
        long = 0;
    if (long > $util.recursionLimit)
        throw Error("maximum nesting depth exceeded");
    var message = new $root.google.cloud.bigquery.storage.v1beta2.WriteStream();
    if (object.name != null)
        message.name = String(object.name);
    // Enum conversion: accepts either the string name or the numeric value;
    // unknown numbers are kept as-is (open-enum behavior), anything else is
    // silently ignored.
    switch (object.type) {
    default:
        if (typeof object.type === "number") {
            message.type = object.type;
            break;
        }
        break;
    case "TYPE_UNSPECIFIED":
    case 0:
        message.type = 0;
        break;
    case "COMMITTED":
    case 1:
        message.type = 1;
        break;
    case "PENDING":
    case 2:
        message.type = 2;
        break;
    case "BUFFERED":
    case 3:
        message.type = 3;
        break;
    }
    if (object.createTime != null) {
        if (typeof object.createTime !== "object")
            throw TypeError(".google.cloud.bigquery.storage.v1beta2.WriteStream.createTime: object expected");
        message.createTime = $root.google.protobuf.Timestamp.fromObject(object.createTime, long + 1);
    }
    if (object.commitTime != null) {
        if (typeof object.commitTime !== "object")
            throw TypeError(".google.cloud.bigquery.storage.v1beta2.WriteStream.commitTime: object expected");
        message.commitTime = $root.google.protobuf.Timestamp.fromObject(object.commitTime, long + 1);
    }
    if (object.tableSchema != null) {
        if (typeof object.tableSchema !== "object")
            throw TypeError(".google.cloud.bigquery.storage.v1beta2.WriteStream.tableSchema: object expected");
        message.tableSchema = $root.google.cloud.bigquery.storage.v1beta2.TableSchema.fromObject(object.tableSchema, long + 1);
    }
    return message;
};
+
/**
 * Creates a plain object from a WriteStream message. Also converts values to other types if specified.
 * @function toObject
 * @memberof google.cloud.bigquery.storage.v1beta2.WriteStream
 * @static
 * @param {google.cloud.bigquery.storage.v1beta2.WriteStream} message WriteStream
 * @param {$protobuf.IConversionOptions} [options] Conversion options
 * @returns {Object.<string,*>} Plain object
 */
WriteStream.toObject = function toObject(message, options) {
    if (!options)
        options = {};
    var object = {};
    // options.defaults pre-populates every field with its default value so
    // the output shape is stable even for unset fields.
    if (options.defaults) {
        object.name = "";
        object.type = options.enums === String ? "TYPE_UNSPECIFIED" : 0;
        object.createTime = null;
        object.commitTime = null;
        object.tableSchema = null;
    }
    if (message.name != null && message.hasOwnProperty("name"))
        object.name = message.name;
    // With options.enums === String, known enum numbers are mapped to their
    // names; unknown numbers pass through unchanged.
    if (message.type != null && message.hasOwnProperty("type"))
        object.type = options.enums === String ? $root.google.cloud.bigquery.storage.v1beta2.WriteStream.Type[message.type] === undefined ? message.type : $root.google.cloud.bigquery.storage.v1beta2.WriteStream.Type[message.type] : message.type;
    if (message.createTime != null && message.hasOwnProperty("createTime"))
        object.createTime = $root.google.protobuf.Timestamp.toObject(message.createTime, options);
    if (message.commitTime != null && message.hasOwnProperty("commitTime"))
        object.commitTime = $root.google.protobuf.Timestamp.toObject(message.commitTime, options);
    if (message.tableSchema != null && message.hasOwnProperty("tableSchema"))
        object.tableSchema = $root.google.cloud.bigquery.storage.v1beta2.TableSchema.toObject(message.tableSchema, options);
    return object;
};
+
/**
 * Converts this WriteStream to JSON.
 * @function toJSON
 * @memberof google.cloud.bigquery.storage.v1beta2.WriteStream
 * @instance
 * @returns {Object.<string,*>} JSON object
 */
WriteStream.prototype.toJSON = function toJSON() {
    // Uses the library-wide JSON conversion options for consistent output.
    var jsonOptions = $protobuf.util.toJSONOptions;
    return this.constructor.toObject(this, jsonOptions);
};
+
/**
 * Gets the default type url for WriteStream
 * @function getTypeUrl
 * @memberof google.cloud.bigquery.storage.v1beta2.WriteStream
 * @static
 * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com")
 * @returns {string} The default type url
 */
WriteStream.getTypeUrl = function getTypeUrl(typeUrlPrefix) {
    // Fall back to the canonical prefix only when the argument is omitted.
    var prefix = typeUrlPrefix === undefined ? "type.googleapis.com" : typeUrlPrefix;
    return prefix + "/google.cloud.bigquery.storage.v1beta2.WriteStream";
};
+
/**
 * Type enum.
 * @name google.cloud.bigquery.storage.v1beta2.WriteStream.Type
 * @enum {number}
 * @property {number} TYPE_UNSPECIFIED=0 TYPE_UNSPECIFIED value
 * @property {number} COMMITTED=1 COMMITTED value
 * @property {number} PENDING=2 PENDING value
 * @property {number} BUFFERED=3 BUFFERED value
 */
WriteStream.Type = (function() {
    // Build a bidirectional map: name -> number on the returned object,
    // number -> name on its prototype (valuesById).
    var valuesById = {}, values = Object.create(valuesById);
    var names = ["TYPE_UNSPECIFIED", "COMMITTED", "PENDING", "BUFFERED"];
    for (var id = 0; id < names.length; ++id)
        values[valuesById[id] = names[id]] = id;
    return values;
})();
+
+ return WriteStream;
+ })();
+
// NOTE(review): this block appears to be protobuf.js-generated static code;
// keep it in sync with the generator — manual logic edits are normally
// overwritten on regeneration.
v1beta2.TableSchema = (function() {

    /**
     * Properties of a TableSchema.
     * @memberof google.cloud.bigquery.storage.v1beta2
     * @interface ITableSchema
     * @property {Array.<google.cloud.bigquery.storage.v1beta2.ITableFieldSchema>|null} [fields] TableSchema fields
     */

    /**
     * Constructs a new TableSchema.
     * @memberof google.cloud.bigquery.storage.v1beta2
     * @classdesc Represents a TableSchema.
     * @implements ITableSchema
     * @constructor
     * @param {google.cloud.bigquery.storage.v1beta2.ITableSchema=} [properties] Properties to set
     */
    function TableSchema(properties) {
        this.fields = [];
        // Copy own, non-null properties; "__proto__" is excluded to avoid
        // prototype pollution.
        if (properties)
            for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i)
                if (properties[keys[i]] != null && keys[i] !== "__proto__")
                    this[keys[i]] = properties[keys[i]];
    }

    /**
     * TableSchema fields.
     * @member {Array.<google.cloud.bigquery.storage.v1beta2.ITableFieldSchema>} fields
     * @memberof google.cloud.bigquery.storage.v1beta2.TableSchema
     * @instance
     */
    TableSchema.prototype.fields = $util.emptyArray;

    /**
     * Creates a new TableSchema instance using the specified properties.
     * @function create
     * @memberof google.cloud.bigquery.storage.v1beta2.TableSchema
     * @static
     * @param {google.cloud.bigquery.storage.v1beta2.ITableSchema=} [properties] Properties to set
     * @returns {google.cloud.bigquery.storage.v1beta2.TableSchema} TableSchema instance
     */
    TableSchema.create = function create(properties) {
        return new TableSchema(properties);
    };

    /**
     * Encodes the specified TableSchema message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.TableSchema.verify|verify} messages.
     * @function encode
     * @memberof google.cloud.bigquery.storage.v1beta2.TableSchema
     * @static
     * @param {google.cloud.bigquery.storage.v1beta2.ITableSchema} message TableSchema message or plain object to encode
     * @param {$protobuf.Writer} [writer] Writer to encode to
     * @returns {$protobuf.Writer} Writer
     */
    TableSchema.encode = function encode(message, writer) {
        if (!writer)
            writer = $Writer.create();
        // Repeated nested message: each element is written length-delimited
        // under field id 1.
        if (message.fields != null && message.fields.length)
            for (var i = 0; i < message.fields.length; ++i)
                $root.google.cloud.bigquery.storage.v1beta2.TableFieldSchema.encode(message.fields[i], writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim();
        return writer;
    };

    /**
     * Encodes the specified TableSchema message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.TableSchema.verify|verify} messages.
     * @function encodeDelimited
     * @memberof google.cloud.bigquery.storage.v1beta2.TableSchema
     * @static
     * @param {google.cloud.bigquery.storage.v1beta2.ITableSchema} message TableSchema message or plain object to encode
     * @param {$protobuf.Writer} [writer] Writer to encode to
     * @returns {$protobuf.Writer} Writer
     */
    TableSchema.encodeDelimited = function encodeDelimited(message, writer) {
        return this.encode(message, writer).ldelim();
    };

    /**
     * Decodes a TableSchema message from the specified reader or buffer.
     * @function decode
     * @memberof google.cloud.bigquery.storage.v1beta2.TableSchema
     * @static
     * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from
     * @param {number} [length] Message length if known beforehand
     * @param {number} [error] Tag value that, when encountered, stops decoding early
     * @param {number} [long] Current recursion depth; external callers normally omit it
     * @returns {google.cloud.bigquery.storage.v1beta2.TableSchema} TableSchema
     * @throws {Error} If the payload is not a reader or valid buffer
     * @throws {$protobuf.util.ProtocolError} If required fields are missing
     */
    TableSchema.decode = function decode(reader, length, error, long) {
        if (!(reader instanceof $Reader))
            reader = $Reader.create(reader);
        // "long" doubles as the recursion-depth counter.
        if (long === undefined)
            long = 0;
        if (long > $Reader.recursionLimit)
            throw Error("maximum nesting depth exceeded");
        var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta2.TableSchema();
        while (reader.pos < end) {
            var tag = reader.uint32();
            if (tag === error)
                break;
            switch (tag >>> 3) {
            case 1: {
                if (!(message.fields && message.fields.length))
                    message.fields = [];
                message.fields.push($root.google.cloud.bigquery.storage.v1beta2.TableFieldSchema.decode(reader, reader.uint32(), undefined, long + 1));
                break;
            }
            default:
                // Unknown fields are skipped by wire type for forward
                // compatibility.
                reader.skipType(tag & 7, long);
                break;
            }
        }
        return message;
    };

    /**
     * Decodes a TableSchema message from the specified reader or buffer, length delimited.
     * @function decodeDelimited
     * @memberof google.cloud.bigquery.storage.v1beta2.TableSchema
     * @static
     * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from
     * @returns {google.cloud.bigquery.storage.v1beta2.TableSchema} TableSchema
     * @throws {Error} If the payload is not a reader or valid buffer
     * @throws {$protobuf.util.ProtocolError} If required fields are missing
     */
    TableSchema.decodeDelimited = function decodeDelimited(reader) {
        if (!(reader instanceof $Reader))
            reader = new $Reader(reader);
        return this.decode(reader, reader.uint32());
    };

    /**
     * Verifies a TableSchema message.
     * @function verify
     * @memberof google.cloud.bigquery.storage.v1beta2.TableSchema
     * @static
     * @param {Object.<string,*>} message Plain object to verify
     * @param {number} [long] Current recursion depth; external callers normally omit it
     * @returns {string|null} `null` if valid, otherwise the reason why it is not
     */
    TableSchema.verify = function verify(message, long) {
        if (typeof message !== "object" || message === null)
            return "object expected";
        if (long === undefined)
            long = 0;
        if (long > $util.recursionLimit)
            return "maximum nesting depth exceeded";
        if (message.fields != null && message.hasOwnProperty("fields")) {
            if (!Array.isArray(message.fields))
                return "fields: array expected";
            // Each element is verified recursively; failures are prefixed
            // with the field path.
            for (var i = 0; i < message.fields.length; ++i) {
                var error = $root.google.cloud.bigquery.storage.v1beta2.TableFieldSchema.verify(message.fields[i], long + 1);
                if (error)
                    return "fields." + error;
            }
        }
        return null;
    };

    /**
     * Creates a TableSchema message from a plain object. Also converts values to their respective internal types.
     * @function fromObject
     * @memberof google.cloud.bigquery.storage.v1beta2.TableSchema
     * @static
     * @param {Object.<string,*>} object Plain object
     * @param {number} [long] Current recursion depth; external callers normally omit it
     * @returns {google.cloud.bigquery.storage.v1beta2.TableSchema} TableSchema
     */
    TableSchema.fromObject = function fromObject(object, long) {
        if (object instanceof $root.google.cloud.bigquery.storage.v1beta2.TableSchema)
            return object;
        if (long === undefined)
            long = 0;
        if (long > $util.recursionLimit)
            throw Error("maximum nesting depth exceeded");
        var message = new $root.google.cloud.bigquery.storage.v1beta2.TableSchema();
        if (object.fields) {
            if (!Array.isArray(object.fields))
                throw TypeError(".google.cloud.bigquery.storage.v1beta2.TableSchema.fields: array expected");
            message.fields = [];
            for (var i = 0; i < object.fields.length; ++i) {
                if (typeof object.fields[i] !== "object")
                    throw TypeError(".google.cloud.bigquery.storage.v1beta2.TableSchema.fields: object expected");
                message.fields[i] = $root.google.cloud.bigquery.storage.v1beta2.TableFieldSchema.fromObject(object.fields[i], long + 1);
            }
        }
        return message;
    };

    /**
     * Creates a plain object from a TableSchema message. Also converts values to other types if specified.
     * @function toObject
     * @memberof google.cloud.bigquery.storage.v1beta2.TableSchema
     * @static
     * @param {google.cloud.bigquery.storage.v1beta2.TableSchema} message TableSchema
     * @param {$protobuf.IConversionOptions} [options] Conversion options
     * @returns {Object.<string,*>} Plain object
     */
    TableSchema.toObject = function toObject(message, options) {
        if (!options)
            options = {};
        var object = {};
        // options.arrays/defaults guarantee the "fields" key exists even
        // when the message has no elements.
        if (options.arrays || options.defaults)
            object.fields = [];
        if (message.fields && message.fields.length) {
            object.fields = [];
            for (var j = 0; j < message.fields.length; ++j)
                object.fields[j] = $root.google.cloud.bigquery.storage.v1beta2.TableFieldSchema.toObject(message.fields[j], options);
        }
        return object;
    };

    /**
     * Converts this TableSchema to JSON.
     * @function toJSON
     * @memberof google.cloud.bigquery.storage.v1beta2.TableSchema
     * @instance
     * @returns {Object.<string,*>} JSON object
     */
    TableSchema.prototype.toJSON = function toJSON() {
        return this.constructor.toObject(this, $protobuf.util.toJSONOptions);
    };

    /**
     * Gets the default type url for TableSchema
     * @function getTypeUrl
     * @memberof google.cloud.bigquery.storage.v1beta2.TableSchema
     * @static
     * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com")
     * @returns {string} The default type url
     */
    TableSchema.getTypeUrl = function getTypeUrl(typeUrlPrefix) {
        if (typeUrlPrefix === undefined) {
            typeUrlPrefix = "type.googleapis.com";
        }
        return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta2.TableSchema";
    };

    return TableSchema;
})();
+
// NOTE(review): this block appears to be protobuf.js-generated static code;
// keep it in sync with the generator — manual logic edits are normally
// overwritten on regeneration.
v1beta2.TableFieldSchema = (function() {

    /**
     * Properties of a TableFieldSchema.
     * @memberof google.cloud.bigquery.storage.v1beta2
     * @interface ITableFieldSchema
     * @property {string|null} [name] TableFieldSchema name
     * @property {google.cloud.bigquery.storage.v1beta2.TableFieldSchema.Type|null} [type] TableFieldSchema type
     * @property {google.cloud.bigquery.storage.v1beta2.TableFieldSchema.Mode|null} [mode] TableFieldSchema mode
     * @property {Array.<google.cloud.bigquery.storage.v1beta2.ITableFieldSchema>|null} [fields] TableFieldSchema fields
     * @property {string|null} [description] TableFieldSchema description
     */

    /**
     * Constructs a new TableFieldSchema.
     * @memberof google.cloud.bigquery.storage.v1beta2
     * @classdesc Represents a TableFieldSchema.
     * @implements ITableFieldSchema
     * @constructor
     * @param {google.cloud.bigquery.storage.v1beta2.ITableFieldSchema=} [properties] Properties to set
     */
    function TableFieldSchema(properties) {
        this.fields = [];
        // Copy own, non-null properties; "__proto__" is excluded to avoid
        // prototype pollution.
        if (properties)
            for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i)
                if (properties[keys[i]] != null && keys[i] !== "__proto__")
                    this[keys[i]] = properties[keys[i]];
    }

    /**
     * TableFieldSchema name.
     * @member {string} name
     * @memberof google.cloud.bigquery.storage.v1beta2.TableFieldSchema
     * @instance
     */
    TableFieldSchema.prototype.name = "";

    /**
     * TableFieldSchema type.
     * @member {google.cloud.bigquery.storage.v1beta2.TableFieldSchema.Type} type
     * @memberof google.cloud.bigquery.storage.v1beta2.TableFieldSchema
     * @instance
     */
    TableFieldSchema.prototype.type = 0;

    /**
     * TableFieldSchema mode.
     * @member {google.cloud.bigquery.storage.v1beta2.TableFieldSchema.Mode} mode
     * @memberof google.cloud.bigquery.storage.v1beta2.TableFieldSchema
     * @instance
     */
    TableFieldSchema.prototype.mode = 0;

    /**
     * TableFieldSchema fields.
     * @member {Array.<google.cloud.bigquery.storage.v1beta2.ITableFieldSchema>} fields
     * @memberof google.cloud.bigquery.storage.v1beta2.TableFieldSchema
     * @instance
     */
    TableFieldSchema.prototype.fields = $util.emptyArray;

    /**
     * TableFieldSchema description.
     * @member {string} description
     * @memberof google.cloud.bigquery.storage.v1beta2.TableFieldSchema
     * @instance
     */
    TableFieldSchema.prototype.description = "";

    /**
     * Creates a new TableFieldSchema instance using the specified properties.
     * @function create
     * @memberof google.cloud.bigquery.storage.v1beta2.TableFieldSchema
     * @static
     * @param {google.cloud.bigquery.storage.v1beta2.ITableFieldSchema=} [properties] Properties to set
     * @returns {google.cloud.bigquery.storage.v1beta2.TableFieldSchema} TableFieldSchema instance
     */
    TableFieldSchema.create = function create(properties) {
        return new TableFieldSchema(properties);
    };

    /**
     * Encodes the specified TableFieldSchema message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.TableFieldSchema.verify|verify} messages.
     * @function encode
     * @memberof google.cloud.bigquery.storage.v1beta2.TableFieldSchema
     * @static
     * @param {google.cloud.bigquery.storage.v1beta2.ITableFieldSchema} message TableFieldSchema message or plain object to encode
     * @param {$protobuf.Writer} [writer] Writer to encode to
     * @returns {$protobuf.Writer} Writer
     */
    TableFieldSchema.encode = function encode(message, writer) {
        if (!writer)
            writer = $Writer.create();
        // Fields are emitted in ascending field-number order. Note that
        // "description" uses id 6; id 5 is not written by this message.
        if (message.name != null && Object.hasOwnProperty.call(message, "name"))
            writer.uint32(/* id 1, wireType 2 =*/10).string(message.name);
        if (message.type != null && Object.hasOwnProperty.call(message, "type"))
            writer.uint32(/* id 2, wireType 0 =*/16).int32(message.type);
        if (message.mode != null && Object.hasOwnProperty.call(message, "mode"))
            writer.uint32(/* id 3, wireType 0 =*/24).int32(message.mode);
        // Recursive repeated field: nested TableFieldSchema messages.
        if (message.fields != null && message.fields.length)
            for (var i = 0; i < message.fields.length; ++i)
                $root.google.cloud.bigquery.storage.v1beta2.TableFieldSchema.encode(message.fields[i], writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim();
        if (message.description != null && Object.hasOwnProperty.call(message, "description"))
            writer.uint32(/* id 6, wireType 2 =*/50).string(message.description);
        return writer;
    };

    /**
     * Encodes the specified TableFieldSchema message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta2.TableFieldSchema.verify|verify} messages.
     * @function encodeDelimited
     * @memberof google.cloud.bigquery.storage.v1beta2.TableFieldSchema
     * @static
     * @param {google.cloud.bigquery.storage.v1beta2.ITableFieldSchema} message TableFieldSchema message or plain object to encode
     * @param {$protobuf.Writer} [writer] Writer to encode to
     * @returns {$protobuf.Writer} Writer
     */
    TableFieldSchema.encodeDelimited = function encodeDelimited(message, writer) {
        return this.encode(message, writer).ldelim();
    };

    /**
     * Decodes a TableFieldSchema message from the specified reader or buffer.
     * @function decode
     * @memberof google.cloud.bigquery.storage.v1beta2.TableFieldSchema
     * @static
     * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from
     * @param {number} [length] Message length if known beforehand
     * @param {number} [error] Tag value that, when encountered, stops decoding early
     * @param {number} [long] Current recursion depth; external callers normally omit it
     * @returns {google.cloud.bigquery.storage.v1beta2.TableFieldSchema} TableFieldSchema
     * @throws {Error} If the payload is not a reader or valid buffer
     * @throws {$protobuf.util.ProtocolError} If required fields are missing
     */
    TableFieldSchema.decode = function decode(reader, length, error, long) {
        if (!(reader instanceof $Reader))
            reader = $Reader.create(reader);
        // "long" doubles as the recursion-depth counter; the nested
        // "fields" member makes this message self-recursive.
        if (long === undefined)
            long = 0;
        if (long > $Reader.recursionLimit)
            throw Error("maximum nesting depth exceeded");
        var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta2.TableFieldSchema();
        while (reader.pos < end) {
            var tag = reader.uint32();
            if (tag === error)
                break;
            switch (tag >>> 3) {
            case 1: {
                message.name = reader.string();
                break;
            }
            case 2: {
                message.type = reader.int32();
                break;
            }
            case 3: {
                message.mode = reader.int32();
                break;
            }
            case 4: {
                if (!(message.fields && message.fields.length))
                    message.fields = [];
                message.fields.push($root.google.cloud.bigquery.storage.v1beta2.TableFieldSchema.decode(reader, reader.uint32(), undefined, long + 1));
                break;
            }
            case 6: {
                message.description = reader.string();
                break;
            }
            default:
                reader.skipType(tag & 7, long);
                break;
            }
        }
        return message;
    };

    /**
     * Decodes a TableFieldSchema message from the specified reader or buffer, length delimited.
     * @function decodeDelimited
     * @memberof google.cloud.bigquery.storage.v1beta2.TableFieldSchema
     * @static
     * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from
     * @returns {google.cloud.bigquery.storage.v1beta2.TableFieldSchema} TableFieldSchema
     * @throws {Error} If the payload is not a reader or valid buffer
     * @throws {$protobuf.util.ProtocolError} If required fields are missing
     */
    TableFieldSchema.decodeDelimited = function decodeDelimited(reader) {
        if (!(reader instanceof $Reader))
            reader = new $Reader(reader);
        return this.decode(reader, reader.uint32());
    };

    /**
     * Verifies a TableFieldSchema message.
     * @function verify
     * @memberof google.cloud.bigquery.storage.v1beta2.TableFieldSchema
     * @static
     * @param {Object.<string,*>} message Plain object to verify
     * @param {number} [long] Current recursion depth; external callers normally omit it
     * @returns {string|null} `null` if valid, otherwise the reason why it is not
     */
    TableFieldSchema.verify = function verify(message, long) {
        if (typeof message !== "object" || message === null)
            return "object expected";
        if (long === undefined)
            long = 0;
        if (long > $util.recursionLimit)
            return "maximum nesting depth exceeded";
        if (message.name != null && message.hasOwnProperty("name"))
            if (!$util.isString(message.name))
                return "name: string expected";
        // type must be a known TableFieldSchema.Type value (0..15).
        if (message.type != null && message.hasOwnProperty("type"))
            switch (message.type) {
            default:
                return "type: enum value expected";
            case 0:
            case 1:
            case 2:
            case 3:
            case 4:
            case 5:
            case 6:
            case 7:
            case 8:
            case 9:
            case 10:
            case 11:
            case 12:
            case 13:
            case 14:
            case 15:
                break;
            }
        // mode must be a known TableFieldSchema.Mode value (0..3).
        if (message.mode != null && message.hasOwnProperty("mode"))
            switch (message.mode) {
            default:
                return "mode: enum value expected";
            case 0:
            case 1:
            case 2:
            case 3:
                break;
            }
        if (message.fields != null && message.hasOwnProperty("fields")) {
            if (!Array.isArray(message.fields))
                return "fields: array expected";
            for (var i = 0; i < message.fields.length; ++i) {
                var error = $root.google.cloud.bigquery.storage.v1beta2.TableFieldSchema.verify(message.fields[i], long + 1);
                if (error)
                    return "fields." + error;
            }
        }
        if (message.description != null && message.hasOwnProperty("description"))
            if (!$util.isString(message.description))
                return "description: string expected";
        return null;
    };

    /**
     * Creates a TableFieldSchema message from a plain object. Also converts values to their respective internal types.
     * @function fromObject
     * @memberof google.cloud.bigquery.storage.v1beta2.TableFieldSchema
     * @static
     * @param {Object.<string,*>} object Plain object
     * @param {number} [long] Current recursion depth; external callers normally omit it
     * @returns {google.cloud.bigquery.storage.v1beta2.TableFieldSchema} TableFieldSchema
     */
    TableFieldSchema.fromObject = function fromObject(object, long) {
        if (object instanceof $root.google.cloud.bigquery.storage.v1beta2.TableFieldSchema)
            return object;
        if (long === undefined)
            long = 0;
        if (long > $util.recursionLimit)
            throw Error("maximum nesting depth exceeded");
        var message = new $root.google.cloud.bigquery.storage.v1beta2.TableFieldSchema();
        if (object.name != null)
            message.name = String(object.name);
        // Enum conversion: accepts either the string name or the numeric
        // value; unknown numbers are kept as-is (open-enum behavior).
        switch (object.type) {
        default:
            if (typeof object.type === "number") {
                message.type = object.type;
                break;
            }
            break;
        case "TYPE_UNSPECIFIED":
        case 0:
            message.type = 0;
            break;
        case "STRING":
        case 1:
            message.type = 1;
            break;
        case "INT64":
        case 2:
            message.type = 2;
            break;
        case "DOUBLE":
        case 3:
            message.type = 3;
            break;
        case "STRUCT":
        case 4:
            message.type = 4;
            break;
        case "BYTES":
        case 5:
            message.type = 5;
            break;
        case "BOOL":
        case 6:
            message.type = 6;
            break;
        case "TIMESTAMP":
        case 7:
            message.type = 7;
            break;
        case "DATE":
        case 8:
            message.type = 8;
            break;
        case "TIME":
        case 9:
            message.type = 9;
            break;
        case "DATETIME":
        case 10:
            message.type = 10;
            break;
        case "GEOGRAPHY":
        case 11:
            message.type = 11;
            break;
        case "NUMERIC":
        case 12:
            message.type = 12;
            break;
        case "BIGNUMERIC":
        case 13:
            message.type = 13;
            break;
        case "INTERVAL":
        case 14:
            message.type = 14;
            break;
        case "JSON":
        case 15:
            message.type = 15;
            break;
        }
        switch (object.mode) {
        default:
            if (typeof object.mode === "number") {
                message.mode = object.mode;
                break;
            }
            break;
        case "MODE_UNSPECIFIED":
        case 0:
            message.mode = 0;
            break;
        case "NULLABLE":
        case 1:
            message.mode = 1;
            break;
        case "REQUIRED":
        case 2:
            message.mode = 2;
            break;
        case "REPEATED":
        case 3:
            message.mode = 3;
            break;
        }
        if (object.fields) {
            if (!Array.isArray(object.fields))
                throw TypeError(".google.cloud.bigquery.storage.v1beta2.TableFieldSchema.fields: array expected");
            message.fields = [];
            for (var i = 0; i < object.fields.length; ++i) {
                if (typeof object.fields[i] !== "object")
                    throw TypeError(".google.cloud.bigquery.storage.v1beta2.TableFieldSchema.fields: object expected");
                message.fields[i] = $root.google.cloud.bigquery.storage.v1beta2.TableFieldSchema.fromObject(object.fields[i], long + 1);
            }
        }
        if (object.description != null)
            message.description = String(object.description);
        return message;
    };

    /**
     * Creates a plain object from a TableFieldSchema message. Also converts values to other types if specified.
     * @function toObject
     * @memberof google.cloud.bigquery.storage.v1beta2.TableFieldSchema
     * @static
     * @param {google.cloud.bigquery.storage.v1beta2.TableFieldSchema} message TableFieldSchema
     * @param {$protobuf.IConversionOptions} [options] Conversion options
     * @returns {Object.<string,*>} Plain object
     */
    TableFieldSchema.toObject = function toObject(message, options) {
        if (!options)
            options = {};
        var object = {};
        if (options.arrays || options.defaults)
            object.fields = [];
        // options.defaults pre-populates scalar fields with their defaults.
        if (options.defaults) {
            object.name = "";
            object.type = options.enums === String ? "TYPE_UNSPECIFIED" : 0;
            object.mode = options.enums === String ? "MODE_UNSPECIFIED" : 0;
            object.description = "";
        }
        if (message.name != null && message.hasOwnProperty("name"))
            object.name = message.name;
        // With options.enums === String, known enum numbers are mapped to
        // their names; unknown numbers pass through unchanged.
        if (message.type != null && message.hasOwnProperty("type"))
            object.type = options.enums === String ? $root.google.cloud.bigquery.storage.v1beta2.TableFieldSchema.Type[message.type] === undefined ? message.type : $root.google.cloud.bigquery.storage.v1beta2.TableFieldSchema.Type[message.type] : message.type;
        if (message.mode != null && message.hasOwnProperty("mode"))
            object.mode = options.enums === String ? $root.google.cloud.bigquery.storage.v1beta2.TableFieldSchema.Mode[message.mode] === undefined ? message.mode : $root.google.cloud.bigquery.storage.v1beta2.TableFieldSchema.Mode[message.mode] : message.mode;
        if (message.fields && message.fields.length) {
            object.fields = [];
            for (var j = 0; j < message.fields.length; ++j)
                object.fields[j] = $root.google.cloud.bigquery.storage.v1beta2.TableFieldSchema.toObject(message.fields[j], options);
        }
        if (message.description != null && message.hasOwnProperty("description"))
            object.description = message.description;
        return object;
    };

    /**
     * Converts this TableFieldSchema to JSON.
     * @function toJSON
     * @memberof google.cloud.bigquery.storage.v1beta2.TableFieldSchema
     * @instance
     * @returns {Object.<string,*>} JSON object
     */
    TableFieldSchema.prototype.toJSON = function toJSON() {
        return this.constructor.toObject(this, $protobuf.util.toJSONOptions);
    };

    /**
     * Gets the default type url for TableFieldSchema
     * @function getTypeUrl
     * @memberof google.cloud.bigquery.storage.v1beta2.TableFieldSchema
     * @static
     * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com")
     * @returns {string} The default type url
     */
    TableFieldSchema.getTypeUrl = function getTypeUrl(typeUrlPrefix) {
        if (typeUrlPrefix === undefined) {
            typeUrlPrefix = "type.googleapis.com";
        }
        return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta2.TableFieldSchema";
    };

    /**
     * Type enum.
     * @name google.cloud.bigquery.storage.v1beta2.TableFieldSchema.Type
     * @enum {number}
     * @property {number} TYPE_UNSPECIFIED=0 TYPE_UNSPECIFIED value
     * @property {number} STRING=1 STRING value
     * @property {number} INT64=2 INT64 value
     * @property {number} DOUBLE=3 DOUBLE value
     * @property {number} STRUCT=4 STRUCT value
     * @property {number} BYTES=5 BYTES value
     * @property {number} BOOL=6 BOOL value
     * @property {number} TIMESTAMP=7 TIMESTAMP value
     * @property {number} DATE=8 DATE value
     * @property {number} TIME=9 TIME value
     * @property {number} DATETIME=10 DATETIME value
     * @property {number} GEOGRAPHY=11 GEOGRAPHY value
     * @property {number} NUMERIC=12 NUMERIC value
     * @property {number} BIGNUMERIC=13 BIGNUMERIC value
     * @property {number} INTERVAL=14 INTERVAL value
     * @property {number} JSON=15 JSON value
     */
    TableFieldSchema.Type = (function() {
        // Bidirectional map: name -> number on the returned object,
        // number -> name on its prototype (valuesById).
        var valuesById = {}, values = Object.create(valuesById);
        values[valuesById[0] = "TYPE_UNSPECIFIED"] = 0;
        values[valuesById[1] = "STRING"] = 1;
        values[valuesById[2] = "INT64"] = 2;
        values[valuesById[3] = "DOUBLE"] = 3;
        values[valuesById[4] = "STRUCT"] = 4;
        values[valuesById[5] = "BYTES"] = 5;
        values[valuesById[6] = "BOOL"] = 6;
        values[valuesById[7] = "TIMESTAMP"] = 7;
        values[valuesById[8] = "DATE"] = 8;
        values[valuesById[9] = "TIME"] = 9;
        values[valuesById[10] = "DATETIME"] = 10;
        values[valuesById[11] = "GEOGRAPHY"] = 11;
        values[valuesById[12] = "NUMERIC"] = 12;
        values[valuesById[13] = "BIGNUMERIC"] = 13;
        values[valuesById[14] = "INTERVAL"] = 14;
        values[valuesById[15] = "JSON"] = 15;
        return values;
    })();

    /**
     * Mode enum.
     * @name google.cloud.bigquery.storage.v1beta2.TableFieldSchema.Mode
     * @enum {number}
     * @property {number} MODE_UNSPECIFIED=0 MODE_UNSPECIFIED value
     * @property {number} NULLABLE=1 NULLABLE value
     * @property {number} REQUIRED=2 REQUIRED value
     * @property {number} REPEATED=3 REPEATED value
     */
    TableFieldSchema.Mode = (function() {
        // Bidirectional map, same construction as the Type enum above.
        var valuesById = {}, values = Object.create(valuesById);
        values[valuesById[0] = "MODE_UNSPECIFIED"] = 0;
        values[valuesById[1] = "NULLABLE"] = 1;
        values[valuesById[2] = "REQUIRED"] = 2;
        values[valuesById[3] = "REPEATED"] = 3;
        return values;
    })();

    return TableFieldSchema;
})();
+
+ return v1beta2;
+ })();
+
return storage;
})();
diff --git a/handwritten/bigquery-storage/protos/protos.json b/handwritten/bigquery-storage/protos/protos.json
index 3cf9dc5a2ae2..3d53db5b99e8 100644
--- a/handwritten/bigquery-storage/protos/protos.json
+++ b/handwritten/bigquery-storage/protos/protos.json
@@ -2730,6 +2730,927 @@
}
}
}
+ },
+ "v1beta2": {
+ "options": {
+ "go_package": "cloud.google.com/go/bigquery/storage/apiv1beta2/storagepb;storagepb",
+ "java_multiple_files": true,
+ "java_outer_classname": "TableProto",
+ "java_package": "com.google.cloud.bigquery.storage.v1beta2",
+ "(google.api.resource_definition).type": "bigquery.googleapis.com/Table",
+ "(google.api.resource_definition).pattern": "projects/{project}/datasets/{dataset}/tables/{table}"
+ },
+ "nested": {
+ "ArrowSchema": {
+ "fields": {
+ "serializedSchema": {
+ "type": "bytes",
+ "id": 1
+ }
+ }
+ },
+ "ArrowRecordBatch": {
+ "fields": {
+ "serializedRecordBatch": {
+ "type": "bytes",
+ "id": 1
+ }
+ }
+ },
+ "ArrowSerializationOptions": {
+ "fields": {
+ "format": {
+ "type": "Format",
+ "id": 1
+ }
+ },
+ "nested": {
+ "Format": {
+ "values": {
+ "FORMAT_UNSPECIFIED": 0,
+ "ARROW_0_14": 1,
+ "ARROW_0_15": 2
+ }
+ }
+ }
+ },
+ "AvroSchema": {
+ "fields": {
+ "schema": {
+ "type": "string",
+ "id": 1
+ }
+ }
+ },
+ "AvroRows": {
+ "fields": {
+ "serializedBinaryRows": {
+ "type": "bytes",
+ "id": 1
+ }
+ }
+ },
+ "ProtoSchema": {
+ "fields": {
+ "protoDescriptor": {
+ "type": "google.protobuf.DescriptorProto",
+ "id": 1
+ }
+ }
+ },
+ "ProtoRows": {
+ "fields": {
+ "serializedRows": {
+ "rule": "repeated",
+ "type": "bytes",
+ "id": 1
+ }
+ }
+ },
+ "BigQueryRead": {
+ "options": {
+ "(google.api.default_host)": "bigquerystorage.googleapis.com",
+ "(google.api.oauth_scopes)": "https://www.googleapis.com/auth/bigquery,https://www.googleapis.com/auth/cloud-platform"
+ },
+ "methods": {
+ "CreateReadSession": {
+ "requestType": "CreateReadSessionRequest",
+ "responseType": "ReadSession",
+ "options": {
+ "(google.api.http).post": "/v1beta2/{read_session.table=projects/*/datasets/*/tables/*}",
+ "(google.api.http).body": "*",
+ "(google.api.method_signature)": "parent,read_session,max_stream_count"
+ },
+ "parsedOptions": [
+ {
+ "(google.api.http)": {
+ "post": "/v1beta2/{read_session.table=projects/*/datasets/*/tables/*}",
+ "body": "*"
+ }
+ },
+ {
+ "(google.api.method_signature)": "parent,read_session,max_stream_count"
+ }
+ ]
+ },
+ "ReadRows": {
+ "requestType": "ReadRowsRequest",
+ "responseType": "ReadRowsResponse",
+ "responseStream": true,
+ "options": {
+ "(google.api.http).get": "/v1beta2/{read_stream=projects/*/locations/*/sessions/*/streams/*}",
+ "(google.api.method_signature)": "read_stream,offset"
+ },
+ "parsedOptions": [
+ {
+ "(google.api.http)": {
+ "get": "/v1beta2/{read_stream=projects/*/locations/*/sessions/*/streams/*}"
+ }
+ },
+ {
+ "(google.api.method_signature)": "read_stream,offset"
+ }
+ ]
+ },
+ "SplitReadStream": {
+ "requestType": "SplitReadStreamRequest",
+ "responseType": "SplitReadStreamResponse",
+ "options": {
+ "(google.api.http).get": "/v1beta2/{name=projects/*/locations/*/sessions/*/streams/*}"
+ },
+ "parsedOptions": [
+ {
+ "(google.api.http)": {
+ "get": "/v1beta2/{name=projects/*/locations/*/sessions/*/streams/*}"
+ }
+ }
+ ]
+ }
+ }
+ },
+ "BigQueryWrite": {
+ "options": {
+ "deprecated": true,
+ "(google.api.default_host)": "bigquerystorage.googleapis.com",
+ "(google.api.oauth_scopes)": "https://www.googleapis.com/auth/bigquery,https://www.googleapis.com/auth/bigquery.insertdata,https://www.googleapis.com/auth/cloud-platform"
+ },
+ "methods": {
+ "CreateWriteStream": {
+ "requestType": "CreateWriteStreamRequest",
+ "responseType": "WriteStream",
+ "options": {
+ "deprecated": true,
+ "(google.api.http).post": "/v1beta2/{parent=projects/*/datasets/*/tables/*}",
+ "(google.api.http).body": "write_stream",
+ "(google.api.method_signature)": "parent,write_stream"
+ },
+ "parsedOptions": [
+ {
+ "deprecated": true
+ },
+ {
+ "(google.api.http)": {
+ "post": "/v1beta2/{parent=projects/*/datasets/*/tables/*}",
+ "body": "write_stream"
+ }
+ },
+ {
+ "(google.api.method_signature)": "parent,write_stream"
+ }
+ ]
+ },
+ "AppendRows": {
+ "requestType": "AppendRowsRequest",
+ "requestStream": true,
+ "responseType": "AppendRowsResponse",
+ "responseStream": true,
+ "options": {
+ "deprecated": true,
+ "(google.api.http).post": "/v1beta2/{write_stream=projects/*/datasets/*/tables/*/streams/*}",
+ "(google.api.http).body": "*",
+ "(google.api.method_signature)": "write_stream"
+ },
+ "parsedOptions": [
+ {
+ "deprecated": true
+ },
+ {
+ "(google.api.http)": {
+ "post": "/v1beta2/{write_stream=projects/*/datasets/*/tables/*/streams/*}",
+ "body": "*"
+ }
+ },
+ {
+ "(google.api.method_signature)": "write_stream"
+ }
+ ]
+ },
+ "GetWriteStream": {
+ "requestType": "GetWriteStreamRequest",
+ "responseType": "WriteStream",
+ "options": {
+ "deprecated": true,
+ "(google.api.http).post": "/v1beta2/{name=projects/*/datasets/*/tables/*/streams/*}",
+ "(google.api.http).body": "*",
+ "(google.api.method_signature)": "name"
+ },
+ "parsedOptions": [
+ {
+ "deprecated": true
+ },
+ {
+ "(google.api.http)": {
+ "post": "/v1beta2/{name=projects/*/datasets/*/tables/*/streams/*}",
+ "body": "*"
+ }
+ },
+ {
+ "(google.api.method_signature)": "name"
+ }
+ ]
+ },
+ "FinalizeWriteStream": {
+ "requestType": "FinalizeWriteStreamRequest",
+ "responseType": "FinalizeWriteStreamResponse",
+ "options": {
+ "deprecated": true,
+ "(google.api.http).post": "/v1beta2/{name=projects/*/datasets/*/tables/*/streams/*}",
+ "(google.api.http).body": "*",
+ "(google.api.method_signature)": "name"
+ },
+ "parsedOptions": [
+ {
+ "deprecated": true
+ },
+ {
+ "(google.api.http)": {
+ "post": "/v1beta2/{name=projects/*/datasets/*/tables/*/streams/*}",
+ "body": "*"
+ }
+ },
+ {
+ "(google.api.method_signature)": "name"
+ }
+ ]
+ },
+ "BatchCommitWriteStreams": {
+ "requestType": "BatchCommitWriteStreamsRequest",
+ "responseType": "BatchCommitWriteStreamsResponse",
+ "options": {
+ "deprecated": true,
+ "(google.api.http).get": "/v1beta2/{parent=projects/*/datasets/*/tables/*}",
+ "(google.api.method_signature)": "parent"
+ },
+ "parsedOptions": [
+ {
+ "deprecated": true
+ },
+ {
+ "(google.api.http)": {
+ "get": "/v1beta2/{parent=projects/*/datasets/*/tables/*}"
+ }
+ },
+ {
+ "(google.api.method_signature)": "parent"
+ }
+ ]
+ },
+ "FlushRows": {
+ "requestType": "FlushRowsRequest",
+ "responseType": "FlushRowsResponse",
+ "options": {
+ "deprecated": true,
+ "(google.api.http).post": "/v1beta2/{write_stream=projects/*/datasets/*/tables/*/streams/*}",
+ "(google.api.http).body": "*",
+ "(google.api.method_signature)": "write_stream"
+ },
+ "parsedOptions": [
+ {
+ "deprecated": true
+ },
+ {
+ "(google.api.http)": {
+ "post": "/v1beta2/{write_stream=projects/*/datasets/*/tables/*/streams/*}",
+ "body": "*"
+ }
+ },
+ {
+ "(google.api.method_signature)": "write_stream"
+ }
+ ]
+ }
+ }
+ },
+ "CreateReadSessionRequest": {
+ "fields": {
+ "parent": {
+ "type": "string",
+ "id": 1,
+ "options": {
+ "(google.api.field_behavior)": "REQUIRED",
+ "(google.api.resource_reference).type": "cloudresourcemanager.googleapis.com/Project"
+ }
+ },
+ "readSession": {
+ "type": "ReadSession",
+ "id": 2,
+ "options": {
+ "(google.api.field_behavior)": "REQUIRED"
+ }
+ },
+ "maxStreamCount": {
+ "type": "int32",
+ "id": 3
+ }
+ }
+ },
+ "ReadRowsRequest": {
+ "fields": {
+ "readStream": {
+ "type": "string",
+ "id": 1,
+ "options": {
+ "(google.api.field_behavior)": "REQUIRED",
+ "(google.api.resource_reference).type": "bigquerystorage.googleapis.com/ReadStream"
+ }
+ },
+ "offset": {
+ "type": "int64",
+ "id": 2
+ }
+ }
+ },
+ "ThrottleState": {
+ "fields": {
+ "throttlePercent": {
+ "type": "int32",
+ "id": 1
+ }
+ }
+ },
+ "StreamStats": {
+ "fields": {
+ "progress": {
+ "type": "Progress",
+ "id": 2
+ }
+ },
+ "nested": {
+ "Progress": {
+ "fields": {
+ "atResponseStart": {
+ "type": "double",
+ "id": 1
+ },
+ "atResponseEnd": {
+ "type": "double",
+ "id": 2
+ }
+ }
+ }
+ }
+ },
+ "ReadRowsResponse": {
+ "oneofs": {
+ "rows": {
+ "oneof": [
+ "avroRows",
+ "arrowRecordBatch"
+ ]
+ },
+ "schema": {
+ "oneof": [
+ "avroSchema",
+ "arrowSchema"
+ ]
+ }
+ },
+ "fields": {
+ "avroRows": {
+ "type": "AvroRows",
+ "id": 3
+ },
+ "arrowRecordBatch": {
+ "type": "ArrowRecordBatch",
+ "id": 4
+ },
+ "rowCount": {
+ "type": "int64",
+ "id": 6
+ },
+ "stats": {
+ "type": "StreamStats",
+ "id": 2
+ },
+ "throttleState": {
+ "type": "ThrottleState",
+ "id": 5
+ },
+ "avroSchema": {
+ "type": "AvroSchema",
+ "id": 7,
+ "options": {
+ "(google.api.field_behavior)": "OUTPUT_ONLY"
+ }
+ },
+ "arrowSchema": {
+ "type": "ArrowSchema",
+ "id": 8,
+ "options": {
+ "(google.api.field_behavior)": "OUTPUT_ONLY"
+ }
+ }
+ }
+ },
+ "SplitReadStreamRequest": {
+ "fields": {
+ "name": {
+ "type": "string",
+ "id": 1,
+ "options": {
+ "(google.api.field_behavior)": "REQUIRED",
+ "(google.api.resource_reference).type": "bigquerystorage.googleapis.com/ReadStream"
+ }
+ },
+ "fraction": {
+ "type": "double",
+ "id": 2
+ }
+ }
+ },
+ "SplitReadStreamResponse": {
+ "fields": {
+ "primaryStream": {
+ "type": "ReadStream",
+ "id": 1
+ },
+ "remainderStream": {
+ "type": "ReadStream",
+ "id": 2
+ }
+ }
+ },
+ "CreateWriteStreamRequest": {
+ "fields": {
+ "parent": {
+ "type": "string",
+ "id": 1,
+ "options": {
+ "(google.api.field_behavior)": "REQUIRED",
+ "(google.api.resource_reference).type": "bigquery.googleapis.com/Table"
+ }
+ },
+ "writeStream": {
+ "type": "WriteStream",
+ "id": 2,
+ "options": {
+ "(google.api.field_behavior)": "REQUIRED"
+ }
+ }
+ }
+ },
+ "AppendRowsRequest": {
+ "oneofs": {
+ "rows": {
+ "oneof": [
+ "protoRows"
+ ]
+ }
+ },
+ "fields": {
+ "writeStream": {
+ "type": "string",
+ "id": 1,
+ "options": {
+ "(google.api.field_behavior)": "REQUIRED",
+ "(google.api.resource_reference).type": "bigquerystorage.googleapis.com/WriteStream"
+ }
+ },
+ "offset": {
+ "type": "google.protobuf.Int64Value",
+ "id": 2
+ },
+ "protoRows": {
+ "type": "ProtoData",
+ "id": 4
+ },
+ "traceId": {
+ "type": "string",
+ "id": 6
+ }
+ },
+ "nested": {
+ "ProtoData": {
+ "fields": {
+ "writerSchema": {
+ "type": "ProtoSchema",
+ "id": 1
+ },
+ "rows": {
+ "type": "ProtoRows",
+ "id": 2
+ }
+ }
+ }
+ }
+ },
+ "AppendRowsResponse": {
+ "oneofs": {
+ "response": {
+ "oneof": [
+ "appendResult",
+ "error"
+ ]
+ }
+ },
+ "fields": {
+ "appendResult": {
+ "type": "AppendResult",
+ "id": 1
+ },
+ "error": {
+ "type": "google.rpc.Status",
+ "id": 2
+ },
+ "updatedSchema": {
+ "type": "TableSchema",
+ "id": 3
+ }
+ },
+ "nested": {
+ "AppendResult": {
+ "fields": {
+ "offset": {
+ "type": "google.protobuf.Int64Value",
+ "id": 1
+ }
+ }
+ }
+ }
+ },
+ "GetWriteStreamRequest": {
+ "fields": {
+ "name": {
+ "type": "string",
+ "id": 1,
+ "options": {
+ "(google.api.field_behavior)": "REQUIRED",
+ "(google.api.resource_reference).type": "bigquerystorage.googleapis.com/WriteStream"
+ }
+ }
+ }
+ },
+ "BatchCommitWriteStreamsRequest": {
+ "fields": {
+ "parent": {
+ "type": "string",
+ "id": 1,
+ "options": {
+ "(google.api.field_behavior)": "REQUIRED"
+ }
+ },
+ "writeStreams": {
+ "rule": "repeated",
+ "type": "string",
+ "id": 2,
+ "options": {
+ "(google.api.field_behavior)": "REQUIRED"
+ }
+ }
+ }
+ },
+ "BatchCommitWriteStreamsResponse": {
+ "fields": {
+ "commitTime": {
+ "type": "google.protobuf.Timestamp",
+ "id": 1
+ },
+ "streamErrors": {
+ "rule": "repeated",
+ "type": "StorageError",
+ "id": 2
+ }
+ }
+ },
+ "FinalizeWriteStreamRequest": {
+ "fields": {
+ "name": {
+ "type": "string",
+ "id": 1,
+ "options": {
+ "(google.api.field_behavior)": "REQUIRED",
+ "(google.api.resource_reference).type": "bigquerystorage.googleapis.com/WriteStream"
+ }
+ }
+ }
+ },
+ "FinalizeWriteStreamResponse": {
+ "fields": {
+ "rowCount": {
+ "type": "int64",
+ "id": 1
+ }
+ }
+ },
+ "FlushRowsRequest": {
+ "fields": {
+ "writeStream": {
+ "type": "string",
+ "id": 1,
+ "options": {
+ "(google.api.field_behavior)": "REQUIRED",
+ "(google.api.resource_reference).type": "bigquerystorage.googleapis.com/WriteStream"
+ }
+ },
+ "offset": {
+ "type": "google.protobuf.Int64Value",
+ "id": 2
+ }
+ }
+ },
+ "FlushRowsResponse": {
+ "fields": {
+ "offset": {
+ "type": "int64",
+ "id": 1
+ }
+ }
+ },
+ "StorageError": {
+ "fields": {
+ "code": {
+ "type": "StorageErrorCode",
+ "id": 1
+ },
+ "entity": {
+ "type": "string",
+ "id": 2
+ },
+ "errorMessage": {
+ "type": "string",
+ "id": 3
+ }
+ },
+ "nested": {
+ "StorageErrorCode": {
+ "values": {
+ "STORAGE_ERROR_CODE_UNSPECIFIED": 0,
+ "TABLE_NOT_FOUND": 1,
+ "STREAM_ALREADY_COMMITTED": 2,
+ "STREAM_NOT_FOUND": 3,
+ "INVALID_STREAM_TYPE": 4,
+ "INVALID_STREAM_STATE": 5,
+ "STREAM_FINALIZED": 6
+ }
+ }
+ }
+ },
+ "DataFormat": {
+ "values": {
+ "DATA_FORMAT_UNSPECIFIED": 0,
+ "AVRO": 1,
+ "ARROW": 2
+ }
+ },
+ "ReadSession": {
+ "options": {
+ "(google.api.resource).type": "bigquerystorage.googleapis.com/ReadSession",
+ "(google.api.resource).pattern": "projects/{project}/locations/{location}/sessions/{session}"
+ },
+ "oneofs": {
+ "schema": {
+ "oneof": [
+ "avroSchema",
+ "arrowSchema"
+ ]
+ }
+ },
+ "fields": {
+ "name": {
+ "type": "string",
+ "id": 1,
+ "options": {
+ "(google.api.field_behavior)": "OUTPUT_ONLY"
+ }
+ },
+ "expireTime": {
+ "type": "google.protobuf.Timestamp",
+ "id": 2,
+ "options": {
+ "(google.api.field_behavior)": "OUTPUT_ONLY"
+ }
+ },
+ "dataFormat": {
+ "type": "DataFormat",
+ "id": 3,
+ "options": {
+ "(google.api.field_behavior)": "IMMUTABLE"
+ }
+ },
+ "avroSchema": {
+ "type": "AvroSchema",
+ "id": 4,
+ "options": {
+ "(google.api.field_behavior)": "OUTPUT_ONLY"
+ }
+ },
+ "arrowSchema": {
+ "type": "ArrowSchema",
+ "id": 5,
+ "options": {
+ "(google.api.field_behavior)": "OUTPUT_ONLY"
+ }
+ },
+ "table": {
+ "type": "string",
+ "id": 6,
+ "options": {
+ "(google.api.field_behavior)": "IMMUTABLE",
+ "(google.api.resource_reference).type": "bigquery.googleapis.com/Table"
+ }
+ },
+ "tableModifiers": {
+ "type": "TableModifiers",
+ "id": 7,
+ "options": {
+ "(google.api.field_behavior)": "OPTIONAL"
+ }
+ },
+ "readOptions": {
+ "type": "TableReadOptions",
+ "id": 8,
+ "options": {
+ "(google.api.field_behavior)": "OPTIONAL"
+ }
+ },
+ "streams": {
+ "rule": "repeated",
+ "type": "ReadStream",
+ "id": 10,
+ "options": {
+ "(google.api.field_behavior)": "OUTPUT_ONLY"
+ }
+ }
+ },
+ "nested": {
+ "TableModifiers": {
+ "fields": {
+ "snapshotTime": {
+ "type": "google.protobuf.Timestamp",
+ "id": 1
+ }
+ }
+ },
+ "TableReadOptions": {
+ "fields": {
+ "selectedFields": {
+ "rule": "repeated",
+ "type": "string",
+ "id": 1
+ },
+ "rowRestriction": {
+ "type": "string",
+ "id": 2
+ },
+ "arrowSerializationOptions": {
+ "type": "ArrowSerializationOptions",
+ "id": 3,
+ "options": {
+ "(google.api.field_behavior)": "OPTIONAL"
+ }
+ }
+ }
+ }
+ }
+ },
+ "ReadStream": {
+ "options": {
+ "(google.api.resource).type": "bigquerystorage.googleapis.com/ReadStream",
+ "(google.api.resource).pattern": "projects/{project}/locations/{location}/sessions/{session}/streams/{stream}"
+ },
+ "fields": {
+ "name": {
+ "type": "string",
+ "id": 1,
+ "options": {
+ "(google.api.field_behavior)": "OUTPUT_ONLY"
+ }
+ }
+ }
+ },
+ "WriteStream": {
+ "options": {
+ "(google.api.resource).type": "bigquerystorage.googleapis.com/WriteStream",
+ "(google.api.resource).pattern": "projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}"
+ },
+ "fields": {
+ "name": {
+ "type": "string",
+ "id": 1,
+ "options": {
+ "(google.api.field_behavior)": "OUTPUT_ONLY"
+ }
+ },
+ "type": {
+ "type": "Type",
+ "id": 2,
+ "options": {
+ "(google.api.field_behavior)": "IMMUTABLE"
+ }
+ },
+ "createTime": {
+ "type": "google.protobuf.Timestamp",
+ "id": 3,
+ "options": {
+ "(google.api.field_behavior)": "OUTPUT_ONLY"
+ }
+ },
+ "commitTime": {
+ "type": "google.protobuf.Timestamp",
+ "id": 4,
+ "options": {
+ "(google.api.field_behavior)": "OUTPUT_ONLY"
+ }
+ },
+ "tableSchema": {
+ "type": "TableSchema",
+ "id": 5,
+ "options": {
+ "(google.api.field_behavior)": "OUTPUT_ONLY"
+ }
+ }
+ },
+ "nested": {
+ "Type": {
+ "values": {
+ "TYPE_UNSPECIFIED": 0,
+ "COMMITTED": 1,
+ "PENDING": 2,
+ "BUFFERED": 3
+ }
+ }
+ }
+ },
+ "TableSchema": {
+ "fields": {
+ "fields": {
+ "rule": "repeated",
+ "type": "TableFieldSchema",
+ "id": 1
+ }
+ }
+ },
+ "TableFieldSchema": {
+ "fields": {
+ "name": {
+ "type": "string",
+ "id": 1,
+ "options": {
+ "(google.api.field_behavior)": "REQUIRED"
+ }
+ },
+ "type": {
+ "type": "Type",
+ "id": 2,
+ "options": {
+ "(google.api.field_behavior)": "REQUIRED"
+ }
+ },
+ "mode": {
+ "type": "Mode",
+ "id": 3,
+ "options": {
+ "(google.api.field_behavior)": "OPTIONAL"
+ }
+ },
+ "fields": {
+ "rule": "repeated",
+ "type": "TableFieldSchema",
+ "id": 4,
+ "options": {
+ "(google.api.field_behavior)": "OPTIONAL"
+ }
+ },
+ "description": {
+ "type": "string",
+ "id": 6,
+ "options": {
+ "(google.api.field_behavior)": "OPTIONAL"
+ }
+ }
+ },
+ "nested": {
+ "Type": {
+ "values": {
+ "TYPE_UNSPECIFIED": 0,
+ "STRING": 1,
+ "INT64": 2,
+ "DOUBLE": 3,
+ "STRUCT": 4,
+ "BYTES": 5,
+ "BOOL": 6,
+ "TIMESTAMP": 7,
+ "DATE": 8,
+ "TIME": 9,
+ "DATETIME": 10,
+ "GEOGRAPHY": 11,
+ "NUMERIC": 12,
+ "BIGNUMERIC": 13,
+ "INTERVAL": 14,
+ "JSON": 15
+ }
+ },
+ "Mode": {
+ "values": {
+ "MODE_UNSPECIFIED": 0,
+ "NULLABLE": 1,
+ "REQUIRED": 2,
+ "REPEATED": 3
+ }
+ }
+ }
+ }
+ }
}
}
}
diff --git a/handwritten/bigquery-storage/samples/README.md b/handwritten/bigquery-storage/samples/README.md
index 6568c068d303..054cb5ffd232 100644
--- a/handwritten/bigquery-storage/samples/README.md
+++ b/handwritten/bigquery-storage/samples/README.md
@@ -36,6 +36,15 @@
* [Big_query_storage.finalize_stream](#big_query_storage.finalize_stream)
* [Big_query_storage.read_rows](#big_query_storage.read_rows)
* [Big_query_storage.split_read_stream](#big_query_storage.split_read_stream)
+ * [Big_query_read.create_read_session](#big_query_read.create_read_session)
+ * [Big_query_read.read_rows](#big_query_read.read_rows)
+ * [Big_query_read.split_read_stream](#big_query_read.split_read_stream)
+ * [Big_query_write.append_rows](#big_query_write.append_rows)
+ * [Big_query_write.batch_commit_write_streams](#big_query_write.batch_commit_write_streams)
+ * [Big_query_write.create_write_stream](#big_query_write.create_write_stream)
+ * [Big_query_write.finalize_write_stream](#big_query_write.finalize_write_stream)
+ * [Big_query_write.flush_rows](#big_query_write.flush_rows)
+ * [Big_query_write.get_write_stream](#big_query_write.get_write_stream)
## Before you begin
@@ -455,6 +464,159 @@ __Usage:__
`node handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.split_read_stream.js`
+-----
+
+
+
+
+### Big_query_read.create_read_session
+
+View the [source code](https://github.com/googleapis/google-cloud-node/blob/main/handwritten/bigquery-storage/samples/generated/v1beta2/big_query_read.create_read_session.js).
+
+[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-cloud-node&page=editor&open_in_editor=handwritten/bigquery-storage/samples/generated/v1beta2/big_query_read.create_read_session.js,samples/README.md)
+
+__Usage:__
+
+
+`node handwritten/bigquery-storage/samples/generated/v1beta2/big_query_read.create_read_session.js`
+
+
+-----
+
+
+
+
+### Big_query_read.read_rows
+
+View the [source code](https://github.com/googleapis/google-cloud-node/blob/main/handwritten/bigquery-storage/samples/generated/v1beta2/big_query_read.read_rows.js).
+
+[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-cloud-node&page=editor&open_in_editor=handwritten/bigquery-storage/samples/generated/v1beta2/big_query_read.read_rows.js,samples/README.md)
+
+__Usage:__
+
+
+`node handwritten/bigquery-storage/samples/generated/v1beta2/big_query_read.read_rows.js`
+
+
+-----
+
+
+
+
+### Big_query_read.split_read_stream
+
+View the [source code](https://github.com/googleapis/google-cloud-node/blob/main/handwritten/bigquery-storage/samples/generated/v1beta2/big_query_read.split_read_stream.js).
+
+[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-cloud-node&page=editor&open_in_editor=handwritten/bigquery-storage/samples/generated/v1beta2/big_query_read.split_read_stream.js,samples/README.md)
+
+__Usage:__
+
+
+`node handwritten/bigquery-storage/samples/generated/v1beta2/big_query_read.split_read_stream.js`
+
+
+-----
+
+
+
+
+### Big_query_write.append_rows
+
+View the [source code](https://github.com/googleapis/google-cloud-node/blob/main/handwritten/bigquery-storage/samples/generated/v1beta2/big_query_write.append_rows.js).
+
+[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-cloud-node&page=editor&open_in_editor=handwritten/bigquery-storage/samples/generated/v1beta2/big_query_write.append_rows.js,samples/README.md)
+
+__Usage:__
+
+
+`node handwritten/bigquery-storage/samples/generated/v1beta2/big_query_write.append_rows.js`
+
+
+-----
+
+
+
+
+### Big_query_write.batch_commit_write_streams
+
+View the [source code](https://github.com/googleapis/google-cloud-node/blob/main/handwritten/bigquery-storage/samples/generated/v1beta2/big_query_write.batch_commit_write_streams.js).
+
+[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-cloud-node&page=editor&open_in_editor=handwritten/bigquery-storage/samples/generated/v1beta2/big_query_write.batch_commit_write_streams.js,samples/README.md)
+
+__Usage:__
+
+
+`node handwritten/bigquery-storage/samples/generated/v1beta2/big_query_write.batch_commit_write_streams.js`
+
+
+-----
+
+
+
+
+### Big_query_write.create_write_stream
+
+View the [source code](https://github.com/googleapis/google-cloud-node/blob/main/handwritten/bigquery-storage/samples/generated/v1beta2/big_query_write.create_write_stream.js).
+
+[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-cloud-node&page=editor&open_in_editor=handwritten/bigquery-storage/samples/generated/v1beta2/big_query_write.create_write_stream.js,samples/README.md)
+
+__Usage:__
+
+
+`node handwritten/bigquery-storage/samples/generated/v1beta2/big_query_write.create_write_stream.js`
+
+
+-----
+
+
+
+
+### Big_query_write.finalize_write_stream
+
+View the [source code](https://github.com/googleapis/google-cloud-node/blob/main/handwritten/bigquery-storage/samples/generated/v1beta2/big_query_write.finalize_write_stream.js).
+
+[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-cloud-node&page=editor&open_in_editor=handwritten/bigquery-storage/samples/generated/v1beta2/big_query_write.finalize_write_stream.js,samples/README.md)
+
+__Usage:__
+
+
+`node handwritten/bigquery-storage/samples/generated/v1beta2/big_query_write.finalize_write_stream.js`
+
+
+-----
+
+
+
+
+### Big_query_write.flush_rows
+
+View the [source code](https://github.com/googleapis/google-cloud-node/blob/main/handwritten/bigquery-storage/samples/generated/v1beta2/big_query_write.flush_rows.js).
+
+[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-cloud-node&page=editor&open_in_editor=handwritten/bigquery-storage/samples/generated/v1beta2/big_query_write.flush_rows.js,samples/README.md)
+
+__Usage:__
+
+
+`node handwritten/bigquery-storage/samples/generated/v1beta2/big_query_write.flush_rows.js`
+
+
+-----
+
+
+
+
+### Big_query_write.get_write_stream
+
+View the [source code](https://github.com/googleapis/google-cloud-node/blob/main/handwritten/bigquery-storage/samples/generated/v1beta2/big_query_write.get_write_stream.js).
+
+[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-cloud-node&page=editor&open_in_editor=handwritten/bigquery-storage/samples/generated/v1beta2/big_query_write.get_write_stream.js,samples/README.md)
+
+__Usage:__
+
+
+`node handwritten/bigquery-storage/samples/generated/v1beta2/big_query_write.get_write_stream.js`
+
+
diff --git a/handwritten/bigquery-storage/samples/generated/v1/big_query_read.create_read_session.js b/handwritten/bigquery-storage/samples/generated/v1/big_query_read.create_read_session.js
index d6f53319cfd6..6b77630dc878 100644
--- a/handwritten/bigquery-storage/samples/generated/v1/big_query_read.create_read_session.js
+++ b/handwritten/bigquery-storage/samples/generated/v1/big_query_read.create_read_session.js
@@ -1,4 +1,4 @@
-// Copyright 2025 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
diff --git a/handwritten/bigquery-storage/samples/generated/v1/big_query_read.read_rows.js b/handwritten/bigquery-storage/samples/generated/v1/big_query_read.read_rows.js
index 90f0b63cda71..9fd06b134f93 100644
--- a/handwritten/bigquery-storage/samples/generated/v1/big_query_read.read_rows.js
+++ b/handwritten/bigquery-storage/samples/generated/v1/big_query_read.read_rows.js
@@ -1,4 +1,4 @@
-// Copyright 2025 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
diff --git a/handwritten/bigquery-storage/samples/generated/v1/big_query_read.split_read_stream.js b/handwritten/bigquery-storage/samples/generated/v1/big_query_read.split_read_stream.js
index 2ec67f39f570..3fade2a91fdf 100644
--- a/handwritten/bigquery-storage/samples/generated/v1/big_query_read.split_read_stream.js
+++ b/handwritten/bigquery-storage/samples/generated/v1/big_query_read.split_read_stream.js
@@ -1,4 +1,4 @@
-// Copyright 2025 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
diff --git a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.append_rows.js b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.append_rows.js
index 240d6e2a6285..f4cd68adfb81 100644
--- a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.append_rows.js
+++ b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.append_rows.js
@@ -1,4 +1,4 @@
-// Copyright 2025 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -62,8 +62,7 @@ function main(writeStream) {
*/
// const protoRows = {}
/**
- * Rows in arrow format. This is an experimental feature only selected for
- * allowlisted customers.
+ * Rows in arrow format.
*/
// const arrowRows = {}
/**
@@ -91,8 +90,8 @@ function main(writeStream) {
/**
* Optional. Default missing value interpretation for all columns in the
* table. When a value is specified on an `AppendRowsRequest`, it is applied
- * to all requests on the connection from that point forward, until a
- * subsequent `AppendRowsRequest` sets it to a different value.
+ * to all requests from that point forward, until a subsequent
+ * `AppendRowsRequest` sets it to a different value.
* `missing_value_interpretation` can override
* `default_missing_value_interpretation`. For example, if you want to write
* `NULL` instead of using default values for some columns, you can set
diff --git a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.batch_commit_write_streams.js b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.batch_commit_write_streams.js
index 72d3877721ec..a12e8a444344 100644
--- a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.batch_commit_write_streams.js
+++ b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.batch_commit_write_streams.js
@@ -1,4 +1,4 @@
-// Copyright 2025 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
diff --git a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.create_write_stream.js b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.create_write_stream.js
index 022ca1217a4f..89d46f575081 100644
--- a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.create_write_stream.js
+++ b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.create_write_stream.js
@@ -1,4 +1,4 @@
-// Copyright 2025 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
diff --git a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.finalize_write_stream.js b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.finalize_write_stream.js
index f20f6f0a7318..01e7737068c3 100644
--- a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.finalize_write_stream.js
+++ b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.finalize_write_stream.js
@@ -1,4 +1,4 @@
-// Copyright 2025 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
diff --git a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.flush_rows.js b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.flush_rows.js
index 8434830cd8c2..90babd9f6563 100644
--- a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.flush_rows.js
+++ b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.flush_rows.js
@@ -1,4 +1,4 @@
-// Copyright 2025 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
diff --git a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.get_write_stream.js b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.get_write_stream.js
index 94785274b882..83e6d0849ca7 100644
--- a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.get_write_stream.js
+++ b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.get_write_stream.js
@@ -1,4 +1,4 @@
-// Copyright 2025 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
diff --git a/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata_google.cloud.bigquery.storage.v1.json b/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata_google.cloud.bigquery.storage.v1.json
index 0f17af129f2d..76642dc35926 100644
--- a/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata_google.cloud.bigquery.storage.v1.json
+++ b/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata_google.cloud.bigquery.storage.v1.json
@@ -1,435 +1,435 @@
{
- "clientLibrary": {
- "name": "nodejs-storage",
- "version": "5.1.0",
- "language": "TYPESCRIPT",
- "apis": [
- {
- "id": "google.cloud.bigquery.storage.v1",
- "version": "v1"
- }
- ]
- },
- "snippets": [
+ "clientLibrary": {
+ "name": "nodejs-storage",
+ "version": "0.1.0",
+ "language": "TYPESCRIPT",
+ "apis": [
+ {
+ "id": "google.cloud.bigquery.storage.v1",
+ "version": "v1"
+ }
+ ]
+ },
+ "snippets": [
+ {
+ "regionTag": "bigquerystorage_v1_generated_BigQueryRead_CreateReadSession_async",
+ "title": "BigQueryRead createReadSession Sample",
+ "origin": "API_DEFINITION",
+ "description": " Creates a new read session. A read session divides the contents of a BigQuery table into one or more streams, which can then be used to read data from the table. The read session also specifies properties of the data to be read, such as a list of columns or a push-down filter describing the rows to be returned. A particular row can be read by at most one stream. When the caller has reached the end of each stream in the session, then all the data in the table has been read. Data is assigned to each stream such that roughly the same number of rows can be read from each stream. Because the server-side unit for assigning data is collections of rows, the API does not guarantee that each stream will return the same number or rows. Additionally, the limits are enforced based on the number of pre-filtered rows, so some filters can lead to lopsided assignments. Read sessions automatically expire 6 hours after they are created and do not require manual clean-up by the caller.",
+ "canonical": true,
+ "file": "big_query_read.create_read_session.js",
+ "language": "JAVASCRIPT",
+ "segments": [
{
- "regionTag": "bigquerystorage_v1_generated_BigQueryRead_CreateReadSession_async",
- "title": "BigQueryRead createReadSession Sample",
- "origin": "API_DEFINITION",
- "description": " Creates a new read session. A read session divides the contents of a BigQuery table into one or more streams, which can then be used to read data from the table. The read session also specifies properties of the data to be read, such as a list of columns or a push-down filter describing the rows to be returned. A particular row can be read by at most one stream. When the caller has reached the end of each stream in the session, then all the data in the table has been read. Data is assigned to each stream such that roughly the same number of rows can be read from each stream. Because the server-side unit for assigning data is collections of rows, the API does not guarantee that each stream will return the same number or rows. Additionally, the limits are enforced based on the number of pre-filtered rows, so some filters can lead to lopsided assignments. Read sessions automatically expire 6 hours after they are created and do not require manual clean-up by the caller.",
- "canonical": true,
- "file": "big_query_read.create_read_session.js",
- "language": "JAVASCRIPT",
- "segments": [
- {
- "start": 25,
- "end": 81,
- "type": "FULL"
- }
- ],
- "clientMethod": {
- "shortName": "CreateReadSession",
- "fullName": "google.cloud.bigquery.storage.v1.BigQueryRead.CreateReadSession",
- "async": true,
- "parameters": [
- {
- "name": "parent",
- "type": "TYPE_STRING"
- },
- {
- "name": "read_session",
- "type": ".google.cloud.bigquery.storage.v1.ReadSession"
- },
- {
- "name": "max_stream_count",
- "type": "TYPE_INT32"
- },
- {
- "name": "preferred_min_stream_count",
- "type": "TYPE_INT32"
- }
- ],
- "resultType": ".google.cloud.bigquery.storage.v1.ReadSession",
- "client": {
- "shortName": "BigQueryReadClient",
- "fullName": "google.cloud.bigquery.storage.v1.BigQueryReadClient"
- },
- "method": {
- "shortName": "CreateReadSession",
- "fullName": "google.cloud.bigquery.storage.v1.BigQueryRead.CreateReadSession",
- "service": {
- "shortName": "BigQueryRead",
- "fullName": "google.cloud.bigquery.storage.v1.BigQueryRead"
- }
- }
- }
+ "start": 25,
+ "end": 81,
+ "type": "FULL"
+ }
+ ],
+ "clientMethod": {
+ "shortName": "CreateReadSession",
+ "fullName": "google.cloud.bigquery.storage.v1.BigQueryRead.CreateReadSession",
+ "async": true,
+ "parameters": [
+ {
+ "name": "parent",
+ "type": "TYPE_STRING"
+ },
+ {
+ "name": "read_session",
+ "type": ".google.cloud.bigquery.storage.v1.ReadSession"
+ },
+ {
+ "name": "max_stream_count",
+ "type": "TYPE_INT32"
+ },
+ {
+ "name": "preferred_min_stream_count",
+ "type": "TYPE_INT32"
+ }
+ ],
+ "resultType": ".google.cloud.bigquery.storage.v1.ReadSession",
+ "client": {
+ "shortName": "BigQueryReadClient",
+ "fullName": "google.cloud.bigquery.storage.v1.BigQueryReadClient"
},
+ "method": {
+ "shortName": "CreateReadSession",
+ "fullName": "google.cloud.bigquery.storage.v1.BigQueryRead.CreateReadSession",
+ "service": {
+ "shortName": "BigQueryRead",
+ "fullName": "google.cloud.bigquery.storage.v1.BigQueryRead"
+ }
+ }
+ }
+ },
+ {
+ "regionTag": "bigquerystorage_v1_generated_BigQueryRead_ReadRows_async",
+ "title": "BigQueryRead readRows Sample",
+ "origin": "API_DEFINITION",
+ "description": " Reads rows from the stream in the format prescribed by the ReadSession. Each response contains one or more table rows, up to a maximum of 128 MB per response; read requests which attempt to read individual rows larger than 128 MB will fail. Each request also returns a set of stream statistics reflecting the current state of the stream.",
+ "canonical": true,
+ "file": "big_query_read.read_rows.js",
+ "language": "JAVASCRIPT",
+ "segments": [
{
- "regionTag": "bigquerystorage_v1_generated_BigQueryRead_ReadRows_async",
- "title": "BigQueryRead readRows Sample",
- "origin": "API_DEFINITION",
- "description": " Reads rows from the stream in the format prescribed by the ReadSession. Each response contains one or more table rows, up to a maximum of 100 MiB per response; read requests which attempt to read individual rows larger than 100 MiB will fail. Each request also returns a set of stream statistics reflecting the current state of the stream.",
- "canonical": true,
- "file": "big_query_read.read_rows.js",
- "language": "JAVASCRIPT",
- "segments": [
- {
- "start": 25,
- "end": 61,
- "type": "FULL"
- }
- ],
- "clientMethod": {
- "shortName": "ReadRows",
- "fullName": "google.cloud.bigquery.storage.v1.BigQueryRead.ReadRows",
- "async": true,
- "parameters": [
- {
- "name": "read_stream",
- "type": "TYPE_STRING"
- },
- {
- "name": "offset",
- "type": "TYPE_INT64"
- }
- ],
- "resultType": ".google.cloud.bigquery.storage.v1.ReadRowsResponse",
- "client": {
- "shortName": "BigQueryReadClient",
- "fullName": "google.cloud.bigquery.storage.v1.BigQueryReadClient"
- },
- "method": {
- "shortName": "ReadRows",
- "fullName": "google.cloud.bigquery.storage.v1.BigQueryRead.ReadRows",
- "service": {
- "shortName": "BigQueryRead",
- "fullName": "google.cloud.bigquery.storage.v1.BigQueryRead"
- }
- }
- }
+ "start": 25,
+ "end": 61,
+ "type": "FULL"
+ }
+ ],
+ "clientMethod": {
+ "shortName": "ReadRows",
+ "fullName": "google.cloud.bigquery.storage.v1.BigQueryRead.ReadRows",
+ "async": true,
+ "parameters": [
+ {
+ "name": "read_stream",
+ "type": "TYPE_STRING"
+ },
+ {
+ "name": "offset",
+ "type": "TYPE_INT64"
+ }
+ ],
+ "resultType": ".google.cloud.bigquery.storage.v1.ReadRowsResponse",
+ "client": {
+ "shortName": "BigQueryReadClient",
+ "fullName": "google.cloud.bigquery.storage.v1.BigQueryReadClient"
},
+ "method": {
+ "shortName": "ReadRows",
+ "fullName": "google.cloud.bigquery.storage.v1.BigQueryRead.ReadRows",
+ "service": {
+ "shortName": "BigQueryRead",
+ "fullName": "google.cloud.bigquery.storage.v1.BigQueryRead"
+ }
+ }
+ }
+ },
+ {
+ "regionTag": "bigquerystorage_v1_generated_BigQueryRead_SplitReadStream_async",
+ "title": "BigQueryRead splitReadStream Sample",
+ "origin": "API_DEFINITION",
+ "description": " Splits a given `ReadStream` into two `ReadStream` objects. These `ReadStream` objects are referred to as the primary and the residual streams of the split. The original `ReadStream` can still be read from in the same manner as before. Both of the returned `ReadStream` objects can also be read from, and the rows returned by both child streams will be the same as the rows read from the original stream. Moreover, the two child streams will be allocated back-to-back in the original `ReadStream`. Concretely, it is guaranteed that for streams original, primary, and residual, that original[0-j] = primary[0-j] and original[j-n] = residual[0-m] once the streams have been read to completion.",
+ "canonical": true,
+ "file": "big_query_read.split_read_stream.js",
+ "language": "JAVASCRIPT",
+ "segments": [
{
- "regionTag": "bigquerystorage_v1_generated_BigQueryRead_SplitReadStream_async",
- "title": "BigQueryRead splitReadStream Sample",
- "origin": "API_DEFINITION",
- "description": " Splits a given `ReadStream` into two `ReadStream` objects. These `ReadStream` objects are referred to as the primary and the residual streams of the split. The original `ReadStream` can still be read from in the same manner as before. Both of the returned `ReadStream` objects can also be read from, and the rows returned by both child streams will be the same as the rows read from the original stream. Moreover, the two child streams will be allocated back-to-back in the original `ReadStream`. Concretely, it is guaranteed that for streams original, primary, and residual, that original[0-j] = primary[0-j] and original[j-n] = residual[0-m] once the streams have been read to completion.",
- "canonical": true,
- "file": "big_query_read.split_read_stream.js",
- "language": "JAVASCRIPT",
- "segments": [
- {
- "start": 25,
- "end": 63,
- "type": "FULL"
- }
- ],
- "clientMethod": {
- "shortName": "SplitReadStream",
- "fullName": "google.cloud.bigquery.storage.v1.BigQueryRead.SplitReadStream",
- "async": true,
- "parameters": [
- {
- "name": "name",
- "type": "TYPE_STRING"
- },
- {
- "name": "fraction",
- "type": "TYPE_DOUBLE"
- }
- ],
- "resultType": ".google.cloud.bigquery.storage.v1.SplitReadStreamResponse",
- "client": {
- "shortName": "BigQueryReadClient",
- "fullName": "google.cloud.bigquery.storage.v1.BigQueryReadClient"
- },
- "method": {
- "shortName": "SplitReadStream",
- "fullName": "google.cloud.bigquery.storage.v1.BigQueryRead.SplitReadStream",
- "service": {
- "shortName": "BigQueryRead",
- "fullName": "google.cloud.bigquery.storage.v1.BigQueryRead"
- }
- }
- }
+ "start": 25,
+ "end": 63,
+ "type": "FULL"
+ }
+ ],
+ "clientMethod": {
+ "shortName": "SplitReadStream",
+ "fullName": "google.cloud.bigquery.storage.v1.BigQueryRead.SplitReadStream",
+ "async": true,
+ "parameters": [
+ {
+ "name": "name",
+ "type": "TYPE_STRING"
+ },
+ {
+ "name": "fraction",
+ "type": "TYPE_DOUBLE"
+ }
+ ],
+ "resultType": ".google.cloud.bigquery.storage.v1.SplitReadStreamResponse",
+ "client": {
+ "shortName": "BigQueryReadClient",
+ "fullName": "google.cloud.bigquery.storage.v1.BigQueryReadClient"
},
+ "method": {
+ "shortName": "SplitReadStream",
+ "fullName": "google.cloud.bigquery.storage.v1.BigQueryRead.SplitReadStream",
+ "service": {
+ "shortName": "BigQueryRead",
+ "fullName": "google.cloud.bigquery.storage.v1.BigQueryRead"
+ }
+ }
+ }
+ },
+ {
+ "regionTag": "bigquerystorage_v1_generated_BigQueryWrite_CreateWriteStream_async",
+      "title": "BigQueryWrite createWriteStream Sample",
+ "origin": "API_DEFINITION",
+ "description": " Creates a write stream to the given table. Additionally, every table has a special stream named '_default' to which data can be written. This stream doesn't need to be created using CreateWriteStream. It is a stream that can be used simultaneously by any number of clients. Data written to this stream is considered committed as soon as an acknowledgement is received.",
+ "canonical": true,
+ "file": "big_query_write.create_write_stream.js",
+ "language": "JAVASCRIPT",
+ "segments": [
{
- "regionTag": "bigquerystorage_v1_generated_BigQueryWrite_CreateWriteStream_async",
- "title": "BigQueryRead createWriteStream Sample",
- "origin": "API_DEFINITION",
- "description": " Creates a write stream to the given table. Additionally, every table has a special stream named '_default' to which data can be written. This stream doesn't need to be created using CreateWriteStream. It is a stream that can be used simultaneously by any number of clients. Data written to this stream is considered committed as soon as an acknowledgement is received.",
- "canonical": true,
- "file": "big_query_write.create_write_stream.js",
- "language": "JAVASCRIPT",
- "segments": [
- {
- "start": 25,
- "end": 59,
- "type": "FULL"
- }
- ],
- "clientMethod": {
- "shortName": "CreateWriteStream",
- "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.CreateWriteStream",
- "async": true,
- "parameters": [
- {
- "name": "parent",
- "type": "TYPE_STRING"
- },
- {
- "name": "write_stream",
- "type": ".google.cloud.bigquery.storage.v1.WriteStream"
- }
- ],
- "resultType": ".google.cloud.bigquery.storage.v1.WriteStream",
- "client": {
- "shortName": "BigQueryWriteClient",
- "fullName": "google.cloud.bigquery.storage.v1.BigQueryWriteClient"
- },
- "method": {
- "shortName": "CreateWriteStream",
- "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.CreateWriteStream",
- "service": {
- "shortName": "BigQueryWrite",
- "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite"
- }
- }
- }
+ "start": 25,
+ "end": 59,
+ "type": "FULL"
+ }
+ ],
+ "clientMethod": {
+ "shortName": "CreateWriteStream",
+ "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.CreateWriteStream",
+ "async": true,
+ "parameters": [
+ {
+ "name": "parent",
+ "type": "TYPE_STRING"
+ },
+ {
+ "name": "write_stream",
+ "type": ".google.cloud.bigquery.storage.v1.WriteStream"
+ }
+ ],
+ "resultType": ".google.cloud.bigquery.storage.v1.WriteStream",
+ "client": {
+ "shortName": "BigQueryWriteClient",
+ "fullName": "google.cloud.bigquery.storage.v1.BigQueryWriteClient"
},
+ "method": {
+ "shortName": "CreateWriteStream",
+ "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.CreateWriteStream",
+ "service": {
+ "shortName": "BigQueryWrite",
+ "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite"
+ }
+ }
+ }
+ },
+ {
+ "regionTag": "bigquerystorage_v1_generated_BigQueryWrite_AppendRows_async",
+      "title": "BigQueryWrite appendRows Sample",
+ "origin": "API_DEFINITION",
+ "description": " Appends data to the given stream. If `offset` is specified, the `offset` is checked against the end of stream. The server returns `OUT_OF_RANGE` in `AppendRowsResponse` if an attempt is made to append to an offset beyond the current end of the stream or `ALREADY_EXISTS` if user provides an `offset` that has already been written to. User can retry with adjusted offset within the same RPC connection. If `offset` is not specified, append happens at the end of the stream. The response contains an optional offset at which the append happened. No offset information will be returned for appends to a default stream. Responses are received in the same order in which requests are sent. There will be one response for each successful inserted request. Responses may optionally embed error information if the originating AppendRequest was not successfully processed. The specifics of when successfully appended data is made visible to the table are governed by the type of stream: * For COMMITTED streams (which includes the default stream), data is visible immediately upon successful append. * For BUFFERED streams, data is made visible via a subsequent `FlushRows` rpc which advances a cursor to a newer offset in the stream. * For PENDING streams, data is not made visible until the stream itself is finalized (via the `FinalizeWriteStream` rpc), and the stream is explicitly committed via the `BatchCommitWriteStreams` rpc.",
+ "canonical": true,
+ "file": "big_query_write.append_rows.js",
+ "language": "JAVASCRIPT",
+ "segments": [
{
- "regionTag": "bigquerystorage_v1_generated_BigQueryWrite_AppendRows_async",
- "title": "BigQueryRead appendRows Sample",
- "origin": "API_DEFINITION",
- "description": " Appends data to the given stream. If `offset` is specified, the `offset` is checked against the end of stream. The server returns `OUT_OF_RANGE` in `AppendRowsResponse` if an attempt is made to append to an offset beyond the current end of the stream or `ALREADY_EXISTS` if user provides an `offset` that has already been written to. User can retry with adjusted offset within the same RPC connection. If `offset` is not specified, append happens at the end of the stream. The response contains an optional offset at which the append happened. No offset information will be returned for appends to a default stream. Responses are received in the same order in which requests are sent. There will be one response for each successful inserted request. Responses may optionally embed error information if the originating AppendRequest was not successfully processed. The specifics of when successfully appended data is made visible to the table are governed by the type of stream: * For COMMITTED streams (which includes the default stream), data is visible immediately upon successful append. * For BUFFERED streams, data is made visible via a subsequent `FlushRows` rpc which advances a cursor to a newer offset in the stream. * For PENDING streams, data is not made visible until the stream itself is finalized (via the `FinalizeWriteStream` rpc), and the stream is explicitly committed via the `BatchCommitWriteStreams` rpc.",
- "canonical": true,
- "file": "big_query_write.append_rows.js",
- "language": "JAVASCRIPT",
- "segments": [
- {
- "start": 25,
- "end": 125,
- "type": "FULL"
- }
- ],
- "clientMethod": {
- "shortName": "AppendRows",
- "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.AppendRows",
- "async": true,
- "parameters": [
- {
- "name": "write_stream",
- "type": "TYPE_STRING"
- },
- {
- "name": "offset",
- "type": ".google.protobuf.Int64Value"
- },
- {
- "name": "proto_rows",
- "type": ".google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData"
- },
- {
- "name": "arrow_rows",
- "type": ".google.cloud.bigquery.storage.v1.AppendRowsRequest.ArrowData"
- },
- {
- "name": "trace_id",
- "type": "TYPE_STRING"
- },
- {
- "name": "missing_value_interpretations",
- "type": "TYPE_MESSAGE[]"
- },
- {
- "name": "default_missing_value_interpretation",
- "type": ".google.cloud.bigquery.storage.v1.AppendRowsRequest.MissingValueInterpretation"
- }
- ],
- "resultType": ".google.cloud.bigquery.storage.v1.AppendRowsResponse",
- "client": {
- "shortName": "BigQueryWriteClient",
- "fullName": "google.cloud.bigquery.storage.v1.BigQueryWriteClient"
- },
- "method": {
- "shortName": "AppendRows",
- "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.AppendRows",
- "service": {
- "shortName": "BigQueryWrite",
- "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite"
- }
- }
- }
+ "start": 25,
+ "end": 124,
+ "type": "FULL"
+ }
+ ],
+ "clientMethod": {
+ "shortName": "AppendRows",
+ "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.AppendRows",
+ "async": true,
+ "parameters": [
+ {
+ "name": "write_stream",
+ "type": "TYPE_STRING"
+ },
+ {
+ "name": "offset",
+ "type": ".google.protobuf.Int64Value"
+ },
+ {
+ "name": "proto_rows",
+ "type": ".google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData"
+ },
+ {
+ "name": "arrow_rows",
+ "type": ".google.cloud.bigquery.storage.v1.AppendRowsRequest.ArrowData"
+ },
+ {
+ "name": "trace_id",
+ "type": "TYPE_STRING"
+ },
+ {
+ "name": "missing_value_interpretations",
+ "type": "TYPE_MESSAGE[]"
+ },
+ {
+ "name": "default_missing_value_interpretation",
+ "type": ".google.cloud.bigquery.storage.v1.AppendRowsRequest.MissingValueInterpretation"
+ }
+ ],
+ "resultType": ".google.cloud.bigquery.storage.v1.AppendRowsResponse",
+ "client": {
+ "shortName": "BigQueryWriteClient",
+ "fullName": "google.cloud.bigquery.storage.v1.BigQueryWriteClient"
},
+ "method": {
+ "shortName": "AppendRows",
+ "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.AppendRows",
+ "service": {
+ "shortName": "BigQueryWrite",
+ "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite"
+ }
+ }
+ }
+ },
+ {
+ "regionTag": "bigquerystorage_v1_generated_BigQueryWrite_GetWriteStream_async",
+      "title": "BigQueryWrite getWriteStream Sample",
+ "origin": "API_DEFINITION",
+ "description": " Gets information about a write stream.",
+ "canonical": true,
+ "file": "big_query_write.get_write_stream.js",
+ "language": "JAVASCRIPT",
+ "segments": [
{
- "regionTag": "bigquerystorage_v1_generated_BigQueryWrite_GetWriteStream_async",
- "title": "BigQueryRead getWriteStream Sample",
- "origin": "API_DEFINITION",
- "description": " Gets information about a write stream.",
- "canonical": true,
- "file": "big_query_write.get_write_stream.js",
- "language": "JAVASCRIPT",
- "segments": [
- {
- "start": 25,
- "end": 59,
- "type": "FULL"
- }
- ],
- "clientMethod": {
- "shortName": "GetWriteStream",
- "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.GetWriteStream",
- "async": true,
- "parameters": [
- {
- "name": "name",
- "type": "TYPE_STRING"
- },
- {
- "name": "view",
- "type": ".google.cloud.bigquery.storage.v1.WriteStreamView"
- }
- ],
- "resultType": ".google.cloud.bigquery.storage.v1.WriteStream",
- "client": {
- "shortName": "BigQueryWriteClient",
- "fullName": "google.cloud.bigquery.storage.v1.BigQueryWriteClient"
- },
- "method": {
- "shortName": "GetWriteStream",
- "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.GetWriteStream",
- "service": {
- "shortName": "BigQueryWrite",
- "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite"
- }
- }
- }
+ "start": 25,
+ "end": 59,
+ "type": "FULL"
+ }
+ ],
+ "clientMethod": {
+ "shortName": "GetWriteStream",
+ "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.GetWriteStream",
+ "async": true,
+ "parameters": [
+ {
+ "name": "name",
+ "type": "TYPE_STRING"
+ },
+ {
+ "name": "view",
+ "type": ".google.cloud.bigquery.storage.v1.WriteStreamView"
+ }
+ ],
+ "resultType": ".google.cloud.bigquery.storage.v1.WriteStream",
+ "client": {
+ "shortName": "BigQueryWriteClient",
+ "fullName": "google.cloud.bigquery.storage.v1.BigQueryWriteClient"
},
+ "method": {
+ "shortName": "GetWriteStream",
+ "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.GetWriteStream",
+ "service": {
+ "shortName": "BigQueryWrite",
+ "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite"
+ }
+ }
+ }
+ },
+ {
+ "regionTag": "bigquerystorage_v1_generated_BigQueryWrite_FinalizeWriteStream_async",
+      "title": "BigQueryWrite finalizeWriteStream Sample",
+ "origin": "API_DEFINITION",
+ "description": " Finalize a write stream so that no new data can be appended to the stream. Finalize is not supported on the '_default' stream.",
+ "canonical": true,
+ "file": "big_query_write.finalize_write_stream.js",
+ "language": "JAVASCRIPT",
+ "segments": [
{
- "regionTag": "bigquerystorage_v1_generated_BigQueryWrite_FinalizeWriteStream_async",
- "title": "BigQueryRead finalizeWriteStream Sample",
- "origin": "API_DEFINITION",
- "description": " Finalize a write stream so that no new data can be appended to the stream. Finalize is not supported on the '_default' stream.",
- "canonical": true,
- "file": "big_query_write.finalize_write_stream.js",
- "language": "JAVASCRIPT",
- "segments": [
- {
- "start": 25,
- "end": 54,
- "type": "FULL"
- }
- ],
- "clientMethod": {
- "shortName": "FinalizeWriteStream",
- "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.FinalizeWriteStream",
- "async": true,
- "parameters": [
- {
- "name": "name",
- "type": "TYPE_STRING"
- }
- ],
- "resultType": ".google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse",
- "client": {
- "shortName": "BigQueryWriteClient",
- "fullName": "google.cloud.bigquery.storage.v1.BigQueryWriteClient"
- },
- "method": {
- "shortName": "FinalizeWriteStream",
- "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.FinalizeWriteStream",
- "service": {
- "shortName": "BigQueryWrite",
- "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite"
- }
- }
- }
+ "start": 25,
+ "end": 54,
+ "type": "FULL"
+ }
+ ],
+ "clientMethod": {
+ "shortName": "FinalizeWriteStream",
+ "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.FinalizeWriteStream",
+ "async": true,
+ "parameters": [
+ {
+ "name": "name",
+ "type": "TYPE_STRING"
+ }
+ ],
+ "resultType": ".google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse",
+ "client": {
+ "shortName": "BigQueryWriteClient",
+ "fullName": "google.cloud.bigquery.storage.v1.BigQueryWriteClient"
},
+ "method": {
+ "shortName": "FinalizeWriteStream",
+ "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.FinalizeWriteStream",
+ "service": {
+ "shortName": "BigQueryWrite",
+ "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite"
+ }
+ }
+ }
+ },
+ {
+ "regionTag": "bigquerystorage_v1_generated_BigQueryWrite_BatchCommitWriteStreams_async",
+      "title": "BigQueryWrite batchCommitWriteStreams Sample",
+ "origin": "API_DEFINITION",
+ "description": " Atomically commits a group of `PENDING` streams that belong to the same `parent` table. Streams must be finalized before commit and cannot be committed multiple times. Once a stream is committed, data in the stream becomes available for read operations.",
+ "canonical": true,
+ "file": "big_query_write.batch_commit_write_streams.js",
+ "language": "JAVASCRIPT",
+ "segments": [
{
- "regionTag": "bigquerystorage_v1_generated_BigQueryWrite_BatchCommitWriteStreams_async",
- "title": "BigQueryRead batchCommitWriteStreams Sample",
- "origin": "API_DEFINITION",
- "description": " Atomically commits a group of `PENDING` streams that belong to the same `parent` table. Streams must be finalized before commit and cannot be committed multiple times. Once a stream is committed, data in the stream becomes available for read operations.",
- "canonical": true,
- "file": "big_query_write.batch_commit_write_streams.js",
- "language": "JAVASCRIPT",
- "segments": [
- {
- "start": 25,
- "end": 59,
- "type": "FULL"
- }
- ],
- "clientMethod": {
- "shortName": "BatchCommitWriteStreams",
- "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.BatchCommitWriteStreams",
- "async": true,
- "parameters": [
- {
- "name": "parent",
- "type": "TYPE_STRING"
- },
- {
- "name": "write_streams",
- "type": "TYPE_STRING[]"
- }
- ],
- "resultType": ".google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse",
- "client": {
- "shortName": "BigQueryWriteClient",
- "fullName": "google.cloud.bigquery.storage.v1.BigQueryWriteClient"
- },
- "method": {
- "shortName": "BatchCommitWriteStreams",
- "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.BatchCommitWriteStreams",
- "service": {
- "shortName": "BigQueryWrite",
- "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite"
- }
- }
- }
+ "start": 25,
+ "end": 59,
+ "type": "FULL"
+ }
+ ],
+ "clientMethod": {
+ "shortName": "BatchCommitWriteStreams",
+ "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.BatchCommitWriteStreams",
+ "async": true,
+ "parameters": [
+ {
+ "name": "parent",
+ "type": "TYPE_STRING"
+ },
+ {
+ "name": "write_streams",
+ "type": "TYPE_STRING[]"
+ }
+ ],
+ "resultType": ".google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse",
+ "client": {
+ "shortName": "BigQueryWriteClient",
+ "fullName": "google.cloud.bigquery.storage.v1.BigQueryWriteClient"
},
+ "method": {
+ "shortName": "BatchCommitWriteStreams",
+ "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.BatchCommitWriteStreams",
+ "service": {
+ "shortName": "BigQueryWrite",
+ "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite"
+ }
+ }
+ }
+ },
+ {
+ "regionTag": "bigquerystorage_v1_generated_BigQueryWrite_FlushRows_async",
+      "title": "BigQueryWrite flushRows Sample",
+ "origin": "API_DEFINITION",
+ "description": " Flushes rows to a BUFFERED stream. If users are appending rows to BUFFERED stream, flush operation is required in order for the rows to become available for reading. A Flush operation flushes up to any previously flushed offset in a BUFFERED stream, to the offset specified in the request. Flush is not supported on the _default stream, since it is not BUFFERED.",
+ "canonical": true,
+ "file": "big_query_write.flush_rows.js",
+ "language": "JAVASCRIPT",
+ "segments": [
{
- "regionTag": "bigquerystorage_v1_generated_BigQueryWrite_FlushRows_async",
- "title": "BigQueryRead flushRows Sample",
- "origin": "API_DEFINITION",
- "description": " Flushes rows to a BUFFERED stream. If users are appending rows to BUFFERED stream, flush operation is required in order for the rows to become available for reading. A Flush operation flushes up to any previously flushed offset in a BUFFERED stream, to the offset specified in the request. Flush is not supported on the _default stream, since it is not BUFFERED.",
- "canonical": true,
- "file": "big_query_write.flush_rows.js",
- "language": "JAVASCRIPT",
- "segments": [
- {
- "start": 25,
- "end": 58,
- "type": "FULL"
- }
- ],
- "clientMethod": {
- "shortName": "FlushRows",
- "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.FlushRows",
- "async": true,
- "parameters": [
- {
- "name": "write_stream",
- "type": "TYPE_STRING"
- },
- {
- "name": "offset",
- "type": ".google.protobuf.Int64Value"
- }
- ],
- "resultType": ".google.cloud.bigquery.storage.v1.FlushRowsResponse",
- "client": {
- "shortName": "BigQueryWriteClient",
- "fullName": "google.cloud.bigquery.storage.v1.BigQueryWriteClient"
- },
- "method": {
- "shortName": "FlushRows",
- "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.FlushRows",
- "service": {
- "shortName": "BigQueryWrite",
- "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite"
- }
- }
- }
+ "start": 25,
+ "end": 58,
+ "type": "FULL"
}
- ]
-}
\ No newline at end of file
+ ],
+ "clientMethod": {
+ "shortName": "FlushRows",
+ "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.FlushRows",
+ "async": true,
+ "parameters": [
+ {
+ "name": "write_stream",
+ "type": "TYPE_STRING"
+ },
+ {
+ "name": "offset",
+ "type": ".google.protobuf.Int64Value"
+ }
+ ],
+ "resultType": ".google.cloud.bigquery.storage.v1.FlushRowsResponse",
+ "client": {
+ "shortName": "BigQueryWriteClient",
+ "fullName": "google.cloud.bigquery.storage.v1.BigQueryWriteClient"
+ },
+ "method": {
+ "shortName": "FlushRows",
+ "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.FlushRows",
+ "service": {
+ "shortName": "BigQueryWrite",
+ "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite"
+ }
+ }
+ }
+ }
+ ]
+}
diff --git a/handwritten/bigquery-storage/samples/generated/v1alpha/metastore_partition_service.batch_create_metastore_partitions.js b/handwritten/bigquery-storage/samples/generated/v1alpha/metastore_partition_service.batch_create_metastore_partitions.js
index f9552ee4a0cd..912f110b99d1 100644
--- a/handwritten/bigquery-storage/samples/generated/v1alpha/metastore_partition_service.batch_create_metastore_partitions.js
+++ b/handwritten/bigquery-storage/samples/generated/v1alpha/metastore_partition_service.batch_create_metastore_partitions.js
@@ -1,4 +1,4 @@
-// Copyright 2025 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
diff --git a/handwritten/bigquery-storage/samples/generated/v1alpha/metastore_partition_service.batch_delete_metastore_partitions.js b/handwritten/bigquery-storage/samples/generated/v1alpha/metastore_partition_service.batch_delete_metastore_partitions.js
index 10d3f93907a6..89d8e0efb0d2 100644
--- a/handwritten/bigquery-storage/samples/generated/v1alpha/metastore_partition_service.batch_delete_metastore_partitions.js
+++ b/handwritten/bigquery-storage/samples/generated/v1alpha/metastore_partition_service.batch_delete_metastore_partitions.js
@@ -1,4 +1,4 @@
-// Copyright 2025 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
diff --git a/handwritten/bigquery-storage/samples/generated/v1alpha/metastore_partition_service.batch_update_metastore_partitions.js b/handwritten/bigquery-storage/samples/generated/v1alpha/metastore_partition_service.batch_update_metastore_partitions.js
index 0d662372fb4b..d330a5dcb0bd 100644
--- a/handwritten/bigquery-storage/samples/generated/v1alpha/metastore_partition_service.batch_update_metastore_partitions.js
+++ b/handwritten/bigquery-storage/samples/generated/v1alpha/metastore_partition_service.batch_update_metastore_partitions.js
@@ -1,4 +1,4 @@
-// Copyright 2025 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
diff --git a/handwritten/bigquery-storage/samples/generated/v1alpha/metastore_partition_service.list_metastore_partitions.js b/handwritten/bigquery-storage/samples/generated/v1alpha/metastore_partition_service.list_metastore_partitions.js
index 7b4faf856fd4..d076f31e69f7 100644
--- a/handwritten/bigquery-storage/samples/generated/v1alpha/metastore_partition_service.list_metastore_partitions.js
+++ b/handwritten/bigquery-storage/samples/generated/v1alpha/metastore_partition_service.list_metastore_partitions.js
@@ -1,4 +1,4 @@
-// Copyright 2025 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
diff --git a/handwritten/bigquery-storage/samples/generated/v1alpha/metastore_partition_service.stream_metastore_partitions.js b/handwritten/bigquery-storage/samples/generated/v1alpha/metastore_partition_service.stream_metastore_partitions.js
index 233f9675c138..49ad7f13cc66 100644
--- a/handwritten/bigquery-storage/samples/generated/v1alpha/metastore_partition_service.stream_metastore_partitions.js
+++ b/handwritten/bigquery-storage/samples/generated/v1alpha/metastore_partition_service.stream_metastore_partitions.js
@@ -1,4 +1,4 @@
-// Copyright 2025 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
diff --git a/handwritten/bigquery-storage/samples/generated/v1alpha/snippet_metadata_google.cloud.bigquery.storage.v1alpha.json b/handwritten/bigquery-storage/samples/generated/v1alpha/snippet_metadata_google.cloud.bigquery.storage.v1alpha.json
index 3fedb87b58a9..7ba7beeb6de5 100644
--- a/handwritten/bigquery-storage/samples/generated/v1alpha/snippet_metadata_google.cloud.bigquery.storage.v1alpha.json
+++ b/handwritten/bigquery-storage/samples/generated/v1alpha/snippet_metadata_google.cloud.bigquery.storage.v1alpha.json
@@ -1,259 +1,259 @@
{
- "clientLibrary": {
- "name": "nodejs-storage",
- "version": "5.1.0",
- "language": "TYPESCRIPT",
- "apis": [
- {
- "id": "google.cloud.bigquery.storage.v1alpha",
- "version": "v1alpha"
- }
- ]
- },
- "snippets": [
+ "clientLibrary": {
+ "name": "nodejs-storage",
+ "version": "0.1.0",
+ "language": "TYPESCRIPT",
+ "apis": [
+ {
+ "id": "google.cloud.bigquery.storage.v1alpha",
+ "version": "v1alpha"
+ }
+ ]
+ },
+ "snippets": [
+ {
+ "regionTag": "bigquerystorage_v1alpha_generated_MetastorePartitionService_BatchCreateMetastorePartitions_async",
+ "title": "MetastorePartitionService batchCreateMetastorePartitions Sample",
+ "origin": "API_DEFINITION",
+ "description": " Adds metastore partitions to a table.",
+ "canonical": true,
+ "file": "metastore_partition_service.batch_create_metastore_partitions.js",
+ "language": "JAVASCRIPT",
+ "segments": [
{
- "regionTag": "bigquerystorage_v1alpha_generated_MetastorePartitionService_BatchCreateMetastorePartitions_async",
- "title": "MetastorePartitionService batchCreateMetastorePartitions Sample",
- "origin": "API_DEFINITION",
- "description": " Adds metastore partitions to a table.",
- "canonical": true,
- "file": "metastore_partition_service.batch_create_metastore_partitions.js",
- "language": "JAVASCRIPT",
- "segments": [
- {
- "start": 25,
- "end": 76,
- "type": "FULL"
- }
- ],
- "clientMethod": {
- "shortName": "BatchCreateMetastorePartitions",
- "fullName": "google.cloud.bigquery.storage.v1alpha.MetastorePartitionService.BatchCreateMetastorePartitions",
- "async": true,
- "parameters": [
- {
- "name": "parent",
- "type": "TYPE_STRING"
- },
- {
- "name": "requests",
- "type": "TYPE_MESSAGE[]"
- },
- {
- "name": "skip_existing_partitions",
- "type": "TYPE_BOOL"
- },
- {
- "name": "trace_id",
- "type": "TYPE_STRING"
- }
- ],
- "resultType": ".google.cloud.bigquery.storage.v1alpha.BatchCreateMetastorePartitionsResponse",
- "client": {
- "shortName": "MetastorePartitionServiceClient",
- "fullName": "google.cloud.bigquery.storage.v1alpha.MetastorePartitionServiceClient"
- },
- "method": {
- "shortName": "BatchCreateMetastorePartitions",
- "fullName": "google.cloud.bigquery.storage.v1alpha.MetastorePartitionService.BatchCreateMetastorePartitions",
- "service": {
- "shortName": "MetastorePartitionService",
- "fullName": "google.cloud.bigquery.storage.v1alpha.MetastorePartitionService"
- }
- }
- }
+ "start": 25,
+ "end": 76,
+ "type": "FULL"
+ }
+ ],
+ "clientMethod": {
+ "shortName": "BatchCreateMetastorePartitions",
+ "fullName": "google.cloud.bigquery.storage.v1alpha.MetastorePartitionService.BatchCreateMetastorePartitions",
+ "async": true,
+ "parameters": [
+ {
+ "name": "parent",
+ "type": "TYPE_STRING"
+ },
+ {
+ "name": "requests",
+ "type": "TYPE_MESSAGE[]"
+ },
+ {
+ "name": "skip_existing_partitions",
+ "type": "TYPE_BOOL"
+ },
+ {
+ "name": "trace_id",
+ "type": "TYPE_STRING"
+ }
+ ],
+ "resultType": ".google.cloud.bigquery.storage.v1alpha.BatchCreateMetastorePartitionsResponse",
+ "client": {
+ "shortName": "MetastorePartitionServiceClient",
+ "fullName": "google.cloud.bigquery.storage.v1alpha.MetastorePartitionServiceClient"
},
+ "method": {
+ "shortName": "BatchCreateMetastorePartitions",
+ "fullName": "google.cloud.bigquery.storage.v1alpha.MetastorePartitionService.BatchCreateMetastorePartitions",
+ "service": {
+ "shortName": "MetastorePartitionService",
+ "fullName": "google.cloud.bigquery.storage.v1alpha.MetastorePartitionService"
+ }
+ }
+ }
+ },
+ {
+ "regionTag": "bigquerystorage_v1alpha_generated_MetastorePartitionService_BatchDeleteMetastorePartitions_async",
+ "title": "MetastorePartitionService batchDeleteMetastorePartitions Sample",
+ "origin": "API_DEFINITION",
+ "description": " Deletes metastore partitions from a table.",
+ "canonical": true,
+ "file": "metastore_partition_service.batch_delete_metastore_partitions.js",
+ "language": "JAVASCRIPT",
+ "segments": [
{
- "regionTag": "bigquerystorage_v1alpha_generated_MetastorePartitionService_BatchDeleteMetastorePartitions_async",
- "title": "MetastorePartitionService batchDeleteMetastorePartitions Sample",
- "origin": "API_DEFINITION",
- "description": " Deletes metastore partitions from a table.",
- "canonical": true,
- "file": "metastore_partition_service.batch_delete_metastore_partitions.js",
- "language": "JAVASCRIPT",
- "segments": [
- {
- "start": 25,
- "end": 69,
- "type": "FULL"
- }
- ],
- "clientMethod": {
- "shortName": "BatchDeleteMetastorePartitions",
- "fullName": "google.cloud.bigquery.storage.v1alpha.MetastorePartitionService.BatchDeleteMetastorePartitions",
- "async": true,
- "parameters": [
- {
- "name": "parent",
- "type": "TYPE_STRING"
- },
- {
- "name": "partition_values",
- "type": "TYPE_MESSAGE[]"
- },
- {
- "name": "trace_id",
- "type": "TYPE_STRING"
- }
- ],
- "resultType": ".google.protobuf.Empty",
- "client": {
- "shortName": "MetastorePartitionServiceClient",
- "fullName": "google.cloud.bigquery.storage.v1alpha.MetastorePartitionServiceClient"
- },
- "method": {
- "shortName": "BatchDeleteMetastorePartitions",
- "fullName": "google.cloud.bigquery.storage.v1alpha.MetastorePartitionService.BatchDeleteMetastorePartitions",
- "service": {
- "shortName": "MetastorePartitionService",
- "fullName": "google.cloud.bigquery.storage.v1alpha.MetastorePartitionService"
- }
- }
- }
+ "start": 25,
+ "end": 69,
+ "type": "FULL"
+ }
+ ],
+ "clientMethod": {
+ "shortName": "BatchDeleteMetastorePartitions",
+ "fullName": "google.cloud.bigquery.storage.v1alpha.MetastorePartitionService.BatchDeleteMetastorePartitions",
+ "async": true,
+ "parameters": [
+ {
+ "name": "parent",
+ "type": "TYPE_STRING"
+ },
+ {
+ "name": "partition_values",
+ "type": "TYPE_MESSAGE[]"
+ },
+ {
+ "name": "trace_id",
+ "type": "TYPE_STRING"
+ }
+ ],
+ "resultType": ".google.protobuf.Empty",
+ "client": {
+ "shortName": "MetastorePartitionServiceClient",
+ "fullName": "google.cloud.bigquery.storage.v1alpha.MetastorePartitionServiceClient"
},
+ "method": {
+ "shortName": "BatchDeleteMetastorePartitions",
+ "fullName": "google.cloud.bigquery.storage.v1alpha.MetastorePartitionService.BatchDeleteMetastorePartitions",
+ "service": {
+ "shortName": "MetastorePartitionService",
+ "fullName": "google.cloud.bigquery.storage.v1alpha.MetastorePartitionService"
+ }
+ }
+ }
+ },
+ {
+ "regionTag": "bigquerystorage_v1alpha_generated_MetastorePartitionService_BatchUpdateMetastorePartitions_async",
+ "title": "MetastorePartitionService batchUpdateMetastorePartitions Sample",
+ "origin": "API_DEFINITION",
+ "description": " Updates metastore partitions in a table.",
+ "canonical": true,
+ "file": "metastore_partition_service.batch_update_metastore_partitions.js",
+ "language": "JAVASCRIPT",
+ "segments": [
{
- "regionTag": "bigquerystorage_v1alpha_generated_MetastorePartitionService_BatchUpdateMetastorePartitions_async",
- "title": "MetastorePartitionService batchUpdateMetastorePartitions Sample",
- "origin": "API_DEFINITION",
- "description": " Updates metastore partitions in a table.",
- "canonical": true,
- "file": "metastore_partition_service.batch_update_metastore_partitions.js",
- "language": "JAVASCRIPT",
- "segments": [
- {
- "start": 25,
- "end": 68,
- "type": "FULL"
- }
- ],
- "clientMethod": {
- "shortName": "BatchUpdateMetastorePartitions",
- "fullName": "google.cloud.bigquery.storage.v1alpha.MetastorePartitionService.BatchUpdateMetastorePartitions",
- "async": true,
- "parameters": [
- {
- "name": "parent",
- "type": "TYPE_STRING"
- },
- {
- "name": "requests",
- "type": "TYPE_MESSAGE[]"
- },
- {
- "name": "trace_id",
- "type": "TYPE_STRING"
- }
- ],
- "resultType": ".google.cloud.bigquery.storage.v1alpha.BatchUpdateMetastorePartitionsResponse",
- "client": {
- "shortName": "MetastorePartitionServiceClient",
- "fullName": "google.cloud.bigquery.storage.v1alpha.MetastorePartitionServiceClient"
- },
- "method": {
- "shortName": "BatchUpdateMetastorePartitions",
- "fullName": "google.cloud.bigquery.storage.v1alpha.MetastorePartitionService.BatchUpdateMetastorePartitions",
- "service": {
- "shortName": "MetastorePartitionService",
- "fullName": "google.cloud.bigquery.storage.v1alpha.MetastorePartitionService"
- }
- }
- }
+ "start": 25,
+ "end": 68,
+ "type": "FULL"
+ }
+ ],
+ "clientMethod": {
+ "shortName": "BatchUpdateMetastorePartitions",
+ "fullName": "google.cloud.bigquery.storage.v1alpha.MetastorePartitionService.BatchUpdateMetastorePartitions",
+ "async": true,
+ "parameters": [
+ {
+ "name": "parent",
+ "type": "TYPE_STRING"
+ },
+ {
+ "name": "requests",
+ "type": "TYPE_MESSAGE[]"
+ },
+ {
+ "name": "trace_id",
+ "type": "TYPE_STRING"
+ }
+ ],
+ "resultType": ".google.cloud.bigquery.storage.v1alpha.BatchUpdateMetastorePartitionsResponse",
+ "client": {
+ "shortName": "MetastorePartitionServiceClient",
+ "fullName": "google.cloud.bigquery.storage.v1alpha.MetastorePartitionServiceClient"
},
+ "method": {
+ "shortName": "BatchUpdateMetastorePartitions",
+ "fullName": "google.cloud.bigquery.storage.v1alpha.MetastorePartitionService.BatchUpdateMetastorePartitions",
+ "service": {
+ "shortName": "MetastorePartitionService",
+ "fullName": "google.cloud.bigquery.storage.v1alpha.MetastorePartitionService"
+ }
+ }
+ }
+ },
+ {
+ "regionTag": "bigquerystorage_v1alpha_generated_MetastorePartitionService_ListMetastorePartitions_async",
+ "title": "MetastorePartitionService listMetastorePartitions Sample",
+ "origin": "API_DEFINITION",
+ "description": " Gets metastore partitions from a table.",
+ "canonical": true,
+ "file": "metastore_partition_service.list_metastore_partitions.js",
+ "language": "JAVASCRIPT",
+ "segments": [
{
- "regionTag": "bigquerystorage_v1alpha_generated_MetastorePartitionService_ListMetastorePartitions_async",
- "title": "MetastorePartitionService listMetastorePartitions Sample",
- "origin": "API_DEFINITION",
- "description": " Gets metastore partitions from a table.",
- "canonical": true,
- "file": "metastore_partition_service.list_metastore_partitions.js",
- "language": "JAVASCRIPT",
- "segments": [
- {
- "start": 25,
- "end": 75,
- "type": "FULL"
- }
- ],
- "clientMethod": {
- "shortName": "ListMetastorePartitions",
- "fullName": "google.cloud.bigquery.storage.v1alpha.MetastorePartitionService.ListMetastorePartitions",
- "async": true,
- "parameters": [
- {
- "name": "parent",
- "type": "TYPE_STRING"
- },
- {
- "name": "filter",
- "type": "TYPE_STRING"
- },
- {
- "name": "trace_id",
- "type": "TYPE_STRING"
- }
- ],
- "resultType": ".google.cloud.bigquery.storage.v1alpha.ListMetastorePartitionsResponse",
- "client": {
- "shortName": "MetastorePartitionServiceClient",
- "fullName": "google.cloud.bigquery.storage.v1alpha.MetastorePartitionServiceClient"
- },
- "method": {
- "shortName": "ListMetastorePartitions",
- "fullName": "google.cloud.bigquery.storage.v1alpha.MetastorePartitionService.ListMetastorePartitions",
- "service": {
- "shortName": "MetastorePartitionService",
- "fullName": "google.cloud.bigquery.storage.v1alpha.MetastorePartitionService"
- }
- }
- }
+ "start": 25,
+ "end": 75,
+ "type": "FULL"
+ }
+ ],
+ "clientMethod": {
+ "shortName": "ListMetastorePartitions",
+ "fullName": "google.cloud.bigquery.storage.v1alpha.MetastorePartitionService.ListMetastorePartitions",
+ "async": true,
+ "parameters": [
+ {
+ "name": "parent",
+ "type": "TYPE_STRING"
+ },
+ {
+ "name": "filter",
+ "type": "TYPE_STRING"
+ },
+ {
+ "name": "trace_id",
+ "type": "TYPE_STRING"
+ }
+ ],
+ "resultType": ".google.cloud.bigquery.storage.v1alpha.ListMetastorePartitionsResponse",
+ "client": {
+ "shortName": "MetastorePartitionServiceClient",
+ "fullName": "google.cloud.bigquery.storage.v1alpha.MetastorePartitionServiceClient"
},
+ "method": {
+ "shortName": "ListMetastorePartitions",
+ "fullName": "google.cloud.bigquery.storage.v1alpha.MetastorePartitionService.ListMetastorePartitions",
+ "service": {
+ "shortName": "MetastorePartitionService",
+ "fullName": "google.cloud.bigquery.storage.v1alpha.MetastorePartitionService"
+ }
+ }
+ }
+ },
+ {
+ "regionTag": "bigquerystorage_v1alpha_generated_MetastorePartitionService_StreamMetastorePartitions_async",
+ "title": "MetastorePartitionService streamMetastorePartitions Sample",
+ "origin": "API_DEFINITION",
+ "description": " This is a bi-di streaming rpc method that allows the client to send a stream of partitions and commit all of them atomically at the end. If the commit is successful, the server will return a response and close the stream. If the commit fails (due to duplicate partitions or other reason), the server will close the stream with an error. This method is only available via the gRPC API (not REST).",
+ "canonical": true,
+ "file": "metastore_partition_service.stream_metastore_partitions.js",
+ "language": "JAVASCRIPT",
+ "segments": [
{
- "regionTag": "bigquerystorage_v1alpha_generated_MetastorePartitionService_StreamMetastorePartitions_async",
- "title": "MetastorePartitionService streamMetastorePartitions Sample",
- "origin": "API_DEFINITION",
- "description": " This is a bi-di streaming rpc method that allows the client to send a stream of partitions and commit all of them atomically at the end. If the commit is successful, the server will return a response and close the stream. If the commit fails (due to duplicate partitions or other reason), the server will close the stream with an error. This method is only available via the gRPC API (not REST).",
- "canonical": true,
- "file": "metastore_partition_service.stream_metastore_partitions.js",
- "language": "JAVASCRIPT",
- "segments": [
- {
- "start": 25,
- "end": 74,
- "type": "FULL"
- }
- ],
- "clientMethod": {
- "shortName": "StreamMetastorePartitions",
- "fullName": "google.cloud.bigquery.storage.v1alpha.MetastorePartitionService.StreamMetastorePartitions",
- "async": true,
- "parameters": [
- {
- "name": "parent",
- "type": "TYPE_STRING"
- },
- {
- "name": "metastore_partitions",
- "type": "TYPE_MESSAGE[]"
- },
- {
- "name": "skip_existing_partitions",
- "type": "TYPE_BOOL"
- }
- ],
- "resultType": ".google.cloud.bigquery.storage.v1alpha.StreamMetastorePartitionsResponse",
- "client": {
- "shortName": "MetastorePartitionServiceClient",
- "fullName": "google.cloud.bigquery.storage.v1alpha.MetastorePartitionServiceClient"
- },
- "method": {
- "shortName": "StreamMetastorePartitions",
- "fullName": "google.cloud.bigquery.storage.v1alpha.MetastorePartitionService.StreamMetastorePartitions",
- "service": {
- "shortName": "MetastorePartitionService",
- "fullName": "google.cloud.bigquery.storage.v1alpha.MetastorePartitionService"
- }
- }
- }
+ "start": 25,
+ "end": 74,
+ "type": "FULL"
}
- ]
-}
\ No newline at end of file
+ ],
+ "clientMethod": {
+ "shortName": "StreamMetastorePartitions",
+ "fullName": "google.cloud.bigquery.storage.v1alpha.MetastorePartitionService.StreamMetastorePartitions",
+ "async": true,
+ "parameters": [
+ {
+ "name": "parent",
+ "type": "TYPE_STRING"
+ },
+ {
+ "name": "metastore_partitions",
+ "type": "TYPE_MESSAGE[]"
+ },
+ {
+ "name": "skip_existing_partitions",
+ "type": "TYPE_BOOL"
+ }
+ ],
+ "resultType": ".google.cloud.bigquery.storage.v1alpha.StreamMetastorePartitionsResponse",
+ "client": {
+ "shortName": "MetastorePartitionServiceClient",
+ "fullName": "google.cloud.bigquery.storage.v1alpha.MetastorePartitionServiceClient"
+ },
+ "method": {
+ "shortName": "StreamMetastorePartitions",
+ "fullName": "google.cloud.bigquery.storage.v1alpha.MetastorePartitionService.StreamMetastorePartitions",
+ "service": {
+ "shortName": "MetastorePartitionService",
+ "fullName": "google.cloud.bigquery.storage.v1alpha.MetastorePartitionService"
+ }
+ }
+ }
+ }
+ ]
+}
diff --git a/handwritten/bigquery-storage/samples/generated/v1beta/metastore_partition_service.batch_create_metastore_partitions.js b/handwritten/bigquery-storage/samples/generated/v1beta/metastore_partition_service.batch_create_metastore_partitions.js
index ce5d32f25151..e3360292d0f6 100644
--- a/handwritten/bigquery-storage/samples/generated/v1beta/metastore_partition_service.batch_create_metastore_partitions.js
+++ b/handwritten/bigquery-storage/samples/generated/v1beta/metastore_partition_service.batch_create_metastore_partitions.js
@@ -1,4 +1,4 @@
-// Copyright 2025 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
diff --git a/handwritten/bigquery-storage/samples/generated/v1beta/metastore_partition_service.batch_delete_metastore_partitions.js b/handwritten/bigquery-storage/samples/generated/v1beta/metastore_partition_service.batch_delete_metastore_partitions.js
index f289e265b937..77c7e30cac8c 100644
--- a/handwritten/bigquery-storage/samples/generated/v1beta/metastore_partition_service.batch_delete_metastore_partitions.js
+++ b/handwritten/bigquery-storage/samples/generated/v1beta/metastore_partition_service.batch_delete_metastore_partitions.js
@@ -1,4 +1,4 @@
-// Copyright 2025 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
diff --git a/handwritten/bigquery-storage/samples/generated/v1beta/metastore_partition_service.batch_update_metastore_partitions.js b/handwritten/bigquery-storage/samples/generated/v1beta/metastore_partition_service.batch_update_metastore_partitions.js
index 4ef034bffbfd..86cb412b34fa 100644
--- a/handwritten/bigquery-storage/samples/generated/v1beta/metastore_partition_service.batch_update_metastore_partitions.js
+++ b/handwritten/bigquery-storage/samples/generated/v1beta/metastore_partition_service.batch_update_metastore_partitions.js
@@ -1,4 +1,4 @@
-// Copyright 2025 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
diff --git a/handwritten/bigquery-storage/samples/generated/v1beta/metastore_partition_service.list_metastore_partitions.js b/handwritten/bigquery-storage/samples/generated/v1beta/metastore_partition_service.list_metastore_partitions.js
index 188046ccc072..7c2ed318f048 100644
--- a/handwritten/bigquery-storage/samples/generated/v1beta/metastore_partition_service.list_metastore_partitions.js
+++ b/handwritten/bigquery-storage/samples/generated/v1beta/metastore_partition_service.list_metastore_partitions.js
@@ -1,4 +1,4 @@
-// Copyright 2025 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
diff --git a/handwritten/bigquery-storage/samples/generated/v1beta/metastore_partition_service.stream_metastore_partitions.js b/handwritten/bigquery-storage/samples/generated/v1beta/metastore_partition_service.stream_metastore_partitions.js
index a773fb970542..74d9060c4c03 100644
--- a/handwritten/bigquery-storage/samples/generated/v1beta/metastore_partition_service.stream_metastore_partitions.js
+++ b/handwritten/bigquery-storage/samples/generated/v1beta/metastore_partition_service.stream_metastore_partitions.js
@@ -1,4 +1,4 @@
-// Copyright 2025 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
diff --git a/handwritten/bigquery-storage/samples/generated/v1beta/snippet_metadata_google.cloud.bigquery.storage.v1beta.json b/handwritten/bigquery-storage/samples/generated/v1beta/snippet_metadata_google.cloud.bigquery.storage.v1beta.json
index 12b215c9741a..934ce0384000 100644
--- a/handwritten/bigquery-storage/samples/generated/v1beta/snippet_metadata_google.cloud.bigquery.storage.v1beta.json
+++ b/handwritten/bigquery-storage/samples/generated/v1beta/snippet_metadata_google.cloud.bigquery.storage.v1beta.json
@@ -1,259 +1,259 @@
{
- "clientLibrary": {
- "name": "nodejs-storage",
- "version": "5.1.0",
- "language": "TYPESCRIPT",
- "apis": [
- {
- "id": "google.cloud.bigquery.storage.v1beta",
- "version": "v1beta"
- }
- ]
- },
- "snippets": [
+ "clientLibrary": {
+ "name": "nodejs-storage",
+ "version": "0.1.0",
+ "language": "TYPESCRIPT",
+ "apis": [
+ {
+ "id": "google.cloud.bigquery.storage.v1beta",
+ "version": "v1beta"
+ }
+ ]
+ },
+ "snippets": [
+ {
+ "regionTag": "bigquerystorage_v1beta_generated_MetastorePartitionService_BatchCreateMetastorePartitions_async",
+ "title": "MetastorePartitionService batchCreateMetastorePartitions Sample",
+ "origin": "API_DEFINITION",
+ "description": " Adds metastore partitions to a table.",
+ "canonical": true,
+ "file": "metastore_partition_service.batch_create_metastore_partitions.js",
+ "language": "JAVASCRIPT",
+ "segments": [
{
- "regionTag": "bigquerystorage_v1beta_generated_MetastorePartitionService_BatchCreateMetastorePartitions_async",
- "title": "MetastorePartitionService batchCreateMetastorePartitions Sample",
- "origin": "API_DEFINITION",
- "description": " Adds metastore partitions to a table.",
- "canonical": true,
- "file": "metastore_partition_service.batch_create_metastore_partitions.js",
- "language": "JAVASCRIPT",
- "segments": [
- {
- "start": 25,
- "end": 76,
- "type": "FULL"
- }
- ],
- "clientMethod": {
- "shortName": "BatchCreateMetastorePartitions",
- "fullName": "google.cloud.bigquery.storage.v1beta.MetastorePartitionService.BatchCreateMetastorePartitions",
- "async": true,
- "parameters": [
- {
- "name": "parent",
- "type": "TYPE_STRING"
- },
- {
- "name": "requests",
- "type": "TYPE_MESSAGE[]"
- },
- {
- "name": "skip_existing_partitions",
- "type": "TYPE_BOOL"
- },
- {
- "name": "trace_id",
- "type": "TYPE_STRING"
- }
- ],
- "resultType": ".google.cloud.bigquery.storage.v1beta.BatchCreateMetastorePartitionsResponse",
- "client": {
- "shortName": "MetastorePartitionServiceClient",
- "fullName": "google.cloud.bigquery.storage.v1beta.MetastorePartitionServiceClient"
- },
- "method": {
- "shortName": "BatchCreateMetastorePartitions",
- "fullName": "google.cloud.bigquery.storage.v1beta.MetastorePartitionService.BatchCreateMetastorePartitions",
- "service": {
- "shortName": "MetastorePartitionService",
- "fullName": "google.cloud.bigquery.storage.v1beta.MetastorePartitionService"
- }
- }
- }
+ "start": 25,
+ "end": 76,
+ "type": "FULL"
+ }
+ ],
+ "clientMethod": {
+ "shortName": "BatchCreateMetastorePartitions",
+ "fullName": "google.cloud.bigquery.storage.v1beta.MetastorePartitionService.BatchCreateMetastorePartitions",
+ "async": true,
+ "parameters": [
+ {
+ "name": "parent",
+ "type": "TYPE_STRING"
+ },
+ {
+ "name": "requests",
+ "type": "TYPE_MESSAGE[]"
+ },
+ {
+ "name": "skip_existing_partitions",
+ "type": "TYPE_BOOL"
+ },
+ {
+ "name": "trace_id",
+ "type": "TYPE_STRING"
+ }
+ ],
+ "resultType": ".google.cloud.bigquery.storage.v1beta.BatchCreateMetastorePartitionsResponse",
+ "client": {
+ "shortName": "MetastorePartitionServiceClient",
+ "fullName": "google.cloud.bigquery.storage.v1beta.MetastorePartitionServiceClient"
},
+ "method": {
+ "shortName": "BatchCreateMetastorePartitions",
+ "fullName": "google.cloud.bigquery.storage.v1beta.MetastorePartitionService.BatchCreateMetastorePartitions",
+ "service": {
+ "shortName": "MetastorePartitionService",
+ "fullName": "google.cloud.bigquery.storage.v1beta.MetastorePartitionService"
+ }
+ }
+ }
+ },
+ {
+ "regionTag": "bigquerystorage_v1beta_generated_MetastorePartitionService_BatchDeleteMetastorePartitions_async",
+ "title": "MetastorePartitionService batchDeleteMetastorePartitions Sample",
+ "origin": "API_DEFINITION",
+ "description": " Deletes metastore partitions from a table.",
+ "canonical": true,
+ "file": "metastore_partition_service.batch_delete_metastore_partitions.js",
+ "language": "JAVASCRIPT",
+ "segments": [
{
- "regionTag": "bigquerystorage_v1beta_generated_MetastorePartitionService_BatchDeleteMetastorePartitions_async",
- "title": "MetastorePartitionService batchDeleteMetastorePartitions Sample",
- "origin": "API_DEFINITION",
- "description": " Deletes metastore partitions from a table.",
- "canonical": true,
- "file": "metastore_partition_service.batch_delete_metastore_partitions.js",
- "language": "JAVASCRIPT",
- "segments": [
- {
- "start": 25,
- "end": 69,
- "type": "FULL"
- }
- ],
- "clientMethod": {
- "shortName": "BatchDeleteMetastorePartitions",
- "fullName": "google.cloud.bigquery.storage.v1beta.MetastorePartitionService.BatchDeleteMetastorePartitions",
- "async": true,
- "parameters": [
- {
- "name": "parent",
- "type": "TYPE_STRING"
- },
- {
- "name": "partition_values",
- "type": "TYPE_MESSAGE[]"
- },
- {
- "name": "trace_id",
- "type": "TYPE_STRING"
- }
- ],
- "resultType": ".google.protobuf.Empty",
- "client": {
- "shortName": "MetastorePartitionServiceClient",
- "fullName": "google.cloud.bigquery.storage.v1beta.MetastorePartitionServiceClient"
- },
- "method": {
- "shortName": "BatchDeleteMetastorePartitions",
- "fullName": "google.cloud.bigquery.storage.v1beta.MetastorePartitionService.BatchDeleteMetastorePartitions",
- "service": {
- "shortName": "MetastorePartitionService",
- "fullName": "google.cloud.bigquery.storage.v1beta.MetastorePartitionService"
- }
- }
- }
+ "start": 25,
+ "end": 69,
+ "type": "FULL"
+ }
+ ],
+ "clientMethod": {
+ "shortName": "BatchDeleteMetastorePartitions",
+ "fullName": "google.cloud.bigquery.storage.v1beta.MetastorePartitionService.BatchDeleteMetastorePartitions",
+ "async": true,
+ "parameters": [
+ {
+ "name": "parent",
+ "type": "TYPE_STRING"
+ },
+ {
+ "name": "partition_values",
+ "type": "TYPE_MESSAGE[]"
+ },
+ {
+ "name": "trace_id",
+ "type": "TYPE_STRING"
+ }
+ ],
+ "resultType": ".google.protobuf.Empty",
+ "client": {
+ "shortName": "MetastorePartitionServiceClient",
+ "fullName": "google.cloud.bigquery.storage.v1beta.MetastorePartitionServiceClient"
},
+ "method": {
+ "shortName": "BatchDeleteMetastorePartitions",
+ "fullName": "google.cloud.bigquery.storage.v1beta.MetastorePartitionService.BatchDeleteMetastorePartitions",
+ "service": {
+ "shortName": "MetastorePartitionService",
+ "fullName": "google.cloud.bigquery.storage.v1beta.MetastorePartitionService"
+ }
+ }
+ }
+ },
+ {
+ "regionTag": "bigquerystorage_v1beta_generated_MetastorePartitionService_BatchUpdateMetastorePartitions_async",
+ "title": "MetastorePartitionService batchUpdateMetastorePartitions Sample",
+ "origin": "API_DEFINITION",
+ "description": " Updates metastore partitions in a table.",
+ "canonical": true,
+ "file": "metastore_partition_service.batch_update_metastore_partitions.js",
+ "language": "JAVASCRIPT",
+ "segments": [
{
- "regionTag": "bigquerystorage_v1beta_generated_MetastorePartitionService_BatchUpdateMetastorePartitions_async",
- "title": "MetastorePartitionService batchUpdateMetastorePartitions Sample",
- "origin": "API_DEFINITION",
- "description": " Updates metastore partitions in a table.",
- "canonical": true,
- "file": "metastore_partition_service.batch_update_metastore_partitions.js",
- "language": "JAVASCRIPT",
- "segments": [
- {
- "start": 25,
- "end": 68,
- "type": "FULL"
- }
- ],
- "clientMethod": {
- "shortName": "BatchUpdateMetastorePartitions",
- "fullName": "google.cloud.bigquery.storage.v1beta.MetastorePartitionService.BatchUpdateMetastorePartitions",
- "async": true,
- "parameters": [
- {
- "name": "parent",
- "type": "TYPE_STRING"
- },
- {
- "name": "requests",
- "type": "TYPE_MESSAGE[]"
- },
- {
- "name": "trace_id",
- "type": "TYPE_STRING"
- }
- ],
- "resultType": ".google.cloud.bigquery.storage.v1beta.BatchUpdateMetastorePartitionsResponse",
- "client": {
- "shortName": "MetastorePartitionServiceClient",
- "fullName": "google.cloud.bigquery.storage.v1beta.MetastorePartitionServiceClient"
- },
- "method": {
- "shortName": "BatchUpdateMetastorePartitions",
- "fullName": "google.cloud.bigquery.storage.v1beta.MetastorePartitionService.BatchUpdateMetastorePartitions",
- "service": {
- "shortName": "MetastorePartitionService",
- "fullName": "google.cloud.bigquery.storage.v1beta.MetastorePartitionService"
- }
- }
- }
+ "start": 25,
+ "end": 68,
+ "type": "FULL"
+ }
+ ],
+ "clientMethod": {
+ "shortName": "BatchUpdateMetastorePartitions",
+ "fullName": "google.cloud.bigquery.storage.v1beta.MetastorePartitionService.BatchUpdateMetastorePartitions",
+ "async": true,
+ "parameters": [
+ {
+ "name": "parent",
+ "type": "TYPE_STRING"
+ },
+ {
+ "name": "requests",
+ "type": "TYPE_MESSAGE[]"
+ },
+ {
+ "name": "trace_id",
+ "type": "TYPE_STRING"
+ }
+ ],
+ "resultType": ".google.cloud.bigquery.storage.v1beta.BatchUpdateMetastorePartitionsResponse",
+ "client": {
+ "shortName": "MetastorePartitionServiceClient",
+ "fullName": "google.cloud.bigquery.storage.v1beta.MetastorePartitionServiceClient"
},
+ "method": {
+ "shortName": "BatchUpdateMetastorePartitions",
+ "fullName": "google.cloud.bigquery.storage.v1beta.MetastorePartitionService.BatchUpdateMetastorePartitions",
+ "service": {
+ "shortName": "MetastorePartitionService",
+ "fullName": "google.cloud.bigquery.storage.v1beta.MetastorePartitionService"
+ }
+ }
+ }
+ },
+ {
+ "regionTag": "bigquerystorage_v1beta_generated_MetastorePartitionService_ListMetastorePartitions_async",
+ "title": "MetastorePartitionService listMetastorePartitions Sample",
+ "origin": "API_DEFINITION",
+ "description": " Gets metastore partitions from a table.",
+ "canonical": true,
+ "file": "metastore_partition_service.list_metastore_partitions.js",
+ "language": "JAVASCRIPT",
+ "segments": [
{
- "regionTag": "bigquerystorage_v1beta_generated_MetastorePartitionService_ListMetastorePartitions_async",
- "title": "MetastorePartitionService listMetastorePartitions Sample",
- "origin": "API_DEFINITION",
- "description": " Gets metastore partitions from a table.",
- "canonical": true,
- "file": "metastore_partition_service.list_metastore_partitions.js",
- "language": "JAVASCRIPT",
- "segments": [
- {
- "start": 25,
- "end": 76,
- "type": "FULL"
- }
- ],
- "clientMethod": {
- "shortName": "ListMetastorePartitions",
- "fullName": "google.cloud.bigquery.storage.v1beta.MetastorePartitionService.ListMetastorePartitions",
- "async": true,
- "parameters": [
- {
- "name": "parent",
- "type": "TYPE_STRING"
- },
- {
- "name": "filter",
- "type": "TYPE_STRING"
- },
- {
- "name": "trace_id",
- "type": "TYPE_STRING"
- }
- ],
- "resultType": ".google.cloud.bigquery.storage.v1beta.ListMetastorePartitionsResponse",
- "client": {
- "shortName": "MetastorePartitionServiceClient",
- "fullName": "google.cloud.bigquery.storage.v1beta.MetastorePartitionServiceClient"
- },
- "method": {
- "shortName": "ListMetastorePartitions",
- "fullName": "google.cloud.bigquery.storage.v1beta.MetastorePartitionService.ListMetastorePartitions",
- "service": {
- "shortName": "MetastorePartitionService",
- "fullName": "google.cloud.bigquery.storage.v1beta.MetastorePartitionService"
- }
- }
- }
+ "start": 25,
+ "end": 76,
+ "type": "FULL"
+ }
+ ],
+ "clientMethod": {
+ "shortName": "ListMetastorePartitions",
+ "fullName": "google.cloud.bigquery.storage.v1beta.MetastorePartitionService.ListMetastorePartitions",
+ "async": true,
+ "parameters": [
+ {
+ "name": "parent",
+ "type": "TYPE_STRING"
+ },
+ {
+ "name": "filter",
+ "type": "TYPE_STRING"
+ },
+ {
+ "name": "trace_id",
+ "type": "TYPE_STRING"
+ }
+ ],
+ "resultType": ".google.cloud.bigquery.storage.v1beta.ListMetastorePartitionsResponse",
+ "client": {
+ "shortName": "MetastorePartitionServiceClient",
+ "fullName": "google.cloud.bigquery.storage.v1beta.MetastorePartitionServiceClient"
},
+ "method": {
+ "shortName": "ListMetastorePartitions",
+ "fullName": "google.cloud.bigquery.storage.v1beta.MetastorePartitionService.ListMetastorePartitions",
+ "service": {
+ "shortName": "MetastorePartitionService",
+ "fullName": "google.cloud.bigquery.storage.v1beta.MetastorePartitionService"
+ }
+ }
+ }
+ },
+ {
+ "regionTag": "bigquerystorage_v1beta_generated_MetastorePartitionService_StreamMetastorePartitions_async",
+ "title": "MetastorePartitionService streamMetastorePartitions Sample",
+ "origin": "API_DEFINITION",
+ "description": " This is a bi-di streaming rpc method that allows the client to send a stream of partitions and commit all of them atomically at the end. If the commit is successful, the server will return a response and close the stream. If the commit fails (due to duplicate partitions or other reason), the server will close the stream with an error. This method is only available via the gRPC API (not REST).",
+ "canonical": true,
+ "file": "metastore_partition_service.stream_metastore_partitions.js",
+ "language": "JAVASCRIPT",
+ "segments": [
{
- "regionTag": "bigquerystorage_v1beta_generated_MetastorePartitionService_StreamMetastorePartitions_async",
- "title": "MetastorePartitionService streamMetastorePartitions Sample",
- "origin": "API_DEFINITION",
- "description": " This is a bi-di streaming rpc method that allows the client to send a stream of partitions and commit all of them atomically at the end. If the commit is successful, the server will return a response and close the stream. If the commit fails (due to duplicate partitions or other reason), the server will close the stream with an error. This method is only available via the gRPC API (not REST).",
- "canonical": true,
- "file": "metastore_partition_service.stream_metastore_partitions.js",
- "language": "JAVASCRIPT",
- "segments": [
- {
- "start": 25,
- "end": 74,
- "type": "FULL"
- }
- ],
- "clientMethod": {
- "shortName": "StreamMetastorePartitions",
- "fullName": "google.cloud.bigquery.storage.v1beta.MetastorePartitionService.StreamMetastorePartitions",
- "async": true,
- "parameters": [
- {
- "name": "parent",
- "type": "TYPE_STRING"
- },
- {
- "name": "metastore_partitions",
- "type": "TYPE_MESSAGE[]"
- },
- {
- "name": "skip_existing_partitions",
- "type": "TYPE_BOOL"
- }
- ],
- "resultType": ".google.cloud.bigquery.storage.v1beta.StreamMetastorePartitionsResponse",
- "client": {
- "shortName": "MetastorePartitionServiceClient",
- "fullName": "google.cloud.bigquery.storage.v1beta.MetastorePartitionServiceClient"
- },
- "method": {
- "shortName": "StreamMetastorePartitions",
- "fullName": "google.cloud.bigquery.storage.v1beta.MetastorePartitionService.StreamMetastorePartitions",
- "service": {
- "shortName": "MetastorePartitionService",
- "fullName": "google.cloud.bigquery.storage.v1beta.MetastorePartitionService"
- }
- }
- }
+ "start": 25,
+ "end": 74,
+ "type": "FULL"
}
- ]
-}
\ No newline at end of file
+ ],
+ "clientMethod": {
+ "shortName": "StreamMetastorePartitions",
+ "fullName": "google.cloud.bigquery.storage.v1beta.MetastorePartitionService.StreamMetastorePartitions",
+ "async": true,
+ "parameters": [
+ {
+ "name": "parent",
+ "type": "TYPE_STRING"
+ },
+ {
+ "name": "metastore_partitions",
+ "type": "TYPE_MESSAGE[]"
+ },
+ {
+ "name": "skip_existing_partitions",
+ "type": "TYPE_BOOL"
+ }
+ ],
+ "resultType": ".google.cloud.bigquery.storage.v1beta.StreamMetastorePartitionsResponse",
+ "client": {
+ "shortName": "MetastorePartitionServiceClient",
+ "fullName": "google.cloud.bigquery.storage.v1beta.MetastorePartitionServiceClient"
+ },
+ "method": {
+ "shortName": "StreamMetastorePartitions",
+ "fullName": "google.cloud.bigquery.storage.v1beta.MetastorePartitionService.StreamMetastorePartitions",
+ "service": {
+ "shortName": "MetastorePartitionService",
+ "fullName": "google.cloud.bigquery.storage.v1beta.MetastorePartitionService"
+ }
+ }
+ }
+ }
+ ]
+}
diff --git a/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.batch_create_read_session_streams.js b/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.batch_create_read_session_streams.js
index ab34427123c0..b02f97001fc4 100644
--- a/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.batch_create_read_session_streams.js
+++ b/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.batch_create_read_session_streams.js
@@ -1,4 +1,4 @@
-// Copyright 2025 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
diff --git a/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.create_read_session.js b/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.create_read_session.js
index 7b75eb954cdb..68c773ef506e 100644
--- a/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.create_read_session.js
+++ b/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.create_read_session.js
@@ -1,4 +1,4 @@
-// Copyright 2025 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
diff --git a/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.finalize_stream.js b/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.finalize_stream.js
index 9f23c6a54dbf..9d2a5912080f 100644
--- a/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.finalize_stream.js
+++ b/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.finalize_stream.js
@@ -1,4 +1,4 @@
-// Copyright 2025 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
diff --git a/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.read_rows.js b/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.read_rows.js
index 8dc6da158b4f..6b8aec2af86d 100644
--- a/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.read_rows.js
+++ b/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.read_rows.js
@@ -1,4 +1,4 @@
-// Copyright 2025 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
diff --git a/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.split_read_stream.js b/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.split_read_stream.js
index e3fad01c93db..6a77bbf294a5 100644
--- a/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.split_read_stream.js
+++ b/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.split_read_stream.js
@@ -1,4 +1,4 @@
-// Copyright 2025 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
diff --git a/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata_google.cloud.bigquery.storage.v1beta1.json b/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata_google.cloud.bigquery.storage.v1beta1.json
index 8ced04685a1d..39cb1d2c8692 100644
--- a/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata_google.cloud.bigquery.storage.v1beta1.json
+++ b/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata_google.cloud.bigquery.storage.v1beta1.json
@@ -1,247 +1,247 @@
{
- "clientLibrary": {
- "name": "nodejs-storage",
- "version": "5.1.0",
- "language": "TYPESCRIPT",
- "apis": [
- {
- "id": "google.cloud.bigquery.storage.v1beta1",
- "version": "v1beta1"
- }
- ]
- },
- "snippets": [
+ "clientLibrary": {
+ "name": "nodejs-storage",
+ "version": "0.1.0",
+ "language": "TYPESCRIPT",
+ "apis": [
+ {
+ "id": "google.cloud.bigquery.storage.v1beta1",
+ "version": "v1beta1"
+ }
+ ]
+ },
+ "snippets": [
+ {
+ "regionTag": "bigquerystorage_v1beta1_generated_BigQueryStorage_CreateReadSession_async",
+ "title": "BigQueryStorage createReadSession Sample",
+ "origin": "API_DEFINITION",
+ "description": " Creates a new read session. A read session divides the contents of a BigQuery table into one or more streams, which can then be used to read data from the table. The read session also specifies properties of the data to be read, such as a list of columns or a push-down filter describing the rows to be returned. A particular row can be read by at most one stream. When the caller has reached the end of each stream in the session, then all the data in the table has been read. Read sessions automatically expire 6 hours after they are created and do not require manual clean-up by the caller.",
+ "canonical": true,
+ "file": "big_query_storage.create_read_session.js",
+ "language": "JAVASCRIPT",
+ "segments": [
{
- "regionTag": "bigquerystorage_v1beta1_generated_BigQueryStorage_CreateReadSession_async",
- "title": "BigQueryStorage createReadSession Sample",
- "origin": "API_DEFINITION",
- "description": " Creates a new read session. A read session divides the contents of a BigQuery table into one or more streams, which can then be used to read data from the table. The read session also specifies properties of the data to be read, such as a list of columns or a push-down filter describing the rows to be returned. A particular row can be read by at most one stream. When the caller has reached the end of each stream in the session, then all the data in the table has been read. Read sessions automatically expire 6 hours after they are created and do not require manual clean-up by the caller.",
- "canonical": true,
- "file": "big_query_storage.create_read_session.js",
- "language": "JAVASCRIPT",
- "segments": [
- {
- "start": 25,
- "end": 87,
- "type": "FULL"
- }
- ],
- "clientMethod": {
- "shortName": "CreateReadSession",
- "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorage.CreateReadSession",
- "async": true,
- "parameters": [
- {
- "name": "table_reference",
- "type": ".google.cloud.bigquery.storage.v1beta1.TableReference"
- },
- {
- "name": "parent",
- "type": "TYPE_STRING"
- },
- {
- "name": "table_modifiers",
- "type": ".google.cloud.bigquery.storage.v1beta1.TableModifiers"
- },
- {
- "name": "requested_streams",
- "type": "TYPE_INT32"
- },
- {
- "name": "read_options",
- "type": ".google.cloud.bigquery.storage.v1beta1.TableReadOptions"
- },
- {
- "name": "format",
- "type": ".google.cloud.bigquery.storage.v1beta1.DataFormat"
- },
- {
- "name": "sharding_strategy",
- "type": ".google.cloud.bigquery.storage.v1beta1.ShardingStrategy"
- }
- ],
- "resultType": ".google.cloud.bigquery.storage.v1beta1.ReadSession",
- "client": {
- "shortName": "BigQueryStorageClient",
- "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorageClient"
- },
- "method": {
- "shortName": "CreateReadSession",
- "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorage.CreateReadSession",
- "service": {
- "shortName": "BigQueryStorage",
- "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorage"
- }
- }
- }
+ "start": 25,
+ "end": 87,
+ "type": "FULL"
+ }
+ ],
+ "clientMethod": {
+ "shortName": "CreateReadSession",
+ "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorage.CreateReadSession",
+ "async": true,
+ "parameters": [
+ {
+ "name": "table_reference",
+ "type": ".google.cloud.bigquery.storage.v1beta1.TableReference"
+ },
+ {
+ "name": "parent",
+ "type": "TYPE_STRING"
+ },
+ {
+ "name": "table_modifiers",
+ "type": ".google.cloud.bigquery.storage.v1beta1.TableModifiers"
+ },
+ {
+ "name": "requested_streams",
+ "type": "TYPE_INT32"
+ },
+ {
+ "name": "read_options",
+ "type": ".google.cloud.bigquery.storage.v1beta1.TableReadOptions"
+ },
+ {
+ "name": "format",
+ "type": ".google.cloud.bigquery.storage.v1beta1.DataFormat"
+ },
+ {
+ "name": "sharding_strategy",
+ "type": ".google.cloud.bigquery.storage.v1beta1.ShardingStrategy"
+ }
+ ],
+ "resultType": ".google.cloud.bigquery.storage.v1beta1.ReadSession",
+ "client": {
+ "shortName": "BigQueryStorageClient",
+ "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorageClient"
},
+ "method": {
+ "shortName": "CreateReadSession",
+ "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorage.CreateReadSession",
+ "service": {
+ "shortName": "BigQueryStorage",
+ "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorage"
+ }
+ }
+ }
+ },
+ {
+ "regionTag": "bigquerystorage_v1beta1_generated_BigQueryStorage_ReadRows_async",
+ "title": "BigQueryStorage readRows Sample",
+ "origin": "API_DEFINITION",
+ "description": " Reads rows from the table in the format prescribed by the read session. Each response contains one or more table rows, up to a maximum of 10 MiB per response; read requests which attempt to read individual rows larger than this will fail. Each request also returns a set of stream statistics reflecting the estimated total number of rows in the read stream. This number is computed based on the total table size and the number of active streams in the read session, and may change as other streams continue to read data.",
+ "canonical": true,
+ "file": "big_query_storage.read_rows.js",
+ "language": "JAVASCRIPT",
+ "segments": [
{
- "regionTag": "bigquerystorage_v1beta1_generated_BigQueryStorage_ReadRows_async",
- "title": "BigQueryStorage readRows Sample",
- "origin": "API_DEFINITION",
- "description": " Reads rows from the table in the format prescribed by the read session. Each response contains one or more table rows, up to a maximum of 10 MiB per response; read requests which attempt to read individual rows larger than this will fail. Each request also returns a set of stream statistics reflecting the estimated total number of rows in the read stream. This number is computed based on the total table size and the number of active streams in the read session, and may change as other streams continue to read data.",
- "canonical": true,
- "file": "big_query_storage.read_rows.js",
- "language": "JAVASCRIPT",
- "segments": [
- {
- "start": 25,
- "end": 57,
- "type": "FULL"
- }
- ],
- "clientMethod": {
- "shortName": "ReadRows",
- "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorage.ReadRows",
- "async": true,
- "parameters": [
- {
- "name": "read_position",
- "type": ".google.cloud.bigquery.storage.v1beta1.StreamPosition"
- }
- ],
- "resultType": ".google.cloud.bigquery.storage.v1beta1.ReadRowsResponse",
- "client": {
- "shortName": "BigQueryStorageClient",
- "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorageClient"
- },
- "method": {
- "shortName": "ReadRows",
- "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorage.ReadRows",
- "service": {
- "shortName": "BigQueryStorage",
- "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorage"
- }
- }
- }
+ "start": 25,
+ "end": 57,
+ "type": "FULL"
+ }
+ ],
+ "clientMethod": {
+ "shortName": "ReadRows",
+ "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorage.ReadRows",
+ "async": true,
+ "parameters": [
+ {
+ "name": "read_position",
+ "type": ".google.cloud.bigquery.storage.v1beta1.StreamPosition"
+ }
+ ],
+ "resultType": ".google.cloud.bigquery.storage.v1beta1.ReadRowsResponse",
+ "client": {
+ "shortName": "BigQueryStorageClient",
+ "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorageClient"
},
+ "method": {
+ "shortName": "ReadRows",
+ "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorage.ReadRows",
+ "service": {
+ "shortName": "BigQueryStorage",
+ "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorage"
+ }
+ }
+ }
+ },
+ {
+ "regionTag": "bigquerystorage_v1beta1_generated_BigQueryStorage_BatchCreateReadSessionStreams_async",
+ "title": "BigQueryStorage batchCreateReadSessionStreams Sample",
+ "origin": "API_DEFINITION",
+ "description": " Creates additional streams for a ReadSession. This API can be used to dynamically adjust the parallelism of a batch processing task upwards by adding additional workers.",
+ "canonical": true,
+ "file": "big_query_storage.batch_create_read_session_streams.js",
+ "language": "JAVASCRIPT",
+ "segments": [
{
- "regionTag": "bigquerystorage_v1beta1_generated_BigQueryStorage_BatchCreateReadSessionStreams_async",
- "title": "BigQueryStorage batchCreateReadSessionStreams Sample",
- "origin": "API_DEFINITION",
- "description": " Creates additional streams for a ReadSession. This API can be used to dynamically adjust the parallelism of a batch processing task upwards by adding additional workers.",
- "canonical": true,
- "file": "big_query_storage.batch_create_read_session_streams.js",
- "language": "JAVASCRIPT",
- "segments": [
- {
- "start": 25,
- "end": 61,
- "type": "FULL"
- }
- ],
- "clientMethod": {
- "shortName": "BatchCreateReadSessionStreams",
- "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorage.BatchCreateReadSessionStreams",
- "async": true,
- "parameters": [
- {
- "name": "session",
- "type": ".google.cloud.bigquery.storage.v1beta1.ReadSession"
- },
- {
- "name": "requested_streams",
- "type": "TYPE_INT32"
- }
- ],
- "resultType": ".google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse",
- "client": {
- "shortName": "BigQueryStorageClient",
- "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorageClient"
- },
- "method": {
- "shortName": "BatchCreateReadSessionStreams",
- "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorage.BatchCreateReadSessionStreams",
- "service": {
- "shortName": "BigQueryStorage",
- "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorage"
- }
- }
- }
+ "start": 25,
+ "end": 61,
+ "type": "FULL"
+ }
+ ],
+ "clientMethod": {
+ "shortName": "BatchCreateReadSessionStreams",
+ "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorage.BatchCreateReadSessionStreams",
+ "async": true,
+ "parameters": [
+ {
+ "name": "session",
+ "type": ".google.cloud.bigquery.storage.v1beta1.ReadSession"
+ },
+ {
+ "name": "requested_streams",
+ "type": "TYPE_INT32"
+ }
+ ],
+ "resultType": ".google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse",
+ "client": {
+ "shortName": "BigQueryStorageClient",
+ "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorageClient"
},
+ "method": {
+ "shortName": "BatchCreateReadSessionStreams",
+ "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorage.BatchCreateReadSessionStreams",
+ "service": {
+ "shortName": "BigQueryStorage",
+ "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorage"
+ }
+ }
+ }
+ },
+ {
+ "regionTag": "bigquerystorage_v1beta1_generated_BigQueryStorage_FinalizeStream_async",
+ "title": "BigQueryStorage finalizeStream Sample",
+ "origin": "API_DEFINITION",
+ "description": " Causes a single stream in a ReadSession to gracefully stop. This API can be used to dynamically adjust the parallelism of a batch processing task downwards without losing data. This API does not delete the stream -- it remains visible in the ReadSession, and any data processed by the stream is not released to other streams. However, no additional data will be assigned to the stream once this call completes. Callers must continue reading data on the stream until the end of the stream is reached so that data which has already been assigned to the stream will be processed. This method will return an error if there are no other live streams in the Session, or if SplitReadStream() has been called on the given Stream.",
+ "canonical": true,
+ "file": "big_query_storage.finalize_stream.js",
+ "language": "JAVASCRIPT",
+ "segments": [
{
- "regionTag": "bigquerystorage_v1beta1_generated_BigQueryStorage_FinalizeStream_async",
- "title": "BigQueryStorage finalizeStream Sample",
- "origin": "API_DEFINITION",
- "description": " Causes a single stream in a ReadSession to gracefully stop. This API can be used to dynamically adjust the parallelism of a batch processing task downwards without losing data. This API does not delete the stream -- it remains visible in the ReadSession, and any data processed by the stream is not released to other streams. However, no additional data will be assigned to the stream once this call completes. Callers must continue reading data on the stream until the end of the stream is reached so that data which has already been assigned to the stream will be processed. This method will return an error if there are no other live streams in the Session, or if SplitReadStream() has been called on the given Stream.",
- "canonical": true,
- "file": "big_query_storage.finalize_stream.js",
- "language": "JAVASCRIPT",
- "segments": [
- {
- "start": 25,
- "end": 53,
- "type": "FULL"
- }
- ],
- "clientMethod": {
- "shortName": "FinalizeStream",
- "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorage.FinalizeStream",
- "async": true,
- "parameters": [
- {
- "name": "stream",
- "type": ".google.cloud.bigquery.storage.v1beta1.Stream"
- }
- ],
- "resultType": ".google.protobuf.Empty",
- "client": {
- "shortName": "BigQueryStorageClient",
- "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorageClient"
- },
- "method": {
- "shortName": "FinalizeStream",
- "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorage.FinalizeStream",
- "service": {
- "shortName": "BigQueryStorage",
- "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorage"
- }
- }
- }
+ "start": 25,
+ "end": 53,
+ "type": "FULL"
+ }
+ ],
+ "clientMethod": {
+ "shortName": "FinalizeStream",
+ "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorage.FinalizeStream",
+ "async": true,
+ "parameters": [
+ {
+ "name": "stream",
+ "type": ".google.cloud.bigquery.storage.v1beta1.Stream"
+ }
+ ],
+ "resultType": ".google.protobuf.Empty",
+ "client": {
+ "shortName": "BigQueryStorageClient",
+ "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorageClient"
},
+ "method": {
+ "shortName": "FinalizeStream",
+ "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorage.FinalizeStream",
+ "service": {
+ "shortName": "BigQueryStorage",
+ "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorage"
+ }
+ }
+ }
+ },
+ {
+ "regionTag": "bigquerystorage_v1beta1_generated_BigQueryStorage_SplitReadStream_async",
+ "title": "BigQueryStorage splitReadStream Sample",
+ "origin": "API_DEFINITION",
+ "description": " Splits a given read stream into two Streams. These streams are referred to as the primary and the residual of the split. The original stream can still be read from in the same manner as before. Both of the returned streams can also be read from, and the total rows return by both child streams will be the same as the rows read from the original stream. Moreover, the two child streams will be allocated back to back in the original Stream. Concretely, it is guaranteed that for streams Original, Primary, and Residual, that Original[0-j] = Primary[0-j] and Original[j-n] = Residual[0-m] once the streams have been read to completion. This method is guaranteed to be idempotent.",
+ "canonical": true,
+ "file": "big_query_storage.split_read_stream.js",
+ "language": "JAVASCRIPT",
+ "segments": [
{
- "regionTag": "bigquerystorage_v1beta1_generated_BigQueryStorage_SplitReadStream_async",
- "title": "BigQueryStorage splitReadStream Sample",
- "origin": "API_DEFINITION",
- "description": " Splits a given read stream into two Streams. These streams are referred to as the primary and the residual of the split. The original stream can still be read from in the same manner as before. Both of the returned streams can also be read from, and the total rows return by both child streams will be the same as the rows read from the original stream. Moreover, the two child streams will be allocated back to back in the original Stream. Concretely, it is guaranteed that for streams Original, Primary, and Residual, that Original[0-j] = Primary[0-j] and Original[j-n] = Residual[0-m] once the streams have been read to completion. This method is guaranteed to be idempotent.",
- "canonical": true,
- "file": "big_query_storage.split_read_stream.js",
- "language": "JAVASCRIPT",
- "segments": [
- {
- "start": 25,
- "end": 63,
- "type": "FULL"
- }
- ],
- "clientMethod": {
- "shortName": "SplitReadStream",
- "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorage.SplitReadStream",
- "async": true,
- "parameters": [
- {
- "name": "original_stream",
- "type": ".google.cloud.bigquery.storage.v1beta1.Stream"
- },
- {
- "name": "fraction",
- "type": "TYPE_FLOAT"
- }
- ],
- "resultType": ".google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse",
- "client": {
- "shortName": "BigQueryStorageClient",
- "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorageClient"
- },
- "method": {
- "shortName": "SplitReadStream",
- "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorage.SplitReadStream",
- "service": {
- "shortName": "BigQueryStorage",
- "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorage"
- }
- }
- }
+ "start": 25,
+ "end": 63,
+ "type": "FULL"
}
- ]
-}
\ No newline at end of file
+ ],
+ "clientMethod": {
+ "shortName": "SplitReadStream",
+ "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorage.SplitReadStream",
+ "async": true,
+ "parameters": [
+ {
+ "name": "original_stream",
+ "type": ".google.cloud.bigquery.storage.v1beta1.Stream"
+ },
+ {
+ "name": "fraction",
+ "type": "TYPE_FLOAT"
+ }
+ ],
+ "resultType": ".google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse",
+ "client": {
+ "shortName": "BigQueryStorageClient",
+ "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorageClient"
+ },
+ "method": {
+ "shortName": "SplitReadStream",
+ "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorage.SplitReadStream",
+ "service": {
+ "shortName": "BigQueryStorage",
+ "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorage"
+ }
+ }
+ }
+ }
+ ]
+}
diff --git a/handwritten/bigquery-storage/samples/generated/v1beta2/big_query_read.create_read_session.js b/handwritten/bigquery-storage/samples/generated/v1beta2/big_query_read.create_read_session.js
new file mode 100644
index 000000000000..bbf8b2a7ab25
--- /dev/null
+++ b/handwritten/bigquery-storage/samples/generated/v1beta2/big_query_read.create_read_session.js
@@ -0,0 +1,77 @@
+// Copyright 2026 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ** This file is automatically generated by gapic-generator-typescript. **
+// ** https://github.com/googleapis/gapic-generator-typescript **
+// ** All changes to this file may be overwritten. **
+
+
+
+'use strict';
+
+function main(parent, readSession) {
+ // [START bigquerystorage_v1beta2_generated_BigQueryRead_CreateReadSession_async]
+ /**
+ * This snippet has been automatically generated and should be regarded as a code template only.
+ * It will require modifications to work.
+ * It may require correct/in-range values for request initialization.
+ * TODO(developer): Uncomment these variables before running the sample.
+ */
+ /**
+ * Required. The request project that owns the session, in the form of
+ * `projects/{project_id}`.
+ */
+ // const parent = 'abc123'
+ /**
+ * Required. Session to be created.
+ */
+ // const readSession = {}
+ /**
+ * Max initial number of streams. If unset or zero, the server will
+ * provide a value of streams so as to produce reasonable throughput. Must be
+ * non-negative. The number of streams may be lower than the requested number,
+ * depending on the amount parallelism that is reasonable for the table. Error
+ * will be returned if the max count is greater than the current system
+ * max limit of 1,000.
+ * Streams must be read starting from offset 0.
+ */
+ // const maxStreamCount = 1234
+
+ // Imports the Storage library
+  const {BigQueryReadClient} = require('@google-cloud/bigquery-storage').v1beta2;
+
+ // Instantiates a client
+ const storageClient = new BigQueryReadClient();
+
+ async function callCreateReadSession() {
+ // Construct request
+ const request = {
+ parent,
+ readSession,
+ };
+
+ // Run request
+ const response = await storageClient.createReadSession(request);
+ console.log(response);
+ }
+
+ callCreateReadSession();
+ // [END bigquerystorage_v1beta2_generated_BigQueryRead_CreateReadSession_async]
+}
+
+process.on('unhandledRejection', err => {
+ console.error(err.message);
+ process.exitCode = 1;
+});
+main(...process.argv.slice(2));
diff --git a/handwritten/bigquery-storage/samples/generated/v1beta2/big_query_read.read_rows.js b/handwritten/bigquery-storage/samples/generated/v1beta2/big_query_read.read_rows.js
new file mode 100644
index 000000000000..51de056a3787
--- /dev/null
+++ b/handwritten/bigquery-storage/samples/generated/v1beta2/big_query_read.read_rows.js
@@ -0,0 +1,69 @@
+// Copyright 2026 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ** This file is automatically generated by gapic-generator-typescript. **
+// ** https://github.com/googleapis/gapic-generator-typescript **
+// ** All changes to this file may be overwritten. **
+
+
+
+'use strict';
+
+function main(readStream) {
+ // [START bigquerystorage_v1beta2_generated_BigQueryRead_ReadRows_async]
+ /**
+ * This snippet has been automatically generated and should be regarded as a code template only.
+ * It will require modifications to work.
+ * It may require correct/in-range values for request initialization.
+ * TODO(developer): Uncomment these variables before running the sample.
+ */
+ /**
+ * Required. Stream to read rows from.
+ */
+ // const readStream = 'abc123'
+ /**
+ * The offset requested must be less than the last row read from Read.
+ * Requesting a larger offset is undefined. If not specified, start reading
+ * from offset zero.
+ */
+ // const offset = 1234
+
+ // Imports the Storage library
+ const {BigQueryReadClient} = require('@google-cloud/bigquery-storage').v1beta2;
+
+ // Instantiates a client
+ const storageClient = new BigQueryReadClient();
+
+ async function callReadRows() {
+ // Construct request
+ const request = {
+ readStream,
+ };
+
+ // Run request
+ const stream = await storageClient.readRows(request);
+ stream.on('data', (response) => { console.log(response) });
+ stream.on('error', (err) => { throw(err) });
+ stream.on('end', () => { /* API call completed */ });
+ }
+
+ callReadRows();
+ // [END bigquerystorage_v1beta2_generated_BigQueryRead_ReadRows_async]
+}
+
+process.on('unhandledRejection', err => {
+ console.error(err.message);
+ process.exitCode = 1;
+});
+main(...process.argv.slice(2));
diff --git a/handwritten/bigquery-storage/samples/generated/v1beta2/big_query_read.split_read_stream.js b/handwritten/bigquery-storage/samples/generated/v1beta2/big_query_read.split_read_stream.js
new file mode 100644
index 000000000000..409453802a79
--- /dev/null
+++ b/handwritten/bigquery-storage/samples/generated/v1beta2/big_query_read.split_read_stream.js
@@ -0,0 +1,71 @@
+// Copyright 2026 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ** This file is automatically generated by gapic-generator-typescript. **
+// ** https://github.com/googleapis/gapic-generator-typescript **
+// ** All changes to this file may be overwritten. **
+
+
+
+'use strict';
+
+function main(name) {
+ // [START bigquerystorage_v1beta2_generated_BigQueryRead_SplitReadStream_async]
+ /**
+ * This snippet has been automatically generated and should be regarded as a code template only.
+ * It will require modifications to work.
+ * It may require correct/in-range values for request initialization.
+ * TODO(developer): Uncomment these variables before running the sample.
+ */
+ /**
+ * Required. Name of the stream to split.
+ */
+ // const name = 'abc123'
+ /**
+ * A value in the range (0.0, 1.0) that specifies the fractional point at
+ * which the original stream should be split. The actual split point is
+ * evaluated on pre-filtered rows, so if a filter is provided, then there is
+ * no guarantee that the division of the rows between the new child streams
+ * will be proportional to this fractional value. Additionally, because the
+ * server-side unit for assigning data is collections of rows, this fraction
+ * will always map to a data storage boundary on the server side.
+ */
+ // const fraction = 1234
+
+ // Imports the Storage library
+ const {BigQueryReadClient} = require('@google-cloud/bigquery-storage').v1beta2;
+
+ // Instantiates a client
+ const storageClient = new BigQueryReadClient();
+
+ async function callSplitReadStream() {
+ // Construct request
+ const request = {
+ name,
+ };
+
+ // Run request
+ const response = await storageClient.splitReadStream(request);
+ console.log(response);
+ }
+
+ callSplitReadStream();
+ // [END bigquerystorage_v1beta2_generated_BigQueryRead_SplitReadStream_async]
+}
+
+process.on('unhandledRejection', err => {
+ console.error(err.message);
+ process.exitCode = 1;
+});
+main(...process.argv.slice(2));
diff --git a/handwritten/bigquery-storage/samples/generated/v1beta2/big_query_write.append_rows.js b/handwritten/bigquery-storage/samples/generated/v1beta2/big_query_write.append_rows.js
new file mode 100644
index 000000000000..800d70bb3399
--- /dev/null
+++ b/handwritten/bigquery-storage/samples/generated/v1beta2/big_query_write.append_rows.js
@@ -0,0 +1,85 @@
+// Copyright 2026 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ** This file is automatically generated by gapic-generator-typescript. **
+// ** https://github.com/googleapis/gapic-generator-typescript **
+// ** All changes to this file may be overwritten. **
+
+
+
+'use strict';
+
+function main(writeStream) {
+ // [START bigquerystorage_v1beta2_generated_BigQueryWrite_AppendRows_async]
+ /**
+ * This snippet has been automatically generated and should be regarded as a code template only.
+ * It will require modifications to work.
+ * It may require correct/in-range values for request initialization.
+ * TODO(developer): Uncomment these variables before running the sample.
+ */
+ /**
+ * Required. The stream that is the target of the append operation. This value
+ * must be specified for the initial request. If subsequent requests specify
+ * the stream name, it must equal to the value provided in the first request.
+ * To write to the _default stream, populate this field with a string in the
+ * format `projects/{project}/datasets/{dataset}/tables/{table}/_default`.
+ */
+ // const writeStream = 'abc123'
+ /**
+ * If present, the write is only performed if the next append offset is same
+ * as the provided value. If not present, the write is performed at the
+ * current end of stream. Specifying a value for this field is not allowed
+ * when calling AppendRows for the '_default' stream.
+ */
+ // const offset = {}
+ /**
+ * Rows in proto format.
+ */
+ // const protoRows = {}
+ /**
+ * Id set by client to annotate its identity. Only initial request setting is
+ * respected.
+ */
+ // const traceId = 'abc123'
+
+ // Imports the Storage library
+ const {BigQueryWriteClient} = require('@google-cloud/bigquery-storage').v1beta2;
+
+ // Instantiates a client
+ const storageClient = new BigQueryWriteClient();
+
+ async function callAppendRows() {
+ // Construct request
+ const request = {
+ writeStream,
+ };
+
+ // Run request
+ const stream = await storageClient.appendRows();
+ stream.on('data', (response) => { console.log(response) });
+ stream.on('error', (err) => { throw(err) });
+ stream.on('end', () => { /* API call completed */ });
+ stream.write(request);
+ stream.end();
+ }
+
+ callAppendRows();
+ // [END bigquerystorage_v1beta2_generated_BigQueryWrite_AppendRows_async]
+}
+
+process.on('unhandledRejection', err => {
+ console.error(err.message);
+ process.exitCode = 1;
+});
+main(...process.argv.slice(2));
diff --git a/handwritten/bigquery-storage/samples/generated/v1beta2/big_query_write.batch_commit_write_streams.js b/handwritten/bigquery-storage/samples/generated/v1beta2/big_query_write.batch_commit_write_streams.js
new file mode 100644
index 000000000000..ac93d4d78f41
--- /dev/null
+++ b/handwritten/bigquery-storage/samples/generated/v1beta2/big_query_write.batch_commit_write_streams.js
@@ -0,0 +1,67 @@
+// Copyright 2026 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ** This file is automatically generated by gapic-generator-typescript. **
+// ** https://github.com/googleapis/gapic-generator-typescript **
+// ** All changes to this file may be overwritten. **
+
+
+
+'use strict';
+
+function main(parent, writeStreams) {
+ // [START bigquerystorage_v1beta2_generated_BigQueryWrite_BatchCommitWriteStreams_async]
+ /**
+ * This snippet has been automatically generated and should be regarded as a code template only.
+ * It will require modifications to work.
+ * It may require correct/in-range values for request initialization.
+ * TODO(developer): Uncomment these variables before running the sample.
+ */
+ /**
+ * Required. Parent table that all the streams should belong to, in the form
+ * of `projects/{project}/datasets/{dataset}/tables/{table}`.
+ */
+ // const parent = 'abc123'
+ /**
+ * Required. The group of streams that will be committed atomically.
+ */
+ // const writeStreams = ['abc','def']
+
+ // Imports the Storage library
+ const {BigQueryWriteClient} = require('@google-cloud/bigquery-storage').v1beta2;
+
+ // Instantiates a client
+ const storageClient = new BigQueryWriteClient();
+
+ async function callBatchCommitWriteStreams() {
+ // Construct request
+ const request = {
+ parent,
+ writeStreams,
+ };
+
+ // Run request
+ const response = await storageClient.batchCommitWriteStreams(request);
+ console.log(response);
+ }
+
+ callBatchCommitWriteStreams();
+ // [END bigquerystorage_v1beta2_generated_BigQueryWrite_BatchCommitWriteStreams_async]
+}
+
+process.on('unhandledRejection', err => {
+ console.error(err.message);
+ process.exitCode = 1;
+});
+main(...process.argv.slice(2));
diff --git a/handwritten/bigquery-storage/samples/generated/v1beta2/big_query_write.create_write_stream.js b/handwritten/bigquery-storage/samples/generated/v1beta2/big_query_write.create_write_stream.js
new file mode 100644
index 000000000000..0c97464b5fd4
--- /dev/null
+++ b/handwritten/bigquery-storage/samples/generated/v1beta2/big_query_write.create_write_stream.js
@@ -0,0 +1,67 @@
+// Copyright 2026 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ** This file is automatically generated by gapic-generator-typescript. **
+// ** https://github.com/googleapis/gapic-generator-typescript **
+// ** All changes to this file may be overwritten. **
+
+
+
+'use strict';
+
+function main(parent, writeStream) {
+ // [START bigquerystorage_v1beta2_generated_BigQueryWrite_CreateWriteStream_async]
+ /**
+ * This snippet has been automatically generated and should be regarded as a code template only.
+ * It will require modifications to work.
+ * It may require correct/in-range values for request initialization.
+ * TODO(developer): Uncomment these variables before running the sample.
+ */
+ /**
+ * Required. Reference to the table to which the stream belongs, in the format
+ * of `projects/{project}/datasets/{dataset}/tables/{table}`.
+ */
+ // const parent = 'abc123'
+ /**
+ * Required. Stream to be created.
+ */
+ // const writeStream = {}
+
+ // Imports the Storage library
+ const {BigQueryWriteClient} = require('@google-cloud/bigquery-storage').v1beta2;
+
+ // Instantiates a client
+ const storageClient = new BigQueryWriteClient();
+
+ async function callCreateWriteStream() {
+ // Construct request
+ const request = {
+ parent,
+ writeStream,
+ };
+
+ // Run request
+ const response = await storageClient.createWriteStream(request);
+ console.log(response);
+ }
+
+ callCreateWriteStream();
+ // [END bigquerystorage_v1beta2_generated_BigQueryWrite_CreateWriteStream_async]
+}
+
+process.on('unhandledRejection', err => {
+ console.error(err.message);
+ process.exitCode = 1;
+});
+main(...process.argv.slice(2));
diff --git a/handwritten/bigquery-storage/samples/generated/v1beta2/big_query_write.finalize_write_stream.js b/handwritten/bigquery-storage/samples/generated/v1beta2/big_query_write.finalize_write_stream.js
new file mode 100644
index 000000000000..ccc8e99f9248
--- /dev/null
+++ b/handwritten/bigquery-storage/samples/generated/v1beta2/big_query_write.finalize_write_stream.js
@@ -0,0 +1,62 @@
+// Copyright 2026 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ** This file is automatically generated by gapic-generator-typescript. **
+// ** https://github.com/googleapis/gapic-generator-typescript **
+// ** All changes to this file may be overwritten. **
+
+
+
+'use strict';
+
+function main(name) {
+ // [START bigquerystorage_v1beta2_generated_BigQueryWrite_FinalizeWriteStream_async]
+ /**
+ * This snippet has been automatically generated and should be regarded as a code template only.
+ * It will require modifications to work.
+ * It may require correct/in-range values for request initialization.
+ * TODO(developer): Uncomment these variables before running the sample.
+ */
+ /**
+ * Required. Name of the stream to finalize, in the form of
+ * `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`.
+ */
+ // const name = 'abc123'
+
+ // Imports the Storage library
+ const {BigQueryWriteClient} = require('@google-cloud/bigquery-storage').v1beta2;
+
+ // Instantiates a client
+ const storageClient = new BigQueryWriteClient();
+
+ async function callFinalizeWriteStream() {
+ // Construct request
+ const request = {
+ name,
+ };
+
+ // Run request
+ const response = await storageClient.finalizeWriteStream(request);
+ console.log(response);
+ }
+
+ callFinalizeWriteStream();
+ // [END bigquerystorage_v1beta2_generated_BigQueryWrite_FinalizeWriteStream_async]
+}
+
+process.on('unhandledRejection', err => {
+ console.error(err.message);
+ process.exitCode = 1;
+});
+main(...process.argv.slice(2));
diff --git a/handwritten/bigquery-storage/samples/generated/v1beta2/big_query_write.flush_rows.js b/handwritten/bigquery-storage/samples/generated/v1beta2/big_query_write.flush_rows.js
new file mode 100644
index 000000000000..859953c76b01
--- /dev/null
+++ b/handwritten/bigquery-storage/samples/generated/v1beta2/big_query_write.flush_rows.js
@@ -0,0 +1,66 @@
+// Copyright 2026 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ** This file is automatically generated by gapic-generator-typescript. **
+// ** https://github.com/googleapis/gapic-generator-typescript **
+// ** All changes to this file may be overwritten. **
+
+
+
+'use strict';
+
+function main(writeStream) {
+ // [START bigquerystorage_v1beta2_generated_BigQueryWrite_FlushRows_async]
+ /**
+ * This snippet has been automatically generated and should be regarded as a code template only.
+ * It will require modifications to work.
+ * It may require correct/in-range values for request initialization.
+ * TODO(developer): Uncomment these variables before running the sample.
+ */
+ /**
+ * Required. The stream that is the target of the flush operation.
+ */
+ // const writeStream = 'abc123'
+ /**
+ * Ending offset of the flush operation. Rows before this offset (including
+ * this offset) will be flushed.
+ */
+ // const offset = {}
+
+ // Imports the Storage library
+ const {BigQueryWriteClient} = require('@google-cloud/bigquery-storage').v1beta2;
+
+ // Instantiates a client
+ const storageClient = new BigQueryWriteClient();
+
+ async function callFlushRows() {
+ // Construct request
+ const request = {
+ writeStream,
+ };
+
+ // Run request
+ const response = await storageClient.flushRows(request);
+ console.log(response);
+ }
+
+ callFlushRows();
+ // [END bigquerystorage_v1beta2_generated_BigQueryWrite_FlushRows_async]
+}
+
+process.on('unhandledRejection', err => {
+ console.error(err.message);
+ process.exitCode = 1;
+});
+main(...process.argv.slice(2));
diff --git a/handwritten/bigquery-storage/samples/generated/v1beta2/big_query_write.get_write_stream.js b/handwritten/bigquery-storage/samples/generated/v1beta2/big_query_write.get_write_stream.js
new file mode 100644
index 000000000000..a4f5f67f1302
--- /dev/null
+++ b/handwritten/bigquery-storage/samples/generated/v1beta2/big_query_write.get_write_stream.js
@@ -0,0 +1,62 @@
+// Copyright 2026 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ** This file is automatically generated by gapic-generator-typescript. **
+// ** https://github.com/googleapis/gapic-generator-typescript **
+// ** All changes to this file may be overwritten. **
+
+
+
+'use strict';
+
+function main(name) {
+ // [START bigquerystorage_v1beta2_generated_BigQueryWrite_GetWriteStream_async]
+ /**
+ * This snippet has been automatically generated and should be regarded as a code template only.
+ * It will require modifications to work.
+ * It may require correct/in-range values for request initialization.
+ * TODO(developer): Uncomment these variables before running the sample.
+ */
+ /**
+ * Required. Name of the stream to get, in the form of
+ * `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`.
+ */
+ // const name = 'abc123'
+
+ // Imports the Storage library
+ const {BigQueryWriteClient} = require('@google-cloud/bigquery-storage').v1beta2;
+
+ // Instantiates a client
+ const storageClient = new BigQueryWriteClient();
+
+ async function callGetWriteStream() {
+ // Construct request
+ const request = {
+ name,
+ };
+
+ // Run request
+ const response = await storageClient.getWriteStream(request);
+ console.log(response);
+ }
+
+ callGetWriteStream();
+ // [END bigquerystorage_v1beta2_generated_BigQueryWrite_GetWriteStream_async]
+}
+
+process.on('unhandledRejection', err => {
+ console.error(err.message);
+ process.exitCode = 1;
+});
+main(...process.argv.slice(2));
diff --git a/handwritten/bigquery-storage/samples/generated/v1beta2/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json b/handwritten/bigquery-storage/samples/generated/v1beta2/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json
new file mode 100644
index 000000000000..949ee0cf2160
--- /dev/null
+++ b/handwritten/bigquery-storage/samples/generated/v1beta2/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json
@@ -0,0 +1,415 @@
+{
+ "clientLibrary": {
+ "name": "nodejs-storage",
+ "version": "0.1.0",
+ "language": "TYPESCRIPT",
+ "apis": [
+ {
+ "id": "google.cloud.bigquery.storage.v1beta2",
+ "version": "v1beta2"
+ }
+ ]
+ },
+ "snippets": [
+ {
+ "regionTag": "bigquerystorage_v1beta2_generated_BigQueryRead_CreateReadSession_async",
+ "title": "BigQueryRead createReadSession Sample",
+ "origin": "API_DEFINITION",
+ "description": " Creates a new read session. A read session divides the contents of a BigQuery table into one or more streams, which can then be used to read data from the table. The read session also specifies properties of the data to be read, such as a list of columns or a push-down filter describing the rows to be returned. A particular row can be read by at most one stream. When the caller has reached the end of each stream in the session, then all the data in the table has been read. Data is assigned to each stream such that roughly the same number of rows can be read from each stream. Because the server-side unit for assigning data is collections of rows, the API does not guarantee that each stream will return the same number or rows. Additionally, the limits are enforced based on the number of pre-filtered rows, so some filters can lead to lopsided assignments. Read sessions automatically expire 6 hours after they are created and do not require manual clean-up by the caller.",
+ "canonical": true,
+ "file": "big_query_read.create_read_session.js",
+ "language": "JAVASCRIPT",
+ "segments": [
+ {
+ "start": 25,
+ "end": 69,
+ "type": "FULL"
+ }
+ ],
+ "clientMethod": {
+ "shortName": "CreateReadSession",
+ "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryRead.CreateReadSession",
+ "async": true,
+ "parameters": [
+ {
+ "name": "parent",
+ "type": "TYPE_STRING"
+ },
+ {
+ "name": "read_session",
+ "type": ".google.cloud.bigquery.storage.v1beta2.ReadSession"
+ },
+ {
+ "name": "max_stream_count",
+ "type": "TYPE_INT32"
+ }
+ ],
+ "resultType": ".google.cloud.bigquery.storage.v1beta2.ReadSession",
+ "client": {
+ "shortName": "BigQueryReadClient",
+ "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryReadClient"
+ },
+ "method": {
+ "shortName": "CreateReadSession",
+ "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryRead.CreateReadSession",
+ "service": {
+ "shortName": "BigQueryRead",
+ "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryRead"
+ }
+ }
+ }
+ },
+ {
+ "regionTag": "bigquerystorage_v1beta2_generated_BigQueryRead_ReadRows_async",
+ "title": "BigQueryRead readRows Sample",
+ "origin": "API_DEFINITION",
+ "description": " Reads rows from the stream in the format prescribed by the ReadSession. Each response contains one or more table rows, up to a maximum of 100 MiB per response; read requests which attempt to read individual rows larger than 100 MiB will fail. Each request also returns a set of stream statistics reflecting the current state of the stream.",
+ "canonical": true,
+ "file": "big_query_read.read_rows.js",
+ "language": "JAVASCRIPT",
+ "segments": [
+ {
+ "start": 25,
+ "end": 61,
+ "type": "FULL"
+ }
+ ],
+ "clientMethod": {
+ "shortName": "ReadRows",
+ "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryRead.ReadRows",
+ "async": true,
+ "parameters": [
+ {
+ "name": "read_stream",
+ "type": "TYPE_STRING"
+ },
+ {
+ "name": "offset",
+ "type": "TYPE_INT64"
+ }
+ ],
+ "resultType": ".google.cloud.bigquery.storage.v1beta2.ReadRowsResponse",
+ "client": {
+ "shortName": "BigQueryReadClient",
+ "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryReadClient"
+ },
+ "method": {
+ "shortName": "ReadRows",
+ "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryRead.ReadRows",
+ "service": {
+ "shortName": "BigQueryRead",
+ "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryRead"
+ }
+ }
+ }
+ },
+ {
+ "regionTag": "bigquerystorage_v1beta2_generated_BigQueryRead_SplitReadStream_async",
+ "title": "BigQueryRead splitReadStream Sample",
+ "origin": "API_DEFINITION",
+ "description": " Splits a given `ReadStream` into two `ReadStream` objects. These `ReadStream` objects are referred to as the primary and the residual streams of the split. The original `ReadStream` can still be read from in the same manner as before. Both of the returned `ReadStream` objects can also be read from, and the rows returned by both child streams will be the same as the rows read from the original stream. Moreover, the two child streams will be allocated back-to-back in the original `ReadStream`. Concretely, it is guaranteed that for streams original, primary, and residual, that original[0-j] = primary[0-j] and original[j-n] = residual[0-m] once the streams have been read to completion.",
+ "canonical": true,
+ "file": "big_query_read.split_read_stream.js",
+ "language": "JAVASCRIPT",
+ "segments": [
+ {
+ "start": 25,
+ "end": 63,
+ "type": "FULL"
+ }
+ ],
+ "clientMethod": {
+ "shortName": "SplitReadStream",
+ "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryRead.SplitReadStream",
+ "async": true,
+ "parameters": [
+ {
+ "name": "name",
+ "type": "TYPE_STRING"
+ },
+ {
+ "name": "fraction",
+ "type": "TYPE_DOUBLE"
+ }
+ ],
+ "resultType": ".google.cloud.bigquery.storage.v1beta2.SplitReadStreamResponse",
+ "client": {
+ "shortName": "BigQueryReadClient",
+ "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryReadClient"
+ },
+ "method": {
+ "shortName": "SplitReadStream",
+ "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryRead.SplitReadStream",
+ "service": {
+ "shortName": "BigQueryRead",
+ "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryRead"
+ }
+ }
+ }
+ },
+ {
+ "regionTag": "bigquerystorage_v1beta2_generated_BigQueryWrite_CreateWriteStream_async",
+ "title": "BigQueryWrite createWriteStream Sample",
+ "origin": "API_DEFINITION",
+ "description": " Creates a write stream to the given table. Additionally, every table has a special COMMITTED stream named '_default' to which data can be written. This stream doesn't need to be created using CreateWriteStream. It is a stream that can be used simultaneously by any number of clients. Data written to this stream is considered committed as soon as an acknowledgement is received.",
+ "canonical": true,
+ "file": "big_query_write.create_write_stream.js",
+ "language": "JAVASCRIPT",
+ "segments": [
+ {
+ "start": 25,
+ "end": 59,
+ "type": "FULL"
+ }
+ ],
+ "clientMethod": {
+ "shortName": "CreateWriteStream",
+ "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite.CreateWriteStream",
+ "async": true,
+ "parameters": [
+ {
+ "name": "parent",
+ "type": "TYPE_STRING"
+ },
+ {
+ "name": "write_stream",
+ "type": ".google.cloud.bigquery.storage.v1beta2.WriteStream"
+ }
+ ],
+ "resultType": ".google.cloud.bigquery.storage.v1beta2.WriteStream",
+ "client": {
+ "shortName": "BigQueryWriteClient",
+ "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWriteClient"
+ },
+ "method": {
+ "shortName": "CreateWriteStream",
+ "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite.CreateWriteStream",
+ "service": {
+ "shortName": "BigQueryWrite",
+ "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite"
+ }
+ }
+ }
+ },
+ {
+ "regionTag": "bigquerystorage_v1beta2_generated_BigQueryWrite_AppendRows_async",
+ "title": "BigQueryWrite appendRows Sample",
+ "origin": "API_DEFINITION",
+ "description": " Appends data to the given stream. If `offset` is specified, the `offset` is checked against the end of stream. The server returns `OUT_OF_RANGE` in `AppendRowsResponse` if an attempt is made to append to an offset beyond the current end of the stream or `ALREADY_EXISTS` if user provides an `offset` that has already been written to. User can retry with adjusted offset within the same RPC stream. If `offset` is not specified, append happens at the end of the stream. The response contains the offset at which the append happened. Responses are received in the same order in which requests are sent. There will be one response for each successful request. If the `offset` is not set in response, it means append didn't happen due to some errors. If one request fails, all the subsequent requests will also fail until a success request is made again. If the stream is of `PENDING` type, data will only be available for read operations after the stream is committed.",
+ "canonical": true,
+ "file": "big_query_write.append_rows.js",
+ "language": "JAVASCRIPT",
+ "segments": [
+ {
+ "start": 25,
+ "end": 77,
+ "type": "FULL"
+ }
+ ],
+ "clientMethod": {
+ "shortName": "AppendRows",
+ "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite.AppendRows",
+ "async": true,
+ "parameters": [
+ {
+ "name": "write_stream",
+ "type": "TYPE_STRING"
+ },
+ {
+ "name": "offset",
+ "type": ".google.protobuf.Int64Value"
+ },
+ {
+ "name": "proto_rows",
+ "type": ".google.cloud.bigquery.storage.v1beta2.AppendRowsRequest.ProtoData"
+ },
+ {
+ "name": "trace_id",
+ "type": "TYPE_STRING"
+ }
+ ],
+ "resultType": ".google.cloud.bigquery.storage.v1beta2.AppendRowsResponse",
+ "client": {
+ "shortName": "BigQueryWriteClient",
+ "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWriteClient"
+ },
+ "method": {
+ "shortName": "AppendRows",
+ "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite.AppendRows",
+ "service": {
+ "shortName": "BigQueryWrite",
+ "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite"
+ }
+ }
+ }
+ },
+ {
+ "regionTag": "bigquerystorage_v1beta2_generated_BigQueryWrite_GetWriteStream_async",
+ "title": "BigQueryWrite getWriteStream Sample",
+ "origin": "API_DEFINITION",
+ "description": " Gets a write stream.",
+ "canonical": true,
+ "file": "big_query_write.get_write_stream.js",
+ "language": "JAVASCRIPT",
+ "segments": [
+ {
+ "start": 25,
+ "end": 54,
+ "type": "FULL"
+ }
+ ],
+ "clientMethod": {
+ "shortName": "GetWriteStream",
+ "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite.GetWriteStream",
+ "async": true,
+ "parameters": [
+ {
+ "name": "name",
+ "type": "TYPE_STRING"
+ }
+ ],
+ "resultType": ".google.cloud.bigquery.storage.v1beta2.WriteStream",
+ "client": {
+ "shortName": "BigQueryWriteClient",
+ "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWriteClient"
+ },
+ "method": {
+ "shortName": "GetWriteStream",
+ "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite.GetWriteStream",
+ "service": {
+ "shortName": "BigQueryWrite",
+ "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite"
+ }
+ }
+ }
+ },
+ {
+ "regionTag": "bigquerystorage_v1beta2_generated_BigQueryWrite_FinalizeWriteStream_async",
+            "title": "BigQueryWrite finalizeWriteStream Sample",
+ "origin": "API_DEFINITION",
+ "description": " Finalize a write stream so that no new data can be appended to the stream. Finalize is not supported on the '_default' stream.",
+ "canonical": true,
+ "file": "big_query_write.finalize_write_stream.js",
+ "language": "JAVASCRIPT",
+ "segments": [
+ {
+ "start": 25,
+ "end": 54,
+ "type": "FULL"
+ }
+ ],
+ "clientMethod": {
+ "shortName": "FinalizeWriteStream",
+ "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite.FinalizeWriteStream",
+ "async": true,
+ "parameters": [
+ {
+ "name": "name",
+ "type": "TYPE_STRING"
+ }
+ ],
+ "resultType": ".google.cloud.bigquery.storage.v1beta2.FinalizeWriteStreamResponse",
+ "client": {
+ "shortName": "BigQueryWriteClient",
+ "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWriteClient"
+ },
+ "method": {
+ "shortName": "FinalizeWriteStream",
+ "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite.FinalizeWriteStream",
+ "service": {
+ "shortName": "BigQueryWrite",
+ "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite"
+ }
+ }
+ }
+ },
+ {
+ "regionTag": "bigquerystorage_v1beta2_generated_BigQueryWrite_BatchCommitWriteStreams_async",
+            "title": "BigQueryWrite batchCommitWriteStreams Sample",
+ "origin": "API_DEFINITION",
+ "description": " Atomically commits a group of `PENDING` streams that belong to the same `parent` table. Streams must be finalized before commit and cannot be committed multiple times. Once a stream is committed, data in the stream becomes available for read operations.",
+ "canonical": true,
+ "file": "big_query_write.batch_commit_write_streams.js",
+ "language": "JAVASCRIPT",
+ "segments": [
+ {
+ "start": 25,
+ "end": 59,
+ "type": "FULL"
+ }
+ ],
+ "clientMethod": {
+ "shortName": "BatchCommitWriteStreams",
+ "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite.BatchCommitWriteStreams",
+ "async": true,
+ "parameters": [
+ {
+ "name": "parent",
+ "type": "TYPE_STRING"
+ },
+ {
+ "name": "write_streams",
+ "type": "TYPE_STRING[]"
+ }
+ ],
+ "resultType": ".google.cloud.bigquery.storage.v1beta2.BatchCommitWriteStreamsResponse",
+ "client": {
+ "shortName": "BigQueryWriteClient",
+ "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWriteClient"
+ },
+ "method": {
+ "shortName": "BatchCommitWriteStreams",
+ "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite.BatchCommitWriteStreams",
+ "service": {
+ "shortName": "BigQueryWrite",
+ "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite"
+ }
+ }
+ }
+ },
+ {
+ "regionTag": "bigquerystorage_v1beta2_generated_BigQueryWrite_FlushRows_async",
+            "title": "BigQueryWrite flushRows Sample",
+ "origin": "API_DEFINITION",
+ "description": " Flushes rows to a BUFFERED stream. If users are appending rows to BUFFERED stream, flush operation is required in order for the rows to become available for reading. A Flush operation flushes up to any previously flushed offset in a BUFFERED stream, to the offset specified in the request. Flush is not supported on the _default stream, since it is not BUFFERED.",
+ "canonical": true,
+ "file": "big_query_write.flush_rows.js",
+ "language": "JAVASCRIPT",
+ "segments": [
+ {
+ "start": 25,
+ "end": 58,
+ "type": "FULL"
+ }
+ ],
+ "clientMethod": {
+ "shortName": "FlushRows",
+ "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite.FlushRows",
+ "async": true,
+ "parameters": [
+ {
+ "name": "write_stream",
+ "type": "TYPE_STRING"
+ },
+ {
+ "name": "offset",
+ "type": ".google.protobuf.Int64Value"
+ }
+ ],
+ "resultType": ".google.cloud.bigquery.storage.v1beta2.FlushRowsResponse",
+ "client": {
+ "shortName": "BigQueryWriteClient",
+ "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWriteClient"
+ },
+ "method": {
+ "shortName": "FlushRows",
+ "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite.FlushRows",
+ "service": {
+ "shortName": "BigQueryWrite",
+ "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite"
+ }
+ }
+ }
+ }
+ ]
+}
diff --git a/handwritten/bigquery-storage/src/v1/big_query_read_client.ts b/handwritten/bigquery-storage/src/v1/big_query_read_client.ts
index b34ec292cc94..0c6706f874be 100644
--- a/handwritten/bigquery-storage/src/v1/big_query_read_client.ts
+++ b/handwritten/bigquery-storage/src/v1/big_query_read_client.ts
@@ -1,4 +1,4 @@
-// Copyright 2025 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -24,10 +24,10 @@ import type {
Descriptors,
ClientOptions,
} from 'google-gax';
-import {PassThrough} from 'stream';
+import { PassThrough } from 'stream';
import * as protos from '../../protos/protos';
import jsonProtos = require('../../protos/protos.json');
-import {loggingUtils as logging, decodeAnyProtosInArray} from 'google-gax';
+import { loggingUtils as logging, decodeAnyProtosInArray } from 'google-gax';
/**
* Client JSON configuration object, loaded from
@@ -51,7 +51,7 @@ export class BigQueryReadClient {
private _gaxModule: typeof gax | typeof gax.fallback;
private _gaxGrpc: gax.GrpcClient | gax.fallback.GrpcClient;
private _protos: {};
- private _defaults: {[method: string]: gax.CallSettings};
+ private _defaults: { [method: string]: gax.CallSettings };
private _universeDomain: string;
private _servicePath: string;
private _log = logging.log('bigquery-storage');
@@ -64,9 +64,9 @@ export class BigQueryReadClient {
batching: {},
};
warn: (code: string, message: string, warnType?: string) => void;
- innerApiCalls: {[name: string]: Function};
- pathTemplates: {[name: string]: gax.PathTemplate};
- bigQueryReadStub?: Promise<{[name: string]: Function}>;
+ innerApiCalls: { [name: string]: Function };
+ pathTemplates: { [name: string]: gax.PathTemplate };
+ bigQueryReadStub?: Promise<{ [name: string]: Function }>;
/**
* Construct an instance of BigQueryReadClient.
@@ -142,7 +142,7 @@ export class BigQueryReadClient {
const fallback =
opts?.fallback ??
(typeof window !== 'undefined' && typeof window?.fetch === 'function');
- opts = Object.assign({servicePath, port, clientConfig, fallback}, opts);
+ opts = Object.assign({ servicePath, port, clientConfig, fallback }, opts);
// If scopes are unset in options and we're connecting to a non-default endpoint, set scopes just in case.
if (servicePath !== this._servicePath && !('scopes' in opts)) {
@@ -231,7 +231,7 @@ export class BigQueryReadClient {
'google.cloud.bigquery.storage.v1.BigQueryRead',
gapicConfig as gax.ClientConfig,
opts.clientConfig || {},
- {'x-goog-api-client': clientHeader.join(' ')},
+ { 'x-goog-api-client': clientHeader.join(' ') },
);
// Set up a dictionary of "inner API calls"; the core implementation
@@ -271,7 +271,7 @@ export class BigQueryReadClient {
(this._protos as any).google.cloud.bigquery.storage.v1.BigQueryRead,
this._opts,
this._providedCustomServicePath,
- ) as Promise<{[method: string]: Function}>;
+ ) as Promise<{ [method: string]: Function }>;
// Iterate over each of the methods that the service provides
// and create an API call method for each.
@@ -282,11 +282,11 @@ export class BigQueryReadClient {
];
for (const methodName of bigQueryReadStubMethods) {
const callPromise = this.bigQueryReadStub.then(
- stub =>
+ (stub) =>
(...args: Array<{}>) => {
if (this._terminated) {
if (methodName in this.descriptors.stream) {
- const stream = new PassThrough({objectMode: true});
+ const stream = new PassThrough({ objectMode: true });
setImmediate(() => {
stream.emit(
'error',
@@ -542,7 +542,7 @@ export class BigQueryReadClient {
this._gaxModule.routingHeader.fromParams({
'read_session.table': request.readSession!.table ?? '',
});
- this.initialize().catch(err => {
+ this.initialize().catch((err) => {
throw err;
});
this._log.info('createReadSession request %j', request);
@@ -704,7 +704,7 @@ export class BigQueryReadClient {
this._gaxModule.routingHeader.fromParams({
name: request.name ?? '',
});
- this.initialize().catch(err => {
+ this.initialize().catch((err) => {
throw err;
});
this._log.info('splitReadStream request %j', request);
@@ -757,9 +757,9 @@ export class BigQueryReadClient {
/**
* Reads rows from the stream in the format prescribed by the ReadSession.
- * Each response contains one or more table rows, up to a maximum of 100 MiB
+ * Each response contains one or more table rows, up to a maximum of 128 MB
* per response; read requests which attempt to read individual rows larger
- * than 100 MiB will fail.
+ * than 128 MB will fail.
*
* Each request also returns a set of stream statistics reflecting the current
* state of the stream.
@@ -793,7 +793,7 @@ export class BigQueryReadClient {
this._gaxModule.routingHeader.fromParams({
read_stream: request.readStream ?? '',
});
- this.initialize().catch(err => {
+ this.initialize().catch((err) => {
throw err;
});
this._log.info('readRows stream %j', options);
@@ -1078,7 +1078,7 @@ export class BigQueryReadClient {
*/
close(): Promise {
if (this.bigQueryReadStub && !this._terminated) {
- return this.bigQueryReadStub.then(stub => {
+ return this.bigQueryReadStub.then((stub) => {
this._log.info('ending gRPC channel');
this._terminated = true;
stub.close();
diff --git a/handwritten/bigquery-storage/src/v1/big_query_write_client.ts b/handwritten/bigquery-storage/src/v1/big_query_write_client.ts
index a8db6ca408b2..73c531a999c6 100644
--- a/handwritten/bigquery-storage/src/v1/big_query_write_client.ts
+++ b/handwritten/bigquery-storage/src/v1/big_query_write_client.ts
@@ -1,4 +1,4 @@
-// Copyright 2025 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -24,10 +24,10 @@ import type {
Descriptors,
ClientOptions,
} from 'google-gax';
-import {PassThrough} from 'stream';
+import { PassThrough } from 'stream';
import * as protos from '../../protos/protos';
import jsonProtos = require('../../protos/protos.json');
-import {loggingUtils as logging, decodeAnyProtosInArray} from 'google-gax';
+import { loggingUtils as logging, decodeAnyProtosInArray } from 'google-gax';
/**
* Client JSON configuration object, loaded from
@@ -54,7 +54,7 @@ export class BigQueryWriteClient {
private _gaxModule: typeof gax | typeof gax.fallback;
private _gaxGrpc: gax.GrpcClient | gax.fallback.GrpcClient;
private _protos: {};
- private _defaults: {[method: string]: gax.CallSettings};
+ private _defaults: { [method: string]: gax.CallSettings };
private _universeDomain: string;
private _servicePath: string;
private _log = logging.log('bigquery-storage');
@@ -67,9 +67,9 @@ export class BigQueryWriteClient {
batching: {},
};
warn: (code: string, message: string, warnType?: string) => void;
- innerApiCalls: {[name: string]: Function};
- pathTemplates: {[name: string]: gax.PathTemplate};
- bigQueryWriteStub?: Promise<{[name: string]: Function}>;
+ innerApiCalls: { [name: string]: Function };
+ pathTemplates: { [name: string]: gax.PathTemplate };
+ bigQueryWriteStub?: Promise<{ [name: string]: Function }>;
/**
* Construct an instance of BigQueryWriteClient.
@@ -145,7 +145,7 @@ export class BigQueryWriteClient {
const fallback =
opts?.fallback ??
(typeof window !== 'undefined' && typeof window?.fetch === 'function');
- opts = Object.assign({servicePath, port, clientConfig, fallback}, opts);
+ opts = Object.assign({ servicePath, port, clientConfig, fallback }, opts);
// If scopes are unset in options and we're connecting to a non-default endpoint, set scopes just in case.
if (servicePath !== this._servicePath && !('scopes' in opts)) {
@@ -234,7 +234,7 @@ export class BigQueryWriteClient {
'google.cloud.bigquery.storage.v1.BigQueryWrite',
gapicConfig as gax.ClientConfig,
opts.clientConfig || {},
- {'x-goog-api-client': clientHeader.join(' ')},
+ { 'x-goog-api-client': clientHeader.join(' ') },
);
// Set up a dictionary of "inner API calls"; the core implementation
@@ -274,7 +274,7 @@ export class BigQueryWriteClient {
(this._protos as any).google.cloud.bigquery.storage.v1.BigQueryWrite,
this._opts,
this._providedCustomServicePath,
- ) as Promise<{[method: string]: Function}>;
+ ) as Promise<{ [method: string]: Function }>;
// Iterate over each of the methods that the service provides
// and create an API call method for each.
@@ -288,11 +288,11 @@ export class BigQueryWriteClient {
];
for (const methodName of bigQueryWriteStubMethods) {
const callPromise = this.bigQueryWriteStub.then(
- stub =>
+ (stub) =>
(...args: Array<{}>) => {
if (this._terminated) {
if (methodName in this.descriptors.stream) {
- const stream = new PassThrough({objectMode: true});
+ const stream = new PassThrough({ objectMode: true });
setImmediate(() => {
stream.emit(
'error',
@@ -516,7 +516,7 @@ export class BigQueryWriteClient {
this._gaxModule.routingHeader.fromParams({
parent: request.parent ?? '',
});
- this.initialize().catch(err => {
+ this.initialize().catch((err) => {
throw err;
});
this._log.info('createWriteStream request %j', request);
@@ -663,7 +663,7 @@ export class BigQueryWriteClient {
this._gaxModule.routingHeader.fromParams({
name: request.name ?? '',
});
- this.initialize().catch(err => {
+ this.initialize().catch((err) => {
throw err;
});
this._log.info('getWriteStream request %j', request);
@@ -808,7 +808,7 @@ export class BigQueryWriteClient {
this._gaxModule.routingHeader.fromParams({
name: request.name ?? '',
});
- this.initialize().catch(err => {
+ this.initialize().catch((err) => {
throw err;
});
this._log.info('finalizeWriteStream request %j', request);
@@ -959,7 +959,7 @@ export class BigQueryWriteClient {
this._gaxModule.routingHeader.fromParams({
parent: request.parent ?? '',
});
- this.initialize().catch(err => {
+ this.initialize().catch((err) => {
throw err;
});
this._log.info('batchCommitWriteStreams request %j', request);
@@ -1106,7 +1106,7 @@ export class BigQueryWriteClient {
this._gaxModule.routingHeader.fromParams({
write_stream: request.writeStream ?? '',
});
- this.initialize().catch(err => {
+ this.initialize().catch((err) => {
throw err;
});
this._log.info('flushRows request %j', request);
@@ -1199,7 +1199,7 @@ export class BigQueryWriteClient {
* region_tag:bigquerystorage_v1_generated_BigQueryWrite_AppendRows_async
*/
appendRows(options?: CallOptions): gax.CancellableStream {
- this.initialize().catch(err => {
+ this.initialize().catch((err) => {
throw err;
});
this._log.info('appendRows stream %j', options);
@@ -1484,7 +1484,7 @@ export class BigQueryWriteClient {
*/
close(): Promise {
if (this.bigQueryWriteStub && !this._terminated) {
- return this.bigQueryWriteStub.then(stub => {
+ return this.bigQueryWriteStub.then((stub) => {
this._log.info('ending gRPC channel');
this._terminated = true;
stub.close();
diff --git a/handwritten/bigquery-storage/src/v1/index.ts b/handwritten/bigquery-storage/src/v1/index.ts
index 4ef2dcd26417..ad672e49aae6 100644
--- a/handwritten/bigquery-storage/src/v1/index.ts
+++ b/handwritten/bigquery-storage/src/v1/index.ts
@@ -1,4 +1,4 @@
-// Copyright 2025 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -16,5 +16,5 @@
// ** https://github.com/googleapis/gapic-generator-typescript **
// ** All changes to this file may be overwritten. **
-export {BigQueryReadClient} from './big_query_read_client';
-export {BigQueryWriteClient} from './big_query_write_client';
+export { BigQueryReadClient } from './big_query_read_client';
+export { BigQueryWriteClient } from './big_query_write_client';
diff --git a/handwritten/bigquery-storage/src/v1alpha/index.ts b/handwritten/bigquery-storage/src/v1alpha/index.ts
index c934f7b77877..df37f43c5abc 100644
--- a/handwritten/bigquery-storage/src/v1alpha/index.ts
+++ b/handwritten/bigquery-storage/src/v1alpha/index.ts
@@ -1,4 +1,4 @@
-// Copyright 2025 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -16,4 +16,4 @@
// ** https://github.com/googleapis/gapic-generator-typescript **
// ** All changes to this file may be overwritten. **
-export {MetastorePartitionServiceClient} from './metastore_partition_service_client';
+export { MetastorePartitionServiceClient } from './metastore_partition_service_client';
diff --git a/handwritten/bigquery-storage/src/v1alpha/metastore_partition_service_client.ts b/handwritten/bigquery-storage/src/v1alpha/metastore_partition_service_client.ts
index ff3134f46498..852858717601 100644
--- a/handwritten/bigquery-storage/src/v1alpha/metastore_partition_service_client.ts
+++ b/handwritten/bigquery-storage/src/v1alpha/metastore_partition_service_client.ts
@@ -1,4 +1,4 @@
-// Copyright 2025 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -24,10 +24,10 @@ import type {
Descriptors,
ClientOptions,
} from 'google-gax';
-import {PassThrough} from 'stream';
+import { PassThrough } from 'stream';
import * as protos from '../../protos/protos';
import jsonProtos = require('../../protos/protos.json');
-import {loggingUtils as logging, decodeAnyProtosInArray} from 'google-gax';
+import { loggingUtils as logging, decodeAnyProtosInArray } from 'google-gax';
/**
* Client JSON configuration object, loaded from
@@ -51,7 +51,7 @@ export class MetastorePartitionServiceClient {
private _gaxModule: typeof gax | typeof gax.fallback;
private _gaxGrpc: gax.GrpcClient | gax.fallback.GrpcClient;
private _protos: {};
- private _defaults: {[method: string]: gax.CallSettings};
+ private _defaults: { [method: string]: gax.CallSettings };
private _universeDomain: string;
private _servicePath: string;
private _log = logging.log('storage');
@@ -64,9 +64,9 @@ export class MetastorePartitionServiceClient {
batching: {},
};
warn: (code: string, message: string, warnType?: string) => void;
- innerApiCalls: {[name: string]: Function};
- pathTemplates: {[name: string]: gax.PathTemplate};
- metastorePartitionServiceStub?: Promise<{[name: string]: Function}>;
+ innerApiCalls: { [name: string]: Function };
+ pathTemplates: { [name: string]: gax.PathTemplate };
+ metastorePartitionServiceStub?: Promise<{ [name: string]: Function }>;
/**
* Construct an instance of MetastorePartitionServiceClient.
@@ -143,7 +143,7 @@ export class MetastorePartitionServiceClient {
const fallback =
opts?.fallback ??
(typeof window !== 'undefined' && typeof window?.fetch === 'function');
- opts = Object.assign({servicePath, port, clientConfig, fallback}, opts);
+ opts = Object.assign({ servicePath, port, clientConfig, fallback }, opts);
// Request numeric enum values if REST transport is used.
opts.numericEnums = true;
@@ -226,7 +226,7 @@ export class MetastorePartitionServiceClient {
'google.cloud.bigquery.storage.v1alpha.MetastorePartitionService',
gapicConfig as gax.ClientConfig,
opts.clientConfig || {},
- {'x-goog-api-client': clientHeader.join(' ')},
+ { 'x-goog-api-client': clientHeader.join(' ') },
);
// Set up a dictionary of "inner API calls"; the core implementation
@@ -267,7 +267,7 @@ export class MetastorePartitionServiceClient {
.MetastorePartitionService,
this._opts,
this._providedCustomServicePath,
- ) as Promise<{[method: string]: Function}>;
+ ) as Promise<{ [method: string]: Function }>;
// Iterate over each of the methods that the service provides
// and create an API call method for each.
@@ -280,11 +280,11 @@ export class MetastorePartitionServiceClient {
];
for (const methodName of metastorePartitionServiceStubMethods) {
const callPromise = this.metastorePartitionServiceStub.then(
- stub =>
+ (stub) =>
(...args: Array<{}>) => {
if (this._terminated) {
if (methodName in this.descriptors.stream) {
- const stream = new PassThrough({objectMode: true});
+ const stream = new PassThrough({ objectMode: true });
setImmediate(() => {
stream.emit(
'error',
@@ -515,7 +515,7 @@ export class MetastorePartitionServiceClient {
this._gaxModule.routingHeader.fromParams({
parent: request.parent ?? '',
});
- this.initialize().catch(err => {
+ this.initialize().catch((err) => {
throw err;
});
this._log.info('batchCreateMetastorePartitions request %j', request);
@@ -675,7 +675,7 @@ export class MetastorePartitionServiceClient {
this._gaxModule.routingHeader.fromParams({
parent: request.parent ?? '',
});
- this.initialize().catch(err => {
+ this.initialize().catch((err) => {
throw err;
});
this._log.info('batchDeleteMetastorePartitions request %j', request);
@@ -834,7 +834,7 @@ export class MetastorePartitionServiceClient {
this._gaxModule.routingHeader.fromParams({
parent: request.parent ?? '',
});
- this.initialize().catch(err => {
+ this.initialize().catch((err) => {
throw err;
});
this._log.info('batchUpdateMetastorePartitions request %j', request);
@@ -1002,7 +1002,7 @@ export class MetastorePartitionServiceClient {
this._gaxModule.routingHeader.fromParams({
parent: request.parent ?? '',
});
- this.initialize().catch(err => {
+ this.initialize().catch((err) => {
throw err;
});
this._log.info('listMetastorePartitions request %j', request);
@@ -1073,7 +1073,7 @@ export class MetastorePartitionServiceClient {
* region_tag:bigquerystorage_v1alpha_generated_MetastorePartitionService_StreamMetastorePartitions_async
*/
streamMetastorePartitions(options?: CallOptions): gax.CancellableStream {
- this.initialize().catch(err => {
+ this.initialize().catch((err) => {
throw err;
});
this._log.info('streamMetastorePartitions stream %j', options);
@@ -1212,7 +1212,7 @@ export class MetastorePartitionServiceClient {
*/
close(): Promise {
if (this.metastorePartitionServiceStub && !this._terminated) {
- return this.metastorePartitionServiceStub.then(stub => {
+ return this.metastorePartitionServiceStub.then((stub) => {
this._log.info('ending gRPC channel');
this._terminated = true;
stub.close();
diff --git a/handwritten/bigquery-storage/src/v1beta/index.ts b/handwritten/bigquery-storage/src/v1beta/index.ts
index c934f7b77877..df37f43c5abc 100644
--- a/handwritten/bigquery-storage/src/v1beta/index.ts
+++ b/handwritten/bigquery-storage/src/v1beta/index.ts
@@ -1,4 +1,4 @@
-// Copyright 2025 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -16,4 +16,4 @@
// ** https://github.com/googleapis/gapic-generator-typescript **
// ** All changes to this file may be overwritten. **
-export {MetastorePartitionServiceClient} from './metastore_partition_service_client';
+export { MetastorePartitionServiceClient } from './metastore_partition_service_client';
diff --git a/handwritten/bigquery-storage/src/v1beta/metastore_partition_service_client.ts b/handwritten/bigquery-storage/src/v1beta/metastore_partition_service_client.ts
index 2f0a4eb9d290..cf2ddf5aafef 100644
--- a/handwritten/bigquery-storage/src/v1beta/metastore_partition_service_client.ts
+++ b/handwritten/bigquery-storage/src/v1beta/metastore_partition_service_client.ts
@@ -1,4 +1,4 @@
-// Copyright 2025 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -24,10 +24,10 @@ import type {
Descriptors,
ClientOptions,
} from 'google-gax';
-import {PassThrough} from 'stream';
+import { PassThrough } from 'stream';
import * as protos from '../../protos/protos';
import jsonProtos = require('../../protos/protos.json');
-import {loggingUtils as logging, decodeAnyProtosInArray} from 'google-gax';
+import { loggingUtils as logging, decodeAnyProtosInArray } from 'google-gax';
/**
* Client JSON configuration object, loaded from
@@ -51,7 +51,7 @@ export class MetastorePartitionServiceClient {
private _gaxModule: typeof gax | typeof gax.fallback;
private _gaxGrpc: gax.GrpcClient | gax.fallback.GrpcClient;
private _protos: {};
- private _defaults: {[method: string]: gax.CallSettings};
+ private _defaults: { [method: string]: gax.CallSettings };
private _universeDomain: string;
private _servicePath: string;
private _log = logging.log('storage');
@@ -64,9 +64,9 @@ export class MetastorePartitionServiceClient {
batching: {},
};
warn: (code: string, message: string, warnType?: string) => void;
- innerApiCalls: {[name: string]: Function};
- pathTemplates: {[name: string]: gax.PathTemplate};
- metastorePartitionServiceStub?: Promise<{[name: string]: Function}>;
+ innerApiCalls: { [name: string]: Function };
+ pathTemplates: { [name: string]: gax.PathTemplate };
+ metastorePartitionServiceStub?: Promise<{ [name: string]: Function }>;
/**
* Construct an instance of MetastorePartitionServiceClient.
@@ -143,7 +143,7 @@ export class MetastorePartitionServiceClient {
const fallback =
opts?.fallback ??
(typeof window !== 'undefined' && typeof window?.fetch === 'function');
- opts = Object.assign({servicePath, port, clientConfig, fallback}, opts);
+ opts = Object.assign({ servicePath, port, clientConfig, fallback }, opts);
// Request numeric enum values if REST transport is used.
opts.numericEnums = true;
@@ -226,7 +226,7 @@ export class MetastorePartitionServiceClient {
'google.cloud.bigquery.storage.v1beta.MetastorePartitionService',
gapicConfig as gax.ClientConfig,
opts.clientConfig || {},
- {'x-goog-api-client': clientHeader.join(' ')},
+ { 'x-goog-api-client': clientHeader.join(' ') },
);
// Set up a dictionary of "inner API calls"; the core implementation
@@ -267,7 +267,7 @@ export class MetastorePartitionServiceClient {
.MetastorePartitionService,
this._opts,
this._providedCustomServicePath,
- ) as Promise<{[method: string]: Function}>;
+ ) as Promise<{ [method: string]: Function }>;
// Iterate over each of the methods that the service provides
// and create an API call method for each.
@@ -280,11 +280,11 @@ export class MetastorePartitionServiceClient {
];
for (const methodName of metastorePartitionServiceStubMethods) {
const callPromise = this.metastorePartitionServiceStub.then(
- stub =>
+ (stub) =>
(...args: Array<{}>) => {
if (this._terminated) {
if (methodName in this.descriptors.stream) {
- const stream = new PassThrough({objectMode: true});
+ const stream = new PassThrough({ objectMode: true });
setImmediate(() => {
stream.emit(
'error',
@@ -515,7 +515,7 @@ export class MetastorePartitionServiceClient {
this._gaxModule.routingHeader.fromParams({
parent: request.parent ?? '',
});
- this.initialize().catch(err => {
+ this.initialize().catch((err) => {
throw err;
});
this._log.info('batchCreateMetastorePartitions request %j', request);
@@ -675,7 +675,7 @@ export class MetastorePartitionServiceClient {
this._gaxModule.routingHeader.fromParams({
parent: request.parent ?? '',
});
- this.initialize().catch(err => {
+ this.initialize().catch((err) => {
throw err;
});
this._log.info('batchDeleteMetastorePartitions request %j', request);
@@ -834,7 +834,7 @@ export class MetastorePartitionServiceClient {
this._gaxModule.routingHeader.fromParams({
parent: request.parent ?? '',
});
- this.initialize().catch(err => {
+ this.initialize().catch((err) => {
throw err;
});
this._log.info('batchUpdateMetastorePartitions request %j', request);
@@ -1004,7 +1004,7 @@ export class MetastorePartitionServiceClient {
this._gaxModule.routingHeader.fromParams({
parent: request.parent ?? '',
});
- this.initialize().catch(err => {
+ this.initialize().catch((err) => {
throw err;
});
this._log.info('listMetastorePartitions request %j', request);
@@ -1075,7 +1075,7 @@ export class MetastorePartitionServiceClient {
* region_tag:bigquerystorage_v1beta_generated_MetastorePartitionService_StreamMetastorePartitions_async
*/
streamMetastorePartitions(options?: CallOptions): gax.CancellableStream {
- this.initialize().catch(err => {
+ this.initialize().catch((err) => {
throw err;
});
this._log.info('streamMetastorePartitions stream %j', options);
@@ -1214,7 +1214,7 @@ export class MetastorePartitionServiceClient {
*/
close(): Promise {
if (this.metastorePartitionServiceStub && !this._terminated) {
- return this.metastorePartitionServiceStub.then(stub => {
+ return this.metastorePartitionServiceStub.then((stub) => {
this._log.info('ending gRPC channel');
this._terminated = true;
stub.close();
diff --git a/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts b/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts
index 782d60859b07..2f2e4c86ef11 100644
--- a/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts
+++ b/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts
@@ -1,4 +1,4 @@
-// Copyright 2025 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -24,10 +24,10 @@ import type {
Descriptors,
ClientOptions,
} from 'google-gax';
-import {PassThrough} from 'stream';
+import { PassThrough } from 'stream';
import * as protos from '../../protos/protos';
import jsonProtos = require('../../protos/protos.json');
-import {loggingUtils as logging, decodeAnyProtosInArray} from 'google-gax';
+import { loggingUtils as logging, decodeAnyProtosInArray } from 'google-gax';
/**
* Client JSON configuration object, loaded from
@@ -56,7 +56,7 @@ export class BigQueryStorageClient {
private _gaxModule: typeof gax | typeof gax.fallback;
private _gaxGrpc: gax.GrpcClient | gax.fallback.GrpcClient;
private _protos: {};
- private _defaults: {[method: string]: gax.CallSettings};
+ private _defaults: { [method: string]: gax.CallSettings };
private _universeDomain: string;
private _servicePath: string;
private _log = logging.log('bigquery-storage');
@@ -69,9 +69,9 @@ export class BigQueryStorageClient {
batching: {},
};
warn: (code: string, message: string, warnType?: string) => void;
- innerApiCalls: {[name: string]: Function};
- pathTemplates: {[name: string]: gax.PathTemplate};
- bigQueryStorageStub?: Promise<{[name: string]: Function}>;
+ innerApiCalls: { [name: string]: Function };
+ pathTemplates: { [name: string]: gax.PathTemplate };
+ bigQueryStorageStub?: Promise<{ [name: string]: Function }>;
/**
* Construct an instance of BigQueryStorageClient.
@@ -147,7 +147,7 @@ export class BigQueryStorageClient {
const fallback =
opts?.fallback ??
(typeof window !== 'undefined' && typeof window?.fetch === 'function');
- opts = Object.assign({servicePath, port, clientConfig, fallback}, opts);
+ opts = Object.assign({ servicePath, port, clientConfig, fallback }, opts);
// If scopes are unset in options and we're connecting to a non-default endpoint, set scopes just in case.
if (servicePath !== this._servicePath && !('scopes' in opts)) {
@@ -230,7 +230,7 @@ export class BigQueryStorageClient {
'google.cloud.bigquery.storage.v1beta1.BigQueryStorage',
gapicConfig as gax.ClientConfig,
opts.clientConfig || {},
- {'x-goog-api-client': clientHeader.join(' ')},
+ { 'x-goog-api-client': clientHeader.join(' ') },
);
// Set up a dictionary of "inner API calls"; the core implementation
@@ -271,7 +271,7 @@ export class BigQueryStorageClient {
.BigQueryStorage,
this._opts,
this._providedCustomServicePath,
- ) as Promise<{[method: string]: Function}>;
+ ) as Promise<{ [method: string]: Function }>;
// Iterate over each of the methods that the service provides
// and create an API call method for each.
@@ -284,11 +284,11 @@ export class BigQueryStorageClient {
];
for (const methodName of bigQueryStorageStubMethods) {
const callPromise = this.bigQueryStorageStub.then(
- stub =>
+ (stub) =>
(...args: Array<{}>) => {
if (this._terminated) {
if (methodName in this.descriptors.stream) {
- const stream = new PassThrough({objectMode: true});
+ const stream = new PassThrough({ objectMode: true });
setImmediate(() => {
stream.emit(
'error',
@@ -539,7 +539,7 @@ export class BigQueryStorageClient {
'table_reference.dataset_id':
request.tableReference!.datasetId?.toString() ?? '',
});
- this.initialize().catch(err => {
+ this.initialize().catch((err) => {
throw err;
});
this._log.info('createReadSession request %j', request);
@@ -689,7 +689,7 @@ export class BigQueryStorageClient {
this._gaxModule.routingHeader.fromParams({
'session.name': request.session!.name ?? '',
});
- this.initialize().catch(err => {
+ this.initialize().catch((err) => {
throw err;
});
this._log.info('batchCreateReadSessionStreams request %j', request);
@@ -845,7 +845,7 @@ export class BigQueryStorageClient {
this._gaxModule.routingHeader.fromParams({
'stream.name': request.stream!.name ?? '',
});
- this.initialize().catch(err => {
+ this.initialize().catch((err) => {
throw err;
});
this._log.info('finalizeStream request %j', request);
@@ -1008,7 +1008,7 @@ export class BigQueryStorageClient {
this._gaxModule.routingHeader.fromParams({
'original_stream.name': request.originalStream!.name ?? '',
});
- this.initialize().catch(err => {
+ this.initialize().catch((err) => {
throw err;
});
this._log.info('splitReadStream request %j', request);
@@ -1097,7 +1097,7 @@ export class BigQueryStorageClient {
this._gaxModule.routingHeader.fromParams({
'read_position.stream.name': request.readPosition!.stream!.name ?? '',
});
- this.initialize().catch(err => {
+ this.initialize().catch((err) => {
throw err;
});
this._log.info('readRows stream %j', options);
@@ -1240,7 +1240,7 @@ export class BigQueryStorageClient {
*/
  close(): Promise<void> {
if (this.bigQueryStorageStub && !this._terminated) {
- return this.bigQueryStorageStub.then(stub => {
+ return this.bigQueryStorageStub.then((stub) => {
this._log.info('ending gRPC channel');
this._terminated = true;
stub.close();
diff --git a/handwritten/bigquery-storage/src/v1beta1/index.ts b/handwritten/bigquery-storage/src/v1beta1/index.ts
index 157199d4f44d..9a6dba62856b 100644
--- a/handwritten/bigquery-storage/src/v1beta1/index.ts
+++ b/handwritten/bigquery-storage/src/v1beta1/index.ts
@@ -1,4 +1,4 @@
-// Copyright 2025 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -16,4 +16,4 @@
// ** https://github.com/googleapis/gapic-generator-typescript **
// ** All changes to this file may be overwritten. **
-export {BigQueryStorageClient} from './big_query_storage_client';
+export { BigQueryStorageClient } from './big_query_storage_client';
diff --git a/handwritten/bigquery-storage/src/v1beta2/big_query_read_client.ts b/handwritten/bigquery-storage/src/v1beta2/big_query_read_client.ts
new file mode 100644
index 000000000000..bf07163430ca
--- /dev/null
+++ b/handwritten/bigquery-storage/src/v1beta2/big_query_read_client.ts
@@ -0,0 +1,1082 @@
+// Copyright 2026 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ** This file is automatically generated by gapic-generator-typescript. **
+// ** https://github.com/googleapis/gapic-generator-typescript **
+// ** All changes to this file may be overwritten. **
+
+/* global window */
+import type * as gax from 'google-gax';
+import type {
+ Callback,
+ CallOptions,
+ Descriptors,
+ ClientOptions,
+} from 'google-gax';
+import { PassThrough } from 'stream';
+import * as protos from '../../protos/protos';
+import jsonProtos = require('../../protos/protos.json');
+import { loggingUtils as logging, decodeAnyProtosInArray } from 'google-gax';
+
+/**
+ * Client JSON configuration object, loaded from
+ * `src/v1beta2/big_query_read_client_config.json`.
+ * This file defines retry strategy and timeouts for all API methods in this library.
+ */
+import * as gapicConfig from './big_query_read_client_config.json';
+const version = require('../../../package.json').version;
+
+/**
+ * BigQuery Read API.
+ *
+ * The Read API can be used to read data from BigQuery.
+ *
+ * New code should use the v1 Read API going forward, if they don't use Write
+ * API at the same time.
+ * @class
+ * @memberof v1beta2
+ */
+export class BigQueryReadClient {
+ private _terminated = false;
+ private _opts: ClientOptions;
+ private _providedCustomServicePath: boolean;
+ private _gaxModule: typeof gax | typeof gax.fallback;
+ private _gaxGrpc: gax.GrpcClient | gax.fallback.GrpcClient;
+ private _protos: {};
+ private _defaults: { [method: string]: gax.CallSettings };
+ private _universeDomain: string;
+ private _servicePath: string;
+ private _log = logging.log('storage');
+
+ auth: gax.GoogleAuth;
+ descriptors: Descriptors = {
+ page: {},
+ stream: {},
+ longrunning: {},
+ batching: {},
+ };
+ warn: (code: string, message: string, warnType?: string) => void;
+ innerApiCalls: { [name: string]: Function };
+ pathTemplates: { [name: string]: gax.PathTemplate };
+ bigQueryReadStub?: Promise<{ [name: string]: Function }>;
+
+ /**
+ * Construct an instance of BigQueryReadClient.
+ *
+ * @param {object} [options] - The configuration object.
+ * The options accepted by the constructor are described in detail
+ * in [this document](https://github.com/googleapis/gax-nodejs/blob/main/client-libraries.md#creating-the-client-instance).
+ * The common options are:
+ * @param {object} [options.credentials] - Credentials object.
+ * @param {string} [options.credentials.client_email]
+ * @param {string} [options.credentials.private_key]
+ * @param {string} [options.email] - Account email address. Required when
+ * using a .pem or .p12 keyFilename.
+ * @param {string} [options.keyFilename] - Full path to the a .json, .pem, or
+ * .p12 key downloaded from the Google Developers Console. If you provide
+ * a path to a JSON file, the projectId option below is not necessary.
+ * NOTE: .pem and .p12 require you to specify options.email as well.
+ * @param {number} [options.port] - The port on which to connect to
+ * the remote host.
+ * @param {string} [options.projectId] - The project ID from the Google
+ * Developer's Console, e.g. 'grape-spaceship-123'. We will also check
+ * the environment variable GCLOUD_PROJECT for your project ID. If your
+ * app is running in an environment which supports
+ * {@link https://cloud.google.com/docs/authentication/application-default-credentials Application Default Credentials},
+ * your project ID will be detected automatically.
+ * @param {string} [options.apiEndpoint] - The domain name of the
+ * API remote host.
+ * @param {gax.ClientConfig} [options.clientConfig] - Client configuration override.
+ * Follows the structure of {@link gapicConfig}.
+ * @param {boolean} [options.fallback] - Use HTTP/1.1 REST mode.
+ * For more information, please check the
+ * {@link https://github.com/googleapis/gax-nodejs/blob/main/client-libraries.md#http11-rest-api-mode documentation}.
+ * @param {gax} [gaxInstance]: loaded instance of `google-gax`. Useful if you
+ * need to avoid loading the default gRPC version and want to use the fallback
+ * HTTP implementation. Load only fallback version and pass it to the constructor:
+ * ```
+ * const gax = require('google-gax/build/src/fallback'); // avoids loading google-gax with gRPC
+ * const client = new BigQueryReadClient({fallback: true}, gax);
+ * ```
+ */
+ constructor(
+ opts?: ClientOptions,
+ gaxInstance?: typeof gax | typeof gax.fallback,
+ ) {
+ // Ensure that options include all the required fields.
+ const staticMembers = this.constructor as typeof BigQueryReadClient;
+ if (
+ opts?.universe_domain &&
+ opts?.universeDomain &&
+ opts?.universe_domain !== opts?.universeDomain
+ ) {
+ throw new Error(
+ 'Please set either universe_domain or universeDomain, but not both.',
+ );
+ }
+ const universeDomainEnvVar =
+ typeof process === 'object' && typeof process.env === 'object'
+ ? process.env['GOOGLE_CLOUD_UNIVERSE_DOMAIN']
+ : undefined;
+ this._universeDomain =
+ opts?.universeDomain ??
+ opts?.universe_domain ??
+ universeDomainEnvVar ??
+ 'googleapis.com';
+ this._servicePath = 'bigquerystorage.' + this._universeDomain;
+ const servicePath =
+ opts?.servicePath || opts?.apiEndpoint || this._servicePath;
+ this._providedCustomServicePath = !!(
+ opts?.servicePath || opts?.apiEndpoint
+ );
+ const port = opts?.port || staticMembers.port;
+ const clientConfig = opts?.clientConfig ?? {};
+ const fallback =
+ opts?.fallback ??
+ (typeof window !== 'undefined' && typeof window?.fetch === 'function');
+ opts = Object.assign({ servicePath, port, clientConfig, fallback }, opts);
+
+ // If scopes are unset in options and we're connecting to a non-default endpoint, set scopes just in case.
+ if (servicePath !== this._servicePath && !('scopes' in opts)) {
+ opts['scopes'] = staticMembers.scopes;
+ }
+
+ // Load google-gax module synchronously if needed
+ if (!gaxInstance) {
+ gaxInstance = require('google-gax') as typeof gax;
+ }
+
+ // Choose either gRPC or proto-over-HTTP implementation of google-gax.
+ this._gaxModule = opts.fallback ? gaxInstance.fallback : gaxInstance;
+
+ // Create a `gaxGrpc` object, with any grpc-specific options sent to the client.
+ this._gaxGrpc = new this._gaxModule.GrpcClient(opts);
+
+ // Save options to use in initialize() method.
+ this._opts = opts;
+
+ // Save the auth object to the client, for use by other methods.
+ this.auth = this._gaxGrpc.auth as gax.GoogleAuth;
+
+ // Set useJWTAccessWithScope on the auth object.
+ this.auth.useJWTAccessWithScope = true;
+
+ // Set defaultServicePath on the auth object.
+ this.auth.defaultServicePath = this._servicePath;
+
+ // Set the default scopes in auth client if needed.
+ if (servicePath === this._servicePath) {
+ this.auth.defaultScopes = staticMembers.scopes;
+ }
+
+ // Determine the client header string.
+ const clientHeader = [`gax/${this._gaxModule.version}`, `gapic/${version}`];
+ if (typeof process === 'object' && 'versions' in process) {
+ clientHeader.push(`gl-node/${process.versions.node}`);
+ } else {
+ clientHeader.push(`gl-web/${this._gaxModule.version}`);
+ }
+ if (!opts.fallback) {
+ clientHeader.push(`grpc/${this._gaxGrpc.grpcVersion}`);
+ } else {
+ clientHeader.push(`rest/${this._gaxGrpc.grpcVersion}`);
+ }
+ if (opts.libName && opts.libVersion) {
+ clientHeader.push(`${opts.libName}/${opts.libVersion}`);
+ }
+ // Load the applicable protos.
+ this._protos = this._gaxGrpc.loadProtoJSON(jsonProtos);
+
+ // This API contains "path templates"; forward-slash-separated
+ // identifiers to uniquely identify resources within the API.
+ // Create useful helper objects for these.
+ this.pathTemplates = {
+ projectPathTemplate: new this._gaxModule.PathTemplate(
+ 'projects/{project}',
+ ),
+ readSessionPathTemplate: new this._gaxModule.PathTemplate(
+ 'projects/{project}/locations/{location}/sessions/{session}',
+ ),
+ readStreamPathTemplate: new this._gaxModule.PathTemplate(
+ 'projects/{project}/locations/{location}/sessions/{session}/streams/{stream}',
+ ),
+ tablePathTemplate: new this._gaxModule.PathTemplate(
+ 'projects/{project}/datasets/{dataset}/tables/{table}',
+ ),
+ writeStreamPathTemplate: new this._gaxModule.PathTemplate(
+ 'projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}',
+ ),
+ };
+
+ // Some of the methods on this service provide streaming responses.
+ // Provide descriptors for these.
+ this.descriptors.stream = {
+ readRows: new this._gaxModule.StreamDescriptor(
+ this._gaxModule.StreamType.SERVER_STREAMING,
+ !!opts.fallback,
+ !!opts.gaxServerStreamingRetries,
+ ),
+ };
+
+ // Put together the default options sent with requests.
+ this._defaults = this._gaxGrpc.constructSettings(
+ 'google.cloud.bigquery.storage.v1beta2.BigQueryRead',
+ gapicConfig as gax.ClientConfig,
+ opts.clientConfig || {},
+ { 'x-goog-api-client': clientHeader.join(' ') },
+ );
+
+ // Set up a dictionary of "inner API calls"; the core implementation
+ // of calling the API is handled in `google-gax`, with this code
+ // merely providing the destination and request information.
+ this.innerApiCalls = {};
+
+ // Add a warn function to the client constructor so it can be easily tested.
+ this.warn = this._gaxModule.warn;
+ }
+
+ /**
+ * Initialize the client.
+ * Performs asynchronous operations (such as authentication) and prepares the client.
+ * This function will be called automatically when any class method is called for the
+ * first time, but if you need to initialize it before calling an actual method,
+ * feel free to call initialize() directly.
+ *
+ * You can await on this method if you want to make sure the client is initialized.
+ *
+ * @returns {Promise} A promise that resolves to an authenticated service stub.
+ */
+ initialize() {
+ // If the client stub promise is already initialized, return immediately.
+ if (this.bigQueryReadStub) {
+ return this.bigQueryReadStub;
+ }
+
+ // Put together the "service stub" for
+ // google.cloud.bigquery.storage.v1beta2.BigQueryRead.
+ this.bigQueryReadStub = this._gaxGrpc.createStub(
+ this._opts.fallback
+ ? (this._protos as protobuf.Root).lookupService(
+ 'google.cloud.bigquery.storage.v1beta2.BigQueryRead',
+ )
+ : // eslint-disable-next-line @typescript-eslint/no-explicit-any
+ (this._protos as any).google.cloud.bigquery.storage.v1beta2
+ .BigQueryRead,
+ this._opts,
+ this._providedCustomServicePath,
+ ) as Promise<{ [method: string]: Function }>;
+
+ // Iterate over each of the methods that the service provides
+ // and create an API call method for each.
+ const bigQueryReadStubMethods = [
+ 'createReadSession',
+ 'readRows',
+ 'splitReadStream',
+ ];
+ for (const methodName of bigQueryReadStubMethods) {
+ const callPromise = this.bigQueryReadStub.then(
+ (stub) =>
+ (...args: Array<{}>) => {
+ if (this._terminated) {
+ if (methodName in this.descriptors.stream) {
+ const stream = new PassThrough({ objectMode: true });
+ setImmediate(() => {
+ stream.emit(
+ 'error',
+ new this._gaxModule.GoogleError(
+ 'The client has already been closed.',
+ ),
+ );
+ });
+ return stream;
+ }
+ return Promise.reject('The client has already been closed.');
+ }
+ const func = stub[methodName];
+ return func.apply(stub, args);
+ },
+ (err: Error | null | undefined) => () => {
+ throw err;
+ },
+ );
+
+ const descriptor = this.descriptors.stream[methodName] || undefined;
+ const apiCall = this._gaxModule.createApiCall(
+ callPromise,
+ this._defaults[methodName],
+ descriptor,
+ this._opts.fallback,
+ );
+
+ this.innerApiCalls[methodName] = apiCall;
+ }
+
+ return this.bigQueryReadStub;
+ }
+
+ /**
+ * The DNS address for this API service.
+ * @deprecated Use the apiEndpoint method of the client instance.
+ * @returns {string} The DNS address for this service.
+ */
+ static get servicePath() {
+ if (
+ typeof process === 'object' &&
+ typeof process.emitWarning === 'function'
+ ) {
+ process.emitWarning(
+ 'Static servicePath is deprecated, please use the instance method instead.',
+ 'DeprecationWarning',
+ );
+ }
+ return 'bigquerystorage.googleapis.com';
+ }
+
+ /**
+ * The DNS address for this API service - same as servicePath.
+ * @deprecated Use the apiEndpoint method of the client instance.
+ * @returns {string} The DNS address for this service.
+ */
+ static get apiEndpoint() {
+ if (
+ typeof process === 'object' &&
+ typeof process.emitWarning === 'function'
+ ) {
+ process.emitWarning(
+ 'Static apiEndpoint is deprecated, please use the instance method instead.',
+ 'DeprecationWarning',
+ );
+ }
+ return 'bigquerystorage.googleapis.com';
+ }
+
+ /**
+ * The DNS address for this API service.
+ * @returns {string} The DNS address for this service.
+ */
+ get apiEndpoint() {
+ return this._servicePath;
+ }
+
+ get universeDomain() {
+ return this._universeDomain;
+ }
+
+ /**
+ * The port for this API service.
+ * @returns {number} The default port for this service.
+ */
+ static get port() {
+ return 443;
+ }
+
+ /**
+ * The scopes needed to make gRPC calls for every method defined
+ * in this service.
+ * @returns {string[]} List of default scopes.
+ */
+ static get scopes() {
+ return [
+ 'https://www.googleapis.com/auth/bigquery',
+ 'https://www.googleapis.com/auth/cloud-platform',
+ ];
+ }
+
+ getProjectId(): Promise<string>;
+ getProjectId(callback: Callback<string, undefined, undefined>): void;
+ /**
+ * Return the project ID used by this class.
+ * @returns {Promise} A promise that resolves to string containing the project ID.
+ */
+ getProjectId(
+ callback?: Callback<string, undefined, undefined>,
+ ): Promise<string> | void {
+ if (callback) {
+ this.auth.getProjectId(callback);
+ return;
+ }
+ return this.auth.getProjectId();
+ }
+
+ // -------------------
+ // -- Service calls --
+ // -------------------
+ /**
+ * Creates a new read session. A read session divides the contents of a
+ * BigQuery table into one or more streams, which can then be used to read
+ * data from the table. The read session also specifies properties of the
+ * data to be read, such as a list of columns or a push-down filter describing
+ * the rows to be returned.
+ *
+ * A particular row can be read by at most one stream. When the caller has
+ * reached the end of each stream in the session, then all the data in the
+ * table has been read.
+ *
+ * Data is assigned to each stream such that roughly the same number of
+ * rows can be read from each stream. Because the server-side unit for
+ * assigning data is collections of rows, the API does not guarantee that
+ * each stream will return the same number or rows. Additionally, the
+ * limits are enforced based on the number of pre-filtered rows, so some
+ * filters can lead to lopsided assignments.
+ *
+ * Read sessions automatically expire 6 hours after they are created and do
+ * not require manual clean-up by the caller.
+ *
+ * @param {Object} request
+ * The request object that will be sent.
+ * @param {string} request.parent
+ * Required. The request project that owns the session, in the form of
+ * `projects/{project_id}`.
+ * @param {google.cloud.bigquery.storage.v1beta2.ReadSession} request.readSession
+ * Required. Session to be created.
+ * @param {number} request.maxStreamCount
+ * Max initial number of streams. If unset or zero, the server will
+ * provide a value of streams so as to produce reasonable throughput. Must be
+ * non-negative. The number of streams may be lower than the requested number,
+ * depending on the amount parallelism that is reasonable for the table. Error
+ * will be returned if the max count is greater than the current system
+ * max limit of 1,000.
+ *
+ * Streams must be read starting from offset 0.
+ * @param {object} [options]
+ * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details.
+ * @returns {Promise} - The promise which resolves to an array.
+ * The first element of the array is an object representing {@link protos.google.cloud.bigquery.storage.v1beta2.ReadSession|ReadSession}.
+ * Please see the {@link https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods | documentation }
+ * for more details and examples.
+ * @example include:samples/generated/v1beta2/big_query_read.create_read_session.js
+ * region_tag:bigquerystorage_v1beta2_generated_BigQueryRead_CreateReadSession_async
+ */
+ createReadSession(
+ request?: protos.google.cloud.bigquery.storage.v1beta2.ICreateReadSessionRequest,
+ options?: CallOptions,
+ ): Promise<
+ [
+ protos.google.cloud.bigquery.storage.v1beta2.IReadSession,
+ (
+ | protos.google.cloud.bigquery.storage.v1beta2.ICreateReadSessionRequest
+ | undefined
+ ),
+ {} | undefined,
+ ]
+ >;
+ createReadSession(
+ request: protos.google.cloud.bigquery.storage.v1beta2.ICreateReadSessionRequest,
+ options: CallOptions,
+ callback: Callback<
+ protos.google.cloud.bigquery.storage.v1beta2.IReadSession,
+ | protos.google.cloud.bigquery.storage.v1beta2.ICreateReadSessionRequest
+ | null
+ | undefined,
+ {} | null | undefined
+ >,
+ ): void;
+ createReadSession(
+ request: protos.google.cloud.bigquery.storage.v1beta2.ICreateReadSessionRequest,
+ callback: Callback<
+ protos.google.cloud.bigquery.storage.v1beta2.IReadSession,
+ | protos.google.cloud.bigquery.storage.v1beta2.ICreateReadSessionRequest
+ | null
+ | undefined,
+ {} | null | undefined
+ >,
+ ): void;
+ createReadSession(
+ request?: protos.google.cloud.bigquery.storage.v1beta2.ICreateReadSessionRequest,
+ optionsOrCallback?:
+ | CallOptions
+ | Callback<
+ protos.google.cloud.bigquery.storage.v1beta2.IReadSession,
+ | protos.google.cloud.bigquery.storage.v1beta2.ICreateReadSessionRequest
+ | null
+ | undefined,
+ {} | null | undefined
+ >,
+ callback?: Callback<
+ protos.google.cloud.bigquery.storage.v1beta2.IReadSession,
+ | protos.google.cloud.bigquery.storage.v1beta2.ICreateReadSessionRequest
+ | null
+ | undefined,
+ {} | null | undefined
+ >,
+ ): Promise<
+ [
+ protos.google.cloud.bigquery.storage.v1beta2.IReadSession,
+ (
+ | protos.google.cloud.bigquery.storage.v1beta2.ICreateReadSessionRequest
+ | undefined
+ ),
+ {} | undefined,
+ ]
+ > | void {
+ request = request || {};
+ let options: CallOptions;
+ if (typeof optionsOrCallback === 'function' && callback === undefined) {
+ callback = optionsOrCallback;
+ options = {};
+ } else {
+ options = optionsOrCallback as CallOptions;
+ }
+ options = options || {};
+ options.otherArgs = options.otherArgs || {};
+ options.otherArgs.headers = options.otherArgs.headers || {};
+ options.otherArgs.headers['x-goog-request-params'] =
+ this._gaxModule.routingHeader.fromParams({
+ 'read_session.table': request.readSession!.table ?? '',
+ });
+ this.initialize().catch((err) => {
+ throw err;
+ });
+ this._log.info('createReadSession request %j', request);
+ const wrappedCallback:
+ | Callback<
+ protos.google.cloud.bigquery.storage.v1beta2.IReadSession,
+ | protos.google.cloud.bigquery.storage.v1beta2.ICreateReadSessionRequest
+ | null
+ | undefined,
+ {} | null | undefined
+ >
+ | undefined = callback
+ ? (error, response, options, rawResponse) => {
+ this._log.info('createReadSession response %j', response);
+ callback!(error, response, options, rawResponse); // We verified callback above.
+ }
+ : undefined;
+ return this.innerApiCalls
+ .createReadSession(request, options, wrappedCallback)
+ ?.then(
+ ([response, options, rawResponse]: [
+ protos.google.cloud.bigquery.storage.v1beta2.IReadSession,
+ (
+ | protos.google.cloud.bigquery.storage.v1beta2.ICreateReadSessionRequest
+ | undefined
+ ),
+ {} | undefined,
+ ]) => {
+ this._log.info('createReadSession response %j', response);
+ return [response, options, rawResponse];
+ },
+ )
+ .catch((error: any) => {
+ if (
+ error &&
+ 'statusDetails' in error &&
+ error.statusDetails instanceof Array
+ ) {
+ const protos = this._gaxModule.protobuf.Root.fromJSON(
+ jsonProtos,
+ ) as unknown as gax.protobuf.Type;
+ error.statusDetails = decodeAnyProtosInArray(
+ error.statusDetails,
+ protos,
+ );
+ }
+ throw error;
+ });
+ }
+ /**
+ * Splits a given `ReadStream` into two `ReadStream` objects. These
+ * `ReadStream` objects are referred to as the primary and the residual
+ * streams of the split. The original `ReadStream` can still be read from in
+ * the same manner as before. Both of the returned `ReadStream` objects can
+ * also be read from, and the rows returned by both child streams will be
+ * the same as the rows read from the original stream.
+ *
+ * Moreover, the two child streams will be allocated back-to-back in the
+ * original `ReadStream`. Concretely, it is guaranteed that for streams
+ * original, primary, and residual, that original[0-j] = primary[0-j] and
+ * original[j-n] = residual[0-m] once the streams have been read to
+ * completion.
+ *
+ * @param {Object} request
+ * The request object that will be sent.
+ * @param {string} request.name
+ * Required. Name of the stream to split.
+ * @param {number} request.fraction
+ * A value in the range (0.0, 1.0) that specifies the fractional point at
+ * which the original stream should be split. The actual split point is
+ * evaluated on pre-filtered rows, so if a filter is provided, then there is
+ * no guarantee that the division of the rows between the new child streams
+ * will be proportional to this fractional value. Additionally, because the
+ * server-side unit for assigning data is collections of rows, this fraction
+ * will always map to a data storage boundary on the server side.
+ * @param {object} [options]
+ * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details.
+ * @returns {Promise} - The promise which resolves to an array.
+ * The first element of the array is an object representing {@link protos.google.cloud.bigquery.storage.v1beta2.SplitReadStreamResponse|SplitReadStreamResponse}.
+ * Please see the {@link https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods | documentation }
+ * for more details and examples.
+ * @example include:samples/generated/v1beta2/big_query_read.split_read_stream.js
+ * region_tag:bigquerystorage_v1beta2_generated_BigQueryRead_SplitReadStream_async
+ */
+ splitReadStream(
+ request?: protos.google.cloud.bigquery.storage.v1beta2.ISplitReadStreamRequest,
+ options?: CallOptions,
+ ): Promise<
+ [
+ protos.google.cloud.bigquery.storage.v1beta2.ISplitReadStreamResponse,
+ (
+ | protos.google.cloud.bigquery.storage.v1beta2.ISplitReadStreamRequest
+ | undefined
+ ),
+ {} | undefined,
+ ]
+ >;
+ splitReadStream(
+ request: protos.google.cloud.bigquery.storage.v1beta2.ISplitReadStreamRequest,
+ options: CallOptions,
+ callback: Callback<
+ protos.google.cloud.bigquery.storage.v1beta2.ISplitReadStreamResponse,
+ | protos.google.cloud.bigquery.storage.v1beta2.ISplitReadStreamRequest
+ | null
+ | undefined,
+ {} | null | undefined
+ >,
+ ): void;
+ splitReadStream(
+ request: protos.google.cloud.bigquery.storage.v1beta2.ISplitReadStreamRequest,
+ callback: Callback<
+ protos.google.cloud.bigquery.storage.v1beta2.ISplitReadStreamResponse,
+ | protos.google.cloud.bigquery.storage.v1beta2.ISplitReadStreamRequest
+ | null
+ | undefined,
+ {} | null | undefined
+ >,
+ ): void;
+ splitReadStream(
+ request?: protos.google.cloud.bigquery.storage.v1beta2.ISplitReadStreamRequest,
+ optionsOrCallback?:
+ | CallOptions
+ | Callback<
+ protos.google.cloud.bigquery.storage.v1beta2.ISplitReadStreamResponse,
+ | protos.google.cloud.bigquery.storage.v1beta2.ISplitReadStreamRequest
+ | null
+ | undefined,
+ {} | null | undefined
+ >,
+ callback?: Callback<
+ protos.google.cloud.bigquery.storage.v1beta2.ISplitReadStreamResponse,
+ | protos.google.cloud.bigquery.storage.v1beta2.ISplitReadStreamRequest
+ | null
+ | undefined,
+ {} | null | undefined
+ >,
+ ): Promise<
+ [
+ protos.google.cloud.bigquery.storage.v1beta2.ISplitReadStreamResponse,
+ (
+ | protos.google.cloud.bigquery.storage.v1beta2.ISplitReadStreamRequest
+ | undefined
+ ),
+ {} | undefined,
+ ]
+ > | void {
+ request = request || {};
+ let options: CallOptions;
+ if (typeof optionsOrCallback === 'function' && callback === undefined) {
+ callback = optionsOrCallback;
+ options = {};
+ } else {
+ options = optionsOrCallback as CallOptions;
+ }
+ options = options || {};
+ options.otherArgs = options.otherArgs || {};
+ options.otherArgs.headers = options.otherArgs.headers || {};
+ options.otherArgs.headers['x-goog-request-params'] =
+ this._gaxModule.routingHeader.fromParams({
+ name: request.name ?? '',
+ });
+ this.initialize().catch((err) => {
+ throw err;
+ });
+ this._log.info('splitReadStream request %j', request);
+ const wrappedCallback:
+ | Callback<
+ protos.google.cloud.bigquery.storage.v1beta2.ISplitReadStreamResponse,
+ | protos.google.cloud.bigquery.storage.v1beta2.ISplitReadStreamRequest
+ | null
+ | undefined,
+ {} | null | undefined
+ >
+ | undefined = callback
+ ? (error, response, options, rawResponse) => {
+ this._log.info('splitReadStream response %j', response);
+ callback!(error, response, options, rawResponse); // We verified callback above.
+ }
+ : undefined;
+ return this.innerApiCalls
+ .splitReadStream(request, options, wrappedCallback)
+ ?.then(
+ ([response, options, rawResponse]: [
+ protos.google.cloud.bigquery.storage.v1beta2.ISplitReadStreamResponse,
+ (
+ | protos.google.cloud.bigquery.storage.v1beta2.ISplitReadStreamRequest
+ | undefined
+ ),
+ {} | undefined,
+ ]) => {
+ this._log.info('splitReadStream response %j', response);
+ return [response, options, rawResponse];
+ },
+ )
+ .catch((error: any) => {
+ if (
+ error &&
+ 'statusDetails' in error &&
+ error.statusDetails instanceof Array
+ ) {
+ const protos = this._gaxModule.protobuf.Root.fromJSON(
+ jsonProtos,
+ ) as unknown as gax.protobuf.Type;
+ error.statusDetails = decodeAnyProtosInArray(
+ error.statusDetails,
+ protos,
+ );
+ }
+ throw error;
+ });
+ }
+
+ /**
+ * Reads rows from the stream in the format prescribed by the ReadSession.
+ * Each response contains one or more table rows, up to a maximum of 100 MiB
+ * per response; read requests which attempt to read individual rows larger
+ * than 100 MiB will fail.
+ *
+ * Each request also returns a set of stream statistics reflecting the current
+ * state of the stream.
+ *
+ * @param {Object} request
+ * The request object that will be sent.
+ * @param {string} request.readStream
+ * Required. Stream to read rows from.
+ * @param {number} request.offset
+ * The offset requested must be less than the last row read from Read.
+ * Requesting a larger offset is undefined. If not specified, start reading
+ * from offset zero.
+ * @param {object} [options]
+ * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details.
+ * @returns {Stream}
+ * An object stream which emits {@link protos.google.cloud.bigquery.storage.v1beta2.ReadRowsResponse|ReadRowsResponse} on 'data' event.
+ * Please see the {@link https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#server-streaming | documentation }
+ * for more details and examples.
+ * @example include:samples/generated/v1beta2/big_query_read.read_rows.js
+ * region_tag:bigquerystorage_v1beta2_generated_BigQueryRead_ReadRows_async
+ */
+ readRows(
+ request?: protos.google.cloud.bigquery.storage.v1beta2.IReadRowsRequest,
+ options?: CallOptions,
+ ): gax.CancellableStream {
+ request = request || {};
+ options = options || {};
+ options.otherArgs = options.otherArgs || {};
+ options.otherArgs.headers = options.otherArgs.headers || {};
+ options.otherArgs.headers['x-goog-request-params'] =
+ this._gaxModule.routingHeader.fromParams({
+ read_stream: request.readStream ?? '',
+ });
+ this.initialize().catch((err) => {
+ throw err;
+ });
+ this._log.info('readRows stream %j', options);
+ return this.innerApiCalls.readRows(request, options);
+ }
+
+ // --------------------
+ // -- Path templates --
+ // --------------------
+
+ /**
+ * Return a fully-qualified project resource name string.
+ *
+ * @param {string} project
+ * @returns {string} Resource name string.
+ */
+ projectPath(project: string) {
+ return this.pathTemplates.projectPathTemplate.render({
+ project: project,
+ });
+ }
+
+ /**
+ * Parse the project from Project resource.
+ *
+ * @param {string} projectName
+ * A fully-qualified path representing Project resource.
+ * @returns {string} A string representing the project.
+ */
+ matchProjectFromProjectName(projectName: string) {
+ return this.pathTemplates.projectPathTemplate.match(projectName).project;
+ }
+
+  /**
+   * Build the fully-qualified resource name string for a readSession.
+   *
+   * @param {string} project
+   * @param {string} location
+   * @param {string} session
+   * @returns {string} Resource name string.
+   */
+  readSessionPath(project: string, location: string, session: string) {
+    const params = { project, location, session };
+    return this.pathTemplates.readSessionPathTemplate.render(params);
+  }
+
+  /**
+   * Extract the project component from a ReadSession resource name.
+   *
+   * @param {string} readSessionName
+   *   A fully-qualified path representing ReadSession resource.
+   * @returns {string} A string representing the project.
+   */
+  matchProjectFromReadSessionName(readSessionName: string) {
+    const parsed =
+      this.pathTemplates.readSessionPathTemplate.match(readSessionName);
+    return parsed.project;
+  }
+
+  /**
+   * Extract the location component from a ReadSession resource name.
+   *
+   * @param {string} readSessionName
+   *   A fully-qualified path representing ReadSession resource.
+   * @returns {string} A string representing the location.
+   */
+  matchLocationFromReadSessionName(readSessionName: string) {
+    const parsed =
+      this.pathTemplates.readSessionPathTemplate.match(readSessionName);
+    return parsed.location;
+  }
+
+  /**
+   * Extract the session component from a ReadSession resource name.
+   *
+   * @param {string} readSessionName
+   *   A fully-qualified path representing ReadSession resource.
+   * @returns {string} A string representing the session.
+   */
+  matchSessionFromReadSessionName(readSessionName: string) {
+    const parsed =
+      this.pathTemplates.readSessionPathTemplate.match(readSessionName);
+    return parsed.session;
+  }
+
+  /**
+   * Build the fully-qualified resource name string for a readStream.
+   *
+   * @param {string} project
+   * @param {string} location
+   * @param {string} session
+   * @param {string} stream
+   * @returns {string} Resource name string.
+   */
+  readStreamPath(
+    project: string,
+    location: string,
+    session: string,
+    stream: string,
+  ) {
+    const params = { project, location, session, stream };
+    return this.pathTemplates.readStreamPathTemplate.render(params);
+  }
+
+  /**
+   * Extract the project component from a ReadStream resource name.
+   *
+   * @param {string} readStreamName
+   *   A fully-qualified path representing ReadStream resource.
+   * @returns {string} A string representing the project.
+   */
+  matchProjectFromReadStreamName(readStreamName: string) {
+    const parsed =
+      this.pathTemplates.readStreamPathTemplate.match(readStreamName);
+    return parsed.project;
+  }
+
+  /**
+   * Extract the location component from a ReadStream resource name.
+   *
+   * @param {string} readStreamName
+   *   A fully-qualified path representing ReadStream resource.
+   * @returns {string} A string representing the location.
+   */
+  matchLocationFromReadStreamName(readStreamName: string) {
+    const parsed =
+      this.pathTemplates.readStreamPathTemplate.match(readStreamName);
+    return parsed.location;
+  }
+
+  /**
+   * Extract the session component from a ReadStream resource name.
+   *
+   * @param {string} readStreamName
+   *   A fully-qualified path representing ReadStream resource.
+   * @returns {string} A string representing the session.
+   */
+  matchSessionFromReadStreamName(readStreamName: string) {
+    const parsed =
+      this.pathTemplates.readStreamPathTemplate.match(readStreamName);
+    return parsed.session;
+  }
+
+  /**
+   * Extract the stream component from a ReadStream resource name.
+   *
+   * @param {string} readStreamName
+   *   A fully-qualified path representing ReadStream resource.
+   * @returns {string} A string representing the stream.
+   */
+  matchStreamFromReadStreamName(readStreamName: string) {
+    const parsed =
+      this.pathTemplates.readStreamPathTemplate.match(readStreamName);
+    return parsed.stream;
+  }
+
+  /**
+   * Build the fully-qualified resource name string for a table.
+   *
+   * @param {string} project
+   * @param {string} dataset
+   * @param {string} table
+   * @returns {string} Resource name string.
+   */
+  tablePath(project: string, dataset: string, table: string) {
+    const params = { project, dataset, table };
+    return this.pathTemplates.tablePathTemplate.render(params);
+  }
+
+  /**
+   * Extract the project component from a Table resource name.
+   *
+   * @param {string} tableName
+   *   A fully-qualified path representing Table resource.
+   * @returns {string} A string representing the project.
+   */
+  matchProjectFromTableName(tableName: string) {
+    const parsed = this.pathTemplates.tablePathTemplate.match(tableName);
+    return parsed.project;
+  }
+
+  /**
+   * Extract the dataset component from a Table resource name.
+   *
+   * @param {string} tableName
+   *   A fully-qualified path representing Table resource.
+   * @returns {string} A string representing the dataset.
+   */
+  matchDatasetFromTableName(tableName: string) {
+    const parsed = this.pathTemplates.tablePathTemplate.match(tableName);
+    return parsed.dataset;
+  }
+
+  /**
+   * Extract the table component from a Table resource name.
+   *
+   * @param {string} tableName
+   *   A fully-qualified path representing Table resource.
+   * @returns {string} A string representing the table.
+   */
+  matchTableFromTableName(tableName: string) {
+    const parsed = this.pathTemplates.tablePathTemplate.match(tableName);
+    return parsed.table;
+  }
+
+  /**
+   * Build the fully-qualified resource name string for a writeStream.
+   *
+   * @param {string} project
+   * @param {string} dataset
+   * @param {string} table
+   * @param {string} stream
+   * @returns {string} Resource name string.
+   */
+  writeStreamPath(
+    project: string,
+    dataset: string,
+    table: string,
+    stream: string,
+  ) {
+    const params = { project, dataset, table, stream };
+    return this.pathTemplates.writeStreamPathTemplate.render(params);
+  }
+
+  /**
+   * Extract the project component from a WriteStream resource name.
+   *
+   * @param {string} writeStreamName
+   *   A fully-qualified path representing WriteStream resource.
+   * @returns {string} A string representing the project.
+   */
+  matchProjectFromWriteStreamName(writeStreamName: string) {
+    const parsed =
+      this.pathTemplates.writeStreamPathTemplate.match(writeStreamName);
+    return parsed.project;
+  }
+
+  /**
+   * Extract the dataset component from a WriteStream resource name.
+   *
+   * @param {string} writeStreamName
+   *   A fully-qualified path representing WriteStream resource.
+   * @returns {string} A string representing the dataset.
+   */
+  matchDatasetFromWriteStreamName(writeStreamName: string) {
+    const parsed =
+      this.pathTemplates.writeStreamPathTemplate.match(writeStreamName);
+    return parsed.dataset;
+  }
+
+  /**
+   * Extract the table component from a WriteStream resource name.
+   *
+   * @param {string} writeStreamName
+   *   A fully-qualified path representing WriteStream resource.
+   * @returns {string} A string representing the table.
+   */
+  matchTableFromWriteStreamName(writeStreamName: string) {
+    const parsed =
+      this.pathTemplates.writeStreamPathTemplate.match(writeStreamName);
+    return parsed.table;
+  }
+
+  /**
+   * Extract the stream component from a WriteStream resource name.
+   *
+   * @param {string} writeStreamName
+   *   A fully-qualified path representing WriteStream resource.
+   * @returns {string} A string representing the stream.
+   */
+  matchStreamFromWriteStreamName(writeStreamName: string) {
+    const parsed =
+      this.pathTemplates.writeStreamPathTemplate.match(writeStreamName);
+    return parsed.stream;
+  }
+
+ /**
+ * Terminate the gRPC channel and close the client.
+ *
+ * The client will no longer be usable and all future behavior is undefined.
+ * @returns {Promise} A promise that resolves when the client is closed.
+ */
+ close(): Promise {
+ if (this.bigQueryReadStub && !this._terminated) {
+ return this.bigQueryReadStub.then((stub) => {
+ this._log.info('ending gRPC channel');
+ this._terminated = true;
+ stub.close();
+ });
+ }
+ return Promise.resolve();
+ }
+}
diff --git a/handwritten/bigquery-storage/src/v1beta2/big_query_read_client_config.json b/handwritten/bigquery-storage/src/v1beta2/big_query_read_client_config.json
new file mode 100644
index 000000000000..67155e9fa09e
--- /dev/null
+++ b/handwritten/bigquery-storage/src/v1beta2/big_query_read_client_config.json
@@ -0,0 +1,44 @@
+{
+ "interfaces": {
+ "google.cloud.bigquery.storage.v1beta2.BigQueryRead": {
+ "retry_codes": {
+ "non_idempotent": [],
+ "idempotent": [
+ "DEADLINE_EXCEEDED",
+ "UNAVAILABLE"
+ ],
+ "unavailable": [
+ "UNAVAILABLE"
+ ]
+ },
+ "retry_params": {
+ "default": {
+ "initial_retry_delay_millis": 100,
+ "retry_delay_multiplier": 1.3,
+ "max_retry_delay_millis": 60000,
+ "initial_rpc_timeout_millis": 60000,
+ "rpc_timeout_multiplier": 1,
+ "max_rpc_timeout_millis": 60000,
+ "total_timeout_millis": 600000
+ }
+ },
+ "methods": {
+ "CreateReadSession": {
+ "timeout_millis": 600000,
+ "retry_codes_name": "idempotent",
+ "retry_params_name": "default"
+ },
+ "ReadRows": {
+ "timeout_millis": 86400000,
+ "retry_codes_name": "unavailable",
+ "retry_params_name": "default"
+ },
+ "SplitReadStream": {
+ "timeout_millis": 600000,
+ "retry_codes_name": "idempotent",
+ "retry_params_name": "default"
+ }
+ }
+ }
+ }
+}
diff --git a/handwritten/bigquery-storage/src/v1beta2/big_query_read_proto_list.json b/handwritten/bigquery-storage/src/v1beta2/big_query_read_proto_list.json
new file mode 100644
index 000000000000..3a0940a9eb76
--- /dev/null
+++ b/handwritten/bigquery-storage/src/v1beta2/big_query_read_proto_list.json
@@ -0,0 +1,8 @@
+[
+ "../../protos/google/cloud/bigquery/storage/v1beta2/arrow.proto",
+ "../../protos/google/cloud/bigquery/storage/v1beta2/avro.proto",
+ "../../protos/google/cloud/bigquery/storage/v1beta2/protobuf.proto",
+ "../../protos/google/cloud/bigquery/storage/v1beta2/storage.proto",
+ "../../protos/google/cloud/bigquery/storage/v1beta2/stream.proto",
+ "../../protos/google/cloud/bigquery/storage/v1beta2/table.proto"
+]
diff --git a/handwritten/bigquery-storage/src/v1beta2/big_query_write_client.ts b/handwritten/bigquery-storage/src/v1beta2/big_query_write_client.ts
new file mode 100644
index 000000000000..340810142f44
--- /dev/null
+++ b/handwritten/bigquery-storage/src/v1beta2/big_query_write_client.ts
@@ -0,0 +1,1536 @@
+// Copyright 2026 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ** This file is automatically generated by gapic-generator-typescript. **
+// ** https://github.com/googleapis/gapic-generator-typescript **
+// ** All changes to this file may be overwritten. **
+
+/* global window */
+import type * as gax from 'google-gax';
+import type {
+ Callback,
+ CallOptions,
+ Descriptors,
+ ClientOptions,
+} from 'google-gax';
+import { PassThrough } from 'stream';
+import * as protos from '../../protos/protos';
+import jsonProtos = require('../../protos/protos.json');
+import { loggingUtils as logging, decodeAnyProtosInArray } from 'google-gax';
+
+/**
+ * Client JSON configuration object, loaded from
+ * `src/v1beta2/big_query_write_client_config.json`.
+ * This file defines retry strategy and timeouts for all API methods in this library.
+ */
+import * as gapicConfig from './big_query_write_client_config.json';
+const version = require('../../../package.json').version;
+
+/**
+ * BigQuery Write API.
+ *
+ * The Write API can be used to write data to BigQuery.
+ *
+ *
+ * The [google.cloud.bigquery.storage.v1
+ * API](/bigquery/docs/reference/storage/rpc/google.cloud.bigquery.storage.v1)
+ * should be used instead of the v1beta2 API for BigQueryWrite operations.
+ * @class
+ * @memberof v1beta2
+ * @deprecated BigQueryWrite is deprecated and may be removed in a future version.
+ */
+export class BigQueryWriteClient {
+  // Flipped to true by close(); later calls fail fast on a closed client.
+  private _terminated = false;
+  // Options saved from the constructor for use in initialize().
+  private _opts: ClientOptions;
+  // True when the caller supplied servicePath/apiEndpoint explicitly.
+  private _providedCustomServicePath: boolean;
+  private _gaxModule: typeof gax | typeof gax.fallback;
+  private _gaxGrpc: gax.GrpcClient | gax.fallback.GrpcClient;
+  private _protos: {};
+  // Per-method default call settings built from the client config JSON.
+  private _defaults: { [method: string]: gax.CallSettings };
+  private _universeDomain: string;
+  private _servicePath: string;
+  private _log = logging.log('storage');
+
+  auth: gax.GoogleAuth;
+  descriptors: Descriptors = {
+    page: {},
+    stream: {},
+    longrunning: {},
+    batching: {},
+  };
+  warn: (code: string, message: string, warnType?: string) => void;
+  innerApiCalls: { [name: string]: Function };
+  pathTemplates: { [name: string]: gax.PathTemplate };
+  // Lazily-created service stub promise; undefined until initialize() runs.
+  bigQueryWriteStub?: Promise<{ [name: string]: Function }>;
+
+ /**
+ * Construct an instance of BigQueryWriteClient.
+ *
+ * @param {object} [options] - The configuration object.
+ * The options accepted by the constructor are described in detail
+ * in [this document](https://github.com/googleapis/gax-nodejs/blob/main/client-libraries.md#creating-the-client-instance).
+ * The common options are:
+ * @param {object} [options.credentials] - Credentials object.
+ * @param {string} [options.credentials.client_email]
+ * @param {string} [options.credentials.private_key]
+ * @param {string} [options.email] - Account email address. Required when
+ * using a .pem or .p12 keyFilename.
+ * @param {string} [options.keyFilename] - Full path to the a .json, .pem, or
+ * .p12 key downloaded from the Google Developers Console. If you provide
+ * a path to a JSON file, the projectId option below is not necessary.
+ * NOTE: .pem and .p12 require you to specify options.email as well.
+ * @param {number} [options.port] - The port on which to connect to
+ * the remote host.
+ * @param {string} [options.projectId] - The project ID from the Google
+ * Developer's Console, e.g. 'grape-spaceship-123'. We will also check
+ * the environment variable GCLOUD_PROJECT for your project ID. If your
+ * app is running in an environment which supports
+ * {@link https://cloud.google.com/docs/authentication/application-default-credentials Application Default Credentials},
+ * your project ID will be detected automatically.
+ * @param {string} [options.apiEndpoint] - The domain name of the
+ * API remote host.
+ * @param {gax.ClientConfig} [options.clientConfig] - Client configuration override.
+ * Follows the structure of {@link gapicConfig}.
+ * @param {boolean} [options.fallback] - Use HTTP/1.1 REST mode.
+ * For more information, please check the
+ * {@link https://github.com/googleapis/gax-nodejs/blob/main/client-libraries.md#http11-rest-api-mode documentation}.
+ * @param {gax} [gaxInstance]: loaded instance of `google-gax`. Useful if you
+ * need to avoid loading the default gRPC version and want to use the fallback
+ * HTTP implementation. Load only fallback version and pass it to the constructor:
+ * ```
+ * const gax = require('google-gax/build/src/fallback'); // avoids loading google-gax with gRPC
+ * const client = new BigQueryWriteClient({fallback: true}, gax);
+ * ```
+ */
+  constructor(
+    opts?: ClientOptions,
+    gaxInstance?: typeof gax | typeof gax.fallback,
+  ) {
+    // Ensure that options include all the required fields.
+    const staticMembers = this.constructor as typeof BigQueryWriteClient;
+    // Reject ambiguous configuration: both spellings of the universe domain
+    // option must agree when both are provided.
+    if (
+      opts?.universe_domain &&
+      opts?.universeDomain &&
+      opts?.universe_domain !== opts?.universeDomain
+    ) {
+      throw new Error(
+        'Please set either universe_domain or universeDomain, but not both.',
+      );
+    }
+    // Resolution order: option (either spelling), then the
+    // GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variable, then the default.
+    const universeDomainEnvVar =
+      typeof process === 'object' && typeof process.env === 'object'
+        ? process.env['GOOGLE_CLOUD_UNIVERSE_DOMAIN']
+        : undefined;
+    this._universeDomain =
+      opts?.universeDomain ??
+      opts?.universe_domain ??
+      universeDomainEnvVar ??
+      'googleapis.com';
+    this._servicePath = 'bigquerystorage.' + this._universeDomain;
+    const servicePath =
+      opts?.servicePath || opts?.apiEndpoint || this._servicePath;
+    // Remember whether the caller supplied an explicit endpoint; this is
+    // passed to createStub() in initialize().
+    this._providedCustomServicePath = !!(
+      opts?.servicePath || opts?.apiEndpoint
+    );
+    const port = opts?.port || staticMembers.port;
+    const clientConfig = opts?.clientConfig ?? {};
+    // Default to the fetch-based fallback transport when running in a
+    // browser-like environment and the caller did not choose explicitly.
+    const fallback =
+      opts?.fallback ??
+      (typeof window !== 'undefined' && typeof window?.fetch === 'function');
+    opts = Object.assign({ servicePath, port, clientConfig, fallback }, opts);
+
+    // If scopes are unset in options and we're connecting to a non-default endpoint, set scopes just in case.
+    if (servicePath !== this._servicePath && !('scopes' in opts)) {
+      opts['scopes'] = staticMembers.scopes;
+    }
+
+    // Load google-gax module synchronously if needed
+    if (!gaxInstance) {
+      gaxInstance = require('google-gax') as typeof gax;
+    }
+
+    // Choose either gRPC or proto-over-HTTP implementation of google-gax.
+    this._gaxModule = opts.fallback ? gaxInstance.fallback : gaxInstance;
+
+    // Create a `gaxGrpc` object, with any grpc-specific options sent to the client.
+    this._gaxGrpc = new this._gaxModule.GrpcClient(opts);
+
+    // Save options to use in initialize() method.
+    this._opts = opts;
+
+    // Save the auth object to the client, for use by other methods.
+    this.auth = this._gaxGrpc.auth as gax.GoogleAuth;
+
+    // Set useJWTAccessWithScope on the auth object.
+    this.auth.useJWTAccessWithScope = true;
+
+    // Set defaultServicePath on the auth object.
+    this.auth.defaultServicePath = this._servicePath;
+
+    // Set the default scopes in auth client if needed.
+    if (servicePath === this._servicePath) {
+      this.auth.defaultScopes = staticMembers.scopes;
+    }
+
+    // Determine the client header string (reported via x-goog-api-client).
+    const clientHeader = [`gax/${this._gaxModule.version}`, `gapic/${version}`];
+    if (typeof process === 'object' && 'versions' in process) {
+      clientHeader.push(`gl-node/${process.versions.node}`);
+    } else {
+      clientHeader.push(`gl-web/${this._gaxModule.version}`);
+    }
+    if (!opts.fallback) {
+      clientHeader.push(`grpc/${this._gaxGrpc.grpcVersion}`);
+    } else {
+      clientHeader.push(`rest/${this._gaxGrpc.grpcVersion}`);
+    }
+    if (opts.libName && opts.libVersion) {
+      clientHeader.push(`${opts.libName}/${opts.libVersion}`);
+    }
+    // Load the applicable protos.
+    this._protos = this._gaxGrpc.loadProtoJSON(jsonProtos);
+
+    // This API contains "path templates"; forward-slash-separated
+    // identifiers to uniquely identify resources within the API.
+    // Create useful helper objects for these.
+    this.pathTemplates = {
+      projectPathTemplate: new this._gaxModule.PathTemplate(
+        'projects/{project}',
+      ),
+      readSessionPathTemplate: new this._gaxModule.PathTemplate(
+        'projects/{project}/locations/{location}/sessions/{session}',
+      ),
+      readStreamPathTemplate: new this._gaxModule.PathTemplate(
+        'projects/{project}/locations/{location}/sessions/{session}/streams/{stream}',
+      ),
+      tablePathTemplate: new this._gaxModule.PathTemplate(
+        'projects/{project}/datasets/{dataset}/tables/{table}',
+      ),
+      writeStreamPathTemplate: new this._gaxModule.PathTemplate(
+        'projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}',
+      ),
+    };
+
+    // Some of the methods on this service provide streaming responses.
+    // Provide descriptors for these.
+    this.descriptors.stream = {
+      appendRows: new this._gaxModule.StreamDescriptor(
+        this._gaxModule.StreamType.BIDI_STREAMING,
+        !!opts.fallback,
+        !!opts.gaxServerStreamingRetries,
+      ),
+    };
+
+    // Put together the default options sent with requests.
+    this._defaults = this._gaxGrpc.constructSettings(
+      'google.cloud.bigquery.storage.v1beta2.BigQueryWrite',
+      gapicConfig as gax.ClientConfig,
+      opts.clientConfig || {},
+      { 'x-goog-api-client': clientHeader.join(' ') },
+    );
+
+    // Set up a dictionary of "inner API calls"; the core implementation
+    // of calling the API is handled in `google-gax`, with this code
+    // merely providing the destination and request information.
+    this.innerApiCalls = {};
+
+    // Add a warn function to the client constructor so it can be easily tested.
+    this.warn = this._gaxModule.warn;
+  }
+
+ /**
+ * Initialize the client.
+ * Performs asynchronous operations (such as authentication) and prepares the client.
+ * This function will be called automatically when any class method is called for the
+ * first time, but if you need to initialize it before calling an actual method,
+ * feel free to call initialize() directly.
+ *
+ * You can await on this method if you want to make sure the client is initialized.
+ *
+ * @returns {Promise} A promise that resolves to an authenticated service stub.
+ */
+  initialize() {
+    // If the client stub promise is already initialized, return immediately.
+    if (this.bigQueryWriteStub) {
+      // Re-emit the deprecation warning on every call so callers notice it.
+      this.warn(
+        'DEP$BigQueryWrite',
+        'BigQueryWrite is deprecated and may be removed in a future version.',
+        'DeprecationWarning',
+      );
+      return this.bigQueryWriteStub;
+    }
+
+    // Put together the "service stub" for
+    // google.cloud.bigquery.storage.v1beta2.BigQueryWrite.
+    this.bigQueryWriteStub = this._gaxGrpc.createStub(
+      this._opts.fallback
+        ? (this._protos as protobuf.Root).lookupService(
+            'google.cloud.bigquery.storage.v1beta2.BigQueryWrite',
+          )
+        : // eslint-disable-next-line @typescript-eslint/no-explicit-any
+          (this._protos as any).google.cloud.bigquery.storage.v1beta2
+            .BigQueryWrite,
+      this._opts,
+      this._providedCustomServicePath,
+    ) as Promise<{ [method: string]: Function }>;
+
+    // Iterate over each of the methods that the service provides
+    // and create an API call method for each.
+    const bigQueryWriteStubMethods = [
+      'createWriteStream',
+      'appendRows',
+      'getWriteStream',
+      'finalizeWriteStream',
+      'batchCommitWriteStreams',
+      'flushRows',
+    ];
+    for (const methodName of bigQueryWriteStubMethods) {
+      const callPromise = this.bigQueryWriteStub.then(
+        (stub) =>
+          (...args: Array<{}>) => {
+            if (this._terminated) {
+              // For streaming methods, surface the closed-client error on the
+              // returned stream rather than as a rejected promise.
+              if (methodName in this.descriptors.stream) {
+                const stream = new PassThrough({ objectMode: true });
+                setImmediate(() => {
+                  stream.emit(
+                    'error',
+                    new this._gaxModule.GoogleError(
+                      'The client has already been closed.',
+                    ),
+                  );
+                });
+                return stream;
+              }
+              return Promise.reject('The client has already been closed.');
+            }
+            const func = stub[methodName];
+            return func.apply(stub, args);
+          },
+        (err: Error | null | undefined) => () => {
+          throw err;
+        },
+      );
+
+      // Streaming methods carry a descriptor; unary methods use undefined.
+      const descriptor = this.descriptors.stream[methodName] || undefined;
+      const apiCall = this._gaxModule.createApiCall(
+        callPromise,
+        this._defaults[methodName],
+        descriptor,
+        this._opts.fallback,
+      );
+
+      this.innerApiCalls[methodName] = apiCall;
+    }
+    this.warn(
+      'DEP$BigQueryWrite',
+      'BigQueryWrite is deprecated and may be removed in a future version.',
+      'DeprecationWarning',
+    );
+
+    return this.bigQueryWriteStub;
+  }
+
+  /**
+   * The DNS address for this API service.
+   * @deprecated Use the apiEndpoint method of the client instance.
+   * @returns {string} The DNS address for this service.
+   */
+  static get servicePath() {
+    const canWarn =
+      typeof process === 'object' && typeof process.emitWarning === 'function';
+    if (canWarn) {
+      process.emitWarning(
+        'Static servicePath is deprecated, please use the instance method instead.',
+        'DeprecationWarning',
+      );
+    }
+    return 'bigquerystorage.googleapis.com';
+  }
+
+  /**
+   * The DNS address for this API service - same as servicePath.
+   * @deprecated Use the apiEndpoint method of the client instance.
+   * @returns {string} The DNS address for this service.
+   */
+  static get apiEndpoint() {
+    const canWarn =
+      typeof process === 'object' && typeof process.emitWarning === 'function';
+    if (canWarn) {
+      process.emitWarning(
+        'Static apiEndpoint is deprecated, please use the instance method instead.',
+        'DeprecationWarning',
+      );
+    }
+    return 'bigquerystorage.googleapis.com';
+  }
+
+  /**
+   * The DNS address for this API service.
+   * @returns {string} The DNS address for this service.
+   */
+  get apiEndpoint() {
+    // Reflects the universe domain resolved in the constructor.
+    return this._servicePath;
+  }
+
+  /**
+   * The universe domain this client is configured for
+   * ('googleapis.com' unless overridden via options or environment).
+   * @returns {string} The universe domain.
+   */
+  get universeDomain() {
+    return this._universeDomain;
+  }
+
+  /**
+   * The port for this API service.
+   * @returns {number} The default port for this service.
+   */
+  static get port() {
+    // Standard TLS port used for the gRPC/HTTPS endpoint.
+    return 443;
+  }
+
+  /**
+   * The scopes needed to make gRPC calls for every method defined
+   * in this service.
+   * @returns {string[]} List of default scopes.
+   */
+  static get scopes() {
+    // Default OAuth scopes requested when the caller does not supply any
+    // (see the constructor, which assigns these to auth.defaultScopes).
+    return [
+      'https://www.googleapis.com/auth/bigquery',
+      'https://www.googleapis.com/auth/bigquery.insertdata',
+      'https://www.googleapis.com/auth/cloud-platform',
+    ];
+  }
+
+  getProjectId(): Promise<string>;
+  getProjectId(callback: Callback<string, undefined, undefined>): void;
+  /**
+   * Return the project ID used by this class.
+   * @param {function} [callback] - Optional node-style callback; when omitted
+   *   a promise is returned instead.
+   * @returns {Promise<string>} A promise that resolves to string containing the project ID.
+   */
+  getProjectId(
+    callback?: Callback<string, undefined, undefined>,
+  ): Promise<string> | void {
+    // Callback style: delegate and return nothing.
+    if (callback) {
+      this.auth.getProjectId(callback);
+      return;
+    }
+    // Promise style.
+    return this.auth.getProjectId();
+  }
+
+ // -------------------
+ // -- Service calls --
+ // -------------------
+ /**
+ * Creates a write stream to the given table.
+ * Additionally, every table has a special COMMITTED stream named '_default'
+ * to which data can be written. This stream doesn't need to be created using
+ * CreateWriteStream. It is a stream that can be used simultaneously by any
+ * number of clients. Data written to this stream is considered committed as
+ * soon as an acknowledgement is received.
+ *
+ * @param {Object} request
+ * The request object that will be sent.
+ * @param {string} request.parent
+ * Required. Reference to the table to which the stream belongs, in the format
+ * of `projects/{project}/datasets/{dataset}/tables/{table}`.
+ * @param {google.cloud.bigquery.storage.v1beta2.WriteStream} request.writeStream
+ * Required. Stream to be created.
+ * @param {object} [options]
+ * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details.
+ * @returns {Promise} - The promise which resolves to an array.
+ * The first element of the array is an object representing {@link protos.google.cloud.bigquery.storage.v1beta2.WriteStream|WriteStream}.
+ * Please see the {@link https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods | documentation }
+ * for more details and examples.
+ * @example include:samples/generated/v1beta2/big_query_write.create_write_stream.js
+ * region_tag:bigquerystorage_v1beta2_generated_BigQueryWrite_CreateWriteStream_async
+ * @deprecated CreateWriteStream is deprecated and may be removed in a future version.
+ */
+  createWriteStream(
+    request?: protos.google.cloud.bigquery.storage.v1beta2.ICreateWriteStreamRequest,
+    options?: CallOptions,
+  ): Promise<
+    [
+      protos.google.cloud.bigquery.storage.v1beta2.IWriteStream,
+      (
+        | protos.google.cloud.bigquery.storage.v1beta2.ICreateWriteStreamRequest
+        | undefined
+      ),
+      {} | undefined,
+    ]
+  >;
+  createWriteStream(
+    request: protos.google.cloud.bigquery.storage.v1beta2.ICreateWriteStreamRequest,
+    options: CallOptions,
+    callback: Callback<
+      protos.google.cloud.bigquery.storage.v1beta2.IWriteStream,
+      | protos.google.cloud.bigquery.storage.v1beta2.ICreateWriteStreamRequest
+      | null
+      | undefined,
+      {} | null | undefined
+    >,
+  ): void;
+  createWriteStream(
+    request: protos.google.cloud.bigquery.storage.v1beta2.ICreateWriteStreamRequest,
+    callback: Callback<
+      protos.google.cloud.bigquery.storage.v1beta2.IWriteStream,
+      | protos.google.cloud.bigquery.storage.v1beta2.ICreateWriteStreamRequest
+      | null
+      | undefined,
+      {} | null | undefined
+    >,
+  ): void;
+  createWriteStream(
+    request?: protos.google.cloud.bigquery.storage.v1beta2.ICreateWriteStreamRequest,
+    optionsOrCallback?:
+      | CallOptions
+      | Callback<
+          protos.google.cloud.bigquery.storage.v1beta2.IWriteStream,
+          | protos.google.cloud.bigquery.storage.v1beta2.ICreateWriteStreamRequest
+          | null
+          | undefined,
+          {} | null | undefined
+        >,
+    callback?: Callback<
+      protos.google.cloud.bigquery.storage.v1beta2.IWriteStream,
+      | protos.google.cloud.bigquery.storage.v1beta2.ICreateWriteStreamRequest
+      | null
+      | undefined,
+      {} | null | undefined
+    >,
+  ): Promise<
+    [
+      protos.google.cloud.bigquery.storage.v1beta2.IWriteStream,
+      (
+        | protos.google.cloud.bigquery.storage.v1beta2.ICreateWriteStreamRequest
+        | undefined
+      ),
+      {} | undefined,
+    ]
+  > | void {
+    request = request || {};
+    // Detect the (request, callback) overload: the second positional
+    // argument is the callback when no options object was provided.
+    let options: CallOptions;
+    if (typeof optionsOrCallback === 'function' && callback === undefined) {
+      callback = optionsOrCallback;
+      options = {};
+    } else {
+      options = optionsOrCallback as CallOptions;
+    }
+    options = options || {};
+    options.otherArgs = options.otherArgs || {};
+    options.otherArgs.headers = options.otherArgs.headers || {};
+    // Route the request by the parent table resource name.
+    options.otherArgs.headers['x-goog-request-params'] =
+      this._gaxModule.routingHeader.fromParams({
+        parent: request.parent ?? '',
+      });
+    this.initialize().catch((err) => {
+      throw err;
+    });
+    this.warn(
+      'DEP$BigQueryWrite-$CreateWriteStream',
+      'CreateWriteStream is deprecated and may be removed in a future version.',
+      'DeprecationWarning',
+    );
+    this._log.info('createWriteStream request %j', request);
+    // Wrap the user callback (if any) so responses are logged before delivery.
+    const wrappedCallback:
+      | Callback<
+          protos.google.cloud.bigquery.storage.v1beta2.IWriteStream,
+          | protos.google.cloud.bigquery.storage.v1beta2.ICreateWriteStreamRequest
+          | null
+          | undefined,
+          {} | null | undefined
+        >
+      | undefined = callback
+      ? (error, response, options, rawResponse) => {
+          this._log.info('createWriteStream response %j', response);
+          callback!(error, response, options, rawResponse); // We verified callback above.
+        }
+      : undefined;
+    return this.innerApiCalls
+      .createWriteStream(request, options, wrappedCallback)
+      ?.then(
+        ([response, options, rawResponse]: [
+          protos.google.cloud.bigquery.storage.v1beta2.IWriteStream,
+          (
+            | protos.google.cloud.bigquery.storage.v1beta2.ICreateWriteStreamRequest
+            | undefined
+          ),
+          {} | undefined,
+        ]) => {
+          this._log.info('createWriteStream response %j', response);
+          return [response, options, rawResponse];
+        },
+      )
+      .catch((error: any) => {
+        // Decode packed google.protobuf.Any entries in statusDetails so
+        // callers receive structured error details instead of raw bytes.
+        if (
+          error &&
+          'statusDetails' in error &&
+          error.statusDetails instanceof Array
+        ) {
+          const protos = this._gaxModule.protobuf.Root.fromJSON(
+            jsonProtos,
+          ) as unknown as gax.protobuf.Type;
+          error.statusDetails = decodeAnyProtosInArray(
+            error.statusDetails,
+            protos,
+          );
+        }
+        throw error;
+      });
+  }
+ /**
+ * Gets a write stream.
+ *
+ * @param {Object} request
+ * The request object that will be sent.
+ * @param {string} request.name
+ * Required. Name of the stream to get, in the form of
+ * `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`.
+ * @param {object} [options]
+ * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details.
+ * @returns {Promise} - The promise which resolves to an array.
+ * The first element of the array is an object representing {@link protos.google.cloud.bigquery.storage.v1beta2.WriteStream|WriteStream}.
+ * Please see the {@link https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods | documentation }
+ * for more details and examples.
+ * @example include:samples/generated/v1beta2/big_query_write.get_write_stream.js
+ * region_tag:bigquerystorage_v1beta2_generated_BigQueryWrite_GetWriteStream_async
+ * @deprecated GetWriteStream is deprecated and may be removed in a future version.
+ */
+ getWriteStream(
+ request?: protos.google.cloud.bigquery.storage.v1beta2.IGetWriteStreamRequest,
+ options?: CallOptions,
+ ): Promise<
+ [
+ protos.google.cloud.bigquery.storage.v1beta2.IWriteStream,
+ (
+ | protos.google.cloud.bigquery.storage.v1beta2.IGetWriteStreamRequest
+ | undefined
+ ),
+ {} | undefined,
+ ]
+ >;
+ getWriteStream(
+ request: protos.google.cloud.bigquery.storage.v1beta2.IGetWriteStreamRequest,
+ options: CallOptions,
+ callback: Callback<
+ protos.google.cloud.bigquery.storage.v1beta2.IWriteStream,
+ | protos.google.cloud.bigquery.storage.v1beta2.IGetWriteStreamRequest
+ | null
+ | undefined,
+ {} | null | undefined
+ >,
+ ): void;
+ getWriteStream(
+ request: protos.google.cloud.bigquery.storage.v1beta2.IGetWriteStreamRequest,
+ callback: Callback<
+ protos.google.cloud.bigquery.storage.v1beta2.IWriteStream,
+ | protos.google.cloud.bigquery.storage.v1beta2.IGetWriteStreamRequest
+ | null
+ | undefined,
+ {} | null | undefined
+ >,
+ ): void;
+ getWriteStream(
+ request?: protos.google.cloud.bigquery.storage.v1beta2.IGetWriteStreamRequest,
+ optionsOrCallback?:
+ | CallOptions
+ | Callback<
+ protos.google.cloud.bigquery.storage.v1beta2.IWriteStream,
+ | protos.google.cloud.bigquery.storage.v1beta2.IGetWriteStreamRequest
+ | null
+ | undefined,
+ {} | null | undefined
+ >,
+ callback?: Callback<
+ protos.google.cloud.bigquery.storage.v1beta2.IWriteStream,
+ | protos.google.cloud.bigquery.storage.v1beta2.IGetWriteStreamRequest
+ | null
+ | undefined,
+ {} | null | undefined
+ >,
+ ): Promise<
+ [
+ protos.google.cloud.bigquery.storage.v1beta2.IWriteStream,
+ (
+ | protos.google.cloud.bigquery.storage.v1beta2.IGetWriteStreamRequest
+ | undefined
+ ),
+ {} | undefined,
+ ]
+ > | void {
+ request = request || {};
+ let options: CallOptions;
+ if (typeof optionsOrCallback === 'function' && callback === undefined) {
+ callback = optionsOrCallback;
+ options = {};
+ } else {
+ options = optionsOrCallback as CallOptions;
+ }
+ options = options || {};
+ options.otherArgs = options.otherArgs || {};
+ options.otherArgs.headers = options.otherArgs.headers || {};
+ options.otherArgs.headers['x-goog-request-params'] =
+ this._gaxModule.routingHeader.fromParams({
+ name: request.name ?? '',
+ });
+ this.initialize().catch((err) => {
+ throw err;
+ });
+ this.warn(
+ 'DEP$BigQueryWrite-$GetWriteStream',
+ 'GetWriteStream is deprecated and may be removed in a future version.',
+ 'DeprecationWarning',
+ );
+ this._log.info('getWriteStream request %j', request);
+ const wrappedCallback:
+ | Callback<
+ protos.google.cloud.bigquery.storage.v1beta2.IWriteStream,
+ | protos.google.cloud.bigquery.storage.v1beta2.IGetWriteStreamRequest
+ | null
+ | undefined,
+ {} | null | undefined
+ >
+ | undefined = callback
+ ? (error, response, options, rawResponse) => {
+ this._log.info('getWriteStream response %j', response);
+ callback!(error, response, options, rawResponse); // We verified callback above.
+ }
+ : undefined;
+ return this.innerApiCalls
+ .getWriteStream(request, options, wrappedCallback)
+ ?.then(
+ ([response, options, rawResponse]: [
+ protos.google.cloud.bigquery.storage.v1beta2.IWriteStream,
+ (
+ | protos.google.cloud.bigquery.storage.v1beta2.IGetWriteStreamRequest
+ | undefined
+ ),
+ {} | undefined,
+ ]) => {
+ this._log.info('getWriteStream response %j', response);
+ return [response, options, rawResponse];
+ },
+ )
+ .catch((error: any) => {
+ if (
+ error &&
+ 'statusDetails' in error &&
+ error.statusDetails instanceof Array
+ ) {
+ const protos = this._gaxModule.protobuf.Root.fromJSON(
+ jsonProtos,
+ ) as unknown as gax.protobuf.Type;
+ error.statusDetails = decodeAnyProtosInArray(
+ error.statusDetails,
+ protos,
+ );
+ }
+ throw error;
+ });
+ }
+ /**
+ * Finalize a write stream so that no new data can be appended to the
+ * stream. Finalize is not supported on the '_default' stream.
+ *
+ * @param {Object} request
+ * The request object that will be sent.
+ * @param {string} request.name
+ * Required. Name of the stream to finalize, in the form of
+ * `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`.
+ * @param {object} [options]
+ * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details.
+ * @returns {Promise} - The promise which resolves to an array.
+ * The first element of the array is an object representing {@link protos.google.cloud.bigquery.storage.v1beta2.FinalizeWriteStreamResponse|FinalizeWriteStreamResponse}.
+ * Please see the {@link https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods | documentation }
+ * for more details and examples.
+ * @example include:samples/generated/v1beta2/big_query_write.finalize_write_stream.js
+ * region_tag:bigquerystorage_v1beta2_generated_BigQueryWrite_FinalizeWriteStream_async
+ * @deprecated FinalizeWriteStream is deprecated and may be removed in a future version.
+ */
+ finalizeWriteStream(
+ request?: protos.google.cloud.bigquery.storage.v1beta2.IFinalizeWriteStreamRequest,
+ options?: CallOptions,
+ ): Promise<
+ [
+ protos.google.cloud.bigquery.storage.v1beta2.IFinalizeWriteStreamResponse,
+ (
+ | protos.google.cloud.bigquery.storage.v1beta2.IFinalizeWriteStreamRequest
+ | undefined
+ ),
+ {} | undefined,
+ ]
+ >;
+ finalizeWriteStream(
+ request: protos.google.cloud.bigquery.storage.v1beta2.IFinalizeWriteStreamRequest,
+ options: CallOptions,
+ callback: Callback<
+ protos.google.cloud.bigquery.storage.v1beta2.IFinalizeWriteStreamResponse,
+ | protos.google.cloud.bigquery.storage.v1beta2.IFinalizeWriteStreamRequest
+ | null
+ | undefined,
+ {} | null | undefined
+ >,
+ ): void;
+ finalizeWriteStream(
+ request: protos.google.cloud.bigquery.storage.v1beta2.IFinalizeWriteStreamRequest,
+ callback: Callback<
+ protos.google.cloud.bigquery.storage.v1beta2.IFinalizeWriteStreamResponse,
+ | protos.google.cloud.bigquery.storage.v1beta2.IFinalizeWriteStreamRequest
+ | null
+ | undefined,
+ {} | null | undefined
+ >,
+ ): void;
+ finalizeWriteStream(
+ request?: protos.google.cloud.bigquery.storage.v1beta2.IFinalizeWriteStreamRequest,
+ optionsOrCallback?:
+ | CallOptions
+ | Callback<
+ protos.google.cloud.bigquery.storage.v1beta2.IFinalizeWriteStreamResponse,
+ | protos.google.cloud.bigquery.storage.v1beta2.IFinalizeWriteStreamRequest
+ | null
+ | undefined,
+ {} | null | undefined
+ >,
+ callback?: Callback<
+ protos.google.cloud.bigquery.storage.v1beta2.IFinalizeWriteStreamResponse,
+ | protos.google.cloud.bigquery.storage.v1beta2.IFinalizeWriteStreamRequest
+ | null
+ | undefined,
+ {} | null | undefined
+ >,
+ ): Promise<
+ [
+ protos.google.cloud.bigquery.storage.v1beta2.IFinalizeWriteStreamResponse,
+ (
+ | protos.google.cloud.bigquery.storage.v1beta2.IFinalizeWriteStreamRequest
+ | undefined
+ ),
+ {} | undefined,
+ ]
+ > | void {
+ request = request || {};
+ let options: CallOptions;
+ if (typeof optionsOrCallback === 'function' && callback === undefined) {
+ callback = optionsOrCallback;
+ options = {};
+ } else {
+ options = optionsOrCallback as CallOptions;
+ }
+ options = options || {};
+ options.otherArgs = options.otherArgs || {};
+ options.otherArgs.headers = options.otherArgs.headers || {};
+ options.otherArgs.headers['x-goog-request-params'] =
+ this._gaxModule.routingHeader.fromParams({
+ name: request.name ?? '',
+ });
+ this.initialize().catch((err) => {
+ throw err;
+ });
+ this.warn(
+ 'DEP$BigQueryWrite-$FinalizeWriteStream',
+ 'FinalizeWriteStream is deprecated and may be removed in a future version.',
+ 'DeprecationWarning',
+ );
+ this._log.info('finalizeWriteStream request %j', request);
+ const wrappedCallback:
+ | Callback<
+ protos.google.cloud.bigquery.storage.v1beta2.IFinalizeWriteStreamResponse,
+ | protos.google.cloud.bigquery.storage.v1beta2.IFinalizeWriteStreamRequest
+ | null
+ | undefined,
+ {} | null | undefined
+ >
+ | undefined = callback
+ ? (error, response, options, rawResponse) => {
+ this._log.info('finalizeWriteStream response %j', response);
+ callback!(error, response, options, rawResponse); // We verified callback above.
+ }
+ : undefined;
+ return this.innerApiCalls
+ .finalizeWriteStream(request, options, wrappedCallback)
+ ?.then(
+ ([response, options, rawResponse]: [
+ protos.google.cloud.bigquery.storage.v1beta2.IFinalizeWriteStreamResponse,
+ (
+ | protos.google.cloud.bigquery.storage.v1beta2.IFinalizeWriteStreamRequest
+ | undefined
+ ),
+ {} | undefined,
+ ]) => {
+ this._log.info('finalizeWriteStream response %j', response);
+ return [response, options, rawResponse];
+ },
+ )
+ .catch((error: any) => {
+ if (
+ error &&
+ 'statusDetails' in error &&
+ error.statusDetails instanceof Array
+ ) {
+ const protos = this._gaxModule.protobuf.Root.fromJSON(
+ jsonProtos,
+ ) as unknown as gax.protobuf.Type;
+ error.statusDetails = decodeAnyProtosInArray(
+ error.statusDetails,
+ protos,
+ );
+ }
+ throw error;
+ });
+ }
+ /**
+ * Atomically commits a group of `PENDING` streams that belong to the same
+ * `parent` table.
+ * Streams must be finalized before commit and cannot be committed multiple
+ * times. Once a stream is committed, data in the stream becomes available
+ * for read operations.
+ *
+ * @param {Object} request
+ * The request object that will be sent.
+ * @param {string} request.parent
+ * Required. Parent table that all the streams should belong to, in the form
+ * of `projects/{project}/datasets/{dataset}/tables/{table}`.
+ * @param {string[]} request.writeStreams
+ * Required. The group of streams that will be committed atomically.
+ * @param {object} [options]
+ * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details.
+ * @returns {Promise} - The promise which resolves to an array.
+ * The first element of the array is an object representing {@link protos.google.cloud.bigquery.storage.v1beta2.BatchCommitWriteStreamsResponse|BatchCommitWriteStreamsResponse}.
+ * Please see the {@link https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods | documentation }
+ * for more details and examples.
+ * @example include:samples/generated/v1beta2/big_query_write.batch_commit_write_streams.js
+ * region_tag:bigquerystorage_v1beta2_generated_BigQueryWrite_BatchCommitWriteStreams_async
+ * @deprecated BatchCommitWriteStreams is deprecated and may be removed in a future version.
+ */
+ batchCommitWriteStreams(
+ request?: protos.google.cloud.bigquery.storage.v1beta2.IBatchCommitWriteStreamsRequest,
+ options?: CallOptions,
+ ): Promise<
+ [
+ protos.google.cloud.bigquery.storage.v1beta2.IBatchCommitWriteStreamsResponse,
+ (
+ | protos.google.cloud.bigquery.storage.v1beta2.IBatchCommitWriteStreamsRequest
+ | undefined
+ ),
+ {} | undefined,
+ ]
+ >;
+ batchCommitWriteStreams(
+ request: protos.google.cloud.bigquery.storage.v1beta2.IBatchCommitWriteStreamsRequest,
+ options: CallOptions,
+ callback: Callback<
+ protos.google.cloud.bigquery.storage.v1beta2.IBatchCommitWriteStreamsResponse,
+ | protos.google.cloud.bigquery.storage.v1beta2.IBatchCommitWriteStreamsRequest
+ | null
+ | undefined,
+ {} | null | undefined
+ >,
+ ): void;
+ batchCommitWriteStreams(
+ request: protos.google.cloud.bigquery.storage.v1beta2.IBatchCommitWriteStreamsRequest,
+ callback: Callback<
+ protos.google.cloud.bigquery.storage.v1beta2.IBatchCommitWriteStreamsResponse,
+ | protos.google.cloud.bigquery.storage.v1beta2.IBatchCommitWriteStreamsRequest
+ | null
+ | undefined,
+ {} | null | undefined
+ >,
+ ): void;
+ batchCommitWriteStreams(
+ request?: protos.google.cloud.bigquery.storage.v1beta2.IBatchCommitWriteStreamsRequest,
+ optionsOrCallback?:
+ | CallOptions
+ | Callback<
+ protos.google.cloud.bigquery.storage.v1beta2.IBatchCommitWriteStreamsResponse,
+ | protos.google.cloud.bigquery.storage.v1beta2.IBatchCommitWriteStreamsRequest
+ | null
+ | undefined,
+ {} | null | undefined
+ >,
+ callback?: Callback<
+ protos.google.cloud.bigquery.storage.v1beta2.IBatchCommitWriteStreamsResponse,
+ | protos.google.cloud.bigquery.storage.v1beta2.IBatchCommitWriteStreamsRequest
+ | null
+ | undefined,
+ {} | null | undefined
+ >,
+ ): Promise<
+ [
+ protos.google.cloud.bigquery.storage.v1beta2.IBatchCommitWriteStreamsResponse,
+ (
+ | protos.google.cloud.bigquery.storage.v1beta2.IBatchCommitWriteStreamsRequest
+ | undefined
+ ),
+ {} | undefined,
+ ]
+ > | void {
+ request = request || {};
+ let options: CallOptions;
+ if (typeof optionsOrCallback === 'function' && callback === undefined) {
+ callback = optionsOrCallback;
+ options = {};
+ } else {
+ options = optionsOrCallback as CallOptions;
+ }
+ options = options || {};
+ options.otherArgs = options.otherArgs || {};
+ options.otherArgs.headers = options.otherArgs.headers || {};
+ options.otherArgs.headers['x-goog-request-params'] =
+ this._gaxModule.routingHeader.fromParams({
+ parent: request.parent ?? '',
+ });
+ this.initialize().catch((err) => {
+ throw err;
+ });
+ this.warn(
+ 'DEP$BigQueryWrite-$BatchCommitWriteStreams',
+ 'BatchCommitWriteStreams is deprecated and may be removed in a future version.',
+ 'DeprecationWarning',
+ );
+ this._log.info('batchCommitWriteStreams request %j', request);
+ const wrappedCallback:
+ | Callback<
+ protos.google.cloud.bigquery.storage.v1beta2.IBatchCommitWriteStreamsResponse,
+ | protos.google.cloud.bigquery.storage.v1beta2.IBatchCommitWriteStreamsRequest
+ | null
+ | undefined,
+ {} | null | undefined
+ >
+ | undefined = callback
+ ? (error, response, options, rawResponse) => {
+ this._log.info('batchCommitWriteStreams response %j', response);
+ callback!(error, response, options, rawResponse); // We verified callback above.
+ }
+ : undefined;
+ return this.innerApiCalls
+ .batchCommitWriteStreams(request, options, wrappedCallback)
+ ?.then(
+ ([response, options, rawResponse]: [
+ protos.google.cloud.bigquery.storage.v1beta2.IBatchCommitWriteStreamsResponse,
+ (
+ | protos.google.cloud.bigquery.storage.v1beta2.IBatchCommitWriteStreamsRequest
+ | undefined
+ ),
+ {} | undefined,
+ ]) => {
+ this._log.info('batchCommitWriteStreams response %j', response);
+ return [response, options, rawResponse];
+ },
+ )
+ .catch((error: any) => {
+ if (
+ error &&
+ 'statusDetails' in error &&
+ error.statusDetails instanceof Array
+ ) {
+ const protos = this._gaxModule.protobuf.Root.fromJSON(
+ jsonProtos,
+ ) as unknown as gax.protobuf.Type;
+ error.statusDetails = decodeAnyProtosInArray(
+ error.statusDetails,
+ protos,
+ );
+ }
+ throw error;
+ });
+ }
+ /**
+ * Flushes rows to a BUFFERED stream.
+ * If users are appending rows to BUFFERED stream, flush operation is
+ * required in order for the rows to become available for reading. A
+ * Flush operation flushes up to any previously flushed offset in a BUFFERED
+ * stream, to the offset specified in the request.
+ * Flush is not supported on the _default stream, since it is not BUFFERED.
+ *
+ * @param {Object} request
+ * The request object that will be sent.
+ * @param {string} request.writeStream
+ * Required. The stream that is the target of the flush operation.
+ * @param {google.protobuf.Int64Value} request.offset
+ * Ending offset of the flush operation. Rows before this offset(including
+ * this offset) will be flushed.
+ * @param {object} [options]
+ * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details.
+ * @returns {Promise} - The promise which resolves to an array.
+ * The first element of the array is an object representing {@link protos.google.cloud.bigquery.storage.v1beta2.FlushRowsResponse|FlushRowsResponse}.
+ * Please see the {@link https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods | documentation }
+ * for more details and examples.
+ * @example include:samples/generated/v1beta2/big_query_write.flush_rows.js
+ * region_tag:bigquerystorage_v1beta2_generated_BigQueryWrite_FlushRows_async
+ * @deprecated FlushRows is deprecated and may be removed in a future version.
+ */
+ flushRows(
+ request?: protos.google.cloud.bigquery.storage.v1beta2.IFlushRowsRequest,
+ options?: CallOptions,
+ ): Promise<
+ [
+ protos.google.cloud.bigquery.storage.v1beta2.IFlushRowsResponse,
+ (
+ | protos.google.cloud.bigquery.storage.v1beta2.IFlushRowsRequest
+ | undefined
+ ),
+ {} | undefined,
+ ]
+ >;
+ flushRows(
+ request: protos.google.cloud.bigquery.storage.v1beta2.IFlushRowsRequest,
+ options: CallOptions,
+ callback: Callback<
+ protos.google.cloud.bigquery.storage.v1beta2.IFlushRowsResponse,
+ | protos.google.cloud.bigquery.storage.v1beta2.IFlushRowsRequest
+ | null
+ | undefined,
+ {} | null | undefined
+ >,
+ ): void;
+ flushRows(
+ request: protos.google.cloud.bigquery.storage.v1beta2.IFlushRowsRequest,
+ callback: Callback<
+ protos.google.cloud.bigquery.storage.v1beta2.IFlushRowsResponse,
+ | protos.google.cloud.bigquery.storage.v1beta2.IFlushRowsRequest
+ | null
+ | undefined,
+ {} | null | undefined
+ >,
+ ): void;
+ flushRows(
+ request?: protos.google.cloud.bigquery.storage.v1beta2.IFlushRowsRequest,
+ optionsOrCallback?:
+ | CallOptions
+ | Callback<
+ protos.google.cloud.bigquery.storage.v1beta2.IFlushRowsResponse,
+ | protos.google.cloud.bigquery.storage.v1beta2.IFlushRowsRequest
+ | null
+ | undefined,
+ {} | null | undefined
+ >,
+ callback?: Callback<
+ protos.google.cloud.bigquery.storage.v1beta2.IFlushRowsResponse,
+ | protos.google.cloud.bigquery.storage.v1beta2.IFlushRowsRequest
+ | null
+ | undefined,
+ {} | null | undefined
+ >,
+ ): Promise<
+ [
+ protos.google.cloud.bigquery.storage.v1beta2.IFlushRowsResponse,
+ (
+ | protos.google.cloud.bigquery.storage.v1beta2.IFlushRowsRequest
+ | undefined
+ ),
+ {} | undefined,
+ ]
+ > | void {
+ request = request || {};
+ let options: CallOptions;
+ if (typeof optionsOrCallback === 'function' && callback === undefined) {
+ callback = optionsOrCallback;
+ options = {};
+ } else {
+ options = optionsOrCallback as CallOptions;
+ }
+ options = options || {};
+ options.otherArgs = options.otherArgs || {};
+ options.otherArgs.headers = options.otherArgs.headers || {};
+ options.otherArgs.headers['x-goog-request-params'] =
+ this._gaxModule.routingHeader.fromParams({
+ write_stream: request.writeStream ?? '',
+ });
+ this.initialize().catch((err) => {
+ throw err;
+ });
+ this.warn(
+ 'DEP$BigQueryWrite-$FlushRows',
+ 'FlushRows is deprecated and may be removed in a future version.',
+ 'DeprecationWarning',
+ );
+ this._log.info('flushRows request %j', request);
+ const wrappedCallback:
+ | Callback<
+ protos.google.cloud.bigquery.storage.v1beta2.IFlushRowsResponse,
+ | protos.google.cloud.bigquery.storage.v1beta2.IFlushRowsRequest
+ | null
+ | undefined,
+ {} | null | undefined
+ >
+ | undefined = callback
+ ? (error, response, options, rawResponse) => {
+ this._log.info('flushRows response %j', response);
+ callback!(error, response, options, rawResponse); // We verified callback above.
+ }
+ : undefined;
+ return this.innerApiCalls
+ .flushRows(request, options, wrappedCallback)
+ ?.then(
+ ([response, options, rawResponse]: [
+ protos.google.cloud.bigquery.storage.v1beta2.IFlushRowsResponse,
+ (
+ | protos.google.cloud.bigquery.storage.v1beta2.IFlushRowsRequest
+ | undefined
+ ),
+ {} | undefined,
+ ]) => {
+ this._log.info('flushRows response %j', response);
+ return [response, options, rawResponse];
+ },
+ )
+ .catch((error: any) => {
+ if (
+ error &&
+ 'statusDetails' in error &&
+ error.statusDetails instanceof Array
+ ) {
+ const protos = this._gaxModule.protobuf.Root.fromJSON(
+ jsonProtos,
+ ) as unknown as gax.protobuf.Type;
+ error.statusDetails = decodeAnyProtosInArray(
+ error.statusDetails,
+ protos,
+ );
+ }
+ throw error;
+ });
+ }
+
+ /**
+ * Appends data to the given stream.
+ *
+ * If `offset` is specified, the `offset` is checked against the end of
+ * stream. The server returns `OUT_OF_RANGE` in `AppendRowsResponse` if an
+ * attempt is made to append to an offset beyond the current end of the stream
 + * or `ALREADY_EXISTS` if user provides an `offset` that has already been
+ * written to. User can retry with adjusted offset within the same RPC
+ * stream. If `offset` is not specified, append happens at the end of the
+ * stream.
+ *
+ * The response contains the offset at which the append happened. Responses
+ * are received in the same order in which requests are sent. There will be
+ * one response for each successful request. If the `offset` is not set in
+ * response, it means append didn't happen due to some errors. If one request
+ * fails, all the subsequent requests will also fail until a success request
+ * is made again.
+ *
+ * If the stream is of `PENDING` type, data will only be available for read
+ * operations after the stream is committed.
+ *
+ * @param {object} [options]
+ * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details.
+ * @returns {Stream}
+ * An object stream which is both readable and writable. It accepts objects
+ * representing {@link protos.google.cloud.bigquery.storage.v1beta2.AppendRowsRequest|AppendRowsRequest} for write() method, and
+ * will emit objects representing {@link protos.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse|AppendRowsResponse} on 'data' event asynchronously.
+ * Please see the {@link https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#bi-directional-streaming | documentation }
+ * for more details and examples.
+ * @example include:samples/generated/v1beta2/big_query_write.append_rows.js
+ * region_tag:bigquerystorage_v1beta2_generated_BigQueryWrite_AppendRows_async
+ * @deprecated AppendRows is deprecated and may be removed in a future version.
+ */
+ appendRows(options?: CallOptions): gax.CancellableStream {
+ this.initialize().catch((err) => {
+ throw err;
+ });
+ this.warn(
+ 'DEP$BigQueryWrite-$AppendRows',
+ 'AppendRows is deprecated and may be removed in a future version.',
+ 'DeprecationWarning',
+ );
+ this._log.info('appendRows stream %j', options);
+ return this.innerApiCalls.appendRows(null, options);
+ }
+
+ // --------------------
+ // -- Path templates --
+ // --------------------
+
+ /**
+ * Return a fully-qualified project resource name string.
+ *
+ * @param {string} project
+ * @returns {string} Resource name string.
+ */
+ projectPath(project: string) {
+ return this.pathTemplates.projectPathTemplate.render({
+ project: project,
+ });
+ }
+
+ /**
+ * Parse the project from Project resource.
+ *
+ * @param {string} projectName
+ * A fully-qualified path representing Project resource.
+ * @returns {string} A string representing the project.
+ */
+ matchProjectFromProjectName(projectName: string) {
+ return this.pathTemplates.projectPathTemplate.match(projectName).project;
+ }
+
+ /**
+ * Return a fully-qualified readSession resource name string.
+ *
+ * @param {string} project
+ * @param {string} location
+ * @param {string} session
+ * @returns {string} Resource name string.
+ */
+ readSessionPath(project: string, location: string, session: string) {
+ return this.pathTemplates.readSessionPathTemplate.render({
+ project: project,
+ location: location,
+ session: session,
+ });
+ }
+
+ /**
+ * Parse the project from ReadSession resource.
+ *
+ * @param {string} readSessionName
+ * A fully-qualified path representing ReadSession resource.
+ * @returns {string} A string representing the project.
+ */
+ matchProjectFromReadSessionName(readSessionName: string) {
+ return this.pathTemplates.readSessionPathTemplate.match(readSessionName)
+ .project;
+ }
+
+ /**
+ * Parse the location from ReadSession resource.
+ *
+ * @param {string} readSessionName
+ * A fully-qualified path representing ReadSession resource.
+ * @returns {string} A string representing the location.
+ */
+ matchLocationFromReadSessionName(readSessionName: string) {
+ return this.pathTemplates.readSessionPathTemplate.match(readSessionName)
+ .location;
+ }
+
+ /**
+ * Parse the session from ReadSession resource.
+ *
+ * @param {string} readSessionName
+ * A fully-qualified path representing ReadSession resource.
+ * @returns {string} A string representing the session.
+ */
+ matchSessionFromReadSessionName(readSessionName: string) {
+ return this.pathTemplates.readSessionPathTemplate.match(readSessionName)
+ .session;
+ }
+
+ /**
+ * Return a fully-qualified readStream resource name string.
+ *
+ * @param {string} project
+ * @param {string} location
+ * @param {string} session
+ * @param {string} stream
+ * @returns {string} Resource name string.
+ */
+ readStreamPath(
+ project: string,
+ location: string,
+ session: string,
+ stream: string,
+ ) {
+ return this.pathTemplates.readStreamPathTemplate.render({
+ project: project,
+ location: location,
+ session: session,
+ stream: stream,
+ });
+ }
+
+ /**
+ * Parse the project from ReadStream resource.
+ *
+ * @param {string} readStreamName
+ * A fully-qualified path representing ReadStream resource.
+ * @returns {string} A string representing the project.
+ */
+ matchProjectFromReadStreamName(readStreamName: string) {
+ return this.pathTemplates.readStreamPathTemplate.match(readStreamName)
+ .project;
+ }
+
+ /**
+ * Parse the location from ReadStream resource.
+ *
+ * @param {string} readStreamName
+ * A fully-qualified path representing ReadStream resource.
+ * @returns {string} A string representing the location.
+ */
+ matchLocationFromReadStreamName(readStreamName: string) {
+ return this.pathTemplates.readStreamPathTemplate.match(readStreamName)
+ .location;
+ }
+
+ /**
+ * Parse the session from ReadStream resource.
+ *
+ * @param {string} readStreamName
+ * A fully-qualified path representing ReadStream resource.
+ * @returns {string} A string representing the session.
+ */
+ matchSessionFromReadStreamName(readStreamName: string) {
+ return this.pathTemplates.readStreamPathTemplate.match(readStreamName)
+ .session;
+ }
+
+ /**
+ * Parse the stream from ReadStream resource.
+ *
+ * @param {string} readStreamName
+ * A fully-qualified path representing ReadStream resource.
+ * @returns {string} A string representing the stream.
+ */
+ matchStreamFromReadStreamName(readStreamName: string) {
+ return this.pathTemplates.readStreamPathTemplate.match(readStreamName)
+ .stream;
+ }
+
+ /**
+ * Return a fully-qualified table resource name string.
+ *
+ * @param {string} project
+ * @param {string} dataset
+ * @param {string} table
+ * @returns {string} Resource name string.
+ */
+ tablePath(project: string, dataset: string, table: string) {
+ return this.pathTemplates.tablePathTemplate.render({
+ project: project,
+ dataset: dataset,
+ table: table,
+ });
+ }
+
+ /**
+ * Parse the project from Table resource.
+ *
+ * @param {string} tableName
+ * A fully-qualified path representing Table resource.
+ * @returns {string} A string representing the project.
+ */
+ matchProjectFromTableName(tableName: string) {
+ return this.pathTemplates.tablePathTemplate.match(tableName).project;
+ }
+
+ /**
+ * Parse the dataset from Table resource.
+ *
+ * @param {string} tableName
+ * A fully-qualified path representing Table resource.
+ * @returns {string} A string representing the dataset.
+ */
+ matchDatasetFromTableName(tableName: string) {
+ return this.pathTemplates.tablePathTemplate.match(tableName).dataset;
+ }
+
+ /**
+ * Parse the table from Table resource.
+ *
+ * @param {string} tableName
+ * A fully-qualified path representing Table resource.
+ * @returns {string} A string representing the table.
+ */
+ matchTableFromTableName(tableName: string) {
+ return this.pathTemplates.tablePathTemplate.match(tableName).table;
+ }
+
+ /**
+ * Return a fully-qualified writeStream resource name string.
+ *
+ * @param {string} project
+ * @param {string} dataset
+ * @param {string} table
+ * @param {string} stream
+ * @returns {string} Resource name string.
+ */
+ writeStreamPath(
+ project: string,
+ dataset: string,
+ table: string,
+ stream: string,
+ ) {
+ return this.pathTemplates.writeStreamPathTemplate.render({
+ project: project,
+ dataset: dataset,
+ table: table,
+ stream: stream,
+ });
+ }
+
+ /**
+ * Parse the project from WriteStream resource.
+ *
+ * @param {string} writeStreamName
+ * A fully-qualified path representing WriteStream resource.
+ * @returns {string} A string representing the project.
+ */
+ matchProjectFromWriteStreamName(writeStreamName: string) {
+ return this.pathTemplates.writeStreamPathTemplate.match(writeStreamName)
+ .project;
+ }
+
+ /**
+ * Parse the dataset from WriteStream resource.
+ *
+ * @param {string} writeStreamName
+ * A fully-qualified path representing WriteStream resource.
+ * @returns {string} A string representing the dataset.
+ */
+ matchDatasetFromWriteStreamName(writeStreamName: string) {
+ return this.pathTemplates.writeStreamPathTemplate.match(writeStreamName)
+ .dataset;
+ }
+
+ /**
+ * Parse the table from WriteStream resource.
+ *
+ * @param {string} writeStreamName
+ * A fully-qualified path representing WriteStream resource.
+ * @returns {string} A string representing the table.
+ */
+ matchTableFromWriteStreamName(writeStreamName: string) {
+ return this.pathTemplates.writeStreamPathTemplate.match(writeStreamName)
+ .table;
+ }
+
+ /**
+ * Parse the stream from WriteStream resource.
+ *
+ * @param {string} writeStreamName
+ * A fully-qualified path representing WriteStream resource.
+ * @returns {string} A string representing the stream.
+ */
+ matchStreamFromWriteStreamName(writeStreamName: string) {
+ return this.pathTemplates.writeStreamPathTemplate.match(writeStreamName)
+ .stream;
+ }
+
+ /**
+ * Terminate the gRPC channel and close the client.
+ *
+ * The client will no longer be usable and all future behavior is undefined.
+ * @returns {Promise} A promise that resolves when the client is closed.
+ */
 +  close(): Promise<void> {
+ if (this.bigQueryWriteStub && !this._terminated) {
+ return this.bigQueryWriteStub.then((stub) => {
+ this._log.info('ending gRPC channel');
+ this._terminated = true;
+ stub.close();
+ });
+ }
+ return Promise.resolve();
+ }
+}
diff --git a/handwritten/bigquery-storage/src/v1beta2/big_query_write_client_config.json b/handwritten/bigquery-storage/src/v1beta2/big_query_write_client_config.json
new file mode 100644
index 000000000000..7f28169e4cf1
--- /dev/null
+++ b/handwritten/bigquery-storage/src/v1beta2/big_query_write_client_config.json
@@ -0,0 +1,65 @@
+{
+ "interfaces": {
+ "google.cloud.bigquery.storage.v1beta2.BigQueryWrite": {
+ "retry_codes": {
+ "non_idempotent": [],
+ "idempotent": [
+ "DEADLINE_EXCEEDED",
+ "UNAVAILABLE"
+ ],
+ "deadline_exceeded_resource_exhausted_unavailable": [
+ "DEADLINE_EXCEEDED",
+ "RESOURCE_EXHAUSTED",
+ "UNAVAILABLE"
+ ],
+ "resource_exhausted_unavailable": [
+ "RESOURCE_EXHAUSTED",
+ "UNAVAILABLE"
+ ]
+ },
+ "retry_params": {
+ "default": {
+ "initial_retry_delay_millis": 100,
+ "retry_delay_multiplier": 1.3,
+ "max_retry_delay_millis": 60000,
+ "initial_rpc_timeout_millis": 60000,
+ "rpc_timeout_multiplier": 1,
+ "max_rpc_timeout_millis": 60000,
+ "total_timeout_millis": 600000
+ }
+ },
+ "methods": {
+ "CreateWriteStream": {
+ "timeout_millis": 600000,
+ "retry_codes_name": "deadline_exceeded_resource_exhausted_unavailable",
+ "retry_params_name": "default"
+ },
+ "AppendRows": {
+ "timeout_millis": 86400000,
+ "retry_codes_name": "resource_exhausted_unavailable",
+ "retry_params_name": "default"
+ },
+ "GetWriteStream": {
+ "timeout_millis": 600000,
+ "retry_codes_name": "idempotent",
+ "retry_params_name": "default"
+ },
+ "FinalizeWriteStream": {
+ "timeout_millis": 600000,
+ "retry_codes_name": "idempotent",
+ "retry_params_name": "default"
+ },
+ "BatchCommitWriteStreams": {
+ "timeout_millis": 600000,
+ "retry_codes_name": "idempotent",
+ "retry_params_name": "default"
+ },
+ "FlushRows": {
+ "timeout_millis": 600000,
+ "retry_codes_name": "idempotent",
+ "retry_params_name": "default"
+ }
+ }
+ }
+ }
+}
diff --git a/handwritten/bigquery-storage/src/v1beta2/big_query_write_proto_list.json b/handwritten/bigquery-storage/src/v1beta2/big_query_write_proto_list.json
new file mode 100644
index 000000000000..3a0940a9eb76
--- /dev/null
+++ b/handwritten/bigquery-storage/src/v1beta2/big_query_write_proto_list.json
@@ -0,0 +1,8 @@
+[
+ "../../protos/google/cloud/bigquery/storage/v1beta2/arrow.proto",
+ "../../protos/google/cloud/bigquery/storage/v1beta2/avro.proto",
+ "../../protos/google/cloud/bigquery/storage/v1beta2/protobuf.proto",
+ "../../protos/google/cloud/bigquery/storage/v1beta2/storage.proto",
+ "../../protos/google/cloud/bigquery/storage/v1beta2/stream.proto",
+ "../../protos/google/cloud/bigquery/storage/v1beta2/table.proto"
+]
diff --git a/handwritten/bigquery-storage/src/v1beta2/gapic_metadata.json b/handwritten/bigquery-storage/src/v1beta2/gapic_metadata.json
new file mode 100644
index 000000000000..d360825be2d2
--- /dev/null
+++ b/handwritten/bigquery-storage/src/v1beta2/gapic_metadata.json
@@ -0,0 +1,117 @@
+{
+ "schema": "1.0",
+ "comment": "This file maps proto services/RPCs to the corresponding library clients/methods",
+ "language": "typescript",
+ "protoPackage": "google.cloud.bigquery.storage.v1beta2",
+  "libraryPackage": "@google-cloud/bigquery-storage",
+ "services": {
+ "BigQueryRead": {
+ "clients": {
+ "grpc": {
+ "libraryClient": "BigQueryReadClient",
+ "rpcs": {
+ "CreateReadSession": {
+ "methods": [
+ "createReadSession"
+ ]
+ },
+ "SplitReadStream": {
+ "methods": [
+ "splitReadStream"
+ ]
+ },
+ "ReadRows": {
+ "methods": [
+ "readRows"
+ ]
+ }
+ }
+ },
+ "grpc-fallback": {
+ "libraryClient": "BigQueryReadClient",
+ "rpcs": {
+ "CreateReadSession": {
+ "methods": [
+ "createReadSession"
+ ]
+ },
+ "SplitReadStream": {
+ "methods": [
+ "splitReadStream"
+ ]
+ }
+ }
+ }
+ }
+ },
+ "BigQueryWrite": {
+ "clients": {
+ "grpc": {
+ "libraryClient": "BigQueryWriteClient",
+ "rpcs": {
+ "CreateWriteStream": {
+ "methods": [
+ "createWriteStream"
+ ]
+ },
+ "GetWriteStream": {
+ "methods": [
+ "getWriteStream"
+ ]
+ },
+ "FinalizeWriteStream": {
+ "methods": [
+ "finalizeWriteStream"
+ ]
+ },
+ "BatchCommitWriteStreams": {
+ "methods": [
+ "batchCommitWriteStreams"
+ ]
+ },
+ "FlushRows": {
+ "methods": [
+ "flushRows"
+ ]
+ },
+ "AppendRows": {
+ "methods": [
+ "appendRows"
+ ]
+ }
+ }
+ },
+ "grpc-fallback": {
+ "libraryClient": "BigQueryWriteClient",
+ "rpcs": {
+ "CreateWriteStream": {
+ "methods": [
+ "createWriteStream"
+ ]
+ },
+ "GetWriteStream": {
+ "methods": [
+ "getWriteStream"
+ ]
+ },
+ "FinalizeWriteStream": {
+ "methods": [
+ "finalizeWriteStream"
+ ]
+ },
+ "BatchCommitWriteStreams": {
+ "methods": [
+ "batchCommitWriteStreams"
+ ]
+ },
+ "FlushRows": {
+ "methods": [
+ "flushRows"
+ ]
+ }
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/handwritten/bigquery-storage/src/v1beta2/index.ts b/handwritten/bigquery-storage/src/v1beta2/index.ts
new file mode 100644
index 000000000000..ad672e49aae6
--- /dev/null
+++ b/handwritten/bigquery-storage/src/v1beta2/index.ts
@@ -0,0 +1,20 @@
+// Copyright 2026 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ** This file is automatically generated by gapic-generator-typescript. **
+// ** https://github.com/googleapis/gapic-generator-typescript **
+// ** All changes to this file may be overwritten. **
+
+export { BigQueryReadClient } from './big_query_read_client';
+export { BigQueryWriteClient } from './big_query_write_client';
diff --git a/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.js b/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.js
index 4d1e990cb8dc..bf392b491d64 100644
--- a/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.js
+++ b/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.js
@@ -1,4 +1,4 @@
-// Copyright 2025 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -16,6 +16,7 @@
// ** https://github.com/googleapis/gapic-generator-typescript **
// ** All changes to this file may be overwritten. **
+
/* eslint-disable node/no-missing-require, no-unused-vars */
const storage = require('@google-cloud/bigquery-storage');
diff --git a/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.ts b/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.ts
index 3c3da47e6951..2f82355fef41 100644
--- a/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.ts
+++ b/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.ts
@@ -1,4 +1,4 @@
-// Copyright 2025 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -16,7 +16,10 @@
// ** https://github.com/googleapis/gapic-generator-typescript **
// ** All changes to this file may be overwritten. **
-import {BigQueryReadClient, BigQueryWriteClient} from '@google-cloud/bigquery-storage';
+import {
+ BigQueryReadClient,
+ BigQueryWriteClient,
+} from '@google-cloud/bigquery-storage';
// check that the client class type name can be used
function doStuffWithBigQueryReadClient(client: BigQueryReadClient) {
diff --git a/handwritten/bigquery-storage/system-test/install.ts b/handwritten/bigquery-storage/system-test/install.ts
index 5257a7ba101c..ccf167042d2e 100644
--- a/handwritten/bigquery-storage/system-test/install.ts
+++ b/handwritten/bigquery-storage/system-test/install.ts
@@ -1,4 +1,4 @@
-// Copyright 2025 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -16,9 +16,9 @@
// ** https://github.com/googleapis/gapic-generator-typescript **
// ** All changes to this file may be overwritten. **
-import {packNTest} from 'pack-n-play';
-import {readFileSync} from 'fs';
-import {describe, it} from 'mocha';
+import { packNTest } from 'pack-n-play';
+import { readFileSync } from 'fs';
+import { describe, it } from 'mocha';
describe('📦 pack-n-play test', () => {
it('TypeScript code', async function () {
@@ -41,7 +41,7 @@ describe('📦 pack-n-play test', () => {
packageDir: process.cwd(),
sample: {
description: 'JavaScript user can use the library',
- ts: readFileSync(
+ cjs: readFileSync(
'./system-test/fixtures/sample/src/index.js',
).toString(),
},
diff --git a/handwritten/bigquery-storage/test/gapic_big_query_read_v1.ts b/handwritten/bigquery-storage/test/gapic_big_query_read_v1.ts
index 0672ab7ec6f4..e0a3162347a1 100644
--- a/handwritten/bigquery-storage/test/gapic_big_query_read_v1.ts
+++ b/handwritten/bigquery-storage/test/gapic_big_query_read_v1.ts
@@ -1,4 +1,4 @@
-// Copyright 2025 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -19,13 +19,13 @@
import * as protos from '../protos/protos';
import * as assert from 'assert';
import * as sinon from 'sinon';
-import {SinonStub} from 'sinon';
-import {describe, it} from 'mocha';
+import { SinonStub } from 'sinon';
+import { describe, it } from 'mocha';
import * as bigqueryreadModule from '../src';
-import {PassThrough} from 'stream';
+import { PassThrough } from 'stream';
-import {protobuf} from 'google-gax';
+import { protobuf } from 'google-gax';
// Dynamically loaded proto JSON is needed to get the type information
// to fill in default values for request objects
@@ -45,7 +45,7 @@ function getTypeDefaultValue(typeName: string, fields: string[]) {
function generateSampleMessage(instance: T) {
const filledObject = (
instance.constructor as typeof protobuf.Message
- ).toObject(instance as protobuf.Message, {defaults: true});
+ ).toObject(instance as protobuf.Message, { defaults: true });
return (instance.constructor as typeof protobuf.Message).fromObject(
filledObject,
) as T;
@@ -202,7 +202,7 @@ describe('v1.BigQueryReadClient', () => {
it('has initialize method and supports deferred initialization', async () => {
const client = new bigqueryreadModule.v1.BigQueryReadClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
assert.strictEqual(client.bigQueryReadStub, undefined);
@@ -210,12 +210,12 @@ describe('v1.BigQueryReadClient', () => {
assert(client.bigQueryReadStub);
});
- it('has close method for the initialized client', done => {
+ it('has close method for the initialized client', (done) => {
const client = new bigqueryreadModule.v1.BigQueryReadClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize().catch(err => {
+ client.initialize().catch((err) => {
throw err;
});
assert(client.bigQueryReadStub);
@@ -224,14 +224,14 @@ describe('v1.BigQueryReadClient', () => {
.then(() => {
done();
})
- .catch(err => {
+ .catch((err) => {
throw err;
});
});
- it('has close method for the non-initialized client', done => {
+ it('has close method for the non-initialized client', (done) => {
const client = new bigqueryreadModule.v1.BigQueryReadClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
assert.strictEqual(client.bigQueryReadStub, undefined);
@@ -240,7 +240,7 @@ describe('v1.BigQueryReadClient', () => {
.then(() => {
done();
})
- .catch(err => {
+ .catch((err) => {
throw err;
});
});
@@ -248,7 +248,7 @@ describe('v1.BigQueryReadClient', () => {
it('has getProjectId method', async () => {
const fakeProjectId = 'fake-project-id';
const client = new bigqueryreadModule.v1.BigQueryReadClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
client.auth.getProjectId = sinon.stub().resolves(fakeProjectId);
@@ -260,7 +260,7 @@ describe('v1.BigQueryReadClient', () => {
it('has getProjectId method with callback', async () => {
const fakeProjectId = 'fake-project-id';
const client = new bigqueryreadModule.v1.BigQueryReadClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
client.auth.getProjectId = sinon
@@ -283,7 +283,7 @@ describe('v1.BigQueryReadClient', () => {
describe('createReadSession', () => {
it('invokes createReadSession without error', async () => {
const client = new bigqueryreadModule.v1.BigQueryReadClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -315,7 +315,7 @@ describe('v1.BigQueryReadClient', () => {
it('invokes createReadSession without error using callback', async () => {
const client = new bigqueryreadModule.v1.BigQueryReadClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -363,7 +363,7 @@ describe('v1.BigQueryReadClient', () => {
it('invokes createReadSession with error', async () => {
const client = new bigqueryreadModule.v1.BigQueryReadClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -395,7 +395,7 @@ describe('v1.BigQueryReadClient', () => {
it('invokes createReadSession with closed client', async () => {
const client = new bigqueryreadModule.v1.BigQueryReadClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -409,7 +409,7 @@ describe('v1.BigQueryReadClient', () => {
);
request.readSession.table = defaultValue1;
const expectedError = new Error('The client has already been closed.');
- client.close().catch(err => {
+ client.close().catch((err) => {
throw err;
});
await assert.rejects(client.createReadSession(request), expectedError);
@@ -419,7 +419,7 @@ describe('v1.BigQueryReadClient', () => {
describe('splitReadStream', () => {
it('invokes splitReadStream without error', async () => {
const client = new bigqueryreadModule.v1.BigQueryReadClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -450,7 +450,7 @@ describe('v1.BigQueryReadClient', () => {
it('invokes splitReadStream without error using callback', async () => {
const client = new bigqueryreadModule.v1.BigQueryReadClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -497,7 +497,7 @@ describe('v1.BigQueryReadClient', () => {
it('invokes splitReadStream with error', async () => {
const client = new bigqueryreadModule.v1.BigQueryReadClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -528,7 +528,7 @@ describe('v1.BigQueryReadClient', () => {
it('invokes splitReadStream with closed client', async () => {
const client = new bigqueryreadModule.v1.BigQueryReadClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -541,7 +541,7 @@ describe('v1.BigQueryReadClient', () => {
);
request.name = defaultValue1;
const expectedError = new Error('The client has already been closed.');
- client.close().catch(err => {
+ client.close().catch((err) => {
throw err;
});
await assert.rejects(client.splitReadStream(request), expectedError);
@@ -551,7 +551,7 @@ describe('v1.BigQueryReadClient', () => {
describe('readRows', () => {
it('invokes readRows without error', async () => {
const client = new bigqueryreadModule.v1.BigQueryReadClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -596,7 +596,7 @@ describe('v1.BigQueryReadClient', () => {
it('invokes readRows without error and gaxServerStreamingRetries enabled', async () => {
const client = new bigqueryreadModule.v1.BigQueryReadClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
gaxServerStreamingRetries: true,
});
@@ -642,7 +642,7 @@ describe('v1.BigQueryReadClient', () => {
it('invokes readRows with error', async () => {
const client = new bigqueryreadModule.v1.BigQueryReadClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -687,7 +687,7 @@ describe('v1.BigQueryReadClient', () => {
it('invokes readRows with closed client', async () => {
const client = new bigqueryreadModule.v1.BigQueryReadClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -700,11 +700,11 @@ describe('v1.BigQueryReadClient', () => {
);
request.readStream = defaultValue1;
const expectedError = new Error('The client has already been closed.');
- client.close().catch(err => {
+ client.close().catch((err) => {
throw err;
});
const stream = client.readRows(request, {
- retryRequestOptions: {noResponseRetries: 0},
+ retryRequestOptions: { noResponseRetries: 0 },
});
const promise = new Promise((resolve, reject) => {
stream.on(
@@ -736,7 +736,7 @@ describe('v1.BigQueryReadClient', () => {
project: 'projectValue',
};
const client = new bigqueryreadModule.v1.BigQueryReadClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -776,7 +776,7 @@ describe('v1.BigQueryReadClient', () => {
session: 'sessionValue',
};
const client = new bigqueryreadModule.v1.BigQueryReadClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -841,7 +841,7 @@ describe('v1.BigQueryReadClient', () => {
stream: 'streamValue',
};
const client = new bigqueryreadModule.v1.BigQueryReadClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -916,7 +916,7 @@ describe('v1.BigQueryReadClient', () => {
table: 'tableValue',
};
const client = new bigqueryreadModule.v1.BigQueryReadClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -981,7 +981,7 @@ describe('v1.BigQueryReadClient', () => {
stream: 'streamValue',
};
const client = new bigqueryreadModule.v1.BigQueryReadClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
diff --git a/handwritten/bigquery-storage/test/gapic_big_query_read_v1beta2.ts b/handwritten/bigquery-storage/test/gapic_big_query_read_v1beta2.ts
new file mode 100644
index 000000000000..3a9df05fe195
--- /dev/null
+++ b/handwritten/bigquery-storage/test/gapic_big_query_read_v1beta2.ts
@@ -0,0 +1,1051 @@
+// Copyright 2026 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ** This file is automatically generated by gapic-generator-typescript. **
+// ** https://github.com/googleapis/gapic-generator-typescript **
+// ** All changes to this file may be overwritten. **
+
+import * as protos from '../protos/protos';
+import * as assert from 'assert';
+import * as sinon from 'sinon';
+import { SinonStub } from 'sinon';
+import { describe, it } from 'mocha';
+import * as bigqueryreadModule from '../src';
+
+import { PassThrough } from 'stream';
+
+import { protobuf } from 'google-gax';
+
+// Dynamically loaded proto JSON is needed to get the type information
+// to fill in default values for request objects
+const root = protobuf.Root.fromJSON(
+ require('../protos/protos.json'),
+).resolveAll();
+
+// eslint-disable-next-line @typescript-eslint/no-unused-vars
+function getTypeDefaultValue(typeName: string, fields: string[]) {
+ let type = root.lookupType(typeName) as protobuf.Type;
+ for (const field of fields.slice(0, -1)) {
+ type = type.fields[field]?.resolvedType as protobuf.Type;
+ }
+ return type.fields[fields[fields.length - 1]]?.defaultValue;
+}
+
+function generateSampleMessage(instance: T) {
+ const filledObject = (
+ instance.constructor as typeof protobuf.Message
+ ).toObject(instance as protobuf.Message, { defaults: true });
+ return (instance.constructor as typeof protobuf.Message).fromObject(
+ filledObject,
+ ) as T;
+}
+
+function stubSimpleCall(response?: ResponseType, error?: Error) {
+ return error
+ ? sinon.stub().rejects(error)
+ : sinon.stub().resolves([response]);
+}
+
+function stubSimpleCallWithCallback(
+ response?: ResponseType,
+ error?: Error,
+) {
+ return error
+ ? sinon.stub().callsArgWith(2, error)
+ : sinon.stub().callsArgWith(2, null, response);
+}
+
+function stubServerStreamingCall(
+ response?: ResponseType,
+ error?: Error,
+) {
+ const transformStub = error
+ ? sinon.stub().callsArgWith(2, error)
+ : sinon.stub().callsArgWith(2, null, response);
+ const mockStream = new PassThrough({
+ objectMode: true,
+ transform: transformStub,
+ });
+ // write something to the stream to trigger transformStub and send the response back to the client
+ setImmediate(() => {
+ mockStream.write({});
+ });
+ setImmediate(() => {
+ mockStream.end();
+ });
+ return sinon.stub().returns(mockStream);
+}
+
+describe('v1beta2.BigQueryReadClient', () => {
+ describe('Common methods', () => {
+ it('has apiEndpoint', () => {
+ const client = new bigqueryreadModule.v1beta2.BigQueryReadClient();
+ const apiEndpoint = client.apiEndpoint;
+ assert.strictEqual(apiEndpoint, 'bigquerystorage.googleapis.com');
+ });
+
+ it('has universeDomain', () => {
+ const client = new bigqueryreadModule.v1beta2.BigQueryReadClient();
+ const universeDomain = client.universeDomain;
+ assert.strictEqual(universeDomain, 'googleapis.com');
+ });
+
+ if (
+ typeof process === 'object' &&
+ typeof process.emitWarning === 'function'
+ ) {
+ it('throws DeprecationWarning if static servicePath is used', () => {
+ const stub = sinon.stub(process, 'emitWarning');
+ const servicePath =
+ bigqueryreadModule.v1beta2.BigQueryReadClient.servicePath;
+ assert.strictEqual(servicePath, 'bigquerystorage.googleapis.com');
+ assert(stub.called);
+ stub.restore();
+ });
+
+ it('throws DeprecationWarning if static apiEndpoint is used', () => {
+ const stub = sinon.stub(process, 'emitWarning');
+ const apiEndpoint =
+ bigqueryreadModule.v1beta2.BigQueryReadClient.apiEndpoint;
+ assert.strictEqual(apiEndpoint, 'bigquerystorage.googleapis.com');
+ assert(stub.called);
+ stub.restore();
+ });
+ }
+ it('sets apiEndpoint according to universe domain camelCase', () => {
+ const client = new bigqueryreadModule.v1beta2.BigQueryReadClient({
+ universeDomain: 'example.com',
+ });
+ const servicePath = client.apiEndpoint;
+ assert.strictEqual(servicePath, 'bigquerystorage.example.com');
+ });
+
+ it('sets apiEndpoint according to universe domain snakeCase', () => {
+ const client = new bigqueryreadModule.v1beta2.BigQueryReadClient({
+ universe_domain: 'example.com',
+ });
+ const servicePath = client.apiEndpoint;
+ assert.strictEqual(servicePath, 'bigquerystorage.example.com');
+ });
+
+ if (typeof process === 'object' && 'env' in process) {
+ describe('GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variable', () => {
+ it('sets apiEndpoint from environment variable', () => {
+ const saved = process.env['GOOGLE_CLOUD_UNIVERSE_DOMAIN'];
+ process.env['GOOGLE_CLOUD_UNIVERSE_DOMAIN'] = 'example.com';
+ const client = new bigqueryreadModule.v1beta2.BigQueryReadClient();
+ const servicePath = client.apiEndpoint;
+ assert.strictEqual(servicePath, 'bigquerystorage.example.com');
+ if (saved) {
+ process.env['GOOGLE_CLOUD_UNIVERSE_DOMAIN'] = saved;
+ } else {
+ delete process.env['GOOGLE_CLOUD_UNIVERSE_DOMAIN'];
+ }
+ });
+
+ it('value configured in code has priority over environment variable', () => {
+ const saved = process.env['GOOGLE_CLOUD_UNIVERSE_DOMAIN'];
+ process.env['GOOGLE_CLOUD_UNIVERSE_DOMAIN'] = 'example.com';
+ const client = new bigqueryreadModule.v1beta2.BigQueryReadClient({
+ universeDomain: 'configured.example.com',
+ });
+ const servicePath = client.apiEndpoint;
+ assert.strictEqual(
+ servicePath,
+ 'bigquerystorage.configured.example.com',
+ );
+ if (saved) {
+ process.env['GOOGLE_CLOUD_UNIVERSE_DOMAIN'] = saved;
+ } else {
+ delete process.env['GOOGLE_CLOUD_UNIVERSE_DOMAIN'];
+ }
+ });
+ });
+ }
+ it('does not allow setting both universeDomain and universe_domain', () => {
+ assert.throws(() => {
+ new bigqueryreadModule.v1beta2.BigQueryReadClient({
+ universe_domain: 'example.com',
+ universeDomain: 'example.net',
+ });
+ });
+ });
+
+ it('has port', () => {
+ const port = bigqueryreadModule.v1beta2.BigQueryReadClient.port;
+ assert(port);
+ assert(typeof port === 'number');
+ });
+
+ it('should create a client with no option', () => {
+ const client = new bigqueryreadModule.v1beta2.BigQueryReadClient();
+ assert(client);
+ });
+
+ it('should create a client with gRPC fallback', () => {
+ const client = new bigqueryreadModule.v1beta2.BigQueryReadClient({
+ fallback: true,
+ });
+ assert(client);
+ });
+
+ it('has initialize method and supports deferred initialization', async () => {
+ const client = new bigqueryreadModule.v1beta2.BigQueryReadClient({
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
+ projectId: 'bogus',
+ });
+ assert.strictEqual(client.bigQueryReadStub, undefined);
+ await client.initialize();
+ assert(client.bigQueryReadStub);
+ });
+
+ it('has close method for the initialized client', (done) => {
+ const client = new bigqueryreadModule.v1beta2.BigQueryReadClient({
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
+ projectId: 'bogus',
+ });
+ client.initialize().catch((err) => {
+ throw err;
+ });
+ assert(client.bigQueryReadStub);
+ client
+ .close()
+ .then(() => {
+ done();
+ })
+ .catch((err) => {
+ throw err;
+ });
+ });
+
+ it('has close method for the non-initialized client', (done) => {
+ const client = new bigqueryreadModule.v1beta2.BigQueryReadClient({
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
+ projectId: 'bogus',
+ });
+ assert.strictEqual(client.bigQueryReadStub, undefined);
+ client
+ .close()
+ .then(() => {
+ done();
+ })
+ .catch((err) => {
+ throw err;
+ });
+ });
+
+ it('has getProjectId method', async () => {
+ const fakeProjectId = 'fake-project-id';
+ const client = new bigqueryreadModule.v1beta2.BigQueryReadClient({
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
+ projectId: 'bogus',
+ });
+ client.auth.getProjectId = sinon.stub().resolves(fakeProjectId);
+ const result = await client.getProjectId();
+ assert.strictEqual(result, fakeProjectId);
+ assert((client.auth.getProjectId as SinonStub).calledWithExactly());
+ });
+
+ it('has getProjectId method with callback', async () => {
+ const fakeProjectId = 'fake-project-id';
+ const client = new bigqueryreadModule.v1beta2.BigQueryReadClient({
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
+ projectId: 'bogus',
+ });
+ client.auth.getProjectId = sinon
+ .stub()
+ .callsArgWith(0, null, fakeProjectId);
+ const promise = new Promise((resolve, reject) => {
+ client.getProjectId((err?: Error | null, projectId?: string | null) => {
+ if (err) {
+ reject(err);
+ } else {
+ resolve(projectId);
+ }
+ });
+ });
+ const result = await promise;
+ assert.strictEqual(result, fakeProjectId);
+ });
+ });
+
+ describe('createReadSession', () => {
+ it('invokes createReadSession without error', async () => {
+ const client = new bigqueryreadModule.v1beta2.BigQueryReadClient({
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
+ projectId: 'bogus',
+ });
+ await client.initialize();
+ const request = generateSampleMessage(
+ new protos.google.cloud.bigquery.storage.v1beta2.CreateReadSessionRequest(),
+ );
+ request.readSession ??= {};
+ const defaultValue1 = getTypeDefaultValue(
+ '.google.cloud.bigquery.storage.v1beta2.CreateReadSessionRequest',
+ ['readSession', 'table'],
+ );
+ request.readSession.table = defaultValue1;
+ const expectedHeaderRequestParams = `read_session.table=${defaultValue1 ?? ''}`;
+ const expectedResponse = generateSampleMessage(
+ new protos.google.cloud.bigquery.storage.v1beta2.ReadSession(),
+ );
+ client.innerApiCalls.createReadSession = stubSimpleCall(expectedResponse);
+ const [response] = await client.createReadSession(request);
+ assert.deepStrictEqual(response, expectedResponse);
+ const actualRequest = (
+ client.innerApiCalls.createReadSession as SinonStub
+ ).getCall(0).args[0];
+ assert.deepStrictEqual(actualRequest, request);
+ const actualHeaderRequestParams = (
+ client.innerApiCalls.createReadSession as SinonStub
+ ).getCall(0).args[1].otherArgs.headers['x-goog-request-params'];
+ assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams));
+ });
+
+ it('invokes createReadSession without error using callback', async () => {
+ const client = new bigqueryreadModule.v1beta2.BigQueryReadClient({
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
+ projectId: 'bogus',
+ });
+ await client.initialize();
+ const request = generateSampleMessage(
+ new protos.google.cloud.bigquery.storage.v1beta2.CreateReadSessionRequest(),
+ );
+ request.readSession ??= {};
+ const defaultValue1 = getTypeDefaultValue(
+ '.google.cloud.bigquery.storage.v1beta2.CreateReadSessionRequest',
+ ['readSession', 'table'],
+ );
+ request.readSession.table = defaultValue1;
+ const expectedHeaderRequestParams = `read_session.table=${defaultValue1 ?? ''}`;
+ const expectedResponse = generateSampleMessage(
+ new protos.google.cloud.bigquery.storage.v1beta2.ReadSession(),
+ );
+ client.innerApiCalls.createReadSession =
+ stubSimpleCallWithCallback(expectedResponse);
+ const promise = new Promise((resolve, reject) => {
+ client.createReadSession(
+ request,
+ (
+ err?: Error | null,
+ result?: protos.google.cloud.bigquery.storage.v1beta2.IReadSession | null,
+ ) => {
+ if (err) {
+ reject(err);
+ } else {
+ resolve(result);
+ }
+ },
+ );
+ });
+ const response = await promise;
+ assert.deepStrictEqual(response, expectedResponse);
+ const actualRequest = (
+ client.innerApiCalls.createReadSession as SinonStub
+ ).getCall(0).args[0];
+ assert.deepStrictEqual(actualRequest, request);
+ const actualHeaderRequestParams = (
+ client.innerApiCalls.createReadSession as SinonStub
+ ).getCall(0).args[1].otherArgs.headers['x-goog-request-params'];
+ assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams));
+ });
+
+ it('invokes createReadSession with error', async () => {
+ const client = new bigqueryreadModule.v1beta2.BigQueryReadClient({
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
+ projectId: 'bogus',
+ });
+ await client.initialize();
+ const request = generateSampleMessage(
+ new protos.google.cloud.bigquery.storage.v1beta2.CreateReadSessionRequest(),
+ );
+ request.readSession ??= {};
+ const defaultValue1 = getTypeDefaultValue(
+ '.google.cloud.bigquery.storage.v1beta2.CreateReadSessionRequest',
+ ['readSession', 'table'],
+ );
+ request.readSession.table = defaultValue1;
+ const expectedHeaderRequestParams = `read_session.table=${defaultValue1 ?? ''}`;
+ const expectedError = new Error('expected');
+ client.innerApiCalls.createReadSession = stubSimpleCall(
+ undefined,
+ expectedError,
+ );
+ await assert.rejects(client.createReadSession(request), expectedError);
+ const actualRequest = (
+ client.innerApiCalls.createReadSession as SinonStub
+ ).getCall(0).args[0];
+ assert.deepStrictEqual(actualRequest, request);
+ const actualHeaderRequestParams = (
+ client.innerApiCalls.createReadSession as SinonStub
+ ).getCall(0).args[1].otherArgs.headers['x-goog-request-params'];
+ assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams));
+ });
+
+ it('invokes createReadSession with closed client', async () => {
+ const client = new bigqueryreadModule.v1beta2.BigQueryReadClient({
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
+ projectId: 'bogus',
+ });
+ await client.initialize();
+ const request = generateSampleMessage(
+ new protos.google.cloud.bigquery.storage.v1beta2.CreateReadSessionRequest(),
+ );
+ request.readSession ??= {};
+ const defaultValue1 = getTypeDefaultValue(
+ '.google.cloud.bigquery.storage.v1beta2.CreateReadSessionRequest',
+ ['readSession', 'table'],
+ );
+ request.readSession.table = defaultValue1;
+ const expectedError = new Error('The client has already been closed.');
+ client.close().catch((err) => {
+ throw err;
+ });
+ await assert.rejects(client.createReadSession(request), expectedError);
+ });
+ });
+
+ describe('splitReadStream', () => {
+ it('invokes splitReadStream without error', async () => {
+ const client = new bigqueryreadModule.v1beta2.BigQueryReadClient({
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
+ projectId: 'bogus',
+ });
+ await client.initialize();
+ const request = generateSampleMessage(
+ new protos.google.cloud.bigquery.storage.v1beta2.SplitReadStreamRequest(),
+ );
+ const defaultValue1 = getTypeDefaultValue(
+ '.google.cloud.bigquery.storage.v1beta2.SplitReadStreamRequest',
+ ['name'],
+ );
+ request.name = defaultValue1;
+ const expectedHeaderRequestParams = `name=${defaultValue1 ?? ''}`;
+ const expectedResponse = generateSampleMessage(
+ new protos.google.cloud.bigquery.storage.v1beta2.SplitReadStreamResponse(),
+ );
+ client.innerApiCalls.splitReadStream = stubSimpleCall(expectedResponse);
+ const [response] = await client.splitReadStream(request);
+ assert.deepStrictEqual(response, expectedResponse);
+ const actualRequest = (
+ client.innerApiCalls.splitReadStream as SinonStub
+ ).getCall(0).args[0];
+ assert.deepStrictEqual(actualRequest, request);
+ const actualHeaderRequestParams = (
+ client.innerApiCalls.splitReadStream as SinonStub
+ ).getCall(0).args[1].otherArgs.headers['x-goog-request-params'];
+ assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams));
+ });
+
+ it('invokes splitReadStream without error using callback', async () => {
+ const client = new bigqueryreadModule.v1beta2.BigQueryReadClient({
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
+ projectId: 'bogus',
+ });
+ await client.initialize();
+ const request = generateSampleMessage(
+ new protos.google.cloud.bigquery.storage.v1beta2.SplitReadStreamRequest(),
+ );
+ const defaultValue1 = getTypeDefaultValue(
+ '.google.cloud.bigquery.storage.v1beta2.SplitReadStreamRequest',
+ ['name'],
+ );
+ request.name = defaultValue1;
+ const expectedHeaderRequestParams = `name=${defaultValue1 ?? ''}`;
+ const expectedResponse = generateSampleMessage(
+ new protos.google.cloud.bigquery.storage.v1beta2.SplitReadStreamResponse(),
+ );
+ client.innerApiCalls.splitReadStream =
+ stubSimpleCallWithCallback(expectedResponse);
+ const promise = new Promise((resolve, reject) => {
+ client.splitReadStream(
+ request,
+ (
+ err?: Error | null,
+ result?: protos.google.cloud.bigquery.storage.v1beta2.ISplitReadStreamResponse | null,
+ ) => {
+ if (err) {
+ reject(err);
+ } else {
+ resolve(result);
+ }
+ },
+ );
+ });
+ const response = await promise;
+ assert.deepStrictEqual(response, expectedResponse);
+ const actualRequest = (
+ client.innerApiCalls.splitReadStream as SinonStub
+ ).getCall(0).args[0];
+ assert.deepStrictEqual(actualRequest, request);
+ const actualHeaderRequestParams = (
+ client.innerApiCalls.splitReadStream as SinonStub
+ ).getCall(0).args[1].otherArgs.headers['x-goog-request-params'];
+ assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams));
+ });
+
+ it('invokes splitReadStream with error', async () => {
+ const client = new bigqueryreadModule.v1beta2.BigQueryReadClient({
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
+ projectId: 'bogus',
+ });
+ await client.initialize();
+ const request = generateSampleMessage(
+ new protos.google.cloud.bigquery.storage.v1beta2.SplitReadStreamRequest(),
+ );
+ const defaultValue1 = getTypeDefaultValue(
+ '.google.cloud.bigquery.storage.v1beta2.SplitReadStreamRequest',
+ ['name'],
+ );
+ request.name = defaultValue1;
+ const expectedHeaderRequestParams = `name=${defaultValue1 ?? ''}`;
+ const expectedError = new Error('expected');
+ client.innerApiCalls.splitReadStream = stubSimpleCall(
+ undefined,
+ expectedError,
+ );
+ await assert.rejects(client.splitReadStream(request), expectedError);
+ const actualRequest = (
+ client.innerApiCalls.splitReadStream as SinonStub
+ ).getCall(0).args[0];
+ assert.deepStrictEqual(actualRequest, request);
+ const actualHeaderRequestParams = (
+ client.innerApiCalls.splitReadStream as SinonStub
+ ).getCall(0).args[1].otherArgs.headers['x-goog-request-params'];
+ assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams));
+ });
+
+ it('invokes splitReadStream with closed client', async () => {
+ const client = new bigqueryreadModule.v1beta2.BigQueryReadClient({
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
+ projectId: 'bogus',
+ });
+ await client.initialize();
+ const request = generateSampleMessage(
+ new protos.google.cloud.bigquery.storage.v1beta2.SplitReadStreamRequest(),
+ );
+ const defaultValue1 = getTypeDefaultValue(
+ '.google.cloud.bigquery.storage.v1beta2.SplitReadStreamRequest',
+ ['name'],
+ );
+ request.name = defaultValue1;
+ const expectedError = new Error('The client has already been closed.');
+ client.close().catch((err) => {
+ throw err;
+ });
+ await assert.rejects(client.splitReadStream(request), expectedError);
+ });
+ });
+
+ describe('readRows', () => {
+ it('invokes readRows without error', async () => {
+ const client = new bigqueryreadModule.v1beta2.BigQueryReadClient({
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
+ projectId: 'bogus',
+ });
+ await client.initialize();
+ const request = generateSampleMessage(
+ new protos.google.cloud.bigquery.storage.v1beta2.ReadRowsRequest(),
+ );
+ const defaultValue1 = getTypeDefaultValue(
+ '.google.cloud.bigquery.storage.v1beta2.ReadRowsRequest',
+ ['readStream'],
+ );
+ request.readStream = defaultValue1;
+ const expectedHeaderRequestParams = `read_stream=${defaultValue1 ?? ''}`;
+ const expectedResponse = generateSampleMessage(
+ new protos.google.cloud.bigquery.storage.v1beta2.ReadRowsResponse(),
+ );
+ client.innerApiCalls.readRows = stubServerStreamingCall(expectedResponse);
+ const stream = client.readRows(request);
+ const promise = new Promise((resolve, reject) => {
+ stream.on(
+ 'data',
+ (
+ response: protos.google.cloud.bigquery.storage.v1beta2.ReadRowsResponse,
+ ) => {
+ resolve(response);
+ },
+ );
+ stream.on('error', (err: Error) => {
+ reject(err);
+ });
+ });
+ const response = await promise;
+ assert.deepStrictEqual(response, expectedResponse);
+ const actualRequest = (
+ client.innerApiCalls.readRows as SinonStub
+ ).getCall(0).args[0];
+ assert.deepStrictEqual(actualRequest, request);
+ const actualHeaderRequestParams = (
+ client.innerApiCalls.readRows as SinonStub
+ ).getCall(0).args[1].otherArgs.headers['x-goog-request-params'];
+ assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams));
+ });
+
+ it('invokes readRows without error and gaxServerStreamingRetries enabled', async () => {
+ const client = new bigqueryreadModule.v1beta2.BigQueryReadClient({
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
+ projectId: 'bogus',
+ gaxServerStreamingRetries: true,
+ });
+ await client.initialize();
+ const request = generateSampleMessage(
+ new protos.google.cloud.bigquery.storage.v1beta2.ReadRowsRequest(),
+ );
+ const defaultValue1 = getTypeDefaultValue(
+ '.google.cloud.bigquery.storage.v1beta2.ReadRowsRequest',
+ ['readStream'],
+ );
+ request.readStream = defaultValue1;
+ const expectedHeaderRequestParams = `read_stream=${defaultValue1 ?? ''}`;
+ const expectedResponse = generateSampleMessage(
+ new protos.google.cloud.bigquery.storage.v1beta2.ReadRowsResponse(),
+ );
+ client.innerApiCalls.readRows = stubServerStreamingCall(expectedResponse);
+ const stream = client.readRows(request);
+ const promise = new Promise((resolve, reject) => {
+ stream.on(
+ 'data',
+ (
+ response: protos.google.cloud.bigquery.storage.v1beta2.ReadRowsResponse,
+ ) => {
+ resolve(response);
+ },
+ );
+ stream.on('error', (err: Error) => {
+ reject(err);
+ });
+ });
+ const response = await promise;
+ assert.deepStrictEqual(response, expectedResponse);
+ const actualRequest = (
+ client.innerApiCalls.readRows as SinonStub
+ ).getCall(0).args[0];
+ assert.deepStrictEqual(actualRequest, request);
+ const actualHeaderRequestParams = (
+ client.innerApiCalls.readRows as SinonStub
+ ).getCall(0).args[1].otherArgs.headers['x-goog-request-params'];
+ assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams));
+ });
+
+ it('invokes readRows with error', async () => {
+ const client = new bigqueryreadModule.v1beta2.BigQueryReadClient({
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
+ projectId: 'bogus',
+ });
+ await client.initialize();
+ const request = generateSampleMessage(
+ new protos.google.cloud.bigquery.storage.v1beta2.ReadRowsRequest(),
+ );
+ const defaultValue1 = getTypeDefaultValue(
+ '.google.cloud.bigquery.storage.v1beta2.ReadRowsRequest',
+ ['readStream'],
+ );
+ request.readStream = defaultValue1;
+ const expectedHeaderRequestParams = `read_stream=${defaultValue1 ?? ''}`;
+ const expectedError = new Error('expected');
+ client.innerApiCalls.readRows = stubServerStreamingCall(
+ undefined,
+ expectedError,
+ );
+ const stream = client.readRows(request);
+ const promise = new Promise((resolve, reject) => {
+ stream.on(
+ 'data',
+ (
+ response: protos.google.cloud.bigquery.storage.v1beta2.ReadRowsResponse,
+ ) => {
+ resolve(response);
+ },
+ );
+ stream.on('error', (err: Error) => {
+ reject(err);
+ });
+ });
+ await assert.rejects(promise, expectedError);
+ const actualRequest = (
+ client.innerApiCalls.readRows as SinonStub
+ ).getCall(0).args[0];
+ assert.deepStrictEqual(actualRequest, request);
+ const actualHeaderRequestParams = (
+ client.innerApiCalls.readRows as SinonStub
+ ).getCall(0).args[1].otherArgs.headers['x-goog-request-params'];
+ assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams));
+ });
+
+ it('invokes readRows with closed client', async () => {
+ const client = new bigqueryreadModule.v1beta2.BigQueryReadClient({
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
+ projectId: 'bogus',
+ });
+ await client.initialize();
+ const request = generateSampleMessage(
+ new protos.google.cloud.bigquery.storage.v1beta2.ReadRowsRequest(),
+ );
+ const defaultValue1 = getTypeDefaultValue(
+ '.google.cloud.bigquery.storage.v1beta2.ReadRowsRequest',
+ ['readStream'],
+ );
+ request.readStream = defaultValue1;
+ const expectedError = new Error('The client has already been closed.');
+ client.close().catch((err) => {
+ throw err;
+ });
+ const stream = client.readRows(request, {
+ retryRequestOptions: { noResponseRetries: 0 },
+ });
+ const promise = new Promise((resolve, reject) => {
+ stream.on(
+ 'data',
+ (
+ response: protos.google.cloud.bigquery.storage.v1beta2.ReadRowsResponse,
+ ) => {
+ resolve(response);
+ },
+ );
+ stream.on('error', (err: Error) => {
+ reject(err);
+ });
+ });
+ await assert.rejects(promise, expectedError);
+ });
+ it('should create a client with gaxServerStreamingRetries enabled', () => {
+ const client = new bigqueryreadModule.v1beta2.BigQueryReadClient({
+ gaxServerStreamingRetries: true,
+ });
+ assert(client);
+ });
+ });
+
+ describe('Path templates', () => {
+ describe('project', async () => {
+ const fakePath = '/rendered/path/project';
+ const expectedParameters = {
+ project: 'projectValue',
+ };
+ const client = new bigqueryreadModule.v1beta2.BigQueryReadClient({
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
+ projectId: 'bogus',
+ });
+ await client.initialize();
+ client.pathTemplates.projectPathTemplate.render = sinon
+ .stub()
+ .returns(fakePath);
+ client.pathTemplates.projectPathTemplate.match = sinon
+ .stub()
+ .returns(expectedParameters);
+
+ it('projectPath', () => {
+ const result = client.projectPath('projectValue');
+ assert.strictEqual(result, fakePath);
+ assert(
+ (client.pathTemplates.projectPathTemplate.render as SinonStub)
+ .getCall(-1)
+ .calledWith(expectedParameters),
+ );
+ });
+
+ it('matchProjectFromProjectName', () => {
+ const result = client.matchProjectFromProjectName(fakePath);
+ assert.strictEqual(result, 'projectValue');
+ assert(
+ (client.pathTemplates.projectPathTemplate.match as SinonStub)
+ .getCall(-1)
+ .calledWith(fakePath),
+ );
+ });
+ });
+
+ describe('readSession', async () => {
+ const fakePath = '/rendered/path/readSession';
+ const expectedParameters = {
+ project: 'projectValue',
+ location: 'locationValue',
+ session: 'sessionValue',
+ };
+ const client = new bigqueryreadModule.v1beta2.BigQueryReadClient({
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
+ projectId: 'bogus',
+ });
+ await client.initialize();
+ client.pathTemplates.readSessionPathTemplate.render = sinon
+ .stub()
+ .returns(fakePath);
+ client.pathTemplates.readSessionPathTemplate.match = sinon
+ .stub()
+ .returns(expectedParameters);
+
+ it('readSessionPath', () => {
+ const result = client.readSessionPath(
+ 'projectValue',
+ 'locationValue',
+ 'sessionValue',
+ );
+ assert.strictEqual(result, fakePath);
+ assert(
+ (client.pathTemplates.readSessionPathTemplate.render as SinonStub)
+ .getCall(-1)
+ .calledWith(expectedParameters),
+ );
+ });
+
+ it('matchProjectFromReadSessionName', () => {
+ const result = client.matchProjectFromReadSessionName(fakePath);
+ assert.strictEqual(result, 'projectValue');
+ assert(
+ (client.pathTemplates.readSessionPathTemplate.match as SinonStub)
+ .getCall(-1)
+ .calledWith(fakePath),
+ );
+ });
+
+ it('matchLocationFromReadSessionName', () => {
+ const result = client.matchLocationFromReadSessionName(fakePath);
+ assert.strictEqual(result, 'locationValue');
+ assert(
+ (client.pathTemplates.readSessionPathTemplate.match as SinonStub)
+ .getCall(-1)
+ .calledWith(fakePath),
+ );
+ });
+
+ it('matchSessionFromReadSessionName', () => {
+ const result = client.matchSessionFromReadSessionName(fakePath);
+ assert.strictEqual(result, 'sessionValue');
+ assert(
+ (client.pathTemplates.readSessionPathTemplate.match as SinonStub)
+ .getCall(-1)
+ .calledWith(fakePath),
+ );
+ });
+ });
+
+ describe('readStream', async () => {
+ const fakePath = '/rendered/path/readStream';
+ const expectedParameters = {
+ project: 'projectValue',
+ location: 'locationValue',
+ session: 'sessionValue',
+ stream: 'streamValue',
+ };
+ const client = new bigqueryreadModule.v1beta2.BigQueryReadClient({
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
+ projectId: 'bogus',
+ });
+ await client.initialize();
+ client.pathTemplates.readStreamPathTemplate.render = sinon
+ .stub()
+ .returns(fakePath);
+ client.pathTemplates.readStreamPathTemplate.match = sinon
+ .stub()
+ .returns(expectedParameters);
+
+ it('readStreamPath', () => {
+ const result = client.readStreamPath(
+ 'projectValue',
+ 'locationValue',
+ 'sessionValue',
+ 'streamValue',
+ );
+ assert.strictEqual(result, fakePath);
+ assert(
+ (client.pathTemplates.readStreamPathTemplate.render as SinonStub)
+ .getCall(-1)
+ .calledWith(expectedParameters),
+ );
+ });
+
+ it('matchProjectFromReadStreamName', () => {
+ const result = client.matchProjectFromReadStreamName(fakePath);
+ assert.strictEqual(result, 'projectValue');
+ assert(
+ (client.pathTemplates.readStreamPathTemplate.match as SinonStub)
+ .getCall(-1)
+ .calledWith(fakePath),
+ );
+ });
+
+ it('matchLocationFromReadStreamName', () => {
+ const result = client.matchLocationFromReadStreamName(fakePath);
+ assert.strictEqual(result, 'locationValue');
+ assert(
+ (client.pathTemplates.readStreamPathTemplate.match as SinonStub)
+ .getCall(-1)
+ .calledWith(fakePath),
+ );
+ });
+
+ it('matchSessionFromReadStreamName', () => {
+ const result = client.matchSessionFromReadStreamName(fakePath);
+ assert.strictEqual(result, 'sessionValue');
+ assert(
+ (client.pathTemplates.readStreamPathTemplate.match as SinonStub)
+ .getCall(-1)
+ .calledWith(fakePath),
+ );
+ });
+
+ it('matchStreamFromReadStreamName', () => {
+ const result = client.matchStreamFromReadStreamName(fakePath);
+ assert.strictEqual(result, 'streamValue');
+ assert(
+ (client.pathTemplates.readStreamPathTemplate.match as SinonStub)
+ .getCall(-1)
+ .calledWith(fakePath),
+ );
+ });
+ });
+
+ describe('table', async () => {
+ const fakePath = '/rendered/path/table';
+ const expectedParameters = {
+ project: 'projectValue',
+ dataset: 'datasetValue',
+ table: 'tableValue',
+ };
+ const client = new bigqueryreadModule.v1beta2.BigQueryReadClient({
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
+ projectId: 'bogus',
+ });
+ await client.initialize();
+ client.pathTemplates.tablePathTemplate.render = sinon
+ .stub()
+ .returns(fakePath);
+ client.pathTemplates.tablePathTemplate.match = sinon
+ .stub()
+ .returns(expectedParameters);
+
+ it('tablePath', () => {
+ const result = client.tablePath(
+ 'projectValue',
+ 'datasetValue',
+ 'tableValue',
+ );
+ assert.strictEqual(result, fakePath);
+ assert(
+ (client.pathTemplates.tablePathTemplate.render as SinonStub)
+ .getCall(-1)
+ .calledWith(expectedParameters),
+ );
+ });
+
+ it('matchProjectFromTableName', () => {
+ const result = client.matchProjectFromTableName(fakePath);
+ assert.strictEqual(result, 'projectValue');
+ assert(
+ (client.pathTemplates.tablePathTemplate.match as SinonStub)
+ .getCall(-1)
+ .calledWith(fakePath),
+ );
+ });
+
+ it('matchDatasetFromTableName', () => {
+ const result = client.matchDatasetFromTableName(fakePath);
+ assert.strictEqual(result, 'datasetValue');
+ assert(
+ (client.pathTemplates.tablePathTemplate.match as SinonStub)
+ .getCall(-1)
+ .calledWith(fakePath),
+ );
+ });
+
+ it('matchTableFromTableName', () => {
+ const result = client.matchTableFromTableName(fakePath);
+ assert.strictEqual(result, 'tableValue');
+ assert(
+ (client.pathTemplates.tablePathTemplate.match as SinonStub)
+ .getCall(-1)
+ .calledWith(fakePath),
+ );
+ });
+ });
+
+ describe('writeStream', async () => {
+ const fakePath = '/rendered/path/writeStream';
+ const expectedParameters = {
+ project: 'projectValue',
+ dataset: 'datasetValue',
+ table: 'tableValue',
+ stream: 'streamValue',
+ };
+ const client = new bigqueryreadModule.v1beta2.BigQueryReadClient({
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
+ projectId: 'bogus',
+ });
+ await client.initialize();
+ client.pathTemplates.writeStreamPathTemplate.render = sinon
+ .stub()
+ .returns(fakePath);
+ client.pathTemplates.writeStreamPathTemplate.match = sinon
+ .stub()
+ .returns(expectedParameters);
+
+ it('writeStreamPath', () => {
+ const result = client.writeStreamPath(
+ 'projectValue',
+ 'datasetValue',
+ 'tableValue',
+ 'streamValue',
+ );
+ assert.strictEqual(result, fakePath);
+ assert(
+ (client.pathTemplates.writeStreamPathTemplate.render as SinonStub)
+ .getCall(-1)
+ .calledWith(expectedParameters),
+ );
+ });
+
+ it('matchProjectFromWriteStreamName', () => {
+ const result = client.matchProjectFromWriteStreamName(fakePath);
+ assert.strictEqual(result, 'projectValue');
+ assert(
+ (client.pathTemplates.writeStreamPathTemplate.match as SinonStub)
+ .getCall(-1)
+ .calledWith(fakePath),
+ );
+ });
+
+ it('matchDatasetFromWriteStreamName', () => {
+ const result = client.matchDatasetFromWriteStreamName(fakePath);
+ assert.strictEqual(result, 'datasetValue');
+ assert(
+ (client.pathTemplates.writeStreamPathTemplate.match as SinonStub)
+ .getCall(-1)
+ .calledWith(fakePath),
+ );
+ });
+
+ it('matchTableFromWriteStreamName', () => {
+ const result = client.matchTableFromWriteStreamName(fakePath);
+ assert.strictEqual(result, 'tableValue');
+ assert(
+ (client.pathTemplates.writeStreamPathTemplate.match as SinonStub)
+ .getCall(-1)
+ .calledWith(fakePath),
+ );
+ });
+
+ it('matchStreamFromWriteStreamName', () => {
+ const result = client.matchStreamFromWriteStreamName(fakePath);
+ assert.strictEqual(result, 'streamValue');
+ assert(
+ (client.pathTemplates.writeStreamPathTemplate.match as SinonStub)
+ .getCall(-1)
+ .calledWith(fakePath),
+ );
+ });
+ });
+ });
+});
diff --git a/handwritten/bigquery-storage/test/gapic_big_query_storage_v1beta1.ts b/handwritten/bigquery-storage/test/gapic_big_query_storage_v1beta1.ts
index 27cdb803d130..8a3938d925ea 100644
--- a/handwritten/bigquery-storage/test/gapic_big_query_storage_v1beta1.ts
+++ b/handwritten/bigquery-storage/test/gapic_big_query_storage_v1beta1.ts
@@ -1,4 +1,4 @@
-// Copyright 2025 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -19,13 +19,13 @@
import * as protos from '../protos/protos';
import * as assert from 'assert';
import * as sinon from 'sinon';
-import {SinonStub} from 'sinon';
-import {describe, it} from 'mocha';
+import { SinonStub } from 'sinon';
+import { describe, it } from 'mocha';
import * as bigquerystorageModule from '../src';
-import {PassThrough} from 'stream';
+import { PassThrough } from 'stream';
-import {protobuf} from 'google-gax';
+import { protobuf } from 'google-gax';
// Dynamically loaded proto JSON is needed to get the type information
// to fill in default values for request objects
@@ -45,7 +45,7 @@ function getTypeDefaultValue(typeName: string, fields: string[]) {
function generateSampleMessage(instance: T) {
const filledObject = (
instance.constructor as typeof protobuf.Message
- ).toObject(instance as protobuf.Message, {defaults: true});
+ ).toObject(instance as protobuf.Message, { defaults: true });
return (instance.constructor as typeof protobuf.Message).fromObject(
filledObject,
) as T;
@@ -204,7 +204,7 @@ describe('v1beta1.BigQueryStorageClient', () => {
it('has initialize method and supports deferred initialization', async () => {
const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
assert.strictEqual(client.bigQueryStorageStub, undefined);
@@ -212,12 +212,12 @@ describe('v1beta1.BigQueryStorageClient', () => {
assert(client.bigQueryStorageStub);
});
- it('has close method for the initialized client', done => {
+ it('has close method for the initialized client', (done) => {
const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize().catch(err => {
+ client.initialize().catch((err) => {
throw err;
});
assert(client.bigQueryStorageStub);
@@ -226,14 +226,14 @@ describe('v1beta1.BigQueryStorageClient', () => {
.then(() => {
done();
})
- .catch(err => {
+ .catch((err) => {
throw err;
});
});
- it('has close method for the non-initialized client', done => {
+ it('has close method for the non-initialized client', (done) => {
const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
assert.strictEqual(client.bigQueryStorageStub, undefined);
@@ -242,7 +242,7 @@ describe('v1beta1.BigQueryStorageClient', () => {
.then(() => {
done();
})
- .catch(err => {
+ .catch((err) => {
throw err;
});
});
@@ -250,7 +250,7 @@ describe('v1beta1.BigQueryStorageClient', () => {
it('has getProjectId method', async () => {
const fakeProjectId = 'fake-project-id';
const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
client.auth.getProjectId = sinon.stub().resolves(fakeProjectId);
@@ -262,7 +262,7 @@ describe('v1beta1.BigQueryStorageClient', () => {
it('has getProjectId method with callback', async () => {
const fakeProjectId = 'fake-project-id';
const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
client.auth.getProjectId = sinon
@@ -285,7 +285,7 @@ describe('v1beta1.BigQueryStorageClient', () => {
describe('createReadSession', () => {
it('invokes createReadSession without error', async () => {
const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -323,7 +323,7 @@ describe('v1beta1.BigQueryStorageClient', () => {
it('invokes createReadSession without error using callback', async () => {
const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -377,7 +377,7 @@ describe('v1beta1.BigQueryStorageClient', () => {
it('invokes createReadSession with error', async () => {
const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -415,7 +415,7 @@ describe('v1beta1.BigQueryStorageClient', () => {
it('invokes createReadSession with closed client', async () => {
const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -435,7 +435,7 @@ describe('v1beta1.BigQueryStorageClient', () => {
);
request.tableReference.datasetId = defaultValue2;
const expectedError = new Error('The client has already been closed.');
- client.close().catch(err => {
+ client.close().catch((err) => {
throw err;
});
await assert.rejects(client.createReadSession(request), expectedError);
@@ -445,7 +445,7 @@ describe('v1beta1.BigQueryStorageClient', () => {
describe('batchCreateReadSessionStreams', () => {
it('invokes batchCreateReadSessionStreams without error', async () => {
const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -478,7 +478,7 @@ describe('v1beta1.BigQueryStorageClient', () => {
it('invokes batchCreateReadSessionStreams without error using callback', async () => {
const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -526,7 +526,7 @@ describe('v1beta1.BigQueryStorageClient', () => {
it('invokes batchCreateReadSessionStreams with error', async () => {
const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -561,7 +561,7 @@ describe('v1beta1.BigQueryStorageClient', () => {
it('invokes batchCreateReadSessionStreams with closed client', async () => {
const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -575,7 +575,7 @@ describe('v1beta1.BigQueryStorageClient', () => {
);
request.session.name = defaultValue1;
const expectedError = new Error('The client has already been closed.');
- client.close().catch(err => {
+ client.close().catch((err) => {
throw err;
});
await assert.rejects(
@@ -588,7 +588,7 @@ describe('v1beta1.BigQueryStorageClient', () => {
describe('finalizeStream', () => {
it('invokes finalizeStream without error', async () => {
const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -620,7 +620,7 @@ describe('v1beta1.BigQueryStorageClient', () => {
it('invokes finalizeStream without error using callback', async () => {
const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -668,7 +668,7 @@ describe('v1beta1.BigQueryStorageClient', () => {
it('invokes finalizeStream with error', async () => {
const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -700,7 +700,7 @@ describe('v1beta1.BigQueryStorageClient', () => {
it('invokes finalizeStream with closed client', async () => {
const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -714,7 +714,7 @@ describe('v1beta1.BigQueryStorageClient', () => {
);
request.stream.name = defaultValue1;
const expectedError = new Error('The client has already been closed.');
- client.close().catch(err => {
+ client.close().catch((err) => {
throw err;
});
await assert.rejects(client.finalizeStream(request), expectedError);
@@ -724,7 +724,7 @@ describe('v1beta1.BigQueryStorageClient', () => {
describe('splitReadStream', () => {
it('invokes splitReadStream without error', async () => {
const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -756,7 +756,7 @@ describe('v1beta1.BigQueryStorageClient', () => {
it('invokes splitReadStream without error using callback', async () => {
const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -804,7 +804,7 @@ describe('v1beta1.BigQueryStorageClient', () => {
it('invokes splitReadStream with error', async () => {
const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -836,7 +836,7 @@ describe('v1beta1.BigQueryStorageClient', () => {
it('invokes splitReadStream with closed client', async () => {
const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -850,7 +850,7 @@ describe('v1beta1.BigQueryStorageClient', () => {
);
request.originalStream.name = defaultValue1;
const expectedError = new Error('The client has already been closed.');
- client.close().catch(err => {
+ client.close().catch((err) => {
throw err;
});
await assert.rejects(client.splitReadStream(request), expectedError);
@@ -860,7 +860,7 @@ describe('v1beta1.BigQueryStorageClient', () => {
describe('readRows', () => {
it('invokes readRows without error', async () => {
const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -907,7 +907,7 @@ describe('v1beta1.BigQueryStorageClient', () => {
it('invokes readRows without error and gaxServerStreamingRetries enabled', async () => {
const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
gaxServerStreamingRetries: true,
});
@@ -955,7 +955,7 @@ describe('v1beta1.BigQueryStorageClient', () => {
it('invokes readRows with error', async () => {
const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -1002,7 +1002,7 @@ describe('v1beta1.BigQueryStorageClient', () => {
it('invokes readRows with closed client', async () => {
const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -1017,11 +1017,11 @@ describe('v1beta1.BigQueryStorageClient', () => {
);
request.readPosition.stream.name = defaultValue1;
const expectedError = new Error('The client has already been closed.');
- client.close().catch(err => {
+ client.close().catch((err) => {
throw err;
});
const stream = client.readRows(request, {
- retryRequestOptions: {noResponseRetries: 0},
+ retryRequestOptions: { noResponseRetries: 0 },
});
const promise = new Promise((resolve, reject) => {
stream.on(
@@ -1053,7 +1053,7 @@ describe('v1beta1.BigQueryStorageClient', () => {
project: 'projectValue',
};
const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -1093,7 +1093,7 @@ describe('v1beta1.BigQueryStorageClient', () => {
session: 'sessionValue',
};
const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -1157,7 +1157,7 @@ describe('v1beta1.BigQueryStorageClient', () => {
stream: 'streamValue',
};
const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
diff --git a/handwritten/bigquery-storage/test/gapic_big_query_write_v1.ts b/handwritten/bigquery-storage/test/gapic_big_query_write_v1.ts
index 7c78a4d3b30b..164b1603e083 100644
--- a/handwritten/bigquery-storage/test/gapic_big_query_write_v1.ts
+++ b/handwritten/bigquery-storage/test/gapic_big_query_write_v1.ts
@@ -1,4 +1,4 @@
-// Copyright 2025 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -19,13 +19,13 @@
import * as protos from '../protos/protos';
import * as assert from 'assert';
import * as sinon from 'sinon';
-import {SinonStub} from 'sinon';
-import {describe, it} from 'mocha';
+import { SinonStub } from 'sinon';
+import { describe, it } from 'mocha';
import * as bigquerywriteModule from '../src';
-import {PassThrough} from 'stream';
+import { PassThrough } from 'stream';
-import {protobuf} from 'google-gax';
+import { protobuf } from 'google-gax';
// Dynamically loaded proto JSON is needed to get the type information
// to fill in default values for request objects
@@ -45,7 +45,7 @@ function getTypeDefaultValue(typeName: string, fields: string[]) {
function generateSampleMessage(instance: T) {
const filledObject = (
instance.constructor as typeof protobuf.Message
- ).toObject(instance as protobuf.Message, {defaults: true});
+ ).toObject(instance as protobuf.Message, { defaults: true });
return (instance.constructor as typeof protobuf.Message).fromObject(
filledObject,
) as T;
@@ -195,7 +195,7 @@ describe('v1.BigQueryWriteClient', () => {
it('has initialize method and supports deferred initialization', async () => {
const client = new bigquerywriteModule.v1.BigQueryWriteClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
assert.strictEqual(client.bigQueryWriteStub, undefined);
@@ -203,12 +203,12 @@ describe('v1.BigQueryWriteClient', () => {
assert(client.bigQueryWriteStub);
});
- it('has close method for the initialized client', done => {
+ it('has close method for the initialized client', (done) => {
const client = new bigquerywriteModule.v1.BigQueryWriteClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize().catch(err => {
+ client.initialize().catch((err) => {
throw err;
});
assert(client.bigQueryWriteStub);
@@ -217,14 +217,14 @@ describe('v1.BigQueryWriteClient', () => {
.then(() => {
done();
})
- .catch(err => {
+ .catch((err) => {
throw err;
});
});
- it('has close method for the non-initialized client', done => {
+ it('has close method for the non-initialized client', (done) => {
const client = new bigquerywriteModule.v1.BigQueryWriteClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
assert.strictEqual(client.bigQueryWriteStub, undefined);
@@ -233,7 +233,7 @@ describe('v1.BigQueryWriteClient', () => {
.then(() => {
done();
})
- .catch(err => {
+ .catch((err) => {
throw err;
});
});
@@ -241,7 +241,7 @@ describe('v1.BigQueryWriteClient', () => {
it('has getProjectId method', async () => {
const fakeProjectId = 'fake-project-id';
const client = new bigquerywriteModule.v1.BigQueryWriteClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
client.auth.getProjectId = sinon.stub().resolves(fakeProjectId);
@@ -253,7 +253,7 @@ describe('v1.BigQueryWriteClient', () => {
it('has getProjectId method with callback', async () => {
const fakeProjectId = 'fake-project-id';
const client = new bigquerywriteModule.v1.BigQueryWriteClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
client.auth.getProjectId = sinon
@@ -276,7 +276,7 @@ describe('v1.BigQueryWriteClient', () => {
describe('createWriteStream', () => {
it('invokes createWriteStream without error', async () => {
const client = new bigquerywriteModule.v1.BigQueryWriteClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -307,7 +307,7 @@ describe('v1.BigQueryWriteClient', () => {
it('invokes createWriteStream without error using callback', async () => {
const client = new bigquerywriteModule.v1.BigQueryWriteClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -354,7 +354,7 @@ describe('v1.BigQueryWriteClient', () => {
it('invokes createWriteStream with error', async () => {
const client = new bigquerywriteModule.v1.BigQueryWriteClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -385,7 +385,7 @@ describe('v1.BigQueryWriteClient', () => {
it('invokes createWriteStream with closed client', async () => {
const client = new bigquerywriteModule.v1.BigQueryWriteClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -398,7 +398,7 @@ describe('v1.BigQueryWriteClient', () => {
);
request.parent = defaultValue1;
const expectedError = new Error('The client has already been closed.');
- client.close().catch(err => {
+ client.close().catch((err) => {
throw err;
});
await assert.rejects(client.createWriteStream(request), expectedError);
@@ -408,7 +408,7 @@ describe('v1.BigQueryWriteClient', () => {
describe('getWriteStream', () => {
it('invokes getWriteStream without error', async () => {
const client = new bigquerywriteModule.v1.BigQueryWriteClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -439,7 +439,7 @@ describe('v1.BigQueryWriteClient', () => {
it('invokes getWriteStream without error using callback', async () => {
const client = new bigquerywriteModule.v1.BigQueryWriteClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -486,7 +486,7 @@ describe('v1.BigQueryWriteClient', () => {
it('invokes getWriteStream with error', async () => {
const client = new bigquerywriteModule.v1.BigQueryWriteClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -517,7 +517,7 @@ describe('v1.BigQueryWriteClient', () => {
it('invokes getWriteStream with closed client', async () => {
const client = new bigquerywriteModule.v1.BigQueryWriteClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -530,7 +530,7 @@ describe('v1.BigQueryWriteClient', () => {
);
request.name = defaultValue1;
const expectedError = new Error('The client has already been closed.');
- client.close().catch(err => {
+ client.close().catch((err) => {
throw err;
});
await assert.rejects(client.getWriteStream(request), expectedError);
@@ -540,7 +540,7 @@ describe('v1.BigQueryWriteClient', () => {
describe('finalizeWriteStream', () => {
it('invokes finalizeWriteStream without error', async () => {
const client = new bigquerywriteModule.v1.BigQueryWriteClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -572,7 +572,7 @@ describe('v1.BigQueryWriteClient', () => {
it('invokes finalizeWriteStream without error using callback', async () => {
const client = new bigquerywriteModule.v1.BigQueryWriteClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -619,7 +619,7 @@ describe('v1.BigQueryWriteClient', () => {
it('invokes finalizeWriteStream with error', async () => {
const client = new bigquerywriteModule.v1.BigQueryWriteClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -650,7 +650,7 @@ describe('v1.BigQueryWriteClient', () => {
it('invokes finalizeWriteStream with closed client', async () => {
const client = new bigquerywriteModule.v1.BigQueryWriteClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -663,7 +663,7 @@ describe('v1.BigQueryWriteClient', () => {
);
request.name = defaultValue1;
const expectedError = new Error('The client has already been closed.');
- client.close().catch(err => {
+ client.close().catch((err) => {
throw err;
});
await assert.rejects(client.finalizeWriteStream(request), expectedError);
@@ -673,7 +673,7 @@ describe('v1.BigQueryWriteClient', () => {
describe('batchCommitWriteStreams', () => {
it('invokes batchCommitWriteStreams without error', async () => {
const client = new bigquerywriteModule.v1.BigQueryWriteClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -705,7 +705,7 @@ describe('v1.BigQueryWriteClient', () => {
it('invokes batchCommitWriteStreams without error using callback', async () => {
const client = new bigquerywriteModule.v1.BigQueryWriteClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -752,7 +752,7 @@ describe('v1.BigQueryWriteClient', () => {
it('invokes batchCommitWriteStreams with error', async () => {
const client = new bigquerywriteModule.v1.BigQueryWriteClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -786,7 +786,7 @@ describe('v1.BigQueryWriteClient', () => {
it('invokes batchCommitWriteStreams with closed client', async () => {
const client = new bigquerywriteModule.v1.BigQueryWriteClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -799,7 +799,7 @@ describe('v1.BigQueryWriteClient', () => {
);
request.parent = defaultValue1;
const expectedError = new Error('The client has already been closed.');
- client.close().catch(err => {
+ client.close().catch((err) => {
throw err;
});
await assert.rejects(
@@ -812,7 +812,7 @@ describe('v1.BigQueryWriteClient', () => {
describe('flushRows', () => {
it('invokes flushRows without error', async () => {
const client = new bigquerywriteModule.v1.BigQueryWriteClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -843,7 +843,7 @@ describe('v1.BigQueryWriteClient', () => {
it('invokes flushRows without error using callback', async () => {
const client = new bigquerywriteModule.v1.BigQueryWriteClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -890,7 +890,7 @@ describe('v1.BigQueryWriteClient', () => {
it('invokes flushRows with error', async () => {
const client = new bigquerywriteModule.v1.BigQueryWriteClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -918,7 +918,7 @@ describe('v1.BigQueryWriteClient', () => {
it('invokes flushRows with closed client', async () => {
const client = new bigquerywriteModule.v1.BigQueryWriteClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -931,7 +931,7 @@ describe('v1.BigQueryWriteClient', () => {
);
request.writeStream = defaultValue1;
const expectedError = new Error('The client has already been closed.');
- client.close().catch(err => {
+ client.close().catch((err) => {
throw err;
});
await assert.rejects(client.flushRows(request), expectedError);
@@ -941,7 +941,7 @@ describe('v1.BigQueryWriteClient', () => {
describe('appendRows', () => {
it('invokes appendRows without error', async () => {
const client = new bigquerywriteModule.v1.BigQueryWriteClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -985,7 +985,7 @@ describe('v1.BigQueryWriteClient', () => {
it('invokes appendRows with error', async () => {
const client = new bigquerywriteModule.v1.BigQueryWriteClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -1034,7 +1034,7 @@ describe('v1.BigQueryWriteClient', () => {
project: 'projectValue',
};
const client = new bigquerywriteModule.v1.BigQueryWriteClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -1074,7 +1074,7 @@ describe('v1.BigQueryWriteClient', () => {
session: 'sessionValue',
};
const client = new bigquerywriteModule.v1.BigQueryWriteClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -1139,7 +1139,7 @@ describe('v1.BigQueryWriteClient', () => {
stream: 'streamValue',
};
const client = new bigquerywriteModule.v1.BigQueryWriteClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -1214,7 +1214,7 @@ describe('v1.BigQueryWriteClient', () => {
table: 'tableValue',
};
const client = new bigquerywriteModule.v1.BigQueryWriteClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -1279,7 +1279,7 @@ describe('v1.BigQueryWriteClient', () => {
stream: 'streamValue',
};
const client = new bigquerywriteModule.v1.BigQueryWriteClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
diff --git a/handwritten/bigquery-storage/test/gapic_metastore_partition_service_v1alpha.ts b/handwritten/bigquery-storage/test/gapic_metastore_partition_service_v1alpha.ts
index 5746d5e08b6f..245b16ba78f5 100644
--- a/handwritten/bigquery-storage/test/gapic_metastore_partition_service_v1alpha.ts
+++ b/handwritten/bigquery-storage/test/gapic_metastore_partition_service_v1alpha.ts
@@ -1,4 +1,4 @@
-// Copyright 2025 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -19,13 +19,13 @@
import * as protos from '../protos/protos';
import * as assert from 'assert';
import * as sinon from 'sinon';
-import {SinonStub} from 'sinon';
-import {describe, it} from 'mocha';
+import { SinonStub } from 'sinon';
+import { describe, it } from 'mocha';
import * as metastorepartitionserviceModule from '../src';
-import {PassThrough} from 'stream';
+import { PassThrough } from 'stream';
-import {protobuf} from 'google-gax';
+import { protobuf } from 'google-gax';
// Dynamically loaded proto JSON is needed to get the type information
// to fill in default values for request objects
@@ -45,7 +45,7 @@ function getTypeDefaultValue(typeName: string, fields: string[]) {
function generateSampleMessage(instance: T) {
const filledObject = (
instance.constructor as typeof protobuf.Message
- ).toObject(instance as protobuf.Message, {defaults: true});
+ ).toObject(instance as protobuf.Message, { defaults: true });
return (instance.constructor as typeof protobuf.Message).fromObject(
filledObject,
) as T;
@@ -123,7 +123,7 @@ describe('v1alpha.MetastorePartitionServiceClient', () => {
it('sets apiEndpoint according to universe domain camelCase', () => {
const client =
new metastorepartitionserviceModule.v1alpha.MetastorePartitionServiceClient(
- {universeDomain: 'example.com'},
+ { universeDomain: 'example.com' },
);
const servicePath = client.apiEndpoint;
assert.strictEqual(servicePath, 'bigquerystorage.example.com');
@@ -132,7 +132,7 @@ describe('v1alpha.MetastorePartitionServiceClient', () => {
it('sets apiEndpoint according to universe domain snakeCase', () => {
const client =
new metastorepartitionserviceModule.v1alpha.MetastorePartitionServiceClient(
- {universe_domain: 'example.com'},
+ { universe_domain: 'example.com' },
);
const servicePath = client.apiEndpoint;
assert.strictEqual(servicePath, 'bigquerystorage.example.com');
@@ -159,7 +159,7 @@ describe('v1alpha.MetastorePartitionServiceClient', () => {
process.env['GOOGLE_CLOUD_UNIVERSE_DOMAIN'] = 'example.com';
const client =
new metastorepartitionserviceModule.v1alpha.MetastorePartitionServiceClient(
- {universeDomain: 'configured.example.com'},
+ { universeDomain: 'configured.example.com' },
);
const servicePath = client.apiEndpoint;
assert.strictEqual(
@@ -177,7 +177,7 @@ describe('v1alpha.MetastorePartitionServiceClient', () => {
it('does not allow setting both universeDomain and universe_domain', () => {
assert.throws(() => {
new metastorepartitionserviceModule.v1alpha.MetastorePartitionServiceClient(
- {universe_domain: 'example.com', universeDomain: 'example.net'},
+ { universe_domain: 'example.com', universeDomain: 'example.net' },
);
});
});
@@ -210,7 +210,7 @@ describe('v1alpha.MetastorePartitionServiceClient', () => {
const client =
new metastorepartitionserviceModule.v1alpha.MetastorePartitionServiceClient(
{
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
},
);
@@ -219,15 +219,15 @@ describe('v1alpha.MetastorePartitionServiceClient', () => {
assert(client.metastorePartitionServiceStub);
});
- it('has close method for the initialized client', done => {
+ it('has close method for the initialized client', (done) => {
const client =
new metastorepartitionserviceModule.v1alpha.MetastorePartitionServiceClient(
{
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
},
);
- client.initialize().catch(err => {
+ client.initialize().catch((err) => {
throw err;
});
assert(client.metastorePartitionServiceStub);
@@ -236,16 +236,16 @@ describe('v1alpha.MetastorePartitionServiceClient', () => {
.then(() => {
done();
})
- .catch(err => {
+ .catch((err) => {
throw err;
});
});
- it('has close method for the non-initialized client', done => {
+ it('has close method for the non-initialized client', (done) => {
const client =
new metastorepartitionserviceModule.v1alpha.MetastorePartitionServiceClient(
{
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
},
);
@@ -255,7 +255,7 @@ describe('v1alpha.MetastorePartitionServiceClient', () => {
.then(() => {
done();
})
- .catch(err => {
+ .catch((err) => {
throw err;
});
});
@@ -265,7 +265,7 @@ describe('v1alpha.MetastorePartitionServiceClient', () => {
const client =
new metastorepartitionserviceModule.v1alpha.MetastorePartitionServiceClient(
{
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
},
);
@@ -280,7 +280,7 @@ describe('v1alpha.MetastorePartitionServiceClient', () => {
const client =
new metastorepartitionserviceModule.v1alpha.MetastorePartitionServiceClient(
{
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
},
);
@@ -306,7 +306,7 @@ describe('v1alpha.MetastorePartitionServiceClient', () => {
const client =
new metastorepartitionserviceModule.v1alpha.MetastorePartitionServiceClient(
{
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
},
);
@@ -341,7 +341,7 @@ describe('v1alpha.MetastorePartitionServiceClient', () => {
const client =
new metastorepartitionserviceModule.v1alpha.MetastorePartitionServiceClient(
{
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
},
);
@@ -391,7 +391,7 @@ describe('v1alpha.MetastorePartitionServiceClient', () => {
const client =
new metastorepartitionserviceModule.v1alpha.MetastorePartitionServiceClient(
{
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
},
);
@@ -428,7 +428,7 @@ describe('v1alpha.MetastorePartitionServiceClient', () => {
const client =
new metastorepartitionserviceModule.v1alpha.MetastorePartitionServiceClient(
{
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
},
);
@@ -442,7 +442,7 @@ describe('v1alpha.MetastorePartitionServiceClient', () => {
);
request.parent = defaultValue1;
const expectedError = new Error('The client has already been closed.');
- client.close().catch(err => {
+ client.close().catch((err) => {
throw err;
});
await assert.rejects(
@@ -457,7 +457,7 @@ describe('v1alpha.MetastorePartitionServiceClient', () => {
const client =
new metastorepartitionserviceModule.v1alpha.MetastorePartitionServiceClient(
{
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
},
);
@@ -492,7 +492,7 @@ describe('v1alpha.MetastorePartitionServiceClient', () => {
const client =
new metastorepartitionserviceModule.v1alpha.MetastorePartitionServiceClient(
{
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
},
);
@@ -542,7 +542,7 @@ describe('v1alpha.MetastorePartitionServiceClient', () => {
const client =
new metastorepartitionserviceModule.v1alpha.MetastorePartitionServiceClient(
{
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
},
);
@@ -579,7 +579,7 @@ describe('v1alpha.MetastorePartitionServiceClient', () => {
const client =
new metastorepartitionserviceModule.v1alpha.MetastorePartitionServiceClient(
{
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
},
);
@@ -593,7 +593,7 @@ describe('v1alpha.MetastorePartitionServiceClient', () => {
);
request.parent = defaultValue1;
const expectedError = new Error('The client has already been closed.');
- client.close().catch(err => {
+ client.close().catch((err) => {
throw err;
});
await assert.rejects(
@@ -608,7 +608,7 @@ describe('v1alpha.MetastorePartitionServiceClient', () => {
const client =
new metastorepartitionserviceModule.v1alpha.MetastorePartitionServiceClient(
{
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
},
);
@@ -643,7 +643,7 @@ describe('v1alpha.MetastorePartitionServiceClient', () => {
const client =
new metastorepartitionserviceModule.v1alpha.MetastorePartitionServiceClient(
{
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
},
);
@@ -693,7 +693,7 @@ describe('v1alpha.MetastorePartitionServiceClient', () => {
const client =
new metastorepartitionserviceModule.v1alpha.MetastorePartitionServiceClient(
{
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
},
);
@@ -730,7 +730,7 @@ describe('v1alpha.MetastorePartitionServiceClient', () => {
const client =
new metastorepartitionserviceModule.v1alpha.MetastorePartitionServiceClient(
{
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
},
);
@@ -744,7 +744,7 @@ describe('v1alpha.MetastorePartitionServiceClient', () => {
);
request.parent = defaultValue1;
const expectedError = new Error('The client has already been closed.');
- client.close().catch(err => {
+ client.close().catch((err) => {
throw err;
});
await assert.rejects(
@@ -759,7 +759,7 @@ describe('v1alpha.MetastorePartitionServiceClient', () => {
const client =
new metastorepartitionserviceModule.v1alpha.MetastorePartitionServiceClient(
{
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
},
);
@@ -794,7 +794,7 @@ describe('v1alpha.MetastorePartitionServiceClient', () => {
const client =
new metastorepartitionserviceModule.v1alpha.MetastorePartitionServiceClient(
{
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
},
);
@@ -844,7 +844,7 @@ describe('v1alpha.MetastorePartitionServiceClient', () => {
const client =
new metastorepartitionserviceModule.v1alpha.MetastorePartitionServiceClient(
{
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
},
);
@@ -881,7 +881,7 @@ describe('v1alpha.MetastorePartitionServiceClient', () => {
const client =
new metastorepartitionserviceModule.v1alpha.MetastorePartitionServiceClient(
{
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
},
);
@@ -895,7 +895,7 @@ describe('v1alpha.MetastorePartitionServiceClient', () => {
);
request.parent = defaultValue1;
const expectedError = new Error('The client has already been closed.');
- client.close().catch(err => {
+ client.close().catch((err) => {
throw err;
});
await assert.rejects(
@@ -910,7 +910,7 @@ describe('v1alpha.MetastorePartitionServiceClient', () => {
const client =
new metastorepartitionserviceModule.v1alpha.MetastorePartitionServiceClient(
{
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
},
);
@@ -958,7 +958,7 @@ describe('v1alpha.MetastorePartitionServiceClient', () => {
const client =
new metastorepartitionserviceModule.v1alpha.MetastorePartitionServiceClient(
{
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
},
);
@@ -1013,7 +1013,7 @@ describe('v1alpha.MetastorePartitionServiceClient', () => {
const client =
new metastorepartitionserviceModule.v1alpha.MetastorePartitionServiceClient(
{
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
},
);
@@ -1091,7 +1091,7 @@ describe('v1alpha.MetastorePartitionServiceClient', () => {
const client =
new metastorepartitionserviceModule.v1alpha.MetastorePartitionServiceClient(
{
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
},
);
diff --git a/handwritten/bigquery-storage/test/gapic_metastore_partition_service_v1beta.ts b/handwritten/bigquery-storage/test/gapic_metastore_partition_service_v1beta.ts
index da3e55a9c512..408a160921b4 100644
--- a/handwritten/bigquery-storage/test/gapic_metastore_partition_service_v1beta.ts
+++ b/handwritten/bigquery-storage/test/gapic_metastore_partition_service_v1beta.ts
@@ -1,4 +1,4 @@
-// Copyright 2025 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -19,13 +19,13 @@
import * as protos from '../protos/protos';
import * as assert from 'assert';
import * as sinon from 'sinon';
-import {SinonStub} from 'sinon';
-import {describe, it} from 'mocha';
+import { SinonStub } from 'sinon';
+import { describe, it } from 'mocha';
import * as metastorepartitionserviceModule from '../src';
-import {PassThrough} from 'stream';
+import { PassThrough } from 'stream';
-import {protobuf} from 'google-gax';
+import { protobuf } from 'google-gax';
// Dynamically loaded proto JSON is needed to get the type information
// to fill in default values for request objects
@@ -45,7 +45,7 @@ function getTypeDefaultValue(typeName: string, fields: string[]) {
function generateSampleMessage<T extends protobuf.Message>(instance: T) {
const filledObject = (
instance.constructor as typeof protobuf.Message
- ).toObject(instance as protobuf.Message, {defaults: true});
+ ).toObject(instance as protobuf.Message, { defaults: true });
return (instance.constructor as typeof protobuf.Message).fromObject(
filledObject,
) as T;
@@ -123,7 +123,7 @@ describe('v1beta.MetastorePartitionServiceClient', () => {
it('sets apiEndpoint according to universe domain camelCase', () => {
const client =
new metastorepartitionserviceModule.v1beta.MetastorePartitionServiceClient(
- {universeDomain: 'example.com'},
+ { universeDomain: 'example.com' },
);
const servicePath = client.apiEndpoint;
assert.strictEqual(servicePath, 'bigquerystorage.example.com');
@@ -132,7 +132,7 @@ describe('v1beta.MetastorePartitionServiceClient', () => {
it('sets apiEndpoint according to universe domain snakeCase', () => {
const client =
new metastorepartitionserviceModule.v1beta.MetastorePartitionServiceClient(
- {universe_domain: 'example.com'},
+ { universe_domain: 'example.com' },
);
const servicePath = client.apiEndpoint;
assert.strictEqual(servicePath, 'bigquerystorage.example.com');
@@ -159,7 +159,7 @@ describe('v1beta.MetastorePartitionServiceClient', () => {
process.env['GOOGLE_CLOUD_UNIVERSE_DOMAIN'] = 'example.com';
const client =
new metastorepartitionserviceModule.v1beta.MetastorePartitionServiceClient(
- {universeDomain: 'configured.example.com'},
+ { universeDomain: 'configured.example.com' },
);
const servicePath = client.apiEndpoint;
assert.strictEqual(
@@ -177,7 +177,7 @@ describe('v1beta.MetastorePartitionServiceClient', () => {
it('does not allow setting both universeDomain and universe_domain', () => {
assert.throws(() => {
new metastorepartitionserviceModule.v1beta.MetastorePartitionServiceClient(
- {universe_domain: 'example.com', universeDomain: 'example.net'},
+ { universe_domain: 'example.com', universeDomain: 'example.net' },
);
});
});
@@ -210,7 +210,7 @@ describe('v1beta.MetastorePartitionServiceClient', () => {
const client =
new metastorepartitionserviceModule.v1beta.MetastorePartitionServiceClient(
{
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
},
);
@@ -219,15 +219,15 @@ describe('v1beta.MetastorePartitionServiceClient', () => {
assert(client.metastorePartitionServiceStub);
});
- it('has close method for the initialized client', done => {
+ it('has close method for the initialized client', (done) => {
const client =
new metastorepartitionserviceModule.v1beta.MetastorePartitionServiceClient(
{
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
},
);
- client.initialize().catch(err => {
+ client.initialize().catch((err) => {
throw err;
});
assert(client.metastorePartitionServiceStub);
@@ -236,16 +236,16 @@ describe('v1beta.MetastorePartitionServiceClient', () => {
.then(() => {
done();
})
- .catch(err => {
+ .catch((err) => {
throw err;
});
});
- it('has close method for the non-initialized client', done => {
+ it('has close method for the non-initialized client', (done) => {
const client =
new metastorepartitionserviceModule.v1beta.MetastorePartitionServiceClient(
{
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
},
);
@@ -255,7 +255,7 @@ describe('v1beta.MetastorePartitionServiceClient', () => {
.then(() => {
done();
})
- .catch(err => {
+ .catch((err) => {
throw err;
});
});
@@ -265,7 +265,7 @@ describe('v1beta.MetastorePartitionServiceClient', () => {
const client =
new metastorepartitionserviceModule.v1beta.MetastorePartitionServiceClient(
{
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
},
);
@@ -280,7 +280,7 @@ describe('v1beta.MetastorePartitionServiceClient', () => {
const client =
new metastorepartitionserviceModule.v1beta.MetastorePartitionServiceClient(
{
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
},
);
@@ -306,7 +306,7 @@ describe('v1beta.MetastorePartitionServiceClient', () => {
const client =
new metastorepartitionserviceModule.v1beta.MetastorePartitionServiceClient(
{
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
},
);
@@ -341,7 +341,7 @@ describe('v1beta.MetastorePartitionServiceClient', () => {
const client =
new metastorepartitionserviceModule.v1beta.MetastorePartitionServiceClient(
{
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
},
);
@@ -391,7 +391,7 @@ describe('v1beta.MetastorePartitionServiceClient', () => {
const client =
new metastorepartitionserviceModule.v1beta.MetastorePartitionServiceClient(
{
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
},
);
@@ -428,7 +428,7 @@ describe('v1beta.MetastorePartitionServiceClient', () => {
const client =
new metastorepartitionserviceModule.v1beta.MetastorePartitionServiceClient(
{
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
},
);
@@ -442,7 +442,7 @@ describe('v1beta.MetastorePartitionServiceClient', () => {
);
request.parent = defaultValue1;
const expectedError = new Error('The client has already been closed.');
- client.close().catch(err => {
+ client.close().catch((err) => {
throw err;
});
await assert.rejects(
@@ -457,7 +457,7 @@ describe('v1beta.MetastorePartitionServiceClient', () => {
const client =
new metastorepartitionserviceModule.v1beta.MetastorePartitionServiceClient(
{
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
},
);
@@ -492,7 +492,7 @@ describe('v1beta.MetastorePartitionServiceClient', () => {
const client =
new metastorepartitionserviceModule.v1beta.MetastorePartitionServiceClient(
{
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
},
);
@@ -542,7 +542,7 @@ describe('v1beta.MetastorePartitionServiceClient', () => {
const client =
new metastorepartitionserviceModule.v1beta.MetastorePartitionServiceClient(
{
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
},
);
@@ -579,7 +579,7 @@ describe('v1beta.MetastorePartitionServiceClient', () => {
const client =
new metastorepartitionserviceModule.v1beta.MetastorePartitionServiceClient(
{
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
},
);
@@ -593,7 +593,7 @@ describe('v1beta.MetastorePartitionServiceClient', () => {
);
request.parent = defaultValue1;
const expectedError = new Error('The client has already been closed.');
- client.close().catch(err => {
+ client.close().catch((err) => {
throw err;
});
await assert.rejects(
@@ -608,7 +608,7 @@ describe('v1beta.MetastorePartitionServiceClient', () => {
const client =
new metastorepartitionserviceModule.v1beta.MetastorePartitionServiceClient(
{
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
},
);
@@ -643,7 +643,7 @@ describe('v1beta.MetastorePartitionServiceClient', () => {
const client =
new metastorepartitionserviceModule.v1beta.MetastorePartitionServiceClient(
{
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
},
);
@@ -693,7 +693,7 @@ describe('v1beta.MetastorePartitionServiceClient', () => {
const client =
new metastorepartitionserviceModule.v1beta.MetastorePartitionServiceClient(
{
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
},
);
@@ -730,7 +730,7 @@ describe('v1beta.MetastorePartitionServiceClient', () => {
const client =
new metastorepartitionserviceModule.v1beta.MetastorePartitionServiceClient(
{
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
},
);
@@ -744,7 +744,7 @@ describe('v1beta.MetastorePartitionServiceClient', () => {
);
request.parent = defaultValue1;
const expectedError = new Error('The client has already been closed.');
- client.close().catch(err => {
+ client.close().catch((err) => {
throw err;
});
await assert.rejects(
@@ -759,7 +759,7 @@ describe('v1beta.MetastorePartitionServiceClient', () => {
const client =
new metastorepartitionserviceModule.v1beta.MetastorePartitionServiceClient(
{
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
},
);
@@ -794,7 +794,7 @@ describe('v1beta.MetastorePartitionServiceClient', () => {
const client =
new metastorepartitionserviceModule.v1beta.MetastorePartitionServiceClient(
{
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
},
);
@@ -844,7 +844,7 @@ describe('v1beta.MetastorePartitionServiceClient', () => {
const client =
new metastorepartitionserviceModule.v1beta.MetastorePartitionServiceClient(
{
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
},
);
@@ -881,7 +881,7 @@ describe('v1beta.MetastorePartitionServiceClient', () => {
const client =
new metastorepartitionserviceModule.v1beta.MetastorePartitionServiceClient(
{
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
},
);
@@ -895,7 +895,7 @@ describe('v1beta.MetastorePartitionServiceClient', () => {
);
request.parent = defaultValue1;
const expectedError = new Error('The client has already been closed.');
- client.close().catch(err => {
+ client.close().catch((err) => {
throw err;
});
await assert.rejects(
@@ -910,7 +910,7 @@ describe('v1beta.MetastorePartitionServiceClient', () => {
const client =
new metastorepartitionserviceModule.v1beta.MetastorePartitionServiceClient(
{
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
},
);
@@ -958,7 +958,7 @@ describe('v1beta.MetastorePartitionServiceClient', () => {
const client =
new metastorepartitionserviceModule.v1beta.MetastorePartitionServiceClient(
{
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
},
);
@@ -1013,7 +1013,7 @@ describe('v1beta.MetastorePartitionServiceClient', () => {
const client =
new metastorepartitionserviceModule.v1beta.MetastorePartitionServiceClient(
{
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
},
);
@@ -1091,7 +1091,7 @@ describe('v1beta.MetastorePartitionServiceClient', () => {
const client =
new metastorepartitionserviceModule.v1beta.MetastorePartitionServiceClient(
{
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
},
);
diff --git a/handwritten/bigquery-storage/tsconfig.json b/handwritten/bigquery-storage/tsconfig.json
index ee0702dcdefd..ca73e7bfc824 100644
--- a/handwritten/bigquery-storage/tsconfig.json
+++ b/handwritten/bigquery-storage/tsconfig.json
@@ -14,9 +14,7 @@
"src/**/*.ts",
"test/*.ts",
"test/**/*.ts",
- "test/testdata/message.json",
"system-test/*.ts",
- "system-test/fixtures/customer_record.json",
"src/**/*.json",
"samples/**/*.json",
"protos/protos.json"
diff --git a/handwritten/bigquery-storage/webpack.config.js b/handwritten/bigquery-storage/webpack.config.js
index de163617408c..bad73cc0fe26 100644
--- a/handwritten/bigquery-storage/webpack.config.js
+++ b/handwritten/bigquery-storage/webpack.config.js
@@ -36,27 +36,27 @@ module.exports = {
{
test: /\.tsx?$/,
use: 'ts-loader',
- exclude: /node_modules/,
+ exclude: /node_modules/
},
{
test: /node_modules[\\/]@grpc[\\/]grpc-js/,
- use: 'null-loader',
+ use: 'null-loader'
},
{
test: /node_modules[\\/]grpc/,
- use: 'null-loader',
+ use: 'null-loader'
},
{
test: /node_modules[\\/]retry-request/,
- use: 'null-loader',
+ use: 'null-loader'
},
{
test: /node_modules[\\/]https?-proxy-agent/,
- use: 'null-loader',
+ use: 'null-loader'
},
{
test: /node_modules[\\/]gtoken/,
- use: 'null-loader',
+ use: 'null-loader'
},
],
},
diff --git a/handwritten/datastore/.OwlBot.yaml b/handwritten/datastore/.OwlBot.yaml
index 7db91a105f96..7ed9b7b5cff3 100644
--- a/handwritten/datastore/.OwlBot.yaml
+++ b/handwritten/datastore/.OwlBot.yaml
@@ -1,10 +1,10 @@
-# Copyright 2021 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
-# http://www.apache.org/licenses/LICENSE-2.0
+# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
@@ -12,15 +12,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-
-deep-remove-regex:
- - /owl-bot-staging
-
deep-copy-regex:
- - source: /google/datastore/(v.*)/.*-nodejs
- dest: /owl-bot-staging/datastore/$1
- - source: /google/datastore/(admin/v.*)/.*-nodejs
- dest: /owl-bot-staging/datastore/$1
-
-begin-after-commit-hash: fb91803ccef5d7c695139b22788b309e2197856b
+ - source: /google/datastore/admin/google-datastore-admin-nodejs
+ dest: /owl-bot-staging/google-datastore-admin
+api-name: admin
\ No newline at end of file
diff --git a/handwritten/datastore/.jsdoc.js b/handwritten/datastore/.jsdoc.js
index 4e00e5b8f282..47c538344e03 100644
--- a/handwritten/datastore/.jsdoc.js
+++ b/handwritten/datastore/.jsdoc.js
@@ -1,4 +1,4 @@
-// Copyright 2025 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -40,7 +40,7 @@ module.exports = {
includePattern: '\\.js$'
},
templates: {
- copyright: 'Copyright 2025 Google LLC',
+ copyright: 'Copyright 2026 Google LLC',
includeDate: false,
sourceFiles: false,
systemName: '@google-cloud/datastore',
diff --git a/handwritten/datastore/samples/generated/v1/datastore.allocate_ids.js b/handwritten/datastore/samples/generated/v1/datastore.allocate_ids.js
index e47fb3a80670..a9d6a4da9b1f 100644
--- a/handwritten/datastore/samples/generated/v1/datastore.allocate_ids.js
+++ b/handwritten/datastore/samples/generated/v1/datastore.allocate_ids.js
@@ -1,4 +1,4 @@
-// Copyright 2025 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
diff --git a/handwritten/datastore/samples/generated/v1/datastore.begin_transaction.js b/handwritten/datastore/samples/generated/v1/datastore.begin_transaction.js
index 048d1ef5e3a7..8ebff8759bd9 100644
--- a/handwritten/datastore/samples/generated/v1/datastore.begin_transaction.js
+++ b/handwritten/datastore/samples/generated/v1/datastore.begin_transaction.js
@@ -1,4 +1,4 @@
-// Copyright 2025 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
diff --git a/handwritten/datastore/samples/generated/v1/datastore.commit.js b/handwritten/datastore/samples/generated/v1/datastore.commit.js
index 6adbb99605fd..1e8af82f7b31 100644
--- a/handwritten/datastore/samples/generated/v1/datastore.commit.js
+++ b/handwritten/datastore/samples/generated/v1/datastore.commit.js
@@ -1,4 +1,4 @@
-// Copyright 2025 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
diff --git a/handwritten/datastore/samples/generated/v1/datastore.lookup.js b/handwritten/datastore/samples/generated/v1/datastore.lookup.js
index 10605896735c..73830172b6e2 100644
--- a/handwritten/datastore/samples/generated/v1/datastore.lookup.js
+++ b/handwritten/datastore/samples/generated/v1/datastore.lookup.js
@@ -1,4 +1,4 @@
-// Copyright 2025 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
diff --git a/handwritten/datastore/samples/generated/v1/datastore.reserve_ids.js b/handwritten/datastore/samples/generated/v1/datastore.reserve_ids.js
index d6a5b8559648..3a8c0bc36889 100644
--- a/handwritten/datastore/samples/generated/v1/datastore.reserve_ids.js
+++ b/handwritten/datastore/samples/generated/v1/datastore.reserve_ids.js
@@ -1,4 +1,4 @@
-// Copyright 2025 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
diff --git a/handwritten/datastore/samples/generated/v1/datastore.rollback.js b/handwritten/datastore/samples/generated/v1/datastore.rollback.js
index 27757dcc8282..f76f8f7be802 100644
--- a/handwritten/datastore/samples/generated/v1/datastore.rollback.js
+++ b/handwritten/datastore/samples/generated/v1/datastore.rollback.js
@@ -1,4 +1,4 @@
-// Copyright 2025 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
diff --git a/handwritten/datastore/samples/generated/v1/datastore.run_aggregation_query.js b/handwritten/datastore/samples/generated/v1/datastore.run_aggregation_query.js
index b5c44b6ef74c..b3d26c8fcc08 100644
--- a/handwritten/datastore/samples/generated/v1/datastore.run_aggregation_query.js
+++ b/handwritten/datastore/samples/generated/v1/datastore.run_aggregation_query.js
@@ -1,4 +1,4 @@
-// Copyright 2025 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
diff --git a/handwritten/datastore/samples/generated/v1/datastore.run_query.js b/handwritten/datastore/samples/generated/v1/datastore.run_query.js
index 70237069d9c9..87b7f9d16bc2 100644
--- a/handwritten/datastore/samples/generated/v1/datastore.run_query.js
+++ b/handwritten/datastore/samples/generated/v1/datastore.run_query.js
@@ -1,4 +1,4 @@
-// Copyright 2025 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
diff --git a/handwritten/datastore/samples/generated/v1/datastore_admin.create_index.js b/handwritten/datastore/samples/generated/v1/datastore_admin.create_index.js
index a54d726a8a74..72b80a24bc81 100644
--- a/handwritten/datastore/samples/generated/v1/datastore_admin.create_index.js
+++ b/handwritten/datastore/samples/generated/v1/datastore_admin.create_index.js
@@ -1,4 +1,4 @@
-// Copyright 2025 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
diff --git a/handwritten/datastore/samples/generated/v1/datastore_admin.delete_index.js b/handwritten/datastore/samples/generated/v1/datastore_admin.delete_index.js
index 8b74e8d5e4dc..e891224de6a9 100644
--- a/handwritten/datastore/samples/generated/v1/datastore_admin.delete_index.js
+++ b/handwritten/datastore/samples/generated/v1/datastore_admin.delete_index.js
@@ -1,4 +1,4 @@
-// Copyright 2025 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
diff --git a/handwritten/datastore/samples/generated/v1/datastore_admin.export_entities.js b/handwritten/datastore/samples/generated/v1/datastore_admin.export_entities.js
index de3f899694b2..60681aefd50d 100644
--- a/handwritten/datastore/samples/generated/v1/datastore_admin.export_entities.js
+++ b/handwritten/datastore/samples/generated/v1/datastore_admin.export_entities.js
@@ -1,4 +1,4 @@
-// Copyright 2025 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
diff --git a/handwritten/datastore/samples/generated/v1/datastore_admin.get_index.js b/handwritten/datastore/samples/generated/v1/datastore_admin.get_index.js
index 9c930af60b89..d3b3032284a1 100644
--- a/handwritten/datastore/samples/generated/v1/datastore_admin.get_index.js
+++ b/handwritten/datastore/samples/generated/v1/datastore_admin.get_index.js
@@ -1,4 +1,4 @@
-// Copyright 2025 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
diff --git a/handwritten/datastore/samples/generated/v1/datastore_admin.import_entities.js b/handwritten/datastore/samples/generated/v1/datastore_admin.import_entities.js
index a17a03e9c2fb..f607e51e5e3b 100644
--- a/handwritten/datastore/samples/generated/v1/datastore_admin.import_entities.js
+++ b/handwritten/datastore/samples/generated/v1/datastore_admin.import_entities.js
@@ -1,4 +1,4 @@
-// Copyright 2025 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
diff --git a/handwritten/datastore/samples/generated/v1/datastore_admin.list_indexes.js b/handwritten/datastore/samples/generated/v1/datastore_admin.list_indexes.js
index 624f64231486..16c954b3f325 100644
--- a/handwritten/datastore/samples/generated/v1/datastore_admin.list_indexes.js
+++ b/handwritten/datastore/samples/generated/v1/datastore_admin.list_indexes.js
@@ -1,4 +1,4 @@
-// Copyright 2025 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
diff --git a/handwritten/datastore/src/v1/datastore_admin_client.ts b/handwritten/datastore/src/v1/datastore_admin_client.ts
index 1156724c081d..4e22175c3309 100644
--- a/handwritten/datastore/src/v1/datastore_admin_client.ts
+++ b/handwritten/datastore/src/v1/datastore_admin_client.ts
@@ -1,4 +1,4 @@
-// Copyright 2025 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -28,10 +28,10 @@ import type {
PaginationCallback,
GaxCall,
} from 'google-gax';
-import {Transform} from 'stream';
+import { Transform } from 'stream';
import * as protos from '../../protos/protos';
import jsonProtos = require('../../protos/protos.json');
-import {loggingUtils as logging} from 'google-gax';
+import { loggingUtils as logging, decodeAnyProtosInArray } from 'google-gax';
/**
* Client JSON configuration object, loaded from
@@ -99,7 +99,7 @@ export class DatastoreAdminClient {
private _gaxModule: typeof gax | typeof gax.fallback;
private _gaxGrpc: gax.GrpcClient | gax.fallback.GrpcClient;
private _protos: {};
- private _defaults: {[method: string]: gax.CallSettings};
+ private _defaults: { [method: string]: gax.CallSettings };
private _universeDomain: string;
private _servicePath: string;
private _log = logging.log('datastore-admin');
@@ -112,9 +112,9 @@ export class DatastoreAdminClient {
batching: {},
};
warn: (code: string, message: string, warnType?: string) => void;
- innerApiCalls: {[name: string]: Function};
+ innerApiCalls: { [name: string]: Function };
operationsClient: gax.OperationsClient;
- datastoreAdminStub?: Promise<{[name: string]: Function}>;
+ datastoreAdminStub?: Promise<{ [name: string]: Function }>;
/**
* Construct an instance of DatastoreAdminClient.
@@ -190,7 +190,7 @@ export class DatastoreAdminClient {
const fallback =
opts?.fallback ??
(typeof window !== 'undefined' && typeof window?.fetch === 'function');
- opts = Object.assign({servicePath, port, clientConfig, fallback}, opts);
+ opts = Object.assign({ servicePath, port, clientConfig, fallback }, opts);
// Request numeric enum values if REST transport is used.
opts.numericEnums = true;
@@ -257,7 +257,7 @@ export class DatastoreAdminClient {
),
};
- const protoFilesRoot = this._gaxModule.protobuf.Root.fromJSON(jsonProtos);
+ const protoFilesRoot = this._gaxModule.protobufFromJSON(jsonProtos);
// This API contains "long-running operations", which return a
// an Operation object that allows for tracking of the operation,
// rather than holding a request open.
@@ -342,7 +342,7 @@ export class DatastoreAdminClient {
'google.datastore.admin.v1.DatastoreAdmin',
gapicConfig as gax.ClientConfig,
opts.clientConfig || {},
- {'x-goog-api-client': clientHeader.join(' ')},
+ { 'x-goog-api-client': clientHeader.join(' ') },
);
// Set up a dictionary of "inner API calls"; the core implementation
@@ -382,7 +382,7 @@ export class DatastoreAdminClient {
(this._protos as any).google.datastore.admin.v1.DatastoreAdmin,
this._opts,
this._providedCustomServicePath,
- ) as Promise<{[method: string]: Function}>;
+ ) as Promise<{ [method: string]: Function }>;
// Iterate over each of the methods that the service provides
// and create an API call method for each.
@@ -396,7 +396,7 @@ export class DatastoreAdminClient {
];
for (const methodName of datastoreAdminStubMethods) {
const callPromise = this.datastoreAdminStub.then(
- stub =>
+ (stub) =>
(...args: Array<{}>) => {
if (this._terminated) {
return Promise.reject('The client has already been closed.');
@@ -592,10 +592,10 @@ export class DatastoreAdminClient {
options.otherArgs.headers = options.otherArgs.headers || {};
options.otherArgs.headers['x-goog-request-params'] =
this._gaxModule.routingHeader.fromParams({
- project_id: request.projectId ?? '',
- index_id: request.indexId ?? '',
+ project_id: request.projectId?.toString() ?? '',
+ index_id: request.indexId?.toString() ?? '',
});
- this.initialize().catch(err => {
+ this.initialize().catch((err) => {
throw err;
});
this._log.info('getIndex request %j', request);
@@ -622,7 +622,23 @@ export class DatastoreAdminClient {
this._log.info('getIndex response %j', response);
return [response, options, rawResponse];
},
- );
+ )
+ .catch((error: any) => {
+ if (
+ error &&
+ 'statusDetails' in error &&
+ error.statusDetails instanceof Array
+ ) {
+ const protos = this._gaxModule.protobuf.Root.fromJSON(
+ jsonProtos,
+ ) as unknown as gax.protobuf.Type;
+ error.statusDetails = decodeAnyProtosInArray(
+ error.statusDetails,
+ protos,
+ );
+ }
+ throw error;
+ });
}
/**
@@ -752,9 +768,9 @@ export class DatastoreAdminClient {
options.otherArgs.headers = options.otherArgs.headers || {};
options.otherArgs.headers['x-goog-request-params'] =
this._gaxModule.routingHeader.fromParams({
- project_id: request.projectId ?? '',
+ project_id: request.projectId?.toString() ?? '',
});
- this.initialize().catch(err => {
+ this.initialize().catch((err) => {
throw err;
});
const wrappedCallback:
@@ -811,7 +827,7 @@ export class DatastoreAdminClient {
this._log.info('exportEntities long-running');
const request =
new this._gaxModule.operationsProtos.google.longrunning.GetOperationRequest(
- {name},
+ { name },
);
const [operation] = await this.operationsClient.getOperation(request);
const decodeOperation = new this._gaxModule.Operation(
@@ -947,9 +963,9 @@ export class DatastoreAdminClient {
options.otherArgs.headers = options.otherArgs.headers || {};
options.otherArgs.headers['x-goog-request-params'] =
this._gaxModule.routingHeader.fromParams({
- project_id: request.projectId ?? '',
+ project_id: request.projectId?.toString() ?? '',
});
- this.initialize().catch(err => {
+ this.initialize().catch((err) => {
throw err;
});
const wrappedCallback:
@@ -1006,7 +1022,7 @@ export class DatastoreAdminClient {
this._log.info('importEntities long-running');
const request =
new this._gaxModule.operationsProtos.google.longrunning.GetOperationRequest(
- {name},
+ { name },
);
const [operation] = await this.operationsClient.getOperation(request);
const decodeOperation = new this._gaxModule.Operation(
@@ -1132,9 +1148,9 @@ export class DatastoreAdminClient {
options.otherArgs.headers = options.otherArgs.headers || {};
options.otherArgs.headers['x-goog-request-params'] =
this._gaxModule.routingHeader.fromParams({
- project_id: request.projectId ?? '',
+ project_id: request.projectId?.toString() ?? '',
});
- this.initialize().catch(err => {
+ this.initialize().catch((err) => {
throw err;
});
const wrappedCallback:
@@ -1191,7 +1207,7 @@ export class DatastoreAdminClient {
this._log.info('createIndex long-running');
const request =
new this._gaxModule.operationsProtos.google.longrunning.GetOperationRequest(
- {name},
+ { name },
);
const [operation] = await this.operationsClient.getOperation(request);
const decodeOperation = new this._gaxModule.Operation(
@@ -1313,10 +1329,10 @@ export class DatastoreAdminClient {
options.otherArgs.headers = options.otherArgs.headers || {};
options.otherArgs.headers['x-goog-request-params'] =
this._gaxModule.routingHeader.fromParams({
- project_id: request.projectId ?? '',
- index_id: request.indexId ?? '',
+ project_id: request.projectId?.toString() ?? '',
+ index_id: request.indexId?.toString() ?? '',
});
- this.initialize().catch(err => {
+ this.initialize().catch((err) => {
throw err;
});
const wrappedCallback:
@@ -1373,7 +1389,7 @@ export class DatastoreAdminClient {
this._log.info('deleteIndex long-running');
const request =
new this._gaxModule.operationsProtos.google.longrunning.GetOperationRequest(
- {name},
+ { name },
);
const [operation] = await this.operationsClient.getOperation(request);
const decodeOperation = new this._gaxModule.Operation(
@@ -1476,9 +1492,9 @@ export class DatastoreAdminClient {
options.otherArgs.headers = options.otherArgs.headers || {};
options.otherArgs.headers['x-goog-request-params'] =
this._gaxModule.routingHeader.fromParams({
- project_id: request.projectId ?? '',
+ project_id: request.projectId?.toString() ?? '',
});
- this.initialize().catch(err => {
+ this.initialize().catch((err) => {
throw err;
});
const wrappedCallback:
@@ -1543,11 +1559,11 @@ export class DatastoreAdminClient {
options.otherArgs.headers = options.otherArgs.headers || {};
options.otherArgs.headers['x-goog-request-params'] =
this._gaxModule.routingHeader.fromParams({
- project_id: request.projectId ?? '',
+ project_id: request.projectId?.toString() ?? '',
});
const defaultCallSettings = this._defaults['listIndexes'];
const callSettings = defaultCallSettings.merge(options);
- this.initialize().catch(err => {
+ this.initialize().catch((err) => {
throw err;
});
this._log.info('listIndexes stream %j', request);
@@ -1594,11 +1610,11 @@ export class DatastoreAdminClient {
options.otherArgs.headers = options.otherArgs.headers || {};
options.otherArgs.headers['x-goog-request-params'] =
this._gaxModule.routingHeader.fromParams({
- project_id: request.projectId ?? '',
+ project_id: request.projectId?.toString() ?? '',
});
const defaultCallSettings = this._defaults['listIndexes'];
const callSettings = defaultCallSettings.merge(options);
- this.initialize().catch(err => {
+ this.initialize().catch((err) => {
throw err;
});
this._log.info('listIndexes iterate %j', request);
@@ -1774,7 +1790,6 @@ export class DatastoreAdminClient {
});
return this.operationsClient.cancelOperation(request, options, callback);
}
-
/**
* Deletes a long-running operation. This method indicates that the client is
* no longer interested in the operation result. It does not cancel the
@@ -1840,11 +1855,11 @@ export class DatastoreAdminClient {
*/
close(): Promise {
if (this.datastoreAdminStub && !this._terminated) {
- return this.datastoreAdminStub.then(stub => {
+ return this.datastoreAdminStub.then((stub) => {
this._log.info('ending gRPC channel');
this._terminated = true;
stub.close();
- this.operationsClient.close();
+ void this.operationsClient.close();
});
}
return Promise.resolve();
diff --git a/handwritten/datastore/src/v1/datastore_client.ts b/handwritten/datastore/src/v1/datastore_client.ts
index 1b1158315ea8..e830c956aea5 100644
--- a/handwritten/datastore/src/v1/datastore_client.ts
+++ b/handwritten/datastore/src/v1/datastore_client.ts
@@ -1,4 +1,4 @@
-// Copyright 2025 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -28,7 +28,7 @@ import type {
import * as protos from '../../protos/protos';
import jsonProtos = require('../../protos/protos.json');
-import {loggingUtils as logging} from 'google-gax';
+import { loggingUtils as logging, decodeAnyProtosInArray } from 'google-gax';
/**
* Client JSON configuration object, loaded from
@@ -56,7 +56,7 @@ export class DatastoreClient {
private _gaxModule: typeof gax | typeof gax.fallback;
private _gaxGrpc: gax.GrpcClient | gax.fallback.GrpcClient;
private _protos: {};
- private _defaults: {[method: string]: gax.CallSettings};
+ private _defaults: { [method: string]: gax.CallSettings };
private _universeDomain: string;
private _servicePath: string;
private _log = logging.log('datastore');
@@ -69,9 +69,9 @@ export class DatastoreClient {
batching: {},
};
warn: (code: string, message: string, warnType?: string) => void;
- innerApiCalls: {[name: string]: Function};
+ innerApiCalls: { [name: string]: Function };
operationsClient: gax.OperationsClient;
- datastoreStub?: Promise<{[name: string]: Function}>;
+ datastoreStub?: Promise<{ [name: string]: Function }>;
/**
* Construct an instance of DatastoreClient.
@@ -147,7 +147,7 @@ export class DatastoreClient {
const fallback =
opts?.fallback ??
(typeof window !== 'undefined' && typeof window?.fetch === 'function');
- opts = Object.assign({servicePath, port, clientConfig, fallback}, opts);
+ opts = Object.assign({ servicePath, port, clientConfig, fallback }, opts);
// Request numeric enum values if REST transport is used.
opts.numericEnums = true;
@@ -203,7 +203,7 @@ export class DatastoreClient {
// Load the applicable protos.
this._protos = this._gaxGrpc.loadProtoJSON(jsonProtos);
- const protoFilesRoot = this._gaxModule.protobuf.Root.fromJSON(jsonProtos);
+ const protoFilesRoot = this._gaxModule.protobufFromJSON(jsonProtos);
// This API contains "long-running operations", which return a
// an Operation object that allows for tracking of the operation,
// rather than holding a request open.
@@ -243,7 +243,7 @@ export class DatastoreClient {
'google.datastore.v1.Datastore',
gapicConfig as gax.ClientConfig,
opts.clientConfig || {},
- {'x-goog-api-client': clientHeader.join(' ')},
+ { 'x-goog-api-client': clientHeader.join(' ') },
);
// Set up a dictionary of "inner API calls"; the core implementation
@@ -283,7 +283,7 @@ export class DatastoreClient {
(this._protos as any).google.datastore.v1.Datastore,
this._opts,
this._providedCustomServicePath,
- ) as Promise<{[method: string]: Function}>;
+ ) as Promise<{ [method: string]: Function }>;
// Iterate over each of the methods that the service provides
// and create an API call method for each.
@@ -299,7 +299,7 @@ export class DatastoreClient {
];
for (const methodName of datastoreStubMethods) {
const callPromise = this.datastoreStub.then(
- stub =>
+ (stub) =>
(...args: Array<{}>) => {
if (this._terminated) {
return Promise.reject('The client has already been closed.');
@@ -504,14 +504,14 @@ export class DatastoreClient {
options = options || {};
options.otherArgs = options.otherArgs || {};
options.otherArgs.headers = options.otherArgs.headers || {};
- const routingParameter = {};
+ let routingParameter = {};
{
const fieldValue = request.projectId;
if (fieldValue !== undefined && fieldValue !== null) {
const match = fieldValue.toString().match(RegExp('(?.*)'));
if (match) {
const parameterValue = match.groups?.['project_id'] ?? fieldValue;
- Object.assign(routingParameter, {project_id: parameterValue});
+ Object.assign(routingParameter, { project_id: parameterValue });
}
}
}
@@ -521,13 +521,13 @@ export class DatastoreClient {
const match = fieldValue.toString().match(RegExp('(?.*)'));
if (match) {
const parameterValue = match.groups?.['database_id'] ?? fieldValue;
- Object.assign(routingParameter, {database_id: parameterValue});
+ Object.assign(routingParameter, { database_id: parameterValue });
}
}
}
options.otherArgs.headers['x-goog-request-params'] =
this._gaxModule.routingHeader.fromParams(routingParameter);
- this.initialize().catch(err => {
+ this.initialize().catch((err) => {
throw err;
});
this._log.info('lookup request %j', request);
@@ -554,7 +554,23 @@ export class DatastoreClient {
this._log.info('lookup response %j', response);
return [response, options, rawResponse];
},
- );
+ )
+ .catch((error: any) => {
+ if (
+ error &&
+ 'statusDetails' in error &&
+ error.statusDetails instanceof Array
+ ) {
+ const protos = this._gaxModule.protobuf.Root.fromJSON(
+ jsonProtos,
+ ) as unknown as gax.protobuf.Type;
+ error.statusDetails = decodeAnyProtosInArray(
+ error.statusDetails,
+ protos,
+ );
+ }
+ throw error;
+ });
}
/**
* Queries for entities.
@@ -656,14 +672,14 @@ export class DatastoreClient {
options = options || {};
options.otherArgs = options.otherArgs || {};
options.otherArgs.headers = options.otherArgs.headers || {};
- const routingParameter = {};
+ let routingParameter = {};
{
const fieldValue = request.projectId;
if (fieldValue !== undefined && fieldValue !== null) {
const match = fieldValue.toString().match(RegExp('(?.*)'));
if (match) {
const parameterValue = match.groups?.['project_id'] ?? fieldValue;
- Object.assign(routingParameter, {project_id: parameterValue});
+ Object.assign(routingParameter, { project_id: parameterValue });
}
}
}
@@ -673,13 +689,13 @@ export class DatastoreClient {
const match = fieldValue.toString().match(RegExp('(?.*)'));
if (match) {
const parameterValue = match.groups?.['database_id'] ?? fieldValue;
- Object.assign(routingParameter, {database_id: parameterValue});
+ Object.assign(routingParameter, { database_id: parameterValue });
}
}
}
options.otherArgs.headers['x-goog-request-params'] =
this._gaxModule.routingHeader.fromParams(routingParameter);
- this.initialize().catch(err => {
+ this.initialize().catch((err) => {
throw err;
});
this._log.info('runQuery request %j', request);
@@ -706,7 +722,23 @@ export class DatastoreClient {
this._log.info('runQuery response %j', response);
return [response, options, rawResponse];
},
- );
+ )
+ .catch((error: any) => {
+ if (
+ error &&
+ 'statusDetails' in error &&
+ error.statusDetails instanceof Array
+ ) {
+ const protos = this._gaxModule.protobuf.Root.fromJSON(
+ jsonProtos,
+ ) as unknown as gax.protobuf.Type;
+ error.statusDetails = decodeAnyProtosInArray(
+ error.statusDetails,
+ protos,
+ );
+ }
+ throw error;
+ });
}
/**
* Runs an aggregation query.
@@ -804,14 +836,14 @@ export class DatastoreClient {
options = options || {};
options.otherArgs = options.otherArgs || {};
options.otherArgs.headers = options.otherArgs.headers || {};
- const routingParameter = {};
+ let routingParameter = {};
{
const fieldValue = request.projectId;
if (fieldValue !== undefined && fieldValue !== null) {
const match = fieldValue.toString().match(RegExp('(?.*)'));
if (match) {
const parameterValue = match.groups?.['project_id'] ?? fieldValue;
- Object.assign(routingParameter, {project_id: parameterValue});
+ Object.assign(routingParameter, { project_id: parameterValue });
}
}
}
@@ -821,13 +853,13 @@ export class DatastoreClient {
const match = fieldValue.toString().match(RegExp('(?.*)'));
if (match) {
const parameterValue = match.groups?.['database_id'] ?? fieldValue;
- Object.assign(routingParameter, {database_id: parameterValue});
+ Object.assign(routingParameter, { database_id: parameterValue });
}
}
}
options.otherArgs.headers['x-goog-request-params'] =
this._gaxModule.routingHeader.fromParams(routingParameter);
- this.initialize().catch(err => {
+ this.initialize().catch((err) => {
throw err;
});
this._log.info('runAggregationQuery request %j', request);
@@ -856,7 +888,23 @@ export class DatastoreClient {
this._log.info('runAggregationQuery response %j', response);
return [response, options, rawResponse];
},
- );
+ )
+ .catch((error: any) => {
+ if (
+ error &&
+ 'statusDetails' in error &&
+ error.statusDetails instanceof Array
+ ) {
+ const protos = this._gaxModule.protobuf.Root.fromJSON(
+ jsonProtos,
+ ) as unknown as gax.protobuf.Type;
+ error.statusDetails = decodeAnyProtosInArray(
+ error.statusDetails,
+ protos,
+ );
+ }
+ throw error;
+ });
}
/**
* Begins a new transaction.
@@ -942,14 +990,14 @@ export class DatastoreClient {
options = options || {};
options.otherArgs = options.otherArgs || {};
options.otherArgs.headers = options.otherArgs.headers || {};
- const routingParameter = {};
+ let routingParameter = {};
{
const fieldValue = request.projectId;
if (fieldValue !== undefined && fieldValue !== null) {
const match = fieldValue.toString().match(RegExp('(?.*)'));
if (match) {
const parameterValue = match.groups?.['project_id'] ?? fieldValue;
- Object.assign(routingParameter, {project_id: parameterValue});
+ Object.assign(routingParameter, { project_id: parameterValue });
}
}
}
@@ -959,13 +1007,13 @@ export class DatastoreClient {
const match = fieldValue.toString().match(RegExp('(?.*)'));
if (match) {
const parameterValue = match.groups?.['database_id'] ?? fieldValue;
- Object.assign(routingParameter, {database_id: parameterValue});
+ Object.assign(routingParameter, { database_id: parameterValue });
}
}
}
options.otherArgs.headers['x-goog-request-params'] =
this._gaxModule.routingHeader.fromParams(routingParameter);
- this.initialize().catch(err => {
+ this.initialize().catch((err) => {
throw err;
});
this._log.info('beginTransaction request %j', request);
@@ -994,7 +1042,23 @@ export class DatastoreClient {
this._log.info('beginTransaction response %j', response);
return [response, options, rawResponse];
},
- );
+ )
+ .catch((error: any) => {
+ if (
+ error &&
+ 'statusDetails' in error &&
+ error.statusDetails instanceof Array
+ ) {
+ const protos = this._gaxModule.protobuf.Root.fromJSON(
+ jsonProtos,
+ ) as unknown as gax.protobuf.Type;
+ error.statusDetails = decodeAnyProtosInArray(
+ error.statusDetails,
+ protos,
+ );
+ }
+ throw error;
+ });
}
/**
* Commits a transaction, optionally creating, deleting or modifying some
@@ -1102,14 +1166,14 @@ export class DatastoreClient {
options = options || {};
options.otherArgs = options.otherArgs || {};
options.otherArgs.headers = options.otherArgs.headers || {};
- const routingParameter = {};
+ let routingParameter = {};
{
const fieldValue = request.projectId;
if (fieldValue !== undefined && fieldValue !== null) {
const match = fieldValue.toString().match(RegExp('(?.*)'));
if (match) {
const parameterValue = match.groups?.['project_id'] ?? fieldValue;
- Object.assign(routingParameter, {project_id: parameterValue});
+ Object.assign(routingParameter, { project_id: parameterValue });
}
}
}
@@ -1119,13 +1183,13 @@ export class DatastoreClient {
const match = fieldValue.toString().match(RegExp('(?.*)'));
if (match) {
const parameterValue = match.groups?.['database_id'] ?? fieldValue;
- Object.assign(routingParameter, {database_id: parameterValue});
+ Object.assign(routingParameter, { database_id: parameterValue });
}
}
}
options.otherArgs.headers['x-goog-request-params'] =
this._gaxModule.routingHeader.fromParams(routingParameter);
- this.initialize().catch(err => {
+ this.initialize().catch((err) => {
throw err;
});
this._log.info('commit request %j', request);
@@ -1152,7 +1216,23 @@ export class DatastoreClient {
this._log.info('commit response %j', response);
return [response, options, rawResponse];
},
- );
+ )
+ .catch((error: any) => {
+ if (
+ error &&
+ 'statusDetails' in error &&
+ error.statusDetails instanceof Array
+ ) {
+ const protos = this._gaxModule.protobuf.Root.fromJSON(
+ jsonProtos,
+ ) as unknown as gax.protobuf.Type;
+ error.statusDetails = decodeAnyProtosInArray(
+ error.statusDetails,
+ protos,
+ );
+ }
+ throw error;
+ });
}
/**
* Rolls back a transaction.
@@ -1237,14 +1317,14 @@ export class DatastoreClient {
options = options || {};
options.otherArgs = options.otherArgs || {};
options.otherArgs.headers = options.otherArgs.headers || {};
- const routingParameter = {};
+ let routingParameter = {};
{
const fieldValue = request.projectId;
if (fieldValue !== undefined && fieldValue !== null) {
const match = fieldValue.toString().match(RegExp('(?.*)'));
if (match) {
const parameterValue = match.groups?.['project_id'] ?? fieldValue;
- Object.assign(routingParameter, {project_id: parameterValue});
+ Object.assign(routingParameter, { project_id: parameterValue });
}
}
}
@@ -1254,13 +1334,13 @@ export class DatastoreClient {
const match = fieldValue.toString().match(RegExp('(?.*)'));
if (match) {
const parameterValue = match.groups?.['database_id'] ?? fieldValue;
- Object.assign(routingParameter, {database_id: parameterValue});
+ Object.assign(routingParameter, { database_id: parameterValue });
}
}
}
options.otherArgs.headers['x-goog-request-params'] =
this._gaxModule.routingHeader.fromParams(routingParameter);
- this.initialize().catch(err => {
+ this.initialize().catch((err) => {
throw err;
});
this._log.info('rollback request %j', request);
@@ -1287,7 +1367,23 @@ export class DatastoreClient {
this._log.info('rollback response %j', response);
return [response, options, rawResponse];
},
- );
+ )
+ .catch((error: any) => {
+ if (
+ error &&
+ 'statusDetails' in error &&
+ error.statusDetails instanceof Array
+ ) {
+ const protos = this._gaxModule.protobuf.Root.fromJSON(
+ jsonProtos,
+ ) as unknown as gax.protobuf.Type;
+ error.statusDetails = decodeAnyProtosInArray(
+ error.statusDetails,
+ protos,
+ );
+ }
+ throw error;
+ });
}
/**
* Allocates IDs for the given keys, which is useful for referencing an entity
@@ -1373,14 +1469,14 @@ export class DatastoreClient {
options = options || {};
options.otherArgs = options.otherArgs || {};
options.otherArgs.headers = options.otherArgs.headers || {};
- const routingParameter = {};
+ let routingParameter = {};
{
const fieldValue = request.projectId;
if (fieldValue !== undefined && fieldValue !== null) {
const match = fieldValue.toString().match(RegExp('(?.*)'));
if (match) {
const parameterValue = match.groups?.['project_id'] ?? fieldValue;
- Object.assign(routingParameter, {project_id: parameterValue});
+ Object.assign(routingParameter, { project_id: parameterValue });
}
}
}
@@ -1390,13 +1486,13 @@ export class DatastoreClient {
const match = fieldValue.toString().match(RegExp('(?.*)'));
if (match) {
const parameterValue = match.groups?.['database_id'] ?? fieldValue;
- Object.assign(routingParameter, {database_id: parameterValue});
+ Object.assign(routingParameter, { database_id: parameterValue });
}
}
}
options.otherArgs.headers['x-goog-request-params'] =
this._gaxModule.routingHeader.fromParams(routingParameter);
- this.initialize().catch(err => {
+ this.initialize().catch((err) => {
throw err;
});
this._log.info('allocateIds request %j', request);
@@ -1423,7 +1519,23 @@ export class DatastoreClient {
this._log.info('allocateIds response %j', response);
return [response, options, rawResponse];
},
- );
+ )
+ .catch((error: any) => {
+ if (
+ error &&
+ 'statusDetails' in error &&
+ error.statusDetails instanceof Array
+ ) {
+ const protos = this._gaxModule.protobuf.Root.fromJSON(
+ jsonProtos,
+ ) as unknown as gax.protobuf.Type;
+ error.statusDetails = decodeAnyProtosInArray(
+ error.statusDetails,
+ protos,
+ );
+ }
+ throw error;
+ });
}
/**
* Prevents the supplied keys' IDs from being auto-allocated by Cloud
@@ -1509,14 +1621,14 @@ export class DatastoreClient {
options = options || {};
options.otherArgs = options.otherArgs || {};
options.otherArgs.headers = options.otherArgs.headers || {};
- const routingParameter = {};
+ let routingParameter = {};
{
const fieldValue = request.projectId;
if (fieldValue !== undefined && fieldValue !== null) {
const match = fieldValue.toString().match(RegExp('(?.*)'));
if (match) {
const parameterValue = match.groups?.['project_id'] ?? fieldValue;
- Object.assign(routingParameter, {project_id: parameterValue});
+ Object.assign(routingParameter, { project_id: parameterValue });
}
}
}
@@ -1526,13 +1638,13 @@ export class DatastoreClient {
const match = fieldValue.toString().match(RegExp('(?.*)'));
if (match) {
const parameterValue = match.groups?.['database_id'] ?? fieldValue;
- Object.assign(routingParameter, {database_id: parameterValue});
+ Object.assign(routingParameter, { database_id: parameterValue });
}
}
}
options.otherArgs.headers['x-goog-request-params'] =
this._gaxModule.routingHeader.fromParams(routingParameter);
- this.initialize().catch(err => {
+ this.initialize().catch((err) => {
throw err;
});
this._log.info('reserveIds request %j', request);
@@ -1559,7 +1671,23 @@ export class DatastoreClient {
this._log.info('reserveIds response %j', response);
return [response, options, rawResponse];
},
- );
+ )
+ .catch((error: any) => {
+ if (
+ error &&
+ 'statusDetails' in error &&
+ error.statusDetails instanceof Array
+ ) {
+ const protos = this._gaxModule.protobuf.Root.fromJSON(
+ jsonProtos,
+ ) as unknown as gax.protobuf.Type;
+ error.statusDetails = decodeAnyProtosInArray(
+ error.statusDetails,
+ protos,
+ );
+ }
+ throw error;
+ });
}
/**
@@ -1728,7 +1856,6 @@ export class DatastoreClient {
});
return this.operationsClient.cancelOperation(request, options, callback);
}
-
/**
* Deletes a long-running operation. This method indicates that the client is
* no longer interested in the operation result. It does not cancel the
@@ -1794,11 +1921,11 @@ export class DatastoreClient {
*/
close(): Promise {
if (this.datastoreStub && !this._terminated) {
- return this.datastoreStub.then(stub => {
+ return this.datastoreStub.then((stub) => {
this._log.info('ending gRPC channel');
this._terminated = true;
stub.close();
- this.operationsClient.close();
+ void this.operationsClient.close();
});
}
return Promise.resolve();
diff --git a/handwritten/datastore/system-test/fixtures/sample/src/index.js b/handwritten/datastore/system-test/fixtures/sample/src/index.js
index 1cb393060623..ee7482f4b6ee 100644
--- a/handwritten/datastore/system-test/fixtures/sample/src/index.js
+++ b/handwritten/datastore/system-test/fixtures/sample/src/index.js
@@ -1,4 +1,4 @@
-// Copyright 2025 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -16,6 +16,7 @@
// ** https://github.com/googleapis/gapic-generator-typescript **
// ** All changes to this file may be overwritten. **
+
/* eslint-disable node/no-missing-require, no-unused-vars */
const datastore = require('@google-cloud/datastore');
diff --git a/handwritten/datastore/system-test/fixtures/sample/src/index.ts b/handwritten/datastore/system-test/fixtures/sample/src/index.ts
index c0c27e781f15..0a18405b51ed 100644
--- a/handwritten/datastore/system-test/fixtures/sample/src/index.ts
+++ b/handwritten/datastore/system-test/fixtures/sample/src/index.ts
@@ -1,4 +1,4 @@
-// Copyright 2025 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -16,7 +16,7 @@
// ** https://github.com/googleapis/gapic-generator-typescript **
// ** All changes to this file may be overwritten. **
-import {Datastore} from '@google-cloud/datastore';
+import { Datastore } from '@google-cloud/datastore';
// check that the client class type name can be used
function doStuffWithDatastore(client: Datastore) {
diff --git a/handwritten/datastore/system-test/install.ts b/handwritten/datastore/system-test/install.ts
index 5257a7ba101c..ccf167042d2e 100644
--- a/handwritten/datastore/system-test/install.ts
+++ b/handwritten/datastore/system-test/install.ts
@@ -1,4 +1,4 @@
-// Copyright 2025 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -16,9 +16,9 @@
// ** https://github.com/googleapis/gapic-generator-typescript **
// ** All changes to this file may be overwritten. **
-import {packNTest} from 'pack-n-play';
-import {readFileSync} from 'fs';
-import {describe, it} from 'mocha';
+import { packNTest } from 'pack-n-play';
+import { readFileSync } from 'fs';
+import { describe, it } from 'mocha';
describe('📦 pack-n-play test', () => {
it('TypeScript code', async function () {
@@ -41,7 +41,7 @@ describe('📦 pack-n-play test', () => {
packageDir: process.cwd(),
sample: {
description: 'JavaScript user can use the library',
- ts: readFileSync(
+ cjs: readFileSync(
'./system-test/fixtures/sample/src/index.js',
).toString(),
},
diff --git a/handwritten/datastore/test/gapic_datastore_admin_v1.ts b/handwritten/datastore/test/gapic_datastore_admin_v1.ts
index 33169355de8c..55cd3968baad 100644
--- a/handwritten/datastore/test/gapic_datastore_admin_v1.ts
+++ b/handwritten/datastore/test/gapic_datastore_admin_v1.ts
@@ -1,4 +1,4 @@
-// Copyright 2025 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -19,13 +19,13 @@
import * as protos from '../protos/protos';
import * as assert from 'assert';
import * as sinon from 'sinon';
-import {SinonStub} from 'sinon';
-import {describe, it} from 'mocha';
+import { SinonStub } from 'sinon';
+import { describe, it } from 'mocha';
import * as datastoreadminModule from '../src';
-import {PassThrough} from 'stream';
+import { PassThrough } from 'stream';
-import {protobuf, LROperation, operationsProtos} from 'google-gax';
+import { protobuf, LROperation, operationsProtos } from 'google-gax';
// Dynamically loaded proto JSON is needed to get the type information
// to fill in default values for request objects
@@ -45,7 +45,7 @@ function getTypeDefaultValue(typeName: string, fields: string[]) {
function generateSampleMessage(instance: T) {
const filledObject = (
instance.constructor as typeof protobuf.Message
- ).toObject(instance as protobuf.Message, {defaults: true});
+ ).toObject(instance as protobuf.Message, { defaults: true });
return (instance.constructor as typeof protobuf.Message).fromObject(
filledObject,
) as T;
@@ -149,9 +149,9 @@ function stubAsyncIterationCall(
return Promise.reject(error);
}
if (counter >= responses!.length) {
- return Promise.resolve({done: true, value: undefined});
+ return Promise.resolve({ done: true, value: undefined });
}
- return Promise.resolve({done: false, value: responses![counter++]});
+ return Promise.resolve({ done: false, value: responses![counter++] });
},
};
},
@@ -271,7 +271,7 @@ describe('v1.DatastoreAdminClient', () => {
it('has initialize method and supports deferred initialization', async () => {
const client = new datastoreadminModule.v1.DatastoreAdminClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
assert.strictEqual(client.datastoreAdminStub, undefined);
@@ -279,35 +279,45 @@ describe('v1.DatastoreAdminClient', () => {
assert(client.datastoreAdminStub);
});
- it('has close method for the initialized client', done => {
+ it('has close method for the initialized client', (done) => {
const client = new datastoreadminModule.v1.DatastoreAdminClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize().catch((err: any) => {
+ client.initialize().catch((err) => {
throw err;
});
assert(client.datastoreAdminStub);
- client.close().then(() => {
- done();
- });
+ client
+ .close()
+ .then(() => {
+ done();
+ })
+ .catch((err) => {
+ throw err;
+ });
});
- it('has close method for the non-initialized client', done => {
+ it('has close method for the non-initialized client', (done) => {
const client = new datastoreadminModule.v1.DatastoreAdminClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
assert.strictEqual(client.datastoreAdminStub, undefined);
- client.close().then(() => {
- done();
- });
+ client
+ .close()
+ .then(() => {
+ done();
+ })
+ .catch((err) => {
+ throw err;
+ });
});
it('has getProjectId method', async () => {
const fakeProjectId = 'fake-project-id';
const client = new datastoreadminModule.v1.DatastoreAdminClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
client.auth.getProjectId = sinon.stub().resolves(fakeProjectId);
@@ -319,7 +329,7 @@ describe('v1.DatastoreAdminClient', () => {
it('has getProjectId method with callback', async () => {
const fakeProjectId = 'fake-project-id';
const client = new datastoreadminModule.v1.DatastoreAdminClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
client.auth.getProjectId = sinon
@@ -342,7 +352,7 @@ describe('v1.DatastoreAdminClient', () => {
describe('getIndex', () => {
it('invokes getIndex without error', async () => {
const client = new datastoreadminModule.v1.DatastoreAdminClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -378,7 +388,7 @@ describe('v1.DatastoreAdminClient', () => {
it('invokes getIndex without error using callback', async () => {
const client = new datastoreadminModule.v1.DatastoreAdminClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -430,7 +440,7 @@ describe('v1.DatastoreAdminClient', () => {
it('invokes getIndex with error', async () => {
const client = new datastoreadminModule.v1.DatastoreAdminClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -463,7 +473,7 @@ describe('v1.DatastoreAdminClient', () => {
it('invokes getIndex with closed client', async () => {
const client = new datastoreadminModule.v1.DatastoreAdminClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -481,7 +491,9 @@ describe('v1.DatastoreAdminClient', () => {
);
request.indexId = defaultValue2;
const expectedError = new Error('The client has already been closed.');
- client.close();
+ client.close().catch((err) => {
+ throw err;
+ });
await assert.rejects(client.getIndex(request), expectedError);
});
});
@@ -489,7 +501,7 @@ describe('v1.DatastoreAdminClient', () => {
describe('exportEntities', () => {
it('invokes exportEntities without error', async () => {
const client = new datastoreadminModule.v1.DatastoreAdminClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -522,7 +534,7 @@ describe('v1.DatastoreAdminClient', () => {
it('invokes exportEntities without error using callback', async () => {
const client = new datastoreadminModule.v1.DatastoreAdminClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -576,7 +588,7 @@ describe('v1.DatastoreAdminClient', () => {
it('invokes exportEntities with call error', async () => {
const client = new datastoreadminModule.v1.DatastoreAdminClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -607,7 +619,7 @@ describe('v1.DatastoreAdminClient', () => {
it('invokes exportEntities with LRO error', async () => {
const client = new datastoreadminModule.v1.DatastoreAdminClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -640,7 +652,7 @@ describe('v1.DatastoreAdminClient', () => {
it('invokes checkExportEntitiesProgress without error', async () => {
const client = new datastoreadminModule.v1.DatastoreAdminClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -648,8 +660,8 @@ describe('v1.DatastoreAdminClient', () => {
new operationsProtos.google.longrunning.Operation(),
);
expectedResponse.name = 'test';
- expectedResponse.response = {type_url: 'url', value: Buffer.from('')};
- expectedResponse.metadata = {type_url: 'url', value: Buffer.from('')};
+ expectedResponse.response = { type_url: 'url', value: Buffer.from('') };
+ expectedResponse.metadata = { type_url: 'url', value: Buffer.from('') };
client.operationsClient.getOperation = stubSimpleCall(expectedResponse);
const decodedOperation = await client.checkExportEntitiesProgress(
@@ -662,7 +674,7 @@ describe('v1.DatastoreAdminClient', () => {
it('invokes checkExportEntitiesProgress with error', async () => {
const client = new datastoreadminModule.v1.DatastoreAdminClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -683,7 +695,7 @@ describe('v1.DatastoreAdminClient', () => {
describe('importEntities', () => {
it('invokes importEntities without error', async () => {
const client = new datastoreadminModule.v1.DatastoreAdminClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -716,7 +728,7 @@ describe('v1.DatastoreAdminClient', () => {
it('invokes importEntities without error using callback', async () => {
const client = new datastoreadminModule.v1.DatastoreAdminClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -770,7 +782,7 @@ describe('v1.DatastoreAdminClient', () => {
it('invokes importEntities with call error', async () => {
const client = new datastoreadminModule.v1.DatastoreAdminClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -801,7 +813,7 @@ describe('v1.DatastoreAdminClient', () => {
it('invokes importEntities with LRO error', async () => {
const client = new datastoreadminModule.v1.DatastoreAdminClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -834,7 +846,7 @@ describe('v1.DatastoreAdminClient', () => {
it('invokes checkImportEntitiesProgress without error', async () => {
const client = new datastoreadminModule.v1.DatastoreAdminClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -842,8 +854,8 @@ describe('v1.DatastoreAdminClient', () => {
new operationsProtos.google.longrunning.Operation(),
);
expectedResponse.name = 'test';
- expectedResponse.response = {type_url: 'url', value: Buffer.from('')};
- expectedResponse.metadata = {type_url: 'url', value: Buffer.from('')};
+ expectedResponse.response = { type_url: 'url', value: Buffer.from('') };
+ expectedResponse.metadata = { type_url: 'url', value: Buffer.from('') };
client.operationsClient.getOperation = stubSimpleCall(expectedResponse);
const decodedOperation = await client.checkImportEntitiesProgress(
@@ -856,7 +868,7 @@ describe('v1.DatastoreAdminClient', () => {
it('invokes checkImportEntitiesProgress with error', async () => {
const client = new datastoreadminModule.v1.DatastoreAdminClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -877,7 +889,7 @@ describe('v1.DatastoreAdminClient', () => {
describe('createIndex', () => {
it('invokes createIndex without error', async () => {
const client = new datastoreadminModule.v1.DatastoreAdminClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -909,7 +921,7 @@ describe('v1.DatastoreAdminClient', () => {
it('invokes createIndex without error using callback', async () => {
const client = new datastoreadminModule.v1.DatastoreAdminClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -963,7 +975,7 @@ describe('v1.DatastoreAdminClient', () => {
it('invokes createIndex with call error', async () => {
const client = new datastoreadminModule.v1.DatastoreAdminClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -994,7 +1006,7 @@ describe('v1.DatastoreAdminClient', () => {
it('invokes createIndex with LRO error', async () => {
const client = new datastoreadminModule.v1.DatastoreAdminClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -1027,7 +1039,7 @@ describe('v1.DatastoreAdminClient', () => {
it('invokes checkCreateIndexProgress without error', async () => {
const client = new datastoreadminModule.v1.DatastoreAdminClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -1035,8 +1047,8 @@ describe('v1.DatastoreAdminClient', () => {
new operationsProtos.google.longrunning.Operation(),
);
expectedResponse.name = 'test';
- expectedResponse.response = {type_url: 'url', value: Buffer.from('')};
- expectedResponse.metadata = {type_url: 'url', value: Buffer.from('')};
+ expectedResponse.response = { type_url: 'url', value: Buffer.from('') };
+ expectedResponse.metadata = { type_url: 'url', value: Buffer.from('') };
client.operationsClient.getOperation = stubSimpleCall(expectedResponse);
const decodedOperation = await client.checkCreateIndexProgress(
@@ -1049,7 +1061,7 @@ describe('v1.DatastoreAdminClient', () => {
it('invokes checkCreateIndexProgress with error', async () => {
const client = new datastoreadminModule.v1.DatastoreAdminClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -1067,7 +1079,7 @@ describe('v1.DatastoreAdminClient', () => {
describe('deleteIndex', () => {
it('invokes deleteIndex without error', async () => {
const client = new datastoreadminModule.v1.DatastoreAdminClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -1104,7 +1116,7 @@ describe('v1.DatastoreAdminClient', () => {
it('invokes deleteIndex without error using callback', async () => {
const client = new datastoreadminModule.v1.DatastoreAdminClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -1163,7 +1175,7 @@ describe('v1.DatastoreAdminClient', () => {
it('invokes deleteIndex with call error', async () => {
const client = new datastoreadminModule.v1.DatastoreAdminClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -1199,7 +1211,7 @@ describe('v1.DatastoreAdminClient', () => {
it('invokes deleteIndex with LRO error', async () => {
const client = new datastoreadminModule.v1.DatastoreAdminClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -1237,7 +1249,7 @@ describe('v1.DatastoreAdminClient', () => {
it('invokes checkDeleteIndexProgress without error', async () => {
const client = new datastoreadminModule.v1.DatastoreAdminClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -1245,8 +1257,8 @@ describe('v1.DatastoreAdminClient', () => {
new operationsProtos.google.longrunning.Operation(),
);
expectedResponse.name = 'test';
- expectedResponse.response = {type_url: 'url', value: Buffer.from('')};
- expectedResponse.metadata = {type_url: 'url', value: Buffer.from('')};
+ expectedResponse.response = { type_url: 'url', value: Buffer.from('') };
+ expectedResponse.metadata = { type_url: 'url', value: Buffer.from('') };
client.operationsClient.getOperation = stubSimpleCall(expectedResponse);
const decodedOperation = await client.checkDeleteIndexProgress(
@@ -1259,7 +1271,7 @@ describe('v1.DatastoreAdminClient', () => {
it('invokes checkDeleteIndexProgress with error', async () => {
const client = new datastoreadminModule.v1.DatastoreAdminClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -1277,7 +1289,7 @@ describe('v1.DatastoreAdminClient', () => {
describe('listIndexes', () => {
it('invokes listIndexes without error', async () => {
const client = new datastoreadminModule.v1.DatastoreAdminClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -1310,7 +1322,7 @@ describe('v1.DatastoreAdminClient', () => {
it('invokes listIndexes without error using callback', async () => {
const client = new datastoreadminModule.v1.DatastoreAdminClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -1359,7 +1371,7 @@ describe('v1.DatastoreAdminClient', () => {
it('invokes listIndexes with error', async () => {
const client = new datastoreadminModule.v1.DatastoreAdminClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -1390,7 +1402,7 @@ describe('v1.DatastoreAdminClient', () => {
it('invokes listIndexesStream without error', async () => {
const client = new datastoreadminModule.v1.DatastoreAdminClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -1444,7 +1456,7 @@ describe('v1.DatastoreAdminClient', () => {
it('invokes listIndexesStream with error', async () => {
const client = new datastoreadminModule.v1.DatastoreAdminClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -1495,7 +1507,7 @@ describe('v1.DatastoreAdminClient', () => {
it('uses async iteration with listIndexes without error', async () => {
const client = new datastoreadminModule.v1.DatastoreAdminClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -1538,7 +1550,7 @@ describe('v1.DatastoreAdminClient', () => {
it('uses async iteration with listIndexes with error', async () => {
const client = new datastoreadminModule.v1.DatastoreAdminClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -1581,7 +1593,7 @@ describe('v1.DatastoreAdminClient', () => {
describe('getOperation', () => {
it('invokes getOperation without error', async () => {
const client = new datastoreadminModule.v1.DatastoreAdminClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -1602,7 +1614,7 @@ describe('v1.DatastoreAdminClient', () => {
});
it('invokes getOperation without error using callback', async () => {
const client = new datastoreadminModule.v1.DatastoreAdminClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
const request = generateSampleMessage(
@@ -1615,20 +1627,24 @@ describe('v1.DatastoreAdminClient', () => {
.stub()
.callsArgWith(2, null, expectedResponse);
const promise = new Promise((resolve, reject) => {
- client.operationsClient.getOperation(
- request,
- undefined,
- (
- err?: Error | null,
- result?: operationsProtos.google.longrunning.Operation | null,
- ) => {
- if (err) {
- reject(err);
- } else {
- resolve(result);
- }
- },
- );
+ client.operationsClient
+ .getOperation(
+ request,
+ undefined,
+ (
+ err?: Error | null,
+ result?: operationsProtos.google.longrunning.Operation | null,
+ ) => {
+ if (err) {
+ reject(err);
+ } else {
+ resolve(result);
+ }
+ },
+ )
+ .catch((err) => {
+ throw err;
+ });
});
const response = await promise;
assert.deepStrictEqual(response, expectedResponse);
@@ -1636,7 +1652,7 @@ describe('v1.DatastoreAdminClient', () => {
});
it('invokes getOperation with error', async () => {
const client = new datastoreadminModule.v1.DatastoreAdminClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
const request = generateSampleMessage(
@@ -1660,7 +1676,7 @@ describe('v1.DatastoreAdminClient', () => {
describe('cancelOperation', () => {
it('invokes cancelOperation without error', async () => {
const client = new datastoreadminModule.v1.DatastoreAdminClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -1682,7 +1698,7 @@ describe('v1.DatastoreAdminClient', () => {
});
it('invokes cancelOperation without error using callback', async () => {
const client = new datastoreadminModule.v1.DatastoreAdminClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
const request = generateSampleMessage(
@@ -1695,20 +1711,24 @@ describe('v1.DatastoreAdminClient', () => {
.stub()
.callsArgWith(2, null, expectedResponse);
const promise = new Promise((resolve, reject) => {
- client.operationsClient.cancelOperation(
- request,
- undefined,
- (
- err?: Error | null,
- result?: protos.google.protobuf.Empty | null,
- ) => {
- if (err) {
- reject(err);
- } else {
- resolve(result);
- }
- },
- );
+ client.operationsClient
+ .cancelOperation(
+ request,
+ undefined,
+ (
+ err?: Error | null,
+ result?: protos.google.protobuf.Empty | null,
+ ) => {
+ if (err) {
+ reject(err);
+ } else {
+ resolve(result);
+ }
+ },
+ )
+ .catch((err) => {
+ throw err;
+ });
});
const response = await promise;
assert.deepStrictEqual(response, expectedResponse);
@@ -1716,7 +1736,7 @@ describe('v1.DatastoreAdminClient', () => {
});
it('invokes cancelOperation with error', async () => {
const client = new datastoreadminModule.v1.DatastoreAdminClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
const request = generateSampleMessage(
@@ -1740,7 +1760,7 @@ describe('v1.DatastoreAdminClient', () => {
describe('deleteOperation', () => {
it('invokes deleteOperation without error', async () => {
const client = new datastoreadminModule.v1.DatastoreAdminClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -1762,7 +1782,7 @@ describe('v1.DatastoreAdminClient', () => {
});
it('invokes deleteOperation without error using callback', async () => {
const client = new datastoreadminModule.v1.DatastoreAdminClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
const request = generateSampleMessage(
@@ -1775,20 +1795,24 @@ describe('v1.DatastoreAdminClient', () => {
.stub()
.callsArgWith(2, null, expectedResponse);
const promise = new Promise((resolve, reject) => {
- client.operationsClient.deleteOperation(
- request,
- undefined,
- (
- err?: Error | null,
- result?: protos.google.protobuf.Empty | null,
- ) => {
- if (err) {
- reject(err);
- } else {
- resolve(result);
- }
- },
- );
+ client.operationsClient
+ .deleteOperation(
+ request,
+ undefined,
+ (
+ err?: Error | null,
+ result?: protos.google.protobuf.Empty | null,
+ ) => {
+ if (err) {
+ reject(err);
+ } else {
+ resolve(result);
+ }
+ },
+ )
+ .catch((err) => {
+ throw err;
+ });
});
const response = await promise;
assert.deepStrictEqual(response, expectedResponse);
@@ -1796,7 +1820,7 @@ describe('v1.DatastoreAdminClient', () => {
});
it('invokes deleteOperation with error', async () => {
const client = new datastoreadminModule.v1.DatastoreAdminClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
const request = generateSampleMessage(
@@ -1820,7 +1844,7 @@ describe('v1.DatastoreAdminClient', () => {
describe('listOperationsAsync', () => {
it('uses async iteration with listOperations without error', async () => {
const client = new datastoreadminModule.v1.DatastoreAdminClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
const request = generateSampleMessage(
@@ -1855,7 +1879,7 @@ describe('v1.DatastoreAdminClient', () => {
});
it('uses async iteration with listOperations with error', async () => {
const client = new datastoreadminModule.v1.DatastoreAdminClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
diff --git a/handwritten/datastore/test/gapic_datastore_v1.ts b/handwritten/datastore/test/gapic_datastore_v1.ts
index 23c97b88c0ed..5d08da074b51 100644
--- a/handwritten/datastore/test/gapic_datastore_v1.ts
+++ b/handwritten/datastore/test/gapic_datastore_v1.ts
@@ -1,4 +1,4 @@
-// Copyright 2025 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -19,11 +19,11 @@
import * as protos from '../protos/protos';
import * as assert from 'assert';
import * as sinon from 'sinon';
-import {SinonStub} from 'sinon';
-import {describe, it} from 'mocha';
+import { SinonStub } from 'sinon';
+import { describe, it } from 'mocha';
import * as datastoreModule from '../src';
-import {protobuf, operationsProtos} from 'google-gax';
+import { protobuf, operationsProtos } from 'google-gax';
// Dynamically loaded proto JSON is needed to get the type information
// to fill in default values for request objects
@@ -43,7 +43,7 @@ function getTypeDefaultValue(typeName: string, fields: string[]) {
function generateSampleMessage(instance: T) {
const filledObject = (
instance.constructor as typeof protobuf.Message
- ).toObject(instance as protobuf.Message, {defaults: true});
+ ).toObject(instance as protobuf.Message, { defaults: true });
return (instance.constructor as typeof protobuf.Message).fromObject(
filledObject,
) as T;
@@ -77,9 +77,9 @@ function stubAsyncIterationCall(
return Promise.reject(error);
}
if (counter >= responses!.length) {
- return Promise.resolve({done: true, value: undefined});
+ return Promise.resolve({ done: true, value: undefined });
}
- return Promise.resolve({done: false, value: responses![counter++]});
+ return Promise.resolve({ done: false, value: responses![counter++] });
},
};
},
@@ -197,7 +197,7 @@ describe('v1.DatastoreClient', () => {
it('has initialize method and supports deferred initialization', async () => {
const client = new datastoreModule.v1.DatastoreClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
assert.strictEqual(client.datastoreStub, undefined);
@@ -205,35 +205,45 @@ describe('v1.DatastoreClient', () => {
assert(client.datastoreStub);
});
- it('has close method for the initialized client', done => {
+ it('has close method for the initialized client', (done) => {
const client = new datastoreModule.v1.DatastoreClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize().catch((err: any) => {
+ client.initialize().catch((err) => {
throw err;
});
assert(client.datastoreStub);
- client.close().then(() => {
- done();
- });
+ client
+ .close()
+ .then(() => {
+ done();
+ })
+ .catch((err) => {
+ throw err;
+ });
});
- it('has close method for the non-initialized client', done => {
+ it('has close method for the non-initialized client', (done) => {
const client = new datastoreModule.v1.DatastoreClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
assert.strictEqual(client.datastoreStub, undefined);
- client.close().then(() => {
- done();
- });
+ client
+ .close()
+ .then(() => {
+ done();
+ })
+ .catch((err) => {
+ throw err;
+ });
});
it('has getProjectId method', async () => {
const fakeProjectId = 'fake-project-id';
const client = new datastoreModule.v1.DatastoreClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
client.auth.getProjectId = sinon.stub().resolves(fakeProjectId);
@@ -245,7 +255,7 @@ describe('v1.DatastoreClient', () => {
it('has getProjectId method with callback', async () => {
const fakeProjectId = 'fake-project-id';
const client = new datastoreModule.v1.DatastoreClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
client.auth.getProjectId = sinon
@@ -268,7 +278,7 @@ describe('v1.DatastoreClient', () => {
describe('lookup', () => {
it('invokes lookup without error', async () => {
const client = new datastoreModule.v1.DatastoreClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -296,7 +306,7 @@ describe('v1.DatastoreClient', () => {
it('invokes lookup without error using callback', async () => {
const client = new datastoreModule.v1.DatastoreClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -340,7 +350,7 @@ describe('v1.DatastoreClient', () => {
it('invokes lookup with error', async () => {
const client = new datastoreModule.v1.DatastoreClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -365,7 +375,7 @@ describe('v1.DatastoreClient', () => {
it('invokes lookup with closed client', async () => {
const client = new datastoreModule.v1.DatastoreClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -375,7 +385,9 @@ describe('v1.DatastoreClient', () => {
// path template is empty
request.databaseId = 'value';
const expectedError = new Error('The client has already been closed.');
- client.close();
+ client.close().catch((err) => {
+ throw err;
+ });
await assert.rejects(client.lookup(request), expectedError);
});
});
@@ -383,7 +395,7 @@ describe('v1.DatastoreClient', () => {
describe('runQuery', () => {
it('invokes runQuery without error', async () => {
const client = new datastoreModule.v1.DatastoreClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -411,7 +423,7 @@ describe('v1.DatastoreClient', () => {
it('invokes runQuery without error using callback', async () => {
const client = new datastoreModule.v1.DatastoreClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -455,7 +467,7 @@ describe('v1.DatastoreClient', () => {
it('invokes runQuery with error', async () => {
const client = new datastoreModule.v1.DatastoreClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -480,7 +492,7 @@ describe('v1.DatastoreClient', () => {
it('invokes runQuery with closed client', async () => {
const client = new datastoreModule.v1.DatastoreClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -490,7 +502,9 @@ describe('v1.DatastoreClient', () => {
// path template is empty
request.databaseId = 'value';
const expectedError = new Error('The client has already been closed.');
- client.close();
+ client.close().catch((err) => {
+ throw err;
+ });
await assert.rejects(client.runQuery(request), expectedError);
});
});
@@ -498,7 +512,7 @@ describe('v1.DatastoreClient', () => {
describe('runAggregationQuery', () => {
it('invokes runAggregationQuery without error', async () => {
const client = new datastoreModule.v1.DatastoreClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -527,7 +541,7 @@ describe('v1.DatastoreClient', () => {
it('invokes runAggregationQuery without error using callback', async () => {
const client = new datastoreModule.v1.DatastoreClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -571,7 +585,7 @@ describe('v1.DatastoreClient', () => {
it('invokes runAggregationQuery with error', async () => {
const client = new datastoreModule.v1.DatastoreClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -599,7 +613,7 @@ describe('v1.DatastoreClient', () => {
it('invokes runAggregationQuery with closed client', async () => {
const client = new datastoreModule.v1.DatastoreClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -609,7 +623,9 @@ describe('v1.DatastoreClient', () => {
// path template is empty
request.databaseId = 'value';
const expectedError = new Error('The client has already been closed.');
- client.close();
+ client.close().catch((err) => {
+ throw err;
+ });
await assert.rejects(client.runAggregationQuery(request), expectedError);
});
});
@@ -617,7 +633,7 @@ describe('v1.DatastoreClient', () => {
describe('beginTransaction', () => {
it('invokes beginTransaction without error', async () => {
const client = new datastoreModule.v1.DatastoreClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -645,7 +661,7 @@ describe('v1.DatastoreClient', () => {
it('invokes beginTransaction without error using callback', async () => {
const client = new datastoreModule.v1.DatastoreClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -689,7 +705,7 @@ describe('v1.DatastoreClient', () => {
it('invokes beginTransaction with error', async () => {
const client = new datastoreModule.v1.DatastoreClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -717,7 +733,7 @@ describe('v1.DatastoreClient', () => {
it('invokes beginTransaction with closed client', async () => {
const client = new datastoreModule.v1.DatastoreClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -727,7 +743,9 @@ describe('v1.DatastoreClient', () => {
// path template is empty
request.databaseId = 'value';
const expectedError = new Error('The client has already been closed.');
- client.close();
+ client.close().catch((err) => {
+ throw err;
+ });
await assert.rejects(client.beginTransaction(request), expectedError);
});
});
@@ -735,7 +753,7 @@ describe('v1.DatastoreClient', () => {
describe('commit', () => {
it('invokes commit without error', async () => {
const client = new datastoreModule.v1.DatastoreClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -763,7 +781,7 @@ describe('v1.DatastoreClient', () => {
it('invokes commit without error using callback', async () => {
const client = new datastoreModule.v1.DatastoreClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -807,7 +825,7 @@ describe('v1.DatastoreClient', () => {
it('invokes commit with error', async () => {
const client = new datastoreModule.v1.DatastoreClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -832,7 +850,7 @@ describe('v1.DatastoreClient', () => {
it('invokes commit with closed client', async () => {
const client = new datastoreModule.v1.DatastoreClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -842,7 +860,9 @@ describe('v1.DatastoreClient', () => {
// path template is empty
request.databaseId = 'value';
const expectedError = new Error('The client has already been closed.');
- client.close();
+ client.close().catch((err) => {
+ throw err;
+ });
await assert.rejects(client.commit(request), expectedError);
});
});
@@ -850,7 +870,7 @@ describe('v1.DatastoreClient', () => {
describe('rollback', () => {
it('invokes rollback without error', async () => {
const client = new datastoreModule.v1.DatastoreClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -878,7 +898,7 @@ describe('v1.DatastoreClient', () => {
it('invokes rollback without error using callback', async () => {
const client = new datastoreModule.v1.DatastoreClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -922,7 +942,7 @@ describe('v1.DatastoreClient', () => {
it('invokes rollback with error', async () => {
const client = new datastoreModule.v1.DatastoreClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -947,7 +967,7 @@ describe('v1.DatastoreClient', () => {
it('invokes rollback with closed client', async () => {
const client = new datastoreModule.v1.DatastoreClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -957,7 +977,9 @@ describe('v1.DatastoreClient', () => {
// path template is empty
request.databaseId = 'value';
const expectedError = new Error('The client has already been closed.');
- client.close();
+ client.close().catch((err) => {
+ throw err;
+ });
await assert.rejects(client.rollback(request), expectedError);
});
});
@@ -965,7 +987,7 @@ describe('v1.DatastoreClient', () => {
describe('allocateIds', () => {
it('invokes allocateIds without error', async () => {
const client = new datastoreModule.v1.DatastoreClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -993,7 +1015,7 @@ describe('v1.DatastoreClient', () => {
it('invokes allocateIds without error using callback', async () => {
const client = new datastoreModule.v1.DatastoreClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -1037,7 +1059,7 @@ describe('v1.DatastoreClient', () => {
it('invokes allocateIds with error', async () => {
const client = new datastoreModule.v1.DatastoreClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -1065,7 +1087,7 @@ describe('v1.DatastoreClient', () => {
it('invokes allocateIds with closed client', async () => {
const client = new datastoreModule.v1.DatastoreClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -1075,7 +1097,9 @@ describe('v1.DatastoreClient', () => {
// path template is empty
request.databaseId = 'value';
const expectedError = new Error('The client has already been closed.');
- client.close();
+ client.close().catch((err) => {
+ throw err;
+ });
await assert.rejects(client.allocateIds(request), expectedError);
});
});
@@ -1083,7 +1107,7 @@ describe('v1.DatastoreClient', () => {
describe('reserveIds', () => {
it('invokes reserveIds without error', async () => {
const client = new datastoreModule.v1.DatastoreClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -1111,7 +1135,7 @@ describe('v1.DatastoreClient', () => {
it('invokes reserveIds without error using callback', async () => {
const client = new datastoreModule.v1.DatastoreClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -1155,7 +1179,7 @@ describe('v1.DatastoreClient', () => {
it('invokes reserveIds with error', async () => {
const client = new datastoreModule.v1.DatastoreClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -1183,7 +1207,7 @@ describe('v1.DatastoreClient', () => {
it('invokes reserveIds with closed client', async () => {
const client = new datastoreModule.v1.DatastoreClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -1193,14 +1217,16 @@ describe('v1.DatastoreClient', () => {
// path template is empty
request.databaseId = 'value';
const expectedError = new Error('The client has already been closed.');
- client.close();
+ client.close().catch((err) => {
+ throw err;
+ });
await assert.rejects(client.reserveIds(request), expectedError);
});
});
describe('getOperation', () => {
it('invokes getOperation without error', async () => {
const client = new datastoreModule.v1.DatastoreClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -1221,7 +1247,7 @@ describe('v1.DatastoreClient', () => {
});
it('invokes getOperation without error using callback', async () => {
const client = new datastoreModule.v1.DatastoreClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
const request = generateSampleMessage(
@@ -1234,20 +1260,24 @@ describe('v1.DatastoreClient', () => {
.stub()
.callsArgWith(2, null, expectedResponse);
const promise = new Promise((resolve, reject) => {
- client.operationsClient.getOperation(
- request,
- undefined,
- (
- err?: Error | null,
- result?: operationsProtos.google.longrunning.Operation | null,
- ) => {
- if (err) {
- reject(err);
- } else {
- resolve(result);
- }
- },
- );
+ client.operationsClient
+ .getOperation(
+ request,
+ undefined,
+ (
+ err?: Error | null,
+ result?: operationsProtos.google.longrunning.Operation | null,
+ ) => {
+ if (err) {
+ reject(err);
+ } else {
+ resolve(result);
+ }
+ },
+ )
+ .catch((err) => {
+ throw err;
+ });
});
const response = await promise;
assert.deepStrictEqual(response, expectedResponse);
@@ -1255,7 +1285,7 @@ describe('v1.DatastoreClient', () => {
});
it('invokes getOperation with error', async () => {
const client = new datastoreModule.v1.DatastoreClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
const request = generateSampleMessage(
@@ -1279,7 +1309,7 @@ describe('v1.DatastoreClient', () => {
describe('cancelOperation', () => {
it('invokes cancelOperation without error', async () => {
const client = new datastoreModule.v1.DatastoreClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -1301,7 +1331,7 @@ describe('v1.DatastoreClient', () => {
});
it('invokes cancelOperation without error using callback', async () => {
const client = new datastoreModule.v1.DatastoreClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
const request = generateSampleMessage(
@@ -1314,20 +1344,24 @@ describe('v1.DatastoreClient', () => {
.stub()
.callsArgWith(2, null, expectedResponse);
const promise = new Promise((resolve, reject) => {
- client.operationsClient.cancelOperation(
- request,
- undefined,
- (
- err?: Error | null,
- result?: protos.google.protobuf.Empty | null,
- ) => {
- if (err) {
- reject(err);
- } else {
- resolve(result);
- }
- },
- );
+ client.operationsClient
+ .cancelOperation(
+ request,
+ undefined,
+ (
+ err?: Error | null,
+ result?: protos.google.protobuf.Empty | null,
+ ) => {
+ if (err) {
+ reject(err);
+ } else {
+ resolve(result);
+ }
+ },
+ )
+ .catch((err) => {
+ throw err;
+ });
});
const response = await promise;
assert.deepStrictEqual(response, expectedResponse);
@@ -1335,7 +1369,7 @@ describe('v1.DatastoreClient', () => {
});
it('invokes cancelOperation with error', async () => {
const client = new datastoreModule.v1.DatastoreClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
const request = generateSampleMessage(
@@ -1359,7 +1393,7 @@ describe('v1.DatastoreClient', () => {
describe('deleteOperation', () => {
it('invokes deleteOperation without error', async () => {
const client = new datastoreModule.v1.DatastoreClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
@@ -1381,7 +1415,7 @@ describe('v1.DatastoreClient', () => {
});
it('invokes deleteOperation without error using callback', async () => {
const client = new datastoreModule.v1.DatastoreClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
const request = generateSampleMessage(
@@ -1394,20 +1428,24 @@ describe('v1.DatastoreClient', () => {
.stub()
.callsArgWith(2, null, expectedResponse);
const promise = new Promise((resolve, reject) => {
- client.operationsClient.deleteOperation(
- request,
- undefined,
- (
- err?: Error | null,
- result?: protos.google.protobuf.Empty | null,
- ) => {
- if (err) {
- reject(err);
- } else {
- resolve(result);
- }
- },
- );
+ client.operationsClient
+ .deleteOperation(
+ request,
+ undefined,
+ (
+ err?: Error | null,
+ result?: protos.google.protobuf.Empty | null,
+ ) => {
+ if (err) {
+ reject(err);
+ } else {
+ resolve(result);
+ }
+ },
+ )
+ .catch((err) => {
+ throw err;
+ });
});
const response = await promise;
assert.deepStrictEqual(response, expectedResponse);
@@ -1415,7 +1453,7 @@ describe('v1.DatastoreClient', () => {
});
it('invokes deleteOperation with error', async () => {
const client = new datastoreModule.v1.DatastoreClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
const request = generateSampleMessage(
@@ -1439,7 +1477,7 @@ describe('v1.DatastoreClient', () => {
describe('listOperationsAsync', () => {
it('uses async iteration with listOperations without error', async () => {
const client = new datastoreModule.v1.DatastoreClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
const request = generateSampleMessage(
@@ -1474,7 +1512,7 @@ describe('v1.DatastoreClient', () => {
});
it('uses async iteration with listOperations with error', async () => {
const client = new datastoreModule.v1.DatastoreClient({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
await client.initialize();
diff --git a/handwritten/datastore/tsconfig.json b/handwritten/datastore/tsconfig.json
index a4b71611d51f..ca73e7bfc824 100644
--- a/handwritten/datastore/tsconfig.json
+++ b/handwritten/datastore/tsconfig.json
@@ -5,7 +5,7 @@
"outDir": "build",
"resolveJsonModule": true,
"lib": [
- "es2018",
+ "es2023",
"dom"
]
},
@@ -15,9 +15,8 @@
"test/*.ts",
"test/**/*.ts",
"system-test/*.ts",
- "mock-server/datastore-server.ts",
- "src/v1/datastore_client_config.json",
- "protos/protos.json",
- "src/v1/datastore_admin_client_config.json"
+ "src/**/*.json",
+ "samples/**/*.json",
+ "protos/protos.json"
]
}
diff --git a/handwritten/datastore/webpack.config.js b/handwritten/datastore/webpack.config.js
index f37985e35894..6df22195e271 100644
--- a/handwritten/datastore/webpack.config.js
+++ b/handwritten/datastore/webpack.config.js
@@ -36,27 +36,27 @@ module.exports = {
{
test: /\.tsx?$/,
use: 'ts-loader',
- exclude: /node_modules/,
+ exclude: /node_modules/
},
{
test: /node_modules[\\/]@grpc[\\/]grpc-js/,
- use: 'null-loader',
+ use: 'null-loader'
},
{
test: /node_modules[\\/]grpc/,
- use: 'null-loader',
+ use: 'null-loader'
},
{
test: /node_modules[\\/]retry-request/,
- use: 'null-loader',
+ use: 'null-loader'
},
{
test: /node_modules[\\/]https?-proxy-agent/,
- use: 'null-loader',
+ use: 'null-loader'
},
{
test: /node_modules[\\/]gtoken/,
- use: 'null-loader',
+ use: 'null-loader'
},
],
},
diff --git a/handwritten/logging/.OwlBot.yaml b/handwritten/logging/.OwlBot.yaml
index 360ece41c701..7f3806ae0318 100644
--- a/handwritten/logging/.OwlBot.yaml
+++ b/handwritten/logging/.OwlBot.yaml
@@ -1,10 +1,10 @@
-# Copyright 2021 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
-# http://www.apache.org/licenses/LICENSE-2.0
+# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
@@ -12,13 +12,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-
-deep-remove-regex:
- - /owl-bot-staging
-
deep-copy-regex:
- - source: /google/logging/(v.*)/.*-nodejs
- dest: /owl-bot-staging/logging/$1
-
-begin-after-commit-hash: fb91803ccef5d7c695139b22788b309e2197856b
+ - source: /google/logging/google-logging-nodejs
+ dest: /owl-bot-staging/google-logging
+api-name: logging
\ No newline at end of file
diff --git a/handwritten/logging/.jsdoc.js b/handwritten/logging/.jsdoc.js
index 1f6bfb791904..999b4824630f 100644
--- a/handwritten/logging/.jsdoc.js
+++ b/handwritten/logging/.jsdoc.js
@@ -1,4 +1,4 @@
-// Copyright 2024 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -40,7 +40,7 @@ module.exports = {
includePattern: '\\.js$'
},
templates: {
- copyright: 'Copyright 2024 Google LLC',
+ copyright: 'Copyright 2026 Google LLC',
includeDate: false,
sourceFiles: false,
systemName: '@google-cloud/logging',
diff --git a/handwritten/logging/protos/google/logging/type/http_request.proto b/handwritten/logging/protos/google/logging/type/http_request.proto
index fa2dd64e8346..b31522b69c7c 100644
--- a/handwritten/logging/protos/google/logging/type/http_request.proto
+++ b/handwritten/logging/protos/google/logging/type/http_request.proto
@@ -1,4 +1,4 @@
-// Copyright 2024 Google LLC
+// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
diff --git a/handwritten/logging/protos/google/logging/type/log_severity.proto b/handwritten/logging/protos/google/logging/type/log_severity.proto
index 96ff874688ab..406b8173a3aa 100644
--- a/handwritten/logging/protos/google/logging/type/log_severity.proto
+++ b/handwritten/logging/protos/google/logging/type/log_severity.proto
@@ -1,4 +1,4 @@
-// Copyright 2024 Google LLC
+// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
diff --git a/handwritten/logging/protos/google/logging/v2/log_entry.proto b/handwritten/logging/protos/google/logging/v2/log_entry.proto
index 2404219f6aa2..820b047b5735 100644
--- a/handwritten/logging/protos/google/logging/v2/log_entry.proto
+++ b/handwritten/logging/protos/google/logging/v2/log_entry.proto
@@ -1,4 +1,4 @@
-// Copyright 2024 Google LLC
+// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
diff --git a/handwritten/logging/protos/google/logging/v2/logging.proto b/handwritten/logging/protos/google/logging/v2/logging.proto
index cd686e9ff393..e984d6ec0d5b 100644
--- a/handwritten/logging/protos/google/logging/v2/logging.proto
+++ b/handwritten/logging/protos/google/logging/v2/logging.proto
@@ -1,4 +1,4 @@
-// Copyright 2024 Google LLC
+// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
diff --git a/handwritten/logging/protos/google/logging/v2/logging_config.proto b/handwritten/logging/protos/google/logging/v2/logging_config.proto
index d914df1bae5b..05ed940b3972 100644
--- a/handwritten/logging/protos/google/logging/v2/logging_config.proto
+++ b/handwritten/logging/protos/google/logging/v2/logging_config.proto
@@ -1,4 +1,4 @@
-// Copyright 2024 Google LLC
+// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
diff --git a/handwritten/logging/protos/google/logging/v2/logging_metrics.proto b/handwritten/logging/protos/google/logging/v2/logging_metrics.proto
index a387ef2b4a56..a0575cff4471 100644
--- a/handwritten/logging/protos/google/logging/v2/logging_metrics.proto
+++ b/handwritten/logging/protos/google/logging/v2/logging_metrics.proto
@@ -1,4 +1,4 @@
-// Copyright 2024 Google LLC
+// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
diff --git a/handwritten/logging/samples/generated/v2/config_service_v2.copy_log_entries.js b/handwritten/logging/samples/generated/v2/config_service_v2.copy_log_entries.js
index cea748d5f793..bc4d30d85da4 100644
--- a/handwritten/logging/samples/generated/v2/config_service_v2.copy_log_entries.js
+++ b/handwritten/logging/samples/generated/v2/config_service_v2.copy_log_entries.js
@@ -1,4 +1,4 @@
-// Copyright 2024 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -16,6 +16,8 @@
// ** https://github.com/googleapis/gapic-generator-typescript **
// ** All changes to this file may be overwritten. **
+
+
'use strict';
function main(name, destination) {
diff --git a/handwritten/logging/samples/generated/v2/config_service_v2.create_bucket.js b/handwritten/logging/samples/generated/v2/config_service_v2.create_bucket.js
index 84fd76b4d59e..a78633507942 100644
--- a/handwritten/logging/samples/generated/v2/config_service_v2.create_bucket.js
+++ b/handwritten/logging/samples/generated/v2/config_service_v2.create_bucket.js
@@ -1,4 +1,4 @@
-// Copyright 2024 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -16,6 +16,8 @@
// ** https://github.com/googleapis/gapic-generator-typescript **
// ** All changes to this file may be overwritten. **
+
+
'use strict';
function main(parent, bucketId, bucket) {
diff --git a/handwritten/logging/samples/generated/v2/config_service_v2.create_bucket_async.js b/handwritten/logging/samples/generated/v2/config_service_v2.create_bucket_async.js
index 9d26cadd5c44..0cc98416c00c 100644
--- a/handwritten/logging/samples/generated/v2/config_service_v2.create_bucket_async.js
+++ b/handwritten/logging/samples/generated/v2/config_service_v2.create_bucket_async.js
@@ -1,4 +1,4 @@
-// Copyright 2024 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -16,6 +16,8 @@
// ** https://github.com/googleapis/gapic-generator-typescript **
// ** All changes to this file may be overwritten. **
+
+
'use strict';
function main(parent, bucketId, bucket) {
diff --git a/handwritten/logging/samples/generated/v2/config_service_v2.create_exclusion.js b/handwritten/logging/samples/generated/v2/config_service_v2.create_exclusion.js
index a5742a7b24bf..b1e6a0908c8a 100644
--- a/handwritten/logging/samples/generated/v2/config_service_v2.create_exclusion.js
+++ b/handwritten/logging/samples/generated/v2/config_service_v2.create_exclusion.js
@@ -1,4 +1,4 @@
-// Copyright 2024 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -16,6 +16,8 @@
// ** https://github.com/googleapis/gapic-generator-typescript **
// ** All changes to this file may be overwritten. **
+
+
'use strict';
function main(parent, exclusion) {
diff --git a/handwritten/logging/samples/generated/v2/config_service_v2.create_link.js b/handwritten/logging/samples/generated/v2/config_service_v2.create_link.js
index 31bcbe9b5419..b2923477952f 100644
--- a/handwritten/logging/samples/generated/v2/config_service_v2.create_link.js
+++ b/handwritten/logging/samples/generated/v2/config_service_v2.create_link.js
@@ -1,4 +1,4 @@
-// Copyright 2024 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -16,6 +16,8 @@
// ** https://github.com/googleapis/gapic-generator-typescript **
// ** All changes to this file may be overwritten. **
+
+
'use strict';
function main(parent, link, linkId) {
diff --git a/handwritten/logging/samples/generated/v2/config_service_v2.create_sink.js b/handwritten/logging/samples/generated/v2/config_service_v2.create_sink.js
index c210d088c8c3..b95361847c7a 100644
--- a/handwritten/logging/samples/generated/v2/config_service_v2.create_sink.js
+++ b/handwritten/logging/samples/generated/v2/config_service_v2.create_sink.js
@@ -1,4 +1,4 @@
-// Copyright 2024 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -16,6 +16,8 @@
// ** https://github.com/googleapis/gapic-generator-typescript **
// ** All changes to this file may be overwritten. **
+
+
'use strict';
function main(parent, sink) {
diff --git a/handwritten/logging/samples/generated/v2/config_service_v2.create_view.js b/handwritten/logging/samples/generated/v2/config_service_v2.create_view.js
index f7aa3784891c..30c237aa11d6 100644
--- a/handwritten/logging/samples/generated/v2/config_service_v2.create_view.js
+++ b/handwritten/logging/samples/generated/v2/config_service_v2.create_view.js
@@ -1,4 +1,4 @@
-// Copyright 2024 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -16,6 +16,8 @@
// ** https://github.com/googleapis/gapic-generator-typescript **
// ** All changes to this file may be overwritten. **
+
+
'use strict';
function main(parent, viewId, view) {
diff --git a/handwritten/logging/samples/generated/v2/config_service_v2.delete_bucket.js b/handwritten/logging/samples/generated/v2/config_service_v2.delete_bucket.js
index 7c3e0d3e7de7..94443cc58703 100644
--- a/handwritten/logging/samples/generated/v2/config_service_v2.delete_bucket.js
+++ b/handwritten/logging/samples/generated/v2/config_service_v2.delete_bucket.js
@@ -1,4 +1,4 @@
-// Copyright 2024 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -16,6 +16,8 @@
// ** https://github.com/googleapis/gapic-generator-typescript **
// ** All changes to this file may be overwritten. **
+
+
'use strict';
function main(name) {
diff --git a/handwritten/logging/samples/generated/v2/config_service_v2.delete_exclusion.js b/handwritten/logging/samples/generated/v2/config_service_v2.delete_exclusion.js
index d0e11e399978..c6b6b2d28cc6 100644
--- a/handwritten/logging/samples/generated/v2/config_service_v2.delete_exclusion.js
+++ b/handwritten/logging/samples/generated/v2/config_service_v2.delete_exclusion.js
@@ -1,4 +1,4 @@
-// Copyright 2024 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -16,6 +16,8 @@
// ** https://github.com/googleapis/gapic-generator-typescript **
// ** All changes to this file may be overwritten. **
+
+
'use strict';
function main(name) {
diff --git a/handwritten/logging/samples/generated/v2/config_service_v2.delete_link.js b/handwritten/logging/samples/generated/v2/config_service_v2.delete_link.js
index 61e21816f2e2..c8065914749c 100644
--- a/handwritten/logging/samples/generated/v2/config_service_v2.delete_link.js
+++ b/handwritten/logging/samples/generated/v2/config_service_v2.delete_link.js
@@ -1,4 +1,4 @@
-// Copyright 2024 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -16,6 +16,8 @@
// ** https://github.com/googleapis/gapic-generator-typescript **
// ** All changes to this file may be overwritten. **
+
+
'use strict';
function main(name) {
diff --git a/handwritten/logging/samples/generated/v2/config_service_v2.delete_sink.js b/handwritten/logging/samples/generated/v2/config_service_v2.delete_sink.js
index 57cd54a80bff..18088005bc9c 100644
--- a/handwritten/logging/samples/generated/v2/config_service_v2.delete_sink.js
+++ b/handwritten/logging/samples/generated/v2/config_service_v2.delete_sink.js
@@ -1,4 +1,4 @@
-// Copyright 2024 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -16,6 +16,8 @@
// ** https://github.com/googleapis/gapic-generator-typescript **
// ** All changes to this file may be overwritten. **
+
+
'use strict';
function main(sinkName) {
diff --git a/handwritten/logging/samples/generated/v2/config_service_v2.delete_view.js b/handwritten/logging/samples/generated/v2/config_service_v2.delete_view.js
index 4c3ac7a4844e..b99989576e74 100644
--- a/handwritten/logging/samples/generated/v2/config_service_v2.delete_view.js
+++ b/handwritten/logging/samples/generated/v2/config_service_v2.delete_view.js
@@ -1,4 +1,4 @@
-// Copyright 2024 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -16,6 +16,8 @@
// ** https://github.com/googleapis/gapic-generator-typescript **
// ** All changes to this file may be overwritten. **
+
+
'use strict';
function main(name) {
diff --git a/handwritten/logging/samples/generated/v2/config_service_v2.get_bucket.js b/handwritten/logging/samples/generated/v2/config_service_v2.get_bucket.js
index 1bcf694211d1..f85aa92d6aa0 100644
--- a/handwritten/logging/samples/generated/v2/config_service_v2.get_bucket.js
+++ b/handwritten/logging/samples/generated/v2/config_service_v2.get_bucket.js
@@ -1,4 +1,4 @@
-// Copyright 2024 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -16,6 +16,8 @@
// ** https://github.com/googleapis/gapic-generator-typescript **
// ** All changes to this file may be overwritten. **
+
+
'use strict';
function main(name) {
diff --git a/handwritten/logging/samples/generated/v2/config_service_v2.get_cmek_settings.js b/handwritten/logging/samples/generated/v2/config_service_v2.get_cmek_settings.js
index 39a9c897bfec..c138860cef23 100644
--- a/handwritten/logging/samples/generated/v2/config_service_v2.get_cmek_settings.js
+++ b/handwritten/logging/samples/generated/v2/config_service_v2.get_cmek_settings.js
@@ -1,4 +1,4 @@
-// Copyright 2024 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -16,6 +16,8 @@
// ** https://github.com/googleapis/gapic-generator-typescript **
// ** All changes to this file may be overwritten. **
+
+
'use strict';
function main(name) {
diff --git a/handwritten/logging/samples/generated/v2/config_service_v2.get_exclusion.js b/handwritten/logging/samples/generated/v2/config_service_v2.get_exclusion.js
index 34e779244b54..a14d73f7db63 100644
--- a/handwritten/logging/samples/generated/v2/config_service_v2.get_exclusion.js
+++ b/handwritten/logging/samples/generated/v2/config_service_v2.get_exclusion.js
@@ -1,4 +1,4 @@
-// Copyright 2024 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -16,6 +16,8 @@
// ** https://github.com/googleapis/gapic-generator-typescript **
// ** All changes to this file may be overwritten. **
+
+
'use strict';
function main(name) {
diff --git a/handwritten/logging/samples/generated/v2/config_service_v2.get_link.js b/handwritten/logging/samples/generated/v2/config_service_v2.get_link.js
index 23afd1bbf045..54622e6ea094 100644
--- a/handwritten/logging/samples/generated/v2/config_service_v2.get_link.js
+++ b/handwritten/logging/samples/generated/v2/config_service_v2.get_link.js
@@ -1,4 +1,4 @@
-// Copyright 2024 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -16,6 +16,8 @@
// ** https://github.com/googleapis/gapic-generator-typescript **
// ** All changes to this file may be overwritten. **
+
+
'use strict';
function main(name) {
diff --git a/handwritten/logging/samples/generated/v2/config_service_v2.get_settings.js b/handwritten/logging/samples/generated/v2/config_service_v2.get_settings.js
index 2f157e764f22..578f4e52a706 100644
--- a/handwritten/logging/samples/generated/v2/config_service_v2.get_settings.js
+++ b/handwritten/logging/samples/generated/v2/config_service_v2.get_settings.js
@@ -1,4 +1,4 @@
-// Copyright 2024 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -16,6 +16,8 @@
// ** https://github.com/googleapis/gapic-generator-typescript **
// ** All changes to this file may be overwritten. **
+
+
'use strict';
function main(name) {
diff --git a/handwritten/logging/samples/generated/v2/config_service_v2.get_sink.js b/handwritten/logging/samples/generated/v2/config_service_v2.get_sink.js
index a27ea5ed874a..5e0e2d045a7a 100644
--- a/handwritten/logging/samples/generated/v2/config_service_v2.get_sink.js
+++ b/handwritten/logging/samples/generated/v2/config_service_v2.get_sink.js
@@ -1,4 +1,4 @@
-// Copyright 2024 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -16,6 +16,8 @@
// ** https://github.com/googleapis/gapic-generator-typescript **
// ** All changes to this file may be overwritten. **
+
+
'use strict';
function main(sinkName) {
diff --git a/handwritten/logging/samples/generated/v2/config_service_v2.get_view.js b/handwritten/logging/samples/generated/v2/config_service_v2.get_view.js
index f6851c090f33..1e7a6c232788 100644
--- a/handwritten/logging/samples/generated/v2/config_service_v2.get_view.js
+++ b/handwritten/logging/samples/generated/v2/config_service_v2.get_view.js
@@ -1,4 +1,4 @@
-// Copyright 2024 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -16,6 +16,8 @@
// ** https://github.com/googleapis/gapic-generator-typescript **
// ** All changes to this file may be overwritten. **
+
+
'use strict';
function main(name) {
diff --git a/handwritten/logging/samples/generated/v2/config_service_v2.list_buckets.js b/handwritten/logging/samples/generated/v2/config_service_v2.list_buckets.js
index eecf060419f9..cea04327caf8 100644
--- a/handwritten/logging/samples/generated/v2/config_service_v2.list_buckets.js
+++ b/handwritten/logging/samples/generated/v2/config_service_v2.list_buckets.js
@@ -1,4 +1,4 @@
-// Copyright 2024 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -16,6 +16,8 @@
// ** https://github.com/googleapis/gapic-generator-typescript **
// ** All changes to this file may be overwritten. **
+
+
'use strict';
function main(parent) {
@@ -66,7 +68,7 @@ function main(parent) {
// Run request
const iterable = loggingClient.listBucketsAsync(request);
for await (const response of iterable) {
- console.log(response);
+ console.log(response);
}
}
diff --git a/handwritten/logging/samples/generated/v2/config_service_v2.list_exclusions.js b/handwritten/logging/samples/generated/v2/config_service_v2.list_exclusions.js
index 213a30bb6606..91646b57072f 100644
--- a/handwritten/logging/samples/generated/v2/config_service_v2.list_exclusions.js
+++ b/handwritten/logging/samples/generated/v2/config_service_v2.list_exclusions.js
@@ -1,4 +1,4 @@
-// Copyright 2024 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -16,6 +16,8 @@
// ** https://github.com/googleapis/gapic-generator-typescript **
// ** All changes to this file may be overwritten. **
+
+
'use strict';
function main(parent) {
@@ -63,7 +65,7 @@ function main(parent) {
// Run request
const iterable = loggingClient.listExclusionsAsync(request);
for await (const response of iterable) {
- console.log(response);
+ console.log(response);
}
}
diff --git a/handwritten/logging/samples/generated/v2/config_service_v2.list_links.js b/handwritten/logging/samples/generated/v2/config_service_v2.list_links.js
index 09b94f34333e..f2a67ec42075 100644
--- a/handwritten/logging/samples/generated/v2/config_service_v2.list_links.js
+++ b/handwritten/logging/samples/generated/v2/config_service_v2.list_links.js
@@ -1,4 +1,4 @@
-// Copyright 2024 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -16,6 +16,8 @@
// ** https://github.com/googleapis/gapic-generator-typescript **
// ** All changes to this file may be overwritten. **
+
+
'use strict';
function main(parent) {
@@ -60,7 +62,7 @@ function main(parent) {
// Run request
const iterable = loggingClient.listLinksAsync(request);
for await (const response of iterable) {
- console.log(response);
+ console.log(response);
}
}
diff --git a/handwritten/logging/samples/generated/v2/config_service_v2.list_sinks.js b/handwritten/logging/samples/generated/v2/config_service_v2.list_sinks.js
index e48139c6bc01..502f1ca1d791 100644
--- a/handwritten/logging/samples/generated/v2/config_service_v2.list_sinks.js
+++ b/handwritten/logging/samples/generated/v2/config_service_v2.list_sinks.js
@@ -1,4 +1,4 @@
-// Copyright 2024 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -16,6 +16,8 @@
// ** https://github.com/googleapis/gapic-generator-typescript **
// ** All changes to this file may be overwritten. **
+
+
'use strict';
function main(parent) {
@@ -63,7 +65,7 @@ function main(parent) {
// Run request
const iterable = loggingClient.listSinksAsync(request);
for await (const response of iterable) {
- console.log(response);
+ console.log(response);
}
}
diff --git a/handwritten/logging/samples/generated/v2/config_service_v2.list_views.js b/handwritten/logging/samples/generated/v2/config_service_v2.list_views.js
index c68583702e48..4685f8e0af57 100644
--- a/handwritten/logging/samples/generated/v2/config_service_v2.list_views.js
+++ b/handwritten/logging/samples/generated/v2/config_service_v2.list_views.js
@@ -1,4 +1,4 @@
-// Copyright 2024 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -16,6 +16,8 @@
// ** https://github.com/googleapis/gapic-generator-typescript **
// ** All changes to this file may be overwritten. **
+
+
'use strict';
function main(parent) {
@@ -60,7 +62,7 @@ function main(parent) {
// Run request
const iterable = loggingClient.listViewsAsync(request);
for await (const response of iterable) {
- console.log(response);
+ console.log(response);
}
}
diff --git a/handwritten/logging/samples/generated/v2/config_service_v2.undelete_bucket.js b/handwritten/logging/samples/generated/v2/config_service_v2.undelete_bucket.js
index 4721db37b467..7621abdf21ba 100644
--- a/handwritten/logging/samples/generated/v2/config_service_v2.undelete_bucket.js
+++ b/handwritten/logging/samples/generated/v2/config_service_v2.undelete_bucket.js
@@ -1,4 +1,4 @@
-// Copyright 2024 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -16,6 +16,8 @@
// ** https://github.com/googleapis/gapic-generator-typescript **
// ** All changes to this file may be overwritten. **
+
+
'use strict';
function main(name) {
diff --git a/handwritten/logging/samples/generated/v2/config_service_v2.update_bucket.js b/handwritten/logging/samples/generated/v2/config_service_v2.update_bucket.js
index 0d2138d8f45a..2d727570f288 100644
--- a/handwritten/logging/samples/generated/v2/config_service_v2.update_bucket.js
+++ b/handwritten/logging/samples/generated/v2/config_service_v2.update_bucket.js
@@ -1,4 +1,4 @@
-// Copyright 2024 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -16,6 +16,8 @@
// ** https://github.com/googleapis/gapic-generator-typescript **
// ** All changes to this file may be overwritten. **
+
+
'use strict';
function main(name, bucket, updateMask) {
diff --git a/handwritten/logging/samples/generated/v2/config_service_v2.update_bucket_async.js b/handwritten/logging/samples/generated/v2/config_service_v2.update_bucket_async.js
index 05cdad75cef3..4748c59bf582 100644
--- a/handwritten/logging/samples/generated/v2/config_service_v2.update_bucket_async.js
+++ b/handwritten/logging/samples/generated/v2/config_service_v2.update_bucket_async.js
@@ -1,4 +1,4 @@
-// Copyright 2024 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -16,6 +16,8 @@
// ** https://github.com/googleapis/gapic-generator-typescript **
// ** All changes to this file may be overwritten. **
+
+
'use strict';
function main(name, bucket, updateMask) {
diff --git a/handwritten/logging/samples/generated/v2/config_service_v2.update_cmek_settings.js b/handwritten/logging/samples/generated/v2/config_service_v2.update_cmek_settings.js
index 9f60a26a743e..4372c6b45c4e 100644
--- a/handwritten/logging/samples/generated/v2/config_service_v2.update_cmek_settings.js
+++ b/handwritten/logging/samples/generated/v2/config_service_v2.update_cmek_settings.js
@@ -1,4 +1,4 @@
-// Copyright 2024 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -16,6 +16,8 @@
// ** https://github.com/googleapis/gapic-generator-typescript **
// ** All changes to this file may be overwritten. **
+
+
'use strict';
function main(name, cmekSettings) {
diff --git a/handwritten/logging/samples/generated/v2/config_service_v2.update_exclusion.js b/handwritten/logging/samples/generated/v2/config_service_v2.update_exclusion.js
index 90e9a1cf1e6e..9682a7fe9b5b 100644
--- a/handwritten/logging/samples/generated/v2/config_service_v2.update_exclusion.js
+++ b/handwritten/logging/samples/generated/v2/config_service_v2.update_exclusion.js
@@ -1,4 +1,4 @@
-// Copyright 2024 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -16,6 +16,8 @@
// ** https://github.com/googleapis/gapic-generator-typescript **
// ** All changes to this file may be overwritten. **
+
+
'use strict';
function main(name, exclusion, updateMask) {
diff --git a/handwritten/logging/samples/generated/v2/config_service_v2.update_settings.js b/handwritten/logging/samples/generated/v2/config_service_v2.update_settings.js
index 837122b5be6d..d999f34a0e8f 100644
--- a/handwritten/logging/samples/generated/v2/config_service_v2.update_settings.js
+++ b/handwritten/logging/samples/generated/v2/config_service_v2.update_settings.js
@@ -1,4 +1,4 @@
-// Copyright 2024 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -16,6 +16,8 @@
// ** https://github.com/googleapis/gapic-generator-typescript **
// ** All changes to this file may be overwritten. **
+
+
'use strict';
function main(name, settings) {
diff --git a/handwritten/logging/samples/generated/v2/config_service_v2.update_sink.js b/handwritten/logging/samples/generated/v2/config_service_v2.update_sink.js
index bb78c55d0356..54b35fc59339 100644
--- a/handwritten/logging/samples/generated/v2/config_service_v2.update_sink.js
+++ b/handwritten/logging/samples/generated/v2/config_service_v2.update_sink.js
@@ -1,4 +1,4 @@
-// Copyright 2024 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -16,6 +16,8 @@
// ** https://github.com/googleapis/gapic-generator-typescript **
// ** All changes to this file may be overwritten. **
+
+
'use strict';
function main(sinkName, sink) {
@@ -43,7 +45,7 @@ function main(sinkName, sink) {
*/
// const sink = {}
/**
- * Optional. See sinks.create google.logging.v2.ConfigServiceV2.CreateSink
+ * Optional. See sinks.create google.logging.v2.ConfigServiceV2.CreateSink
* for a description of this field. When updating a sink, the effect of this
* field on the value of `writer_identity` in the updated sink depends on both
* the old and new values of this field:
diff --git a/handwritten/logging/samples/generated/v2/config_service_v2.update_view.js b/handwritten/logging/samples/generated/v2/config_service_v2.update_view.js
index 6717f1d8d23f..a9d9e9d5a55b 100644
--- a/handwritten/logging/samples/generated/v2/config_service_v2.update_view.js
+++ b/handwritten/logging/samples/generated/v2/config_service_v2.update_view.js
@@ -1,4 +1,4 @@
-// Copyright 2024 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -16,6 +16,8 @@
// ** https://github.com/googleapis/gapic-generator-typescript **
// ** All changes to this file may be overwritten. **
+
+
'use strict';
function main(name, view) {
diff --git a/handwritten/logging/samples/generated/v2/logging_service_v2.delete_log.js b/handwritten/logging/samples/generated/v2/logging_service_v2.delete_log.js
index 4bb09342ad39..cffc95d6c8bc 100644
--- a/handwritten/logging/samples/generated/v2/logging_service_v2.delete_log.js
+++ b/handwritten/logging/samples/generated/v2/logging_service_v2.delete_log.js
@@ -1,4 +1,4 @@
-// Copyright 2024 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -16,6 +16,8 @@
// ** https://github.com/googleapis/gapic-generator-typescript **
// ** All changes to this file may be overwritten. **
+
+
'use strict';
function main(logName) {
diff --git a/handwritten/logging/samples/generated/v2/logging_service_v2.list_log_entries.js b/handwritten/logging/samples/generated/v2/logging_service_v2.list_log_entries.js
index 1857cae39b7c..fc3ab769abd1 100644
--- a/handwritten/logging/samples/generated/v2/logging_service_v2.list_log_entries.js
+++ b/handwritten/logging/samples/generated/v2/logging_service_v2.list_log_entries.js
@@ -1,4 +1,4 @@
-// Copyright 2024 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -16,6 +16,8 @@
// ** https://github.com/googleapis/gapic-generator-typescript **
// ** All changes to this file may be overwritten. **
+
+
'use strict';
function main(resourceNames) {
@@ -89,7 +91,7 @@ function main(resourceNames) {
// Run request
const iterable = loggingClient.listLogEntriesAsync(request);
for await (const response of iterable) {
- console.log(response);
+ console.log(response);
}
}
diff --git a/handwritten/logging/samples/generated/v2/logging_service_v2.list_logs.js b/handwritten/logging/samples/generated/v2/logging_service_v2.list_logs.js
index 688d137a8de2..ea22c08fb25a 100644
--- a/handwritten/logging/samples/generated/v2/logging_service_v2.list_logs.js
+++ b/handwritten/logging/samples/generated/v2/logging_service_v2.list_logs.js
@@ -1,4 +1,4 @@
-// Copyright 2024 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -16,6 +16,8 @@
// ** https://github.com/googleapis/gapic-generator-typescript **
// ** All changes to this file may be overwritten. **
+
+
'use strict';
function main(parent) {
@@ -77,7 +79,7 @@ function main(parent) {
// Run request
const iterable = loggingClient.listLogsAsync(request);
for await (const response of iterable) {
- console.log(response);
+ console.log(response);
}
}
diff --git a/handwritten/logging/samples/generated/v2/logging_service_v2.list_monitored_resource_descriptors.js b/handwritten/logging/samples/generated/v2/logging_service_v2.list_monitored_resource_descriptors.js
index 60a67502e704..0d00fea56963 100644
--- a/handwritten/logging/samples/generated/v2/logging_service_v2.list_monitored_resource_descriptors.js
+++ b/handwritten/logging/samples/generated/v2/logging_service_v2.list_monitored_resource_descriptors.js
@@ -1,4 +1,4 @@
-// Copyright 2024 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -16,6 +16,8 @@
// ** https://github.com/googleapis/gapic-generator-typescript **
// ** All changes to this file may be overwritten. **
+
+
'use strict';
function main() {
@@ -48,13 +50,13 @@ function main() {
async function callListMonitoredResourceDescriptors() {
// Construct request
- const request = {};
+ const request = {
+ };
// Run request
- const iterable =
- loggingClient.listMonitoredResourceDescriptorsAsync(request);
+ const iterable = loggingClient.listMonitoredResourceDescriptorsAsync(request);
for await (const response of iterable) {
- console.log(response);
+ console.log(response);
}
}
diff --git a/handwritten/logging/samples/generated/v2/logging_service_v2.tail_log_entries.js b/handwritten/logging/samples/generated/v2/logging_service_v2.tail_log_entries.js
index c1a4e1c9e356..c45d65d04e1f 100644
--- a/handwritten/logging/samples/generated/v2/logging_service_v2.tail_log_entries.js
+++ b/handwritten/logging/samples/generated/v2/logging_service_v2.tail_log_entries.js
@@ -1,4 +1,4 @@
-// Copyright 2024 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -16,6 +16,8 @@
// ** https://github.com/googleapis/gapic-generator-typescript **
// ** All changes to this file may be overwritten. **
+
+
'use strict';
function main(resourceNames) {
@@ -69,15 +71,9 @@ function main(resourceNames) {
// Run request
const stream = await loggingClient.tailLogEntries();
- stream.on('data', response => {
- console.log(response);
- });
- stream.on('error', err => {
- throw err;
- });
- stream.on('end', () => {
- /* API call completed */
- });
+ stream.on('data', (response) => { console.log(response) });
+ stream.on('error', (err) => { throw(err) });
+ stream.on('end', () => { /* API call completed */ });
stream.write(request);
stream.end();
}
diff --git a/handwritten/logging/samples/generated/v2/logging_service_v2.write_log_entries.js b/handwritten/logging/samples/generated/v2/logging_service_v2.write_log_entries.js
index ec3cecba66ca..f6fc4e022546 100644
--- a/handwritten/logging/samples/generated/v2/logging_service_v2.write_log_entries.js
+++ b/handwritten/logging/samples/generated/v2/logging_service_v2.write_log_entries.js
@@ -1,4 +1,4 @@
-// Copyright 2024 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -16,6 +16,8 @@
// ** https://github.com/googleapis/gapic-generator-typescript **
// ** All changes to this file may be overwritten. **
+
+
'use strict';
function main(entries) {
diff --git a/handwritten/logging/samples/generated/v2/metrics_service_v2.create_log_metric.js b/handwritten/logging/samples/generated/v2/metrics_service_v2.create_log_metric.js
index 21144f293c18..8daaf940287e 100644
--- a/handwritten/logging/samples/generated/v2/metrics_service_v2.create_log_metric.js
+++ b/handwritten/logging/samples/generated/v2/metrics_service_v2.create_log_metric.js
@@ -1,4 +1,4 @@
-// Copyright 2024 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -16,6 +16,8 @@
// ** https://github.com/googleapis/gapic-generator-typescript **
// ** All changes to this file may be overwritten. **
+
+
'use strict';
function main(parent, metric) {
diff --git a/handwritten/logging/samples/generated/v2/metrics_service_v2.delete_log_metric.js b/handwritten/logging/samples/generated/v2/metrics_service_v2.delete_log_metric.js
index 162f8ab079d6..0b53ce24557c 100644
--- a/handwritten/logging/samples/generated/v2/metrics_service_v2.delete_log_metric.js
+++ b/handwritten/logging/samples/generated/v2/metrics_service_v2.delete_log_metric.js
@@ -1,4 +1,4 @@
-// Copyright 2024 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -16,6 +16,8 @@
// ** https://github.com/googleapis/gapic-generator-typescript **
// ** All changes to this file may be overwritten. **
+
+
'use strict';
function main(metricName) {
diff --git a/handwritten/logging/samples/generated/v2/metrics_service_v2.get_log_metric.js b/handwritten/logging/samples/generated/v2/metrics_service_v2.get_log_metric.js
index dde0f30f8f16..ec5d653d7b77 100644
--- a/handwritten/logging/samples/generated/v2/metrics_service_v2.get_log_metric.js
+++ b/handwritten/logging/samples/generated/v2/metrics_service_v2.get_log_metric.js
@@ -1,4 +1,4 @@
-// Copyright 2024 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -16,6 +16,8 @@
// ** https://github.com/googleapis/gapic-generator-typescript **
// ** All changes to this file may be overwritten. **
+
+
'use strict';
function main(metricName) {
diff --git a/handwritten/logging/samples/generated/v2/metrics_service_v2.list_log_metrics.js b/handwritten/logging/samples/generated/v2/metrics_service_v2.list_log_metrics.js
index 4ffff33a1bd8..84325b16285f 100644
--- a/handwritten/logging/samples/generated/v2/metrics_service_v2.list_log_metrics.js
+++ b/handwritten/logging/samples/generated/v2/metrics_service_v2.list_log_metrics.js
@@ -1,4 +1,4 @@
-// Copyright 2024 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -16,6 +16,8 @@
// ** https://github.com/googleapis/gapic-generator-typescript **
// ** All changes to this file may be overwritten. **
+
+
'use strict';
function main(parent) {
@@ -60,7 +62,7 @@ function main(parent) {
// Run request
const iterable = loggingClient.listLogMetricsAsync(request);
for await (const response of iterable) {
- console.log(response);
+ console.log(response);
}
}
diff --git a/handwritten/logging/samples/generated/v2/metrics_service_v2.update_log_metric.js b/handwritten/logging/samples/generated/v2/metrics_service_v2.update_log_metric.js
index f414710dacf1..22f8e91bf828 100644
--- a/handwritten/logging/samples/generated/v2/metrics_service_v2.update_log_metric.js
+++ b/handwritten/logging/samples/generated/v2/metrics_service_v2.update_log_metric.js
@@ -1,4 +1,4 @@
-// Copyright 2024 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -16,6 +16,8 @@
// ** https://github.com/googleapis/gapic-generator-typescript **
// ** All changes to this file may be overwritten. **
+
+
'use strict';
function main(metricName, metric) {
diff --git a/handwritten/logging/samples/generated/v2/snippet_metadata_google.logging.v2.json b/handwritten/logging/samples/generated/v2/snippet_metadata_google.logging.v2.json
index ca8724bf4fa4..c67987806adf 100644
--- a/handwritten/logging/samples/generated/v2/snippet_metadata_google.logging.v2.json
+++ b/handwritten/logging/samples/generated/v2/snippet_metadata_google.logging.v2.json
@@ -1,1963 +1,1963 @@
{
- "clientLibrary": {
- "name": "nodejs-logging",
- "version": "11.2.1",
- "language": "TYPESCRIPT",
- "apis": [
- {
- "id": "google.logging.v2",
- "version": "v2"
- }
- ]
- },
- "snippets": [
+ "clientLibrary": {
+ "name": "nodejs-logging",
+ "version": "0.1.0",
+ "language": "TYPESCRIPT",
+ "apis": [
+ {
+ "id": "google.logging.v2",
+ "version": "v2"
+ }
+ ]
+ },
+ "snippets": [
+ {
+ "regionTag": "logging_v2_generated_ConfigServiceV2_ListBuckets_async",
+ "title": "logging listBuckets Sample",
+ "origin": "API_DEFINITION",
+ "description": " Lists log buckets.",
+ "canonical": true,
+ "file": "config_service_v2.list_buckets.js",
+ "language": "JAVASCRIPT",
+ "segments": [
{
- "regionTag": "logging_v2_generated_ConfigServiceV2_ListBuckets_async",
- "title": "logging listBuckets Sample",
- "origin": "API_DEFINITION",
- "description": " Lists log buckets.",
- "canonical": true,
- "file": "config_service_v2.list_buckets.js",
- "language": "JAVASCRIPT",
- "segments": [
- {
- "start": 25,
- "end": 75,
- "type": "FULL"
- }
- ],
- "clientMethod": {
- "shortName": "ListBuckets",
- "fullName": "google.logging.v2.ConfigServiceV2.ListBuckets",
- "async": true,
- "parameters": [
- {
- "name": "parent",
- "type": "TYPE_STRING"
- },
- {
- "name": "page_token",
- "type": "TYPE_STRING"
- },
- {
- "name": "page_size",
- "type": "TYPE_INT32"
- }
- ],
- "resultType": ".google.logging.v2.ListBucketsResponse",
- "client": {
- "shortName": "ConfigServiceV2Client",
- "fullName": "google.logging.v2.ConfigServiceV2Client"
- },
- "method": {
- "shortName": "ListBuckets",
- "fullName": "google.logging.v2.ConfigServiceV2.ListBuckets",
- "service": {
- "shortName": "ConfigServiceV2",
- "fullName": "google.logging.v2.ConfigServiceV2"
- }
- }
- }
+ "start": 25,
+ "end": 75,
+ "type": "FULL"
+ }
+ ],
+ "clientMethod": {
+ "shortName": "ListBuckets",
+ "fullName": "google.logging.v2.ConfigServiceV2.ListBuckets",
+ "async": true,
+ "parameters": [
+ {
+ "name": "parent",
+ "type": "TYPE_STRING"
+ },
+ {
+ "name": "page_token",
+ "type": "TYPE_STRING"
+ },
+ {
+ "name": "page_size",
+ "type": "TYPE_INT32"
+ }
+ ],
+ "resultType": ".google.logging.v2.ListBucketsResponse",
+ "client": {
+ "shortName": "ConfigServiceV2Client",
+ "fullName": "google.logging.v2.ConfigServiceV2Client"
},
+ "method": {
+ "shortName": "ListBuckets",
+ "fullName": "google.logging.v2.ConfigServiceV2.ListBuckets",
+ "service": {
+ "shortName": "ConfigServiceV2",
+ "fullName": "google.logging.v2.ConfigServiceV2"
+ }
+ }
+ }
+ },
+ {
+ "regionTag": "logging_v2_generated_ConfigServiceV2_GetBucket_async",
+ "title": "logging getBucket Sample",
+ "origin": "API_DEFINITION",
+ "description": " Gets a log bucket.",
+ "canonical": true,
+ "file": "config_service_v2.get_bucket.js",
+ "language": "JAVASCRIPT",
+ "segments": [
{
- "regionTag": "logging_v2_generated_ConfigServiceV2_GetBucket_async",
- "title": "logging getBucket Sample",
- "origin": "API_DEFINITION",
- "description": " Gets a log bucket.",
- "canonical": true,
- "file": "config_service_v2.get_bucket.js",
- "language": "JAVASCRIPT",
- "segments": [
- {
- "start": 25,
- "end": 59,
- "type": "FULL"
- }
- ],
- "clientMethod": {
- "shortName": "GetBucket",
- "fullName": "google.logging.v2.ConfigServiceV2.GetBucket",
- "async": true,
- "parameters": [
- {
- "name": "name",
- "type": "TYPE_STRING"
- }
- ],
- "resultType": ".google.logging.v2.LogBucket",
- "client": {
- "shortName": "ConfigServiceV2Client",
- "fullName": "google.logging.v2.ConfigServiceV2Client"
- },
- "method": {
- "shortName": "GetBucket",
- "fullName": "google.logging.v2.ConfigServiceV2.GetBucket",
- "service": {
- "shortName": "ConfigServiceV2",
- "fullName": "google.logging.v2.ConfigServiceV2"
- }
- }
- }
+ "start": 25,
+ "end": 59,
+ "type": "FULL"
+ }
+ ],
+ "clientMethod": {
+ "shortName": "GetBucket",
+ "fullName": "google.logging.v2.ConfigServiceV2.GetBucket",
+ "async": true,
+ "parameters": [
+ {
+ "name": "name",
+ "type": "TYPE_STRING"
+ }
+ ],
+ "resultType": ".google.logging.v2.LogBucket",
+ "client": {
+ "shortName": "ConfigServiceV2Client",
+ "fullName": "google.logging.v2.ConfigServiceV2Client"
},
+ "method": {
+ "shortName": "GetBucket",
+ "fullName": "google.logging.v2.ConfigServiceV2.GetBucket",
+ "service": {
+ "shortName": "ConfigServiceV2",
+ "fullName": "google.logging.v2.ConfigServiceV2"
+ }
+ }
+ }
+ },
+ {
+ "regionTag": "logging_v2_generated_ConfigServiceV2_CreateBucketAsync_async",
+ "title": "logging createBucketAsync Sample",
+ "origin": "API_DEFINITION",
+ "description": " Creates a log bucket asynchronously that can be used to store log entries. After a bucket has been created, the bucket's location cannot be changed.",
+ "canonical": true,
+ "file": "config_service_v2.create_bucket_async.js",
+ "language": "JAVASCRIPT",
+ "segments": [
{
- "regionTag": "logging_v2_generated_ConfigServiceV2_CreateBucketAsync_async",
- "title": "logging createBucketAsync Sample",
- "origin": "API_DEFINITION",
- "description": " Creates a log bucket asynchronously that can be used to store log entries. After a bucket has been created, the bucket's location cannot be changed.",
- "canonical": true,
- "file": "config_service_v2.create_bucket_async.js",
- "language": "JAVASCRIPT",
- "segments": [
- {
- "start": 25,
- "end": 71,
- "type": "FULL"
- }
- ],
- "clientMethod": {
- "shortName": "CreateBucketAsync",
- "fullName": "google.logging.v2.ConfigServiceV2.CreateBucketAsync",
- "async": true,
- "parameters": [
- {
- "name": "parent",
- "type": "TYPE_STRING"
- },
- {
- "name": "bucket_id",
- "type": "TYPE_STRING"
- },
- {
- "name": "bucket",
- "type": ".google.logging.v2.LogBucket"
- }
- ],
- "resultType": ".google.longrunning.Operation",
- "client": {
- "shortName": "ConfigServiceV2Client",
- "fullName": "google.logging.v2.ConfigServiceV2Client"
- },
- "method": {
- "shortName": "CreateBucketAsync",
- "fullName": "google.logging.v2.ConfigServiceV2.CreateBucketAsync",
- "service": {
- "shortName": "ConfigServiceV2",
- "fullName": "google.logging.v2.ConfigServiceV2"
- }
- }
- }
+ "start": 25,
+ "end": 71,
+ "type": "FULL"
+ }
+ ],
+ "clientMethod": {
+ "shortName": "CreateBucketAsync",
+ "fullName": "google.logging.v2.ConfigServiceV2.CreateBucketAsync",
+ "async": true,
+ "parameters": [
+ {
+ "name": "parent",
+ "type": "TYPE_STRING"
+ },
+ {
+ "name": "bucket_id",
+ "type": "TYPE_STRING"
+ },
+ {
+ "name": "bucket",
+ "type": ".google.logging.v2.LogBucket"
+ }
+ ],
+ "resultType": ".google.longrunning.Operation",
+ "client": {
+ "shortName": "ConfigServiceV2Client",
+ "fullName": "google.logging.v2.ConfigServiceV2Client"
},
+ "method": {
+ "shortName": "CreateBucketAsync",
+ "fullName": "google.logging.v2.ConfigServiceV2.CreateBucketAsync",
+ "service": {
+ "shortName": "ConfigServiceV2",
+ "fullName": "google.logging.v2.ConfigServiceV2"
+ }
+ }
+ }
+ },
+ {
+ "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateBucketAsync_async",
+ "title": "logging updateBucketAsync Sample",
+ "origin": "API_DEFINITION",
+ "description": " Updates a log bucket asynchronously. If the bucket has a `lifecycle_state` of `DELETE_REQUESTED`, then `FAILED_PRECONDITION` will be returned. After a bucket has been created, the bucket's location cannot be changed.",
+ "canonical": true,
+ "file": "config_service_v2.update_bucket_async.js",
+ "language": "JAVASCRIPT",
+ "segments": [
{
- "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateBucketAsync_async",
- "title": "logging updateBucketAsync Sample",
- "origin": "API_DEFINITION",
- "description": " Updates a log bucket asynchronously. If the bucket has a `lifecycle_state` of `DELETE_REQUESTED`, then `FAILED_PRECONDITION` will be returned. After a bucket has been created, the bucket's location cannot be changed.",
- "canonical": true,
- "file": "config_service_v2.update_bucket_async.js",
- "language": "JAVASCRIPT",
- "segments": [
- {
- "start": 25,
- "end": 75,
- "type": "FULL"
- }
- ],
- "clientMethod": {
- "shortName": "UpdateBucketAsync",
- "fullName": "google.logging.v2.ConfigServiceV2.UpdateBucketAsync",
- "async": true,
- "parameters": [
- {
- "name": "name",
- "type": "TYPE_STRING"
- },
- {
- "name": "bucket",
- "type": ".google.logging.v2.LogBucket"
- },
- {
- "name": "update_mask",
- "type": ".google.protobuf.FieldMask"
- }
- ],
- "resultType": ".google.longrunning.Operation",
- "client": {
- "shortName": "ConfigServiceV2Client",
- "fullName": "google.logging.v2.ConfigServiceV2Client"
- },
- "method": {
- "shortName": "UpdateBucketAsync",
- "fullName": "google.logging.v2.ConfigServiceV2.UpdateBucketAsync",
- "service": {
- "shortName": "ConfigServiceV2",
- "fullName": "google.logging.v2.ConfigServiceV2"
- }
- }
- }
+ "start": 25,
+ "end": 75,
+ "type": "FULL"
+ }
+ ],
+ "clientMethod": {
+ "shortName": "UpdateBucketAsync",
+ "fullName": "google.logging.v2.ConfigServiceV2.UpdateBucketAsync",
+ "async": true,
+ "parameters": [
+ {
+ "name": "name",
+ "type": "TYPE_STRING"
+ },
+ {
+ "name": "bucket",
+ "type": ".google.logging.v2.LogBucket"
+ },
+ {
+ "name": "update_mask",
+ "type": ".google.protobuf.FieldMask"
+ }
+ ],
+ "resultType": ".google.longrunning.Operation",
+ "client": {
+ "shortName": "ConfigServiceV2Client",
+ "fullName": "google.logging.v2.ConfigServiceV2Client"
},
+ "method": {
+ "shortName": "UpdateBucketAsync",
+ "fullName": "google.logging.v2.ConfigServiceV2.UpdateBucketAsync",
+ "service": {
+ "shortName": "ConfigServiceV2",
+ "fullName": "google.logging.v2.ConfigServiceV2"
+ }
+ }
+ }
+ },
+ {
+ "regionTag": "logging_v2_generated_ConfigServiceV2_CreateBucket_async",
+ "title": "logging createBucket Sample",
+ "origin": "API_DEFINITION",
+ "description": " Creates a log bucket that can be used to store log entries. After a bucket has been created, the bucket's location cannot be changed.",
+ "canonical": true,
+ "file": "config_service_v2.create_bucket.js",
+ "language": "JAVASCRIPT",
+ "segments": [
{
- "regionTag": "logging_v2_generated_ConfigServiceV2_CreateBucket_async",
- "title": "logging createBucket Sample",
- "origin": "API_DEFINITION",
- "description": " Creates a log bucket that can be used to store log entries. After a bucket has been created, the bucket's location cannot be changed.",
- "canonical": true,
- "file": "config_service_v2.create_bucket.js",
- "language": "JAVASCRIPT",
- "segments": [
- {
- "start": 25,
- "end": 70,
- "type": "FULL"
- }
- ],
- "clientMethod": {
- "shortName": "CreateBucket",
- "fullName": "google.logging.v2.ConfigServiceV2.CreateBucket",
- "async": true,
- "parameters": [
- {
- "name": "parent",
- "type": "TYPE_STRING"
- },
- {
- "name": "bucket_id",
- "type": "TYPE_STRING"
- },
- {
- "name": "bucket",
- "type": ".google.logging.v2.LogBucket"
- }
- ],
- "resultType": ".google.logging.v2.LogBucket",
- "client": {
- "shortName": "ConfigServiceV2Client",
- "fullName": "google.logging.v2.ConfigServiceV2Client"
- },
- "method": {
- "shortName": "CreateBucket",
- "fullName": "google.logging.v2.ConfigServiceV2.CreateBucket",
- "service": {
- "shortName": "ConfigServiceV2",
- "fullName": "google.logging.v2.ConfigServiceV2"
- }
- }
- }
+ "start": 25,
+ "end": 70,
+ "type": "FULL"
+ }
+ ],
+ "clientMethod": {
+ "shortName": "CreateBucket",
+ "fullName": "google.logging.v2.ConfigServiceV2.CreateBucket",
+ "async": true,
+ "parameters": [
+ {
+ "name": "parent",
+ "type": "TYPE_STRING"
+ },
+ {
+ "name": "bucket_id",
+ "type": "TYPE_STRING"
+ },
+ {
+ "name": "bucket",
+ "type": ".google.logging.v2.LogBucket"
+ }
+ ],
+ "resultType": ".google.logging.v2.LogBucket",
+ "client": {
+ "shortName": "ConfigServiceV2Client",
+ "fullName": "google.logging.v2.ConfigServiceV2Client"
},
+ "method": {
+ "shortName": "CreateBucket",
+ "fullName": "google.logging.v2.ConfigServiceV2.CreateBucket",
+ "service": {
+ "shortName": "ConfigServiceV2",
+ "fullName": "google.logging.v2.ConfigServiceV2"
+ }
+ }
+ }
+ },
+ {
+ "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateBucket_async",
+ "title": "logging updateBucket Sample",
+ "origin": "API_DEFINITION",
+ "description": " Updates a log bucket. If the bucket has a `lifecycle_state` of `DELETE_REQUESTED`, then `FAILED_PRECONDITION` will be returned. After a bucket has been created, the bucket's location cannot be changed.",
+ "canonical": true,
+ "file": "config_service_v2.update_bucket.js",
+ "language": "JAVASCRIPT",
+ "segments": [
{
- "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateBucket_async",
- "title": "logging updateBucket Sample",
- "origin": "API_DEFINITION",
- "description": " Updates a log bucket. If the bucket has a `lifecycle_state` of `DELETE_REQUESTED`, then `FAILED_PRECONDITION` will be returned. After a bucket has been created, the bucket's location cannot be changed.",
- "canonical": true,
- "file": "config_service_v2.update_bucket.js",
- "language": "JAVASCRIPT",
- "segments": [
- {
- "start": 25,
- "end": 74,
- "type": "FULL"
- }
- ],
- "clientMethod": {
- "shortName": "UpdateBucket",
- "fullName": "google.logging.v2.ConfigServiceV2.UpdateBucket",
- "async": true,
- "parameters": [
- {
- "name": "name",
- "type": "TYPE_STRING"
- },
- {
- "name": "bucket",
- "type": ".google.logging.v2.LogBucket"
- },
- {
- "name": "update_mask",
- "type": ".google.protobuf.FieldMask"
- }
- ],
- "resultType": ".google.logging.v2.LogBucket",
- "client": {
- "shortName": "ConfigServiceV2Client",
- "fullName": "google.logging.v2.ConfigServiceV2Client"
- },
- "method": {
- "shortName": "UpdateBucket",
- "fullName": "google.logging.v2.ConfigServiceV2.UpdateBucket",
- "service": {
- "shortName": "ConfigServiceV2",
- "fullName": "google.logging.v2.ConfigServiceV2"
- }
- }
- }
+ "start": 25,
+ "end": 74,
+ "type": "FULL"
+ }
+ ],
+ "clientMethod": {
+ "shortName": "UpdateBucket",
+ "fullName": "google.logging.v2.ConfigServiceV2.UpdateBucket",
+ "async": true,
+ "parameters": [
+ {
+ "name": "name",
+ "type": "TYPE_STRING"
+ },
+ {
+ "name": "bucket",
+ "type": ".google.logging.v2.LogBucket"
+ },
+ {
+ "name": "update_mask",
+ "type": ".google.protobuf.FieldMask"
+ }
+ ],
+ "resultType": ".google.logging.v2.LogBucket",
+ "client": {
+ "shortName": "ConfigServiceV2Client",
+ "fullName": "google.logging.v2.ConfigServiceV2Client"
},
+ "method": {
+ "shortName": "UpdateBucket",
+ "fullName": "google.logging.v2.ConfigServiceV2.UpdateBucket",
+ "service": {
+ "shortName": "ConfigServiceV2",
+ "fullName": "google.logging.v2.ConfigServiceV2"
+ }
+ }
+ }
+ },
+ {
+ "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteBucket_async",
+ "title": "logging deleteBucket Sample",
+ "origin": "API_DEFINITION",
+ "description": " Deletes a log bucket. Changes the bucket's `lifecycle_state` to the `DELETE_REQUESTED` state. After 7 days, the bucket will be purged and all log entries in the bucket will be permanently deleted.",
+ "canonical": true,
+ "file": "config_service_v2.delete_bucket.js",
+ "language": "JAVASCRIPT",
+ "segments": [
{
- "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteBucket_async",
- "title": "logging deleteBucket Sample",
- "origin": "API_DEFINITION",
- "description": " Deletes a log bucket. Changes the bucket's `lifecycle_state` to the `DELETE_REQUESTED` state. After 7 days, the bucket will be purged and all log entries in the bucket will be permanently deleted.",
- "canonical": true,
- "file": "config_service_v2.delete_bucket.js",
- "language": "JAVASCRIPT",
- "segments": [
- {
- "start": 25,
- "end": 59,
- "type": "FULL"
- }
- ],
- "clientMethod": {
- "shortName": "DeleteBucket",
- "fullName": "google.logging.v2.ConfigServiceV2.DeleteBucket",
- "async": true,
- "parameters": [
- {
- "name": "name",
- "type": "TYPE_STRING"
- }
- ],
- "resultType": ".google.protobuf.Empty",
- "client": {
- "shortName": "ConfigServiceV2Client",
- "fullName": "google.logging.v2.ConfigServiceV2Client"
- },
- "method": {
- "shortName": "DeleteBucket",
- "fullName": "google.logging.v2.ConfigServiceV2.DeleteBucket",
- "service": {
- "shortName": "ConfigServiceV2",
- "fullName": "google.logging.v2.ConfigServiceV2"
- }
- }
- }
+ "start": 25,
+ "end": 59,
+ "type": "FULL"
+ }
+ ],
+ "clientMethod": {
+ "shortName": "DeleteBucket",
+ "fullName": "google.logging.v2.ConfigServiceV2.DeleteBucket",
+ "async": true,
+ "parameters": [
+ {
+ "name": "name",
+ "type": "TYPE_STRING"
+ }
+ ],
+ "resultType": ".google.protobuf.Empty",
+ "client": {
+ "shortName": "ConfigServiceV2Client",
+ "fullName": "google.logging.v2.ConfigServiceV2Client"
},
+ "method": {
+ "shortName": "DeleteBucket",
+ "fullName": "google.logging.v2.ConfigServiceV2.DeleteBucket",
+ "service": {
+ "shortName": "ConfigServiceV2",
+ "fullName": "google.logging.v2.ConfigServiceV2"
+ }
+ }
+ }
+ },
+ {
+ "regionTag": "logging_v2_generated_ConfigServiceV2_UndeleteBucket_async",
+ "title": "logging undeleteBucket Sample",
+ "origin": "API_DEFINITION",
+ "description": " Undeletes a log bucket. A bucket that has been deleted can be undeleted within the grace period of 7 days.",
+ "canonical": true,
+ "file": "config_service_v2.undelete_bucket.js",
+ "language": "JAVASCRIPT",
+ "segments": [
{
- "regionTag": "logging_v2_generated_ConfigServiceV2_UndeleteBucket_async",
- "title": "logging undeleteBucket Sample",
- "origin": "API_DEFINITION",
- "description": " Undeletes a log bucket. A bucket that has been deleted can be undeleted within the grace period of 7 days.",
- "canonical": true,
- "file": "config_service_v2.undelete_bucket.js",
- "language": "JAVASCRIPT",
- "segments": [
- {
- "start": 25,
- "end": 59,
- "type": "FULL"
- }
- ],
- "clientMethod": {
- "shortName": "UndeleteBucket",
- "fullName": "google.logging.v2.ConfigServiceV2.UndeleteBucket",
- "async": true,
- "parameters": [
- {
- "name": "name",
- "type": "TYPE_STRING"
- }
- ],
- "resultType": ".google.protobuf.Empty",
- "client": {
- "shortName": "ConfigServiceV2Client",
- "fullName": "google.logging.v2.ConfigServiceV2Client"
- },
- "method": {
- "shortName": "UndeleteBucket",
- "fullName": "google.logging.v2.ConfigServiceV2.UndeleteBucket",
- "service": {
- "shortName": "ConfigServiceV2",
- "fullName": "google.logging.v2.ConfigServiceV2"
- }
- }
- }
+ "start": 25,
+ "end": 59,
+ "type": "FULL"
+ }
+ ],
+ "clientMethod": {
+ "shortName": "UndeleteBucket",
+ "fullName": "google.logging.v2.ConfigServiceV2.UndeleteBucket",
+ "async": true,
+ "parameters": [
+ {
+ "name": "name",
+ "type": "TYPE_STRING"
+ }
+ ],
+ "resultType": ".google.protobuf.Empty",
+ "client": {
+ "shortName": "ConfigServiceV2Client",
+ "fullName": "google.logging.v2.ConfigServiceV2Client"
},
+ "method": {
+ "shortName": "UndeleteBucket",
+ "fullName": "google.logging.v2.ConfigServiceV2.UndeleteBucket",
+ "service": {
+ "shortName": "ConfigServiceV2",
+ "fullName": "google.logging.v2.ConfigServiceV2"
+ }
+ }
+ }
+ },
+ {
+ "regionTag": "logging_v2_generated_ConfigServiceV2_ListViews_async",
+ "title": "logging listViews Sample",
+ "origin": "API_DEFINITION",
+ "description": " Lists views on a log bucket.",
+ "canonical": true,
+ "file": "config_service_v2.list_views.js",
+ "language": "JAVASCRIPT",
+ "segments": [
{
- "regionTag": "logging_v2_generated_ConfigServiceV2_ListViews_async",
- "title": "logging listViews Sample",
- "origin": "API_DEFINITION",
- "description": " Lists views on a log bucket.",
- "canonical": true,
- "file": "config_service_v2.list_views.js",
- "language": "JAVASCRIPT",
- "segments": [
- {
- "start": 25,
- "end": 69,
- "type": "FULL"
- }
- ],
- "clientMethod": {
- "shortName": "ListViews",
- "fullName": "google.logging.v2.ConfigServiceV2.ListViews",
- "async": true,
- "parameters": [
- {
- "name": "parent",
- "type": "TYPE_STRING"
- },
- {
- "name": "page_token",
- "type": "TYPE_STRING"
- },
- {
- "name": "page_size",
- "type": "TYPE_INT32"
- }
- ],
- "resultType": ".google.logging.v2.ListViewsResponse",
- "client": {
- "shortName": "ConfigServiceV2Client",
- "fullName": "google.logging.v2.ConfigServiceV2Client"
- },
- "method": {
- "shortName": "ListViews",
- "fullName": "google.logging.v2.ConfigServiceV2.ListViews",
- "service": {
- "shortName": "ConfigServiceV2",
- "fullName": "google.logging.v2.ConfigServiceV2"
- }
- }
- }
+ "start": 25,
+ "end": 69,
+ "type": "FULL"
+ }
+ ],
+ "clientMethod": {
+ "shortName": "ListViews",
+ "fullName": "google.logging.v2.ConfigServiceV2.ListViews",
+ "async": true,
+ "parameters": [
+ {
+ "name": "parent",
+ "type": "TYPE_STRING"
+ },
+ {
+ "name": "page_token",
+ "type": "TYPE_STRING"
+ },
+ {
+ "name": "page_size",
+ "type": "TYPE_INT32"
+ }
+ ],
+ "resultType": ".google.logging.v2.ListViewsResponse",
+ "client": {
+ "shortName": "ConfigServiceV2Client",
+ "fullName": "google.logging.v2.ConfigServiceV2Client"
},
+ "method": {
+ "shortName": "ListViews",
+ "fullName": "google.logging.v2.ConfigServiceV2.ListViews",
+ "service": {
+ "shortName": "ConfigServiceV2",
+ "fullName": "google.logging.v2.ConfigServiceV2"
+ }
+ }
+ }
+ },
+ {
+ "regionTag": "logging_v2_generated_ConfigServiceV2_GetView_async",
+ "title": "logging getView Sample",
+ "origin": "API_DEFINITION",
+    "description": " Gets a view on a log bucket.",
+ "canonical": true,
+ "file": "config_service_v2.get_view.js",
+ "language": "JAVASCRIPT",
+ "segments": [
{
- "regionTag": "logging_v2_generated_ConfigServiceV2_GetView_async",
- "title": "logging getView Sample",
- "origin": "API_DEFINITION",
- "description": " Gets a view on a log bucket..",
- "canonical": true,
- "file": "config_service_v2.get_view.js",
- "language": "JAVASCRIPT",
- "segments": [
- {
- "start": 25,
- "end": 56,
- "type": "FULL"
- }
- ],
- "clientMethod": {
- "shortName": "GetView",
- "fullName": "google.logging.v2.ConfigServiceV2.GetView",
- "async": true,
- "parameters": [
- {
- "name": "name",
- "type": "TYPE_STRING"
- }
- ],
- "resultType": ".google.logging.v2.LogView",
- "client": {
- "shortName": "ConfigServiceV2Client",
- "fullName": "google.logging.v2.ConfigServiceV2Client"
- },
- "method": {
- "shortName": "GetView",
- "fullName": "google.logging.v2.ConfigServiceV2.GetView",
- "service": {
- "shortName": "ConfigServiceV2",
- "fullName": "google.logging.v2.ConfigServiceV2"
- }
- }
- }
+ "start": 25,
+ "end": 56,
+ "type": "FULL"
+ }
+ ],
+ "clientMethod": {
+ "shortName": "GetView",
+ "fullName": "google.logging.v2.ConfigServiceV2.GetView",
+ "async": true,
+ "parameters": [
+ {
+ "name": "name",
+ "type": "TYPE_STRING"
+ }
+ ],
+ "resultType": ".google.logging.v2.LogView",
+ "client": {
+ "shortName": "ConfigServiceV2Client",
+ "fullName": "google.logging.v2.ConfigServiceV2Client"
},
+ "method": {
+ "shortName": "GetView",
+ "fullName": "google.logging.v2.ConfigServiceV2.GetView",
+ "service": {
+ "shortName": "ConfigServiceV2",
+ "fullName": "google.logging.v2.ConfigServiceV2"
+ }
+ }
+ }
+ },
+ {
+ "regionTag": "logging_v2_generated_ConfigServiceV2_CreateView_async",
+ "title": "logging createView Sample",
+ "origin": "API_DEFINITION",
+ "description": " Creates a view over log entries in a log bucket. A bucket may contain a maximum of 30 views.",
+ "canonical": true,
+ "file": "config_service_v2.create_view.js",
+ "language": "JAVASCRIPT",
+ "segments": [
{
- "regionTag": "logging_v2_generated_ConfigServiceV2_CreateView_async",
- "title": "logging createView Sample",
- "origin": "API_DEFINITION",
- "description": " Creates a view over log entries in a log bucket. A bucket may contain a maximum of 30 views.",
- "canonical": true,
- "file": "config_service_v2.create_view.js",
- "language": "JAVASCRIPT",
- "segments": [
- {
- "start": 25,
- "end": 68,
- "type": "FULL"
- }
- ],
- "clientMethod": {
- "shortName": "CreateView",
- "fullName": "google.logging.v2.ConfigServiceV2.CreateView",
- "async": true,
- "parameters": [
- {
- "name": "parent",
- "type": "TYPE_STRING"
- },
- {
- "name": "view_id",
- "type": "TYPE_STRING"
- },
- {
- "name": "view",
- "type": ".google.logging.v2.LogView"
- }
- ],
- "resultType": ".google.logging.v2.LogView",
- "client": {
- "shortName": "ConfigServiceV2Client",
- "fullName": "google.logging.v2.ConfigServiceV2Client"
- },
- "method": {
- "shortName": "CreateView",
- "fullName": "google.logging.v2.ConfigServiceV2.CreateView",
- "service": {
- "shortName": "ConfigServiceV2",
- "fullName": "google.logging.v2.ConfigServiceV2"
- }
- }
- }
+ "start": 25,
+ "end": 68,
+ "type": "FULL"
+ }
+ ],
+ "clientMethod": {
+ "shortName": "CreateView",
+ "fullName": "google.logging.v2.ConfigServiceV2.CreateView",
+ "async": true,
+ "parameters": [
+ {
+ "name": "parent",
+ "type": "TYPE_STRING"
+ },
+ {
+ "name": "view_id",
+ "type": "TYPE_STRING"
+ },
+ {
+ "name": "view",
+ "type": ".google.logging.v2.LogView"
+ }
+ ],
+ "resultType": ".google.logging.v2.LogView",
+ "client": {
+ "shortName": "ConfigServiceV2Client",
+ "fullName": "google.logging.v2.ConfigServiceV2Client"
},
+ "method": {
+ "shortName": "CreateView",
+ "fullName": "google.logging.v2.ConfigServiceV2.CreateView",
+ "service": {
+ "shortName": "ConfigServiceV2",
+ "fullName": "google.logging.v2.ConfigServiceV2"
+ }
+ }
+ }
+ },
+ {
+ "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateView_async",
+ "title": "logging updateView Sample",
+ "origin": "API_DEFINITION",
+ "description": " Updates a view on a log bucket. This method replaces the following fields in the existing view with values from the new view: `filter`. If an `UNAVAILABLE` error is returned, this indicates that system is not in a state where it can update the view. If this occurs, please try again in a few minutes.",
+ "canonical": true,
+ "file": "config_service_v2.update_view.js",
+ "language": "JAVASCRIPT",
+ "segments": [
{
- "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateView_async",
- "title": "logging updateView Sample",
- "origin": "API_DEFINITION",
- "description": " Updates a view on a log bucket. This method replaces the following fields in the existing view with values from the new view: `filter`. If an `UNAVAILABLE` error is returned, this indicates that system is not in a state where it can update the view. If this occurs, please try again in a few minutes.",
- "canonical": true,
- "file": "config_service_v2.update_view.js",
- "language": "JAVASCRIPT",
- "segments": [
- {
- "start": 25,
- "end": 70,
- "type": "FULL"
- }
- ],
- "clientMethod": {
- "shortName": "UpdateView",
- "fullName": "google.logging.v2.ConfigServiceV2.UpdateView",
- "async": true,
- "parameters": [
- {
- "name": "name",
- "type": "TYPE_STRING"
- },
- {
- "name": "view",
- "type": ".google.logging.v2.LogView"
- },
- {
- "name": "update_mask",
- "type": ".google.protobuf.FieldMask"
- }
- ],
- "resultType": ".google.logging.v2.LogView",
- "client": {
- "shortName": "ConfigServiceV2Client",
- "fullName": "google.logging.v2.ConfigServiceV2Client"
- },
- "method": {
- "shortName": "UpdateView",
- "fullName": "google.logging.v2.ConfigServiceV2.UpdateView",
- "service": {
- "shortName": "ConfigServiceV2",
- "fullName": "google.logging.v2.ConfigServiceV2"
- }
- }
- }
+ "start": 25,
+ "end": 70,
+ "type": "FULL"
+ }
+ ],
+ "clientMethod": {
+ "shortName": "UpdateView",
+ "fullName": "google.logging.v2.ConfigServiceV2.UpdateView",
+ "async": true,
+ "parameters": [
+ {
+ "name": "name",
+ "type": "TYPE_STRING"
+ },
+ {
+ "name": "view",
+ "type": ".google.logging.v2.LogView"
+ },
+ {
+ "name": "update_mask",
+ "type": ".google.protobuf.FieldMask"
+ }
+ ],
+ "resultType": ".google.logging.v2.LogView",
+ "client": {
+ "shortName": "ConfigServiceV2Client",
+ "fullName": "google.logging.v2.ConfigServiceV2Client"
},
+ "method": {
+ "shortName": "UpdateView",
+ "fullName": "google.logging.v2.ConfigServiceV2.UpdateView",
+ "service": {
+ "shortName": "ConfigServiceV2",
+ "fullName": "google.logging.v2.ConfigServiceV2"
+ }
+ }
+ }
+ },
+ {
+ "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteView_async",
+ "title": "logging deleteView Sample",
+ "origin": "API_DEFINITION",
+ "description": " Deletes a view on a log bucket. If an `UNAVAILABLE` error is returned, this indicates that system is not in a state where it can delete the view. If this occurs, please try again in a few minutes.",
+ "canonical": true,
+ "file": "config_service_v2.delete_view.js",
+ "language": "JAVASCRIPT",
+ "segments": [
{
- "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteView_async",
- "title": "logging deleteView Sample",
- "origin": "API_DEFINITION",
- "description": " Deletes a view on a log bucket. If an `UNAVAILABLE` error is returned, this indicates that system is not in a state where it can delete the view. If this occurs, please try again in a few minutes.",
- "canonical": true,
- "file": "config_service_v2.delete_view.js",
- "language": "JAVASCRIPT",
- "segments": [
- {
- "start": 25,
- "end": 56,
- "type": "FULL"
- }
- ],
- "clientMethod": {
- "shortName": "DeleteView",
- "fullName": "google.logging.v2.ConfigServiceV2.DeleteView",
- "async": true,
- "parameters": [
- {
- "name": "name",
- "type": "TYPE_STRING"
- }
- ],
- "resultType": ".google.protobuf.Empty",
- "client": {
- "shortName": "ConfigServiceV2Client",
- "fullName": "google.logging.v2.ConfigServiceV2Client"
- },
- "method": {
- "shortName": "DeleteView",
- "fullName": "google.logging.v2.ConfigServiceV2.DeleteView",
- "service": {
- "shortName": "ConfigServiceV2",
- "fullName": "google.logging.v2.ConfigServiceV2"
- }
- }
- }
+ "start": 25,
+ "end": 56,
+ "type": "FULL"
+ }
+ ],
+ "clientMethod": {
+ "shortName": "DeleteView",
+ "fullName": "google.logging.v2.ConfigServiceV2.DeleteView",
+ "async": true,
+ "parameters": [
+ {
+ "name": "name",
+ "type": "TYPE_STRING"
+ }
+ ],
+ "resultType": ".google.protobuf.Empty",
+ "client": {
+ "shortName": "ConfigServiceV2Client",
+ "fullName": "google.logging.v2.ConfigServiceV2Client"
},
+ "method": {
+ "shortName": "DeleteView",
+ "fullName": "google.logging.v2.ConfigServiceV2.DeleteView",
+ "service": {
+ "shortName": "ConfigServiceV2",
+ "fullName": "google.logging.v2.ConfigServiceV2"
+ }
+ }
+ }
+ },
+ {
+ "regionTag": "logging_v2_generated_ConfigServiceV2_ListSinks_async",
+ "title": "logging listSinks Sample",
+ "origin": "API_DEFINITION",
+ "description": " Lists sinks.",
+ "canonical": true,
+ "file": "config_service_v2.list_sinks.js",
+ "language": "JAVASCRIPT",
+ "segments": [
{
- "regionTag": "logging_v2_generated_ConfigServiceV2_ListSinks_async",
- "title": "logging listSinks Sample",
- "origin": "API_DEFINITION",
- "description": " Lists sinks.",
- "canonical": true,
- "file": "config_service_v2.list_sinks.js",
- "language": "JAVASCRIPT",
- "segments": [
- {
- "start": 25,
- "end": 72,
- "type": "FULL"
- }
- ],
- "clientMethod": {
- "shortName": "ListSinks",
- "fullName": "google.logging.v2.ConfigServiceV2.ListSinks",
- "async": true,
- "parameters": [
- {
- "name": "parent",
- "type": "TYPE_STRING"
- },
- {
- "name": "page_token",
- "type": "TYPE_STRING"
- },
- {
- "name": "page_size",
- "type": "TYPE_INT32"
- }
- ],
- "resultType": ".google.logging.v2.ListSinksResponse",
- "client": {
- "shortName": "ConfigServiceV2Client",
- "fullName": "google.logging.v2.ConfigServiceV2Client"
- },
- "method": {
- "shortName": "ListSinks",
- "fullName": "google.logging.v2.ConfigServiceV2.ListSinks",
- "service": {
- "shortName": "ConfigServiceV2",
- "fullName": "google.logging.v2.ConfigServiceV2"
- }
- }
- }
+ "start": 25,
+ "end": 72,
+ "type": "FULL"
+ }
+ ],
+ "clientMethod": {
+ "shortName": "ListSinks",
+ "fullName": "google.logging.v2.ConfigServiceV2.ListSinks",
+ "async": true,
+ "parameters": [
+ {
+ "name": "parent",
+ "type": "TYPE_STRING"
+ },
+ {
+ "name": "page_token",
+ "type": "TYPE_STRING"
+ },
+ {
+ "name": "page_size",
+ "type": "TYPE_INT32"
+ }
+ ],
+ "resultType": ".google.logging.v2.ListSinksResponse",
+ "client": {
+ "shortName": "ConfigServiceV2Client",
+ "fullName": "google.logging.v2.ConfigServiceV2Client"
},
+ "method": {
+ "shortName": "ListSinks",
+ "fullName": "google.logging.v2.ConfigServiceV2.ListSinks",
+ "service": {
+ "shortName": "ConfigServiceV2",
+ "fullName": "google.logging.v2.ConfigServiceV2"
+ }
+ }
+ }
+ },
+ {
+ "regionTag": "logging_v2_generated_ConfigServiceV2_GetSink_async",
+ "title": "logging getSink Sample",
+ "origin": "API_DEFINITION",
+ "description": " Gets a sink.",
+ "canonical": true,
+ "file": "config_service_v2.get_sink.js",
+ "language": "JAVASCRIPT",
+ "segments": [
{
- "regionTag": "logging_v2_generated_ConfigServiceV2_GetSink_async",
- "title": "logging getSink Sample",
- "origin": "API_DEFINITION",
- "description": " Gets a sink.",
- "canonical": true,
- "file": "config_service_v2.get_sink.js",
- "language": "JAVASCRIPT",
- "segments": [
- {
- "start": 25,
- "end": 59,
- "type": "FULL"
- }
- ],
- "clientMethod": {
- "shortName": "GetSink",
- "fullName": "google.logging.v2.ConfigServiceV2.GetSink",
- "async": true,
- "parameters": [
- {
- "name": "sink_name",
- "type": "TYPE_STRING"
- }
- ],
- "resultType": ".google.logging.v2.LogSink",
- "client": {
- "shortName": "ConfigServiceV2Client",
- "fullName": "google.logging.v2.ConfigServiceV2Client"
- },
- "method": {
- "shortName": "GetSink",
- "fullName": "google.logging.v2.ConfigServiceV2.GetSink",
- "service": {
- "shortName": "ConfigServiceV2",
- "fullName": "google.logging.v2.ConfigServiceV2"
- }
- }
- }
+ "start": 25,
+ "end": 59,
+ "type": "FULL"
+ }
+ ],
+ "clientMethod": {
+ "shortName": "GetSink",
+ "fullName": "google.logging.v2.ConfigServiceV2.GetSink",
+ "async": true,
+ "parameters": [
+ {
+ "name": "sink_name",
+ "type": "TYPE_STRING"
+ }
+ ],
+ "resultType": ".google.logging.v2.LogSink",
+ "client": {
+ "shortName": "ConfigServiceV2Client",
+ "fullName": "google.logging.v2.ConfigServiceV2Client"
},
+ "method": {
+ "shortName": "GetSink",
+ "fullName": "google.logging.v2.ConfigServiceV2.GetSink",
+ "service": {
+ "shortName": "ConfigServiceV2",
+ "fullName": "google.logging.v2.ConfigServiceV2"
+ }
+ }
+ }
+ },
+ {
+ "regionTag": "logging_v2_generated_ConfigServiceV2_CreateSink_async",
+ "title": "logging createSink Sample",
+ "origin": "API_DEFINITION",
+ "description": " Creates a sink that exports specified log entries to a destination. The export of newly-ingested log entries begins immediately, unless the sink's `writer_identity` is not permitted to write to the destination. A sink can export log entries only from the resource owning the sink.",
+ "canonical": true,
+ "file": "config_service_v2.create_sink.js",
+ "language": "JAVASCRIPT",
+ "segments": [
{
- "regionTag": "logging_v2_generated_ConfigServiceV2_CreateSink_async",
- "title": "logging createSink Sample",
- "origin": "API_DEFINITION",
- "description": " Creates a sink that exports specified log entries to a destination. The export of newly-ingested log entries begins immediately, unless the sink's `writer_identity` is not permitted to write to the destination. A sink can export log entries only from the resource owning the sink.",
- "canonical": true,
- "file": "config_service_v2.create_sink.js",
- "language": "JAVASCRIPT",
- "segments": [
- {
- "start": 25,
- "end": 80,
- "type": "FULL"
- }
- ],
- "clientMethod": {
- "shortName": "CreateSink",
- "fullName": "google.logging.v2.ConfigServiceV2.CreateSink",
- "async": true,
- "parameters": [
- {
- "name": "parent",
- "type": "TYPE_STRING"
- },
- {
- "name": "sink",
- "type": ".google.logging.v2.LogSink"
- },
- {
- "name": "unique_writer_identity",
- "type": "TYPE_BOOL"
- }
- ],
- "resultType": ".google.logging.v2.LogSink",
- "client": {
- "shortName": "ConfigServiceV2Client",
- "fullName": "google.logging.v2.ConfigServiceV2Client"
- },
- "method": {
- "shortName": "CreateSink",
- "fullName": "google.logging.v2.ConfigServiceV2.CreateSink",
- "service": {
- "shortName": "ConfigServiceV2",
- "fullName": "google.logging.v2.ConfigServiceV2"
- }
- }
- }
+ "start": 25,
+ "end": 80,
+ "type": "FULL"
+ }
+ ],
+ "clientMethod": {
+ "shortName": "CreateSink",
+ "fullName": "google.logging.v2.ConfigServiceV2.CreateSink",
+ "async": true,
+ "parameters": [
+ {
+ "name": "parent",
+ "type": "TYPE_STRING"
+ },
+ {
+ "name": "sink",
+ "type": ".google.logging.v2.LogSink"
+ },
+ {
+ "name": "unique_writer_identity",
+ "type": "TYPE_BOOL"
+ }
+ ],
+ "resultType": ".google.logging.v2.LogSink",
+ "client": {
+ "shortName": "ConfigServiceV2Client",
+ "fullName": "google.logging.v2.ConfigServiceV2Client"
},
+ "method": {
+ "shortName": "CreateSink",
+ "fullName": "google.logging.v2.ConfigServiceV2.CreateSink",
+ "service": {
+ "shortName": "ConfigServiceV2",
+ "fullName": "google.logging.v2.ConfigServiceV2"
+ }
+ }
+ }
+ },
+ {
+ "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateSink_async",
+ "title": "logging updateSink Sample",
+ "origin": "API_DEFINITION",
+ "description": " Updates a sink. This method replaces the following fields in the existing sink with values from the new sink: `destination`, and `filter`. The updated sink might also have a new `writer_identity`; see the `unique_writer_identity` field.",
+ "canonical": true,
+ "file": "config_service_v2.update_sink.js",
+ "language": "JAVASCRIPT",
+ "segments": [
{
- "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateSink_async",
- "title": "logging updateSink Sample",
- "origin": "API_DEFINITION",
- "description": " Updates a sink. This method replaces the following fields in the existing sink with values from the new sink: `destination`, and `filter`. The updated sink might also have a new `writer_identity`; see the `unique_writer_identity` field.",
- "canonical": true,
- "file": "config_service_v2.update_sink.js",
- "language": "JAVASCRIPT",
- "segments": [
- {
- "start": 25,
- "end": 93,
- "type": "FULL"
- }
- ],
- "clientMethod": {
- "shortName": "UpdateSink",
- "fullName": "google.logging.v2.ConfigServiceV2.UpdateSink",
- "async": true,
- "parameters": [
- {
- "name": "sink_name",
- "type": "TYPE_STRING"
- },
- {
- "name": "sink",
- "type": ".google.logging.v2.LogSink"
- },
- {
- "name": "unique_writer_identity",
- "type": "TYPE_BOOL"
- },
- {
- "name": "update_mask",
- "type": ".google.protobuf.FieldMask"
- }
- ],
- "resultType": ".google.logging.v2.LogSink",
- "client": {
- "shortName": "ConfigServiceV2Client",
- "fullName": "google.logging.v2.ConfigServiceV2Client"
- },
- "method": {
- "shortName": "UpdateSink",
- "fullName": "google.logging.v2.ConfigServiceV2.UpdateSink",
- "service": {
- "shortName": "ConfigServiceV2",
- "fullName": "google.logging.v2.ConfigServiceV2"
- }
- }
- }
+ "start": 25,
+ "end": 93,
+ "type": "FULL"
+ }
+ ],
+ "clientMethod": {
+ "shortName": "UpdateSink",
+ "fullName": "google.logging.v2.ConfigServiceV2.UpdateSink",
+ "async": true,
+ "parameters": [
+ {
+ "name": "sink_name",
+ "type": "TYPE_STRING"
+ },
+ {
+ "name": "sink",
+ "type": ".google.logging.v2.LogSink"
+ },
+ {
+ "name": "unique_writer_identity",
+ "type": "TYPE_BOOL"
+ },
+ {
+ "name": "update_mask",
+ "type": ".google.protobuf.FieldMask"
+ }
+ ],
+ "resultType": ".google.logging.v2.LogSink",
+ "client": {
+ "shortName": "ConfigServiceV2Client",
+ "fullName": "google.logging.v2.ConfigServiceV2Client"
},
+ "method": {
+ "shortName": "UpdateSink",
+ "fullName": "google.logging.v2.ConfigServiceV2.UpdateSink",
+ "service": {
+ "shortName": "ConfigServiceV2",
+ "fullName": "google.logging.v2.ConfigServiceV2"
+ }
+ }
+ }
+ },
+ {
+ "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteSink_async",
+ "title": "logging deleteSink Sample",
+ "origin": "API_DEFINITION",
+ "description": " Deletes a sink. If the sink has a unique `writer_identity`, then that service account is also deleted.",
+ "canonical": true,
+ "file": "config_service_v2.delete_sink.js",
+ "language": "JAVASCRIPT",
+ "segments": [
{
- "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteSink_async",
- "title": "logging deleteSink Sample",
- "origin": "API_DEFINITION",
- "description": " Deletes a sink. If the sink has a unique `writer_identity`, then that service account is also deleted.",
- "canonical": true,
- "file": "config_service_v2.delete_sink.js",
- "language": "JAVASCRIPT",
- "segments": [
- {
- "start": 25,
- "end": 60,
- "type": "FULL"
- }
- ],
- "clientMethod": {
- "shortName": "DeleteSink",
- "fullName": "google.logging.v2.ConfigServiceV2.DeleteSink",
- "async": true,
- "parameters": [
- {
- "name": "sink_name",
- "type": "TYPE_STRING"
- }
- ],
- "resultType": ".google.protobuf.Empty",
- "client": {
- "shortName": "ConfigServiceV2Client",
- "fullName": "google.logging.v2.ConfigServiceV2Client"
- },
- "method": {
- "shortName": "DeleteSink",
- "fullName": "google.logging.v2.ConfigServiceV2.DeleteSink",
- "service": {
- "shortName": "ConfigServiceV2",
- "fullName": "google.logging.v2.ConfigServiceV2"
- }
- }
- }
+ "start": 25,
+ "end": 60,
+ "type": "FULL"
+ }
+ ],
+ "clientMethod": {
+ "shortName": "DeleteSink",
+ "fullName": "google.logging.v2.ConfigServiceV2.DeleteSink",
+ "async": true,
+ "parameters": [
+ {
+ "name": "sink_name",
+ "type": "TYPE_STRING"
+ }
+ ],
+ "resultType": ".google.protobuf.Empty",
+ "client": {
+ "shortName": "ConfigServiceV2Client",
+ "fullName": "google.logging.v2.ConfigServiceV2Client"
},
+ "method": {
+ "shortName": "DeleteSink",
+ "fullName": "google.logging.v2.ConfigServiceV2.DeleteSink",
+ "service": {
+ "shortName": "ConfigServiceV2",
+ "fullName": "google.logging.v2.ConfigServiceV2"
+ }
+ }
+ }
+ },
+ {
+ "regionTag": "logging_v2_generated_ConfigServiceV2_CreateLink_async",
+ "title": "logging createLink Sample",
+ "origin": "API_DEFINITION",
+ "description": " Asynchronously creates a linked dataset in BigQuery which makes it possible to use BigQuery to read the logs stored in the log bucket. A log bucket may currently only contain one link.",
+ "canonical": true,
+ "file": "config_service_v2.create_link.js",
+ "language": "JAVASCRIPT",
+ "segments": [
{
- "regionTag": "logging_v2_generated_ConfigServiceV2_CreateLink_async",
- "title": "logging createLink Sample",
- "origin": "API_DEFINITION",
- "description": " Asynchronously creates a linked dataset in BigQuery which makes it possible to use BigQuery to read the logs stored in the log bucket. A log bucket may currently only contain one link.",
- "canonical": true,
- "file": "config_service_v2.create_link.js",
- "language": "JAVASCRIPT",
- "segments": [
- {
- "start": 25,
- "end": 70,
- "type": "FULL"
- }
- ],
- "clientMethod": {
- "shortName": "CreateLink",
- "fullName": "google.logging.v2.ConfigServiceV2.CreateLink",
- "async": true,
- "parameters": [
- {
- "name": "parent",
- "type": "TYPE_STRING"
- },
- {
- "name": "link",
- "type": ".google.logging.v2.Link"
- },
- {
- "name": "link_id",
- "type": "TYPE_STRING"
- }
- ],
- "resultType": ".google.longrunning.Operation",
- "client": {
- "shortName": "ConfigServiceV2Client",
- "fullName": "google.logging.v2.ConfigServiceV2Client"
- },
- "method": {
- "shortName": "CreateLink",
- "fullName": "google.logging.v2.ConfigServiceV2.CreateLink",
- "service": {
- "shortName": "ConfigServiceV2",
- "fullName": "google.logging.v2.ConfigServiceV2"
- }
- }
- }
+ "start": 25,
+ "end": 70,
+ "type": "FULL"
+ }
+ ],
+ "clientMethod": {
+ "shortName": "CreateLink",
+ "fullName": "google.logging.v2.ConfigServiceV2.CreateLink",
+ "async": true,
+ "parameters": [
+ {
+ "name": "parent",
+ "type": "TYPE_STRING"
+ },
+ {
+ "name": "link",
+ "type": ".google.logging.v2.Link"
+ },
+ {
+ "name": "link_id",
+ "type": "TYPE_STRING"
+ }
+ ],
+ "resultType": ".google.longrunning.Operation",
+ "client": {
+ "shortName": "ConfigServiceV2Client",
+ "fullName": "google.logging.v2.ConfigServiceV2Client"
},
+ "method": {
+ "shortName": "CreateLink",
+ "fullName": "google.logging.v2.ConfigServiceV2.CreateLink",
+ "service": {
+ "shortName": "ConfigServiceV2",
+ "fullName": "google.logging.v2.ConfigServiceV2"
+ }
+ }
+ }
+ },
+ {
+ "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteLink_async",
+ "title": "logging deleteLink Sample",
+ "origin": "API_DEFINITION",
+ "description": " Deletes a link. This will also delete the corresponding BigQuery linked dataset.",
+ "canonical": true,
+ "file": "config_service_v2.delete_link.js",
+ "language": "JAVASCRIPT",
+ "segments": [
{
- "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteLink_async",
- "title": "logging deleteLink Sample",
- "origin": "API_DEFINITION",
- "description": " Deletes a link. This will also delete the corresponding BigQuery linked dataset.",
- "canonical": true,
- "file": "config_service_v2.delete_link.js",
- "language": "JAVASCRIPT",
- "segments": [
- {
- "start": 25,
- "end": 58,
- "type": "FULL"
- }
- ],
- "clientMethod": {
- "shortName": "DeleteLink",
- "fullName": "google.logging.v2.ConfigServiceV2.DeleteLink",
- "async": true,
- "parameters": [
- {
- "name": "name",
- "type": "TYPE_STRING"
- }
- ],
- "resultType": ".google.longrunning.Operation",
- "client": {
- "shortName": "ConfigServiceV2Client",
- "fullName": "google.logging.v2.ConfigServiceV2Client"
- },
- "method": {
- "shortName": "DeleteLink",
- "fullName": "google.logging.v2.ConfigServiceV2.DeleteLink",
- "service": {
- "shortName": "ConfigServiceV2",
- "fullName": "google.logging.v2.ConfigServiceV2"
- }
- }
- }
+ "start": 25,
+ "end": 58,
+ "type": "FULL"
+ }
+ ],
+ "clientMethod": {
+ "shortName": "DeleteLink",
+ "fullName": "google.logging.v2.ConfigServiceV2.DeleteLink",
+ "async": true,
+ "parameters": [
+ {
+ "name": "name",
+ "type": "TYPE_STRING"
+ }
+ ],
+ "resultType": ".google.longrunning.Operation",
+ "client": {
+ "shortName": "ConfigServiceV2Client",
+ "fullName": "google.logging.v2.ConfigServiceV2Client"
},
+ "method": {
+ "shortName": "DeleteLink",
+ "fullName": "google.logging.v2.ConfigServiceV2.DeleteLink",
+ "service": {
+ "shortName": "ConfigServiceV2",
+ "fullName": "google.logging.v2.ConfigServiceV2"
+ }
+ }
+ }
+ },
+ {
+ "regionTag": "logging_v2_generated_ConfigServiceV2_ListLinks_async",
+ "title": "logging listLinks Sample",
+ "origin": "API_DEFINITION",
+ "description": " Lists links.",
+ "canonical": true,
+ "file": "config_service_v2.list_links.js",
+ "language": "JAVASCRIPT",
+ "segments": [
{
- "regionTag": "logging_v2_generated_ConfigServiceV2_ListLinks_async",
- "title": "logging listLinks Sample",
- "origin": "API_DEFINITION",
- "description": " Lists links.",
- "canonical": true,
- "file": "config_service_v2.list_links.js",
- "language": "JAVASCRIPT",
- "segments": [
- {
- "start": 25,
- "end": 69,
- "type": "FULL"
- }
- ],
- "clientMethod": {
- "shortName": "ListLinks",
- "fullName": "google.logging.v2.ConfigServiceV2.ListLinks",
- "async": true,
- "parameters": [
- {
- "name": "parent",
- "type": "TYPE_STRING"
- },
- {
- "name": "page_token",
- "type": "TYPE_STRING"
- },
- {
- "name": "page_size",
- "type": "TYPE_INT32"
- }
- ],
- "resultType": ".google.logging.v2.ListLinksResponse",
- "client": {
- "shortName": "ConfigServiceV2Client",
- "fullName": "google.logging.v2.ConfigServiceV2Client"
- },
- "method": {
- "shortName": "ListLinks",
- "fullName": "google.logging.v2.ConfigServiceV2.ListLinks",
- "service": {
- "shortName": "ConfigServiceV2",
- "fullName": "google.logging.v2.ConfigServiceV2"
- }
- }
- }
+ "start": 25,
+ "end": 69,
+ "type": "FULL"
+ }
+ ],
+ "clientMethod": {
+ "shortName": "ListLinks",
+ "fullName": "google.logging.v2.ConfigServiceV2.ListLinks",
+ "async": true,
+ "parameters": [
+ {
+ "name": "parent",
+ "type": "TYPE_STRING"
+ },
+ {
+ "name": "page_token",
+ "type": "TYPE_STRING"
+ },
+ {
+ "name": "page_size",
+ "type": "TYPE_INT32"
+ }
+ ],
+ "resultType": ".google.logging.v2.ListLinksResponse",
+ "client": {
+ "shortName": "ConfigServiceV2Client",
+ "fullName": "google.logging.v2.ConfigServiceV2Client"
},
+ "method": {
+ "shortName": "ListLinks",
+ "fullName": "google.logging.v2.ConfigServiceV2.ListLinks",
+ "service": {
+ "shortName": "ConfigServiceV2",
+ "fullName": "google.logging.v2.ConfigServiceV2"
+ }
+ }
+ }
+ },
+ {
+ "regionTag": "logging_v2_generated_ConfigServiceV2_GetLink_async",
+ "title": "logging getLink Sample",
+ "origin": "API_DEFINITION",
+ "description": " Gets a link.",
+ "canonical": true,
+ "file": "config_service_v2.get_link.js",
+ "language": "JAVASCRIPT",
+ "segments": [
{
- "regionTag": "logging_v2_generated_ConfigServiceV2_GetLink_async",
- "title": "logging getLink Sample",
- "origin": "API_DEFINITION",
- "description": " Gets a link.",
- "canonical": true,
- "file": "config_service_v2.get_link.js",
- "language": "JAVASCRIPT",
- "segments": [
- {
- "start": 25,
- "end": 57,
- "type": "FULL"
- }
- ],
- "clientMethod": {
- "shortName": "GetLink",
- "fullName": "google.logging.v2.ConfigServiceV2.GetLink",
- "async": true,
- "parameters": [
- {
- "name": "name",
- "type": "TYPE_STRING"
- }
- ],
- "resultType": ".google.logging.v2.Link",
- "client": {
- "shortName": "ConfigServiceV2Client",
- "fullName": "google.logging.v2.ConfigServiceV2Client"
- },
- "method": {
- "shortName": "GetLink",
- "fullName": "google.logging.v2.ConfigServiceV2.GetLink",
- "service": {
- "shortName": "ConfigServiceV2",
- "fullName": "google.logging.v2.ConfigServiceV2"
- }
- }
- }
+ "start": 25,
+ "end": 57,
+ "type": "FULL"
+ }
+ ],
+ "clientMethod": {
+ "shortName": "GetLink",
+ "fullName": "google.logging.v2.ConfigServiceV2.GetLink",
+ "async": true,
+ "parameters": [
+ {
+ "name": "name",
+ "type": "TYPE_STRING"
+ }
+ ],
+ "resultType": ".google.logging.v2.Link",
+ "client": {
+ "shortName": "ConfigServiceV2Client",
+ "fullName": "google.logging.v2.ConfigServiceV2Client"
},
+ "method": {
+ "shortName": "GetLink",
+ "fullName": "google.logging.v2.ConfigServiceV2.GetLink",
+ "service": {
+ "shortName": "ConfigServiceV2",
+ "fullName": "google.logging.v2.ConfigServiceV2"
+ }
+ }
+ }
+ },
+ {
+ "regionTag": "logging_v2_generated_ConfigServiceV2_ListExclusions_async",
+ "title": "logging listExclusions Sample",
+ "origin": "API_DEFINITION",
+ "description": " Lists all the exclusions on the _Default sink in a parent resource.",
+ "canonical": true,
+ "file": "config_service_v2.list_exclusions.js",
+ "language": "JAVASCRIPT",
+ "segments": [
{
- "regionTag": "logging_v2_generated_ConfigServiceV2_ListExclusions_async",
- "title": "logging listExclusions Sample",
- "origin": "API_DEFINITION",
- "description": " Lists all the exclusions on the _Default sink in a parent resource.",
- "canonical": true,
- "file": "config_service_v2.list_exclusions.js",
- "language": "JAVASCRIPT",
- "segments": [
- {
- "start": 25,
- "end": 72,
- "type": "FULL"
- }
- ],
- "clientMethod": {
- "shortName": "ListExclusions",
- "fullName": "google.logging.v2.ConfigServiceV2.ListExclusions",
- "async": true,
- "parameters": [
- {
- "name": "parent",
- "type": "TYPE_STRING"
- },
- {
- "name": "page_token",
- "type": "TYPE_STRING"
- },
- {
- "name": "page_size",
- "type": "TYPE_INT32"
- }
- ],
- "resultType": ".google.logging.v2.ListExclusionsResponse",
- "client": {
- "shortName": "ConfigServiceV2Client",
- "fullName": "google.logging.v2.ConfigServiceV2Client"
- },
- "method": {
- "shortName": "ListExclusions",
- "fullName": "google.logging.v2.ConfigServiceV2.ListExclusions",
- "service": {
- "shortName": "ConfigServiceV2",
- "fullName": "google.logging.v2.ConfigServiceV2"
- }
- }
- }
+ "start": 25,
+ "end": 72,
+ "type": "FULL"
+ }
+ ],
+ "clientMethod": {
+ "shortName": "ListExclusions",
+ "fullName": "google.logging.v2.ConfigServiceV2.ListExclusions",
+ "async": true,
+ "parameters": [
+ {
+ "name": "parent",
+ "type": "TYPE_STRING"
+ },
+ {
+ "name": "page_token",
+ "type": "TYPE_STRING"
+ },
+ {
+ "name": "page_size",
+ "type": "TYPE_INT32"
+ }
+ ],
+ "resultType": ".google.logging.v2.ListExclusionsResponse",
+ "client": {
+ "shortName": "ConfigServiceV2Client",
+ "fullName": "google.logging.v2.ConfigServiceV2Client"
},
+ "method": {
+ "shortName": "ListExclusions",
+ "fullName": "google.logging.v2.ConfigServiceV2.ListExclusions",
+ "service": {
+ "shortName": "ConfigServiceV2",
+ "fullName": "google.logging.v2.ConfigServiceV2"
+ }
+ }
+ }
+ },
+ {
+ "regionTag": "logging_v2_generated_ConfigServiceV2_GetExclusion_async",
+ "title": "logging getExclusion Sample",
+ "origin": "API_DEFINITION",
+ "description": " Gets the description of an exclusion in the _Default sink.",
+ "canonical": true,
+ "file": "config_service_v2.get_exclusion.js",
+ "language": "JAVASCRIPT",
+ "segments": [
{
- "regionTag": "logging_v2_generated_ConfigServiceV2_GetExclusion_async",
- "title": "logging getExclusion Sample",
- "origin": "API_DEFINITION",
- "description": " Gets the description of an exclusion in the _Default sink.",
- "canonical": true,
- "file": "config_service_v2.get_exclusion.js",
- "language": "JAVASCRIPT",
- "segments": [
- {
- "start": 25,
- "end": 59,
- "type": "FULL"
- }
- ],
- "clientMethod": {
- "shortName": "GetExclusion",
- "fullName": "google.logging.v2.ConfigServiceV2.GetExclusion",
- "async": true,
- "parameters": [
- {
- "name": "name",
- "type": "TYPE_STRING"
- }
- ],
- "resultType": ".google.logging.v2.LogExclusion",
- "client": {
- "shortName": "ConfigServiceV2Client",
- "fullName": "google.logging.v2.ConfigServiceV2Client"
- },
- "method": {
- "shortName": "GetExclusion",
- "fullName": "google.logging.v2.ConfigServiceV2.GetExclusion",
- "service": {
- "shortName": "ConfigServiceV2",
- "fullName": "google.logging.v2.ConfigServiceV2"
- }
- }
- }
+ "start": 25,
+ "end": 59,
+ "type": "FULL"
+ }
+ ],
+ "clientMethod": {
+ "shortName": "GetExclusion",
+ "fullName": "google.logging.v2.ConfigServiceV2.GetExclusion",
+ "async": true,
+ "parameters": [
+ {
+ "name": "name",
+ "type": "TYPE_STRING"
+ }
+ ],
+ "resultType": ".google.logging.v2.LogExclusion",
+ "client": {
+ "shortName": "ConfigServiceV2Client",
+ "fullName": "google.logging.v2.ConfigServiceV2Client"
},
+ "method": {
+ "shortName": "GetExclusion",
+ "fullName": "google.logging.v2.ConfigServiceV2.GetExclusion",
+ "service": {
+ "shortName": "ConfigServiceV2",
+ "fullName": "google.logging.v2.ConfigServiceV2"
+ }
+ }
+ }
+ },
+ {
+ "regionTag": "logging_v2_generated_ConfigServiceV2_CreateExclusion_async",
+ "title": "logging createExclusion Sample",
+ "origin": "API_DEFINITION",
+ "description": " Creates a new exclusion in the _Default sink in a specified parent resource. Only log entries belonging to that resource can be excluded. You can have up to 10 exclusions in a resource.",
+ "canonical": true,
+ "file": "config_service_v2.create_exclusion.js",
+ "language": "JAVASCRIPT",
+ "segments": [
{
- "regionTag": "logging_v2_generated_ConfigServiceV2_CreateExclusion_async",
- "title": "logging createExclusion Sample",
- "origin": "API_DEFINITION",
- "description": " Creates a new exclusion in the _Default sink in a specified parent resource. Only log entries belonging to that resource can be excluded. You can have up to 10 exclusions in a resource.",
- "canonical": true,
- "file": "config_service_v2.create_exclusion.js",
- "language": "JAVASCRIPT",
- "segments": [
- {
- "start": 25,
- "end": 66,
- "type": "FULL"
- }
- ],
- "clientMethod": {
- "shortName": "CreateExclusion",
- "fullName": "google.logging.v2.ConfigServiceV2.CreateExclusion",
- "async": true,
- "parameters": [
- {
- "name": "parent",
- "type": "TYPE_STRING"
- },
- {
- "name": "exclusion",
- "type": ".google.logging.v2.LogExclusion"
- }
- ],
- "resultType": ".google.logging.v2.LogExclusion",
- "client": {
- "shortName": "ConfigServiceV2Client",
- "fullName": "google.logging.v2.ConfigServiceV2Client"
- },
- "method": {
- "shortName": "CreateExclusion",
- "fullName": "google.logging.v2.ConfigServiceV2.CreateExclusion",
- "service": {
- "shortName": "ConfigServiceV2",
- "fullName": "google.logging.v2.ConfigServiceV2"
- }
- }
- }
+ "start": 25,
+ "end": 66,
+ "type": "FULL"
+ }
+ ],
+ "clientMethod": {
+ "shortName": "CreateExclusion",
+ "fullName": "google.logging.v2.ConfigServiceV2.CreateExclusion",
+ "async": true,
+ "parameters": [
+ {
+ "name": "parent",
+ "type": "TYPE_STRING"
+ },
+ {
+ "name": "exclusion",
+ "type": ".google.logging.v2.LogExclusion"
+ }
+ ],
+ "resultType": ".google.logging.v2.LogExclusion",
+ "client": {
+ "shortName": "ConfigServiceV2Client",
+ "fullName": "google.logging.v2.ConfigServiceV2Client"
},
+ "method": {
+ "shortName": "CreateExclusion",
+ "fullName": "google.logging.v2.ConfigServiceV2.CreateExclusion",
+ "service": {
+ "shortName": "ConfigServiceV2",
+ "fullName": "google.logging.v2.ConfigServiceV2"
+ }
+ }
+ }
+ },
+ {
+ "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateExclusion_async",
+ "title": "logging updateExclusion Sample",
+ "origin": "API_DEFINITION",
+ "description": " Changes one or more properties of an existing exclusion in the _Default sink.",
+ "canonical": true,
+ "file": "config_service_v2.update_exclusion.js",
+ "language": "JAVASCRIPT",
+ "segments": [
{
- "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateExclusion_async",
- "title": "logging updateExclusion Sample",
- "origin": "API_DEFINITION",
- "description": " Changes one or more properties of an existing exclusion in the _Default sink.",
- "canonical": true,
- "file": "config_service_v2.update_exclusion.js",
- "language": "JAVASCRIPT",
- "segments": [
- {
- "start": 25,
- "end": 76,
- "type": "FULL"
- }
- ],
- "clientMethod": {
- "shortName": "UpdateExclusion",
- "fullName": "google.logging.v2.ConfigServiceV2.UpdateExclusion",
- "async": true,
- "parameters": [
- {
- "name": "name",
- "type": "TYPE_STRING"
- },
- {
- "name": "exclusion",
- "type": ".google.logging.v2.LogExclusion"
- },
- {
- "name": "update_mask",
- "type": ".google.protobuf.FieldMask"
- }
- ],
- "resultType": ".google.logging.v2.LogExclusion",
- "client": {
- "shortName": "ConfigServiceV2Client",
- "fullName": "google.logging.v2.ConfigServiceV2Client"
- },
- "method": {
- "shortName": "UpdateExclusion",
- "fullName": "google.logging.v2.ConfigServiceV2.UpdateExclusion",
- "service": {
- "shortName": "ConfigServiceV2",
- "fullName": "google.logging.v2.ConfigServiceV2"
- }
- }
- }
+ "start": 25,
+ "end": 76,
+ "type": "FULL"
+ }
+ ],
+ "clientMethod": {
+ "shortName": "UpdateExclusion",
+ "fullName": "google.logging.v2.ConfigServiceV2.UpdateExclusion",
+ "async": true,
+ "parameters": [
+ {
+ "name": "name",
+ "type": "TYPE_STRING"
+ },
+ {
+ "name": "exclusion",
+ "type": ".google.logging.v2.LogExclusion"
+ },
+ {
+ "name": "update_mask",
+ "type": ".google.protobuf.FieldMask"
+ }
+ ],
+ "resultType": ".google.logging.v2.LogExclusion",
+ "client": {
+ "shortName": "ConfigServiceV2Client",
+ "fullName": "google.logging.v2.ConfigServiceV2Client"
},
+ "method": {
+ "shortName": "UpdateExclusion",
+ "fullName": "google.logging.v2.ConfigServiceV2.UpdateExclusion",
+ "service": {
+ "shortName": "ConfigServiceV2",
+ "fullName": "google.logging.v2.ConfigServiceV2"
+ }
+ }
+ }
+ },
+ {
+ "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteExclusion_async",
+ "title": "logging deleteExclusion Sample",
+ "origin": "API_DEFINITION",
+ "description": " Deletes an exclusion in the _Default sink.",
+ "canonical": true,
+ "file": "config_service_v2.delete_exclusion.js",
+ "language": "JAVASCRIPT",
+ "segments": [
{
- "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteExclusion_async",
- "title": "logging deleteExclusion Sample",
- "origin": "API_DEFINITION",
- "description": " Deletes an exclusion in the _Default sink.",
- "canonical": true,
- "file": "config_service_v2.delete_exclusion.js",
- "language": "JAVASCRIPT",
- "segments": [
- {
- "start": 25,
- "end": 59,
- "type": "FULL"
- }
- ],
- "clientMethod": {
- "shortName": "DeleteExclusion",
- "fullName": "google.logging.v2.ConfigServiceV2.DeleteExclusion",
- "async": true,
- "parameters": [
- {
- "name": "name",
- "type": "TYPE_STRING"
- }
- ],
- "resultType": ".google.protobuf.Empty",
- "client": {
- "shortName": "ConfigServiceV2Client",
- "fullName": "google.logging.v2.ConfigServiceV2Client"
- },
- "method": {
- "shortName": "DeleteExclusion",
- "fullName": "google.logging.v2.ConfigServiceV2.DeleteExclusion",
- "service": {
- "shortName": "ConfigServiceV2",
- "fullName": "google.logging.v2.ConfigServiceV2"
- }
- }
- }
+ "start": 25,
+ "end": 59,
+ "type": "FULL"
+ }
+ ],
+ "clientMethod": {
+ "shortName": "DeleteExclusion",
+ "fullName": "google.logging.v2.ConfigServiceV2.DeleteExclusion",
+ "async": true,
+ "parameters": [
+ {
+ "name": "name",
+ "type": "TYPE_STRING"
+ }
+ ],
+ "resultType": ".google.protobuf.Empty",
+ "client": {
+ "shortName": "ConfigServiceV2Client",
+ "fullName": "google.logging.v2.ConfigServiceV2Client"
},
+ "method": {
+ "shortName": "DeleteExclusion",
+ "fullName": "google.logging.v2.ConfigServiceV2.DeleteExclusion",
+ "service": {
+ "shortName": "ConfigServiceV2",
+ "fullName": "google.logging.v2.ConfigServiceV2"
+ }
+ }
+ }
+ },
+ {
+ "regionTag": "logging_v2_generated_ConfigServiceV2_GetCmekSettings_async",
+ "title": "logging getCmekSettings Sample",
+ "origin": "API_DEFINITION",
+ "description": " Gets the Logging CMEK settings for the given resource. Note: CMEK for the Log Router can be configured for Google Cloud projects, folders, organizations and billing accounts. Once configured for an organization, it applies to all projects and folders in the Google Cloud organization. See [Enabling CMEK for Log Router](https://cloud.google.com/logging/docs/routing/managed-encryption) for more information.",
+ "canonical": true,
+ "file": "config_service_v2.get_cmek_settings.js",
+ "language": "JAVASCRIPT",
+ "segments": [
{
- "regionTag": "logging_v2_generated_ConfigServiceV2_GetCmekSettings_async",
- "title": "logging getCmekSettings Sample",
- "origin": "API_DEFINITION",
- "description": " Gets the Logging CMEK settings for the given resource. Note: CMEK for the Log Router can be configured for Google Cloud projects, folders, organizations and billing accounts. Once configured for an organization, it applies to all projects and folders in the Google Cloud organization. See [Enabling CMEK for Log Router](https://cloud.google.com/logging/docs/routing/managed-encryption) for more information.",
- "canonical": true,
- "file": "config_service_v2.get_cmek_settings.js",
- "language": "JAVASCRIPT",
- "segments": [
- {
- "start": 25,
- "end": 63,
- "type": "FULL"
- }
- ],
- "clientMethod": {
- "shortName": "GetCmekSettings",
- "fullName": "google.logging.v2.ConfigServiceV2.GetCmekSettings",
- "async": true,
- "parameters": [
- {
- "name": "name",
- "type": "TYPE_STRING"
- }
- ],
- "resultType": ".google.logging.v2.CmekSettings",
- "client": {
- "shortName": "ConfigServiceV2Client",
- "fullName": "google.logging.v2.ConfigServiceV2Client"
- },
- "method": {
- "shortName": "GetCmekSettings",
- "fullName": "google.logging.v2.ConfigServiceV2.GetCmekSettings",
- "service": {
- "shortName": "ConfigServiceV2",
- "fullName": "google.logging.v2.ConfigServiceV2"
- }
- }
- }
+ "start": 25,
+ "end": 63,
+ "type": "FULL"
+ }
+ ],
+ "clientMethod": {
+ "shortName": "GetCmekSettings",
+ "fullName": "google.logging.v2.ConfigServiceV2.GetCmekSettings",
+ "async": true,
+ "parameters": [
+ {
+ "name": "name",
+ "type": "TYPE_STRING"
+ }
+ ],
+ "resultType": ".google.logging.v2.CmekSettings",
+ "client": {
+ "shortName": "ConfigServiceV2Client",
+ "fullName": "google.logging.v2.ConfigServiceV2Client"
},
+ "method": {
+ "shortName": "GetCmekSettings",
+ "fullName": "google.logging.v2.ConfigServiceV2.GetCmekSettings",
+ "service": {
+ "shortName": "ConfigServiceV2",
+ "fullName": "google.logging.v2.ConfigServiceV2"
+ }
+ }
+ }
+ },
+ {
+ "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateCmekSettings_async",
+ "title": "logging updateCmekSettings Sample",
+ "origin": "API_DEFINITION",
+ "description": " Updates the Log Router CMEK settings for the given resource. Note: CMEK for the Log Router can currently only be configured for Google Cloud organizations. Once configured, it applies to all projects and folders in the Google Cloud organization. [UpdateCmekSettings][google.logging.v2.ConfigServiceV2.UpdateCmekSettings] will fail if 1) `kms_key_name` is invalid, or 2) the associated service account does not have the required `roles/cloudkms.cryptoKeyEncrypterDecrypter` role assigned for the key, or 3) access to the key is disabled. See [Enabling CMEK for Log Router](https://cloud.google.com/logging/docs/routing/managed-encryption) for more information.",
+ "canonical": true,
+ "file": "config_service_v2.update_cmek_settings.js",
+ "language": "JAVASCRIPT",
+ "segments": [
{
- "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateCmekSettings_async",
- "title": "logging updateCmekSettings Sample",
- "origin": "API_DEFINITION",
- "description": " Updates the Log Router CMEK settings for the given resource. Note: CMEK for the Log Router can currently only be configured for Google Cloud organizations. Once configured, it applies to all projects and folders in the Google Cloud organization. [UpdateCmekSettings][google.logging.v2.ConfigServiceV2.UpdateCmekSettings] will fail if 1) `kms_key_name` is invalid, or 2) the associated service account does not have the required `roles/cloudkms.cryptoKeyEncrypterDecrypter` role assigned for the key, or 3) access to the key is disabled. See [Enabling CMEK for Log Router](https://cloud.google.com/logging/docs/routing/managed-encryption) for more information.",
- "canonical": true,
- "file": "config_service_v2.update_cmek_settings.js",
- "language": "JAVASCRIPT",
- "segments": [
- {
- "start": 25,
- "end": 78,
- "type": "FULL"
- }
- ],
- "clientMethod": {
- "shortName": "UpdateCmekSettings",
- "fullName": "google.logging.v2.ConfigServiceV2.UpdateCmekSettings",
- "async": true,
- "parameters": [
- {
- "name": "name",
- "type": "TYPE_STRING"
- },
- {
- "name": "cmek_settings",
- "type": ".google.logging.v2.CmekSettings"
- },
- {
- "name": "update_mask",
- "type": ".google.protobuf.FieldMask"
- }
- ],
- "resultType": ".google.logging.v2.CmekSettings",
- "client": {
- "shortName": "ConfigServiceV2Client",
- "fullName": "google.logging.v2.ConfigServiceV2Client"
- },
- "method": {
- "shortName": "UpdateCmekSettings",
- "fullName": "google.logging.v2.ConfigServiceV2.UpdateCmekSettings",
- "service": {
- "shortName": "ConfigServiceV2",
- "fullName": "google.logging.v2.ConfigServiceV2"
- }
- }
- }
+ "start": 25,
+ "end": 78,
+ "type": "FULL"
+ }
+ ],
+ "clientMethod": {
+ "shortName": "UpdateCmekSettings",
+ "fullName": "google.logging.v2.ConfigServiceV2.UpdateCmekSettings",
+ "async": true,
+ "parameters": [
+ {
+ "name": "name",
+ "type": "TYPE_STRING"
+ },
+ {
+ "name": "cmek_settings",
+ "type": ".google.logging.v2.CmekSettings"
+ },
+ {
+ "name": "update_mask",
+ "type": ".google.protobuf.FieldMask"
+ }
+ ],
+ "resultType": ".google.logging.v2.CmekSettings",
+ "client": {
+ "shortName": "ConfigServiceV2Client",
+ "fullName": "google.logging.v2.ConfigServiceV2Client"
},
+ "method": {
+ "shortName": "UpdateCmekSettings",
+ "fullName": "google.logging.v2.ConfigServiceV2.UpdateCmekSettings",
+ "service": {
+ "shortName": "ConfigServiceV2",
+ "fullName": "google.logging.v2.ConfigServiceV2"
+ }
+ }
+ }
+ },
+ {
+ "regionTag": "logging_v2_generated_ConfigServiceV2_GetSettings_async",
+ "title": "logging getSettings Sample",
+ "origin": "API_DEFINITION",
+ "description": " Gets the Log Router settings for the given resource. Note: Settings for the Log Router can be get for Google Cloud projects, folders, organizations and billing accounts. Currently it can only be configured for organizations. Once configured for an organization, it applies to all projects and folders in the Google Cloud organization. See [Enabling CMEK for Log Router](https://cloud.google.com/logging/docs/routing/managed-encryption) for more information.",
+ "canonical": true,
+ "file": "config_service_v2.get_settings.js",
+ "language": "JAVASCRIPT",
+ "segments": [
{
- "regionTag": "logging_v2_generated_ConfigServiceV2_GetSettings_async",
- "title": "logging getSettings Sample",
- "origin": "API_DEFINITION",
- "description": " Gets the Log Router settings for the given resource. Note: Settings for the Log Router can be get for Google Cloud projects, folders, organizations and billing accounts. Currently it can only be configured for organizations. Once configured for an organization, it applies to all projects and folders in the Google Cloud organization. See [Enabling CMEK for Log Router](https://cloud.google.com/logging/docs/routing/managed-encryption) for more information.",
- "canonical": true,
- "file": "config_service_v2.get_settings.js",
- "language": "JAVASCRIPT",
- "segments": [
- {
- "start": 25,
- "end": 63,
- "type": "FULL"
- }
- ],
- "clientMethod": {
- "shortName": "GetSettings",
- "fullName": "google.logging.v2.ConfigServiceV2.GetSettings",
- "async": true,
- "parameters": [
- {
- "name": "name",
- "type": "TYPE_STRING"
- }
- ],
- "resultType": ".google.logging.v2.Settings",
- "client": {
- "shortName": "ConfigServiceV2Client",
- "fullName": "google.logging.v2.ConfigServiceV2Client"
- },
- "method": {
- "shortName": "GetSettings",
- "fullName": "google.logging.v2.ConfigServiceV2.GetSettings",
- "service": {
- "shortName": "ConfigServiceV2",
- "fullName": "google.logging.v2.ConfigServiceV2"
- }
- }
- }
+ "start": 25,
+ "end": 63,
+ "type": "FULL"
+ }
+ ],
+ "clientMethod": {
+ "shortName": "GetSettings",
+ "fullName": "google.logging.v2.ConfigServiceV2.GetSettings",
+ "async": true,
+ "parameters": [
+ {
+ "name": "name",
+ "type": "TYPE_STRING"
+ }
+ ],
+ "resultType": ".google.logging.v2.Settings",
+ "client": {
+ "shortName": "ConfigServiceV2Client",
+ "fullName": "google.logging.v2.ConfigServiceV2Client"
},
+ "method": {
+ "shortName": "GetSettings",
+ "fullName": "google.logging.v2.ConfigServiceV2.GetSettings",
+ "service": {
+ "shortName": "ConfigServiceV2",
+ "fullName": "google.logging.v2.ConfigServiceV2"
+ }
+ }
+ }
+ },
+ {
+ "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateSettings_async",
+ "title": "logging updateSettings Sample",
+ "origin": "API_DEFINITION",
+ "description": " Updates the Log Router settings for the given resource. Note: Settings for the Log Router can currently only be configured for Google Cloud organizations. Once configured, it applies to all projects and folders in the Google Cloud organization. [UpdateSettings][google.logging.v2.ConfigServiceV2.UpdateSettings] will fail if 1) `kms_key_name` is invalid, or 2) the associated service account does not have the required `roles/cloudkms.cryptoKeyEncrypterDecrypter` role assigned for the key, or 3) access to the key is disabled. 4) `location_id` is not supported by Logging. 5) `location_id` violate OrgPolicy. See [Enabling CMEK for Log Router](https://cloud.google.com/logging/docs/routing/managed-encryption) for more information.",
+ "canonical": true,
+ "file": "config_service_v2.update_settings.js",
+ "language": "JAVASCRIPT",
+ "segments": [
{
- "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateSettings_async",
- "title": "logging updateSettings Sample",
- "origin": "API_DEFINITION",
- "description": " Updates the Log Router settings for the given resource. Note: Settings for the Log Router can currently only be configured for Google Cloud organizations. Once configured, it applies to all projects and folders in the Google Cloud organization. [UpdateSettings][google.logging.v2.ConfigServiceV2.UpdateSettings] will fail if 1) `kms_key_name` is invalid, or 2) the associated service account does not have the required `roles/cloudkms.cryptoKeyEncrypterDecrypter` role assigned for the key, or 3) access to the key is disabled. 4) `location_id` is not supported by Logging. 5) `location_id` violate OrgPolicy. See [Enabling CMEK for Log Router](https://cloud.google.com/logging/docs/routing/managed-encryption) for more information.",
- "canonical": true,
- "file": "config_service_v2.update_settings.js",
- "language": "JAVASCRIPT",
- "segments": [
- {
- "start": 25,
- "end": 75,
- "type": "FULL"
- }
- ],
- "clientMethod": {
- "shortName": "UpdateSettings",
- "fullName": "google.logging.v2.ConfigServiceV2.UpdateSettings",
- "async": true,
- "parameters": [
- {
- "name": "name",
- "type": "TYPE_STRING"
- },
- {
- "name": "settings",
- "type": ".google.logging.v2.Settings"
- },
- {
- "name": "update_mask",
- "type": ".google.protobuf.FieldMask"
- }
- ],
- "resultType": ".google.logging.v2.Settings",
- "client": {
- "shortName": "ConfigServiceV2Client",
- "fullName": "google.logging.v2.ConfigServiceV2Client"
- },
- "method": {
- "shortName": "UpdateSettings",
- "fullName": "google.logging.v2.ConfigServiceV2.UpdateSettings",
- "service": {
- "shortName": "ConfigServiceV2",
- "fullName": "google.logging.v2.ConfigServiceV2"
- }
- }
- }
+ "start": 25,
+ "end": 75,
+ "type": "FULL"
+ }
+ ],
+ "clientMethod": {
+ "shortName": "UpdateSettings",
+ "fullName": "google.logging.v2.ConfigServiceV2.UpdateSettings",
+ "async": true,
+ "parameters": [
+ {
+ "name": "name",
+ "type": "TYPE_STRING"
+ },
+ {
+ "name": "settings",
+ "type": ".google.logging.v2.Settings"
+ },
+ {
+ "name": "update_mask",
+ "type": ".google.protobuf.FieldMask"
+ }
+ ],
+ "resultType": ".google.logging.v2.Settings",
+ "client": {
+ "shortName": "ConfigServiceV2Client",
+ "fullName": "google.logging.v2.ConfigServiceV2Client"
},
+ "method": {
+ "shortName": "UpdateSettings",
+ "fullName": "google.logging.v2.ConfigServiceV2.UpdateSettings",
+ "service": {
+ "shortName": "ConfigServiceV2",
+ "fullName": "google.logging.v2.ConfigServiceV2"
+ }
+ }
+ }
+ },
+ {
+ "regionTag": "logging_v2_generated_ConfigServiceV2_CopyLogEntries_async",
+ "title": "logging copyLogEntries Sample",
+ "origin": "API_DEFINITION",
+ "description": " Copies a set of log entries from a log bucket to a Cloud Storage bucket.",
+ "canonical": true,
+ "file": "config_service_v2.copy_log_entries.js",
+ "language": "JAVASCRIPT",
+ "segments": [
{
- "regionTag": "logging_v2_generated_ConfigServiceV2_CopyLogEntries_async",
- "title": "logging copyLogEntries Sample",
- "origin": "API_DEFINITION",
- "description": " Copies a set of log entries from a log bucket to a Cloud Storage bucket.",
- "canonical": true,
- "file": "config_service_v2.copy_log_entries.js",
- "language": "JAVASCRIPT",
- "segments": [
- {
- "start": 25,
- "end": 66,
- "type": "FULL"
- }
- ],
- "clientMethod": {
- "shortName": "CopyLogEntries",
- "fullName": "google.logging.v2.ConfigServiceV2.CopyLogEntries",
- "async": true,
- "parameters": [
- {
- "name": "name",
- "type": "TYPE_STRING"
- },
- {
- "name": "filter",
- "type": "TYPE_STRING"
- },
- {
- "name": "destination",
- "type": "TYPE_STRING"
- }
- ],
- "resultType": ".google.longrunning.Operation",
- "client": {
- "shortName": "ConfigServiceV2Client",
- "fullName": "google.logging.v2.ConfigServiceV2Client"
- },
- "method": {
- "shortName": "CopyLogEntries",
- "fullName": "google.logging.v2.ConfigServiceV2.CopyLogEntries",
- "service": {
- "shortName": "ConfigServiceV2",
- "fullName": "google.logging.v2.ConfigServiceV2"
- }
- }
- }
+ "start": 25,
+ "end": 66,
+ "type": "FULL"
+ }
+ ],
+ "clientMethod": {
+ "shortName": "CopyLogEntries",
+ "fullName": "google.logging.v2.ConfigServiceV2.CopyLogEntries",
+ "async": true,
+ "parameters": [
+ {
+ "name": "name",
+ "type": "TYPE_STRING"
+ },
+ {
+ "name": "filter",
+ "type": "TYPE_STRING"
+ },
+ {
+ "name": "destination",
+ "type": "TYPE_STRING"
+ }
+ ],
+ "resultType": ".google.longrunning.Operation",
+ "client": {
+ "shortName": "ConfigServiceV2Client",
+ "fullName": "google.logging.v2.ConfigServiceV2Client"
},
+ "method": {
+ "shortName": "CopyLogEntries",
+ "fullName": "google.logging.v2.ConfigServiceV2.CopyLogEntries",
+ "service": {
+ "shortName": "ConfigServiceV2",
+ "fullName": "google.logging.v2.ConfigServiceV2"
+ }
+ }
+ }
+ },
+ {
+ "regionTag": "logging_v2_generated_LoggingServiceV2_DeleteLog_async",
+ "title": "logging deleteLog Sample",
+ "origin": "API_DEFINITION",
+ "description": " Deletes all the log entries in a log for the _Default Log Bucket. The log reappears if it receives new entries. Log entries written shortly before the delete operation might not be deleted. Entries received after the delete operation with a timestamp before the operation will be deleted.",
+ "canonical": true,
+ "file": "logging_service_v2.delete_log.js",
+ "language": "JAVASCRIPT",
+ "segments": [
{
- "regionTag": "logging_v2_generated_LoggingServiceV2_DeleteLog_async",
- "title": "logging deleteLog Sample",
- "origin": "API_DEFINITION",
- "description": " Deletes all the log entries in a log for the _Default Log Bucket. The log reappears if it receives new entries. Log entries written shortly before the delete operation might not be deleted. Entries received after the delete operation with a timestamp before the operation will be deleted.",
- "canonical": true,
- "file": "logging_service_v2.delete_log.js",
- "language": "JAVASCRIPT",
- "segments": [
- {
- "start": 25,
- "end": 62,
- "type": "FULL"
- }
- ],
- "clientMethod": {
- "shortName": "DeleteLog",
- "fullName": "google.logging.v2.LoggingServiceV2.DeleteLog",
- "async": true,
- "parameters": [
- {
- "name": "log_name",
- "type": "TYPE_STRING"
- }
- ],
- "resultType": ".google.protobuf.Empty",
- "client": {
- "shortName": "LoggingServiceV2Client",
- "fullName": "google.logging.v2.LoggingServiceV2Client"
- },
- "method": {
- "shortName": "DeleteLog",
- "fullName": "google.logging.v2.LoggingServiceV2.DeleteLog",
- "service": {
- "shortName": "LoggingServiceV2",
- "fullName": "google.logging.v2.LoggingServiceV2"
- }
- }
- }
+ "start": 25,
+ "end": 62,
+ "type": "FULL"
+ }
+ ],
+ "clientMethod": {
+ "shortName": "DeleteLog",
+ "fullName": "google.logging.v2.LoggingServiceV2.DeleteLog",
+ "async": true,
+ "parameters": [
+ {
+ "name": "log_name",
+ "type": "TYPE_STRING"
+ }
+ ],
+ "resultType": ".google.protobuf.Empty",
+ "client": {
+ "shortName": "LoggingServiceV2Client",
+ "fullName": "google.logging.v2.LoggingServiceV2Client"
},
+ "method": {
+ "shortName": "DeleteLog",
+ "fullName": "google.logging.v2.LoggingServiceV2.DeleteLog",
+ "service": {
+ "shortName": "LoggingServiceV2",
+ "fullName": "google.logging.v2.LoggingServiceV2"
+ }
+ }
+ }
+ },
+ {
+ "regionTag": "logging_v2_generated_LoggingServiceV2_WriteLogEntries_async",
+ "title": "logging writeLogEntries Sample",
+ "origin": "API_DEFINITION",
+ "description": " Writes log entries to Logging. This API method is the only way to send log entries to Logging. This method is used, directly or indirectly, by the Logging agent (fluentd) and all logging libraries configured to use Logging. A single request may contain log entries for a maximum of 1000 different resources (projects, organizations, billing accounts or folders)",
+ "canonical": true,
+ "file": "logging_service_v2.write_log_entries.js",
+ "language": "JAVASCRIPT",
+ "segments": [
{
- "regionTag": "logging_v2_generated_LoggingServiceV2_WriteLogEntries_async",
- "title": "logging writeLogEntries Sample",
- "origin": "API_DEFINITION",
- "description": " Writes log entries to Logging. This API method is the only way to send log entries to Logging. This method is used, directly or indirectly, by the Logging agent (fluentd) and all logging libraries configured to use Logging. A single request may contain log entries for a maximum of 1000 different resources (projects, organizations, billing accounts or folders)",
- "canonical": true,
- "file": "logging_service_v2.write_log_entries.js",
- "language": "JAVASCRIPT",
- "segments": [
- {
- "start": 25,
- "end": 121,
- "type": "FULL"
- }
- ],
- "clientMethod": {
- "shortName": "WriteLogEntries",
- "fullName": "google.logging.v2.LoggingServiceV2.WriteLogEntries",
- "async": true,
- "parameters": [
- {
- "name": "log_name",
- "type": "TYPE_STRING"
- },
- {
- "name": "resource",
- "type": ".google.api.MonitoredResource"
- },
- {
- "name": "labels",
- "type": "TYPE_MESSAGE[]"
- },
- {
- "name": "entries",
- "type": "TYPE_MESSAGE[]"
- },
- {
- "name": "partial_success",
- "type": "TYPE_BOOL"
- },
- {
- "name": "dry_run",
- "type": "TYPE_BOOL"
- }
- ],
- "resultType": ".google.logging.v2.WriteLogEntriesResponse",
- "client": {
- "shortName": "LoggingServiceV2Client",
- "fullName": "google.logging.v2.LoggingServiceV2Client"
- },
- "method": {
- "shortName": "WriteLogEntries",
- "fullName": "google.logging.v2.LoggingServiceV2.WriteLogEntries",
- "service": {
- "shortName": "LoggingServiceV2",
- "fullName": "google.logging.v2.LoggingServiceV2"
- }
- }
- }
+ "start": 25,
+ "end": 121,
+ "type": "FULL"
+ }
+ ],
+ "clientMethod": {
+ "shortName": "WriteLogEntries",
+ "fullName": "google.logging.v2.LoggingServiceV2.WriteLogEntries",
+ "async": true,
+ "parameters": [
+ {
+ "name": "log_name",
+ "type": "TYPE_STRING"
+ },
+ {
+ "name": "resource",
+ "type": ".google.api.MonitoredResource"
+ },
+ {
+ "name": "labels",
+ "type": "TYPE_MESSAGE[]"
+ },
+ {
+ "name": "entries",
+ "type": "TYPE_MESSAGE[]"
+ },
+ {
+ "name": "partial_success",
+ "type": "TYPE_BOOL"
+ },
+ {
+ "name": "dry_run",
+ "type": "TYPE_BOOL"
+ }
+ ],
+ "resultType": ".google.logging.v2.WriteLogEntriesResponse",
+ "client": {
+ "shortName": "LoggingServiceV2Client",
+ "fullName": "google.logging.v2.LoggingServiceV2Client"
},
+ "method": {
+ "shortName": "WriteLogEntries",
+ "fullName": "google.logging.v2.LoggingServiceV2.WriteLogEntries",
+ "service": {
+ "shortName": "LoggingServiceV2",
+ "fullName": "google.logging.v2.LoggingServiceV2"
+ }
+ }
+ }
+ },
+ {
+ "regionTag": "logging_v2_generated_LoggingServiceV2_ListLogEntries_async",
+ "title": "logging listLogEntries Sample",
+ "origin": "API_DEFINITION",
+ "description": " Lists log entries. Use this method to retrieve log entries that originated from a project/folder/organization/billing account. For ways to export log entries, see [Exporting Logs](https://cloud.google.com/logging/docs/export).",
+ "canonical": true,
+ "file": "logging_service_v2.list_log_entries.js",
+ "language": "JAVASCRIPT",
+ "segments": [
{
- "regionTag": "logging_v2_generated_LoggingServiceV2_ListLogEntries_async",
- "title": "logging listLogEntries Sample",
- "origin": "API_DEFINITION",
- "description": " Lists log entries. Use this method to retrieve log entries that originated from a project/folder/organization/billing account. For ways to export log entries, see [Exporting Logs](https://cloud.google.com/logging/docs/export).",
- "canonical": true,
- "file": "logging_service_v2.list_log_entries.js",
- "language": "JAVASCRIPT",
- "segments": [
- {
- "start": 25,
- "end": 98,
- "type": "FULL"
- }
- ],
- "clientMethod": {
- "shortName": "ListLogEntries",
- "fullName": "google.logging.v2.LoggingServiceV2.ListLogEntries",
- "async": true,
- "parameters": [
- {
- "name": "resource_names",
- "type": "TYPE_STRING[]"
- },
- {
- "name": "filter",
- "type": "TYPE_STRING"
- },
- {
- "name": "order_by",
- "type": "TYPE_STRING"
- },
- {
- "name": "page_size",
- "type": "TYPE_INT32"
- },
- {
- "name": "page_token",
- "type": "TYPE_STRING"
- }
- ],
- "resultType": ".google.logging.v2.ListLogEntriesResponse",
- "client": {
- "shortName": "LoggingServiceV2Client",
- "fullName": "google.logging.v2.LoggingServiceV2Client"
- },
- "method": {
- "shortName": "ListLogEntries",
- "fullName": "google.logging.v2.LoggingServiceV2.ListLogEntries",
- "service": {
- "shortName": "LoggingServiceV2",
- "fullName": "google.logging.v2.LoggingServiceV2"
- }
- }
- }
+ "start": 25,
+ "end": 98,
+ "type": "FULL"
+ }
+ ],
+ "clientMethod": {
+ "shortName": "ListLogEntries",
+ "fullName": "google.logging.v2.LoggingServiceV2.ListLogEntries",
+ "async": true,
+ "parameters": [
+ {
+ "name": "resource_names",
+ "type": "TYPE_STRING[]"
+ },
+ {
+ "name": "filter",
+ "type": "TYPE_STRING"
+ },
+ {
+ "name": "order_by",
+ "type": "TYPE_STRING"
+ },
+ {
+ "name": "page_size",
+ "type": "TYPE_INT32"
+ },
+ {
+ "name": "page_token",
+ "type": "TYPE_STRING"
+ }
+ ],
+ "resultType": ".google.logging.v2.ListLogEntriesResponse",
+ "client": {
+ "shortName": "LoggingServiceV2Client",
+ "fullName": "google.logging.v2.LoggingServiceV2Client"
},
+ "method": {
+ "shortName": "ListLogEntries",
+ "fullName": "google.logging.v2.LoggingServiceV2.ListLogEntries",
+ "service": {
+ "shortName": "LoggingServiceV2",
+ "fullName": "google.logging.v2.LoggingServiceV2"
+ }
+ }
+ }
+ },
+ {
+ "regionTag": "logging_v2_generated_LoggingServiceV2_ListMonitoredResourceDescriptors_async",
+ "title": "logging listMonitoredResourceDescriptors Sample",
+ "origin": "API_DEFINITION",
+ "description": " Lists the descriptors for monitored resource types used by Logging.",
+ "canonical": true,
+ "file": "logging_service_v2.list_monitored_resource_descriptors.js",
+ "language": "JAVASCRIPT",
+ "segments": [
{
- "regionTag": "logging_v2_generated_LoggingServiceV2_ListMonitoredResourceDescriptors_async",
- "title": "logging listMonitoredResourceDescriptors Sample",
- "origin": "API_DEFINITION",
- "description": " Lists the descriptors for monitored resource types used by Logging.",
- "canonical": true,
- "file": "logging_service_v2.list_monitored_resource_descriptors.js",
- "language": "JAVASCRIPT",
- "segments": [
- {
- "start": 25,
- "end": 63,
- "type": "FULL"
- }
- ],
- "clientMethod": {
- "shortName": "ListMonitoredResourceDescriptors",
- "fullName": "google.logging.v2.LoggingServiceV2.ListMonitoredResourceDescriptors",
- "async": true,
- "parameters": [
- {
- "name": "page_size",
- "type": "TYPE_INT32"
- },
- {
- "name": "page_token",
- "type": "TYPE_STRING"
- }
- ],
- "resultType": ".google.logging.v2.ListMonitoredResourceDescriptorsResponse",
- "client": {
- "shortName": "LoggingServiceV2Client",
- "fullName": "google.logging.v2.LoggingServiceV2Client"
- },
- "method": {
- "shortName": "ListMonitoredResourceDescriptors",
- "fullName": "google.logging.v2.LoggingServiceV2.ListMonitoredResourceDescriptors",
- "service": {
- "shortName": "LoggingServiceV2",
- "fullName": "google.logging.v2.LoggingServiceV2"
- }
- }
- }
+ "start": 25,
+ "end": 63,
+ "type": "FULL"
+ }
+ ],
+ "clientMethod": {
+ "shortName": "ListMonitoredResourceDescriptors",
+ "fullName": "google.logging.v2.LoggingServiceV2.ListMonitoredResourceDescriptors",
+ "async": true,
+ "parameters": [
+ {
+ "name": "page_size",
+ "type": "TYPE_INT32"
+ },
+ {
+ "name": "page_token",
+ "type": "TYPE_STRING"
+ }
+ ],
+ "resultType": ".google.logging.v2.ListMonitoredResourceDescriptorsResponse",
+ "client": {
+ "shortName": "LoggingServiceV2Client",
+ "fullName": "google.logging.v2.LoggingServiceV2Client"
},
+ "method": {
+ "shortName": "ListMonitoredResourceDescriptors",
+ "fullName": "google.logging.v2.LoggingServiceV2.ListMonitoredResourceDescriptors",
+ "service": {
+ "shortName": "LoggingServiceV2",
+ "fullName": "google.logging.v2.LoggingServiceV2"
+ }
+ }
+ }
+ },
+ {
+ "regionTag": "logging_v2_generated_LoggingServiceV2_ListLogs_async",
+ "title": "logging listLogs Sample",
+ "origin": "API_DEFINITION",
+ "description": " Lists the logs in projects, organizations, folders, or billing accounts. Only logs that have entries are listed.",
+ "canonical": true,
+ "file": "logging_service_v2.list_logs.js",
+ "language": "JAVASCRIPT",
+ "segments": [
{
- "regionTag": "logging_v2_generated_LoggingServiceV2_ListLogs_async",
- "title": "logging listLogs Sample",
- "origin": "API_DEFINITION",
- "description": " Lists the logs in projects, organizations, folders, or billing accounts. Only logs that have entries are listed.",
- "canonical": true,
- "file": "logging_service_v2.list_logs.js",
- "language": "JAVASCRIPT",
- "segments": [
- {
- "start": 25,
- "end": 86,
- "type": "FULL"
- }
- ],
- "clientMethod": {
- "shortName": "ListLogs",
- "fullName": "google.logging.v2.LoggingServiceV2.ListLogs",
- "async": true,
- "parameters": [
- {
- "name": "parent",
- "type": "TYPE_STRING"
- },
- {
- "name": "resource_names",
- "type": "TYPE_STRING[]"
- },
- {
- "name": "page_size",
- "type": "TYPE_INT32"
- },
- {
- "name": "page_token",
- "type": "TYPE_STRING"
- }
- ],
- "resultType": ".google.logging.v2.ListLogsResponse",
- "client": {
- "shortName": "LoggingServiceV2Client",
- "fullName": "google.logging.v2.LoggingServiceV2Client"
- },
- "method": {
- "shortName": "ListLogs",
- "fullName": "google.logging.v2.LoggingServiceV2.ListLogs",
- "service": {
- "shortName": "LoggingServiceV2",
- "fullName": "google.logging.v2.LoggingServiceV2"
- }
- }
- }
+ "start": 25,
+ "end": 86,
+ "type": "FULL"
+ }
+ ],
+ "clientMethod": {
+ "shortName": "ListLogs",
+ "fullName": "google.logging.v2.LoggingServiceV2.ListLogs",
+ "async": true,
+ "parameters": [
+ {
+ "name": "parent",
+ "type": "TYPE_STRING"
+ },
+ {
+ "name": "resource_names",
+ "type": "TYPE_STRING[]"
+ },
+ {
+ "name": "page_size",
+ "type": "TYPE_INT32"
+ },
+ {
+ "name": "page_token",
+ "type": "TYPE_STRING"
+ }
+ ],
+ "resultType": ".google.logging.v2.ListLogsResponse",
+ "client": {
+ "shortName": "LoggingServiceV2Client",
+ "fullName": "google.logging.v2.LoggingServiceV2Client"
},
+ "method": {
+ "shortName": "ListLogs",
+ "fullName": "google.logging.v2.LoggingServiceV2.ListLogs",
+ "service": {
+ "shortName": "LoggingServiceV2",
+ "fullName": "google.logging.v2.LoggingServiceV2"
+ }
+ }
+ }
+ },
+ {
+ "regionTag": "logging_v2_generated_LoggingServiceV2_TailLogEntries_async",
+ "title": "logging tailLogEntries Sample",
+ "origin": "API_DEFINITION",
+ "description": " Streaming read of log entries as they are ingested. Until the stream is terminated, it will continue reading logs.",
+ "canonical": true,
+ "file": "logging_service_v2.tail_log_entries.js",
+ "language": "JAVASCRIPT",
+ "segments": [
{
- "regionTag": "logging_v2_generated_LoggingServiceV2_TailLogEntries_async",
- "title": "logging tailLogEntries Sample",
- "origin": "API_DEFINITION",
- "description": " Streaming read of log entries as they are ingested. Until the stream is terminated, it will continue reading logs.",
- "canonical": true,
- "file": "logging_service_v2.tail_log_entries.js",
- "language": "JAVASCRIPT",
- "segments": [
- {
- "start": 25,
- "end": 81,
- "type": "FULL"
- }
- ],
- "clientMethod": {
- "shortName": "TailLogEntries",
- "fullName": "google.logging.v2.LoggingServiceV2.TailLogEntries",
- "async": true,
- "parameters": [
- {
- "name": "resource_names",
- "type": "TYPE_STRING[]"
- },
- {
- "name": "filter",
- "type": "TYPE_STRING"
- },
- {
- "name": "buffer_window",
- "type": ".google.protobuf.Duration"
- }
- ],
- "resultType": ".google.logging.v2.TailLogEntriesResponse",
- "client": {
- "shortName": "LoggingServiceV2Client",
- "fullName": "google.logging.v2.LoggingServiceV2Client"
- },
- "method": {
- "shortName": "TailLogEntries",
- "fullName": "google.logging.v2.LoggingServiceV2.TailLogEntries",
- "service": {
- "shortName": "LoggingServiceV2",
- "fullName": "google.logging.v2.LoggingServiceV2"
- }
- }
- }
+ "start": 25,
+ "end": 81,
+ "type": "FULL"
+ }
+ ],
+ "clientMethod": {
+ "shortName": "TailLogEntries",
+ "fullName": "google.logging.v2.LoggingServiceV2.TailLogEntries",
+ "async": true,
+ "parameters": [
+ {
+ "name": "resource_names",
+ "type": "TYPE_STRING[]"
+ },
+ {
+ "name": "filter",
+ "type": "TYPE_STRING"
+ },
+ {
+ "name": "buffer_window",
+ "type": ".google.protobuf.Duration"
+ }
+ ],
+ "resultType": ".google.logging.v2.TailLogEntriesResponse",
+ "client": {
+ "shortName": "LoggingServiceV2Client",
+ "fullName": "google.logging.v2.LoggingServiceV2Client"
},
+ "method": {
+ "shortName": "TailLogEntries",
+ "fullName": "google.logging.v2.LoggingServiceV2.TailLogEntries",
+ "service": {
+ "shortName": "LoggingServiceV2",
+ "fullName": "google.logging.v2.LoggingServiceV2"
+ }
+ }
+ }
+ },
+ {
+ "regionTag": "logging_v2_generated_MetricsServiceV2_ListLogMetrics_async",
+ "title": "logging listLogMetrics Sample",
+ "origin": "API_DEFINITION",
+ "description": " Lists logs-based metrics.",
+ "canonical": true,
+ "file": "metrics_service_v2.list_log_metrics.js",
+ "language": "JAVASCRIPT",
+ "segments": [
{
- "regionTag": "logging_v2_generated_MetricsServiceV2_ListLogMetrics_async",
- "title": "logging listLogMetrics Sample",
- "origin": "API_DEFINITION",
- "description": " Lists logs-based metrics.",
- "canonical": true,
- "file": "metrics_service_v2.list_log_metrics.js",
- "language": "JAVASCRIPT",
- "segments": [
- {
- "start": 25,
- "end": 69,
- "type": "FULL"
- }
- ],
- "clientMethod": {
- "shortName": "ListLogMetrics",
- "fullName": "google.logging.v2.MetricsServiceV2.ListLogMetrics",
- "async": true,
- "parameters": [
- {
- "name": "parent",
- "type": "TYPE_STRING"
- },
- {
- "name": "page_token",
- "type": "TYPE_STRING"
- },
- {
- "name": "page_size",
- "type": "TYPE_INT32"
- }
- ],
- "resultType": ".google.logging.v2.ListLogMetricsResponse",
- "client": {
- "shortName": "MetricsServiceV2Client",
- "fullName": "google.logging.v2.MetricsServiceV2Client"
- },
- "method": {
- "shortName": "ListLogMetrics",
- "fullName": "google.logging.v2.MetricsServiceV2.ListLogMetrics",
- "service": {
- "shortName": "MetricsServiceV2",
- "fullName": "google.logging.v2.MetricsServiceV2"
- }
- }
- }
+ "start": 25,
+ "end": 69,
+ "type": "FULL"
+ }
+ ],
+ "clientMethod": {
+ "shortName": "ListLogMetrics",
+ "fullName": "google.logging.v2.MetricsServiceV2.ListLogMetrics",
+ "async": true,
+ "parameters": [
+ {
+ "name": "parent",
+ "type": "TYPE_STRING"
+ },
+ {
+ "name": "page_token",
+ "type": "TYPE_STRING"
+ },
+ {
+ "name": "page_size",
+ "type": "TYPE_INT32"
+ }
+ ],
+ "resultType": ".google.logging.v2.ListLogMetricsResponse",
+ "client": {
+ "shortName": "MetricsServiceV2Client",
+ "fullName": "google.logging.v2.MetricsServiceV2Client"
},
+ "method": {
+ "shortName": "ListLogMetrics",
+ "fullName": "google.logging.v2.MetricsServiceV2.ListLogMetrics",
+ "service": {
+ "shortName": "MetricsServiceV2",
+ "fullName": "google.logging.v2.MetricsServiceV2"
+ }
+ }
+ }
+ },
+ {
+ "regionTag": "logging_v2_generated_MetricsServiceV2_GetLogMetric_async",
+ "title": "logging getLogMetric Sample",
+ "origin": "API_DEFINITION",
+ "description": " Gets a logs-based metric.",
+ "canonical": true,
+ "file": "metrics_service_v2.get_log_metric.js",
+ "language": "JAVASCRIPT",
+ "segments": [
{
- "regionTag": "logging_v2_generated_MetricsServiceV2_GetLogMetric_async",
- "title": "logging getLogMetric Sample",
- "origin": "API_DEFINITION",
- "description": " Gets a logs-based metric.",
- "canonical": true,
- "file": "metrics_service_v2.get_log_metric.js",
- "language": "JAVASCRIPT",
- "segments": [
- {
- "start": 25,
- "end": 54,
- "type": "FULL"
- }
- ],
- "clientMethod": {
- "shortName": "GetLogMetric",
- "fullName": "google.logging.v2.MetricsServiceV2.GetLogMetric",
- "async": true,
- "parameters": [
- {
- "name": "metric_name",
- "type": "TYPE_STRING"
- }
- ],
- "resultType": ".google.logging.v2.LogMetric",
- "client": {
- "shortName": "MetricsServiceV2Client",
- "fullName": "google.logging.v2.MetricsServiceV2Client"
- },
- "method": {
- "shortName": "GetLogMetric",
- "fullName": "google.logging.v2.MetricsServiceV2.GetLogMetric",
- "service": {
- "shortName": "MetricsServiceV2",
- "fullName": "google.logging.v2.MetricsServiceV2"
- }
- }
- }
+ "start": 25,
+ "end": 54,
+ "type": "FULL"
+ }
+ ],
+ "clientMethod": {
+ "shortName": "GetLogMetric",
+ "fullName": "google.logging.v2.MetricsServiceV2.GetLogMetric",
+ "async": true,
+ "parameters": [
+ {
+ "name": "metric_name",
+ "type": "TYPE_STRING"
+ }
+ ],
+ "resultType": ".google.logging.v2.LogMetric",
+ "client": {
+ "shortName": "MetricsServiceV2Client",
+ "fullName": "google.logging.v2.MetricsServiceV2Client"
},
+ "method": {
+ "shortName": "GetLogMetric",
+ "fullName": "google.logging.v2.MetricsServiceV2.GetLogMetric",
+ "service": {
+ "shortName": "MetricsServiceV2",
+ "fullName": "google.logging.v2.MetricsServiceV2"
+ }
+ }
+ }
+ },
+ {
+ "regionTag": "logging_v2_generated_MetricsServiceV2_CreateLogMetric_async",
+ "title": "logging createLogMetric Sample",
+ "origin": "API_DEFINITION",
+ "description": " Creates a logs-based metric.",
+ "canonical": true,
+ "file": "metrics_service_v2.create_log_metric.js",
+ "language": "JAVASCRIPT",
+ "segments": [
{
- "regionTag": "logging_v2_generated_MetricsServiceV2_CreateLogMetric_async",
- "title": "logging createLogMetric Sample",
- "origin": "API_DEFINITION",
- "description": " Creates a logs-based metric.",
- "canonical": true,
- "file": "metrics_service_v2.create_log_metric.js",
- "language": "JAVASCRIPT",
- "segments": [
- {
- "start": 25,
- "end": 61,
- "type": "FULL"
- }
- ],
- "clientMethod": {
- "shortName": "CreateLogMetric",
- "fullName": "google.logging.v2.MetricsServiceV2.CreateLogMetric",
- "async": true,
- "parameters": [
- {
- "name": "parent",
- "type": "TYPE_STRING"
- },
- {
- "name": "metric",
- "type": ".google.logging.v2.LogMetric"
- }
- ],
- "resultType": ".google.logging.v2.LogMetric",
- "client": {
- "shortName": "MetricsServiceV2Client",
- "fullName": "google.logging.v2.MetricsServiceV2Client"
- },
- "method": {
- "shortName": "CreateLogMetric",
- "fullName": "google.logging.v2.MetricsServiceV2.CreateLogMetric",
- "service": {
- "shortName": "MetricsServiceV2",
- "fullName": "google.logging.v2.MetricsServiceV2"
- }
- }
- }
+ "start": 25,
+ "end": 61,
+ "type": "FULL"
+ }
+ ],
+ "clientMethod": {
+ "shortName": "CreateLogMetric",
+ "fullName": "google.logging.v2.MetricsServiceV2.CreateLogMetric",
+ "async": true,
+ "parameters": [
+ {
+ "name": "parent",
+ "type": "TYPE_STRING"
+ },
+ {
+ "name": "metric",
+ "type": ".google.logging.v2.LogMetric"
+ }
+ ],
+ "resultType": ".google.logging.v2.LogMetric",
+ "client": {
+ "shortName": "MetricsServiceV2Client",
+ "fullName": "google.logging.v2.MetricsServiceV2Client"
},
+ "method": {
+ "shortName": "CreateLogMetric",
+ "fullName": "google.logging.v2.MetricsServiceV2.CreateLogMetric",
+ "service": {
+ "shortName": "MetricsServiceV2",
+ "fullName": "google.logging.v2.MetricsServiceV2"
+ }
+ }
+ }
+ },
+ {
+ "regionTag": "logging_v2_generated_MetricsServiceV2_UpdateLogMetric_async",
+ "title": "logging updateLogMetric Sample",
+ "origin": "API_DEFINITION",
+ "description": " Creates or updates a logs-based metric.",
+ "canonical": true,
+ "file": "metrics_service_v2.update_log_metric.js",
+ "language": "JAVASCRIPT",
+ "segments": [
{
- "regionTag": "logging_v2_generated_MetricsServiceV2_UpdateLogMetric_async",
- "title": "logging updateLogMetric Sample",
- "origin": "API_DEFINITION",
- "description": " Creates or updates a logs-based metric.",
- "canonical": true,
- "file": "metrics_service_v2.update_log_metric.js",
- "language": "JAVASCRIPT",
- "segments": [
- {
- "start": 25,
- "end": 62,
- "type": "FULL"
- }
- ],
- "clientMethod": {
- "shortName": "UpdateLogMetric",
- "fullName": "google.logging.v2.MetricsServiceV2.UpdateLogMetric",
- "async": true,
- "parameters": [
- {
- "name": "metric_name",
- "type": "TYPE_STRING"
- },
- {
- "name": "metric",
- "type": ".google.logging.v2.LogMetric"
- }
- ],
- "resultType": ".google.logging.v2.LogMetric",
- "client": {
- "shortName": "MetricsServiceV2Client",
- "fullName": "google.logging.v2.MetricsServiceV2Client"
- },
- "method": {
- "shortName": "UpdateLogMetric",
- "fullName": "google.logging.v2.MetricsServiceV2.UpdateLogMetric",
- "service": {
- "shortName": "MetricsServiceV2",
- "fullName": "google.logging.v2.MetricsServiceV2"
- }
- }
- }
+ "start": 25,
+ "end": 62,
+ "type": "FULL"
+ }
+ ],
+ "clientMethod": {
+ "shortName": "UpdateLogMetric",
+ "fullName": "google.logging.v2.MetricsServiceV2.UpdateLogMetric",
+ "async": true,
+ "parameters": [
+ {
+ "name": "metric_name",
+ "type": "TYPE_STRING"
+ },
+ {
+ "name": "metric",
+ "type": ".google.logging.v2.LogMetric"
+ }
+ ],
+ "resultType": ".google.logging.v2.LogMetric",
+ "client": {
+ "shortName": "MetricsServiceV2Client",
+ "fullName": "google.logging.v2.MetricsServiceV2Client"
},
+ "method": {
+ "shortName": "UpdateLogMetric",
+ "fullName": "google.logging.v2.MetricsServiceV2.UpdateLogMetric",
+ "service": {
+ "shortName": "MetricsServiceV2",
+ "fullName": "google.logging.v2.MetricsServiceV2"
+ }
+ }
+ }
+ },
+ {
+ "regionTag": "logging_v2_generated_MetricsServiceV2_DeleteLogMetric_async",
+ "title": "logging deleteLogMetric Sample",
+ "origin": "API_DEFINITION",
+ "description": " Deletes a logs-based metric.",
+ "canonical": true,
+ "file": "metrics_service_v2.delete_log_metric.js",
+ "language": "JAVASCRIPT",
+ "segments": [
{
- "regionTag": "logging_v2_generated_MetricsServiceV2_DeleteLogMetric_async",
- "title": "logging deleteLogMetric Sample",
- "origin": "API_DEFINITION",
- "description": " Deletes a logs-based metric.",
- "canonical": true,
- "file": "metrics_service_v2.delete_log_metric.js",
- "language": "JAVASCRIPT",
- "segments": [
- {
- "start": 25,
- "end": 54,
- "type": "FULL"
- }
- ],
- "clientMethod": {
- "shortName": "DeleteLogMetric",
- "fullName": "google.logging.v2.MetricsServiceV2.DeleteLogMetric",
- "async": true,
- "parameters": [
- {
- "name": "metric_name",
- "type": "TYPE_STRING"
- }
- ],
- "resultType": ".google.protobuf.Empty",
- "client": {
- "shortName": "MetricsServiceV2Client",
- "fullName": "google.logging.v2.MetricsServiceV2Client"
- },
- "method": {
- "shortName": "DeleteLogMetric",
- "fullName": "google.logging.v2.MetricsServiceV2.DeleteLogMetric",
- "service": {
- "shortName": "MetricsServiceV2",
- "fullName": "google.logging.v2.MetricsServiceV2"
- }
- }
- }
+ "start": 25,
+ "end": 54,
+ "type": "FULL"
}
- ]
-}
\ No newline at end of file
+ ],
+ "clientMethod": {
+ "shortName": "DeleteLogMetric",
+ "fullName": "google.logging.v2.MetricsServiceV2.DeleteLogMetric",
+ "async": true,
+ "parameters": [
+ {
+ "name": "metric_name",
+ "type": "TYPE_STRING"
+ }
+ ],
+ "resultType": ".google.protobuf.Empty",
+ "client": {
+ "shortName": "MetricsServiceV2Client",
+ "fullName": "google.logging.v2.MetricsServiceV2Client"
+ },
+ "method": {
+ "shortName": "DeleteLogMetric",
+ "fullName": "google.logging.v2.MetricsServiceV2.DeleteLogMetric",
+ "service": {
+ "shortName": "MetricsServiceV2",
+ "fullName": "google.logging.v2.MetricsServiceV2"
+ }
+ }
+ }
+ }
+ ]
+}
diff --git a/handwritten/logging/src/v2/config_service_v2_client.ts b/handwritten/logging/src/v2/config_service_v2_client.ts
index 11d0e92a5fd3..64450c3c4734 100644
--- a/handwritten/logging/src/v2/config_service_v2_client.ts
+++ b/handwritten/logging/src/v2/config_service_v2_client.ts
@@ -1,4 +1,4 @@
-// Copyright 2024 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -28,9 +28,10 @@ import type {
PaginationCallback,
GaxCall,
} from 'google-gax';
-import {Transform} from 'stream';
+import { Transform } from 'stream';
import * as protos from '../../protos/protos';
import jsonProtos = require('../../protos/protos.json');
+import { loggingUtils as logging, decodeAnyProtosInArray } from 'google-gax';
/**
* Client JSON configuration object, loaded from
@@ -52,9 +53,11 @@ export class ConfigServiceV2Client {
private _gaxModule: typeof gax | typeof gax.fallback;
private _gaxGrpc: gax.GrpcClient | gax.fallback.GrpcClient;
private _protos: {};
- private _defaults: {[method: string]: gax.CallSettings};
+ private _defaults: { [method: string]: gax.CallSettings };
private _universeDomain: string;
private _servicePath: string;
+ private _log = logging.log('logging');
+
auth: gax.GoogleAuth;
descriptors: Descriptors = {
page: {},
@@ -63,10 +66,10 @@ export class ConfigServiceV2Client {
batching: {},
};
warn: (code: string, message: string, warnType?: string) => void;
- innerApiCalls: {[name: string]: Function};
- pathTemplates: {[name: string]: gax.PathTemplate};
+ innerApiCalls: { [name: string]: Function };
+ pathTemplates: { [name: string]: gax.PathTemplate };
operationsClient: gax.OperationsClient;
- configServiceV2Stub?: Promise<{[name: string]: Function}>;
+ configServiceV2Stub?: Promise<{ [name: string]: Function }>;
/**
* Construct an instance of ConfigServiceV2Client.
@@ -90,7 +93,7 @@ export class ConfigServiceV2Client {
* Developer's Console, e.g. 'grape-spaceship-123'. We will also check
* the environment variable GCLOUD_PROJECT for your project ID. If your
* app is running in an environment which supports
- * {@link https://developers.google.com/identity/protocols/application-default-credentials Application Default Credentials},
+ * {@link https://cloud.google.com/docs/authentication/application-default-credentials Application Default Credentials},
* your project ID will be detected automatically.
* @param {string} [options.apiEndpoint] - The domain name of the
* API remote host.
@@ -142,7 +145,7 @@ export class ConfigServiceV2Client {
const fallback =
opts?.fallback ??
(typeof window !== 'undefined' && typeof window?.fetch === 'function');
- opts = Object.assign({servicePath, port, clientConfig, fallback}, opts);
+ opts = Object.assign({ servicePath, port, clientConfig, fallback }, opts);
// Request numeric enum values if REST transport is used.
opts.numericEnums = true;
@@ -345,7 +348,7 @@ export class ConfigServiceV2Client {
),
};
- const protoFilesRoot = this._gaxModule.protobuf.Root.fromJSON(jsonProtos);
+ const protoFilesRoot = this._gaxModule.protobufFromJSON(jsonProtos);
// This API contains "long-running operations", which return a
// an Operation object that allows for tracking of the operation,
// rather than holding a request open.
@@ -383,20 +386,20 @@ export class ConfigServiceV2Client {
selector: 'google.longrunning.Operations.GetOperation',
get: '/v2/{name=*/*/locations/*/operations/*}',
additional_bindings: [
- {get: '/v2/{name=projects/*/locations/*/operations/*}'},
- {get: '/v2/{name=organizations/*/locations/*/operations/*}'},
- {get: '/v2/{name=folders/*/locations/*/operations/*}'},
- {get: '/v2/{name=billingAccounts/*/locations/*/operations/*}'},
+ { get: '/v2/{name=projects/*/locations/*/operations/*}' },
+ { get: '/v2/{name=organizations/*/locations/*/operations/*}' },
+ { get: '/v2/{name=folders/*/locations/*/operations/*}' },
+ { get: '/v2/{name=billingAccounts/*/locations/*/operations/*}' },
],
},
{
selector: 'google.longrunning.Operations.ListOperations',
get: '/v2/{name=*/*/locations/*}/operations',
additional_bindings: [
- {get: '/v2/{name=projects/*/locations/*}/operations'},
- {get: '/v2/{name=organizations/*/locations/*}/operations'},
- {get: '/v2/{name=folders/*/locations/*}/operations'},
- {get: '/v2/{name=billingAccounts/*/locations/*}/operations'},
+ { get: '/v2/{name=projects/*/locations/*}/operations' },
+ { get: '/v2/{name=organizations/*/locations/*}/operations' },
+ { get: '/v2/{name=folders/*/locations/*}/operations' },
+ { get: '/v2/{name=billingAccounts/*/locations/*}/operations' },
],
},
];
@@ -468,7 +471,7 @@ export class ConfigServiceV2Client {
'google.logging.v2.ConfigServiceV2',
gapicConfig as gax.ClientConfig,
opts.clientConfig || {},
- {'x-goog-api-client': clientHeader.join(' ')},
+ { 'x-goog-api-client': clientHeader.join(' ') },
);
// Set up a dictionary of "inner API calls"; the core implementation
@@ -508,7 +511,7 @@ export class ConfigServiceV2Client {
(this._protos as any).google.logging.v2.ConfigServiceV2,
this._opts,
this._providedCustomServicePath,
- ) as Promise<{[method: string]: Function}>;
+ ) as Promise<{ [method: string]: Function }>;
// Iterate over each of the methods that the service provides
// and create an API call method for each.
@@ -548,7 +551,7 @@ export class ConfigServiceV2Client {
];
for (const methodName of configServiceV2StubMethods) {
const callPromise = this.configServiceV2Stub.then(
- stub =>
+ (stub) =>
(...args: Array<{}>) => {
if (this._terminated) {
return Promise.reject('The client has already been closed.');
@@ -755,8 +758,50 @@ export class ConfigServiceV2Client {
this._gaxModule.routingHeader.fromParams({
name: request.name ?? '',
});
- this.initialize();
- return this.innerApiCalls.getBucket(request, options, callback);
+ this.initialize().catch((err) => {
+ throw err;
+ });
+ this._log.info('getBucket request %j', request);
+ const wrappedCallback:
+ | Callback<
+ protos.google.logging.v2.ILogBucket,
+ protos.google.logging.v2.IGetBucketRequest | null | undefined,
+ {} | null | undefined
+ >
+ | undefined = callback
+ ? (error, response, options, rawResponse) => {
+ this._log.info('getBucket response %j', response);
+ callback!(error, response, options, rawResponse); // We verified callback above.
+ }
+ : undefined;
+ return this.innerApiCalls
+ .getBucket(request, options, wrappedCallback)
+ ?.then(
+ ([response, options, rawResponse]: [
+ protos.google.logging.v2.ILogBucket,
+ protos.google.logging.v2.IGetBucketRequest | undefined,
+ {} | undefined,
+ ]) => {
+ this._log.info('getBucket response %j', response);
+ return [response, options, rawResponse];
+ },
+ )
+ .catch((error: any) => {
+ if (
+ error &&
+ 'statusDetails' in error &&
+ error.statusDetails instanceof Array
+ ) {
+ const protos = this._gaxModule.protobuf.Root.fromJSON(
+ jsonProtos,
+ ) as unknown as gax.protobuf.Type;
+ error.statusDetails = decodeAnyProtosInArray(
+ error.statusDetails,
+ protos,
+ );
+ }
+ throw error;
+ });
}
/**
* Creates a log bucket that can be used to store log entries. After a bucket
@@ -852,8 +897,50 @@ export class ConfigServiceV2Client {
this._gaxModule.routingHeader.fromParams({
parent: request.parent ?? '',
});
- this.initialize();
- return this.innerApiCalls.createBucket(request, options, callback);
+ this.initialize().catch((err) => {
+ throw err;
+ });
+ this._log.info('createBucket request %j', request);
+ const wrappedCallback:
+ | Callback<
+ protos.google.logging.v2.ILogBucket,
+ protos.google.logging.v2.ICreateBucketRequest | null | undefined,
+ {} | null | undefined
+ >
+ | undefined = callback
+ ? (error, response, options, rawResponse) => {
+ this._log.info('createBucket response %j', response);
+ callback!(error, response, options, rawResponse); // We verified callback above.
+ }
+ : undefined;
+ return this.innerApiCalls
+ .createBucket(request, options, wrappedCallback)
+ ?.then(
+ ([response, options, rawResponse]: [
+ protos.google.logging.v2.ILogBucket,
+ protos.google.logging.v2.ICreateBucketRequest | undefined,
+ {} | undefined,
+ ]) => {
+ this._log.info('createBucket response %j', response);
+ return [response, options, rawResponse];
+ },
+ )
+ .catch((error: any) => {
+ if (
+ error &&
+ 'statusDetails' in error &&
+ error.statusDetails instanceof Array
+ ) {
+ const protos = this._gaxModule.protobuf.Root.fromJSON(
+ jsonProtos,
+ ) as unknown as gax.protobuf.Type;
+ error.statusDetails = decodeAnyProtosInArray(
+ error.statusDetails,
+ protos,
+ );
+ }
+ throw error;
+ });
}
/**
* Updates a log bucket.
@@ -959,8 +1046,50 @@ export class ConfigServiceV2Client {
this._gaxModule.routingHeader.fromParams({
name: request.name ?? '',
});
- this.initialize();
- return this.innerApiCalls.updateBucket(request, options, callback);
+ this.initialize().catch((err) => {
+ throw err;
+ });
+ this._log.info('updateBucket request %j', request);
+ const wrappedCallback:
+ | Callback<
+ protos.google.logging.v2.ILogBucket,
+ protos.google.logging.v2.IUpdateBucketRequest | null | undefined,
+ {} | null | undefined
+ >
+ | undefined = callback
+ ? (error, response, options, rawResponse) => {
+ this._log.info('updateBucket response %j', response);
+ callback!(error, response, options, rawResponse); // We verified callback above.
+ }
+ : undefined;
+ return this.innerApiCalls
+ .updateBucket(request, options, wrappedCallback)
+ ?.then(
+ ([response, options, rawResponse]: [
+ protos.google.logging.v2.ILogBucket,
+ protos.google.logging.v2.IUpdateBucketRequest | undefined,
+ {} | undefined,
+ ]) => {
+ this._log.info('updateBucket response %j', response);
+ return [response, options, rawResponse];
+ },
+ )
+ .catch((error: any) => {
+ if (
+ error &&
+ 'statusDetails' in error &&
+ error.statusDetails instanceof Array
+ ) {
+ const protos = this._gaxModule.protobuf.Root.fromJSON(
+ jsonProtos,
+ ) as unknown as gax.protobuf.Type;
+ error.statusDetails = decodeAnyProtosInArray(
+ error.statusDetails,
+ protos,
+ );
+ }
+ throw error;
+ });
}
/**
* Deletes a log bucket.
@@ -1054,8 +1183,50 @@ export class ConfigServiceV2Client {
this._gaxModule.routingHeader.fromParams({
name: request.name ?? '',
});
- this.initialize();
- return this.innerApiCalls.deleteBucket(request, options, callback);
+ this.initialize().catch((err) => {
+ throw err;
+ });
+ this._log.info('deleteBucket request %j', request);
+ const wrappedCallback:
+ | Callback<
+ protos.google.protobuf.IEmpty,
+ protos.google.logging.v2.IDeleteBucketRequest | null | undefined,
+ {} | null | undefined
+ >
+ | undefined = callback
+ ? (error, response, options, rawResponse) => {
+ this._log.info('deleteBucket response %j', response);
+ callback!(error, response, options, rawResponse); // We verified callback above.
+ }
+ : undefined;
+ return this.innerApiCalls
+ .deleteBucket(request, options, wrappedCallback)
+ ?.then(
+ ([response, options, rawResponse]: [
+ protos.google.protobuf.IEmpty,
+ protos.google.logging.v2.IDeleteBucketRequest | undefined,
+ {} | undefined,
+ ]) => {
+ this._log.info('deleteBucket response %j', response);
+ return [response, options, rawResponse];
+ },
+ )
+ .catch((error: any) => {
+ if (
+ error &&
+ 'statusDetails' in error &&
+ error.statusDetails instanceof Array
+ ) {
+ const protos = this._gaxModule.protobuf.Root.fromJSON(
+ jsonProtos,
+ ) as unknown as gax.protobuf.Type;
+ error.statusDetails = decodeAnyProtosInArray(
+ error.statusDetails,
+ protos,
+ );
+ }
+ throw error;
+ });
}
/**
* Undeletes a log bucket. A bucket that has been deleted can be undeleted
@@ -1146,8 +1317,50 @@ export class ConfigServiceV2Client {
this._gaxModule.routingHeader.fromParams({
name: request.name ?? '',
});
- this.initialize();
- return this.innerApiCalls.undeleteBucket(request, options, callback);
+ this.initialize().catch((err) => {
+ throw err;
+ });
+ this._log.info('undeleteBucket request %j', request);
+ const wrappedCallback:
+ | Callback<
+ protos.google.protobuf.IEmpty,
+ protos.google.logging.v2.IUndeleteBucketRequest | null | undefined,
+ {} | null | undefined
+ >
+ | undefined = callback
+ ? (error, response, options, rawResponse) => {
+ this._log.info('undeleteBucket response %j', response);
+ callback!(error, response, options, rawResponse); // We verified callback above.
+ }
+ : undefined;
+ return this.innerApiCalls
+ .undeleteBucket(request, options, wrappedCallback)
+ ?.then(
+ ([response, options, rawResponse]: [
+ protos.google.protobuf.IEmpty,
+ protos.google.logging.v2.IUndeleteBucketRequest | undefined,
+ {} | undefined,
+ ]) => {
+ this._log.info('undeleteBucket response %j', response);
+ return [response, options, rawResponse];
+ },
+ )
+ .catch((error: any) => {
+ if (
+ error &&
+ 'statusDetails' in error &&
+ error.statusDetails instanceof Array
+ ) {
+ const protos = this._gaxModule.protobuf.Root.fromJSON(
+ jsonProtos,
+ ) as unknown as gax.protobuf.Type;
+ error.statusDetails = decodeAnyProtosInArray(
+ error.statusDetails,
+ protos,
+ );
+ }
+ throw error;
+ });
}
/**
* Gets a view on a log bucket..
@@ -1234,8 +1447,50 @@ export class ConfigServiceV2Client {
this._gaxModule.routingHeader.fromParams({
name: request.name ?? '',
});
- this.initialize();
- return this.innerApiCalls.getView(request, options, callback);
+ this.initialize().catch((err) => {
+ throw err;
+ });
+ this._log.info('getView request %j', request);
+ const wrappedCallback:
+ | Callback<
+ protos.google.logging.v2.ILogView,
+ protos.google.logging.v2.IGetViewRequest | null | undefined,
+ {} | null | undefined
+ >
+ | undefined = callback
+ ? (error, response, options, rawResponse) => {
+ this._log.info('getView response %j', response);
+ callback!(error, response, options, rawResponse); // We verified callback above.
+ }
+ : undefined;
+ return this.innerApiCalls
+ .getView(request, options, wrappedCallback)
+ ?.then(
+ ([response, options, rawResponse]: [
+ protos.google.logging.v2.ILogView,
+ protos.google.logging.v2.IGetViewRequest | undefined,
+ {} | undefined,
+ ]) => {
+ this._log.info('getView response %j', response);
+ return [response, options, rawResponse];
+ },
+ )
+ .catch((error: any) => {
+ if (
+ error &&
+ 'statusDetails' in error &&
+ error.statusDetails instanceof Array
+ ) {
+ const protos = this._gaxModule.protobuf.Root.fromJSON(
+ jsonProtos,
+ ) as unknown as gax.protobuf.Type;
+ error.statusDetails = decodeAnyProtosInArray(
+ error.statusDetails,
+ protos,
+ );
+ }
+ throw error;
+ });
}
/**
* Creates a view over log entries in a log bucket. A bucket may contain a
@@ -1329,8 +1584,50 @@ export class ConfigServiceV2Client {
this._gaxModule.routingHeader.fromParams({
parent: request.parent ?? '',
});
- this.initialize();
- return this.innerApiCalls.createView(request, options, callback);
+ this.initialize().catch((err) => {
+ throw err;
+ });
+ this._log.info('createView request %j', request);
+ const wrappedCallback:
+ | Callback<
+ protos.google.logging.v2.ILogView,
+ protos.google.logging.v2.ICreateViewRequest | null | undefined,
+ {} | null | undefined
+ >
+ | undefined = callback
+ ? (error, response, options, rawResponse) => {
+ this._log.info('createView response %j', response);
+ callback!(error, response, options, rawResponse); // We verified callback above.
+ }
+ : undefined;
+ return this.innerApiCalls
+ .createView(request, options, wrappedCallback)
+ ?.then(
+ ([response, options, rawResponse]: [
+ protos.google.logging.v2.ILogView,
+ protos.google.logging.v2.ICreateViewRequest | undefined,
+ {} | undefined,
+ ]) => {
+ this._log.info('createView response %j', response);
+ return [response, options, rawResponse];
+ },
+ )
+ .catch((error: any) => {
+ if (
+ error &&
+ 'statusDetails' in error &&
+ error.statusDetails instanceof Array
+ ) {
+ const protos = this._gaxModule.protobuf.Root.fromJSON(
+ jsonProtos,
+ ) as unknown as gax.protobuf.Type;
+ error.statusDetails = decodeAnyProtosInArray(
+ error.statusDetails,
+ protos,
+ );
+ }
+ throw error;
+ });
}
/**
* Updates a view on a log bucket. This method replaces the following fields
@@ -1432,8 +1729,50 @@ export class ConfigServiceV2Client {
this._gaxModule.routingHeader.fromParams({
name: request.name ?? '',
});
- this.initialize();
- return this.innerApiCalls.updateView(request, options, callback);
+ this.initialize().catch((err) => {
+ throw err;
+ });
+ this._log.info('updateView request %j', request);
+ const wrappedCallback:
+ | Callback<
+ protos.google.logging.v2.ILogView,
+ protos.google.logging.v2.IUpdateViewRequest | null | undefined,
+ {} | null | undefined
+ >
+ | undefined = callback
+ ? (error, response, options, rawResponse) => {
+ this._log.info('updateView response %j', response);
+ callback!(error, response, options, rawResponse); // We verified callback above.
+ }
+ : undefined;
+ return this.innerApiCalls
+ .updateView(request, options, wrappedCallback)
+ ?.then(
+ ([response, options, rawResponse]: [
+ protos.google.logging.v2.ILogView,
+ protos.google.logging.v2.IUpdateViewRequest | undefined,
+ {} | undefined,
+ ]) => {
+ this._log.info('updateView response %j', response);
+ return [response, options, rawResponse];
+ },
+ )
+ .catch((error: any) => {
+ if (
+ error &&
+ 'statusDetails' in error &&
+ error.statusDetails instanceof Array
+ ) {
+ const protos = this._gaxModule.protobuf.Root.fromJSON(
+ jsonProtos,
+ ) as unknown as gax.protobuf.Type;
+ error.statusDetails = decodeAnyProtosInArray(
+ error.statusDetails,
+ protos,
+ );
+ }
+ throw error;
+ });
}
/**
* Deletes a view on a log bucket.
@@ -1523,8 +1862,50 @@ export class ConfigServiceV2Client {
this._gaxModule.routingHeader.fromParams({
name: request.name ?? '',
});
- this.initialize();
- return this.innerApiCalls.deleteView(request, options, callback);
+ this.initialize().catch((err) => {
+ throw err;
+ });
+ this._log.info('deleteView request %j', request);
+ const wrappedCallback:
+ | Callback<
+ protos.google.protobuf.IEmpty,
+ protos.google.logging.v2.IDeleteViewRequest | null | undefined,
+ {} | null | undefined
+ >
+ | undefined = callback
+ ? (error, response, options, rawResponse) => {
+ this._log.info('deleteView response %j', response);
+ callback!(error, response, options, rawResponse); // We verified callback above.
+ }
+ : undefined;
+ return this.innerApiCalls
+ .deleteView(request, options, wrappedCallback)
+ ?.then(
+ ([response, options, rawResponse]: [
+ protos.google.protobuf.IEmpty,
+ protos.google.logging.v2.IDeleteViewRequest | undefined,
+ {} | undefined,
+ ]) => {
+ this._log.info('deleteView response %j', response);
+ return [response, options, rawResponse];
+ },
+ )
+ .catch((error: any) => {
+ if (
+ error &&
+ 'statusDetails' in error &&
+ error.statusDetails instanceof Array
+ ) {
+ const protos = this._gaxModule.protobuf.Root.fromJSON(
+ jsonProtos,
+ ) as unknown as gax.protobuf.Type;
+ error.statusDetails = decodeAnyProtosInArray(
+ error.statusDetails,
+ protos,
+ );
+ }
+ throw error;
+ });
}
/**
* Gets a sink.
@@ -1614,8 +1995,50 @@ export class ConfigServiceV2Client {
this._gaxModule.routingHeader.fromParams({
sink_name: request.sinkName ?? '',
});
- this.initialize();
- return this.innerApiCalls.getSink(request, options, callback);
+ this.initialize().catch((err) => {
+ throw err;
+ });
+ this._log.info('getSink request %j', request);
+ const wrappedCallback:
+ | Callback<
+ protos.google.logging.v2.ILogSink,
+ protos.google.logging.v2.IGetSinkRequest | null | undefined,
+ {} | null | undefined
+ >
+ | undefined = callback
+ ? (error, response, options, rawResponse) => {
+ this._log.info('getSink response %j', response);
+ callback!(error, response, options, rawResponse); // We verified callback above.
+ }
+ : undefined;
+ return this.innerApiCalls
+ .getSink(request, options, wrappedCallback)
+ ?.then(
+ ([response, options, rawResponse]: [
+ protos.google.logging.v2.ILogSink,
+ protos.google.logging.v2.IGetSinkRequest | undefined,
+ {} | undefined,
+ ]) => {
+ this._log.info('getSink response %j', response);
+ return [response, options, rawResponse];
+ },
+ )
+ .catch((error: any) => {
+ if (
+ error &&
+ 'statusDetails' in error &&
+ error.statusDetails instanceof Array
+ ) {
+ const protos = this._gaxModule.protobuf.Root.fromJSON(
+ jsonProtos,
+ ) as unknown as gax.protobuf.Type;
+ error.statusDetails = decodeAnyProtosInArray(
+ error.statusDetails,
+ protos,
+ );
+ }
+ throw error;
+ });
}
/**
* Creates a sink that exports specified log entries to a destination. The
@@ -1725,8 +2148,50 @@ export class ConfigServiceV2Client {
this._gaxModule.routingHeader.fromParams({
parent: request.parent ?? '',
});
- this.initialize();
- return this.innerApiCalls.createSink(request, options, callback);
+ this.initialize().catch((err) => {
+ throw err;
+ });
+ this._log.info('createSink request %j', request);
+ const wrappedCallback:
+ | Callback<
+ protos.google.logging.v2.ILogSink,
+ protos.google.logging.v2.ICreateSinkRequest | null | undefined,
+ {} | null | undefined
+ >
+ | undefined = callback
+ ? (error, response, options, rawResponse) => {
+ this._log.info('createSink response %j', response);
+ callback!(error, response, options, rawResponse); // We verified callback above.
+ }
+ : undefined;
+ return this.innerApiCalls
+ .createSink(request, options, wrappedCallback)
+ ?.then(
+ ([response, options, rawResponse]: [
+ protos.google.logging.v2.ILogSink,
+ protos.google.logging.v2.ICreateSinkRequest | undefined,
+ {} | undefined,
+ ]) => {
+ this._log.info('createSink response %j', response);
+ return [response, options, rawResponse];
+ },
+ )
+ .catch((error: any) => {
+ if (
+ error &&
+ 'statusDetails' in error &&
+ error.statusDetails instanceof Array
+ ) {
+ const protos = this._gaxModule.protobuf.Root.fromJSON(
+ jsonProtos,
+ ) as unknown as gax.protobuf.Type;
+ error.statusDetails = decodeAnyProtosInArray(
+ error.statusDetails,
+ protos,
+ );
+ }
+ throw error;
+ });
}
/**
* Updates a sink. This method replaces the following fields in the existing
@@ -1853,8 +2318,50 @@ export class ConfigServiceV2Client {
this._gaxModule.routingHeader.fromParams({
sink_name: request.sinkName ?? '',
});
- this.initialize();
- return this.innerApiCalls.updateSink(request, options, callback);
+ this.initialize().catch((err) => {
+ throw err;
+ });
+ this._log.info('updateSink request %j', request);
+ const wrappedCallback:
+ | Callback<
+ protos.google.logging.v2.ILogSink,
+ protos.google.logging.v2.IUpdateSinkRequest | null | undefined,
+ {} | null | undefined
+ >
+ | undefined = callback
+ ? (error, response, options, rawResponse) => {
+ this._log.info('updateSink response %j', response);
+ callback!(error, response, options, rawResponse); // We verified callback above.
+ }
+ : undefined;
+ return this.innerApiCalls
+ .updateSink(request, options, wrappedCallback)
+ ?.then(
+ ([response, options, rawResponse]: [
+ protos.google.logging.v2.ILogSink,
+ protos.google.logging.v2.IUpdateSinkRequest | undefined,
+ {} | undefined,
+ ]) => {
+ this._log.info('updateSink response %j', response);
+ return [response, options, rawResponse];
+ },
+ )
+ .catch((error: any) => {
+ if (
+ error &&
+ 'statusDetails' in error &&
+ error.statusDetails instanceof Array
+ ) {
+ const protos = this._gaxModule.protobuf.Root.fromJSON(
+ jsonProtos,
+ ) as unknown as gax.protobuf.Type;
+ error.statusDetails = decodeAnyProtosInArray(
+ error.statusDetails,
+ protos,
+ );
+ }
+ throw error;
+ });
}
/**
* Deletes a sink. If the sink has a unique `writer_identity`, then that
@@ -1946,8 +2453,50 @@ export class ConfigServiceV2Client {
this._gaxModule.routingHeader.fromParams({
sink_name: request.sinkName ?? '',
});
- this.initialize();
- return this.innerApiCalls.deleteSink(request, options, callback);
+ this.initialize().catch((err) => {
+ throw err;
+ });
+ this._log.info('deleteSink request %j', request);
+ const wrappedCallback:
+ | Callback<
+ protos.google.protobuf.IEmpty,
+ protos.google.logging.v2.IDeleteSinkRequest | null | undefined,
+ {} | null | undefined
+ >
+ | undefined = callback
+ ? (error, response, options, rawResponse) => {
+ this._log.info('deleteSink response %j', response);
+ callback!(error, response, options, rawResponse); // We verified callback above.
+ }
+ : undefined;
+ return this.innerApiCalls
+ .deleteSink(request, options, wrappedCallback)
+ ?.then(
+ ([response, options, rawResponse]: [
+ protos.google.protobuf.IEmpty,
+ protos.google.logging.v2.IDeleteSinkRequest | undefined,
+ {} | undefined,
+ ]) => {
+ this._log.info('deleteSink response %j', response);
+ return [response, options, rawResponse];
+ },
+ )
+ .catch((error: any) => {
+ if (
+ error &&
+ 'statusDetails' in error &&
+ error.statusDetails instanceof Array
+ ) {
+ const protos = this._gaxModule.protobuf.Root.fromJSON(
+ jsonProtos,
+ ) as unknown as gax.protobuf.Type;
+ error.statusDetails = decodeAnyProtosInArray(
+ error.statusDetails,
+ protos,
+ );
+ }
+ throw error;
+ });
}
/**
* Gets a link.
@@ -2033,8 +2582,50 @@ export class ConfigServiceV2Client {
this._gaxModule.routingHeader.fromParams({
name: request.name ?? '',
});
- this.initialize();
- return this.innerApiCalls.getLink(request, options, callback);
+ this.initialize().catch((err) => {
+ throw err;
+ });
+ this._log.info('getLink request %j', request);
+ const wrappedCallback:
+ | Callback<
+ protos.google.logging.v2.ILink,
+ protos.google.logging.v2.IGetLinkRequest | null | undefined,
+ {} | null | undefined
+ >
+ | undefined = callback
+ ? (error, response, options, rawResponse) => {
+ this._log.info('getLink response %j', response);
+ callback!(error, response, options, rawResponse); // We verified callback above.
+ }
+ : undefined;
+ return this.innerApiCalls
+ .getLink(request, options, wrappedCallback)
+ ?.then(
+ ([response, options, rawResponse]: [
+ protos.google.logging.v2.ILink,
+ protos.google.logging.v2.IGetLinkRequest | undefined,
+ {} | undefined,
+ ]) => {
+ this._log.info('getLink response %j', response);
+ return [response, options, rawResponse];
+ },
+ )
+ .catch((error: any) => {
+ if (
+ error &&
+ 'statusDetails' in error &&
+ error.statusDetails instanceof Array
+ ) {
+ const protos = this._gaxModule.protobuf.Root.fromJSON(
+ jsonProtos,
+ ) as unknown as gax.protobuf.Type;
+ error.statusDetails = decodeAnyProtosInArray(
+ error.statusDetails,
+ protos,
+ );
+ }
+ throw error;
+ });
}
/**
* Gets the description of an exclusion in the _Default sink.
@@ -2124,8 +2715,50 @@ export class ConfigServiceV2Client {
this._gaxModule.routingHeader.fromParams({
name: request.name ?? '',
});
- this.initialize();
- return this.innerApiCalls.getExclusion(request, options, callback);
+ this.initialize().catch((err) => {
+ throw err;
+ });
+ this._log.info('getExclusion request %j', request);
+ const wrappedCallback:
+ | Callback<
+ protos.google.logging.v2.ILogExclusion,
+ protos.google.logging.v2.IGetExclusionRequest | null | undefined,
+ {} | null | undefined
+ >
+ | undefined = callback
+ ? (error, response, options, rawResponse) => {
+ this._log.info('getExclusion response %j', response);
+ callback!(error, response, options, rawResponse); // We verified callback above.
+ }
+ : undefined;
+ return this.innerApiCalls
+ .getExclusion(request, options, wrappedCallback)
+ ?.then(
+ ([response, options, rawResponse]: [
+ protos.google.logging.v2.ILogExclusion,
+ protos.google.logging.v2.IGetExclusionRequest | undefined,
+ {} | undefined,
+ ]) => {
+ this._log.info('getExclusion response %j', response);
+ return [response, options, rawResponse];
+ },
+ )
+ .catch((error: any) => {
+ if (
+ error &&
+ 'statusDetails' in error &&
+ error.statusDetails instanceof Array
+ ) {
+ const protos = this._gaxModule.protobuf.Root.fromJSON(
+ jsonProtos,
+ ) as unknown as gax.protobuf.Type;
+ error.statusDetails = decodeAnyProtosInArray(
+ error.statusDetails,
+ protos,
+ );
+ }
+ throw error;
+ });
}
/**
* Creates a new exclusion in the _Default sink in a specified parent
@@ -2221,8 +2854,50 @@ export class ConfigServiceV2Client {
this._gaxModule.routingHeader.fromParams({
parent: request.parent ?? '',
});
- this.initialize();
- return this.innerApiCalls.createExclusion(request, options, callback);
+ this.initialize().catch((err) => {
+ throw err;
+ });
+ this._log.info('createExclusion request %j', request);
+ const wrappedCallback:
+ | Callback<
+ protos.google.logging.v2.ILogExclusion,
+ protos.google.logging.v2.ICreateExclusionRequest | null | undefined,
+ {} | null | undefined
+ >
+ | undefined = callback
+ ? (error, response, options, rawResponse) => {
+ this._log.info('createExclusion response %j', response);
+ callback!(error, response, options, rawResponse); // We verified callback above.
+ }
+ : undefined;
+ return this.innerApiCalls
+ .createExclusion(request, options, wrappedCallback)
+ ?.then(
+ ([response, options, rawResponse]: [
+ protos.google.logging.v2.ILogExclusion,
+ protos.google.logging.v2.ICreateExclusionRequest | undefined,
+ {} | undefined,
+ ]) => {
+ this._log.info('createExclusion response %j', response);
+ return [response, options, rawResponse];
+ },
+ )
+ .catch((error: any) => {
+ if (
+ error &&
+ 'statusDetails' in error &&
+ error.statusDetails instanceof Array
+ ) {
+ const protos = this._gaxModule.protobuf.Root.fromJSON(
+ jsonProtos,
+ ) as unknown as gax.protobuf.Type;
+ error.statusDetails = decodeAnyProtosInArray(
+ error.statusDetails,
+ protos,
+ );
+ }
+ throw error;
+ });
}
/**
* Changes one or more properties of an existing exclusion in the _Default
@@ -2325,8 +3000,50 @@ export class ConfigServiceV2Client {
this._gaxModule.routingHeader.fromParams({
name: request.name ?? '',
});
- this.initialize();
- return this.innerApiCalls.updateExclusion(request, options, callback);
+ this.initialize().catch((err) => {
+ throw err;
+ });
+ this._log.info('updateExclusion request %j', request);
+ const wrappedCallback:
+ | Callback<
+ protos.google.logging.v2.ILogExclusion,
+ protos.google.logging.v2.IUpdateExclusionRequest | null | undefined,
+ {} | null | undefined
+ >
+ | undefined = callback
+ ? (error, response, options, rawResponse) => {
+ this._log.info('updateExclusion response %j', response);
+ callback!(error, response, options, rawResponse); // We verified callback above.
+ }
+ : undefined;
+ return this.innerApiCalls
+ .updateExclusion(request, options, wrappedCallback)
+ ?.then(
+ ([response, options, rawResponse]: [
+ protos.google.logging.v2.ILogExclusion,
+ protos.google.logging.v2.IUpdateExclusionRequest | undefined,
+ {} | undefined,
+ ]) => {
+ this._log.info('updateExclusion response %j', response);
+ return [response, options, rawResponse];
+ },
+ )
+ .catch((error: any) => {
+ if (
+ error &&
+ 'statusDetails' in error &&
+ error.statusDetails instanceof Array
+ ) {
+ const protos = this._gaxModule.protobuf.Root.fromJSON(
+ jsonProtos,
+ ) as unknown as gax.protobuf.Type;
+ error.statusDetails = decodeAnyProtosInArray(
+ error.statusDetails,
+ protos,
+ );
+ }
+ throw error;
+ });
}
/**
* Deletes an exclusion in the _Default sink.
@@ -2416,8 +3133,50 @@ export class ConfigServiceV2Client {
this._gaxModule.routingHeader.fromParams({
name: request.name ?? '',
});
- this.initialize();
- return this.innerApiCalls.deleteExclusion(request, options, callback);
+ this.initialize().catch((err) => {
+ throw err;
+ });
+ this._log.info('deleteExclusion request %j', request);
+ const wrappedCallback:
+ | Callback<
+ protos.google.protobuf.IEmpty,
+ protos.google.logging.v2.IDeleteExclusionRequest | null | undefined,
+ {} | null | undefined
+ >
+ | undefined = callback
+ ? (error, response, options, rawResponse) => {
+ this._log.info('deleteExclusion response %j', response);
+ callback!(error, response, options, rawResponse); // We verified callback above.
+ }
+ : undefined;
+ return this.innerApiCalls
+ .deleteExclusion(request, options, wrappedCallback)
+ ?.then(
+ ([response, options, rawResponse]: [
+ protos.google.protobuf.IEmpty,
+ protos.google.logging.v2.IDeleteExclusionRequest | undefined,
+ {} | undefined,
+ ]) => {
+ this._log.info('deleteExclusion response %j', response);
+ return [response, options, rawResponse];
+ },
+ )
+ .catch((error: any) => {
+ if (
+ error &&
+ 'statusDetails' in error &&
+ error.statusDetails instanceof Array
+ ) {
+ const protos = this._gaxModule.protobuf.Root.fromJSON(
+ jsonProtos,
+ ) as unknown as gax.protobuf.Type;
+ error.statusDetails = decodeAnyProtosInArray(
+ error.statusDetails,
+ protos,
+ );
+ }
+ throw error;
+ });
}
/**
* Gets the Logging CMEK settings for the given resource.
@@ -2521,8 +3280,50 @@ export class ConfigServiceV2Client {
this._gaxModule.routingHeader.fromParams({
name: request.name ?? '',
});
- this.initialize();
- return this.innerApiCalls.getCmekSettings(request, options, callback);
+ this.initialize().catch((err) => {
+ throw err;
+ });
+ this._log.info('getCmekSettings request %j', request);
+ const wrappedCallback:
+ | Callback<
+ protos.google.logging.v2.ICmekSettings,
+ protos.google.logging.v2.IGetCmekSettingsRequest | null | undefined,
+ {} | null | undefined
+ >
+ | undefined = callback
+ ? (error, response, options, rawResponse) => {
+ this._log.info('getCmekSettings response %j', response);
+ callback!(error, response, options, rawResponse); // We verified callback above.
+ }
+ : undefined;
+ return this.innerApiCalls
+ .getCmekSettings(request, options, wrappedCallback)
+ ?.then(
+ ([response, options, rawResponse]: [
+ protos.google.logging.v2.ICmekSettings,
+ protos.google.logging.v2.IGetCmekSettingsRequest | undefined,
+ {} | undefined,
+ ]) => {
+ this._log.info('getCmekSettings response %j', response);
+ return [response, options, rawResponse];
+ },
+ )
+ .catch((error: any) => {
+ if (
+ error &&
+ 'statusDetails' in error &&
+ error.statusDetails instanceof Array
+ ) {
+ const protos = this._gaxModule.protobuf.Root.fromJSON(
+ jsonProtos,
+ ) as unknown as gax.protobuf.Type;
+ error.statusDetails = decodeAnyProtosInArray(
+ error.statusDetails,
+ protos,
+ );
+ }
+ throw error;
+ });
}
/**
* Updates the Log Router CMEK settings for the given resource.
@@ -2646,8 +3447,52 @@ export class ConfigServiceV2Client {
this._gaxModule.routingHeader.fromParams({
name: request.name ?? '',
});
- this.initialize();
- return this.innerApiCalls.updateCmekSettings(request, options, callback);
+ this.initialize().catch((err) => {
+ throw err;
+ });
+ this._log.info('updateCmekSettings request %j', request);
+ const wrappedCallback:
+ | Callback<
+ protos.google.logging.v2.ICmekSettings,
+ | protos.google.logging.v2.IUpdateCmekSettingsRequest
+ | null
+ | undefined,
+ {} | null | undefined
+ >
+ | undefined = callback
+ ? (error, response, options, rawResponse) => {
+ this._log.info('updateCmekSettings response %j', response);
+ callback!(error, response, options, rawResponse); // We verified callback above.
+ }
+ : undefined;
+ return this.innerApiCalls
+ .updateCmekSettings(request, options, wrappedCallback)
+ ?.then(
+ ([response, options, rawResponse]: [
+ protos.google.logging.v2.ICmekSettings,
+ protos.google.logging.v2.IUpdateCmekSettingsRequest | undefined,
+ {} | undefined,
+ ]) => {
+ this._log.info('updateCmekSettings response %j', response);
+ return [response, options, rawResponse];
+ },
+ )
+ .catch((error: any) => {
+ if (
+ error &&
+ 'statusDetails' in error &&
+ error.statusDetails instanceof Array
+ ) {
+ const protos = this._gaxModule.protobuf.Root.fromJSON(
+ jsonProtos,
+ ) as unknown as gax.protobuf.Type;
+ error.statusDetails = decodeAnyProtosInArray(
+ error.statusDetails,
+ protos,
+ );
+ }
+ throw error;
+ });
}
/**
* Gets the Log Router settings for the given resource.
@@ -2751,8 +3596,50 @@ export class ConfigServiceV2Client {
this._gaxModule.routingHeader.fromParams({
name: request.name ?? '',
});
- this.initialize();
- return this.innerApiCalls.getSettings(request, options, callback);
+ this.initialize().catch((err) => {
+ throw err;
+ });
+ this._log.info('getSettings request %j', request);
+ const wrappedCallback:
+ | Callback<
+ protos.google.logging.v2.ISettings,
+ protos.google.logging.v2.IGetSettingsRequest | null | undefined,
+ {} | null | undefined
+ >
+ | undefined = callback
+ ? (error, response, options, rawResponse) => {
+ this._log.info('getSettings response %j', response);
+ callback!(error, response, options, rawResponse); // We verified callback above.
+ }
+ : undefined;
+ return this.innerApiCalls
+ .getSettings(request, options, wrappedCallback)
+ ?.then(
+ ([response, options, rawResponse]: [
+ protos.google.logging.v2.ISettings,
+ protos.google.logging.v2.IGetSettingsRequest | undefined,
+ {} | undefined,
+ ]) => {
+ this._log.info('getSettings response %j', response);
+ return [response, options, rawResponse];
+ },
+ )
+ .catch((error: any) => {
+ if (
+ error &&
+ 'statusDetails' in error &&
+ error.statusDetails instanceof Array
+ ) {
+ const protos = this._gaxModule.protobuf.Root.fromJSON(
+ jsonProtos,
+ ) as unknown as gax.protobuf.Type;
+ error.statusDetails = decodeAnyProtosInArray(
+ error.statusDetails,
+ protos,
+ );
+ }
+ throw error;
+ });
}
/**
* Updates the Log Router settings for the given resource.
@@ -2872,8 +3759,50 @@ export class ConfigServiceV2Client {
this._gaxModule.routingHeader.fromParams({
name: request.name ?? '',
});
- this.initialize();
- return this.innerApiCalls.updateSettings(request, options, callback);
+ this.initialize().catch((err) => {
+ throw err;
+ });
+ this._log.info('updateSettings request %j', request);
+ const wrappedCallback:
+ | Callback<
+ protos.google.logging.v2.ISettings,
+ protos.google.logging.v2.IUpdateSettingsRequest | null | undefined,
+ {} | null | undefined
+ >
+ | undefined = callback
+ ? (error, response, options, rawResponse) => {
+ this._log.info('updateSettings response %j', response);
+ callback!(error, response, options, rawResponse); // We verified callback above.
+ }
+ : undefined;
+ return this.innerApiCalls
+ .updateSettings(request, options, wrappedCallback)
+ ?.then(
+ ([response, options, rawResponse]: [
+ protos.google.logging.v2.ISettings,
+ protos.google.logging.v2.IUpdateSettingsRequest | undefined,
+ {} | undefined,
+ ]) => {
+ this._log.info('updateSettings response %j', response);
+ return [response, options, rawResponse];
+ },
+ )
+ .catch((error: any) => {
+ if (
+ error &&
+ 'statusDetails' in error &&
+ error.statusDetails instanceof Array
+ ) {
+ const protos = this._gaxModule.protobuf.Root.fromJSON(
+ jsonProtos,
+ ) as unknown as gax.protobuf.Type;
+ error.statusDetails = decodeAnyProtosInArray(
+ error.statusDetails,
+ protos,
+ );
+ }
+ throw error;
+ });
}
/**
@@ -2991,8 +3920,40 @@ export class ConfigServiceV2Client {
this._gaxModule.routingHeader.fromParams({
parent: request.parent ?? '',
});
- this.initialize();
- return this.innerApiCalls.createBucketAsync(request, options, callback);
+ this.initialize().catch((err) => {
+ throw err;
+ });
+ const wrappedCallback:
+ | Callback<
+ LROperation<
+ protos.google.logging.v2.ILogBucket,
+ protos.google.logging.v2.IBucketMetadata
+ >,
+ protos.google.longrunning.IOperation | null | undefined,
+ {} | null | undefined
+ >
+ | undefined = callback
+ ? (error, response, rawResponse, _) => {
+ this._log.info('createBucketAsync response %j', rawResponse);
+ callback!(error, response, rawResponse, _); // We verified callback above.
+ }
+ : undefined;
+ this._log.info('createBucketAsync request %j', request);
+ return this.innerApiCalls
+ .createBucketAsync(request, options, wrappedCallback)
+ ?.then(
+ ([response, rawResponse, _]: [
+ LROperation<
+ protos.google.logging.v2.ILogBucket,
+ protos.google.logging.v2.IBucketMetadata
+ >,
+ protos.google.longrunning.IOperation | undefined,
+ {} | undefined,
+ ]) => {
+ this._log.info('createBucketAsync response %j', rawResponse);
+ return [response, rawResponse, _];
+ },
+ );
}
/**
* Check the status of the long running operation returned by `createBucketAsync()`.
@@ -3013,9 +3974,10 @@ export class ConfigServiceV2Client {
protos.google.logging.v2.BucketMetadata
>
> {
+ this._log.info('createBucketAsync long-running');
const request =
new this._gaxModule.operationsProtos.google.longrunning.GetOperationRequest(
- {name},
+ { name },
);
const [operation] = await this.operationsClient.getOperation(request);
const decodeOperation = new this._gaxModule.Operation(
@@ -3152,8 +4114,40 @@ export class ConfigServiceV2Client {
this._gaxModule.routingHeader.fromParams({
name: request.name ?? '',
});
- this.initialize();
- return this.innerApiCalls.updateBucketAsync(request, options, callback);
+ this.initialize().catch((err) => {
+ throw err;
+ });
+ const wrappedCallback:
+ | Callback<
+ LROperation<
+ protos.google.logging.v2.ILogBucket,
+ protos.google.logging.v2.IBucketMetadata
+ >,
+ protos.google.longrunning.IOperation | null | undefined,
+ {} | null | undefined
+ >
+ | undefined = callback
+ ? (error, response, rawResponse, _) => {
+ this._log.info('updateBucketAsync response %j', rawResponse);
+ callback!(error, response, rawResponse, _); // We verified callback above.
+ }
+ : undefined;
+ this._log.info('updateBucketAsync request %j', request);
+ return this.innerApiCalls
+ .updateBucketAsync(request, options, wrappedCallback)
+ ?.then(
+ ([response, rawResponse, _]: [
+ LROperation<
+ protos.google.logging.v2.ILogBucket,
+ protos.google.logging.v2.IBucketMetadata
+ >,
+ protos.google.longrunning.IOperation | undefined,
+ {} | undefined,
+ ]) => {
+ this._log.info('updateBucketAsync response %j', rawResponse);
+ return [response, rawResponse, _];
+ },
+ );
}
/**
* Check the status of the long running operation returned by `updateBucketAsync()`.
@@ -3174,9 +4168,10 @@ export class ConfigServiceV2Client {
protos.google.logging.v2.BucketMetadata
>
> {
+ this._log.info('updateBucketAsync long-running');
const request =
new this._gaxModule.operationsProtos.google.longrunning.GetOperationRequest(
- {name},
+ { name },
);
const [operation] = await this.operationsClient.getOperation(request);
const decodeOperation = new this._gaxModule.Operation(
@@ -3301,8 +4296,40 @@ export class ConfigServiceV2Client {
this._gaxModule.routingHeader.fromParams({
parent: request.parent ?? '',
});
- this.initialize();
- return this.innerApiCalls.createLink(request, options, callback);
+ this.initialize().catch((err) => {
+ throw err;
+ });
+ const wrappedCallback:
+ | Callback<
+ LROperation<
+ protos.google.logging.v2.ILink,
+ protos.google.logging.v2.ILinkMetadata
+ >,
+ protos.google.longrunning.IOperation | null | undefined,
+ {} | null | undefined
+ >
+ | undefined = callback
+ ? (error, response, rawResponse, _) => {
+ this._log.info('createLink response %j', rawResponse);
+ callback!(error, response, rawResponse, _); // We verified callback above.
+ }
+ : undefined;
+ this._log.info('createLink request %j', request);
+ return this.innerApiCalls
+ .createLink(request, options, wrappedCallback)
+ ?.then(
+ ([response, rawResponse, _]: [
+ LROperation<
+ protos.google.logging.v2.ILink,
+ protos.google.logging.v2.ILinkMetadata
+ >,
+ protos.google.longrunning.IOperation | undefined,
+ {} | undefined,
+ ]) => {
+ this._log.info('createLink response %j', rawResponse);
+ return [response, rawResponse, _];
+ },
+ );
}
/**
* Check the status of the long running operation returned by `createLink()`.
@@ -3323,9 +4350,10 @@ export class ConfigServiceV2Client {
protos.google.logging.v2.LinkMetadata
>
> {
+ this._log.info('createLink long-running');
const request =
new this._gaxModule.operationsProtos.google.longrunning.GetOperationRequest(
- {name},
+ { name },
);
const [operation] = await this.operationsClient.getOperation(request);
const decodeOperation = new this._gaxModule.Operation(
@@ -3443,8 +4471,40 @@ export class ConfigServiceV2Client {
this._gaxModule.routingHeader.fromParams({
name: request.name ?? '',
});
- this.initialize();
- return this.innerApiCalls.deleteLink(request, options, callback);
+ this.initialize().catch((err) => {
+ throw err;
+ });
+ const wrappedCallback:
+ | Callback<
+ LROperation<
+ protos.google.protobuf.IEmpty,
+ protos.google.logging.v2.ILinkMetadata
+ >,
+ protos.google.longrunning.IOperation | null | undefined,
+ {} | null | undefined
+ >
+ | undefined = callback
+ ? (error, response, rawResponse, _) => {
+ this._log.info('deleteLink response %j', rawResponse);
+ callback!(error, response, rawResponse, _); // We verified callback above.
+ }
+ : undefined;
+ this._log.info('deleteLink request %j', request);
+ return this.innerApiCalls
+ .deleteLink(request, options, wrappedCallback)
+ ?.then(
+ ([response, rawResponse, _]: [
+ LROperation<
+ protos.google.protobuf.IEmpty,
+ protos.google.logging.v2.ILinkMetadata
+ >,
+ protos.google.longrunning.IOperation | undefined,
+ {} | undefined,
+ ]) => {
+ this._log.info('deleteLink response %j', rawResponse);
+ return [response, rawResponse, _];
+ },
+ );
}
/**
* Check the status of the long running operation returned by `deleteLink()`.
@@ -3465,9 +4525,10 @@ export class ConfigServiceV2Client {
protos.google.logging.v2.LinkMetadata
>
> {
+ this._log.info('deleteLink long-running');
const request =
new this._gaxModule.operationsProtos.google.longrunning.GetOperationRequest(
- {name},
+ { name },
);
const [operation] = await this.operationsClient.getOperation(request);
const decodeOperation = new this._gaxModule.Operation(
@@ -3584,8 +4645,40 @@ export class ConfigServiceV2Client {
options = options || {};
options.otherArgs = options.otherArgs || {};
options.otherArgs.headers = options.otherArgs.headers || {};
- this.initialize();
- return this.innerApiCalls.copyLogEntries(request, options, callback);
+ this.initialize().catch((err) => {
+ throw err;
+ });
+ const wrappedCallback:
+ | Callback<
+ LROperation<
+ protos.google.logging.v2.ICopyLogEntriesResponse,
+ protos.google.logging.v2.ICopyLogEntriesMetadata
+ >,
+ protos.google.longrunning.IOperation | null | undefined,
+ {} | null | undefined
+ >
+ | undefined = callback
+ ? (error, response, rawResponse, _) => {
+ this._log.info('copyLogEntries response %j', rawResponse);
+ callback!(error, response, rawResponse, _); // We verified callback above.
+ }
+ : undefined;
+ this._log.info('copyLogEntries request %j', request);
+ return this.innerApiCalls
+ .copyLogEntries(request, options, wrappedCallback)
+ ?.then(
+ ([response, rawResponse, _]: [
+ LROperation<
+ protos.google.logging.v2.ICopyLogEntriesResponse,
+ protos.google.logging.v2.ICopyLogEntriesMetadata
+ >,
+ protos.google.longrunning.IOperation | undefined,
+ {} | undefined,
+ ]) => {
+ this._log.info('copyLogEntries response %j', rawResponse);
+ return [response, rawResponse, _];
+ },
+ );
}
/**
* Check the status of the long running operation returned by `copyLogEntries()`.
@@ -3606,9 +4699,10 @@ export class ConfigServiceV2Client {
protos.google.logging.v2.CopyLogEntriesMetadata
>
> {
+ this._log.info('copyLogEntries long-running');
const request =
new this._gaxModule.operationsProtos.google.longrunning.GetOperationRequest(
- {name},
+ { name },
);
const [operation] = await this.operationsClient.getOperation(request);
const decodeOperation = new this._gaxModule.Operation(
@@ -3721,12 +4815,38 @@ export class ConfigServiceV2Client {
this._gaxModule.routingHeader.fromParams({
parent: request.parent ?? '',
});
- this.initialize();
- return this.innerApiCalls.listBuckets(request, options, callback);
+ this.initialize().catch((err) => {
+ throw err;
+ });
+ const wrappedCallback:
+ | PaginationCallback<
+ protos.google.logging.v2.IListBucketsRequest,
+ protos.google.logging.v2.IListBucketsResponse | null | undefined,
+ protos.google.logging.v2.ILogBucket
+ >
+ | undefined = callback
+ ? (error, values, nextPageRequest, rawResponse) => {
+ this._log.info('listBuckets values %j', values);
+ callback!(error, values, nextPageRequest, rawResponse); // We verified callback above.
+ }
+ : undefined;
+ this._log.info('listBuckets request %j', request);
+ return this.innerApiCalls
+ .listBuckets(request, options, wrappedCallback)
+ ?.then(
+ ([response, input, output]: [
+ protos.google.logging.v2.ILogBucket[],
+ protos.google.logging.v2.IListBucketsRequest | null,
+ protos.google.logging.v2.IListBucketsResponse,
+ ]) => {
+ this._log.info('listBuckets values %j', response);
+ return [response, input, output];
+ },
+ );
}
/**
- * Equivalent to `method.name.toCamelCase()`, but returns a NodeJS Stream object.
+ * Equivalent to `listBuckets`, but returns a NodeJS Stream object.
* @param {Object} request
* The request object that will be sent.
* @param {string} request.parent
@@ -3774,7 +4894,10 @@ export class ConfigServiceV2Client {
});
const defaultCallSettings = this._defaults['listBuckets'];
const callSettings = defaultCallSettings.merge(options);
- this.initialize();
+ this.initialize().catch((err) => {
+ throw err;
+ });
+ this._log.info('listBuckets stream %j', request);
return this.descriptors.page.listBuckets.createStream(
this.innerApiCalls.listBuckets as GaxCall,
request,
@@ -3834,7 +4957,10 @@ export class ConfigServiceV2Client {
});
const defaultCallSettings = this._defaults['listBuckets'];
const callSettings = defaultCallSettings.merge(options);
- this.initialize();
+ this.initialize().catch((err) => {
+ throw err;
+ });
+ this._log.info('listBuckets iterate %j', request);
return this.descriptors.page.listBuckets.asyncIterate(
this.innerApiCalls['listBuckets'] as GaxCall,
request as {},
@@ -3935,12 +5061,38 @@ export class ConfigServiceV2Client {
this._gaxModule.routingHeader.fromParams({
parent: request.parent ?? '',
});
- this.initialize();
- return this.innerApiCalls.listViews(request, options, callback);
+ this.initialize().catch((err) => {
+ throw err;
+ });
+ const wrappedCallback:
+ | PaginationCallback<
+ protos.google.logging.v2.IListViewsRequest,
+ protos.google.logging.v2.IListViewsResponse | null | undefined,
+ protos.google.logging.v2.ILogView
+ >
+ | undefined = callback
+ ? (error, values, nextPageRequest, rawResponse) => {
+ this._log.info('listViews values %j', values);
+ callback!(error, values, nextPageRequest, rawResponse); // We verified callback above.
+ }
+ : undefined;
+ this._log.info('listViews request %j', request);
+ return this.innerApiCalls
+ .listViews(request, options, wrappedCallback)
+ ?.then(
+ ([response, input, output]: [
+ protos.google.logging.v2.ILogView[],
+ protos.google.logging.v2.IListViewsRequest | null,
+ protos.google.logging.v2.IListViewsResponse,
+ ]) => {
+ this._log.info('listViews values %j', response);
+ return [response, input, output];
+ },
+ );
}
/**
- * Equivalent to `method.name.toCamelCase()`, but returns a NodeJS Stream object.
+ * Equivalent to `listViews`, but returns a NodeJS Stream object.
* @param {Object} request
* The request object that will be sent.
* @param {string} request.parent
@@ -3982,7 +5134,10 @@ export class ConfigServiceV2Client {
});
const defaultCallSettings = this._defaults['listViews'];
const callSettings = defaultCallSettings.merge(options);
- this.initialize();
+ this.initialize().catch((err) => {
+ throw err;
+ });
+ this._log.info('listViews stream %j', request);
return this.descriptors.page.listViews.createStream(
this.innerApiCalls.listViews as GaxCall,
request,
@@ -4036,7 +5191,10 @@ export class ConfigServiceV2Client {
});
const defaultCallSettings = this._defaults['listViews'];
const callSettings = defaultCallSettings.merge(options);
- this.initialize();
+ this.initialize().catch((err) => {
+ throw err;
+ });
+ this._log.info('listViews iterate %j', request);
return this.descriptors.page.listViews.asyncIterate(
this.innerApiCalls['listViews'] as GaxCall,
request as {},
@@ -4139,12 +5297,38 @@ export class ConfigServiceV2Client {
this._gaxModule.routingHeader.fromParams({
parent: request.parent ?? '',
});
- this.initialize();
- return this.innerApiCalls.listSinks(request, options, callback);
+ this.initialize().catch((err) => {
+ throw err;
+ });
+ const wrappedCallback:
+ | PaginationCallback<
+ protos.google.logging.v2.IListSinksRequest,
+ protos.google.logging.v2.IListSinksResponse | null | undefined,
+ protos.google.logging.v2.ILogSink
+ >
+ | undefined = callback
+ ? (error, values, nextPageRequest, rawResponse) => {
+ this._log.info('listSinks values %j', values);
+ callback!(error, values, nextPageRequest, rawResponse); // We verified callback above.
+ }
+ : undefined;
+ this._log.info('listSinks request %j', request);
+ return this.innerApiCalls
+ .listSinks(request, options, wrappedCallback)
+ ?.then(
+ ([response, input, output]: [
+ protos.google.logging.v2.ILogSink[],
+ protos.google.logging.v2.IListSinksRequest | null,
+ protos.google.logging.v2.IListSinksResponse,
+ ]) => {
+ this._log.info('listSinks values %j', response);
+ return [response, input, output];
+ },
+ );
}
/**
- * Equivalent to `method.name.toCamelCase()`, but returns a NodeJS Stream object.
+ * Equivalent to `listSinks`, but returns a NodeJS Stream object.
* @param {Object} request
* The request object that will be sent.
* @param {string} request.parent
@@ -4188,7 +5372,10 @@ export class ConfigServiceV2Client {
});
const defaultCallSettings = this._defaults['listSinks'];
const callSettings = defaultCallSettings.merge(options);
- this.initialize();
+ this.initialize().catch((err) => {
+ throw err;
+ });
+ this._log.info('listSinks stream %j', request);
return this.descriptors.page.listSinks.createStream(
this.innerApiCalls.listSinks as GaxCall,
request,
@@ -4244,7 +5431,10 @@ export class ConfigServiceV2Client {
});
const defaultCallSettings = this._defaults['listSinks'];
const callSettings = defaultCallSettings.merge(options);
- this.initialize();
+ this.initialize().catch((err) => {
+ throw err;
+ });
+ this._log.info('listSinks iterate %j', request);
return this.descriptors.page.listSinks.asyncIterate(
this.innerApiCalls['listSinks'] as GaxCall,
request as {},
@@ -4344,12 +5534,38 @@ export class ConfigServiceV2Client {
this._gaxModule.routingHeader.fromParams({
parent: request.parent ?? '',
});
- this.initialize();
- return this.innerApiCalls.listLinks(request, options, callback);
+ this.initialize().catch((err) => {
+ throw err;
+ });
+ const wrappedCallback:
+ | PaginationCallback<
+ protos.google.logging.v2.IListLinksRequest,
+ protos.google.logging.v2.IListLinksResponse | null | undefined,
+ protos.google.logging.v2.ILink
+ >
+ | undefined = callback
+ ? (error, values, nextPageRequest, rawResponse) => {
+ this._log.info('listLinks values %j', values);
+ callback!(error, values, nextPageRequest, rawResponse); // We verified callback above.
+ }
+ : undefined;
+ this._log.info('listLinks request %j', request);
+ return this.innerApiCalls
+ .listLinks(request, options, wrappedCallback)
+ ?.then(
+ ([response, input, output]: [
+ protos.google.logging.v2.ILink[],
+ protos.google.logging.v2.IListLinksRequest | null,
+ protos.google.logging.v2.IListLinksResponse,
+ ]) => {
+ this._log.info('listLinks values %j', response);
+ return [response, input, output];
+ },
+ );
}
/**
- * Equivalent to `method.name.toCamelCase()`, but returns a NodeJS Stream object.
+ * Equivalent to `listLinks`, but returns a NodeJS Stream object.
* @param {Object} request
* The request object that will be sent.
* @param {string} request.parent
@@ -4390,7 +5606,10 @@ export class ConfigServiceV2Client {
});
const defaultCallSettings = this._defaults['listLinks'];
const callSettings = defaultCallSettings.merge(options);
- this.initialize();
+ this.initialize().catch((err) => {
+ throw err;
+ });
+ this._log.info('listLinks stream %j', request);
return this.descriptors.page.listLinks.createStream(
this.innerApiCalls.listLinks as GaxCall,
request,
@@ -4443,7 +5662,10 @@ export class ConfigServiceV2Client {
});
const defaultCallSettings = this._defaults['listLinks'];
const callSettings = defaultCallSettings.merge(options);
- this.initialize();
+ this.initialize().catch((err) => {
+ throw err;
+ });
+ this._log.info('listLinks iterate %j', request);
return this.descriptors.page.listLinks.asyncIterate(
this.innerApiCalls['listLinks'] as GaxCall,
request as {},
@@ -4546,12 +5768,38 @@ export class ConfigServiceV2Client {
this._gaxModule.routingHeader.fromParams({
parent: request.parent ?? '',
});
- this.initialize();
- return this.innerApiCalls.listExclusions(request, options, callback);
+ this.initialize().catch((err) => {
+ throw err;
+ });
+ const wrappedCallback:
+ | PaginationCallback<
+ protos.google.logging.v2.IListExclusionsRequest,
+ protos.google.logging.v2.IListExclusionsResponse | null | undefined,
+ protos.google.logging.v2.ILogExclusion
+ >
+ | undefined = callback
+ ? (error, values, nextPageRequest, rawResponse) => {
+ this._log.info('listExclusions values %j', values);
+ callback!(error, values, nextPageRequest, rawResponse); // We verified callback above.
+ }
+ : undefined;
+ this._log.info('listExclusions request %j', request);
+ return this.innerApiCalls
+ .listExclusions(request, options, wrappedCallback)
+ ?.then(
+ ([response, input, output]: [
+ protos.google.logging.v2.ILogExclusion[],
+ protos.google.logging.v2.IListExclusionsRequest | null,
+ protos.google.logging.v2.IListExclusionsResponse,
+ ]) => {
+ this._log.info('listExclusions values %j', response);
+ return [response, input, output];
+ },
+ );
}
/**
- * Equivalent to `method.name.toCamelCase()`, but returns a NodeJS Stream object.
+ * Equivalent to `listExclusions`, but returns a NodeJS Stream object.
* @param {Object} request
* The request object that will be sent.
* @param {string} request.parent
@@ -4595,7 +5843,10 @@ export class ConfigServiceV2Client {
});
const defaultCallSettings = this._defaults['listExclusions'];
const callSettings = defaultCallSettings.merge(options);
- this.initialize();
+ this.initialize().catch((err) => {
+ throw err;
+ });
+ this._log.info('listExclusions stream %j', request);
return this.descriptors.page.listExclusions.createStream(
this.innerApiCalls.listExclusions as GaxCall,
request,
@@ -4651,7 +5902,10 @@ export class ConfigServiceV2Client {
});
const defaultCallSettings = this._defaults['listExclusions'];
const callSettings = defaultCallSettings.merge(options);
- this.initialize();
+ this.initialize().catch((err) => {
+ throw err;
+ });
+ this._log.info('listExclusions iterate %j', request);
return this.descriptors.page.listExclusions.asyncIterate(
this.innerApiCalls['listExclusions'] as GaxCall,
request as {},
@@ -4690,7 +5944,7 @@ export class ConfigServiceV2Client {
*/
getOperation(
request: protos.google.longrunning.GetOperationRequest,
- options?:
+ optionsOrCallback?:
| gax.CallOptions
| Callback<
protos.google.longrunning.Operation,
@@ -4703,6 +5957,20 @@ export class ConfigServiceV2Client {
{} | null | undefined
>,
): Promise<[protos.google.longrunning.Operation]> {
+ let options: gax.CallOptions;
+ if (typeof optionsOrCallback === 'function' && callback === undefined) {
+ callback = optionsOrCallback;
+ options = {};
+ } else {
+ options = optionsOrCallback as gax.CallOptions;
+ }
+ options = options || {};
+ options.otherArgs = options.otherArgs || {};
+ options.otherArgs.headers = options.otherArgs.headers || {};
+ options.otherArgs.headers['x-goog-request-params'] =
+ this._gaxModule.routingHeader.fromParams({
+ name: request.name ?? '',
+ });
return this.operationsClient.getOperation(request, options, callback);
}
/**
@@ -4738,7 +6006,14 @@ export class ConfigServiceV2Client {
listOperationsAsync(
request: protos.google.longrunning.ListOperationsRequest,
options?: gax.CallOptions,
- ): AsyncIterable {
+ ): AsyncIterable {
+ options = options || {};
+ options.otherArgs = options.otherArgs || {};
+ options.otherArgs.headers = options.otherArgs.headers || {};
+ options.otherArgs.headers['x-goog-request-params'] =
+ this._gaxModule.routingHeader.fromParams({
+ name: request.name ?? '',
+ });
return this.operationsClient.listOperationsAsync(request, options);
}
/**
@@ -4774,11 +6049,11 @@ export class ConfigServiceV2Client {
*/
cancelOperation(
request: protos.google.longrunning.CancelOperationRequest,
- options?:
+ optionsOrCallback?:
| gax.CallOptions
| Callback<
- protos.google.protobuf.Empty,
protos.google.longrunning.CancelOperationRequest,
+ protos.google.protobuf.Empty,
{} | undefined | null
>,
callback?: Callback<
@@ -4787,9 +6062,22 @@ export class ConfigServiceV2Client {
{} | undefined | null
>,
): Promise {
+ let options: gax.CallOptions;
+ if (typeof optionsOrCallback === 'function' && callback === undefined) {
+ callback = optionsOrCallback;
+ options = {};
+ } else {
+ options = optionsOrCallback as gax.CallOptions;
+ }
+ options = options || {};
+ options.otherArgs = options.otherArgs || {};
+ options.otherArgs.headers = options.otherArgs.headers || {};
+ options.otherArgs.headers['x-goog-request-params'] =
+ this._gaxModule.routingHeader.fromParams({
+ name: request.name ?? '',
+ });
return this.operationsClient.cancelOperation(request, options, callback);
}
-
/**
* Deletes a long-running operation. This method indicates that the client is
* no longer interested in the operation result. It does not cancel the
@@ -4817,7 +6105,7 @@ export class ConfigServiceV2Client {
*/
deleteOperation(
request: protos.google.longrunning.DeleteOperationRequest,
- options?:
+ optionsOrCallback?:
| gax.CallOptions
| Callback<
protos.google.protobuf.Empty,
@@ -4830,6 +6118,20 @@ export class ConfigServiceV2Client {
{} | null | undefined
>,
): Promise {
+ let options: gax.CallOptions;
+ if (typeof optionsOrCallback === 'function' && callback === undefined) {
+ callback = optionsOrCallback;
+ options = {};
+ } else {
+ options = optionsOrCallback as gax.CallOptions;
+ }
+ options = options || {};
+ options.otherArgs = options.otherArgs || {};
+ options.otherArgs.headers = options.otherArgs.headers || {};
+ options.otherArgs.headers['x-goog-request-params'] =
+ this._gaxModule.routingHeader.fromParams({
+ name: request.name ?? '',
+ });
return this.operationsClient.deleteOperation(request, options, callback);
}
@@ -6541,10 +7843,11 @@ export class ConfigServiceV2Client {
*/
close(): Promise {
if (this.configServiceV2Stub && !this._terminated) {
- return this.configServiceV2Stub.then(stub => {
+ return this.configServiceV2Stub.then((stub) => {
+ this._log.info('ending gRPC channel');
this._terminated = true;
stub.close();
- this.operationsClient.close();
+ void this.operationsClient.close();
});
}
return Promise.resolve();
diff --git a/handwritten/logging/src/v2/index.ts b/handwritten/logging/src/v2/index.ts
index 2cc712044a0a..2aca5a517ff1 100644
--- a/handwritten/logging/src/v2/index.ts
+++ b/handwritten/logging/src/v2/index.ts
@@ -1,4 +1,4 @@
-// Copyright 2024 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -16,6 +16,6 @@
// ** https://github.com/googleapis/gapic-generator-typescript **
// ** All changes to this file may be overwritten. **
-export {ConfigServiceV2Client} from './config_service_v2_client';
-export {LoggingServiceV2Client} from './logging_service_v2_client';
-export {MetricsServiceV2Client} from './metrics_service_v2_client';
+export { ConfigServiceV2Client } from './config_service_v2_client';
+export { LoggingServiceV2Client } from './logging_service_v2_client';
+export { MetricsServiceV2Client } from './metrics_service_v2_client';
diff --git a/handwritten/logging/src/v2/logging_service_v2_client.ts b/handwritten/logging/src/v2/logging_service_v2_client.ts
index 5995243420cb..15ed400567ba 100644
--- a/handwritten/logging/src/v2/logging_service_v2_client.ts
+++ b/handwritten/logging/src/v2/logging_service_v2_client.ts
@@ -1,4 +1,4 @@
-// Copyright 2024 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -26,9 +26,10 @@ import type {
PaginationCallback,
GaxCall,
} from 'google-gax';
-import {Transform, PassThrough} from 'stream';
+import { Transform, PassThrough } from 'stream';
import * as protos from '../../protos/protos';
import jsonProtos = require('../../protos/protos.json');
+import { loggingUtils as logging, decodeAnyProtosInArray } from 'google-gax';
/**
* Client JSON configuration object, loaded from
@@ -50,9 +51,11 @@ export class LoggingServiceV2Client {
private _gaxModule: typeof gax | typeof gax.fallback;
private _gaxGrpc: gax.GrpcClient | gax.fallback.GrpcClient;
private _protos: {};
- private _defaults: {[method: string]: gax.CallSettings};
+ private _defaults: { [method: string]: gax.CallSettings };
private _universeDomain: string;
private _servicePath: string;
+ private _log = logging.log('logging');
+
auth: gax.GoogleAuth;
descriptors: Descriptors = {
page: {},
@@ -61,9 +64,9 @@ export class LoggingServiceV2Client {
batching: {},
};
warn: (code: string, message: string, warnType?: string) => void;
- innerApiCalls: {[name: string]: Function};
- pathTemplates: {[name: string]: gax.PathTemplate};
- loggingServiceV2Stub?: Promise<{[name: string]: Function}>;
+ innerApiCalls: { [name: string]: Function };
+ pathTemplates: { [name: string]: gax.PathTemplate };
+ loggingServiceV2Stub?: Promise<{ [name: string]: Function }>;
/**
* Construct an instance of LoggingServiceV2Client.
@@ -87,7 +90,7 @@ export class LoggingServiceV2Client {
* Developer's Console, e.g. 'grape-spaceship-123'. We will also check
* the environment variable GCLOUD_PROJECT for your project ID. If your
* app is running in an environment which supports
- * {@link https://developers.google.com/identity/protocols/application-default-credentials Application Default Credentials},
+ * {@link https://cloud.google.com/docs/authentication/application-default-credentials Application Default Credentials},
* your project ID will be detected automatically.
* @param {string} [options.apiEndpoint] - The domain name of the
* API remote host.
@@ -139,7 +142,7 @@ export class LoggingServiceV2Client {
const fallback =
opts?.fallback ??
(typeof window !== 'undefined' && typeof window?.fetch === 'function');
- opts = Object.assign({servicePath, port, clientConfig, fallback}, opts);
+ opts = Object.assign({ servicePath, port, clientConfig, fallback }, opts);
// Request numeric enum values if REST transport is used.
opts.numericEnums = true;
@@ -339,7 +342,7 @@ export class LoggingServiceV2Client {
),
};
- const protoFilesRoot = this._gaxModule.protobuf.Root.fromJSON(jsonProtos);
+ const protoFilesRoot = this._gaxModule.protobufFromJSON(jsonProtos);
// Some methods on this API support automatically batching
// requests; denote this.
@@ -360,7 +363,7 @@ export class LoggingServiceV2Client {
'google.logging.v2.LoggingServiceV2',
gapicConfig as gax.ClientConfig,
opts.clientConfig || {},
- {'x-goog-api-client': clientHeader.join(' ')},
+ { 'x-goog-api-client': clientHeader.join(' ') },
);
// Set up a dictionary of "inner API calls"; the core implementation
@@ -400,7 +403,7 @@ export class LoggingServiceV2Client {
(this._protos as any).google.logging.v2.LoggingServiceV2,
this._opts,
this._providedCustomServicePath,
- ) as Promise<{[method: string]: Function}>;
+ ) as Promise<{ [method: string]: Function }>;
// Iterate over each of the methods that the service provides
// and create an API call method for each.
@@ -414,11 +417,11 @@ export class LoggingServiceV2Client {
];
for (const methodName of loggingServiceV2StubMethods) {
const callPromise = this.loggingServiceV2Stub.then(
- stub =>
+ (stub) =>
(...args: Array<{}>) => {
if (this._terminated) {
if (methodName in this.descriptors.stream) {
- const stream = new PassThrough();
+ const stream = new PassThrough({ objectMode: true });
setImmediate(() => {
stream.emit(
'error',
@@ -641,8 +644,50 @@ export class LoggingServiceV2Client {
this._gaxModule.routingHeader.fromParams({
log_name: request.logName ?? '',
});
- this.initialize();
- return this.innerApiCalls.deleteLog(request, options, callback);
+ this.initialize().catch((err) => {
+ throw err;
+ });
+ this._log.info('deleteLog request %j', request);
+ const wrappedCallback:
+ | Callback<
+ protos.google.protobuf.IEmpty,
+ protos.google.logging.v2.IDeleteLogRequest | null | undefined,
+ {} | null | undefined
+ >
+ | undefined = callback
+ ? (error, response, options, rawResponse) => {
+ this._log.info('deleteLog response %j', response);
+ callback!(error, response, options, rawResponse); // We verified callback above.
+ }
+ : undefined;
+ return this.innerApiCalls
+ .deleteLog(request, options, wrappedCallback)
+ ?.then(
+ ([response, options, rawResponse]: [
+ protos.google.protobuf.IEmpty,
+ protos.google.logging.v2.IDeleteLogRequest | undefined,
+ {} | undefined,
+ ]) => {
+ this._log.info('deleteLog response %j', response);
+ return [response, options, rawResponse];
+ },
+ )
+ .catch((error: any) => {
+ if (
+ error &&
+ 'statusDetails' in error &&
+ error.statusDetails instanceof Array
+ ) {
+ const protos = this._gaxModule.protobuf.Root.fromJSON(
+ jsonProtos,
+ ) as unknown as gax.protobuf.Type;
+ error.statusDetails = decodeAnyProtosInArray(
+ error.statusDetails,
+ protos,
+ );
+ }
+ throw error;
+ });
}
/**
* Writes log entries to Logging. This API method is the
@@ -792,8 +837,50 @@ export class LoggingServiceV2Client {
options = options || {};
options.otherArgs = options.otherArgs || {};
options.otherArgs.headers = options.otherArgs.headers || {};
- this.initialize();
- return this.innerApiCalls.writeLogEntries(request, options, callback);
+ this.initialize().catch((err) => {
+ throw err;
+ });
+ this._log.info('writeLogEntries request %j', request);
+ const wrappedCallback:
+ | Callback<
+ protos.google.logging.v2.IWriteLogEntriesResponse,
+ protos.google.logging.v2.IWriteLogEntriesRequest | null | undefined,
+ {} | null | undefined
+ >
+ | undefined = callback
+ ? (error, response, options, rawResponse) => {
+ this._log.info('writeLogEntries response %j', response);
+ callback!(error, response, options, rawResponse); // We verified callback above.
+ }
+ : undefined;
+ return this.innerApiCalls
+ .writeLogEntries(request, options, wrappedCallback)
+ ?.then(
+ ([response, options, rawResponse]: [
+ protos.google.logging.v2.IWriteLogEntriesResponse,
+ protos.google.logging.v2.IWriteLogEntriesRequest | undefined,
+ {} | undefined,
+ ]) => {
+ this._log.info('writeLogEntries response %j', response);
+ return [response, options, rawResponse];
+ },
+ )
+ .catch((error: any) => {
+ if (
+ error &&
+ 'statusDetails' in error &&
+ error.statusDetails instanceof Array
+ ) {
+ const protos = this._gaxModule.protobuf.Root.fromJSON(
+ jsonProtos,
+ ) as unknown as gax.protobuf.Type;
+ error.statusDetails = decodeAnyProtosInArray(
+ error.statusDetails,
+ protos,
+ );
+ }
+ throw error;
+ });
}
/**
@@ -812,7 +899,10 @@ export class LoggingServiceV2Client {
* region_tag:logging_v2_generated_LoggingServiceV2_TailLogEntries_async
*/
tailLogEntries(options?: CallOptions): gax.CancellableStream {
- this.initialize();
+ this.initialize().catch((err) => {
+ throw err;
+ });
+ this._log.info('tailLogEntries stream %j', options);
return this.innerApiCalls.tailLogEntries(null, options);
}
@@ -936,12 +1026,38 @@ export class LoggingServiceV2Client {
options = options || {};
options.otherArgs = options.otherArgs || {};
options.otherArgs.headers = options.otherArgs.headers || {};
- this.initialize();
- return this.innerApiCalls.listLogEntries(request, options, callback);
+ this.initialize().catch((err) => {
+ throw err;
+ });
+ const wrappedCallback:
+ | PaginationCallback<
+ protos.google.logging.v2.IListLogEntriesRequest,
+ protos.google.logging.v2.IListLogEntriesResponse | null | undefined,
+ protos.google.logging.v2.ILogEntry
+ >
+ | undefined = callback
+ ? (error, values, nextPageRequest, rawResponse) => {
+ this._log.info('listLogEntries values %j', values);
+ callback!(error, values, nextPageRequest, rawResponse); // We verified callback above.
+ }
+ : undefined;
+ this._log.info('listLogEntries request %j', request);
+ return this.innerApiCalls
+ .listLogEntries(request, options, wrappedCallback)
+ ?.then(
+ ([response, input, output]: [
+ protos.google.logging.v2.ILogEntry[],
+ protos.google.logging.v2.IListLogEntriesRequest | null,
+ protos.google.logging.v2.IListLogEntriesResponse,
+ ]) => {
+ this._log.info('listLogEntries values %j', response);
+ return [response, input, output];
+ },
+ );
}
/**
- * Equivalent to `method.name.toCamelCase()`, but returns a NodeJS Stream object.
+ * Equivalent to `listLogEntries`, but returns a NodeJS Stream object.
* @param {Object} request
* The request object that will be sent.
* @param {string[]} request.resourceNames
@@ -1006,7 +1122,10 @@ export class LoggingServiceV2Client {
options.otherArgs.headers = options.otherArgs.headers || {};
const defaultCallSettings = this._defaults['listLogEntries'];
const callSettings = defaultCallSettings.merge(options);
- this.initialize();
+ this.initialize().catch((err) => {
+ throw err;
+ });
+ this._log.info('listLogEntries stream %j', request);
return this.descriptors.page.listLogEntries.createStream(
this.innerApiCalls.listLogEntries as GaxCall,
request,
@@ -1083,7 +1202,10 @@ export class LoggingServiceV2Client {
options.otherArgs.headers = options.otherArgs.headers || {};
const defaultCallSettings = this._defaults['listLogEntries'];
const callSettings = defaultCallSettings.merge(options);
- this.initialize();
+ this.initialize().catch((err) => {
+ throw err;
+ });
+ this._log.info('listLogEntries iterate %j', request);
return this.descriptors.page.listLogEntries.asyncIterate(
this.innerApiCalls['listLogEntries'] as GaxCall,
request as {},
@@ -1183,16 +1305,43 @@ export class LoggingServiceV2Client {
options = options || {};
options.otherArgs = options.otherArgs || {};
options.otherArgs.headers = options.otherArgs.headers || {};
- this.initialize();
- return this.innerApiCalls.listMonitoredResourceDescriptors(
- request,
- options,
- callback,
- );
+ this.initialize().catch((err) => {
+ throw err;
+ });
+ const wrappedCallback:
+ | PaginationCallback<
+ protos.google.logging.v2.IListMonitoredResourceDescriptorsRequest,
+ | protos.google.logging.v2.IListMonitoredResourceDescriptorsResponse
+ | null
+ | undefined,
+ protos.google.api.IMonitoredResourceDescriptor
+ >
+ | undefined = callback
+ ? (error, values, nextPageRequest, rawResponse) => {
+ this._log.info('listMonitoredResourceDescriptors values %j', values);
+ callback!(error, values, nextPageRequest, rawResponse); // We verified callback above.
+ }
+ : undefined;
+ this._log.info('listMonitoredResourceDescriptors request %j', request);
+ return this.innerApiCalls
+ .listMonitoredResourceDescriptors(request, options, wrappedCallback)
+ ?.then(
+ ([response, input, output]: [
+ protos.google.api.IMonitoredResourceDescriptor[],
+ protos.google.logging.v2.IListMonitoredResourceDescriptorsRequest | null,
+ protos.google.logging.v2.IListMonitoredResourceDescriptorsResponse,
+ ]) => {
+ this._log.info(
+ 'listMonitoredResourceDescriptors values %j',
+ response,
+ );
+ return [response, input, output];
+ },
+ );
}
/**
- * Equivalent to `method.name.toCamelCase()`, but returns a NodeJS Stream object.
+ * Equivalent to `listMonitoredResourceDescriptors`, but returns a NodeJS Stream object.
* @param {Object} request
* The request object that will be sent.
* @param {number} [request.pageSize]
@@ -1226,7 +1375,10 @@ export class LoggingServiceV2Client {
const defaultCallSettings =
this._defaults['listMonitoredResourceDescriptors'];
const callSettings = defaultCallSettings.merge(options);
- this.initialize();
+ this.initialize().catch((err) => {
+ throw err;
+ });
+ this._log.info('listMonitoredResourceDescriptors stream %j', request);
return this.descriptors.page.listMonitoredResourceDescriptors.createStream(
this.innerApiCalls.listMonitoredResourceDescriptors as GaxCall,
request,
@@ -1272,7 +1424,10 @@ export class LoggingServiceV2Client {
const defaultCallSettings =
this._defaults['listMonitoredResourceDescriptors'];
const callSettings = defaultCallSettings.merge(options);
- this.initialize();
+ this.initialize().catch((err) => {
+ throw err;
+ });
+ this._log.info('listMonitoredResourceDescriptors iterate %j', request);
return this.descriptors.page.listMonitoredResourceDescriptors.asyncIterate(
this.innerApiCalls['listMonitoredResourceDescriptors'] as GaxCall,
request as {},
@@ -1392,12 +1547,38 @@ export class LoggingServiceV2Client {
this._gaxModule.routingHeader.fromParams({
parent: request.parent ?? '',
});
- this.initialize();
- return this.innerApiCalls.listLogs(request, options, callback);
+ this.initialize().catch((err) => {
+ throw err;
+ });
+ const wrappedCallback:
+ | PaginationCallback<
+ protos.google.logging.v2.IListLogsRequest,
+ protos.google.logging.v2.IListLogsResponse | null | undefined,
+ string
+ >
+ | undefined = callback
+ ? (error, values, nextPageRequest, rawResponse) => {
+ this._log.info('listLogs values %j', values);
+ callback!(error, values, nextPageRequest, rawResponse); // We verified callback above.
+ }
+ : undefined;
+ this._log.info('listLogs request %j', request);
+ return this.innerApiCalls
+ .listLogs(request, options, wrappedCallback)
+ ?.then(
+ ([response, input, output]: [
+ string[],
+ protos.google.logging.v2.IListLogsRequest | null,
+ protos.google.logging.v2.IListLogsResponse,
+ ]) => {
+ this._log.info('listLogs values %j', response);
+ return [response, input, output];
+ },
+ );
}
/**
- * Equivalent to `method.name.toCamelCase()`, but returns a NodeJS Stream object.
+ * Equivalent to `listLogs`, but returns a NodeJS Stream object.
* @param {Object} request
* The request object that will be sent.
* @param {string} request.parent
@@ -1457,7 +1638,10 @@ export class LoggingServiceV2Client {
});
const defaultCallSettings = this._defaults['listLogs'];
const callSettings = defaultCallSettings.merge(options);
- this.initialize();
+ this.initialize().catch((err) => {
+ throw err;
+ });
+ this._log.info('listLogs stream %j', request);
return this.descriptors.page.listLogs.createStream(
this.innerApiCalls.listLogs as GaxCall,
request,
@@ -1529,7 +1713,10 @@ export class LoggingServiceV2Client {
});
const defaultCallSettings = this._defaults['listLogs'];
const callSettings = defaultCallSettings.merge(options);
- this.initialize();
+ this.initialize().catch((err) => {
+ throw err;
+ });
+ this._log.info('listLogs iterate %j', request);
return this.descriptors.page.listLogs.asyncIterate(
this.innerApiCalls['listLogs'] as GaxCall,
request as {},
@@ -3208,7 +3395,8 @@ export class LoggingServiceV2Client {
*/
close(): Promise {
if (this.loggingServiceV2Stub && !this._terminated) {
- return this.loggingServiceV2Stub.then(stub => {
+ return this.loggingServiceV2Stub.then((stub) => {
+ this._log.info('ending gRPC channel');
this._terminated = true;
stub.close();
});
diff --git a/handwritten/logging/src/v2/metrics_service_v2_client.ts b/handwritten/logging/src/v2/metrics_service_v2_client.ts
index 5c00af6ca8b4..3c0ff03971cb 100644
--- a/handwritten/logging/src/v2/metrics_service_v2_client.ts
+++ b/handwritten/logging/src/v2/metrics_service_v2_client.ts
@@ -1,4 +1,4 @@
-// Copyright 2024 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -26,9 +26,10 @@ import type {
PaginationCallback,
GaxCall,
} from 'google-gax';
-import {Transform} from 'stream';
+import { Transform } from 'stream';
import * as protos from '../../protos/protos';
import jsonProtos = require('../../protos/protos.json');
+import { loggingUtils as logging, decodeAnyProtosInArray } from 'google-gax';
/**
* Client JSON configuration object, loaded from
@@ -50,9 +51,11 @@ export class MetricsServiceV2Client {
private _gaxModule: typeof gax | typeof gax.fallback;
private _gaxGrpc: gax.GrpcClient | gax.fallback.GrpcClient;
private _protos: {};
- private _defaults: {[method: string]: gax.CallSettings};
+ private _defaults: { [method: string]: gax.CallSettings };
private _universeDomain: string;
private _servicePath: string;
+ private _log = logging.log('logging');
+
auth: gax.GoogleAuth;
descriptors: Descriptors = {
page: {},
@@ -61,9 +64,9 @@ export class MetricsServiceV2Client {
batching: {},
};
warn: (code: string, message: string, warnType?: string) => void;
- innerApiCalls: {[name: string]: Function};
- pathTemplates: {[name: string]: gax.PathTemplate};
- metricsServiceV2Stub?: Promise<{[name: string]: Function}>;
+ innerApiCalls: { [name: string]: Function };
+ pathTemplates: { [name: string]: gax.PathTemplate };
+ metricsServiceV2Stub?: Promise<{ [name: string]: Function }>;
/**
* Construct an instance of MetricsServiceV2Client.
@@ -87,7 +90,7 @@ export class MetricsServiceV2Client {
* Developer's Console, e.g. 'grape-spaceship-123'. We will also check
* the environment variable GCLOUD_PROJECT for your project ID. If your
* app is running in an environment which supports
- * {@link https://developers.google.com/identity/protocols/application-default-credentials Application Default Credentials},
+ * {@link https://cloud.google.com/docs/authentication/application-default-credentials Application Default Credentials},
* your project ID will be detected automatically.
* @param {string} [options.apiEndpoint] - The domain name of the
* API remote host.
@@ -139,7 +142,7 @@ export class MetricsServiceV2Client {
const fallback =
opts?.fallback ??
(typeof window !== 'undefined' && typeof window?.fetch === 'function');
- opts = Object.assign({servicePath, port, clientConfig, fallback}, opts);
+ opts = Object.assign({ servicePath, port, clientConfig, fallback }, opts);
// Request numeric enum values if REST transport is used.
opts.numericEnums = true;
@@ -324,7 +327,7 @@ export class MetricsServiceV2Client {
'google.logging.v2.MetricsServiceV2',
gapicConfig as gax.ClientConfig,
opts.clientConfig || {},
- {'x-goog-api-client': clientHeader.join(' ')},
+ { 'x-goog-api-client': clientHeader.join(' ') },
);
// Set up a dictionary of "inner API calls"; the core implementation
@@ -364,7 +367,7 @@ export class MetricsServiceV2Client {
(this._protos as any).google.logging.v2.MetricsServiceV2,
this._opts,
this._providedCustomServicePath,
- ) as Promise<{[method: string]: Function}>;
+ ) as Promise<{ [method: string]: Function }>;
// Iterate over each of the methods that the service provides
// and create an API call method for each.
@@ -377,7 +380,7 @@ export class MetricsServiceV2Client {
];
for (const methodName of metricsServiceV2StubMethods) {
const callPromise = this.metricsServiceV2Stub.then(
- stub =>
+ (stub) =>
(...args: Array<{}>) => {
if (this._terminated) {
return Promise.reject('The client has already been closed.');
@@ -575,8 +578,50 @@ export class MetricsServiceV2Client {
this._gaxModule.routingHeader.fromParams({
metric_name: request.metricName ?? '',
});
- this.initialize();
- return this.innerApiCalls.getLogMetric(request, options, callback);
+ this.initialize().catch((err) => {
+ throw err;
+ });
+ this._log.info('getLogMetric request %j', request);
+ const wrappedCallback:
+ | Callback<
+ protos.google.logging.v2.ILogMetric,
+ protos.google.logging.v2.IGetLogMetricRequest | null | undefined,
+ {} | null | undefined
+ >
+ | undefined = callback
+ ? (error, response, options, rawResponse) => {
+ this._log.info('getLogMetric response %j', response);
+ callback!(error, response, options, rawResponse); // We verified callback above.
+ }
+ : undefined;
+ return this.innerApiCalls
+ .getLogMetric(request, options, wrappedCallback)
+ ?.then(
+ ([response, options, rawResponse]: [
+ protos.google.logging.v2.ILogMetric,
+ protos.google.logging.v2.IGetLogMetricRequest | undefined,
+ {} | undefined,
+ ]) => {
+ this._log.info('getLogMetric response %j', response);
+ return [response, options, rawResponse];
+ },
+ )
+ .catch((error: any) => {
+ if (
+ error &&
+ 'statusDetails' in error &&
+ error.statusDetails instanceof Array
+ ) {
+ const protos = this._gaxModule.protobuf.Root.fromJSON(
+ jsonProtos,
+ ) as unknown as gax.protobuf.Type;
+ error.statusDetails = decodeAnyProtosInArray(
+ error.statusDetails,
+ protos,
+ );
+ }
+ throw error;
+ });
}
/**
* Creates a logs-based metric.
@@ -664,8 +709,50 @@ export class MetricsServiceV2Client {
this._gaxModule.routingHeader.fromParams({
parent: request.parent ?? '',
});
- this.initialize();
- return this.innerApiCalls.createLogMetric(request, options, callback);
+ this.initialize().catch((err) => {
+ throw err;
+ });
+ this._log.info('createLogMetric request %j', request);
+ const wrappedCallback:
+ | Callback<
+ protos.google.logging.v2.ILogMetric,
+ protos.google.logging.v2.ICreateLogMetricRequest | null | undefined,
+ {} | null | undefined
+ >
+ | undefined = callback
+ ? (error, response, options, rawResponse) => {
+ this._log.info('createLogMetric response %j', response);
+ callback!(error, response, options, rawResponse); // We verified callback above.
+ }
+ : undefined;
+ return this.innerApiCalls
+ .createLogMetric(request, options, wrappedCallback)
+ ?.then(
+ ([response, options, rawResponse]: [
+ protos.google.logging.v2.ILogMetric,
+ protos.google.logging.v2.ICreateLogMetricRequest | undefined,
+ {} | undefined,
+ ]) => {
+ this._log.info('createLogMetric response %j', response);
+ return [response, options, rawResponse];
+ },
+ )
+ .catch((error: any) => {
+ if (
+ error &&
+ 'statusDetails' in error &&
+ error.statusDetails instanceof Array
+ ) {
+ const protos = this._gaxModule.protobuf.Root.fromJSON(
+ jsonProtos,
+ ) as unknown as gax.protobuf.Type;
+ error.statusDetails = decodeAnyProtosInArray(
+ error.statusDetails,
+ protos,
+ );
+ }
+ throw error;
+ });
}
/**
* Creates or updates a logs-based metric.
@@ -754,8 +841,50 @@ export class MetricsServiceV2Client {
this._gaxModule.routingHeader.fromParams({
metric_name: request.metricName ?? '',
});
- this.initialize();
- return this.innerApiCalls.updateLogMetric(request, options, callback);
+ this.initialize().catch((err) => {
+ throw err;
+ });
+ this._log.info('updateLogMetric request %j', request);
+ const wrappedCallback:
+ | Callback<
+ protos.google.logging.v2.ILogMetric,
+ protos.google.logging.v2.IUpdateLogMetricRequest | null | undefined,
+ {} | null | undefined
+ >
+ | undefined = callback
+ ? (error, response, options, rawResponse) => {
+ this._log.info('updateLogMetric response %j', response);
+ callback!(error, response, options, rawResponse); // We verified callback above.
+ }
+ : undefined;
+ return this.innerApiCalls
+ .updateLogMetric(request, options, wrappedCallback)
+ ?.then(
+ ([response, options, rawResponse]: [
+ protos.google.logging.v2.ILogMetric,
+ protos.google.logging.v2.IUpdateLogMetricRequest | undefined,
+ {} | undefined,
+ ]) => {
+ this._log.info('updateLogMetric response %j', response);
+ return [response, options, rawResponse];
+ },
+ )
+ .catch((error: any) => {
+ if (
+ error &&
+ 'statusDetails' in error &&
+ error.statusDetails instanceof Array
+ ) {
+ const protos = this._gaxModule.protobuf.Root.fromJSON(
+ jsonProtos,
+ ) as unknown as gax.protobuf.Type;
+ error.statusDetails = decodeAnyProtosInArray(
+ error.statusDetails,
+ protos,
+ );
+ }
+ throw error;
+ });
}
/**
* Deletes a logs-based metric.
@@ -838,8 +967,50 @@ export class MetricsServiceV2Client {
this._gaxModule.routingHeader.fromParams({
metric_name: request.metricName ?? '',
});
- this.initialize();
- return this.innerApiCalls.deleteLogMetric(request, options, callback);
+ this.initialize().catch((err) => {
+ throw err;
+ });
+ this._log.info('deleteLogMetric request %j', request);
+ const wrappedCallback:
+ | Callback<
+ protos.google.protobuf.IEmpty,
+ protos.google.logging.v2.IDeleteLogMetricRequest | null | undefined,
+ {} | null | undefined
+ >
+ | undefined = callback
+ ? (error, response, options, rawResponse) => {
+ this._log.info('deleteLogMetric response %j', response);
+ callback!(error, response, options, rawResponse); // We verified callback above.
+ }
+ : undefined;
+ return this.innerApiCalls
+ .deleteLogMetric(request, options, wrappedCallback)
+ ?.then(
+ ([response, options, rawResponse]: [
+ protos.google.protobuf.IEmpty,
+ protos.google.logging.v2.IDeleteLogMetricRequest | undefined,
+ {} | undefined,
+ ]) => {
+ this._log.info('deleteLogMetric response %j', response);
+ return [response, options, rawResponse];
+ },
+ )
+ .catch((error: any) => {
+ if (
+ error &&
+ 'statusDetails' in error &&
+ error.statusDetails instanceof Array
+ ) {
+ const protos = this._gaxModule.protobuf.Root.fromJSON(
+ jsonProtos,
+ ) as unknown as gax.protobuf.Type;
+ error.statusDetails = decodeAnyProtosInArray(
+ error.statusDetails,
+ protos,
+ );
+ }
+ throw error;
+ });
}
/**
@@ -935,12 +1106,38 @@ export class MetricsServiceV2Client {
this._gaxModule.routingHeader.fromParams({
parent: request.parent ?? '',
});
- this.initialize();
- return this.innerApiCalls.listLogMetrics(request, options, callback);
+ this.initialize().catch((err) => {
+ throw err;
+ });
+ const wrappedCallback:
+ | PaginationCallback<
+ protos.google.logging.v2.IListLogMetricsRequest,
+ protos.google.logging.v2.IListLogMetricsResponse | null | undefined,
+ protos.google.logging.v2.ILogMetric
+ >
+ | undefined = callback
+ ? (error, values, nextPageRequest, rawResponse) => {
+ this._log.info('listLogMetrics values %j', values);
+ callback!(error, values, nextPageRequest, rawResponse); // We verified callback above.
+ }
+ : undefined;
+ this._log.info('listLogMetrics request %j', request);
+ return this.innerApiCalls
+ .listLogMetrics(request, options, wrappedCallback)
+ ?.then(
+ ([response, input, output]: [
+ protos.google.logging.v2.ILogMetric[],
+ protos.google.logging.v2.IListLogMetricsRequest | null,
+ protos.google.logging.v2.IListLogMetricsResponse,
+ ]) => {
+ this._log.info('listLogMetrics values %j', response);
+ return [response, input, output];
+ },
+ );
}
/**
- * Equivalent to `method.name.toCamelCase()`, but returns a NodeJS Stream object.
+ * Equivalent to `listLogMetrics`, but returns a NodeJS Stream object.
* @param {Object} request
* The request object that will be sent.
* @param {string} request.parent
@@ -981,7 +1178,10 @@ export class MetricsServiceV2Client {
});
const defaultCallSettings = this._defaults['listLogMetrics'];
const callSettings = defaultCallSettings.merge(options);
- this.initialize();
+ this.initialize().catch((err) => {
+ throw err;
+ });
+ this._log.info('listLogMetrics stream %j', request);
return this.descriptors.page.listLogMetrics.createStream(
this.innerApiCalls.listLogMetrics as GaxCall,
request,
@@ -1034,7 +1234,10 @@ export class MetricsServiceV2Client {
});
const defaultCallSettings = this._defaults['listLogMetrics'];
const callSettings = defaultCallSettings.merge(options);
- this.initialize();
+ this.initialize().catch((err) => {
+ throw err;
+ });
+ this._log.info('listLogMetrics iterate %j', request);
return this.descriptors.page.listLogMetrics.asyncIterate(
this.innerApiCalls['listLogMetrics'] as GaxCall,
request as {},
@@ -2713,7 +2916,8 @@ export class MetricsServiceV2Client {
*/
close(): Promise {
if (this.metricsServiceV2Stub && !this._terminated) {
- return this.metricsServiceV2Stub.then(stub => {
+ return this.metricsServiceV2Stub.then((stub) => {
+ this._log.info('ending gRPC channel');
this._terminated = true;
stub.close();
});
diff --git a/handwritten/logging/system-test/install.ts b/handwritten/logging/system-test/install.ts
index 22bbd83361c4..ccf167042d2e 100644
--- a/handwritten/logging/system-test/install.ts
+++ b/handwritten/logging/system-test/install.ts
@@ -1,4 +1,4 @@
-// Copyright 2024 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -16,9 +16,9 @@
// ** https://github.com/googleapis/gapic-generator-typescript **
// ** All changes to this file may be overwritten. **
-import {packNTest} from 'pack-n-play';
-import {readFileSync} from 'fs';
-import {describe, it} from 'mocha';
+import { packNTest } from 'pack-n-play';
+import { readFileSync } from 'fs';
+import { describe, it } from 'mocha';
describe('📦 pack-n-play test', () => {
it('TypeScript code', async function () {
@@ -41,7 +41,7 @@ describe('📦 pack-n-play test', () => {
packageDir: process.cwd(),
sample: {
description: 'JavaScript user can use the library',
- ts: readFileSync(
+ cjs: readFileSync(
'./system-test/fixtures/sample/src/index.js',
).toString(),
},
diff --git a/handwritten/logging/test/gapic_config_service_v2_v2.ts b/handwritten/logging/test/gapic_config_service_v2_v2.ts
index 590ace19a7b1..74d54c9ce69d 100644
--- a/handwritten/logging/test/gapic_config_service_v2_v2.ts
+++ b/handwritten/logging/test/gapic_config_service_v2_v2.ts
@@ -1,4 +1,4 @@
-// Copyright 2024 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -19,13 +19,13 @@
import * as protos from '../protos/protos';
import * as assert from 'assert';
import * as sinon from 'sinon';
-import {SinonStub} from 'sinon';
-import {describe, it} from 'mocha';
+import { SinonStub } from 'sinon';
+import { describe, it } from 'mocha';
import * as configservicev2Module from '../src';
-import {PassThrough} from 'stream';
+import { PassThrough } from 'stream';
-import {protobuf, LROperation, operationsProtos} from 'google-gax';
+import { protobuf, LROperation, operationsProtos } from 'google-gax';
// Dynamically loaded proto JSON is needed to get the type information
// to fill in default values for request objects
@@ -45,7 +45,7 @@ function getTypeDefaultValue(typeName: string, fields: string[]) {
function generateSampleMessage(instance: T) {
const filledObject = (
instance.constructor as typeof protobuf.Message
- ).toObject(instance as protobuf.Message, {defaults: true});
+ ).toObject(instance as protobuf.Message, { defaults: true });
return (instance.constructor as typeof protobuf.Message).fromObject(
filledObject,
) as T;
@@ -149,9 +149,9 @@ function stubAsyncIterationCall(
return Promise.reject(error);
}
if (counter >= responses!.length) {
- return Promise.resolve({done: true, value: undefined});
+ return Promise.resolve({ done: true, value: undefined });
}
- return Promise.resolve({done: false, value: responses![counter++]});
+ return Promise.resolve({ done: false, value: responses![counter++] });
},
};
},
@@ -271,7 +271,7 @@ describe('v2.ConfigServiceV2Client', () => {
it('has initialize method and supports deferred initialization', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
assert.strictEqual(client.configServiceV2Stub, undefined);
@@ -279,33 +279,45 @@ describe('v2.ConfigServiceV2Client', () => {
assert(client.configServiceV2Stub);
});
- it('has close method for the initialized client', done => {
+ it('has close method for the initialized client', (done) => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
- assert(client.configServiceV2Stub);
- client.close().then(() => {
- done();
+ client.initialize().catch((err) => {
+ throw err;
});
+ assert(client.configServiceV2Stub);
+ client
+ .close()
+ .then(() => {
+ done();
+ })
+ .catch((err) => {
+ throw err;
+ });
});
- it('has close method for the non-initialized client', done => {
+ it('has close method for the non-initialized client', (done) => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
assert.strictEqual(client.configServiceV2Stub, undefined);
- client.close().then(() => {
- done();
- });
+ client
+ .close()
+ .then(() => {
+ done();
+ })
+ .catch((err) => {
+ throw err;
+ });
});
it('has getProjectId method', async () => {
const fakeProjectId = 'fake-project-id';
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
client.auth.getProjectId = sinon.stub().resolves(fakeProjectId);
@@ -317,7 +329,7 @@ describe('v2.ConfigServiceV2Client', () => {
it('has getProjectId method with callback', async () => {
const fakeProjectId = 'fake-project-id';
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
client.auth.getProjectId = sinon
@@ -340,10 +352,10 @@ describe('v2.ConfigServiceV2Client', () => {
describe('getBucket', () => {
it('invokes getBucket without error', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.GetBucketRequest(),
);
@@ -352,7 +364,7 @@ describe('v2.ConfigServiceV2Client', () => {
['name'],
);
request.name = defaultValue1;
- const expectedHeaderRequestParams = `name=${defaultValue1}`;
+ const expectedHeaderRequestParams = `name=${defaultValue1 ?? ''}`;
const expectedResponse = generateSampleMessage(
new protos.google.logging.v2.LogBucket(),
);
@@ -371,10 +383,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes getBucket without error using callback', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.GetBucketRequest(),
);
@@ -383,7 +395,7 @@ describe('v2.ConfigServiceV2Client', () => {
['name'],
);
request.name = defaultValue1;
- const expectedHeaderRequestParams = `name=${defaultValue1}`;
+ const expectedHeaderRequestParams = `name=${defaultValue1 ?? ''}`;
const expectedResponse = generateSampleMessage(
new protos.google.logging.v2.LogBucket(),
);
@@ -418,10 +430,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes getBucket with error', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.GetBucketRequest(),
);
@@ -430,7 +442,7 @@ describe('v2.ConfigServiceV2Client', () => {
['name'],
);
request.name = defaultValue1;
- const expectedHeaderRequestParams = `name=${defaultValue1}`;
+ const expectedHeaderRequestParams = `name=${defaultValue1 ?? ''}`;
const expectedError = new Error('expected');
client.innerApiCalls.getBucket = stubSimpleCall(undefined, expectedError);
await assert.rejects(client.getBucket(request), expectedError);
@@ -446,10 +458,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes getBucket with closed client', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.GetBucketRequest(),
);
@@ -459,7 +471,9 @@ describe('v2.ConfigServiceV2Client', () => {
);
request.name = defaultValue1;
const expectedError = new Error('The client has already been closed.');
- client.close();
+ client.close().catch((err) => {
+ throw err;
+ });
await assert.rejects(client.getBucket(request), expectedError);
});
});
@@ -467,10 +481,10 @@ describe('v2.ConfigServiceV2Client', () => {
describe('createBucket', () => {
it('invokes createBucket without error', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.CreateBucketRequest(),
);
@@ -479,7 +493,7 @@ describe('v2.ConfigServiceV2Client', () => {
['parent'],
);
request.parent = defaultValue1;
- const expectedHeaderRequestParams = `parent=${defaultValue1}`;
+ const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`;
const expectedResponse = generateSampleMessage(
new protos.google.logging.v2.LogBucket(),
);
@@ -498,10 +512,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes createBucket without error using callback', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.CreateBucketRequest(),
);
@@ -510,7 +524,7 @@ describe('v2.ConfigServiceV2Client', () => {
['parent'],
);
request.parent = defaultValue1;
- const expectedHeaderRequestParams = `parent=${defaultValue1}`;
+ const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`;
const expectedResponse = generateSampleMessage(
new protos.google.logging.v2.LogBucket(),
);
@@ -545,10 +559,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes createBucket with error', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.CreateBucketRequest(),
);
@@ -557,7 +571,7 @@ describe('v2.ConfigServiceV2Client', () => {
['parent'],
);
request.parent = defaultValue1;
- const expectedHeaderRequestParams = `parent=${defaultValue1}`;
+ const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`;
const expectedError = new Error('expected');
client.innerApiCalls.createBucket = stubSimpleCall(
undefined,
@@ -576,10 +590,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes createBucket with closed client', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.CreateBucketRequest(),
);
@@ -589,7 +603,9 @@ describe('v2.ConfigServiceV2Client', () => {
);
request.parent = defaultValue1;
const expectedError = new Error('The client has already been closed.');
- client.close();
+ client.close().catch((err) => {
+ throw err;
+ });
await assert.rejects(client.createBucket(request), expectedError);
});
});
@@ -597,10 +613,10 @@ describe('v2.ConfigServiceV2Client', () => {
describe('updateBucket', () => {
it('invokes updateBucket without error', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.UpdateBucketRequest(),
);
@@ -609,7 +625,7 @@ describe('v2.ConfigServiceV2Client', () => {
['name'],
);
request.name = defaultValue1;
- const expectedHeaderRequestParams = `name=${defaultValue1}`;
+ const expectedHeaderRequestParams = `name=${defaultValue1 ?? ''}`;
const expectedResponse = generateSampleMessage(
new protos.google.logging.v2.LogBucket(),
);
@@ -628,10 +644,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes updateBucket without error using callback', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.UpdateBucketRequest(),
);
@@ -640,7 +656,7 @@ describe('v2.ConfigServiceV2Client', () => {
['name'],
);
request.name = defaultValue1;
- const expectedHeaderRequestParams = `name=${defaultValue1}`;
+ const expectedHeaderRequestParams = `name=${defaultValue1 ?? ''}`;
const expectedResponse = generateSampleMessage(
new protos.google.logging.v2.LogBucket(),
);
@@ -675,10 +691,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes updateBucket with error', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.UpdateBucketRequest(),
);
@@ -687,7 +703,7 @@ describe('v2.ConfigServiceV2Client', () => {
['name'],
);
request.name = defaultValue1;
- const expectedHeaderRequestParams = `name=${defaultValue1}`;
+ const expectedHeaderRequestParams = `name=${defaultValue1 ?? ''}`;
const expectedError = new Error('expected');
client.innerApiCalls.updateBucket = stubSimpleCall(
undefined,
@@ -706,10 +722,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes updateBucket with closed client', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.UpdateBucketRequest(),
);
@@ -719,7 +735,9 @@ describe('v2.ConfigServiceV2Client', () => {
);
request.name = defaultValue1;
const expectedError = new Error('The client has already been closed.');
- client.close();
+ client.close().catch((err) => {
+ throw err;
+ });
await assert.rejects(client.updateBucket(request), expectedError);
});
});
@@ -727,10 +745,10 @@ describe('v2.ConfigServiceV2Client', () => {
describe('deleteBucket', () => {
it('invokes deleteBucket without error', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.DeleteBucketRequest(),
);
@@ -739,7 +757,7 @@ describe('v2.ConfigServiceV2Client', () => {
['name'],
);
request.name = defaultValue1;
- const expectedHeaderRequestParams = `name=${defaultValue1}`;
+ const expectedHeaderRequestParams = `name=${defaultValue1 ?? ''}`;
const expectedResponse = generateSampleMessage(
new protos.google.protobuf.Empty(),
);
@@ -758,10 +776,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes deleteBucket without error using callback', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.DeleteBucketRequest(),
);
@@ -770,7 +788,7 @@ describe('v2.ConfigServiceV2Client', () => {
['name'],
);
request.name = defaultValue1;
- const expectedHeaderRequestParams = `name=${defaultValue1}`;
+ const expectedHeaderRequestParams = `name=${defaultValue1 ?? ''}`;
const expectedResponse = generateSampleMessage(
new protos.google.protobuf.Empty(),
);
@@ -805,10 +823,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes deleteBucket with error', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.DeleteBucketRequest(),
);
@@ -817,7 +835,7 @@ describe('v2.ConfigServiceV2Client', () => {
['name'],
);
request.name = defaultValue1;
- const expectedHeaderRequestParams = `name=${defaultValue1}`;
+ const expectedHeaderRequestParams = `name=${defaultValue1 ?? ''}`;
const expectedError = new Error('expected');
client.innerApiCalls.deleteBucket = stubSimpleCall(
undefined,
@@ -836,10 +854,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes deleteBucket with closed client', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.DeleteBucketRequest(),
);
@@ -849,7 +867,9 @@ describe('v2.ConfigServiceV2Client', () => {
);
request.name = defaultValue1;
const expectedError = new Error('The client has already been closed.');
- client.close();
+ client.close().catch((err) => {
+ throw err;
+ });
await assert.rejects(client.deleteBucket(request), expectedError);
});
});
@@ -857,10 +877,10 @@ describe('v2.ConfigServiceV2Client', () => {
describe('undeleteBucket', () => {
it('invokes undeleteBucket without error', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.UndeleteBucketRequest(),
);
@@ -869,7 +889,7 @@ describe('v2.ConfigServiceV2Client', () => {
['name'],
);
request.name = defaultValue1;
- const expectedHeaderRequestParams = `name=${defaultValue1}`;
+ const expectedHeaderRequestParams = `name=${defaultValue1 ?? ''}`;
const expectedResponse = generateSampleMessage(
new protos.google.protobuf.Empty(),
);
@@ -888,10 +908,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes undeleteBucket without error using callback', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.UndeleteBucketRequest(),
);
@@ -900,7 +920,7 @@ describe('v2.ConfigServiceV2Client', () => {
['name'],
);
request.name = defaultValue1;
- const expectedHeaderRequestParams = `name=${defaultValue1}`;
+ const expectedHeaderRequestParams = `name=${defaultValue1 ?? ''}`;
const expectedResponse = generateSampleMessage(
new protos.google.protobuf.Empty(),
);
@@ -935,10 +955,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes undeleteBucket with error', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.UndeleteBucketRequest(),
);
@@ -947,7 +967,7 @@ describe('v2.ConfigServiceV2Client', () => {
['name'],
);
request.name = defaultValue1;
- const expectedHeaderRequestParams = `name=${defaultValue1}`;
+ const expectedHeaderRequestParams = `name=${defaultValue1 ?? ''}`;
const expectedError = new Error('expected');
client.innerApiCalls.undeleteBucket = stubSimpleCall(
undefined,
@@ -966,10 +986,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes undeleteBucket with closed client', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.UndeleteBucketRequest(),
);
@@ -979,7 +999,9 @@ describe('v2.ConfigServiceV2Client', () => {
);
request.name = defaultValue1;
const expectedError = new Error('The client has already been closed.');
- client.close();
+ client.close().catch((err) => {
+ throw err;
+ });
await assert.rejects(client.undeleteBucket(request), expectedError);
});
});
@@ -987,10 +1009,10 @@ describe('v2.ConfigServiceV2Client', () => {
describe('getView', () => {
it('invokes getView without error', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.GetViewRequest(),
);
@@ -999,7 +1021,7 @@ describe('v2.ConfigServiceV2Client', () => {
['name'],
);
request.name = defaultValue1;
- const expectedHeaderRequestParams = `name=${defaultValue1}`;
+ const expectedHeaderRequestParams = `name=${defaultValue1 ?? ''}`;
const expectedResponse = generateSampleMessage(
new protos.google.logging.v2.LogView(),
);
@@ -1018,10 +1040,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes getView without error using callback', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.GetViewRequest(),
);
@@ -1030,7 +1052,7 @@ describe('v2.ConfigServiceV2Client', () => {
['name'],
);
request.name = defaultValue1;
- const expectedHeaderRequestParams = `name=${defaultValue1}`;
+ const expectedHeaderRequestParams = `name=${defaultValue1 ?? ''}`;
const expectedResponse = generateSampleMessage(
new protos.google.logging.v2.LogView(),
);
@@ -1065,10 +1087,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes getView with error', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.GetViewRequest(),
);
@@ -1077,7 +1099,7 @@ describe('v2.ConfigServiceV2Client', () => {
['name'],
);
request.name = defaultValue1;
- const expectedHeaderRequestParams = `name=${defaultValue1}`;
+ const expectedHeaderRequestParams = `name=${defaultValue1 ?? ''}`;
const expectedError = new Error('expected');
client.innerApiCalls.getView = stubSimpleCall(undefined, expectedError);
await assert.rejects(client.getView(request), expectedError);
@@ -1093,10 +1115,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes getView with closed client', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.GetViewRequest(),
);
@@ -1106,7 +1128,9 @@ describe('v2.ConfigServiceV2Client', () => {
);
request.name = defaultValue1;
const expectedError = new Error('The client has already been closed.');
- client.close();
+ client.close().catch((err) => {
+ throw err;
+ });
await assert.rejects(client.getView(request), expectedError);
});
});
@@ -1114,10 +1138,10 @@ describe('v2.ConfigServiceV2Client', () => {
describe('createView', () => {
it('invokes createView without error', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.CreateViewRequest(),
);
@@ -1126,7 +1150,7 @@ describe('v2.ConfigServiceV2Client', () => {
['parent'],
);
request.parent = defaultValue1;
- const expectedHeaderRequestParams = `parent=${defaultValue1}`;
+ const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`;
const expectedResponse = generateSampleMessage(
new protos.google.logging.v2.LogView(),
);
@@ -1145,10 +1169,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes createView without error using callback', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.CreateViewRequest(),
);
@@ -1157,7 +1181,7 @@ describe('v2.ConfigServiceV2Client', () => {
['parent'],
);
request.parent = defaultValue1;
- const expectedHeaderRequestParams = `parent=${defaultValue1}`;
+ const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`;
const expectedResponse = generateSampleMessage(
new protos.google.logging.v2.LogView(),
);
@@ -1192,10 +1216,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes createView with error', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.CreateViewRequest(),
);
@@ -1204,7 +1228,7 @@ describe('v2.ConfigServiceV2Client', () => {
['parent'],
);
request.parent = defaultValue1;
- const expectedHeaderRequestParams = `parent=${defaultValue1}`;
+ const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`;
const expectedError = new Error('expected');
client.innerApiCalls.createView = stubSimpleCall(
undefined,
@@ -1223,10 +1247,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes createView with closed client', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.CreateViewRequest(),
);
@@ -1236,7 +1260,9 @@ describe('v2.ConfigServiceV2Client', () => {
);
request.parent = defaultValue1;
const expectedError = new Error('The client has already been closed.');
- client.close();
+ client.close().catch((err) => {
+ throw err;
+ });
await assert.rejects(client.createView(request), expectedError);
});
});
@@ -1244,10 +1270,10 @@ describe('v2.ConfigServiceV2Client', () => {
describe('updateView', () => {
it('invokes updateView without error', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.UpdateViewRequest(),
);
@@ -1256,7 +1282,7 @@ describe('v2.ConfigServiceV2Client', () => {
['name'],
);
request.name = defaultValue1;
- const expectedHeaderRequestParams = `name=${defaultValue1}`;
+ const expectedHeaderRequestParams = `name=${defaultValue1 ?? ''}`;
const expectedResponse = generateSampleMessage(
new protos.google.logging.v2.LogView(),
);
@@ -1275,10 +1301,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes updateView without error using callback', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.UpdateViewRequest(),
);
@@ -1287,7 +1313,7 @@ describe('v2.ConfigServiceV2Client', () => {
['name'],
);
request.name = defaultValue1;
- const expectedHeaderRequestParams = `name=${defaultValue1}`;
+ const expectedHeaderRequestParams = `name=${defaultValue1 ?? ''}`;
const expectedResponse = generateSampleMessage(
new protos.google.logging.v2.LogView(),
);
@@ -1322,10 +1348,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes updateView with error', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.UpdateViewRequest(),
);
@@ -1334,7 +1360,7 @@ describe('v2.ConfigServiceV2Client', () => {
['name'],
);
request.name = defaultValue1;
- const expectedHeaderRequestParams = `name=${defaultValue1}`;
+ const expectedHeaderRequestParams = `name=${defaultValue1 ?? ''}`;
const expectedError = new Error('expected');
client.innerApiCalls.updateView = stubSimpleCall(
undefined,
@@ -1353,10 +1379,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes updateView with closed client', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.UpdateViewRequest(),
);
@@ -1366,7 +1392,9 @@ describe('v2.ConfigServiceV2Client', () => {
);
request.name = defaultValue1;
const expectedError = new Error('The client has already been closed.');
- client.close();
+ client.close().catch((err) => {
+ throw err;
+ });
await assert.rejects(client.updateView(request), expectedError);
});
});
@@ -1374,10 +1402,10 @@ describe('v2.ConfigServiceV2Client', () => {
describe('deleteView', () => {
it('invokes deleteView without error', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.DeleteViewRequest(),
);
@@ -1386,7 +1414,7 @@ describe('v2.ConfigServiceV2Client', () => {
['name'],
);
request.name = defaultValue1;
- const expectedHeaderRequestParams = `name=${defaultValue1}`;
+ const expectedHeaderRequestParams = `name=${defaultValue1 ?? ''}`;
const expectedResponse = generateSampleMessage(
new protos.google.protobuf.Empty(),
);
@@ -1405,10 +1433,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes deleteView without error using callback', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.DeleteViewRequest(),
);
@@ -1417,7 +1445,7 @@ describe('v2.ConfigServiceV2Client', () => {
['name'],
);
request.name = defaultValue1;
- const expectedHeaderRequestParams = `name=${defaultValue1}`;
+ const expectedHeaderRequestParams = `name=${defaultValue1 ?? ''}`;
const expectedResponse = generateSampleMessage(
new protos.google.protobuf.Empty(),
);
@@ -1452,10 +1480,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes deleteView with error', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.DeleteViewRequest(),
);
@@ -1464,7 +1492,7 @@ describe('v2.ConfigServiceV2Client', () => {
['name'],
);
request.name = defaultValue1;
- const expectedHeaderRequestParams = `name=${defaultValue1}`;
+ const expectedHeaderRequestParams = `name=${defaultValue1 ?? ''}`;
const expectedError = new Error('expected');
client.innerApiCalls.deleteView = stubSimpleCall(
undefined,
@@ -1483,10 +1511,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes deleteView with closed client', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.DeleteViewRequest(),
);
@@ -1496,7 +1524,9 @@ describe('v2.ConfigServiceV2Client', () => {
);
request.name = defaultValue1;
const expectedError = new Error('The client has already been closed.');
- client.close();
+ client.close().catch((err) => {
+ throw err;
+ });
await assert.rejects(client.deleteView(request), expectedError);
});
});
@@ -1504,10 +1534,10 @@ describe('v2.ConfigServiceV2Client', () => {
describe('getSink', () => {
it('invokes getSink without error', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.GetSinkRequest(),
);
@@ -1516,7 +1546,7 @@ describe('v2.ConfigServiceV2Client', () => {
['sinkName'],
);
request.sinkName = defaultValue1;
- const expectedHeaderRequestParams = `sink_name=${defaultValue1}`;
+ const expectedHeaderRequestParams = `sink_name=${defaultValue1 ?? ''}`;
const expectedResponse = generateSampleMessage(
new protos.google.logging.v2.LogSink(),
);
@@ -1535,10 +1565,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes getSink without error using callback', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.GetSinkRequest(),
);
@@ -1547,7 +1577,7 @@ describe('v2.ConfigServiceV2Client', () => {
['sinkName'],
);
request.sinkName = defaultValue1;
- const expectedHeaderRequestParams = `sink_name=${defaultValue1}`;
+ const expectedHeaderRequestParams = `sink_name=${defaultValue1 ?? ''}`;
const expectedResponse = generateSampleMessage(
new protos.google.logging.v2.LogSink(),
);
@@ -1582,10 +1612,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes getSink with error', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.GetSinkRequest(),
);
@@ -1594,7 +1624,7 @@ describe('v2.ConfigServiceV2Client', () => {
['sinkName'],
);
request.sinkName = defaultValue1;
- const expectedHeaderRequestParams = `sink_name=${defaultValue1}`;
+ const expectedHeaderRequestParams = `sink_name=${defaultValue1 ?? ''}`;
const expectedError = new Error('expected');
client.innerApiCalls.getSink = stubSimpleCall(undefined, expectedError);
await assert.rejects(client.getSink(request), expectedError);
@@ -1610,10 +1640,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes getSink with closed client', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.GetSinkRequest(),
);
@@ -1623,7 +1653,9 @@ describe('v2.ConfigServiceV2Client', () => {
);
request.sinkName = defaultValue1;
const expectedError = new Error('The client has already been closed.');
- client.close();
+ client.close().catch((err) => {
+ throw err;
+ });
await assert.rejects(client.getSink(request), expectedError);
});
});
@@ -1631,10 +1663,10 @@ describe('v2.ConfigServiceV2Client', () => {
describe('createSink', () => {
it('invokes createSink without error', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.CreateSinkRequest(),
);
@@ -1643,7 +1675,7 @@ describe('v2.ConfigServiceV2Client', () => {
['parent'],
);
request.parent = defaultValue1;
- const expectedHeaderRequestParams = `parent=${defaultValue1}`;
+ const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`;
const expectedResponse = generateSampleMessage(
new protos.google.logging.v2.LogSink(),
);
@@ -1662,10 +1694,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes createSink without error using callback', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.CreateSinkRequest(),
);
@@ -1674,7 +1706,7 @@ describe('v2.ConfigServiceV2Client', () => {
['parent'],
);
request.parent = defaultValue1;
- const expectedHeaderRequestParams = `parent=${defaultValue1}`;
+ const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`;
const expectedResponse = generateSampleMessage(
new protos.google.logging.v2.LogSink(),
);
@@ -1709,10 +1741,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes createSink with error', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.CreateSinkRequest(),
);
@@ -1721,7 +1753,7 @@ describe('v2.ConfigServiceV2Client', () => {
['parent'],
);
request.parent = defaultValue1;
- const expectedHeaderRequestParams = `parent=${defaultValue1}`;
+ const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`;
const expectedError = new Error('expected');
client.innerApiCalls.createSink = stubSimpleCall(
undefined,
@@ -1740,10 +1772,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes createSink with closed client', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.CreateSinkRequest(),
);
@@ -1753,7 +1785,9 @@ describe('v2.ConfigServiceV2Client', () => {
);
request.parent = defaultValue1;
const expectedError = new Error('The client has already been closed.');
- client.close();
+ client.close().catch((err) => {
+ throw err;
+ });
await assert.rejects(client.createSink(request), expectedError);
});
});
@@ -1761,10 +1795,10 @@ describe('v2.ConfigServiceV2Client', () => {
describe('updateSink', () => {
it('invokes updateSink without error', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.UpdateSinkRequest(),
);
@@ -1773,7 +1807,7 @@ describe('v2.ConfigServiceV2Client', () => {
['sinkName'],
);
request.sinkName = defaultValue1;
- const expectedHeaderRequestParams = `sink_name=${defaultValue1}`;
+ const expectedHeaderRequestParams = `sink_name=${defaultValue1 ?? ''}`;
const expectedResponse = generateSampleMessage(
new protos.google.logging.v2.LogSink(),
);
@@ -1792,10 +1826,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes updateSink without error using callback', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.UpdateSinkRequest(),
);
@@ -1804,7 +1838,7 @@ describe('v2.ConfigServiceV2Client', () => {
['sinkName'],
);
request.sinkName = defaultValue1;
- const expectedHeaderRequestParams = `sink_name=${defaultValue1}`;
+ const expectedHeaderRequestParams = `sink_name=${defaultValue1 ?? ''}`;
const expectedResponse = generateSampleMessage(
new protos.google.logging.v2.LogSink(),
);
@@ -1839,10 +1873,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes updateSink with error', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.UpdateSinkRequest(),
);
@@ -1851,7 +1885,7 @@ describe('v2.ConfigServiceV2Client', () => {
['sinkName'],
);
request.sinkName = defaultValue1;
- const expectedHeaderRequestParams = `sink_name=${defaultValue1}`;
+ const expectedHeaderRequestParams = `sink_name=${defaultValue1 ?? ''}`;
const expectedError = new Error('expected');
client.innerApiCalls.updateSink = stubSimpleCall(
undefined,
@@ -1870,10 +1904,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes updateSink with closed client', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.UpdateSinkRequest(),
);
@@ -1883,7 +1917,9 @@ describe('v2.ConfigServiceV2Client', () => {
);
request.sinkName = defaultValue1;
const expectedError = new Error('The client has already been closed.');
- client.close();
+ client.close().catch((err) => {
+ throw err;
+ });
await assert.rejects(client.updateSink(request), expectedError);
});
});
@@ -1891,10 +1927,10 @@ describe('v2.ConfigServiceV2Client', () => {
describe('deleteSink', () => {
it('invokes deleteSink without error', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.DeleteSinkRequest(),
);
@@ -1903,7 +1939,7 @@ describe('v2.ConfigServiceV2Client', () => {
['sinkName'],
);
request.sinkName = defaultValue1;
- const expectedHeaderRequestParams = `sink_name=${defaultValue1}`;
+ const expectedHeaderRequestParams = `sink_name=${defaultValue1 ?? ''}`;
const expectedResponse = generateSampleMessage(
new protos.google.protobuf.Empty(),
);
@@ -1922,10 +1958,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes deleteSink without error using callback', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.DeleteSinkRequest(),
);
@@ -1934,7 +1970,7 @@ describe('v2.ConfigServiceV2Client', () => {
['sinkName'],
);
request.sinkName = defaultValue1;
- const expectedHeaderRequestParams = `sink_name=${defaultValue1}`;
+ const expectedHeaderRequestParams = `sink_name=${defaultValue1 ?? ''}`;
const expectedResponse = generateSampleMessage(
new protos.google.protobuf.Empty(),
);
@@ -1969,10 +2005,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes deleteSink with error', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.DeleteSinkRequest(),
);
@@ -1981,7 +2017,7 @@ describe('v2.ConfigServiceV2Client', () => {
['sinkName'],
);
request.sinkName = defaultValue1;
- const expectedHeaderRequestParams = `sink_name=${defaultValue1}`;
+ const expectedHeaderRequestParams = `sink_name=${defaultValue1 ?? ''}`;
const expectedError = new Error('expected');
client.innerApiCalls.deleteSink = stubSimpleCall(
undefined,
@@ -2000,10 +2036,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes deleteSink with closed client', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.DeleteSinkRequest(),
);
@@ -2013,7 +2049,9 @@ describe('v2.ConfigServiceV2Client', () => {
);
request.sinkName = defaultValue1;
const expectedError = new Error('The client has already been closed.');
- client.close();
+ client.close().catch((err) => {
+ throw err;
+ });
await assert.rejects(client.deleteSink(request), expectedError);
});
});
@@ -2021,10 +2059,10 @@ describe('v2.ConfigServiceV2Client', () => {
describe('getLink', () => {
it('invokes getLink without error', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.GetLinkRequest(),
);
@@ -2033,7 +2071,7 @@ describe('v2.ConfigServiceV2Client', () => {
['name'],
);
request.name = defaultValue1;
- const expectedHeaderRequestParams = `name=${defaultValue1}`;
+ const expectedHeaderRequestParams = `name=${defaultValue1 ?? ''}`;
const expectedResponse = generateSampleMessage(
new protos.google.logging.v2.Link(),
);
@@ -2052,10 +2090,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes getLink without error using callback', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.GetLinkRequest(),
);
@@ -2064,7 +2102,7 @@ describe('v2.ConfigServiceV2Client', () => {
['name'],
);
request.name = defaultValue1;
- const expectedHeaderRequestParams = `name=${defaultValue1}`;
+ const expectedHeaderRequestParams = `name=${defaultValue1 ?? ''}`;
const expectedResponse = generateSampleMessage(
new protos.google.logging.v2.Link(),
);
@@ -2099,10 +2137,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes getLink with error', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.GetLinkRequest(),
);
@@ -2111,7 +2149,7 @@ describe('v2.ConfigServiceV2Client', () => {
['name'],
);
request.name = defaultValue1;
- const expectedHeaderRequestParams = `name=${defaultValue1}`;
+ const expectedHeaderRequestParams = `name=${defaultValue1 ?? ''}`;
const expectedError = new Error('expected');
client.innerApiCalls.getLink = stubSimpleCall(undefined, expectedError);
await assert.rejects(client.getLink(request), expectedError);
@@ -2127,10 +2165,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes getLink with closed client', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.GetLinkRequest(),
);
@@ -2140,7 +2178,9 @@ describe('v2.ConfigServiceV2Client', () => {
);
request.name = defaultValue1;
const expectedError = new Error('The client has already been closed.');
- client.close();
+ client.close().catch((err) => {
+ throw err;
+ });
await assert.rejects(client.getLink(request), expectedError);
});
});
@@ -2148,10 +2188,10 @@ describe('v2.ConfigServiceV2Client', () => {
describe('getExclusion', () => {
it('invokes getExclusion without error', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.GetExclusionRequest(),
);
@@ -2160,7 +2200,7 @@ describe('v2.ConfigServiceV2Client', () => {
['name'],
);
request.name = defaultValue1;
- const expectedHeaderRequestParams = `name=${defaultValue1}`;
+ const expectedHeaderRequestParams = `name=${defaultValue1 ?? ''}`;
const expectedResponse = generateSampleMessage(
new protos.google.logging.v2.LogExclusion(),
);
@@ -2179,10 +2219,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes getExclusion without error using callback', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.GetExclusionRequest(),
);
@@ -2191,7 +2231,7 @@ describe('v2.ConfigServiceV2Client', () => {
['name'],
);
request.name = defaultValue1;
- const expectedHeaderRequestParams = `name=${defaultValue1}`;
+ const expectedHeaderRequestParams = `name=${defaultValue1 ?? ''}`;
const expectedResponse = generateSampleMessage(
new protos.google.logging.v2.LogExclusion(),
);
@@ -2226,10 +2266,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes getExclusion with error', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.GetExclusionRequest(),
);
@@ -2238,7 +2278,7 @@ describe('v2.ConfigServiceV2Client', () => {
['name'],
);
request.name = defaultValue1;
- const expectedHeaderRequestParams = `name=${defaultValue1}`;
+ const expectedHeaderRequestParams = `name=${defaultValue1 ?? ''}`;
const expectedError = new Error('expected');
client.innerApiCalls.getExclusion = stubSimpleCall(
undefined,
@@ -2257,10 +2297,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes getExclusion with closed client', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.GetExclusionRequest(),
);
@@ -2270,7 +2310,9 @@ describe('v2.ConfigServiceV2Client', () => {
);
request.name = defaultValue1;
const expectedError = new Error('The client has already been closed.');
- client.close();
+ client.close().catch((err) => {
+ throw err;
+ });
await assert.rejects(client.getExclusion(request), expectedError);
});
});
@@ -2278,10 +2320,10 @@ describe('v2.ConfigServiceV2Client', () => {
describe('createExclusion', () => {
it('invokes createExclusion without error', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.CreateExclusionRequest(),
);
@@ -2290,7 +2332,7 @@ describe('v2.ConfigServiceV2Client', () => {
['parent'],
);
request.parent = defaultValue1;
- const expectedHeaderRequestParams = `parent=${defaultValue1}`;
+ const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`;
const expectedResponse = generateSampleMessage(
new protos.google.logging.v2.LogExclusion(),
);
@@ -2309,10 +2351,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes createExclusion without error using callback', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.CreateExclusionRequest(),
);
@@ -2321,7 +2363,7 @@ describe('v2.ConfigServiceV2Client', () => {
['parent'],
);
request.parent = defaultValue1;
- const expectedHeaderRequestParams = `parent=${defaultValue1}`;
+ const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`;
const expectedResponse = generateSampleMessage(
new protos.google.logging.v2.LogExclusion(),
);
@@ -2356,10 +2398,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes createExclusion with error', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.CreateExclusionRequest(),
);
@@ -2368,7 +2410,7 @@ describe('v2.ConfigServiceV2Client', () => {
['parent'],
);
request.parent = defaultValue1;
- const expectedHeaderRequestParams = `parent=${defaultValue1}`;
+ const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`;
const expectedError = new Error('expected');
client.innerApiCalls.createExclusion = stubSimpleCall(
undefined,
@@ -2387,10 +2429,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes createExclusion with closed client', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.CreateExclusionRequest(),
);
@@ -2400,7 +2442,9 @@ describe('v2.ConfigServiceV2Client', () => {
);
request.parent = defaultValue1;
const expectedError = new Error('The client has already been closed.');
- client.close();
+ client.close().catch((err) => {
+ throw err;
+ });
await assert.rejects(client.createExclusion(request), expectedError);
});
});
@@ -2408,10 +2452,10 @@ describe('v2.ConfigServiceV2Client', () => {
describe('updateExclusion', () => {
it('invokes updateExclusion without error', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.UpdateExclusionRequest(),
);
@@ -2420,7 +2464,7 @@ describe('v2.ConfigServiceV2Client', () => {
['name'],
);
request.name = defaultValue1;
- const expectedHeaderRequestParams = `name=${defaultValue1}`;
+ const expectedHeaderRequestParams = `name=${defaultValue1 ?? ''}`;
const expectedResponse = generateSampleMessage(
new protos.google.logging.v2.LogExclusion(),
);
@@ -2439,10 +2483,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes updateExclusion without error using callback', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.UpdateExclusionRequest(),
);
@@ -2451,7 +2495,7 @@ describe('v2.ConfigServiceV2Client', () => {
['name'],
);
request.name = defaultValue1;
- const expectedHeaderRequestParams = `name=${defaultValue1}`;
+ const expectedHeaderRequestParams = `name=${defaultValue1 ?? ''}`;
const expectedResponse = generateSampleMessage(
new protos.google.logging.v2.LogExclusion(),
);
@@ -2486,10 +2530,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes updateExclusion with error', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.UpdateExclusionRequest(),
);
@@ -2498,7 +2542,7 @@ describe('v2.ConfigServiceV2Client', () => {
['name'],
);
request.name = defaultValue1;
- const expectedHeaderRequestParams = `name=${defaultValue1}`;
+ const expectedHeaderRequestParams = `name=${defaultValue1 ?? ''}`;
const expectedError = new Error('expected');
client.innerApiCalls.updateExclusion = stubSimpleCall(
undefined,
@@ -2517,10 +2561,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes updateExclusion with closed client', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.UpdateExclusionRequest(),
);
@@ -2530,7 +2574,9 @@ describe('v2.ConfigServiceV2Client', () => {
);
request.name = defaultValue1;
const expectedError = new Error('The client has already been closed.');
- client.close();
+ client.close().catch((err) => {
+ throw err;
+ });
await assert.rejects(client.updateExclusion(request), expectedError);
});
});
@@ -2538,10 +2584,10 @@ describe('v2.ConfigServiceV2Client', () => {
describe('deleteExclusion', () => {
it('invokes deleteExclusion without error', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.DeleteExclusionRequest(),
);
@@ -2550,7 +2596,7 @@ describe('v2.ConfigServiceV2Client', () => {
['name'],
);
request.name = defaultValue1;
- const expectedHeaderRequestParams = `name=${defaultValue1}`;
+ const expectedHeaderRequestParams = `name=${defaultValue1 ?? ''}`;
const expectedResponse = generateSampleMessage(
new protos.google.protobuf.Empty(),
);
@@ -2569,10 +2615,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes deleteExclusion without error using callback', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.DeleteExclusionRequest(),
);
@@ -2581,7 +2627,7 @@ describe('v2.ConfigServiceV2Client', () => {
['name'],
);
request.name = defaultValue1;
- const expectedHeaderRequestParams = `name=${defaultValue1}`;
+ const expectedHeaderRequestParams = `name=${defaultValue1 ?? ''}`;
const expectedResponse = generateSampleMessage(
new protos.google.protobuf.Empty(),
);
@@ -2616,10 +2662,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes deleteExclusion with error', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.DeleteExclusionRequest(),
);
@@ -2628,7 +2674,7 @@ describe('v2.ConfigServiceV2Client', () => {
['name'],
);
request.name = defaultValue1;
- const expectedHeaderRequestParams = `name=${defaultValue1}`;
+ const expectedHeaderRequestParams = `name=${defaultValue1 ?? ''}`;
const expectedError = new Error('expected');
client.innerApiCalls.deleteExclusion = stubSimpleCall(
undefined,
@@ -2647,10 +2693,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes deleteExclusion with closed client', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.DeleteExclusionRequest(),
);
@@ -2660,7 +2706,9 @@ describe('v2.ConfigServiceV2Client', () => {
);
request.name = defaultValue1;
const expectedError = new Error('The client has already been closed.');
- client.close();
+ client.close().catch((err) => {
+ throw err;
+ });
await assert.rejects(client.deleteExclusion(request), expectedError);
});
});
@@ -2668,10 +2716,10 @@ describe('v2.ConfigServiceV2Client', () => {
describe('getCmekSettings', () => {
it('invokes getCmekSettings without error', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.GetCmekSettingsRequest(),
);
@@ -2680,7 +2728,7 @@ describe('v2.ConfigServiceV2Client', () => {
['name'],
);
request.name = defaultValue1;
- const expectedHeaderRequestParams = `name=${defaultValue1}`;
+ const expectedHeaderRequestParams = `name=${defaultValue1 ?? ''}`;
const expectedResponse = generateSampleMessage(
new protos.google.logging.v2.CmekSettings(),
);
@@ -2699,10 +2747,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes getCmekSettings without error using callback', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.GetCmekSettingsRequest(),
);
@@ -2711,7 +2759,7 @@ describe('v2.ConfigServiceV2Client', () => {
['name'],
);
request.name = defaultValue1;
- const expectedHeaderRequestParams = `name=${defaultValue1}`;
+ const expectedHeaderRequestParams = `name=${defaultValue1 ?? ''}`;
const expectedResponse = generateSampleMessage(
new protos.google.logging.v2.CmekSettings(),
);
@@ -2746,10 +2794,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes getCmekSettings with error', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.GetCmekSettingsRequest(),
);
@@ -2758,7 +2806,7 @@ describe('v2.ConfigServiceV2Client', () => {
['name'],
);
request.name = defaultValue1;
- const expectedHeaderRequestParams = `name=${defaultValue1}`;
+ const expectedHeaderRequestParams = `name=${defaultValue1 ?? ''}`;
const expectedError = new Error('expected');
client.innerApiCalls.getCmekSettings = stubSimpleCall(
undefined,
@@ -2777,10 +2825,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes getCmekSettings with closed client', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.GetCmekSettingsRequest(),
);
@@ -2790,7 +2838,9 @@ describe('v2.ConfigServiceV2Client', () => {
);
request.name = defaultValue1;
const expectedError = new Error('The client has already been closed.');
- client.close();
+ client.close().catch((err) => {
+ throw err;
+ });
await assert.rejects(client.getCmekSettings(request), expectedError);
});
});
@@ -2798,10 +2848,10 @@ describe('v2.ConfigServiceV2Client', () => {
describe('updateCmekSettings', () => {
it('invokes updateCmekSettings without error', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.UpdateCmekSettingsRequest(),
);
@@ -2810,7 +2860,7 @@ describe('v2.ConfigServiceV2Client', () => {
['name'],
);
request.name = defaultValue1;
- const expectedHeaderRequestParams = `name=${defaultValue1}`;
+ const expectedHeaderRequestParams = `name=${defaultValue1 ?? ''}`;
const expectedResponse = generateSampleMessage(
new protos.google.logging.v2.CmekSettings(),
);
@@ -2830,10 +2880,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes updateCmekSettings without error using callback', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.UpdateCmekSettingsRequest(),
);
@@ -2842,7 +2892,7 @@ describe('v2.ConfigServiceV2Client', () => {
['name'],
);
request.name = defaultValue1;
- const expectedHeaderRequestParams = `name=${defaultValue1}`;
+ const expectedHeaderRequestParams = `name=${defaultValue1 ?? ''}`;
const expectedResponse = generateSampleMessage(
new protos.google.logging.v2.CmekSettings(),
);
@@ -2877,10 +2927,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes updateCmekSettings with error', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.UpdateCmekSettingsRequest(),
);
@@ -2889,7 +2939,7 @@ describe('v2.ConfigServiceV2Client', () => {
['name'],
);
request.name = defaultValue1;
- const expectedHeaderRequestParams = `name=${defaultValue1}`;
+ const expectedHeaderRequestParams = `name=${defaultValue1 ?? ''}`;
const expectedError = new Error('expected');
client.innerApiCalls.updateCmekSettings = stubSimpleCall(
undefined,
@@ -2908,10 +2958,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes updateCmekSettings with closed client', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.UpdateCmekSettingsRequest(),
);
@@ -2921,7 +2971,9 @@ describe('v2.ConfigServiceV2Client', () => {
);
request.name = defaultValue1;
const expectedError = new Error('The client has already been closed.');
- client.close();
+ client.close().catch((err) => {
+ throw err;
+ });
await assert.rejects(client.updateCmekSettings(request), expectedError);
});
});
@@ -2929,10 +2981,10 @@ describe('v2.ConfigServiceV2Client', () => {
describe('getSettings', () => {
it('invokes getSettings without error', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.GetSettingsRequest(),
);
@@ -2941,7 +2993,7 @@ describe('v2.ConfigServiceV2Client', () => {
['name'],
);
request.name = defaultValue1;
- const expectedHeaderRequestParams = `name=${defaultValue1}`;
+ const expectedHeaderRequestParams = `name=${defaultValue1 ?? ''}`;
const expectedResponse = generateSampleMessage(
new protos.google.logging.v2.Settings(),
);
@@ -2960,10 +3012,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes getSettings without error using callback', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.GetSettingsRequest(),
);
@@ -2972,7 +3024,7 @@ describe('v2.ConfigServiceV2Client', () => {
['name'],
);
request.name = defaultValue1;
- const expectedHeaderRequestParams = `name=${defaultValue1}`;
+ const expectedHeaderRequestParams = `name=${defaultValue1 ?? ''}`;
const expectedResponse = generateSampleMessage(
new protos.google.logging.v2.Settings(),
);
@@ -3007,10 +3059,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes getSettings with error', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.GetSettingsRequest(),
);
@@ -3019,7 +3071,7 @@ describe('v2.ConfigServiceV2Client', () => {
['name'],
);
request.name = defaultValue1;
- const expectedHeaderRequestParams = `name=${defaultValue1}`;
+ const expectedHeaderRequestParams = `name=${defaultValue1 ?? ''}`;
const expectedError = new Error('expected');
client.innerApiCalls.getSettings = stubSimpleCall(
undefined,
@@ -3038,10 +3090,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes getSettings with closed client', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.GetSettingsRequest(),
);
@@ -3051,7 +3103,9 @@ describe('v2.ConfigServiceV2Client', () => {
);
request.name = defaultValue1;
const expectedError = new Error('The client has already been closed.');
- client.close();
+ client.close().catch((err) => {
+ throw err;
+ });
await assert.rejects(client.getSettings(request), expectedError);
});
});
@@ -3059,10 +3113,10 @@ describe('v2.ConfigServiceV2Client', () => {
describe('updateSettings', () => {
it('invokes updateSettings without error', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.UpdateSettingsRequest(),
);
@@ -3071,7 +3125,7 @@ describe('v2.ConfigServiceV2Client', () => {
['name'],
);
request.name = defaultValue1;
- const expectedHeaderRequestParams = `name=${defaultValue1}`;
+ const expectedHeaderRequestParams = `name=${defaultValue1 ?? ''}`;
const expectedResponse = generateSampleMessage(
new protos.google.logging.v2.Settings(),
);
@@ -3090,10 +3144,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes updateSettings without error using callback', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.UpdateSettingsRequest(),
);
@@ -3102,7 +3156,7 @@ describe('v2.ConfigServiceV2Client', () => {
['name'],
);
request.name = defaultValue1;
- const expectedHeaderRequestParams = `name=${defaultValue1}`;
+ const expectedHeaderRequestParams = `name=${defaultValue1 ?? ''}`;
const expectedResponse = generateSampleMessage(
new protos.google.logging.v2.Settings(),
);
@@ -3137,10 +3191,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes updateSettings with error', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.UpdateSettingsRequest(),
);
@@ -3149,7 +3203,7 @@ describe('v2.ConfigServiceV2Client', () => {
['name'],
);
request.name = defaultValue1;
- const expectedHeaderRequestParams = `name=${defaultValue1}`;
+ const expectedHeaderRequestParams = `name=${defaultValue1 ?? ''}`;
const expectedError = new Error('expected');
client.innerApiCalls.updateSettings = stubSimpleCall(
undefined,
@@ -3168,10 +3222,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes updateSettings with closed client', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.UpdateSettingsRequest(),
);
@@ -3181,7 +3235,9 @@ describe('v2.ConfigServiceV2Client', () => {
);
request.name = defaultValue1;
const expectedError = new Error('The client has already been closed.');
- client.close();
+ client.close().catch((err) => {
+ throw err;
+ });
await assert.rejects(client.updateSettings(request), expectedError);
});
});
@@ -3189,10 +3245,10 @@ describe('v2.ConfigServiceV2Client', () => {
describe('createBucketAsync', () => {
it('invokes createBucketAsync without error', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.CreateBucketRequest(),
);
@@ -3201,7 +3257,7 @@ describe('v2.ConfigServiceV2Client', () => {
['parent'],
);
request.parent = defaultValue1;
- const expectedHeaderRequestParams = `parent=${defaultValue1}`;
+ const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`;
const expectedResponse = generateSampleMessage(
new protos.google.longrunning.Operation(),
);
@@ -3222,10 +3278,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes createBucketAsync without error using callback', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.CreateBucketRequest(),
);
@@ -3234,7 +3290,7 @@ describe('v2.ConfigServiceV2Client', () => {
['parent'],
);
request.parent = defaultValue1;
- const expectedHeaderRequestParams = `parent=${defaultValue1}`;
+ const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`;
const expectedResponse = generateSampleMessage(
new protos.google.longrunning.Operation(),
);
@@ -3276,10 +3332,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes createBucketAsync with call error', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.CreateBucketRequest(),
);
@@ -3288,7 +3344,7 @@ describe('v2.ConfigServiceV2Client', () => {
['parent'],
);
request.parent = defaultValue1;
- const expectedHeaderRequestParams = `parent=${defaultValue1}`;
+ const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`;
const expectedError = new Error('expected');
client.innerApiCalls.createBucketAsync = stubLongRunningCall(
undefined,
@@ -3307,10 +3363,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes createBucketAsync with LRO error', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.CreateBucketRequest(),
);
@@ -3319,7 +3375,7 @@ describe('v2.ConfigServiceV2Client', () => {
['parent'],
);
request.parent = defaultValue1;
- const expectedHeaderRequestParams = `parent=${defaultValue1}`;
+ const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`;
const expectedError = new Error('expected');
client.innerApiCalls.createBucketAsync = stubLongRunningCall(
undefined,
@@ -3340,16 +3396,16 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes checkCreateBucketAsyncProgress without error', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const expectedResponse = generateSampleMessage(
new operationsProtos.google.longrunning.Operation(),
);
expectedResponse.name = 'test';
- expectedResponse.response = {type_url: 'url', value: Buffer.from('')};
- expectedResponse.metadata = {type_url: 'url', value: Buffer.from('')};
+ expectedResponse.response = { type_url: 'url', value: Buffer.from('') };
+ expectedResponse.metadata = { type_url: 'url', value: Buffer.from('') };
client.operationsClient.getOperation = stubSimpleCall(expectedResponse);
const decodedOperation = await client.checkCreateBucketAsyncProgress(
@@ -3362,10 +3418,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes checkCreateBucketAsyncProgress with error', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const expectedError = new Error('expected');
client.operationsClient.getOperation = stubSimpleCall(
@@ -3383,10 +3439,10 @@ describe('v2.ConfigServiceV2Client', () => {
describe('updateBucketAsync', () => {
it('invokes updateBucketAsync without error', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.UpdateBucketRequest(),
);
@@ -3395,7 +3451,7 @@ describe('v2.ConfigServiceV2Client', () => {
['name'],
);
request.name = defaultValue1;
- const expectedHeaderRequestParams = `name=${defaultValue1}`;
+ const expectedHeaderRequestParams = `name=${defaultValue1 ?? ''}`;
const expectedResponse = generateSampleMessage(
new protos.google.longrunning.Operation(),
);
@@ -3416,10 +3472,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes updateBucketAsync without error using callback', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.UpdateBucketRequest(),
);
@@ -3428,7 +3484,7 @@ describe('v2.ConfigServiceV2Client', () => {
['name'],
);
request.name = defaultValue1;
- const expectedHeaderRequestParams = `name=${defaultValue1}`;
+ const expectedHeaderRequestParams = `name=${defaultValue1 ?? ''}`;
const expectedResponse = generateSampleMessage(
new protos.google.longrunning.Operation(),
);
@@ -3470,10 +3526,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes updateBucketAsync with call error', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.UpdateBucketRequest(),
);
@@ -3482,7 +3538,7 @@ describe('v2.ConfigServiceV2Client', () => {
['name'],
);
request.name = defaultValue1;
- const expectedHeaderRequestParams = `name=${defaultValue1}`;
+ const expectedHeaderRequestParams = `name=${defaultValue1 ?? ''}`;
const expectedError = new Error('expected');
client.innerApiCalls.updateBucketAsync = stubLongRunningCall(
undefined,
@@ -3501,10 +3557,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes updateBucketAsync with LRO error', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.UpdateBucketRequest(),
);
@@ -3513,7 +3569,7 @@ describe('v2.ConfigServiceV2Client', () => {
['name'],
);
request.name = defaultValue1;
- const expectedHeaderRequestParams = `name=${defaultValue1}`;
+ const expectedHeaderRequestParams = `name=${defaultValue1 ?? ''}`;
const expectedError = new Error('expected');
client.innerApiCalls.updateBucketAsync = stubLongRunningCall(
undefined,
@@ -3534,16 +3590,16 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes checkUpdateBucketAsyncProgress without error', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const expectedResponse = generateSampleMessage(
new operationsProtos.google.longrunning.Operation(),
);
expectedResponse.name = 'test';
- expectedResponse.response = {type_url: 'url', value: Buffer.from('')};
- expectedResponse.metadata = {type_url: 'url', value: Buffer.from('')};
+ expectedResponse.response = { type_url: 'url', value: Buffer.from('') };
+ expectedResponse.metadata = { type_url: 'url', value: Buffer.from('') };
client.operationsClient.getOperation = stubSimpleCall(expectedResponse);
const decodedOperation = await client.checkUpdateBucketAsyncProgress(
@@ -3556,10 +3612,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes checkUpdateBucketAsyncProgress with error', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const expectedError = new Error('expected');
client.operationsClient.getOperation = stubSimpleCall(
@@ -3577,10 +3633,10 @@ describe('v2.ConfigServiceV2Client', () => {
describe('createLink', () => {
it('invokes createLink without error', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.CreateLinkRequest(),
);
@@ -3589,7 +3645,7 @@ describe('v2.ConfigServiceV2Client', () => {
['parent'],
);
request.parent = defaultValue1;
- const expectedHeaderRequestParams = `parent=${defaultValue1}`;
+ const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`;
const expectedResponse = generateSampleMessage(
new protos.google.longrunning.Operation(),
);
@@ -3609,10 +3665,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes createLink without error using callback', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.CreateLinkRequest(),
);
@@ -3621,7 +3677,7 @@ describe('v2.ConfigServiceV2Client', () => {
['parent'],
);
request.parent = defaultValue1;
- const expectedHeaderRequestParams = `parent=${defaultValue1}`;
+ const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`;
const expectedResponse = generateSampleMessage(
new protos.google.longrunning.Operation(),
);
@@ -3663,10 +3719,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes createLink with call error', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.CreateLinkRequest(),
);
@@ -3675,7 +3731,7 @@ describe('v2.ConfigServiceV2Client', () => {
['parent'],
);
request.parent = defaultValue1;
- const expectedHeaderRequestParams = `parent=${defaultValue1}`;
+ const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`;
const expectedError = new Error('expected');
client.innerApiCalls.createLink = stubLongRunningCall(
undefined,
@@ -3694,10 +3750,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes createLink with LRO error', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.CreateLinkRequest(),
);
@@ -3706,7 +3762,7 @@ describe('v2.ConfigServiceV2Client', () => {
['parent'],
);
request.parent = defaultValue1;
- const expectedHeaderRequestParams = `parent=${defaultValue1}`;
+ const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`;
const expectedError = new Error('expected');
client.innerApiCalls.createLink = stubLongRunningCall(
undefined,
@@ -3727,16 +3783,16 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes checkCreateLinkProgress without error', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const expectedResponse = generateSampleMessage(
new operationsProtos.google.longrunning.Operation(),
);
expectedResponse.name = 'test';
- expectedResponse.response = {type_url: 'url', value: Buffer.from('')};
- expectedResponse.metadata = {type_url: 'url', value: Buffer.from('')};
+ expectedResponse.response = { type_url: 'url', value: Buffer.from('') };
+ expectedResponse.metadata = { type_url: 'url', value: Buffer.from('') };
client.operationsClient.getOperation = stubSimpleCall(expectedResponse);
const decodedOperation = await client.checkCreateLinkProgress(
@@ -3749,10 +3805,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes checkCreateLinkProgress with error', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const expectedError = new Error('expected');
client.operationsClient.getOperation = stubSimpleCall(
@@ -3767,10 +3823,10 @@ describe('v2.ConfigServiceV2Client', () => {
describe('deleteLink', () => {
it('invokes deleteLink without error', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.DeleteLinkRequest(),
);
@@ -3779,7 +3835,7 @@ describe('v2.ConfigServiceV2Client', () => {
['name'],
);
request.name = defaultValue1;
- const expectedHeaderRequestParams = `name=${defaultValue1}`;
+ const expectedHeaderRequestParams = `name=${defaultValue1 ?? ''}`;
const expectedResponse = generateSampleMessage(
new protos.google.longrunning.Operation(),
);
@@ -3799,10 +3855,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes deleteLink without error using callback', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.DeleteLinkRequest(),
);
@@ -3811,7 +3867,7 @@ describe('v2.ConfigServiceV2Client', () => {
['name'],
);
request.name = defaultValue1;
- const expectedHeaderRequestParams = `name=${defaultValue1}`;
+ const expectedHeaderRequestParams = `name=${defaultValue1 ?? ''}`;
const expectedResponse = generateSampleMessage(
new protos.google.longrunning.Operation(),
);
@@ -3853,10 +3909,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes deleteLink with call error', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.DeleteLinkRequest(),
);
@@ -3865,7 +3921,7 @@ describe('v2.ConfigServiceV2Client', () => {
['name'],
);
request.name = defaultValue1;
- const expectedHeaderRequestParams = `name=${defaultValue1}`;
+ const expectedHeaderRequestParams = `name=${defaultValue1 ?? ''}`;
const expectedError = new Error('expected');
client.innerApiCalls.deleteLink = stubLongRunningCall(
undefined,
@@ -3884,10 +3940,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes deleteLink with LRO error', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.DeleteLinkRequest(),
);
@@ -3896,7 +3952,7 @@ describe('v2.ConfigServiceV2Client', () => {
['name'],
);
request.name = defaultValue1;
- const expectedHeaderRequestParams = `name=${defaultValue1}`;
+ const expectedHeaderRequestParams = `name=${defaultValue1 ?? ''}`;
const expectedError = new Error('expected');
client.innerApiCalls.deleteLink = stubLongRunningCall(
undefined,
@@ -3917,16 +3973,16 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes checkDeleteLinkProgress without error', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const expectedResponse = generateSampleMessage(
new operationsProtos.google.longrunning.Operation(),
);
expectedResponse.name = 'test';
- expectedResponse.response = {type_url: 'url', value: Buffer.from('')};
- expectedResponse.metadata = {type_url: 'url', value: Buffer.from('')};
+ expectedResponse.response = { type_url: 'url', value: Buffer.from('') };
+ expectedResponse.metadata = { type_url: 'url', value: Buffer.from('') };
client.operationsClient.getOperation = stubSimpleCall(expectedResponse);
const decodedOperation = await client.checkDeleteLinkProgress(
@@ -3939,10 +3995,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes checkDeleteLinkProgress with error', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const expectedError = new Error('expected');
client.operationsClient.getOperation = stubSimpleCall(
@@ -3957,10 +4013,10 @@ describe('v2.ConfigServiceV2Client', () => {
describe('copyLogEntries', () => {
it('invokes copyLogEntries without error', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.CopyLogEntriesRequest(),
);
@@ -3976,10 +4032,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes copyLogEntries without error using callback', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.CopyLogEntriesRequest(),
);
@@ -4016,10 +4072,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes copyLogEntries with call error', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.CopyLogEntriesRequest(),
);
@@ -4033,10 +4089,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes copyLogEntries with LRO error', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.CopyLogEntriesRequest(),
);
@@ -4052,16 +4108,16 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes checkCopyLogEntriesProgress without error', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const expectedResponse = generateSampleMessage(
new operationsProtos.google.longrunning.Operation(),
);
expectedResponse.name = 'test';
- expectedResponse.response = {type_url: 'url', value: Buffer.from('')};
- expectedResponse.metadata = {type_url: 'url', value: Buffer.from('')};
+ expectedResponse.response = { type_url: 'url', value: Buffer.from('') };
+ expectedResponse.metadata = { type_url: 'url', value: Buffer.from('') };
client.operationsClient.getOperation = stubSimpleCall(expectedResponse);
const decodedOperation = await client.checkCopyLogEntriesProgress(
@@ -4074,10 +4130,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes checkCopyLogEntriesProgress with error', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const expectedError = new Error('expected');
client.operationsClient.getOperation = stubSimpleCall(
@@ -4095,10 +4151,10 @@ describe('v2.ConfigServiceV2Client', () => {
describe('listBuckets', () => {
it('invokes listBuckets without error', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.ListBucketsRequest(),
);
@@ -4107,7 +4163,7 @@ describe('v2.ConfigServiceV2Client', () => {
['parent'],
);
request.parent = defaultValue1;
- const expectedHeaderRequestParams = `parent=${defaultValue1}`;
+ const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`;
const expectedResponse = [
generateSampleMessage(new protos.google.logging.v2.LogBucket()),
generateSampleMessage(new protos.google.logging.v2.LogBucket()),
@@ -4128,10 +4184,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes listBuckets without error using callback', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.ListBucketsRequest(),
);
@@ -4140,7 +4196,7 @@ describe('v2.ConfigServiceV2Client', () => {
['parent'],
);
request.parent = defaultValue1;
- const expectedHeaderRequestParams = `parent=${defaultValue1}`;
+ const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`;
const expectedResponse = [
generateSampleMessage(new protos.google.logging.v2.LogBucket()),
generateSampleMessage(new protos.google.logging.v2.LogBucket()),
@@ -4177,10 +4233,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes listBuckets with error', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.ListBucketsRequest(),
);
@@ -4189,7 +4245,7 @@ describe('v2.ConfigServiceV2Client', () => {
['parent'],
);
request.parent = defaultValue1;
- const expectedHeaderRequestParams = `parent=${defaultValue1}`;
+ const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`;
const expectedError = new Error('expected');
client.innerApiCalls.listBuckets = stubSimpleCall(
undefined,
@@ -4208,10 +4264,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes listBucketsStream without error', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.ListBucketsRequest(),
);
@@ -4220,7 +4276,7 @@ describe('v2.ConfigServiceV2Client', () => {
['parent'],
);
request.parent = defaultValue1;
- const expectedHeaderRequestParams = `parent=${defaultValue1}`;
+ const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`;
const expectedResponse = [
generateSampleMessage(new protos.google.logging.v2.LogBucket()),
generateSampleMessage(new protos.google.logging.v2.LogBucket()),
@@ -4259,10 +4315,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes listBucketsStream with error', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.ListBucketsRequest(),
);
@@ -4271,7 +4327,7 @@ describe('v2.ConfigServiceV2Client', () => {
['parent'],
);
request.parent = defaultValue1;
- const expectedHeaderRequestParams = `parent=${defaultValue1}`;
+ const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`;
const expectedError = new Error('expected');
client.descriptors.page.listBuckets.createStream = stubPageStreamingCall(
undefined,
@@ -4307,10 +4363,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('uses async iteration with listBuckets without error', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.ListBucketsRequest(),
);
@@ -4319,7 +4375,7 @@ describe('v2.ConfigServiceV2Client', () => {
['parent'],
);
request.parent = defaultValue1;
- const expectedHeaderRequestParams = `parent=${defaultValue1}`;
+ const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`;
const expectedResponse = [
generateSampleMessage(new protos.google.logging.v2.LogBucket()),
generateSampleMessage(new protos.google.logging.v2.LogBucket()),
@@ -4350,10 +4406,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('uses async iteration with listBuckets with error', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.ListBucketsRequest(),
);
@@ -4362,7 +4418,7 @@ describe('v2.ConfigServiceV2Client', () => {
['parent'],
);
request.parent = defaultValue1;
- const expectedHeaderRequestParams = `parent=${defaultValue1}`;
+ const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`;
const expectedError = new Error('expected');
client.descriptors.page.listBuckets.asyncIterate = stubAsyncIterationCall(
undefined,
@@ -4394,10 +4450,10 @@ describe('v2.ConfigServiceV2Client', () => {
describe('listViews', () => {
it('invokes listViews without error', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.ListViewsRequest(),
);
@@ -4406,7 +4462,7 @@ describe('v2.ConfigServiceV2Client', () => {
['parent'],
);
request.parent = defaultValue1;
- const expectedHeaderRequestParams = `parent=${defaultValue1}`;
+ const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`;
const expectedResponse = [
generateSampleMessage(new protos.google.logging.v2.LogView()),
generateSampleMessage(new protos.google.logging.v2.LogView()),
@@ -4427,10 +4483,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes listViews without error using callback', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.ListViewsRequest(),
);
@@ -4439,7 +4495,7 @@ describe('v2.ConfigServiceV2Client', () => {
['parent'],
);
request.parent = defaultValue1;
- const expectedHeaderRequestParams = `parent=${defaultValue1}`;
+ const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`;
const expectedResponse = [
generateSampleMessage(new protos.google.logging.v2.LogView()),
generateSampleMessage(new protos.google.logging.v2.LogView()),
@@ -4476,10 +4532,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes listViews with error', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.ListViewsRequest(),
);
@@ -4488,7 +4544,7 @@ describe('v2.ConfigServiceV2Client', () => {
['parent'],
);
request.parent = defaultValue1;
- const expectedHeaderRequestParams = `parent=${defaultValue1}`;
+ const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`;
const expectedError = new Error('expected');
client.innerApiCalls.listViews = stubSimpleCall(undefined, expectedError);
await assert.rejects(client.listViews(request), expectedError);
@@ -4504,10 +4560,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes listViewsStream without error', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.ListViewsRequest(),
);
@@ -4516,7 +4572,7 @@ describe('v2.ConfigServiceV2Client', () => {
['parent'],
);
request.parent = defaultValue1;
- const expectedHeaderRequestParams = `parent=${defaultValue1}`;
+ const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`;
const expectedResponse = [
generateSampleMessage(new protos.google.logging.v2.LogView()),
generateSampleMessage(new protos.google.logging.v2.LogView()),
@@ -4555,10 +4611,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes listViewsStream with error', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.ListViewsRequest(),
);
@@ -4567,7 +4623,7 @@ describe('v2.ConfigServiceV2Client', () => {
['parent'],
);
request.parent = defaultValue1;
- const expectedHeaderRequestParams = `parent=${defaultValue1}`;
+ const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`;
const expectedError = new Error('expected');
client.descriptors.page.listViews.createStream = stubPageStreamingCall(
undefined,
@@ -4603,10 +4659,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('uses async iteration with listViews without error', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.ListViewsRequest(),
);
@@ -4615,7 +4671,7 @@ describe('v2.ConfigServiceV2Client', () => {
['parent'],
);
request.parent = defaultValue1;
- const expectedHeaderRequestParams = `parent=${defaultValue1}`;
+ const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`;
const expectedResponse = [
generateSampleMessage(new protos.google.logging.v2.LogView()),
generateSampleMessage(new protos.google.logging.v2.LogView()),
@@ -4645,10 +4701,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('uses async iteration with listViews with error', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.ListViewsRequest(),
);
@@ -4657,7 +4713,7 @@ describe('v2.ConfigServiceV2Client', () => {
['parent'],
);
request.parent = defaultValue1;
- const expectedHeaderRequestParams = `parent=${defaultValue1}`;
+ const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`;
const expectedError = new Error('expected');
client.descriptors.page.listViews.asyncIterate = stubAsyncIterationCall(
undefined,
@@ -4688,10 +4744,10 @@ describe('v2.ConfigServiceV2Client', () => {
describe('listSinks', () => {
it('invokes listSinks without error', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.ListSinksRequest(),
);
@@ -4700,7 +4756,7 @@ describe('v2.ConfigServiceV2Client', () => {
['parent'],
);
request.parent = defaultValue1;
- const expectedHeaderRequestParams = `parent=${defaultValue1}`;
+ const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`;
const expectedResponse = [
generateSampleMessage(new protos.google.logging.v2.LogSink()),
generateSampleMessage(new protos.google.logging.v2.LogSink()),
@@ -4721,10 +4777,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes listSinks without error using callback', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.ListSinksRequest(),
);
@@ -4733,7 +4789,7 @@ describe('v2.ConfigServiceV2Client', () => {
['parent'],
);
request.parent = defaultValue1;
- const expectedHeaderRequestParams = `parent=${defaultValue1}`;
+ const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`;
const expectedResponse = [
generateSampleMessage(new protos.google.logging.v2.LogSink()),
generateSampleMessage(new protos.google.logging.v2.LogSink()),
@@ -4770,10 +4826,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes listSinks with error', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.ListSinksRequest(),
);
@@ -4782,7 +4838,7 @@ describe('v2.ConfigServiceV2Client', () => {
['parent'],
);
request.parent = defaultValue1;
- const expectedHeaderRequestParams = `parent=${defaultValue1}`;
+ const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`;
const expectedError = new Error('expected');
client.innerApiCalls.listSinks = stubSimpleCall(undefined, expectedError);
await assert.rejects(client.listSinks(request), expectedError);
@@ -4798,10 +4854,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes listSinksStream without error', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.ListSinksRequest(),
);
@@ -4810,7 +4866,7 @@ describe('v2.ConfigServiceV2Client', () => {
['parent'],
);
request.parent = defaultValue1;
- const expectedHeaderRequestParams = `parent=${defaultValue1}`;
+ const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`;
const expectedResponse = [
generateSampleMessage(new protos.google.logging.v2.LogSink()),
generateSampleMessage(new protos.google.logging.v2.LogSink()),
@@ -4849,10 +4905,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes listSinksStream with error', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.ListSinksRequest(),
);
@@ -4861,7 +4917,7 @@ describe('v2.ConfigServiceV2Client', () => {
['parent'],
);
request.parent = defaultValue1;
- const expectedHeaderRequestParams = `parent=${defaultValue1}`;
+ const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`;
const expectedError = new Error('expected');
client.descriptors.page.listSinks.createStream = stubPageStreamingCall(
undefined,
@@ -4897,10 +4953,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('uses async iteration with listSinks without error', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.ListSinksRequest(),
);
@@ -4909,7 +4965,7 @@ describe('v2.ConfigServiceV2Client', () => {
['parent'],
);
request.parent = defaultValue1;
- const expectedHeaderRequestParams = `parent=${defaultValue1}`;
+ const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`;
const expectedResponse = [
generateSampleMessage(new protos.google.logging.v2.LogSink()),
generateSampleMessage(new protos.google.logging.v2.LogSink()),
@@ -4939,10 +4995,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('uses async iteration with listSinks with error', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.ListSinksRequest(),
);
@@ -4951,7 +5007,7 @@ describe('v2.ConfigServiceV2Client', () => {
['parent'],
);
request.parent = defaultValue1;
- const expectedHeaderRequestParams = `parent=${defaultValue1}`;
+ const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`;
const expectedError = new Error('expected');
client.descriptors.page.listSinks.asyncIterate = stubAsyncIterationCall(
undefined,
@@ -4982,10 +5038,10 @@ describe('v2.ConfigServiceV2Client', () => {
describe('listLinks', () => {
it('invokes listLinks without error', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.ListLinksRequest(),
);
@@ -4994,7 +5050,7 @@ describe('v2.ConfigServiceV2Client', () => {
['parent'],
);
request.parent = defaultValue1;
- const expectedHeaderRequestParams = `parent=${defaultValue1}`;
+ const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`;
const expectedResponse = [
generateSampleMessage(new protos.google.logging.v2.Link()),
generateSampleMessage(new protos.google.logging.v2.Link()),
@@ -5015,10 +5071,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes listLinks without error using callback', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.ListLinksRequest(),
);
@@ -5027,7 +5083,7 @@ describe('v2.ConfigServiceV2Client', () => {
['parent'],
);
request.parent = defaultValue1;
- const expectedHeaderRequestParams = `parent=${defaultValue1}`;
+ const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`;
const expectedResponse = [
generateSampleMessage(new protos.google.logging.v2.Link()),
generateSampleMessage(new protos.google.logging.v2.Link()),
@@ -5064,10 +5120,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes listLinks with error', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.ListLinksRequest(),
);
@@ -5076,7 +5132,7 @@ describe('v2.ConfigServiceV2Client', () => {
['parent'],
);
request.parent = defaultValue1;
- const expectedHeaderRequestParams = `parent=${defaultValue1}`;
+ const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`;
const expectedError = new Error('expected');
client.innerApiCalls.listLinks = stubSimpleCall(undefined, expectedError);
await assert.rejects(client.listLinks(request), expectedError);
@@ -5092,10 +5148,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes listLinksStream without error', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.ListLinksRequest(),
);
@@ -5104,7 +5160,7 @@ describe('v2.ConfigServiceV2Client', () => {
['parent'],
);
request.parent = defaultValue1;
- const expectedHeaderRequestParams = `parent=${defaultValue1}`;
+ const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`;
const expectedResponse = [
generateSampleMessage(new protos.google.logging.v2.Link()),
generateSampleMessage(new protos.google.logging.v2.Link()),
@@ -5143,10 +5199,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes listLinksStream with error', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.ListLinksRequest(),
);
@@ -5155,7 +5211,7 @@ describe('v2.ConfigServiceV2Client', () => {
['parent'],
);
request.parent = defaultValue1;
- const expectedHeaderRequestParams = `parent=${defaultValue1}`;
+ const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`;
const expectedError = new Error('expected');
client.descriptors.page.listLinks.createStream = stubPageStreamingCall(
undefined,
@@ -5191,10 +5247,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('uses async iteration with listLinks without error', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.ListLinksRequest(),
);
@@ -5203,7 +5259,7 @@ describe('v2.ConfigServiceV2Client', () => {
['parent'],
);
request.parent = defaultValue1;
- const expectedHeaderRequestParams = `parent=${defaultValue1}`;
+ const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`;
const expectedResponse = [
generateSampleMessage(new protos.google.logging.v2.Link()),
generateSampleMessage(new protos.google.logging.v2.Link()),
@@ -5233,10 +5289,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('uses async iteration with listLinks with error', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.ListLinksRequest(),
);
@@ -5245,7 +5301,7 @@ describe('v2.ConfigServiceV2Client', () => {
['parent'],
);
request.parent = defaultValue1;
- const expectedHeaderRequestParams = `parent=${defaultValue1}`;
+ const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`;
const expectedError = new Error('expected');
client.descriptors.page.listLinks.asyncIterate = stubAsyncIterationCall(
undefined,
@@ -5276,10 +5332,10 @@ describe('v2.ConfigServiceV2Client', () => {
describe('listExclusions', () => {
it('invokes listExclusions without error', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.ListExclusionsRequest(),
);
@@ -5288,7 +5344,7 @@ describe('v2.ConfigServiceV2Client', () => {
['parent'],
);
request.parent = defaultValue1;
- const expectedHeaderRequestParams = `parent=${defaultValue1}`;
+ const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`;
const expectedResponse = [
generateSampleMessage(new protos.google.logging.v2.LogExclusion()),
generateSampleMessage(new protos.google.logging.v2.LogExclusion()),
@@ -5309,10 +5365,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes listExclusions without error using callback', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.ListExclusionsRequest(),
);
@@ -5321,7 +5377,7 @@ describe('v2.ConfigServiceV2Client', () => {
['parent'],
);
request.parent = defaultValue1;
- const expectedHeaderRequestParams = `parent=${defaultValue1}`;
+ const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`;
const expectedResponse = [
generateSampleMessage(new protos.google.logging.v2.LogExclusion()),
generateSampleMessage(new protos.google.logging.v2.LogExclusion()),
@@ -5358,10 +5414,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes listExclusions with error', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.ListExclusionsRequest(),
);
@@ -5370,7 +5426,7 @@ describe('v2.ConfigServiceV2Client', () => {
['parent'],
);
request.parent = defaultValue1;
- const expectedHeaderRequestParams = `parent=${defaultValue1}`;
+ const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`;
const expectedError = new Error('expected');
client.innerApiCalls.listExclusions = stubSimpleCall(
undefined,
@@ -5389,10 +5445,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes listExclusionsStream without error', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.ListExclusionsRequest(),
);
@@ -5401,7 +5457,7 @@ describe('v2.ConfigServiceV2Client', () => {
['parent'],
);
request.parent = defaultValue1;
- const expectedHeaderRequestParams = `parent=${defaultValue1}`;
+ const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`;
const expectedResponse = [
generateSampleMessage(new protos.google.logging.v2.LogExclusion()),
generateSampleMessage(new protos.google.logging.v2.LogExclusion()),
@@ -5440,10 +5496,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('invokes listExclusionsStream with error', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.ListExclusionsRequest(),
);
@@ -5452,7 +5508,7 @@ describe('v2.ConfigServiceV2Client', () => {
['parent'],
);
request.parent = defaultValue1;
- const expectedHeaderRequestParams = `parent=${defaultValue1}`;
+ const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`;
const expectedError = new Error('expected');
client.descriptors.page.listExclusions.createStream =
stubPageStreamingCall(undefined, expectedError);
@@ -5486,10 +5542,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('uses async iteration with listExclusions without error', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.ListExclusionsRequest(),
);
@@ -5498,7 +5554,7 @@ describe('v2.ConfigServiceV2Client', () => {
['parent'],
);
request.parent = defaultValue1;
- const expectedHeaderRequestParams = `parent=${defaultValue1}`;
+ const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`;
const expectedResponse = [
generateSampleMessage(new protos.google.logging.v2.LogExclusion()),
generateSampleMessage(new protos.google.logging.v2.LogExclusion()),
@@ -5529,10 +5585,10 @@ describe('v2.ConfigServiceV2Client', () => {
it('uses async iteration with listExclusions with error', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.ListExclusionsRequest(),
);
@@ -5541,7 +5597,7 @@ describe('v2.ConfigServiceV2Client', () => {
['parent'],
);
request.parent = defaultValue1;
- const expectedHeaderRequestParams = `parent=${defaultValue1}`;
+ const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`;
const expectedError = new Error('expected');
client.descriptors.page.listExclusions.asyncIterate =
stubAsyncIterationCall(undefined, expectedError);
@@ -5570,10 +5626,10 @@ describe('v2.ConfigServiceV2Client', () => {
describe('getOperation', () => {
it('invokes getOperation without error', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new operationsProtos.google.longrunning.GetOperationRequest(),
);
@@ -5591,7 +5647,7 @@ describe('v2.ConfigServiceV2Client', () => {
});
it('invokes getOperation without error using callback', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
const request = generateSampleMessage(
@@ -5604,20 +5660,24 @@ describe('v2.ConfigServiceV2Client', () => {
.stub()
.callsArgWith(2, null, expectedResponse);
const promise = new Promise((resolve, reject) => {
- client.operationsClient.getOperation(
- request,
- undefined,
- (
- err?: Error | null,
- result?: operationsProtos.google.longrunning.Operation | null,
- ) => {
- if (err) {
- reject(err);
- } else {
- resolve(result);
- }
- },
- );
+ client.operationsClient
+ .getOperation(
+ request,
+ undefined,
+ (
+ err?: Error | null,
+ result?: operationsProtos.google.longrunning.Operation | null,
+ ) => {
+ if (err) {
+ reject(err);
+ } else {
+ resolve(result);
+ }
+ },
+ )
+ .catch((err) => {
+ throw err;
+ });
});
const response = await promise;
assert.deepStrictEqual(response, expectedResponse);
@@ -5625,7 +5685,7 @@ describe('v2.ConfigServiceV2Client', () => {
});
it('invokes getOperation with error', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
const request = generateSampleMessage(
@@ -5649,10 +5709,10 @@ describe('v2.ConfigServiceV2Client', () => {
describe('cancelOperation', () => {
it('invokes cancelOperation without error', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new operationsProtos.google.longrunning.CancelOperationRequest(),
);
@@ -5671,7 +5731,7 @@ describe('v2.ConfigServiceV2Client', () => {
});
it('invokes cancelOperation without error using callback', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
const request = generateSampleMessage(
@@ -5684,20 +5744,24 @@ describe('v2.ConfigServiceV2Client', () => {
.stub()
.callsArgWith(2, null, expectedResponse);
const promise = new Promise((resolve, reject) => {
- client.operationsClient.cancelOperation(
- request,
- undefined,
- (
- err?: Error | null,
- result?: protos.google.protobuf.Empty | null,
- ) => {
- if (err) {
- reject(err);
- } else {
- resolve(result);
- }
- },
- );
+ client.operationsClient
+ .cancelOperation(
+ request,
+ undefined,
+ (
+ err?: Error | null,
+ result?: protos.google.protobuf.Empty | null,
+ ) => {
+ if (err) {
+ reject(err);
+ } else {
+ resolve(result);
+ }
+ },
+ )
+ .catch((err) => {
+ throw err;
+ });
});
const response = await promise;
assert.deepStrictEqual(response, expectedResponse);
@@ -5705,7 +5769,7 @@ describe('v2.ConfigServiceV2Client', () => {
});
it('invokes cancelOperation with error', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
const request = generateSampleMessage(
@@ -5729,10 +5793,10 @@ describe('v2.ConfigServiceV2Client', () => {
describe('deleteOperation', () => {
it('invokes deleteOperation without error', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new operationsProtos.google.longrunning.DeleteOperationRequest(),
);
@@ -5751,7 +5815,7 @@ describe('v2.ConfigServiceV2Client', () => {
});
it('invokes deleteOperation without error using callback', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
const request = generateSampleMessage(
@@ -5764,20 +5828,24 @@ describe('v2.ConfigServiceV2Client', () => {
.stub()
.callsArgWith(2, null, expectedResponse);
const promise = new Promise((resolve, reject) => {
- client.operationsClient.deleteOperation(
- request,
- undefined,
- (
- err?: Error | null,
- result?: protos.google.protobuf.Empty | null,
- ) => {
- if (err) {
- reject(err);
- } else {
- resolve(result);
- }
- },
- );
+ client.operationsClient
+ .deleteOperation(
+ request,
+ undefined,
+ (
+ err?: Error | null,
+ result?: protos.google.protobuf.Empty | null,
+ ) => {
+ if (err) {
+ reject(err);
+ } else {
+ resolve(result);
+ }
+ },
+ )
+ .catch((err) => {
+ throw err;
+ });
});
const response = await promise;
assert.deepStrictEqual(response, expectedResponse);
@@ -5785,7 +5853,7 @@ describe('v2.ConfigServiceV2Client', () => {
});
it('invokes deleteOperation with error', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
const request = generateSampleMessage(
@@ -5809,7 +5877,7 @@ describe('v2.ConfigServiceV2Client', () => {
describe('listOperationsAsync', () => {
it('uses async iteration with listOperations without error', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
const request = generateSampleMessage(
@@ -5828,8 +5896,7 @@ describe('v2.ConfigServiceV2Client', () => {
];
client.operationsClient.descriptor.listOperations.asyncIterate =
stubAsyncIterationCall(expectedResponse);
- const responses: operationsProtos.google.longrunning.ListOperationsResponse[] =
- [];
+ const responses: operationsProtos.google.longrunning.IOperation[] = [];
const iterable = client.operationsClient.listOperationsAsync(request);
for await (const resource of iterable) {
responses.push(resource!);
@@ -5845,10 +5912,10 @@ describe('v2.ConfigServiceV2Client', () => {
});
it('uses async iteration with listOperations with error', async () => {
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new operationsProtos.google.longrunning.ListOperationsRequest(),
);
@@ -5857,8 +5924,7 @@ describe('v2.ConfigServiceV2Client', () => {
stubAsyncIterationCall(undefined, expectedError);
const iterable = client.operationsClient.listOperationsAsync(request);
await assert.rejects(async () => {
- const responses: operationsProtos.google.longrunning.ListOperationsResponse[] =
- [];
+ const responses: operationsProtos.google.longrunning.IOperation[] = [];
for await (const resource of iterable) {
responses.push(resource!);
}
@@ -5874,16 +5940,16 @@ describe('v2.ConfigServiceV2Client', () => {
});
describe('Path templates', () => {
- describe('billingAccountCmekSettings', () => {
+ describe('billingAccountCmekSettings', async () => {
const fakePath = '/rendered/path/billingAccountCmekSettings';
const expectedParameters = {
billing_account: 'billingAccountValue',
};
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
client.pathTemplates.billingAccountCmekSettingsPathTemplate.render = sinon
.stub()
.returns(fakePath);
@@ -5923,17 +5989,17 @@ describe('v2.ConfigServiceV2Client', () => {
});
});
- describe('billingAccountExclusion', () => {
+ describe('billingAccountExclusion', async () => {
const fakePath = '/rendered/path/billingAccountExclusion';
const expectedParameters = {
billing_account: 'billingAccountValue',
exclusion: 'exclusionValue',
};
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
client.pathTemplates.billingAccountExclusionPathTemplate.render = sinon
.stub()
.returns(fakePath);
@@ -5986,7 +6052,7 @@ describe('v2.ConfigServiceV2Client', () => {
});
});
- describe('billingAccountLocationBucket', () => {
+ describe('billingAccountLocationBucket', async () => {
const fakePath = '/rendered/path/billingAccountLocationBucket';
const expectedParameters = {
billing_account: 'billingAccountValue',
@@ -5994,10 +6060,10 @@ describe('v2.ConfigServiceV2Client', () => {
bucket: 'bucketValue',
};
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
client.pathTemplates.billingAccountLocationBucketPathTemplate.render =
sinon.stub().returns(fakePath);
client.pathTemplates.billingAccountLocationBucketPathTemplate.match =
@@ -6065,7 +6131,7 @@ describe('v2.ConfigServiceV2Client', () => {
});
});
- describe('billingAccountLocationBucketLink', () => {
+ describe('billingAccountLocationBucketLink', async () => {
const fakePath = '/rendered/path/billingAccountLocationBucketLink';
const expectedParameters = {
billing_account: 'billingAccountValue',
@@ -6074,10 +6140,10 @@ describe('v2.ConfigServiceV2Client', () => {
link: 'linkValue',
};
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
client.pathTemplates.billingAccountLocationBucketLinkPathTemplate.render =
sinon.stub().returns(fakePath);
client.pathTemplates.billingAccountLocationBucketLinkPathTemplate.match =
@@ -6162,7 +6228,7 @@ describe('v2.ConfigServiceV2Client', () => {
});
});
- describe('billingAccountLocationBucketView', () => {
+ describe('billingAccountLocationBucketView', async () => {
const fakePath = '/rendered/path/billingAccountLocationBucketView';
const expectedParameters = {
billing_account: 'billingAccountValue',
@@ -6171,10 +6237,10 @@ describe('v2.ConfigServiceV2Client', () => {
view: 'viewValue',
};
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
client.pathTemplates.billingAccountLocationBucketViewPathTemplate.render =
sinon.stub().returns(fakePath);
client.pathTemplates.billingAccountLocationBucketViewPathTemplate.match =
@@ -6259,17 +6325,17 @@ describe('v2.ConfigServiceV2Client', () => {
});
});
- describe('billingAccountLog', () => {
+ describe('billingAccountLog', async () => {
const fakePath = '/rendered/path/billingAccountLog';
const expectedParameters = {
billing_account: 'billingAccountValue',
log: 'logValue',
};
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
client.pathTemplates.billingAccountLogPathTemplate.render = sinon
.stub()
.returns(fakePath);
@@ -6321,16 +6387,16 @@ describe('v2.ConfigServiceV2Client', () => {
});
});
- describe('billingAccountSettings', () => {
+ describe('billingAccountSettings', async () => {
const fakePath = '/rendered/path/billingAccountSettings';
const expectedParameters = {
billing_account: 'billingAccountValue',
};
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
client.pathTemplates.billingAccountSettingsPathTemplate.render = sinon
.stub()
.returns(fakePath);
@@ -6366,17 +6432,17 @@ describe('v2.ConfigServiceV2Client', () => {
});
});
- describe('billingAccountSink', () => {
+ describe('billingAccountSink', async () => {
const fakePath = '/rendered/path/billingAccountSink';
const expectedParameters = {
billing_account: 'billingAccountValue',
sink: 'sinkValue',
};
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
client.pathTemplates.billingAccountSinkPathTemplate.render = sinon
.stub()
.returns(fakePath);
@@ -6428,16 +6494,16 @@ describe('v2.ConfigServiceV2Client', () => {
});
});
- describe('folderCmekSettings', () => {
+ describe('folderCmekSettings', async () => {
const fakePath = '/rendered/path/folderCmekSettings';
const expectedParameters = {
folder: 'folderValue',
};
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
client.pathTemplates.folderCmekSettingsPathTemplate.render = sinon
.stub()
.returns(fakePath);
@@ -6472,17 +6538,17 @@ describe('v2.ConfigServiceV2Client', () => {
});
});
- describe('folderExclusion', () => {
+ describe('folderExclusion', async () => {
const fakePath = '/rendered/path/folderExclusion';
const expectedParameters = {
folder: 'folderValue',
exclusion: 'exclusionValue',
};
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
client.pathTemplates.folderExclusionPathTemplate.render = sinon
.stub()
.returns(fakePath);
@@ -6524,7 +6590,7 @@ describe('v2.ConfigServiceV2Client', () => {
});
});
- describe('folderLocationBucket', () => {
+ describe('folderLocationBucket', async () => {
const fakePath = '/rendered/path/folderLocationBucket';
const expectedParameters = {
folder: 'folderValue',
@@ -6532,10 +6598,10 @@ describe('v2.ConfigServiceV2Client', () => {
bucket: 'bucketValue',
};
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
client.pathTemplates.folderLocationBucketPathTemplate.render = sinon
.stub()
.returns(fakePath);
@@ -6601,7 +6667,7 @@ describe('v2.ConfigServiceV2Client', () => {
});
});
- describe('folderLocationBucketLink', () => {
+ describe('folderLocationBucketLink', async () => {
const fakePath = '/rendered/path/folderLocationBucketLink';
const expectedParameters = {
folder: 'folderValue',
@@ -6610,10 +6676,10 @@ describe('v2.ConfigServiceV2Client', () => {
link: 'linkValue',
};
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
client.pathTemplates.folderLocationBucketLinkPathTemplate.render = sinon
.stub()
.returns(fakePath);
@@ -6696,7 +6762,7 @@ describe('v2.ConfigServiceV2Client', () => {
});
});
- describe('folderLocationBucketView', () => {
+ describe('folderLocationBucketView', async () => {
const fakePath = '/rendered/path/folderLocationBucketView';
const expectedParameters = {
folder: 'folderValue',
@@ -6705,10 +6771,10 @@ describe('v2.ConfigServiceV2Client', () => {
view: 'viewValue',
};
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
client.pathTemplates.folderLocationBucketViewPathTemplate.render = sinon
.stub()
.returns(fakePath);
@@ -6791,17 +6857,17 @@ describe('v2.ConfigServiceV2Client', () => {
});
});
- describe('folderLog', () => {
+ describe('folderLog', async () => {
const fakePath = '/rendered/path/folderLog';
const expectedParameters = {
folder: 'folderValue',
log: 'logValue',
};
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
client.pathTemplates.folderLogPathTemplate.render = sinon
.stub()
.returns(fakePath);
@@ -6840,16 +6906,16 @@ describe('v2.ConfigServiceV2Client', () => {
});
});
- describe('folderSettings', () => {
+ describe('folderSettings', async () => {
const fakePath = '/rendered/path/folderSettings';
const expectedParameters = {
folder: 'folderValue',
};
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
client.pathTemplates.folderSettingsPathTemplate.render = sinon
.stub()
.returns(fakePath);
@@ -6878,17 +6944,17 @@ describe('v2.ConfigServiceV2Client', () => {
});
});
- describe('folderSink', () => {
+ describe('folderSink', async () => {
const fakePath = '/rendered/path/folderSink';
const expectedParameters = {
folder: 'folderValue',
sink: 'sinkValue',
};
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
client.pathTemplates.folderSinkPathTemplate.render = sinon
.stub()
.returns(fakePath);
@@ -6927,17 +6993,17 @@ describe('v2.ConfigServiceV2Client', () => {
});
});
- describe('location', () => {
+ describe('location', async () => {
const fakePath = '/rendered/path/location';
const expectedParameters = {
project: 'projectValue',
location: 'locationValue',
};
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
client.pathTemplates.locationPathTemplate.render = sinon
.stub()
.returns(fakePath);
@@ -6976,17 +7042,17 @@ describe('v2.ConfigServiceV2Client', () => {
});
});
- describe('logMetric', () => {
+ describe('logMetric', async () => {
const fakePath = '/rendered/path/logMetric';
const expectedParameters = {
project: 'projectValue',
metric: 'metricValue',
};
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
client.pathTemplates.logMetricPathTemplate.render = sinon
.stub()
.returns(fakePath);
@@ -7025,16 +7091,16 @@ describe('v2.ConfigServiceV2Client', () => {
});
});
- describe('organizationCmekSettings', () => {
+ describe('organizationCmekSettings', async () => {
const fakePath = '/rendered/path/organizationCmekSettings';
const expectedParameters = {
organization: 'organizationValue',
};
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
client.pathTemplates.organizationCmekSettingsPathTemplate.render = sinon
.stub()
.returns(fakePath);
@@ -7070,17 +7136,17 @@ describe('v2.ConfigServiceV2Client', () => {
});
});
- describe('organizationExclusion', () => {
+ describe('organizationExclusion', async () => {
const fakePath = '/rendered/path/organizationExclusion';
const expectedParameters = {
organization: 'organizationValue',
exclusion: 'exclusionValue',
};
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
client.pathTemplates.organizationExclusionPathTemplate.render = sinon
.stub()
.returns(fakePath);
@@ -7133,7 +7199,7 @@ describe('v2.ConfigServiceV2Client', () => {
});
});
- describe('organizationLocationBucket', () => {
+ describe('organizationLocationBucket', async () => {
const fakePath = '/rendered/path/organizationLocationBucket';
const expectedParameters = {
organization: 'organizationValue',
@@ -7141,10 +7207,10 @@ describe('v2.ConfigServiceV2Client', () => {
bucket: 'bucketValue',
};
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
client.pathTemplates.organizationLocationBucketPathTemplate.render = sinon
.stub()
.returns(fakePath);
@@ -7212,7 +7278,7 @@ describe('v2.ConfigServiceV2Client', () => {
});
});
- describe('organizationLocationBucketLink', () => {
+ describe('organizationLocationBucketLink', async () => {
const fakePath = '/rendered/path/organizationLocationBucketLink';
const expectedParameters = {
organization: 'organizationValue',
@@ -7221,10 +7287,10 @@ describe('v2.ConfigServiceV2Client', () => {
link: 'linkValue',
};
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
client.pathTemplates.organizationLocationBucketLinkPathTemplate.render =
sinon.stub().returns(fakePath);
client.pathTemplates.organizationLocationBucketLinkPathTemplate.match =
@@ -7307,7 +7373,7 @@ describe('v2.ConfigServiceV2Client', () => {
});
});
- describe('organizationLocationBucketView', () => {
+ describe('organizationLocationBucketView', async () => {
const fakePath = '/rendered/path/organizationLocationBucketView';
const expectedParameters = {
organization: 'organizationValue',
@@ -7316,10 +7382,10 @@ describe('v2.ConfigServiceV2Client', () => {
view: 'viewValue',
};
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
client.pathTemplates.organizationLocationBucketViewPathTemplate.render =
sinon.stub().returns(fakePath);
client.pathTemplates.organizationLocationBucketViewPathTemplate.match =
@@ -7402,17 +7468,17 @@ describe('v2.ConfigServiceV2Client', () => {
});
});
- describe('organizationLog', () => {
+ describe('organizationLog', async () => {
const fakePath = '/rendered/path/organizationLog';
const expectedParameters = {
organization: 'organizationValue',
log: 'logValue',
};
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
client.pathTemplates.organizationLogPathTemplate.render = sinon
.stub()
.returns(fakePath);
@@ -7455,16 +7521,16 @@ describe('v2.ConfigServiceV2Client', () => {
});
});
- describe('organizationSettings', () => {
+ describe('organizationSettings', async () => {
const fakePath = '/rendered/path/organizationSettings';
const expectedParameters = {
organization: 'organizationValue',
};
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
client.pathTemplates.organizationSettingsPathTemplate.render = sinon
.stub()
.returns(fakePath);
@@ -7500,17 +7566,17 @@ describe('v2.ConfigServiceV2Client', () => {
});
});
- describe('organizationSink', () => {
+ describe('organizationSink', async () => {
const fakePath = '/rendered/path/organizationSink';
const expectedParameters = {
organization: 'organizationValue',
sink: 'sinkValue',
};
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
client.pathTemplates.organizationSinkPathTemplate.render = sinon
.stub()
.returns(fakePath);
@@ -7556,16 +7622,16 @@ describe('v2.ConfigServiceV2Client', () => {
});
});
- describe('project', () => {
+ describe('project', async () => {
const fakePath = '/rendered/path/project';
const expectedParameters = {
project: 'projectValue',
};
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
client.pathTemplates.projectPathTemplate.render = sinon
.stub()
.returns(fakePath);
@@ -7594,16 +7660,16 @@ describe('v2.ConfigServiceV2Client', () => {
});
});
- describe('projectCmekSettings', () => {
+ describe('projectCmekSettings', async () => {
const fakePath = '/rendered/path/projectCmekSettings';
const expectedParameters = {
project: 'projectValue',
};
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
client.pathTemplates.projectCmekSettingsPathTemplate.render = sinon
.stub()
.returns(fakePath);
@@ -7638,17 +7704,17 @@ describe('v2.ConfigServiceV2Client', () => {
});
});
- describe('projectExclusion', () => {
+ describe('projectExclusion', async () => {
const fakePath = '/rendered/path/projectExclusion';
const expectedParameters = {
project: 'projectValue',
exclusion: 'exclusionValue',
};
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
client.pathTemplates.projectExclusionPathTemplate.render = sinon
.stub()
.returns(fakePath);
@@ -7693,7 +7759,7 @@ describe('v2.ConfigServiceV2Client', () => {
});
});
- describe('projectLocationBucket', () => {
+ describe('projectLocationBucket', async () => {
const fakePath = '/rendered/path/projectLocationBucket';
const expectedParameters = {
project: 'projectValue',
@@ -7701,10 +7767,10 @@ describe('v2.ConfigServiceV2Client', () => {
bucket: 'bucketValue',
};
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
client.pathTemplates.projectLocationBucketPathTemplate.render = sinon
.stub()
.returns(fakePath);
@@ -7772,7 +7838,7 @@ describe('v2.ConfigServiceV2Client', () => {
});
});
- describe('projectLocationBucketLink', () => {
+ describe('projectLocationBucketLink', async () => {
const fakePath = '/rendered/path/projectLocationBucketLink';
const expectedParameters = {
project: 'projectValue',
@@ -7781,10 +7847,10 @@ describe('v2.ConfigServiceV2Client', () => {
link: 'linkValue',
};
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
client.pathTemplates.projectLocationBucketLinkPathTemplate.render = sinon
.stub()
.returns(fakePath);
@@ -7867,7 +7933,7 @@ describe('v2.ConfigServiceV2Client', () => {
});
});
- describe('projectLocationBucketView', () => {
+ describe('projectLocationBucketView', async () => {
const fakePath = '/rendered/path/projectLocationBucketView';
const expectedParameters = {
project: 'projectValue',
@@ -7876,10 +7942,10 @@ describe('v2.ConfigServiceV2Client', () => {
view: 'viewValue',
};
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
client.pathTemplates.projectLocationBucketViewPathTemplate.render = sinon
.stub()
.returns(fakePath);
@@ -7962,17 +8028,17 @@ describe('v2.ConfigServiceV2Client', () => {
});
});
- describe('projectLog', () => {
+ describe('projectLog', async () => {
const fakePath = '/rendered/path/projectLog';
const expectedParameters = {
project: 'projectValue',
log: 'logValue',
};
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
client.pathTemplates.projectLogPathTemplate.render = sinon
.stub()
.returns(fakePath);
@@ -8011,16 +8077,16 @@ describe('v2.ConfigServiceV2Client', () => {
});
});
- describe('projectSettings', () => {
+ describe('projectSettings', async () => {
const fakePath = '/rendered/path/projectSettings';
const expectedParameters = {
project: 'projectValue',
};
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
client.pathTemplates.projectSettingsPathTemplate.render = sinon
.stub()
.returns(fakePath);
@@ -8049,17 +8115,17 @@ describe('v2.ConfigServiceV2Client', () => {
});
});
- describe('projectSink', () => {
+ describe('projectSink', async () => {
const fakePath = '/rendered/path/projectSink';
const expectedParameters = {
project: 'projectValue',
sink: 'sinkValue',
};
const client = new configservicev2Module.v2.ConfigServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
client.pathTemplates.projectSinkPathTemplate.render = sinon
.stub()
.returns(fakePath);
diff --git a/handwritten/logging/test/gapic_logging_service_v2_v2.ts b/handwritten/logging/test/gapic_logging_service_v2_v2.ts
index fd9260826268..9005cf668f41 100644
--- a/handwritten/logging/test/gapic_logging_service_v2_v2.ts
+++ b/handwritten/logging/test/gapic_logging_service_v2_v2.ts
@@ -1,4 +1,4 @@
-// Copyright 2024 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -19,13 +19,13 @@
import * as protos from '../protos/protos';
import * as assert from 'assert';
import * as sinon from 'sinon';
-import {SinonStub} from 'sinon';
-import {describe, it} from 'mocha';
+import { SinonStub } from 'sinon';
+import { describe, it } from 'mocha';
import * as loggingservicev2Module from '../src';
-import {PassThrough} from 'stream';
+import { PassThrough } from 'stream';
-import {protobuf} from 'google-gax';
+import { protobuf } from 'google-gax';
// Dynamically loaded proto JSON is needed to get the type information
// to fill in default values for request objects
@@ -45,7 +45,7 @@ function getTypeDefaultValue(typeName: string, fields: string[]) {
function generateSampleMessage(instance: T) {
const filledObject = (
instance.constructor as typeof protobuf.Message
- ).toObject(instance as protobuf.Message, {defaults: true});
+ ).toObject(instance as protobuf.Message, { defaults: true });
return (instance.constructor as typeof protobuf.Message).fromObject(
filledObject,
) as T;
@@ -131,9 +131,9 @@ function stubAsyncIterationCall(
return Promise.reject(error);
}
if (counter >= responses!.length) {
- return Promise.resolve({done: true, value: undefined});
+ return Promise.resolve({ done: true, value: undefined });
}
- return Promise.resolve({done: false, value: responses![counter++]});
+ return Promise.resolve({ done: false, value: responses![counter++] });
},
};
},
@@ -253,7 +253,7 @@ describe('v2.LoggingServiceV2Client', () => {
it('has initialize method and supports deferred initialization', async () => {
const client = new loggingservicev2Module.v2.LoggingServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
assert.strictEqual(client.loggingServiceV2Stub, undefined);
@@ -261,33 +261,45 @@ describe('v2.LoggingServiceV2Client', () => {
assert(client.loggingServiceV2Stub);
});
- it('has close method for the initialized client', done => {
+ it('has close method for the initialized client', (done) => {
const client = new loggingservicev2Module.v2.LoggingServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
- assert(client.loggingServiceV2Stub);
- client.close().then(() => {
- done();
+ client.initialize().catch((err) => {
+ throw err;
});
+ assert(client.loggingServiceV2Stub);
+ client
+ .close()
+ .then(() => {
+ done();
+ })
+ .catch((err) => {
+ throw err;
+ });
});
- it('has close method for the non-initialized client', done => {
+ it('has close method for the non-initialized client', (done) => {
const client = new loggingservicev2Module.v2.LoggingServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
assert.strictEqual(client.loggingServiceV2Stub, undefined);
- client.close().then(() => {
- done();
- });
+ client
+ .close()
+ .then(() => {
+ done();
+ })
+ .catch((err) => {
+ throw err;
+ });
});
it('has getProjectId method', async () => {
const fakeProjectId = 'fake-project-id';
const client = new loggingservicev2Module.v2.LoggingServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
client.auth.getProjectId = sinon.stub().resolves(fakeProjectId);
@@ -299,7 +311,7 @@ describe('v2.LoggingServiceV2Client', () => {
it('has getProjectId method with callback', async () => {
const fakeProjectId = 'fake-project-id';
const client = new loggingservicev2Module.v2.LoggingServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
client.auth.getProjectId = sinon
@@ -322,10 +334,10 @@ describe('v2.LoggingServiceV2Client', () => {
describe('deleteLog', () => {
it('invokes deleteLog without error', async () => {
const client = new loggingservicev2Module.v2.LoggingServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.DeleteLogRequest(),
);
@@ -334,7 +346,7 @@ describe('v2.LoggingServiceV2Client', () => {
['logName'],
);
request.logName = defaultValue1;
- const expectedHeaderRequestParams = `log_name=${defaultValue1}`;
+ const expectedHeaderRequestParams = `log_name=${defaultValue1 ?? ''}`;
const expectedResponse = generateSampleMessage(
new protos.google.protobuf.Empty(),
);
@@ -353,10 +365,10 @@ describe('v2.LoggingServiceV2Client', () => {
it('invokes deleteLog without error using callback', async () => {
const client = new loggingservicev2Module.v2.LoggingServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.DeleteLogRequest(),
);
@@ -365,7 +377,7 @@ describe('v2.LoggingServiceV2Client', () => {
['logName'],
);
request.logName = defaultValue1;
- const expectedHeaderRequestParams = `log_name=${defaultValue1}`;
+ const expectedHeaderRequestParams = `log_name=${defaultValue1 ?? ''}`;
const expectedResponse = generateSampleMessage(
new protos.google.protobuf.Empty(),
);
@@ -400,10 +412,10 @@ describe('v2.LoggingServiceV2Client', () => {
it('invokes deleteLog with error', async () => {
const client = new loggingservicev2Module.v2.LoggingServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.DeleteLogRequest(),
);
@@ -412,7 +424,7 @@ describe('v2.LoggingServiceV2Client', () => {
['logName'],
);
request.logName = defaultValue1;
- const expectedHeaderRequestParams = `log_name=${defaultValue1}`;
+ const expectedHeaderRequestParams = `log_name=${defaultValue1 ?? ''}`;
const expectedError = new Error('expected');
client.innerApiCalls.deleteLog = stubSimpleCall(undefined, expectedError);
await assert.rejects(client.deleteLog(request), expectedError);
@@ -428,10 +440,10 @@ describe('v2.LoggingServiceV2Client', () => {
it('invokes deleteLog with closed client', async () => {
const client = new loggingservicev2Module.v2.LoggingServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.DeleteLogRequest(),
);
@@ -441,7 +453,9 @@ describe('v2.LoggingServiceV2Client', () => {
);
request.logName = defaultValue1;
const expectedError = new Error('The client has already been closed.');
- client.close();
+ client.close().catch((err) => {
+ throw err;
+ });
await assert.rejects(client.deleteLog(request), expectedError);
});
});
@@ -449,10 +463,10 @@ describe('v2.LoggingServiceV2Client', () => {
describe('writeLogEntries', () => {
it('invokes writeLogEntries without error', async () => {
const client = new loggingservicev2Module.v2.LoggingServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.WriteLogEntriesRequest(),
);
@@ -466,10 +480,10 @@ describe('v2.LoggingServiceV2Client', () => {
it('invokes writeLogEntries without error using callback', async () => {
const client = new loggingservicev2Module.v2.LoggingServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.WriteLogEntriesRequest(),
);
@@ -499,10 +513,10 @@ describe('v2.LoggingServiceV2Client', () => {
it('invokes writeLogEntries with error', async () => {
const client = new loggingservicev2Module.v2.LoggingServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.WriteLogEntriesRequest(),
);
@@ -516,15 +530,17 @@ describe('v2.LoggingServiceV2Client', () => {
it('invokes writeLogEntries with closed client', async () => {
const client = new loggingservicev2Module.v2.LoggingServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.WriteLogEntriesRequest(),
);
const expectedError = new Error('The client has already been closed.');
- client.close();
+ client.close().catch((err) => {
+ throw err;
+ });
await assert.rejects(client.writeLogEntries(request), expectedError);
});
});
@@ -532,10 +548,10 @@ describe('v2.LoggingServiceV2Client', () => {
describe('tailLogEntries', () => {
it('invokes tailLogEntries without error', async () => {
const client = new loggingservicev2Module.v2.LoggingServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.TailLogEntriesRequest(),
);
@@ -575,10 +591,10 @@ describe('v2.LoggingServiceV2Client', () => {
it('invokes tailLogEntries with error', async () => {
const client = new loggingservicev2Module.v2.LoggingServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.TailLogEntriesRequest(),
);
@@ -618,10 +634,10 @@ describe('v2.LoggingServiceV2Client', () => {
describe('listLogEntries', () => {
it('invokes listLogEntries without error', async () => {
const client = new loggingservicev2Module.v2.LoggingServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.ListLogEntriesRequest(),
);
@@ -637,10 +653,10 @@ describe('v2.LoggingServiceV2Client', () => {
it('invokes listLogEntries without error using callback', async () => {
const client = new loggingservicev2Module.v2.LoggingServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.ListLogEntriesRequest(),
);
@@ -672,10 +688,10 @@ describe('v2.LoggingServiceV2Client', () => {
it('invokes listLogEntries with error', async () => {
const client = new loggingservicev2Module.v2.LoggingServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.ListLogEntriesRequest(),
);
@@ -689,10 +705,10 @@ describe('v2.LoggingServiceV2Client', () => {
it('invokes listLogEntriesStream without error', async () => {
const client = new loggingservicev2Module.v2.LoggingServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.ListLogEntriesRequest(),
);
@@ -727,10 +743,10 @@ describe('v2.LoggingServiceV2Client', () => {
it('invokes listLogEntriesStream with error', async () => {
const client = new loggingservicev2Module.v2.LoggingServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.ListLogEntriesRequest(),
);
@@ -760,10 +776,10 @@ describe('v2.LoggingServiceV2Client', () => {
it('uses async iteration with listLogEntries without error', async () => {
const client = new loggingservicev2Module.v2.LoggingServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.ListLogEntriesRequest(),
);
@@ -790,10 +806,10 @@ describe('v2.LoggingServiceV2Client', () => {
it('uses async iteration with listLogEntries with error', async () => {
const client = new loggingservicev2Module.v2.LoggingServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.ListLogEntriesRequest(),
);
@@ -819,10 +835,10 @@ describe('v2.LoggingServiceV2Client', () => {
describe('listMonitoredResourceDescriptors', () => {
it('invokes listMonitoredResourceDescriptors without error', async () => {
const client = new loggingservicev2Module.v2.LoggingServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.ListMonitoredResourceDescriptorsRequest(),
);
@@ -845,10 +861,10 @@ describe('v2.LoggingServiceV2Client', () => {
it('invokes listMonitoredResourceDescriptors without error using callback', async () => {
const client = new loggingservicev2Module.v2.LoggingServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.ListMonitoredResourceDescriptorsRequest(),
);
@@ -886,10 +902,10 @@ describe('v2.LoggingServiceV2Client', () => {
it('invokes listMonitoredResourceDescriptors with error', async () => {
const client = new loggingservicev2Module.v2.LoggingServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.ListMonitoredResourceDescriptorsRequest(),
);
@@ -906,10 +922,10 @@ describe('v2.LoggingServiceV2Client', () => {
it('invokes listMonitoredResourceDescriptorsStream without error', async () => {
const client = new loggingservicev2Module.v2.LoggingServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.ListMonitoredResourceDescriptorsRequest(),
);
@@ -959,10 +975,10 @@ describe('v2.LoggingServiceV2Client', () => {
it('invokes listMonitoredResourceDescriptorsStream with error', async () => {
const client = new loggingservicev2Module.v2.LoggingServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.ListMonitoredResourceDescriptorsRequest(),
);
@@ -1001,10 +1017,10 @@ describe('v2.LoggingServiceV2Client', () => {
it('uses async iteration with listMonitoredResourceDescriptors without error', async () => {
const client = new loggingservicev2Module.v2.LoggingServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.ListMonitoredResourceDescriptorsRequest(),
);
@@ -1038,10 +1054,10 @@ describe('v2.LoggingServiceV2Client', () => {
it('uses async iteration with listMonitoredResourceDescriptors with error', async () => {
const client = new loggingservicev2Module.v2.LoggingServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.ListMonitoredResourceDescriptorsRequest(),
);
@@ -1068,10 +1084,10 @@ describe('v2.LoggingServiceV2Client', () => {
describe('listLogs', () => {
it('invokes listLogs without error', async () => {
const client = new loggingservicev2Module.v2.LoggingServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.ListLogsRequest(),
);
@@ -1080,7 +1096,7 @@ describe('v2.LoggingServiceV2Client', () => {
['parent'],
);
request.parent = defaultValue1;
- const expectedHeaderRequestParams = `parent=${defaultValue1}`;
+ const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`;
const expectedResponse = [new String(), new String(), new String()];
client.innerApiCalls.listLogs = stubSimpleCall(expectedResponse);
const [response] = await client.listLogs(request);
@@ -1097,10 +1113,10 @@ describe('v2.LoggingServiceV2Client', () => {
it('invokes listLogs without error using callback', async () => {
const client = new loggingservicev2Module.v2.LoggingServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.ListLogsRequest(),
);
@@ -1109,7 +1125,7 @@ describe('v2.LoggingServiceV2Client', () => {
['parent'],
);
request.parent = defaultValue1;
- const expectedHeaderRequestParams = `parent=${defaultValue1}`;
+ const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`;
const expectedResponse = [new String(), new String(), new String()];
client.innerApiCalls.listLogs =
stubSimpleCallWithCallback(expectedResponse);
@@ -1139,10 +1155,10 @@ describe('v2.LoggingServiceV2Client', () => {
it('invokes listLogs with error', async () => {
const client = new loggingservicev2Module.v2.LoggingServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.ListLogsRequest(),
);
@@ -1151,7 +1167,7 @@ describe('v2.LoggingServiceV2Client', () => {
['parent'],
);
request.parent = defaultValue1;
- const expectedHeaderRequestParams = `parent=${defaultValue1}`;
+ const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`;
const expectedError = new Error('expected');
client.innerApiCalls.listLogs = stubSimpleCall(undefined, expectedError);
await assert.rejects(client.listLogs(request), expectedError);
@@ -1167,10 +1183,10 @@ describe('v2.LoggingServiceV2Client', () => {
it('invokes listLogsStream without error', async () => {
const client = new loggingservicev2Module.v2.LoggingServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.ListLogsRequest(),
);
@@ -1179,7 +1195,7 @@ describe('v2.LoggingServiceV2Client', () => {
['parent'],
);
request.parent = defaultValue1;
- const expectedHeaderRequestParams = `parent=${defaultValue1}`;
+ const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`;
const expectedResponse = [new String(), new String(), new String()];
client.descriptors.page.listLogs.createStream =
stubPageStreamingCall(expectedResponse);
@@ -1214,10 +1230,10 @@ describe('v2.LoggingServiceV2Client', () => {
it('invokes listLogsStream with error', async () => {
const client = new loggingservicev2Module.v2.LoggingServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.ListLogsRequest(),
);
@@ -1226,7 +1242,7 @@ describe('v2.LoggingServiceV2Client', () => {
['parent'],
);
request.parent = defaultValue1;
- const expectedHeaderRequestParams = `parent=${defaultValue1}`;
+ const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`;
const expectedError = new Error('expected');
client.descriptors.page.listLogs.createStream = stubPageStreamingCall(
undefined,
@@ -1262,10 +1278,10 @@ describe('v2.LoggingServiceV2Client', () => {
it('uses async iteration with listLogs without error', async () => {
const client = new loggingservicev2Module.v2.LoggingServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.ListLogsRequest(),
);
@@ -1274,7 +1290,7 @@ describe('v2.LoggingServiceV2Client', () => {
['parent'],
);
request.parent = defaultValue1;
- const expectedHeaderRequestParams = `parent=${defaultValue1}`;
+ const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`;
const expectedResponse = [new String(), new String(), new String()];
client.descriptors.page.listLogs.asyncIterate =
stubAsyncIterationCall(expectedResponse);
@@ -1300,10 +1316,10 @@ describe('v2.LoggingServiceV2Client', () => {
it('uses async iteration with listLogs with error', async () => {
const client = new loggingservicev2Module.v2.LoggingServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.ListLogsRequest(),
);
@@ -1312,7 +1328,7 @@ describe('v2.LoggingServiceV2Client', () => {
['parent'],
);
request.parent = defaultValue1;
- const expectedHeaderRequestParams = `parent=${defaultValue1}`;
+ const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`;
const expectedError = new Error('expected');
client.descriptors.page.listLogs.asyncIterate = stubAsyncIterationCall(
undefined,
@@ -1341,16 +1357,16 @@ describe('v2.LoggingServiceV2Client', () => {
});
describe('Path templates', () => {
- describe('billingAccountCmekSettings', () => {
+ describe('billingAccountCmekSettings', async () => {
const fakePath = '/rendered/path/billingAccountCmekSettings';
const expectedParameters = {
billing_account: 'billingAccountValue',
};
const client = new loggingservicev2Module.v2.LoggingServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
client.pathTemplates.billingAccountCmekSettingsPathTemplate.render = sinon
.stub()
.returns(fakePath);
@@ -1390,17 +1406,17 @@ describe('v2.LoggingServiceV2Client', () => {
});
});
- describe('billingAccountExclusion', () => {
+ describe('billingAccountExclusion', async () => {
const fakePath = '/rendered/path/billingAccountExclusion';
const expectedParameters = {
billing_account: 'billingAccountValue',
exclusion: 'exclusionValue',
};
const client = new loggingservicev2Module.v2.LoggingServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
client.pathTemplates.billingAccountExclusionPathTemplate.render = sinon
.stub()
.returns(fakePath);
@@ -1453,7 +1469,7 @@ describe('v2.LoggingServiceV2Client', () => {
});
});
- describe('billingAccountLocationBucket', () => {
+ describe('billingAccountLocationBucket', async () => {
const fakePath = '/rendered/path/billingAccountLocationBucket';
const expectedParameters = {
billing_account: 'billingAccountValue',
@@ -1461,10 +1477,10 @@ describe('v2.LoggingServiceV2Client', () => {
bucket: 'bucketValue',
};
const client = new loggingservicev2Module.v2.LoggingServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
client.pathTemplates.billingAccountLocationBucketPathTemplate.render =
sinon.stub().returns(fakePath);
client.pathTemplates.billingAccountLocationBucketPathTemplate.match =
@@ -1532,7 +1548,7 @@ describe('v2.LoggingServiceV2Client', () => {
});
});
- describe('billingAccountLocationBucketLink', () => {
+ describe('billingAccountLocationBucketLink', async () => {
const fakePath = '/rendered/path/billingAccountLocationBucketLink';
const expectedParameters = {
billing_account: 'billingAccountValue',
@@ -1541,10 +1557,10 @@ describe('v2.LoggingServiceV2Client', () => {
link: 'linkValue',
};
const client = new loggingservicev2Module.v2.LoggingServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
client.pathTemplates.billingAccountLocationBucketLinkPathTemplate.render =
sinon.stub().returns(fakePath);
client.pathTemplates.billingAccountLocationBucketLinkPathTemplate.match =
@@ -1629,7 +1645,7 @@ describe('v2.LoggingServiceV2Client', () => {
});
});
- describe('billingAccountLocationBucketView', () => {
+ describe('billingAccountLocationBucketView', async () => {
const fakePath = '/rendered/path/billingAccountLocationBucketView';
const expectedParameters = {
billing_account: 'billingAccountValue',
@@ -1638,10 +1654,10 @@ describe('v2.LoggingServiceV2Client', () => {
view: 'viewValue',
};
const client = new loggingservicev2Module.v2.LoggingServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
client.pathTemplates.billingAccountLocationBucketViewPathTemplate.render =
sinon.stub().returns(fakePath);
client.pathTemplates.billingAccountLocationBucketViewPathTemplate.match =
@@ -1726,17 +1742,17 @@ describe('v2.LoggingServiceV2Client', () => {
});
});
- describe('billingAccountLog', () => {
+ describe('billingAccountLog', async () => {
const fakePath = '/rendered/path/billingAccountLog';
const expectedParameters = {
billing_account: 'billingAccountValue',
log: 'logValue',
};
const client = new loggingservicev2Module.v2.LoggingServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
client.pathTemplates.billingAccountLogPathTemplate.render = sinon
.stub()
.returns(fakePath);
@@ -1788,16 +1804,16 @@ describe('v2.LoggingServiceV2Client', () => {
});
});
- describe('billingAccountSettings', () => {
+ describe('billingAccountSettings', async () => {
const fakePath = '/rendered/path/billingAccountSettings';
const expectedParameters = {
billing_account: 'billingAccountValue',
};
const client = new loggingservicev2Module.v2.LoggingServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
client.pathTemplates.billingAccountSettingsPathTemplate.render = sinon
.stub()
.returns(fakePath);
@@ -1833,17 +1849,17 @@ describe('v2.LoggingServiceV2Client', () => {
});
});
- describe('billingAccountSink', () => {
+ describe('billingAccountSink', async () => {
const fakePath = '/rendered/path/billingAccountSink';
const expectedParameters = {
billing_account: 'billingAccountValue',
sink: 'sinkValue',
};
const client = new loggingservicev2Module.v2.LoggingServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
client.pathTemplates.billingAccountSinkPathTemplate.render = sinon
.stub()
.returns(fakePath);
@@ -1895,16 +1911,16 @@ describe('v2.LoggingServiceV2Client', () => {
});
});
- describe('folderCmekSettings', () => {
+ describe('folderCmekSettings', async () => {
const fakePath = '/rendered/path/folderCmekSettings';
const expectedParameters = {
folder: 'folderValue',
};
const client = new loggingservicev2Module.v2.LoggingServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
client.pathTemplates.folderCmekSettingsPathTemplate.render = sinon
.stub()
.returns(fakePath);
@@ -1939,17 +1955,17 @@ describe('v2.LoggingServiceV2Client', () => {
});
});
- describe('folderExclusion', () => {
+ describe('folderExclusion', async () => {
const fakePath = '/rendered/path/folderExclusion';
const expectedParameters = {
folder: 'folderValue',
exclusion: 'exclusionValue',
};
const client = new loggingservicev2Module.v2.LoggingServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
client.pathTemplates.folderExclusionPathTemplate.render = sinon
.stub()
.returns(fakePath);
@@ -1991,7 +2007,7 @@ describe('v2.LoggingServiceV2Client', () => {
});
});
- describe('folderLocationBucket', () => {
+ describe('folderLocationBucket', async () => {
const fakePath = '/rendered/path/folderLocationBucket';
const expectedParameters = {
folder: 'folderValue',
@@ -1999,10 +2015,10 @@ describe('v2.LoggingServiceV2Client', () => {
bucket: 'bucketValue',
};
const client = new loggingservicev2Module.v2.LoggingServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
client.pathTemplates.folderLocationBucketPathTemplate.render = sinon
.stub()
.returns(fakePath);
@@ -2068,7 +2084,7 @@ describe('v2.LoggingServiceV2Client', () => {
});
});
- describe('folderLocationBucketLink', () => {
+ describe('folderLocationBucketLink', async () => {
const fakePath = '/rendered/path/folderLocationBucketLink';
const expectedParameters = {
folder: 'folderValue',
@@ -2077,10 +2093,10 @@ describe('v2.LoggingServiceV2Client', () => {
link: 'linkValue',
};
const client = new loggingservicev2Module.v2.LoggingServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
client.pathTemplates.folderLocationBucketLinkPathTemplate.render = sinon
.stub()
.returns(fakePath);
@@ -2163,7 +2179,7 @@ describe('v2.LoggingServiceV2Client', () => {
});
});
- describe('folderLocationBucketView', () => {
+ describe('folderLocationBucketView', async () => {
const fakePath = '/rendered/path/folderLocationBucketView';
const expectedParameters = {
folder: 'folderValue',
@@ -2172,10 +2188,10 @@ describe('v2.LoggingServiceV2Client', () => {
view: 'viewValue',
};
const client = new loggingservicev2Module.v2.LoggingServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
client.pathTemplates.folderLocationBucketViewPathTemplate.render = sinon
.stub()
.returns(fakePath);
@@ -2258,17 +2274,17 @@ describe('v2.LoggingServiceV2Client', () => {
});
});
- describe('folderLog', () => {
+ describe('folderLog', async () => {
const fakePath = '/rendered/path/folderLog';
const expectedParameters = {
folder: 'folderValue',
log: 'logValue',
};
const client = new loggingservicev2Module.v2.LoggingServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
client.pathTemplates.folderLogPathTemplate.render = sinon
.stub()
.returns(fakePath);
@@ -2307,16 +2323,16 @@ describe('v2.LoggingServiceV2Client', () => {
});
});
- describe('folderSettings', () => {
+ describe('folderSettings', async () => {
const fakePath = '/rendered/path/folderSettings';
const expectedParameters = {
folder: 'folderValue',
};
const client = new loggingservicev2Module.v2.LoggingServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
client.pathTemplates.folderSettingsPathTemplate.render = sinon
.stub()
.returns(fakePath);
@@ -2345,17 +2361,17 @@ describe('v2.LoggingServiceV2Client', () => {
});
});
- describe('folderSink', () => {
+ describe('folderSink', async () => {
const fakePath = '/rendered/path/folderSink';
const expectedParameters = {
folder: 'folderValue',
sink: 'sinkValue',
};
const client = new loggingservicev2Module.v2.LoggingServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
client.pathTemplates.folderSinkPathTemplate.render = sinon
.stub()
.returns(fakePath);
@@ -2394,17 +2410,17 @@ describe('v2.LoggingServiceV2Client', () => {
});
});
- describe('logMetric', () => {
+ describe('logMetric', async () => {
const fakePath = '/rendered/path/logMetric';
const expectedParameters = {
project: 'projectValue',
metric: 'metricValue',
};
const client = new loggingservicev2Module.v2.LoggingServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
client.pathTemplates.logMetricPathTemplate.render = sinon
.stub()
.returns(fakePath);
@@ -2443,16 +2459,16 @@ describe('v2.LoggingServiceV2Client', () => {
});
});
- describe('organizationCmekSettings', () => {
+ describe('organizationCmekSettings', async () => {
const fakePath = '/rendered/path/organizationCmekSettings';
const expectedParameters = {
organization: 'organizationValue',
};
const client = new loggingservicev2Module.v2.LoggingServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
client.pathTemplates.organizationCmekSettingsPathTemplate.render = sinon
.stub()
.returns(fakePath);
@@ -2488,17 +2504,17 @@ describe('v2.LoggingServiceV2Client', () => {
});
});
- describe('organizationExclusion', () => {
+ describe('organizationExclusion', async () => {
const fakePath = '/rendered/path/organizationExclusion';
const expectedParameters = {
organization: 'organizationValue',
exclusion: 'exclusionValue',
};
const client = new loggingservicev2Module.v2.LoggingServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
client.pathTemplates.organizationExclusionPathTemplate.render = sinon
.stub()
.returns(fakePath);
@@ -2551,7 +2567,7 @@ describe('v2.LoggingServiceV2Client', () => {
});
});
- describe('organizationLocationBucket', () => {
+ describe('organizationLocationBucket', async () => {
const fakePath = '/rendered/path/organizationLocationBucket';
const expectedParameters = {
organization: 'organizationValue',
@@ -2559,10 +2575,10 @@ describe('v2.LoggingServiceV2Client', () => {
bucket: 'bucketValue',
};
const client = new loggingservicev2Module.v2.LoggingServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
client.pathTemplates.organizationLocationBucketPathTemplate.render = sinon
.stub()
.returns(fakePath);
@@ -2630,7 +2646,7 @@ describe('v2.LoggingServiceV2Client', () => {
});
});
- describe('organizationLocationBucketLink', () => {
+ describe('organizationLocationBucketLink', async () => {
const fakePath = '/rendered/path/organizationLocationBucketLink';
const expectedParameters = {
organization: 'organizationValue',
@@ -2639,10 +2655,10 @@ describe('v2.LoggingServiceV2Client', () => {
link: 'linkValue',
};
const client = new loggingservicev2Module.v2.LoggingServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
client.pathTemplates.organizationLocationBucketLinkPathTemplate.render =
sinon.stub().returns(fakePath);
client.pathTemplates.organizationLocationBucketLinkPathTemplate.match =
@@ -2725,7 +2741,7 @@ describe('v2.LoggingServiceV2Client', () => {
});
});
- describe('organizationLocationBucketView', () => {
+ describe('organizationLocationBucketView', async () => {
const fakePath = '/rendered/path/organizationLocationBucketView';
const expectedParameters = {
organization: 'organizationValue',
@@ -2734,10 +2750,10 @@ describe('v2.LoggingServiceV2Client', () => {
view: 'viewValue',
};
const client = new loggingservicev2Module.v2.LoggingServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
client.pathTemplates.organizationLocationBucketViewPathTemplate.render =
sinon.stub().returns(fakePath);
client.pathTemplates.organizationLocationBucketViewPathTemplate.match =
@@ -2820,17 +2836,17 @@ describe('v2.LoggingServiceV2Client', () => {
});
});
- describe('organizationLog', () => {
+ describe('organizationLog', async () => {
const fakePath = '/rendered/path/organizationLog';
const expectedParameters = {
organization: 'organizationValue',
log: 'logValue',
};
const client = new loggingservicev2Module.v2.LoggingServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
client.pathTemplates.organizationLogPathTemplate.render = sinon
.stub()
.returns(fakePath);
@@ -2873,16 +2889,16 @@ describe('v2.LoggingServiceV2Client', () => {
});
});
- describe('organizationSettings', () => {
+ describe('organizationSettings', async () => {
const fakePath = '/rendered/path/organizationSettings';
const expectedParameters = {
organization: 'organizationValue',
};
const client = new loggingservicev2Module.v2.LoggingServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
client.pathTemplates.organizationSettingsPathTemplate.render = sinon
.stub()
.returns(fakePath);
@@ -2918,17 +2934,17 @@ describe('v2.LoggingServiceV2Client', () => {
});
});
- describe('organizationSink', () => {
+ describe('organizationSink', async () => {
const fakePath = '/rendered/path/organizationSink';
const expectedParameters = {
organization: 'organizationValue',
sink: 'sinkValue',
};
const client = new loggingservicev2Module.v2.LoggingServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
client.pathTemplates.organizationSinkPathTemplate.render = sinon
.stub()
.returns(fakePath);
@@ -2974,16 +2990,16 @@ describe('v2.LoggingServiceV2Client', () => {
});
});
- describe('project', () => {
+ describe('project', async () => {
const fakePath = '/rendered/path/project';
const expectedParameters = {
project: 'projectValue',
};
const client = new loggingservicev2Module.v2.LoggingServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
client.pathTemplates.projectPathTemplate.render = sinon
.stub()
.returns(fakePath);
@@ -3012,16 +3028,16 @@ describe('v2.LoggingServiceV2Client', () => {
});
});
- describe('projectCmekSettings', () => {
+ describe('projectCmekSettings', async () => {
const fakePath = '/rendered/path/projectCmekSettings';
const expectedParameters = {
project: 'projectValue',
};
const client = new loggingservicev2Module.v2.LoggingServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
client.pathTemplates.projectCmekSettingsPathTemplate.render = sinon
.stub()
.returns(fakePath);
@@ -3056,17 +3072,17 @@ describe('v2.LoggingServiceV2Client', () => {
});
});
- describe('projectExclusion', () => {
+ describe('projectExclusion', async () => {
const fakePath = '/rendered/path/projectExclusion';
const expectedParameters = {
project: 'projectValue',
exclusion: 'exclusionValue',
};
const client = new loggingservicev2Module.v2.LoggingServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
client.pathTemplates.projectExclusionPathTemplate.render = sinon
.stub()
.returns(fakePath);
@@ -3111,7 +3127,7 @@ describe('v2.LoggingServiceV2Client', () => {
});
});
- describe('projectLocationBucket', () => {
+ describe('projectLocationBucket', async () => {
const fakePath = '/rendered/path/projectLocationBucket';
const expectedParameters = {
project: 'projectValue',
@@ -3119,10 +3135,10 @@ describe('v2.LoggingServiceV2Client', () => {
bucket: 'bucketValue',
};
const client = new loggingservicev2Module.v2.LoggingServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
client.pathTemplates.projectLocationBucketPathTemplate.render = sinon
.stub()
.returns(fakePath);
@@ -3190,7 +3206,7 @@ describe('v2.LoggingServiceV2Client', () => {
});
});
- describe('projectLocationBucketLink', () => {
+ describe('projectLocationBucketLink', async () => {
const fakePath = '/rendered/path/projectLocationBucketLink';
const expectedParameters = {
project: 'projectValue',
@@ -3199,10 +3215,10 @@ describe('v2.LoggingServiceV2Client', () => {
link: 'linkValue',
};
const client = new loggingservicev2Module.v2.LoggingServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
client.pathTemplates.projectLocationBucketLinkPathTemplate.render = sinon
.stub()
.returns(fakePath);
@@ -3285,7 +3301,7 @@ describe('v2.LoggingServiceV2Client', () => {
});
});
- describe('projectLocationBucketView', () => {
+ describe('projectLocationBucketView', async () => {
const fakePath = '/rendered/path/projectLocationBucketView';
const expectedParameters = {
project: 'projectValue',
@@ -3294,10 +3310,10 @@ describe('v2.LoggingServiceV2Client', () => {
view: 'viewValue',
};
const client = new loggingservicev2Module.v2.LoggingServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
client.pathTemplates.projectLocationBucketViewPathTemplate.render = sinon
.stub()
.returns(fakePath);
@@ -3380,17 +3396,17 @@ describe('v2.LoggingServiceV2Client', () => {
});
});
- describe('projectLog', () => {
+ describe('projectLog', async () => {
const fakePath = '/rendered/path/projectLog';
const expectedParameters = {
project: 'projectValue',
log: 'logValue',
};
const client = new loggingservicev2Module.v2.LoggingServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
client.pathTemplates.projectLogPathTemplate.render = sinon
.stub()
.returns(fakePath);
@@ -3429,16 +3445,16 @@ describe('v2.LoggingServiceV2Client', () => {
});
});
- describe('projectSettings', () => {
+ describe('projectSettings', async () => {
const fakePath = '/rendered/path/projectSettings';
const expectedParameters = {
project: 'projectValue',
};
const client = new loggingservicev2Module.v2.LoggingServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
client.pathTemplates.projectSettingsPathTemplate.render = sinon
.stub()
.returns(fakePath);
@@ -3467,17 +3483,17 @@ describe('v2.LoggingServiceV2Client', () => {
});
});
- describe('projectSink', () => {
+ describe('projectSink', async () => {
const fakePath = '/rendered/path/projectSink';
const expectedParameters = {
project: 'projectValue',
sink: 'sinkValue',
};
const client = new loggingservicev2Module.v2.LoggingServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
client.pathTemplates.projectSinkPathTemplate.render = sinon
.stub()
.returns(fakePath);
diff --git a/handwritten/logging/test/gapic_metrics_service_v2_v2.ts b/handwritten/logging/test/gapic_metrics_service_v2_v2.ts
index 90a774f07657..08d008059574 100644
--- a/handwritten/logging/test/gapic_metrics_service_v2_v2.ts
+++ b/handwritten/logging/test/gapic_metrics_service_v2_v2.ts
@@ -1,4 +1,4 @@
-// Copyright 2024 Google LLC
+// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -19,13 +19,13 @@
import * as protos from '../protos/protos';
import * as assert from 'assert';
import * as sinon from 'sinon';
-import {SinonStub} from 'sinon';
-import {describe, it} from 'mocha';
+import { SinonStub } from 'sinon';
+import { describe, it } from 'mocha';
import * as metricsservicev2Module from '../src';
-import {PassThrough} from 'stream';
+import { PassThrough } from 'stream';
-import {protobuf} from 'google-gax';
+import { protobuf } from 'google-gax';
// Dynamically loaded proto JSON is needed to get the type information
// to fill in default values for request objects
@@ -45,7 +45,7 @@ function getTypeDefaultValue(typeName: string, fields: string[]) {
function generateSampleMessage(instance: T) {
const filledObject = (
instance.constructor as typeof protobuf.Message
- ).toObject(instance as protobuf.Message, {defaults: true});
+ ).toObject(instance as protobuf.Message, { defaults: true });
return (instance.constructor as typeof protobuf.Message).fromObject(
filledObject,
) as T;
@@ -117,9 +117,9 @@ function stubAsyncIterationCall(
return Promise.reject(error);
}
if (counter >= responses!.length) {
- return Promise.resolve({done: true, value: undefined});
+ return Promise.resolve({ done: true, value: undefined });
}
- return Promise.resolve({done: false, value: responses![counter++]});
+ return Promise.resolve({ done: false, value: responses![counter++] });
},
};
},
@@ -239,7 +239,7 @@ describe('v2.MetricsServiceV2Client', () => {
it('has initialize method and supports deferred initialization', async () => {
const client = new metricsservicev2Module.v2.MetricsServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
assert.strictEqual(client.metricsServiceV2Stub, undefined);
@@ -247,33 +247,45 @@ describe('v2.MetricsServiceV2Client', () => {
assert(client.metricsServiceV2Stub);
});
- it('has close method for the initialized client', done => {
+ it('has close method for the initialized client', (done) => {
const client = new metricsservicev2Module.v2.MetricsServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
- assert(client.metricsServiceV2Stub);
- client.close().then(() => {
- done();
+ client.initialize().catch((err) => {
+ throw err;
});
+ assert(client.metricsServiceV2Stub);
+ client
+ .close()
+ .then(() => {
+ done();
+ })
+ .catch((err) => {
+ throw err;
+ });
});
- it('has close method for the non-initialized client', done => {
+ it('has close method for the non-initialized client', (done) => {
const client = new metricsservicev2Module.v2.MetricsServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
assert.strictEqual(client.metricsServiceV2Stub, undefined);
- client.close().then(() => {
- done();
- });
+ client
+ .close()
+ .then(() => {
+ done();
+ })
+ .catch((err) => {
+ throw err;
+ });
});
it('has getProjectId method', async () => {
const fakeProjectId = 'fake-project-id';
const client = new metricsservicev2Module.v2.MetricsServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
client.auth.getProjectId = sinon.stub().resolves(fakeProjectId);
@@ -285,7 +297,7 @@ describe('v2.MetricsServiceV2Client', () => {
it('has getProjectId method with callback', async () => {
const fakeProjectId = 'fake-project-id';
const client = new metricsservicev2Module.v2.MetricsServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
client.auth.getProjectId = sinon
@@ -308,10 +320,10 @@ describe('v2.MetricsServiceV2Client', () => {
describe('getLogMetric', () => {
it('invokes getLogMetric without error', async () => {
const client = new metricsservicev2Module.v2.MetricsServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.GetLogMetricRequest(),
);
@@ -320,7 +332,7 @@ describe('v2.MetricsServiceV2Client', () => {
['metricName'],
);
request.metricName = defaultValue1;
- const expectedHeaderRequestParams = `metric_name=${defaultValue1}`;
+ const expectedHeaderRequestParams = `metric_name=${defaultValue1 ?? ''}`;
const expectedResponse = generateSampleMessage(
new protos.google.logging.v2.LogMetric(),
);
@@ -339,10 +351,10 @@ describe('v2.MetricsServiceV2Client', () => {
it('invokes getLogMetric without error using callback', async () => {
const client = new metricsservicev2Module.v2.MetricsServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.GetLogMetricRequest(),
);
@@ -351,7 +363,7 @@ describe('v2.MetricsServiceV2Client', () => {
['metricName'],
);
request.metricName = defaultValue1;
- const expectedHeaderRequestParams = `metric_name=${defaultValue1}`;
+ const expectedHeaderRequestParams = `metric_name=${defaultValue1 ?? ''}`;
const expectedResponse = generateSampleMessage(
new protos.google.logging.v2.LogMetric(),
);
@@ -386,10 +398,10 @@ describe('v2.MetricsServiceV2Client', () => {
it('invokes getLogMetric with error', async () => {
const client = new metricsservicev2Module.v2.MetricsServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.GetLogMetricRequest(),
);
@@ -398,7 +410,7 @@ describe('v2.MetricsServiceV2Client', () => {
['metricName'],
);
request.metricName = defaultValue1;
- const expectedHeaderRequestParams = `metric_name=${defaultValue1}`;
+ const expectedHeaderRequestParams = `metric_name=${defaultValue1 ?? ''}`;
const expectedError = new Error('expected');
client.innerApiCalls.getLogMetric = stubSimpleCall(
undefined,
@@ -417,10 +429,10 @@ describe('v2.MetricsServiceV2Client', () => {
it('invokes getLogMetric with closed client', async () => {
const client = new metricsservicev2Module.v2.MetricsServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.GetLogMetricRequest(),
);
@@ -430,7 +442,9 @@ describe('v2.MetricsServiceV2Client', () => {
);
request.metricName = defaultValue1;
const expectedError = new Error('The client has already been closed.');
- client.close();
+ client.close().catch((err) => {
+ throw err;
+ });
await assert.rejects(client.getLogMetric(request), expectedError);
});
});
@@ -438,10 +452,10 @@ describe('v2.MetricsServiceV2Client', () => {
describe('createLogMetric', () => {
it('invokes createLogMetric without error', async () => {
const client = new metricsservicev2Module.v2.MetricsServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.CreateLogMetricRequest(),
);
@@ -450,7 +464,7 @@ describe('v2.MetricsServiceV2Client', () => {
['parent'],
);
request.parent = defaultValue1;
- const expectedHeaderRequestParams = `parent=${defaultValue1}`;
+ const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`;
const expectedResponse = generateSampleMessage(
new protos.google.logging.v2.LogMetric(),
);
@@ -469,10 +483,10 @@ describe('v2.MetricsServiceV2Client', () => {
it('invokes createLogMetric without error using callback', async () => {
const client = new metricsservicev2Module.v2.MetricsServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.CreateLogMetricRequest(),
);
@@ -481,7 +495,7 @@ describe('v2.MetricsServiceV2Client', () => {
['parent'],
);
request.parent = defaultValue1;
- const expectedHeaderRequestParams = `parent=${defaultValue1}`;
+ const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`;
const expectedResponse = generateSampleMessage(
new protos.google.logging.v2.LogMetric(),
);
@@ -516,10 +530,10 @@ describe('v2.MetricsServiceV2Client', () => {
it('invokes createLogMetric with error', async () => {
const client = new metricsservicev2Module.v2.MetricsServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.CreateLogMetricRequest(),
);
@@ -528,7 +542,7 @@ describe('v2.MetricsServiceV2Client', () => {
['parent'],
);
request.parent = defaultValue1;
- const expectedHeaderRequestParams = `parent=${defaultValue1}`;
+ const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`;
const expectedError = new Error('expected');
client.innerApiCalls.createLogMetric = stubSimpleCall(
undefined,
@@ -547,10 +561,10 @@ describe('v2.MetricsServiceV2Client', () => {
it('invokes createLogMetric with closed client', async () => {
const client = new metricsservicev2Module.v2.MetricsServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.CreateLogMetricRequest(),
);
@@ -560,7 +574,9 @@ describe('v2.MetricsServiceV2Client', () => {
);
request.parent = defaultValue1;
const expectedError = new Error('The client has already been closed.');
- client.close();
+ client.close().catch((err) => {
+ throw err;
+ });
await assert.rejects(client.createLogMetric(request), expectedError);
});
});
@@ -568,10 +584,10 @@ describe('v2.MetricsServiceV2Client', () => {
describe('updateLogMetric', () => {
it('invokes updateLogMetric without error', async () => {
const client = new metricsservicev2Module.v2.MetricsServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.UpdateLogMetricRequest(),
);
@@ -580,7 +596,7 @@ describe('v2.MetricsServiceV2Client', () => {
['metricName'],
);
request.metricName = defaultValue1;
- const expectedHeaderRequestParams = `metric_name=${defaultValue1}`;
+ const expectedHeaderRequestParams = `metric_name=${defaultValue1 ?? ''}`;
const expectedResponse = generateSampleMessage(
new protos.google.logging.v2.LogMetric(),
);
@@ -599,10 +615,10 @@ describe('v2.MetricsServiceV2Client', () => {
it('invokes updateLogMetric without error using callback', async () => {
const client = new metricsservicev2Module.v2.MetricsServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.UpdateLogMetricRequest(),
);
@@ -611,7 +627,7 @@ describe('v2.MetricsServiceV2Client', () => {
['metricName'],
);
request.metricName = defaultValue1;
- const expectedHeaderRequestParams = `metric_name=${defaultValue1}`;
+ const expectedHeaderRequestParams = `metric_name=${defaultValue1 ?? ''}`;
const expectedResponse = generateSampleMessage(
new protos.google.logging.v2.LogMetric(),
);
@@ -646,10 +662,10 @@ describe('v2.MetricsServiceV2Client', () => {
it('invokes updateLogMetric with error', async () => {
const client = new metricsservicev2Module.v2.MetricsServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.UpdateLogMetricRequest(),
);
@@ -658,7 +674,7 @@ describe('v2.MetricsServiceV2Client', () => {
['metricName'],
);
request.metricName = defaultValue1;
- const expectedHeaderRequestParams = `metric_name=${defaultValue1}`;
+ const expectedHeaderRequestParams = `metric_name=${defaultValue1 ?? ''}`;
const expectedError = new Error('expected');
client.innerApiCalls.updateLogMetric = stubSimpleCall(
undefined,
@@ -677,10 +693,10 @@ describe('v2.MetricsServiceV2Client', () => {
it('invokes updateLogMetric with closed client', async () => {
const client = new metricsservicev2Module.v2.MetricsServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.UpdateLogMetricRequest(),
);
@@ -690,7 +706,9 @@ describe('v2.MetricsServiceV2Client', () => {
);
request.metricName = defaultValue1;
const expectedError = new Error('The client has already been closed.');
- client.close();
+ client.close().catch((err) => {
+ throw err;
+ });
await assert.rejects(client.updateLogMetric(request), expectedError);
});
});
@@ -698,10 +716,10 @@ describe('v2.MetricsServiceV2Client', () => {
describe('deleteLogMetric', () => {
it('invokes deleteLogMetric without error', async () => {
const client = new metricsservicev2Module.v2.MetricsServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.DeleteLogMetricRequest(),
);
@@ -710,7 +728,7 @@ describe('v2.MetricsServiceV2Client', () => {
['metricName'],
);
request.metricName = defaultValue1;
- const expectedHeaderRequestParams = `metric_name=${defaultValue1}`;
+ const expectedHeaderRequestParams = `metric_name=${defaultValue1 ?? ''}`;
const expectedResponse = generateSampleMessage(
new protos.google.protobuf.Empty(),
);
@@ -729,10 +747,10 @@ describe('v2.MetricsServiceV2Client', () => {
it('invokes deleteLogMetric without error using callback', async () => {
const client = new metricsservicev2Module.v2.MetricsServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.DeleteLogMetricRequest(),
);
@@ -741,7 +759,7 @@ describe('v2.MetricsServiceV2Client', () => {
['metricName'],
);
request.metricName = defaultValue1;
- const expectedHeaderRequestParams = `metric_name=${defaultValue1}`;
+ const expectedHeaderRequestParams = `metric_name=${defaultValue1 ?? ''}`;
const expectedResponse = generateSampleMessage(
new protos.google.protobuf.Empty(),
);
@@ -776,10 +794,10 @@ describe('v2.MetricsServiceV2Client', () => {
it('invokes deleteLogMetric with error', async () => {
const client = new metricsservicev2Module.v2.MetricsServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.DeleteLogMetricRequest(),
);
@@ -788,7 +806,7 @@ describe('v2.MetricsServiceV2Client', () => {
['metricName'],
);
request.metricName = defaultValue1;
- const expectedHeaderRequestParams = `metric_name=${defaultValue1}`;
+ const expectedHeaderRequestParams = `metric_name=${defaultValue1 ?? ''}`;
const expectedError = new Error('expected');
client.innerApiCalls.deleteLogMetric = stubSimpleCall(
undefined,
@@ -807,10 +825,10 @@ describe('v2.MetricsServiceV2Client', () => {
it('invokes deleteLogMetric with closed client', async () => {
const client = new metricsservicev2Module.v2.MetricsServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.DeleteLogMetricRequest(),
);
@@ -820,7 +838,9 @@ describe('v2.MetricsServiceV2Client', () => {
);
request.metricName = defaultValue1;
const expectedError = new Error('The client has already been closed.');
- client.close();
+ client.close().catch((err) => {
+ throw err;
+ });
await assert.rejects(client.deleteLogMetric(request), expectedError);
});
});
@@ -828,10 +848,10 @@ describe('v2.MetricsServiceV2Client', () => {
describe('listLogMetrics', () => {
it('invokes listLogMetrics without error', async () => {
const client = new metricsservicev2Module.v2.MetricsServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.ListLogMetricsRequest(),
);
@@ -840,7 +860,7 @@ describe('v2.MetricsServiceV2Client', () => {
['parent'],
);
request.parent = defaultValue1;
- const expectedHeaderRequestParams = `parent=${defaultValue1}`;
+ const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`;
const expectedResponse = [
generateSampleMessage(new protos.google.logging.v2.LogMetric()),
generateSampleMessage(new protos.google.logging.v2.LogMetric()),
@@ -861,10 +881,10 @@ describe('v2.MetricsServiceV2Client', () => {
it('invokes listLogMetrics without error using callback', async () => {
const client = new metricsservicev2Module.v2.MetricsServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.ListLogMetricsRequest(),
);
@@ -873,7 +893,7 @@ describe('v2.MetricsServiceV2Client', () => {
['parent'],
);
request.parent = defaultValue1;
- const expectedHeaderRequestParams = `parent=${defaultValue1}`;
+ const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`;
const expectedResponse = [
generateSampleMessage(new protos.google.logging.v2.LogMetric()),
generateSampleMessage(new protos.google.logging.v2.LogMetric()),
@@ -910,10 +930,10 @@ describe('v2.MetricsServiceV2Client', () => {
it('invokes listLogMetrics with error', async () => {
const client = new metricsservicev2Module.v2.MetricsServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.ListLogMetricsRequest(),
);
@@ -922,7 +942,7 @@ describe('v2.MetricsServiceV2Client', () => {
['parent'],
);
request.parent = defaultValue1;
- const expectedHeaderRequestParams = `parent=${defaultValue1}`;
+ const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`;
const expectedError = new Error('expected');
client.innerApiCalls.listLogMetrics = stubSimpleCall(
undefined,
@@ -941,10 +961,10 @@ describe('v2.MetricsServiceV2Client', () => {
it('invokes listLogMetricsStream without error', async () => {
const client = new metricsservicev2Module.v2.MetricsServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.ListLogMetricsRequest(),
);
@@ -953,7 +973,7 @@ describe('v2.MetricsServiceV2Client', () => {
['parent'],
);
request.parent = defaultValue1;
- const expectedHeaderRequestParams = `parent=${defaultValue1}`;
+ const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`;
const expectedResponse = [
generateSampleMessage(new protos.google.logging.v2.LogMetric()),
generateSampleMessage(new protos.google.logging.v2.LogMetric()),
@@ -992,10 +1012,10 @@ describe('v2.MetricsServiceV2Client', () => {
it('invokes listLogMetricsStream with error', async () => {
const client = new metricsservicev2Module.v2.MetricsServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.ListLogMetricsRequest(),
);
@@ -1004,7 +1024,7 @@ describe('v2.MetricsServiceV2Client', () => {
['parent'],
);
request.parent = defaultValue1;
- const expectedHeaderRequestParams = `parent=${defaultValue1}`;
+ const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`;
const expectedError = new Error('expected');
client.descriptors.page.listLogMetrics.createStream =
stubPageStreamingCall(undefined, expectedError);
@@ -1038,10 +1058,10 @@ describe('v2.MetricsServiceV2Client', () => {
it('uses async iteration with listLogMetrics without error', async () => {
const client = new metricsservicev2Module.v2.MetricsServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.ListLogMetricsRequest(),
);
@@ -1050,7 +1070,7 @@ describe('v2.MetricsServiceV2Client', () => {
['parent'],
);
request.parent = defaultValue1;
- const expectedHeaderRequestParams = `parent=${defaultValue1}`;
+ const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`;
const expectedResponse = [
generateSampleMessage(new protos.google.logging.v2.LogMetric()),
generateSampleMessage(new protos.google.logging.v2.LogMetric()),
@@ -1081,10 +1101,10 @@ describe('v2.MetricsServiceV2Client', () => {
it('uses async iteration with listLogMetrics with error', async () => {
const client = new metricsservicev2Module.v2.MetricsServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
const request = generateSampleMessage(
new protos.google.logging.v2.ListLogMetricsRequest(),
);
@@ -1093,7 +1113,7 @@ describe('v2.MetricsServiceV2Client', () => {
['parent'],
);
request.parent = defaultValue1;
- const expectedHeaderRequestParams = `parent=${defaultValue1}`;
+ const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`;
const expectedError = new Error('expected');
client.descriptors.page.listLogMetrics.asyncIterate =
stubAsyncIterationCall(undefined, expectedError);
@@ -1121,16 +1141,16 @@ describe('v2.MetricsServiceV2Client', () => {
});
describe('Path templates', () => {
- describe('billingAccountCmekSettings', () => {
+ describe('billingAccountCmekSettings', async () => {
const fakePath = '/rendered/path/billingAccountCmekSettings';
const expectedParameters = {
billing_account: 'billingAccountValue',
};
const client = new metricsservicev2Module.v2.MetricsServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
client.pathTemplates.billingAccountCmekSettingsPathTemplate.render = sinon
.stub()
.returns(fakePath);
@@ -1170,17 +1190,17 @@ describe('v2.MetricsServiceV2Client', () => {
});
});
- describe('billingAccountExclusion', () => {
+ describe('billingAccountExclusion', async () => {
const fakePath = '/rendered/path/billingAccountExclusion';
const expectedParameters = {
billing_account: 'billingAccountValue',
exclusion: 'exclusionValue',
};
const client = new metricsservicev2Module.v2.MetricsServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
client.pathTemplates.billingAccountExclusionPathTemplate.render = sinon
.stub()
.returns(fakePath);
@@ -1233,7 +1253,7 @@ describe('v2.MetricsServiceV2Client', () => {
});
});
- describe('billingAccountLocationBucket', () => {
+ describe('billingAccountLocationBucket', async () => {
const fakePath = '/rendered/path/billingAccountLocationBucket';
const expectedParameters = {
billing_account: 'billingAccountValue',
@@ -1241,10 +1261,10 @@ describe('v2.MetricsServiceV2Client', () => {
bucket: 'bucketValue',
};
const client = new metricsservicev2Module.v2.MetricsServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
client.pathTemplates.billingAccountLocationBucketPathTemplate.render =
sinon.stub().returns(fakePath);
client.pathTemplates.billingAccountLocationBucketPathTemplate.match =
@@ -1312,7 +1332,7 @@ describe('v2.MetricsServiceV2Client', () => {
});
});
- describe('billingAccountLocationBucketLink', () => {
+ describe('billingAccountLocationBucketLink', async () => {
const fakePath = '/rendered/path/billingAccountLocationBucketLink';
const expectedParameters = {
billing_account: 'billingAccountValue',
@@ -1321,10 +1341,10 @@ describe('v2.MetricsServiceV2Client', () => {
link: 'linkValue',
};
const client = new metricsservicev2Module.v2.MetricsServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
client.pathTemplates.billingAccountLocationBucketLinkPathTemplate.render =
sinon.stub().returns(fakePath);
client.pathTemplates.billingAccountLocationBucketLinkPathTemplate.match =
@@ -1409,7 +1429,7 @@ describe('v2.MetricsServiceV2Client', () => {
});
});
- describe('billingAccountLocationBucketView', () => {
+ describe('billingAccountLocationBucketView', async () => {
const fakePath = '/rendered/path/billingAccountLocationBucketView';
const expectedParameters = {
billing_account: 'billingAccountValue',
@@ -1418,10 +1438,10 @@ describe('v2.MetricsServiceV2Client', () => {
view: 'viewValue',
};
const client = new metricsservicev2Module.v2.MetricsServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
client.pathTemplates.billingAccountLocationBucketViewPathTemplate.render =
sinon.stub().returns(fakePath);
client.pathTemplates.billingAccountLocationBucketViewPathTemplate.match =
@@ -1506,17 +1526,17 @@ describe('v2.MetricsServiceV2Client', () => {
});
});
- describe('billingAccountLog', () => {
+ describe('billingAccountLog', async () => {
const fakePath = '/rendered/path/billingAccountLog';
const expectedParameters = {
billing_account: 'billingAccountValue',
log: 'logValue',
};
const client = new metricsservicev2Module.v2.MetricsServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
client.pathTemplates.billingAccountLogPathTemplate.render = sinon
.stub()
.returns(fakePath);
@@ -1568,16 +1588,16 @@ describe('v2.MetricsServiceV2Client', () => {
});
});
- describe('billingAccountSettings', () => {
+ describe('billingAccountSettings', async () => {
const fakePath = '/rendered/path/billingAccountSettings';
const expectedParameters = {
billing_account: 'billingAccountValue',
};
const client = new metricsservicev2Module.v2.MetricsServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
client.pathTemplates.billingAccountSettingsPathTemplate.render = sinon
.stub()
.returns(fakePath);
@@ -1613,17 +1633,17 @@ describe('v2.MetricsServiceV2Client', () => {
});
});
- describe('billingAccountSink', () => {
+ describe('billingAccountSink', async () => {
const fakePath = '/rendered/path/billingAccountSink';
const expectedParameters = {
billing_account: 'billingAccountValue',
sink: 'sinkValue',
};
const client = new metricsservicev2Module.v2.MetricsServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
client.pathTemplates.billingAccountSinkPathTemplate.render = sinon
.stub()
.returns(fakePath);
@@ -1675,16 +1695,16 @@ describe('v2.MetricsServiceV2Client', () => {
});
});
- describe('folderCmekSettings', () => {
+ describe('folderCmekSettings', async () => {
const fakePath = '/rendered/path/folderCmekSettings';
const expectedParameters = {
folder: 'folderValue',
};
const client = new metricsservicev2Module.v2.MetricsServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
client.pathTemplates.folderCmekSettingsPathTemplate.render = sinon
.stub()
.returns(fakePath);
@@ -1719,17 +1739,17 @@ describe('v2.MetricsServiceV2Client', () => {
});
});
- describe('folderExclusion', () => {
+ describe('folderExclusion', async () => {
const fakePath = '/rendered/path/folderExclusion';
const expectedParameters = {
folder: 'folderValue',
exclusion: 'exclusionValue',
};
const client = new metricsservicev2Module.v2.MetricsServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
client.pathTemplates.folderExclusionPathTemplate.render = sinon
.stub()
.returns(fakePath);
@@ -1771,7 +1791,7 @@ describe('v2.MetricsServiceV2Client', () => {
});
});
- describe('folderLocationBucket', () => {
+ describe('folderLocationBucket', async () => {
const fakePath = '/rendered/path/folderLocationBucket';
const expectedParameters = {
folder: 'folderValue',
@@ -1779,10 +1799,10 @@ describe('v2.MetricsServiceV2Client', () => {
bucket: 'bucketValue',
};
const client = new metricsservicev2Module.v2.MetricsServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
client.pathTemplates.folderLocationBucketPathTemplate.render = sinon
.stub()
.returns(fakePath);
@@ -1848,7 +1868,7 @@ describe('v2.MetricsServiceV2Client', () => {
});
});
- describe('folderLocationBucketLink', () => {
+ describe('folderLocationBucketLink', async () => {
const fakePath = '/rendered/path/folderLocationBucketLink';
const expectedParameters = {
folder: 'folderValue',
@@ -1857,10 +1877,10 @@ describe('v2.MetricsServiceV2Client', () => {
link: 'linkValue',
};
const client = new metricsservicev2Module.v2.MetricsServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
client.pathTemplates.folderLocationBucketLinkPathTemplate.render = sinon
.stub()
.returns(fakePath);
@@ -1943,7 +1963,7 @@ describe('v2.MetricsServiceV2Client', () => {
});
});
- describe('folderLocationBucketView', () => {
+ describe('folderLocationBucketView', async () => {
const fakePath = '/rendered/path/folderLocationBucketView';
const expectedParameters = {
folder: 'folderValue',
@@ -1952,10 +1972,10 @@ describe('v2.MetricsServiceV2Client', () => {
view: 'viewValue',
};
const client = new metricsservicev2Module.v2.MetricsServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
client.pathTemplates.folderLocationBucketViewPathTemplate.render = sinon
.stub()
.returns(fakePath);
@@ -2038,17 +2058,17 @@ describe('v2.MetricsServiceV2Client', () => {
});
});
- describe('folderLog', () => {
+ describe('folderLog', async () => {
const fakePath = '/rendered/path/folderLog';
const expectedParameters = {
folder: 'folderValue',
log: 'logValue',
};
const client = new metricsservicev2Module.v2.MetricsServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
client.pathTemplates.folderLogPathTemplate.render = sinon
.stub()
.returns(fakePath);
@@ -2087,16 +2107,16 @@ describe('v2.MetricsServiceV2Client', () => {
});
});
- describe('folderSettings', () => {
+ describe('folderSettings', async () => {
const fakePath = '/rendered/path/folderSettings';
const expectedParameters = {
folder: 'folderValue',
};
const client = new metricsservicev2Module.v2.MetricsServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
client.pathTemplates.folderSettingsPathTemplate.render = sinon
.stub()
.returns(fakePath);
@@ -2125,17 +2145,17 @@ describe('v2.MetricsServiceV2Client', () => {
});
});
- describe('folderSink', () => {
+ describe('folderSink', async () => {
const fakePath = '/rendered/path/folderSink';
const expectedParameters = {
folder: 'folderValue',
sink: 'sinkValue',
};
const client = new metricsservicev2Module.v2.MetricsServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
client.pathTemplates.folderSinkPathTemplate.render = sinon
.stub()
.returns(fakePath);
@@ -2174,17 +2194,17 @@ describe('v2.MetricsServiceV2Client', () => {
});
});
- describe('logMetric', () => {
+ describe('logMetric', async () => {
const fakePath = '/rendered/path/logMetric';
const expectedParameters = {
project: 'projectValue',
metric: 'metricValue',
};
const client = new metricsservicev2Module.v2.MetricsServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
client.pathTemplates.logMetricPathTemplate.render = sinon
.stub()
.returns(fakePath);
@@ -2223,16 +2243,16 @@ describe('v2.MetricsServiceV2Client', () => {
});
});
- describe('organizationCmekSettings', () => {
+ describe('organizationCmekSettings', async () => {
const fakePath = '/rendered/path/organizationCmekSettings';
const expectedParameters = {
organization: 'organizationValue',
};
const client = new metricsservicev2Module.v2.MetricsServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
client.pathTemplates.organizationCmekSettingsPathTemplate.render = sinon
.stub()
.returns(fakePath);
@@ -2268,17 +2288,17 @@ describe('v2.MetricsServiceV2Client', () => {
});
});
- describe('organizationExclusion', () => {
+ describe('organizationExclusion', async () => {
const fakePath = '/rendered/path/organizationExclusion';
const expectedParameters = {
organization: 'organizationValue',
exclusion: 'exclusionValue',
};
const client = new metricsservicev2Module.v2.MetricsServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
client.pathTemplates.organizationExclusionPathTemplate.render = sinon
.stub()
.returns(fakePath);
@@ -2331,7 +2351,7 @@ describe('v2.MetricsServiceV2Client', () => {
});
});
- describe('organizationLocationBucket', () => {
+ describe('organizationLocationBucket', async () => {
const fakePath = '/rendered/path/organizationLocationBucket';
const expectedParameters = {
organization: 'organizationValue',
@@ -2339,10 +2359,10 @@ describe('v2.MetricsServiceV2Client', () => {
bucket: 'bucketValue',
};
const client = new metricsservicev2Module.v2.MetricsServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
client.pathTemplates.organizationLocationBucketPathTemplate.render = sinon
.stub()
.returns(fakePath);
@@ -2410,7 +2430,7 @@ describe('v2.MetricsServiceV2Client', () => {
});
});
- describe('organizationLocationBucketLink', () => {
+ describe('organizationLocationBucketLink', async () => {
const fakePath = '/rendered/path/organizationLocationBucketLink';
const expectedParameters = {
organization: 'organizationValue',
@@ -2419,10 +2439,10 @@ describe('v2.MetricsServiceV2Client', () => {
link: 'linkValue',
};
const client = new metricsservicev2Module.v2.MetricsServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
client.pathTemplates.organizationLocationBucketLinkPathTemplate.render =
sinon.stub().returns(fakePath);
client.pathTemplates.organizationLocationBucketLinkPathTemplate.match =
@@ -2505,7 +2525,7 @@ describe('v2.MetricsServiceV2Client', () => {
});
});
- describe('organizationLocationBucketView', () => {
+ describe('organizationLocationBucketView', async () => {
const fakePath = '/rendered/path/organizationLocationBucketView';
const expectedParameters = {
organization: 'organizationValue',
@@ -2514,10 +2534,10 @@ describe('v2.MetricsServiceV2Client', () => {
view: 'viewValue',
};
const client = new metricsservicev2Module.v2.MetricsServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
client.pathTemplates.organizationLocationBucketViewPathTemplate.render =
sinon.stub().returns(fakePath);
client.pathTemplates.organizationLocationBucketViewPathTemplate.match =
@@ -2600,17 +2620,17 @@ describe('v2.MetricsServiceV2Client', () => {
});
});
- describe('organizationLog', () => {
+ describe('organizationLog', async () => {
const fakePath = '/rendered/path/organizationLog';
const expectedParameters = {
organization: 'organizationValue',
log: 'logValue',
};
const client = new metricsservicev2Module.v2.MetricsServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
client.pathTemplates.organizationLogPathTemplate.render = sinon
.stub()
.returns(fakePath);
@@ -2653,16 +2673,16 @@ describe('v2.MetricsServiceV2Client', () => {
});
});
- describe('organizationSettings', () => {
+ describe('organizationSettings', async () => {
const fakePath = '/rendered/path/organizationSettings';
const expectedParameters = {
organization: 'organizationValue',
};
const client = new metricsservicev2Module.v2.MetricsServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
client.pathTemplates.organizationSettingsPathTemplate.render = sinon
.stub()
.returns(fakePath);
@@ -2698,17 +2718,17 @@ describe('v2.MetricsServiceV2Client', () => {
});
});
- describe('organizationSink', () => {
+ describe('organizationSink', async () => {
const fakePath = '/rendered/path/organizationSink';
const expectedParameters = {
organization: 'organizationValue',
sink: 'sinkValue',
};
const client = new metricsservicev2Module.v2.MetricsServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
client.pathTemplates.organizationSinkPathTemplate.render = sinon
.stub()
.returns(fakePath);
@@ -2754,16 +2774,16 @@ describe('v2.MetricsServiceV2Client', () => {
});
});
- describe('project', () => {
+ describe('project', async () => {
const fakePath = '/rendered/path/project';
const expectedParameters = {
project: 'projectValue',
};
const client = new metricsservicev2Module.v2.MetricsServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
client.pathTemplates.projectPathTemplate.render = sinon
.stub()
.returns(fakePath);
@@ -2792,16 +2812,16 @@ describe('v2.MetricsServiceV2Client', () => {
});
});
- describe('projectCmekSettings', () => {
+ describe('projectCmekSettings', async () => {
const fakePath = '/rendered/path/projectCmekSettings';
const expectedParameters = {
project: 'projectValue',
};
const client = new metricsservicev2Module.v2.MetricsServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
client.pathTemplates.projectCmekSettingsPathTemplate.render = sinon
.stub()
.returns(fakePath);
@@ -2836,17 +2856,17 @@ describe('v2.MetricsServiceV2Client', () => {
});
});
- describe('projectExclusion', () => {
+ describe('projectExclusion', async () => {
const fakePath = '/rendered/path/projectExclusion';
const expectedParameters = {
project: 'projectValue',
exclusion: 'exclusionValue',
};
const client = new metricsservicev2Module.v2.MetricsServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
client.pathTemplates.projectExclusionPathTemplate.render = sinon
.stub()
.returns(fakePath);
@@ -2891,7 +2911,7 @@ describe('v2.MetricsServiceV2Client', () => {
});
});
- describe('projectLocationBucket', () => {
+ describe('projectLocationBucket', async () => {
const fakePath = '/rendered/path/projectLocationBucket';
const expectedParameters = {
project: 'projectValue',
@@ -2899,10 +2919,10 @@ describe('v2.MetricsServiceV2Client', () => {
bucket: 'bucketValue',
};
const client = new metricsservicev2Module.v2.MetricsServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
client.pathTemplates.projectLocationBucketPathTemplate.render = sinon
.stub()
.returns(fakePath);
@@ -2970,7 +2990,7 @@ describe('v2.MetricsServiceV2Client', () => {
});
});
- describe('projectLocationBucketLink', () => {
+ describe('projectLocationBucketLink', async () => {
const fakePath = '/rendered/path/projectLocationBucketLink';
const expectedParameters = {
project: 'projectValue',
@@ -2979,10 +2999,10 @@ describe('v2.MetricsServiceV2Client', () => {
link: 'linkValue',
};
const client = new metricsservicev2Module.v2.MetricsServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
client.pathTemplates.projectLocationBucketLinkPathTemplate.render = sinon
.stub()
.returns(fakePath);
@@ -3065,7 +3085,7 @@ describe('v2.MetricsServiceV2Client', () => {
});
});
- describe('projectLocationBucketView', () => {
+ describe('projectLocationBucketView', async () => {
const fakePath = '/rendered/path/projectLocationBucketView';
const expectedParameters = {
project: 'projectValue',
@@ -3074,10 +3094,10 @@ describe('v2.MetricsServiceV2Client', () => {
view: 'viewValue',
};
const client = new metricsservicev2Module.v2.MetricsServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
client.pathTemplates.projectLocationBucketViewPathTemplate.render = sinon
.stub()
.returns(fakePath);
@@ -3160,17 +3180,17 @@ describe('v2.MetricsServiceV2Client', () => {
});
});
- describe('projectLog', () => {
+ describe('projectLog', async () => {
const fakePath = '/rendered/path/projectLog';
const expectedParameters = {
project: 'projectValue',
log: 'logValue',
};
const client = new metricsservicev2Module.v2.MetricsServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
client.pathTemplates.projectLogPathTemplate.render = sinon
.stub()
.returns(fakePath);
@@ -3209,16 +3229,16 @@ describe('v2.MetricsServiceV2Client', () => {
});
});
- describe('projectSettings', () => {
+ describe('projectSettings', async () => {
const fakePath = '/rendered/path/projectSettings';
const expectedParameters = {
project: 'projectValue',
};
const client = new metricsservicev2Module.v2.MetricsServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
client.pathTemplates.projectSettingsPathTemplate.render = sinon
.stub()
.returns(fakePath);
@@ -3247,17 +3267,17 @@ describe('v2.MetricsServiceV2Client', () => {
});
});
- describe('projectSink', () => {
+ describe('projectSink', async () => {
const fakePath = '/rendered/path/projectSink';
const expectedParameters = {
project: 'projectValue',
sink: 'sinkValue',
};
const client = new metricsservicev2Module.v2.MetricsServiceV2Client({
- credentials: {client_email: 'bogus', private_key: 'bogus'},
+ credentials: { client_email: 'bogus', private_key: 'bogus' },
projectId: 'bogus',
});
- client.initialize();
+ await client.initialize();
client.pathTemplates.projectSinkPathTemplate.render = sinon
.stub()
.returns(fakePath);
diff --git a/handwritten/logging/tsconfig.json b/handwritten/logging/tsconfig.json
index c78f1c884ef6..ca73e7bfc824 100644
--- a/handwritten/logging/tsconfig.json
+++ b/handwritten/logging/tsconfig.json
@@ -5,7 +5,7 @@
"outDir": "build",
"resolveJsonModule": true,
"lib": [
- "es2018",
+ "es2023",
"dom"
]
},
@@ -14,6 +14,9 @@
"src/**/*.ts",
"test/*.ts",
"test/**/*.ts",
- "system-test/*.ts"
+ "system-test/*.ts",
+ "src/**/*.json",
+ "samples/**/*.json",
+ "protos/protos.json"
]
}
diff --git a/handwritten/logging/webpack.config.js b/handwritten/logging/webpack.config.js
index 1cc3b570dfd0..a37e80e57376 100644
--- a/handwritten/logging/webpack.config.js
+++ b/handwritten/logging/webpack.config.js
@@ -36,27 +36,27 @@ module.exports = {
{
test: /\.tsx?$/,
use: 'ts-loader',
- exclude: /node_modules/,
+ exclude: /node_modules/
},
{
test: /node_modules[\\/]@grpc[\\/]grpc-js/,
- use: 'null-loader',
+ use: 'null-loader'
},
{
test: /node_modules[\\/]grpc/,
- use: 'null-loader',
+ use: 'null-loader'
},
{
test: /node_modules[\\/]retry-request/,
- use: 'null-loader',
+ use: 'null-loader'
},
{
test: /node_modules[\\/]https?-proxy-agent/,
- use: 'null-loader',
+ use: 'null-loader'
},
{
test: /node_modules[\\/]gtoken/,
- use: 'null-loader',
+ use: 'null-loader'
},
],
},
diff --git a/packages/google-ads-datamanager/.eslintignore b/packages/google-ads-datamanager/.eslintignore
new file mode 100644
index 000000000000..cfc348ec4d11
--- /dev/null
+++ b/packages/google-ads-datamanager/.eslintignore
@@ -0,0 +1,7 @@
+**/node_modules
+**/.coverage
+build/
+docs/
+protos/
+system-test/
+samples/generated/
diff --git a/packages/google-ads-datamanager/.eslintrc.json b/packages/google-ads-datamanager/.eslintrc.json
new file mode 100644
index 000000000000..3e8d97ccb390
--- /dev/null
+++ b/packages/google-ads-datamanager/.eslintrc.json
@@ -0,0 +1,4 @@
+{
+ "extends": "./node_modules/gts",
+ "root": true
+}
diff --git a/packages/google-ads-datamanager/README.md b/packages/google-ads-datamanager/README.md
index a1888aea2a6e..3dad6e62c326 100644
--- a/packages/google-ads-datamanager/README.md
+++ b/packages/google-ads-datamanager/README.md
@@ -113,7 +113,7 @@ More Information: [Google Cloud Platform Launch Stages][launch_stages]
## Contributing
-Contributions welcome! See the [Contributing Guide](https://github.com/googleapis/google-cloud-node/blob/main/packages/google-ads-datamanager/CONTRIBUTING.md).
+Contributions welcome! See the [Contributing Guide](https://github.com/googleapis/google-cloud-node/blob/main/CONTRIBUTING.md).
Please note that this `README.md`
and a variety of configuration files in this repository (including `.nycrc` and `tsconfig.json`)
@@ -123,7 +123,7 @@ are generated from a central template.
Apache Version 2.0
-See [LICENSE](https://github.com/googleapis/google-cloud-node/blob/main/packages/google-ads-datamanager/LICENSE)
+See [LICENSE](https://github.com/googleapis/google-cloud-node/blob/main/LICENSE)
[shell_img]: https://gstatic.com/cloudssh/images/open-btn.png
[projects]: https://console.cloud.google.com/project
diff --git a/packages/google-ads-datamanager/protos/protos.d.ts b/packages/google-ads-datamanager/protos/protos.d.ts
index c16044ba1522..27498fe81f29 100644
--- a/packages/google-ads-datamanager/protos/protos.d.ts
+++ b/packages/google-ads-datamanager/protos/protos.d.ts
@@ -12327,6 +12327,9 @@ export namespace google {
/** CommonLanguageSettings destinations */
destinations?: (google.api.ClientLibraryDestination[]|null);
+
+ /** CommonLanguageSettings selectiveGapicGeneration */
+ selectiveGapicGeneration?: (google.api.ISelectiveGapicGeneration|null);
}
/** Represents a CommonLanguageSettings. */
@@ -12344,6 +12347,9 @@ export namespace google {
/** CommonLanguageSettings destinations. */
public destinations: google.api.ClientLibraryDestination[];
+ /** CommonLanguageSettings selectiveGapicGeneration. */
+ public selectiveGapicGeneration?: (google.api.ISelectiveGapicGeneration|null);
+
/**
* Creates a new CommonLanguageSettings instance using the specified properties.
* @param [properties] Properties to set
@@ -13044,6 +13050,9 @@ export namespace google {
/** PythonSettings common */
common?: (google.api.ICommonLanguageSettings|null);
+
+ /** PythonSettings experimentalFeatures */
+ experimentalFeatures?: (google.api.PythonSettings.IExperimentalFeatures|null);
}
/** Represents a PythonSettings. */
@@ -13058,6 +13067,9 @@ export namespace google {
/** PythonSettings common. */
public common?: (google.api.ICommonLanguageSettings|null);
+ /** PythonSettings experimentalFeatures. */
+ public experimentalFeatures?: (google.api.PythonSettings.IExperimentalFeatures|null);
+
/**
* Creates a new PythonSettings instance using the specified properties.
* @param [properties] Properties to set
@@ -13136,6 +13148,118 @@ export namespace google {
public static getTypeUrl(typeUrlPrefix?: string): string;
}
+ namespace PythonSettings {
+
+ /** Properties of an ExperimentalFeatures. */
+ interface IExperimentalFeatures {
+
+ /** ExperimentalFeatures restAsyncIoEnabled */
+ restAsyncIoEnabled?: (boolean|null);
+
+ /** ExperimentalFeatures protobufPythonicTypesEnabled */
+ protobufPythonicTypesEnabled?: (boolean|null);
+
+ /** ExperimentalFeatures unversionedPackageDisabled */
+ unversionedPackageDisabled?: (boolean|null);
+ }
+
+ /** Represents an ExperimentalFeatures. */
+ class ExperimentalFeatures implements IExperimentalFeatures {
+
+ /**
+ * Constructs a new ExperimentalFeatures.
+ * @param [properties] Properties to set
+ */
+ constructor(properties?: google.api.PythonSettings.IExperimentalFeatures);
+
+ /** ExperimentalFeatures restAsyncIoEnabled. */
+ public restAsyncIoEnabled: boolean;
+
+ /** ExperimentalFeatures protobufPythonicTypesEnabled. */
+ public protobufPythonicTypesEnabled: boolean;
+
+ /** ExperimentalFeatures unversionedPackageDisabled. */
+ public unversionedPackageDisabled: boolean;
+
+ /**
+ * Creates a new ExperimentalFeatures instance using the specified properties.
+ * @param [properties] Properties to set
+ * @returns ExperimentalFeatures instance
+ */
+ public static create(properties?: google.api.PythonSettings.IExperimentalFeatures): google.api.PythonSettings.ExperimentalFeatures;
+
+ /**
+ * Encodes the specified ExperimentalFeatures message. Does not implicitly {@link google.api.PythonSettings.ExperimentalFeatures.verify|verify} messages.
+ * @param message ExperimentalFeatures message or plain object to encode
+ * @param [writer] Writer to encode to
+ * @returns Writer
+ */
+ public static encode(message: google.api.PythonSettings.IExperimentalFeatures, writer?: $protobuf.Writer): $protobuf.Writer;
+
+ /**
+ * Encodes the specified ExperimentalFeatures message, length delimited. Does not implicitly {@link google.api.PythonSettings.ExperimentalFeatures.verify|verify} messages.
+ * @param message ExperimentalFeatures message or plain object to encode
+ * @param [writer] Writer to encode to
+ * @returns Writer
+ */
+ public static encodeDelimited(message: google.api.PythonSettings.IExperimentalFeatures, writer?: $protobuf.Writer): $protobuf.Writer;
+
+ /**
+ * Decodes an ExperimentalFeatures message from the specified reader or buffer.
+ * @param reader Reader or buffer to decode from
+ * @param [length] Message length if known beforehand
+ * @returns ExperimentalFeatures
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.api.PythonSettings.ExperimentalFeatures;
+
+ /**
+ * Decodes an ExperimentalFeatures message from the specified reader or buffer, length delimited.
+ * @param reader Reader or buffer to decode from
+ * @returns ExperimentalFeatures
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.api.PythonSettings.ExperimentalFeatures;
+
+ /**
+ * Verifies an ExperimentalFeatures message.
+ * @param message Plain object to verify
+ * @returns `null` if valid, otherwise the reason why it is not
+ */
+ public static verify(message: { [k: string]: any }): (string|null);
+
+ /**
+ * Creates an ExperimentalFeatures message from a plain object. Also converts values to their respective internal types.
+ * @param object Plain object
+ * @returns ExperimentalFeatures
+ */
+ public static fromObject(object: { [k: string]: any }): google.api.PythonSettings.ExperimentalFeatures;
+
+ /**
+ * Creates a plain object from an ExperimentalFeatures message. Also converts values to other types if specified.
+ * @param message ExperimentalFeatures
+ * @param [options] Conversion options
+ * @returns Plain object
+ */
+ public static toObject(message: google.api.PythonSettings.ExperimentalFeatures, options?: $protobuf.IConversionOptions): { [k: string]: any };
+
+ /**
+ * Converts this ExperimentalFeatures to JSON.
+ * @returns JSON object
+ */
+ public toJSON(): { [k: string]: any };
+
+ /**
+ * Gets the default type url for ExperimentalFeatures
+ * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com")
+ * @returns The default type url
+ */
+ public static getTypeUrl(typeUrlPrefix?: string): string;
+ }
+ }
+
/** Properties of a NodeSettings. */
interface INodeSettings {
@@ -13462,6 +13586,9 @@ export namespace google {
/** GoSettings common */
common?: (google.api.ICommonLanguageSettings|null);
+
+ /** GoSettings renamedServices */
+ renamedServices?: ({ [k: string]: string }|null);
}
/** Represents a GoSettings. */
@@ -13476,6 +13603,9 @@ export namespace google {
/** GoSettings common. */
public common?: (google.api.ICommonLanguageSettings|null);
+ /** GoSettings renamedServices. */
+ public renamedServices: { [k: string]: string };
+
/**
* Creates a new GoSettings instance using the specified properties.
* @param [properties] Properties to set
@@ -13800,6 +13930,109 @@ export namespace google {
PACKAGE_MANAGER = 20
}
+ /** Properties of a SelectiveGapicGeneration. */
+ interface ISelectiveGapicGeneration {
+
+ /** SelectiveGapicGeneration methods */
+ methods?: (string[]|null);
+
+ /** SelectiveGapicGeneration generateOmittedAsInternal */
+ generateOmittedAsInternal?: (boolean|null);
+ }
+
+ /** Represents a SelectiveGapicGeneration. */
+ class SelectiveGapicGeneration implements ISelectiveGapicGeneration {
+
+ /**
+ * Constructs a new SelectiveGapicGeneration.
+ * @param [properties] Properties to set
+ */
+ constructor(properties?: google.api.ISelectiveGapicGeneration);
+
+ /** SelectiveGapicGeneration methods. */
+ public methods: string[];
+
+ /** SelectiveGapicGeneration generateOmittedAsInternal. */
+ public generateOmittedAsInternal: boolean;
+
+ /**
+ * Creates a new SelectiveGapicGeneration instance using the specified properties.
+ * @param [properties] Properties to set
+ * @returns SelectiveGapicGeneration instance
+ */
+ public static create(properties?: google.api.ISelectiveGapicGeneration): google.api.SelectiveGapicGeneration;
+
+ /**
+ * Encodes the specified SelectiveGapicGeneration message. Does not implicitly {@link google.api.SelectiveGapicGeneration.verify|verify} messages.
+ * @param message SelectiveGapicGeneration message or plain object to encode
+ * @param [writer] Writer to encode to
+ * @returns Writer
+ */
+ public static encode(message: google.api.ISelectiveGapicGeneration, writer?: $protobuf.Writer): $protobuf.Writer;
+
+ /**
+ * Encodes the specified SelectiveGapicGeneration message, length delimited. Does not implicitly {@link google.api.SelectiveGapicGeneration.verify|verify} messages.
+ * @param message SelectiveGapicGeneration message or plain object to encode
+ * @param [writer] Writer to encode to
+ * @returns Writer
+ */
+ public static encodeDelimited(message: google.api.ISelectiveGapicGeneration, writer?: $protobuf.Writer): $protobuf.Writer;
+
+ /**
+ * Decodes a SelectiveGapicGeneration message from the specified reader or buffer.
+ * @param reader Reader or buffer to decode from
+ * @param [length] Message length if known beforehand
+ * @returns SelectiveGapicGeneration
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.api.SelectiveGapicGeneration;
+
+ /**
+ * Decodes a SelectiveGapicGeneration message from the specified reader or buffer, length delimited.
+ * @param reader Reader or buffer to decode from
+ * @returns SelectiveGapicGeneration
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.api.SelectiveGapicGeneration;
+
+ /**
+ * Verifies a SelectiveGapicGeneration message.
+ * @param message Plain object to verify
+ * @returns `null` if valid, otherwise the reason why it is not
+ */
+ public static verify(message: { [k: string]: any }): (string|null);
+
+ /**
+ * Creates a SelectiveGapicGeneration message from a plain object. Also converts values to their respective internal types.
+ * @param object Plain object
+ * @returns SelectiveGapicGeneration
+ */
+ public static fromObject(object: { [k: string]: any }): google.api.SelectiveGapicGeneration;
+
+ /**
+ * Creates a plain object from a SelectiveGapicGeneration message. Also converts values to other types if specified.
+ * @param message SelectiveGapicGeneration
+ * @param [options] Conversion options
+ * @returns Plain object
+ */
+ public static toObject(message: google.api.SelectiveGapicGeneration, options?: $protobuf.IConversionOptions): { [k: string]: any };
+
+ /**
+ * Converts this SelectiveGapicGeneration to JSON.
+ * @returns JSON object
+ */
+ public toJSON(): { [k: string]: any };
+
+ /**
+ * Gets the default type url for SelectiveGapicGeneration
+ * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com")
+ * @returns The default type url
+ */
+ public static getTypeUrl(typeUrlPrefix?: string): string;
+ }
+
/** LaunchStage enum. */
enum LaunchStage {
LAUNCH_STAGE_UNSPECIFIED = 0,
@@ -13916,6 +14149,7 @@ export namespace google {
/** Edition enum. */
enum Edition {
EDITION_UNKNOWN = 0,
+ EDITION_LEGACY = 900,
EDITION_PROTO2 = 998,
EDITION_PROTO3 = 999,
EDITION_2023 = 1000,
@@ -13946,6 +14180,9 @@ export namespace google {
/** FileDescriptorProto weakDependency */
weakDependency?: (number[]|null);
+ /** FileDescriptorProto optionDependency */
+ optionDependency?: (string[]|null);
+
/** FileDescriptorProto messageType */
messageType?: (google.protobuf.IDescriptorProto[]|null);
@@ -13995,6 +14232,9 @@ export namespace google {
/** FileDescriptorProto weakDependency. */
public weakDependency: number[];
+ /** FileDescriptorProto optionDependency. */
+ public optionDependency: string[];
+
/** FileDescriptorProto messageType. */
public messageType: google.protobuf.IDescriptorProto[];
@@ -14129,6 +14369,9 @@ export namespace google {
/** DescriptorProto reservedName */
reservedName?: (string[]|null);
+
+ /** DescriptorProto visibility */
+ visibility?: (google.protobuf.SymbolVisibility|keyof typeof google.protobuf.SymbolVisibility|null);
}
/** Represents a DescriptorProto. */
@@ -14170,6 +14413,9 @@ export namespace google {
/** DescriptorProto reservedName. */
public reservedName: string[];
+ /** DescriptorProto visibility. */
+ public visibility: (google.protobuf.SymbolVisibility|keyof typeof google.protobuf.SymbolVisibility);
+
/**
* Creates a new DescriptorProto instance using the specified properties.
* @param [properties] Properties to set
@@ -15017,6 +15263,9 @@ export namespace google {
/** EnumDescriptorProto reservedName */
reservedName?: (string[]|null);
+
+ /** EnumDescriptorProto visibility */
+ visibility?: (google.protobuf.SymbolVisibility|keyof typeof google.protobuf.SymbolVisibility|null);
}
/** Represents an EnumDescriptorProto. */
@@ -15043,6 +15292,9 @@ export namespace google {
/** EnumDescriptorProto reservedName. */
public reservedName: string[];
+ /** EnumDescriptorProto visibility. */
+ public visibility: (google.protobuf.SymbolVisibility|keyof typeof google.protobuf.SymbolVisibility);
+
/**
* Creates a new EnumDescriptorProto instance using the specified properties.
* @param [properties] Properties to set
@@ -15977,6 +16229,9 @@ export namespace google {
/** FieldOptions features */
features?: (google.protobuf.IFeatureSet|null);
+ /** FieldOptions featureSupport */
+ featureSupport?: (google.protobuf.FieldOptions.IFeatureSupport|null);
+
/** FieldOptions uninterpretedOption */
uninterpretedOption?: (google.protobuf.IUninterpretedOption[]|null);
@@ -16032,6 +16287,9 @@ export namespace google {
/** FieldOptions features. */
public features?: (google.protobuf.IFeatureSet|null);
+ /** FieldOptions featureSupport. */
+ public featureSupport?: (google.protobuf.FieldOptions.IFeatureSupport|null);
+
/** FieldOptions uninterpretedOption. */
public uninterpretedOption: google.protobuf.IUninterpretedOption[];
@@ -16252,6 +16510,121 @@ export namespace google {
*/
public static getTypeUrl(typeUrlPrefix?: string): string;
}
+
+ /** Properties of a FeatureSupport. */
+ interface IFeatureSupport {
+
+ /** FeatureSupport editionIntroduced */
+ editionIntroduced?: (google.protobuf.Edition|keyof typeof google.protobuf.Edition|null);
+
+ /** FeatureSupport editionDeprecated */
+ editionDeprecated?: (google.protobuf.Edition|keyof typeof google.protobuf.Edition|null);
+
+ /** FeatureSupport deprecationWarning */
+ deprecationWarning?: (string|null);
+
+ /** FeatureSupport editionRemoved */
+ editionRemoved?: (google.protobuf.Edition|keyof typeof google.protobuf.Edition|null);
+ }
+
+ /** Represents a FeatureSupport. */
+ class FeatureSupport implements IFeatureSupport {
+
+ /**
+ * Constructs a new FeatureSupport.
+ * @param [properties] Properties to set
+ */
+ constructor(properties?: google.protobuf.FieldOptions.IFeatureSupport);
+
+ /** FeatureSupport editionIntroduced. */
+ public editionIntroduced: (google.protobuf.Edition|keyof typeof google.protobuf.Edition);
+
+ /** FeatureSupport editionDeprecated. */
+ public editionDeprecated: (google.protobuf.Edition|keyof typeof google.protobuf.Edition);
+
+ /** FeatureSupport deprecationWarning. */
+ public deprecationWarning: string;
+
+ /** FeatureSupport editionRemoved. */
+ public editionRemoved: (google.protobuf.Edition|keyof typeof google.protobuf.Edition);
+
+ /**
+ * Creates a new FeatureSupport instance using the specified properties.
+ * @param [properties] Properties to set
+ * @returns FeatureSupport instance
+ */
+ public static create(properties?: google.protobuf.FieldOptions.IFeatureSupport): google.protobuf.FieldOptions.FeatureSupport;
+
+ /**
+ * Encodes the specified FeatureSupport message. Does not implicitly {@link google.protobuf.FieldOptions.FeatureSupport.verify|verify} messages.
+ * @param message FeatureSupport message or plain object to encode
+ * @param [writer] Writer to encode to
+ * @returns Writer
+ */
+ public static encode(message: google.protobuf.FieldOptions.IFeatureSupport, writer?: $protobuf.Writer): $protobuf.Writer;
+
+ /**
+ * Encodes the specified FeatureSupport message, length delimited. Does not implicitly {@link google.protobuf.FieldOptions.FeatureSupport.verify|verify} messages.
+ * @param message FeatureSupport message or plain object to encode
+ * @param [writer] Writer to encode to
+ * @returns Writer
+ */
+ public static encodeDelimited(message: google.protobuf.FieldOptions.IFeatureSupport, writer?: $protobuf.Writer): $protobuf.Writer;
+
+ /**
+ * Decodes a FeatureSupport message from the specified reader or buffer.
+ * @param reader Reader or buffer to decode from
+ * @param [length] Message length if known beforehand
+ * @returns FeatureSupport
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.FieldOptions.FeatureSupport;
+
+ /**
+ * Decodes a FeatureSupport message from the specified reader or buffer, length delimited.
+ * @param reader Reader or buffer to decode from
+ * @returns FeatureSupport
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.FieldOptions.FeatureSupport;
+
+ /**
+ * Verifies a FeatureSupport message.
+ * @param message Plain object to verify
+ * @returns `null` if valid, otherwise the reason why it is not
+ */
+ public static verify(message: { [k: string]: any }): (string|null);
+
+ /**
+ * Creates a FeatureSupport message from a plain object. Also converts values to their respective internal types.
+ * @param object Plain object
+ * @returns FeatureSupport
+ */
+ public static fromObject(object: { [k: string]: any }): google.protobuf.FieldOptions.FeatureSupport;
+
+ /**
+ * Creates a plain object from a FeatureSupport message. Also converts values to other types if specified.
+ * @param message FeatureSupport
+ * @param [options] Conversion options
+ * @returns Plain object
+ */
+ public static toObject(message: google.protobuf.FieldOptions.FeatureSupport, options?: $protobuf.IConversionOptions): { [k: string]: any };
+
+ /**
+ * Converts this FeatureSupport to JSON.
+ * @returns JSON object
+ */
+ public toJSON(): { [k: string]: any };
+
+ /**
+ * Gets the default type url for FeatureSupport
+ * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com")
+ * @returns The default type url
+ */
+ public static getTypeUrl(typeUrlPrefix?: string): string;
+ }
}
/** Properties of an OneofOptions. */
@@ -16490,6 +16863,9 @@ export namespace google {
/** EnumValueOptions debugRedact */
debugRedact?: (boolean|null);
+ /** EnumValueOptions featureSupport */
+ featureSupport?: (google.protobuf.FieldOptions.IFeatureSupport|null);
+
/** EnumValueOptions uninterpretedOption */
uninterpretedOption?: (google.protobuf.IUninterpretedOption[]|null);
}
@@ -16512,6 +16888,9 @@ export namespace google {
/** EnumValueOptions debugRedact. */
public debugRedact: boolean;
+ /** EnumValueOptions featureSupport. */
+ public featureSupport?: (google.protobuf.FieldOptions.IFeatureSupport|null);
+
/** EnumValueOptions uninterpretedOption. */
public uninterpretedOption: google.protobuf.IUninterpretedOption[];
@@ -17101,6 +17480,12 @@ export namespace google {
/** FeatureSet jsonFormat */
jsonFormat?: (google.protobuf.FeatureSet.JsonFormat|keyof typeof google.protobuf.FeatureSet.JsonFormat|null);
+
+ /** FeatureSet enforceNamingStyle */
+ enforceNamingStyle?: (google.protobuf.FeatureSet.EnforceNamingStyle|keyof typeof google.protobuf.FeatureSet.EnforceNamingStyle|null);
+
+ /** FeatureSet defaultSymbolVisibility */
+ defaultSymbolVisibility?: (google.protobuf.FeatureSet.VisibilityFeature.DefaultSymbolVisibility|keyof typeof google.protobuf.FeatureSet.VisibilityFeature.DefaultSymbolVisibility|null);
}
/** Represents a FeatureSet. */
@@ -17130,6 +17515,12 @@ export namespace google {
/** FeatureSet jsonFormat. */
public jsonFormat: (google.protobuf.FeatureSet.JsonFormat|keyof typeof google.protobuf.FeatureSet.JsonFormat);
+ /** FeatureSet enforceNamingStyle. */
+ public enforceNamingStyle: (google.protobuf.FeatureSet.EnforceNamingStyle|keyof typeof google.protobuf.FeatureSet.EnforceNamingStyle);
+
+ /** FeatureSet defaultSymbolVisibility. */
+ public defaultSymbolVisibility: (google.protobuf.FeatureSet.VisibilityFeature.DefaultSymbolVisibility|keyof typeof google.protobuf.FeatureSet.VisibilityFeature.DefaultSymbolVisibility);
+
/**
* Creates a new FeatureSet instance using the specified properties.
* @param [properties] Properties to set
@@ -17252,6 +17643,116 @@ export namespace google {
ALLOW = 1,
LEGACY_BEST_EFFORT = 2
}
+
+ /** EnforceNamingStyle enum. */
+ enum EnforceNamingStyle {
+ ENFORCE_NAMING_STYLE_UNKNOWN = 0,
+ STYLE2024 = 1,
+ STYLE_LEGACY = 2
+ }
+
+ /** Properties of a VisibilityFeature. */
+ interface IVisibilityFeature {
+ }
+
+ /** Represents a VisibilityFeature. */
+ class VisibilityFeature implements IVisibilityFeature {
+
+ /**
+ * Constructs a new VisibilityFeature.
+ * @param [properties] Properties to set
+ */
+ constructor(properties?: google.protobuf.FeatureSet.IVisibilityFeature);
+
+ /**
+ * Creates a new VisibilityFeature instance using the specified properties.
+ * @param [properties] Properties to set
+ * @returns VisibilityFeature instance
+ */
+ public static create(properties?: google.protobuf.FeatureSet.IVisibilityFeature): google.protobuf.FeatureSet.VisibilityFeature;
+
+ /**
+ * Encodes the specified VisibilityFeature message. Does not implicitly {@link google.protobuf.FeatureSet.VisibilityFeature.verify|verify} messages.
+ * @param message VisibilityFeature message or plain object to encode
+ * @param [writer] Writer to encode to
+ * @returns Writer
+ */
+ public static encode(message: google.protobuf.FeatureSet.IVisibilityFeature, writer?: $protobuf.Writer): $protobuf.Writer;
+
+ /**
+ * Encodes the specified VisibilityFeature message, length delimited. Does not implicitly {@link google.protobuf.FeatureSet.VisibilityFeature.verify|verify} messages.
+ * @param message VisibilityFeature message or plain object to encode
+ * @param [writer] Writer to encode to
+ * @returns Writer
+ */
+ public static encodeDelimited(message: google.protobuf.FeatureSet.IVisibilityFeature, writer?: $protobuf.Writer): $protobuf.Writer;
+
+ /**
+ * Decodes a VisibilityFeature message from the specified reader or buffer.
+ * @param reader Reader or buffer to decode from
+ * @param [length] Message length if known beforehand
+ * @returns VisibilityFeature
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.FeatureSet.VisibilityFeature;
+
+ /**
+ * Decodes a VisibilityFeature message from the specified reader or buffer, length delimited.
+ * @param reader Reader or buffer to decode from
+ * @returns VisibilityFeature
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.FeatureSet.VisibilityFeature;
+
+ /**
+ * Verifies a VisibilityFeature message.
+ * @param message Plain object to verify
+ * @returns `null` if valid, otherwise the reason why it is not
+ */
+ public static verify(message: { [k: string]: any }): (string|null);
+
+ /**
+ * Creates a VisibilityFeature message from a plain object. Also converts values to their respective internal types.
+ * @param object Plain object
+ * @returns VisibilityFeature
+ */
+ public static fromObject(object: { [k: string]: any }): google.protobuf.FeatureSet.VisibilityFeature;
+
+ /**
+ * Creates a plain object from a VisibilityFeature message. Also converts values to other types if specified.
+ * @param message VisibilityFeature
+ * @param [options] Conversion options
+ * @returns Plain object
+ */
+ public static toObject(message: google.protobuf.FeatureSet.VisibilityFeature, options?: $protobuf.IConversionOptions): { [k: string]: any };
+
+ /**
+ * Converts this VisibilityFeature to JSON.
+ * @returns JSON object
+ */
+ public toJSON(): { [k: string]: any };
+
+ /**
+ * Gets the default type url for VisibilityFeature
+ * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com")
+ * @returns The default type url
+ */
+ public static getTypeUrl(typeUrlPrefix?: string): string;
+ }
+
+ namespace VisibilityFeature {
+
+ /** DefaultSymbolVisibility enum. */
+ enum DefaultSymbolVisibility {
+ DEFAULT_SYMBOL_VISIBILITY_UNKNOWN = 0,
+ EXPORT_ALL = 1,
+ EXPORT_TOP_LEVEL = 2,
+ LOCAL_ALL = 3,
+ STRICT = 4
+ }
+ }
}
/** Properties of a FeatureSetDefaults. */
@@ -17371,8 +17872,11 @@ export namespace google {
/** FeatureSetEditionDefault edition */
edition?: (google.protobuf.Edition|keyof typeof google.protobuf.Edition|null);
- /** FeatureSetEditionDefault features */
- features?: (google.protobuf.IFeatureSet|null);
+ /** FeatureSetEditionDefault overridableFeatures */
+ overridableFeatures?: (google.protobuf.IFeatureSet|null);
+
+ /** FeatureSetEditionDefault fixedFeatures */
+ fixedFeatures?: (google.protobuf.IFeatureSet|null);
}
/** Represents a FeatureSetEditionDefault. */
@@ -17387,8 +17891,11 @@ export namespace google {
/** FeatureSetEditionDefault edition. */
public edition: (google.protobuf.Edition|keyof typeof google.protobuf.Edition);
- /** FeatureSetEditionDefault features. */
- public features?: (google.protobuf.IFeatureSet|null);
+ /** FeatureSetEditionDefault overridableFeatures. */
+ public overridableFeatures?: (google.protobuf.IFeatureSet|null);
+
+ /** FeatureSetEditionDefault fixedFeatures. */
+ public fixedFeatures?: (google.protobuf.IFeatureSet|null);
/**
* Creates a new FeatureSetEditionDefault instance using the specified properties.
@@ -17921,6 +18428,13 @@ export namespace google {
}
}
+ /** SymbolVisibility enum. */
+ enum SymbolVisibility {
+ VISIBILITY_UNSET = 0,
+ VISIBILITY_LOCAL = 1,
+ VISIBILITY_EXPORT = 2
+ }
+
/** Properties of a Timestamp. */
interface ITimestamp {
diff --git a/packages/google-ads-datamanager/protos/protos.js b/packages/google-ads-datamanager/protos/protos.js
index ef327392a2e7..8f273bdf612d 100644
--- a/packages/google-ads-datamanager/protos/protos.js
+++ b/packages/google-ads-datamanager/protos/protos.js
@@ -31944,6 +31944,7 @@
* @interface ICommonLanguageSettings
* @property {string|null} [referenceDocsUri] CommonLanguageSettings referenceDocsUri
* @property {Array.|null} [destinations] CommonLanguageSettings destinations
+ * @property {google.api.ISelectiveGapicGeneration|null} [selectiveGapicGeneration] CommonLanguageSettings selectiveGapicGeneration
*/
/**
@@ -31978,6 +31979,14 @@
*/
CommonLanguageSettings.prototype.destinations = $util.emptyArray;
+ /**
+ * CommonLanguageSettings selectiveGapicGeneration.
+ * @member {google.api.ISelectiveGapicGeneration|null|undefined} selectiveGapicGeneration
+ * @memberof google.api.CommonLanguageSettings
+ * @instance
+ */
+ CommonLanguageSettings.prototype.selectiveGapicGeneration = null;
+
/**
* Creates a new CommonLanguageSettings instance using the specified properties.
* @function create
@@ -32010,6 +32019,8 @@
writer.int32(message.destinations[i]);
writer.ldelim();
}
+ if (message.selectiveGapicGeneration != null && Object.hasOwnProperty.call(message, "selectiveGapicGeneration"))
+ $root.google.api.SelectiveGapicGeneration.encode(message.selectiveGapicGeneration, writer.uint32(/* id 3, wireType 2 =*/26).fork()).ldelim();
return writer;
};
@@ -32061,6 +32072,10 @@
message.destinations.push(reader.int32());
break;
}
+ case 3: {
+ message.selectiveGapicGeneration = $root.google.api.SelectiveGapicGeneration.decode(reader, reader.uint32());
+ break;
+ }
default:
reader.skipType(tag & 7);
break;
@@ -32112,6 +32127,11 @@
break;
}
}
+ if (message.selectiveGapicGeneration != null && message.hasOwnProperty("selectiveGapicGeneration")) {
+ var error = $root.google.api.SelectiveGapicGeneration.verify(message.selectiveGapicGeneration);
+ if (error)
+ return "selectiveGapicGeneration." + error;
+ }
return null;
};
@@ -32154,6 +32174,11 @@
break;
}
}
+ if (object.selectiveGapicGeneration != null) {
+ if (typeof object.selectiveGapicGeneration !== "object")
+ throw TypeError(".google.api.CommonLanguageSettings.selectiveGapicGeneration: object expected");
+ message.selectiveGapicGeneration = $root.google.api.SelectiveGapicGeneration.fromObject(object.selectiveGapicGeneration);
+ }
return message;
};
@@ -32172,8 +32197,10 @@
var object = {};
if (options.arrays || options.defaults)
object.destinations = [];
- if (options.defaults)
+ if (options.defaults) {
object.referenceDocsUri = "";
+ object.selectiveGapicGeneration = null;
+ }
if (message.referenceDocsUri != null && message.hasOwnProperty("referenceDocsUri"))
object.referenceDocsUri = message.referenceDocsUri;
if (message.destinations && message.destinations.length) {
@@ -32181,6 +32208,8 @@
for (var j = 0; j < message.destinations.length; ++j)
object.destinations[j] = options.enums === String ? $root.google.api.ClientLibraryDestination[message.destinations[j]] === undefined ? message.destinations[j] : $root.google.api.ClientLibraryDestination[message.destinations[j]] : message.destinations[j];
}
+ if (message.selectiveGapicGeneration != null && message.hasOwnProperty("selectiveGapicGeneration"))
+ object.selectiveGapicGeneration = $root.google.api.SelectiveGapicGeneration.toObject(message.selectiveGapicGeneration, options);
return object;
};
@@ -34003,6 +34032,7 @@
* @memberof google.api
* @interface IPythonSettings
* @property {google.api.ICommonLanguageSettings|null} [common] PythonSettings common
+ * @property {google.api.PythonSettings.IExperimentalFeatures|null} [experimentalFeatures] PythonSettings experimentalFeatures
*/
/**
@@ -34028,6 +34058,14 @@
*/
PythonSettings.prototype.common = null;
+ /**
+ * PythonSettings experimentalFeatures.
+ * @member {google.api.PythonSettings.IExperimentalFeatures|null|undefined} experimentalFeatures
+ * @memberof google.api.PythonSettings
+ * @instance
+ */
+ PythonSettings.prototype.experimentalFeatures = null;
+
/**
* Creates a new PythonSettings instance using the specified properties.
* @function create
@@ -34054,6 +34092,8 @@
writer = $Writer.create();
if (message.common != null && Object.hasOwnProperty.call(message, "common"))
$root.google.api.CommonLanguageSettings.encode(message.common, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim();
+ if (message.experimentalFeatures != null && Object.hasOwnProperty.call(message, "experimentalFeatures"))
+ $root.google.api.PythonSettings.ExperimentalFeatures.encode(message.experimentalFeatures, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim();
return writer;
};
@@ -34094,6 +34134,10 @@
message.common = $root.google.api.CommonLanguageSettings.decode(reader, reader.uint32());
break;
}
+ case 2: {
+ message.experimentalFeatures = $root.google.api.PythonSettings.ExperimentalFeatures.decode(reader, reader.uint32());
+ break;
+ }
default:
reader.skipType(tag & 7);
break;
@@ -34134,6 +34178,11 @@
if (error)
return "common." + error;
}
+ if (message.experimentalFeatures != null && message.hasOwnProperty("experimentalFeatures")) {
+ var error = $root.google.api.PythonSettings.ExperimentalFeatures.verify(message.experimentalFeatures);
+ if (error)
+ return "experimentalFeatures." + error;
+ }
return null;
};
@@ -34154,6 +34203,11 @@
throw TypeError(".google.api.PythonSettings.common: object expected");
message.common = $root.google.api.CommonLanguageSettings.fromObject(object.common);
}
+ if (object.experimentalFeatures != null) {
+ if (typeof object.experimentalFeatures !== "object")
+ throw TypeError(".google.api.PythonSettings.experimentalFeatures: object expected");
+ message.experimentalFeatures = $root.google.api.PythonSettings.ExperimentalFeatures.fromObject(object.experimentalFeatures);
+ }
return message;
};
@@ -34170,10 +34224,14 @@
if (!options)
options = {};
var object = {};
- if (options.defaults)
+ if (options.defaults) {
object.common = null;
+ object.experimentalFeatures = null;
+ }
if (message.common != null && message.hasOwnProperty("common"))
object.common = $root.google.api.CommonLanguageSettings.toObject(message.common, options);
+ if (message.experimentalFeatures != null && message.hasOwnProperty("experimentalFeatures"))
+ object.experimentalFeatures = $root.google.api.PythonSettings.ExperimentalFeatures.toObject(message.experimentalFeatures, options);
return object;
};
@@ -34203,6 +34261,258 @@
return typeUrlPrefix + "/google.api.PythonSettings";
};
+ PythonSettings.ExperimentalFeatures = (function() {
+
+ /**
+ * Properties of an ExperimentalFeatures.
+ * @memberof google.api.PythonSettings
+ * @interface IExperimentalFeatures
+ * @property {boolean|null} [restAsyncIoEnabled] ExperimentalFeatures restAsyncIoEnabled
+ * @property {boolean|null} [protobufPythonicTypesEnabled] ExperimentalFeatures protobufPythonicTypesEnabled
+ * @property {boolean|null} [unversionedPackageDisabled] ExperimentalFeatures unversionedPackageDisabled
+ */
+
+ /**
+ * Constructs a new ExperimentalFeatures.
+ * @memberof google.api.PythonSettings
+ * @classdesc Represents an ExperimentalFeatures.
+ * @implements IExperimentalFeatures
+ * @constructor
+ * @param {google.api.PythonSettings.IExperimentalFeatures=} [properties] Properties to set
+ */
+ function ExperimentalFeatures(properties) {
+ if (properties)
+ for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i)
+ if (properties[keys[i]] != null)
+ this[keys[i]] = properties[keys[i]];
+ }
+
+ /**
+ * ExperimentalFeatures restAsyncIoEnabled.
+ * @member {boolean} restAsyncIoEnabled
+ * @memberof google.api.PythonSettings.ExperimentalFeatures
+ * @instance
+ */
+ ExperimentalFeatures.prototype.restAsyncIoEnabled = false;
+
+ /**
+ * ExperimentalFeatures protobufPythonicTypesEnabled.
+ * @member {boolean} protobufPythonicTypesEnabled
+ * @memberof google.api.PythonSettings.ExperimentalFeatures
+ * @instance
+ */
+ ExperimentalFeatures.prototype.protobufPythonicTypesEnabled = false;
+
+ /**
+ * ExperimentalFeatures unversionedPackageDisabled.
+ * @member {boolean} unversionedPackageDisabled
+ * @memberof google.api.PythonSettings.ExperimentalFeatures
+ * @instance
+ */
+ ExperimentalFeatures.prototype.unversionedPackageDisabled = false;
+
+ /**
+ * Creates a new ExperimentalFeatures instance using the specified properties.
+ * @function create
+ * @memberof google.api.PythonSettings.ExperimentalFeatures
+ * @static
+ * @param {google.api.PythonSettings.IExperimentalFeatures=} [properties] Properties to set
+ * @returns {google.api.PythonSettings.ExperimentalFeatures} ExperimentalFeatures instance
+ */
+ ExperimentalFeatures.create = function create(properties) {
+ return new ExperimentalFeatures(properties);
+ };
+
+ /**
+ * Encodes the specified ExperimentalFeatures message. Does not implicitly {@link google.api.PythonSettings.ExperimentalFeatures.verify|verify} messages.
+ * @function encode
+ * @memberof google.api.PythonSettings.ExperimentalFeatures
+ * @static
+ * @param {google.api.PythonSettings.IExperimentalFeatures} message ExperimentalFeatures message or plain object to encode
+ * @param {$protobuf.Writer} [writer] Writer to encode to
+ * @returns {$protobuf.Writer} Writer
+ */
+ ExperimentalFeatures.encode = function encode(message, writer) {
+ if (!writer)
+ writer = $Writer.create();
+ if (message.restAsyncIoEnabled != null && Object.hasOwnProperty.call(message, "restAsyncIoEnabled"))
+ writer.uint32(/* id 1, wireType 0 =*/8).bool(message.restAsyncIoEnabled);
+ if (message.protobufPythonicTypesEnabled != null && Object.hasOwnProperty.call(message, "protobufPythonicTypesEnabled"))
+ writer.uint32(/* id 2, wireType 0 =*/16).bool(message.protobufPythonicTypesEnabled);
+ if (message.unversionedPackageDisabled != null && Object.hasOwnProperty.call(message, "unversionedPackageDisabled"))
+ writer.uint32(/* id 3, wireType 0 =*/24).bool(message.unversionedPackageDisabled);
+ return writer;
+ };
+
+ /**
+ * Encodes the specified ExperimentalFeatures message, length delimited. Does not implicitly {@link google.api.PythonSettings.ExperimentalFeatures.verify|verify} messages.
+ * @function encodeDelimited
+ * @memberof google.api.PythonSettings.ExperimentalFeatures
+ * @static
+ * @param {google.api.PythonSettings.IExperimentalFeatures} message ExperimentalFeatures message or plain object to encode
+ * @param {$protobuf.Writer} [writer] Writer to encode to
+ * @returns {$protobuf.Writer} Writer
+ */
+ ExperimentalFeatures.encodeDelimited = function encodeDelimited(message, writer) {
+ return this.encode(message, writer).ldelim();
+ };
+
+ /**
+ * Decodes an ExperimentalFeatures message from the specified reader or buffer.
+ * @function decode
+ * @memberof google.api.PythonSettings.ExperimentalFeatures
+ * @static
+ * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from
+ * @param {number} [length] Message length if known beforehand
+ * @returns {google.api.PythonSettings.ExperimentalFeatures} ExperimentalFeatures
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ ExperimentalFeatures.decode = function decode(reader, length, error) {
+ if (!(reader instanceof $Reader))
+ reader = $Reader.create(reader);
+ var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.api.PythonSettings.ExperimentalFeatures();
+ while (reader.pos < end) {
+ var tag = reader.uint32();
+ if (tag === error)
+ break;
+ switch (tag >>> 3) {
+ case 1: {
+ message.restAsyncIoEnabled = reader.bool();
+ break;
+ }
+ case 2: {
+ message.protobufPythonicTypesEnabled = reader.bool();
+ break;
+ }
+ case 3: {
+ message.unversionedPackageDisabled = reader.bool();
+ break;
+ }
+ default:
+ reader.skipType(tag & 7);
+ break;
+ }
+ }
+ return message;
+ };
+
+ /**
+ * Decodes an ExperimentalFeatures message from the specified reader or buffer, length delimited.
+ * @function decodeDelimited
+ * @memberof google.api.PythonSettings.ExperimentalFeatures
+ * @static
+ * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from
+ * @returns {google.api.PythonSettings.ExperimentalFeatures} ExperimentalFeatures
+ * @throws {Error} If the payload is not a reader or valid buffer
+ * @throws {$protobuf.util.ProtocolError} If required fields are missing
+ */
+ ExperimentalFeatures.decodeDelimited = function decodeDelimited(reader) {
+ if (!(reader instanceof $Reader))
+ reader = new $Reader(reader);
+ return this.decode(reader, reader.uint32());
+ };
+
+ /**
+ * Verifies an ExperimentalFeatures message.
+ * @function verify
+ * @memberof google.api.PythonSettings.ExperimentalFeatures
+ * @static
+         * @param {Object.<string,*>} message Plain object to verify
+ * @returns {string|null} `null` if valid, otherwise the reason why it is not
+ */
+ ExperimentalFeatures.verify = function verify(message) {
+ if (typeof message !== "object" || message === null)
+ return "object expected";
+ if (message.restAsyncIoEnabled != null && message.hasOwnProperty("restAsyncIoEnabled"))
+ if (typeof message.restAsyncIoEnabled !== "boolean")
+ return "restAsyncIoEnabled: boolean expected";
+ if (message.protobufPythonicTypesEnabled != null && message.hasOwnProperty("protobufPythonicTypesEnabled"))
+ if (typeof message.protobufPythonicTypesEnabled !== "boolean")
+ return "protobufPythonicTypesEnabled: boolean expected";
+ if (message.unversionedPackageDisabled != null && message.hasOwnProperty("unversionedPackageDisabled"))
+ if (typeof message.unversionedPackageDisabled !== "boolean")
+ return "unversionedPackageDisabled: boolean expected";
+ return null;
+ };
+
+ /**
+ * Creates an ExperimentalFeatures message from a plain object. Also converts values to their respective internal types.
+ * @function fromObject
+ * @memberof google.api.PythonSettings.ExperimentalFeatures
+ * @static
+ * @param {Object.