From bd58490b60fd202596cf9533ce5f1de9951dcf29 Mon Sep 17 00:00:00 2001
From: Yoshi Automation Bot
Date: Wed, 1 Oct 2025 12:58:51 -0700
Subject: [PATCH] chore: Add integration test for JSON uploads over 5MB

---
 google-cloud-bigquery/Gemfile                      |  1 +
 .../acceptance/bigquery/large_json_test.rb         | 61 +++++++++++++++++++
 2 files changed, 62 insertions(+)
 create mode 100644 google-cloud-bigquery/acceptance/bigquery/large_json_test.rb

diff --git a/google-cloud-bigquery/Gemfile b/google-cloud-bigquery/Gemfile
index 9b42a5f8075b..061cf2d0bca3 100644
--- a/google-cloud-bigquery/Gemfile
+++ b/google-cloud-bigquery/Gemfile
@@ -25,6 +25,7 @@ gem "google-cloud-bigquery-connection-v1", ">= 1.3.0", "< 1.5"
 gem "google-cloud-data_catalog", path: "../google-cloud-data_catalog"
 gem "google-cloud-storage", path: "../google-cloud-storage"
 gem "google-style", "~> 1.30.1"
+gem "httpclient", "~> 2.8", group: :development
 gem "minitest", "~> 5.16"
 gem "minitest-autotest", "~> 1.0"
 gem "minitest-focus", "~> 1.1"
diff --git a/google-cloud-bigquery/acceptance/bigquery/large_json_test.rb b/google-cloud-bigquery/acceptance/bigquery/large_json_test.rb
new file mode 100644
index 000000000000..e9447909ae5c
--- /dev/null
+++ b/google-cloud-bigquery/acceptance/bigquery/large_json_test.rb
@@ -0,0 +1,61 @@
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+require "bigquery_helper"
+require "securerandom"
+require "json"
+require "httpclient"
+
+describe "BigQuery Large JSON", :bigquery do
+  let(:dataset_id) { "ruby_acceptance_#{SecureRandom.hex(4)}" }
+  let(:dataset) do
+    d = bigquery.dataset dataset_id
+    if d.nil?
+      d = bigquery.create_dataset dataset_id, location: "US"
+    end
+    d
+  end
+  let(:table_id) { "large_json_test_#{SecureRandom.hex(4)}" }
+  let(:table) do
+    t = dataset.table table_id
+    if t.nil?
+      t = dataset.create_table table_id do |schema|
+        schema.string "id", mode: :required
+        schema.json "data", mode: :required
+      end
+    end
+    t
+  end
+
+  after do
+    dataset.delete force: true
+  end
+
+  it "inserts a large JSON object successfully" do
+    # Generate an 8 MB JSON object
+    large_string = "a" * (8 * 1024 * 1024)
+    json_data = { "large_string" => large_string }.to_json
+    puts "Generated JSON size: #{json_data.bytesize / (1024 * 1024)} MB"
+
+    row = { "id" => SecureRandom.uuid, "data" => json_data }
+
+    begin
+      table.insert [row]
+    rescue Google::Cloud::Error => e
+      puts "Google::Cloud::Error encountered: #{e.message}"
+      puts "Error body: #{e.body}"
+      raise
+    end
+  end
+end
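
Reviewer note (illustrative, not part of the patch): the new acceptance test drives the streaming-insert path (Table#insert) with a JSON column value of roughly 8 MB, i.e. above the 5 MB boundary named in the commit subject. Below is a minimal standalone sketch of the same flow, assuming default application credentials and the standard Google::Cloud::Bigquery.new constructor instead of the suite's bigquery_helper; the dataset and table names ("large_json_demo", "large_json_rows") are hypothetical, while the schema and insert calls mirror the test above.

    require "google/cloud/bigquery"
    require "json"
    require "securerandom"

    # Standard client setup; the acceptance suite gets its `bigquery` client from bigquery_helper instead.
    bigquery = Google::Cloud::Bigquery.new

    # Hypothetical dataset name; created on first run, reused afterwards.
    dataset = bigquery.dataset "large_json_demo"
    dataset ||= bigquery.create_dataset "large_json_demo", location: "US"

    # Hypothetical table name; schema mirrors the new acceptance test.
    table = dataset.table "large_json_rows"
    if table.nil?
      table = dataset.create_table "large_json_rows" do |schema|
        schema.string "id", mode: :required
        schema.json "data", mode: :required
      end
    end

    # Roughly 8 MB of JSON, comfortably above the 5 MB threshold the test targets.
    payload = { "large_string" => "a" * (8 * 1024 * 1024) }.to_json
    table.insert [{ "id" => SecureRandom.uuid, "data" => payload }]

    # Clean up, as the test's `after` block does.
    dataset.delete force: true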