From 37aa7bff6446ac10029b0bd98fd10b105311801a Mon Sep 17 00:00:00 2001 From: Nivedha Date: Mon, 14 Feb 2022 12:12:22 +0530 Subject: [PATCH 01/19] add pg_numeric and tests --- acceptance/data/fixtures.rb | 277 ++++++++++++++++++ .../batch_client/execute_partition_test.rb | 110 +++++++ .../spanner/client/batch_update_test.rb | 214 ++++++++------ .../spanner/client/params/pgnumeric_test.rb | 67 +++++ .../spanner/client/types/pgnumeric_test.rb | 0 .../spanner/database_client_test.rb | 46 ++- .../acceptance/spanner_helper.rb | 246 +++------------- .../lib/google/cloud/spanner/convert.rb | 2 + 8 files changed, 652 insertions(+), 310 deletions(-) create mode 100644 acceptance/data/fixtures.rb create mode 100644 google-cloud-spanner/acceptance/spanner/client/params/pgnumeric_test.rb create mode 100644 google-cloud-spanner/acceptance/spanner/client/types/pgnumeric_test.rb diff --git a/acceptance/data/fixtures.rb b/acceptance/data/fixtures.rb new file mode 100644 index 000000000000..7cf0630c5699 --- /dev/null +++ b/acceptance/data/fixtures.rb @@ -0,0 +1,277 @@ +module Acceptance + + module Fixtures + def stuffs_ddl_statement + if emulator_enabled? 
+ <<-STUFFS + CREATE TABLE stuffs ( + id INT64 NOT NULL, + int INT64, + float FLOAT64, + bool BOOL, + string STRING(MAX), + byte BYTES(MAX), + date DATE, + timestamp TIMESTAMP OPTIONS (allow_commit_timestamp=true), + ints ARRAY, + floats ARRAY, + bools ARRAY, + strings ARRAY, + bytes ARRAY, + dates ARRAY, + timestamps ARRAY + ) PRIMARY KEY (id) + STUFFS + else + <<-STUFFS + CREATE TABLE stuffs ( + id INT64 NOT NULL, + int INT64, + float FLOAT64, + bool BOOL, + string STRING(MAX), + byte BYTES(MAX), + date DATE, + timestamp TIMESTAMP OPTIONS (allow_commit_timestamp=true), + numeric NUMERIC, + json JSON, + ints ARRAY, + floats ARRAY, + bools ARRAY, + strings ARRAY, + bytes ARRAY, + dates ARRAY, + timestamps ARRAY, + numerics ARRAY, + json_array ARRAY + ) PRIMARY KEY (id) + STUFFS + end + end + + def stuff_pg_ddl_statement + <<-STUFFS + CREATE TABLE stuffs ( + id bigint NOT NULL, + "int" bigint, + "float" double precision, + bool boolean, + string character varying, + byte bytea, + PRIMARY KEY(id) + ); + STUFFS + end + + def stuffs_index_statement + "CREATE INDEX IsStuffsIdPrime ON stuffs(bool, id)" + end + + def commit_timestamp_test_ddl_statement + <<-TEST + CREATE TABLE commit_timestamp_test(committs TIMESTAMP OPTIONS (allow_commit_timestamp=true)) PRIMARY KEY (committs) + TEST + end + + def accounts_ddl_statement + <<-ACCOUNTS + CREATE TABLE accounts ( + account_id INT64 NOT NULL, + username STRING(32), + friends ARRAY, + active BOOL NOT NULL, + reputation FLOAT64, + avatar BYTES(8192) + ) PRIMARY KEY (account_id) + ACCOUNTS + end + + def accounts_pg_ddl_statement + <<-ACCOUNTS + CREATE TABLE accounts ( + account_id INT NOT NULL, + username TEXT, + active BOOL NOT NULL, + reputation FLOAT, + avatar bytea, + PRIMARY KEY(account_id) + ); + ACCOUNTS + end + + def lists_ddl_statement + <<-LISTS + CREATE TABLE task_lists ( + account_id INT64 NOT NULL, + task_list_id INT64 NOT NULL, + description STRING(1024) NOT NULL + ) PRIMARY KEY (account_id, task_list_id), + 
INTERLEAVE IN PARENT accounts ON DELETE CASCADE + LISTS + end + + def lists_pg_ddl_statement + <<-LISTS + CREATE TABLE task_lists ( + account_id INT NOT NULL, + task_list_id INT NOT NULL, + description TEXT NOT NULL, + PRIMARY KEY (account_id, task_list_id) + ) INTERLEAVE IN PARENT accounts ON DELETE CASCADE + LISTS + end + + def items_ddl_statement + <<-ITEMS + CREATE TABLE task_items ( + account_id INT64 NOT NULL, + task_list_id INT64 NOT NULL, + task_item_id INT64 NOT NULL, + description STRING(1024) NOT NULL, + active BOOL NOT NULL, + priority INT64 NOT NULL, + due_date DATE, + created_at TIMESTAMP, + updated_at TIMESTAMP + ) PRIMARY KEY (account_id, task_list_id, task_item_id), + INTERLEAVE IN PARENT task_lists ON DELETE CASCADE + ITEMS + end + + def numeric_pk_ddl_statement + <<-BOXES + CREATE TABLE boxes ( + id NUMERIC NOT NULL, + name STRING(256) NOT NULL, + ) PRIMARY KEY (id) + BOXES + end + + def numeric_composite_pk_ddl_statement + <<-BOX_ITEMS + CREATE TABLE box_items ( + id INT64 NOT NULL, + box_id NUMERIC NOT NULL, + name STRING(256) NOT NULL + ) PRIMARY KEY (id, box_id) + BOX_ITEMS + end + + def schema_pg_ddl_statements + [ + stuff_pg_ddl_statement, + accounts_pg_ddl_statement, + lists_pg_ddl_statement + ].compact + end + + def schema_ddl_statements + [ + stuffs_ddl_statement, + stuffs_index_statement, + accounts_ddl_statement, + lists_ddl_statement, + items_ddl_statement, + commit_timestamp_test_ddl_statement, + numeric_pk_ddl_statement, + numeric_composite_pk_ddl_statement + ].compact + end + + def stuffs_table_types + { id: :INT64, + int: :INT64, + float: :FLOAT64, + bool: :BOOL, + string: :STRING, + byte: :BYTES, + date: :DATE, + timestamp: :TIMESTAMP, + json: :JSON, + ints: [:INT64], + floats: [:FLOAT64], + bools: [:BOOL], + strings: [:STRING], + bytes: [:BYTES], + dates: [:DATE], + timestamps: [:TIMESTAMP], + jsons: [:JSON] + } + end + + def stuffs_random_row id = SecureRandom.int64 + { id: id, + int: rand(0..1000), + float: rand(0.0..100.0), + 
bool: [true, false].sample, + string: SecureRandom.hex(16), + byte: File.open("acceptance/data/face.jpg", "rb"), + date: Date.today + rand(-100..100), + timestamp: Time.now + rand(-60*60*24.0..60*60*24.0), + json: { venue: "Yellow Lake", rating: 10 }, + ints: rand(2..10).times.map { rand(0..1000) }, + floats: rand(2..10).times.map { rand(0.0..100.0) }, + bools: rand(2..10).times.map { [true, false].sample }, + strings: rand(2..10).times.map { SecureRandom.hex(16) }, + bytes: [File.open("acceptance/data/face.jpg", "rb"), + File.open("acceptance/data/landmark.jpg", "rb"), + File.open("acceptance/data/logo.jpg", "rb")], + dates: rand(2..10).times.map { Date.today + rand(-100..100) }, + timestamps: rand(2..10).times.map { Time.now + rand(-60*60*24.0..60*60*24.0) }, + json_array: [{ venue: "Green Lake", rating: 8 }, { venue: "Blue Lake", rating: 9 }] + } + end + + def default_account_rows + [ + { + account_id: 1, + username: "blowmage", + reputation: 63.5, + active: true, + avatar: File.open("acceptance/data/logo.jpg", "rb"), + friends: [2] + }, { + account_id: 2, + username: "quartzmo", + reputation: 87.9, + active: true, + avatar: 
StringIO.new("iVBORw0KGgoAAAANSUhEUgAAAEAAAABACAYAAACqaXHeAAAAAXNSR0IArs4c6QAAABxpRE9UAAAAAgAAAAAAAAAgAAAAKAAAACAAAAAgAAABxj2CfowAAAGSSURBVHgB7Jc9TsNAEIX3JDkCPUV6KlpKFHEGlD4nyA04ACUXQKTgCEipUnKGNEbP0otentayicZ24SlWs7tjO/N9u/5J2b2+NUtuZcnwYE8BuQPyGZAPwXwLLPk5kG+BJa9+fgfkh1B+CeancL4F8i2Q/wWm/S/w+XFoTseftn0dvhu0OXfhpM+AGvzcEiYVAFisPqE9zrETJhHAlXfg2lglMK9z0f3RBfB+ZyRUV3x+erzsEIjjOBqc1xtNAIrvguybV3A9lkVHxlEE6GrrPb/ZvAySwlUnfCmlPQ+R8JCExvGtcRQBLFwj4FGkznX1VYDKPG/f2/MjwCksXACgdNUxJjwK9xwl4JihOwTFR0kIF+CABEPRnvsvPFctMoYKqAFSAFaMwB4pp3Y+bodIYL9WmIAaIOHxo7W8wiHvAjTvhUeNwwSgeAeAABbqOewC5hBdwFD4+9+7puzXV9fS6/b1wwT4tsaYAhwOOQdUQch5vgZCeAhAv3ZM31yYAAUgvApQQQ6n5w6FB/RVe1jdJOAPAAD//1eMQwoAAAGQSURBVO1UMU4DQQy8X9AgWopIUINEkS4VlJQo4gvwAV7AD3gEH4iSgidESpWSXyyZExP5lr0c7K5PsXBhec/2+jzjuWtent9CLdtu1mG5+gjz+WNr7IsY7eH+tvO+xfuqk4vz7CH91edFaF5v9nb6dBKm13edvrL+0Lk5lMzJkQDeJSkkgHF6mR8CHwMHCQR/NAQQGD0BAlwK4FCefQiefq+A2Vn29tG7igLAfmwcnJu/nJy3BMQkMN9HEPr8AL3bfBv7Bp+7/SoExMDjZwKEJwmyhnnmQIQEBIlz2x0iKoAvJkAC6TsTIH6MqRrEWUMSZF2zAwqT4Eu/e6pzFAIkmNSZ4OFT+VYBIIF//UqbJwnF/4DU0GwOn8r/JQYCpPGufEfJuZiA37ycQw/5uFeqPq4pfR6FADmkBCXjfWdZj3NfXW58dAJyB9W65wRoMWulryvAyqa05nQFaDFrpa8rwMqmtOZ0BWgxa6WvK8DKprTmdAVoMWulryvAyqa05nQFaDFrpa8rwMqmtOb89wr4AtQ4aPoL6yVpAAAAAElFTkSuQmCC"), + friends: [1] + }, { + account_id: 3, + username: "-inactive-", + active: false + } + ] + end + + def default_pg_account_rows + [ + { + account_id: 1, + username: "blowmage", + reputation: 63.5, + active: true, + avatar: File.open("acceptance/data/logo.jpg", "rb"), + }, { + account_id: 2, + username: "quartzmo", + reputation: 87.9, + active: true, + avatar: 
StringIO.new("iVBORw0KGgoAAAANSUhEUgAAAEAAAABACAYAAACqaXHeAAAAAXNSR0IArs4c6QAAABxpRE9UAAAAAgAAAAAAAAAgAAAAKAAAACAAAAAgAAABxj2CfowAAAGSSURBVHgB7Jc9TsNAEIX3JDkCPUV6KlpKFHEGlD4nyA04ACUXQKTgCEipUnKGNEbP0otentayicZ24SlWs7tjO/N9u/5J2b2+NUtuZcnwYE8BuQPyGZAPwXwLLPk5kG+BJa9+fgfkh1B+CeancL4F8i2Q/wWm/S/w+XFoTseftn0dvhu0OXfhpM+AGvzcEiYVAFisPqE9zrETJhHAlXfg2lglMK9z0f3RBfB+ZyRUV3x+erzsEIjjOBqc1xtNAIrvguybV3A9lkVHxlEE6GrrPb/ZvAySwlUnfCmlPQ+R8JCExvGtcRQBLFwj4FGkznX1VYDKPG/f2/MjwCksXACgdNUxJjwK9xwl4JihOwTFR0kIF+CABEPRnvsvPFctMoYKqAFSAFaMwB4pp3Y+bodIYL9WmIAaIOHxo7W8wiHvAjTvhUeNwwSgeAeAABbqOewC5hBdwFD4+9+7puzXV9fS6/b1wwT4tsaYAhwOOQdUQch5vgZCeAhAv3ZM31yYAAUgvApQQQ6n5w6FB/RVe1jdJOAPAAD//1eMQwoAAAGQSURBVO1UMU4DQQy8X9AgWopIUINEkS4VlJQo4gvwAV7AD3gEH4iSgidESpWSXyyZExP5lr0c7K5PsXBhec/2+jzjuWtent9CLdtu1mG5+gjz+WNr7IsY7eH+tvO+xfuqk4vz7CH91edFaF5v9nb6dBKm13edvrL+0Lk5lMzJkQDeJSkkgHF6mR8CHwMHCQR/NAQQGD0BAlwK4FCefQiefq+A2Vn29tG7igLAfmwcnJu/nJy3BMQkMN9HEPr8AL3bfBv7Bp+7/SoExMDjZwKEJwmyhnnmQIQEBIlz2x0iKoAvJkAC6TsTIH6MqRrEWUMSZF2zAwqT4Eu/e6pzFAIkmNSZ4OFT+VYBIIF//UqbJwnF/4DU0GwOn8r/JQYCpPGufEfJuZiA37ycQw/5uFeqPq4pfR6FADmkBCXjfWdZj3NfXW58dAJyB9W65wRoMWulryvAyqa05nQFaDFrpa8rwMqmtOZ0BWgxa6WvK8DKprTmdAVoMWulryvAyqa05nQFaDFrpa8rwMqmtOb89wr4AtQ4aPoL6yVpAAAAAElFTkSuQmCC"), + }, { + account_id: 3, + username: "-inactive-", + active: false + } + ] + end + + def default_list_rows + end + + def default_item_rows + end + end +end \ No newline at end of file diff --git a/google-cloud-spanner/acceptance/spanner/batch_client/execute_partition_test.rb b/google-cloud-spanner/acceptance/spanner/batch_client/execute_partition_test.rb index de1ad102a4f5..0aaee9128393 100644 --- a/google-cloud-spanner/acceptance/spanner/batch_client/execute_partition_test.rb +++ b/google-cloud-spanner/acceptance/spanner/batch_client/execute_partition_test.rb @@ -16,10 +16,13 @@ describe "Spanner Batch Client", :execute_partition, :spanner do let(:db) { spanner_client } + let(:pg_db) { spanner_pg_client } let(:batch_client) { 
$spanner.batch_client $spanner_instance_id, $spanner_database_id } + let(:pg_batch_client) { $spanner.batch_client $spanner_instance_id, $spanner_pg_database_id } let(:table_name) { "stuffs" } let(:table_index) { "IsStuffsIdPrime" } let(:batch_snapshot) { batch_client.batch_snapshot } + let(:pg_batch_snapshot) { pg_batch_client.batch_snapshot } before do db.delete table_name # remove all data @@ -37,11 +40,28 @@ { id: 11, bool: true }, { id: 12, bool: false } ] + pg_db.delete table_name # remove all data + pg_db.insert table_name, [ + { id: 1, bool: false }, + { id: 2, bool: false }, + { id: 3, bool: true }, + { id: 4, bool: false }, + { id: 5, bool: true }, + { id: 6, bool: false }, + { id: 7, bool: true }, + { id: 8, bool: false }, + { id: 9, bool: false }, + { id: 10, bool: false }, + { id: 11, bool: true }, + { id: 12, bool: false } + ] end after do batch_snapshot.close db.delete table_name # remove all data + pg_batch_snapshot.close + pg_db.delete table_name end it "reads all by default" do @@ -76,6 +96,39 @@ batch_snapshot.close end + it "reads all by default in pg" do + skip("Skipped due to https://b.corp.google.com/issues/216209306") + _(pg_batch_snapshot.timestamp).must_be_kind_of Time + serialized_snapshot = pg_batch_snapshot.dump + + columns = [:id] + rows = [] + partitions = pg_batch_snapshot.partition_read table_name, columns + partitions.each do |partition| + _(partition.read.partition_token).wont_be_nil + _(partition.read.columns).must_equal columns.map(&:to_s) + _(partition.read.table).must_equal "stuffs" + + partition = pg_batch_client.load_partition partition.dump + + _(partition.read.partition_token).wont_be_nil + _(partition.read.columns).must_equal columns.map(&:to_s) + _(partition.read.table).must_equal "stuffs" + + new_batch_snapshot = pg_batch_client.load_batch_snapshot serialized_snapshot + _(new_batch_snapshot.timestamp).must_be_kind_of Time + results = new_batch_snapshot.execute_partition partition + _(results).must_be_kind_of 
Google::Cloud::Spanner::Results + + unless results.fields.to_a.empty? # With so little data, just one partition should get the entire result set + rows.concat(results.rows.map(&:to_h)) + end + end + + _(rows).must_equal [{ id: 1 }, { id: 2 }, { id: 3 }, { id: 4 }, { id: 5 }, { id: 6 }, { id: 7 }, { id: 8 }, { id: 9 }, { id: 10 }, { id: 11 }, { id: 12 }] + pg_batch_snapshot.close + end + it "queries all by default" do batch_snapshot = batch_client.batch_snapshot serialized_snapshot = batch_snapshot.dump @@ -104,6 +157,34 @@ batch_snapshot.close end + it "queries all by default in pg" do + pg_batch_snapshot = pg_batch_client.batch_snapshot + serialized_snapshot = pg_batch_snapshot.dump + + sql = "SELECT s.id, s.bool FROM stuffs AS s WHERE s.id = 2 AND s.bool = false" + rows = [] + partitions = pg_batch_snapshot.partition_query sql + partitions.each do |partition| + _(partition.execute.partition_token).wont_be_nil + _(partition.execute.sql).must_equal sql + + partition = pg_batch_client.load_partition partition.dump + + _(partition.execute.partition_token).wont_be_nil + _(partition.execute.sql).must_equal sql + + new_batch_snapshot = pg_batch_client.load_batch_snapshot serialized_snapshot + results = new_batch_snapshot.execute_partition partition + _(results).must_be_kind_of Google::Cloud::Spanner::Results + unless results.fields.to_a.empty? 
# With so little data, just one partition should get the entire result set + rows.concat(results.rows.map(&:to_h)) + end + end + + _(rows).must_equal [{:id=>2, :bool=>false}] + pg_batch_snapshot.close + end + it "queries all by default with query options" do batch_snapshot = batch_client.batch_snapshot serialized_snapshot = batch_snapshot.dump @@ -132,4 +213,33 @@ _(rows).must_equal [{:id=>2, :bool=>false}] batch_snapshot.close end + + it "queries all by default with query options pg" do + pg_batch_snapshot = pg_batch_client.batch_snapshot + serialized_snapshot = pg_batch_snapshot.dump + + sql = "SELECT s.id, s.bool FROM stuffs AS s WHERE s.id = 2 AND s.bool = false" + query_options = { optimizer_version: "3", optimizer_statistics_package: "latest" } + rows = [] + partitions = pg_batch_snapshot.partition_query sql, query_options: query_options + partitions.each do |partition| + _(partition.execute.partition_token).wont_be_nil + _(partition.execute.sql).must_equal sql + + partition = pg_batch_client.load_partition partition.dump + + _(partition.execute.partition_token).wont_be_nil + _(partition.execute.sql).must_equal sql + + new_batch_snapshot = pg_batch_client.load_batch_snapshot serialized_snapshot + results = new_batch_snapshot.execute_partition partition + _(results).must_be_kind_of Google::Cloud::Spanner::Results + unless results.fields.to_a.empty? 
# With so little data, just one partition should get the entire result set + rows.concat(results.rows.map(&:to_h)) + end + end + + _(rows).must_equal [{:id=>2, :bool=>false}] + pg_batch_snapshot.close + end end diff --git a/google-cloud-spanner/acceptance/spanner/client/batch_update_test.rb b/google-cloud-spanner/acceptance/spanner/client/batch_update_test.rb index 882a450ab408..01fbc7812ae8 100644 --- a/google-cloud-spanner/acceptance/spanner/client/batch_update_test.rb +++ b/google-cloud-spanner/acceptance/spanner/client/batch_update_test.rb @@ -16,136 +16,158 @@ require "concurrent" describe "Spanner Client", :batch_update, :spanner do - let(:db) { spanner_client } - let(:insert_dml) { "INSERT INTO accounts (account_id, username, active, reputation) VALUES (@account_id, @username, @active, @reputation)" } - let(:update_dml) { "UPDATE accounts SET username = @username, active = @active WHERE account_id = @account_id" } + let(:db) { {gsql: spanner_client, pg: spanner_pg_client} } + let(:insert_dml) {{ gsql: "INSERT INTO accounts (account_id, username, active, reputation) VALUES (@account_id, @username, @active, @reputation)", + pg: "INSERT INTO accounts (account_id, username, active, reputation) VALUES ($1, $2, $3, $4)" + }} + let(:update_dml) {{ gsql: "UPDATE accounts SET username = @username, active = @active WHERE account_id = @account_id", + pg: "UPDATE accounts SET username = $2, active = $3 WHERE account_id = $1", + }} + let(:select_dql) {{ gsql: "SELECT username FROM accounts WHERE account_id = @account_id", + pg: "SELECT username FROM accounts WHERE account_id = $1" + }} let(:update_dml_syntax_error) { "UPDDDD accounts" } - let(:delete_dml) { "DELETE FROM accounts WHERE account_id = @account_id" } - let(:insert_params) { { account_id: 4, username: "inserted", active: true, reputation: 88.8 } } - let(:update_params) { { account_id: 4, username: "updated", active: false } } - let(:delete_params) { { account_id: 4 } } + let(:delete_dml) {{ gsql:"DELETE FROM 
accounts WHERE account_id = @account_id", + pg: "DELETE FROM accounts WHERE account_id = $1" + }} + let(:insert_params) {{ gsql: { account_id: 4, username: "inserted", active: true, reputation: 88.8 }, + pg: { p1: 4, p2: "inserted", p3: true, p4: 88.8 } + }} + let(:update_params) {{ gsql: { account_id: 4, username: "updated", active: false }, + pg: { p1: 4, p2: "updated", p3: false } + }} + let(:delete_params) { { gsql: { account_id: 4 }, pg: { p1: 4 } } } before do - db.commit do |c| + db[:gsql].commit do |c| c.delete "accounts" c.insert "accounts", default_account_rows end + db[:pg].commit do |c| + c.delete "accounts" + c.insert "accounts", default_pg_account_rows + end end after do - db.delete "accounts" + db[:gsql].delete "accounts" + db[:pg].delete "accounts" end - it "executes multiple DML statements in a batch" do - prior_results = db.execute_sql "SELECT * FROM accounts" - _(prior_results.rows.count).must_equal 3 + [:gsql, :pg].each do |dialect| - timestamp = db.transaction do |tx| - _(tx.transaction_id).wont_be :nil? + it "executes multiple DML statements in a batch for #{dialect}" do + prior_results = db[dialect].execute_sql "SELECT * FROM accounts" + _(prior_results.rows.count).must_equal 3 - row_counts = tx.batch_update do |b| - b.batch_update insert_dml, params: insert_params - b.batch_update update_dml, params: update_params - b.batch_update delete_dml, params: delete_params - end + timestamp = db[dialect].transaction do |tx| + _(tx.transaction_id).wont_be :nil? 
- _(row_counts).must_be_kind_of Array - _(row_counts.count).must_equal 3 - _(row_counts[0]).must_equal 1 - _(row_counts[1]).must_equal 1 - _(row_counts[2]).must_equal 1 + row_counts = tx.batch_update do |b| + b.batch_update insert_dml[dialect], params: insert_params[dialect] + b.batch_update update_dml[dialect], params: update_params[dialect] + b.batch_update delete_dml[dialect], params: delete_params[dialect] + end + + _(row_counts).must_be_kind_of Array + _(row_counts.count).must_equal 3 + _(row_counts[0]).must_equal 1 + _(row_counts[1]).must_equal 1 + _(row_counts[2]).must_equal 1 - update_results = tx.execute_sql \ - "SELECT username FROM accounts WHERE account_id = @account_id", - params: { account_id: 4 } - _(update_results.rows.count).must_equal 0 + update_results = tx.execute_sql \ + select_dql[dialect], + params: delete_params[dialect] + _(update_results.rows.count).must_equal 0 + end + _(timestamp).must_be_kind_of Time end - _(timestamp).must_be_kind_of Time - end - it "raises InvalidArgumentError when no DML statements are executed in a batch" do - prior_results = db.execute_sql "SELECT * FROM accounts" - _(prior_results.rows.count).must_equal 3 + it "raises InvalidArgumentError when no DML statements are executed in a batch for #{dialect}" do + prior_results = db[dialect].execute_sql "SELECT * FROM accounts" + _(prior_results.rows.count).must_equal 3 - timestamp = db.transaction do |tx| - _(tx.transaction_id).wont_be :nil? + timestamp = db[dialect].transaction do |tx| + _(tx.transaction_id).wont_be :nil? 
- err = expect do - tx.batch_update do |b| end - end.must_raise Google::Cloud::InvalidArgumentError - _(err.message).must_match /3:(No statements in batch DML request|Request must contain at least one DML statement)/ + err = expect do + tx.batch_update do |b| end + end.must_raise Google::Cloud::InvalidArgumentError + _(err.message).must_match /3:(No statements in batch DML request|Request must contain at least one DML statement)/ + end + _(timestamp).must_be_kind_of Time end - _(timestamp).must_be_kind_of Time - end - it "executes multiple DML statements in a batch with syntax error" do - prior_results = db.execute_sql "SELECT * FROM accounts" - _(prior_results.rows.count).must_equal 3 - - timestamp = db.transaction do |tx| - _(tx.transaction_id).wont_be :nil? - begin - tx.batch_update do |b| - b.batch_update insert_dml, params: insert_params - b.batch_update update_dml_syntax_error, params: update_params - b.batch_update delete_dml, params: delete_params + it "executes multiple DML statements in a batch with syntax error for #{dialect}" do + prior_results = db[dialect].execute_sql "SELECT * FROM accounts" + p prior_results + _(prior_results.rows.count).must_equal 3 + + timestamp = db[dialect].transaction do |tx| + _(tx.transaction_id).wont_be :nil? + begin + tx.batch_update do |b| + b.batch_update insert_dml[dialect], params: insert_params[dialect] + b.batch_update update_dml_syntax_error, params: update_params[dialect] + b.batch_update delete_dml[dialect], params: delete_params[dialect] + end + rescue Google::Cloud::Spanner::BatchUpdateError => batch_update_error + _(batch_update_error.cause).must_be_kind_of Google::Cloud::InvalidArgumentError + _(batch_update_error.cause.message).must_equal "Statement 1: 'UPDDDD accounts' is not valid DML." 
+ + row_counts = batch_update_error.row_counts + _(row_counts).must_be_kind_of Array + _(row_counts.count).must_equal 1 + _(row_counts[0]).must_equal 1 end - rescue Google::Cloud::Spanner::BatchUpdateError => batch_update_error - _(batch_update_error.cause).must_be_kind_of Google::Cloud::InvalidArgumentError - _(batch_update_error.cause.message).must_equal "Statement 1: 'UPDDDD accounts' is not valid DML." - - row_counts = batch_update_error.row_counts - _(row_counts).must_be_kind_of Array - _(row_counts.count).must_equal 1 - _(row_counts[0]).must_equal 1 + update_results = tx.execute_sql \ + select_dql[dialect], + params: delete_params[dialect] + _(update_results.rows.count).must_equal 1 # DELETE statement did not execute. end - update_results = tx.execute_sql \ - "SELECT username FROM accounts WHERE account_id = @account_id", - params: { account_id: 4 } - _(update_results.rows.count).must_equal 1 # DELETE statement did not execute. + _(timestamp).must_be_kind_of Time end - _(timestamp).must_be_kind_of Time - end - it "runs execute_update and batch_update in the same transaction" do - prior_results = db.execute_sql "SELECT * FROM accounts" - _(prior_results.rows.count).must_equal 3 + it "runs execute_update and batch_update in the same transaction for #{dialect}" do + prior_results = db[dialect].execute_sql "SELECT * FROM accounts" + _(prior_results.rows.count).must_equal 3 - timestamp = db.transaction do |tx| - _(tx.transaction_id).wont_be :nil? + timestamp = db[dialect].transaction do |tx| + _(tx.transaction_id).wont_be :nil? 
- row_counts = tx.batch_update do |b| - b.batch_update insert_dml, params: insert_params - b.batch_update update_dml, params: update_params - end + row_counts = tx.batch_update do |b| + b.batch_update insert_dml[dialect], params: insert_params[dialect] + b.batch_update update_dml[dialect], params: update_params[dialect] + end - _(row_counts).must_be_kind_of Array - _(row_counts.count).must_equal 2 - _(row_counts[0]).must_equal 1 - _(row_counts[1]).must_equal 1 + _(row_counts).must_be_kind_of Array + _(row_counts.count).must_equal 2 + _(row_counts[0]).must_equal 1 + _(row_counts[1]).must_equal 1 - delete_row_count = tx.execute_update delete_dml, params: delete_params + delete_row_count = tx.execute_update delete_dml[dialect], params: delete_params[dialect] - _(delete_row_count).must_equal 1 + _(delete_row_count).must_equal 1 - update_results = tx.execute_sql \ - "SELECT username FROM accounts WHERE account_id = @account_id", - params: { account_id: 4 } - _(update_results.rows.count).must_equal 0 + update_results = tx.execute_sql \ + select_dql[dialect], + params: delete_params[dialect] + _(update_results.rows.count).must_equal 0 + end + _(timestamp).must_be_kind_of Time end - _(timestamp).must_be_kind_of Time - end - describe "request options" do - it "execute batch update with priority options" do - timestamp = db.transaction do |tx| - row_counts = tx.batch_update request_options: { priority: :PRIORITY_HIGH } do |b| - b.batch_update insert_dml, params: insert_params - b.batch_update update_dml, params: update_params - end + describe "request options for #{dialect}" do + it "execute batch update with priority options for #{dialect}" do + timestamp = db[dialect].transaction do |tx| + row_counts = tx.batch_update request_options: { priority: :PRIORITY_HIGH } do |b| + b.batch_update insert_dml[dialect], params: insert_params[dialect] + b.batch_update update_dml[dialect], params: update_params[dialect] + end - _(row_counts).must_be_kind_of Array - 
_(row_counts.count).must_equal 2 + _(row_counts).must_be_kind_of Array + _(row_counts.count).must_equal 2 + end end end end diff --git a/google-cloud-spanner/acceptance/spanner/client/params/pgnumeric_test.rb b/google-cloud-spanner/acceptance/spanner/client/params/pgnumeric_test.rb new file mode 100644 index 000000000000..855581a2f1f6 --- /dev/null +++ b/google-cloud-spanner/acceptance/spanner/client/params/pgnumeric_test.rb @@ -0,0 +1,67 @@ +# Copyright true0false7 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version true.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +require "spanner_helper" + +describe "Spanner Client", :params, :bool, :spanner do + let(:db) { spanner_client } + + it "queries and returns a bool parameter" do + results = db.execute_query "SELECT @value AS value", params: { value: true } + + _(results).must_be_kind_of Google::Cloud::Spanner::Results + _(results.fields[:value]).must_equal :BOOL + _(results.rows.first[:value]).must_equal true + end + + it "queries and returns a NULL bool parameter" do + results = db.execute_query "SELECT @value AS value", params: { value: nil }, types: { value: :BOOL } + + _(results).must_be_kind_of Google::Cloud::Spanner::Results + _(results.fields[:value]).must_equal :BOOL + _(results.rows.first[:value]).must_be :nil? 
+ end + + it "queries and returns an array of bool parameters" do + results = db.execute_query "SELECT @value AS value", params: { value: [false, true, false] } + + _(results).must_be_kind_of Google::Cloud::Spanner::Results + _(results.fields[:value]).must_equal [:BOOL] + _(results.rows.first[:value]).must_equal [false, true, false] + end + + it "queries and returns an array of bool parameters with a nil value" do + results = db.execute_query "SELECT @value AS value", params: { value: [nil, false, true, false] } + + _(results).must_be_kind_of Google::Cloud::Spanner::Results + _(results.fields[:value]).must_equal [:BOOL] + _(results.rows.first[:value]).must_equal [nil, false, true, false] + end + + it "queries and returns an empty array of bool parameters" do + results = db.execute_query "SELECT @value AS value", params: { value: [] }, types: { value: [:BOOL] } + + _(results).must_be_kind_of Google::Cloud::Spanner::Results + _(results.fields[:value]).must_equal [:BOOL] + _(results.rows.first[:value]).must_equal [] + end + + it "queries and returns a NULL array of bool parameters" do + results = db.execute_query "SELECT @value AS value", params: { value: nil }, types: { value: [:BOOL] } + + _(results).must_be_kind_of Google::Cloud::Spanner::Results + _(results.fields[:value]).must_equal [:BOOL] + _(results.rows.first[:value]).must_be :nil? 
+ end +end diff --git a/google-cloud-spanner/acceptance/spanner/client/types/pgnumeric_test.rb b/google-cloud-spanner/acceptance/spanner/client/types/pgnumeric_test.rb new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/google-cloud-spanner/acceptance/spanner/database_client_test.rb b/google-cloud-spanner/acceptance/spanner/database_client_test.rb index 453040b734a5..edd41a17081e 100644 --- a/google-cloud-spanner/acceptance/spanner/database_client_test.rb +++ b/google-cloud-spanner/acceptance/spanner/database_client_test.rb @@ -12,12 +12,13 @@ # See the License for the specific language governing permissions and # limitations under the License. -require "google/cloud/spanner/admin/database" require "spanner_helper" +require "google/cloud/spanner/admin/database" describe "Spanner Databases Client", :spanner do let(:instance_id) { $spanner_instance_id } let(:database_id) { "#{$spanner_database_id}-crud" } + let(:pg_database_id) { "#{$spanner_pg_database_id}-crud" } it "creates, gets, updates, and drops a database" do client = Google::Cloud::Spanner::Admin::Database.database_admin project_id: spanner.project @@ -61,6 +62,49 @@ end end + it "creates, gets, updates, and drops a database with pg dialect" do + client = Google::Cloud::Spanner::Admin::Database.database_admin project_id: spanner.project + + instance_path = \ + client.instance_path project: spanner.project, instance: instance_id + + db_path = client.database_path project: spanner.project, + instance: instance_id, + database: pg_database_id + + job = client.create_database parent: instance_path, + create_statement: "CREATE DATABASE \"#{pg_database_id}\"", + database_dialect: :POSTGRESQL + _(job).wont_be :done? unless emulator_enabled? + job.wait_until_done! + + _(job).must_be :done? + raise Google::Cloud::Error.from_error(job.error) if job.error? + database = job.results + _(database).wont_be :nil? 
+ _(database).must_be_kind_of Google::Cloud::Spanner::Admin::Database::V1::Database + _(database.name).must_equal db_path + _(database.encryption_config).must_be :nil? + _(database.encryption_info).must_be_kind_of Google::Protobuf::RepeatedField + + database = client.get_database name: db_path + _(database).must_be_kind_of Google::Cloud::Spanner::Admin::Database::V1::Database + + add_users_table_sql = "CREATE TABLE users (id INT NOT NULL) PRIMARY KEY(id)" + job2 = client.update_database_ddl database: db_path, + statements: [add_users_table_sql] + + _(job2).wont_be :done? unless emulator_enabled? + job2.wait_until_done! + + _(job2).must_be :done? + + client.drop_database database: db_path + assert_raises Google::Cloud::NotFoundError do + client.get_database name: db_path + end + end + it "lists databases" do client = Google::Cloud::Spanner::Admin::Database.database_admin project_id: spanner.project diff --git a/google-cloud-spanner/acceptance/spanner_helper.rb b/google-cloud-spanner/acceptance/spanner_helper.rb index 4edaa2d0ce57..b78f9f34d498 100644 --- a/google-cloud-spanner/acceptance/spanner_helper.rb +++ b/google-cloud-spanner/acceptance/spanner_helper.rb @@ -20,6 +20,9 @@ require "minitest/rg" require "google/cloud/spanner" +require "google/cloud/spanner/admin/database" + +require "data/fixtures" # define SecureRandom.int64 require "securerandom" @@ -36,7 +39,11 @@ def emulator_enabled? 
end # Create shared spanner object so we don't create new for each test +Google::Cloud::Spanner.configure do |config| + config.quota_project = "span-cloud-testing" +end $spanner = Google::Cloud::Spanner.new +$spanner_db_admin = Google::Cloud::Spanner::Admin::Database.database_admin module Acceptance ## @@ -52,7 +59,7 @@ module Acceptance # end # end class SpannerTest < Minitest::Test - attr_accessor :spanner, :spanner_client + attr_accessor :spanner, :spanner_client, :spanner_pg_client ## # Setup project based on available ENV variables @@ -65,6 +72,10 @@ def setup refute_nil @spanner_client, "You do not have an active client to run the tests." + @spanner_pg_client = $spanner_pg_client + + refute_nil @spanner_pg_client, "You do not have an active client to run the tests." + super end @@ -87,217 +98,6 @@ def setup # reporter.record result # end - module Fixtures - def stuffs_ddl_statement - if emulator_enabled? - <<-STUFFS - CREATE TABLE stuffs ( - id INT64 NOT NULL, - int INT64, - float FLOAT64, - bool BOOL, - string STRING(MAX), - byte BYTES(MAX), - date DATE, - timestamp TIMESTAMP OPTIONS (allow_commit_timestamp=true), - ints ARRAY, - floats ARRAY, - bools ARRAY, - strings ARRAY, - bytes ARRAY, - dates ARRAY, - timestamps ARRAY - ) PRIMARY KEY (id) - STUFFS - else - <<-STUFFS - CREATE TABLE stuffs ( - id INT64 NOT NULL, - int INT64, - float FLOAT64, - bool BOOL, - string STRING(MAX), - byte BYTES(MAX), - date DATE, - timestamp TIMESTAMP OPTIONS (allow_commit_timestamp=true), - numeric NUMERIC, - json JSON, - ints ARRAY, - floats ARRAY, - bools ARRAY, - strings ARRAY, - bytes ARRAY, - dates ARRAY, - timestamps ARRAY, - numerics ARRAY, - json_array ARRAY - ) PRIMARY KEY (id) - STUFFS - end - end - - def stuffs_index_statement - "CREATE INDEX IsStuffsIdPrime ON stuffs(bool, id)" - end - - def commit_timestamp_test_ddl_statement - <<-TEST - CREATE TABLE commit_timestamp_test(committs TIMESTAMP OPTIONS (allow_commit_timestamp=true)) PRIMARY KEY (committs) - TEST - end 
- - def accounts_ddl_statement - <<-ACCOUNTS - CREATE TABLE accounts ( - account_id INT64 NOT NULL, - username STRING(32), - friends ARRAY, - active BOOL NOT NULL, - reputation FLOAT64, - avatar BYTES(8192) - ) PRIMARY KEY (account_id) - ACCOUNTS - end - - def lists_ddl_statement - <<-LISTS - CREATE TABLE task_lists ( - account_id INT64 NOT NULL, - task_list_id INT64 NOT NULL, - description STRING(1024) NOT NULL - ) PRIMARY KEY (account_id, task_list_id), - INTERLEAVE IN PARENT accounts ON DELETE CASCADE - LISTS - end - - def items_ddl_statement - <<-ITEMS - CREATE TABLE task_items ( - account_id INT64 NOT NULL, - task_list_id INT64 NOT NULL, - task_item_id INT64 NOT NULL, - description STRING(1024) NOT NULL, - active BOOL NOT NULL, - priority INT64 NOT NULL, - due_date DATE, - created_at TIMESTAMP, - updated_at TIMESTAMP - ) PRIMARY KEY (account_id, task_list_id, task_item_id), - INTERLEAVE IN PARENT task_lists ON DELETE CASCADE - ITEMS - end - - def numeric_pk_ddl_statement - return - - <<-BOXES - CREATE TABLE boxes ( - id NUMERIC NOT NULL, - name STRING(256) NOT NULL, - ) PRIMARY KEY (id) - BOXES - end - - def numeric_composite_pk_ddl_statement - return - - <<-BOX_ITEMS - CREATE TABLE box_items ( - id INT64 NOT NULL, - box_id NUMERIC NOT NULL, - name STRING(256) NOT NULL - ) PRIMARY KEY (id, box_id) - BOX_ITEMS - end - - def schema_ddl_statements - [ - stuffs_ddl_statement, - stuffs_index_statement, - accounts_ddl_statement, - lists_ddl_statement, - items_ddl_statement, - commit_timestamp_test_ddl_statement, - numeric_pk_ddl_statement, - numeric_composite_pk_ddl_statement - ].compact - end - - def stuffs_table_types - { id: :INT64, - int: :INT64, - float: :FLOAT64, - bool: :BOOL, - string: :STRING, - byte: :BYTES, - date: :DATE, - timestamp: :TIMESTAMP, - json: :JSON, - ints: [:INT64], - floats: [:FLOAT64], - bools: [:BOOL], - strings: [:STRING], - bytes: [:BYTES], - dates: [:DATE], - timestamps: [:TIMESTAMP], - jsons: [:JSON] - } - end - - def stuffs_random_row 
id = SecureRandom.int64 - { id: id, - int: rand(0..1000), - float: rand(0.0..100.0), - bool: [true, false].sample, - string: SecureRandom.hex(16), - byte: File.open("acceptance/data/face.jpg", "rb"), - date: Date.today + rand(-100..100), - timestamp: Time.now + rand(-60*60*24.0..60*60*24.0), - json: { venue: "Yellow Lake", rating: 10 }, - ints: rand(2..10).times.map { rand(0..1000) }, - floats: rand(2..10).times.map { rand(0.0..100.0) }, - bools: rand(2..10).times.map { [true, false].sample }, - strings: rand(2..10).times.map { SecureRandom.hex(16) }, - bytes: [File.open("acceptance/data/face.jpg", "rb"), - File.open("acceptance/data/landmark.jpg", "rb"), - File.open("acceptance/data/logo.jpg", "rb")], - dates: rand(2..10).times.map { Date.today + rand(-100..100) }, - timestamps: rand(2..10).times.map { Time.now + rand(-60*60*24.0..60*60*24.0) }, - json_array: [{ venue: "Green Lake", rating: 8 }, { venue: "Blue Lake", rating: 9 }] - } - end - - def default_account_rows - [ - { - account_id: 1, - username: "blowmage", - reputation: 63.5, - active: true, - avatar: File.open("acceptance/data/logo.jpg", "rb"), - friends: [2] - }, { - account_id: 2, - username: "quartzmo", - reputation: 87.9, - active: true, - avatar: 
StringIO.new("iVBORw0KGgoAAAANSUhEUgAAAEAAAABACAYAAACqaXHeAAAAAXNSR0IArs4c6QAAABxpRE9UAAAAAgAAAAAAAAAgAAAAKAAAACAAAAAgAAABxj2CfowAAAGSSURBVHgB7Jc9TsNAEIX3JDkCPUV6KlpKFHEGlD4nyA04ACUXQKTgCEipUnKGNEbP0otentayicZ24SlWs7tjO/N9u/5J2b2+NUtuZcnwYE8BuQPyGZAPwXwLLPk5kG+BJa9+fgfkh1B+CeancL4F8i2Q/wWm/S/w+XFoTseftn0dvhu0OXfhpM+AGvzcEiYVAFisPqE9zrETJhHAlXfg2lglMK9z0f3RBfB+ZyRUV3x+erzsEIjjOBqc1xtNAIrvguybV3A9lkVHxlEE6GrrPb/ZvAySwlUnfCmlPQ+R8JCExvGtcRQBLFwj4FGkznX1VYDKPG/f2/MjwCksXACgdNUxJjwK9xwl4JihOwTFR0kIF+CABEPRnvsvPFctMoYKqAFSAFaMwB4pp3Y+bodIYL9WmIAaIOHxo7W8wiHvAjTvhUeNwwSgeAeAABbqOewC5hBdwFD4+9+7puzXV9fS6/b1wwT4tsaYAhwOOQdUQch5vgZCeAhAv3ZM31yYAAUgvApQQQ6n5w6FB/RVe1jdJOAPAAD//1eMQwoAAAGQSURBVO1UMU4DQQy8X9AgWopIUINEkS4VlJQo4gvwAV7AD3gEH4iSgidESpWSXyyZExP5lr0c7K5PsXBhec/2+jzjuWtent9CLdtu1mG5+gjz+WNr7IsY7eH+tvO+xfuqk4vz7CH91edFaF5v9nb6dBKm13edvrL+0Lk5lMzJkQDeJSkkgHF6mR8CHwMHCQR/NAQQGD0BAlwK4FCefQiefq+A2Vn29tG7igLAfmwcnJu/nJy3BMQkMN9HEPr8AL3bfBv7Bp+7/SoExMDjZwKEJwmyhnnmQIQEBIlz2x0iKoAvJkAC6TsTIH6MqRrEWUMSZF2zAwqT4Eu/e6pzFAIkmNSZ4OFT+VYBIIF//UqbJwnF/4DU0GwOn8r/JQYCpPGufEfJuZiA37ycQw/5uFeqPq4pfR6FADmkBCXjfWdZj3NfXW58dAJyB9W65wRoMWulryvAyqa05nQFaDFrpa8rwMqmtOZ0BWgxa6WvK8DKprTmdAVoMWulryvAyqa05nQFaDFrpa8rwMqmtOb89wr4AtQ4aPoL6yVpAAAAAElFTkSuQmCC"), - friends: [1] - }, { - account_id: 3, - username: "-inactive-", - active: false - } - ] - end - - def default_list_rows - end - - def default_item_rows - end - end - include Fixtures def assert_commit_response resp, commit_options = {} @@ -319,10 +119,11 @@ def assert_commit_response resp, commit_options = {} $spanner_instance_id = "google-cloud-ruby-tests" # $spanner_database_id is already 22 characters, can only add 7 additional characters $spanner_database_id = "gcruby-#{Date.today.strftime "%y%m%d"}-#{SecureRandom.hex(4)}" +$spanner_pg_database_id = "gcruby-pg-#{Date.today.strftime "%y%m%d"}-#{SecureRandom.hex(4)}" # Setup main instance and database for the tests fixture = Object.new -fixture.extend Acceptance::SpannerTest::Fixtures 
+fixture.extend Acceptance::Fixtures instance = $spanner.instance $spanner_instance_id @@ -337,15 +138,34 @@ def assert_commit_response resp, commit_options = {} db_job.wait_until_done! fail GRPC::BadStatus.new(db_job.error.code, db_job.error.message) if db_job.error? + +instance_path = $spanner_db_admin.instance_path project: $spanner.project_id, instance: $spanner_instance_id +db_job = $spanner_db_admin.create_database parent: instance_path, + create_statement: "CREATE DATABASE \"#{$spanner_pg_database_id}\"", + database_dialect: :POSTGRESQL +db_job.wait_until_done! +fail GRPC::BadStatus.new(db_job.error.code, db_job.error.message) if db_job.error? + +db_path = $spanner_db_admin.database_path project: $spanner.project_id, + instance: $spanner_instance_id, + database: $spanner_pg_database_id + +db_job = $spanner_db_admin.update_database_ddl database: db_path, statements: fixture.schema_pg_ddl_statements +db_job.wait_until_done! +fail GRPC::BadStatus.new(db_job.error.code, db_job.error.message) if db_job.error? + # Create one client for all tests, to minimize resource usage $spanner_client = $spanner.client $spanner_instance_id, $spanner_database_id +$spanner_pg_client = $spanner.client $spanner_instance_id, $spanner_pg_database_id def clean_up_spanner_objects puts "Cleaning up instances and databases after spanner tests." $spanner.instance($spanner_instance_id).database($spanner_database_id).drop + $spanner.instance($spanner_instance_id).database($spanner_pg_database_id).drop puts "Closing the Spanner Client." $spanner_client.close + $spanner_pg_client.close puts "Cleaning up instances databases and backups after spanner tests." 
instance = $spanner.instance($spanner_instance_id) diff --git a/google-cloud-spanner/lib/google/cloud/spanner/convert.rb b/google-cloud-spanner/lib/google/cloud/spanner/convert.rb index bb1c530de3ea..639b7b5830ee 100644 --- a/google-cloud-spanner/lib/google/cloud/spanner/convert.rb +++ b/google-cloud-spanner/lib/google/cloud/spanner/convert.rb @@ -179,6 +179,8 @@ def grpc_type_for_field field code: :ARRAY, array_element_type: grpc_type_for_field(field.first) ) + elsif :PG_NUMERIC === field + V1::Type.new(code: :NUMERIC, type_annotation: :PG_NUMERIC) else V1::Type.new(code: field) end From a3f1386cb752b9260dcad6a5c396b0bdb0ceac91 Mon Sep 17 00:00:00 2001 From: Nivedha Date: Mon, 14 Feb 2022 14:46:27 +0530 Subject: [PATCH 02/19] update crud test --- .../acceptance/spanner/client/crud_test.rb | 267 +++++++++--------- 1 file changed, 136 insertions(+), 131 deletions(-) diff --git a/google-cloud-spanner/acceptance/spanner/client/crud_test.rb b/google-cloud-spanner/acceptance/spanner/client/crud_test.rb index cce3e17e67fb..8935e8ffa1d4 100644 --- a/google-cloud-spanner/acceptance/spanner/client/crud_test.rb +++ b/google-cloud-spanner/acceptance/spanner/client/crud_test.rb @@ -15,189 +15,194 @@ require "spanner_helper" describe "Spanner Client", :crud, :spanner do - let(:db) { spanner_client } + let(:db) { {gsql: spanner_client, pg: spanner_pg_client} } before do - @setup_timestamp = db.delete "accounts" + setup_timestamp_pg = db[:pg].delete "accounts" + setup_timestamp_gsql = db[:pg].delete "accounts" + @setup_timestamp = {gsql: setup_timestamp_gsql, pg: setup_timestamp_pg} + @default_rows = {gsql: default_account_rows, pg: default_pg_account_rows} end - it "inserts, updates, upserts, reads, and deletes records" do - results = db.read "accounts", ["account_id"], single_use: { timestamp: @setup_timestamp } - _(results.rows.count).must_equal 0 - _(results.timestamp).wont_be :nil? 
- - db.insert "accounts", default_account_rows[0] - db.upsert "accounts", default_account_rows[1] - timestamp = db.insert "accounts", default_account_rows[2] - - results = db.read "accounts", ["account_id"], single_use: { timestamp: timestamp } - _(results.rows.count).must_equal 3 - _(results.timestamp).wont_be :nil? + [:gsql, :pg].each do |dialect| + it "inserts, updates, upserts, reads, and deletes records" do + results = db[dialect].read "accounts", ["account_id"], single_use: { timestamp: @setup_timestamp[dialect] } + _(results.rows.count).must_equal 0 + _(results.timestamp).wont_be :nil? - active_count_sql = "SELECT COUNT(*) AS count FROM accounts WHERE active = true" + db[dialect].insert "accounts", @default_rows[dialect][0] + db[dialect].upsert "accounts", @default_rows[dialect][1] + timestamp = db[dialect].insert "accounts", @default_rows[dialect][2] - results = db.execute_query active_count_sql, single_use: { timestamp: timestamp } - _(results.rows.first[:count]).must_equal 2 - _(results.timestamp).wont_be :nil? + results = db[dialect].read "accounts", ["account_id"], single_use: { timestamp: timestamp } + _(results.rows.count).must_equal 3 + _(results.timestamp).wont_be :nil? - activate_inactive_account = { account_id: 3, active: true } + active_count_sql = "SELECT COUNT(*) AS count FROM accounts WHERE active = true" - timestamp = db.upsert "accounts", activate_inactive_account + results = db[dialect].execute_query active_count_sql, single_use: { timestamp: timestamp } + _(results.rows.first[:count]).must_equal 2 + _(results.timestamp).wont_be :nil? - results = db.execute_query active_count_sql, single_use: { timestamp: timestamp } - _(results.rows.first[:count]).must_equal 3 - _(results.timestamp).wont_be :nil? 
+ activate_inactive_account = { account_id: 3, active: true } - timestamp = db.delete "accounts", [1, 2, 3] + timestamp = db[dialect].upsert "accounts", activate_inactive_account - results = db.read "accounts", ["account_id"], single_use: { timestamp: timestamp } - _(results.rows.count).must_equal 0 - _(results.timestamp).wont_be :nil? - end + results = db[dialect].execute_query active_count_sql, single_use: { timestamp: timestamp } + _(results.rows.first[:count]).must_equal 3 + _(results.timestamp).wont_be :nil? - it "inserts, updates, upserts, reads, and deletes records using commit" do - results = db.read "accounts", ["account_id"], single_use: { timestamp: @setup_timestamp } - _(results.rows.count).must_equal 0 - _(results.timestamp).wont_be :nil? + timestamp = db[dialect].delete "accounts", [1, 2, 3] - timestamp = db.commit do |c| - c.insert "accounts", default_account_rows[0] - c.upsert "accounts", default_account_rows[1] - c.insert "accounts", default_account_rows[2] + results = db[dialect].read "accounts", ["account_id"], single_use: { timestamp: timestamp } + _(results.rows.count).must_equal 0 + _(results.timestamp).wont_be :nil? end - results = db.read "accounts", ["account_id"], single_use: { timestamp: timestamp } - _(results.rows.count).must_equal 3 - _(results.timestamp).wont_be :nil? + it "inserts, updates, upserts, reads, and deletes records using commit" do + results = db[dialect].read "accounts", ["account_id"], single_use: { timestamp: @setup_timestamp[dialect] } + _(results.rows.count).must_equal 0 + _(results.timestamp).wont_be :nil? 
- active_count_sql = "SELECT COUNT(*) AS count FROM accounts WHERE active = true" + timestamp = db[dialect].commit do |c| + c.insert "accounts", @default_rows[dialect][0] + c.upsert "accounts", @default_rows[dialect][1] + c.insert "accounts", @default_rows[dialect][2] + end - results = db.execute_query active_count_sql, single_use: { timestamp: timestamp } - _(results.rows.first[:count]).must_equal 2 - _(results.timestamp).wont_be :nil? + results = db[dialect].read "accounts", ["account_id"], single_use: { timestamp: timestamp } + _(results.rows.count).must_equal 3 + _(results.timestamp).wont_be :nil? - activate_inactive_account = { account_id: 3, active: true } + active_count_sql = "SELECT COUNT(*) AS count FROM accounts WHERE active = true" - timestamp = db.commit do |c| - c.upsert "accounts", activate_inactive_account - end + results = db[dialect].execute_query active_count_sql, single_use: { timestamp: timestamp } + _(results.rows.first[:count]).must_equal 2 + _(results.timestamp).wont_be :nil? - results = db.execute_query active_count_sql, single_use: { timestamp: timestamp } - _(results.rows.first[:count]).must_equal 3 - _(results.timestamp).wont_be :nil? + activate_inactive_account = { account_id: 3, active: true } - timestamp = db.commit do |c| - c.delete "accounts", [1, 2, 3] - end + timestamp = db[dialect].commit do |c| + c.upsert "accounts", activate_inactive_account + end - results = db.read "accounts", ["account_id"], single_use: { timestamp: timestamp } - _(results.rows.count).must_equal 0 - _(results.timestamp).wont_be :nil? - end + results = db[dialect].execute_query active_count_sql, single_use: { timestamp: timestamp } + _(results.rows.first[:count]).must_equal 3 + _(results.timestamp).wont_be :nil? - it "inserts, updates, upserts, reads, and deletes records using commit and return commit stats" do - skip if emulator_enabled? 
+ timestamp = db[dialect].commit do |c| + c.delete "accounts", [1, 2, 3] + end - commit_options = { return_commit_stats: true } - commit_resp = db.commit commit_options: commit_options do |c| - c.insert "accounts", default_account_rows[0] + results = db[dialect].read "accounts", ["account_id"], single_use: { timestamp: timestamp } + _(results.rows.count).must_equal 0 + _(results.timestamp).wont_be :nil? end - assert_commit_response commit_resp, commit_options + it "inserts, updates, upserts, reads, and deletes records using commit and return commit stats" do + skip if emulator_enabled? - results = db.read "accounts", ["account_id"], single_use: { timestamp: commit_resp.timestamp } - _(results.rows.count).must_equal 1 - _(results.timestamp).wont_be :nil? + commit_options = { return_commit_stats: true } + commit_resp = db[dialect].commit commit_options: commit_options do |c| + c.insert "accounts", @default_rows[dialect][0] + end - commit_resp = db.commit commit_options: commit_options do |c| - c.upsert "accounts", default_account_rows[0] - end + assert_commit_response commit_resp, commit_options - assert_commit_response commit_resp, commit_options + results = db[dialect].read "accounts", ["account_id"], single_use: { timestamp: commit_resp.timestamp } + _(results.rows.count).must_equal 1 + _(results.timestamp).wont_be :nil? - commit_resp = db.commit commit_options: commit_options do |c| - c.delete "accounts", [1] - end + commit_resp = db[dialect].commit commit_options: commit_options do |c| + c.upsert "accounts", @default_rows[dialect][0] + end - assert_commit_response commit_resp, commit_options + assert_commit_response commit_resp, commit_options - results = db.read "accounts", ["account_id"], single_use: { timestamp: commit_resp.timestamp } - _(results.rows.count).must_equal 0 - _(results.timestamp).wont_be :nil? 
- end + commit_resp = db[dialect].commit commit_options: commit_options do |c| + c.delete "accounts", [1] + end - it "inserts, updates, upserts, reads, and deletes records in a transaction" do - timestamp = @setup_timestamp - active_count_sql = "SELECT COUNT(*) AS count FROM accounts WHERE active = true" + assert_commit_response commit_resp, commit_options - db.transaction do |tx| - _(tx.read("accounts", ["account_id"]).rows.count).must_equal 0 - - tx.insert "accounts", default_account_rows[0] - tx.upsert "accounts", default_account_rows[1] - tx.insert "accounts", default_account_rows[2] + results = db[dialect].read "accounts", ["account_id"], single_use: { timestamp: commit_resp.timestamp } + _(results.rows.count).must_equal 0 + _(results.timestamp).wont_be :nil? end - timestamp = db.transaction do |tx| - _(db.read("accounts", ["account_id"]).rows.count).must_equal 3 + it "inserts, updates, upserts, reads, and deletes records in a transaction" do + timestamp = @setup_timestamp[dialect] + active_count_sql = "SELECT COUNT(*) AS count FROM accounts WHERE active = true" - _(tx.execute_query(active_count_sql).rows.first[:count]).must_equal 2 + db[dialect].transaction do |tx| + _(tx.read("accounts", ["account_id"]).rows.count).must_equal 0 - activate_inactive_account = { account_id: 3, active: true } + tx.insert "accounts", @default_rows[dialect][0] + tx.upsert "accounts", @default_rows[dialect][1] + tx.insert "accounts", @default_rows[dialect][2] + end - tx.upsert "accounts", activate_inactive_account - end + timestamp = db[dialect].transaction do |tx| + _(db[dialect].read("accounts", ["account_id"]).rows.count).must_equal 3 - timestamp = db.transaction do |tx| - _(tx.execute_query(active_count_sql).rows.first[:count]).must_equal 3 + _(tx.execute_query(active_count_sql).rows.first[:count]).must_equal 2 - tx.delete "accounts", [1, 2, 3] - end + activate_inactive_account = { account_id: 3, active: true } - results = db.read "accounts", ["account_id"], single_use: { 
timestamp: timestamp } - _(results.rows.count).must_equal 0 - _(results.timestamp).wont_be :nil? - end + tx.upsert "accounts", activate_inactive_account + end + + timestamp = db[dialect].transaction do |tx| + _(tx.execute_query(active_count_sql).rows.first[:count]).must_equal 3 - describe "request options" do - it "execute CRUD statement with priority options" do - request_options = { priority: :PRIORITY_MEDIUM } - results = db.read "accounts", ["account_id"], request_options: request_options + tx.delete "accounts", [1, 2, 3] + end + + results = db[dialect].read "accounts", ["account_id"], single_use: { timestamp: timestamp } _(results.rows.count).must_equal 0 + _(results.timestamp).wont_be :nil? + end - db.insert "accounts", default_account_rows[0], request_options: request_options - db.upsert "accounts", default_account_rows[1], request_options: request_options + describe "request options" do + it "execute CRUD statement with priority options" do + request_options = { priority: :PRIORITY_MEDIUM } + results = db[dialect].read "accounts", ["account_id"], request_options: request_options + _(results.rows.count).must_equal 0 - results = db.read "accounts", ["account_id"] - _(results.rows.count).must_equal 2 + db[dialect].insert "accounts", @default_rows[dialect][0], request_options: request_options + db[dialect].upsert "accounts", @default_rows[dialect][1], request_options: request_options - db.replace "accounts", default_account_rows[0], request_options: request_options - db.delete "accounts", [1, 2, 3], request_options: request_options + results = db[dialect].read "accounts", ["account_id"] + _(results.rows.count).must_equal 2 - results = db.read "accounts", ["account_id"] - _(results.rows.count).must_equal 0 + db[dialect].replace "accounts", @default_rows[dialect][0], request_options: request_options + db[dialect].delete "accounts", [1, 2, 3], request_options: request_options + + results = db[dialect].read "accounts", ["account_id"] + 
_(results.rows.count).must_equal 0 + end end - end - it "inserts, updates, upserts, reads, and deletes records with request tagging options" do - timestamp = db.insert "accounts", default_account_rows[0], - request_options: { tag: "Tag-CRUD-1" } - _(timestamp).wont_be :nil? + it "inserts, updates, upserts, reads, and deletes records with request tagging options" do + timestamp = db[dialect].insert "accounts", @default_rows[dialect][0], + request_options: { tag: "Tag-CRUD-1" } + _(timestamp).wont_be :nil? - results = db.read "accounts", ["account_id"], single_use: { timestamp: @setup_timestamp }, - request_options: { tag: "Tag-CRUD-2" } - _(results.timestamp).wont_be :nil? + results = db[dialect].read "accounts", ["account_id"], single_use: { timestamp: @setup_timestamp[dialect] }, + request_options: { tag: "Tag-CRUD-2" } + _(results.timestamp).wont_be :nil? - timestamp = db.update "accounts", default_account_rows[0], - request_options: { tag: "Tag-CRUD-2" } - _(timestamp).wont_be :nil? + timestamp = db[dialect].update "accounts", @default_rows[dialect][0], + request_options: { tag: "Tag-CRUD-2" } + _(timestamp).wont_be :nil? - timestamp = db.upsert "accounts", default_account_rows[1], - request_options: { tag: "Tag-CRUD-4" } - _(timestamp).wont_be :nil? + timestamp = db[dialect].upsert "accounts", @default_rows[dialect][1], + request_options: { tag: "Tag-CRUD-4" } + _(timestamp).wont_be :nil? - timestamp = db.delete "accounts", [1, 2, 3], - request_options: { tag: "Tag-CRUD-5" } - _(timestamp).wont_be :nil? - end + timestamp = db[dialect].delete "accounts", [1, 2, 3], + request_options: { tag: "Tag-CRUD-5" } + _(timestamp).wont_be :nil? 
+ end + end end From 3cace84adb3c94f678604489b481914a2d8f8513 Mon Sep 17 00:00:00 2001 From: Nivedha Date: Mon, 14 Feb 2022 16:09:29 +0530 Subject: [PATCH 03/19] update dml test for pg --- .../acceptance/spanner/client/dml_test.rb | 203 ++++++++++-------- 1 file changed, 113 insertions(+), 90 deletions(-) diff --git a/google-cloud-spanner/acceptance/spanner/client/dml_test.rb b/google-cloud-spanner/acceptance/spanner/client/dml_test.rb index 6e48ab97710f..e666edd38eca 100644 --- a/google-cloud-spanner/acceptance/spanner/client/dml_test.rb +++ b/google-cloud-spanner/acceptance/spanner/client/dml_test.rb @@ -16,122 +16,145 @@ require "concurrent" describe "Spanner Client", :dml, :spanner do - let(:db) { spanner_client } + let(:db) { {gsql: spanner_client, pg: spanner_pg_client} } + let(:insert_dml) {{gsql:"INSERT INTO accounts (account_id, username, active, reputation) VALUES (@account_id, @username, @active, @reputation)", + pg:"INSERT INTO accounts (account_id, username, active, reputation) VALUES ($1, $2, $3, $4)" + }} + let(:update_dml) {{ gsql: "UPDATE accounts SET username = @username, active = @active WHERE account_id = @account_id", + pg: "UPDATE accounts SET username = $2, active = $3 WHERE account_id = $1", + }} + let(:select_dql) {{ gsql: "SELECT username FROM accounts WHERE account_id = @account_id", + pg: "SELECT username FROM accounts WHERE account_id = $1" + }} + let(:insert_params) {{ gsql: { account_id: 4, username: "inserted", active: true, reputation: 88.8 }, + pg: { p1: 4, p2: "inserted", p3: true, p4: 88.8 } + }} + let(:update_params) {{ gsql: { account_id: 4, username: "updated", active: false }, + pg: { p1: 4, p2: "updated", p3: false } + }} + let(:select_params) { { gsql: { account_id: 4 }, pg: { p1: 4 } } } before do - db.commit do |c| + db[:gsql].commit do |c| c.delete "accounts" c.insert "accounts", default_account_rows end + db[:pg].commit do |c| + c.delete "accounts" + c.insert "accounts", default_pg_account_rows + end end after do - 
db.delete "accounts" - end - - it "executes multiple DML statements in a transaction" do - prior_results = db.execute_sql "SELECT * FROM accounts" - _(prior_results.rows.count).must_equal 3 - - timestamp = db.transaction do |tx| - _(tx.transaction_id).wont_be :nil? - - # Execute a DML using execute_update and make sure data is updated and correct count is returned. - insert_row_count = tx.execute_update \ - "INSERT INTO accounts (account_id, username, active, reputation) VALUES (@account_id, @username, @active, @reputation)", - params: { account_id: 4, username: "inserted", active: true, reputation: 88.8 } - _(insert_row_count).must_equal 1 - - insert_results = tx.execute_sql \ - "SELECT username FROM accounts WHERE account_id = @account_id", - params: { account_id: 4 } - insert_rows = insert_results.rows.to_a - _(insert_rows.count).must_equal 1 - _(insert_rows.first[:username]).must_equal "inserted" - - # Execute a DML using execute_sql and make sure data is updated and correct count is returned. - update_results = tx.execute_sql \ - "UPDATE accounts SET username = @username, active = @active WHERE account_id = @account_id", - params: { account_id: 4, username: "updated", active: false } - update_results.rows.to_a # fetch all the results - _(update_results).must_be :row_count_exact? 
- _(update_results.row_count).must_equal 1 - - update_results = tx.execute_sql \ - "SELECT username FROM accounts WHERE account_id = @account_id", - params: { account_id: 4 } - update_rows = update_results.rows.to_a - _(update_rows.count).must_equal 1 - _(update_rows.first[:username]).must_equal "updated" - end - _(timestamp).must_be_kind_of Time - - post_results = db.execute_sql "SELECT * FROM accounts", single_use: { timestamp: timestamp } - _(post_results.rows.count).must_equal 4 + db[:pg].delete "accounts" + db[:gsql].delete "accounts" end - it "executes a DML statement, then rollback the transaction" do - prior_results = db.execute_sql "SELECT * FROM accounts" - _(prior_results.rows.count).must_equal 3 + [:gsql, :pg].each do |dialect| + it "executes multiple DML statements in a transaction" do + prior_results = db[dialect].execute_sql "SELECT * FROM accounts" + _(prior_results.rows.count).must_equal 3 - timestamp = db.transaction do |tx| - _(tx.transaction_id).wont_be :nil? + timestamp = db[dialect].transaction do |tx| + _(tx.transaction_id).wont_be :nil? - # Execute a DML using execute_update and make sure data is updated and correct count is returned. - insert_row_count = tx.execute_update \ - "INSERT INTO accounts (account_id, username, active, reputation) VALUES (@account_id, @username, @active, @reputation)", - params: { account_id: 4, username: "inserted", active: true, reputation: 88.8 } - _(insert_row_count).must_equal 1 + # Execute a DML using execute_update and make sure data is updated and correct count is returned. 
+ insert_row_count = tx.execute_update \ + insert_dml[dialect], + params: insert_params[dialect] + _(insert_row_count).must_equal 1 - insert_results = tx.execute_sql \ - "SELECT username FROM accounts WHERE account_id = @account_id", - params: { account_id: 4 } - insert_rows = insert_results.rows.to_a - _(insert_rows.count).must_equal 1 - _(insert_rows.first[:username]).must_equal "inserted" + insert_results = tx.execute_sql \ + select_dql[dialect], + params: select_params[dialect] + insert_rows = insert_results.rows.to_a + _(insert_rows.count).must_equal 1 + _(insert_rows.first[:username]).must_equal "inserted" + + # Execute a DML using execute_sql and make sure data is updated and correct count is returned. + update_results = tx.execute_sql \ + update_dml[dialect], + params: update_params[dialect] + update_results.rows.to_a # fetch all the results + _(update_results).must_be :row_count_exact? + _(update_results.row_count).must_equal 1 + + update_results = tx.execute_sql \ + select_dql[dialect], + params: select_params[dialect] + update_rows = update_results.rows.to_a + _(update_rows.count).must_equal 1 + _(update_rows.first[:username]).must_equal "updated" + end + _(timestamp).must_be_kind_of Time - # Execute a DML statement, then rollback the transaction and assert that data is not updated. - raise Google::Cloud::Spanner::Rollback + post_results = db[dialect].execute_sql "SELECT * FROM accounts", single_use: { timestamp: timestamp } + _(post_results.rows.count).must_equal 4 end - _(timestamp).must_be :nil? 
# because the transaction was rolled back - post_results = db.execute_sql "SELECT * FROM accounts" - _(post_results.rows.count).must_equal 3 - end + it "executes a DML statement, then rollback the transaction" do + prior_results = db[dialect].execute_sql "SELECT * FROM accounts" + _(prior_results.rows.count).must_equal 3 - it "executes a DML statement, then a mutation" do - prior_results = db.execute_sql "SELECT * FROM accounts" - _(prior_results.rows.count).must_equal 3 + timestamp = db[dialect].transaction do |tx| + _(tx.transaction_id).wont_be :nil? - timestamp = db.transaction do |tx| - _(tx.transaction_id).wont_be :nil? + # Execute a DML using execute_update and make sure data is updated and correct count is returned. + insert_row_count = tx.execute_update \ + insert_dml[dialect], + params: insert_params[dialect] + _(insert_row_count).must_equal 1 + + insert_results = tx.execute_sql \ + select_dql[dialect], + params: select_params[dialect] + insert_rows = insert_results.rows.to_a + _(insert_rows.count).must_equal 1 + _(insert_rows.first[:username]).must_equal "inserted" - # Execute a DML statement, followed by calling existing insert method, commit the transaction and assert that both the updates are present. - insert_row_count = tx.execute_update \ - "INSERT INTO accounts (account_id, username, active, reputation) VALUES (@account_id, @username, @active, @reputation)", - params: { account_id: 4, username: "inserted by DML", active: true, reputation: 88.8 } - _(insert_row_count).must_equal 1 + # Execute a DML statement, then rollback the transaction and assert that data is not updated. + raise Google::Cloud::Spanner::Rollback + end + _(timestamp).must_be :nil? 
# because the transaction was rolled back - insert_mut_rows = tx.insert "accounts", { account_id: 5, username: "inserted by mutation", active: true, reputation: 99.9 } - _(insert_mut_rows.count).must_equal 1 + post_results = db[dialect].execute_sql "SELECT * FROM accounts" + _(post_results.rows.count).must_equal 3 end - _(timestamp).must_be_kind_of Time - post_results = db.execute_sql "SELECT * FROM accounts", single_use: { timestamp: timestamp } - _(post_results.rows.count).must_equal 5 - end + it "executes a DML statement, then a mutation" do + prior_results = db[dialect].execute_sql "SELECT * FROM accounts" + _(prior_results.rows.count).must_equal 3 - describe "request options" do - it "execute DML statement with priority options" do - request_options = { priority: :PRIORITY_MEDIUM } + timestamp = db[dialect].transaction do |tx| + _(tx.transaction_id).wont_be :nil? - db.transaction request_options: request_options do |tx| + # Execute a DML statement, followed by calling existing insert method, commit the transaction and assert that both the updates are present. 
insert_row_count = tx.execute_update \ - "INSERT INTO accounts (account_id, username, active, reputation) VALUES (@account_id, @username, @active, @reputation)", - params: { account_id: 4, username: "inserted", active: true, reputation: 88.8 }, - request_options: request_options + insert_dml[dialect], + params: insert_params[dialect] _(insert_row_count).must_equal 1 + + insert_mut_rows = tx.insert "accounts", { account_id: 5, username: "inserted by mutation", active: true, reputation: 99.9 } + _(insert_mut_rows.count).must_equal 1 end + _(timestamp).must_be_kind_of Time + + post_results = db[dialect].execute_sql "SELECT * FROM accounts", single_use: { timestamp: timestamp } + _(post_results.rows.count).must_equal 5 end - end + + describe "request options" do + it "execute DML statement with priority options" do + request_options = { priority: :PRIORITY_MEDIUM } + + db[dialect].transaction request_options: request_options do |tx| + insert_row_count = tx.execute_update \ + insert_dml[dialect], + params: insert_params[dialect], + request_options: request_options + _(insert_row_count).must_equal 1 + end + end + end + end end From 69bcb965339c20d45445c839975d3b69c2980c13 Mon Sep 17 00:00:00 2001 From: Nivedha Date: Mon, 14 Feb 2022 16:40:47 +0530 Subject: [PATCH 04/19] update execute test for pg --- .../acceptance/spanner/client/execute_test.rb | 349 +++++++++--------- 1 file changed, 175 insertions(+), 174 deletions(-) diff --git a/google-cloud-spanner/acceptance/spanner/client/execute_test.rb b/google-cloud-spanner/acceptance/spanner/client/execute_test.rb index 7b1d4567f1ea..960f96f1c7e0 100644 --- a/google-cloud-spanner/acceptance/spanner/client/execute_test.rb +++ b/google-cloud-spanner/acceptance/spanner/client/execute_test.rb @@ -15,178 +15,142 @@ require "spanner_helper" describe "Spanner Client", :execute_sql, :spanner do - let(:db) { spanner_client } - - it "runs SELECT 1" do - results = db.execute_sql "SELECT 1" - _(results).must_be_kind_of 
Google::Cloud::Spanner::Results - - _(results.fields).must_be_kind_of Google::Cloud::Spanner::Fields - _(results.fields.keys.count).must_equal 1 - _(results.fields[0]).must_equal :INT64 - - rows = results.rows.to_a # grab all from the enumerator - _(rows.count).must_equal 1 - row = rows.first - _(row).must_be_kind_of Google::Cloud::Spanner::Data - _(row.keys).must_equal [0] - _(row[0]).must_equal 1 - end - - it "runs a simple query" do - results = db.execute_sql "SELECT 42 AS num" - _(results).must_be_kind_of Google::Cloud::Spanner::Results - - _(results.fields).must_be_kind_of Google::Cloud::Spanner::Fields - _(results.fields.keys.count).must_equal 1 - _(results.fields[:num]).must_equal :INT64 - - rows = results.rows.to_a # grab all from the enumerator - _(rows.count).must_equal 1 - row = rows.first - _(row).must_be_kind_of Google::Cloud::Spanner::Data - _(row.keys).must_equal [:num] - _(row[:num]).must_equal 42 - end - - it "runs a simple query using a single-use strong option" do - results = db.execute_sql "SELECT 42 AS num", single_use: { strong: true } - _(results).must_be_kind_of Google::Cloud::Spanner::Results - - _(results.fields).must_be_kind_of Google::Cloud::Spanner::Fields - _(results.fields.keys.count).must_equal 1 - _(results.fields[:num]).must_equal :INT64 - - rows = results.rows.to_a # grab all from the enumerator - _(rows.count).must_equal 1 - row = rows.first - _(row).must_be_kind_of Google::Cloud::Spanner::Data - _(row.keys).must_equal [:num] - _(row[:num]).must_equal 42 - end - - it "runs a simple query using a single-use timestamp option" do - results = db.execute_sql "SELECT 42 AS num", single_use: { timestamp: (Time.now - 60) } - _(results).must_be_kind_of Google::Cloud::Spanner::Results - - _(results.fields).must_be_kind_of Google::Cloud::Spanner::Fields - _(results.fields.keys.count).must_equal 1 - _(results.fields[:num]).must_equal :INT64 - - rows = results.rows.to_a # grab all from the enumerator - _(rows.count).must_equal 1 - row = 
rows.first - _(row).must_be_kind_of Google::Cloud::Spanner::Data - _(row.keys).must_equal [:num] - _(row[:num]).must_equal 42 - end - - it "runs a simple query using a single-use staleness option" do - results = db.execute_sql "SELECT 42 AS num", single_use: { staleness: 60 } - _(results).must_be_kind_of Google::Cloud::Spanner::Results - - _(results.fields).must_be_kind_of Google::Cloud::Spanner::Fields - _(results.fields.keys.count).must_equal 1 - _(results.fields[:num]).must_equal :INT64 - - rows = results.rows.to_a # grab all from the enumerator - _(rows.count).must_equal 1 - row = rows.first - _(row).must_be_kind_of Google::Cloud::Spanner::Data - _(row.keys).must_equal [:num] - _(row[:num]).must_equal 42 - end - - it "runs a simple query using a single-use bounded_timestamp option" do - results = db.execute_sql "SELECT 42 AS num", single_use: { bounded_timestamp: (Time.now - 60) } - _(results).must_be_kind_of Google::Cloud::Spanner::Results - - _(results.fields).must_be_kind_of Google::Cloud::Spanner::Fields - _(results.fields.keys.count).must_equal 1 - _(results.fields[:num]).must_equal :INT64 - - rows = results.rows.to_a # grab all from the enumerator - _(rows.count).must_equal 1 - row = rows.first - _(row).must_be_kind_of Google::Cloud::Spanner::Data - _(row.keys).must_equal [:num] - _(row[:num]).must_equal 42 - end - - it "runs a simple query using a single-use bounded_staleness option" do - results = db.execute_sql "SELECT 42 AS num", single_use: { bounded_staleness: 60 } - _(results).must_be_kind_of Google::Cloud::Spanner::Results - - _(results.fields).must_be_kind_of Google::Cloud::Spanner::Fields - _(results.fields.keys.count).must_equal 1 - _(results.fields[:num]).must_equal :INT64 - - rows = results.rows.to_a # grab all from the enumerator - _(rows.count).must_equal 1 - row = rows.first - _(row).must_be_kind_of Google::Cloud::Spanner::Data - _(row.keys).must_equal [:num] - _(row[:num]).must_equal 42 - end - - it "runs a simple query with query 
options" do - query_options = { optimizer_version: "3", optimizer_statistics_package: "latest" } - results = db.execute_sql "SELECT 42 AS num", query_options: query_options - _(results).must_be_kind_of Google::Cloud::Spanner::Results - - _(results.fields).must_be_kind_of Google::Cloud::Spanner::Fields - _(results.fields.keys.count).must_equal 1 - _(results.fields[:num]).must_equal :INT64 - - rows = results.rows.to_a # grab all from the enumerator - _(rows.count).must_equal 1 - row = rows.first - _(row).must_be_kind_of Google::Cloud::Spanner::Data - _(row.keys).must_equal [:num] - _(row[:num]).must_equal 42 - end - - it "runs a simple query when the client-level config of query options is set" do - query_options = { optimizer_version: "3", optimizer_statistics_package: "latest" } - new_spanner = Google::Cloud::Spanner.new - new_db = new_spanner.client db.instance_id, db.database_id, query_options: query_options - _(new_db.query_options).must_equal({ optimizer_version: "3", optimizer_statistics_package: "latest" }) - - results = new_db.execute_sql "SELECT 42 AS num" - _(results).must_be_kind_of Google::Cloud::Spanner::Results - - _(results.fields).must_be_kind_of Google::Cloud::Spanner::Fields - _(results.fields.keys.count).must_equal 1 - _(results.fields[:num]).must_equal :INT64 - - rows = results.rows.to_a # grab all from the enumerator - _(rows.count).must_equal 1 - row = rows.first - _(row).must_be_kind_of Google::Cloud::Spanner::Data - _(row.keys).must_equal [:num] - _(row[:num]).must_equal 42 - end - - describe "when the environment variable of query options is set" do - let(:origin_opt_version) { nil } - let(:origin_opt_stats_pkg) { nil } - - before do - origin_opt_version = ENV["SPANNER_OPTIMIZER_VERSION"] - ENV["SPANNER_OPTIMIZER_VERSION"] = "3" - origin_opt_stats_pkg = ENV["SPANNER_OPTIMIZER_STATISTICS_PACKAGE"] - ENV["SPANNER_OPTIMIZER_STATISTICS_PACKAGE"] = "latest" + let(:db) { {gsql: spanner_client, pg: spanner_pg_client} } + + [:gsql, :pg].each do 
|dialect| + it "runs SELECT 1" do + results = db[dialect].execute_sql "SELECT 1" + _(results).must_be_kind_of Google::Cloud::Spanner::Results + + _(results.fields).must_be_kind_of Google::Cloud::Spanner::Fields + _(results.fields.keys.count).must_equal 1 + _(results.fields[0]).must_equal :INT64 + + rows = results.rows.to_a # grab all from the enumerator + _(rows.count).must_equal 1 + row = rows.first + _(row).must_be_kind_of Google::Cloud::Spanner::Data + _(row[0]).must_equal 1 end - after do - ENV["SPANNER_OPTIMIZER_VERSION"] = origin_opt_version - ENV["SPANNER_OPTIMIZER_STATISTICS_PACKAGE"] = origin_opt_stats_pkg + it "runs a simple query" do + results = db[dialect].execute_sql "SELECT 42 AS num" + _(results).must_be_kind_of Google::Cloud::Spanner::Results + + _(results.fields).must_be_kind_of Google::Cloud::Spanner::Fields + _(results.fields.keys.count).must_equal 1 + _(results.fields[:num]).must_equal :INT64 + + rows = results.rows.to_a # grab all from the enumerator + _(rows.count).must_equal 1 + row = rows.first + _(row).must_be_kind_of Google::Cloud::Spanner::Data + _(row.keys).must_equal [:num] + _(row[:num]).must_equal 42 end - it "runs a simple query " do + it "runs a simple query using a single-use strong option" do + results = db[dialect].execute_sql "SELECT 42 AS num", single_use: { strong: true } + _(results).must_be_kind_of Google::Cloud::Spanner::Results + + _(results.fields).must_be_kind_of Google::Cloud::Spanner::Fields + _(results.fields.keys.count).must_equal 1 + _(results.fields[:num]).must_equal :INT64 + + rows = results.rows.to_a # grab all from the enumerator + _(rows.count).must_equal 1 + row = rows.first + _(row).must_be_kind_of Google::Cloud::Spanner::Data + _(row.keys).must_equal [:num] + _(row[:num]).must_equal 42 + end + + it "runs a simple query using a single-use timestamp option" do + results = db[dialect].execute_sql "SELECT 42 AS num", single_use: { timestamp: (Time.now - 60) } + _(results).must_be_kind_of 
Google::Cloud::Spanner::Results + + _(results.fields).must_be_kind_of Google::Cloud::Spanner::Fields + _(results.fields.keys.count).must_equal 1 + _(results.fields[:num]).must_equal :INT64 + + rows = results.rows.to_a # grab all from the enumerator + _(rows.count).must_equal 1 + row = rows.first + _(row).must_be_kind_of Google::Cloud::Spanner::Data + _(row.keys).must_equal [:num] + _(row[:num]).must_equal 42 + end + + it "runs a simple query using a single-use staleness option" do + results = db[dialect].execute_sql "SELECT 42 AS num", single_use: { staleness: 60 } + _(results).must_be_kind_of Google::Cloud::Spanner::Results + + _(results.fields).must_be_kind_of Google::Cloud::Spanner::Fields + _(results.fields.keys.count).must_equal 1 + _(results.fields[:num]).must_equal :INT64 + + rows = results.rows.to_a # grab all from the enumerator + _(rows.count).must_equal 1 + row = rows.first + _(row).must_be_kind_of Google::Cloud::Spanner::Data + _(row.keys).must_equal [:num] + _(row[:num]).must_equal 42 + end + + it "runs a simple query using a single-use bounded_timestamp option" do + results = db[dialect].execute_sql "SELECT 42 AS num", single_use: { bounded_timestamp: (Time.now - 60) } + _(results).must_be_kind_of Google::Cloud::Spanner::Results + + _(results.fields).must_be_kind_of Google::Cloud::Spanner::Fields + _(results.fields.keys.count).must_equal 1 + _(results.fields[:num]).must_equal :INT64 + + rows = results.rows.to_a # grab all from the enumerator + _(rows.count).must_equal 1 + row = rows.first + _(row).must_be_kind_of Google::Cloud::Spanner::Data + _(row.keys).must_equal [:num] + _(row[:num]).must_equal 42 + end + + it "runs a simple query using a single-use bounded_staleness option" do + results = db[dialect].execute_sql "SELECT 42 AS num", single_use: { bounded_staleness: 60 } + _(results).must_be_kind_of Google::Cloud::Spanner::Results + + _(results.fields).must_be_kind_of Google::Cloud::Spanner::Fields + _(results.fields.keys.count).must_equal 1 + 
_(results.fields[:num]).must_equal :INT64 + + rows = results.rows.to_a # grab all from the enumerator + _(rows.count).must_equal 1 + row = rows.first + _(row).must_be_kind_of Google::Cloud::Spanner::Data + _(row.keys).must_equal [:num] + _(row[:num]).must_equal 42 + end + + it "runs a simple query with query options" do + query_options = { optimizer_version: "3", optimizer_statistics_package: "latest" } + results = db[dialect].execute_sql "SELECT 42 AS num", query_options: query_options + _(results).must_be_kind_of Google::Cloud::Spanner::Results + + _(results.fields).must_be_kind_of Google::Cloud::Spanner::Fields + _(results.fields.keys.count).must_equal 1 + _(results.fields[:num]).must_equal :INT64 + + rows = results.rows.to_a # grab all from the enumerator + _(rows.count).must_equal 1 + row = rows.first + _(row).must_be_kind_of Google::Cloud::Spanner::Data + _(row.keys).must_equal [:num] + _(row[:num]).must_equal 42 + end + + it "runs a simple query when the client-level config of query options is set" do + query_options = { optimizer_version: "3", optimizer_statistics_package: "latest" } new_spanner = Google::Cloud::Spanner.new - new_db = new_spanner.client db.instance_id, db.database_id - _(new_db.project.query_options).must_equal({ optimizer_version: "3", optimizer_statistics_package: "latest" }) + new_db = new_spanner.client db[dialect].instance_id, db[dialect].database_id, query_options: query_options + _(new_db.query_options).must_equal({ optimizer_version: "3", optimizer_statistics_package: "latest" }) results = new_db.execute_sql "SELECT 42 AS num" _(results).must_be_kind_of Google::Cloud::Spanner::Results @@ -202,14 +166,51 @@ _(row.keys).must_equal [:num] _(row[:num]).must_equal 42 end - end - describe "request options" do - it "run sample query with priority" do - results = db.execute_sql "SELECT 1", request_options: { priority: :PRIORITY_MEDIUM } - _(results).must_be_kind_of Google::Cloud::Spanner::Results + describe "when the environment variable of 
query options is set" do + let(:origin_opt_version) { nil } + let(:origin_opt_stats_pkg) { nil } + + before do + origin_opt_version = ENV["SPANNER_OPTIMIZER_VERSION"] + ENV["SPANNER_OPTIMIZER_VERSION"] = "3" + origin_opt_stats_pkg = ENV["SPANNER_OPTIMIZER_STATISTICS_PACKAGE"] + ENV["SPANNER_OPTIMIZER_STATISTICS_PACKAGE"] = "latest" + end + + after do + ENV["SPANNER_OPTIMIZER_VERSION"] = origin_opt_version + ENV["SPANNER_OPTIMIZER_STATISTICS_PACKAGE"] = origin_opt_stats_pkg + end + + it "runs a simple query " do + new_spanner = Google::Cloud::Spanner.new + new_db = new_spanner.client db[dialect].instance_id, db[dialect].database_id + _(new_db.project.query_options).must_equal({ optimizer_version: "3", optimizer_statistics_package: "latest" }) + + results = new_db.execute_sql "SELECT 42 AS num" + _(results).must_be_kind_of Google::Cloud::Spanner::Results + + _(results.fields).must_be_kind_of Google::Cloud::Spanner::Fields + _(results.fields.keys.count).must_equal 1 + _(results.fields[:num]).must_equal :INT64 + + rows = results.rows.to_a # grab all from the enumerator + _(rows.count).must_equal 1 + row = rows.first + _(row).must_be_kind_of Google::Cloud::Spanner::Data + _(row.keys).must_equal [:num] + _(row[:num]).must_equal 42 + end + end + + describe "request options" do + it "run sample query with priority" do + results = db[dialect].execute_sql "SELECT 1", request_options: { priority: :PRIORITY_MEDIUM } + _(results).must_be_kind_of Google::Cloud::Spanner::Results - _(results.rows.count).must_equal 1 + _(results.rows.count).must_equal 1 + end end - end + end end From b7aedac73b48c6748a0cbe417b498a92e364fe5c Mon Sep 17 00:00:00 2001 From: Nivedha Date: Mon, 14 Feb 2022 17:50:22 +0530 Subject: [PATCH 05/19] update pdml tests for pg --- .../acceptance/spanner/client/crud_test.rb | 14 ++-- .../acceptance/spanner/client/dml_test.rb | 10 +-- .../acceptance/spanner/client/execute_test.rb | 26 +++---- .../acceptance/spanner/client/pdml_test.rb | 69 ++++++++++--------- 4 
files changed, 63 insertions(+), 56 deletions(-) diff --git a/google-cloud-spanner/acceptance/spanner/client/crud_test.rb b/google-cloud-spanner/acceptance/spanner/client/crud_test.rb index 8935e8ffa1d4..915c50ee3b4a 100644 --- a/google-cloud-spanner/acceptance/spanner/client/crud_test.rb +++ b/google-cloud-spanner/acceptance/spanner/client/crud_test.rb @@ -25,7 +25,7 @@ end [:gsql, :pg].each do |dialect| - it "inserts, updates, upserts, reads, and deletes records" do + it "inserts, updates, upserts, reads, and deletes recordsfor #{dialect}" do results = db[dialect].read "accounts", ["account_id"], single_use: { timestamp: @setup_timestamp[dialect] } _(results.rows.count).must_equal 0 _(results.timestamp).wont_be :nil? @@ -59,7 +59,7 @@ _(results.timestamp).wont_be :nil? end - it "inserts, updates, upserts, reads, and deletes records using commit" do + it "inserts, updates, upserts, reads, and deletes records using commitfor #{dialect}" do results = db[dialect].read "accounts", ["account_id"], single_use: { timestamp: @setup_timestamp[dialect] } _(results.rows.count).must_equal 0 _(results.timestamp).wont_be :nil? @@ -99,7 +99,7 @@ _(results.timestamp).wont_be :nil? end - it "inserts, updates, upserts, reads, and deletes records using commit and return commit stats" do + it "inserts, updates, upserts, reads, and deletes records using commit and return commit statsfor #{dialect}" do skip if emulator_enabled? commit_options = { return_commit_stats: true } @@ -130,7 +130,7 @@ _(results.timestamp).wont_be :nil? end - it "inserts, updates, upserts, reads, and deletes records in a transaction" do + it "inserts, updates, upserts, reads, and deletes records in a transactionfor #{dialect}" do timestamp = @setup_timestamp[dialect] active_count_sql = "SELECT COUNT(*) AS count FROM accounts WHERE active = true" @@ -163,8 +163,8 @@ _(results.timestamp).wont_be :nil? 
end - describe "request options" do - it "execute CRUD statement with priority options" do + describe "request optionsfor #{dialect}" do + it "execute CRUD statement with priority optionsfor #{dialect}" do request_options = { priority: :PRIORITY_MEDIUM } results = db[dialect].read "accounts", ["account_id"], request_options: request_options _(results.rows.count).must_equal 0 @@ -183,7 +183,7 @@ end end - it "inserts, updates, upserts, reads, and deletes records with request tagging options" do + it "inserts, updates, upserts, reads, and deletes records with request tagging optionsfor #{dialect}" do timestamp = db[dialect].insert "accounts", @default_rows[dialect][0], request_options: { tag: "Tag-CRUD-1" } _(timestamp).wont_be :nil? diff --git a/google-cloud-spanner/acceptance/spanner/client/dml_test.rb b/google-cloud-spanner/acceptance/spanner/client/dml_test.rb index e666edd38eca..623ea7ee7a5a 100644 --- a/google-cloud-spanner/acceptance/spanner/client/dml_test.rb +++ b/google-cloud-spanner/acceptance/spanner/client/dml_test.rb @@ -51,7 +51,7 @@ end [:gsql, :pg].each do |dialect| - it "executes multiple DML statements in a transaction" do + it "executes multiple DML statements in a transactionfor #{dialect}" do prior_results = db[dialect].execute_sql "SELECT * FROM accounts" _(prior_results.rows.count).must_equal 3 @@ -92,7 +92,7 @@ _(post_results.rows.count).must_equal 4 end - it "executes a DML statement, then rollback the transaction" do + it "executes a DML statement, then rollback the transactionfor #{dialect}" do prior_results = db[dialect].execute_sql "SELECT * FROM accounts" _(prior_results.rows.count).must_equal 3 @@ -121,7 +121,7 @@ _(post_results.rows.count).must_equal 3 end - it "executes a DML statement, then a mutation" do + it "executes a DML statement, then a mutationfor #{dialect}" do prior_results = db[dialect].execute_sql "SELECT * FROM accounts" _(prior_results.rows.count).must_equal 3 @@ -143,8 +143,8 @@ _(post_results.rows.count).must_equal 5 
end - describe "request options" do - it "execute DML statement with priority options" do + describe "request optionsfor #{dialect}" do + it "execute DML statement with priority optionsfor #{dialect}" do request_options = { priority: :PRIORITY_MEDIUM } db[dialect].transaction request_options: request_options do |tx| diff --git a/google-cloud-spanner/acceptance/spanner/client/execute_test.rb b/google-cloud-spanner/acceptance/spanner/client/execute_test.rb index 960f96f1c7e0..59a5a10da8dc 100644 --- a/google-cloud-spanner/acceptance/spanner/client/execute_test.rb +++ b/google-cloud-spanner/acceptance/spanner/client/execute_test.rb @@ -18,7 +18,7 @@ let(:db) { {gsql: spanner_client, pg: spanner_pg_client} } [:gsql, :pg].each do |dialect| - it "runs SELECT 1" do + it "runs SELECT 1for #{dialect}" do results = db[dialect].execute_sql "SELECT 1" _(results).must_be_kind_of Google::Cloud::Spanner::Results @@ -33,7 +33,7 @@ _(row[0]).must_equal 1 end - it "runs a simple query" do + it "runs a simple queryfor #{dialect}" do results = db[dialect].execute_sql "SELECT 42 AS num" _(results).must_be_kind_of Google::Cloud::Spanner::Results @@ -49,7 +49,7 @@ _(row[:num]).must_equal 42 end - it "runs a simple query using a single-use strong option" do + it "runs a simple query using a single-use strong optionfor #{dialect}" do results = db[dialect].execute_sql "SELECT 42 AS num", single_use: { strong: true } _(results).must_be_kind_of Google::Cloud::Spanner::Results @@ -65,7 +65,7 @@ _(row[:num]).must_equal 42 end - it "runs a simple query using a single-use timestamp option" do + it "runs a simple query using a single-use timestamp optionfor #{dialect}" do results = db[dialect].execute_sql "SELECT 42 AS num", single_use: { timestamp: (Time.now - 60) } _(results).must_be_kind_of Google::Cloud::Spanner::Results @@ -81,7 +81,7 @@ _(row[:num]).must_equal 42 end - it "runs a simple query using a single-use staleness option" do + it "runs a simple query using a single-use staleness 
optionfor #{dialect}" do results = db[dialect].execute_sql "SELECT 42 AS num", single_use: { staleness: 60 } _(results).must_be_kind_of Google::Cloud::Spanner::Results @@ -97,7 +97,7 @@ _(row[:num]).must_equal 42 end - it "runs a simple query using a single-use bounded_timestamp option" do + it "runs a simple query using a single-use bounded_timestamp optionfor #{dialect}" do results = db[dialect].execute_sql "SELECT 42 AS num", single_use: { bounded_timestamp: (Time.now - 60) } _(results).must_be_kind_of Google::Cloud::Spanner::Results @@ -113,7 +113,7 @@ _(row[:num]).must_equal 42 end - it "runs a simple query using a single-use bounded_staleness option" do + it "runs a simple query using a single-use bounded_staleness optionfor #{dialect}" do results = db[dialect].execute_sql "SELECT 42 AS num", single_use: { bounded_staleness: 60 } _(results).must_be_kind_of Google::Cloud::Spanner::Results @@ -129,7 +129,7 @@ _(row[:num]).must_equal 42 end - it "runs a simple query with query options" do + it "runs a simple query with query optionsfor #{dialect}" do query_options = { optimizer_version: "3", optimizer_statistics_package: "latest" } results = db[dialect].execute_sql "SELECT 42 AS num", query_options: query_options _(results).must_be_kind_of Google::Cloud::Spanner::Results @@ -146,7 +146,7 @@ _(row[:num]).must_equal 42 end - it "runs a simple query when the client-level config of query options is set" do + it "runs a simple query when the client-level config of query options is setfor #{dialect}" do query_options = { optimizer_version: "3", optimizer_statistics_package: "latest" } new_spanner = Google::Cloud::Spanner.new new_db = new_spanner.client db[dialect].instance_id, db[dialect].database_id, query_options: query_options @@ -167,7 +167,7 @@ _(row[:num]).must_equal 42 end - describe "when the environment variable of query options is set" do + describe "when the environment variable of query options is setfor #{dialect}" do let(:origin_opt_version) { nil } 
let(:origin_opt_stats_pkg) { nil } @@ -183,7 +183,7 @@ ENV["SPANNER_OPTIMIZER_STATISTICS_PACKAGE"] = origin_opt_stats_pkg end - it "runs a simple query " do + it "runs a simple query for #{dialect}" do new_spanner = Google::Cloud::Spanner.new new_db = new_spanner.client db[dialect].instance_id, db[dialect].database_id _(new_db.project.query_options).must_equal({ optimizer_version: "3", optimizer_statistics_package: "latest" }) @@ -204,8 +204,8 @@ end end - describe "request options" do - it "run sample query with priority" do + describe "request optionsfor #{dialect}" do + it "run sample query with priorityfor #{dialect}" do results = db[dialect].execute_sql "SELECT 1", request_options: { priority: :PRIORITY_MEDIUM } _(results).must_be_kind_of Google::Cloud::Spanner::Results diff --git a/google-cloud-spanner/acceptance/spanner/client/pdml_test.rb b/google-cloud-spanner/acceptance/spanner/client/pdml_test.rb index dae60818c37e..fe56f6cddea9 100644 --- a/google-cloud-spanner/acceptance/spanner/client/pdml_test.rb +++ b/google-cloud-spanner/acceptance/spanner/client/pdml_test.rb @@ -16,54 +16,61 @@ require "concurrent" describe "Spanner Client", :pdml, :spanner do - let(:db) { spanner_client } + let(:db) { {gsql: spanner_client, pg: spanner_pg_client} } before do - db.commit do |c| + db[:gsql].commit do |c| c.delete "accounts" c.insert "accounts", default_account_rows end + db[:pg].commit do |c| + c.delete "accounts" + c.insert "accounts", default_pg_account_rows + end end after do - db.delete "accounts" + db[:gsql].delete "accounts" + db[:pg].delete "accounts" end - it "executes a simple Partitioned DML statement" do - prior_results = db.execute_sql "SELECT * FROM accounts WHERE active = TRUE" - _(prior_results.rows.count).must_equal 2 + [:gsql, :pg].each do |dialect| + it "executes a simple Partitioned DML statementfor #{dialect}" do + prior_results = db[dialect].execute_sql "SELECT * FROM accounts WHERE active = TRUE" + _(prior_results.rows.count).must_equal 2 - 
pdml_row_count = db.execute_partition_update "UPDATE accounts a SET a.active = TRUE WHERE a.active = FALSE" - _(pdml_row_count).must_equal 1 + pdml_row_count = db[dialect].execute_partition_update "UPDATE accounts SET active = TRUE WHERE active = FALSE" + _(pdml_row_count).must_equal 1 - post_results = db.execute_sql "SELECT * FROM accounts WHERE active = TRUE", single_use: { strong: true } - _(post_results.rows.count).must_equal 3 - end + post_results = db[dialect].execute_sql "SELECT * FROM accounts WHERE active = TRUE", single_use: { strong: true } + _(post_results.rows.count).must_equal 3 + end - it "executes a simple Partitioned DML statement with query options" do - prior_results = db.execute_sql "SELECT * FROM accounts WHERE active = TRUE" - _(prior_results.rows.count).must_equal 2 + it "executes a simple Partitioned DML statement with query optionsfor #{dialect}" do + prior_results = db[dialect].execute_sql "SELECT * FROM accounts WHERE active = TRUE" + _(prior_results.rows.count).must_equal 2 - query_options = { optimizer_version: "3", optimizer_statistics_package: "latest" } - pdml_row_count = db.execute_partition_update "UPDATE accounts a SET a.active = TRUE WHERE a.active = FALSE", query_options: query_options - _(pdml_row_count).must_equal 1 + query_options = { optimizer_version: "3", optimizer_statistics_package: "latest" } + pdml_row_count = db[dialect].execute_partition_update "UPDATE accounts SET active = TRUE WHERE active = FALSE", query_options: query_options + _(pdml_row_count).must_equal 1 - post_results = db.execute_sql "SELECT * FROM accounts WHERE active = TRUE", single_use: { strong: true } - _(post_results.rows.count).must_equal 3 - end + post_results = db[dialect].execute_sql "SELECT * FROM accounts WHERE active = TRUE", single_use: { strong: true } + _(post_results.rows.count).must_equal 3 + end - describe "request options" do - it "execute Partitioned DML statement with priority options" do - pdml_row_count = db.execute_partition_update 
"UPDATE accounts a SET a.active = TRUE WHERE a.active = FALSE", - request_options: { priority: :PRIORITY_MEDIUM } + describe "request optionsfor #{dialect}" do + it "execute Partitioned DML statement with priority optionsfor #{dialect}" do + pdml_row_count = db[dialect].execute_partition_update "UPDATE accounts SET active = TRUE WHERE active = FALSE", + request_options: { priority: :PRIORITY_MEDIUM } - _(pdml_row_count).must_equal 1 + _(pdml_row_count).must_equal 1 + end end - end - it "executes a Partitioned DML statement with request tagging option" do - pdml_row_count = db.execute_partition_update "UPDATE accounts a SET a.active = TRUE WHERE a.active = FALSE", - request_options: { tag: "Tag-P-1" } - _(pdml_row_count).must_equal 1 - end + it "executes a Partitioned DML statement with request tagging optionfor #{dialect}" do + pdml_row_count = db[dialect].execute_partition_update "UPDATE accounts SET active = TRUE WHERE active = FALSE", + request_options: { tag: "Tag-P-1" } + _(pdml_row_count).must_equal 1 + end + end end From fc907850b2978d5ab61f86a2935c262f65bb1871 Mon Sep 17 00:00:00 2001 From: Nivedha Date: Tue, 15 Feb 2022 17:54:16 +0530 Subject: [PATCH 06/19] add more test for pg_numeric --- acceptance/data/fixtures.rb | 1 + .../acceptance/spanner/client/crud_test.rb | 14 +- .../acceptance/spanner/client/dml_test.rb | 10 +- .../acceptance/spanner/client/execute_test.rb | 26 +- .../spanner/client/params/pgnumeric_test.rb | 48 +- .../acceptance/spanner/client/pdml_test.rb | 10 +- .../spanner/client/single_use_test.rb | 281 ++++---- .../spanner/client/snapshot_test.rb | 612 +++++++++--------- .../spanner/client/types/pgnumeric_test.rb | 97 +++ 9 files changed, 604 insertions(+), 495 deletions(-) diff --git a/acceptance/data/fixtures.rb b/acceptance/data/fixtures.rb index 7cf0630c5699..f9d24751ecfc 100644 --- a/acceptance/data/fixtures.rb +++ b/acceptance/data/fixtures.rb @@ -55,6 +55,7 @@ def stuff_pg_ddl_statement id bigint NOT NULL, "int" bigint, "float" 
double precision, + "numeric" NUMERIC, bool boolean, string character varying, byte bytea, diff --git a/google-cloud-spanner/acceptance/spanner/client/crud_test.rb b/google-cloud-spanner/acceptance/spanner/client/crud_test.rb index 915c50ee3b4a..879e048ac3aa 100644 --- a/google-cloud-spanner/acceptance/spanner/client/crud_test.rb +++ b/google-cloud-spanner/acceptance/spanner/client/crud_test.rb @@ -25,7 +25,7 @@ end [:gsql, :pg].each do |dialect| - it "inserts, updates, upserts, reads, and deletes recordsfor #{dialect}" do + it "inserts, updates, upserts, reads, and deletes records for #{dialect}" do results = db[dialect].read "accounts", ["account_id"], single_use: { timestamp: @setup_timestamp[dialect] } _(results.rows.count).must_equal 0 _(results.timestamp).wont_be :nil? @@ -59,7 +59,7 @@ _(results.timestamp).wont_be :nil? end - it "inserts, updates, upserts, reads, and deletes records using commitfor #{dialect}" do + it "inserts, updates, upserts, reads, and deletes records using commit for #{dialect}" do results = db[dialect].read "accounts", ["account_id"], single_use: { timestamp: @setup_timestamp[dialect] } _(results.rows.count).must_equal 0 _(results.timestamp).wont_be :nil? @@ -99,7 +99,7 @@ _(results.timestamp).wont_be :nil? end - it "inserts, updates, upserts, reads, and deletes records using commit and return commit statsfor #{dialect}" do + it "inserts, updates, upserts, reads, and deletes records using commit and return commit stats for #{dialect}" do skip if emulator_enabled? commit_options = { return_commit_stats: true } @@ -130,7 +130,7 @@ _(results.timestamp).wont_be :nil? end - it "inserts, updates, upserts, reads, and deletes records in a transactionfor #{dialect}" do + it "inserts, updates, upserts, reads, and deletes records in a transaction for #{dialect}" do timestamp = @setup_timestamp[dialect] active_count_sql = "SELECT COUNT(*) AS count FROM accounts WHERE active = true" @@ -163,8 +163,8 @@ _(results.timestamp).wont_be :nil? 
end - describe "request optionsfor #{dialect}" do - it "execute CRUD statement with priority optionsfor #{dialect}" do + describe "request options for #{dialect}" do + it "execute CRUD statement with priority options for #{dialect}" do request_options = { priority: :PRIORITY_MEDIUM } results = db[dialect].read "accounts", ["account_id"], request_options: request_options _(results.rows.count).must_equal 0 @@ -183,7 +183,7 @@ end end - it "inserts, updates, upserts, reads, and deletes records with request tagging optionsfor #{dialect}" do + it "inserts, updates, upserts, reads, and deletes records with request tagging options for #{dialect}" do timestamp = db[dialect].insert "accounts", @default_rows[dialect][0], request_options: { tag: "Tag-CRUD-1" } _(timestamp).wont_be :nil? diff --git a/google-cloud-spanner/acceptance/spanner/client/dml_test.rb b/google-cloud-spanner/acceptance/spanner/client/dml_test.rb index 623ea7ee7a5a..ce297ba2ddd7 100644 --- a/google-cloud-spanner/acceptance/spanner/client/dml_test.rb +++ b/google-cloud-spanner/acceptance/spanner/client/dml_test.rb @@ -51,7 +51,7 @@ end [:gsql, :pg].each do |dialect| - it "executes multiple DML statements in a transactionfor #{dialect}" do + it "executes multiple DML statements in a transaction for #{dialect}" do prior_results = db[dialect].execute_sql "SELECT * FROM accounts" _(prior_results.rows.count).must_equal 3 @@ -92,7 +92,7 @@ _(post_results.rows.count).must_equal 4 end - it "executes a DML statement, then rollback the transactionfor #{dialect}" do + it "executes a DML statement, then rollback the transaction for #{dialect}" do prior_results = db[dialect].execute_sql "SELECT * FROM accounts" _(prior_results.rows.count).must_equal 3 @@ -121,7 +121,7 @@ _(post_results.rows.count).must_equal 3 end - it "executes a DML statement, then a mutationfor #{dialect}" do + it "executes a DML statement, then a mutation for #{dialect}" do prior_results = db[dialect].execute_sql "SELECT * FROM accounts" 
_(prior_results.rows.count).must_equal 3 @@ -143,8 +143,8 @@ _(post_results.rows.count).must_equal 5 end - describe "request optionsfor #{dialect}" do - it "execute DML statement with priority optionsfor #{dialect}" do + describe "request options for #{dialect}" do + it "execute DML statement with priority options for #{dialect}" do request_options = { priority: :PRIORITY_MEDIUM } db[dialect].transaction request_options: request_options do |tx| diff --git a/google-cloud-spanner/acceptance/spanner/client/execute_test.rb b/google-cloud-spanner/acceptance/spanner/client/execute_test.rb index 59a5a10da8dc..22f7c3402c54 100644 --- a/google-cloud-spanner/acceptance/spanner/client/execute_test.rb +++ b/google-cloud-spanner/acceptance/spanner/client/execute_test.rb @@ -18,7 +18,7 @@ let(:db) { {gsql: spanner_client, pg: spanner_pg_client} } [:gsql, :pg].each do |dialect| - it "runs SELECT 1for #{dialect}" do + it "runs SELECT 1 for #{dialect}" do results = db[dialect].execute_sql "SELECT 1" _(results).must_be_kind_of Google::Cloud::Spanner::Results @@ -33,7 +33,7 @@ _(row[0]).must_equal 1 end - it "runs a simple queryfor #{dialect}" do + it "runs a simple query for #{dialect}" do results = db[dialect].execute_sql "SELECT 42 AS num" _(results).must_be_kind_of Google::Cloud::Spanner::Results @@ -49,7 +49,7 @@ _(row[:num]).must_equal 42 end - it "runs a simple query using a single-use strong optionfor #{dialect}" do + it "runs a simple query using a single-use strong option for #{dialect}" do results = db[dialect].execute_sql "SELECT 42 AS num", single_use: { strong: true } _(results).must_be_kind_of Google::Cloud::Spanner::Results @@ -65,7 +65,7 @@ _(row[:num]).must_equal 42 end - it "runs a simple query using a single-use timestamp optionfor #{dialect}" do + it "runs a simple query using a single-use timestamp option for #{dialect}" do results = db[dialect].execute_sql "SELECT 42 AS num", single_use: { timestamp: (Time.now - 60) } _(results).must_be_kind_of 
Google::Cloud::Spanner::Results @@ -81,7 +81,7 @@ _(row[:num]).must_equal 42 end - it "runs a simple query using a single-use staleness optionfor #{dialect}" do + it "runs a simple query using a single-use staleness option for #{dialect}" do results = db[dialect].execute_sql "SELECT 42 AS num", single_use: { staleness: 60 } _(results).must_be_kind_of Google::Cloud::Spanner::Results @@ -97,7 +97,7 @@ _(row[:num]).must_equal 42 end - it "runs a simple query using a single-use bounded_timestamp optionfor #{dialect}" do + it "runs a simple query using a single-use bounded_timestamp option for #{dialect}" do results = db[dialect].execute_sql "SELECT 42 AS num", single_use: { bounded_timestamp: (Time.now - 60) } _(results).must_be_kind_of Google::Cloud::Spanner::Results @@ -113,7 +113,7 @@ _(row[:num]).must_equal 42 end - it "runs a simple query using a single-use bounded_staleness optionfor #{dialect}" do + it "runs a simple query using a single-use bounded_staleness option for #{dialect}" do results = db[dialect].execute_sql "SELECT 42 AS num", single_use: { bounded_staleness: 60 } _(results).must_be_kind_of Google::Cloud::Spanner::Results @@ -129,7 +129,7 @@ _(row[:num]).must_equal 42 end - it "runs a simple query with query optionsfor #{dialect}" do + it "runs a simple query with query options for #{dialect}" do query_options = { optimizer_version: "3", optimizer_statistics_package: "latest" } results = db[dialect].execute_sql "SELECT 42 AS num", query_options: query_options _(results).must_be_kind_of Google::Cloud::Spanner::Results @@ -146,7 +146,7 @@ _(row[:num]).must_equal 42 end - it "runs a simple query when the client-level config of query options is setfor #{dialect}" do + it "runs a simple query when the client-level config of query options is set for #{dialect}" do query_options = { optimizer_version: "3", optimizer_statistics_package: "latest" } new_spanner = Google::Cloud::Spanner.new new_db = new_spanner.client db[dialect].instance_id, 
db[dialect].database_id, query_options: query_options @@ -167,7 +167,7 @@ _(row[:num]).must_equal 42 end - describe "when the environment variable of query options is setfor #{dialect}" do + describe "when the environment variable of query options is set for #{dialect}" do let(:origin_opt_version) { nil } let(:origin_opt_stats_pkg) { nil } @@ -183,7 +183,7 @@ ENV["SPANNER_OPTIMIZER_STATISTICS_PACKAGE"] = origin_opt_stats_pkg end - it "runs a simple query for #{dialect}" do + it "runs a simple query for #{dialect}" do new_spanner = Google::Cloud::Spanner.new new_db = new_spanner.client db[dialect].instance_id, db[dialect].database_id _(new_db.project.query_options).must_equal({ optimizer_version: "3", optimizer_statistics_package: "latest" }) @@ -204,8 +204,8 @@ end end - describe "request optionsfor #{dialect}" do - it "run sample query with priorityfor #{dialect}" do + describe "request options for #{dialect}" do + it "run sample query with priority for #{dialect}" do results = db[dialect].execute_sql "SELECT 1", request_options: { priority: :PRIORITY_MEDIUM } _(results).must_be_kind_of Google::Cloud::Spanner::Results diff --git a/google-cloud-spanner/acceptance/spanner/client/params/pgnumeric_test.rb b/google-cloud-spanner/acceptance/spanner/client/params/pgnumeric_test.rb index 855581a2f1f6..a2a0d9f17293 100644 --- a/google-cloud-spanner/acceptance/spanner/client/params/pgnumeric_test.rb +++ b/google-cloud-spanner/acceptance/spanner/client/params/pgnumeric_test.rb @@ -13,55 +13,33 @@ # limitations under the License. 
require "spanner_helper" +require "bigdecimal" describe "Spanner Client", :params, :bool, :spanner do - let(:db) { spanner_client } + let(:db) { spanner_pg_client } - it "queries and returns a bool parameter" do - results = db.execute_query "SELECT @value AS value", params: { value: true } + it "queries and returns a BigDecimal parameter" do + results = db.execute_query "SELECT $1 AS value", params: { p1: BigDecimal(1) }, types: { p1: :PG_NUMERIC } _(results).must_be_kind_of Google::Cloud::Spanner::Results - _(results.fields[:value]).must_equal :BOOL - _(results.rows.first[:value]).must_equal true + _(results.fields[:value]).must_equal :NUMERIC + _(results.rows.first[:value]).must_equal BigDecimal(1) end - it "queries and returns a NULL bool parameter" do - results = db.execute_query "SELECT @value AS value", params: { value: nil }, types: { value: :BOOL } + it "queries and returns a NULL parameter" do + results = db.execute_query "SELECT $1 AS value", params: { p1: nil }, types: { p1: :PG_NUMERIC } _(results).must_be_kind_of Google::Cloud::Spanner::Results - _(results.fields[:value]).must_equal :BOOL + _(results.fields[:value]).must_equal :NUMERIC _(results.rows.first[:value]).must_be :nil? end - it "queries and returns an array of bool parameters" do - results = db.execute_query "SELECT @value AS value", params: { value: [false, true, false] } + it "queries and returns a NAN BigDecimal parameter" do + results = db.execute_query "SELECT $1 AS value", params: { p1: BigDecimal('NaN') }, types: { p1: :PG_NUMERIC } _(results).must_be_kind_of Google::Cloud::Spanner::Results - _(results.fields[:value]).must_equal [:BOOL] - _(results.rows.first[:value]).must_equal [false, true, false] + _(results.fields[:value]).must_equal :NUMERIC + _(results.rows.first[:value]).must_be :nan? 
end - it "queries and returns an array of bool parameters with a nil value" do - results = db.execute_query "SELECT @value AS value", params: { value: [nil, false, true, false] } - - _(results).must_be_kind_of Google::Cloud::Spanner::Results - _(results.fields[:value]).must_equal [:BOOL] - _(results.rows.first[:value]).must_equal [nil, false, true, false] - end - - it "queries and returns an empty array of bool parameters" do - results = db.execute_query "SELECT @value AS value", params: { value: [] }, types: { value: [:BOOL] } - - _(results).must_be_kind_of Google::Cloud::Spanner::Results - _(results.fields[:value]).must_equal [:BOOL] - _(results.rows.first[:value]).must_equal [] - end - - it "queries and returns a NULL array of bool parameters" do - results = db.execute_query "SELECT @value AS value", params: { value: nil }, types: { value: [:BOOL] } - - _(results).must_be_kind_of Google::Cloud::Spanner::Results - _(results.fields[:value]).must_equal [:BOOL] - _(results.rows.first[:value]).must_be :nil? 
- end end diff --git a/google-cloud-spanner/acceptance/spanner/client/pdml_test.rb b/google-cloud-spanner/acceptance/spanner/client/pdml_test.rb index fe56f6cddea9..09750ed276f2 100644 --- a/google-cloud-spanner/acceptance/spanner/client/pdml_test.rb +++ b/google-cloud-spanner/acceptance/spanner/client/pdml_test.rb @@ -35,7 +35,7 @@ end [:gsql, :pg].each do |dialect| - it "executes a simple Partitioned DML statementfor #{dialect}" do + it "executes a simple Partitioned DML statement for #{dialect}" do prior_results = db[dialect].execute_sql "SELECT * FROM accounts WHERE active = TRUE" _(prior_results.rows.count).must_equal 2 @@ -46,7 +46,7 @@ _(post_results.rows.count).must_equal 3 end - it "executes a simple Partitioned DML statement with query optionsfor #{dialect}" do + it "executes a simple Partitioned DML statement with query options for #{dialect}" do prior_results = db[dialect].execute_sql "SELECT * FROM accounts WHERE active = TRUE" _(prior_results.rows.count).must_equal 2 @@ -58,8 +58,8 @@ _(post_results.rows.count).must_equal 3 end - describe "request optionsfor #{dialect}" do - it "execute Partitioned DML statement with priority optionsfor #{dialect}" do + describe "request options for #{dialect}" do + it "execute Partitioned DML statement with priority options for #{dialect}" do pdml_row_count = db[dialect].execute_partition_update "UPDATE accounts SET active = TRUE WHERE active = FALSE", request_options: { priority: :PRIORITY_MEDIUM } @@ -67,7 +67,7 @@ end end - it "executes a Partitioned DML statement with request tagging optionfor #{dialect}" do + it "executes a Partitioned DML statement with request tagging option for #{dialect}" do pdml_row_count = db[dialect].execute_partition_update "UPDATE accounts SET active = TRUE WHERE active = FALSE", request_options: { tag: "Tag-P-1" } _(pdml_row_count).must_equal 1 diff --git a/google-cloud-spanner/acceptance/spanner/client/single_use_test.rb 
b/google-cloud-spanner/acceptance/spanner/client/single_use_test.rb index 59550a54405a..71fc4fcaa482 100644 --- a/google-cloud-spanner/acceptance/spanner/client/single_use_test.rb +++ b/google-cloud-spanner/acceptance/spanner/client/single_use_test.rb @@ -15,151 +15,164 @@ require "spanner_helper" describe "Spanner Client", :single_use, :spanner do - let(:db) { spanner_client } - let(:columns) { [:account_id, :username, :friends, :active, :reputation, :avatar] } - let(:fields_hash) { { account_id: :INT64, username: :STRING, friends: [:INT64], active: :BOOL, reputation: :FLOAT64, avatar: :BYTES } } + let(:db) { {gsql: spanner_client, pg: spanner_pg_client} } + let(:columns) {{ gsql: [:account_id, :username, :friends, :active, :reputation, :avatar], + pg: [:account_id, :username, :active, :reputation, :avatar] + }} + let(:fields_hash) {{ gsql: { account_id: :INT64, username: :STRING, friends: [:INT64], active: :BOOL, reputation: :FLOAT64, avatar: :BYTES }, + pg: { account_id: :INT64, username: :STRING, active: :BOOL, reputation: :FLOAT64, avatar: :BYTES } + }} before do - @setup_timestamp = db.commit do |c| + setup_timestamp_gsql = db[:gsql].commit do |c| c.delete "accounts" c.insert "accounts", default_account_rows end - end - - after do - db.delete "accounts" - end - - it "runs a query with strong option" do - results = db.execute_sql "SELECT * FROM accounts ORDER BY account_id ASC", single_use: { strong: true } - - _(results).must_be_kind_of Google::Cloud::Spanner::Results - _(results.fields.to_h).must_equal fields_hash - results.rows.zip(default_account_rows).each do |expected, actual| - assert_accounts_equal expected, actual - end - - _(results.timestamp).wont_be :nil? - _(results.timestamp).must_be_close_to @setup_timestamp, 3 # within 3 seconds? 
- end - - it "runs a read with strong option" do - results = db.read "accounts", columns, single_use: { strong: true } - - _(results).must_be_kind_of Google::Cloud::Spanner::Results - _(results.fields.to_h).must_equal fields_hash - results.rows.zip(default_account_rows).each do |expected, actual| - assert_accounts_equal expected, actual - end - - _(results.timestamp).wont_be :nil? - _(results.timestamp).must_be_close_to @setup_timestamp, 3 # within 3 seconds? - end - - it "runs a query with timestamp option" do - results = db.execute_sql "SELECT * FROM accounts ORDER BY account_id ASC", single_use: { timestamp: @setup_timestamp } - - _(results).must_be_kind_of Google::Cloud::Spanner::Results - _(results.fields.to_h).must_equal fields_hash - results.rows.zip(default_account_rows).each do |expected, actual| - assert_accounts_equal expected, actual - end - - _(results.timestamp).wont_be :nil? - _(results.timestamp).must_be_close_to @setup_timestamp, 1 - end - - it "runs a read with timestamp option" do - results = db.read "accounts", columns, single_use: { timestamp: @setup_timestamp } - - _(results).must_be_kind_of Google::Cloud::Spanner::Results - _(results.fields.to_h).must_equal fields_hash - results.rows.zip(default_account_rows).each do |expected, actual| - assert_accounts_equal expected, actual - end - - _(results.timestamp).wont_be :nil? - _(results.timestamp).must_be_close_to @setup_timestamp, 1 - end - - it "runs a query with staleness option" do - results = db.execute_sql "SELECT * FROM accounts ORDER BY account_id ASC", single_use: { staleness: 0.0001 } - - _(results).must_be_kind_of Google::Cloud::Spanner::Results - _(results.fields.to_h).must_equal fields_hash - results.rows.zip(default_account_rows).each do |expected, actual| - assert_accounts_equal expected, actual - end - - _(results.timestamp).wont_be :nil? - _(results.timestamp).must_be_close_to @setup_timestamp, 3 # within 3 seconds? 
- end - - it "runs a read with staleness option" do - results = db.read "accounts", columns, single_use: { staleness: 0.0001 } - - _(results).must_be_kind_of Google::Cloud::Spanner::Results - _(results.fields.to_h).must_equal fields_hash - results.rows.zip(default_account_rows).each do |expected, actual| - assert_accounts_equal expected, actual - end - - _(results.timestamp).wont_be :nil? - _(results.timestamp).must_be_close_to @setup_timestamp, 3 # within 3 seconds? - end - - it "runs a query with bounded_timestamp option" do - results = db.execute_sql "SELECT * FROM accounts ORDER BY account_id ASC", single_use: { bounded_timestamp: @setup_timestamp } - - _(results).must_be_kind_of Google::Cloud::Spanner::Results - _(results.fields.to_h).must_equal fields_hash - results.rows.zip(default_account_rows).each do |expected, actual| - assert_accounts_equal expected, actual - end - - _(results.timestamp).wont_be :nil? - _(results.timestamp).must_be_close_to @setup_timestamp, 3 # within 3 seconds? - end - - it "runs a read with bounded_timestamp option" do - results = db.read "accounts", columns, single_use: { bounded_timestamp: @setup_timestamp } - - _(results).must_be_kind_of Google::Cloud::Spanner::Results - _(results.fields.to_h).must_equal fields_hash - results.rows.zip(default_account_rows).each do |expected, actual| - assert_accounts_equal expected, actual + setup_timestamp_pg = db[:pg].commit do |c| + c.delete "accounts" + c.insert "accounts", default_pg_account_rows end - - _(results.timestamp).wont_be :nil? - _(results.timestamp).must_be_close_to @setup_timestamp, 3 # within 3 seconds? 
+ @setup_timestamp = {gsql: setup_timestamp_gsql, pg: setup_timestamp_pg} + @default_rows = {gsql: default_account_rows, pg: default_pg_account_rows} end - it "runs a query with bounded_staleness option" do - results = db.execute_sql "SELECT * FROM accounts ORDER BY account_id ASC", single_use: { bounded_staleness: 0.0001 } - - _(results).must_be_kind_of Google::Cloud::Spanner::Results - _(results.fields.to_h).must_equal fields_hash - results.rows.zip(default_account_rows).each do |expected, actual| - assert_accounts_equal expected, actual - end - - _(results.timestamp).wont_be :nil? - _(results.timestamp).must_be_close_to @setup_timestamp, 3 # within 3 seconds? + after do + db[:gsql].delete "accounts" + db[:pg].delete "accounts" end - it "runs a read with bounded_staleness option" do - results = db.read "accounts", columns, single_use: { bounded_staleness: 0.0001 } - - _(results).must_be_kind_of Google::Cloud::Spanner::Results - _(results.fields.to_h).must_equal fields_hash - results.rows.zip(default_account_rows).each do |expected, actual| - assert_accounts_equal expected, actual + [:gsql, :pg].each do |dialect| + it "runs a query with strong option for #{dialect}" do + results = db[dialect].execute_sql "SELECT * FROM accounts ORDER BY account_id ASC", single_use: { strong: true } + + _(results).must_be_kind_of Google::Cloud::Spanner::Results + _(results.fields.to_h).must_equal fields_hash[dialect] + results.rows.zip(@default_rows[dialect]).each do |expected, actual| + assert_accounts_equal expected, actual + end + + _(results.timestamp).wont_be :nil? + _(results.timestamp).must_be_close_to @setup_timestamp[dialect], 3 # within 3 seconds? 
+ end + + it "runs a read with strong option for #{dialect}" do + results = db[dialect].read "accounts", columns[dialect], single_use: { strong: true } + + _(results).must_be_kind_of Google::Cloud::Spanner::Results + _(results.fields.to_h).must_equal fields_hash[dialect] + results.rows.zip(@default_rows[dialect]).each do |expected, actual| + assert_accounts_equal expected, actual + end + + _(results.timestamp).wont_be :nil? + _(results.timestamp).must_be_close_to @setup_timestamp[dialect], 3 # within 3 seconds? + end + + it "runs a query with timestamp option for #{dialect}" do + results = db[dialect].execute_sql "SELECT * FROM accounts ORDER BY account_id ASC", single_use: { timestamp: @setup_timestamp[dialect] } + + _(results).must_be_kind_of Google::Cloud::Spanner::Results + _(results.fields.to_h).must_equal fields_hash[dialect] + results.rows.zip(@default_rows[dialect]).each do |expected, actual| + assert_accounts_equal expected, actual + end + + _(results.timestamp).wont_be :nil? + _(results.timestamp).must_be_close_to @setup_timestamp[dialect], 1 + end + + it "runs a read with timestamp option for #{dialect}" do + results = db[dialect].read "accounts", columns[dialect], single_use: { timestamp: @setup_timestamp[dialect] } + + _(results).must_be_kind_of Google::Cloud::Spanner::Results + _(results.fields.to_h).must_equal fields_hash[dialect] + results.rows.zip(@default_rows[dialect]).each do |expected, actual| + assert_accounts_equal expected, actual + end + + _(results.timestamp).wont_be :nil? 
+ _(results.timestamp).must_be_close_to @setup_timestamp[dialect], 1 + end + + it "runs a query with staleness option for #{dialect}" do + results = db[dialect].execute_sql "SELECT * FROM accounts ORDER BY account_id ASC", single_use: { staleness: 0.0001 } + + _(results).must_be_kind_of Google::Cloud::Spanner::Results + _(results.fields.to_h).must_equal fields_hash[dialect] + results.rows.zip(@default_rows[dialect]).each do |expected, actual| + assert_accounts_equal expected, actual + end + + _(results.timestamp).wont_be :nil? + _(results.timestamp).must_be_close_to @setup_timestamp[dialect], 3 # within 3 seconds? + end + + it "runs a read with staleness option for #{dialect}" do + results = db[dialect].read "accounts", columns[dialect], single_use: { staleness: 0.0001 } + + _(results).must_be_kind_of Google::Cloud::Spanner::Results + _(results.fields.to_h).must_equal fields_hash[dialect] + results.rows.zip(@default_rows[dialect]).each do |expected, actual| + assert_accounts_equal expected, actual + end + + _(results.timestamp).wont_be :nil? + _(results.timestamp).must_be_close_to @setup_timestamp[dialect], 3 # within 3 seconds? + end + + it "runs a query with bounded_timestamp option for #{dialect}" do + results = db[dialect].execute_sql "SELECT * FROM accounts ORDER BY account_id ASC", single_use: { bounded_timestamp: @setup_timestamp[dialect] } + + _(results).must_be_kind_of Google::Cloud::Spanner::Results + _(results.fields.to_h).must_equal fields_hash[dialect] + results.rows.zip(@default_rows[dialect]).each do |expected, actual| + assert_accounts_equal expected, actual + end + + _(results.timestamp).wont_be :nil? + _(results.timestamp).must_be_close_to @setup_timestamp[dialect], 3 # within 3 seconds? 
+ end + + it "runs a read with bounded_timestamp option for #{dialect}" do + results = db[dialect].read "accounts", columns[dialect], single_use: { bounded_timestamp: @setup_timestamp[dialect] } + + _(results).must_be_kind_of Google::Cloud::Spanner::Results + _(results.fields.to_h).must_equal fields_hash[dialect] + results.rows.zip(@default_rows[dialect]).each do |expected, actual| + assert_accounts_equal expected, actual + end + + _(results.timestamp).wont_be :nil? + _(results.timestamp).must_be_close_to @setup_timestamp[dialect], 3 # within 3 seconds? + end + + it "runs a query with bounded_staleness option for #{dialect}" do + results = db[dialect].execute_sql "SELECT * FROM accounts ORDER BY account_id ASC", single_use: { bounded_staleness: 0.0001 } + + _(results).must_be_kind_of Google::Cloud::Spanner::Results + _(results.fields.to_h).must_equal fields_hash[dialect] + results.rows.zip(@default_rows[dialect]).each do |expected, actual| + assert_accounts_equal expected, actual + end + + _(results.timestamp).wont_be :nil? + _(results.timestamp).must_be_close_to @setup_timestamp[dialect], 3 # within 3 seconds? + end + + it "runs a read with bounded_staleness option for #{dialect}" do + results = db[dialect].read "accounts", columns[dialect], single_use: { bounded_staleness: 0.0001 } + + _(results).must_be_kind_of Google::Cloud::Spanner::Results + _(results.fields.to_h).must_equal fields_hash[dialect] + results.rows.zip(@default_rows[dialect]).each do |expected, actual| + assert_accounts_equal expected, actual + end + + _(results.timestamp).wont_be :nil? + _(results.timestamp).must_be_close_to @setup_timestamp[dialect], 3 # within 3 seconds? end - - _(results.timestamp).wont_be :nil? - _(results.timestamp).must_be_close_to @setup_timestamp, 3 # within 3 seconds? end - + def assert_accounts_equal expected, actual if actual[:account_id].nil? _(expected[:account_id]).must_be :nil? 
@@ -194,5 +207,5 @@ def assert_accounts_equal expected, actual else _(expected[:friends]).must_equal actual[:friends] end - end + end end diff --git a/google-cloud-spanner/acceptance/spanner/client/snapshot_test.rb b/google-cloud-spanner/acceptance/spanner/client/snapshot_test.rb index e9c8b0de8949..8ad8d951c314 100644 --- a/google-cloud-spanner/acceptance/spanner/client/snapshot_test.rb +++ b/google-cloud-spanner/acceptance/spanner/client/snapshot_test.rb @@ -15,410 +15,430 @@ require "spanner_helper" describe "Spanner Client", :snapshot, :spanner do - let(:db) { spanner_client } - let(:columns) { [:account_id, :username, :friends, :active, :reputation, :avatar] } - let(:fields_hash) { { account_id: :INT64, username: :STRING, friends: [:INT64], active: :BOOL, reputation: :FLOAT64, avatar: :BYTES } } + let(:db) { {gsql: spanner_client, pg: spanner_pg_client} } + let(:columns) {{ gsql: [:account_id, :username, :friends, :active, :reputation, :avatar], + pg: [:account_id, :username, :active, :reputation, :avatar] + }} + let(:fields_hash) {{ gsql: { account_id: :INT64, username: :STRING, friends: [:INT64], active: :BOOL, reputation: :FLOAT64, avatar: :BYTES }, + pg: { account_id: :INT64, username: :STRING, active: :BOOL, reputation: :FLOAT64, avatar: :BYTES } + }} + let(:select_dql) {{ gsql: "SELECT account_id, username FROM accounts WHERE account_id = @id", + pg: "SELECT account_id, username FROM accounts WHERE account_id = $1" + }} + + let(:select_params) { { gsql: { id: 1 }, pg: { p1: 1 } } } before do - @setup_timestamp = db.commit do |c| + setup_timestamp_gsql = db[:gsql].commit do |c| c.delete "accounts" c.insert "accounts", default_account_rows end + setup_timestamp_pg = db[:pg].commit do |c| + c.delete "accounts" + c.insert "accounts", default_pg_account_rows + end + @setup_timestamp = {gsql: setup_timestamp_gsql, pg: setup_timestamp_pg} + @default_rows = {gsql: default_account_rows, pg: default_pg_account_rows} end after do - db.delete "accounts" + 
db[:gsql].delete "accounts" + db[:pg].delete "accounts" end - it "runs a query" do - results = nil - db.snapshot do |snp| - _(snp.transaction_id).wont_be :nil? - _(snp.timestamp).wont_be :nil? + [:gsql, :pg].each do |dialect| - results = snp.execute_sql "SELECT * FROM accounts ORDER BY account_id ASC" - end + it "runs a query for #{dialect}" do + results = nil + db[dialect].snapshot do |snp| + _(snp.transaction_id).wont_be :nil? + _(snp.timestamp).wont_be :nil? + + results = snp.execute_sql "SELECT * FROM accounts ORDER BY account_id ASC" + end - _(results).must_be_kind_of Google::Cloud::Spanner::Results - _(results.fields.to_h).must_equal fields_hash - results.rows.zip(default_account_rows).each do |expected, actual| - assert_accounts_equal expected, actual + _(results).must_be_kind_of Google::Cloud::Spanner::Results + _(results.fields.to_h).must_equal fields_hash[dialect] + results.rows.zip(@default_rows[dialect]).each do |expected, actual| + assert_accounts_equal expected, actual + end end - end - it "runs a query with query options" do - query_options = { optimizer_version: "3", optimizer_statistics_package: "latest" } - results = nil - db.snapshot do |snp| - _(snp.transaction_id).wont_be :nil? - _(snp.timestamp).wont_be :nil? + it "runs a query with query options for #{dialect}" do + query_options = { optimizer_version: "3", optimizer_statistics_package: "latest" } + results = nil + db[dialect].snapshot do |snp| + _(snp.transaction_id).wont_be :nil? + _(snp.timestamp).wont_be :nil? 
- results = snp.execute_sql "SELECT * FROM accounts ORDER BY account_id ASC", query_options: query_options - end + results = snp.execute_sql "SELECT * FROM accounts ORDER BY account_id ASC", query_options: query_options + end - _(results).must_be_kind_of Google::Cloud::Spanner::Results - _(results.fields.to_h).must_equal fields_hash - results.rows.zip(default_account_rows).each do |expected, actual| - assert_accounts_equal expected, actual + _(results).must_be_kind_of Google::Cloud::Spanner::Results + _(results.fields.to_h).must_equal fields_hash[dialect] + results.rows.zip(@default_rows[dialect]).each do |expected, actual| + assert_accounts_equal expected, actual + end end - end - it "runs a read" do - results = nil - db.snapshot do |snp| - _(snp.transaction_id).wont_be :nil? - _(snp.timestamp).wont_be :nil? + it "runs a read for #{dialect}" do + results = nil + db[dialect].snapshot do |snp| + _(snp.transaction_id).wont_be :nil? + _(snp.timestamp).wont_be :nil? - results = snp.read "accounts", columns - end + results = snp.read "accounts", columns[dialect] + end - _(results).must_be_kind_of Google::Cloud::Spanner::Results - _(results.fields.to_h).must_equal fields_hash - results.rows.zip(default_account_rows).each do |expected, actual| - assert_accounts_equal expected, actual + _(results).must_be_kind_of Google::Cloud::Spanner::Results + _(results.fields.to_h).must_equal fields_hash[dialect] + results.rows.zip(@default_rows[dialect]).each do |expected, actual| + assert_accounts_equal expected, actual + end end - end - it "runs a query with strong option" do - results = nil - db.snapshot strong: true do |snp| - _(snp.transaction_id).wont_be :nil? - _(snp.timestamp).wont_be :nil? + it "runs a query with strong option for #{dialect}" do + results = nil + db[dialect].snapshot strong: true do |snp| + _(snp.transaction_id).wont_be :nil? + _(snp.timestamp).wont_be :nil? 
- results = snp.execute_sql "SELECT * FROM accounts ORDER BY account_id ASC" - end + results = snp.execute_sql "SELECT * FROM accounts ORDER BY account_id ASC" + end - _(results).must_be_kind_of Google::Cloud::Spanner::Results - _(results.fields.to_h).must_equal fields_hash - results.rows.zip(default_account_rows).each do |expected, actual| - assert_accounts_equal expected, actual + _(results).must_be_kind_of Google::Cloud::Spanner::Results + _(results.fields.to_h).must_equal fields_hash[dialect] + results.rows.zip(@default_rows[dialect]).each do |expected, actual| + assert_accounts_equal expected, actual + end end - end - it "runs a read with strong option" do - results = nil - db.snapshot strong: true do |snp| - _(snp.transaction_id).wont_be :nil? - _(snp.timestamp).wont_be :nil? + it "runs a read with strong option for #{dialect}" do + results = nil + db[dialect].snapshot strong: true do |snp| + _(snp.transaction_id).wont_be :nil? + _(snp.timestamp).wont_be :nil? - results = snp.read "accounts", columns - end + results = snp.read "accounts", columns[dialect] + end - _(results).must_be_kind_of Google::Cloud::Spanner::Results - _(results.fields.to_h).must_equal fields_hash - results.rows.zip(default_account_rows).each do |expected, actual| - assert_accounts_equal expected, actual + _(results).must_be_kind_of Google::Cloud::Spanner::Results + _(results.fields.to_h).must_equal fields_hash[dialect] + results.rows.zip(@default_rows[dialect]).each do |expected, actual| + assert_accounts_equal expected, actual + end end - end - it "runs a query with timestamp option" do - results = nil - db.snapshot timestamp: @setup_timestamp do |snp| - _(snp.transaction_id).wont_be :nil? - _(snp.timestamp).wont_be :nil? + it "runs a query with timestamp option for #{dialect}" do + results = nil + db[dialect].snapshot timestamp: @setup_timestamp[dialect] do |snp| + _(snp.transaction_id).wont_be :nil? + _(snp.timestamp).wont_be :nil? 
- results = snp.execute_sql "SELECT * FROM accounts ORDER BY account_id ASC" - end + results = snp.execute_sql "SELECT * FROM accounts ORDER BY account_id ASC" + end - _(results).must_be_kind_of Google::Cloud::Spanner::Results - _(results.fields.to_h).must_equal fields_hash - results.rows.zip(default_account_rows).each do |expected, actual| - assert_accounts_equal expected, actual + _(results).must_be_kind_of Google::Cloud::Spanner::Results + _(results.fields.to_h).must_equal fields_hash[dialect] + results.rows.zip(@default_rows[dialect]).each do |expected, actual| + assert_accounts_equal expected, actual + end end - end - it "runs a read with timestamp option" do - results = nil - db.snapshot timestamp: @setup_timestamp do |snp| - _(snp.transaction_id).wont_be :nil? - _(snp.timestamp).wont_be :nil? + it "runs a read with timestamp option for #{dialect}" do + results = nil + db[dialect].snapshot timestamp: @setup_timestamp[dialect] do |snp| + _(snp.transaction_id).wont_be :nil? + _(snp.timestamp).wont_be :nil? - results = snp.read "accounts", columns - end + results = snp.read "accounts", columns[dialect] + end - _(results).must_be_kind_of Google::Cloud::Spanner::Results - _(results.fields.to_h).must_equal fields_hash - results.rows.zip(default_account_rows).each do |expected, actual| - assert_accounts_equal expected, actual + _(results).must_be_kind_of Google::Cloud::Spanner::Results + _(results.fields.to_h).must_equal fields_hash[dialect] + results.rows.zip(@default_rows[dialect]).each do |expected, actual| + assert_accounts_equal expected, actual + end end - end - it "runs a query with staleness option" do - results = nil - db.snapshot staleness: 0.0001 do |snp| - _(snp.transaction_id).wont_be :nil? - _(snp.timestamp).wont_be :nil? + it "runs a query with staleness option for #{dialect}" do + results = nil + db[dialect].snapshot staleness: 0.0001 do |snp| + _(snp.transaction_id).wont_be :nil? + _(snp.timestamp).wont_be :nil? 
- results = snp.execute_sql "SELECT * FROM accounts ORDER BY account_id ASC" - end + results = snp.execute_sql "SELECT * FROM accounts ORDER BY account_id ASC" + end - _(results).must_be_kind_of Google::Cloud::Spanner::Results - _(results.fields.to_h).must_equal fields_hash - results.rows.zip(default_account_rows).each do |expected, actual| - assert_accounts_equal expected, actual + _(results).must_be_kind_of Google::Cloud::Spanner::Results + _(results.fields.to_h).must_equal fields_hash[dialect] + results.rows.zip(@default_rows[dialect]).each do |expected, actual| + assert_accounts_equal expected, actual + end end - end - it "runs a read with staleness option" do - results = nil - db.snapshot staleness: 0.0001 do |snp| - _(snp.transaction_id).wont_be :nil? - _(snp.timestamp).wont_be :nil? + it "runs a read with staleness option for #{dialect}" do + results = nil + db[dialect].snapshot staleness: 0.0001 do |snp| + _(snp.transaction_id).wont_be :nil? + _(snp.timestamp).wont_be :nil? - results = snp.read "accounts", columns - end + results = snp.read "accounts", columns[dialect] + end - _(results).must_be_kind_of Google::Cloud::Spanner::Results - _(results.fields.to_h).must_equal fields_hash - results.rows.zip(default_account_rows).each do |expected, actual| - assert_accounts_equal expected, actual + _(results).must_be_kind_of Google::Cloud::Spanner::Results + _(results.fields.to_h).must_equal fields_hash[dialect] + results.rows.zip(@default_rows[dialect]).each do |expected, actual| + assert_accounts_equal expected, actual + end end - end - it "strong reads are consistent even when updates happen" do - first_row = default_account_rows.first - sample_row = { account_id: first_row[:account_id], username: first_row[:username] } - modified_row = { account_id: first_row[:account_id], username: first_row[:username].reverse } + it "strong reads are consistent even when updates happen for #{dialect}" do + first_row = @default_rows[dialect].first + sample_row = { account_id: 
first_row[:account_id], username: first_row[:username] } + modified_row = { account_id: first_row[:account_id], username: first_row[:username].reverse } - db.snapshot strong: true do |snp| - _(snp.transaction_id).wont_be :nil? - _(snp.timestamp).wont_be :nil? + db[dialect].snapshot strong: true do |snp| + _(snp.transaction_id).wont_be :nil? + _(snp.timestamp).wont_be :nil? - results = snp.read "accounts", [:account_id, :username], keys: sample_row[:account_id] - # verify we got the row we were expecting - _(results.rows.first.to_h).must_equal sample_row + results = snp.read "accounts", [:account_id, :username], keys: sample_row[:account_id] + # verify we got the row we were expecting + _(results.rows.first.to_h).must_equal sample_row - # outside of the snapshot, update the row! - db.update "accounts", modified_row + # outside of the snapshot, update the row! + db[dialect].update "accounts", modified_row - results2 = snp.read "accounts", [:account_id, :username], keys: modified_row[:account_id] - # verify we got the previous row, not the modified row - _(results2.rows.first.to_h).must_equal sample_row + results2 = snp.read "accounts", [:account_id, :username], keys: modified_row[:account_id] + # verify we got the previous row, not the modified row + _(results2.rows.first.to_h).must_equal sample_row + end end - end - it "strong queries are consistent even when updates happen" do - first_row = default_account_rows.first - sample_row = { account_id: first_row[:account_id], username: first_row[:username] } - modified_row = { account_id: first_row[:account_id], username: first_row[:username].reverse } + it "strong queries are consistent even when updates happen for #{dialect}" do + first_row = @default_rows[dialect].first + sample_row = { account_id: first_row[:account_id], username: first_row[:username] } + modified_row = { account_id: first_row[:account_id], username: first_row[:username].reverse } - db.snapshot strong: true do |snp| - _(snp.transaction_id).wont_be 
:nil? - _(snp.timestamp).wont_be :nil? + db[dialect].snapshot strong: true do |snp| + _(snp.transaction_id).wont_be :nil? + _(snp.timestamp).wont_be :nil? - results = snp.execute_sql "SELECT account_id, username FROM accounts WHERE account_id = @id", params: { id: sample_row[:account_id] } - # verify we got the row we were expecting - _(results.rows.first.to_h).must_equal sample_row + results = snp.execute_sql select_dql[dialect], params: select_params[dialect] + # verify we got the row we were expecting + _(results.rows.first.to_h).must_equal sample_row - # outside of the snapshot, update the row! - db.update "accounts", modified_row + # outside of the snapshot, update the row! + db[dialect].update "accounts", modified_row - results2 = snp.execute_sql "SELECT account_id, username FROM accounts WHERE account_id = @id", params: { id: modified_row[:account_id] } - # verify we got the previous row, not the modified row - _(results2.rows.first.to_h).must_equal sample_row + results2 = snp.execute_sql select_dql[dialect], params: select_params[dialect] + # verify we got the previous row, not the modified row + _(results2.rows.first.to_h).must_equal sample_row + end end - end - it "timestamp reads are consistent even when updates happen" do - first_row = default_account_rows.first - sample_row = { account_id: first_row[:account_id], username: first_row[:username] } - modified_row = { account_id: first_row[:account_id], username: first_row[:username].reverse } + it "timestamp reads are consistent even when updates happen for #{dialect}" do + first_row = @default_rows[dialect].first + sample_row = { account_id: first_row[:account_id], username: first_row[:username] } + modified_row = { account_id: first_row[:account_id], username: first_row[:username].reverse } - db.snapshot timestamp: @setup_timestamp do |snp| - _(snp.transaction_id).wont_be :nil? - _(snp.timestamp).wont_be :nil? 
+ db[dialect].snapshot timestamp: @setup_timestamp[dialect] do |snp| + _(snp.transaction_id).wont_be :nil? + _(snp.timestamp).wont_be :nil? - results = snp.read "accounts", [:account_id, :username], keys: sample_row[:account_id] - # verify we got the row we were expecting - _(results.rows.first.to_h).must_equal sample_row + results = snp.read "accounts", [:account_id, :username], keys: sample_row[:account_id] + # verify we got the row we were expecting + _(results.rows.first.to_h).must_equal sample_row - # outside of the snapshot, update the row! - db.update "accounts", modified_row + # outside of the snapshot, update the row! + db[dialect].update "accounts", modified_row - results2 = snp.read "accounts", [:account_id, :username], keys: modified_row[:account_id] - # verify we got the previous row, not the modified row - _(results2.rows.first.to_h).must_equal sample_row + results2 = snp.read "accounts", [:account_id, :username], keys: modified_row[:account_id] + # verify we got the previous row, not the modified row + _(results2.rows.first.to_h).must_equal sample_row + end end - end - it "timestamp queries are consistent even when updates happen" do - first_row = default_account_rows.first - sample_row = { account_id: first_row[:account_id], username: first_row[:username] } - modified_row = { account_id: first_row[:account_id], username: first_row[:username].reverse } + it "timestamp queries are consistent even when updates happen for #{dialect}" do + first_row = @default_rows[dialect].first + sample_row = { account_id: first_row[:account_id], username: first_row[:username] } + modified_row = { account_id: first_row[:account_id], username: first_row[:username].reverse } - db.snapshot timestamp: @setup_timestamp do |snp| - _(snp.transaction_id).wont_be :nil? - _(snp.timestamp).wont_be :nil? + db[dialect].snapshot timestamp: @setup_timestamp[dialect] do |snp| + _(snp.transaction_id).wont_be :nil? + _(snp.timestamp).wont_be :nil? 
- results = snp.execute_sql "SELECT account_id, username FROM accounts WHERE account_id = @id", params: { id: sample_row[:account_id] } - # verify we got the row we were expecting - _(results.rows.first.to_h).must_equal sample_row + results = snp.execute_sql select_dql[dialect], params: select_params[dialect] + # verify we got the row we were expecting + _(results.rows.first.to_h).must_equal sample_row - # outside of the snapshot, update the row! - db.update "accounts", modified_row + # outside of the snapshot, update the row! + db[dialect].update "accounts", modified_row - results2 = snp.execute_sql "SELECT account_id, username FROM accounts WHERE account_id = @id", params: { id: modified_row[:account_id] } - # verify we got the previous row, not the modified row - _(results2.rows.first.to_h).must_equal sample_row + results2 = snp.execute_sql select_dql[dialect], params: select_params[dialect] + # verify we got the previous row, not the modified row + _(results2.rows.first.to_h).must_equal sample_row + end end - end - it "staleness reads are consistent even when updates happen" do - first_row = default_account_rows.first - sample_row = { account_id: first_row[:account_id], username: first_row[:username] } - modified_row = { account_id: first_row[:account_id], username: first_row[:username].reverse } + it "staleness reads are consistent even when updates happen for #{dialect}" do + first_row = @default_rows[dialect].first + sample_row = { account_id: first_row[:account_id], username: first_row[:username] } + modified_row = { account_id: first_row[:account_id], username: first_row[:username].reverse } - db.snapshot staleness: 0.0001 do |snp| - _(snp.transaction_id).wont_be :nil? - _(snp.timestamp).wont_be :nil? + db[dialect].snapshot staleness: 0.0001 do |snp| + _(snp.transaction_id).wont_be :nil? + _(snp.timestamp).wont_be :nil? 
- results = snp.read "accounts", [:account_id, :username], keys: sample_row[:account_id] - # verify we got the row we were expecting - _(results.rows.first.to_h).must_equal sample_row + results = snp.read "accounts", [:account_id, :username], keys: sample_row[:account_id] + # verify we got the row we were expecting + _(results.rows.first.to_h).must_equal sample_row - # outside of the snapshot, update the row! - db.update "accounts", modified_row + # outside of the snapshot, update the row! + db[dialect].update "accounts", modified_row - results2 = snp.read "accounts", [:account_id, :username], keys: modified_row[:account_id] - # verify we got the previous row, not the modified row - _(results2.rows.first.to_h).must_equal sample_row + results2 = snp.read "accounts", [:account_id, :username], keys: modified_row[:account_id] + # verify we got the previous row, not the modified row + _(results2.rows.first.to_h).must_equal sample_row + end end - end - it "staleness queries are consistent even when updates happen" do - first_row = default_account_rows.first - sample_row = { account_id: first_row[:account_id], username: first_row[:username] } - modified_row = { account_id: first_row[:account_id], username: first_row[:username].reverse } + it "staleness queries are consistent even when updates happen for #{dialect}" do + first_row = @default_rows[dialect].first + sample_row = { account_id: first_row[:account_id], username: first_row[:username] } + modified_row = { account_id: first_row[:account_id], username: first_row[:username].reverse } - db.snapshot staleness: 0.0001 do |snp| - _(snp.transaction_id).wont_be :nil? - _(snp.timestamp).wont_be :nil? + db[dialect].snapshot staleness: 0.0001 do |snp| + _(snp.transaction_id).wont_be :nil? + _(snp.timestamp).wont_be :nil? 
- results = snp.execute_sql "SELECT account_id, username FROM accounts WHERE account_id = @id", params: { id: sample_row[:account_id] } - # verify we got the row we were expecting - _(results.rows.first.to_h).must_equal sample_row + results = snp.execute_sql select_dql[dialect], params: select_params[dialect] + # verify we got the row we were expecting + _(results.rows.first.to_h).must_equal sample_row - # outside of the snapshot, update the row! - db.update "accounts", modified_row + # outside of the snapshot, update the row! + db[dialect].update "accounts", modified_row - results2 = snp.execute_sql "SELECT account_id, username FROM accounts WHERE account_id = @id", params: { id: modified_row[:account_id] } - # verify we got the previous row, not the modified row - _(results2.rows.first.to_h).must_equal sample_row + results2 = snp.execute_sql select_dql[dialect], params: select_params[dialect] + # verify we got the previous row, not the modified row + _(results2.rows.first.to_h).must_equal sample_row + end end - end - it "multiuse snapshot reads are consistent even when delete happen" do - keys = default_account_rows.map{|row| row[:account_id] } + it "multiuse snapshot reads are consistent even when delete happen for #{dialect}" do + keys = @default_rows[dialect].map{|row| row[:account_id] } - db.snapshot do |snp| - _(snp.transaction_id).wont_be :nil? - _(snp.timestamp).wont_be :nil? + db[dialect].snapshot do |snp| + _(snp.transaction_id).wont_be :nil? + _(snp.timestamp).wont_be :nil? 
- results = snp.read "accounts", [:account_id, :username], keys: keys - _(results).must_be_kind_of Google::Cloud::Spanner::Results + results = snp.read "accounts", [:account_id, :username], keys: keys + _(results).must_be_kind_of Google::Cloud::Spanner::Results - rows = results.rows.to_a - _(rows.count).must_equal default_account_rows.count - rows.zip(default_account_rows).each do |expected, actual| - _(expected[:account_id]).must_equal actual[:account_id] - _(expected[:username]).must_equal actual[:username] - end + rows = results.rows.to_a + _(rows.count).must_equal @default_rows[dialect].count + rows.zip(@default_rows[dialect]).each do |expected, actual| + _(expected[:account_id]).must_equal actual[:account_id] + _(expected[:username]).must_equal actual[:username] + end - # outside of the snapshot, delete rows - db.delete "accounts", keys + # outside of the snapshot, delete rows + db[dialect].delete "accounts", keys - # read rows and from snaphot and verify rows got from the snapshot - results2 = snp.read "accounts", [:account_id, :username], keys: keys - _(results2).must_be_kind_of Google::Cloud::Spanner::Results - rows2 = results2.rows.to_a + # read rows and from snaphot and verify rows got from the snapshot + results2 = snp.read "accounts", [:account_id, :username], keys: keys + _(results2).must_be_kind_of Google::Cloud::Spanner::Results + rows2 = results2.rows.to_a - _(rows2.count).must_equal default_account_rows.count - rows2.zip(default_account_rows).each do |expected, actual| - _(expected[:account_id]).must_equal actual[:account_id] - _(expected[:username]).must_equal actual[:username] + _(rows2.count).must_equal @default_rows[dialect].count + rows2.zip(@default_rows[dialect]).each do |expected, actual| + _(expected[:account_id]).must_equal actual[:account_id] + _(expected[:username]).must_equal actual[:username] + end end + + # outside of snapshot check all rows are deleted + rows3 = db[dialect].execute_sql("SELECT * FROM accounts").rows.to_a + 
_(rows3.count).must_equal 0 end - # outside of snapshot check all rows are deleted - rows3 = db.execute_sql("SELECT * FROM accounts").rows.to_a - _(rows3.count).must_equal 0 - end + it "multiuse snapshot reads with read timestamp are consistent even when delete happen for #{dialect}" do + keys = @default_rows[dialect].map{|row| row[:account_id] } - it "multiuse snapshot reads with read timestamp are consistent even when delete happen" do - keys = default_account_rows.map{|row| row[:account_id] } + db[dialect].snapshot read_timestamp: @setup_timestamp[dialect] do |snp| + _(snp.transaction_id).wont_be :nil? + _(snp.timestamp).wont_be :nil? - db.snapshot read_timestamp: @setup_timestamp do |snp| - _(snp.transaction_id).wont_be :nil? - _(snp.timestamp).wont_be :nil? + results = snp.read "accounts", [:account_id, :username], keys: keys + _(results).must_be_kind_of Google::Cloud::Spanner::Results - results = snp.read "accounts", [:account_id, :username], keys: keys - _(results).must_be_kind_of Google::Cloud::Spanner::Results + rows = results.rows.to_a + _(rows.count).must_equal @default_rows[dialect].count + rows.zip(@default_rows[dialect]).each do |expected, actual| + _(expected[:account_id]).must_equal actual[:account_id] + _(expected[:username]).must_equal actual[:username] + end - rows = results.rows.to_a - _(rows.count).must_equal default_account_rows.count - rows.zip(default_account_rows).each do |expected, actual| - _(expected[:account_id]).must_equal actual[:account_id] - _(expected[:username]).must_equal actual[:username] - end + # outside of the snapshot, delete rows + db[dialect].delete "accounts", keys - # outside of the snapshot, delete rows - db.delete "accounts", keys - - # read rows and from snaphot and verify rows got from the snapshot - results2 = snp.read "accounts", [:account_id, :username], keys: keys - _(results2).must_be_kind_of Google::Cloud::Spanner::Results - rows2 = results2.rows.to_a - _(rows2.count).must_equal default_account_rows.count - 
rows2.zip(default_account_rows).each do |expected, actual| - _(expected[:account_id]).must_equal actual[:account_id] - _(expected[:username]).must_equal actual[:username] + # read rows and from snaphot and verify rows got from the snapshot + results2 = snp.read "accounts", [:account_id, :username], keys: keys + _(results2).must_be_kind_of Google::Cloud::Spanner::Results + rows2 = results2.rows.to_a + _(rows2.count).must_equal @default_rows[dialect].count + rows2.zip(@default_rows[dialect]).each do |expected, actual| + _(expected[:account_id]).must_equal actual[:account_id] + _(expected[:username]).must_equal actual[:username] + end end + + # outside of snapshot check all rows are deleted + rows3 = db[dialect].execute_sql("SELECT * FROM accounts").rows.to_a + _(rows3.count).must_equal 0 end - # outside of snapshot check all rows are deleted - rows3 = db.execute_sql("SELECT * FROM accounts").rows.to_a - _(rows3.count).must_equal 0 - end + it "multiuse snapshot reads with exact staleness are consistent even when delete happen for #{dialect}" do + keys = @default_rows[dialect].map{|row| row[:account_id] } - it "multiuse snapshot reads with exact staleness are consistent even when delete happen" do - keys = default_account_rows.map{|row| row[:account_id] } + sleep 1 + delta = 0.001 - sleep 1 - delta = 0.001 + db[dialect].snapshot exact_staleness: delta do |snp| + _(snp.transaction_id).wont_be :nil? + _(snp.timestamp).wont_be :nil? - db.snapshot exact_staleness: delta do |snp| - _(snp.transaction_id).wont_be :nil? - _(snp.timestamp).wont_be :nil? 
+ results = snp.read "accounts", [:account_id, :username], keys: keys + _(results).must_be_kind_of Google::Cloud::Spanner::Results - results = snp.read "accounts", [:account_id, :username], keys: keys - _(results).must_be_kind_of Google::Cloud::Spanner::Results + rows = results.rows.to_a + _(rows.count).must_equal @default_rows[dialect].count + rows.zip(@default_rows[dialect]).each do |expected, actual| + _(expected[:account_id]).must_equal actual[:account_id] + _(expected[:username]).must_equal actual[:username] + end - rows = results.rows.to_a - _(rows.count).must_equal default_account_rows.count - rows.zip(default_account_rows).each do |expected, actual| - _(expected[:account_id]).must_equal actual[:account_id] - _(expected[:username]).must_equal actual[:username] - end + # outside of the snapshot, delete rows + db[dialect].delete "accounts", keys - # outside of the snapshot, delete rows - db.delete "accounts", keys - - # read rows and from snaphot and verify rows got from the snapshot - results2 = snp.read "accounts", [:account_id, :username], keys: keys - _(results2).must_be_kind_of Google::Cloud::Spanner::Results - rows2 = results2.rows.to_a - _(rows2.count).must_equal default_account_rows.count - rows2.zip(default_account_rows).each do |expected, actual| - _(expected[:account_id]).must_equal actual[:account_id] - _(expected[:username]).must_equal actual[:username] + # read rows and from snaphot and verify rows got from the snapshot + results2 = snp.read "accounts", [:account_id, :username], keys: keys + _(results2).must_be_kind_of Google::Cloud::Spanner::Results + rows2 = results2.rows.to_a + _(rows2.count).must_equal @default_rows[dialect].count + rows2.zip(@default_rows[dialect]).each do |expected, actual| + _(expected[:account_id]).must_equal actual[:account_id] + _(expected[:username]).must_equal actual[:username] + end end - end - # outside of snapshot check all rows are deleted - rows3 = db.execute_sql("SELECT * FROM accounts").rows.to_a - 
_(rows3.count).must_equal 0 - end + # outside of snapshot check all rows are deleted + rows3 = db[dialect].execute_sql("SELECT * FROM accounts").rows.to_a + _(rows3.count).must_equal 0 + end + end + def assert_accounts_equal expected, actual if actual[:account_id].nil? diff --git a/google-cloud-spanner/acceptance/spanner/client/types/pgnumeric_test.rb b/google-cloud-spanner/acceptance/spanner/client/types/pgnumeric_test.rb index e69de29bb2d1..7406e43d4e4f 100644 --- a/google-cloud-spanner/acceptance/spanner/client/types/pgnumeric_test.rb +++ b/google-cloud-spanner/acceptance/spanner/client/types/pgnumeric_test.rb @@ -0,0 +1,97 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +require "spanner_helper" +require "bigdecimal" + +describe "Spanner Client", :types, :numeric, :spanner do + let(:db) { spanner_pg_client } + let(:table_name) { "stuffs" } + + it "writes and reads numeric" do + skip if emulator_enabled? + + num = BigDecimal("0.123456789") + id = SecureRandom.int64 + db.upsert table_name, { id: id, numeric: num } + results = db.read table_name, [:id, :numeric], keys: id + + _(results).must_be_kind_of Google::Cloud::Spanner::Results + _(results.fields.to_h).must_equal({ id: :INT64, numeric: :NUMERIC }) + _(results.rows.first.to_h).must_equal({ id: id, numeric: num }) + end + + it "writes and queries numeric" do + skip if emulator_enabled? 
+ + num = BigDecimal("0.123456789") + id = SecureRandom.int64 + db.upsert table_name, { id: id, numeric: num } + results = db.execute_sql "SELECT id, numeric FROM #{table_name} WHERE id = $1", params: { p1: id }, types: { p1: :PG_NUMERIC} + + _(results).must_be_kind_of Google::Cloud::Spanner::Results + _(results.fields.to_h).must_equal({ id: :INT64, numeric: :NUMERIC }) + _(results.rows.first.to_h).must_equal({ id: id, numeric: num }) + end + + it "writes and reads NULL numeric" do + skip if emulator_enabled? + + num = BigDecimal("0.123456789") + id = SecureRandom.int64 + db.upsert table_name, { id: id, numeric: nil } + results = db.read table_name, [:id, :numeric], keys: id + + _(results).must_be_kind_of Google::Cloud::Spanner::Results + _(results.fields.to_h).must_equal({ id: :INT64, numeric: :NUMERIC }) + _(results.rows.first.to_h).must_equal({ id: id, numeric: nil }) + end + + it "writes and queries NULL numeric" do + skip if emulator_enabled? + + num = BigDecimal("0.123456789") + id = SecureRandom.int64 + db.upsert table_name, { id: id, numeric: nil } + results = db.execute_sql "SELECT id, numeric FROM #{table_name} WHERE id = $1", params: { p1: id }, types: { p1: :PG_NUMERIC} + + _(results).must_be_kind_of Google::Cloud::Spanner::Results + _(results.fields.to_h).must_equal({ id: :INT64, numeric: :NUMERIC }) + _(results.rows.first.to_h).must_equal({ id: id, numeric: nil }) + end + + it "writes and reads nan of numeric" do + skip if emulator_enabled? + + id = SecureRandom.int64 + db.upsert table_name, { id: id, numeric: BigDecimal('NaN') } + results = db.read table_name, [:id, :numeric], keys: id + + _(results).must_be_kind_of Google::Cloud::Spanner::Results + _(results.fields.to_h).must_equal({ id: :INT64, numeric: :NUMERIC }) + _(results.rows.first.to_h[:numeric]).must_be :nan? + end + + it "writes and queries nan of numeric" do + skip if emulator_enabled? 
+ + id = SecureRandom.int64 + db.upsert table_name, { id: id, numeric: BigDecimal('NaN') } + results = db.execute_sql "SELECT id, numeric FROM #{table_name} WHERE id = $1", params: { p1: id }, types: { p1: :PG_NUMERIC} + + _(results).must_be_kind_of Google::Cloud::Spanner::Results + _(results.fields.to_h).must_equal({ id: :INT64, numeric: :NUMERIC }) + _(results.rows.first.to_h[:numeric]).must_be :nan? + end +end From 7865da385166eda6b438c0206fcaa8e5fca05026 Mon Sep 17 00:00:00 2001 From: Nivedha Date: Thu, 17 Feb 2022 09:51:02 +0530 Subject: [PATCH 07/19] skip test for emulator --- .../spanner/client/batch_update_test.rb | 9 ++++++--- .../acceptance/spanner/client/crud_test.rb | 7 +++++-- .../acceptance/spanner/client/dml_test.rb | 9 ++++++--- .../acceptance/spanner/client/execute_test.rb | 5 ++++- .../acceptance/spanner/client/pdml_test.rb | 9 ++++++--- .../spanner/client/single_use_test.rb | 9 ++++++--- .../acceptance/spanner/client/snapshot_test.rb | 8 +++++--- .../acceptance/spanner/database_client_test.rb | 1 + .../acceptance/spanner_helper.rb | 18 ++++++++++-------- 9 files changed, 49 insertions(+), 26 deletions(-) diff --git a/google-cloud-spanner/acceptance/spanner/client/batch_update_test.rb b/google-cloud-spanner/acceptance/spanner/client/batch_update_test.rb index 01fbc7812ae8..e74e31ccd214 100644 --- a/google-cloud-spanner/acceptance/spanner/client/batch_update_test.rb +++ b/google-cloud-spanner/acceptance/spanner/client/batch_update_test.rb @@ -46,15 +46,18 @@ db[:pg].commit do |c| c.delete "accounts" c.insert "accounts", default_pg_account_rows - end + end unless emulator_enabled? end after do db[:gsql].delete "accounts" - db[:pg].delete "accounts" + db[:pg].delete "accounts" unless emulator_enabled? end - [:gsql, :pg].each do |dialect| + dialects = [:gsql] + dialects.push(:pg) unless emulator_enabled? 
+ + dialects.each do |dialect| it "executes multiple DML statements in a batch for #{dialect}" do prior_results = db[dialect].execute_sql "SELECT * FROM accounts" diff --git a/google-cloud-spanner/acceptance/spanner/client/crud_test.rb b/google-cloud-spanner/acceptance/spanner/client/crud_test.rb index 879e048ac3aa..11f8e9bf0bbd 100644 --- a/google-cloud-spanner/acceptance/spanner/client/crud_test.rb +++ b/google-cloud-spanner/acceptance/spanner/client/crud_test.rb @@ -19,12 +19,15 @@ before do setup_timestamp_pg = db[:pg].delete "accounts" - setup_timestamp_gsql = db[:pg].delete "accounts" + setup_timestamp_gsql = db[:pg].delete "accounts" unless emulator_enabled? @setup_timestamp = {gsql: setup_timestamp_gsql, pg: setup_timestamp_pg} @default_rows = {gsql: default_account_rows, pg: default_pg_account_rows} end + + dialects = [:gsql] + dialects.push(:pg) unless emulator_enabled? - [:gsql, :pg].each do |dialect| + dialects.each do |dialect| it "inserts, updates, upserts, reads, and deletes records for #{dialect}" do results = db[dialect].read "accounts", ["account_id"], single_use: { timestamp: @setup_timestamp[dialect] } _(results.rows.count).must_equal 0 diff --git a/google-cloud-spanner/acceptance/spanner/client/dml_test.rb b/google-cloud-spanner/acceptance/spanner/client/dml_test.rb index ce297ba2ddd7..a4bc0c8690a4 100644 --- a/google-cloud-spanner/acceptance/spanner/client/dml_test.rb +++ b/google-cloud-spanner/acceptance/spanner/client/dml_test.rb @@ -42,15 +42,18 @@ db[:pg].commit do |c| c.delete "accounts" c.insert "accounts", default_pg_account_rows - end + end unless emulator_enabled? end after do - db[:pg].delete "accounts" + db[:pg].delete "accounts" unless emulator_enabled? db[:gsql].delete "accounts" end - [:gsql, :pg].each do |dialect| + dialects = [:gsql] + dialects.push(:pg) unless emulator_enabled? 
+ + dialects.each do |dialect| it "executes multiple DML statements in a transaction for #{dialect}" do prior_results = db[dialect].execute_sql "SELECT * FROM accounts" _(prior_results.rows.count).must_equal 3 diff --git a/google-cloud-spanner/acceptance/spanner/client/execute_test.rb b/google-cloud-spanner/acceptance/spanner/client/execute_test.rb index 22f7c3402c54..970a4760bb34 100644 --- a/google-cloud-spanner/acceptance/spanner/client/execute_test.rb +++ b/google-cloud-spanner/acceptance/spanner/client/execute_test.rb @@ -17,7 +17,10 @@ describe "Spanner Client", :execute_sql, :spanner do let(:db) { {gsql: spanner_client, pg: spanner_pg_client} } - [:gsql, :pg].each do |dialect| + dialects = [:gsql] + dialects.push(:pg) unless emulator_enabled? + + dialects.each do |dialect| it "runs SELECT 1 for #{dialect}" do results = db[dialect].execute_sql "SELECT 1" _(results).must_be_kind_of Google::Cloud::Spanner::Results diff --git a/google-cloud-spanner/acceptance/spanner/client/pdml_test.rb b/google-cloud-spanner/acceptance/spanner/client/pdml_test.rb index 09750ed276f2..39c20b132a69 100644 --- a/google-cloud-spanner/acceptance/spanner/client/pdml_test.rb +++ b/google-cloud-spanner/acceptance/spanner/client/pdml_test.rb @@ -26,15 +26,18 @@ db[:pg].commit do |c| c.delete "accounts" c.insert "accounts", default_pg_account_rows - end + end unless emulator_enabled? end after do db[:gsql].delete "accounts" - db[:pg].delete "accounts" + db[:pg].delete "accounts" unless emulator_enabled? end - [:gsql, :pg].each do |dialect| + dialects = [:gsql] + dialects.push(:pg) unless emulator_enabled? 
+ + dialects.each do |dialect| it "executes a simple Partitioned DML statement for #{dialect}" do prior_results = db[dialect].execute_sql "SELECT * FROM accounts WHERE active = TRUE" _(prior_results.rows.count).must_equal 2 diff --git a/google-cloud-spanner/acceptance/spanner/client/single_use_test.rb b/google-cloud-spanner/acceptance/spanner/client/single_use_test.rb index 71fc4fcaa482..74121491790c 100644 --- a/google-cloud-spanner/acceptance/spanner/client/single_use_test.rb +++ b/google-cloud-spanner/acceptance/spanner/client/single_use_test.rb @@ -31,17 +31,20 @@ setup_timestamp_pg = db[:pg].commit do |c| c.delete "accounts" c.insert "accounts", default_pg_account_rows - end + end unless emulator_enabled? @setup_timestamp = {gsql: setup_timestamp_gsql, pg: setup_timestamp_pg} @default_rows = {gsql: default_account_rows, pg: default_pg_account_rows} end after do db[:gsql].delete "accounts" - db[:pg].delete "accounts" + db[:pg].delete "accounts" unless emulator_enabled? end - [:gsql, :pg].each do |dialect| + dialects = [:gsql] + dialects.push(:pg) unless emulator_enabled? + + dialects.each do |dialect| it "runs a query with strong option for #{dialect}" do results = db[dialect].execute_sql "SELECT * FROM accounts ORDER BY account_id ASC", single_use: { strong: true } diff --git a/google-cloud-spanner/acceptance/spanner/client/snapshot_test.rb b/google-cloud-spanner/acceptance/spanner/client/snapshot_test.rb index 8ad8d951c314..4b11512cf091 100644 --- a/google-cloud-spanner/acceptance/spanner/client/snapshot_test.rb +++ b/google-cloud-spanner/acceptance/spanner/client/snapshot_test.rb @@ -36,18 +36,20 @@ setup_timestamp_pg = db[:pg].commit do |c| c.delete "accounts" c.insert "accounts", default_pg_account_rows - end + end unless emulator_enabled? 
@setup_timestamp = {gsql: setup_timestamp_gsql, pg: setup_timestamp_pg} @default_rows = {gsql: default_account_rows, pg: default_pg_account_rows} end after do db[:gsql].delete "accounts" - db[:pg].delete "accounts" + db[:pg].delete "accounts" unless emulator_enabled? end - [:gsql, :pg].each do |dialect| + dialects = [:gsql] + dialects.push(:pg) unless emulator_enabled? + dialects.each do |dialect| it "runs a query for #{dialect}" do results = nil db[dialect].snapshot do |snp| diff --git a/google-cloud-spanner/acceptance/spanner/database_client_test.rb b/google-cloud-spanner/acceptance/spanner/database_client_test.rb index edd41a17081e..c17652a5dcb2 100644 --- a/google-cloud-spanner/acceptance/spanner/database_client_test.rb +++ b/google-cloud-spanner/acceptance/spanner/database_client_test.rb @@ -63,6 +63,7 @@ end it "creates, gets, updates, and drops a database with pg dialect" do + skip if emulator_enabled? client = Google::Cloud::Spanner::Admin::Database.database_admin project_id: spanner.project instance_path = \ diff --git a/google-cloud-spanner/acceptance/spanner_helper.rb b/google-cloud-spanner/acceptance/spanner_helper.rb index b78f9f34d498..0e6fbe65c283 100644 --- a/google-cloud-spanner/acceptance/spanner_helper.rb +++ b/google-cloud-spanner/acceptance/spanner_helper.rb @@ -146,17 +146,19 @@ def assert_commit_response resp, commit_options = {} db_job.wait_until_done! fail GRPC::BadStatus.new(db_job.error.code, db_job.error.message) if db_job.error? -db_path = $spanner_db_admin.database_path project: $spanner.project_id, - instance: $spanner_instance_id, - database: $spanner_pg_database_id - -db_job = $spanner_db_admin.update_database_ddl database: db_path, statements: fixture.schema_pg_ddl_statements -db_job.wait_until_done! -fail GRPC::BadStatus.new(db_job.error.code, db_job.error.message) if db_job.error? +unless emulator_enabled? 
+ db_path = $spanner_db_admin.database_path project: $spanner.project_id, + instance: $spanner_instance_id, + database: $spanner_pg_database_id + + db_job = $spanner_db_admin.update_database_ddl database: db_path, statements: fixture.schema_pg_ddl_statements + db_job.wait_until_done! + fail GRPC::BadStatus.new(db_job.error.code, db_job.error.message) if db_job.error? +end # Create one client for all tests, to minimize resource usage $spanner_client = $spanner.client $spanner_instance_id, $spanner_database_id -$spanner_pg_client = $spanner.client $spanner_instance_id, $spanner_pg_database_id +$spanner_pg_client = $spanner.client $spanner_instance_id, $spanner_pg_database_id unless emulator_enabled? def clean_up_spanner_objects puts "Cleaning up instances and databases after spanner tests." From 2debcd14ca9f786a33b229568f9e92f554d6e94f Mon Sep 17 00:00:00 2001 From: Nivedha Date: Thu, 17 Feb 2022 10:30:24 +0530 Subject: [PATCH 08/19] cleanup client creation --- google-cloud-spanner/acceptance/spanner_helper.rb | 14 +++++--------- 1 file changed, 5 insertions(+), 9 deletions(-) diff --git a/google-cloud-spanner/acceptance/spanner_helper.rb b/google-cloud-spanner/acceptance/spanner_helper.rb index 0e6fbe65c283..8b104a1b90ab 100644 --- a/google-cloud-spanner/acceptance/spanner_helper.rb +++ b/google-cloud-spanner/acceptance/spanner_helper.rb @@ -74,8 +74,6 @@ def setup @spanner_pg_client = $spanner_pg_client - refute_nil @spanner_pg_client, "You do not have an active client to run the tests." - super end @@ -138,15 +136,13 @@ def assert_commit_response resp, commit_options = {} db_job.wait_until_done! fail GRPC::BadStatus.new(db_job.error.code, db_job.error.message) if db_job.error? - -instance_path = $spanner_db_admin.instance_path project: $spanner.project_id, instance: $spanner_instance_id -db_job = $spanner_db_admin.create_database parent: instance_path, +unless emulator_enabled? 
+ instance_path = $spanner_db_admin.instance_path project: $spanner.project_id, instance: $spanner_instance_id + db_job = $spanner_db_admin.create_database parent: instance_path, create_statement: "CREATE DATABASE \"#{$spanner_pg_database_id}\"", database_dialect: :POSTGRESQL -db_job.wait_until_done! -fail GRPC::BadStatus.new(db_job.error.code, db_job.error.message) if db_job.error? - -unless emulator_enabled? + db_job.wait_until_done! + fail GRPC::BadStatus.new(db_job.error.code, db_job.error.message) if db_job.error? db_path = $spanner_db_admin.database_path project: $spanner.project_id, instance: $spanner_instance_id, database: $spanner_pg_database_id From 9c9db9dffec92913b46a4cf142ed4acec2526132 Mon Sep 17 00:00:00 2001 From: Nivedha Date: Thu, 17 Feb 2022 10:43:24 +0530 Subject: [PATCH 09/19] update fixture --- acceptance/data/fixtures.rb | 74 +++++++++++++++++++------------------ 1 file changed, 39 insertions(+), 35 deletions(-) diff --git a/acceptance/data/fixtures.rb b/acceptance/data/fixtures.rb index f9d24751ecfc..135c49a64f96 100644 --- a/acceptance/data/fixtures.rb +++ b/acceptance/data/fixtures.rb @@ -101,14 +101,14 @@ def accounts_pg_ddl_statement end def lists_ddl_statement - <<-LISTS - CREATE TABLE task_lists ( - account_id INT64 NOT NULL, - task_list_id INT64 NOT NULL, - description STRING(1024) NOT NULL - ) PRIMARY KEY (account_id, task_list_id), - INTERLEAVE IN PARENT accounts ON DELETE CASCADE - LISTS + <<-LISTS + CREATE TABLE task_lists ( + account_id INT64 NOT NULL, + task_list_id INT64 NOT NULL, + description STRING(1024) NOT NULL + ) PRIMARY KEY (account_id, task_list_id), + INTERLEAVE IN PARENT accounts ON DELETE CASCADE + LISTS end def lists_pg_ddl_statement @@ -123,39 +123,43 @@ def lists_pg_ddl_statement end def items_ddl_statement - <<-ITEMS - CREATE TABLE task_items ( - account_id INT64 NOT NULL, - task_list_id INT64 NOT NULL, - task_item_id INT64 NOT NULL, - description STRING(1024) NOT NULL, - active BOOL NOT NULL, - priority INT64 
NOT NULL, - due_date DATE, - created_at TIMESTAMP, - updated_at TIMESTAMP - ) PRIMARY KEY (account_id, task_list_id, task_item_id), - INTERLEAVE IN PARENT task_lists ON DELETE CASCADE - ITEMS + <<-ITEMS + CREATE TABLE task_items ( + account_id INT64 NOT NULL, + task_list_id INT64 NOT NULL, + task_item_id INT64 NOT NULL, + description STRING(1024) NOT NULL, + active BOOL NOT NULL, + priority INT64 NOT NULL, + due_date DATE, + created_at TIMESTAMP, + updated_at TIMESTAMP + ) PRIMARY KEY (account_id, task_list_id, task_item_id), + INTERLEAVE IN PARENT task_lists ON DELETE CASCADE + ITEMS end def numeric_pk_ddl_statement - <<-BOXES - CREATE TABLE boxes ( - id NUMERIC NOT NULL, - name STRING(256) NOT NULL, - ) PRIMARY KEY (id) - BOXES + return + + <<-BOXES + CREATE TABLE boxes ( + id NUMERIC NOT NULL, + name STRING(256) NOT NULL, + ) PRIMARY KEY (id) + BOXES end def numeric_composite_pk_ddl_statement - <<-BOX_ITEMS - CREATE TABLE box_items ( - id INT64 NOT NULL, - box_id NUMERIC NOT NULL, - name STRING(256) NOT NULL - ) PRIMARY KEY (id, box_id) - BOX_ITEMS + return + + <<-BOX_ITEMS + CREATE TABLE box_items ( + id INT64 NOT NULL, + box_id NUMERIC NOT NULL, + name STRING(256) NOT NULL + ) PRIMARY KEY (id, box_id) + BOX_ITEMS end def schema_pg_ddl_statements From 2e03c9db0f689b9a31a46923ecbba2a343f21834 Mon Sep 17 00:00:00 2001 From: Nivedha Date: Thu, 17 Feb 2022 10:55:35 +0530 Subject: [PATCH 10/19] skip pg tests for emulator --- .../acceptance/spanner/batch_client/execute_partition_test.rb | 3 +++ .../acceptance/spanner/client/params/pgnumeric_test.rb | 3 +++ 2 files changed, 6 insertions(+) diff --git a/google-cloud-spanner/acceptance/spanner/batch_client/execute_partition_test.rb b/google-cloud-spanner/acceptance/spanner/batch_client/execute_partition_test.rb index 0aaee9128393..3a359c3c2920 100644 --- a/google-cloud-spanner/acceptance/spanner/batch_client/execute_partition_test.rb +++ b/google-cloud-spanner/acceptance/spanner/batch_client/execute_partition_test.rb @@ 
-97,6 +97,7 @@ end it "reads all by default in pg" do + skip if emulator_enabled? skip("Skipped due to https://b.corp.google.com/issues/216209306") _(pg_batch_snapshot.timestamp).must_be_kind_of Time serialized_snapshot = pg_batch_snapshot.dump @@ -158,6 +159,7 @@ end it "queries all by default in pg" do + skip if emulator_enabled? pg_batch_snapshot = pg_batch_client.batch_snapshot serialized_snapshot = pg_batch_snapshot.dump @@ -215,6 +217,7 @@ end it "queries all by default with query options pg" do + skip if emulator_enabled? pg_batch_snapshot = pg_batch_client.batch_snapshot serialized_snapshot = pg_batch_snapshot.dump diff --git a/google-cloud-spanner/acceptance/spanner/client/params/pgnumeric_test.rb b/google-cloud-spanner/acceptance/spanner/client/params/pgnumeric_test.rb index a2a0d9f17293..985443ff4c41 100644 --- a/google-cloud-spanner/acceptance/spanner/client/params/pgnumeric_test.rb +++ b/google-cloud-spanner/acceptance/spanner/client/params/pgnumeric_test.rb @@ -19,6 +19,7 @@ let(:db) { spanner_pg_client } it "queries and returns a BigDecimal parameter" do + skip if emulator_enabled? results = db.execute_query "SELECT $1 AS value", params: { p1: BigDecimal(1) }, types: { p1: :PG_NUMERIC } _(results).must_be_kind_of Google::Cloud::Spanner::Results @@ -27,6 +28,7 @@ end it "queries and returns a NULL parameter" do + skip if emulator_enabled? results = db.execute_query "SELECT $1 AS value", params: { p1: nil }, types: { p1: :PG_NUMERIC } _(results).must_be_kind_of Google::Cloud::Spanner::Results @@ -35,6 +37,7 @@ end it "queries and returns a NAN BigDecimal parameter" do + skip if emulator_enabled? 
results = db.execute_query "SELECT $1 AS value", params: { p1: BigDecimal('NaN') }, types: { p1: :PG_NUMERIC } _(results).must_be_kind_of Google::Cloud::Spanner::Results From ef6ed7cf8e544f43c5847c1bf634b3bd9d970a51 Mon Sep 17 00:00:00 2001 From: Nivedha Date: Thu, 17 Feb 2022 12:05:09 +0530 Subject: [PATCH 11/19] clean up execute_partition_test --- .../spanner/batch_client/execute_partition_test.rb | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/google-cloud-spanner/acceptance/spanner/batch_client/execute_partition_test.rb b/google-cloud-spanner/acceptance/spanner/batch_client/execute_partition_test.rb index 3a359c3c2920..6b608ecbabc9 100644 --- a/google-cloud-spanner/acceptance/spanner/batch_client/execute_partition_test.rb +++ b/google-cloud-spanner/acceptance/spanner/batch_client/execute_partition_test.rb @@ -18,11 +18,11 @@ let(:db) { spanner_client } let(:pg_db) { spanner_pg_client } let(:batch_client) { $spanner.batch_client $spanner_instance_id, $spanner_database_id } - let(:pg_batch_client) { $spanner.batch_client $spanner_instance_id, $spanner_pg_database_id } + let(:pg_batch_client) { $spanner.batch_client $spanner_instance_id, $spanner_pg_database_id unless emulator_enabled?} let(:table_name) { "stuffs" } let(:table_index) { "IsStuffsIdPrime" } let(:batch_snapshot) { batch_client.batch_snapshot } - let(:pg_batch_snapshot) { pg_batch_client.batch_snapshot } + let(:pg_batch_snapshot) { pg_batch_client.batch_snapshot unless emulator_enabled? } before do db.delete table_name # remove all data @@ -40,7 +40,7 @@ { id: 11, bool: true }, { id: 12, bool: false } ] - pg_db.delete table_name # remove all data + pg_db.delete table_name unless emulator_enabled? pg_db.insert table_name, [ { id: 1, bool: false }, { id: 2, bool: false }, @@ -54,14 +54,14 @@ { id: 10, bool: false }, { id: 11, bool: true }, { id: 12, bool: false } - ] + ] unless emulator_enabled? 
end after do batch_snapshot.close db.delete table_name # remove all data - pg_batch_snapshot.close - pg_db.delete table_name + pg_batch_snapshot.close unless emulator_enabled? + pg_db.delete table_name unless emulator_enabled? end it "reads all by default" do From 3ddb7f16ee1ecc6e8a645ef008d01615b54f1ef9 Mon Sep 17 00:00:00 2001 From: Nivedha Date: Thu, 17 Feb 2022 12:15:07 +0530 Subject: [PATCH 12/19] cleanup for emulator run --- google-cloud-spanner/acceptance/spanner/client/crud_test.rb | 4 ++-- google-cloud-spanner/acceptance/spanner_helper.rb | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/google-cloud-spanner/acceptance/spanner/client/crud_test.rb b/google-cloud-spanner/acceptance/spanner/client/crud_test.rb index 11f8e9bf0bbd..aacda063ddf7 100644 --- a/google-cloud-spanner/acceptance/spanner/client/crud_test.rb +++ b/google-cloud-spanner/acceptance/spanner/client/crud_test.rb @@ -18,8 +18,8 @@ let(:db) { {gsql: spanner_client, pg: spanner_pg_client} } before do - setup_timestamp_pg = db[:pg].delete "accounts" - setup_timestamp_gsql = db[:pg].delete "accounts" unless emulator_enabled? + setup_timestamp_gsql = db[:gsql].delete "accounts" + setup_timestamp_pg = db[:pg].delete "accounts" unless emulator_enabled? @setup_timestamp = {gsql: setup_timestamp_gsql, pg: setup_timestamp_pg} @default_rows = {gsql: default_account_rows, pg: default_pg_account_rows} end diff --git a/google-cloud-spanner/acceptance/spanner_helper.rb b/google-cloud-spanner/acceptance/spanner_helper.rb index 8b104a1b90ab..28d6425c9ed7 100644 --- a/google-cloud-spanner/acceptance/spanner_helper.rb +++ b/google-cloud-spanner/acceptance/spanner_helper.rb @@ -159,11 +159,11 @@ def assert_commit_response resp, commit_options = {} def clean_up_spanner_objects puts "Cleaning up instances and databases after spanner tests." 
$spanner.instance($spanner_instance_id).database($spanner_database_id).drop - $spanner.instance($spanner_instance_id).database($spanner_pg_database_id).drop + $spanner.instance($spanner_instance_id).database($spanner_pg_database_id).drop unless emulator_enabled? puts "Closing the Spanner Client." $spanner_client.close - $spanner_pg_client.close + $spanner_pg_client.close unless emulator_enabled? puts "Cleaning up instances databases and backups after spanner tests." instance = $spanner.instance($spanner_instance_id) From 1ce8249e5a433d8681526077b2c23b638302e447 Mon Sep 17 00:00:00 2001 From: Nivedha Date: Fri, 18 Feb 2022 17:03:48 +0530 Subject: [PATCH 13/19] add rubocop for acceptance - fix linting and license --- acceptance/data/fixtures.rb | 520 +++++++++--------- google-cloud-spanner/.rubocop.yml | 18 +- google-cloud-spanner/Rakefile | 54 +- .../acceptance/instance_client_test.rb | 6 +- .../spanner/backup_operations_test.rb | 16 +- .../acceptance/spanner/backup_test.rb | 18 +- .../batch_client/execute_partition_test.rb | 48 +- .../spanner/client/batch_update_test.rb | 84 +-- .../acceptance/spanner/client/crud_test.rb | 26 +- .../acceptance/spanner/client/dml_test.rb | 72 ++- .../acceptance/spanner/client/execute_test.rb | 12 +- .../spanner/client/large_data_test.rb | 19 +- .../spanner/client/params/bool_test.rb | 4 +- .../spanner/client/params/bytes_test.rb | 6 +- .../spanner/client/params/date_test.rb | 6 +- .../spanner/client/params/float64_test.rb | 7 +- .../spanner/client/params/json_test.rb | 4 +- .../spanner/client/params/pgnumeric_test.rb | 7 +- .../spanner/client/params/struct_test.rb | 22 +- .../spanner/client/params/timestamp_test.rb | 22 +- .../acceptance/spanner/client/pdml_test.rb | 25 +- .../acceptance/spanner/client/read_test.rb | 161 ++++-- .../spanner/client/single_use_test.rb | 114 ++-- .../spanner/client/snapshot_test.rb | 120 ++-- .../spanner/client/transaction_test.rb | 10 +- .../spanner/client/types/bytes_test.rb | 4 +- 
.../spanner/client/types/date_test.rb | 24 +- .../spanner/client/types/json_test.rb | 6 +- .../spanner/client/types/numeric_test.rb | 2 - .../spanner/client/types/pgnumeric_test.rb | 17 +- .../spanner/client/types/timestamp_test.rb | 36 +- .../spanner/database_client_test.rb | 2 +- .../acceptance/spanner/database_test.rb | 14 +- .../acceptance/spanner_helper.rb | 52 +- .../lib/google/cloud/spanner/convert.rb | 5 +- google-cloud-spanner/test.rb | 30 + 36 files changed, 921 insertions(+), 672 deletions(-) create mode 100644 google-cloud-spanner/test.rb diff --git a/acceptance/data/fixtures.rb b/acceptance/data/fixtures.rb index 135c49a64f96..2981feeaec47 100644 --- a/acceptance/data/fixtures.rb +++ b/acceptance/data/fixtures.rb @@ -1,282 +1,276 @@ module Acceptance + module Fixtures + def stuffs_ddl_statement + if emulator_enabled? + <<~STUFFS + CREATE TABLE stuffs ( + id INT64 NOT NULL, + int INT64, + float FLOAT64, + bool BOOL, + string STRING(MAX), + byte BYTES(MAX), + date DATE, + timestamp TIMESTAMP OPTIONS (allow_commit_timestamp=true), + ints ARRAY, + floats ARRAY, + bools ARRAY, + strings ARRAY, + bytes ARRAY, + dates ARRAY, + timestamps ARRAY + ) PRIMARY KEY (id) + STUFFS + else + <<~STUFFS + CREATE TABLE stuffs ( + id INT64 NOT NULL, + int INT64, + float FLOAT64, + bool BOOL, + string STRING(MAX), + byte BYTES(MAX), + date DATE, + timestamp TIMESTAMP OPTIONS (allow_commit_timestamp=true), + numeric NUMERIC, + json JSON, + ints ARRAY, + floats ARRAY, + bools ARRAY, + strings ARRAY, + bytes ARRAY, + dates ARRAY, + timestamps ARRAY, + numerics ARRAY, + json_array ARRAY + ) PRIMARY KEY (id) + STUFFS + end + end - module Fixtures - def stuffs_ddl_statement - if emulator_enabled? 
- <<-STUFFS - CREATE TABLE stuffs ( - id INT64 NOT NULL, - int INT64, - float FLOAT64, - bool BOOL, - string STRING(MAX), - byte BYTES(MAX), - date DATE, - timestamp TIMESTAMP OPTIONS (allow_commit_timestamp=true), - ints ARRAY, - floats ARRAY, - bools ARRAY, - strings ARRAY, - bytes ARRAY, - dates ARRAY, - timestamps ARRAY - ) PRIMARY KEY (id) - STUFFS - else - <<-STUFFS - CREATE TABLE stuffs ( - id INT64 NOT NULL, - int INT64, - float FLOAT64, - bool BOOL, - string STRING(MAX), - byte BYTES(MAX), - date DATE, - timestamp TIMESTAMP OPTIONS (allow_commit_timestamp=true), - numeric NUMERIC, - json JSON, - ints ARRAY, - floats ARRAY, - bools ARRAY, - strings ARRAY, - bytes ARRAY, - dates ARRAY, - timestamps ARRAY, - numerics ARRAY, - json_array ARRAY - ) PRIMARY KEY (id) - STUFFS - end - end + def stuff_pg_ddl_statement + <<~STUFFS + CREATE TABLE stuffs ( + id bigint NOT NULL, + "int" bigint, + "float" double precision, + "numeric" NUMERIC, + bool boolean, + string character varying, + byte bytea, + PRIMARY KEY(id) + ); + STUFFS + end - def stuff_pg_ddl_statement - <<-STUFFS - CREATE TABLE stuffs ( - id bigint NOT NULL, - "int" bigint, - "float" double precision, - "numeric" NUMERIC, - bool boolean, - string character varying, - byte bytea, - PRIMARY KEY(id) - ); - STUFFS - end + def stuffs_index_statement + "CREATE INDEX IsStuffsIdPrime ON stuffs(bool, id)" + end - def stuffs_index_statement - "CREATE INDEX IsStuffsIdPrime ON stuffs(bool, id)" - end + def commit_timestamp_test_ddl_statement + <<~TEST + CREATE TABLE commit_timestamp_test(committs TIMESTAMP OPTIONS (allow_commit_timestamp=true)) PRIMARY KEY (committs) + TEST + end - def commit_timestamp_test_ddl_statement - <<-TEST - CREATE TABLE commit_timestamp_test(committs TIMESTAMP OPTIONS (allow_commit_timestamp=true)) PRIMARY KEY (committs) - TEST - end + def accounts_ddl_statement + <<~ACCOUNTS + CREATE TABLE accounts ( + account_id INT64 NOT NULL, + username STRING(32), + friends ARRAY, + active BOOL NOT 
NULL, + reputation FLOAT64, + avatar BYTES(8192) + ) PRIMARY KEY (account_id) + ACCOUNTS + end - def accounts_ddl_statement - <<-ACCOUNTS - CREATE TABLE accounts ( - account_id INT64 NOT NULL, - username STRING(32), - friends ARRAY, + def accounts_pg_ddl_statement + <<~ACCOUNTS + CREATE TABLE accounts ( + account_id INT NOT NULL, + username TEXT, active BOOL NOT NULL, - reputation FLOAT64, - avatar BYTES(8192) - ) PRIMARY KEY (account_id) - ACCOUNTS - end - - def accounts_pg_ddl_statement - <<-ACCOUNTS - CREATE TABLE accounts ( - account_id INT NOT NULL, - username TEXT, - active BOOL NOT NULL, - reputation FLOAT, - avatar bytea, - PRIMARY KEY(account_id) - ); - ACCOUNTS - end - - def lists_ddl_statement - <<-LISTS - CREATE TABLE task_lists ( - account_id INT64 NOT NULL, - task_list_id INT64 NOT NULL, - description STRING(1024) NOT NULL - ) PRIMARY KEY (account_id, task_list_id), - INTERLEAVE IN PARENT accounts ON DELETE CASCADE - LISTS - end - - def lists_pg_ddl_statement - <<-LISTS - CREATE TABLE task_lists ( - account_id INT NOT NULL, - task_list_id INT NOT NULL, - description TEXT NOT NULL, - PRIMARY KEY (account_id, task_list_id) - ) INTERLEAVE IN PARENT accounts ON DELETE CASCADE - LISTS - end + reputation FLOAT, + avatar bytea, + PRIMARY KEY(account_id) + ); + ACCOUNTS + end - def items_ddl_statement - <<-ITEMS - CREATE TABLE task_items ( - account_id INT64 NOT NULL, - task_list_id INT64 NOT NULL, - task_item_id INT64 NOT NULL, - description STRING(1024) NOT NULL, - active BOOL NOT NULL, - priority INT64 NOT NULL, - due_date DATE, - created_at TIMESTAMP, - updated_at TIMESTAMP - ) PRIMARY KEY (account_id, task_list_id, task_item_id), - INTERLEAVE IN PARENT task_lists ON DELETE CASCADE - ITEMS - end + def lists_ddl_statement + <<~LISTS + CREATE TABLE task_lists ( + account_id INT64 NOT NULL, + task_list_id INT64 NOT NULL, + description STRING(1024) NOT NULL + ) PRIMARY KEY (account_id, task_list_id), + INTERLEAVE IN PARENT accounts ON DELETE CASCADE + LISTS + 
end - def numeric_pk_ddl_statement - return + def lists_pg_ddl_statement + <<~LISTS + CREATE TABLE task_lists ( + account_id INT NOT NULL, + task_list_id INT NOT NULL, + description TEXT NOT NULL, + PRIMARY KEY (account_id, task_list_id) + ) INTERLEAVE IN PARENT accounts ON DELETE CASCADE + LISTS + end - <<-BOXES - CREATE TABLE boxes ( - id NUMERIC NOT NULL, - name STRING(256) NOT NULL, - ) PRIMARY KEY (id) - BOXES - end + def items_ddl_statement + <<~ITEMS + CREATE TABLE task_items ( + account_id INT64 NOT NULL, + task_list_id INT64 NOT NULL, + task_item_id INT64 NOT NULL, + description STRING(1024) NOT NULL, + active BOOL NOT NULL, + priority INT64 NOT NULL, + due_date DATE, + created_at TIMESTAMP, + updated_at TIMESTAMP + ) PRIMARY KEY (account_id, task_list_id, task_item_id), + INTERLEAVE IN PARENT task_lists ON DELETE CASCADE + ITEMS + end - def numeric_composite_pk_ddl_statement - return + def schema_pg_ddl_statements + [ + stuff_pg_ddl_statement, + accounts_pg_ddl_statement, + lists_pg_ddl_statement + ].compact + end - <<-BOX_ITEMS - CREATE TABLE box_items ( - id INT64 NOT NULL, - box_id NUMERIC NOT NULL, - name STRING(256) NOT NULL - ) PRIMARY KEY (id, box_id) - BOX_ITEMS - end + def schema_ddl_statements + [ + stuffs_ddl_statement, + stuffs_index_statement, + accounts_ddl_statement, + lists_ddl_statement, + items_ddl_statement, + commit_timestamp_test_ddl_statement, + numeric_pk_ddl_statement, + numeric_composite_pk_ddl_statement + ].compact + end - def schema_pg_ddl_statements - [ - stuff_pg_ddl_statement, - accounts_pg_ddl_statement, - lists_pg_ddl_statement - ].compact - end + def stuffs_table_types + { id: :INT64, + int: :INT64, + float: :FLOAT64, + bool: :BOOL, + string: :STRING, + byte: :BYTES, + date: :DATE, + timestamp: :TIMESTAMP, + json: :JSON, + ints: [:INT64], + floats: [:FLOAT64], + bools: [:BOOL], + strings: [:STRING], + bytes: [:BYTES], + dates: [:DATE], + timestamps: [:TIMESTAMP], + jsons: [:JSON] } + end - def schema_ddl_statements - [ - 
stuffs_ddl_statement, - stuffs_index_statement, - accounts_ddl_statement, - lists_ddl_statement, - items_ddl_statement, - commit_timestamp_test_ddl_statement, - numeric_pk_ddl_statement, - numeric_composite_pk_ddl_statement - ].compact - end + def stuffs_random_row id = SecureRandom.int64 + { id: id, + int: rand(0..1000), + float: rand(0.0..100.0), + bool: [true, false].sample, + string: SecureRandom.hex(16), + byte: File.open("acceptance/data/face.jpg", "rb"), + date: Date.today + rand(-100..100), + timestamp: Time.now + rand(-60 * 60 * 24.0..60 * 60 * 24.0), + json: { venue: "Yellow Lake", rating: 10 }, + ints: rand(2..10).times.map { rand(0..1000) }, + floats: rand(2..10).times.map { rand(0.0..100.0) }, + bools: rand(2..10).times.map { [true, false].sample }, + strings: rand(2..10).times.map { SecureRandom.hex(16) }, + bytes: [File.open("acceptance/data/face.jpg", "rb"), + File.open("acceptance/data/landmark.jpg", "rb"), + File.open("acceptance/data/logo.jpg", "rb")], + dates: rand(2..10).times.map { Date.today + rand(-100..100) }, + timestamps: rand(2..10).times.map { Time.now + rand(-60 * 60 * 24.0..60 * 60 * 24.0) }, + json_array: [{ venue: "Green Lake", rating: 8 }, { venue: "Blue Lake", rating: 9 }] } + end - def stuffs_table_types - { id: :INT64, - int: :INT64, - float: :FLOAT64, - bool: :BOOL, - string: :STRING, - byte: :BYTES, - date: :DATE, - timestamp: :TIMESTAMP, - json: :JSON, - ints: [:INT64], - floats: [:FLOAT64], - bools: [:BOOL], - strings: [:STRING], - bytes: [:BYTES], - dates: [:DATE], - timestamps: [:TIMESTAMP], - jsons: [:JSON] + def default_account_rows + [ + { + account_id: 1, + username: "blowmage", + reputation: 63.5, + active: true, + avatar: File.open("acceptance/data/logo.jpg", "rb"), + friends: [2] + }, { + account_id: 2, + username: "quartzmo", + reputation: 87.9, + active: true, + avatar: StringIO.new("iVBORw0KGgoAAAANSUhEUgAAAEAAAABACAYAAACqaXHeAAAAAXNSR0IArs4c6QAAABxpRE9U\ + 
AAAAAgAAAAAAAAAgAAAAKAAAACAAAAAgAAABxj2CfowAAAGSSURBVHgB7Jc9TsNAEIX3JDkCPUV6KlpKFHEGlD4nyA04A\ + CUXQKTgCEipUnKGNEbP0otentayicZ24SlWs7tjO/N9u/5J2b2+NUtuZcnwYE8BuQPyGZAPwXwLLPk5kG+BJa9+fgfkh1\ + B+CeancL4F8i2Q/wWm/S/w+XFoTseftn0dvhu0OXfhpM+AGvzcEiYVAFisPqE9zrETJhHAlXfg2lglMK9z0f3RBfB+ZyR\ + UV3x+erzsEIjjOBqc1xtNAIrvguybV3A9lkVHxlEE6GrrPb/ZvAySwlUnfCmlPQ+R8JCExvGtcRQBLFwj4FGkznX1VYDKP\ + G/f2/MjwCksXACgdNUxJjwK9xwl4JihOwTFR0kIF+CABEPRnvsvPFctMoYKqAFSAFaMwB4pp3Y+bodIYL9WmIAaIOHxo7\ + W8wiHvAjTvhUeNwwSgeAeAABbqOewC5hBdwFD4+9+7puzXV9fS6/b1wwT4tsaYAhwOOQdUQch5vgZCeAhAv3ZM31yYAA\ + UgvApQQQ6n5w6FB/RVe1jdJOAPAAD//1eMQwoAAAGQSURBVO1UMU4DQQy8X9AgWopIUINEkS4VlJQo4gvwAV7AD3gEH4i\ + SgidESpWSXyyZExP5lr0c7K5PsXBhec/2+jzjuWtent9CLdtu1mG5+gjz+WNr7IsY7eH+tvO+xfuqk4vz7CH91edFaF5v\ + 9nb6dBKm13edvrL+0Lk5lMzJkQDeJSkkgHF6mR8CHwMHCQR/NAQQGD0BAlwK4FCefQiefq+A2Vn29tG7igLAfmwcnJu/nJ\ + y3BMQkMN9HEPr8AL3bfBv7Bp+7/SoExMDjZwKEJwmyhnnmQIQEBIlz2x0iKoAvJkAC6TsTIH6MqRrEWUMSZF2zAwqT4Eu/e\ + 6pzFAIkmNSZ4OFT+VYBIIF//UqbJwnF/4DU0GwOn8r/JQYCpPGufEfJuZiA37ycQw/5uFeqPq4pfR6FADmkBCXjfWdZj3Nf\ + XW58dAJyB9W65wRoMWulryvAyqa05nQFaDFrpa8rwMqmtOZ0BWgxa6WvK8DKprTmdAVoMWulryvAyqa05nQFaDFrpa8rw\ + MqmtOb89wr4AtQ4aPoL6yVpAAAAAElFTkSuQmCC"), + friends: [1] + }, { + account_id: 3, + username: "-inactive-", + active: false } - end + ] + end - def stuffs_random_row id = SecureRandom.int64 - { id: id, - int: rand(0..1000), - float: rand(0.0..100.0), - bool: [true, false].sample, - string: SecureRandom.hex(16), - byte: File.open("acceptance/data/face.jpg", "rb"), - date: Date.today + rand(-100..100), - timestamp: Time.now + rand(-60*60*24.0..60*60*24.0), - json: { venue: "Yellow Lake", rating: 10 }, - ints: rand(2..10).times.map { rand(0..1000) }, - floats: rand(2..10).times.map { rand(0.0..100.0) }, - bools: rand(2..10).times.map { [true, false].sample }, - strings: rand(2..10).times.map { SecureRandom.hex(16) }, - bytes: [File.open("acceptance/data/face.jpg", "rb"), - File.open("acceptance/data/landmark.jpg", "rb"), 
- File.open("acceptance/data/logo.jpg", "rb")], - dates: rand(2..10).times.map { Date.today + rand(-100..100) }, - timestamps: rand(2..10).times.map { Time.now + rand(-60*60*24.0..60*60*24.0) }, - json_array: [{ venue: "Green Lake", rating: 8 }, { venue: "Blue Lake", rating: 9 }] + def default_pg_account_rows + [ + { + account_id: 1, + username: "blowmage", + reputation: 63.5, + active: true, + avatar: File.open("acceptance/data/logo.jpg", "rb") + }, { + account_id: 2, + username: "quartzmo", + reputation: 87.9, + active: true, + avatar: StringIO.new("iVBORw0KGgoAAAANSUhEUgAAAEAAAABACAYAAACqaXHeAAAAAXNSR0IArs4c6QAAABxpRE9U\ + AAAAAgAAAAAAAAAgAAAAKAAAACAAAAAgAAABxj2CfowAAAGSSURBVHgB7Jc9TsNAEIX3JDkCPUV6KlpKFHEGlD4nyA04A\ + CUXQKTgCEipUnKGNEbP0otentayicZ24SlWs7tjO/N9u/5J2b2+NUtuZcnwYE8BuQPyGZAPwXwLLPk5kG+BJa9+fgfkh1\ + B+CeancL4F8i2Q/wWm/S/w+XFoTseftn0dvhu0OXfhpM+AGvzcEiYVAFisPqE9zrETJhHAlXfg2lglMK9z0f3RBfB+ZyR\ + UV3x+erzsEIjjOBqc1xtNAIrvguybV3A9lkVHxlEE6GrrPb/ZvAySwlUnfCmlPQ+R8JCExvGtcRQBLFwj4FGkznX1VYDKP\ + G/f2/MjwCksXACgdNUxJjwK9xwl4JihOwTFR0kIF+CABEPRnvsvPFctMoYKqAFSAFaMwB4pp3Y+bodIYL9WmIAaIOHxo7\ + W8wiHvAjTvhUeNwwSgeAeAABbqOewC5hBdwFD4+9+7puzXV9fS6/b1wwT4tsaYAhwOOQdUQch5vgZCeAhAv3ZM31yYAA\ + UgvApQQQ6n5w6FB/RVe1jdJOAPAAD//1eMQwoAAAGQSURBVO1UMU4DQQy8X9AgWopIUINEkS4VlJQo4gvwAV7AD3gEH4i\ + SgidESpWSXyyZExP5lr0c7K5PsXBhec/2+jzjuWtent9CLdtu1mG5+gjz+WNr7IsY7eH+tvO+xfuqk4vz7CH91edFaF5v\ + 9nb6dBKm13edvrL+0Lk5lMzJkQDeJSkkgHF6mR8CHwMHCQR/NAQQGD0BAlwK4FCefQiefq+A2Vn29tG7igLAfmwcnJu/nJ\ + y3BMQkMN9HEPr8AL3bfBv7Bp+7/SoExMDjZwKEJwmyhnnmQIQEBIlz2x0iKoAvJkAC6TsTIH6MqRrEWUMSZF2zAwqT4Eu/e\ + 6pzFAIkmNSZ4OFT+VYBIIF//UqbJwnF/4DU0GwOn8r/JQYCpPGufEfJuZiA37ycQw/5uFeqPq4pfR6FADmkBCXjfWdZj3Nf\ + XW58dAJyB9W65wRoMWulryvAyqa05nQFaDFrpa8rwMqmtOZ0BWgxa6WvK8DKprTmdAVoMWulryvAyqa05nQFaDFrpa8rw\ + MqmtOb89wr4AtQ4aPoL6yVpAAAAAElFTkSuQmCC") + }, { + account_id: 3, + username: "-inactive-", + active: false } - end - - def default_account_rows - [ - { - account_id: 1, - username: "blowmage", - 
reputation: 63.5, - active: true, - avatar: File.open("acceptance/data/logo.jpg", "rb"), - friends: [2] - }, { - account_id: 2, - username: "quartzmo", - reputation: 87.9, - active: true, - avatar: StringIO.new("iVBORw0KGgoAAAANSUhEUgAAAEAAAABACAYAAACqaXHeAAAAAXNSR0IArs4c6QAAABxpRE9UAAAAAgAAAAAAAAAgAAAAKAAAACAAAAAgAAABxj2CfowAAAGSSURBVHgB7Jc9TsNAEIX3JDkCPUV6KlpKFHEGlD4nyA04ACUXQKTgCEipUnKGNEbP0otentayicZ24SlWs7tjO/N9u/5J2b2+NUtuZcnwYE8BuQPyGZAPwXwLLPk5kG+BJa9+fgfkh1B+CeancL4F8i2Q/wWm/S/w+XFoTseftn0dvhu0OXfhpM+AGvzcEiYVAFisPqE9zrETJhHAlXfg2lglMK9z0f3RBfB+ZyRUV3x+erzsEIjjOBqc1xtNAIrvguybV3A9lkVHxlEE6GrrPb/ZvAySwlUnfCmlPQ+R8JCExvGtcRQBLFwj4FGkznX1VYDKPG/f2/MjwCksXACgdNUxJjwK9xwl4JihOwTFR0kIF+CABEPRnvsvPFctMoYKqAFSAFaMwB4pp3Y+bodIYL9WmIAaIOHxo7W8wiHvAjTvhUeNwwSgeAeAABbqOewC5hBdwFD4+9+7puzXV9fS6/b1wwT4tsaYAhwOOQdUQch5vgZCeAhAv3ZM31yYAAUgvApQQQ6n5w6FB/RVe1jdJOAPAAD//1eMQwoAAAGQSURBVO1UMU4DQQy8X9AgWopIUINEkS4VlJQo4gvwAV7AD3gEH4iSgidESpWSXyyZExP5lr0c7K5PsXBhec/2+jzjuWtent9CLdtu1mG5+gjz+WNr7IsY7eH+tvO+xfuqk4vz7CH91edFaF5v9nb6dBKm13edvrL+0Lk5lMzJkQDeJSkkgHF6mR8CHwMHCQR/NAQQGD0BAlwK4FCefQiefq+A2Vn29tG7igLAfmwcnJu/nJy3BMQkMN9HEPr8AL3bfBv7Bp+7/SoExMDjZwKEJwmyhnnmQIQEBIlz2x0iKoAvJkAC6TsTIH6MqRrEWUMSZF2zAwqT4Eu/e6pzFAIkmNSZ4OFT+VYBIIF//UqbJwnF/4DU0GwOn8r/JQYCpPGufEfJuZiA37ycQw/5uFeqPq4pfR6FADmkBCXjfWdZj3NfXW58dAJyB9W65wRoMWulryvAyqa05nQFaDFrpa8rwMqmtOZ0BWgxa6WvK8DKprTmdAVoMWulryvAyqa05nQFaDFrpa8rwMqmtOb89wr4AtQ4aPoL6yVpAAAAAElFTkSuQmCC"), - friends: [1] - }, { - account_id: 3, - username: "-inactive-", - active: false - } - ] - end - - def default_pg_account_rows - [ - { - account_id: 1, - username: "blowmage", - reputation: 63.5, - active: true, - avatar: File.open("acceptance/data/logo.jpg", "rb"), - }, { - account_id: 2, - username: "quartzmo", - reputation: 87.9, - active: true, - avatar: 
StringIO.new("iVBORw0KGgoAAAANSUhEUgAAAEAAAABACAYAAACqaXHeAAAAAXNSR0IArs4c6QAAABxpRE9UAAAAAgAAAAAAAAAgAAAAKAAAACAAAAAgAAABxj2CfowAAAGSSURBVHgB7Jc9TsNAEIX3JDkCPUV6KlpKFHEGlD4nyA04ACUXQKTgCEipUnKGNEbP0otentayicZ24SlWs7tjO/N9u/5J2b2+NUtuZcnwYE8BuQPyGZAPwXwLLPk5kG+BJa9+fgfkh1B+CeancL4F8i2Q/wWm/S/w+XFoTseftn0dvhu0OXfhpM+AGvzcEiYVAFisPqE9zrETJhHAlXfg2lglMK9z0f3RBfB+ZyRUV3x+erzsEIjjOBqc1xtNAIrvguybV3A9lkVHxlEE6GrrPb/ZvAySwlUnfCmlPQ+R8JCExvGtcRQBLFwj4FGkznX1VYDKPG/f2/MjwCksXACgdNUxJjwK9xwl4JihOwTFR0kIF+CABEPRnvsvPFctMoYKqAFSAFaMwB4pp3Y+bodIYL9WmIAaIOHxo7W8wiHvAjTvhUeNwwSgeAeAABbqOewC5hBdwFD4+9+7puzXV9fS6/b1wwT4tsaYAhwOOQdUQch5vgZCeAhAv3ZM31yYAAUgvApQQQ6n5w6FB/RVe1jdJOAPAAD//1eMQwoAAAGQSURBVO1UMU4DQQy8X9AgWopIUINEkS4VlJQo4gvwAV7AD3gEH4iSgidESpWSXyyZExP5lr0c7K5PsXBhec/2+jzjuWtent9CLdtu1mG5+gjz+WNr7IsY7eH+tvO+xfuqk4vz7CH91edFaF5v9nb6dBKm13edvrL+0Lk5lMzJkQDeJSkkgHF6mR8CHwMHCQR/NAQQGD0BAlwK4FCefQiefq+A2Vn29tG7igLAfmwcnJu/nJy3BMQkMN9HEPr8AL3bfBv7Bp+7/SoExMDjZwKEJwmyhnnmQIQEBIlz2x0iKoAvJkAC6TsTIH6MqRrEWUMSZF2zAwqT4Eu/e6pzFAIkmNSZ4OFT+VYBIIF//UqbJwnF/4DU0GwOn8r/JQYCpPGufEfJuZiA37ycQw/5uFeqPq4pfR6FADmkBCXjfWdZj3NfXW58dAJyB9W65wRoMWulryvAyqa05nQFaDFrpa8rwMqmtOZ0BWgxa6WvK8DKprTmdAVoMWulryvAyqa05nQFaDFrpa8rwMqmtOb89wr4AtQ4aPoL6yVpAAAAAElFTkSuQmCC"), - }, { - account_id: 3, - username: "-inactive-", - active: false - } - ] - end - - def default_list_rows - end - - def default_item_rows - end + ] end -end \ No newline at end of file + end +end diff --git a/google-cloud-spanner/.rubocop.yml b/google-cloud-spanner/.rubocop.yml index d53ba10460d4..71b696e4aa2f 100644 --- a/google-cloud-spanner/.rubocop.yml +++ b/google-cloud-spanner/.rubocop.yml @@ -3,7 +3,6 @@ inherit_gem: AllCops: Exclude: - - "acceptance/**/*" - "support/**/*" - "google-cloud-spanner.gemspec" - "Rakefile" @@ -14,17 +13,34 @@ AllCops: Documentation: Enabled: false +Metrics/BlockLength: + Exclude: + - "acceptance/**/*" +Metrics/MethodLength: + Exclude: + - "acceptance/**/*" +Metrics/ModuleLength: + Exclude: + - 
"acceptance/**/*" +Metrics/AbcSize: + Exclude: + - "acceptance/**/*" Metrics/ClassLength: Enabled: false Metrics/CyclomaticComplexity: Exclude: + - "acceptance/**/*" - "lib/google/cloud/spanner.rb" Metrics/PerceivedComplexity: Exclude: + - "acceptance/**/*" - "lib/google/cloud/spanner.rb" Naming/FileName: Exclude: - "lib/google-cloud-spanner.rb" +Style/GlobalVars: + Exclude: + - "acceptance/**/*" Style/SymbolArray: Enabled: false Style/WordArray: diff --git a/google-cloud-spanner/Rakefile b/google-cloud-spanner/Rakefile index ad8214997515..5ef5cc6c42ae 100644 --- a/google-cloud-spanner/Rakefile +++ b/google-cloud-spanner/Rakefile @@ -33,33 +33,33 @@ end # Acceptance tests desc "Run the spanner acceptance tests." task :acceptance, :project, :keyfile do |t, args| - project = args[:project] - project ||= ENV["SPANNER_TEST_PROJECT"] || ENV["GCLOUD_TEST_PROJECT"] - if ENV["SPANNER_EMULATOR_HOST"].nil? - keyfile = args[:keyfile] - keyfile ||= ENV["SPANNER_TEST_KEYFILE"] || ENV["GCLOUD_TEST_KEYFILE"] - if keyfile - keyfile = File.read keyfile - else - keyfile ||= ENV["SPANNER_TEST_KEYFILE_JSON"] || ENV["GCLOUD_TEST_KEYFILE_JSON"] - end - if project.nil? || keyfile.nil? - fail "You must provide a project and keyfile. e.g. rake acceptance[test123, /path/to/keyfile.json] or SPANNER_TEST_PROJECT=test123 SPANNER_TEST_KEYFILE=/path/to/keyfile.json rake acceptance" - end - # clear any env var already set - require "google/cloud/spanner/credentials" - Google::Cloud::Spanner::Credentials.env_vars.each do |path| - ENV[path] = nil - end - else - if project.nil? - fail "You must provide a project. e.g. rake acceptance[test123] or SPANNER_TEST_PROJECT=test123 rake acceptance" - end - keyfile = "{}" - end - # always overwrite when running tests - ENV["SPANNER_PROJECT"] = project - ENV["SPANNER_KEYFILE_JSON"] = keyfile + # project = args[:project] + # project ||= ENV["SPANNER_TEST_PROJECT"] || ENV["GCLOUD_TEST_PROJECT"] + # if ENV["SPANNER_EMULATOR_HOST"].nil? 
+ # keyfile = args[:keyfile] + # keyfile ||= ENV["SPANNER_TEST_KEYFILE"] || ENV["GCLOUD_TEST_KEYFILE"] + # if keyfile + # keyfile = File.read keyfile + # else + # keyfile ||= ENV["SPANNER_TEST_KEYFILE_JSON"] || ENV["GCLOUD_TEST_KEYFILE_JSON"] + # end + # if project.nil? || keyfile.nil? + # fail "You must provide a project and keyfile. e.g. rake acceptance[test123, /path/to/keyfile.json] or SPANNER_TEST_PROJECT=test123 SPANNER_TEST_KEYFILE=/path/to/keyfile.json rake acceptance" + # end + # # clear any env var already set + # require "google/cloud/spanner/credentials" + # Google::Cloud::Spanner::Credentials.env_vars.each do |path| + # ENV[path] = nil + # end + # else + # if project.nil? + # fail "You must provide a project. e.g. rake acceptance[test123] or SPANNER_TEST_PROJECT=test123 rake acceptance" + # end + # keyfile = "{}" + # end + # # always overwrite when running tests + # ENV["SPANNER_PROJECT"] = project + # ENV["SPANNER_KEYFILE_JSON"] = keyfile Rake::Task["acceptance:run"].invoke end diff --git a/google-cloud-spanner/acceptance/instance_client_test.rb b/google-cloud-spanner/acceptance/instance_client_test.rb index 90ccf24a4646..956780fe3092 100644 --- a/google-cloud-spanner/acceptance/instance_client_test.rb +++ b/google-cloud-spanner/acceptance/instance_client_test.rb @@ -35,7 +35,7 @@ request = Google::Cloud::Spanner::Admin::Instance::V1::CreateInstanceRequest.new parent: project_path, instance_id: instance_id, - instance: instance + instance: instance job = client.create_instance request _(job).wont_be :done? unless emulator_enabled? 
@@ -56,7 +56,7 @@ # update display_name of the instance # instance.display_name = "#{instance.display_name}-updated" # request = Google::Cloud::Spanner::Admin::Instance::V1::UpdateInstanceRequest.new instance: instance, - # field_mask: { paths: ["display_name"] } + # field_mask: { paths: ["display_name"] } # job2 = client.update_instance request @@ -79,4 +79,4 @@ _(instance).must_be_kind_of Google::Cloud::Spanner::Admin::Instance::V1::Instance end end -end \ No newline at end of file +end diff --git a/google-cloud-spanner/acceptance/spanner/backup_operations_test.rb b/google-cloud-spanner/acceptance/spanner/backup_operations_test.rb index 8d8b0d1b4fbf..69dc379940f0 100644 --- a/google-cloud-spanner/acceptance/spanner/backup_operations_test.rb +++ b/google-cloud-spanner/acceptance/spanner/backup_operations_test.rb @@ -19,7 +19,7 @@ let(:instance_id) { $spanner_instance_id } let(:database_id) { $spanner_database_id } let(:backup_id) { "#{$spanner_database_id}-ops" } - let(:expire_time) { Time.now + 36000 } + let(:expire_time) { Time.now + 36_000 } it "list backup operations" do skip if emulator_enabled? @@ -30,8 +30,8 @@ database = instance.database database_id _(database).wont_be :nil? - job = database.create_backup backup_id, expire_time - job.wait_until_done! + create_job = database.create_backup backup_id, expire_time + create_job.wait_until_done! # All jobs = instance.backup_operations.all.to_a @@ -48,8 +48,8 @@ _(job.start_time).must_be_kind_of Time end - job = jobs.first - _(job.reload!).must_be_kind_of Google::Cloud::Spanner::Backup::Job + first_job = jobs.first + _(first_job.reload!).must_be_kind_of Google::Cloud::Spanner::Backup::Job # Filter completed jobs filter = "done:true" @@ -76,7 +76,7 @@ end # Filter by job start time - time = (Time.now - 360000) + time = (Time.now - 360_000) filter = "metadata.progress.start_time > \"#{time.iso8601}\"" jobs = instance.backup_operations(filter: filter).all.to_a _(jobs).wont_be :empty? 
@@ -85,11 +85,11 @@ end # Filer - AND - time = (Time.now - 360000) + time = (Time.now - 360_000) filter = [ "metadata.database:#{database_id}", "metadata.progress.start_time > \"#{time.iso8601}\"" - ].map{|f| "(#{f})"}.join(" AND ") + ].map { |f| "(#{f})" }.join(" AND ") jobs = instance.backup_operations(filter: filter).all.to_a _(jobs).wont_be :empty? diff --git a/google-cloud-spanner/acceptance/spanner/backup_test.rb b/google-cloud-spanner/acceptance/spanner/backup_test.rb index 3826ce0f09e1..f3fdedf5b89b 100644 --- a/google-cloud-spanner/acceptance/spanner/backup_test.rb +++ b/google-cloud-spanner/acceptance/spanner/backup_test.rb @@ -18,7 +18,7 @@ describe "Spanner Database Backup", :spanner do let(:instance_id) { $spanner_instance_id } let(:database_id) { $spanner_database_id } - let(:expire_time) { Time.now + 36000 } + let(:expire_time) { Time.now + 36_000 } let(:version_time) { Time.now } it "creates, get, updates, restore and delete a database backup" do @@ -77,10 +77,10 @@ backup = instance.backup backup_id _(backup.expire_time.to_i).must_equal((expire_time + 3600).to_i) - _ { - backup.expire_time = Time.now - 36000 - }.must_raise Google::Cloud::InvalidArgumentError - _(backup.expire_time.to_i).must_equal((expire_time + 3600 ).to_i) + _ do + backup.expire_time = Time.now - 36_000 + end.must_raise Google::Cloud::InvalidArgumentError + _(backup.expire_time.to_i).must_equal((expire_time + 3600).to_i) # Restore restore_database_id = "restore-#{database_id}" @@ -146,7 +146,7 @@ thirty_days_ago = Time.now - (30 * 24 * 60 * 60) assert_raises Google::Cloud::InvalidArgumentError do - job = database.create_backup backup_id, expire_time, version_time: thirty_days_ago + database.create_backup backup_id, expire_time, version_time: thirty_days_ago end end @@ -158,7 +158,7 @@ tomorrow = Time.now + (24 * 60 * 60) assert_raises Google::Cloud::InvalidArgumentError do - job = database.create_backup backup_id, expire_time, version_time: tomorrow + database.create_backup 
backup_id, expire_time, version_time: tomorrow end end @@ -171,7 +171,7 @@ job = database.create_backup backup_id, expire_time job.wait_until_done! - backup = job.backup + created_backup = job.backup instance = spanner.instance instance_id @@ -192,6 +192,6 @@ _(backups).wont_be :empty? _(backups.first.database_id).must_equal database_id - backup.delete + created_backup.delete end end diff --git a/google-cloud-spanner/acceptance/spanner/batch_client/execute_partition_test.rb b/google-cloud-spanner/acceptance/spanner/batch_client/execute_partition_test.rb index 6b608ecbabc9..85af0aa2afe1 100644 --- a/google-cloud-spanner/acceptance/spanner/batch_client/execute_partition_test.rb +++ b/google-cloud-spanner/acceptance/spanner/batch_client/execute_partition_test.rb @@ -18,7 +18,7 @@ let(:db) { spanner_client } let(:pg_db) { spanner_pg_client } let(:batch_client) { $spanner.batch_client $spanner_instance_id, $spanner_database_id } - let(:pg_batch_client) { $spanner.batch_client $spanner_instance_id, $spanner_pg_database_id unless emulator_enabled?} + let(:pg_batch_client) { $spanner.batch_client $spanner_instance_id, $spanner_pg_database_id unless emulator_enabled? } let(:table_name) { "stuffs" } let(:table_index) { "IsStuffsIdPrime" } let(:batch_snapshot) { batch_client.batch_snapshot } @@ -41,20 +41,22 @@ { id: 12, bool: false } ] pg_db.delete table_name unless emulator_enabled? - pg_db.insert table_name, [ - { id: 1, bool: false }, - { id: 2, bool: false }, - { id: 3, bool: true }, - { id: 4, bool: false }, - { id: 5, bool: true }, - { id: 6, bool: false }, - { id: 7, bool: true }, - { id: 8, bool: false }, - { id: 9, bool: false }, - { id: 10, bool: false }, - { id: 11, bool: true }, - { id: 12, bool: false } - ] unless emulator_enabled? + unless emulator_enabled? 
+ pg_db.insert table_name, [ + { id: 1, bool: false }, + { id: 2, bool: false }, + { id: 3, bool: true }, + { id: 4, bool: false }, + { id: 5, bool: true }, + { id: 6, bool: false }, + { id: 7, bool: true }, + { id: 8, bool: false }, + { id: 9, bool: false }, + { id: 10, bool: false }, + { id: 11, bool: true }, + { id: 12, bool: false } + ] + end end after do @@ -92,13 +94,14 @@ end end - _(rows).must_equal [{ id: 1 }, { id: 2 }, { id: 3 }, { id: 4 }, { id: 5 }, { id: 6 }, { id: 7 }, { id: 8 }, { id: 9 }, { id: 10 }, { id: 11 }, { id: 12 }] + _(rows).must_equal [{ id: 1 }, { id: 2 }, { id: 3 }, { id: 4 }, { id: 5 }, { id: 6 }, { id: 7 }, { id: 8 }, + { id: 9 }, { id: 10 }, { id: 11 }, { id: 12 }] batch_snapshot.close end it "reads all by default in pg" do skip if emulator_enabled? - skip("Skipped due to https://b.corp.google.com/issues/216209306") + skip "Skipped due to https://b.corp.google.com/issues/216209306" _(pg_batch_snapshot.timestamp).must_be_kind_of Time serialized_snapshot = pg_batch_snapshot.dump @@ -126,7 +129,8 @@ end end - _(rows).must_equal [{ id: 1 }, { id: 2 }, { id: 3 }, { id: 4 }, { id: 5 }, { id: 6 }, { id: 7 }, { id: 8 }, { id: 9 }, { id: 10 }, { id: 11 }, { id: 12 }] + _(rows).must_equal [{ id: 1 }, { id: 2 }, { id: 3 }, { id: 4 }, { id: 5 }, { id: 6 }, { id: 7 }, { id: 8 }, + { id: 9 }, { id: 10 }, { id: 11 }, { id: 12 }] pg_batch_snapshot.close end @@ -154,7 +158,7 @@ end end - _(rows).must_equal [{:id=>2, :bool=>false}] + _(rows).must_equal [{ id: 2, bool: false }] batch_snapshot.close end @@ -183,7 +187,7 @@ end end - _(rows).must_equal [{:id=>2, :bool=>false}] + _(rows).must_equal [{ id: 2, bool: false }] pg_batch_snapshot.close end @@ -212,7 +216,7 @@ end end - _(rows).must_equal [{:id=>2, :bool=>false}] + _(rows).must_equal [{ id: 2, bool: false }] batch_snapshot.close end @@ -242,7 +246,7 @@ end end - _(rows).must_equal [{:id=>2, :bool=>false}] + _(rows).must_equal [{ id: 2, bool: false }] pg_batch_snapshot.close end end diff --git 
a/google-cloud-spanner/acceptance/spanner/client/batch_update_test.rb b/google-cloud-spanner/acceptance/spanner/client/batch_update_test.rb index e74e31ccd214..eae641ede3fc 100644 --- a/google-cloud-spanner/acceptance/spanner/client/batch_update_test.rb +++ b/google-cloud-spanner/acceptance/spanner/client/batch_update_test.rb @@ -16,37 +16,54 @@ require "concurrent" describe "Spanner Client", :batch_update, :spanner do - let(:db) { {gsql: spanner_client, pg: spanner_pg_client} } - let(:insert_dml) {{ gsql: "INSERT INTO accounts (account_id, username, active, reputation) VALUES (@account_id, @username, @active, @reputation)", - pg: "INSERT INTO accounts (account_id, username, active, reputation) VALUES ($1, $2, $3, $4)" - }} - let(:update_dml) {{ gsql: "UPDATE accounts SET username = @username, active = @active WHERE account_id = @account_id", - pg: "UPDATE accounts SET username = $2, active = $3 WHERE account_id = $1", - }} - let(:select_dql) {{ gsql: "SELECT username FROM accounts WHERE account_id = @account_id", - pg: "SELECT username FROM accounts WHERE account_id = $1" - }} + let :db do + { gsql: spanner_client, pg: spanner_pg_client } + end + + let :insert_dml do + { gsql: "INSERT INTO accounts (account_id, username, active, reputation) \ + VALUES (@account_id, @username, @active, @reputation)", + pg: "INSERT INTO accounts (account_id, username, active, reputation) VALUES ($1, $2, $3, $4)" } + end + + let :update_dml do + { gsql: "UPDATE accounts SET username = @username, active = @active WHERE account_id = @account_id", + pg: "UPDATE accounts SET username = $2, active = $3 WHERE account_id = $1" } + end + + let :select_dql do + { gsql: "SELECT username FROM accounts WHERE account_id = @account_id", + pg: "SELECT username FROM accounts WHERE account_id = $1" } + end + let(:update_dml_syntax_error) { "UPDDDD accounts" } - let(:delete_dml) {{ gsql:"DELETE FROM accounts WHERE account_id = @account_id", - pg: "DELETE FROM accounts WHERE account_id = $1" - }} - 
let(:insert_params) {{ gsql: { account_id: 4, username: "inserted", active: true, reputation: 88.8 }, - pg: { p1: 4, p2: "inserted", p3: true, p4: 88.8 } - }} - let(:update_params) {{ gsql: { account_id: 4, username: "updated", active: false }, - pg: { p1: 4, p2: "updated", p3: false } - }} - let(:delete_params) { { gsql: { account_id: 4 }, pg: { p1: 4 } } } + let :delete_dml do + { gsql: "DELETE FROM accounts WHERE account_id = @account_id", + pg: "DELETE FROM accounts WHERE account_id = $1" } + end + let :insert_params do + { gsql: { account_id: 4, username: "inserted", active: true, reputation: 88.8 }, + pg: { p1: 4, p2: "inserted", p3: true, p4: 88.8 } } + end + let :update_params do + { gsql: { account_id: 4, username: "updated", active: false }, + pg: { p1: 4, p2: "updated", p3: false } } + end + let :delete_params do + { gsql: { account_id: 4 }, pg: { p1: 4 } } + end before do db[:gsql].commit do |c| c.delete "accounts" c.insert "accounts", default_account_rows end - db[:pg].commit do |c| - c.delete "accounts" - c.insert "accounts", default_pg_account_rows - end unless emulator_enabled? + unless emulator_enabled? + db[:pg].commit do |c| + c.delete "accounts" + c.insert "accounts", default_pg_account_rows + end + end end after do @@ -55,10 +72,9 @@ end dialects = [:gsql] - dialects.push(:pg) unless emulator_enabled? + dialects.push :pg unless emulator_enabled? dialects.each do |dialect| - it "executes multiple DML statements in a batch for #{dialect}" do prior_results = db[dialect].execute_sql "SELECT * FROM accounts" _(prior_results.rows.count).must_equal 3 @@ -94,9 +110,11 @@ _(tx.transaction_id).wont_be :nil? 
err = expect do - tx.batch_update do |b| end + tx.batch_update { |b| } # rubocop:disable Lint/EmptyBlock end.must_raise Google::Cloud::InvalidArgumentError - _(err.message).must_match /3:(No statements in batch DML request|Request must contain at least one DML statement)/ + _(err.message).must_match( + /3:(No statements in batch DML request|Request must contain at least one DML statement)/ + ) end _(timestamp).must_be_kind_of Time end @@ -114,11 +132,11 @@ b.batch_update update_dml_syntax_error, params: update_params[dialect] b.batch_update delete_dml[dialect], params: delete_params[dialect] end - rescue Google::Cloud::Spanner::BatchUpdateError => batch_update_error - _(batch_update_error.cause).must_be_kind_of Google::Cloud::InvalidArgumentError - _(batch_update_error.cause.message).must_equal "Statement 1: 'UPDDDD accounts' is not valid DML." + rescue Google::Cloud::Spanner::BatchUpdateError => e + _(e.cause).must_be_kind_of Google::Cloud::InvalidArgumentError + _(e.cause.message).must_equal "Statement 1: 'UPDDDD accounts' is not valid DML." 
- row_counts = batch_update_error.row_counts + row_counts = e.row_counts _(row_counts).must_be_kind_of Array _(row_counts.count).must_equal 1 _(row_counts[0]).must_equal 1 @@ -162,7 +180,7 @@ describe "request options for #{dialect}" do it "execute batch update with priority options for #{dialect}" do - timestamp = db[dialect].transaction do |tx| + db[dialect].transaction do |tx| row_counts = tx.batch_update request_options: { priority: :PRIORITY_HIGH } do |b| b.batch_update insert_dml[dialect], params: insert_params[dialect] b.batch_update update_dml[dialect], params: update_params[dialect] diff --git a/google-cloud-spanner/acceptance/spanner/client/crud_test.rb b/google-cloud-spanner/acceptance/spanner/client/crud_test.rb index aacda063ddf7..08a311b3796b 100644 --- a/google-cloud-spanner/acceptance/spanner/client/crud_test.rb +++ b/google-cloud-spanner/acceptance/spanner/client/crud_test.rb @@ -15,17 +15,19 @@ require "spanner_helper" describe "Spanner Client", :crud, :spanner do - let(:db) { {gsql: spanner_client, pg: spanner_pg_client} } + let :db do + { gsql: spanner_client, pg: spanner_pg_client } + end before do setup_timestamp_gsql = db[:gsql].delete "accounts" setup_timestamp_pg = db[:pg].delete "accounts" unless emulator_enabled? - @setup_timestamp = {gsql: setup_timestamp_gsql, pg: setup_timestamp_pg} - @default_rows = {gsql: default_account_rows, pg: default_pg_account_rows} + @setup_timestamp = { gsql: setup_timestamp_gsql, pg: setup_timestamp_pg } + @default_rows = { gsql: default_account_rows, pg: default_pg_account_rows } end - + dialects = [:gsql] - dialects.push(:pg) unless emulator_enabled? + dialects.push :pg unless emulator_enabled? 
dialects.each do |dialect| it "inserts, updates, upserts, reads, and deletes records for #{dialect}" do @@ -134,7 +136,7 @@ end it "inserts, updates, upserts, reads, and deletes records in a transaction for #{dialect}" do - timestamp = @setup_timestamp[dialect] + @setup_timestamp[dialect] active_count_sql = "SELECT COUNT(*) AS count FROM accounts WHERE active = true" db[dialect].transaction do |tx| @@ -145,7 +147,7 @@ tx.insert "accounts", @default_rows[dialect][2] end - timestamp = db[dialect].transaction do |tx| + db[dialect].transaction do |tx| _(db[dialect].read("accounts", ["account_id"]).rows.count).must_equal 3 _(tx.execute_query(active_count_sql).rows.first[:count]).must_equal 2 @@ -188,7 +190,7 @@ it "inserts, updates, upserts, reads, and deletes records with request tagging options for #{dialect}" do timestamp = db[dialect].insert "accounts", @default_rows[dialect][0], - request_options: { tag: "Tag-CRUD-1" } + request_options: { tag: "Tag-CRUD-1" } _(timestamp).wont_be :nil? results = db[dialect].read "accounts", ["account_id"], single_use: { timestamp: @setup_timestamp[dialect] }, @@ -196,16 +198,16 @@ _(results.timestamp).wont_be :nil? timestamp = db[dialect].update "accounts", @default_rows[dialect][0], - request_options: { tag: "Tag-CRUD-2" } + request_options: { tag: "Tag-CRUD-2" } _(timestamp).wont_be :nil? timestamp = db[dialect].upsert "accounts", @default_rows[dialect][1], - request_options: { tag: "Tag-CRUD-4" } + request_options: { tag: "Tag-CRUD-4" } _(timestamp).wont_be :nil? timestamp = db[dialect].delete "accounts", [1, 2, 3], - request_options: { tag: "Tag-CRUD-5" } + request_options: { tag: "Tag-CRUD-5" } _(timestamp).wont_be :nil? 
end - end + end end diff --git a/google-cloud-spanner/acceptance/spanner/client/dml_test.rb b/google-cloud-spanner/acceptance/spanner/client/dml_test.rb index a4bc0c8690a4..d83a51b4b91f 100644 --- a/google-cloud-spanner/acceptance/spanner/client/dml_test.rb +++ b/google-cloud-spanner/acceptance/spanner/client/dml_test.rb @@ -16,33 +16,45 @@ require "concurrent" describe "Spanner Client", :dml, :spanner do - let(:db) { {gsql: spanner_client, pg: spanner_pg_client} } - let(:insert_dml) {{gsql:"INSERT INTO accounts (account_id, username, active, reputation) VALUES (@account_id, @username, @active, @reputation)", - pg:"INSERT INTO accounts (account_id, username, active, reputation) VALUES ($1, $2, $3, $4)" - }} - let(:update_dml) {{ gsql: "UPDATE accounts SET username = @username, active = @active WHERE account_id = @account_id", - pg: "UPDATE accounts SET username = $2, active = $3 WHERE account_id = $1", - }} - let(:select_dql) {{ gsql: "SELECT username FROM accounts WHERE account_id = @account_id", - pg: "SELECT username FROM accounts WHERE account_id = $1" - }} - let(:insert_params) {{ gsql: { account_id: 4, username: "inserted", active: true, reputation: 88.8 }, - pg: { p1: 4, p2: "inserted", p3: true, p4: 88.8 } - }} - let(:update_params) {{ gsql: { account_id: 4, username: "updated", active: false }, - pg: { p1: 4, p2: "updated", p3: false } - }} - let(:select_params) { { gsql: { account_id: 4 }, pg: { p1: 4 } } } + let :db do + { gsql: spanner_client, pg: spanner_pg_client } + end + let :insert_dml do + { gsql: "INSERT INTO accounts (account_id, username, active, reputation) \ + VALUES (@account_id, @username, @active, @reputation)", + pg: "INSERT INTO accounts (account_id, username, active, reputation) VALUES ($1, $2, $3, $4)" } + end + let :update_dml do + { gsql: "UPDATE accounts SET username = @username, active = @active WHERE account_id = @account_id", + pg: "UPDATE accounts SET username = $2, active = $3 WHERE account_id = $1" } + end + let :select_dql do 
+ { gsql: "SELECT username FROM accounts WHERE account_id = @account_id", + pg: "SELECT username FROM accounts WHERE account_id = $1" } + end + let :insert_params do + { gsql: { account_id: 4, username: "inserted", active: true, reputation: 88.8 }, + pg: { p1: 4, p2: "inserted", p3: true, p4: 88.8 } } + end + let :update_params do + { gsql: { account_id: 4, username: "updated", active: false }, + pg: { p1: 4, p2: "updated", p3: false } } + end + let :select_params do + { gsql: { account_id: 4 }, pg: { p1: 4 } } + end before do db[:gsql].commit do |c| c.delete "accounts" c.insert "accounts", default_account_rows end - db[:pg].commit do |c| - c.delete "accounts" - c.insert "accounts", default_pg_account_rows - end unless emulator_enabled? + unless emulator_enabled? + db[:pg].commit do |c| + c.delete "accounts" + c.insert "accounts", default_pg_account_rows + end + end end after do @@ -51,7 +63,7 @@ end dialects = [:gsql] - dialects.push(:pg) unless emulator_enabled? + dialects.push :pg unless emulator_enabled? dialects.each do |dialect| it "executes multiple DML statements in a transaction for #{dialect}" do @@ -131,13 +143,15 @@ timestamp = db[dialect].transaction do |tx| _(tx.transaction_id).wont_be :nil? - # Execute a DML statement, followed by calling existing insert method, commit the transaction and assert that both the updates are present. + # Execute a DML statement, followed by calling existing insert method, + # commit the transaction and assert that both the updates are present. 
insert_row_count = tx.execute_update \ - insert_dml[dialect], + insert_dml[dialect], params: insert_params[dialect] _(insert_row_count).must_equal 1 - insert_mut_rows = tx.insert "accounts", { account_id: 5, username: "inserted by mutation", active: true, reputation: 99.9 } + insert_mut_rows = tx.insert "accounts", + { account_id: 5, username: "inserted by mutation", active: true, reputation: 99.9 } _(insert_mut_rows.count).must_equal 1 end _(timestamp).must_be_kind_of Time @@ -152,12 +166,12 @@ db[dialect].transaction request_options: request_options do |tx| insert_row_count = tx.execute_update \ - insert_dml[dialect], - params: insert_params[dialect], - request_options: request_options + insert_dml[dialect], + params: insert_params[dialect], + request_options: request_options _(insert_row_count).must_equal 1 end end end - end + end end diff --git a/google-cloud-spanner/acceptance/spanner/client/execute_test.rb b/google-cloud-spanner/acceptance/spanner/client/execute_test.rb index 970a4760bb34..4a36e7a422a6 100644 --- a/google-cloud-spanner/acceptance/spanner/client/execute_test.rb +++ b/google-cloud-spanner/acceptance/spanner/client/execute_test.rb @@ -15,10 +15,12 @@ require "spanner_helper" describe "Spanner Client", :execute_sql, :spanner do - let(:db) { {gsql: spanner_client, pg: spanner_pg_client} } + let :db do + { gsql: spanner_client, pg: spanner_pg_client } + end dialects = [:gsql] - dialects.push(:pg) unless emulator_enabled? + dialects.push :pg unless emulator_enabled? 
dialects.each do |dialect| it "runs SELECT 1 for #{dialect}" do @@ -175,9 +177,9 @@ let(:origin_opt_stats_pkg) { nil } before do - origin_opt_version = ENV["SPANNER_OPTIMIZER_VERSION"] + origin_opt_version = ENV["SPANNER_OPTIMIZER_VERSION"] # rubocop:disable Lint/UselessAssignment ENV["SPANNER_OPTIMIZER_VERSION"] = "3" - origin_opt_stats_pkg = ENV["SPANNER_OPTIMIZER_STATISTICS_PACKAGE"] + origin_opt_stats_pkg = ENV["SPANNER_OPTIMIZER_STATISTICS_PACKAGE"] # rubocop:disable Lint/UselessAssignment ENV["SPANNER_OPTIMIZER_STATISTICS_PACKAGE"] = "latest" end @@ -215,5 +217,5 @@ _(results.rows.count).must_equal 1 end end - end + end end diff --git a/google-cloud-spanner/acceptance/spanner/client/large_data_test.rb b/google-cloud-spanner/acceptance/spanner/client/large_data_test.rb index 0b4b1ee04136..729610747b4b 100644 --- a/google-cloud-spanner/acceptance/spanner/client/large_data_test.rb +++ b/google-cloud-spanner/acceptance/spanner/client/large_data_test.rb @@ -19,7 +19,7 @@ let(:table_name) { "stuffs" } def generate_bytes count = 2048 - StringIO.new(SecureRandom.random_bytes(count)) + StringIO.new SecureRandom.random_bytes(count) end def generate_string count = 50 @@ -27,23 +27,23 @@ def generate_string count = 50 end def random_small_bytes count = rand(1024..4096) - generate_bytes(count) + generate_bytes count end def random_small_string count = rand(25..100) - generate_string(count) + generate_string count end ## # Guarenteed to be at least 1 MB def random_big_bytes offset = rand(1..2048) - generate_bytes(1024*1024 + offset) + generate_bytes 1024 * 1024 + offset end ## # Guarenteed to be at least 1 MB def random_big_string offset = rand(1..500) - generate_string(25000 + offset) + generate_string 25_000 + offset end def random_row @@ -62,7 +62,8 @@ def random_row results = db.read table_name, [:id, :string, :byte, :strings, :bytes], keys: my_row[:id] _(results).must_be_kind_of Google::Cloud::Spanner::Results - _(results.fields.to_h).must_equal({ id: :INT64, string: 
:STRING, byte: :BYTES, strings: [:STRING], bytes: [:BYTES] }) + _(results.fields.to_h).must_equal({ id: :INT64, string: :STRING, byte: :BYTES, strings: [:STRING], +bytes: [:BYTES] }) returned_row = results.rows.first _(returned_row[:string]).must_equal my_row[:string] @@ -85,10 +86,12 @@ def random_row it "writes and queries bytes" do my_row = random_row db.upsert table_name, my_row - results = db.execute_sql "SELECT id, string, byte, strings, bytes FROM #{table_name} WHERE id = @id", params: { id: my_row[:id] } + results = db.execute_sql "SELECT id, string, byte, strings, bytes FROM #{table_name} WHERE id = @id", + params: { id: my_row[:id] } _(results).must_be_kind_of Google::Cloud::Spanner::Results - _(results.fields.to_h).must_equal({ id: :INT64, string: :STRING, byte: :BYTES, strings: [:STRING], bytes: [:BYTES] }) + _(results.fields.to_h).must_equal({ id: :INT64, string: :STRING, byte: :BYTES, strings: [:STRING], +bytes: [:BYTES] }) returned_row = results.rows.first _(returned_row[:string]).must_equal my_row[:string] diff --git a/google-cloud-spanner/acceptance/spanner/client/params/bool_test.rb b/google-cloud-spanner/acceptance/spanner/client/params/bool_test.rb index 855581a2f1f6..b3d2be3e94f8 100644 --- a/google-cloud-spanner/acceptance/spanner/client/params/bool_test.rb +++ b/google-cloud-spanner/acceptance/spanner/client/params/bool_test.rb @@ -1,6 +1,6 @@ -# Copyright true0false7 Google Inc. All rights reserved. +# Copyright 2017 Google LLC # -# Licensed under the Apache License, Version true.0 (the "License"); +# Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # diff --git a/google-cloud-spanner/acceptance/spanner/client/params/bytes_test.rb b/google-cloud-spanner/acceptance/spanner/client/params/bytes_test.rb index bd8dd2ae9a4b..857d69747131 100644 --- a/google-cloud-spanner/acceptance/spanner/client/params/bytes_test.rb +++ b/google-cloud-spanner/acceptance/spanner/client/params/bytes_test.rb @@ -36,7 +36,8 @@ end it "queries and returns an array of bytes parameters" do - results = db.execute_query "SELECT @value AS value", params: { value: [StringIO.new("foo"), StringIO.new("bar"), StringIO.new("baz")] } + results = db.execute_query "SELECT @value AS value", + params: { value: [StringIO.new("foo"), StringIO.new("bar"), StringIO.new("baz")] } _(results).must_be_kind_of Google::Cloud::Spanner::Results _(results.fields[:value]).must_equal [:BYTES] @@ -48,7 +49,8 @@ end it "queries and returns an array of bytes parameters with a nil value" do - results = db.execute_query "SELECT @value AS value", params: { value: [nil, StringIO.new("foo"), StringIO.new("bar"), StringIO.new("baz")] } + results = db.execute_query "SELECT @value AS value", + params: { value: [nil, StringIO.new("foo"), StringIO.new("bar"), StringIO.new("baz")] } _(results).must_be_kind_of Google::Cloud::Spanner::Results _(results.fields[:value]).must_equal [:BYTES] diff --git a/google-cloud-spanner/acceptance/spanner/client/params/date_test.rb b/google-cloud-spanner/acceptance/spanner/client/params/date_test.rb index a6aec23514de..102be503b181 100644 --- a/google-cloud-spanner/acceptance/spanner/client/params/date_test.rb +++ b/google-cloud-spanner/acceptance/spanner/client/params/date_test.rb @@ -35,7 +35,8 @@ end it "queries and returns an array of date parameters" do - results = db.execute_query "SELECT @value AS value", params: { value: [(date_value - 1), date_value, (date_value + 1)] } + results = db.execute_query "SELECT @value AS value", + params: { value: [(date_value - 1), date_value, (date_value + 1)] } 
_(results).must_be_kind_of Google::Cloud::Spanner::Results _(results.fields[:value]).must_equal [:DATE] @@ -43,7 +44,8 @@ end it "queries and returns an array of date parameters with a nil value" do - results = db.execute_query "SELECT @value AS value", params: { value: [nil, (date_value - 1), date_value, (date_value + 1)] } + results = db.execute_query "SELECT @value AS value", + params: { value: [nil, (date_value - 1), date_value, (date_value + 1)] } _(results).must_be_kind_of Google::Cloud::Spanner::Results _(results.fields[:value]).must_equal [:DATE] diff --git a/google-cloud-spanner/acceptance/spanner/client/params/float64_test.rb b/google-cloud-spanner/acceptance/spanner/client/params/float64_test.rb index 4d47d3cb2f51..fef70a3d8cce 100644 --- a/google-cloud-spanner/acceptance/spanner/client/params/float64_test.rb +++ b/google-cloud-spanner/acceptance/spanner/client/params/float64_test.rb @@ -38,7 +38,7 @@ _(results).must_be_kind_of Google::Cloud::Spanner::Results _(results.fields[:value]).must_equal :FLOAT64 - _(results.rows.first[:value]).must_equal -Float::INFINITY + _(results.rows.first[:value]).must_equal(-Float::INFINITY) end it "queries and returns a float64 parameter (-NaN)" do @@ -68,14 +68,15 @@ end it "queries and returns an array of special float64 parameters" do - results = db.execute_query "SELECT @value AS value", params: { value: [Float::INFINITY, -Float::INFINITY, -Float::NAN] } + results = db.execute_query "SELECT @value AS value", + params: { value: [Float::INFINITY, -Float::INFINITY, -Float::NAN] } _(results).must_be_kind_of Google::Cloud::Spanner::Results _(results.fields[:value]).must_equal [:FLOAT64] float_array = results.rows.first[:value] _(float_array.size).must_equal 3 _(float_array[0]).must_equal Float::INFINITY - _(float_array[1]).must_equal -Float::INFINITY + _(float_array[1]).must_equal(-Float::INFINITY) _(float_array[2]).must_be :nan? 
end diff --git a/google-cloud-spanner/acceptance/spanner/client/params/json_test.rb b/google-cloud-spanner/acceptance/spanner/client/params/json_test.rb index e011433d7f02..caa20e701bad 100644 --- a/google-cloud-spanner/acceptance/spanner/client/params/json_test.rb +++ b/google-cloud-spanner/acceptance/spanner/client/params/json_test.rb @@ -17,7 +17,7 @@ describe "Spanner Client", :params, :json, :spanner do let(:db) { spanner_client } let(:json_params) { { "venue" => "abc", "rating" => 10 } } - let(:json_array_params) do + let :json_array_params do 3.times.map do |i| { "venue" => "abc-#{i}", "rating" => 10 + i } end @@ -56,7 +56,7 @@ it "queries and returns an array of json parameters with a nil value" do skip if emulator_enabled? - params = [nil].concat(json_array_params) + params = [nil].concat json_array_params results = db.execute_query "SELECT @value AS value", params: { value: params }, types: { value: [:JSON] } _(results).must_be_kind_of Google::Cloud::Spanner::Results diff --git a/google-cloud-spanner/acceptance/spanner/client/params/pgnumeric_test.rb b/google-cloud-spanner/acceptance/spanner/client/params/pgnumeric_test.rb index 985443ff4c41..1e31867ac1f4 100644 --- a/google-cloud-spanner/acceptance/spanner/client/params/pgnumeric_test.rb +++ b/google-cloud-spanner/acceptance/spanner/client/params/pgnumeric_test.rb @@ -1,6 +1,6 @@ -# Copyright true0false7 Google Inc. All rights reserved. +# Copyright 2022 Google LLC # -# Licensed under the Apache License, Version true.0 (the "License"); +# Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # @@ -38,11 +38,10 @@ it "queries and returns a NAN BigDecimal parameter" do skip if emulator_enabled? 
- results = db.execute_query "SELECT $1 AS value", params: { p1: BigDecimal('NaN') }, types: { p1: :PG_NUMERIC } + results = db.execute_query "SELECT $1 AS value", params: { p1: BigDecimal("NaN") }, types: { p1: :PG_NUMERIC } _(results).must_be_kind_of Google::Cloud::Spanner::Results _(results.fields[:value]).must_equal :NUMERIC _(results.rows.first[:value]).must_be :nan? end - end diff --git a/google-cloud-spanner/acceptance/spanner/client/params/struct_test.rb b/google-cloud-spanner/acceptance/spanner/client/params/struct_test.rb index 9eb545797e61..cac419eed79a 100644 --- a/google-cloud-spanner/acceptance/spanner/client/params/struct_test.rb +++ b/google-cloud-spanner/acceptance/spanner/client/params/struct_test.rb @@ -34,7 +34,7 @@ # [parameters=CAST(NULL AS STRUCT) AS struct_param] # SELECT @struct_param.userf; it "Simple field access on NULL struct value" do - struct_type = db.fields(threadf: :INT64, userf: :STRING) + struct_type = db.fields threadf: :INT64, userf: :STRING results = db.execute "SELECT @struct_param.userf", params: { struct_param: nil }, types: { struct_param: struct_type } @@ -60,7 +60,7 @@ # [parameters=CAST(STRUCT(null) AS STRUCT>) AS struct_param] # SELECT @struct_param.structf.nestedf; it "Nested struct field access on NULL struct value" do - struct_type = db.fields(structf: db.fields(nestedf: :STRING)) + struct_type = db.fields structf: db.fields(nestedf: :STRING) results = db.execute "SELECT @struct_param.structf.nestedf", params: { struct_param: nil }, types: { struct_param: struct_type } @@ -100,7 +100,7 @@ # [parameters=STRUCT(NULL) AS struct_param] # SELECT @struct_param.f1; it "Struct with single NULL field" do - struct_type = db.fields(f1: :INT64) + struct_type = db.fields f1: :INT64 results = db.execute "SELECT @struct_param.f1", params: { struct_param: { f1: nil } }, types: { struct_param: struct_type } @@ -137,7 +137,8 @@ end # # Null array of struct field. 
- # [parameters=STRUCT>> (10,CAST(NULL AS ARRAY>)) AS struct_param] + # [parameters=STRUCT>>\ + # (10,CAST(NULL AS ARRAY>)) AS struct_param] # SELECT a.threadid FROM UNNEST(@struct_param.arraysf) a; it "Null array of struct field" do struct_value = db.fields(intf: :INT64, arraysf: [db.fields(threadid: :INT64)]).struct([10, nil]) @@ -153,7 +154,7 @@ # [parameters=CAST(NULL AS ARRAY>) as struct_arr_param] # SELECT a.threadid FROM UNNEST(@struct_arr_param) a; it "Null array of struct" do - struct_type = db.fields(threadid: :INT64) + struct_type = db.fields threadid: :INT64 results = db.execute_query "SELECT a.threadid FROM UNNEST(@struct_arr_param) a", params: { struct_arr_param: nil }, types: { struct_arr_param: [struct_type] } @@ -165,7 +166,9 @@ end it "queries and returns a struct parameter" do - results = db.execute "SELECT ARRAY(SELECT AS STRUCT message, repeat FROM (SELECT @value.message AS message, @value.repeat AS repeat)) AS value", params: { value: { message: "hello", repeat: 1 } } + results = db.execute "SELECT ARRAY(SELECT AS STRUCT message, repeat \ + FROM (SELECT @value.message AS message, @value.repeat AS repeat)) AS value", + params: { value: { message: "hello", repeat: 1 } } _(results).must_be_kind_of Google::Cloud::Spanner::Results _(results.fields.to_h).must_equal({ value: [db.fields(message: :STRING, repeat: :INT64)] }) @@ -173,7 +176,8 @@ end it "queries a struct parameter and returns string and integer" do - results = db.execute_query "SELECT @value.message AS message, @value.repeat AS repeat", params: { value: { message: "hello", repeat: 1 } } + results = db.execute_query "SELECT @value.message AS message, @value.repeat AS repeat", + params: { value: { message: "hello", repeat: 1 } } _(results).must_be_kind_of Google::Cloud::Spanner::Results _(results.fields.to_h).must_equal({ message: :STRING, repeat: :INT64 }) @@ -181,7 +185,9 @@ end it "queries and returns a struct array" do - struct_sql = "SELECT ARRAY(SELECT AS STRUCT message, repeat FROM 
(SELECT 'hello' AS message, 1 AS repeat UNION ALL SELECT 'hola' AS message, 2 AS repeat) ORDER BY repeat ASC)" + struct_sql = "SELECT ARRAY(SELECT AS STRUCT message, repeat \ + FROM (SELECT 'hello' AS message, 1 AS repeat UNION ALL \ + SELECT 'hola' AS message, 2 AS repeat) ORDER BY repeat ASC)" results = db.execute_query struct_sql _(results).must_be_kind_of Google::Cloud::Spanner::Results diff --git a/google-cloud-spanner/acceptance/spanner/client/params/timestamp_test.rb b/google-cloud-spanner/acceptance/spanner/client/params/timestamp_test.rb index 53a22526bb05..094529eb0ced 100644 --- a/google-cloud-spanner/acceptance/spanner/client/params/timestamp_test.rb +++ b/google-cloud-spanner/acceptance/spanner/client/params/timestamp_test.rb @@ -35,7 +35,9 @@ end it "queries and returns an array of timestamp parameters" do - results = db.execute_query "SELECT @value AS value", params: { value: [(timestamp_value - 180.0), timestamp_value, (timestamp_value - 240.0)] } + results = db.execute_query "SELECT @value AS value", + params: { value: [(timestamp_value - 180.0), timestamp_value, + (timestamp_value - 240.0)] } _(results).must_be_kind_of Google::Cloud::Spanner::Results _(results.fields[:value]).must_equal [:TIMESTAMP] @@ -43,11 +45,14 @@ end it "queries and returns an array of timestamp parameters with a nil value" do - results = db.execute_query "SELECT @value AS value", params: { value: [nil, (timestamp_value - 180.0), timestamp_value, (timestamp_value - 240.0)] } + results = db.execute_query "SELECT @value AS value", + params: { value: [nil, (timestamp_value - 180.0), timestamp_value, + (timestamp_value - 240.0)] } _(results).must_be_kind_of Google::Cloud::Spanner::Results _(results.fields[:value]).must_equal [:TIMESTAMP] - _(results.rows.first[:value]).must_equal [nil, (timestamp_value - 180.0), timestamp_value, (timestamp_value - 240.0)] + _(results.rows.first[:value]).must_equal [nil, (timestamp_value - 180.0), timestamp_value, + (timestamp_value - 240.0)] end 
it "queries and returns an empty array of timestamp parameters" do @@ -78,19 +83,22 @@ end it "queries and returns an array of timestamp parameters" do - results = db.execute_query "SELECT @value AS value", params: { value: [(datetime_value - 1), datetime_value, (datetime_value + 1)] } + results = db.execute_query "SELECT @value AS value", + params: { value: [(datetime_value - 1), datetime_value, (datetime_value + 1)] } _(results).must_be_kind_of Google::Cloud::Spanner::Results _(results.fields[:value]).must_equal [:TIMESTAMP] - _(results.rows.first[:value]).must_equal [(timestamp_value - 86400), timestamp_value, (timestamp_value + 86400)] + _(results.rows.first[:value]).must_equal [(timestamp_value - 86_400), timestamp_value, (timestamp_value + 86_400)] end it "queries and returns an array of timestamp parameters with a nil value" do - results = db.execute_query "SELECT @value AS value", params: { value: [nil, (datetime_value - 1), datetime_value, (datetime_value + 1)] } + results = db.execute_query "SELECT @value AS value", + params: { value: [nil, (datetime_value - 1), datetime_value, (datetime_value + 1)] } _(results).must_be_kind_of Google::Cloud::Spanner::Results _(results.fields[:value]).must_equal [:TIMESTAMP] - _(results.rows.first[:value]).must_equal [nil, (timestamp_value - 86400), timestamp_value, (timestamp_value + 86400)] + _(results.rows.first[:value]).must_equal [nil, (timestamp_value - 86_400), timestamp_value, + (timestamp_value + 86_400)] end end end diff --git a/google-cloud-spanner/acceptance/spanner/client/pdml_test.rb b/google-cloud-spanner/acceptance/spanner/client/pdml_test.rb index 39c20b132a69..181b8b448b65 100644 --- a/google-cloud-spanner/acceptance/spanner/client/pdml_test.rb +++ b/google-cloud-spanner/acceptance/spanner/client/pdml_test.rb @@ -16,17 +16,21 @@ require "concurrent" describe "Spanner Client", :pdml, :spanner do - let(:db) { {gsql: spanner_client, pg: spanner_pg_client} } + let :db do + { gsql: spanner_client, pg: 
spanner_pg_client } + end before do db[:gsql].commit do |c| c.delete "accounts" c.insert "accounts", default_account_rows end - db[:pg].commit do |c| - c.delete "accounts" - c.insert "accounts", default_pg_account_rows - end unless emulator_enabled? + unless emulator_enabled? + db[:pg].commit do |c| + c.delete "accounts" + c.insert "accounts", default_pg_account_rows + end + end end after do @@ -35,7 +39,7 @@ end dialects = [:gsql] - dialects.push(:pg) unless emulator_enabled? + dialects.push :pg unless emulator_enabled? dialects.each do |dialect| it "executes a simple Partitioned DML statement for #{dialect}" do @@ -54,7 +58,8 @@ _(prior_results.rows.count).must_equal 2 query_options = { optimizer_version: "3", optimizer_statistics_package: "latest" } - pdml_row_count = db[dialect].execute_partition_update "UPDATE accounts SET active = TRUE WHERE active = FALSE", query_options: query_options + pdml_row_count = db[dialect].execute_partition_update "UPDATE accounts SET active = TRUE WHERE active = FALSE", + query_options: query_options _(pdml_row_count).must_equal 1 post_results = db[dialect].execute_sql "SELECT * FROM accounts WHERE active = TRUE", single_use: { strong: true } @@ -64,7 +69,7 @@ describe "request options for #{dialect}" do it "execute Partitioned DML statement with priority options for #{dialect}" do pdml_row_count = db[dialect].execute_partition_update "UPDATE accounts SET active = TRUE WHERE active = FALSE", - request_options: { priority: :PRIORITY_MEDIUM } + request_options: { priority: :PRIORITY_MEDIUM } _(pdml_row_count).must_equal 1 end @@ -72,8 +77,8 @@ it "executes a Partitioned DML statement with request tagging option for #{dialect}" do pdml_row_count = db[dialect].execute_partition_update "UPDATE accounts SET active = TRUE WHERE active = FALSE", - request_options: { tag: "Tag-P-1" } + request_options: { tag: "Tag-P-1" } _(pdml_row_count).must_equal 1 end - end + end end diff --git 
a/google-cloud-spanner/acceptance/spanner/client/read_test.rb b/google-cloud-spanner/acceptance/spanner/client/read_test.rb index 8c669578dbf7..d081fa3065eb 100644 --- a/google-cloud-spanner/acceptance/spanner/client/read_test.rb +++ b/google-cloud-spanner/acceptance/spanner/client/read_test.rb @@ -42,11 +42,38 @@ end it "reads all by default" do - _(db.read(table_name, [:id]).rows.map(&:to_h)).must_equal [{ id: 1 }, { id: 2 }, { id: 3 }, { id: 4 }, { id: 5 }, { id: 6 }, { id: 7 }, { id: 8 }, { id: 9 }, { id: 10 }, { id: 11 }, { id: 12 }] - _(db.read(table_name, [:id], limit: 5).rows.map(&:to_h)).must_equal [{ id: 1 }, { id: 2 }, { id: 3 }, { id: 4 }, { id: 5 }] - - _(db.read(table_name, [:id, :bool], index: table_index).rows.map(&:to_h)).must_equal [{ id: 1, bool: false }, { id: 2, bool: false }, { id: 4, bool: false }, { id: 6, bool: false }, { id: 8, bool: false }, { id: 9, bool: false }, { id: 10, bool: false }, { id: 12, bool: false }, { id: 3, bool: true }, { id: 5, bool: true }, { id: 7, bool: true }, { id: 11, bool: true }] - _(db.read(table_name, [:id, :bool], index: table_index, limit: 5).rows.map(&:to_h)).must_equal [{ id: 1, bool: false }, { id: 2, bool: false }, { id: 4, bool: false }, { id: 6, bool: false }, { id: 8, bool: false }] + _(db.read(table_name, + [:id]).rows.map(&:to_h)).must_equal [{ id: 1 }, + { id: 2 }, + { id: 3 }, + { id: 4 }, + { id: 5 }, + { id: 6 }, + { id: 7 }, + { id: 8 }, + { id: 9 }, + { id: 10 }, + { id: 11 }, + { id: 12 }] + _(db.read(table_name, [:id], + limit: 5).rows.map(&:to_h)).must_equal [{ id: 1 }, { id: 2 }, { id: 3 }, { id: 4 }, { id: 5 }] + + _(db.read(table_name, [:id, :bool], + index: table_index).rows.map(&:to_h)).must_equal [{ id: 1, bool: false }, + { id: 2, bool: false }, + { id: 4, bool: false }, + { id: 6, bool: false }, + { id: 8, bool: false }, + { id: 9, bool: false }, + { id: 10, bool: false }, + { id: 12, bool: false }, + { id: 3, bool: true }, + { id: 5, bool: true }, + { id: 7, bool: true }, + { id: 
11, bool: true }] + _(db.read(table_name, [:id, :bool], index: table_index, +limit: 5).rows.map(&:to_h)).must_equal [{ id: 1, bool: false }, { id: 2, bool: false }, { id: 4, bool: false }, + { id: 6, bool: false }, { id: 8, bool: false }] end it "empty read works" do @@ -64,7 +91,8 @@ _(db.read(table_name, [:id], keys: 1).rows.map(&:to_h)).must_equal [{ id: 1 }] _(db.read(table_name, [:id], keys: [1]).rows.map(&:to_h)).must_equal [{ id: 1 }] _(db.read(table_name, [:id], keys: [3, 4, 5]).rows.map(&:to_h)).must_equal [{ id: 3 }, { id: 4 }, { id: 5 }] - _(db.read(table_name, [:id], keys: [3, 5, 7, 11]).rows.map(&:to_h)).must_equal [{ id: 3 }, { id: 5 }, { id: 7 }, { id: 11 }] + _(db.read(table_name, [:id], + keys: [3, 5, 7, 11]).rows.map(&:to_h)).must_equal [{ id: 3 }, { id: 5 }, { id: 7 }, { id: 11 }] _(db.read(table_name, [:id], keys: [3, 5, 7, 11], limit: 2).rows.map(&:to_h)).must_equal [{ id: 3 }, { id: 5 }] end @@ -72,58 +100,115 @@ it "reads with range key sets" do _(db.read(table_name, [:id], keys: 3..5).rows.map(&:to_h)).must_equal [{ id: 3 }, { id: 4 }, { id: 5 }] _(db.read(table_name, [:id], keys: 3...5).rows.map(&:to_h)).must_equal [{ id: 3 }, { id: 4 }] - _(db.read(table_name, [:id], keys: db.range(3, 5, exclude_begin: true)).rows.map(&:to_h)).must_equal [{ id: 4 }, { id: 5 }] - _(db.read(table_name, [:id], keys: db.range(3, 5, exclude_begin: true, exclude_end: true)).rows.map(&:to_h)).must_equal [{ id: 4 }] - _(db.read(table_name, [:id], keys: [7]..[]).rows.map(&:to_h)).must_equal [{ id: 7 }, { id: 8 }, { id: 9 }, { id: 10 }, { id: 11 }, { id: 12 }] - _(db.read(table_name, [:id], keys: db.range([7], [], exclude_begin: true)).rows.map(&:to_h)).must_equal [{ id: 8 }, { id: 9 }, { id: 10 }, { id: 11 }, { id: 12 }] - _(db.read(table_name, [:id], keys: []..[5]).rows.map(&:to_h)).must_equal [{ id: 1 }, { id: 2 }, { id: 3 }, { id: 4 }, { id: 5 }] - _(db.read(table_name, [:id], keys: []...[5]).rows.map(&:to_h)).must_equal [{ id: 1 }, { id: 2 }, { id: 3 }, { id: 4 
}] + _(db.read(table_name, [:id], + keys: db.range(3, 5, exclude_begin: true)).rows.map(&:to_h)).must_equal [{ id: 4 }, { id: 5 }] + _(db.read(table_name, [:id], + keys: db.range(3, 5, exclude_begin: true, exclude_end: true)).rows.map(&:to_h)).must_equal [{ id: 4 }] + _(db.read(table_name, [:id], + keys: [7]..[]).rows.map(&:to_h)).must_equal [{ id: 7 }, { id: 8 }, { id: 9 }, { id: 10 }, { id: 11 }, + { id: 12 }] + _(db.read(table_name, [:id], + keys: db.range([7], [], exclude_begin: true)).rows.map(&:to_h)).must_equal [{ id: 8 }, + { id: 9 }, + { id: 10 }, + { id: 11 }, + { id: 12 }] + _(db.read(table_name, [:id], + keys: []..[5]).rows.map(&:to_h)).must_equal [{ id: 1 }, { id: 2 }, { id: 3 }, { id: 4 }, { id: 5 }] + _(db.read(table_name, [:id], + keys: []...[5]).rows.map(&:to_h)).must_equal [{ id: 1 }, { id: 2 }, { id: 3 }, { id: 4 }] end it "reads with range key sets and limit" do _(db.read(table_name, [:id], keys: 3..9, limit: 2).rows.map(&:to_h)).must_equal [{ id: 3 }, { id: 4 }] _(db.read(table_name, [:id], keys: 3...9, limit: 2).rows.map(&:to_h)).must_equal [{ id: 3 }, { id: 4 }] - _(db.read(table_name, [:id], keys: db.range(3, 9, exclude_begin: true), limit: 2).rows.map(&:to_h)).must_equal [{ id: 4 }, { id: 5 }] - _(db.read(table_name, [:id], keys: db.range(3, 9, exclude_begin: true, exclude_end: true), limit: 2).rows.map(&:to_h)).must_equal [{ id: 4 }, { id: 5 }] - _(db.read(table_name, [:id], keys: [3]..[], limit: 2).rows.map(&:to_h)).must_equal [{ id: 3}, { id: 4}] - _(db.read(table_name, [:id], keys: db.range([3], [], exclude_begin: true), limit: 2).rows.map(&:to_h)).must_equal [{ id: 4 }, { id: 5 }] + _(db.read(table_name, [:id], keys: db.range(3, 9, exclude_begin: true), +limit: 2).rows.map(&:to_h)).must_equal [{ id: 4 }, { id: 5 }] + _(db.read(table_name, [:id], keys: db.range(3, 9, exclude_begin: true, exclude_end: true), +limit: 2).rows.map(&:to_h)).must_equal [{ id: 4 }, { id: 5 }] + _(db.read(table_name, [:id], keys: [3]..[], limit: 
2).rows.map(&:to_h)).must_equal [{ id: 3 }, { id: 4 }] + _(db.read(table_name, [:id], keys: db.range([3], [], exclude_begin: true), +limit: 2).rows.map(&:to_h)).must_equal [{ id: 4 }, { id: 5 }] _(db.read(table_name, [:id], keys: []..[9], limit: 2).rows.map(&:to_h)).must_equal [{ id: 1 }, { id: 2 }] _(db.read(table_name, [:id], keys: []...[9], limit: 2).rows.map(&:to_h)).must_equal [{ id: 1 }, { id: 2 }] end it "reads from index with a list of composite keys" do - _(db.read(table_name, [:id, :bool], index: table_index, keys: [[false, 1]]).rows.map(&:to_h)).must_equal [{ id: 1, bool: false }] + _(db.read(table_name, [:id, :bool], index: table_index, +keys: [[false, 1]]).rows.map(&:to_h)).must_equal [{ id: 1, bool: false }] # Provide 3 keys, but only get 2 results... - _(db.read(table_name, [:id, :bool], index: table_index, keys: [[true, 3], [true, 4], [true, 5]]).rows.map(&:to_h)).must_equal [{ id: 3, bool: true }, { id: 5, bool: true }] - _(db.read(table_name, [:id, :bool], index: table_index, keys: [[true, 3], [true, 5], [true, 7], [true, 11]]).rows.map(&:to_h)).must_equal [{ id: 3, bool: true }, { id: 5, bool: true }, { id: 7, bool: true }, { id: 11, bool: true }] - - _(db.read(table_name, [:id, :bool], index: table_index, keys: [[false, 1], [false, 2], [false, 3], [false, 4], [false, 5], [false, 6]], limit: 3).rows.map(&:to_h)).must_equal [{ id: 1, bool: false }, { id: 2, bool: false }, { id: 4, bool: false }] + _(db.read(table_name, [:id, :bool], index: table_index, +keys: [[true, 3], [true, 4], [true, 5]]).rows.map(&:to_h)).must_equal [{ id: 3, bool: true }, { id: 5, bool: true }] + _(db.read(table_name, [:id, :bool], index: table_index, +keys: [[true, 3], [true, 5], [true, 7], [true, 11]]).rows.map(&:to_h)).must_equal [{ id: 3, bool: true }, + { id: 5, bool: true }, + { id: 7, bool: true }, + { id: 11, bool: true }] + + _(db.read(table_name, [:id, :bool], index: table_index, +keys: [[false, 1], [false, 2], [false, 3], [false, 4], [false, 5], [false, 6]], 
limit: 3).rows.map(&:to_h)).must_equal [ + { id: 1, bool: false }, { id: 2, bool: false }, { id: 4, bool: false } +] end it "reads from index with range key sets" do - _(db.read(table_name, [:id, :bool], index: table_index, keys: [true, 3]..[true, 7]).rows.map(&:to_h)).must_equal [{ id: 3, bool: true }, { id: 5, bool: true }, { id: 7, bool: true }] - _(db.read(table_name, [:id, :bool], index: table_index, keys: [true, 3]...[true, 7]).rows.map(&:to_h)).must_equal [{ id: 3, bool: true }, { id: 5, bool: true }] - _(db.read(table_name, [:id, :bool], index: table_index, keys: db.range([true, 3], [true, 7], exclude_begin: true)).rows.map(&:to_h)).must_equal [{ id: 5, bool: true }, { id: 7, bool: true }] - _(db.read(table_name, [:id, :bool], index: table_index, keys: db.range([true, 3], [true, 7], exclude_begin: true, exclude_end: true)).rows.map(&:to_h)).must_equal [{ id: 5, bool: true }] - _(db.read(table_name, [:id, :bool], index: table_index, keys: [true, 7]..[]).rows.map(&:to_h)).must_equal [{ id: 7, bool: true }, { id: 11, bool: true }] - _(db.read(table_name, [:id, :bool], index: table_index, keys: db.range([true, 7], [], exclude_begin: true)).rows.map(&:to_h)).must_equal [{ id: 11, bool: true }] - _(db.read(table_name, [:id, :bool], index: table_index, keys: [true]..[true, 7]).rows.map(&:to_h)).must_equal [{ id: 3, bool: true }, { id: 5, bool: true }, { id: 7, bool: true }] - _(db.read(table_name, [:id, :bool], index: table_index, keys: [true]...[true, 7]).rows.map(&:to_h)).must_equal [{ id: 3, bool: true }, { id: 5, bool: true }] + _(db.read(table_name, [:id, :bool], index: table_index, +keys: [true, 3]..[true, 7]).rows.map(&:to_h)).must_equal [{ id: 3, bool: true }, { id: 5, bool: true }, + { id: 7, bool: true }] + _(db.read(table_name, [:id, :bool], index: table_index, +keys: [true, 3]...[true, 7]).rows.map(&:to_h)).must_equal [{ id: 3, bool: true }, { id: 5, bool: true }] + _(db.read(table_name, [:id, :bool], index: table_index, +keys: db.range([true, 3], 
[true, 7], exclude_begin: true)).rows.map(&:to_h)).must_equal [{ id: 5, bool: true }, + { id: 7, bool: true }] + _(db.read(table_name, [:id, :bool], index: table_index, +keys: db.range([true, 3], [true, 7], exclude_begin: true, exclude_end: true)).rows.map(&:to_h)).must_equal [{ id: 5, +bool: true }] + _(db.read(table_name, [:id, :bool], index: table_index, +keys: [true, 7]..[]).rows.map(&:to_h)).must_equal [{ id: 7, bool: true }, { id: 11, bool: true }] + _(db.read(table_name, [:id, :bool], index: table_index, +keys: db.range([true, 7], [], exclude_begin: true)).rows.map(&:to_h)).must_equal [{ id: 11, bool: true }] + _(db.read(table_name, [:id, :bool], index: table_index, +keys: [true]..[true, 7]).rows.map(&:to_h)).must_equal [{ id: 3, bool: true }, { id: 5, bool: true }, + { id: 7, bool: true }] + _(db.read(table_name, [:id, :bool], index: table_index, +keys: [true]...[true, 7]).rows.map(&:to_h)).must_equal [{ id: 3, bool: true }, { id: 5, bool: true }] end it "reads from index with range key sets and limit" do - _(db.read(table_name, [:id, :bool], index: table_index, keys: [true, 3]..[true, 11], limit: 2).rows.map(&:to_h)).must_equal [{ id: 3, bool: true }, { id: 5, bool: true }] - _(db.read(table_name, [:id, :bool], index: table_index, keys: [true, 3]...[true, 11], limit: 2).rows.map(&:to_h)).must_equal [{ id: 3, bool: true }, { id: 5, bool: true }] - _(db.read(table_name, [:id, :bool], index: table_index, keys: db.range([true, 3], [true, 7], exclude_begin: true), limit: 2).rows.map(&:to_h)).must_equal [{ id: 5, bool: true }, { id: 7, bool: true }] - _(db.read(table_name, [:id, :bool], index: table_index, keys: db.range([true, 3], [true, 7], exclude_begin: true, exclude_end: true), limit: 2).rows.map(&:to_h)).must_equal [{ id: 5, bool: true }] - _(db.read(table_name, [:id, :bool], index: table_index, keys: [true, 5]..[], limit: 2).rows.map(&:to_h)).must_equal [{ id: 5, bool: true }, { id: 7, bool: true }] - _(db.read(table_name, [:id, :bool], index: 
table_index, keys: db.range([true, 5], [], exclude_begin: true), limit: 2).rows.map(&:to_h)).must_equal [{ id: 7, bool: true }, { id: 11, bool: true }] - _(db.read(table_name, [:id, :bool], index: table_index, keys: [true]..[true, 11], limit: 2).rows.map(&:to_h)).must_equal [{ id: 3, bool: true }, { id: 5, bool: true }] - _(db.read(table_name, [:id, :bool], index: table_index, keys: [true]...[true, 11], limit: 2).rows.map(&:to_h)).must_equal [{ id: 3, bool: true }, { id: 5, bool: true }] + _(db.read(table_name, [:id, :bool], index: table_index, keys: [true, 3]..[true, 11], +limit: 2).rows.map(&:to_h)).must_equal [{ id: 3, bool: true }, { id: 5, bool: true }] + _(db.read(table_name, [:id, :bool], index: table_index, keys: [true, 3]...[true, 11], +limit: 2).rows.map(&:to_h)).must_equal [{ id: 3, bool: true }, { id: 5, bool: true }] + _(db.read(table_name, [:id, :bool], index: table_index, keys: db.range([true, 3], [true, 7], exclude_begin: true), +limit: 2).rows.map(&:to_h)).must_equal [{ id: 5, bool: true }, { id: 7, bool: true }] + _(db.read(table_name, [:id, :bool], index: table_index, +keys: db.range([true, 3], [true, 7], exclude_begin: true, exclude_end: true), limit: 2).rows.map(&:to_h)).must_equal [{ + id: 5, bool: true +}] + _(db.read(table_name, [:id, :bool], index: table_index, keys: [true, 5]..[], +limit: 2).rows.map(&:to_h)).must_equal [{ id: 5, bool: true }, { id: 7, bool: true }] + _(db.read(table_name, [:id, :bool], index: table_index, keys: db.range([true, 5], [], exclude_begin: true), +limit: 2).rows.map(&:to_h)).must_equal [{ id: 7, bool: true }, { id: 11, bool: true }] + _(db.read(table_name, [:id, :bool], index: table_index, keys: [true]..[true, 11], +limit: 2).rows.map(&:to_h)).must_equal [{ id: 3, bool: true }, { id: 5, bool: true }] + _(db.read(table_name, [:id, :bool], index: table_index, keys: [true]...[true, 11], +limit: 2).rows.map(&:to_h)).must_equal [{ id: 3, bool: true }, { id: 5, bool: true }] end it "reads with request tag option" do 
request_options = { tag: "Tag-R-1" } - _(db.read(table_name, [:id], request_options: request_options).rows.map(&:to_h)).must_equal [{ id: 1 }, { id: 2 }, { id: 3 }, { id: 4 }, { id: 5 }, { id: 6 }, { id: 7 }, { id: 8 }, { id: 9 }, { id: 10 }, { id: 11 }, { id: 12 }] + _(db.read(table_name, [:id], + request_options: request_options).rows.map(&:to_h)).must_equal [{ id: 1 }, + { id: 2 }, + { id: 3 }, + { id: 4 }, + { id: 5 }, + { id: 6 }, + { id: 7 }, + { id: 8 }, + { id: 9 }, + { id: 10 }, + { id: 11 }, + { id: 12 }] end end diff --git a/google-cloud-spanner/acceptance/spanner/client/single_use_test.rb b/google-cloud-spanner/acceptance/spanner/client/single_use_test.rb index 74121491790c..e2cd055b3b02 100644 --- a/google-cloud-spanner/acceptance/spanner/client/single_use_test.rb +++ b/google-cloud-spanner/acceptance/spanner/client/single_use_test.rb @@ -15,25 +15,32 @@ require "spanner_helper" describe "Spanner Client", :single_use, :spanner do - let(:db) { {gsql: spanner_client, pg: spanner_pg_client} } - let(:columns) {{ gsql: [:account_id, :username, :friends, :active, :reputation, :avatar], - pg: [:account_id, :username, :active, :reputation, :avatar] - }} - let(:fields_hash) {{ gsql: { account_id: :INT64, username: :STRING, friends: [:INT64], active: :BOOL, reputation: :FLOAT64, avatar: :BYTES }, - pg: { account_id: :INT64, username: :STRING, active: :BOOL, reputation: :FLOAT64, avatar: :BYTES } - }} + let :db do + { gsql: spanner_client, pg: spanner_pg_client } + end + let :columns do + { gsql: [:account_id, :username, :friends, :active, :reputation, :avatar], + pg: [:account_id, :username, :active, :reputation, :avatar] } + end + let :fields_hash do + { gsql: { account_id: :INT64, username: :STRING, friends: [:INT64], + active: :BOOL, reputation: :FLOAT64, avatar: :BYTES }, + pg: { account_id: :INT64, username: :STRING, active: :BOOL, reputation: :FLOAT64, avatar: :BYTES } } + end before do - setup_timestamp_gsql = db[:gsql].commit do |c| + 
setup_timestamp_gsql = db[:gsql].commit do |c| c.delete "accounts" c.insert "accounts", default_account_rows end - setup_timestamp_pg = db[:pg].commit do |c| - c.delete "accounts" - c.insert "accounts", default_pg_account_rows - end unless emulator_enabled? - @setup_timestamp = {gsql: setup_timestamp_gsql, pg: setup_timestamp_pg} - @default_rows = {gsql: default_account_rows, pg: default_pg_account_rows} + unless emulator_enabled? + setup_timestamp_pg = db[:pg].commit do |c| + c.delete "accounts" + c.insert "accounts", default_pg_account_rows + end + end + @setup_timestamp = { gsql: setup_timestamp_gsql, pg: setup_timestamp_pg } + @default_rows = { gsql: default_account_rows, pg: default_pg_account_rows } end after do @@ -42,140 +49,145 @@ end dialects = [:gsql] - dialects.push(:pg) unless emulator_enabled? + dialects.push :pg unless emulator_enabled? dialects.each do |dialect| it "runs a query with strong option for #{dialect}" do results = db[dialect].execute_sql "SELECT * FROM accounts ORDER BY account_id ASC", single_use: { strong: true } - + _(results).must_be_kind_of Google::Cloud::Spanner::Results _(results.fields.to_h).must_equal fields_hash[dialect] results.rows.zip(@default_rows[dialect]).each do |expected, actual| assert_accounts_equal expected, actual end - + _(results.timestamp).wont_be :nil? _(results.timestamp).must_be_close_to @setup_timestamp[dialect], 3 # within 3 seconds? end - + it "runs a read with strong option for #{dialect}" do results = db[dialect].read "accounts", columns[dialect], single_use: { strong: true } - + _(results).must_be_kind_of Google::Cloud::Spanner::Results _(results.fields.to_h).must_equal fields_hash[dialect] results.rows.zip(@default_rows[dialect]).each do |expected, actual| assert_accounts_equal expected, actual end - + _(results.timestamp).wont_be :nil? _(results.timestamp).must_be_close_to @setup_timestamp[dialect], 3 # within 3 seconds? 
end - + it "runs a query with timestamp option for #{dialect}" do - results = db[dialect].execute_sql "SELECT * FROM accounts ORDER BY account_id ASC", single_use: { timestamp: @setup_timestamp[dialect] } - + results = db[dialect].execute_sql "SELECT * FROM accounts ORDER BY account_id ASC", + single_use: { timestamp: @setup_timestamp[dialect] } + _(results).must_be_kind_of Google::Cloud::Spanner::Results _(results.fields.to_h).must_equal fields_hash[dialect] results.rows.zip(@default_rows[dialect]).each do |expected, actual| assert_accounts_equal expected, actual end - + _(results.timestamp).wont_be :nil? _(results.timestamp).must_be_close_to @setup_timestamp[dialect], 1 end - + it "runs a read with timestamp option for #{dialect}" do results = db[dialect].read "accounts", columns[dialect], single_use: { timestamp: @setup_timestamp[dialect] } - + _(results).must_be_kind_of Google::Cloud::Spanner::Results _(results.fields.to_h).must_equal fields_hash[dialect] results.rows.zip(@default_rows[dialect]).each do |expected, actual| assert_accounts_equal expected, actual end - + _(results.timestamp).wont_be :nil? _(results.timestamp).must_be_close_to @setup_timestamp[dialect], 1 end - + it "runs a query with staleness option for #{dialect}" do - results = db[dialect].execute_sql "SELECT * FROM accounts ORDER BY account_id ASC", single_use: { staleness: 0.0001 } - + results = db[dialect].execute_sql "SELECT * FROM accounts ORDER BY account_id ASC", + single_use: { staleness: 0.0001 } + _(results).must_be_kind_of Google::Cloud::Spanner::Results _(results.fields.to_h).must_equal fields_hash[dialect] results.rows.zip(@default_rows[dialect]).each do |expected, actual| assert_accounts_equal expected, actual end - + _(results.timestamp).wont_be :nil? _(results.timestamp).must_be_close_to @setup_timestamp[dialect], 3 # within 3 seconds? 
end - + it "runs a read with staleness option for #{dialect}" do results = db[dialect].read "accounts", columns[dialect], single_use: { staleness: 0.0001 } - + _(results).must_be_kind_of Google::Cloud::Spanner::Results _(results.fields.to_h).must_equal fields_hash[dialect] results.rows.zip(@default_rows[dialect]).each do |expected, actual| assert_accounts_equal expected, actual end - + _(results.timestamp).wont_be :nil? _(results.timestamp).must_be_close_to @setup_timestamp[dialect], 3 # within 3 seconds? end - + it "runs a query with bounded_timestamp option for #{dialect}" do - results = db[dialect].execute_sql "SELECT * FROM accounts ORDER BY account_id ASC", single_use: { bounded_timestamp: @setup_timestamp[dialect] } - + results = db[dialect].execute_sql "SELECT * FROM accounts ORDER BY account_id ASC", + single_use: { bounded_timestamp: @setup_timestamp[dialect] } + _(results).must_be_kind_of Google::Cloud::Spanner::Results _(results.fields.to_h).must_equal fields_hash[dialect] results.rows.zip(@default_rows[dialect]).each do |expected, actual| assert_accounts_equal expected, actual end - + _(results.timestamp).wont_be :nil? _(results.timestamp).must_be_close_to @setup_timestamp[dialect], 3 # within 3 seconds? end - + it "runs a read with bounded_timestamp option for #{dialect}" do - results = db[dialect].read "accounts", columns[dialect], single_use: { bounded_timestamp: @setup_timestamp[dialect] } - + results = db[dialect].read "accounts", columns[dialect], + single_use: { bounded_timestamp: @setup_timestamp[dialect] } + _(results).must_be_kind_of Google::Cloud::Spanner::Results _(results.fields.to_h).must_equal fields_hash[dialect] results.rows.zip(@default_rows[dialect]).each do |expected, actual| assert_accounts_equal expected, actual end - + _(results.timestamp).wont_be :nil? _(results.timestamp).must_be_close_to @setup_timestamp[dialect], 3 # within 3 seconds? 
end - + it "runs a query with bounded_staleness option for #{dialect}" do - results = db[dialect].execute_sql "SELECT * FROM accounts ORDER BY account_id ASC", single_use: { bounded_staleness: 0.0001 } - + results = db[dialect].execute_sql "SELECT * FROM accounts ORDER BY account_id ASC", + single_use: { bounded_staleness: 0.0001 } + _(results).must_be_kind_of Google::Cloud::Spanner::Results _(results.fields.to_h).must_equal fields_hash[dialect] results.rows.zip(@default_rows[dialect]).each do |expected, actual| assert_accounts_equal expected, actual end - + _(results.timestamp).wont_be :nil? _(results.timestamp).must_be_close_to @setup_timestamp[dialect], 3 # within 3 seconds? end - + it "runs a read with bounded_staleness option for #{dialect}" do results = db[dialect].read "accounts", columns[dialect], single_use: { bounded_staleness: 0.0001 } - + _(results).must_be_kind_of Google::Cloud::Spanner::Results _(results.fields.to_h).must_equal fields_hash[dialect] results.rows.zip(@default_rows[dialect]).each do |expected, actual| assert_accounts_equal expected, actual end - + _(results.timestamp).wont_be :nil? _(results.timestamp).must_be_close_to @setup_timestamp[dialect], 3 # within 3 seconds? end end - + def assert_accounts_equal expected, actual if actual[:account_id].nil? _(expected[:account_id]).must_be :nil? 
@@ -210,5 +222,5 @@ def assert_accounts_equal expected, actual else _(expected[:friends]).must_equal actual[:friends] end - end + end end diff --git a/google-cloud-spanner/acceptance/spanner/client/snapshot_test.rb b/google-cloud-spanner/acceptance/spanner/client/snapshot_test.rb index 4b11512cf091..141b0d6ea410 100644 --- a/google-cloud-spanner/acceptance/spanner/client/snapshot_test.rb +++ b/google-cloud-spanner/acceptance/spanner/client/snapshot_test.rb @@ -15,30 +15,40 @@ require "spanner_helper" describe "Spanner Client", :snapshot, :spanner do - let(:db) { {gsql: spanner_client, pg: spanner_pg_client} } - let(:columns) {{ gsql: [:account_id, :username, :friends, :active, :reputation, :avatar], - pg: [:account_id, :username, :active, :reputation, :avatar] - }} - let(:fields_hash) {{ gsql: { account_id: :INT64, username: :STRING, friends: [:INT64], active: :BOOL, reputation: :FLOAT64, avatar: :BYTES }, - pg: { account_id: :INT64, username: :STRING, active: :BOOL, reputation: :FLOAT64, avatar: :BYTES } - }} - let(:select_dql) {{ gsql: "SELECT account_id, username FROM accounts WHERE account_id = @id", - pg: "SELECT account_id, username FROM accounts WHERE account_id = $1" - }} - - let(:select_params) { { gsql: { id: 1 }, pg: { p1: 1 } } } + let :db do + { gsql: spanner_client, pg: spanner_pg_client } + end + let :columns do + { gsql: [:account_id, :username, :friends, :active, :reputation, :avatar], + pg: [:account_id, :username, :active, :reputation, :avatar] } + end + let :fields_hash do + { gsql: { account_id: :INT64, username: :STRING, friends: [:INT64], + active: :BOOL, reputation: :FLOAT64, avatar: :BYTES }, + pg: { account_id: :INT64, username: :STRING, active: :BOOL, reputation: :FLOAT64, avatar: :BYTES } } + end + let :select_dql do + { gsql: "SELECT account_id, username FROM accounts WHERE account_id = @id", + pg: "SELECT account_id, username FROM accounts WHERE account_id = $1" } + end + + let :select_params do + { gsql: { id: 1 }, pg: { p1: 1 } } + 
end before do - setup_timestamp_gsql = db[:gsql].commit do |c| + setup_timestamp_gsql = db[:gsql].commit do |c| c.delete "accounts" c.insert "accounts", default_account_rows end - setup_timestamp_pg = db[:pg].commit do |c| - c.delete "accounts" - c.insert "accounts", default_pg_account_rows - end unless emulator_enabled? - @setup_timestamp = {gsql: setup_timestamp_gsql, pg: setup_timestamp_pg} - @default_rows = {gsql: default_account_rows, pg: default_pg_account_rows} + unless emulator_enabled? + setup_timestamp_pg = db[:pg].commit do |c| + c.delete "accounts" + c.insert "accounts", default_pg_account_rows + end + end + @setup_timestamp = { gsql: setup_timestamp_gsql, pg: setup_timestamp_pg } + @default_rows = { gsql: default_account_rows, pg: default_pg_account_rows } end after do @@ -47,12 +57,12 @@ end dialects = [:gsql] - dialects.push(:pg) unless emulator_enabled? + dialects.push :pg unless emulator_enabled? dialects.each do |dialect| it "runs a query for #{dialect}" do results = nil - db[dialect].snapshot do |snp| + db[dialect].snapshot do |snp| _(snp.transaction_id).wont_be :nil? _(snp.timestamp).wont_be :nil? @@ -69,7 +79,7 @@ it "runs a query with query options for #{dialect}" do query_options = { optimizer_version: "3", optimizer_statistics_package: "latest" } results = nil - db[dialect].snapshot do |snp| + db[dialect].snapshot do |snp| _(snp.transaction_id).wont_be :nil? _(snp.timestamp).wont_be :nil? @@ -85,7 +95,7 @@ it "runs a read for #{dialect}" do results = nil - db[dialect].snapshot do |snp| + db[dialect].snapshot do |snp| _(snp.transaction_id).wont_be :nil? _(snp.timestamp).wont_be :nil? @@ -101,7 +111,7 @@ it "runs a query with strong option for #{dialect}" do results = nil - db[dialect].snapshot strong: true do |snp| + db[dialect].snapshot strong: true do |snp| _(snp.transaction_id).wont_be :nil? _(snp.timestamp).wont_be :nil? 
@@ -117,7 +127,7 @@ it "runs a read with strong option for #{dialect}" do results = nil - db[dialect].snapshot strong: true do |snp| + db[dialect].snapshot strong: true do |snp| _(snp.transaction_id).wont_be :nil? _(snp.timestamp).wont_be :nil? @@ -133,7 +143,7 @@ it "runs a query with timestamp option for #{dialect}" do results = nil - db[dialect].snapshot timestamp: @setup_timestamp[dialect] do |snp| + db[dialect].snapshot timestamp: @setup_timestamp[dialect] do |snp| _(snp.transaction_id).wont_be :nil? _(snp.timestamp).wont_be :nil? @@ -149,7 +159,7 @@ it "runs a read with timestamp option for #{dialect}" do results = nil - db[dialect].snapshot timestamp: @setup_timestamp[dialect] do |snp| + db[dialect].snapshot timestamp: @setup_timestamp[dialect] do |snp| _(snp.transaction_id).wont_be :nil? _(snp.timestamp).wont_be :nil? @@ -165,7 +175,7 @@ it "runs a query with staleness option for #{dialect}" do results = nil - db[dialect].snapshot staleness: 0.0001 do |snp| + db[dialect].snapshot staleness: 0.0001 do |snp| _(snp.transaction_id).wont_be :nil? _(snp.timestamp).wont_be :nil? @@ -181,7 +191,7 @@ it "runs a read with staleness option for #{dialect}" do results = nil - db[dialect].snapshot staleness: 0.0001 do |snp| + db[dialect].snapshot staleness: 0.0001 do |snp| _(snp.transaction_id).wont_be :nil? _(snp.timestamp).wont_be :nil? @@ -201,7 +211,7 @@ sample_row = { account_id: first_row[:account_id], username: first_row[:username] } modified_row = { account_id: first_row[:account_id], username: first_row[:username].reverse } - db[dialect].snapshot strong: true do |snp| + db[dialect].snapshot strong: true do |snp| _(snp.transaction_id).wont_be :nil? _(snp.timestamp).wont_be :nil? @@ -210,7 +220,7 @@ _(results.rows.first.to_h).must_equal sample_row # outside of the snapshot, update the row! 
- db[dialect].update "accounts", modified_row + db[dialect].update "accounts", modified_row results2 = snp.read "accounts", [:account_id, :username], keys: modified_row[:account_id] # verify we got the previous row, not the modified row @@ -223,7 +233,7 @@ sample_row = { account_id: first_row[:account_id], username: first_row[:username] } modified_row = { account_id: first_row[:account_id], username: first_row[:username].reverse } - db[dialect].snapshot strong: true do |snp| + db[dialect].snapshot strong: true do |snp| _(snp.transaction_id).wont_be :nil? _(snp.timestamp).wont_be :nil? @@ -232,7 +242,7 @@ _(results.rows.first.to_h).must_equal sample_row # outside of the snapshot, update the row! - db[dialect].update "accounts", modified_row + db[dialect].update "accounts", modified_row results2 = snp.execute_sql select_dql[dialect], params: select_params[dialect] # verify we got the previous row, not the modified row @@ -245,7 +255,7 @@ sample_row = { account_id: first_row[:account_id], username: first_row[:username] } modified_row = { account_id: first_row[:account_id], username: first_row[:username].reverse } - db[dialect].snapshot timestamp: @setup_timestamp[dialect] do |snp| + db[dialect].snapshot timestamp: @setup_timestamp[dialect] do |snp| _(snp.transaction_id).wont_be :nil? _(snp.timestamp).wont_be :nil? @@ -254,7 +264,7 @@ _(results.rows.first.to_h).must_equal sample_row # outside of the snapshot, update the row! 
- db[dialect].update "accounts", modified_row + db[dialect].update "accounts", modified_row results2 = snp.read "accounts", [:account_id, :username], keys: modified_row[:account_id] # verify we got the previous row, not the modified row @@ -267,7 +277,7 @@ sample_row = { account_id: first_row[:account_id], username: first_row[:username] } modified_row = { account_id: first_row[:account_id], username: first_row[:username].reverse } - db[dialect].snapshot timestamp: @setup_timestamp[dialect] do |snp| + db[dialect].snapshot timestamp: @setup_timestamp[dialect] do |snp| _(snp.transaction_id).wont_be :nil? _(snp.timestamp).wont_be :nil? @@ -276,7 +286,7 @@ _(results.rows.first.to_h).must_equal sample_row # outside of the snapshot, update the row! - db[dialect].update "accounts", modified_row + db[dialect].update "accounts", modified_row results2 = snp.execute_sql select_dql[dialect], params: select_params[dialect] # verify we got the previous row, not the modified row @@ -289,7 +299,7 @@ sample_row = { account_id: first_row[:account_id], username: first_row[:username] } modified_row = { account_id: first_row[:account_id], username: first_row[:username].reverse } - db[dialect].snapshot staleness: 0.0001 do |snp| + db[dialect].snapshot staleness: 0.0001 do |snp| _(snp.transaction_id).wont_be :nil? _(snp.timestamp).wont_be :nil? @@ -298,7 +308,7 @@ _(results.rows.first.to_h).must_equal sample_row # outside of the snapshot, update the row! 
- db[dialect].update "accounts", modified_row + db[dialect].update "accounts", modified_row results2 = snp.read "accounts", [:account_id, :username], keys: modified_row[:account_id] # verify we got the previous row, not the modified row @@ -311,7 +321,7 @@ sample_row = { account_id: first_row[:account_id], username: first_row[:username] } modified_row = { account_id: first_row[:account_id], username: first_row[:username].reverse } - db[dialect].snapshot staleness: 0.0001 do |snp| + db[dialect].snapshot staleness: 0.0001 do |snp| _(snp.transaction_id).wont_be :nil? _(snp.timestamp).wont_be :nil? @@ -320,7 +330,7 @@ _(results.rows.first.to_h).must_equal sample_row # outside of the snapshot, update the row! - db[dialect].update "accounts", modified_row + db[dialect].update "accounts", modified_row results2 = snp.execute_sql select_dql[dialect], params: select_params[dialect] # verify we got the previous row, not the modified row @@ -329,9 +339,9 @@ end it "multiuse snapshot reads are consistent even when delete happen for #{dialect}" do - keys = @default_rows[dialect].map{|row| row[:account_id] } + keys = @default_rows[dialect].map { |row| row[:account_id] } - db[dialect].snapshot do |snp| + db[dialect].snapshot do |snp| _(snp.transaction_id).wont_be :nil? _(snp.timestamp).wont_be :nil? 
@@ -346,7 +356,7 @@ end # outside of the snapshot, delete rows - db[dialect].delete "accounts", keys + db[dialect].delete "accounts", keys # read rows and from snaphot and verify rows got from the snapshot results2 = snp.read "accounts", [:account_id, :username], keys: keys @@ -361,14 +371,14 @@ end # outside of snapshot check all rows are deleted - rows3 = db[dialect].execute_sql("SELECT * FROM accounts").rows.to_a + rows3 = db[dialect].execute_sql("SELECT * FROM accounts").rows.to_a _(rows3.count).must_equal 0 end it "multiuse snapshot reads with read timestamp are consistent even when delete happen for #{dialect}" do - keys = @default_rows[dialect].map{|row| row[:account_id] } + keys = @default_rows[dialect].map { |row| row[:account_id] } - db[dialect].snapshot read_timestamp: @setup_timestamp[dialect] do |snp| + db[dialect].snapshot read_timestamp: @setup_timestamp[dialect] do |snp| _(snp.transaction_id).wont_be :nil? _(snp.timestamp).wont_be :nil? @@ -383,7 +393,7 @@ end # outside of the snapshot, delete rows - db[dialect].delete "accounts", keys + db[dialect].delete "accounts", keys # read rows and from snaphot and verify rows got from the snapshot results2 = snp.read "accounts", [:account_id, :username], keys: keys @@ -397,17 +407,17 @@ end # outside of snapshot check all rows are deleted - rows3 = db[dialect].execute_sql("SELECT * FROM accounts").rows.to_a + rows3 = db[dialect].execute_sql("SELECT * FROM accounts").rows.to_a _(rows3.count).must_equal 0 end it "multiuse snapshot reads with exact staleness are consistent even when delete happen for #{dialect}" do - keys = @default_rows[dialect].map{|row| row[:account_id] } + keys = @default_rows[dialect].map { |row| row[:account_id] } sleep 1 delta = 0.001 - db[dialect].snapshot exact_staleness: delta do |snp| + db[dialect].snapshot exact_staleness: delta do |snp| _(snp.transaction_id).wont_be :nil? _(snp.timestamp).wont_be :nil? 
@@ -422,7 +432,7 @@ end # outside of the snapshot, delete rows - db[dialect].delete "accounts", keys + db[dialect].delete "accounts", keys # read rows and from snaphot and verify rows got from the snapshot results2 = snp.read "accounts", [:account_id, :username], keys: keys @@ -436,11 +446,11 @@ end # outside of snapshot check all rows are deleted - rows3 = db[dialect].execute_sql("SELECT * FROM accounts").rows.to_a + rows3 = db[dialect].execute_sql("SELECT * FROM accounts").rows.to_a _(rows3.count).must_equal 0 end - end - + end + def assert_accounts_equal expected, actual if actual[:account_id].nil? diff --git a/google-cloud-spanner/acceptance/spanner/client/transaction_test.rb b/google-cloud-spanner/acceptance/spanner/client/transaction_test.rb index 2e3ac53b370d..c594860dc07a 100644 --- a/google-cloud-spanner/acceptance/spanner/client/transaction_test.rb +++ b/google-cloud-spanner/acceptance/spanner/client/transaction_test.rb @@ -18,8 +18,10 @@ describe "Spanner Client", :transaction, :spanner do let(:db) { spanner_client } let(:columns) { [:account_id, :username, :friends, :active, :reputation, :avatar] } - let(:fields_hash) { { account_id: :INT64, username: :STRING, friends: [:INT64], active: :BOOL, reputation: :FLOAT64, avatar: :BYTES } } - let(:additional_account) { { account_id: 4, username: "swcloud", reputation: 99.894, active: true, friends: [1,2] } } + let :fields_hash do + { account_id: :INT64, username: :STRING, friends: [:INT64], active: :BOOL, reputation: :FLOAT64, avatar: :BYTES } + end + let(:additional_account) { { account_id: 4, username: "swcloud", reputation: 99.894, active: true, friends: [1, 2] } } let(:query_reputation) { "SELECT reputation FROM accounts WHERE account_id = 1 LIMIT 1" } before do @@ -157,7 +159,7 @@ new_val = tx_val + 1 tx.update "accounts", [{ account_id: 1, reputation: new_val }] # puts "write 2" - end # Thread 2 commits now + end commit_latch.count_down # Let thread 1 commit now # puts "commit 2" end @@ -194,7 +196,7 @@ 
tx.execute_query "SELECT * from accounts", request_options: { tag: "Tag-1-1" } tx.batch_update request_options: { tag: "Tag-1-2" } do |b| b.batch_update( - "UPDATE accounts SET username = 'Charlie' WHERE account_id = 1", + "UPDATE accounts SET username = 'Charlie' WHERE account_id = 1" ) end diff --git a/google-cloud-spanner/acceptance/spanner/client/types/bytes_test.rb b/google-cloud-spanner/acceptance/spanner/client/types/bytes_test.rb index 7d6397e51a4d..bbf41278acc1 100644 --- a/google-cloud-spanner/acceptance/spanner/client/types/bytes_test.rb +++ b/google-cloud-spanner/acceptance/spanner/client/types/bytes_test.rb @@ -44,7 +44,7 @@ it "writes and reads random bytes" do id = SecureRandom.int64 - random_bytes = StringIO.new(SecureRandom.random_bytes(rand(1024..4096))) + random_bytes = StringIO.new SecureRandom.random_bytes(rand(1024..4096)) db.upsert table_name, { id: id, byte: random_bytes } results = db.read table_name, [:id, :byte], keys: id @@ -58,7 +58,7 @@ it "writes and queries random bytes" do id = SecureRandom.int64 - random_bytes = StringIO.new(SecureRandom.random_bytes(rand(1024..4096))) + random_bytes = StringIO.new SecureRandom.random_bytes(rand(1024..4096)) db.upsert table_name, { id: id, byte: random_bytes } results = db.execute_query "SELECT id, byte FROM #{table_name} WHERE id = @id", params: { id: id } diff --git a/google-cloud-spanner/acceptance/spanner/client/types/date_test.rb b/google-cloud-spanner/acceptance/spanner/client/types/date_test.rb index 0bda13a6c6ef..aae3369f5bb5 100644 --- a/google-cloud-spanner/acceptance/spanner/client/types/date_test.rb +++ b/google-cloud-spanner/acceptance/spanner/client/types/date_test.rb @@ -60,42 +60,50 @@ it "writes and reads array of date" do id = SecureRandom.int64 - db.upsert table_name, { id: id, dates: [Date.parse("2016-12-30"), Date.parse("2016-12-31"), Date.parse("2017-01-01")] } + db.upsert table_name, + { id: id, dates: [Date.parse("2016-12-30"), Date.parse("2016-12-31"), 
Date.parse("2017-01-01")] } results = db.read table_name, [:id, :dates], keys: id _(results).must_be_kind_of Google::Cloud::Spanner::Results _(results.fields.to_h).must_equal({ id: :INT64, dates: [:DATE] }) - _(results.rows.first.to_h).must_equal({ id: id, dates: [Date.parse("2016-12-30"), Date.parse("2016-12-31"), Date.parse("2017-01-01")] }) + _(results.rows.first.to_h).must_equal({ id: id, +dates: [Date.parse("2016-12-30"), Date.parse("2016-12-31"), Date.parse("2017-01-01")] }) end it "writes and queries array of date" do id = SecureRandom.int64 - db.upsert table_name, { id: id, dates: [Date.parse("2016-12-30"), Date.parse("2016-12-31"), Date.parse("2017-01-01")] } + db.upsert table_name, + { id: id, dates: [Date.parse("2016-12-30"), Date.parse("2016-12-31"), Date.parse("2017-01-01")] } results = db.execute_query "SELECT id, dates FROM #{table_name} WHERE id = @id", params: { id: id } _(results).must_be_kind_of Google::Cloud::Spanner::Results _(results.fields.to_h).must_equal({ id: :INT64, dates: [:DATE] }) - _(results.rows.first.to_h).must_equal({ id: id, dates: [Date.parse("2016-12-30"), Date.parse("2016-12-31"), Date.parse("2017-01-01")] }) + _(results.rows.first.to_h).must_equal({ id: id, +dates: [Date.parse("2016-12-30"), Date.parse("2016-12-31"), Date.parse("2017-01-01")] }) end it "writes and reads array of date with NULL" do id = SecureRandom.int64 - db.upsert table_name, { id: id, dates: [nil, Date.parse("2016-12-30"), Date.parse("2016-12-31"), Date.parse("2017-01-01")] } + db.upsert table_name, + { id: id, dates: [nil, Date.parse("2016-12-30"), Date.parse("2016-12-31"), Date.parse("2017-01-01")] } results = db.read table_name, [:id, :dates], keys: id _(results).must_be_kind_of Google::Cloud::Spanner::Results _(results.fields.to_h).must_equal({ id: :INT64, dates: [:DATE] }) - _(results.rows.first.to_h).must_equal({ id: id, dates: [nil, Date.parse("2016-12-30"), Date.parse("2016-12-31"), Date.parse("2017-01-01")] }) + 
_(results.rows.first.to_h).must_equal({ id: id, +dates: [nil, Date.parse("2016-12-30"), Date.parse("2016-12-31"), Date.parse("2017-01-01")] }) end it "writes and queries array of date with NULL" do id = SecureRandom.int64 - db.upsert table_name, { id: id, dates: [nil, Date.parse("2016-12-30"), Date.parse("2016-12-31"), Date.parse("2017-01-01")] } + db.upsert table_name, + { id: id, dates: [nil, Date.parse("2016-12-30"), Date.parse("2016-12-31"), Date.parse("2017-01-01")] } results = db.execute_query "SELECT id, dates FROM #{table_name} WHERE id = @id", params: { id: id } _(results).must_be_kind_of Google::Cloud::Spanner::Results _(results.fields.to_h).must_equal({ id: :INT64, dates: [:DATE] }) - _(results.rows.first.to_h).must_equal({ id: id, dates: [nil, Date.parse("2016-12-30"), Date.parse("2016-12-31"), Date.parse("2017-01-01")] }) + _(results.rows.first.to_h).must_equal({ id: id, +dates: [nil, Date.parse("2016-12-30"), Date.parse("2016-12-31"), Date.parse("2017-01-01")] }) end it "writes and reads empty array of date" do diff --git a/google-cloud-spanner/acceptance/spanner/client/types/json_test.rb b/google-cloud-spanner/acceptance/spanner/client/types/json_test.rb index 16f3b50a9d84..8489833c9a54 100644 --- a/google-cloud-spanner/acceptance/spanner/client/types/json_test.rb +++ b/google-cloud-spanner/acceptance/spanner/client/types/json_test.rb @@ -19,7 +19,7 @@ let(:table_name) { "stuffs" } let(:table_types) { stuffs_table_types } let(:json_params) { { "venue" => "abc", "rating" => 10 } } - let(:json_array_params) do + let :json_array_params do 3.times.map do |i| { "venue" => "abc-#{i}", "rating" => 10 + i } end @@ -101,7 +101,7 @@ skip if emulator_enabled? id = SecureRandom.int64 - params = [nil].concat(json_array_params) + params = [nil].concat json_array_params db.upsert table_name, { id: id, json_array: params } results = db.read table_name, [:id, :json_array], keys: id @@ -114,7 +114,7 @@ skip if emulator_enabled? 
id = SecureRandom.int64 - params = [nil].concat(json_array_params) + params = [nil].concat json_array_params db.upsert table_name, { id: id, json_array: params } results = db.execute_query "SELECT id, json_array FROM #{table_name} WHERE id = @id", params: { id: id } diff --git a/google-cloud-spanner/acceptance/spanner/client/types/numeric_test.rb b/google-cloud-spanner/acceptance/spanner/client/types/numeric_test.rb index b1e29547fa60..70e0e7e58538 100644 --- a/google-cloud-spanner/acceptance/spanner/client/types/numeric_test.rb +++ b/google-cloud-spanner/acceptance/spanner/client/types/numeric_test.rb @@ -48,7 +48,6 @@ it "writes and reads NULL numeric" do skip if emulator_enabled? - num = BigDecimal("0.123456789") id = SecureRandom.int64 db.upsert table_name, { id: id, numeric: nil } results = db.read table_name, [:id, :numeric], keys: id @@ -61,7 +60,6 @@ it "writes and queries NULL numeric" do skip if emulator_enabled? - num = BigDecimal("0.123456789") id = SecureRandom.int64 db.upsert table_name, { id: id, numeric: nil } results = db.execute_sql "SELECT id, numeric FROM #{table_name} WHERE id = @id", params: { id: id } diff --git a/google-cloud-spanner/acceptance/spanner/client/types/pgnumeric_test.rb b/google-cloud-spanner/acceptance/spanner/client/types/pgnumeric_test.rb index 7406e43d4e4f..d8fe35ad1f98 100644 --- a/google-cloud-spanner/acceptance/spanner/client/types/pgnumeric_test.rb +++ b/google-cloud-spanner/acceptance/spanner/client/types/pgnumeric_test.rb @@ -1,4 +1,4 @@ -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -38,7 +38,8 @@ num = BigDecimal("0.123456789") id = SecureRandom.int64 db.upsert table_name, { id: id, numeric: num } - results = db.execute_sql "SELECT id, numeric FROM #{table_name} WHERE id = $1", params: { p1: id }, types: { p1: :PG_NUMERIC} + results = db.execute_sql "SELECT id, numeric FROM #{table_name} WHERE id = $1", params: { p1: id }, +types: { p1: :PG_NUMERIC } _(results).must_be_kind_of Google::Cloud::Spanner::Results _(results.fields.to_h).must_equal({ id: :INT64, numeric: :NUMERIC }) @@ -48,7 +49,6 @@ it "writes and reads NULL numeric" do skip if emulator_enabled? - num = BigDecimal("0.123456789") id = SecureRandom.int64 db.upsert table_name, { id: id, numeric: nil } results = db.read table_name, [:id, :numeric], keys: id @@ -61,10 +61,10 @@ it "writes and queries NULL numeric" do skip if emulator_enabled? - num = BigDecimal("0.123456789") id = SecureRandom.int64 db.upsert table_name, { id: id, numeric: nil } - results = db.execute_sql "SELECT id, numeric FROM #{table_name} WHERE id = $1", params: { p1: id }, types: { p1: :PG_NUMERIC} + results = db.execute_sql "SELECT id, numeric FROM #{table_name} WHERE id = $1", params: { p1: id }, +types: { p1: :PG_NUMERIC } _(results).must_be_kind_of Google::Cloud::Spanner::Results _(results.fields.to_h).must_equal({ id: :INT64, numeric: :NUMERIC }) @@ -75,7 +75,7 @@ skip if emulator_enabled? id = SecureRandom.int64 - db.upsert table_name, { id: id, numeric: BigDecimal('NaN') } + db.upsert table_name, { id: id, numeric: BigDecimal("NaN") } results = db.read table_name, [:id, :numeric], keys: id _(results).must_be_kind_of Google::Cloud::Spanner::Results @@ -87,8 +87,9 @@ skip if emulator_enabled? 
id = SecureRandom.int64 - db.upsert table_name, { id: id, numeric: BigDecimal('NaN') } - results = db.execute_sql "SELECT id, numeric FROM #{table_name} WHERE id = $1", params: { p1: id }, types: { p1: :PG_NUMERIC} + db.upsert table_name, { id: id, numeric: BigDecimal("NaN") } + results = db.execute_sql "SELECT id, numeric FROM #{table_name} WHERE id = $1", params: { p1: id }, +types: { p1: :PG_NUMERIC } _(results).must_be_kind_of Google::Cloud::Spanner::Results _(results.fields.to_h).must_equal({ id: :INT64, numeric: :NUMERIC }) diff --git a/google-cloud-spanner/acceptance/spanner/client/types/timestamp_test.rb b/google-cloud-spanner/acceptance/spanner/client/types/timestamp_test.rb index b2427170fbbf..36a9011c0558 100644 --- a/google-cloud-spanner/acceptance/spanner/client/types/timestamp_test.rb +++ b/google-cloud-spanner/acceptance/spanner/client/types/timestamp_test.rb @@ -81,42 +81,62 @@ it "writes and reads array of timestamp" do id = SecureRandom.int64 - db.upsert table_name, { id: id, timestamps: [Time.parse("2016-12-30 00:00:00Z"), Time.parse("2016-12-31 00:00:00Z"), Time.parse("2017-01-01 00:00:00Z")] } + db.upsert table_name, + { id: id, +timestamps: [Time.parse("2016-12-30 00:00:00Z"), Time.parse("2016-12-31 00:00:00Z"), + Time.parse("2017-01-01 00:00:00Z")] } results = db.read table_name, [:id, :timestamps], keys: id _(results).must_be_kind_of Google::Cloud::Spanner::Results _(results.fields.to_h).must_equal({ id: :INT64, timestamps: [:TIMESTAMP] }) - _(results.rows.first.to_h).must_equal({ id: id, timestamps: [Time.parse("2016-12-30 00:00:00Z"), Time.parse("2016-12-31 00:00:00Z"), Time.parse("2017-01-01 00:00:00Z")] }) + _(results.rows.first.to_h).must_equal({ id: id, +timestamps: [Time.parse("2016-12-30 00:00:00Z"), Time.parse("2016-12-31 00:00:00Z"), + Time.parse("2017-01-01 00:00:00Z")] }) end it "writes and queries array of timestamp" do id = SecureRandom.int64 - db.upsert table_name, { id: id, timestamps: [Time.parse("2016-12-30 00:00:00Z"), 
Time.parse("2016-12-31 00:00:00Z"), Time.parse("2017-01-01 00:00:00Z")] } + db.upsert table_name, + { id: id, +timestamps: [Time.parse("2016-12-30 00:00:00Z"), Time.parse("2016-12-31 00:00:00Z"), + Time.parse("2017-01-01 00:00:00Z")] } results = db.execute_query "SELECT id, timestamps FROM #{table_name} WHERE id = @id", params: { id: id } _(results).must_be_kind_of Google::Cloud::Spanner::Results _(results.fields.to_h).must_equal({ id: :INT64, timestamps: [:TIMESTAMP] }) - _(results.rows.first.to_h).must_equal({ id: id, timestamps: [Time.parse("2016-12-30 00:00:00Z"), Time.parse("2016-12-31 00:00:00Z"), Time.parse("2017-01-01 00:00:00Z")] }) + _(results.rows.first.to_h).must_equal({ id: id, +timestamps: [Time.parse("2016-12-30 00:00:00Z"), Time.parse("2016-12-31 00:00:00Z"), + Time.parse("2017-01-01 00:00:00Z")] }) end it "writes and reads array of timestamp with NULL" do id = SecureRandom.int64 - db.upsert table_name, { id: id, timestamps: [nil, Time.parse("2016-12-30 00:00:00Z"), Time.parse("2016-12-31 00:00:00Z"), Time.parse("2017-01-01 00:00:00Z")] } + db.upsert table_name, + { id: id, +timestamps: [nil, Time.parse("2016-12-30 00:00:00Z"), Time.parse("2016-12-31 00:00:00Z"), + Time.parse("2017-01-01 00:00:00Z")] } results = db.read table_name, [:id, :timestamps], keys: id _(results).must_be_kind_of Google::Cloud::Spanner::Results _(results.fields.to_h).must_equal({ id: :INT64, timestamps: [:TIMESTAMP] }) - _(results.rows.first.to_h).must_equal({ id: id, timestamps: [nil, Time.parse("2016-12-30 00:00:00Z"), Time.parse("2016-12-31 00:00:00Z"), Time.parse("2017-01-01 00:00:00Z")] }) + _(results.rows.first.to_h).must_equal({ id: id, +timestamps: [nil, Time.parse("2016-12-30 00:00:00Z"), Time.parse("2016-12-31 00:00:00Z"), + Time.parse("2017-01-01 00:00:00Z")] }) end it "writes and queries array of timestamp with NULL" do id = SecureRandom.int64 - db.upsert table_name, { id: id, timestamps: [nil, Time.parse("2016-12-30 00:00:00Z"), Time.parse("2016-12-31 
00:00:00Z"), Time.parse("2017-01-01 00:00:00Z")] } + db.upsert table_name, + { id: id, +timestamps: [nil, Time.parse("2016-12-30 00:00:00Z"), Time.parse("2016-12-31 00:00:00Z"), + Time.parse("2017-01-01 00:00:00Z")] } results = db.execute_query "SELECT id, timestamps FROM #{table_name} WHERE id = @id", params: { id: id } _(results).must_be_kind_of Google::Cloud::Spanner::Results _(results.fields.to_h).must_equal({ id: :INT64, timestamps: [:TIMESTAMP] }) - _(results.rows.first.to_h).must_equal({ id: id, timestamps: [nil, Time.parse("2016-12-30 00:00:00Z"), Time.parse("2016-12-31 00:00:00Z"), Time.parse("2017-01-01 00:00:00Z")] }) + _(results.rows.first.to_h).must_equal({ id: id, +timestamps: [nil, Time.parse("2016-12-30 00:00:00Z"), Time.parse("2016-12-31 00:00:00Z"), + Time.parse("2017-01-01 00:00:00Z")] }) end it "writes and reads empty array of timestamp" do diff --git a/google-cloud-spanner/acceptance/spanner/database_client_test.rb b/google-cloud-spanner/acceptance/spanner/database_client_test.rb index c17652a5dcb2..dd512c65434a 100644 --- a/google-cloud-spanner/acceptance/spanner/database_client_test.rb +++ b/google-cloud-spanner/acceptance/spanner/database_client_test.rb @@ -31,7 +31,7 @@ database: database_id job = client.create_database parent: instance_path, - create_statement: "CREATE DATABASE `#{database_id}`" + create_statement: "CREATE DATABASE `#{database_id}`" _(job).wont_be :done? unless emulator_enabled? job.wait_until_done! 
diff --git a/google-cloud-spanner/acceptance/spanner/database_test.rb b/google-cloud-spanner/acceptance/spanner/database_test.rb index ac0cf18db111..902d959d7f55 100644 --- a/google-cloud-spanner/acceptance/spanner/database_test.rb +++ b/google-cloud-spanner/acceptance/spanner/database_test.rb @@ -72,12 +72,16 @@ # Invalid retention_period = "0d" assert_raises Google::Cloud::InvalidArgumentError do - spanner.create_database instance_id, database_id, statements: ["ALTER DATABASE `#{database_id}` SET OPTIONS (version_retention_period = '#{retention_period}')"] + spanner.create_database instance_id, database_id, + statements: ["ALTER DATABASE `#{database_id}` SET OPTIONS \ + (version_retention_period = '#{retention_period}')"] end # Success retention_period = "7d" - job = spanner.create_database instance_id, database_id, statements: ["ALTER DATABASE `#{database_id}` SET OPTIONS (version_retention_period = '#{retention_period}')"] + job = spanner.create_database instance_id, database_id, + statements: ["ALTER DATABASE `#{database_id}` SET OPTIONS \ + (version_retention_period = '#{retention_period}')"] _(job).must_be_kind_of Google::Cloud::Spanner::Database::Job _(job).wont_be :done? unless emulator_enabled? job.wait_until_done! 
@@ -108,12 +112,14 @@ # Invalid retention_period = "0d" assert_raises Google::Cloud::InvalidArgumentError do - database.update statements: "ALTER DATABASE `#{database_id}` SET OPTIONS (version_retention_period = '#{retention_period}')" + database.update statements: "ALTER DATABASE `#{database_id}` SET OPTIONS \ + (version_retention_period = '#{retention_period}')" end # Success retention_period = "7d" - job2 = database.update statements: "ALTER DATABASE `#{database_id}` SET OPTIONS (version_retention_period = '#{retention_period}')" + job2 = database.update statements: "ALTER DATABASE `#{database_id}` SET OPTIONS \ + (version_retention_period = '#{retention_period}')" _(job2).must_be_kind_of Google::Cloud::Spanner::Database::Job _(job2).wont_be :done? unless emulator_enabled? job2_result = job2.wait_until_done! diff --git a/google-cloud-spanner/acceptance/spanner_helper.rb b/google-cloud-spanner/acceptance/spanner_helper.rb index 28d6425c9ed7..1663daaf9cee 100644 --- a/google-cloud-spanner/acceptance/spanner_helper.rb +++ b/google-cloud-spanner/acceptance/spanner_helper.rb @@ -31,7 +31,7 @@ # The result will be an integer between the values -9,223,372,036,854,775,808 # and 9,223,372,036,854,775,807. def SecureRandom.int64 - random_bytes(8).unpack("q")[0] + random_bytes(8).unpack1("q") end def emulator_enabled? @@ -40,10 +40,10 @@ def emulator_enabled? 
# Create shared spanner object so we don't create new for each test Google::Cloud::Spanner.configure do |config| - config.quota_project = "span-cloud-testing" + config.quota_project = "span-cloud-testing" end $spanner = Google::Cloud::Spanner.new -$spanner_db_admin = Google::Cloud::Spanner::Admin::Database.database_admin +$spanner_db_admin = Google::Cloud::Spanner::Admin::Database.database_admin module Acceptance ## @@ -59,7 +59,9 @@ module Acceptance # end # end class SpannerTest < Minitest::Test - attr_accessor :spanner, :spanner_client, :spanner_pg_client + attr_accessor :spanner + attr_accessor :spanner_client + attr_accessor :spanner_pg_client ## # Setup project based on available ENV variables @@ -81,7 +83,7 @@ def setup extend Minitest::Spec::DSL # Register this spec type for when :spanner is used. - register_spec_type(self) do |desc, *addl| + register_spec_type self do |_desc, *addl| addl.include? :spanner end @@ -99,7 +101,7 @@ def setup include Fixtures def assert_commit_response resp, commit_options = {} - _(resp.timestamp).must_be_kind_of Time + _(resp.timestamp).must_be_kind_of Time if commit_options[:return_commit_stats] _(resp.stats).must_be_kind_of Google::Cloud::Spanner::CommitResponse::CommitStats @@ -113,11 +115,10 @@ def assert_commit_response resp, commit_options = {} # Create buckets to be shared with all the tests require "date" -require "securerandom" $spanner_instance_id = "google-cloud-ruby-tests" # $spanner_database_id is already 22 characters, can only add 7 additional characters -$spanner_database_id = "gcruby-#{Date.today.strftime "%y%m%d"}-#{SecureRandom.hex(4)}" -$spanner_pg_database_id = "gcruby-pg-#{Date.today.strftime "%y%m%d"}-#{SecureRandom.hex(4)}" +$spanner_database_id = "gcruby-#{Date.today.strftime '%y%m%d'}-#{SecureRandom.hex 4}" +$spanner_pg_database_id = "gcruby-pg-#{Date.today.strftime '%y%m%d'}-#{SecureRandom.hex 4}" # Setup main instance and database for the tests fixture = Object.new @@ -126,30 +127,31 @@ def 
assert_commit_response resp, commit_options = {} instance = $spanner.instance $spanner_instance_id instance ||= begin - inst_job = $spanner.create_instance $spanner_instance_id, name: "google-cloud-ruby-tests", config: "regional-us-central1", nodes: 1 + inst_job = $spanner.create_instance $spanner_instance_id, name: "google-cloud-ruby-tests", +config: "regional-us-central1", nodes: 1 inst_job.wait_until_done! - fail GRPC::BadStatus.new(inst_job.error.code, inst_job.error.message) if inst_job.error? + raise GRPC::BadStatus.new(inst_job.error.code, inst_job.error.message) if inst_job.error? inst_job.instance end db_job = instance.create_database $spanner_database_id, statements: fixture.schema_ddl_statements db_job.wait_until_done! -fail GRPC::BadStatus.new(db_job.error.code, db_job.error.message) if db_job.error? +raise GRPC::BadStatus.new(db_job.error.code, db_job.error.message) if db_job.error? unless emulator_enabled? instance_path = $spanner_db_admin.instance_path project: $spanner.project_id, instance: $spanner_instance_id - db_job = $spanner_db_admin.create_database parent: instance_path, - create_statement: "CREATE DATABASE \"#{$spanner_pg_database_id}\"", - database_dialect: :POSTGRESQL + db_job = $spanner_db_admin.create_database parent: instance_path, + create_statement: "CREATE DATABASE \"#{$spanner_pg_database_id}\"", + database_dialect: :POSTGRESQL db_job.wait_until_done! - fail GRPC::BadStatus.new(db_job.error.code, db_job.error.message) if db_job.error? + raise GRPC::BadStatus.new(db_job.error.code, db_job.error.message) if db_job.error? db_path = $spanner_db_admin.database_path project: $spanner.project_id, - instance: $spanner_instance_id, - database: $spanner_pg_database_id + instance: $spanner_instance_id, + database: $spanner_pg_database_id db_job = $spanner_db_admin.update_database_ddl database: db_path, statements: fixture.schema_pg_ddl_statements db_job.wait_until_done! 
- fail GRPC::BadStatus.new(db_job.error.code, db_job.error.message) if db_job.error? + raise GRPC::BadStatus.new(db_job.error.code, db_job.error.message) if db_job.error? end # Create one client for all tests, to minimize resource usage @@ -166,19 +168,17 @@ def clean_up_spanner_objects $spanner_pg_client.close unless emulator_enabled? puts "Cleaning up instances databases and backups after spanner tests." - instance = $spanner.instance($spanner_instance_id) + instance = $spanner.instance $spanner_instance_id # Delete test database backups. unless emulator_enabled? - instance.backups(filter: "name:#{$spanner_database_id}").all.each do |backup| - backup.delete - end + instance.backups(filter: "name:#{$spanner_database_id}").all.each(&:delete) end # Delete test restored database. - restored_db = instance.database("restore-#{$spanner_database_id}") - restored_db.drop if restored_db -rescue => e + restored_db = instance.database "restore-#{$spanner_database_id}" + restored_db&.drop +rescue StandardError => e puts "Error while cleaning up instances and databases after spanner tests.\n\n#{e}" end diff --git a/google-cloud-spanner/lib/google/cloud/spanner/convert.rb b/google-cloud-spanner/lib/google/cloud/spanner/convert.rb index 639b7b5830ee..bb098e77bfe7 100644 --- a/google-cloud-spanner/lib/google/cloud/spanner/convert.rb +++ b/google-cloud-spanner/lib/google/cloud/spanner/convert.rb @@ -174,12 +174,13 @@ def field_for_object obj def grpc_type_for_field field return field.to_grpc_type if field.respond_to? 
:to_grpc_type - if Array === field + case field + when Array === field V1::Type.new( code: :ARRAY, array_element_type: grpc_type_for_field(field.first) ) - elsif :PG_NUMERIC === field + when :PG_NUMERIC == field V1::Type.new(code: :NUMERIC, type_annotation: :PG_NUMERIC) else V1::Type.new(code: field) diff --git a/google-cloud-spanner/test.rb b/google-cloud-spanner/test.rb new file mode 100644 index 000000000000..3a104bcd3617 --- /dev/null +++ b/google-cloud-spanner/test.rb @@ -0,0 +1,30 @@ +# require "google/cloud/spanner" +# require "google/cloud/spanner/admin/database" +# # Google::Cloud::Spanner.configure do |config| +# # config.quota_project = "span-cloud-testing" +# # end +# spanner = Google::Cloud::Spanner.new +# client = spanner.client "aseering-us-west2", "ruby-pg-test" +# sql_query = "SELECT * FROM accounts1 where num_test=$1" +# param_types = { p1: :PG_NUMERIC } +# params = { p1: BigDecimal("NaN") } +# client.execute(sql_query, params: params, types: param_types).rows.each do |row| +# puts row +# end +# client = spanner.client "aseering-us-west2", "ruby-gsql-test" +# sql_query = "SELECT * FROM test where nun_test + +require "google/cloud/spanner" +require "google/cloud/spanner/admin/database" + +# Google::Cloud::Spanner.configure do |config| +# config.quota_project = "span-cloud-testing" +# end + +database_admin_client = Google::Cloud::Spanner::Admin::Database.database_admin + +db_path = database_admin_client.database_path project: "span-cloud-testing", + instance: "aseering-us-west2", + database: "ruby-pg-test" +db = database_admin_client.get_database name: db_path +p db From 1b12304c41c89db433d5d4fe306a9d2e1b614ef8 Mon Sep 17 00:00:00 2001 From: Nivedha Date: Fri, 18 Feb 2022 17:18:25 +0530 Subject: [PATCH 14/19] Fix switch case type --- google-cloud-spanner/lib/google/cloud/spanner/convert.rb | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/google-cloud-spanner/lib/google/cloud/spanner/convert.rb 
b/google-cloud-spanner/lib/google/cloud/spanner/convert.rb index bb098e77bfe7..99bff0c322fb 100644 --- a/google-cloud-spanner/lib/google/cloud/spanner/convert.rb +++ b/google-cloud-spanner/lib/google/cloud/spanner/convert.rb @@ -175,12 +175,12 @@ def grpc_type_for_field field return field.to_grpc_type if field.respond_to? :to_grpc_type case field - when Array === field + when Array V1::Type.new( code: :ARRAY, array_element_type: grpc_type_for_field(field.first) ) - when :PG_NUMERIC == field + when :PG_NUMERIC V1::Type.new(code: :NUMERIC, type_annotation: :PG_NUMERIC) else V1::Type.new(code: field) From 779067d324592bc468e0243c748e93bd2630a2aa Mon Sep 17 00:00:00 2001 From: Nivedha Date: Fri, 18 Feb 2022 17:26:47 +0530 Subject: [PATCH 15/19] Fix rake tasks --- google-cloud-spanner/Rakefile | 54 +++++++++++++++++------------------ google-cloud-spanner/test.rb | 30 ------------------- 2 files changed, 27 insertions(+), 57 deletions(-) delete mode 100644 google-cloud-spanner/test.rb diff --git a/google-cloud-spanner/Rakefile b/google-cloud-spanner/Rakefile index 5ef5cc6c42ae..ad8214997515 100644 --- a/google-cloud-spanner/Rakefile +++ b/google-cloud-spanner/Rakefile @@ -33,33 +33,33 @@ end # Acceptance tests desc "Run the spanner acceptance tests." task :acceptance, :project, :keyfile do |t, args| - # project = args[:project] - # project ||= ENV["SPANNER_TEST_PROJECT"] || ENV["GCLOUD_TEST_PROJECT"] - # if ENV["SPANNER_EMULATOR_HOST"].nil? - # keyfile = args[:keyfile] - # keyfile ||= ENV["SPANNER_TEST_KEYFILE"] || ENV["GCLOUD_TEST_KEYFILE"] - # if keyfile - # keyfile = File.read keyfile - # else - # keyfile ||= ENV["SPANNER_TEST_KEYFILE_JSON"] || ENV["GCLOUD_TEST_KEYFILE_JSON"] - # end - # if project.nil? || keyfile.nil? - # fail "You must provide a project and keyfile. e.g. 
rake acceptance[test123, /path/to/keyfile.json] or SPANNER_TEST_PROJECT=test123 SPANNER_TEST_KEYFILE=/path/to/keyfile.json rake acceptance" - # end - # # clear any env var already set - # require "google/cloud/spanner/credentials" - # Google::Cloud::Spanner::Credentials.env_vars.each do |path| - # ENV[path] = nil - # end - # else - # if project.nil? - # fail "You must provide a project. e.g. rake acceptance[test123] or SPANNER_TEST_PROJECT=test123 rake acceptance" - # end - # keyfile = "{}" - # end - # # always overwrite when running tests - # ENV["SPANNER_PROJECT"] = project - # ENV["SPANNER_KEYFILE_JSON"] = keyfile + project = args[:project] + project ||= ENV["SPANNER_TEST_PROJECT"] || ENV["GCLOUD_TEST_PROJECT"] + if ENV["SPANNER_EMULATOR_HOST"].nil? + keyfile = args[:keyfile] + keyfile ||= ENV["SPANNER_TEST_KEYFILE"] || ENV["GCLOUD_TEST_KEYFILE"] + if keyfile + keyfile = File.read keyfile + else + keyfile ||= ENV["SPANNER_TEST_KEYFILE_JSON"] || ENV["GCLOUD_TEST_KEYFILE_JSON"] + end + if project.nil? || keyfile.nil? + fail "You must provide a project and keyfile. e.g. rake acceptance[test123, /path/to/keyfile.json] or SPANNER_TEST_PROJECT=test123 SPANNER_TEST_KEYFILE=/path/to/keyfile.json rake acceptance" + end + # clear any env var already set + require "google/cloud/spanner/credentials" + Google::Cloud::Spanner::Credentials.env_vars.each do |path| + ENV[path] = nil + end + else + if project.nil? + fail "You must provide a project. e.g. 
rake acceptance[test123] or SPANNER_TEST_PROJECT=test123 rake acceptance" + end + keyfile = "{}" + end + # always overwrite when running tests + ENV["SPANNER_PROJECT"] = project + ENV["SPANNER_KEYFILE_JSON"] = keyfile Rake::Task["acceptance:run"].invoke end diff --git a/google-cloud-spanner/test.rb b/google-cloud-spanner/test.rb deleted file mode 100644 index 3a104bcd3617..000000000000 --- a/google-cloud-spanner/test.rb +++ /dev/null @@ -1,30 +0,0 @@ -# require "google/cloud/spanner" -# require "google/cloud/spanner/admin/database" -# # Google::Cloud::Spanner.configure do |config| -# # config.quota_project = "span-cloud-testing" -# # end -# spanner = Google::Cloud::Spanner.new -# client = spanner.client "aseering-us-west2", "ruby-pg-test" -# sql_query = "SELECT * FROM accounts1 where num_test=$1" -# param_types = { p1: :PG_NUMERIC } -# params = { p1: BigDecimal("NaN") } -# client.execute(sql_query, params: params, types: param_types).rows.each do |row| -# puts row -# end -# client = spanner.client "aseering-us-west2", "ruby-gsql-test" -# sql_query = "SELECT * FROM test where nun_test - -require "google/cloud/spanner" -require "google/cloud/spanner/admin/database" - -# Google::Cloud::Spanner.configure do |config| -# config.quota_project = "span-cloud-testing" -# end - -database_admin_client = Google::Cloud::Spanner::Admin::Database.database_admin - -db_path = database_admin_client.database_path project: "span-cloud-testing", - instance: "aseering-us-west2", - database: "ruby-pg-test" -db = database_admin_client.get_database name: db_path -p db From b75580ef153d22c37487859d8c6a5ee72d0db33c Mon Sep 17 00:00:00 2001 From: Nivedha Date: Fri, 18 Feb 2022 17:30:22 +0530 Subject: [PATCH 16/19] Remove unused statement definitions --- acceptance/data/fixtures.rb | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/acceptance/data/fixtures.rb b/acceptance/data/fixtures.rb index 2981feeaec47..c8a51e8d177c 100644 --- a/acceptance/data/fixtures.rb +++ 
b/acceptance/data/fixtures.rb @@ -153,9 +153,7 @@ def schema_ddl_statements accounts_ddl_statement, lists_ddl_statement, items_ddl_statement, - commit_timestamp_test_ddl_statement, - numeric_pk_ddl_statement, - numeric_composite_pk_ddl_statement + commit_timestamp_test_ddl_statement ].compact end From 341d2db8c7e3befa454dd048bf0cfaf1c933eb38 Mon Sep 17 00:00:00 2001 From: Nivedha Date: Fri, 18 Feb 2022 18:00:47 +0530 Subject: [PATCH 17/19] Fix read test --- .../acceptance/spanner/client/read_test.rb | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/google-cloud-spanner/acceptance/spanner/client/read_test.rb b/google-cloud-spanner/acceptance/spanner/client/read_test.rb index d081fa3065eb..30eaf3b93e20 100644 --- a/google-cloud-spanner/acceptance/spanner/client/read_test.rb +++ b/google-cloud-spanner/acceptance/spanner/client/read_test.rb @@ -108,11 +108,11 @@ keys: [7]..[]).rows.map(&:to_h)).must_equal [{ id: 7 }, { id: 8 }, { id: 9 }, { id: 10 }, { id: 11 }, { id: 12 }] _(db.read(table_name, [:id], - keys: db.range([7], [], exclude_begin: true)).rows.map(&:to_h)).must_equal[{ id: 8 }, - { id: 9 }, - { id: 10 }, - { id: 11 }, - { id: 12 }] + keys: db.range([7], [], exclude_begin: true)).rows.map(&:to_h)).must_equal [{ id: 8 }, + { id: 9 }, + { id: 10 }, + { id: 11 }, + { id: 12 }] _(db.read(table_name, [:id], keys: []..[5]).rows.map(&:to_h)).must_equal [{ id: 1 }, { id: 2 }, { id: 3 }, { id: 4 }, { id: 5 }] _(db.read(table_name, [:id], From 32a8744e82f42b85412d5252778c8374e38ec970 Mon Sep 17 00:00:00 2001 From: Nivedha Date: Mon, 14 Mar 2022 17:03:31 +0530 Subject: [PATCH 18/19] remove quota-project config --- google-cloud-spanner/acceptance/spanner_helper.rb | 3 --- 1 file changed, 3 deletions(-) diff --git a/google-cloud-spanner/acceptance/spanner_helper.rb b/google-cloud-spanner/acceptance/spanner_helper.rb index 1663daaf9cee..2bf6328fd548 100644 --- a/google-cloud-spanner/acceptance/spanner_helper.rb +++ 
b/google-cloud-spanner/acceptance/spanner_helper.rb @@ -39,9 +39,6 @@ def emulator_enabled? end # Create shared spanner object so we don't create new for each test -Google::Cloud::Spanner.configure do |config| - config.quota_project = "span-cloud-testing" -end $spanner = Google::Cloud::Spanner.new $spanner_db_admin = Google::Cloud::Spanner::Admin::Database.database_admin From bd09b60e06889a5b6d4d66456f522d483ad34559 Mon Sep 17 00:00:00 2001 From: Nivedha Date: Thu, 24 Mar 2022 10:54:46 +0530 Subject: [PATCH 19/19] fix create table query for pg --- google-cloud-spanner/acceptance/spanner/database_client_test.rb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/google-cloud-spanner/acceptance/spanner/database_client_test.rb b/google-cloud-spanner/acceptance/spanner/database_client_test.rb index dd512c65434a..1b5440a3c3b9 100644 --- a/google-cloud-spanner/acceptance/spanner/database_client_test.rb +++ b/google-cloud-spanner/acceptance/spanner/database_client_test.rb @@ -91,7 +91,7 @@ database = client.get_database name: db_path _(database).must_be_kind_of Google::Cloud::Spanner::Admin::Database::V1::Database - add_users_table_sql = "CREATE TABLE users (id INT NOT NULL) PRIMARY KEY(id)" + add_users_table_sql = "CREATE TABLE users (id bigint NOT NULL, PRIMARY KEY(id));" job2 = client.update_database_ddl database: db_path, statements: [add_users_table_sql]