diff --git a/app/models/query_execution.rb b/app/models/query_execution.rb
index 92d3333..e20b889 100644
--- a/app/models/query_execution.rb
+++ b/app/models/query_execution.rb
@@ -69,6 +69,7 @@ def self.query_snowflake(connection, body, result, sample_callback)
       # then read in the first 100 rows from the file as sample rows
       # Note: snowflake unload currently has a max file size of 5 GB.
       connection.reconnect_on_failure do
+        body = body.strip.chomp(';')
         location = File.join(connection.unload_target, result.current_result_filename)
         sql = SNOWFLAKE_UNLOAD_SQL % {location: location, query: body, max_file_size: connection.max_file_size}
         row = connection.connection.fetch(sql).first
diff --git a/lib/redshift_pg/connection.rb b/lib/redshift_pg/connection.rb
index 030a287..b17a0c6 100644
--- a/lib/redshift_pg/connection.rb
+++ b/lib/redshift_pg/connection.rb
@@ -20,7 +20,7 @@ def reconnect_on_failure(&block)
     return yield
   rescue PG::UnableToSend, PG::ConnectionBad
     pg_connection.reset
-    retry
+    return yield # retry once
   end
 end
 
diff --git a/lib/snowflake_db/connection.rb b/lib/snowflake_db/connection.rb
index faa66b5..3acb6c3 100644
--- a/lib/snowflake_db/connection.rb
+++ b/lib/snowflake_db/connection.rb
@@ -20,7 +20,7 @@ def reconnect_on_failure(&block)
   rescue Sequel::DatabaseError => e
     raise unless connection_expired_error?(e)
     connection.reset
-    retry
+    return yield # retry once
   end
 end
 